Dataset schema (one row per bug fix):

Column              Type    Values
diff                string  lengths 262 to 553k
is_single_chunk     bool    2 classes
is_single_function  bool    1 class
buggy_function      string  lengths 20 to 391k
fixed_function      string  lengths 0 to 392k
diff --git a/src/main/java/fi/csc/microarray/client/visualisation/methods/gbrowser/track/CoverageAndSNPTrack.java b/src/main/java/fi/csc/microarray/client/visualisation/methods/gbrowser/track/CoverageAndSNPTrack.java index 0cc4d93f0..1837e3782 100644 --- a/src/main/java/fi/csc/microarray/client/visualisation/methods/gbrowser/track/CoverageAndSNPTrack.java +++ b/src/main/java/fi/csc/microarray/client/visualisation/methods/gbrowser/track/CoverageAndSNPTrack.java @@ -1,232 +1,232 @@ package fi.csc.microarray.client.visualisation.methods.gbrowser.track; import java.awt.Color; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeSet; import fi.csc.microarray.client.visualisation.methods.gbrowser.BaseStorage; import fi.csc.microarray.client.visualisation.methods.gbrowser.BaseStorage.Base; import fi.csc.microarray.client.visualisation.methods.gbrowser.BaseStorage.Nucleotide; import fi.csc.microarray.client.visualisation.methods.gbrowser.DataSource; import fi.csc.microarray.client.visualisation.methods.gbrowser.View; import fi.csc.microarray.client.visualisation.methods.gbrowser.drawable.Drawable; import fi.csc.microarray.client.visualisation.methods.gbrowser.drawable.RectDrawable; import fi.csc.microarray.client.visualisation.methods.gbrowser.fileFormat.ColumnType; import fi.csc.microarray.client.visualisation.methods.gbrowser.fileFormat.Strand; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.AreaResult; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.BpCoord; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.Chromosome; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.RegionContent; /** * Track for showing the coverage of reads. Profile is drawn by calculating * the number of nucleotides hitting each basepair location. Should look * similar to {@link IntensityTrack}, but is exact. Also shows where there are * large amounts of SNP's as bars chart. * * If reverseColor is not null, then strands are visualised separately and * SNPs are disabled. * */ public class CoverageAndSNPTrack extends Track { private long maxBpLength; private long minBpLength; private Color forwardColor; private Color reverseColor; private boolean highlightSNP = false; private BaseStorage theBaseCacheThang = new BaseStorage(); private DataSource refFile; private Collection<RegionContent> refReads = new TreeSet<RegionContent>(); private ReadpartDataProvider readpartProvider; public CoverageAndSNPTrack(View view, DataSource file, ReadpartDataProvider readpartProvider, DataSource refFile, Color forwardColor, Color reverseColor, long minBpLength, long maxBpLength) { super(view, file); this.forwardColor = forwardColor; this.reverseColor = reverseColor; this.minBpLength = minBpLength; this.maxBpLength = maxBpLength; this.readpartProvider = readpartProvider; setStrand(Strand.BOTH); this.refFile = refFile; } @Override public void initializeListener() { super.initializeListener(); // Add listener for reference file if (file != null && refFile != null) { view.getQueueManager().addResultListener(refFile, this); } } /** * Get drawables for a collection of reads. 
* * @return */ private Collection<Drawable> getDrawableReads(Strand dataStrand, Color color) { Collection<Drawable> drawables = getEmptyDrawCollection(); Chromosome chr = getView().getBpRegion().start.chr; // If SNP highlight mode is on, we need reference sequence data char[] refSeq = SeqBlockTrack.getReferenceArray(refReads, view, Strand.FORWARD); // Count nucleotides for each location theBaseCacheThang.getNucleotideCounts(readpartProvider.getReadparts(dataStrand), view, refSeq); // Count width of a single bp in pixels - float bpWidth = ((float) (getView().getWidth()) / getView().getBpRegion().getLength()); + float bpWidth = (float) (getView().getWidth() / getView().getBpRegionDouble().getLength()); // Count maximum y coordinate (the bottom of the track) int bottomlineY = 0; // prepare lines that make up the profile for drawing Iterator<Base> bases = theBaseCacheThang.iterator(); // draw lines for each bp region that has some items while (bases.hasNext()) { Base currentBase = bases.next(); float startX = getView().bpToTrackFloat(new BpCoord(currentBase.getBpLocation(), chr)); //Round together with position dividends to get the same result than where next block will start int endX = (int)(startX + bpWidth) - (int)startX; int profileY = currentBase.getCoverage(); drawables.add(new RectDrawable((int)startX, bottomlineY, endX, (int)(bottomlineY + profileY), color, null)); drawSNPBar(drawables, (int)bpWidth, bottomlineY, currentBase, (int)startX); } return drawables; } private void drawSNPBar(Collection<Drawable> drawables, int bpWidth, int bottomlineY, Base currentBase, int endX) { if (highlightSNP && currentBase.hasSignificantSNPs()) { int y = bottomlineY; for (Nucleotide nt : Nucleotide.values()) { int increment = currentBase.getSNPCounts()[nt.ordinal()]; if (increment > 0) { Color c = SeqBlockTrack.charColors[nt.ordinal()]; drawables.add(new RectDrawable(endX, y, bpWidth, increment, c, null)); y += increment; } } } } @Override public Collection<Drawable> getDrawables() { Collection<Drawable> drawables = getEmptyDrawCollection(); if (reverseColor == null) { // add drawables according to sum of both strands drawables.addAll(getDrawableReads(Strand.BOTH, forwardColor)); } else { // add drawables of both strands separately drawables.addAll(getDrawableReads(Strand.FORWARD, forwardColor)); drawables.addAll(getDrawableReads(Strand.REVERSED, reverseColor)); } return drawables; } public void processAreaResult(AreaResult areaResult) { // Do not listen to actual read data, because that is taken care by ReadpartDataProvider // "Spy" on reference sequence data, if available if (areaResult.getStatus().file == refFile) { this.refReads.addAll(areaResult.getContents()); } } @Override public Integer getHeight() { if (isVisible()) { //return super.getHeight(); return 100; } else { return 0; } } @Override public boolean isStretchable() { // stretchable unless hidden //return isVisible(); return false; } @Override public boolean isVisible() { // visible region is not suitable return (super.isVisible() && getView().getBpRegion().getLength() > minBpLength && getView().getBpRegion().getLength() <= maxBpLength); } @Override public Map<DataSource, Set<ColumnType>> requestedData() { HashMap<DataSource, Set<ColumnType>> datas = new HashMap<DataSource, Set<ColumnType>>(); datas.put(file, new HashSet<ColumnType>(Arrays.asList(new ColumnType[] { ColumnType.ID, ColumnType.SEQUENCE, ColumnType.STRAND, ColumnType.QUALITY, ColumnType.CIGAR}))); return datas; } @Override public boolean isConcised() { return false; } /** * 
@see View#drawView */ @Override public boolean canExpandDrawables() { return true; } public void enableSNPHighlight() { // turn on highlighting mode highlightSNP = true; } public void disableSNPHighlight() { highlightSNP = false; } }
is_single_chunk: true
is_single_function: true
private Collection<Drawable> getDrawableReads(Strand dataStrand, Color color) { Collection<Drawable> drawables = getEmptyDrawCollection(); Chromosome chr = getView().getBpRegion().start.chr; // If SNP highlight mode is on, we need reference sequence data char[] refSeq = SeqBlockTrack.getReferenceArray(refReads, view, Strand.FORWARD); // Count nucleotides for each location theBaseCacheThang.getNucleotideCounts(readpartProvider.getReadparts(dataStrand), view, refSeq); // Count width of a single bp in pixels float bpWidth = ((float) (getView().getWidth()) / getView().getBpRegion().getLength()); // Count maximum y coordinate (the bottom of the track) int bottomlineY = 0; // prepare lines that make up the profile for drawing Iterator<Base> bases = theBaseCacheThang.iterator(); // draw lines for each bp region that has some items while (bases.hasNext()) { Base currentBase = bases.next(); float startX = getView().bpToTrackFloat(new BpCoord(currentBase.getBpLocation(), chr)); //Round together with position dividends to get the same result than where next block will start int endX = (int)(startX + bpWidth) - (int)startX; int profileY = currentBase.getCoverage(); drawables.add(new RectDrawable((int)startX, bottomlineY, endX, (int)(bottomlineY + profileY), color, null)); drawSNPBar(drawables, (int)bpWidth, bottomlineY, currentBase, (int)startX); } return drawables; }
private Collection<Drawable> getDrawableReads(Strand dataStrand, Color color) { Collection<Drawable> drawables = getEmptyDrawCollection(); Chromosome chr = getView().getBpRegion().start.chr; // If SNP highlight mode is on, we need reference sequence data char[] refSeq = SeqBlockTrack.getReferenceArray(refReads, view, Strand.FORWARD); // Count nucleotides for each location theBaseCacheThang.getNucleotideCounts(readpartProvider.getReadparts(dataStrand), view, refSeq); // Count width of a single bp in pixels float bpWidth = (float) (getView().getWidth() / getView().getBpRegionDouble().getLength()); // Count maximum y coordinate (the bottom of the track) int bottomlineY = 0; // prepare lines that make up the profile for drawing Iterator<Base> bases = theBaseCacheThang.iterator(); // draw lines for each bp region that has some items while (bases.hasNext()) { Base currentBase = bases.next(); float startX = getView().bpToTrackFloat(new BpCoord(currentBase.getBpLocation(), chr)); //Round together with position dividends to get the same result than where next block will start int endX = (int)(startX + bpWidth) - (int)startX; int profileY = currentBase.getCoverage(); drawables.add(new RectDrawable((int)startX, bottomlineY, endX, (int)(bottomlineY + profileY), color, null)); drawSNPBar(drawables, (int)bpWidth, bottomlineY, currentBase, (int)startX); } return drawables; }
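The fix in this row swaps the width computation from a float division against the integral length of `getBpRegion()` to a double-precision division against `getBpRegionDouble().getLength()`, narrowing to float only at the end. A minimal standalone sketch of that difference; the class name and all numeric values below are hypothetical, not taken from this codebase:

```java
public class BpWidthDemo {
    public static void main(String[] args) {
        int viewWidthPx = 800;                      // hypothetical track width in pixels
        long regionLengthBp = 1_000_003L;           // integral length, as from getBpRegion()
        double regionLengthDouble = 1_000_003.25;   // double-precision length, as from getBpRegionDouble()

        // Buggy pattern: only the numerator is widened, so the division runs
        // in float against a long denominator.
        float buggy = ((float) viewWidthPx) / regionLengthBp;

        // Fixed pattern: divide in double precision first, cast to float last,
        // matching the double-based region length used elsewhere in the view.
        float fixed = (float) (viewWidthPx / regionLengthDouble);

        System.out.println("buggy bpWidth = " + buggy);
        System.out.println("fixed bpWidth = " + fixed);
    }
}
```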
diff --git a/core-api/src/main/java/no/schibstedsok/searchportal/view/velocity/UrlEncodeDirective.java b/core-api/src/main/java/no/schibstedsok/searchportal/view/velocity/UrlEncodeDirective.java index f4e539a0a..aa8365de7 100755 --- a/core-api/src/main/java/no/schibstedsok/searchportal/view/velocity/UrlEncodeDirective.java +++ b/core-api/src/main/java/no/schibstedsok/searchportal/view/velocity/UrlEncodeDirective.java @@ -1,85 +1,85 @@ // Copyright (2006) Schibsted Søk AS /* * UrlEncodeDirective.java * * Created on February 6, 2006, 3:45 PM * */ package no.schibstedsok.searchportal.view.velocity; import java.io.IOException; import java.io.Writer; import java.net.URLEncoder; import org.apache.log4j.Logger; import org.apache.velocity.context.InternalContextAdapter; import org.apache.velocity.exception.MethodInvocationException; import org.apache.velocity.exception.ParseErrorException; import org.apache.velocity.exception.ResourceNotFoundException; import org.apache.velocity.runtime.directive.Directive; import org.apache.velocity.runtime.parser.Token; import org.apache.velocity.runtime.parser.node.Node; /** * * A velocity directive to do url encoding. * * <code> * #urlencode('&q=hej') * #urlencode('&q=hej', 'iso-8859-1') * </code> * * The default charset is utf-8. * * @author magnuse */ public final class UrlEncodeDirective extends Directive { private static final Logger LOG = Logger.getLogger(UrlEncodeDirective.class); private static final String NAME = "urlencode"; private static final String DEFAULT_CHARSET = "utf-8"; /** * {@inheritDoc} */ public String getName() { return NAME; } /** * {@inheritDoc} */ public int getType() { return LINE; } /** * {@inheritDoc} */ public boolean render(final InternalContextAdapter context, final Writer writer, final Node node) throws IOException, ResourceNotFoundException, ParseErrorException, MethodInvocationException { if (node.jjtGetNumChildren() < 1) { rsvc.error("#" + getName() + " - missing argument"); return false; } String charset = DEFAULT_CHARSET; - final String input = node.jjtGetChild(0).value(context).toString(); + final String input = node.jjtGetChild(0).value(context) != null ? node.jjtGetChild(0).value(context).toString() : ""; if (node.jjtGetNumChildren() == 2) { charset = node.jjtGetChild(1).value(context).toString(); } writer.write(URLEncoder.encode(input, charset)); final Token lastToken = node.getLastToken(); if (lastToken.image.endsWith("\n")) { writer.write("\n"); } return true; } }
is_single_chunk: true
is_single_function: true
public boolean render(final InternalContextAdapter context, final Writer writer, final Node node) throws IOException, ResourceNotFoundException, ParseErrorException, MethodInvocationException { if (node.jjtGetNumChildren() < 1) { rsvc.error("#" + getName() + " - missing argument"); return false; } String charset = DEFAULT_CHARSET; final String input = node.jjtGetChild(0).value(context).toString(); if (node.jjtGetNumChildren() == 2) { charset = node.jjtGetChild(1).value(context).toString(); } writer.write(URLEncoder.encode(input, charset)); final Token lastToken = node.getLastToken(); if (lastToken.image.endsWith("\n")) { writer.write("\n"); } return true; }
public boolean render(final InternalContextAdapter context, final Writer writer, final Node node) throws IOException, ResourceNotFoundException, ParseErrorException, MethodInvocationException { if (node.jjtGetNumChildren() < 1) { rsvc.error("#" + getName() + " - missing argument"); return false; } String charset = DEFAULT_CHARSET; final String input = node.jjtGetChild(0).value(context) != null ? node.jjtGetChild(0).value(context).toString() : ""; if (node.jjtGetNumChildren() == 2) { charset = node.jjtGetChild(1).value(context).toString(); } writer.write(URLEncoder.encode(input, charset)); final Token lastToken = node.getLastToken(); if (lastToken.image.endsWith("\n")) { writer.write("\n"); } return true; }
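The one-line fix here guards against `node.jjtGetChild(0).value(context)` returning null (a Velocity reference that resolves to nothing) before calling `toString()` on it. The pattern in isolation, as a runnable sketch; the `safeToString` helper is illustrative and not part of the Velocity API:

```java
public class NullGuardDemo {
    // Mirrors the ternary added in the fix: fall back to "" when the
    // evaluated argument is null instead of throwing a NullPointerException.
    static String safeToString(Object value) {
        return value != null ? value.toString() : "";
    }

    public static void main(String[] args) {
        Object resolved = null; // e.g. an undefined template variable
        System.out.println("[" + safeToString(resolved) + "]");  // prints [] rather than crashing
        System.out.println("[" + safeToString("&q=hej") + "]");  // prints [&q=hej]
    }
}
```

Note that the fixed line still evaluates `value(context)` twice; caching the result in a local variable would avoid the repeated evaluation, with identical behavior for side-effect-free references.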
diff --git a/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/p2/artifact/repository/CompositeArtifactRepository.java b/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/p2/artifact/repository/CompositeArtifactRepository.java index 2efd93960..89394ab28 100644 --- a/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/p2/artifact/repository/CompositeArtifactRepository.java +++ b/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/p2/artifact/repository/CompositeArtifactRepository.java @@ -1,461 +1,461 @@ /******************************************************************************* * Copyright (c) 2008 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.equinox.internal.p2.artifact.repository; import java.io.*; import java.net.URI; import java.net.URISyntaxException; import java.util.*; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; import org.eclipse.core.runtime.*; import org.eclipse.equinox.internal.p2.core.helpers.LogHelper; import org.eclipse.equinox.internal.p2.core.helpers.ServiceHelper; import org.eclipse.equinox.internal.p2.persistence.CompositeRepositoryIO; import org.eclipse.equinox.internal.p2.persistence.CompositeRepositoryIO.CompositeRepositoryState; import org.eclipse.equinox.internal.provisional.p2.artifact.repository.*; import org.eclipse.equinox.internal.provisional.p2.core.ProvisionException; import org.eclipse.equinox.internal.provisional.p2.core.repository.ICompositeRepository; import org.eclipse.equinox.internal.provisional.p2.core.repository.IRepository; import org.eclipse.equinox.internal.provisional.p2.metadata.IArtifactKey; import org.eclipse.equinox.internal.provisional.spi.p2.artifact.repository.AbstractArtifactRepository; public class CompositeArtifactRepository extends AbstractArtifactRepository implements IArtifactRepository, ICompositeRepository { static final public String REPOSITORY_TYPE = CompositeArtifactRepository.class.getName(); static final private Integer REPOSITORY_VERSION = new Integer(1); static final public String XML_EXTENSION = ".xml"; //$NON-NLS-1$ static final public String JAR_EXTENSION = ".jar"; //$NON-NLS-1$ static final public String CONTENT_FILENAME = "compositeArtifacts"; //$NON-NLS-1$ private ArrayList childrenURIs = new ArrayList(); private IArtifactRepositoryManager getManager() { return (IArtifactRepositoryManager) ServiceHelper.getService(Activator.getContext(), IArtifactRepositoryManager.class.getName()); } /* * This is only called by the parser when loading a repository. 
*/ public CompositeArtifactRepository(CompositeRepositoryState state) { super(state.Name, state.Type, state.Version, null, state.Description, state.Provider, state.Properties); for (int i = 0; i < state.Children.length; i++) { //duplicate checking if (!childrenURIs.contains(state.Children[i])) childrenURIs.add(state.Children[i]); } } public CompositeArtifactRepository(String repositoryName, URI location, Map properties) { super(repositoryName, REPOSITORY_TYPE, REPOSITORY_VERSION.toString(), location, null, null, properties); initializeAfterLoad(location); save(); } public static URI getActualLocation(URI base, boolean compress) { return getActualLocation(base, compress ? JAR_EXTENSION : XML_EXTENSION); } private static URI getActualLocation(URI base, String extension) { final String name = CONTENT_FILENAME + extension; String spec = base.toString(); if (spec.endsWith(name)) return base; if (spec.endsWith("/")) //$NON-NLS-1$ spec += name; else spec += "/" + name; //$NON-NLS-1$ try { return new URI(spec); } catch (URISyntaxException e) { return null; } } private boolean isLocal() { return "file".equalsIgnoreCase(location.getScheme()); //$NON-NLS-1$ } public boolean isModifiable() { return isLocal(); } // use this method to setup any transient fields etc after the object has been restored from a stream public synchronized void initializeAfterLoad(URI location) { this.location = location; } public void addChild(URI childURI) { if (!childrenURIs.contains(childURI)) { childrenURIs.add(childURI); save(); } } public boolean addChild(URI childURI, String comparatorID) { if (isSane(childURI, comparatorID)) { addChild(childURI); //Add was successful return true; } //Add was not successful return false; } public void removeChild(URI childURI) { childrenURIs.remove(childURI); save(); } public void removeAllChildren() { childrenURIs.clear(); save(); } public ArrayList getChildren() { return childrenURIs; } /** * Composite repositories should be unable to directly modify their sub repositories */ public synchronized void addDescriptor(IArtifactDescriptor descriptor) { throw new UnsupportedOperationException(Messages.exception_unsupportedAddToComposite); } /** * Composite repositories should be unable to directly modify their sub repositories */ public void addDescriptors(IArtifactDescriptor[] descriptors) { throw new UnsupportedOperationException(Messages.exception_unsupportedAddToComposite); } /** * Composite repositories should be unable to directly modify their sub repositories */ public void removeDescriptor(IArtifactKey key) { throw new UnsupportedOperationException(Messages.exception_unsupportedRemoveFromComposite); } /** * Composite repositories should be unable to directly modify their sub repositories */ public void removeDescriptor(IArtifactDescriptor descriptor) { throw new UnsupportedOperationException(Messages.exception_unsupportedRemoveFromComposite); } /** * Composite repositories should be unable to directly modify their sub repositories */ public synchronized void removeAll() { throw new UnsupportedOperationException(Messages.exception_unsupportedRemoveFromComposite); } public boolean contains(IArtifactKey key) { boolean contains = false; for (Iterator repositoryIterator = childrenURIs.iterator(); repositoryIterator.hasNext() && !contains;) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); contains = current.contains(key); } catch (ProvisionException e) { //repository failed to load. 
fall through LogHelper.log(e); } } return contains; } public boolean contains(IArtifactDescriptor descriptor) { boolean contains = false; for (Iterator repositoryIterator = childrenURIs.iterator(); repositoryIterator.hasNext() && !contains;) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); contains = current.contains(descriptor); } catch (ProvisionException e) { //repository failed to load. fall through LogHelper.log(e); } } return contains; } public IArtifactDescriptor[] getArtifactDescriptors(IArtifactKey key) { ArrayList result = new ArrayList(); for (Iterator repositoryIterator = childrenURIs.iterator(); repositoryIterator.hasNext();) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); IArtifactDescriptor[] tempResult = current.getArtifactDescriptors(key); for (int i = 0; i < tempResult.length; i++) //duplicate checking if (!result.contains(tempResult[i])) result.add(tempResult[i]); } catch (ProvisionException e) { //repository failed to load. fall through LogHelper.log(e); } } return (IArtifactDescriptor[]) result.toArray(new IArtifactDescriptor[result.size()]); } public IArtifactKey[] getArtifactKeys() { ArrayList result = new ArrayList(); for (Iterator repositoryIterator = childrenURIs.iterator(); repositoryIterator.hasNext();) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); IArtifactKey[] tempResult = current.getArtifactKeys(); for (int i = 0; i < tempResult.length; i++) //duplicate checking if (!result.contains(tempResult[i])) result.add(tempResult[i]); } catch (ProvisionException e) { //repository failed to load. fall through LogHelper.log(e); } } return (IArtifactKey[]) result.toArray(new IArtifactKey[result.size()]); } public IStatus getArtifacts(IArtifactRequest[] requests, IProgressMonitor monitor) { SubMonitor subMonitor = SubMonitor.convert(monitor, requests.length); MultiStatus multiStatus = new MultiStatus(Activator.ID, IStatus.OK, Messages.message_childrenRepos, null); for (Iterator repositoryIterator = childrenURIs.iterator(); repositoryIterator.hasNext() && requests.length > 0;) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); IArtifactRequest[] applicable = getRequestsForRepository(current, requests); IStatus dlStatus = current.getArtifacts(applicable, subMonitor.newChild(requests.length)); multiStatus.add(dlStatus); if (dlStatus.getSeverity() == IStatus.CANCEL) return multiStatus; requests = filterUnfetched(requests); subMonitor.setWorkRemaining(requests.length); } catch (ProvisionException e) { //repository failed the load. Fall through. LogHelper.log(e); } } return multiStatus; } public IStatus getArtifact(IArtifactDescriptor descriptor, OutputStream destination, IProgressMonitor monitor) { SubMonitor subMonitor = SubMonitor.convert(monitor, childrenURIs.size()); Iterator repositoryIterator = childrenURIs.iterator(); MultiStatus multiStatus = new MultiStatus(Activator.ID, IStatus.OK, Messages.message_childrenRepos, null); while (repositoryIterator.hasNext()) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); IStatus status = current.getArtifact(descriptor, destination, subMonitor.newChild(1)); if (status.isOK()) return status; //getArtifact failed multiStatus.add(status); } catch (ProvisionException e) { //repository failed the load. Fall through. 
LogHelper.log(e); } } return multiStatus; } public IStatus getRawArtifact(IArtifactDescriptor descriptor, OutputStream destination, IProgressMonitor monitor) { SubMonitor subMonitor = SubMonitor.convert(monitor, childrenURIs.size()); Iterator repositoryIterator = childrenURIs.iterator(); MultiStatus multiStatus = new MultiStatus(Activator.ID, IStatus.OK, Messages.message_childrenRepos, null); while (repositoryIterator.hasNext()) { try { URI currentURI = (URI) repositoryIterator.next(); IArtifactRepository current = load(currentURI); IStatus status = current.getRawArtifact(descriptor, destination, subMonitor.newChild(1)); if (status.isOK()) return status; //getRawArtifact failed multiStatus.add(status); } catch (ProvisionException e) { //repository failed the load. Fall through. LogHelper.log(e); } } return multiStatus; } private IArtifactRequest[] filterUnfetched(IArtifactRequest[] requests) { ArrayList filteredRequests = new ArrayList(); for (int i = 0; i < requests.length; i++) { if (requests[i].getResult() == null || !requests[i].getResult().isOK()) { filteredRequests.add(requests[i]); } } IArtifactRequest[] filteredArtifactRequests = new IArtifactRequest[filteredRequests.size()]; filteredRequests.toArray(filteredArtifactRequests); return filteredArtifactRequests; } private IArtifactRequest[] getRequestsForRepository(IArtifactRepository repository, IArtifactRequest[] requests) { ArrayList applicable = new ArrayList(); for (int i = 0; i < requests.length; i++) { if (repository.contains(requests[i].getArtifactKey())) applicable.add(requests[i]); } return (IArtifactRequest[]) applicable.toArray(new IArtifactRequest[applicable.size()]); } public void save() { boolean compress = "true".equalsIgnoreCase((String) properties.get(PROP_COMPRESSED)); //$NON-NLS-1$ save(compress); } public void save(boolean compress) { assertModifiable(); OutputStream os = null; try { try { URI actualLocation = getActualLocation(location, false); File artifactsFile = URIUtil.toFile(actualLocation); File jarFile = URIUtil.toFile(getActualLocation(location, true)); if (!compress) { if (jarFile.exists()) { jarFile.delete(); } if (!artifactsFile.exists()) { // create parent folders artifactsFile.getParentFile().mkdirs(); } os = new FileOutputStream(artifactsFile); } else { if (artifactsFile.exists()) { artifactsFile.delete(); } if (!jarFile.exists()) { if (!jarFile.getParentFile().exists()) jarFile.getParentFile().mkdirs(); jarFile.createNewFile(); } JarOutputStream jOs = new JarOutputStream(new FileOutputStream(jarFile)); jOs.putNextEntry(new JarEntry(new Path(artifactsFile.getAbsolutePath()).lastSegment())); os = jOs; } super.setProperty(IRepository.PROP_TIMESTAMP, Long.toString(System.currentTimeMillis())); new CompositeRepositoryIO().write(this, os); } catch (IOException e) { // TODO proper exception handling e.printStackTrace(); } finally { if (os != null) os.close(); } } catch (IOException e) { e.printStackTrace(); } } private IArtifactRepository load(URI repoURI) throws ProvisionException { boolean loaded = getManager().contains(repoURI); IArtifactRepository repo = getManager().loadRepository(repoURI, null); if (!loaded) { //set enabled to false so repositories do not get polled twice getManager().setEnabled(repoURI, false); //set repository to system to hide from users getManager().setRepositoryProperty(repoURI, IRepository.PROP_SYSTEM, String.valueOf(true)); } return repo; } /** * A wrapper method that ensures a specified repository is compared against all children. 
* @param toCheckURI * @param comparatorID * @return true if toCheckRepo is consistent, false if toCheckRepo contains an equal descriptor to that of s child and they refer to different artifacts on disk. */ private boolean isSane(URI toCheckURI, String comparatorID) { return isSane(toCheckURI, comparatorID, 0); } /** * A method to check if the content of a repository is consistent with the other children by * comparing content using the artifactComparator specified by the comparatorID * startingIndex is used for optimization purposes (ensuring no redundant or self checks are made) * @param toCheckURI * @param comparatorID * @param startingIndex * @return true if toCheckRepo is consistent, false if toCheckRepo contains an equal descriptor to that of s child and they refer to different artifacts on disk. */ private boolean isSane(URI toCheckURI, String comparatorID, int startingIndex) { IArtifactRepository toCheckRepo = null; try { toCheckRepo = load(toCheckURI); } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); return false; } IArtifactComparator comparator = ArtifactComparatorFactory.getArtifactComparator(comparatorID); for (int m = startingIndex; m < childrenURIs.size(); m++) { try { URI currentURI = (URI) childrenURIs.get(m); IArtifactRepository current = load(currentURI); if (!current.equals(toCheckRepo)) { IArtifactKey[] toCheckKeys = toCheckRepo.getArtifactKeys(); for (int i = 0; i < toCheckKeys.length; i++) { IArtifactKey key = toCheckKeys[i]; if (!current.contains(key)) continue; IArtifactDescriptor[] toCheckDescriptors = toCheckRepo.getArtifactDescriptors(key); IArtifactDescriptor[] currentDescriptors = current.getArtifactDescriptors(key); for (int j = 0; j < toCheckDescriptors.length; j++) { if (!current.contains(toCheckDescriptors[j])) continue; for (int k = 0; k < currentDescriptors.length; k++) { if (currentDescriptors[k].equals(toCheckDescriptors[j])) { - IStatus compareResult = ArtifactComparatorFactory.getArtifactComparator(comparatorID).compare(current, currentDescriptors[k], toCheckRepo, toCheckDescriptors[j]); + IStatus compareResult = comparator.compare(current, currentDescriptors[k], toCheckRepo, toCheckDescriptors[j]); if (!compareResult.isOK()) { LogHelper.log(compareResult); return false; } break; } } } } } } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); } } return true; } /** * A method that verifies that all children with matching artifact descriptors contain the same set of bytes * The verification is done using the artifactComparator specified by comparatorID * Assumes more valuable logging and output is the responsibility of the artifactComparator implementation. * @param comparatorID * @returns true if the repository is consistent, false if two equal descriptors refer to different artifacts on disk. */ public boolean validate(String comparatorID) { for (int i = 0; i < childrenURIs.size(); i++) { if (!isSane((URI) childrenURIs.get(i), comparatorID, i + 1)) return false; } return true; } }
is_single_chunk: true
is_single_function: true
private boolean isSane(URI toCheckURI, String comparatorID, int startingIndex) { IArtifactRepository toCheckRepo = null; try { toCheckRepo = load(toCheckURI); } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); return false; } IArtifactComparator comparator = ArtifactComparatorFactory.getArtifactComparator(comparatorID); for (int m = startingIndex; m < childrenURIs.size(); m++) { try { URI currentURI = (URI) childrenURIs.get(m); IArtifactRepository current = load(currentURI); if (!current.equals(toCheckRepo)) { IArtifactKey[] toCheckKeys = toCheckRepo.getArtifactKeys(); for (int i = 0; i < toCheckKeys.length; i++) { IArtifactKey key = toCheckKeys[i]; if (!current.contains(key)) continue; IArtifactDescriptor[] toCheckDescriptors = toCheckRepo.getArtifactDescriptors(key); IArtifactDescriptor[] currentDescriptors = current.getArtifactDescriptors(key); for (int j = 0; j < toCheckDescriptors.length; j++) { if (!current.contains(toCheckDescriptors[j])) continue; for (int k = 0; k < currentDescriptors.length; k++) { if (currentDescriptors[k].equals(toCheckDescriptors[j])) { IStatus compareResult = ArtifactComparatorFactory.getArtifactComparator(comparatorID).compare(current, currentDescriptors[k], toCheckRepo, toCheckDescriptors[j]); if (!compareResult.isOK()) { LogHelper.log(compareResult); return false; } break; } } } } } } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); } } return true; }
private boolean isSane(URI toCheckURI, String comparatorID, int startingIndex) { IArtifactRepository toCheckRepo = null; try { toCheckRepo = load(toCheckURI); } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); return false; } IArtifactComparator comparator = ArtifactComparatorFactory.getArtifactComparator(comparatorID); for (int m = startingIndex; m < childrenURIs.size(); m++) { try { URI currentURI = (URI) childrenURIs.get(m); IArtifactRepository current = load(currentURI); if (!current.equals(toCheckRepo)) { IArtifactKey[] toCheckKeys = toCheckRepo.getArtifactKeys(); for (int i = 0; i < toCheckKeys.length; i++) { IArtifactKey key = toCheckKeys[i]; if (!current.contains(key)) continue; IArtifactDescriptor[] toCheckDescriptors = toCheckRepo.getArtifactDescriptors(key); IArtifactDescriptor[] currentDescriptors = current.getArtifactDescriptors(key); for (int j = 0; j < toCheckDescriptors.length; j++) { if (!current.contains(toCheckDescriptors[j])) continue; for (int k = 0; k < currentDescriptors.length; k++) { if (currentDescriptors[k].equals(toCheckDescriptors[j])) { IStatus compareResult = comparator.compare(current, currentDescriptors[k], toCheckRepo, toCheckDescriptors[j]); if (!compareResult.isOK()) { LogHelper.log(compareResult); return false; } break; } } } } } } catch (ProvisionException e) { //repository failed the load. LogHelper.log(e); } } return true; }
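This fix hoists the comparator lookup out of the nested loops: the buggy version created the `comparator` local once but then ignored it, calling `ArtifactComparatorFactory.getArtifactComparator(comparatorID)` again for every matching descriptor pair. A minimal sketch of the look-up-once pattern; the names and the call counter are illustrative, not from the p2 codebase:

```java
import java.util.List;

public class CachedLookupDemo {
    interface Matcher { boolean matches(String a, String b); }

    static int factoryCalls = 0;

    // Stands in for ArtifactComparatorFactory.getArtifactComparator(comparatorID).
    static Matcher expensiveFactory() {
        factoryCalls++;
        return String::equalsIgnoreCase;
    }

    public static void main(String[] args) {
        List<String> descriptors = List.of("a", "b", "c");
        Matcher matcher = expensiveFactory(); // fixed pattern: look up once, before the loop
        for (String d : descriptors) {
            matcher.matches(d, "A");          // buggy pattern would call expensiveFactory() here
        }
        System.out.println("factory lookups: " + factoryCalls); // 1, not descriptors.size()
    }
}
```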
diff --git a/src/main/java/de/lessvoid/nifty/effects/impl/Move.java b/src/main/java/de/lessvoid/nifty/effects/impl/Move.java index bb5654c0..d37af916 100644 --- a/src/main/java/de/lessvoid/nifty/effects/impl/Move.java +++ b/src/main/java/de/lessvoid/nifty/effects/impl/Move.java @@ -1,130 +1,130 @@ package de.lessvoid.nifty.effects.impl; import java.util.logging.Logger; import de.lessvoid.nifty.Nifty; import de.lessvoid.nifty.effects.EffectImpl; import de.lessvoid.nifty.effects.EffectProperties; import de.lessvoid.nifty.effects.Falloff; import de.lessvoid.nifty.elements.Element; import de.lessvoid.nifty.render.NiftyRenderEngine; import de.lessvoid.nifty.tools.TargetElementResolver; /** * Move - move stuff around. * @author void */ public class Move implements EffectImpl { private Logger log = Logger.getLogger(Move.class.getName()); private static final String LEFT = "left"; private static final String RIGHT = "right"; private static final String TOP = "top"; private static final String BOTTOM = "bottom"; private String direction; private long offset = 0; private long startOffset = 0; private int offsetDir = 0; private float offsetY; private float startOffsetY; private int startOffsetX; private float offsetX; private boolean withTarget = false; private boolean fromOffset = false; private boolean toOffset = false; public void activate(final Nifty nifty, final Element element, final EffectProperties parameter) { + String mode = parameter.getProperty("mode"); direction = parameter.getProperty("direction"); if (LEFT.equals(direction)) { offset = element.getX() + element.getWidth(); } else if (RIGHT.equals(direction)) { - offset = element.getX() + element.getWidth(); + offset = nifty.getRenderEngine().getWidth() - element.getX(); } else if (TOP.equals(direction)) { offset = element.getY() + element.getHeight(); } else if (BOTTOM.equals(direction)) { - offset = element.getY() + element.getHeight(); + offset = nifty.getRenderEngine().getHeight() - element.getY(); } else { offset = 0; } - String mode = parameter.getProperty("mode"); if ("out".equals(mode)) { startOffset = 0; offsetDir = -1; withTarget = false; } else if ("in".equals(mode)) { startOffset = offset; offsetDir = 1; withTarget = false; } else if ("fromPosition".equals(mode)) { withTarget = true; } else if ("toPosition".equals(mode)) { withTarget = true; } else if ("fromOffset".equals(mode)) { fromOffset = true; startOffsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); startOffsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); offsetX = Math.abs(startOffsetX); offsetY = Math.abs(startOffsetY); } else if ("toOffset".equals(mode)) { toOffset = true; startOffsetX = 0; startOffsetY = 0; offsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); offsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); } String target = parameter.getProperty("targetElement"); if (target != null) { TargetElementResolver resolver = new TargetElementResolver(nifty.getCurrentScreen(), element); Element targetElement = resolver.resolve(target); if (targetElement == null) { log.warning("move effect for element [" + element.getId() + "] was unable to find target element [" + target + "] at screen [" + nifty.getCurrentScreen().getScreenId() + "]"); return; } if ("fromPosition".equals(mode)) { startOffsetX = targetElement.getX() - element.getX(); startOffsetY = targetElement.getY() - element.getY(); offsetX = -(targetElement.getX() - element.getX()); offsetY = -(targetElement.getY() - element.getY()); } else if 
("toPosition".equals(mode)) { startOffsetX = 0; startOffsetY = 0; offsetX = (targetElement.getX() - element.getX()); offsetY = (targetElement.getY() - element.getY()); } } } public void execute( final Element element, final float normalizedTime, final Falloff falloff, final NiftyRenderEngine r) { if (fromOffset || toOffset) { float moveToX = startOffsetX + normalizedTime * offsetX; float moveToY = startOffsetY + normalizedTime * offsetY; r.moveTo(moveToX, moveToY); } else if (withTarget) { float moveToX = startOffsetX + normalizedTime * offsetX; float moveToY = startOffsetY + normalizedTime * offsetY; r.moveTo(moveToX, moveToY); } else { if (LEFT.equals(direction)) { r.moveTo(-startOffset + offsetDir * normalizedTime * offset, 0); } else if (RIGHT.equals(direction)) { r.moveTo(startOffset - offsetDir * normalizedTime * offset, 0); } else if (TOP.equals(direction)) { r.moveTo(0, -startOffset + offsetDir * normalizedTime * offset); } else if (BOTTOM.equals(direction)) { r.moveTo(0, startOffset - offsetDir * normalizedTime * offset); } } } public void deactivate() { } }
is_single_chunk: false
is_single_function: true
public void activate(final Nifty nifty, final Element element, final EffectProperties parameter) { direction = parameter.getProperty("direction"); if (LEFT.equals(direction)) { offset = element.getX() + element.getWidth(); } else if (RIGHT.equals(direction)) { offset = element.getX() + element.getWidth(); } else if (TOP.equals(direction)) { offset = element.getY() + element.getHeight(); } else if (BOTTOM.equals(direction)) { offset = element.getY() + element.getHeight(); } else { offset = 0; } String mode = parameter.getProperty("mode"); if ("out".equals(mode)) { startOffset = 0; offsetDir = -1; withTarget = false; } else if ("in".equals(mode)) { startOffset = offset; offsetDir = 1; withTarget = false; } else if ("fromPosition".equals(mode)) { withTarget = true; } else if ("toPosition".equals(mode)) { withTarget = true; } else if ("fromOffset".equals(mode)) { fromOffset = true; startOffsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); startOffsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); offsetX = Math.abs(startOffsetX); offsetY = Math.abs(startOffsetY); } else if ("toOffset".equals(mode)) { toOffset = true; startOffsetX = 0; startOffsetY = 0; offsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); offsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); } String target = parameter.getProperty("targetElement"); if (target != null) { TargetElementResolver resolver = new TargetElementResolver(nifty.getCurrentScreen(), element); Element targetElement = resolver.resolve(target); if (targetElement == null) { log.warning("move effect for element [" + element.getId() + "] was unable to find target element [" + target + "] at screen [" + nifty.getCurrentScreen().getScreenId() + "]"); return; } if ("fromPosition".equals(mode)) { startOffsetX = targetElement.getX() - element.getX(); startOffsetY = targetElement.getY() - element.getY(); offsetX = -(targetElement.getX() - element.getX()); offsetY = -(targetElement.getY() - element.getY()); } else if ("toPosition".equals(mode)) { startOffsetX = 0; startOffsetY = 0; offsetX = (targetElement.getX() - element.getX()); offsetY = (targetElement.getY() - element.getY()); } } }
public void activate(final Nifty nifty, final Element element, final EffectProperties parameter) { String mode = parameter.getProperty("mode"); direction = parameter.getProperty("direction"); if (LEFT.equals(direction)) { offset = element.getX() + element.getWidth(); } else if (RIGHT.equals(direction)) { offset = nifty.getRenderEngine().getWidth() - element.getX(); } else if (TOP.equals(direction)) { offset = element.getY() + element.getHeight(); } else if (BOTTOM.equals(direction)) { offset = nifty.getRenderEngine().getHeight() - element.getY(); } else { offset = 0; } if ("out".equals(mode)) { startOffset = 0; offsetDir = -1; withTarget = false; } else if ("in".equals(mode)) { startOffset = offset; offsetDir = 1; withTarget = false; } else if ("fromPosition".equals(mode)) { withTarget = true; } else if ("toPosition".equals(mode)) { withTarget = true; } else if ("fromOffset".equals(mode)) { fromOffset = true; startOffsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); startOffsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); offsetX = Math.abs(startOffsetX); offsetY = Math.abs(startOffsetY); } else if ("toOffset".equals(mode)) { toOffset = true; startOffsetX = 0; startOffsetY = 0; offsetX = Integer.valueOf(parameter.getProperty("offsetX", "0")); offsetY = Integer.valueOf(parameter.getProperty("offsetY", "0")); } String target = parameter.getProperty("targetElement"); if (target != null) { TargetElementResolver resolver = new TargetElementResolver(nifty.getCurrentScreen(), element); Element targetElement = resolver.resolve(target); if (targetElement == null) { log.warning("move effect for element [" + element.getId() + "] was unable to find target element [" + target + "] at screen [" + nifty.getCurrentScreen().getScreenId() + "]"); return; } if ("fromPosition".equals(mode)) { startOffsetX = targetElement.getX() - element.getX(); startOffsetY = targetElement.getY() - element.getY(); offsetX = -(targetElement.getX() - element.getX()); offsetY = -(targetElement.getY() - element.getY()); } else if ("toPosition".equals(mode)) { startOffsetX = 0; startOffsetY = 0; offsetX = (targetElement.getX() - element.getX()); offsetY = (targetElement.getY() - element.getY()); } } }
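Two things change in this row: `mode` is read before the direction branch (a pure reordering), and the RIGHT/BOTTOM offsets, which had been copy-pasted from the LEFT/TOP cases, now measure the distance to the far screen edge via the render engine. The corrected arithmetic in isolation, with hypothetical numbers:

```java
public class MoveOffsetDemo {
    public static void main(String[] args) {
        int screenWidth = 1024;                     // hypothetical nifty.getRenderEngine().getWidth()
        int elementX = 700, elementWidth = 200;

        long leftOffset = elementX + elementWidth;  // 900 px: right edge reaches x=0, off the LEFT edge
        long buggyRight = elementX + elementWidth;  // copy-paste bug: same 900 px, far overshoots
        long fixedRight = screenWidth - elementX;   // 324 px: left edge reaches x=1024, off the RIGHT edge

        System.out.println("left=" + leftOffset
                + " buggyRight=" + buggyRight
                + " fixedRight=" + fixedRight);
    }
}
```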
diff --git a/src/main/java/org/jboss/pressgang/ccms/contentspec/client/commands/PushTranslationCommand.java b/src/main/java/org/jboss/pressgang/ccms/contentspec/client/commands/PushTranslationCommand.java index 6a86f7f..391ac35 100644 --- a/src/main/java/org/jboss/pressgang/ccms/contentspec/client/commands/PushTranslationCommand.java +++ b/src/main/java/org/jboss/pressgang/ccms/contentspec/client/commands/PushTranslationCommand.java @@ -1,631 +1,631 @@ package org.jboss.pressgang.ccms.contentspec.client.commands; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import com.beust.jcommander.JCommander; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; import org.jboss.pressgang.ccms.contentspec.ContentSpec; import org.jboss.pressgang.ccms.contentspec.SpecTopic; import org.jboss.pressgang.ccms.contentspec.builder.constants.BuilderConstants; import org.jboss.pressgang.ccms.contentspec.client.commands.base.BaseCommandImpl; import org.jboss.pressgang.ccms.contentspec.client.config.ClientConfiguration; import org.jboss.pressgang.ccms.contentspec.client.config.ContentSpecConfiguration; import org.jboss.pressgang.ccms.contentspec.client.constants.Constants; import org.jboss.pressgang.ccms.contentspec.client.utils.ClientUtilities; import org.jboss.pressgang.ccms.contentspec.processor.ContentSpecParser; import org.jboss.pressgang.ccms.contentspec.processor.ContentSpecProcessor; import org.jboss.pressgang.ccms.contentspec.processor.structures.ProcessingOptions; import org.jboss.pressgang.ccms.contentspec.structures.StringToCSNodeCollection; import org.jboss.pressgang.ccms.contentspec.utils.CSTransformer; import org.jboss.pressgang.ccms.contentspec.utils.EntityUtilities; import org.jboss.pressgang.ccms.contentspec.utils.TranslationUtilities; import org.jboss.pressgang.ccms.contentspec.utils.logging.ErrorLoggerManager; import org.jboss.pressgang.ccms.provider.ContentSpecProvider; import org.jboss.pressgang.ccms.provider.DataProviderFactory; import org.jboss.pressgang.ccms.provider.TopicProvider; import org.jboss.pressgang.ccms.provider.TranslatedContentSpecProvider; import org.jboss.pressgang.ccms.provider.TranslatedTopicProvider; import org.jboss.pressgang.ccms.utils.common.DocBookUtilities; import org.jboss.pressgang.ccms.utils.common.HashUtilities; import org.jboss.pressgang.ccms.utils.common.XMLUtilities; import org.jboss.pressgang.ccms.utils.structures.Pair; import org.jboss.pressgang.ccms.utils.structures.StringToNodeCollection; import org.jboss.pressgang.ccms.wrapper.ContentSpecWrapper; import org.jboss.pressgang.ccms.wrapper.TopicWrapper; import org.jboss.pressgang.ccms.wrapper.TranslatedCSNodeWrapper; import org.jboss.pressgang.ccms.wrapper.TranslatedContentSpecWrapper; import org.jboss.pressgang.ccms.wrapper.TranslatedTopicWrapper; import org.jboss.pressgang.ccms.zanata.ZanataConstants; import org.jboss.pressgang.ccms.zanata.ZanataDetails; import org.jboss.pressgang.ccms.zanata.ZanataInterface; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.zanata.common.ContentType; import org.zanata.common.LocaleId; import org.zanata.common.ResourceType; import org.zanata.rest.dto.resource.Resource; import org.zanata.rest.dto.resource.TextFlow; @Parameters(commandDescription = "Push a Content Specification and it's topics to Zanata for translation.") public class PushTranslationCommand extends BaseCommandImpl { @Parameter(metaVar = "[ID]") private List<Integer> ids = new 
ArrayList<Integer>(); @Parameter(names = Constants.ZANATA_SERVER_LONG_PARAM, description = "The Zanata server to be associated with the Content Specification.") private String zanataUrl = null; @Parameter(names = Constants.ZANATA_PROJECT_LONG_PARAM, description = "The Zanata project name to be associated with the Content Specification.") private String zanataProject = null; @Parameter(names = Constants.ZANATA_PROJECT_VERSION_LONG_PARAM, description = "The Zanata project version to be associated with the Content Specification.") private String zanataVersion = null; @Parameter(names = Constants.CONTENT_SPEC_ONLY_LONG_PARAM, description = "Only push the the content specification to Zanata.") private Boolean contentSpecOnly = false; @Parameter(names = {Constants.YES_LONG_PARAM, Constants.YES_SHORT_PARAM}, description = "Automatically answer \"yes\" to any questions.") private Boolean answerYes = false; private ContentSpecProcessor csp; public PushTranslationCommand(final JCommander parser, final ContentSpecConfiguration cspConfig, final ClientConfiguration clientConfig) { super(parser, cspConfig, clientConfig); } @Override public String getCommandName() { return Constants.PUSH_TRANSLATION_COMMAND_NAME; } public List<Integer> getIds() { return ids; } public void setIds(final List<Integer> ids) { this.ids = ids; } public String getZanataUrl() { return zanataUrl; } public void setZanataUrl(final String zanataUrl) { this.zanataUrl = zanataUrl; } public String getZanataProject() { return zanataProject; } public void setZanataProject(final String zanataProject) { this.zanataProject = zanataProject; } public String getZanataVersion() { return zanataVersion; } public void setZanataVersion(final String zanataVersion) { this.zanataVersion = zanataVersion; } public Boolean getAnswerYes() { return answerYes; } public void setAnswerYes(Boolean answerYes) { this.answerYes = answerYes; } public Boolean getContentSpecOnly() { return contentSpecOnly; } public void setContentSpecOnly(Boolean contentSpecOnly) { this.contentSpecOnly = contentSpecOnly; } @Override public boolean validateServerUrl() { setupZanataOptions(); if (!super.validateServerUrl()) return false; final ZanataDetails zanataDetails = getCspConfig().getZanataDetails(); // Print the zanata server url JCommander.getConsole().println(String.format(Constants.ZANATA_WEBSERVICE_MSG, zanataDetails.getServer())); // Test that the server address is valid if (!ClientUtilities.validateServerExists(zanataDetails.getServer())) { // Print a line to separate content JCommander.getConsole().println(""); printErrorAndShutdown(Constants.EXIT_NO_SERVER, Constants.UNABLE_TO_FIND_SERVER_MSG, false); } return true; } /** * Sets the zanata options applied by the command line * to the options that were set via configuration files. 
*/ protected void setupZanataOptions() { // Set the zanata url if (getZanataUrl() != null) { // Find the zanata server if the url is a reference to the zanata server name for (final String serverName : getClientConfig().getZanataServers().keySet()) { if (serverName.equals(getZanataUrl())) { setZanataUrl(getClientConfig().getZanataServers().get(serverName).getUrl()); break; } } getCspConfig().getZanataDetails().setServer(ClientUtilities.fixHostURL(getZanataUrl())); } // Set the zanata project if (getZanataProject() != null) { getCspConfig().getZanataDetails().setProject(getZanataProject()); } // Set the zanata version if (getZanataVersion() != null) { getCspConfig().getZanataDetails().setVersion(getZanataVersion()); } } protected boolean isValid() { final ZanataDetails zanataDetails = getCspConfig().getZanataDetails(); // Check that we even have some zanata details. if (zanataDetails == null) return false; // Check that none of the fields are invalid. if (zanataDetails.getServer() == null || zanataDetails.getServer().isEmpty() || zanataDetails.getProject() == null || zanataDetails.getProject().isEmpty() || zanataDetails.getVersion() == null || zanataDetails.getVersion().isEmpty() || zanataDetails.getToken() == null || zanataDetails.getToken().isEmpty() || zanataDetails.getUsername() == null || zanataDetails.getUsername().isEmpty()) { return false; } // At this point the zanata details are valid, so save the details. System.setProperty(ZanataConstants.ZANATA_SERVER_PROPERTY, zanataDetails.getServer()); System.setProperty(ZanataConstants.ZANATA_PROJECT_PROPERTY, zanataDetails.getProject()); System.setProperty(ZanataConstants.ZANATA_PROJECT_VERSION_PROPERTY, zanataDetails.getVersion()); System.setProperty(ZanataConstants.ZANATA_USERNAME_PROPERTY, zanataDetails.getUsername()); System.setProperty(ZanataConstants.ZANATA_TOKEN_PROPERTY, zanataDetails.getToken()); return true; } @Override public void process() { final ContentSpecProvider contentSpecProvider = getProviderFactory().getProvider(ContentSpecProvider.class); final TopicProvider topicProvider = getProviderFactory().getProvider(TopicProvider.class); // Load the ids and validate that one and only one exists ClientUtilities.prepareAndValidateIds(this, getCspConfig(), getIds()); // Check that the zanata details are valid if (!isValid()) { printErrorAndShutdown(Constants.EXIT_CONFIG_ERROR, Constants.ERROR_PUSH_NO_ZANATA_DETAILS_MSG, false); } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); final ContentSpecWrapper contentSpecEntity = ClientUtilities.getContentSpecEntity(contentSpecProvider, ids.get(0), null); if (contentSpecEntity == null) { printErrorAndShutdown(Constants.EXIT_FAILURE, Constants.ERROR_NO_ID_FOUND_MSG, false); } // Check that the content spec isn't a failed one if (contentSpecEntity.getFailed() != null) { printErrorAndShutdown(Constants.EXIT_FAILURE, Constants.ERROR_INVALID_CONTENT_SPEC, false); } // Transform the content spec final ContentSpec contentSpec = CSTransformer.transform(contentSpecEntity, getProviderFactory()); // Setup the processing options final ProcessingOptions processingOptions = new ProcessingOptions(); processingOptions.setValidating(true); processingOptions.setIgnoreChecksum(true); processingOptions.setAllowNewTopics(false); // Validate and parse the Content Specification final ErrorLoggerManager loggerManager = new ErrorLoggerManager(); csp = new ContentSpecProcessor(getProviderFactory(), loggerManager, processingOptions); boolean success = csp.processContentSpec(contentSpec, 
getUsername(), ContentSpecParser.ParsingMode.EITHER); // Print the error/warning messages JCommander.getConsole().println(loggerManager.generateLogs()); // Check that everything validated fine if (!success) { shutdown(Constants.EXIT_TOPIC_INVALID); } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); // Initialise the topic wrappers in the spec topics initialiseSpecTopics(topicProvider, contentSpec); // Good point to check for a shutdown allowShutdownToContinueIfRequested(); if (!pushToZanata(getProviderFactory(), contentSpec, contentSpecEntity)) { printErrorAndShutdown(Constants.EXIT_FAILURE, Constants.ERROR_ZANATA_PUSH_FAILED_MSG, false); shutdown(Constants.EXIT_FAILURE); } else { JCommander.getConsole().println(Constants.SUCCESSFUL_ZANATA_PUSH_MSG); } } @Override public boolean loadFromCSProcessorCfg() { return ids.size() == 0; } protected void initialiseSpecTopics(final TopicProvider topicProvider, final ContentSpec contentSpec) { final List<SpecTopic> specTopics = contentSpec.getSpecTopics(); for (final SpecTopic specTopic : specTopics) { if (specTopic.getDBId() != null && specTopic.getDBId() > 0 && specTopic.getRevision() == null) { specTopic.setTopic(topicProvider.getTopic(specTopic.getDBId())); } else if (specTopic.getDBId() != null && specTopic.getDBId() > 0 && specTopic.getRevision() != null) { specTopic.setTopic(topicProvider.getTopic(specTopic.getDBId(), specTopic.getRevision())); } } } /** * Pushes a content spec and its topics to zanata. * * @param providerFactory * @param contentSpec * @param contentSpecEntity * @return True if the push was successful otherwise false. */ protected boolean pushToZanata(final DataProviderFactory providerFactory, final ContentSpec contentSpec, final ContentSpecWrapper contentSpecEntity) { final Map<TopicWrapper, SpecTopic> topicToSpecTopic = new HashMap<TopicWrapper, SpecTopic>(); boolean error = false; final ZanataInterface zanataInterface = new ZanataInterface(0.2); // Convert all the topics to DOM Documents first so we know if any are invalid final Map<Pair<Integer, Integer>, TopicWrapper> topics = new HashMap<Pair<Integer, Integer>, TopicWrapper>(); final List<SpecTopic> specTopics = contentSpec.getSpecTopics(); for (final SpecTopic specTopic : specTopics) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); final Pair<Integer, Integer> topicId = new Pair<Integer, Integer>(topic.getId(), topic.getRevision()); // Only process the topic if it hasn't already been added, since the same topic can exist twice if (!topics.containsKey(topicId)) { topics.put(topicId, topic); // Convert the XML String into a DOM object. Document doc = null; try { doc = XMLUtilities.convertStringToDocument(topic.getXml()); } catch (Exception e) { // Do Nothing as we handle the error below. } if (doc == null) { JCommander.getConsole().println( "ERROR: Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " does not have valid XML"); error = true; } else { specTopic.setXMLDocument(doc); topicToSpecTopic.put(topic, specTopic); } } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); } // Return if creating the documents failed if (error) { return false; } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); - final float total = topics.size() + 1; + final float total = getContentSpecOnly() ? 
1 : (topics.size() + 1); float current = 0; final int showPercent = 5; int lastPercent = 0; final String answer; if (getAnswerYes()) { JCommander.getConsole().println("Pushing " + ((int) total) + " topics to zanata."); answer = "yes"; } else { JCommander.getConsole().println("You are about to push " + ((int) total) + " topics to zanata. Continue? (Yes/No)"); answer = JCommander.getConsole().readLine(); } final List<String> messages = new ArrayList<String>(); if (answer.equalsIgnoreCase("yes") || answer.equalsIgnoreCase("y")) { JCommander.getConsole().println("Starting to push topics to zanata..."); // Upload the content specification to zanata first so we can reference the nodes when pushing topics final TranslatedContentSpecWrapper translatedContentSpec = pushContentSpecToZanata(providerFactory, contentSpecEntity, zanataInterface, messages); if (translatedContentSpec == null) { error = true; } else if (!getContentSpecOnly()) { // Loop through each topic and upload it to zanata for (final Entry<TopicWrapper, SpecTopic> topicEntry : topicToSpecTopic.entrySet()) { ++current; final int percent = Math.round(current / total * 100); if (percent - lastPercent >= showPercent) { lastPercent = percent; JCommander.getConsole().println("\tPushing topics to zanata " + percent + "% Done"); } final SpecTopic specTopic = topicEntry.getValue(); // Find the matching translated CSNode and if one can't be found then produce an error. final TranslatedCSNodeWrapper translatedCSNode = findTopicTranslatedCSNode(translatedContentSpec, specTopic); if (translatedCSNode == null) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); messages.add( "Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in Zanata."); error = true; } else { if (!pushTopicToZanata(providerFactory, specTopic, translatedCSNode, zanataInterface, messages)) { error = true; } } } } } // Print the info/error messages if (messages.size() > 0) { JCommander.getConsole().println("Output:"); for (final String message : messages) { JCommander.getConsole().println("\t" + message); } } return !error; } protected TranslatedCSNodeWrapper findTopicTranslatedCSNode(final TranslatedContentSpecWrapper translatedContentSpec, final SpecTopic specTopic) { final List<TranslatedCSNodeWrapper> translatedCSNodes = translatedContentSpec.getTranslatedNodes().getItems(); for (final TranslatedCSNodeWrapper translatedCSNode : translatedCSNodes) { if (specTopic.getUniqueId() != null && specTopic.getUniqueId().equals(translatedCSNode.getNodeId().toString())) { return translatedCSNode; } } return null; } /** * @param providerFactory * @param specTopic * @param zanataInterface * @param messages * @return True if the topic was pushed successful otherwise false. */ protected boolean pushTopicToZanata(final DataProviderFactory providerFactory, final SpecTopic specTopic, final TranslatedCSNodeWrapper translatedCSNode, final ZanataInterface zanataInterface, final List<String> messages) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); final Document doc = specTopic.getXMLDocument(); boolean error = false; // Create the zanata id based on whether a condition has been specified or not final String zanataId = getTopicZanataId(specTopic, translatedCSNode); // Get the condition if the xml has any conditions and see if a matching translated topic already exists. boolean xmlHasConditions = !DocBookUtilities.getConditionNodes(doc).isEmpty(); final String condition = xmlHasConditions ? 
specTopic.getConditionStatement(true) : null; final boolean translatedTopicExists = EntityUtilities.getTranslatedTopicByTopicAndNodeId(providerFactory, topic.getId(), topic.getRevision(), condition == null ? null : translatedCSNode.getId(), topic.getLocale()) != null; // Check if the zanata document already exists, if it does than the topic can be ignored. final Resource zanataFile = zanataInterface.getZanataResource(zanataId); if (zanataFile == null) { // Process the conditions, if any exist, to remove any nodes that wouldn't be seen for the content spec. DocBookUtilities.processConditions(condition, doc, BuilderConstants.DEFAULT_CONDITION); // Create the document to be created in Zanata final Resource resource = new Resource(); resource.setContentType(ContentType.TextPlain); resource.setLang(LocaleId.fromJavaName(topic.getLocale())); resource.setName(zanataId); resource.setRevision(1); resource.setType(ResourceType.FILE); // Get the translatable nodes final List<StringToNodeCollection> translatableStrings = XMLUtilities.getTranslatableStringsV2(doc, false); // Add the translatable nodes to the zanata document for (final StringToNodeCollection translatableStringData : translatableStrings) { final String translatableString = translatableStringData.getTranslationString(); if (!translatableString.trim().isEmpty()) { final TextFlow textFlow = new TextFlow(); textFlow.setContents(translatableString); textFlow.setLang(LocaleId.fromJavaName(topic.getLocale())); textFlow.setId(HashUtilities.generateMD5(translatableString)); textFlow.setRevision(1); resource.getTextFlows().add(textFlow); } } // Create the document in zanata and then in PressGang if the document was successfully created in Zanata. if (!zanataInterface.createFile(resource)) { messages.add("Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in Zanata."); error = true; } else if (!translatedTopicExists) { createPressGangTranslatedTopic(providerFactory, topic, condition, translatedCSNode, messages); } } else if (!translatedTopicExists) { createPressGangTranslatedTopic(providerFactory, topic, condition, translatedCSNode, messages); } else { messages.add("Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " already exists - Skipping."); } return !error; } protected boolean createPressGangTranslatedTopic(final DataProviderFactory providerFactory, final TopicWrapper topic, String condition, final TranslatedCSNodeWrapper translatedCSNode, final List<String> messages) { final TranslatedTopicProvider translatedTopicProvider = providerFactory.getProvider(TranslatedTopicProvider.class); // Create the Translated Topic based on if it has a condition or not. final TranslatedTopicWrapper translatedTopic; if (condition == null) { translatedTopic = TranslationUtilities.createTranslatedTopic(providerFactory, topic, null, null); } else { translatedTopic = TranslationUtilities.createTranslatedTopic(providerFactory, topic, translatedCSNode, condition); } // Save the Translated Topic try { if (translatedTopicProvider.createTranslatedTopic(translatedTopic) == null) { messages.add("Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in " + "PressGang."); return false; } } catch (Exception e) { messages.add("Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in " + "PressGang."); return false; } return true; } /** * Gets the Zanata ID for a topic based on whether or not the topic has any conditional text. 
* * @param specTopic The topic to create the Zanata ID for. * @param translatedCSNode * @return The unique Zanata ID that can be used to create a document in Zanata. */ protected String getTopicZanataId(final SpecTopic specTopic, final TranslatedCSNodeWrapper translatedCSNode) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); Map<Node, List<String>> conditionNodes = DocBookUtilities.getConditionNodes(specTopic.getXMLDocument()); // Create the zanata id based on whether a condition has been specified or not final String zanataId; if (specTopic.getConditionStatement(true) != null && !conditionNodes.isEmpty()) { zanataId = topic.getId() + "-" + topic.getRevision() + "-" + translatedCSNode.getId(); } else { zanataId = topic.getId() + "-" + topic.getRevision(); } return zanataId; } /** * @param providerFactory * @param contentSpecEntity * @param zanataInterface * @param messages * @return */ protected TranslatedContentSpecWrapper pushContentSpecToZanata(final DataProviderFactory providerFactory, final ContentSpecWrapper contentSpecEntity, final ZanataInterface zanataInterface, final List<String> messages) { final String zanataId = "CS" + contentSpecEntity.getId() + "-" + contentSpecEntity.getRevision(); final Resource zanataFile = zanataInterface.getZanataResource(zanataId); TranslatedContentSpecWrapper translatedContentSpec = EntityUtilities.getTranslatedContentSpecById(providerFactory, contentSpecEntity.getId(), contentSpecEntity.getRevision()); if (zanataFile == null) { final Resource resource = new Resource(); resource.setContentType(ContentType.TextPlain); resource.setLang(LocaleId.fromJavaName(contentSpecEntity.getLocale())); resource.setName(zanataId); resource.setRevision(1); resource.setType(ResourceType.FILE); final List<StringToCSNodeCollection> translatableStrings = TranslationUtilities.getTranslatableStrings(contentSpecEntity, false); for (final StringToCSNodeCollection translatableStringData : translatableStrings) { final String translatableString = translatableStringData.getTranslationString(); if (!translatableString.trim().isEmpty()) { final TextFlow textFlow = new TextFlow(); textFlow.setContents(translatableString); textFlow.setLang(LocaleId.fromJavaName(contentSpecEntity.getLocale())); textFlow.setId(HashUtilities.generateMD5(translatableString)); textFlow.setRevision(1); resource.getTextFlows().add(textFlow); } } // Create the document in Zanata if (!zanataInterface.createFile(resource)) { messages.add("Content Spec ID " + contentSpecEntity.getId() + ", Revision " + contentSpecEntity.getRevision() + " " + "failed to be created in Zanata."); return null; } else if (translatedContentSpec == null) { return createPressGangTranslatedContentSpec(providerFactory, contentSpecEntity, messages); } } else if (translatedContentSpec == null) { return createPressGangTranslatedContentSpec(providerFactory, contentSpecEntity, messages); } else { messages.add("Content Spec ID " + contentSpecEntity.getId() + ", Revision " + contentSpecEntity.getRevision() + " already " + "exists - Skipping."); } return translatedContentSpec; } protected TranslatedContentSpecWrapper createPressGangTranslatedContentSpec(final DataProviderFactory providerFactory, final ContentSpecWrapper contentSpecEntity, final List<String> messages) { final TranslatedContentSpecProvider translatedContentSpecProvider = providerFactory.getProvider( TranslatedContentSpecProvider.class); // Create the Translated Content Spec and it's nodes final TranslatedContentSpecWrapper newTranslatedContentSpec = 
TranslationUtilities.createTranslatedContentSpec(providerFactory, contentSpecEntity); try { // Save the translated content spec final TranslatedContentSpecWrapper translatedContentSpec = translatedContentSpecProvider.createTranslatedContentSpec( newTranslatedContentSpec); if (translatedContentSpec == null) { messages.add("Content Spec ID " + contentSpecEntity.getId() + ", " + "Revision " + contentSpecEntity.getRevision() + " failed to be created in PressGang."); return null; } else { return translatedContentSpec; } } catch (Exception e) { messages.add("Content Spec ID " + contentSpecEntity.getId() + ", Revision " + contentSpecEntity.getRevision() + " " + "failed to be created in PressGang."); return null; } } @Override public boolean requiresExternalConnection() { return true; } }
is_single_chunk: true
is_single_function: true
protected boolean pushToZanata(final DataProviderFactory providerFactory, final ContentSpec contentSpec, final ContentSpecWrapper contentSpecEntity) { final Map<TopicWrapper, SpecTopic> topicToSpecTopic = new HashMap<TopicWrapper, SpecTopic>(); boolean error = false; final ZanataInterface zanataInterface = new ZanataInterface(0.2); // Convert all the topics to DOM Documents first so we know if any are invalid final Map<Pair<Integer, Integer>, TopicWrapper> topics = new HashMap<Pair<Integer, Integer>, TopicWrapper>(); final List<SpecTopic> specTopics = contentSpec.getSpecTopics(); for (final SpecTopic specTopic : specTopics) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); final Pair<Integer, Integer> topicId = new Pair<Integer, Integer>(topic.getId(), topic.getRevision()); // Only process the topic if it hasn't already been added, since the same topic can exist twice if (!topics.containsKey(topicId)) { topics.put(topicId, topic); // Convert the XML String into a DOM object. Document doc = null; try { doc = XMLUtilities.convertStringToDocument(topic.getXml()); } catch (Exception e) { // Do Nothing as we handle the error below. } if (doc == null) { JCommander.getConsole().println( "ERROR: Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " does not have valid XML"); error = true; } else { specTopic.setXMLDocument(doc); topicToSpecTopic.put(topic, specTopic); } } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); } // Return if creating the documents failed if (error) { return false; } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); final float total = topics.size() + 1; float current = 0; final int showPercent = 5; int lastPercent = 0; final String answer; if (getAnswerYes()) { JCommander.getConsole().println("Pushing " + ((int) total) + " topics to zanata."); answer = "yes"; } else { JCommander.getConsole().println("You are about to push " + ((int) total) + " topics to zanata. Continue? (Yes/No)"); answer = JCommander.getConsole().readLine(); } final List<String> messages = new ArrayList<String>(); if (answer.equalsIgnoreCase("yes") || answer.equalsIgnoreCase("y")) { JCommander.getConsole().println("Starting to push topics to zanata..."); // Upload the content specification to zanata first so we can reference the nodes when pushing topics final TranslatedContentSpecWrapper translatedContentSpec = pushContentSpecToZanata(providerFactory, contentSpecEntity, zanataInterface, messages); if (translatedContentSpec == null) { error = true; } else if (!getContentSpecOnly()) { // Loop through each topic and upload it to zanata for (final Entry<TopicWrapper, SpecTopic> topicEntry : topicToSpecTopic.entrySet()) { ++current; final int percent = Math.round(current / total * 100); if (percent - lastPercent >= showPercent) { lastPercent = percent; JCommander.getConsole().println("\tPushing topics to zanata " + percent + "% Done"); } final SpecTopic specTopic = topicEntry.getValue(); // Find the matching translated CSNode and if one can't be found then produce an error. 
final TranslatedCSNodeWrapper translatedCSNode = findTopicTranslatedCSNode(translatedContentSpec, specTopic); if (translatedCSNode == null) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); messages.add( "Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in Zanata."); error = true; } else { if (!pushTopicToZanata(providerFactory, specTopic, translatedCSNode, zanataInterface, messages)) { error = true; } } } } } // Print the info/error messages if (messages.size() > 0) { JCommander.getConsole().println("Output:"); for (final String message : messages) { JCommander.getConsole().println("\t" + message); } } return !error; }
protected boolean pushToZanata(final DataProviderFactory providerFactory, final ContentSpec contentSpec, final ContentSpecWrapper contentSpecEntity) { final Map<TopicWrapper, SpecTopic> topicToSpecTopic = new HashMap<TopicWrapper, SpecTopic>(); boolean error = false; final ZanataInterface zanataInterface = new ZanataInterface(0.2); // Convert all the topics to DOM Documents first so we know if any are invalid final Map<Pair<Integer, Integer>, TopicWrapper> topics = new HashMap<Pair<Integer, Integer>, TopicWrapper>(); final List<SpecTopic> specTopics = contentSpec.getSpecTopics(); for (final SpecTopic specTopic : specTopics) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); final Pair<Integer, Integer> topicId = new Pair<Integer, Integer>(topic.getId(), topic.getRevision()); // Only process the topic if it hasn't already been added, since the same topic can exist twice if (!topics.containsKey(topicId)) { topics.put(topicId, topic); // Convert the XML String into a DOM object. Document doc = null; try { doc = XMLUtilities.convertStringToDocument(topic.getXml()); } catch (Exception e) { // Do Nothing as we handle the error below. } if (doc == null) { JCommander.getConsole().println( "ERROR: Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " does not have valid XML"); error = true; } else { specTopic.setXMLDocument(doc); topicToSpecTopic.put(topic, specTopic); } } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); } // Return if creating the documents failed if (error) { return false; } // Good point to check for a shutdown allowShutdownToContinueIfRequested(); final float total = getContentSpecOnly() ? 1 : (topics.size() + 1); float current = 0; final int showPercent = 5; int lastPercent = 0; final String answer; if (getAnswerYes()) { JCommander.getConsole().println("Pushing " + ((int) total) + " topics to zanata."); answer = "yes"; } else { JCommander.getConsole().println("You are about to push " + ((int) total) + " topics to zanata. Continue? (Yes/No)"); answer = JCommander.getConsole().readLine(); } final List<String> messages = new ArrayList<String>(); if (answer.equalsIgnoreCase("yes") || answer.equalsIgnoreCase("y")) { JCommander.getConsole().println("Starting to push topics to zanata..."); // Upload the content specification to zanata first so we can reference the nodes when pushing topics final TranslatedContentSpecWrapper translatedContentSpec = pushContentSpecToZanata(providerFactory, contentSpecEntity, zanataInterface, messages); if (translatedContentSpec == null) { error = true; } else if (!getContentSpecOnly()) { // Loop through each topic and upload it to zanata for (final Entry<TopicWrapper, SpecTopic> topicEntry : topicToSpecTopic.entrySet()) { ++current; final int percent = Math.round(current / total * 100); if (percent - lastPercent >= showPercent) { lastPercent = percent; JCommander.getConsole().println("\tPushing topics to zanata " + percent + "% Done"); } final SpecTopic specTopic = topicEntry.getValue(); // Find the matching translated CSNode and if one can't be found then produce an error. 
final TranslatedCSNodeWrapper translatedCSNode = findTopicTranslatedCSNode(translatedContentSpec, specTopic); if (translatedCSNode == null) { final TopicWrapper topic = (TopicWrapper) specTopic.getTopic(); messages.add( "Topic ID " + topic.getId() + ", Revision " + topic.getRevision() + " failed to be created in Zanata."); error = true; } else { if (!pushTopicToZanata(providerFactory, specTopic, translatedCSNode, zanataInterface, messages)) { error = true; } } } } } // Print the info/error messages if (messages.size() > 0) { JCommander.getConsole().println("Output:"); for (final String message : messages) { JCommander.getConsole().println("\t" + message); } } return !error; }
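The only behavioural change in this row is the computation of total: when getContentSpecOnly() is true, only the content-spec document is pushed, so the confirmation prompt and the progress percentage should count one document instead of topics.size() + 1. Below is a minimal self-contained sketch of the corrected logic; PushCountSketch and documentsToPush() are hypothetical names invented for illustration, as the real command computes this inline in pushToZanata().

// Sketch of the corrected document-count logic from the fixed_function above.
public class PushCountSketch {

    static int documentsToPush(boolean contentSpecOnly, int uniqueTopicCount) {
        // Buggy version: always uniqueTopicCount + 1, so a spec-only push
        // still reported (and asked the user to confirm) every topic.
        return contentSpecOnly ? 1 : uniqueTopicCount + 1;
    }

    public static void main(String[] args) {
        System.out.println(documentsToPush(false, 10)); // 11 = spec + 10 topics
        System.out.println(documentsToPush(true, 10));  // 1  = spec only
    }
}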
diff --git a/amibe/src/org/jcae/mesh/MeshOEMMViewer3d.java b/amibe/src/org/jcae/mesh/MeshOEMMViewer3d.java index 1a488114..cc3f3284 100644 --- a/amibe/src/org/jcae/mesh/MeshOEMMViewer3d.java +++ b/amibe/src/org/jcae/mesh/MeshOEMMViewer3d.java @@ -1,172 +1,173 @@ /* jCAE stand for Java Computer Aided Engineering. Features are : Small CAD modeler, Finit element mesher, Plugin architecture. Copyright (C) 2005 Jerome Robert <[email protected]> This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jcae.mesh; import org.jcae.mesh.oemm.OEMM; import org.jcae.mesh.oemm.Storage; import org.jcae.mesh.amibe.ds.Mesh; import org.jcae.mesh.amibe.ds.Triangle; import org.jcae.mesh.xmldata.MeshWriter; import org.jcae.mesh.amibe.validation.*; import org.apache.log4j.Logger; import java.io.File; import java.awt.event.KeyAdapter; import java.awt.event.KeyEvent; import java.util.Iterator; import java.util.HashMap; import javax.swing.JFrame; import javax.swing.WindowConstants; import org.jcae.viewer3d.OEMMViewer; import org.jcae.viewer3d.bg.ViewableBG; import org.jcae.viewer3d.fe.amibe.AmibeProvider; import org.jcae.viewer3d.fe.ViewableFE; import org.jcae.viewer3d.fe.FEDomain; import org.jcae.viewer3d.View; import gnu.trove.TIntHashSet; /** * This class illustrates how to perform quality checks. 
*/ public class MeshOEMMViewer3d { private static Logger logger=Logger.getLogger(MeshOEMMViewer3d.class); private static ViewableBG fineMesh; private static ViewableFE decMesh; private static boolean showOctree = true; private static boolean showAxis = true; public static void main(String args[]) { if (args.length < 1) { System.out.println("Usage: MeshOEMMViewer3d dir"); System.exit(0); } String dir=args[0]; JFrame feFrame=new JFrame("jCAE Demo"); feFrame.setSize(800,600); feFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); final OEMM oemm = Storage.readOEMMStructure(dir); final View bgView=new View(feFrame); final ViewableBG octree = new ViewableBG(OEMMViewer.bgOEMM(oemm, true)); try { bgView.add(octree); bgView.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent event) { if(event.getKeyChar()=='n') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); fineMesh = new ViewableBG(OEMMViewer.meshOEMM(oemm, octree.getResultSet())); octree.unselectAll(); bgView.add(fineMesh); } else if(event.getKeyChar()=='o') { showOctree = !showOctree; if (showOctree) { bgView.add(octree); bgView.setCurrentViewable(octree); } else bgView.remove(octree); } else if(event.getKeyChar()=='s') { TIntHashSet leaves = octree.getResultSet(); if (leaves.size() == 1) { int idx = leaves.iterator().next(); OEMM.Node current = oemm.leaves[idx]; Mesh amesh = Storage.loadNodeWithNeighbours(oemm, idx, false); MinAngleFace qproc = new MinAngleFace(); QualityFloat data = new QualityFloat(amesh.getTriangles().size()); data.setQualityProcedure(qproc); for (Iterator itf = amesh.getTriangles().iterator(); itf.hasNext();) { Triangle f = (Triangle) itf.next(); if (f.getGroupId() == idx) data.compute(f); } data.setTarget((float) Math.PI/3.0f); String outFile = oemm.getDirectory()+File.separator+current.file+"q"; data.writeRawData(outFile); logger.info("Quality factor written into "+outFile); } else { logger.error("Only one node must be selected!"); } } else if(event.getKeyChar()=='d') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); Mesh amesh = Storage.loadNodes(oemm, octree.getResultSet(), true); HashMap opts = new HashMap(); opts.put("maxtriangles", Integer.toString(amesh.getTriangles().size() / 100)); new org.jcae.mesh.amibe.algos3d.DecimateHalfEdge(amesh, opts).compute(); String xmlDir = "dec-tmp"; String xmlFile = "jcae3d"; MeshWriter.writeObject3D(amesh, xmlDir, xmlFile, ".", "tmp.brep"); octree.unselectAll(); try { AmibeProvider ap = new AmibeProvider(new File(xmlDir)); decMesh = new ViewableFE(ap); - logger.info("Nr. of triangles: "+((FEDomain)ap.getDomain(0)).getNumberOfTria3()); + int [] ids = ap.getDomainIDs(); + logger.info("Nr. of triangles: "+((FEDomain)ap.getDomain(ids[0])).getNumberOfTria3()); bgView.add(decMesh); } catch (Exception ex) { ex.printStackTrace(); } } else if(event.getKeyChar()=='a') { showAxis = !showAxis; bgView.setOriginAxisVisible(showAxis); } else if(event.getKeyChar()=='q') System.exit(0); } }); bgView.fitAll(); feFrame.getContentPane().add(bgView); feFrame.setVisible(true); bgView.setOriginAxisVisible(showAxis); } catch(Exception ex) { ex.printStackTrace(); } } }
is_single_chunk: true
is_single_function: true
public static void main(String args[]) { if (args.length < 1) { System.out.println("Usage: MeshOEMMViewer3d dir"); System.exit(0); } String dir=args[0]; JFrame feFrame=new JFrame("jCAE Demo"); feFrame.setSize(800,600); feFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); final OEMM oemm = Storage.readOEMMStructure(dir); final View bgView=new View(feFrame); final ViewableBG octree = new ViewableBG(OEMMViewer.bgOEMM(oemm, true)); try { bgView.add(octree); bgView.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent event) { if(event.getKeyChar()=='n') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); fineMesh = new ViewableBG(OEMMViewer.meshOEMM(oemm, octree.getResultSet())); octree.unselectAll(); bgView.add(fineMesh); } else if(event.getKeyChar()=='o') { showOctree = !showOctree; if (showOctree) { bgView.add(octree); bgView.setCurrentViewable(octree); } else bgView.remove(octree); } else if(event.getKeyChar()=='s') { TIntHashSet leaves = octree.getResultSet(); if (leaves.size() == 1) { int idx = leaves.iterator().next(); OEMM.Node current = oemm.leaves[idx]; Mesh amesh = Storage.loadNodeWithNeighbours(oemm, idx, false); MinAngleFace qproc = new MinAngleFace(); QualityFloat data = new QualityFloat(amesh.getTriangles().size()); data.setQualityProcedure(qproc); for (Iterator itf = amesh.getTriangles().iterator(); itf.hasNext();) { Triangle f = (Triangle) itf.next(); if (f.getGroupId() == idx) data.compute(f); } data.setTarget((float) Math.PI/3.0f); String outFile = oemm.getDirectory()+File.separator+current.file+"q"; data.writeRawData(outFile); logger.info("Quality factor written into "+outFile); } else { logger.error("Only one node must be selected!"); } } else if(event.getKeyChar()=='d') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); Mesh amesh = Storage.loadNodes(oemm, octree.getResultSet(), true); HashMap opts = new HashMap(); opts.put("maxtriangles", Integer.toString(amesh.getTriangles().size() / 100)); new org.jcae.mesh.amibe.algos3d.DecimateHalfEdge(amesh, opts).compute(); String xmlDir = "dec-tmp"; String xmlFile = "jcae3d"; MeshWriter.writeObject3D(amesh, xmlDir, xmlFile, ".", "tmp.brep"); octree.unselectAll(); try { AmibeProvider ap = new AmibeProvider(new File(xmlDir)); decMesh = new ViewableFE(ap); logger.info("Nr. of triangles: "+((FEDomain)ap.getDomain(0)).getNumberOfTria3()); bgView.add(decMesh); } catch (Exception ex) { ex.printStackTrace(); } } else if(event.getKeyChar()=='a') { showAxis = !showAxis; bgView.setOriginAxisVisible(showAxis); } else if(event.getKeyChar()=='q') System.exit(0); } }); bgView.fitAll(); feFrame.getContentPane().add(bgView); feFrame.setVisible(true); bgView.setOriginAxisVisible(showAxis); } catch(Exception ex) { ex.printStackTrace(); } }
public static void main(String args[]) { if (args.length < 1) { System.out.println("Usage: MeshOEMMViewer3d dir"); System.exit(0); } String dir=args[0]; JFrame feFrame=new JFrame("jCAE Demo"); feFrame.setSize(800,600); feFrame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); final OEMM oemm = Storage.readOEMMStructure(dir); final View bgView=new View(feFrame); final ViewableBG octree = new ViewableBG(OEMMViewer.bgOEMM(oemm, true)); try { bgView.add(octree); bgView.addKeyListener(new KeyAdapter() { public void keyPressed(KeyEvent event) { if(event.getKeyChar()=='n') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); fineMesh = new ViewableBG(OEMMViewer.meshOEMM(oemm, octree.getResultSet())); octree.unselectAll(); bgView.add(fineMesh); } else if(event.getKeyChar()=='o') { showOctree = !showOctree; if (showOctree) { bgView.add(octree); bgView.setCurrentViewable(octree); } else bgView.remove(octree); } else if(event.getKeyChar()=='s') { TIntHashSet leaves = octree.getResultSet(); if (leaves.size() == 1) { int idx = leaves.iterator().next(); OEMM.Node current = oemm.leaves[idx]; Mesh amesh = Storage.loadNodeWithNeighbours(oemm, idx, false); MinAngleFace qproc = new MinAngleFace(); QualityFloat data = new QualityFloat(amesh.getTriangles().size()); data.setQualityProcedure(qproc); for (Iterator itf = amesh.getTriangles().iterator(); itf.hasNext();) { Triangle f = (Triangle) itf.next(); if (f.getGroupId() == idx) data.compute(f); } data.setTarget((float) Math.PI/3.0f); String outFile = oemm.getDirectory()+File.separator+current.file+"q"; data.writeRawData(outFile); logger.info("Quality factor written into "+outFile); } else { logger.error("Only one node must be selected!"); } } else if(event.getKeyChar()=='d') { if (fineMesh != null) bgView.remove(fineMesh); if (decMesh != null) bgView.remove(decMesh); Mesh amesh = Storage.loadNodes(oemm, octree.getResultSet(), true); HashMap opts = new HashMap(); opts.put("maxtriangles", Integer.toString(amesh.getTriangles().size() / 100)); new org.jcae.mesh.amibe.algos3d.DecimateHalfEdge(amesh, opts).compute(); String xmlDir = "dec-tmp"; String xmlFile = "jcae3d"; MeshWriter.writeObject3D(amesh, xmlDir, xmlFile, ".", "tmp.brep"); octree.unselectAll(); try { AmibeProvider ap = new AmibeProvider(new File(xmlDir)); decMesh = new ViewableFE(ap); int [] ids = ap.getDomainIDs(); logger.info("Nr. of triangles: "+((FEDomain)ap.getDomain(ids[0])).getNumberOfTria3()); bgView.add(decMesh); } catch (Exception ex) { ex.printStackTrace(); } } else if(event.getKeyChar()=='a') { showAxis = !showAxis; bgView.setOriginAxisVisible(showAxis); } else if(event.getKeyChar()=='q') System.exit(0); } }); bgView.fitAll(); feFrame.getContentPane().add(bgView); feFrame.setVisible(true); bgView.setOriginAxisVisible(showAxis); } catch(Exception ex) { ex.printStackTrace(); } }
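The fix in this row stops assuming that the first finite-element domain has ID 0: AmibeProvider numbers its domains by group ID, so the code must first call getDomainIDs() and then look the domain up by a returned ID. Below is a self-contained sketch of that lookup pattern, under the assumption that a provider exposes getDomainIDs()/getDomain(int) as in the diff; the DomainProvider interface and String payload are stand-ins invented for illustration, not the real jCAE API.

import java.util.LinkedHashMap;
import java.util.Map;

public class DomainLookupSketch {

    // Stand-in for AmibeProvider's domain accessors.
    interface DomainProvider {
        int[] getDomainIDs();
        String getDomain(int id); // the real API returns an FEDomain
    }

    public static void main(String[] args) {
        // Domain IDs are group numbers and need not start at 0.
        Map<Integer, String> domains = new LinkedHashMap<>();
        domains.put(7, "triangles of group 7");

        DomainProvider ap = new DomainProvider() {
            public int[] getDomainIDs() {
                return domains.keySet().stream().mapToInt(Integer::intValue).toArray();
            }
            public String getDomain(int id) {
                return domains.get(id);
            }
        };

        // Buggy pattern: ap.getDomain(0) returns null here and the caller
        // fails. Fixed pattern: resolve a real ID first.
        int[] ids = ap.getDomainIDs();
        System.out.println(ap.getDomain(ids[0]));
    }
}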
diff --git a/bundles/org.eclipse.rap.examples/src/org/eclipse/rap/examples/Infobox.java b/bundles/org.eclipse.rap.examples/src/org/eclipse/rap/examples/Infobox.java index 19c2278d1..20d88c426 100644 --- a/bundles/org.eclipse.rap.examples/src/org/eclipse/rap/examples/Infobox.java +++ b/bundles/org.eclipse.rap.examples/src/org/eclipse/rap/examples/Infobox.java @@ -1,56 +1,57 @@ /******************************************************************************* * Copyright (c) 2012 EclipseSource and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * EclipseSource - initial API and implementation ******************************************************************************/ package org.eclipse.rap.examples; import org.eclipse.rap.rwt.RWT; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; public class Infobox extends Composite { private final Composite contentComp; public Infobox( Composite parent ) { super( parent, SWT.NONE ); setLayout( ExampleUtil.createGridLayout( 1, false, true, false ) ); setLayoutData( ExampleUtil.createFillData() ); contentComp = createInfoboxContentComposite(); } private Composite createInfoboxContentComposite() { Composite contentComp = new Composite( this, SWT.NONE ); + contentComp.setBackgroundMode( SWT.INHERIT_FORCE ); contentComp.setData( RWT.CUSTOM_VARIANT, "infobox" ); GridLayout layout = ExampleUtil.createGridLayoutWithoutMargin( 1, false ); layout.marginHeight = 35; layout.marginWidth = 35; layout.verticalSpacing = 20; contentComp.setLayout( layout ); contentComp.setLayoutData( ExampleUtil.createHorzFillData() ); return contentComp; } public void addHeading( String text ) { Label label = new Label( contentComp, SWT.NONE ); label.setText( text.replace( "&", "&&" ) ); label.setData( RWT.CUSTOM_VARIANT, "infobox-heading" ); } public void addParagraph( String text ) { Label label = new Label( contentComp, SWT.WRAP ); label.setText( text ); label.setLayoutData( ExampleUtil.createFillData() ); label.setData( RWT.CUSTOM_VARIANT, "infobox" ); } }
is_single_chunk: true
is_single_function: true
private Composite createInfoboxContentComposite() { Composite contentComp = new Composite( this, SWT.NONE ); contentComp.setData( RWT.CUSTOM_VARIANT, "infobox" ); GridLayout layout = ExampleUtil.createGridLayoutWithoutMargin( 1, false ); layout.marginHeight = 35; layout.marginWidth = 35; layout.verticalSpacing = 20; contentComp.setLayout( layout ); contentComp.setLayoutData( ExampleUtil.createHorzFillData() ); return contentComp; }
private Composite createInfoboxContentComposite() { Composite contentComp = new Composite( this, SWT.NONE ); contentComp.setBackgroundMode( SWT.INHERIT_FORCE ); contentComp.setData( RWT.CUSTOM_VARIANT, "infobox" ); GridLayout layout = ExampleUtil.createGridLayoutWithoutMargin( 1, false ); layout.marginHeight = 35; layout.marginWidth = 35; layout.verticalSpacing = 20; contentComp.setLayout( layout ); contentComp.setLayoutData( ExampleUtil.createHorzFillData() ); return contentComp; }
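The one-line fix gives the infobox composite SWT.INHERIT_FORCE as its background mode, so the Label children created by addHeading() and addParagraph() are drawn with the composite's themed background instead of painting their own default one. Below is a runnable desktop-SWT sketch of the same mechanism, using a plain system colour in place of the RAP custom-variant theming; it assumes the SWT library is on the classpath.

import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;

public class BackgroundModeSketch {
    public static void main(String[] args) {
        Display display = new Display();
        Shell shell = new Shell(display);
        shell.setLayout(new FillLayout());

        Composite box = new Composite(shell, SWT.NONE);
        box.setLayout(new FillLayout());
        box.setBackground(display.getSystemColor(SWT.COLOR_DARK_BLUE));
        // Without this call each Label paints its own default background and
        // the composite colour shows only around the children.
        box.setBackgroundMode(SWT.INHERIT_FORCE);

        Label label = new Label(box, SWT.WRAP);
        label.setText("This label inherits the composite background.");

        shell.setSize(320, 120);
        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch()) display.sleep();
        }
        display.dispose();
    }
}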
diff --git a/src/main/java/hudson/scm/SubversionSCM.java b/src/main/java/hudson/scm/SubversionSCM.java index 6d9e944..6af4c77 100644 --- a/src/main/java/hudson/scm/SubversionSCM.java +++ b/src/main/java/hudson/scm/SubversionSCM.java @@ -1,2117 +1,2121 @@ /* * The MIT License * * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Fulvio Cavarretta, * Jean-Baptiste Quenot, Luca Domenico Milanesio, Renaud Bruyeron, Stephen Connolly, * Tom Huybrechts, Yahoo! Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.scm; import com.thoughtworks.xstream.XStream; import com.trilead.ssh2.DebugLogger; import com.trilead.ssh2.SCPClient; import hudson.FilePath; import hudson.FilePath.FileCallable; import hudson.Launcher; import hudson.Util; import hudson.XmlFile; import hudson.Functions; import hudson.Extension; import hudson.security.csrf.CrumbIssuer; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.BuildListener; import hudson.model.Hudson; import hudson.model.ModelObject; import hudson.model.ParametersAction; import hudson.model.TaskListener; import hudson.model.Run; import hudson.model.Node; import hudson.model.Computer; import hudson.model.Hudson.MasterComputer; import hudson.remoting.Callable; import hudson.remoting.DelegatingCallable; import hudson.remoting.Channel; import hudson.remoting.VirtualChannel; import hudson.scm.subversion.Messages; import hudson.triggers.SCMTrigger; import hudson.util.EditDistance; import hudson.util.IOException2; import hudson.util.MultipartFormDataParser; import hudson.util.Scrambler; import hudson.util.StreamCopyThread; import hudson.util.XStream2; import hudson.util.FormValidation; import hudson.util.TimeUnit2; import org.apache.commons.fileupload.FileItem; import org.apache.commons.io.FileUtils; import org.apache.commons.beanutils.PropertyUtils; import org.apache.tools.ant.Project; import org.apache.tools.ant.taskdefs.Chmod; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.putty.PuTTYKey; import org.tmatesoft.svn.core.SVNDirEntry; import org.tmatesoft.svn.core.SVNErrorCode; import org.tmatesoft.svn.core.SVNErrorMessage; import org.tmatesoft.svn.core.SVNException; import org.tmatesoft.svn.core.SVNNodeKind; import org.tmatesoft.svn.core.SVNURL; import org.tmatesoft.svn.core.SVNCancelException; import org.tmatesoft.svn.core.ISVNLogEntryHandler; import 
org.tmatesoft.svn.core.SVNLogEntry; import org.tmatesoft.svn.core.SVNProperties; import org.tmatesoft.svn.core.auth.ISVNAuthenticationManager; import org.tmatesoft.svn.core.auth.ISVNAuthenticationProvider; import org.tmatesoft.svn.core.auth.SVNAuthentication; import org.tmatesoft.svn.core.auth.SVNPasswordAuthentication; import org.tmatesoft.svn.core.auth.SVNSSHAuthentication; import org.tmatesoft.svn.core.auth.SVNSSLAuthentication; import org.tmatesoft.svn.core.auth.SVNUserNameAuthentication; import org.tmatesoft.svn.core.internal.io.dav.DAVRepositoryFactory; import org.tmatesoft.svn.core.internal.io.dav.http.DefaultHTTPConnectionFactory; import org.tmatesoft.svn.core.internal.io.fs.FSRepositoryFactory; import org.tmatesoft.svn.core.internal.io.svn.SVNRepositoryFactoryImpl; import org.tmatesoft.svn.core.internal.util.SVNPathUtil; import org.tmatesoft.svn.core.internal.wc.DefaultSVNAuthenticationManager; import org.tmatesoft.svn.core.internal.wc.SVNErrorManager; import org.tmatesoft.svn.core.internal.wc.SVNExternal; import org.tmatesoft.svn.core.internal.wc.admin.SVNAdminAreaFactory; import org.tmatesoft.svn.core.io.SVNCapability; import org.tmatesoft.svn.core.io.SVNRepository; import org.tmatesoft.svn.core.io.SVNRepositoryFactory; import org.tmatesoft.svn.core.wc.SVNClientManager; import org.tmatesoft.svn.core.wc.SVNInfo; import org.tmatesoft.svn.core.wc.SVNRevision; import org.tmatesoft.svn.core.wc.SVNUpdateClient; import org.tmatesoft.svn.core.wc.SVNWCClient; import org.tmatesoft.svn.core.wc.SVNWCUtil; import org.tmatesoft.svn.core.wc.SVNLogClient; import javax.servlet.ServletException; import javax.servlet.http.HttpServletResponse; import javax.xml.transform.stream.StreamResult; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.io.PipedInputStream; import java.io.PipedOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Random; import java.util.Set; import java.util.StringTokenizer; import java.util.UUID; import java.util.Iterator; import java.util.logging.Level; import java.util.logging.Logger; import static java.util.logging.Level.FINE; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import java.lang.reflect.InvocationTargetException; import net.sf.json.JSONObject; /** * Subversion SCM. * * <h2>Plugin Developer Notes</h2> * <p> * Plugins that interact with Subversion can use {@link DescriptorImpl#createAuthenticationProvider()} * so that it can use the credentials (username, password, etc.) that the user entered for Hudson. * See the javadoc of this method for the precautions you need to take if you run Subversion operations * remotely on slaves. * * <h2>Implementation Notes</h2> * <p> * Because this instance refers to some other classes that are not necessarily * Java serializable (like {@link #browser}), remotable {@link FileCallable}s all * need to be declared as static inner classes. 
* * @author Kohsuke Kawaguchi */ public class SubversionSCM extends SCM implements Serializable { /** * the locations field is used to store all configured SVN locations (with * their local and remote part). Direct access to this filed should be * avoided and the getLocations() method should be used instead. This is * needed to make importing of old hudson-configurations possible as * getLocations() will check if the modules field has been set and import * the data. * * @since 1.91 */ private ModuleLocation[] locations = new ModuleLocation[0]; private boolean useUpdate; private final SubversionRepositoryBrowser browser; private String excludedRegions; private String excludedUsers; private String excludedRevprop; // No longer in use but left for serialization compatibility. @Deprecated private String modules; /** * @deprecated as of 1.286 */ public SubversionSCM(String[] remoteLocations, String[] localLocations, boolean useUpdate, SubversionRepositoryBrowser browser) { this(remoteLocations,localLocations, useUpdate, browser, null, null, null); } /** * @deprecated as of 1.311 */ public SubversionSCM(String[] remoteLocations, String[] localLocations, boolean useUpdate, SubversionRepositoryBrowser browser, String excludedRegions) { this(ModuleLocation.parse(remoteLocations,localLocations), useUpdate, browser, excludedRegions, null, null); } /** * @deprecated as of 1.315 */ public SubversionSCM(String[] remoteLocations, String[] localLocations, boolean useUpdate, SubversionRepositoryBrowser browser, String excludedRegions, String excludedUsers, String excludedRevprop) { this(ModuleLocation.parse(remoteLocations,localLocations), useUpdate, browser, excludedRegions, excludedUsers, excludedRevprop); } /** * @deprecated as of 1.315 */ public SubversionSCM(List<ModuleLocation> locations, boolean useUpdate, SubversionRepositoryBrowser browser, String excludedRegions) { this(locations, useUpdate, browser, excludedRegions, null, null); } @DataBoundConstructor public SubversionSCM(List<ModuleLocation> locations, boolean useUpdate, SubversionRepositoryBrowser browser, String excludedRegions, String excludedUsers, String excludedRevprop) { for (Iterator<ModuleLocation> itr = locations.iterator(); itr.hasNext();) { ModuleLocation ml = itr.next(); if(ml.remote==null) itr.remove(); } this.locations = locations.toArray(new ModuleLocation[locations.size()]); this.useUpdate = useUpdate; this.browser = browser; this.excludedRegions = excludedRegions; this.excludedUsers = excludedUsers; this.excludedRevprop = excludedRevprop; } /** * Convenience constructor, especially during testing. */ public SubversionSCM(String svnUrl) { this(svnUrl,"."); } /** * Convenience constructor, especially during testing. */ public SubversionSCM(String svnUrl, String local) { this(new String[]{svnUrl},new String[]{local},true,null,null,null,null); } /** * @deprecated * as of 1.91. Use {@link #getLocations()} instead. */ public String getModules() { return null; } /** * list of all configured svn locations * * @since 1.91 */ public ModuleLocation[] getLocations() { return getLocations(null); } /** * list of all configured svn locations, expanded according to * build parameters values; * * @param build * If non-null, variable expansions are performed against the build parameters. 
* * @since 1.252 */ public ModuleLocation[] getLocations(AbstractBuild<?,?> build) { // check if we've got a old location if (modules != null) { // import the old configuration List<ModuleLocation> oldLocations = new ArrayList<ModuleLocation>(); StringTokenizer tokens = new StringTokenizer(modules); while (tokens.hasMoreTokens()) { // the remote (repository location) // the normalized name is always without the trailing '/' String remoteLoc = Util.removeTrailingSlash(tokens.nextToken()); oldLocations.add(new ModuleLocation(remoteLoc, null)); } locations = oldLocations.toArray(new ModuleLocation[oldLocations.size()]); modules = null; } if(build == null) return locations; ModuleLocation[] outLocations = new ModuleLocation[locations.length]; for (int i = 0; i < outLocations.length; i++) { outLocations[i] = locations[i].getExpandedLocation(build); } return outLocations; } public boolean isUseUpdate() { return useUpdate; } @Override public SubversionRepositoryBrowser getBrowser() { return browser; } public String getExcludedRegions() { return excludedRegions; } public String[] getExcludedRegionsNormalized() { return excludedRegions == null ? null : excludedRegions.split("[\\r\\n]+"); } private Pattern[] getExcludedRegionsPatterns() { String[] excludedRegions = getExcludedRegionsNormalized(); if (excludedRegions != null) { Pattern[] patterns = new Pattern[excludedRegions.length]; int i = 0; for (String excludedRegion : excludedRegions) { patterns[i++] = Pattern.compile(excludedRegion); } return patterns; } return null; } public String getExcludedUsers() { return excludedUsers; } public String[] getExcludedUsersNormalized() { if (excludedUsers == null) { return null; } ArrayList<String> users = new ArrayList<String>(); for (String user : excludedUsers.split("[\\r\\n]+")) { users.add(user.trim()); } return users.toArray(new String[users.size()]); } public String getExcludedRevprop() { return excludedRevprop; } /** * Sets the <tt>SVN_REVISION</tt> environment variable during the build. */ @Override public void buildEnvVars(AbstractBuild build, Map<String, String> env) { super.buildEnvVars(build, env); ModuleLocation[] locations = getLocations(build); try { Map<String,Long> revisions = parseRevisionFile(build); if(locations.length==1) { Long rev = revisions.get(locations[0].remote); if(rev!=null) env.put("SVN_REVISION",rev.toString()); } // it's not clear what to do if there are more than one modules. // if we always return locations[0].remote, it'll be difficult // to change this later (to something more sensible, such as // choosing the "root module" or whatever), so let's not set // anything for now. // besides, one can always use 'svnversion' to obtain the revision more explicitly. } catch (IOException e) { // ignore this error } } /** * Called after checkout/update has finished to compute the changelog. */ private boolean calcChangeLog(AbstractBuild<?,?> build, File changelogFile, BuildListener listener, List<External> externals) throws IOException, InterruptedException { if(build.getPreviousBuild()==null) { // nothing to compare against return createEmptyChangeLog(changelogFile, listener, "log"); } // some users reported that the file gets created with size 0. I suspect // maybe some XSLT engine doesn't close the stream properly. // so let's do it by ourselves to be really sure that the stream gets closed. 
OutputStream os = new BufferedOutputStream(new FileOutputStream(changelogFile)); boolean created; try { created = new SubversionChangeLogBuilder(build, listener, this).run(externals, new StreamResult(os)); } finally { os.close(); } if(!created) createEmptyChangeLog(changelogFile, listener, "log"); return true; } /** * Reads the revision file of the specified build. * * @return * map from {@link SvnInfo#url Subversion URL} to its revision. */ /*package*/ static Map<String,Long> parseRevisionFile(AbstractBuild build) throws IOException { Map<String,Long> revisions = new HashMap<String,Long>(); // module -> revision {// read the revision file of the last build File file = getRevisionFile(build); if(!file.exists()) // nothing to compare against return revisions; BufferedReader br = new BufferedReader(new FileReader(file)); try { String line; while((line=br.readLine())!=null) { int index = line.lastIndexOf('/'); if(index<0) { continue; // invalid line? } try { revisions.put(line.substring(0,index), Long.parseLong(line.substring(index+1))); } catch (NumberFormatException e) { // perhaps a corrupted line. ignore } } } finally { br.close(); } } return revisions; } /** * Parses the file that stores the locations in the workspace where modules loaded by svn:external * is placed. * * <p> * Note that the format of the file has changed in 1.180 from simple text file to XML. * * @return * immutable list. Can be empty but never null. */ /*package*/ static List<External> parseExternalsFile(AbstractProject project) throws IOException { File file = getExternalsFile(project); if(file.exists()) { try { return (List<External>)new XmlFile(External.XSTREAM,file).read(); } catch (IOException e) { // in < 1.180 this file was a text file, so it may fail to parse as XML, // in which case let's just fall back } } return Collections.emptyList(); } /** * Polling can happen on the master and does not require a workspace. */ @Override public boolean requiresWorkspaceForPolling() { return false; } public boolean checkout(AbstractBuild build, Launcher launcher, FilePath workspace, final BuildListener listener, File changelogFile) throws IOException, InterruptedException { List<External> externals = checkout(build,workspace,listener); if(externals==null) return false; // write out the revision file PrintWriter w = new PrintWriter(new FileOutputStream(getRevisionFile(build))); try { Map<String,SvnInfo> revMap = workspace.act(new BuildRevisionMapTask(build, this, listener, externals)); for (Entry<String,SvnInfo> e : revMap.entrySet()) { w.println( e.getKey() +'/'+ e.getValue().revision ); } build.addAction(new SubversionTagAction(build,revMap.values())); } finally { w.close(); } // write out the externals info new XmlFile(External.XSTREAM,getExternalsFile(build.getProject())).write(externals); return calcChangeLog(build, changelogFile, listener, externals); } /** * Performs the checkout or update, depending on the configuration and workspace state. * * <p> * Use canonical path to avoid SVNKit/symlink problem as described in * https://wiki.svnkit.com/SVNKit_FAQ * * @return null * if the operation failed. Otherwise the set of local workspace paths * (relative to the workspace root) that has loaded due to svn:external. 
*/ private List<External> checkout(AbstractBuild build, FilePath workspace, TaskListener listener) throws IOException, InterruptedException { if (repositoryLocationsNoLongerExist(build, listener)) { Run lsb = build.getProject().getLastSuccessfulBuild(); if (lsb != null && build.getNumber()-lsb.getNumber()>10 && build.getTimestamp().getTimeInMillis()-lsb.getTimestamp().getTimeInMillis() > TimeUnit2.DAYS.toMillis(1)) { // Disable this project if the location doesn't exist any more, see issue #763 // but only do so if there was at least some successful build, // to make sure that initial configuration error won't disable the build. see issue #1567 // finally, only disable a build if the failure persists for some time. // see http://www.nabble.com/Should-Hudson-have-an-option-for-a-content-fingerprint--td24022683.html listener.getLogger().println("One or more repository locations do not exist anymore for " + build.getProject().getName() + ", project will be disabled."); build.getProject().makeDisabled(true); return null; } } Boolean isUpdatable = useUpdate && workspace.act(new IsUpdatableTask(build, this, listener)); return workspace.act(new CheckOutTask(build, this, build.getTimestamp().getTime(), isUpdatable, listener)); } /** * Either run "svn co" or "svn up" equivalent. */ private static class CheckOutTask implements FileCallable<List<External>> { private final ISVNAuthenticationProvider authProvider; private final Date timestamp; // true to "svn update", false to "svn checkout". private boolean update; private final TaskListener listener; private final ModuleLocation[] locations; private final RevisionParameterAction revisions; public CheckOutTask(AbstractBuild<?, ?> build, SubversionSCM parent, Date timestamp, boolean update, TaskListener listener) { this.authProvider = parent.getDescriptor().createAuthenticationProvider(); this.timestamp = timestamp; this.update = update; this.listener = listener; this.locations = parent.getLocations(build); revisions = build.getAction(RevisionParameterAction.class); } public List<External> invoke(File ws, VirtualChannel channel) throws IOException { final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNUpdateClient svnuc = manager.getUpdateClient(); final List<External> externals = new ArrayList<External>(); // store discovered externals to here if(update) { for (final ModuleLocation l : locations) { try { listener.getLogger().println("Updating "+ l.remote); File local = new File(ws, l.getLocalDir()); svnuc.setEventHandler(new SubversionUpdateEventHandler(listener.getLogger(), externals,local,l.getLocalDir())); SVNRevision r = getRevision(l); svnuc.doUpdate(local.getCanonicalFile(), r, true); } catch (final SVNException e) { if(e.getErrorMessage().getErrorCode()== SVNErrorCode.WC_LOCKED) { // work space locked. try fresh check out listener.getLogger().println("Workspace appear to be locked, so getting a fresh workspace"); update = false; return invoke(ws,channel); } if(e.getErrorMessage().getErrorCode()== SVNErrorCode.WC_OBSTRUCTED_UPDATE) { // HUDSON-1882. If existence of local files cause an update to fail, // revert to fresh check out listener.getLogger().println(e.getMessage()); // show why this happened. Sometimes this is caused by having a build artifact in the repository. listener.getLogger().println("Updated failed due to local files. 
Getting a fresh workspace"); update = false; return invoke(ws,channel); } e.printStackTrace(listener.error("Failed to update "+l.remote)); // trouble-shooting probe for #591 if(e.getErrorMessage().getErrorCode()== SVNErrorCode.WC_NOT_LOCKED) { listener.getLogger().println("Polled jobs are "+ Hudson.getInstance().getDescriptorByType(SCMTrigger.DescriptorImpl.class).getItemsBeingPolled()); } return null; } } } else { Util.deleteContentsRecursive(ws); // buffer the output by a separate thread so that the update operation // won't be blocked by the remoting of the data PipedOutputStream pos = new PipedOutputStream(); StreamCopyThread sct = new StreamCopyThread("svn log copier", new PipedInputStream(pos), listener.getLogger()); sct.start(); for (final ModuleLocation l : locations) { try { listener.getLogger().println("Checking out "+l.remote); File local = new File(ws, l.getLocalDir()); svnuc.setEventHandler(new SubversionUpdateEventHandler(new PrintStream(pos), externals, local, l.getLocalDir())); svnuc.doCheckout(l.getSVNURL(), local.getCanonicalFile(), SVNRevision.HEAD, getRevision(l), true); } catch (SVNException e) { e.printStackTrace(listener.error("Failed to check out "+l.remote)); return null; } } pos.close(); try { sct.join(); // wait for all data to be piped. } catch (InterruptedException e) { throw new IOException2("interrupted",e); } } try { for (final ModuleLocation l : locations) { SVNDirEntry dir = manager.createRepository(l.getSVNURL(),true).info("/",-1); if(dir!=null) {// I don't think this can ever be null, but be defensive if(dir.getDate()!=null && dir.getDate().after(new Date())) // see http://www.nabble.com/NullPointerException-in-SVN-Checkout-Update-td21609781.html that reported this being null. listener.getLogger().println(Messages.SubversionSCM_ClockOutOfSync()); } } } catch (SVNException e) { LOGGER.log(Level.INFO,"Failed to estimate the remote time stamp",e); } return externals; } finally { manager.dispose(); } } private SVNRevision getRevision(ModuleLocation l) { // for the SVN revision, we will use the first off: // - a @NNN prefix of the SVN url // - a value found in a RevisionParameterAction // - the revision corresponding to the build timestamp SVNRevision r = null; if (revisions != null) { r = revisions.getRevision(l.getURL()); } if (r == null) { r = SVNRevision.create(timestamp); } r = l.getRevision(r); return r; } private static final long serialVersionUID = 1L; } /** * Creates {@link SVNClientManager}. * * <p> * This method must be executed on the slave where svn operations are performed. * * @param authProvider * The value obtained from {@link DescriptorImpl#createAuthenticationProvider()}. * If the operation runs on slaves, * (and properly remoted, if the svn operations run on slaves.) */ public static SVNClientManager createSvnClientManager(ISVNAuthenticationProvider authProvider) { ISVNAuthenticationManager sam = SVNWCUtil.createDefaultAuthenticationManager(); sam.setAuthenticationProvider(authProvider); return SVNClientManager.newInstance(SVNWCUtil.createDefaultOptions(true),sam); } /** * Creates {@link SVNClientManager} for code running on the master. * <p> * CAUTION: this code only works when invoked on master. On slaves, use * {@link #createSvnClientManager(ISVNAuthenticationProvider)} and get {@link ISVNAuthenticationProvider} * from the master via remoting. 
*/ public static SVNClientManager createSvnClientManager() { return createSvnClientManager(Hudson.getInstance().getDescriptorByType(DescriptorImpl.class).createAuthenticationProvider()); } public static final class SvnInfo implements Serializable, Comparable<SvnInfo> { /** * Decoded repository URL. */ public final String url; public final long revision; public SvnInfo(String url, long revision) { this.url = url; this.revision = revision; } public SvnInfo(SVNInfo info) { this( info.getURL().toDecodedString(), info.getCommittedRevision().getNumber() ); } public SVNURL getSVNURL() throws SVNException { return SVNURL.parseURIDecoded(url); } public int compareTo(SvnInfo that) { int r = this.url.compareTo(that.url); if(r!=0) return r; if(this.revision<that.revision) return -1; if(this.revision>that.revision) return +1; return 0; } public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SvnInfo svnInfo = (SvnInfo) o; if (revision != svnInfo.revision) return false; return url.equals(svnInfo.url); } public int hashCode() { int result; result = url.hashCode(); result = 31 * result + (int) (revision ^ (revision >>> 32)); return result; } public String toString() { return String.format("%s (rev.%s)",url,revision); } private static final long serialVersionUID = 1L; } /** * Information about svn:external */ static final class External implements Serializable { /** * Relative path within the workspace where this <tt>svn:exteranls</tt> exist. */ final String path; /** * External SVN URL to be fetched. */ final String url; /** * If the svn:external link is with the -r option, its number. * Otherwise -1 to indicate that the head revision of the external repository should be fetched. */ final long revision; /** * @param modulePath * The root of the current module that svn was checking out when it hits 'ext'. * Since we call svnkit multiple times in general case to check out from multiple locations, * we use this to make the path relative to the entire workspace, not just the particular module. */ External(String modulePath,SVNExternal ext) { this.path = modulePath+'/'+ext.getPath(); this.url = ext.getResolvedURL().toDecodedString(); this.revision = ext.getRevision().getNumber(); } /** * Returns true if this reference is to a fixed revision. */ boolean isRevisionFixed() { return revision!=-1; } private static final long serialVersionUID = 1L; private static final XStream XSTREAM = new XStream2(); static { XSTREAM.alias("external",External.class); } } /** * Gets the SVN metadata for the given local workspace. * * @param workspace * The target to run "svn info". */ private static SVNInfo parseSvnInfo(File workspace, ISVNAuthenticationProvider authProvider) throws SVNException { final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNWCClient svnWc = manager.getWCClient(); return svnWc.doInfo(workspace,SVNRevision.WORKING); } finally { manager.dispose(); } } /** * Gets the SVN metadata for the remote repository. * * @param remoteUrl * The target to run "svn info". */ private static SVNInfo parseSvnInfo(SVNURL remoteUrl, ISVNAuthenticationProvider authProvider) throws SVNException { final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNWCClient svnWc = manager.getWCClient(); return svnWc.doInfo(remoteUrl, SVNRevision.HEAD, SVNRevision.HEAD); } finally { manager.dispose(); } } /** * Checks .svn files in the workspace and finds out revisions of the modules * that the workspace has. 
* * @return * null if the parsing somehow fails. Otherwise a map from the repository URL to revisions. */ private static class BuildRevisionMapTask implements FileCallable<Map<String,SvnInfo>> { private final ISVNAuthenticationProvider authProvider; private final TaskListener listener; private final List<External> externals; private final ModuleLocation[] locations; public BuildRevisionMapTask(AbstractBuild<?, ?> build, SubversionSCM parent, TaskListener listener, List<External> externals) { this.authProvider = parent.getDescriptor().createAuthenticationProvider(); this.listener = listener; this.externals = externals; this.locations = parent.getLocations(build); } public Map<String,SvnInfo> invoke(File ws, VirtualChannel channel) throws IOException { Map<String/*module name*/,SvnInfo> revisions = new HashMap<String,SvnInfo>(); final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNWCClient svnWc = manager.getWCClient(); // invoke the "svn info" for( ModuleLocation module : locations ) { try { SvnInfo info = new SvnInfo(svnWc.doInfo(new File(ws,module.getLocalDir()), SVNRevision.WORKING)); revisions.put(info.url,info); } catch (SVNException e) { e.printStackTrace(listener.error("Failed to parse svn info for "+module.remote)); } } for(External ext : externals){ try { SvnInfo info = new SvnInfo(svnWc.doInfo(new File(ws,ext.path),SVNRevision.WORKING)); revisions.put(info.url,info); } catch (SVNException e) { e.printStackTrace(listener.error("Failed to parse svn info for external "+ext.url+" at "+ext.path)); } } return revisions; } finally { manager.dispose(); } } private static final long serialVersionUID = 1L; } /** * Gets the file that stores the revision. */ public static File getRevisionFile(AbstractBuild build) { return new File(build.getRootDir(),"revision.txt"); } /** * Gets the file that stores the externals. */ private static File getExternalsFile(AbstractProject project) { return new File(project.getRootDir(),"svnexternals.txt"); } /** * Returns true if we can use "svn update" instead of "svn checkout" */ private static class IsUpdatableTask implements FileCallable<Boolean> { private final TaskListener listener; private final ISVNAuthenticationProvider authProvider; private final ModuleLocation[] locations; IsUpdatableTask(AbstractBuild<?, ?> build, SubversionSCM parent,TaskListener listener) { this.authProvider = parent.getDescriptor().createAuthenticationProvider(); this.listener = listener; this.locations = parent.getLocations(build); } public Boolean invoke(File ws, VirtualChannel channel) throws IOException { for (ModuleLocation l : locations) { String moduleName = l.getLocalDir(); File module = new File(ws,moduleName).getCanonicalFile(); // canonicalize to remove ".." and ".". 
See #474 if(!module.exists()) { listener.getLogger().println("Checking out a fresh workspace because "+module+" doesn't exist"); return false; } try { SVNInfo svnkitInfo = parseSvnInfo(module, authProvider); SvnInfo svnInfo = new SvnInfo(svnkitInfo); String url = l.getURL(); if(!svnInfo.url.equals(url)) { listener.getLogger().println("Checking out a fresh workspace because the workspace is not "+url); return false; } } catch (SVNException e) { listener.getLogger().println("Checking out a fresh workspace because Hudson failed to detect the current workspace "+module); e.printStackTrace(listener.error(e.getMessage())); return false; } } return true; } private static final long serialVersionUID = 1L; } public boolean pollChanges(AbstractProject project, Launcher launcher, FilePath workspace, final TaskListener listener) throws IOException, InterruptedException { AbstractBuild lastBuild = (AbstractBuild) project.getLastBuild(); if (lastBuild == null) { listener.getLogger().println( "No existing build. Starting a new one"); return true; } if (repositoryLocationsNoLongerExist(lastBuild, listener)) { // Disable this project, see issue #763 listener.getLogger().println( "One or more repository locations do not exist anymore for " + project + ", project will be disabled."); project.makeDisabled(true); return false; } // current workspace revision final Map<String,Long> wsRev = parseRevisionFile(lastBuild); final List<External> externals = parseExternalsFile(project); - // are the locations checked out in the workspace consistent with the current configuration? - for( ModuleLocation loc : getLocations(lastBuild) ) { - if(!wsRev.containsKey(loc.getURL())) { - listener.getLogger().println("Workspace doesn't contain "+loc.getURL()+". Need a new build"); - return true; - } - } + // First check to see if the lastBuild is still running - if it is, we skip this next section, + // to deal with https://hudson.dev.java.net/issues/show_bug.cgi?id=4270. + if (!lastBuild.isBuilding()) { + // are the locations checked out in the workspace consistent with the current configuration? + for( ModuleLocation loc : getLocations(lastBuild) ) { + if(!wsRev.containsKey(loc.getURL())) { + listener.getLogger().println("Workspace doesn't contain "+loc.getURL()+". Need a new build"); + return true; + } + } + } // determine where to perform polling. prefer the node where the build happened, // in case a cluster is non-uniform. 
see http://www.nabble.com/svn-connection-from-slave-only-td24970587.html VirtualChannel ch=null; Node n = lastBuild.getBuiltOn(); if (n!=null) { Computer c = n.toComputer(); if (c!=null) ch = c.getChannel(); } if (ch==null) ch= MasterComputer.localChannel; // check the corresponding remote revision return ch.call(new DelegatingCallable<Boolean,IOException> () { final ISVNAuthenticationProvider authProvider = getDescriptor().createAuthenticationProvider(); final String globalExcludedRevprop = getDescriptor().getGlobalExcludedRevprop(); public ClassLoader getClassLoader() { return Hudson.getInstance().getPluginManager().uberClassLoader; } public Boolean call() throws IOException { OUTER: for (Map.Entry<String,Long> localInfo : wsRev.entrySet()) { // skip if this is an external reference to a fixed revision String url = localInfo.getKey(); for (External ext : externals) if(ext.url.equals(url) && ext.isRevisionFixed()) continue OUTER; try { final SVNURL decodedURL = SVNURL.parseURIDecoded(url); SvnInfo remoteInfo = new SvnInfo(parseSvnInfo(decodedURL,authProvider)); listener.getLogger().println(Messages.SubversionSCM_pollChanges_remoteRevisionAt(url,remoteInfo.revision)); if(remoteInfo.revision > localInfo.getValue()) { boolean changesFound = true; Pattern[] excludedPatterns = getExcludedRegionsPatterns(); String[] excludedUsers = getExcludedUsersNormalized(); String excludedRevprop = Util.fixEmptyAndTrim(getExcludedRevprop()); if (excludedRevprop == null) { // Fall back to global setting excludedRevprop = globalExcludedRevprop; } if (excludedPatterns != null || excludedUsers != null || excludedRevprop != null) { SVNLogHandler handler = new SVNLogHandler(listener, excludedPatterns, excludedUsers, excludedRevprop); final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNLogClient svnlc = manager.getLogClient(); svnlc.doLog(decodedURL, null, SVNRevision.UNDEFINED, SVNRevision.create(localInfo.getValue() + 1), // get log entries from the local revision + 1 SVNRevision.create(remoteInfo.revision), // to the remote revision false, // Don't stop on copy. true, // Report paths. false, // Don't included merged revisions 0, // Retrieve log entries for unlimited number of revisions. null, // Retrieve all revprops handler); } finally { manager.dispose(); } changesFound = handler.isChangesFound(); } if (changesFound) { listener.getLogger().println(Messages.SubversionSCM_pollChanges_changedFrom(localInfo.getValue())); return true; } } } catch (SVNException e) { e.printStackTrace(listener.error("Failed to check repository revision for "+ url)); } } return false; // no change } }); } private final class SVNLogHandler implements ISVNLogEntryHandler { private boolean changesFound = false; private TaskListener listener; private Pattern[] excludedPatterns; private HashSet<String> excludedUsers; private String excludedRevprop; private SVNLogHandler(TaskListener listener, Pattern[] excludedPatterns, String[] excludedUsers, String excludedRevprop) { this.listener = listener; this.excludedPatterns = excludedPatterns == null ? new Pattern[0] : excludedPatterns; this.excludedUsers = new HashSet<String>(Arrays.asList(excludedUsers == null ? new String[0] : excludedUsers)); this.excludedRevprop = excludedRevprop; } public boolean isChangesFound() { return changesFound; } /** * Handles a log entry passed. * Check for log entries that should be excluded from triggering a build. 
* If an entry is not one that should be excluded, set changesFound to true * * @param logEntry an {@link org.tmatesoft.svn.core.SVNLogEntry} object * that represents per revision information * (committed paths, log message, etc.) * @throws org.tmatesoft.svn.core.SVNException */ public void handleLogEntry(SVNLogEntry logEntry) throws SVNException { if (checkLogEntry(logEntry)) { changesFound = true; } } /** * Checks if the given log entry should be considered for the purposes * of SCM polling. * * @return <code>true</code> if this entry should trigger polling, <code>false</code> otherwise */ private boolean checkLogEntry(SVNLogEntry logEntry) { if (excludedRevprop != null) { // If the entry includes the exclusion revprop, don't count it as a change SVNProperties revprops = logEntry.getRevisionProperties(); if (revprops != null && revprops.containsName(excludedRevprop)) { listener.getLogger().println(Messages.SubversionSCM_pollChanges_ignoredRevision( logEntry.getRevision(), Messages.SubversionSCM_pollChanges_ignoredRevision_revprop(excludedRevprop))); return false; } } String author = logEntry.getAuthor(); if (excludedUsers.contains(author)) { // If the author is an excluded user, don't count this entry as a change listener.getLogger().println(Messages.SubversionSCM_pollChanges_ignoredRevision( logEntry.getRevision(), Messages.SubversionSCM_pollChanges_ignoredRevision_author(author))); return false; } // If there were no changes, don't count this entry as a change Map changedPaths = logEntry.getChangedPaths(); if (changedPaths.isEmpty()) { return false; } // Else, check each changed path List<String> excludedPaths = new ArrayList<String>(); for (String path : (Set<String>)changedPaths.keySet()) { for (Pattern pattern : excludedPatterns) { if (pattern.matcher(path).matches()) { excludedPaths.add(path); break; } } } // If all paths are in an excluded region, don't count this entry as a change if (changedPaths.size() == excludedPaths.size()) { listener.getLogger().println(Messages.SubversionSCM_pollChanges_ignoredRevision( logEntry.getRevision(), Messages.SubversionSCM_pollChanges_ignoredRevision_path(Util.join(excludedPaths, ", ")))); return false; } // Otherwise, a change is a change return true; } } public ChangeLogParser createChangeLogParser() { return new SubversionChangeLogParser(); } public DescriptorImpl getDescriptor() { return (DescriptorImpl)super.getDescriptor(); } public FilePath getModuleRoot(FilePath workspace) { if (getLocations().length > 0) return workspace.child(getLocations()[0].getLocalDir()); return workspace; } public FilePath[] getModuleRoots(FilePath workspace) { final ModuleLocation[] moduleLocations = getLocations(); if (moduleLocations.length > 0) { FilePath[] moduleRoots = new FilePath[moduleLocations.length]; for (int i = 0; i < moduleLocations.length; i++) { moduleRoots[i] = workspace.child(moduleLocations[i].getLocalDir()); } return moduleRoots; } return new FilePath[] { getModuleRoot(workspace) }; } private static String getLastPathComponent(String s) { String[] tokens = s.split("/"); return tokens[tokens.length-1]; // return the last token } @Extension public static class DescriptorImpl extends SCMDescriptor<SubversionSCM> implements ModelObject { /** * SVN authentication realm to its associated credentials.
*/ private final Map<String,Credential> credentials = new Hashtable<String,Credential>(); /** * Stores name of Subversion revision property to globally exclude */ private String globalExcludedRevprop = null; /** * Stores whether a realm supports revision properties */ private final Map<String,Boolean> revPropSupport = new Hashtable<String,Boolean>(); /** * Stores {@link SVNAuthentication} for a single realm. * * <p> * {@link Credential} holds data in a persistence-friendly way, * and it's capable of creating an {@link SVNAuthentication} object, * to be passed to SVNKit. */ public static abstract class Credential implements Serializable { /** * @param kind * One of the constants defined in {@link ISVNAuthenticationManager}, * indicating what subtype of {@link SVNAuthentication} is expected. */ public abstract SVNAuthentication createSVNAuthentication(String kind) throws SVNException; } /** * Username/password based authentication. */ private static final class PasswordCredential extends Credential { private final String userName; private final String password; // scrambled by base64 public PasswordCredential(String userName, String password) { this.userName = userName; this.password = Scrambler.scramble(password); } @Override public SVNAuthentication createSVNAuthentication(String kind) { if(kind.equals(ISVNAuthenticationManager.SSH)) return new SVNSSHAuthentication(userName,Scrambler.descramble(password),-1,false); else return new SVNPasswordAuthentication(userName,Scrambler.descramble(password),false); } } /** * Publickey authentication for Subversion over SSH. */ private static final class SshPublicKeyCredential extends Credential { private final String userName; private final String passphrase; // scrambled by base64 private final String id; /** * @param keyFile * stores SSH private key. The file will be copied. */ public SshPublicKeyCredential(String userName, String passphrase, File keyFile) throws SVNException { this.userName = userName; this.passphrase = Scrambler.scramble(passphrase); Random r = new Random(); StringBuilder buf = new StringBuilder(); for(int i=0;i<16;i++) buf.append(Integer.toHexString(r.nextInt(16))); this.id = buf.toString(); try { FileUtils.copyFile(keyFile,getKeyFile()); } catch (IOException e) { throw new SVNException(SVNErrorMessage.create(SVNErrorCode.AUTHN_CREDS_UNAVAILABLE,"Unable to save private key"),e); } } /** * Gets the location where the private key will be permanently stored. */ private File getKeyFile() { File dir = new File(Hudson.getInstance().getRootDir(),"subversion-credentials"); if(dir.mkdirs()) { // make sure the directory exists. if we created it, try to set the permission to 600 // since this is sensitive information try { Chmod chmod = new Chmod(); chmod.setProject(new Project()); chmod.setFile(dir); chmod.setPerm("600"); chmod.execute(); } catch (Throwable e) { // if we failed to set the permission, that's fine.
LOGGER.log(Level.WARNING, "Failed to set directory permission of "+dir,e); } } return new File(dir,id); } @Override public SVNSSHAuthentication createSVNAuthentication(String kind) throws SVNException { if(kind.equals(ISVNAuthenticationManager.SSH)) { try { Channel channel = Channel.current(); String privateKey; if(channel!=null) { // remote privateKey = channel.call(new Callable<String,IOException>() { public String call() throws IOException { return FileUtils.readFileToString(getKeyFile(),"iso-8859-1"); } }); } else { privateKey = FileUtils.readFileToString(getKeyFile(),"iso-8859-1"); } return new SVNSSHAuthentication(userName, privateKey.toCharArray(), Scrambler.descramble(passphrase),-1,false); } catch (IOException e) { throw new SVNException(SVNErrorMessage.create(SVNErrorCode.AUTHN_CREDS_UNAVAILABLE,"Unable to load private key"),e); } catch (InterruptedException e) { throw new SVNException(SVNErrorMessage.create(SVNErrorCode.AUTHN_CREDS_UNAVAILABLE,"Unable to load private key"),e); } } else return null; // unknown } } /** * SSL client certificate based authentication. */ private static final class SslClientCertificateCredential extends Credential { private final String password; // scrambled by base64 public SslClientCertificateCredential(File certificate, String password) { this.password = Scrambler.scramble(password); } @Override public SVNAuthentication createSVNAuthentication(String kind) { if(kind.equals(ISVNAuthenticationManager.SSL)) return new SVNSSLAuthentication(null,Scrambler.descramble(password),false); else return null; // unexpected authentication type } } /** * Remoting interface that allows remote {@link ISVNAuthenticationProvider} * to read from local {@link DescriptorImpl#credentials}. */ private interface RemotableSVNAuthenticationProvider { Credential getCredential(SVNURL url, String realm); } /** * There's no point in exporting multiple {@link RemotableSVNAuthenticationProviderImpl} instances, * so let's just use one instance. */ private transient final RemotableSVNAuthenticationProviderImpl remotableProvider = new RemotableSVNAuthenticationProviderImpl(); private final class RemotableSVNAuthenticationProviderImpl implements RemotableSVNAuthenticationProvider, Serializable { public Credential getCredential(SVNURL url, String realm) { for (SubversionCredentialProvider p : SubversionCredentialProvider.all()) { Credential c = p.getCredential(url,realm); if(c!=null) { LOGGER.fine(String.format("getCredential(%s)=>%s by %s",realm,c,p)); return c; } } LOGGER.fine(String.format("getCredential(%s)=>%s",realm,credentials.get(realm))); return credentials.get(realm); } /** * When sent to the remote node, send a proxy. */ private Object writeReplace() { return Channel.current().export(RemotableSVNAuthenticationProvider.class, this); } } /** * See {@link DescriptorImpl#createAuthenticationProvider()}. 
*/ private static final class SVNAuthenticationProviderImpl implements ISVNAuthenticationProvider, Serializable { private final RemotableSVNAuthenticationProvider source; public SVNAuthenticationProviderImpl(RemotableSVNAuthenticationProvider source) { this.source = source; } public SVNAuthentication requestClientAuthentication(String kind, SVNURL url, String realm, SVNErrorMessage errorMessage, SVNAuthentication previousAuth, boolean authMayBeStored) { Credential cred = source.getCredential(url,realm); LOGGER.fine(String.format("requestClientAuthentication(%s,%s,%s)=>%s",kind,url,realm,cred)); try { SVNAuthentication auth=null; if(cred!=null) auth = cred.createSVNAuthentication(kind); if(previousAuth!=null && compareSVNAuthentications(auth,previousAuth)) { // See HUDSON-2909 // this comparison is necessary, unlike the original fix of HUDSON-2909, since SVNKit may use // other ISVNAuthenticationProviders and their failed auth might be passed to us. // see HUDSON-3936 LOGGER.fine("Previous authentication attempt failed, so aborting: "+previousAuth); return null; } if(auth==null && ISVNAuthenticationManager.USERNAME.equals(kind)) { // this happens with file:// URL and svn+ssh (in this case this method gets invoked twice.) // The base class does this, too. // user auth shouldn't be null. return new SVNUserNameAuthentication("",false); } return auth; } catch (SVNException e) { LOGGER.log(Level.SEVERE, "Failed to authorize",e); throw new RuntimeException("Failed to authorize",e); } } public int acceptServerAuthentication(SVNURL url, String realm, Object certificate, boolean resultMayBeStored) { return ACCEPTED_TEMPORARY; } private static final long serialVersionUID = 1L; } @Override public SCM newInstance(StaplerRequest staplerRequest, JSONObject jsonObject) throws FormException { return super.newInstance(staplerRequest, jsonObject); } public DescriptorImpl() { super(SubversionRepositoryBrowser.class); load(); } protected DescriptorImpl(Class clazz, Class<? extends RepositoryBrowser> repositoryBrowser) { super(clazz,repositoryBrowser); } public String getDisplayName() { return "Subversion"; } public String getGlobalExcludedRevprop() { return globalExcludedRevprop; } @Override public boolean configure(StaplerRequest req) throws FormException { globalExcludedRevprop = Util.fixEmptyAndTrim( req.getParameter("svn.global_excluded_revprop")); // Save configuration save(); return super.configure(req); } /** * Creates {@link ISVNAuthenticationProvider} backed by {@link #credentials}. * This method must be invoked on the master, but the returned object is remotable. * * <p> * Therefore, to access {@link ISVNAuthenticationProvider}, you need to call this method * on the master, then pass the object to the slave side, then call * {@link SubversionSCM#createSvnClientManager(ISVNAuthenticationProvider)} on the slave. * * @see SubversionSCM#createSvnClientManager(ISVNAuthenticationProvider) */ public ISVNAuthenticationProvider createAuthenticationProvider() { return new SVNAuthenticationProviderImpl(remotableProvider); } /** * Submits the authentication info. 
*/ // TODO: stapler should do multipart/form-data handling public void doPostCredential(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException { Hudson.getInstance().checkPermission(Hudson.ADMINISTER); MultipartFormDataParser parser = new MultipartFormDataParser(req); CrumbIssuer crumbIssuer = Hudson.getInstance().getCrumbIssuer(); if (crumbIssuer!=null && !crumbIssuer.validateCrumb(req, parser)) { rsp.sendError(HttpServletResponse.SC_FORBIDDEN,"No crumb found"); return; } String url = parser.get("url"); String kind = parser.get("kind"); int idx = Arrays.asList("","password","publickey","certificate").indexOf(kind); final String username = parser.get("username"+idx); final String password = parser.get("password"+idx); // SVNKit wants a key in a file final File keyFile; FileItem item=null; if(idx <= 1) { keyFile = null; } else { item = parser.getFileItem(kind.equals("publickey")?"privateKey":"certificate"); keyFile = File.createTempFile("hudson","key"); if(item!=null) { try { item.write(keyFile); } catch (Exception e) { throw new IOException2(e); } if(PuTTYKey.isPuTTYKeyFile(keyFile)) { // TODO: we need a passphrase support LOGGER.info("Converting "+keyFile+" from PuTTY format to OpenSSH format"); new PuTTYKey(keyFile,null).toOpenSSH(keyFile); } } } // we'll record what credential we are trying here. StringWriter log = new StringWriter(); final PrintWriter logWriter = new PrintWriter(log); try { postCredential(url, username, password, keyFile, logWriter); rsp.sendRedirect("credentialOK"); } catch (SVNException e) { logWriter.println("FAILED: "+e.getErrorMessage()); req.setAttribute("message",log.toString()); req.setAttribute("pre",true); req.setAttribute("exception",e); rsp.forward(Hudson.getInstance(),"error",req); } finally { if(keyFile!=null) keyFile.delete(); if(item!=null) item.delete(); } } /** * Submits the authentication info. * * This code is fairly ugly because of the way SVNKit handles credentials. */ public void postCredential(String url, final String username, final String password, final File keyFile, final PrintWriter logWriter) throws SVNException, IOException { SVNRepository repository = null; try { final boolean[] authenticationAttemped = new boolean[1]; final boolean[] authenticationAcknowled = new boolean[1]; // the way it works with SVNKit is that // 1) svnkit calls AuthenticationManager asking for a credential. // this is when we can see the 'realm', which identifies the user domain. // 2) DefaultSVNAuthenticationManager returns the username and password we set below // 3) if the authentication is successful, svnkit calls back acknowledgeAuthentication // (so we store the password info here) repository = SVNRepositoryFactory.create(SVNURL.parseURIDecoded(url)); repository.setTunnelProvider(SVNWCUtil.createDefaultOptions(true)); repository.setAuthenticationManager(new DefaultSVNAuthenticationManager(SVNWCUtil.getDefaultConfigurationDirectory(), true, username, password, keyFile, password) { Credential cred = null; @Override public SVNAuthentication getFirstAuthentication(String kind, String realm, SVNURL url) throws SVNException { authenticationAttemped[0] = true; if (kind.equals(ISVNAuthenticationManager.USERNAME)) // when using svn+ssh, svnkit first asks for ISVNAuthenticationManager.SSH // authentication to connect via SSH, then calls this method one more time // to get the user name. Perhaps svn takes user name on its own, separate // from OS user name? In any case, we need to return the same user name. 
// I don't set the cred field here, so that the 1st credential for ssh // won't get clobbered. return new SVNUserNameAuthentication(username, false); if (kind.equals(ISVNAuthenticationManager.PASSWORD)) { logWriter.println("Passing user name " + username + " and password you entered"); cred = new PasswordCredential(username, password); } if (kind.equals(ISVNAuthenticationManager.SSH)) { if (keyFile == null) { logWriter.println("Passing user name " + username + " and password you entered to SSH"); cred = new PasswordCredential(username, password); } else { logWriter.println("Attempting a public key authentication with username " + username); cred = new SshPublicKeyCredential(username, password, keyFile); } } if (kind.equals(ISVNAuthenticationManager.SSL)) { logWriter.println("Attempting an SSL client certificate authentication"); cred = new SslClientCertificateCredential(keyFile, password); } if (cred == null) { logWriter.println("Unknown authentication method: " + kind); return null; } return cred.createSVNAuthentication(kind); } /** * Getting here means the authentication tried in {@link #getFirstAuthentication(String, String, SVNURL)} * didn't work. */ @Override public SVNAuthentication getNextAuthentication(String kind, String realm, SVNURL url) throws SVNException { SVNErrorManager.authenticationFailed("Authentication failed for " + url, null); return null; } @Override public void acknowledgeAuthentication(boolean accepted, String kind, String realm, SVNErrorMessage errorMessage, SVNAuthentication authentication) throws SVNException { authenticationAcknowled[0] = true; if (accepted) { assert cred != null; credentials.put(realm, cred); save(); } else { logWriter.println("Failed to authenticate: " + errorMessage); if (errorMessage.getCause() != null) errorMessage.getCause().printStackTrace(logWriter); } super.acknowledgeAuthentication(accepted, kind, realm, errorMessage, authentication); } }); repository.testConnection(); if(!authenticationAttemped[0]) { logWriter.println("No authentication was attempted."); throw new SVNCancelException(); } if (!authenticationAcknowled[0]) { logWriter.println("Authentication was not acknowledged."); throw new SVNCancelException(); } } finally { if (repository != null) repository.closeSession(); } } /** * validate the value for a remote (repository) location. */ public FormValidation doCheckRemote(StaplerRequest req, @QueryParameter String value) { // syntax check first String url = Util.nullify(value); if (url == null) return FormValidation.ok(); // remove unneeded whitespaces url = url.trim(); if(!URL_PATTERN.matcher(url).matches()) return FormValidation.errorWithMarkup("Invalid URL syntax.
See " + "<a href=\"http://svnbook.red-bean.com/en/1.2/svn-book.html#svn.basic.in-action.wc.tbl-1\">this</a> " + "for information about valid URLs."); // Test the connection only if we have admin permission if (!Hudson.getInstance().hasPermission(Hudson.ADMINISTER)) return FormValidation.ok(); try { SVNURL repoURL = SVNURL.parseURIDecoded(url); if (checkRepositoryPath(repoURL)!=SVNNodeKind.NONE) // something exists return FormValidation.ok(); SVNRepository repository = null; try { repository = getRepository(repoURL); long rev = repository.getLatestRevision(); // now go back the tree and find if there's anything that exists String repoPath = getRelativePath(repoURL, repository); String p = repoPath; while(p.length()>0) { p = SVNPathUtil.removeTail(p); if(repository.checkPath(p,rev)==SVNNodeKind.DIR) { // found a matching path List<SVNDirEntry> entries = new ArrayList<SVNDirEntry>(); repository.getDir(p,rev,false,entries); // build up the name list List<String> paths = new ArrayList<String>(); for (SVNDirEntry e : entries) if(e.getKind()==SVNNodeKind.DIR) paths.add(e.getName()); String head = SVNPathUtil.head(repoPath.substring(p.length() + 1)); String candidate = EditDistance.findNearest(head,paths); return FormValidation.error("'%1$s/%2$s' doesn't exist in the repository. Maybe you meant '%1$s/%3$s'?", p, head, candidate); } } return FormValidation.error(repoPath+" doesn't exist in the repository"); } finally { if (repository != null) repository.closeSession(); } } catch (SVNException e) { String message=""; message += "Unable to access "+Util.escape(url)+" : "+Util.escape( e.getErrorMessage().getFullMessage()); message += " <a href='#' id=svnerrorlink onclick='javascript:" + "document.getElementById(\"svnerror\").style.display=\"block\";" + "document.getElementById(\"svnerrorlink\").style.display=\"none\";" + "return false;'>(show details)</a>"; message += "<pre id=svnerror style='display:none'>"+Functions.printThrowable(e)+"</pre>"; message += " (Maybe you need to <a target='_new' href='"+req.getContextPath()+"/scm/SubversionSCM/enterCredential?"+url+"'>enter credential</a>?)"; message += "<br>"; LOGGER.log(Level.INFO, "Failed to access subversion repository "+url,e); return FormValidation.errorWithMarkup(message); } } public SVNNodeKind checkRepositoryPath(SVNURL repoURL) throws SVNException { SVNRepository repository = null; try { repository = getRepository(repoURL); repository.testConnection(); long rev = repository.getLatestRevision(); String repoPath = getRelativePath(repoURL, repository); return repository.checkPath(repoPath, rev); } finally { if (repository != null) repository.closeSession(); } } protected SVNRepository getRepository(SVNURL repoURL) throws SVNException { SVNRepository repository = SVNRepositoryFactory.create(repoURL); ISVNAuthenticationManager sam = SVNWCUtil.createDefaultAuthenticationManager(); sam = new FilterSVNAuthenticationManager(sam) { // If there's no time out, the blocking read operation may hang forever, because TCP itself // has no timeout. So always use some time out. If the underlying implementation gives us some // value (which may come from ~/.subversion), honor that, as long as it sets some timeout value. 
@Override public int getReadTimeout(SVNRepository repository) { int r = super.getReadTimeout(repository); if(r<=0) r = DEFAULT_TIMEOUT; return r; } }; sam.setAuthenticationProvider(createAuthenticationProvider()); repository.setAuthenticationManager(sam); return repository; } public static String getRelativePath(SVNURL repoURL, SVNRepository repository) throws SVNException { String repoPath = repoURL.getPath().substring(repository.getRepositoryRoot(false).getPath().length()); if(!repoPath.startsWith("/")) repoPath="/"+repoPath; return repoPath; } /** * validate the value for a local location (local checkout directory). */ public FormValidation doCheckLocal(@QueryParameter String value) throws IOException, ServletException { String v = Util.nullify(value); if (v == null) // local directory is optional so this is ok return FormValidation.ok(); v = v.trim(); // check if an absolute path has been supplied // (the last check with the regex will match windows drives) if (v.startsWith("/") || v.startsWith("\\") || v.startsWith("..") || v.matches("^[A-Za-z]:")) return FormValidation.error("absolute path is not allowed"); // all tests passed so far return FormValidation.ok(); } /** * Validates the excludeRegions Regex */ public FormValidation doCheckExcludedRegions(@QueryParameter String value) throws IOException, ServletException { for (String region : Util.fixNull(value).trim().split("[\\r\\n]+")) try { Pattern.compile(region); } catch (PatternSyntaxException e) { return FormValidation.error("Invalid regular expression. " + e.getMessage()); } return FormValidation.ok(); } private static final Pattern USERNAME_PATTERN = Pattern.compile("\\w+"); /** * Validates the excludeUsers field */ public FormValidation doCheckExcludedUsers(@QueryParameter String value) throws IOException, ServletException { for (String user : Util.fixNull(value).trim().split("[\\r\\n]+")) { user = user.trim(); if ("".equals(user)) { continue; } if (!USERNAME_PATTERN.matcher(user).matches()) { return FormValidation.error("Invalid username: " + user); } } return FormValidation.ok(); } /** * Validates that the remote server supports custom revision properties */ public FormValidation doCheckRevisionPropertiesSupported(@QueryParameter String value) throws IOException, ServletException { String v = Util.fixNull(value).trim(); if (v.length() == 0) return FormValidation.ok(); // Test the connection only if we have admin permission if (!Hudson.getInstance().hasPermission(Hudson.ADMINISTER)) return FormValidation.ok(); try { SVNURL repoURL = SVNURL.parseURIDecoded(v); if (checkRepositoryPath(repoURL)!=SVNNodeKind.NONE) // something exists return FormValidation.ok(); SVNRepository repository = null; try { repository = getRepository(repoURL); if (repository.hasCapability(SVNCapability.LOG_REVPROPS)) return FormValidation.ok(); } finally { if (repository != null) repository.closeSession(); } } catch (SVNException e) { String message=""; message += "Unable to access "+Util.escape(v)+" : "+Util.escape( e.getErrorMessage().getFullMessage()); LOGGER.log(Level.INFO, "Failed to access subversion repository "+v,e); return FormValidation.errorWithMarkup(message); } return FormValidation.warning(Messages.SubversionSCM_excludedRevprop_notSupported(v)); } static { new Initializer(); } } public boolean repositoryLocationsNoLongerExist(AbstractBuild<?,?> build, TaskListener listener) { PrintStream out = listener.getLogger(); for (ModuleLocation l : getLocations(build)) try { if (getDescriptor().checkRepositoryPath(l.getSVNURL()) == SVNNodeKind.NONE) {
out.println("Location '" + l.remote + "' does not exist"); ParametersAction params = build.getAction(ParametersAction.class); if (params != null) { // since this is used to disable projects, be conservative LOGGER.fine("Location could be expanded on build '" + build + "' parameters values:"); return false; } return true; } } catch (SVNException e) { // be conservative, since we are just trying to be helpful in detecting // non existent locations. If we can't detect that, we'll do nothing LOGGER.log(FINE, "Location check failed",e); } return false; } static final Pattern URL_PATTERN = Pattern.compile("(https?|svn(\\+[a-z0-9]+)?|file)://.+"); private static final long serialVersionUID = 1L; // noop, but this forces the initializer to run. public static void init() {} static { new Initializer(); } private static final class Initializer { static { if(Boolean.getBoolean("hudson.spool-svn")) DAVRepositoryFactory.setup(new DefaultHTTPConnectionFactory(null,true,null)); else DAVRepositoryFactory.setup(); // http, https SVNRepositoryFactoryImpl.setup(); // svn, svn+xxx FSRepositoryFactory.setup(); // file // disable the connection pooling, which causes problems like // http://www.nabble.com/SSH-connection-problems-p12028339.html if(System.getProperty("svnkit.ssh2.persistent")==null) System.setProperty("svnkit.ssh2.persistent","false"); // use SVN1.4 compatible workspace by default. SVNAdminAreaFactory.setSelector(new SubversionWorkspaceSelector()); } } /** * small structure to store local and remote (repository) location * information of the repository. As a addition it holds the invalid field * to make failure messages when doing a checkout possible */ public static final class ModuleLocation implements Serializable { /** * Subversion URL to check out. * * This may include "@NNN" at the end to indicate a fixed revision. */ public final String remote; /** * Remembers the user-given value. * Can be null. * * @deprecated * Code should use {@link #getLocalDir()}. This field is only intended for form binding. */ public final String local; /** * Cache of the repository UUID. */ private transient volatile UUID repositoryUUID; private transient volatile SVNURL repositoryRoot; @DataBoundConstructor public ModuleLocation(String remote, String local) { this.remote = Util.removeTrailingSlash(Util.fixNull(remote).trim()); this.local = Util.fixEmptyAndTrim(local); } /** * Local directory to place the file to. * Relative to the workspace root. */ public String getLocalDir() { if(local==null) return getLastPathComponent(remote); return local; } /** * Returns the pure URL portion of {@link #remote} by removing * possible "@NNN" suffix. */ public String getURL() { int idx = remote.lastIndexOf('@'); if(idx>0) { try { String n = remote.substring(idx+1); Long.parseLong(n); return remote.substring(0,idx); } catch (NumberFormatException e) { // not a revision number } } return remote; } /** * Gets {@link #remote} as {@link SVNURL}. */ public SVNURL getSVNURL() throws SVNException { return SVNURL.parseURIEncoded(getURL()); } /** * Repository UUID. Lazy computed and cached. 
*/ public UUID getUUID() throws SVNException { if(repositoryUUID==null || repositoryRoot==null) { synchronized (this) { SVNRepository r = openRepository(); r.testConnection(); // make sure values are fetched repositoryUUID = UUID.fromString(r.getRepositoryUUID(false)); repositoryRoot = r.getRepositoryRoot(false); } } return repositoryUUID; } public SVNRepository openRepository() throws SVNException { return Hudson.getInstance().getDescriptorByType(DescriptorImpl.class).getRepository(getSVNURL()); } public SVNURL getRepositoryRoot() throws SVNException { getUUID(); return repositoryRoot; } /** * Figures out which revision to check out. * * If {@link #remote} is {@code url@rev}, then this method * returns that specific revision. * * @param defaultValue * If "@NNN" portion is not in the URL, this value will be returned. * Normally, this is the SVN revision timestamped at the build date. */ public SVNRevision getRevision(SVNRevision defaultValue) { int idx = remote.lastIndexOf('@'); if(idx>0) { try { String n = remote.substring(idx+1); return SVNRevision.create(Long.parseLong(n)); } catch (NumberFormatException e) { // not a revision number } } return defaultValue; } private String getExpandedRemote(AbstractBuild<?,?> build) { String outRemote = remote; ParametersAction parameters = build.getAction(ParametersAction.class); if (parameters != null) outRemote = parameters.substitute(build, remote); return outRemote; } /** * Expand location value based on Build parametric execution. * * @param build * Build instance for expanding parameters into their values * * @return Output ModuleLocation expanded according to Build parameters * values. */ public ModuleLocation getExpandedLocation(AbstractBuild<?, ?> build) { return new ModuleLocation(getExpandedRemote(build), getLocalDir()); } public String toString() { return remote; } private static final long serialVersionUID = 1L; public static List<ModuleLocation> parse(String[] remoteLocations, String[] localLocations) { List<ModuleLocation> modules = new ArrayList<ModuleLocation>(); if (remoteLocations != null && localLocations != null) { int entries = Math.min(remoteLocations.length, localLocations.length); for (int i = 0; i < entries; i++) { // the remote (repository) location String remoteLoc = Util.nullify(remoteLocations[i]); if (remoteLoc != null) {// null if skipped remoteLoc = Util.removeTrailingSlash(remoteLoc.trim()); modules.add(new ModuleLocation(remoteLoc, Util.nullify(localLocations[i]))); } } } return modules; } } private static final Logger LOGGER = Logger.getLogger(SubversionSCM.class.getName()); /** * Network timeout in milliseconds. * The main point of this is to prevent infinite hang, so it should be a rather long value to avoid * accidental time out problem. */ public static int DEFAULT_TIMEOUT = Integer.getInteger(SubversionSCM.class.getName()+".timeout",3600*1000); /** * Enables trace logging of Ganymed SSH library. * <p> * Intended to be invoked from Groovy console. 
*/ public static void enableSshDebug(Level level) { if(level==null) level= Level.FINEST; // default final Level lv = level; com.trilead.ssh2.log.Logger.enabled=true; com.trilead.ssh2.log.Logger.logger = new DebugLogger() { private final Logger LOGGER = Logger.getLogger(SCPClient.class.getPackage().getName()); public void log(int level, String className, String message) { LOGGER.log(lv,className+' '+message); } }; } /*package*/ static boolean compareSVNAuthentications(SVNAuthentication a1, SVNAuthentication a2) { if (a1==null && a2==null) return true; if (a1==null || a2==null) return false; if (a1.getClass()!=a2.getClass()) return false; try { return describeBean(a1).equals(describeBean(a2)); } catch (IllegalAccessException e) { return false; } catch (InvocationTargetException e) { return false; } catch (NoSuchMethodException e) { return false; } } /** * In preparation for a comparison, char[] needs to be converted to a type that supports value equality. */ private static Map describeBean(Object o) throws InvocationTargetException, NoSuchMethodException, IllegalAccessException { Map<?,?> m = PropertyUtils.describe(o); for (Entry e : m.entrySet()) { Object v = e.getValue(); if (v instanceof char[]) { char[] chars = (char[]) v; e.setValue(new String(chars)); } } return m; } }
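The substantive change in this diff is the guard added around the workspace-consistency check in pollChanges: while the previous build is still running, its revision file may not yet record every configured location, so treating a missing entry as "need a new build" triggers spurious rebuilds (the linked issue 4270). A minimal sketch of the guard pattern, assuming hypothetical Build and Location stand-ins rather than Hudson's real AbstractBuild and ModuleLocation types:

import java.util.List;
import java.util.Map;

// Sketch only; Build and Location are hypothetical stand-ins for
// Hudson's AbstractBuild and ModuleLocation.
class PollingGuardSketch {
    interface Build { boolean isBuilding(); }
    interface Location { String getURL(); }

    // Returns true when polling should trigger a build because the
    // workspace lacks a configured location. While the last build is
    // still running, its revision map may be incomplete, so the check
    // is skipped entirely (the fix shown in the diff above).
    static boolean needsBuildForMissingLocation(Build lastBuild,
            List<Location> locations, Map<String, Long> wsRevisions) {
        if (lastBuild.isBuilding()) {
            return false; // revision file not final yet; decide nothing here
        }
        for (Location loc : locations) {
            if (!wsRevisions.containsKey(loc.getURL())) {
                return true; // workspace doesn't contain this location
            }
        }
        return false;
    }
}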
true
true
public boolean pollChanges(AbstractProject project, Launcher launcher, FilePath workspace, final TaskListener listener) throws IOException, InterruptedException { AbstractBuild lastBuild = (AbstractBuild) project.getLastBuild(); if (lastBuild == null) { listener.getLogger().println( "No existing build. Starting a new one"); return true; } if (repositoryLocationsNoLongerExist(lastBuild, listener)) { // Disable this project, see issue #763 listener.getLogger().println( "One or more repository locations do not exist anymore for " + project + ", project will be disabled."); project.makeDisabled(true); return false; } // current workspace revision final Map<String,Long> wsRev = parseRevisionFile(lastBuild); final List<External> externals = parseExternalsFile(project); // are the locations checked out in the workspace consistent with the current configuration? for( ModuleLocation loc : getLocations(lastBuild) ) { if(!wsRev.containsKey(loc.getURL())) { listener.getLogger().println("Workspace doesn't contain "+loc.getURL()+". Need a new build"); return true; } } // determine where to perform polling. prefer the node where the build happened, // in case a cluster is non-uniform. see http://www.nabble.com/svn-connection-from-slave-only-td24970587.html VirtualChannel ch=null; Node n = lastBuild.getBuiltOn(); if (n!=null) { Computer c = n.toComputer(); if (c!=null) ch = c.getChannel(); } if (ch==null) ch= MasterComputer.localChannel; // check the corresponding remote revision return ch.call(new DelegatingCallable<Boolean,IOException> () { final ISVNAuthenticationProvider authProvider = getDescriptor().createAuthenticationProvider(); final String globalExcludedRevprop = getDescriptor().getGlobalExcludedRevprop(); public ClassLoader getClassLoader() { return Hudson.getInstance().getPluginManager().uberClassLoader; } public Boolean call() throws IOException { OUTER: for (Map.Entry<String,Long> localInfo : wsRev.entrySet()) { // skip if this is an external reference to a fixed revision String url = localInfo.getKey(); for (External ext : externals) if(ext.url.equals(url) && ext.isRevisionFixed()) continue OUTER; try { final SVNURL decodedURL = SVNURL.parseURIDecoded(url); SvnInfo remoteInfo = new SvnInfo(parseSvnInfo(decodedURL,authProvider)); listener.getLogger().println(Messages.SubversionSCM_pollChanges_remoteRevisionAt(url,remoteInfo.revision)); if(remoteInfo.revision > localInfo.getValue()) { boolean changesFound = true; Pattern[] excludedPatterns = getExcludedRegionsPatterns(); String[] excludedUsers = getExcludedUsersNormalized(); String excludedRevprop = Util.fixEmptyAndTrim(getExcludedRevprop()); if (excludedRevprop == null) { // Fall back to global setting excludedRevprop = globalExcludedRevprop; } if (excludedPatterns != null || excludedUsers != null || excludedRevprop != null) { SVNLogHandler handler = new SVNLogHandler(listener, excludedPatterns, excludedUsers, excludedRevprop); final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNLogClient svnlc = manager.getLogClient(); svnlc.doLog(decodedURL, null, SVNRevision.UNDEFINED, SVNRevision.create(localInfo.getValue() + 1), // get log entries from the local revision + 1 SVNRevision.create(remoteInfo.revision), // to the remote revision false, // Don't stop on copy. true, // Report paths. false, // Don't included merged revisions 0, // Retrieve log entries for unlimited number of revisions. 
null, // Retrieve all revprops handler); } finally { manager.dispose(); } changesFound = handler.isChangesFound(); } if (changesFound) { listener.getLogger().println(Messages.SubversionSCM_pollChanges_changedFrom(localInfo.getValue())); return true; } } } catch (SVNException e) { e.printStackTrace(listener.error("Failed to check repository revision for "+ url)); } } return false; // no change } }); }
public boolean pollChanges(AbstractProject project, Launcher launcher, FilePath workspace, final TaskListener listener) throws IOException, InterruptedException { AbstractBuild lastBuild = (AbstractBuild) project.getLastBuild(); if (lastBuild == null) { listener.getLogger().println( "No existing build. Starting a new one"); return true; } if (repositoryLocationsNoLongerExist(lastBuild, listener)) { // Disable this project, see issue #763 listener.getLogger().println( "One or more repository locations do not exist anymore for " + project + ", project will be disabled."); project.makeDisabled(true); return false; } // current workspace revision final Map<String,Long> wsRev = parseRevisionFile(lastBuild); final List<External> externals = parseExternalsFile(project); // First check to see if the lastBuild is still running - if it is, we skip this next section, // to deal with https://hudson.dev.java.net/issues/show_bug.cgi?id=4270. if (!lastBuild.isBuilding()) { // are the locations checked out in the workspace consistent with the current configuration? for( ModuleLocation loc : getLocations(lastBuild) ) { if(!wsRev.containsKey(loc.getURL())) { listener.getLogger().println("Workspace doesn't contain "+loc.getURL()+". Need a new build"); return true; } } } // determine where to perform polling. prefer the node where the build happened, // in case a cluster is non-uniform. see http://www.nabble.com/svn-connection-from-slave-only-td24970587.html VirtualChannel ch=null; Node n = lastBuild.getBuiltOn(); if (n!=null) { Computer c = n.toComputer(); if (c!=null) ch = c.getChannel(); } if (ch==null) ch= MasterComputer.localChannel; // check the corresponding remote revision return ch.call(new DelegatingCallable<Boolean,IOException> () { final ISVNAuthenticationProvider authProvider = getDescriptor().createAuthenticationProvider(); final String globalExcludedRevprop = getDescriptor().getGlobalExcludedRevprop(); public ClassLoader getClassLoader() { return Hudson.getInstance().getPluginManager().uberClassLoader; } public Boolean call() throws IOException { OUTER: for (Map.Entry<String,Long> localInfo : wsRev.entrySet()) { // skip if this is an external reference to a fixed revision String url = localInfo.getKey(); for (External ext : externals) if(ext.url.equals(url) && ext.isRevisionFixed()) continue OUTER; try { final SVNURL decodedURL = SVNURL.parseURIDecoded(url); SvnInfo remoteInfo = new SvnInfo(parseSvnInfo(decodedURL,authProvider)); listener.getLogger().println(Messages.SubversionSCM_pollChanges_remoteRevisionAt(url,remoteInfo.revision)); if(remoteInfo.revision > localInfo.getValue()) { boolean changesFound = true; Pattern[] excludedPatterns = getExcludedRegionsPatterns(); String[] excludedUsers = getExcludedUsersNormalized(); String excludedRevprop = Util.fixEmptyAndTrim(getExcludedRevprop()); if (excludedRevprop == null) { // Fall back to global setting excludedRevprop = globalExcludedRevprop; } if (excludedPatterns != null || excludedUsers != null || excludedRevprop != null) { SVNLogHandler handler = new SVNLogHandler(listener, excludedPatterns, excludedUsers, excludedRevprop); final SVNClientManager manager = createSvnClientManager(authProvider); try { final SVNLogClient svnlc = manager.getLogClient(); svnlc.doLog(decodedURL, null, SVNRevision.UNDEFINED, SVNRevision.create(localInfo.getValue() + 1), // get log entries from the local revision + 1 SVNRevision.create(remoteInfo.revision), // to the remote revision false, // Don't stop on copy. true, // Report paths. 
false, // Don't include merged revisions 0, // Retrieve log entries for unlimited number of revisions. null, // Retrieve all revprops handler); } finally { manager.dispose(); } changesFound = handler.isChangesFound(); } if (changesFound) { listener.getLogger().println(Messages.SubversionSCM_pollChanges_changedFrom(localInfo.getValue())); return true; } } } catch (SVNException e) { e.printStackTrace(listener.error("Failed to check repository revision for "+ url)); } } return false; // no change } }); }
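This row's buggy and fixed functions differ only in that isBuilding() guard; the exclusion filtering both versions delegate to SVNLogHandler is what ultimately decides whether a remote revision counts as a change. A hedged sketch of that decision with illustrative names, not Hudson's API (the real checkLogEntry additionally consults an exclusion revprop and logs through the Messages bundle):

import java.util.Set;
import java.util.regex.Pattern;

// Sketch of the relevance test distilled from checkLogEntry above.
class ExclusionSketch {
    static boolean isRelevantChange(String author, Set<String> changedPaths,
            Set<String> excludedUsers, Pattern[] excludedRegions) {
        if (excludedUsers.contains(author)) return false; // excluded committer
        if (changedPaths.isEmpty()) return false;         // nothing changed
        // Relevant only if at least one path falls outside every excluded region.
        for (String path : changedPaths) {
            boolean excluded = false;
            for (Pattern p : excludedRegions) {
                if (p.matcher(path).matches()) { excluded = true; break; }
            }
            if (!excluded) return true;
        }
        return false; // every changed path matched an excluded region
    }
}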
diff --git a/jbpm-console-ng-human-tasks/jbpm-console-ng-human-tasks-client/src/main/java/org/jbpm/console/ng/ht/client/editors/taskassignments/TaskAssignmentsPopupViewImpl.java b/jbpm-console-ng-human-tasks/jbpm-console-ng-human-tasks-client/src/main/java/org/jbpm/console/ng/ht/client/editors/taskassignments/TaskAssignmentsPopupViewImpl.java index c21473410..bdd77a539 100644 --- a/jbpm-console-ng-human-tasks/jbpm-console-ng-human-tasks-client/src/main/java/org/jbpm/console/ng/ht/client/editors/taskassignments/TaskAssignmentsPopupViewImpl.java +++ b/jbpm-console-ng-human-tasks/jbpm-console-ng-human-tasks-client/src/main/java/org/jbpm/console/ng/ht/client/editors/taskassignments/TaskAssignmentsPopupViewImpl.java @@ -1,134 +1,134 @@ /* * Copyright 2012 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.console.ng.ht.client.editors.taskassignments; import com.github.gwtbootstrap.client.ui.Button; import javax.enterprise.context.Dependent; import javax.enterprise.event.Event; import javax.inject.Inject; import com.github.gwtbootstrap.client.ui.ControlLabel; import com.github.gwtbootstrap.client.ui.Label; import com.github.gwtbootstrap.client.ui.TextBox; import com.github.gwtbootstrap.client.ui.base.UnorderedList; import com.google.gwt.core.client.GWT; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.HTMLPanel; import org.jboss.errai.ui.shared.api.annotations.DataField; import org.jboss.errai.ui.shared.api.annotations.EventHandler; import org.jboss.errai.ui.shared.api.annotations.Templated; import org.jbpm.console.ng.ht.client.i18n.Constants; import org.uberfire.client.mvp.PlaceManager; import org.uberfire.workbench.events.NotificationEvent; @Dependent @Templated(value = "TaskAssignmentsPopupViewImpl.html") public class TaskAssignmentsPopupViewImpl extends Composite implements TaskAssignmentsPopupPresenter.TaskAssignmentsPopupView { private TaskAssignmentsPopupPresenter presenter; @Inject @DataField public Label taskIdText; @Inject @DataField public Label taskNameText; @Inject @DataField public Label userOrGroupLabel; @Inject @DataField public Label usersGroupsControlsLabel; @Inject @DataField public TextBox userOrGroupText; @Inject @DataField public Button delegateButton; @Inject @DataField public Label usersGroupsControlsPanel; @Inject @DataField public ControlLabel detailsAccordionLabel; @Inject private PlaceManager placeManager; @Inject @DataField public UnorderedList navBarUL; @Inject private Event<NotificationEvent> notification; private Constants constants = GWT.create( Constants.class ); @Override public void init( TaskAssignmentsPopupPresenter presenter ) { this.presenter = presenter; userOrGroupLabel.setText(constants.UserOrGroup()); detailsAccordionLabel.add( new HTMLPanel( constants.Details()) ); delegateButton.setText(constants.Delegate()); usersGroupsControlsLabel.setText(constants.Potential_Owners()); - usersGroupsControlsLabel.setStyleName(""); + 
usersGroupsControlsPanel.setStyleName(""); } @EventHandler("delegateButton") public void delegateButton( ClickEvent e ) { presenter.delegateTask(Long.parseLong( getTaskIdText().getText() ), userOrGroupText.getText()); } @Override public Label getUsersGroupsControlsPanel() { return usersGroupsControlsPanel; } @Override public Label getTaskIdText() { return taskIdText; } @Override public Label getTaskNameText() { return taskNameText; } @Override public void displayNotification( String text ) { notification.fire( new NotificationEvent( text ) ); } @Override public UnorderedList getNavBarUL() { return navBarUL; } }
true
true
public void init( TaskAssignmentsPopupPresenter presenter ) {
    this.presenter = presenter;
    userOrGroupLabel.setText(constants.UserOrGroup());
    detailsAccordionLabel.add( new HTMLPanel( constants.Details()) );
    delegateButton.setText(constants.Delegate());
    usersGroupsControlsLabel.setText(constants.Potential_Owners());
    usersGroupsControlsLabel.setStyleName("");
}
public void init( TaskAssignmentsPopupPresenter presenter ) {
    this.presenter = presenter;
    userOrGroupLabel.setText(constants.UserOrGroup());
    detailsAccordionLabel.add( new HTMLPanel( constants.Details()) );
    delegateButton.setText(constants.Delegate());
    usersGroupsControlsLabel.setText(constants.Potential_Owners());
    usersGroupsControlsPanel.setStyleName("");
}
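The jbpm fix above is a one-token change: the style reset was sent to usersGroupsControlsLabel when, per the fixed function, the intended receiver was usersGroupsControlsPanel. A tiny self-contained illustration of this "right call, wrong receiver" bug class, using a plain Java stand-in rather than real GWT widgets:

// Illustrative stand-in for a GWT widget; not the real API.
class WidgetSketch {
    private String styleName = "default";
    void setStyleName(String s) { this.styleName = s; }
    String getStyleName() { return styleName; }

    public static void main(String[] args) {
        WidgetSketch label = new WidgetSketch();
        WidgetSketch panel = new WidgetSketch();
        // Buggy: label.setStyleName("");  -- compiles and runs, but clears the wrong widget
        panel.setStyleName(""); // fixed: the panel was the intended receiver
        System.out.println("label=" + label.getStyleName()
                + " panel=" + panel.getStyleName());
    }
}

Because both receivers expose the same method, the compiler cannot catch the slip; only the rendered UI (or a test asserting on the panel's style) reveals it.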
diff --git a/com.isencia.passerelle.workbench.model/src/main/java/com/isencia/passerelle/workbench/model/launch/ModelRunner.java b/com.isencia.passerelle.workbench.model/src/main/java/com/isencia/passerelle/workbench/model/launch/ModelRunner.java index 274aedc..0300db3 100644 --- a/com.isencia.passerelle.workbench.model/src/main/java/com/isencia/passerelle/workbench/model/launch/ModelRunner.java +++ b/com.isencia.passerelle.workbench.model/src/main/java/com/isencia/passerelle/workbench/model/launch/ModelRunner.java @@ -1,250 +1,250 @@ package com.isencia.passerelle.workbench.model.launch; import java.io.FileReader; import java.io.Reader; import java.util.ArrayList; import java.util.List; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.Platform; import org.eclipse.equinox.app.IApplication; import org.eclipse.equinox.app.IApplicationContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ptolemy.actor.CompositeActor; import ptolemy.actor.Manager; import ptolemy.kernel.util.Workspace; import ptolemy.moml.MoMLParser; import com.isencia.passerelle.core.PasserelleException; import com.isencia.passerelle.domain.cap.Director; import com.isencia.passerelle.ext.ErrorCollector; import com.isencia.passerelle.workbench.model.jmx.RemoteManagerAgent; import com.isencia.passerelle.workbench.model.utils.ModelUtils; import com.isencia.passerelle.workbench.model.utils.SubModelUtils; public class ModelRunner implements IApplication { private static Logger logger = LoggerFactory.getLogger(ModelRunner.class); private static ModelRunner currentInstance; public static ModelRunner getRunningInstance() { return currentInstance; } private Manager manager; /** * */ @Override public Object start(IApplicationContext applicationContextMightBeNull) throws Exception { String model = System.getProperty("model"); runModel(model, "true".equals(System.getProperty("com.isencia.jmx.service.terminate"))); return IApplication.EXIT_OK; } @Override public void stop() { if (manager!=null) { try { manager.stop(); } catch (Throwable ne) { logger.error("Cannot stop manager for model.", ne); } manager = null; } } /** * Sometimes can be called * @param modelPath */ public void runModel(final String modelPath, final boolean doSystemExit) throws Exception { if (!Platform.isRunning()) throw new Exception("ModelRunner is designed to be used with an eclipse application!"); final List<Exception> exceptions = new ArrayList<Exception>(1); final long start = System.currentTimeMillis(); try { //TODO Check that path works when model is run... Edna actors currently use // workspace to get resources. // When run from command line may need to set variable for workspace. String workspacePath = System.getProperty("com.isencia.jmx.service.workspace"); if (workspacePath==null) workspacePath = ResourcesPlugin.getWorkspace().getRoot().getLocation().toOSString(); System.setProperty("eclipse.workspace.home", workspacePath); System.setProperty("be.isencia.home", workspacePath); logger.info("Workspace folder set to: "+workspacePath); Reader reader = null; RemoteManagerAgent modelAgent = null; CompositeActor compositeActor = null; try { currentInstance = this; SubModelUtils.readSubModels(); if( modelPath==null) { throw new IllegalArgumentException("No model specified",null); } else { logger.info("Running model : " + modelPath); // The manager JMX service is used to control the workflow from // the RCP workspace. 
This starts the registry on a port and has two // JMX objects in the registry, one for calling method on the workbench // from actors and one for giving access to controlling the workflow. // If this has been set up the property "com.isencia.jmx.service.port" // will have been set to the free port being used. Otherwise the workflow // service will not be added to the registry. if (System.getProperty("com.isencia.jmx.service.port")!=null) { logger.debug("The jmx port is set to : '"+System.getProperty("com.isencia.jmx.service.port")+"'"); modelAgent = new RemoteManagerAgent(manager); modelAgent.start(); } notifyModelChangeStart(); reader = new FileReader(modelPath); // In debug mode the same model can be run in the // same VM several times. We purge before running for this reason. MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); final Workspace workspace = ModelUtils.getWorkspace(modelPath); final MoMLParser moMLParser = new MoMLParser(workspace); compositeActor = (CompositeActor) moMLParser.parse(null, reader); if (System.getProperty("com.isencia.require.file.source")!=null) { compositeActor.setSource(modelPath); } this.manager = new Manager(compositeActor.workspace(), getUniqueName()); manager.setPersistent(false); // Important for test decks to pass. compositeActor.setManager(manager); // Errors final Director director = (Director)compositeActor.getDirector(); director.addErrorCollector(new ErrorCollector() { @Override public void acceptError(PasserelleException e) { exceptions.add(e); manager.stop(); } }); manager.execute(); // Blocks until done // Well almost if (manager!=null) while (manager.isExitingAfterWrapup()) { logger.info("Waiting for manager to wrap up."); Thread.sleep(100); } } } finally { notifyModelChangeEnd(0); if (modelAgent!=null) { modelAgent.stop(); logger.info("Closed model agent"); } if (reader != null) { reader.close(); logger.info("Closed reader"); } manager = null; currentInstance = null; System.gc(); } } finally { // Required or test decks which run many momls in // one VM will fail horribly. MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); logger.info("End model : "+modelPath); final long end = System.currentTimeMillis(); // Did not like the DateFormat version, there may be something better than this. final long time = end-start; logger.info("Model completed in "+(time/(60*1000))+"m "+((time/1000)%60)+"s "+(time%1000)+"ms"); if (doSystemExit) { // We have to do this in case daemons are started. // We must exit this vm once the model is finished. logger.info("Passerelle shut down."); - System.exit(1); + System.exit(0); } if (!exceptions.isEmpty()) { throw exceptions.get(0); } } } /** * Ensures that the manager that runs with the actors has a unique * name for every run. This is one way that an actor can know which * runner they are dealing with and clear caches if required. 
* * @return */ private String getUniqueName() { return "Model_"+System.currentTimeMillis(); } private void notifyModelChangeStart() { try { final IConfigurationElement[] ele = Platform.getExtensionRegistry().getConfigurationElementsFor("com.isencia.passerelle.engine.model.listener"); for (IConfigurationElement i : ele) { final IModelListener l = (IModelListener)i.createExecutableExtension("modelListener"); l.executionStarted(); } } catch (Exception ne) { logger.error("Cannot notify model listeners"); } } private void notifyModelChangeEnd(final int returnCode) { try { final IConfigurationElement[] ele = Platform.getExtensionRegistry().getConfigurationElementsFor("com.isencia.passerelle.engine.model.listener"); for (IConfigurationElement i : ele) { final IModelListener l = (IModelListener)i.createExecutableExtension("modelListener"); l.executionTerminated(returnCode); } } catch (Exception ne) { logger.error("Cannot notify model listeners"); } } public static void main(String[] args) throws Throwable { String model = null; // The model is specified with argument -model moml_file if( args==null) return; for (int i = 0; i < args.length; i++) { if( i>0 && "-model".equals(args[i-1])) { model = args[i]; break; } } final ModelRunner runner = new ModelRunner(); runner.runModel(model, true); } }
true
true
public void runModel(final String modelPath, final boolean doSystemExit) throws Exception { if (!Platform.isRunning()) throw new Exception("ModelRunner is designed to be used with an eclipse application!"); final List<Exception> exceptions = new ArrayList<Exception>(1); final long start = System.currentTimeMillis(); try { //TODO Check that path works when model is run... Edna actors currently use // workspace to get resources. // When run from command line may need to set variable for workspace. String workspacePath = System.getProperty("com.isencia.jmx.service.workspace"); if (workspacePath==null) workspacePath = ResourcesPlugin.getWorkspace().getRoot().getLocation().toOSString(); System.setProperty("eclipse.workspace.home", workspacePath); System.setProperty("be.isencia.home", workspacePath); logger.info("Workspace folder set to: "+workspacePath); Reader reader = null; RemoteManagerAgent modelAgent = null; CompositeActor compositeActor = null; try { currentInstance = this; SubModelUtils.readSubModels(); if( modelPath==null) { throw new IllegalArgumentException("No model specified",null); } else { logger.info("Running model : " + modelPath); // The manager JMX service is used to control the workflow from // the RCP workspace. This starts the registry on a port and has two // JMX objects in the registry, one for calling methods on the workbench // from actors and one for giving access to controlling the workflow. // If this has been set up the property "com.isencia.jmx.service.port" // will have been set to the free port being used. Otherwise the workflow // service will not be added to the registry. if (System.getProperty("com.isencia.jmx.service.port")!=null) { logger.debug("The jmx port is set to : '"+System.getProperty("com.isencia.jmx.service.port")+"'"); modelAgent = new RemoteManagerAgent(manager); modelAgent.start(); } notifyModelChangeStart(); reader = new FileReader(modelPath); // In debug mode the same model can be run in the // same VM several times. We purge before running for this reason. MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); final Workspace workspace = ModelUtils.getWorkspace(modelPath); final MoMLParser moMLParser = new MoMLParser(workspace); compositeActor = (CompositeActor) moMLParser.parse(null, reader); if (System.getProperty("com.isencia.require.file.source")!=null) { compositeActor.setSource(modelPath); } this.manager = new Manager(compositeActor.workspace(), getUniqueName()); manager.setPersistent(false); // Important for test decks to pass. compositeActor.setManager(manager); // Errors final Director director = (Director)compositeActor.getDirector(); director.addErrorCollector(new ErrorCollector() { @Override public void acceptError(PasserelleException e) { exceptions.add(e); manager.stop(); } }); manager.execute(); // Blocks until done // Well almost if (manager!=null) while (manager.isExitingAfterWrapup()) { logger.info("Waiting for manager to wrap up."); Thread.sleep(100); } } } finally { notifyModelChangeEnd(0); if (modelAgent!=null) { modelAgent.stop(); logger.info("Closed model agent"); } if (reader != null) { reader.close(); logger.info("Closed reader"); } manager = null; currentInstance = null; System.gc(); } } finally { // Required or test decks which run many momls in // one VM will fail horribly.
MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); logger.info("End model : "+modelPath); final long end = System.currentTimeMillis(); // Did not like the DateFormat version, there may be something better than this. final long time = end-start; logger.info("Model completed in "+(time/(60*1000))+"m "+((time/1000)%60)+"s "+(time%1000)+"ms"); if (doSystemExit) { // We have to do this in case daemons are started. // We must exit this vm once the model is finished. logger.info("Passerelle shut down."); System.exit(1); } if (!exceptions.isEmpty()) { throw exceptions.get(0); } } }
public void runModel(final String modelPath, final boolean doSystemExit) throws Exception { if (!Platform.isRunning()) throw new Exception("ModelRunner is designed to be used with an eclipse application!"); final List<Exception> exceptions = new ArrayList<Exception>(1); final long start = System.currentTimeMillis(); try { //TODO Check that path works when model is run... Edna actors currently use // workspace to get resources. // When run from command line may need to set variable for workspace. String workspacePath = System.getProperty("com.isencia.jmx.service.workspace"); if (workspacePath==null) workspacePath = ResourcesPlugin.getWorkspace().getRoot().getLocation().toOSString(); System.setProperty("eclipse.workspace.home", workspacePath); System.setProperty("be.isencia.home", workspacePath); logger.info("Workspace folder set to: "+workspacePath); Reader reader = null; RemoteManagerAgent modelAgent = null; CompositeActor compositeActor = null; try { currentInstance = this; SubModelUtils.readSubModels(); if( modelPath==null) { throw new IllegalArgumentException("No model specified",null); } else { logger.info("Running model : " + modelPath); // The manager JMX service is used to control the workflow from // the RCP workspace. This starts the registry on a port and has two // JMX objects in the registry, one for calling methods on the workbench // from actors and one for giving access to controlling the workflow. // If this has been set up the property "com.isencia.jmx.service.port" // will have been set to the free port being used. Otherwise the workflow // service will not be added to the registry. if (System.getProperty("com.isencia.jmx.service.port")!=null) { logger.debug("The jmx port is set to : '"+System.getProperty("com.isencia.jmx.service.port")+"'"); modelAgent = new RemoteManagerAgent(manager); modelAgent.start(); } notifyModelChangeStart(); reader = new FileReader(modelPath); // In debug mode the same model can be run in the // same VM several times. We purge before running for this reason. MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); final Workspace workspace = ModelUtils.getWorkspace(modelPath); final MoMLParser moMLParser = new MoMLParser(workspace); compositeActor = (CompositeActor) moMLParser.parse(null, reader); if (System.getProperty("com.isencia.require.file.source")!=null) { compositeActor.setSource(modelPath); } this.manager = new Manager(compositeActor.workspace(), getUniqueName()); manager.setPersistent(false); // Important for test decks to pass. compositeActor.setManager(manager); // Errors final Director director = (Director)compositeActor.getDirector(); director.addErrorCollector(new ErrorCollector() { @Override public void acceptError(PasserelleException e) { exceptions.add(e); manager.stop(); } }); manager.execute(); // Blocks until done // Well almost if (manager!=null) while (manager.isExitingAfterWrapup()) { logger.info("Waiting for manager to wrap up."); Thread.sleep(100); } } } finally { notifyModelChangeEnd(0); if (modelAgent!=null) { modelAgent.stop(); logger.info("Closed model agent"); } if (reader != null) { reader.close(); logger.info("Closed reader"); } manager = null; currentInstance = null; System.gc(); } } finally { // Required or test decks which run many momls in // one VM will fail horribly.
MoMLParser.purgeModelRecord(modelPath); MoMLParser.purgeAllModelRecords(); logger.info("End model : "+modelPath); final long end = System.currentTimeMillis(); // Did not like the DateFormat version, there may be something better than this. final long time = end-start; logger.info("Model completed in "+(time/(60*1000))+"m "+((time/1000)%60)+"s "+(time%1000)+"ms"); if (doSystemExit) { // We have to do this in case daemons are started. // We must exit this vm once the model is finished. logger.info("Passerelle shut down."); System.exit(0); } if (!exceptions.isEmpty()) { throw exceptions.get(0); } } }
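Note on the row above: the one-line fix swaps System.exit(1) for System.exit(0) on the successful-shutdown path, restoring the convention that a zero exit status means success and any non-zero value means failure. The standalone sketch below (hypothetical, not part of either project) shows how a caller observes that status; a wrapper script, CI job, or ProcessBuilder around ModelRunner performs exactly this check, so exiting with 1 after a clean run would be reported as a failure.
import java.io.IOException;
public class ExitStatusDemo {
    public static void main(String[] args) throws IOException, InterruptedException {
        // Launch any child JVM; "java -version" exits with status 0 on success.
        Process child = new ProcessBuilder("java", "-version").inheritIO().start();
        int status = child.waitFor();
        // This is the test every shell, CI job, or ProcessBuilder caller makes.
        System.out.println(status == 0 ? "child succeeded" : "child failed: " + status);
    }
}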
diff --git a/java/engine/org/apache/derby/iapi/store/access/GlobalXact.java b/java/engine/org/apache/derby/iapi/store/access/GlobalXact.java index 7ba5f18fb..39977c125 100644 --- a/java/engine/org/apache/derby/iapi/store/access/GlobalXact.java +++ b/java/engine/org/apache/derby/iapi/store/access/GlobalXact.java @@ -1,131 +1,139 @@ /* Derby - Class org.apache.derby.iapi.store.access.GlobalXact Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.iapi.store.access; /** This abstract class represents a global transaction id which can be tested for equality against other transaction ids, which can be hashed into a hash table, and which can be output as a string. <P> This class has 2 direct subclasses. <UL> <LI> org.apache.derby.iapi.store.access.xa.XAXactId : this class is a specific implementation of the JTA Xid interface <LI> org.apache.derby.impl.store.access.GlobalXactId : this class represents internal Derby transaction ids </UL> <P> The main reason for this class is to ensure that equality etc. works in a consistent way across both subclasses. **/ public abstract class GlobalXact { /************************************************************************** * Protected Fields of the class ************************************************************************** */ protected int format_id; protected byte[] global_id; protected byte[] branch_id; public boolean equals(Object other) { if (other == this) return true; if (other instanceof GlobalXact) { GlobalXact other_xact = (GlobalXact) other; return( java.util.Arrays.equals( other_xact.global_id, this.global_id) && java.util.Arrays.equals( other_xact.branch_id, this.branch_id) && other_xact.format_id == this.format_id); } return false; } public String toString() { String globalhex = ""; String branchhex = ""; if (global_id != null) { int mask = 0; for (int i = 0; i < global_id.length; i++) { mask = (global_id[i] & 0xFF); - globalhex += Integer.toHexString(mask); + if (mask < 16) { + globalhex += "0" + Integer.toHexString(mask); + } else { + globalhex += Integer.toHexString(mask); + } } } if (branch_id != null) { int mask = 0; for (int i = 0; i < branch_id.length; i++) { mask = (branch_id[i] & 0xFF); - branchhex += Integer.toHexString(mask); + if (mask < 16) { + branchhex += "0" + Integer.toHexString(mask); + } else { + branchhex += Integer.toHexString(mask); + } } } return("(" + format_id + "," + globalhex + "," + branchhex + ")"); } /** Provide a hashCode which is compatible with the equals() method. @see java.lang.Object#hashCode **/ public int hashCode() { // make sure hash does not overflow int, the only unknown is // format_id. Lop off top bits.
int hash = global_id.length + branch_id.length + (format_id & 0xFFFFFFF); for (int i = 0; i < global_id.length; i++) { hash += global_id[i]; } for (int i = 0; i < branch_id.length; i++) { hash += branch_id[i]; } return(hash); } }
false
true
public String toString() { String globalhex = ""; String branchhex = ""; if (global_id != null) { int mask = 0; for (int i = 0; i < global_id.length; i++) { mask = (global_id[i] & 0xFF); globalhex += Integer.toHexString(mask); } } if (branch_id != null) { int mask = 0; for (int i = 0; i < branch_id.length; i++) { mask = (branch_id[i] & 0xFF); branchhex += Integer.toHexString(mask); } } return("(" + format_id + "," + globalhex + "," + branchhex + ")"); }
public String toString() { String globalhex = ""; String branchhex = ""; if (global_id != null) { int mask = 0; for (int i = 0; i < global_id.length; i++) { mask = (global_id[i] & 0xFF); if (mask < 16) { globalhex += "0" + Integer.toHexString(mask); } else { globalhex += Integer.toHexString(mask); } } } if (branch_id != null) { int mask = 0; for (int i = 0; i < branch_id.length; i++) { mask = (branch_id[i] & 0xFF); if (mask < 16) { branchhex += "0" + Integer.toHexString(mask); } else { branchhex += Integer.toHexString(mask); } } } return("(" + format_id + "," + globalhex + "," + branchhex + ")"); }
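Note on the row above: the Derby patch pads each byte to two hex digits because Integer.toHexString drops leading zeros, so two distinct transaction ids could render to the same string. A minimal standalone demonstration follows; String.format("%02x", ...) is used here as an equivalent of the patch's manual "0" prefix, not as the code Derby itself uses.
public class HexPadDemo {
    // Buggy behaviour: each byte contributes one or two digits, so the
    // boundaries between bytes are lost.
    static String unpadded(byte[] bytes) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) sb.append(Integer.toHexString(b & 0xFF));
        return sb.toString();
    }
    // Fixed behaviour: every byte contributes exactly two digits.
    static String padded(byte[] bytes) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) sb.append(String.format("%02x", b & 0xFF));
        return sb.toString();
    }
    public static void main(String[] args) {
        byte[] a = {0x01, 0x23};
        byte[] b = {0x12, 0x03};
        System.out.println(unpadded(a) + " vs " + unpadded(b)); // 123 vs 123 -- collide
        System.out.println(padded(a) + " vs " + padded(b));     // 0123 vs 1203 -- distinct
    }
}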
diff --git a/esmska/src/esmska/gui/ConfigFrame.java b/esmska/src/esmska/gui/ConfigFrame.java index 6e2455b1..147527e9 100644 --- a/esmska/src/esmska/gui/ConfigFrame.java +++ b/esmska/src/esmska/gui/ConfigFrame.java @@ -1,445 +1,445 @@ /* * ConfigFrame.java * * Created on 20. červenec 2007, 18:59 */ package esmska.gui; import com.jgoodies.looks.plastic.PlasticLookAndFeel; import com.jgoodies.looks.plastic.PlasticTheme; import esmska.*; import java.awt.Font; import java.awt.event.KeyEvent; import java.util.ArrayList; import javax.swing.DefaultComboBoxModel; import javax.swing.ImageIcon; import javax.swing.JLabel; import org.jvnet.substance.SubstanceLookAndFeel; import org.jvnet.substance.skin.SkinInfo; import esmska.data.Config; import esmska.persistence.PersistenceManager; /** * * @author ripper */ public class ConfigFrame extends javax.swing.JFrame { private static final String RES = "/esmska/resources/"; private Config config = PersistenceManager.getConfig(); private boolean fullyInicialized; private final String LAF_SYSTEM = "Systémový"; private final String LAF_CROSSPLATFORM = "Meziplatformní"; private final String LAF_JGOODIES = "JGoodies"; private final String LAF_SUBSTANCE = "Substance"; /** Creates new form ConfigFrame */ public ConfigFrame() { initComponents(); useSenderIDCheckBoxActionPerformed(null); tabbedPane.setMnemonicAt(0, KeyEvent.VK_O); tabbedPane.setMnemonicAt(1, KeyEvent.VK_H); tabbedPane.setMnemonicAt(2, KeyEvent.VK_V); tabbedPane.setIconAt(0, new ImageIcon(getClass().getResource(RES + "config-small.png"))); tabbedPane.setIconAt(1, new ImageIcon(getClass().getResource(RES + "appearance-small.png"))); tabbedPane.setIconAt(2, new ImageIcon(getClass().getResource(RES + "operators/Vodafone.png"))); closeButton.requestFocusInWindow(); lafComboBox.setModel(new DefaultComboBoxModel(new String[] { LAF_SYSTEM, LAF_CROSSPLATFORM, LAF_JGOODIES, LAF_SUBSTANCE})); if (config.getLookAndFeel().equals(ThemeManager.LAF_SYSTEM)) lafComboBox.setSelectedItem(LAF_SYSTEM); else if (config.getLookAndFeel().equals(ThemeManager.LAF_CROSSPLATFORM)) lafComboBox.setSelectedItem(LAF_CROSSPLATFORM); else if (config.getLookAndFeel().equals(ThemeManager.LAF_JGOODIES)) lafComboBox.setSelectedItem(LAF_JGOODIES); else if (config.getLookAndFeel().equals(ThemeManager.LAF_SUBSTANCE)) lafComboBox.setSelectedItem(LAF_SUBSTANCE); updateThemeComboBox(); fullyInicialized = true; } private void updateThemeComboBox() { themeComboBox.setEnabled(false); String laf = (String) lafComboBox.getSelectedItem(); if (laf.equals(LAF_JGOODIES)) { ArrayList<String> themes = new ArrayList<String>(); for (Object o : PlasticLookAndFeel.getInstalledThemes()) themes.add(((PlasticTheme)o).getName()); themeComboBox.setModel(new DefaultComboBoxModel(themes.toArray())); themeComboBox.setSelectedItem(config.getLafJGoodiesTheme()); themeComboBox.setEnabled(true); } else if (laf.equals(LAF_SUBSTANCE)) { ArrayList<String> themes = new ArrayList<String>(); new SubstanceLookAndFeel(); for (SkinInfo skinInfo : SubstanceLookAndFeel.getAllSkins().values()) themes.add(skinInfo.getDisplayName()); themeComboBox.setModel(new DefaultComboBoxModel(themes.toArray())); themeComboBox.setSelectedItem(config.getLafSubstanceSkin()); themeComboBox.setEnabled(true); } } /** This method is called from within the constructor to * initialize the form. * WARNING: Do NOT modify this code. The content of this method is * always regenerated by the Form Editor.
*/ // <editor-fold defaultstate="collapsed" desc=" Generated Code ">//GEN-BEGIN:initComponents private void initComponents() { tabbedPane = new javax.swing.JTabbedPane(); jPanel1 = new javax.swing.JPanel(); rememberQueueCheckBox = new javax.swing.JCheckBox(); rememberLayoutCheckBox = new javax.swing.JCheckBox(); jPanel3 = new javax.swing.JPanel(); jLabel4 = new javax.swing.JLabel(); lafComboBox = new javax.swing.JComboBox(); jLabel7 = new javax.swing.JLabel(); themeComboBox = new javax.swing.JComboBox(); jLabel6 = new javax.swing.JLabel(); windowDecorationsCheckBox = new javax.swing.JCheckBox(); jPanel2 = new javax.swing.JPanel(); useSenderIDCheckBox = new javax.swing.JCheckBox(); jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); senderNumberTextField = new javax.swing.JTextField(); jLabel3 = new javax.swing.JLabel(); senderNameTextField = new javax.swing.JTextField(); closeButton = new javax.swing.JButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setTitle("Nastaven\u00ed"); setIconImage(new ImageIcon(getClass().getResource(RES + "esmska.png")).getImage()); addWindowFocusListener(new java.awt.event.WindowFocusListener() { public void windowGainedFocus(java.awt.event.WindowEvent evt) { } public void windowLostFocus(java.awt.event.WindowEvent evt) { formWindowLostFocus(evt); } }); rememberQueueCheckBox.setMnemonic('f'); rememberQueueCheckBox.setSelected(config.isRememberQueue()); rememberQueueCheckBox.setText("Ukl\u00e1dat frontu neodeslan\u00fdch sms"); rememberQueueCheckBox.setToolTipText("<html>\nP\u0159i ukon\u010den\u00ed programu uchov\u00e1v\u00e1 frontu neodeslan\u00fdch sms pro p\u0159\u00ed\u0161t\u00ed spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberQueueCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberQueueCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberQueueCheckBoxActionPerformed(evt); } }); rememberLayoutCheckBox.setMnemonic('r'); rememberLayoutCheckBox.setSelected(config.isRememberLayout()); rememberLayoutCheckBox.setText("Pamatovat rozvr\u017een\u00ed formul\u00e1\u0159e"); rememberLayoutCheckBox.setToolTipText("<html>\nPou\u017eije aktu\u00e1ln\u00ed rozm\u011bry programu a prvk\u016f formul\u00e1\u0159e p\u0159i p\u0159\u00ed\u0161t\u00edm spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberLayoutCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberLayoutCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberLayoutCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(rememberLayoutCheckBox) .addComponent(rememberQueueCheckBox)) .addContainerGap(202, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(rememberLayoutCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(rememberQueueCheckBox) .addContainerGap(187, Short.MAX_VALUE)) ); tabbedPane.addTab("Obecn\u00e9",
jPanel1); jLabel4.setDisplayedMnemonic('v'); jLabel4.setLabelFor(lafComboBox); jLabel4.setText("Vzhled:"); jLabel4.setToolTipText(lafComboBox.getToolTipText()); lafComboBox.setToolTipText("<html>\nUmo\u017en\u00ed v\u00e1m zm\u011bnit vzhled programu.\n</html>"); lafComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { lafComboBoxActionPerformed(evt); } }); jLabel7.setFont(jLabel7.getFont().deriveFont(Font.ITALIC)); jLabel7.setText("Pro projeven\u00ed zm\u011bn je nutn\u00fd restart programu!"); themeComboBox.setToolTipText("<html>\nBarevn\u00e1 sch\u00e9mata pro zvolen\u00fd vzhled.\n</html>"); themeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { themeComboBoxActionPerformed(evt); } }); jLabel6.setDisplayedMnemonic('m'); jLabel6.setLabelFor(themeComboBox); jLabel6.setText("Motiv:"); jLabel6.setToolTipText(themeComboBox.getToolTipText()); windowDecorationsCheckBox.setMnemonic('p'); windowDecorationsCheckBox.setSelected(config.isLafWindowDecorated()); windowDecorationsCheckBox.setText("Pou\u017e\u00edt vzhled i na okraje oken"); windowDecorationsCheckBox.setToolTipText("<html>\nZda m\u00e1 m\u00edsto opera\u010dn\u00edho syst\u00e9mu vykreslovat<br>\nr\u00e1me\u010dky oken zvolen\u00fd vzhled.\n</html>"); windowDecorationsCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); windowDecorationsCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { windowDecorationsCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel4) .addComponent(jLabel6)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addComponent(windowDecorationsCheckBox) .addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, 419, Short.MAX_VALUE)) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {lafComboBox, themeComboBox}); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6)
.addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(windowDecorationsCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 121, Short.MAX_VALUE) .addComponent(jLabel7) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {lafComboBox, themeComboBox}); tabbedPane.addTab("Vzhled", jPanel3); useSenderIDCheckBox.setMnemonic('p'); useSenderIDCheckBox.setSelected(config.isUseSenderID()); useSenderIDCheckBox.setText("P\u0159ipojovat podpis odesilatele"); useSenderIDCheckBox.setToolTipText("<html>P\u0159i p\u0159ipojen\u00ed podpisu p\u0159ijde sms adres\u00e1tovi ze zadan\u00e9ho \u010d\u00edsla<br>\na s dan\u00fdm jm\u00e9nem napsan\u00fdm na konci zpr\u00e1vy.</html>"); useSenderIDCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); useSenderIDCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { useSenderIDCheckBoxActionPerformed(evt); } }); jLabel1.setDisplayedMnemonic('l'); jLabel1.setLabelFor(senderNumberTextField); jLabel1.setText("\u010c\u00edslo"); jLabel2.setText("+420"); senderNumberTextField.setColumns(9); senderNumberTextField.setText((config.getSenderNumber() != null ? - config.getSenderNumber().substring(4) : null)); + config.getSenderNumber().replaceFirst("^\\+420", "") : null)); senderNumberTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNumberTextFieldActionPerformed(evt); } }); jLabel3.setDisplayedMnemonic('m'); jLabel3.setLabelFor(senderNameTextField); jLabel3.setText("Jm\u00e9no"); senderNameTextField.setText(config.getSenderName()); senderNameTextField.setToolTipText("<html>P\u0159i vypln\u011bn\u00ed jm\u00e9na je p\u0159ipojeno na konec zpr\u00e1vy,<br>\ntak\u017ee je sms ve skute\u010dnosti o n\u011bco del\u0161\u00ed.</html>"); senderNameTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNameTextFieldActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(useSenderIDCheckBox) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(17, 17, 17) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel3) .addComponent(jLabel1)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel2) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(senderNameTextField)))) .addContainerGap(215, Short.MAX_VALUE)) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addComponent(useSenderIDCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(jLabel2) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel3) .addComponent(senderNameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(160, Short.MAX_VALUE)) ); tabbedPane.addTab("Vodafone", jPanel2); closeButton.setMnemonic('z'); closeButton.setText("Zav\u0159\u00edt"); closeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { closeButtonActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(closeButton, javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 448, Short.MAX_VALUE)) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 264, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(closeButton) .addContainerGap()) ); pack(); }// </editor-fold>//GEN-END:initComponents private void windowDecorationsCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_windowDecorationsCheckBoxActionPerformed config.setLafWindowDecorated(windowDecorationsCheckBox.isSelected()); }//GEN-LAST:event_windowDecorationsCheckBoxActionPerformed private void themeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_themeComboBoxActionPerformed String laf = (String) lafComboBox.getSelectedItem(); if (laf.equals(LAF_JGOODIES)) config.setLafJGoodiesTheme((String)themeComboBox.getSelectedItem()); else if (laf.equals(LAF_SUBSTANCE)) config.setLafSubstanceSkin((String)themeComboBox.getSelectedItem()); }//GEN-LAST:event_themeComboBoxActionPerformed private void lafComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_lafComboBoxActionPerformed if (!fullyInicialized) return; String laf = (String) lafComboBox.getSelectedItem(); if (laf.equals(LAF_SYSTEM)) config.setLookAndFeel(ThemeManager.LAF_SYSTEM); else if (laf.equals(LAF_CROSSPLATFORM)) config.setLookAndFeel(ThemeManager.LAF_CROSSPLATFORM); else if (laf.equals(LAF_JGOODIES)) config.setLookAndFeel(ThemeManager.LAF_JGOODIES); else if (laf.equals(LAF_SUBSTANCE)) config.setLookAndFeel(ThemeManager.LAF_SUBSTANCE); updateThemeComboBox(); }//GEN-LAST:event_lafComboBoxActionPerformed private void rememberLayoutCheckBoxActionPerformed(java.awt.event.ActionEvent evt)
{//GEN-FIRST:event_rememberLayoutCheckBoxActionPerformed config.setRememberLayout(rememberLayoutCheckBox.isSelected()); }//GEN-LAST:event_rememberLayoutCheckBoxActionPerformed private void formWindowLostFocus(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowLostFocus senderNameTextFieldActionPerformed(null); senderNumberTextFieldActionPerformed(null); }//GEN-LAST:event_formWindowLostFocus private void senderNameTextFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_senderNameTextFieldActionPerformed config.setSenderName(senderNameTextField.getText()); }//GEN-LAST:event_senderNameTextFieldActionPerformed private void senderNumberTextFieldActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_senderNumberTextFieldActionPerformed config.setSenderNumber("+420" + senderNumberTextField.getText()); }//GEN-LAST:event_senderNumberTextFieldActionPerformed private void useSenderIDCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_useSenderIDCheckBoxActionPerformed senderNameTextField.setEnabled(useSenderIDCheckBox.isSelected()); senderNumberTextField.setEnabled(useSenderIDCheckBox.isSelected()); config.setUseSenderID(useSenderIDCheckBox.isSelected()); }//GEN-LAST:event_useSenderIDCheckBoxActionPerformed private void rememberQueueCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rememberQueueCheckBoxActionPerformed config.setRememberQueue(rememberQueueCheckBox.isSelected()); }//GEN-LAST:event_rememberQueueCheckBoxActionPerformed private void closeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_closeButtonActionPerformed this.setVisible(false); this.dispose(); }//GEN-LAST:event_closeButtonActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton closeButton; private javax.swing.JLabel jLabel1; private javax.swing.JLabel jLabel2; private javax.swing.JLabel jLabel3; private javax.swing.JLabel jLabel4; private javax.swing.JLabel jLabel6; private javax.swing.JLabel jLabel7; private javax.swing.JPanel jPanel1; private javax.swing.JPanel jPanel2; private javax.swing.JPanel jPanel3; private javax.swing.JComboBox lafComboBox; private javax.swing.JCheckBox rememberLayoutCheckBox; private javax.swing.JCheckBox rememberQueueCheckBox; private javax.swing.JTextField senderNameTextField; private javax.swing.JTextField senderNumberTextField; private javax.swing.JTabbedPane tabbedPane; private javax.swing.JComboBox themeComboBox; private javax.swing.JCheckBox useSenderIDCheckBox; private javax.swing.JCheckBox windowDecorationsCheckBox; // End of variables declaration//GEN-END:variables }
true
true
private void initComponents() { tabbedPane = new javax.swing.JTabbedPane(); jPanel1 = new javax.swing.JPanel(); rememberQueueCheckBox = new javax.swing.JCheckBox(); rememberLayoutCheckBox = new javax.swing.JCheckBox(); jPanel3 = new javax.swing.JPanel(); jLabel4 = new javax.swing.JLabel(); lafComboBox = new javax.swing.JComboBox(); jLabel7 = new javax.swing.JLabel(); themeComboBox = new javax.swing.JComboBox(); jLabel6 = new javax.swing.JLabel(); windowDecorationsCheckBox = new javax.swing.JCheckBox(); jPanel2 = new javax.swing.JPanel(); useSenderIDCheckBox = new javax.swing.JCheckBox(); jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); senderNumberTextField = new javax.swing.JTextField(); jLabel3 = new javax.swing.JLabel(); senderNameTextField = new javax.swing.JTextField(); closeButton = new javax.swing.JButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setTitle("Nastaven\u00ed"); setIconImage(new ImageIcon(getClass().getResource(RES + "esmska.png")).getImage()); addWindowFocusListener(new java.awt.event.WindowFocusListener() { public void windowGainedFocus(java.awt.event.WindowEvent evt) { } public void windowLostFocus(java.awt.event.WindowEvent evt) { formWindowLostFocus(evt); } }); rememberQueueCheckBox.setMnemonic('f'); rememberQueueCheckBox.setSelected(config.isRememberQueue()); rememberQueueCheckBox.setText("Ukl\u00e1dat frontu neodeslan\u00fdch sms"); rememberQueueCheckBox.setToolTipText("<html>\nP\u0159i ukon\u010den\u00ed programu uchov\u00e1v\u00e1 frontu neodeslan\u00fdch sms pro p\u0159\u00ed\u0161t\u00ed spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberQueueCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberQueueCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberQueueCheckBoxActionPerformed(evt); } }); rememberLayoutCheckBox.setMnemonic('r'); rememberLayoutCheckBox.setSelected(config.isRememberLayout()); rememberLayoutCheckBox.setText("Pamatovat rozvr\u017een\u00ed formul\u00e1\u0159e"); rememberLayoutCheckBox.setToolTipText("<html>\nPou\u017eije aktu\u00e1ln\u00ed rozm\u011bry programu a prvk\u016f formul\u00e1\u0159e p\u0159i p\u0159\u00ed\u0161t\u00edm spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberLayoutCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberLayoutCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberLayoutCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(rememberLayoutCheckBox) .addComponent(rememberQueueCheckBox)) .addContainerGap(202, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(rememberLayoutCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(rememberQueueCheckBox) .addContainerGap(187, Short.MAX_VALUE)) ); tabbedPane.addTab("Obecn\u00e9", jPanel1); jLabel4.setDisplayedMnemonic('v'); jLabel4.setLabelFor(lafComboBox);
jLabel4.setText("Vzhled:"); jLabel4.setToolTipText(lafComboBox.getToolTipText()); lafComboBox.setToolTipText("<html>\nUmo\u017en\u00ed v\u00e1m zm\u011bnit vzhled programu.\n</html>"); lafComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { lafComboBoxActionPerformed(evt); } }); jLabel7.setFont(jLabel7.getFont().deriveFont(Font.ITALIC)); jLabel7.setText("Pro projeven\u00ed zm\u011bn je nutn\u00fd restart programu!"); themeComboBox.setToolTipText("<html>\nBarevn\u00e1 sch\u00e9mata pro zvolen\u00fd vzhled.\n</html>"); themeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { themeComboBoxActionPerformed(evt); } }); jLabel6.setDisplayedMnemonic('m'); jLabel6.setLabelFor(themeComboBox); jLabel6.setText("Motiv:"); jLabel6.setToolTipText(themeComboBox.getToolTipText()); windowDecorationsCheckBox.setMnemonic('p'); windowDecorationsCheckBox.setSelected(config.isLafWindowDecorated()); windowDecorationsCheckBox.setText("Pou\u017e\u00edt vzhled i na okraje oken"); windowDecorationsCheckBox.setToolTipText("<html>\nZda m\u00e1 m\u00edsto opera\u010dn\u00edho syst\u00e9mu vykreslovat<br>\nr\u00e1me\u010dky oken zvolen\u00fd vzhled.\n</html>"); windowDecorationsCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); windowDecorationsCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { windowDecorationsCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel4) .addComponent(jLabel6)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addComponent(windowDecorationsCheckBox) .addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, 419, Short.MAX_VALUE)) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {lafComboBox, themeComboBox}); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE,
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(windowDecorationsCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 121, Short.MAX_VALUE) .addComponent(jLabel7) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {lafComboBox, themeComboBox}); tabbedPane.addTab("Vzhled", jPanel3); useSenderIDCheckBox.setMnemonic('p'); useSenderIDCheckBox.setSelected(config.isUseSenderID()); useSenderIDCheckBox.setText("P\u0159ipojovat podpis odesilatele"); useSenderIDCheckBox.setToolTipText("<html>P\u0159i p\u0159ipojen\u00ed podpisu p\u0159ijde sms adres\u00e1tovi ze zadan\u00e9ho \u010d\u00edsla<br>\na s dan\u00fdm jm\u00e9nem napsan\u00fdm na konci zpr\u00e1vy.</html>"); useSenderIDCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); useSenderIDCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { useSenderIDCheckBoxActionPerformed(evt); } }); jLabel1.setDisplayedMnemonic('l'); jLabel1.setLabelFor(senderNumberTextField); jLabel1.setText("\u010c\u00edslo"); jLabel2.setText("+420"); senderNumberTextField.setColumns(9); senderNumberTextField.setText((config.getSenderNumber() != null ? config.getSenderNumber().substring(4) : null)); senderNumberTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNumberTextFieldActionPerformed(evt); } }); jLabel3.setDisplayedMnemonic('m'); jLabel3.setLabelFor(senderNameTextField); jLabel3.setText("Jm\u00e9no"); senderNameTextField.setText(config.getSenderName()); senderNameTextField.setToolTipText("<html>P\u0159i vypln\u011bn\u00ed jm\u00e9na je p\u0159ipojeno na konec zpr\u00e1vy,<br>\ntak\u017ee je sms ve skute\u010dnosti o n\u011bco del\u0161\u00ed.</html>"); senderNameTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNameTextFieldActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(useSenderIDCheckBox) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(17, 17, 17) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel3) .addComponent(jLabel1)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel2) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(senderNameTextField)))) .addContainerGap(215, Short.MAX_VALUE)) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addComponent(useSenderIDCheckBox)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(jLabel2) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel3) .addComponent(senderNameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(160, Short.MAX_VALUE)) ); tabbedPane.addTab("Vodafone", jPanel2); closeButton.setMnemonic('z'); closeButton.setText("Zav\u0159\u00edt"); closeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { closeButtonActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(closeButton, javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 448, Short.MAX_VALUE)) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 264, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(closeButton) .addContainerGap()) ); pack(); }// </editor-fold>//GEN-END:initComponents
private void initComponents() { tabbedPane = new javax.swing.JTabbedPane(); jPanel1 = new javax.swing.JPanel(); rememberQueueCheckBox = new javax.swing.JCheckBox(); rememberLayoutCheckBox = new javax.swing.JCheckBox(); jPanel3 = new javax.swing.JPanel(); jLabel4 = new javax.swing.JLabel(); lafComboBox = new javax.swing.JComboBox(); jLabel7 = new javax.swing.JLabel(); themeComboBox = new javax.swing.JComboBox(); jLabel6 = new javax.swing.JLabel(); windowDecorationsCheckBox = new javax.swing.JCheckBox(); jPanel2 = new javax.swing.JPanel(); useSenderIDCheckBox = new javax.swing.JCheckBox(); jLabel1 = new javax.swing.JLabel(); jLabel2 = new javax.swing.JLabel(); senderNumberTextField = new javax.swing.JTextField(); jLabel3 = new javax.swing.JLabel(); senderNameTextField = new javax.swing.JTextField(); closeButton = new javax.swing.JButton(); setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE); setTitle("Nastaven\u00ed"); setIconImage(new ImageIcon(getClass().getResource(RES + "esmska.png")).getImage()); addWindowFocusListener(new java.awt.event.WindowFocusListener() { public void windowGainedFocus(java.awt.event.WindowEvent evt) { } public void windowLostFocus(java.awt.event.WindowEvent evt) { formWindowLostFocus(evt); } }); rememberQueueCheckBox.setMnemonic('f'); rememberQueueCheckBox.setSelected(config.isRememberQueue()); rememberQueueCheckBox.setText("Ukl\u00e1dat frontu neodeslan\u00fdch sms"); rememberQueueCheckBox.setToolTipText("<html>\nP\u0159i ukon\u010den\u00ed programu uchov\u00e1v\u00e1 frontu neodeslan\u00fdch sms pro p\u0159\u00ed\u0161t\u00ed spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberQueueCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberQueueCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberQueueCheckBoxActionPerformed(evt); } }); rememberLayoutCheckBox.setMnemonic('r'); rememberLayoutCheckBox.setSelected(config.isRememberLayout()); rememberLayoutCheckBox.setText("Pamatovat rozvr\u017een\u00ed formul\u00e1\u0159e"); rememberLayoutCheckBox.setToolTipText("<html>\nPou\u017eije aktu\u00e1ln\u00ed rozm\u011bry programu a prvk\u016f formul\u00e1\u0159e p\u0159i p\u0159\u00ed\u0161t\u00edm spu\u0161t\u011bn\u00ed programu.\n</html>"); rememberLayoutCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); rememberLayoutCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { rememberLayoutCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(rememberLayoutCheckBox) .addComponent(rememberQueueCheckBox)) .addContainerGap(202, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(rememberLayoutCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(rememberQueueCheckBox) .addContainerGap(187, Short.MAX_VALUE)) ); tabbedPane.addTab("Obecn\u00e9", jPanel1); jLabel4.setDisplayedMnemonic('v'); jLabel4.setLabelFor(lafComboBox);
jLabel4.setText("Vzhled:"); jLabel4.setToolTipText(lafComboBox.getToolTipText()); lafComboBox.setToolTipText("<html>\nUmo\u017en\u00ed v\u00e1m zm\u011bnit vzhled programu.\n</html>"); lafComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { lafComboBoxActionPerformed(evt); } }); jLabel7.setFont(jLabel7.getFont().deriveFont(Font.ITALIC)); jLabel7.setText("Pro projeven\u00ed zm\u011bn je nutn\u00fd restart programu!"); themeComboBox.setToolTipText("<html>\nBarevn\u00e1 sch\u00e9mata pro zvolen\u00fd vzhled.\n</html>"); themeComboBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { themeComboBoxActionPerformed(evt); } }); jLabel6.setDisplayedMnemonic('m'); jLabel6.setLabelFor(themeComboBox); jLabel6.setText("Motiv:"); jLabel6.setToolTipText(themeComboBox.getToolTipText()); windowDecorationsCheckBox.setMnemonic('p'); windowDecorationsCheckBox.setSelected(config.isLafWindowDecorated()); windowDecorationsCheckBox.setText("Pou\u017e\u00edt vzhled i na okraje oken"); windowDecorationsCheckBox.setToolTipText("<html>\nZda m\u00e1 m\u00edsto opera\u010dn\u00edho syst\u00e9mu vykreslovat<br>\nr\u00e1me\u010dky oken zvolen\u00fd vzhled.\n</html>"); windowDecorationsCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); windowDecorationsCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { windowDecorationsCheckBoxActionPerformed(evt); } }); javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel4) .addComponent(jLabel6)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addComponent(windowDecorationsCheckBox) .addComponent(jLabel7, javax.swing.GroupLayout.DEFAULT_SIZE, 419, Short.MAX_VALUE)) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {lafComboBox, themeComboBox}); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(lafComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel6) .addComponent(themeComboBox, javax.swing.GroupLayout.PREFERRED_SIZE,
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(windowDecorationsCheckBox) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 121, Short.MAX_VALUE) .addComponent(jLabel7) .addContainerGap()) ); jPanel3Layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {lafComboBox, themeComboBox}); tabbedPane.addTab("Vzhled", jPanel3); useSenderIDCheckBox.setMnemonic('p'); useSenderIDCheckBox.setSelected(config.isUseSenderID()); useSenderIDCheckBox.setText("P\u0159ipojovat podpis odesilatele"); useSenderIDCheckBox.setToolTipText("<html>P\u0159i p\u0159ipojen\u00ed podpisu p\u0159ijde sms adres\u00e1tovi ze zadan\u00e9ho \u010d\u00edsla<br>\na s dan\u00fdm jm\u00e9nem napsan\u00fdm na konci zpr\u00e1vy.</html>"); useSenderIDCheckBox.setMargin(new java.awt.Insets(0, 0, 0, 0)); useSenderIDCheckBox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { useSenderIDCheckBoxActionPerformed(evt); } }); jLabel1.setDisplayedMnemonic('l'); jLabel1.setLabelFor(senderNumberTextField); jLabel1.setText("\u010c\u00edslo"); jLabel2.setText("+420"); senderNumberTextField.setColumns(9); senderNumberTextField.setText((config.getSenderNumber() != null ? config.getSenderNumber().replaceFirst("^\\+420", "") : null)); senderNumberTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNumberTextFieldActionPerformed(evt); } }); jLabel3.setDisplayedMnemonic('m'); jLabel3.setLabelFor(senderNameTextField); jLabel3.setText("Jm\u00e9no"); senderNameTextField.setText(config.getSenderName()); senderNameTextField.setToolTipText("<html>P\u0159i vypln\u011bn\u00ed jm\u00e9na je p\u0159ipojeno na konec zpr\u00e1vy,<br>\ntak\u017ee je sms ve skute\u010dnosti o n\u011bco del\u0161\u00ed.</html>"); senderNameTextField.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { senderNameTextFieldActionPerformed(evt); } }); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(useSenderIDCheckBox) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(17, 17, 17) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel3) .addComponent(jLabel1)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel2) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addComponent(senderNameTextField)))) .addContainerGap(215, Short.MAX_VALUE)) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addContainerGap() .addComponent(useSenderIDCheckBox)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel1) .addComponent(jLabel2) .addComponent(senderNumberTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel3) .addComponent(senderNameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(160, Short.MAX_VALUE)) ); tabbedPane.addTab("Vodafone", jPanel2); closeButton.setMnemonic('z'); closeButton.setText("Zav\u0159\u00edt"); closeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { closeButtonActionPerformed(evt); } }); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(closeButton, javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 448, Short.MAX_VALUE)) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup() .addContainerGap() .addComponent(tabbedPane, javax.swing.GroupLayout.DEFAULT_SIZE, 264, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(closeButton) .addContainerGap()) ); pack(); }// </editor-fold>//GEN-END:initComponents
diff --git a/juanspdf.java b/juanspdf.java index 3fe6a66..dca538e 100644 --- a/juanspdf.java +++ b/juanspdf.java @@ -1,71 +1,72 @@ import java.util.*; import java.io.*; public class juanspdf { public static void main (String[] args) throws FileNotFoundException { Scanner console = new Scanner(System.in); Scanner input = new Scanner(new File("comptia.txt")); boolean[] bool = new boolean[208]; String[] answers = new String[208]; String[] guesses = new String[208]; int[] number = new int[208]; String a = " "; String b = " "; String c = " "; String d = " "; String e = " "; String answer = ""; - int right = 0; + double right = 0; int count = 0; while(input.hasNextLine()) { String s = input.nextLine(); System.out.println(s); if(s.equals("END")) { String choices = input.nextLine(); a = choices; System.out.println(a); choices = input.nextLine(); b = choices; System.out.println(b); choices = input.nextLine(); c = choices; System.out.println(c); choices = input.nextLine(); if(choices.startsWith("D.")) { d = choices; System.out.println(d); choices = input.nextLine(); } if(choices.startsWith("E.")) { e = choices; System.out.println(e); choices = input.nextLine(); } Scanner data = new Scanner(choices); String idk = data.next(); + idk = data.next(); answer = idk; System.out.println("what is your guess? (if theres more than 1 answer put a comma with no spaces in between)"); String guess = console.next(); if(answer.equals(guess)) { right++; count++; } else { count++; bool[count-1] = true; answers[count-1] = answer; guesses[count-1] = guess; number[count-1] = count; } } } - System.out.println("You got " + right + "right out of 208"); + System.out.println("You got " + right + " right out of 208"); System.out.println("Your total is " + right/208); for(int i = 0; i < 208; i++) { if(bool[i]) { System.out.println("For number " + number[i] + " you guessed " + guesses[i] + " but the answer was " + answers[i]); } } } }
false
true
public static void main (String[] args) throws FileNotFoundException { Scanner console = new Scanner(System.in); Scanner input = new Scanner(new File("comptia.txt")); boolean[] bool = new boolean[208]; String[] answers = new String[208]; String[] guesses = new String[208]; int[] number = new int[208]; String a = " "; String b = " "; String c = " "; String d = " "; String e = " "; String answer = ""; int right = 0; int count = 0; while(input.hasNextLine()) { String s = input.nextLine(); System.out.println(s); if(s.equals("END")) { String choices = input.nextLine(); a = choices; System.out.println(a); choices = input.nextLine(); b = choices; System.out.println(b); choices = input.nextLine(); c = choices; System.out.println(c); choices = input.nextLine(); if(choices.startsWith("D.")) { d = choices; System.out.println(d); choices = input.nextLine(); } if(choices.startsWith("E.")) { e = choices; System.out.println(e); choices = input.nextLine(); } Scanner data = new Scanner(choices); String idk = data.next(); answer = idk; System.out.println("what is your guess? (if theres more than 1 answer put a comma with no spaces in between)"); String guess = console.next(); if(answer.equals(guess)) { right++; count++; } else { count++; bool[count-1] = true; answers[count-1] = answer; guesses[count-1] = guess; number[count-1] = count; } } } System.out.println("You got " + right + "right out of 208"); System.out.println("Your total is " + right/208); for(int i = 0; i < 208; i++) { if(bool[i]) { System.out.println("For number " + number[i] + " you guessed " + guesses[i] + " but the answer was " + answers[i]); } } }
public static void main (String[] args) throws FileNotFoundException { Scanner console = new Scanner(System.in); Scanner input = new Scanner(new File("comptia.txt")); boolean[] bool = new boolean[208]; String[] answers = new String[208]; String[] guesses = new String[208]; int[] number = new int[208]; String a = " "; String b = " "; String c = " "; String d = " "; String e = " "; String answer = ""; double right = 0; int count = 0; while(input.hasNextLine()) { String s = input.nextLine(); System.out.println(s); if(s.equals("END")) { String choices = input.nextLine(); a = choices; System.out.println(a); choices = input.nextLine(); b = choices; System.out.println(b); choices = input.nextLine(); c = choices; System.out.println(c); choices = input.nextLine(); if(choices.startsWith("D.")) { d = choices; System.out.println(d); choices = input.nextLine(); } if(choices.startsWith("E.")) { e = choices; System.out.println(e); choices = input.nextLine(); } Scanner data = new Scanner(choices); String idk = data.next(); idk = data.next(); answer = idk; System.out.println("what is your guess? (if theres more than 1 answer put a comma with no spaces in between)"); String guess = console.next(); if(answer.equals(guess)) { right++; count++; } else { count++; bool[count-1] = true; answers[count-1] = answer; guesses[count-1] = guess; number[count-1] = count; } } } System.out.println("You got " + right + " right out of 208"); System.out.println("Your total is " + right/208); for(int i = 0; i < 208; i++) { if(bool[i]) { System.out.println("For number " + number[i] + " you guessed " + guesses[i] + " but the answer was " + answers[i]); } } }
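The essential change in the record above is the integer-division fix: with right declared as an int, right/208 truncates toward zero, so the printed total was 0 for any score below 208; declaring it double makes the division floating-point. The added idk = data.next(); advances the scanner past the first token so the actual answer token is captured, and the summary string gains its missing space. A minimal, self-contained sketch of the division pitfall (the values are illustrative, not from the record):

    public class DivisionDemo {
        public static void main(String[] args) {
            int rightInt = 150;
            // Integer division truncates toward zero: 150 / 208 == 0
            System.out.println("int total: " + (rightInt / 208));

            double rightDouble = 150;
            // One double operand promotes the division: prints roughly 0.7212
            System.out.println("double total: " + (rightDouble / 208));
        }
    }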
diff --git a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java index a15478039..ff0cf8021 100644 --- a/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java +++ b/spring-batch-core/src/main/java/org/springframework/batch/core/partition/support/TaskExecutorPartitionHandler.java @@ -1,97 +1,93 @@ package org.springframework.batch.core.partition.support; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.FutureTask; import org.springframework.batch.core.BatchStatus; import org.springframework.batch.core.Step; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.partition.PartitionHandler; import org.springframework.batch.core.partition.StepExecutionSplitter; import org.springframework.batch.repeat.ExitStatus; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.task.SyncTaskExecutor; import org.springframework.core.task.TaskExecutor; import org.springframework.core.task.TaskRejectedException; import org.springframework.util.Assert; public class TaskExecutorPartitionHandler implements PartitionHandler, InitializingBean { private int gridSize = 1; private TaskExecutor taskExecutor = new SyncTaskExecutor(); private Step step; public void afterPropertiesSet() throws Exception { Assert.notNull(step, "A Step must be provided."); } public void setGridSize(int gridSize) { this.gridSize = gridSize; } public void setTaskExecutor(TaskExecutor taskExecutor) { this.taskExecutor = taskExecutor; } public void setStep(Step step) { this.step = step; } /** * @see PartitionHandler#handle(StepExecutionSplitter, StepExecution) */ public Collection<StepExecution> handle(StepExecutionSplitter stepExecutionSplitter, StepExecution masterStepExecution) throws Exception { Set<FutureTask<StepExecution>> tasks = new HashSet<FutureTask<StepExecution>>(gridSize); Collection<StepExecution> result = new ArrayList<StepExecution>(); for (final StepExecution stepExecution : stepExecutionSplitter.split(masterStepExecution, gridSize)) { final FutureTask<StepExecution> task = new FutureTask<StepExecution>(new Callable<StepExecution>() { public StepExecution call() throws Exception { step.execute(stepExecution); return stepExecution; } }); try { - taskExecutor.execute(new Runnable() { - public void run() { - task.run(); - } - }); + taskExecutor.execute(task); tasks.add(task); } catch (TaskRejectedException e) { // couldn't execute one of the tasks ExitStatus exitStatus = ExitStatus.FAILED .addExitDescription("TaskExecutor rejected the task for this step."); /* * This stepExecution hasn't been saved yet, but we'll set the * status anyway in case the caller is tracking it through the * JobExecution. */ stepExecution.setStatus(BatchStatus.FAILED); stepExecution.setExitStatus(exitStatus); result.add(stepExecution); } } for (FutureTask<StepExecution> task : tasks) { // TODO: timeout / heart beat result.add(task.get()); } return result; } }
true
true
public Collection<StepExecution> handle(StepExecutionSplitter stepExecutionSplitter, StepExecution masterStepExecution) throws Exception { Set<FutureTask<StepExecution>> tasks = new HashSet<FutureTask<StepExecution>>(gridSize); Collection<StepExecution> result = new ArrayList<StepExecution>(); for (final StepExecution stepExecution : stepExecutionSplitter.split(masterStepExecution, gridSize)) { final FutureTask<StepExecution> task = new FutureTask<StepExecution>(new Callable<StepExecution>() { public StepExecution call() throws Exception { step.execute(stepExecution); return stepExecution; } }); try { taskExecutor.execute(new Runnable() { public void run() { task.run(); } }); tasks.add(task); } catch (TaskRejectedException e) { // couldn't execute one of the tasks ExitStatus exitStatus = ExitStatus.FAILED .addExitDescription("TaskExecutor rejected the task for this step."); /* * This stepExecution hasn't been saved yet, but we'll set the * status anyway in case the caller is tracking it through the * JobExecution. */ stepExecution.setStatus(BatchStatus.FAILED); stepExecution.setExitStatus(exitStatus); result.add(stepExecution); } } for (FutureTask<StepExecution> task : tasks) { // TODO: timeout / heart beat result.add(task.get()); } return result; }
public Collection<StepExecution> handle(StepExecutionSplitter stepExecutionSplitter, StepExecution masterStepExecution) throws Exception { Set<FutureTask<StepExecution>> tasks = new HashSet<FutureTask<StepExecution>>(gridSize); Collection<StepExecution> result = new ArrayList<StepExecution>(); for (final StepExecution stepExecution : stepExecutionSplitter.split(masterStepExecution, gridSize)) { final FutureTask<StepExecution> task = new FutureTask<StepExecution>(new Callable<StepExecution>() { public StepExecution call() throws Exception { step.execute(stepExecution); return stepExecution; } }); try { taskExecutor.execute(task); tasks.add(task); } catch (TaskRejectedException e) { // couldn't execute one of the tasks ExitStatus exitStatus = ExitStatus.FAILED .addExitDescription("TaskExecutor rejected the task for this step."); /* * This stepExecution hasn't been saved yet, but we'll set the * status anyway in case the caller is tracking it through the * JobExecution. */ stepExecution.setStatus(BatchStatus.FAILED); stepExecution.setExitStatus(exitStatus); result.add(stepExecution); } } for (FutureTask<StepExecution> task : tasks) { // TODO: timeout / heart beat result.add(task.get()); } return result; }
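The simplification in this record works because java.util.concurrent.FutureTask already implements Runnable (through RunnableFuture), so the anonymous Runnable wrapper added nothing but indirection. A self-contained sketch of the same call shape against a plain ExecutorService (the executor and task here are illustrative, not Spring Batch types):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.FutureTask;

    public class FutureTaskDemo {
        public static void main(String[] args) throws Exception {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            // FutureTask is both a Runnable (hand it to an executor) and a Future (collect the result)
            FutureTask<String> task = new FutureTask<>(() -> "partition finished");
            executor.execute(task);         // no adapter Runnable needed
            System.out.println(task.get()); // blocks until the task completes
            executor.shutdown();
        }
    }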
diff --git a/src/main/java/com/ctb/pilot/chat/service/ChatServlet.java b/src/main/java/com/ctb/pilot/chat/service/ChatServlet.java index 80e25ba..5ffe48d 100644 --- a/src/main/java/com/ctb/pilot/chat/service/ChatServlet.java +++ b/src/main/java/com/ctb/pilot/chat/service/ChatServlet.java @@ -1,74 +1,74 @@ package com.ctb.pilot.chat.service; import java.io.IOException; import java.util.List; import javax.servlet.RequestDispatcher; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import com.ctb.pilot.chat.dao.MessageDao; import com.ctb.pilot.chat.dao.jdbc.JdbcMessageDao; import com.ctb.pilot.chat.model.Message; import com.ctb.pilot.chat.model.User; public class ChatServlet extends HttpServlet { /** * */ private static final long serialVersionUID = 1L; private MessageDao messageDao = new JdbcMessageDao(); @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestURI = req.getRequestURI(); System.out.println("In doGet(), requestURI: " + requestURI); int rowCount = 100; List<Message> messages = messageDao.getMessagesWithRowCount(rowCount); req.setAttribute("messages", messages); req.setAttribute("maxRowCount", rowCount); String viewUri = "/chat/chat_view.jsp"; // FIXME: Handle a deploy problem temporarily. ServletContext servletContext = req.getServletContext(); String contextPath = servletContext.getContextPath(); System.out.println("contextPath: " + contextPath); - if (contextPath.equals("/")) { + if (!contextPath.equals("/pilot")) { viewUri = "/pilot" + viewUri; } RequestDispatcher dispatcher = req.getRequestDispatcher(viewUri); dispatcher.forward(req, resp); } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestURI = req.getRequestURI(); System.out.println("In doPost(), requestURI: " + requestURI); HttpSession session = req.getSession(); User user = (User) session.getAttribute("user"); int userSequence = user.getSequence(); req.setCharacterEncoding("utf8"); String message = req.getParameter("message"); if (message == null || message.isEmpty()) { throw new ServletException("Message is null or empty."); } messageDao.insertMessage(userSequence, message); resp.sendRedirect("chat"); } }
true
true
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestURI = req.getRequestURI(); System.out.println("In doGet(), requestURI: " + requestURI); int rowCount = 100; List<Message> messages = messageDao.getMessagesWithRowCount(rowCount); req.setAttribute("messages", messages); req.setAttribute("maxRowCount", rowCount); String viewUri = "/chat/chat_view.jsp"; // FIXME: Handle a deploy problem temporarily. ServletContext servletContext = req.getServletContext(); String contextPath = servletContext.getContextPath(); System.out.println("contextPath: " + contextPath); if (contextPath.equals("/")) { viewUri = "/pilot" + viewUri; } RequestDispatcher dispatcher = req.getRequestDispatcher(viewUri); dispatcher.forward(req, resp); }
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String requestURI = req.getRequestURI(); System.out.println("In doGet(), requestURI: " + requestURI); int rowCount = 100; List<Message> messages = messageDao.getMessagesWithRowCount(rowCount); req.setAttribute("messages", messages); req.setAttribute("maxRowCount", rowCount); String viewUri = "/chat/chat_view.jsp"; // FIXME: Handle a deploy problem temporarily. ServletContext servletContext = req.getServletContext(); String contextPath = servletContext.getContextPath(); System.out.println("contextPath: " + contextPath); if (!contextPath.equals("/pilot")) { viewUri = "/pilot" + viewUri; } RequestDispatcher dispatcher = req.getRequestDispatcher(viewUri); dispatcher.forward(req, resp); }
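The inverted guard matters because ServletContext.getContextPath() returns the empty string for an application deployed at the server root, never "/", so the original equals("/") test could not fire for a root deployment. A hypothetical helper (not part of the servlet) mirroring the corrected decision:

    public class ViewUriDemo {
        // Hypothetical extraction of the corrected guard; "/pilot" is the expected context path
        static String resolveViewUri(String contextPath, String viewUri) {
            if (!contextPath.equals("/pilot")) {
                // Deployed anywhere else, including the root context "": prefix the module path
                return "/pilot" + viewUri;
            }
            return viewUri;
        }

        public static void main(String[] args) {
            System.out.println(resolveViewUri("", "/chat/chat_view.jsp"));       // /pilot/chat/chat_view.jsp
            System.out.println(resolveViewUri("/pilot", "/chat/chat_view.jsp")); // /chat/chat_view.jsp
        }
    }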
diff --git a/gerrit-pgm/src/main/java/com/google/gerrit/pgm/util/IoUtil.java b/gerrit-pgm/src/main/java/com/google/gerrit/pgm/util/IoUtil.java index f756890cc..6d1ba7791 100644 --- a/gerrit-pgm/src/main/java/com/google/gerrit/pgm/util/IoUtil.java +++ b/gerrit-pgm/src/main/java/com/google/gerrit/pgm/util/IoUtil.java @@ -1,54 +1,55 @@ // Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.pgm.util; import org.eclipse.jgit.util.StringUtils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; public final class IoUtil { public static final boolean isWin32() { final String osDotName = System.getProperty("os.name"); return osDotName != null && StringUtils.toLowerCase(osDotName).indexOf("windows") != -1; } public static void copyWithThread(final InputStream src, final OutputStream dst) { new Thread("IoUtil-Copy") { + @Override public void run() { try { final byte[] buf = new byte[256]; int n; while (0 < (n = src.read(buf))) { dst.write(buf, 0, n); } } catch (IOException e) { e.printStackTrace(); } finally { try { src.close(); } catch (IOException e2) { } } } }.start(); } private IoUtil() { } }
true
true
public static void copyWithThread(final InputStream src, final OutputStream dst) { new Thread("IoUtil-Copy") { public void run() { try { final byte[] buf = new byte[256]; int n; while (0 < (n = src.read(buf))) { dst.write(buf, 0, n); } } catch (IOException e) { e.printStackTrace(); } finally { try { src.close(); } catch (IOException e2) { } } } }.start(); }
public static void copyWithThread(final InputStream src, final OutputStream dst) { new Thread("IoUtil-Copy") { @Override public void run() { try { final byte[] buf = new byte[256]; int n; while (0 < (n = src.read(buf))) { dst.write(buf, 0, n); } } catch (IOException e) { e.printStackTrace(); } finally { try { src.close(); } catch (IOException e2) { } } } }.start(); }
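The only change in this record is the @Override annotation on run(). It costs nothing at runtime; its value is that a misspelled or wrongly-signed method becomes a compile error instead of a silently unused overload. A minimal sketch:

    public class OverrideDemo {
        public static void main(String[] args) throws InterruptedException {
            Thread t = new Thread("OverrideDemo") {
                @Override // without this, a slip such as run(int unused) would compile and never execute
                public void run() {
                    System.out.println("the stream-copy loop would live here");
                }
            };
            t.start();
            t.join();
        }
    }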
diff --git a/assets/src/org/ruboto/ScriptLoader.java b/assets/src/org/ruboto/ScriptLoader.java index fc09409..1ca0b92 100644 --- a/assets/src/org/ruboto/ScriptLoader.java +++ b/assets/src/org/ruboto/ScriptLoader.java @@ -1,146 +1,147 @@ package org.ruboto; import java.io.IOException; import android.app.ProgressDialog; import android.content.Context; import android.os.Bundle; public class ScriptLoader { /** Return true if we are called from JRuby. */ public static boolean isCalledFromJRuby() { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); int maxLookBack = Math.min(9, stackTraceElements.length); for(int i = 0; i < maxLookBack ; i++){ if (stackTraceElements[i].getClassName().startsWith("org.jruby.javasupport.JavaMethod")) { return true; } } return false; } public static void loadScript(final RubotoComponent component, Object... args) { try { if (component.getScriptInfo().getScriptName() != null) { System.out.println("Looking for Ruby class: " + component.getScriptInfo().getRubyClassName()); Object rubyClass = JRubyAdapter.get(component.getScriptInfo().getRubyClassName()); System.out.println("Found: " + rubyClass); final Script rubyScript = new Script(component.getScriptInfo().getScriptName()); Object rubyInstance; if (rubyScript.exists()) { rubyInstance = component; final String script = rubyScript.getContents(); boolean scriptContainsClass = script.matches("(?s).*class " + component.getScriptInfo().getRubyClassName() + ".*"); boolean hasBackingJavaClass = component.getScriptInfo().getRubyClassName() .equals(component.getClass().getSimpleName()); if (scriptContainsClass) { if (hasBackingJavaClass) { if (rubyClass != null && !rubyClass.toString().startsWith("Java::")) { System.out.println("Found Ruby class instead of Java class. Reloading."); rubyClass = null; } } else { System.out.println("Script defines methods on meta class"); // FIXME(uwe): Simplify when we stop support for RubotoCore 0.4.7 if (JRubyAdapter.isJRubyPreOneSeven() || JRubyAdapter.isRubyOneEight()) { JRubyAdapter.put("$java_instance", component); rubyClass = JRubyAdapter.runScriptlet("class << $java_instance; self; end"); } else if (JRubyAdapter.isJRubyOneSeven() && JRubyAdapter.isRubyOneNine()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); rubyClass = JRubyAdapter.runRubyMethod(component, "singleton_class"); } else { throw new RuntimeException("Unknown JRuby/Ruby version: " + JRubyAdapter.get("JRUBY_VERSION") + "/" + JRubyAdapter.get("RUBY_VERSION")); } // EMXIF } } if (rubyClass == null || !hasBackingJavaClass) { System.out.println("Loading script: " + component.getScriptInfo().getScriptName()); if (scriptContainsClass) { System.out.println("Script contains class definition"); if (rubyClass == null && hasBackingJavaClass) { System.out.println("Script has separate Java class"); // FIXME(uwe): Simplify when we stop support for JRuby < 1.7.0 if (!JRubyAdapter.isJRubyPreOneSeven()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); } // EMXIF rubyClass = JRubyAdapter.runScriptlet("Java::" + component.getClass().getName()); } System.out.println("Set class: " + rubyClass); JRubyAdapter.put(component.getScriptInfo().getRubyClassName(), rubyClass); - Thread t = new Thread(new Runnable(){ + // FIXME(uwe): Collect these threads in a ThreadGroup ? 
+ Thread t = new Thread(null, new Runnable(){ public void run() { JRubyAdapter.setScriptFilename(rubyScript.getAbsolutePath()); JRubyAdapter.runScriptlet(script); } - }); + }, "ScriptLoader for " + rubyClass, 128 * 1024); try { t.start(); t.join(); } catch(InterruptedException ie) { Thread.currentThread().interrupt(); throw new RuntimeException("Interrupted loading script.", ie); } } else { throw new RuntimeException("Expected file " + component.getScriptInfo().getScriptName() + " to define class " + component.getScriptInfo().getRubyClassName()); } } } else if (rubyClass != null) { // We have a predefined Ruby class without corresponding Ruby source file. System.out.println("Create separate Ruby instance for class: " + rubyClass); rubyInstance = JRubyAdapter.runRubyMethod(rubyClass, "new"); JRubyAdapter.runRubyMethod(rubyInstance, "instance_variable_set", "@ruboto_java_instance", component); } else { // Neither script file nor predefined class throw new RuntimeException("Either script or predefined class must be present."); } if (rubyClass != null) { if (component instanceof android.content.Context) { callOnCreate(rubyInstance, args, component.getScriptInfo().getRubyClassName()); } } component.getScriptInfo().setRubyInstance(rubyInstance); } } catch(IOException e){ e.printStackTrace(); if (component instanceof android.content.Context) { ProgressDialog.show((android.content.Context) component, "Script failed", "Something bad happened", true, true); } } } private static final void callOnCreate(Object rubyInstance, Object[] args, String rubyClassName) { System.out.println("Call onCreate on: " + rubyInstance + ", " + JRubyAdapter.get("JRUBY_VERSION")); // FIXME(uwe): Simplify when we stop support for RubotoCore 0.4.7 if (JRubyAdapter.isJRubyPreOneSeven()) { if (args.length > 0) { JRubyAdapter.put("$bundle", args[0]); } JRubyAdapter.put("$ruby_instance", rubyInstance); JRubyAdapter.runScriptlet("$ruby_instance.on_create(" + (args.length > 0 ? "$bundle" : "") + ")"); } else if (JRubyAdapter.isJRubyOneSeven()) { // FIXME(uwe): Simplify when we stop support for snake case aliasing interface callback methods. if ((Boolean)JRubyAdapter.runScriptlet(rubyClassName + ".instance_methods(false).any?{|m| m.to_sym == :onCreate}")) { JRubyAdapter.runRubyMethod(rubyInstance, "onCreate", args); } else if ((Boolean)JRubyAdapter.runScriptlet(rubyClassName + ".instance_methods(false).any?{|m| m.to_sym == :on_create}")) { JRubyAdapter.runRubyMethod(rubyInstance, "on_create", args); } // EMXIF } else { throw new RuntimeException("Unknown JRuby version: " + JRubyAdapter.get("JRUBY_VERSION")); } // EMXIF } }
false
true
public static void loadScript(final RubotoComponent component, Object... args) { try { if (component.getScriptInfo().getScriptName() != null) { System.out.println("Looking for Ruby class: " + component.getScriptInfo().getRubyClassName()); Object rubyClass = JRubyAdapter.get(component.getScriptInfo().getRubyClassName()); System.out.println("Found: " + rubyClass); final Script rubyScript = new Script(component.getScriptInfo().getScriptName()); Object rubyInstance; if (rubyScript.exists()) { rubyInstance = component; final String script = rubyScript.getContents(); boolean scriptContainsClass = script.matches("(?s).*class " + component.getScriptInfo().getRubyClassName() + ".*"); boolean hasBackingJavaClass = component.getScriptInfo().getRubyClassName() .equals(component.getClass().getSimpleName()); if (scriptContainsClass) { if (hasBackingJavaClass) { if (rubyClass != null && !rubyClass.toString().startsWith("Java::")) { System.out.println("Found Ruby class instead of Java class. Reloading."); rubyClass = null; } } else { System.out.println("Script defines methods on meta class"); // FIXME(uwe): Simplify when we stop support for RubotoCore 0.4.7 if (JRubyAdapter.isJRubyPreOneSeven() || JRubyAdapter.isRubyOneEight()) { JRubyAdapter.put("$java_instance", component); rubyClass = JRubyAdapter.runScriptlet("class << $java_instance; self; end"); } else if (JRubyAdapter.isJRubyOneSeven() && JRubyAdapter.isRubyOneNine()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); rubyClass = JRubyAdapter.runRubyMethod(component, "singleton_class"); } else { throw new RuntimeException("Unknown JRuby/Ruby version: " + JRubyAdapter.get("JRUBY_VERSION") + "/" + JRubyAdapter.get("RUBY_VERSION")); } // EMXIF } } if (rubyClass == null || !hasBackingJavaClass) { System.out.println("Loading script: " + component.getScriptInfo().getScriptName()); if (scriptContainsClass) { System.out.println("Script contains class definition"); if (rubyClass == null && hasBackingJavaClass) { System.out.println("Script has separate Java class"); // FIXME(uwe): Simplify when we stop support for JRuby < 1.7.0 if (!JRubyAdapter.isJRubyPreOneSeven()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); } // EMXIF rubyClass = JRubyAdapter.runScriptlet("Java::" + component.getClass().getName()); } System.out.println("Set class: " + rubyClass); JRubyAdapter.put(component.getScriptInfo().getRubyClassName(), rubyClass); Thread t = new Thread(new Runnable(){ public void run() { JRubyAdapter.setScriptFilename(rubyScript.getAbsolutePath()); JRubyAdapter.runScriptlet(script); } }); try { t.start(); t.join(); } catch(InterruptedException ie) { Thread.currentThread().interrupt(); throw new RuntimeException("Interrupted loading script.", ie); } } else { throw new RuntimeException("Expected file " + component.getScriptInfo().getScriptName() + " to define class " + component.getScriptInfo().getRubyClassName()); } } } else if (rubyClass != null) { // We have a predefined Ruby class without corresponding Ruby source file. 
System.out.println("Create separate Ruby instance for class: " + rubyClass); rubyInstance = JRubyAdapter.runRubyMethod(rubyClass, "new"); JRubyAdapter.runRubyMethod(rubyInstance, "instance_variable_set", "@ruboto_java_instance", component); } else { // Neither script file nor predefined class throw new RuntimeException("Either script or predefined class must be present."); } if (rubyClass != null) { if (component instanceof android.content.Context) { callOnCreate(rubyInstance, args, component.getScriptInfo().getRubyClassName()); } } component.getScriptInfo().setRubyInstance(rubyInstance); } } catch(IOException e){ e.printStackTrace(); if (component instanceof android.content.Context) { ProgressDialog.show((android.content.Context) component, "Script failed", "Something bad happened", true, true); } } }
public static void loadScript(final RubotoComponent component, Object... args) { try { if (component.getScriptInfo().getScriptName() != null) { System.out.println("Looking for Ruby class: " + component.getScriptInfo().getRubyClassName()); Object rubyClass = JRubyAdapter.get(component.getScriptInfo().getRubyClassName()); System.out.println("Found: " + rubyClass); final Script rubyScript = new Script(component.getScriptInfo().getScriptName()); Object rubyInstance; if (rubyScript.exists()) { rubyInstance = component; final String script = rubyScript.getContents(); boolean scriptContainsClass = script.matches("(?s).*class " + component.getScriptInfo().getRubyClassName() + ".*"); boolean hasBackingJavaClass = component.getScriptInfo().getRubyClassName() .equals(component.getClass().getSimpleName()); if (scriptContainsClass) { if (hasBackingJavaClass) { if (rubyClass != null && !rubyClass.toString().startsWith("Java::")) { System.out.println("Found Ruby class instead of Java class. Reloading."); rubyClass = null; } } else { System.out.println("Script defines methods on meta class"); // FIXME(uwe): Simplify when we stop support for RubotoCore 0.4.7 if (JRubyAdapter.isJRubyPreOneSeven() || JRubyAdapter.isRubyOneEight()) { JRubyAdapter.put("$java_instance", component); rubyClass = JRubyAdapter.runScriptlet("class << $java_instance; self; end"); } else if (JRubyAdapter.isJRubyOneSeven() && JRubyAdapter.isRubyOneNine()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); rubyClass = JRubyAdapter.runRubyMethod(component, "singleton_class"); } else { throw new RuntimeException("Unknown JRuby/Ruby version: " + JRubyAdapter.get("JRUBY_VERSION") + "/" + JRubyAdapter.get("RUBY_VERSION")); } // EMXIF } } if (rubyClass == null || !hasBackingJavaClass) { System.out.println("Loading script: " + component.getScriptInfo().getScriptName()); if (scriptContainsClass) { System.out.println("Script contains class definition"); if (rubyClass == null && hasBackingJavaClass) { System.out.println("Script has separate Java class"); // FIXME(uwe): Simplify when we stop support for JRuby < 1.7.0 if (!JRubyAdapter.isJRubyPreOneSeven()) { JRubyAdapter.runScriptlet("Java::" + component.getClass().getName() + ".__persistent__ = true"); } // EMXIF rubyClass = JRubyAdapter.runScriptlet("Java::" + component.getClass().getName()); } System.out.println("Set class: " + rubyClass); JRubyAdapter.put(component.getScriptInfo().getRubyClassName(), rubyClass); // FIXME(uwe): Collect these threads in a ThreadGroup ? Thread t = new Thread(null, new Runnable(){ public void run() { JRubyAdapter.setScriptFilename(rubyScript.getAbsolutePath()); JRubyAdapter.runScriptlet(script); } }, "ScriptLoader for " + rubyClass, 128 * 1024); try { t.start(); t.join(); } catch(InterruptedException ie) { Thread.currentThread().interrupt(); throw new RuntimeException("Interrupted loading script.", ie); } } else { throw new RuntimeException("Expected file " + component.getScriptInfo().getScriptName() + " to define class " + component.getScriptInfo().getRubyClassName()); } } } else if (rubyClass != null) { // We have a predefined Ruby class without corresponding Ruby source file. 
System.out.println("Create separate Ruby instance for class: " + rubyClass); rubyInstance = JRubyAdapter.runRubyMethod(rubyClass, "new"); JRubyAdapter.runRubyMethod(rubyInstance, "instance_variable_set", "@ruboto_java_instance", component); } else { // Neither script file nor predefined class throw new RuntimeException("Either script or predefined class must be present."); } if (rubyClass != null) { if (component instanceof android.content.Context) { callOnCreate(rubyInstance, args, component.getScriptInfo().getRubyClassName()); } } component.getScriptInfo().setRubyInstance(rubyInstance); } } catch(IOException e){ e.printStackTrace(); if (component instanceof android.content.Context) { ProgressDialog.show((android.content.Context) component, "Script failed", "Something bad happened", true, true); } } }
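The rewritten loader switches to the four-argument Thread constructor, which adds two things the two-argument form lacks: a descriptive thread name (visible in stack dumps) and a requested stack size, here 128 KiB; per the Thread javadoc the VM is free to treat stackSize as a suggestion. A minimal sketch with an illustrative Runnable:

    public class NamedStackThreadDemo {
        public static void main(String[] args) throws InterruptedException {
            Runnable work = () ->
                    System.out.println("running on " + Thread.currentThread().getName());
            // group = null (inherit the caller's group), explicit name, 128 KiB requested stack
            Thread t = new Thread(null, work, "ScriptLoader-demo", 128 * 1024);
            t.start();
            t.join();
        }
    }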
diff --git a/common/com/github/soniex2/endermoney/trading/tileentity/TileEntityCreativeItemTrader.java b/common/com/github/soniex2/endermoney/trading/tileentity/TileEntityCreativeItemTrader.java index 5dc43d7..634b557 100644 --- a/common/com/github/soniex2/endermoney/trading/tileentity/TileEntityCreativeItemTrader.java +++ b/common/com/github/soniex2/endermoney/trading/tileentity/TileEntityCreativeItemTrader.java @@ -1,233 +1,242 @@ package com.github.soniex2.endermoney.trading.tileentity; import java.math.BigInteger; import java.util.HashMap; import java.util.Iterator; import java.util.Map.Entry; import java.util.Set; import net.minecraft.inventory.IInventory; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import com.github.soniex2.endermoney.core.EnderCoin; import com.github.soniex2.endermoney.core.EnderMoney; import com.github.soniex2.endermoney.trading.TradeError; import com.github.soniex2.endermoney.trading.base.AbstractTraderTileEntity; import com.github.soniex2.endermoney.trading.helper.item.ItemStackMapKey; public class TileEntityCreativeItemTrader extends AbstractTraderTileEntity { public TileEntityCreativeItemTrader() { super(18); } public ItemStack[] getTradeInputs() { ItemStack[] tradeInputs = new ItemStack[9]; for (int i = 0; i < 9; i++) { tradeInputs[i] = inv[i] != null ? inv[i].copy() : null; } return tradeInputs; } public ItemStack[] getTradeOutputs() { ItemStack[] tradeOutputs = new ItemStack[9]; for (int i = 0; i < 9; i++) { tradeOutputs[i] = inv[i + 9] != null ? inv[i + 9].copy() : null; } return tradeOutputs; } public boolean doTrade(IInventory fakeInv, int inputMinSlot, int inputMaxSlot, int outputMinSlot, int outputMaxSlot) throws TradeError { if (fakeInv == null) { throw new TradeError(1, "Invalid inventory", new NullPointerException()); } HashMap<ItemStackMapKey, Integer> tradeInputs = new HashMap<ItemStackMapKey, Integer>(); BigInteger moneyRequired = BigInteger.ZERO; for (ItemStack i : getTradeInputs()) { if (i == null) { continue; } if (i.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(i)).multiply( BigInteger.valueOf(i.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(i); if (tradeInputs.containsKey(index)) { tradeInputs.put(index, i.stackSize + tradeInputs.get(index)); } else { tradeInputs.put(index, i.stackSize); } } HashMap<ItemStackMapKey, Integer> tradeInput = new HashMap<ItemStackMapKey, Integer>(); BigInteger money = BigInteger.ZERO; for (int i = inputMinSlot; i <= inputMaxSlot; i++) { ItemStack is = fakeInv.getStackInSlot(i); if (is == null) { continue; } if (is.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(is)).multiply( BigInteger.valueOf(is.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(is); if (tradeInput.containsKey(index)) { tradeInput.put(index, is.stackSize + tradeInput.get(index)); } else { tradeInput.put(index, is.stackSize); } } if (money.compareTo(moneyRequired) < 0) { return false; } BigInteger newMoney = money.subtract(moneyRequired); Set<Entry<ItemStackMapKey, Integer>> itemsRequired = tradeInputs.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> i = itemsRequired.iterator(); HashMap<ItemStackMapKey, Integer> newInput = new HashMap<ItemStackMapKey, Integer>(); while (i.hasNext()) { Entry<ItemStackMapKey, Integer> entry = i.next(); ItemStackMapKey item = entry.getKey(); Integer amount = entry.getValue(); - Integer available 
= tradeInput.get(item); // TODO fix this + Integer available = tradeInput.get(item); if (available == null) { return false; } if (available < amount) { return false; } if (available - amount == 0) { continue; } newInput.put(item, available - amount); } if (newMoney.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { BigInteger[] coinCount = newMoney .divideAndRemainder(BigInteger.valueOf(Long.MAX_VALUE)); int a = coinCount[0].intValue(); long b = coinCount[1].longValue(); ItemStack is1 = ((EnderCoin) EnderMoney.coin).getItemStack(Long.MAX_VALUE, 1); ItemStack is2 = ((EnderCoin) EnderMoney.coin).getItemStack(b, 1); ItemStackMapKey index1 = new ItemStackMapKey(is1); ItemStackMapKey index2 = new ItemStackMapKey(is2); newInput.put(index1, a); newInput.put(index2, 1); } ItemStack[] tradeOutputs = getTradeOutputs(); // TODO put commented out code below somewhere else /* * int[] something = new int[tradeOutputs.length]; * int[][] lookAt = new int[][] { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 }, * { -1, 0, 0 }, * { 0, -1, 0 }, { 0, 0, -1 } }; * for (int a = 0; a < lookAt.length; a++) { * TileEntity tileEntity = this.worldObj.getBlockTileEntity(this.xCoord * + lookAt[a][0], * this.yCoord + lookAt[a][1], this.zCoord + lookAt[a][2]); * if (tileEntity == null) continue; * if (tileEntity instanceof IInventory) { * IInventory iinv = (IInventory) tileEntity; * for (int b = 0; b < iinv.getSizeInventory(); b++) { * ItemStack is = iinv.getStackInSlot(b); * if (is == null) continue; * for (int c = 0; c < tradeOutputs.length; c++) { * if (tradeOutputs[c] == null) continue; * if (tradeOutputs[c].isItemEqual(is) && * ItemStack.areItemStackTagsEqual(tradeOutputs[c], is)) { * something[c] += is.stackSize; * } * } * } * } * } */ ItemStack[] oldOutInv = new ItemStack[outputMaxSlot - outputMinSlot + 1]; for (int a = outputMinSlot; a <= outputMaxSlot; a++) { - oldOutInv[a - outputMinSlot] = fakeInv.getStackInSlot(a); + oldOutInv[a - outputMinSlot] = fakeInv.getStackInSlot(a) != null ? 
fakeInv + .getStackInSlot(a).copy() : null; } for (int a = outputMinSlot; a <= outputMaxSlot; a++) { ItemStack is = fakeInv.getStackInSlot(a); for (int b = 0; b < tradeOutputs.length; b++) { - if (is != null && ItemStack.areItemStacksEqual(is, tradeOutputs[b])) { + if (is != null && tradeOutputs[b] != null && is.isItemEqual(tradeOutputs[b]) + && ItemStack.areItemStackTagsEqual(is, tradeOutputs[b])) { if (is.isStackable()) { if (is.stackSize < is.getMaxStackSize()) { if (is.stackSize + tradeOutputs[b].stackSize > is.getMaxStackSize()) { - tradeOutputs[b].stackSize = (is.stackSize + tradeOutputs[b].stackSize) - - is.getMaxStackSize(); + int newStackSize = tradeOutputs[b].stackSize + is.stackSize; + if (newStackSize > is.getMaxStackSize()) { + newStackSize = newStackSize - is.getMaxStackSize(); + } + tradeOutputs[b].stackSize = newStackSize; is.stackSize = is.getMaxStackSize(); } else { is.stackSize = is.stackSize + tradeOutputs[b].stackSize; tradeOutputs[b] = null; } } } } else if (is == null && tradeOutputs[b] != null) { fakeInv.setInventorySlotContents(a, tradeOutputs[b]); + is = fakeInv.getStackInSlot(a); + tradeOutputs[b] = null; + } + if (tradeOutputs[b] != null && tradeOutputs[b].stackSize <= 0) { tradeOutputs[b] = null; } } } for (int a = 0; a < tradeOutputs.length; a++) { if (tradeOutputs[a] != null) { for (int b = 0; b < oldOutInv.length; b++) { fakeInv.setInventorySlotContents(b + outputMinSlot, oldOutInv[b]); } throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } } for (int _i = inputMinSlot; _i < inputMaxSlot; _i++) { fakeInv.setInventorySlotContents(_i, null); } Set<Entry<ItemStackMapKey, Integer>> input = newInput.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> it = input.iterator(); int slot = inputMinSlot; while (it.hasNext()) { if (slot >= inputMaxSlot) { throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } if (fakeInv.getStackInSlot(slot) != null) { slot++; continue; } Entry<ItemStackMapKey, Integer> entry = it.next(); ItemStackMapKey itemData = entry.getKey(); ItemStack item = new ItemStack(itemData.itemID, 1, itemData.damage); item.stackTagCompound = (NBTTagCompound) itemData.getTag(); Integer amount = entry.getValue(); if (amount == 0) { // shouldn't happen but who knows... continue; } int stacks = amount / item.getMaxStackSize(); int extra = amount % item.getMaxStackSize(); ItemStack newItem = item.copy(); newItem.stackSize = item.getMaxStackSize(); for (int n = slot; n < slot + stacks; n++) { fakeInv.setInventorySlotContents(n, newItem); } slot += stacks; newItem = item.copy(); newItem.stackSize = extra; fakeInv.setInventorySlotContents(slot, newItem); slot++; } return true; } @Override public String getInvName() { return "endermoney.traders.item"; } @Override public boolean isInvNameLocalized() { return false; } @Override public void openChest() { } @Override public void closeChest() { } }
false
true
public boolean doTrade(IInventory fakeInv, int inputMinSlot, int inputMaxSlot, int outputMinSlot, int outputMaxSlot) throws TradeError { if (fakeInv == null) { throw new TradeError(1, "Invalid inventory", new NullPointerException()); } HashMap<ItemStackMapKey, Integer> tradeInputs = new HashMap<ItemStackMapKey, Integer>(); BigInteger moneyRequired = BigInteger.ZERO; for (ItemStack i : getTradeInputs()) { if (i == null) { continue; } if (i.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(i)).multiply( BigInteger.valueOf(i.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(i); if (tradeInputs.containsKey(index)) { tradeInputs.put(index, i.stackSize + tradeInputs.get(index)); } else { tradeInputs.put(index, i.stackSize); } } HashMap<ItemStackMapKey, Integer> tradeInput = new HashMap<ItemStackMapKey, Integer>(); BigInteger money = BigInteger.ZERO; for (int i = inputMinSlot; i <= inputMaxSlot; i++) { ItemStack is = fakeInv.getStackInSlot(i); if (is == null) { continue; } if (is.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(is)).multiply( BigInteger.valueOf(is.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(is); if (tradeInput.containsKey(index)) { tradeInput.put(index, is.stackSize + tradeInput.get(index)); } else { tradeInput.put(index, is.stackSize); } } if (money.compareTo(moneyRequired) < 0) { return false; } BigInteger newMoney = money.subtract(moneyRequired); Set<Entry<ItemStackMapKey, Integer>> itemsRequired = tradeInputs.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> i = itemsRequired.iterator(); HashMap<ItemStackMapKey, Integer> newInput = new HashMap<ItemStackMapKey, Integer>(); while (i.hasNext()) { Entry<ItemStackMapKey, Integer> entry = i.next(); ItemStackMapKey item = entry.getKey(); Integer amount = entry.getValue(); Integer available = tradeInput.get(item); // TODO fix this if (available == null) { return false; } if (available < amount) { return false; } if (available - amount == 0) { continue; } newInput.put(item, available - amount); } if (newMoney.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { BigInteger[] coinCount = newMoney .divideAndRemainder(BigInteger.valueOf(Long.MAX_VALUE)); int a = coinCount[0].intValue(); long b = coinCount[1].longValue(); ItemStack is1 = ((EnderCoin) EnderMoney.coin).getItemStack(Long.MAX_VALUE, 1); ItemStack is2 = ((EnderCoin) EnderMoney.coin).getItemStack(b, 1); ItemStackMapKey index1 = new ItemStackMapKey(is1); ItemStackMapKey index2 = new ItemStackMapKey(is2); newInput.put(index1, a); newInput.put(index2, 1); } ItemStack[] tradeOutputs = getTradeOutputs(); // TODO put commented out code below somewhere else /* * int[] something = new int[tradeOutputs.length]; * int[][] lookAt = new int[][] { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 }, * { -1, 0, 0 }, * { 0, -1, 0 }, { 0, 0, -1 } }; * for (int a = 0; a < lookAt.length; a++) { * TileEntity tileEntity = this.worldObj.getBlockTileEntity(this.xCoord * + lookAt[a][0], * this.yCoord + lookAt[a][1], this.zCoord + lookAt[a][2]); * if (tileEntity == null) continue; * if (tileEntity instanceof IInventory) { * IInventory iinv = (IInventory) tileEntity; * for (int b = 0; b < iinv.getSizeInventory(); b++) { * ItemStack is = iinv.getStackInSlot(b); * if (is == null) continue; * for (int c = 0; c < tradeOutputs.length; c++) { * if (tradeOutputs[c] == null) continue; * if (tradeOutputs[c].isItemEqual(is) && * 
ItemStack.areItemStackTagsEqual(tradeOutputs[c], is)) { * something[c] += is.stackSize; * } * } * } * } * } */ ItemStack[] oldOutInv = new ItemStack[outputMaxSlot - outputMinSlot + 1]; for (int a = outputMinSlot; a <= outputMaxSlot; a++) { oldOutInv[a - outputMinSlot] = fakeInv.getStackInSlot(a); } for (int a = outputMinSlot; a <= outputMaxSlot; a++) { ItemStack is = fakeInv.getStackInSlot(a); for (int b = 0; b < tradeOutputs.length; b++) { if (is != null && ItemStack.areItemStacksEqual(is, tradeOutputs[b])) { if (is.isStackable()) { if (is.stackSize < is.getMaxStackSize()) { if (is.stackSize + tradeOutputs[b].stackSize > is.getMaxStackSize()) { tradeOutputs[b].stackSize = (is.stackSize + tradeOutputs[b].stackSize) - is.getMaxStackSize(); is.stackSize = is.getMaxStackSize(); } else { is.stackSize = is.stackSize + tradeOutputs[b].stackSize; tradeOutputs[b] = null; } } } } else if (is == null && tradeOutputs[b] != null) { fakeInv.setInventorySlotContents(a, tradeOutputs[b]); tradeOutputs[b] = null; } } } for (int a = 0; a < tradeOutputs.length; a++) { if (tradeOutputs[a] != null) { for (int b = 0; b < oldOutInv.length; b++) { fakeInv.setInventorySlotContents(b + outputMinSlot, oldOutInv[b]); } throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } } for (int _i = inputMinSlot; _i < inputMaxSlot; _i++) { fakeInv.setInventorySlotContents(_i, null); } Set<Entry<ItemStackMapKey, Integer>> input = newInput.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> it = input.iterator(); int slot = inputMinSlot; while (it.hasNext()) { if (slot >= inputMaxSlot) { throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } if (fakeInv.getStackInSlot(slot) != null) { slot++; continue; } Entry<ItemStackMapKey, Integer> entry = it.next(); ItemStackMapKey itemData = entry.getKey(); ItemStack item = new ItemStack(itemData.itemID, 1, itemData.damage); item.stackTagCompound = (NBTTagCompound) itemData.getTag(); Integer amount = entry.getValue(); if (amount == 0) { // shouldn't happen but who knows... continue; } int stacks = amount / item.getMaxStackSize(); int extra = amount % item.getMaxStackSize(); ItemStack newItem = item.copy(); newItem.stackSize = item.getMaxStackSize(); for (int n = slot; n < slot + stacks; n++) { fakeInv.setInventorySlotContents(n, newItem); } slot += stacks; newItem = item.copy(); newItem.stackSize = extra; fakeInv.setInventorySlotContents(slot, newItem); slot++; } return true; }
public boolean doTrade(IInventory fakeInv, int inputMinSlot, int inputMaxSlot, int outputMinSlot, int outputMaxSlot) throws TradeError { if (fakeInv == null) { throw new TradeError(1, "Invalid inventory", new NullPointerException()); } HashMap<ItemStackMapKey, Integer> tradeInputs = new HashMap<ItemStackMapKey, Integer>(); BigInteger moneyRequired = BigInteger.ZERO; for (ItemStack i : getTradeInputs()) { if (i == null) { continue; } if (i.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(i)).multiply( BigInteger.valueOf(i.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(i); if (tradeInputs.containsKey(index)) { tradeInputs.put(index, i.stackSize + tradeInputs.get(index)); } else { tradeInputs.put(index, i.stackSize); } } HashMap<ItemStackMapKey, Integer> tradeInput = new HashMap<ItemStackMapKey, Integer>(); BigInteger money = BigInteger.ZERO; for (int i = inputMinSlot; i <= inputMaxSlot; i++) { ItemStack is = fakeInv.getStackInSlot(i); if (is == null) { continue; } if (is.getItem() == EnderMoney.coin) { moneyRequired = moneyRequired.add(BigInteger.valueOf( EnderCoin.getValueFromItemStack(is)).multiply( BigInteger.valueOf(is.stackSize))); continue; } ItemStackMapKey index = new ItemStackMapKey(is); if (tradeInput.containsKey(index)) { tradeInput.put(index, is.stackSize + tradeInput.get(index)); } else { tradeInput.put(index, is.stackSize); } } if (money.compareTo(moneyRequired) < 0) { return false; } BigInteger newMoney = money.subtract(moneyRequired); Set<Entry<ItemStackMapKey, Integer>> itemsRequired = tradeInputs.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> i = itemsRequired.iterator(); HashMap<ItemStackMapKey, Integer> newInput = new HashMap<ItemStackMapKey, Integer>(); while (i.hasNext()) { Entry<ItemStackMapKey, Integer> entry = i.next(); ItemStackMapKey item = entry.getKey(); Integer amount = entry.getValue(); Integer available = tradeInput.get(item); if (available == null) { return false; } if (available < amount) { return false; } if (available - amount == 0) { continue; } newInput.put(item, available - amount); } if (newMoney.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) { BigInteger[] coinCount = newMoney .divideAndRemainder(BigInteger.valueOf(Long.MAX_VALUE)); int a = coinCount[0].intValue(); long b = coinCount[1].longValue(); ItemStack is1 = ((EnderCoin) EnderMoney.coin).getItemStack(Long.MAX_VALUE, 1); ItemStack is2 = ((EnderCoin) EnderMoney.coin).getItemStack(b, 1); ItemStackMapKey index1 = new ItemStackMapKey(is1); ItemStackMapKey index2 = new ItemStackMapKey(is2); newInput.put(index1, a); newInput.put(index2, 1); } ItemStack[] tradeOutputs = getTradeOutputs(); // TODO put commented out code below somewhere else /* * int[] something = new int[tradeOutputs.length]; * int[][] lookAt = new int[][] { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 }, * { -1, 0, 0 }, * { 0, -1, 0 }, { 0, 0, -1 } }; * for (int a = 0; a < lookAt.length; a++) { * TileEntity tileEntity = this.worldObj.getBlockTileEntity(this.xCoord * + lookAt[a][0], * this.yCoord + lookAt[a][1], this.zCoord + lookAt[a][2]); * if (tileEntity == null) continue; * if (tileEntity instanceof IInventory) { * IInventory iinv = (IInventory) tileEntity; * for (int b = 0; b < iinv.getSizeInventory(); b++) { * ItemStack is = iinv.getStackInSlot(b); * if (is == null) continue; * for (int c = 0; c < tradeOutputs.length; c++) { * if (tradeOutputs[c] == null) continue; * if (tradeOutputs[c].isItemEqual(is) && * 
ItemStack.areItemStackTagsEqual(tradeOutputs[c], is)) { * something[c] += is.stackSize; * } * } * } * } * } */ ItemStack[] oldOutInv = new ItemStack[outputMaxSlot - outputMinSlot + 1]; for (int a = outputMinSlot; a <= outputMaxSlot; a++) { oldOutInv[a - outputMinSlot] = fakeInv.getStackInSlot(a) != null ? fakeInv .getStackInSlot(a).copy() : null; } for (int a = outputMinSlot; a <= outputMaxSlot; a++) { ItemStack is = fakeInv.getStackInSlot(a); for (int b = 0; b < tradeOutputs.length; b++) { if (is != null && tradeOutputs[b] != null && is.isItemEqual(tradeOutputs[b]) && ItemStack.areItemStackTagsEqual(is, tradeOutputs[b])) { if (is.isStackable()) { if (is.stackSize < is.getMaxStackSize()) { if (is.stackSize + tradeOutputs[b].stackSize > is.getMaxStackSize()) { int newStackSize = tradeOutputs[b].stackSize + is.stackSize; if (newStackSize > is.getMaxStackSize()) { newStackSize = newStackSize - is.getMaxStackSize(); } tradeOutputs[b].stackSize = newStackSize; is.stackSize = is.getMaxStackSize(); } else { is.stackSize = is.stackSize + tradeOutputs[b].stackSize; tradeOutputs[b] = null; } } } } else if (is == null && tradeOutputs[b] != null) { fakeInv.setInventorySlotContents(a, tradeOutputs[b]); is = fakeInv.getStackInSlot(a); tradeOutputs[b] = null; } if (tradeOutputs[b] != null && tradeOutputs[b].stackSize <= 0) { tradeOutputs[b] = null; } } } for (int a = 0; a < tradeOutputs.length; a++) { if (tradeOutputs[a] != null) { for (int b = 0; b < oldOutInv.length; b++) { fakeInv.setInventorySlotContents(b + outputMinSlot, oldOutInv[b]); } throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } } for (int _i = inputMinSlot; _i < inputMaxSlot; _i++) { fakeInv.setInventorySlotContents(_i, null); } Set<Entry<ItemStackMapKey, Integer>> input = newInput.entrySet(); Iterator<Entry<ItemStackMapKey, Integer>> it = input.iterator(); int slot = inputMinSlot; while (it.hasNext()) { if (slot >= inputMaxSlot) { throw new TradeError(0, "Couldn't complete trade: Out of inventory space"); } if (fakeInv.getStackInSlot(slot) != null) { slot++; continue; } Entry<ItemStackMapKey, Integer> entry = it.next(); ItemStackMapKey itemData = entry.getKey(); ItemStack item = new ItemStack(itemData.itemID, 1, itemData.damage); item.stackTagCompound = (NBTTagCompound) itemData.getTag(); Integer amount = entry.getValue(); if (amount == 0) { // shouldn't happen but who knows... continue; } int stacks = amount / item.getMaxStackSize(); int extra = amount % item.getMaxStackSize(); ItemStack newItem = item.copy(); newItem.stackSize = item.getMaxStackSize(); for (int n = slot; n < slot + stacks; n++) { fakeInv.setInventorySlotContents(n, newItem); } slot += stacks; newItem = item.copy(); newItem.stackSize = extra; fakeInv.setInventorySlotContents(slot, newItem); slot++; } return true; }
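A recurring theme in this fix is the defensive copy: oldOutInv previously stored live ItemStack references, so mutations made while distributing trade outputs corrupted the very snapshot meant to roll the inventory back. A generic sketch of the pattern, using a stand-in Item class rather than the Minecraft API:

    public class SnapshotDemo {
        static final class Item {
            int count;
            Item(int count) { this.count = count; }
            Item copy() { return new Item(count); }
        }

        public static void main(String[] args) {
            Item[] inv = { new Item(5), null };

            // Deep-copy each non-null slot so later mutation cannot reach the snapshot
            Item[] snapshot = new Item[inv.length];
            for (int i = 0; i < inv.length; i++) {
                snapshot[i] = inv[i] != null ? inv[i].copy() : null;
            }

            inv[0].count = 64;                     // the trade mutates the live inventory...
            System.out.println(snapshot[0].count); // ...but the rollback copy still holds 5
        }
    }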
diff --git a/Essentials/src/com/earth2me/essentials/commands/Commandspawner.java b/Essentials/src/com/earth2me/essentials/commands/Commandspawner.java index 5e95bad6..ae92670c 100644 --- a/Essentials/src/com/earth2me/essentials/commands/Commandspawner.java +++ b/Essentials/src/com/earth2me/essentials/commands/Commandspawner.java @@ -1,44 +1,47 @@ package com.earth2me.essentials.commands; import com.earth2me.essentials.User; import com.earth2me.essentials.Util; import org.bukkit.Material; import org.bukkit.Server; import org.bukkit.block.Block; import org.bukkit.craftbukkit.block.CraftCreatureSpawner; import org.bukkit.entity.CreatureType; public class Commandspawner extends EssentialsCommand { public Commandspawner() { super("spawner"); } @Override protected void run(Server server, User user, String commandLabel, String[] args) throws Exception { if (args.length < 1 || args[0].length() < 2) { throw new NotEnoughArgumentsException(); } final Block target = user.getTarget().getTargetBlock(); if (target.getType() != Material.MOB_SPAWNER) { throw new Exception(Util.i18n("mobSpawnTarget")); } charge(user); try { - final String name = args[0].substring(0, 1).toUpperCase() + args[0].substring(1).toLowerCase(); + String name = args[0].substring(0, 1).toUpperCase() + args[0].substring(1).toLowerCase(); + if (name.equalsIgnoreCase("Pigzombie")) { + name = "PigZombie"; + } new CraftCreatureSpawner(target).setCreatureType(CreatureType.fromName(name)); } catch (Throwable ex) { throw new Exception(Util.i18n("mobSpawnError"), ex); } } }
true
true
protected void run(Server server, User user, String commandLabel, String[] args) throws Exception { if (args.length < 1 || args[0].length() < 2) { throw new NotEnoughArgumentsException(); } final Block target = user.getTarget().getTargetBlock(); if (target.getType() != Material.MOB_SPAWNER) { throw new Exception(Util.i18n("mobSpawnTarget")); } charge(user); try { final String name = args[0].substring(0, 1).toUpperCase() + args[0].substring(1).toLowerCase(); new CraftCreatureSpawner(target).setCreatureType(CreatureType.fromName(name)); } catch (Throwable ex) { throw new Exception(Util.i18n("mobSpawnError"), ex); } }
protected void run(Server server, User user, String commandLabel, String[] args) throws Exception { if (args.length < 1 || args[0].length() < 2) { throw new NotEnoughArgumentsException(); } final Block target = user.getTarget().getTargetBlock(); if (target.getType() != Material.MOB_SPAWNER) { throw new Exception(Util.i18n("mobSpawnTarget")); } charge(user); try { String name = args[0].substring(0, 1).toUpperCase() + args[0].substring(1).toLowerCase(); if (name.equalsIgnoreCase("Pigzombie")) { name = "PigZombie"; } new CraftCreatureSpawner(target).setCreatureType(CreatureType.fromName(name)); } catch (Throwable ex) { throw new Exception(Util.i18n("mobSpawnError"), ex); } }
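The special case exists because naive capitalization turns input like "pigzombie" into "Pigzombie", while the registered CamelCase name is "PigZombie" and the lookup matches case-sensitively. A generic sketch of the pitfall with an illustrative enum (not the Bukkit API):

    public class NameLookupDemo {
        enum Creature { Pig, PigZombie }

        // Case-sensitive lookup, analogous in spirit to CreatureType.fromName
        static Creature fromName(String name) {
            for (Creature c : Creature.values()) {
                if (c.name().equals(name)) {
                    return c;
                }
            }
            return null;
        }

        public static void main(String[] args) {
            String arg = "PIGZOMBIE";
            String name = arg.substring(0, 1).toUpperCase() + arg.substring(1).toLowerCase();
            System.out.println(fromName(name)); // null: "Pigzombie" does not match
            if (name.equalsIgnoreCase("Pigzombie")) {
                name = "PigZombie";             // the same normalization the fix applies
            }
            System.out.println(fromName(name)); // PigZombie
        }
    }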
diff --git a/src/net/rptools/maptool/client/ui/zone/ZoneRenderer.java b/src/net/rptools/maptool/client/ui/zone/ZoneRenderer.java index bca20258..7651a8e4 100644 --- a/src/net/rptools/maptool/client/ui/zone/ZoneRenderer.java +++ b/src/net/rptools/maptool/client/ui/zone/ZoneRenderer.java @@ -1,3571 +1,3573 @@ /* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package net.rptools.maptool.client.ui.zone; import java.awt.AlphaComposite; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Composite; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Font; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Image; import java.awt.Paint; import java.awt.Point; import java.awt.Rectangle; import java.awt.RenderingHints; import java.awt.Shape; import java.awt.Stroke; import java.awt.Toolkit; import java.awt.Transparency; import java.awt.dnd.DropTargetDragEvent; import java.awt.dnd.DropTargetDropEvent; import java.awt.dnd.DropTargetEvent; import java.awt.dnd.DropTargetListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionAdapter; import java.awt.font.FontRenderContext; import java.awt.font.TextLayout; import java.awt.geom.AffineTransform; import java.awt.geom.Area; import java.awt.geom.GeneralPath; import java.awt.geom.QuadCurve2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; import java.awt.image.ImageObserver; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TooManyListenersException; import javax.imageio.ImageIO; import javax.swing.JComponent; import javax.swing.SwingUtilities; import net.rptools.lib.CodeTimer; import net.rptools.lib.MD5Key; import net.rptools.lib.image.ImageUtil; import net.rptools.lib.swing.ImageBorder; import net.rptools.lib.swing.ImageLabel; import net.rptools.lib.swing.SwingUtil; import net.rptools.maptool.client.AppActions; import net.rptools.maptool.client.AppConstants; import net.rptools.maptool.client.AppPreferences; import net.rptools.maptool.client.AppState; import net.rptools.maptool.client.AppStyle; import net.rptools.maptool.client.AppUtil; import net.rptools.maptool.client.MapTool; import net.rptools.maptool.client.MapToolUtil; import net.rptools.maptool.client.ScreenPoint; import net.rptools.maptool.client.TransferableHelper; import net.rptools.maptool.client.functions.TokenMoveFunctions; import net.rptools.maptool.client.tool.PointerTool; import net.rptools.maptool.client.tool.StampTool; import net.rptools.maptool.client.tool.drawing.FreehandExposeTool; import 
net.rptools.maptool.client.tool.drawing.OvalExposeTool; import net.rptools.maptool.client.tool.drawing.PolygonExposeTool; import net.rptools.maptool.client.tool.drawing.RectangleExposeTool; import net.rptools.maptool.client.ui.Scale; import net.rptools.maptool.client.ui.Tool; import net.rptools.maptool.client.ui.htmlframe.HTMLFrameFactory; import net.rptools.maptool.client.ui.token.AbstractTokenOverlay; import net.rptools.maptool.client.ui.token.BarTokenOverlay; import net.rptools.maptool.client.ui.token.NewTokenDialog; import net.rptools.maptool.client.walker.ZoneWalker; import net.rptools.maptool.model.AbstractPoint; import net.rptools.maptool.model.Asset; import net.rptools.maptool.model.AssetManager; import net.rptools.maptool.model.CellPoint; import net.rptools.maptool.model.ExposedAreaMetaData; import net.rptools.maptool.model.GUID; import net.rptools.maptool.model.Grid; import net.rptools.maptool.model.GridCapabilities; import net.rptools.maptool.model.Label; import net.rptools.maptool.model.LightSource; import net.rptools.maptool.model.ModelChangeEvent; import net.rptools.maptool.model.ModelChangeListener; import net.rptools.maptool.model.Path; import net.rptools.maptool.model.Player; import net.rptools.maptool.model.TextMessage; import net.rptools.maptool.model.Token; import net.rptools.maptool.model.TokenFootprint; import net.rptools.maptool.model.Zone; import net.rptools.maptool.model.ZonePoint; import net.rptools.maptool.model.drawing.Drawable; import net.rptools.maptool.model.drawing.DrawableTexturePaint; import net.rptools.maptool.model.drawing.DrawnElement; import net.rptools.maptool.model.drawing.Pen; import net.rptools.maptool.util.GraphicsUtil; import net.rptools.maptool.util.ImageManager; import net.rptools.maptool.util.StringUtil; import net.rptools.maptool.util.TokenUtil; import org.apache.log4j.Logger; /** */ public class ZoneRenderer extends JComponent implements DropTargetListener, Comparable<Object> { private static final long serialVersionUID = 3832897780066104884L; private static final Logger log = Logger.getLogger(ZoneRenderer.class); public static final int MIN_GRID_SIZE = 10; private static LightSourceIconOverlay lightSourceIconOverlay = new LightSourceIconOverlay(); protected Zone zone; private final ZoneView zoneView; private Scale zoneScale; private final DrawableRenderer backgroundDrawableRenderer = new PartitionedDrawableRenderer(); private final DrawableRenderer objectDrawableRenderer = new PartitionedDrawableRenderer(); private final DrawableRenderer tokenDrawableRenderer = new PartitionedDrawableRenderer(); private final DrawableRenderer gmDrawableRenderer = new PartitionedDrawableRenderer(); private final List<ZoneOverlay> overlayList = new ArrayList<ZoneOverlay>(); private final Map<Zone.Layer, List<TokenLocation>> tokenLocationMap = new HashMap<Zone.Layer, List<TokenLocation>>(); private Set<GUID> selectedTokenSet = new LinkedHashSet<GUID>(); private final List<Set<GUID>> selectedTokenSetHistory = new ArrayList<Set<GUID>>(); private final List<LabelLocation> labelLocationList = new LinkedList<LabelLocation>(); private Map<Token, Set<Token>> tokenStackMap; private final Map<GUID, SelectionSet> selectionSetMap = new HashMap<GUID, SelectionSet>(); private final Map<Token, TokenLocation> tokenLocationCache = new HashMap<Token, TokenLocation>(); private final List<TokenLocation> markerLocationList = new ArrayList<TokenLocation>(); private GeneralPath facingArrow; private final List<Token> showPathList = new ArrayList<Token>(); // Optimizations private 
final Map<GUID, BufferedImage> labelRenderingCache = new HashMap<GUID, BufferedImage>(); private final Map<Token, BufferedImage> replacementImageMap = new HashMap<Token, BufferedImage>(); private final Map<Token, BufferedImage> flipImageMap = new HashMap<Token, BufferedImage>(); private Token tokenUnderMouse; private ScreenPoint pointUnderMouse; private Zone.Layer activeLayer; private String loadingProgress; private boolean isLoaded; private BufferedImage fogBuffer; // I don't like this, at all, but it'll work for now, basically keep track // of when the fog cache // needs to be flushed in the case of switching views private boolean flushFog = true; private Area exposedFogArea; // In screen space private BufferedImage miniImage; private BufferedImage backbuffer; private boolean drawBackground = true; private int lastX; private int lastY; private BufferedImage cellShape; private double lastScale; private Area visibleScreenArea; private final List<ItemRenderer> itemRenderList = new LinkedList<ItemRenderer>(); private PlayerView lastView; private Set<GUID> visibleTokenSet; private CodeTimer timer; public static enum TokenMoveCompletion { TRUE, FALSE, OTHER } public ZoneRenderer(Zone zone) { if (zone == null) { throw new IllegalArgumentException("Zone cannot be null"); } this.zone = zone; zone.addModelChangeListener(new ZoneModelChangeListener()); setFocusable(true); setZoneScale(new Scale()); zoneView = new ZoneView(zone); // DnD setTransferHandler(new TransferableHelper()); try { getDropTarget().addDropTargetListener(this); } catch (TooManyListenersException e1) { // Should never happen because the transfer handler fixes this // problem. } // Focus addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { requestFocusInWindow(); } @Override public void mouseExited(MouseEvent e) { pointUnderMouse = null; } @Override public void mouseEntered(MouseEvent e) { } }); addMouseMotionListener(new MouseMotionAdapter() { @Override public void mouseMoved(MouseEvent e) { pointUnderMouse = new ScreenPoint(e.getX(), e.getY()); } }); // fps.start(); } public void showPath(Token token, boolean show) { if (show) { showPathList.add(token); } else { showPathList.remove(token); } } public void centerOn(Token token) { if (token == null) { return; } centerOn(new ZonePoint(token.getX(), token.getY())); MapTool.getFrame().getToolbox().setSelectedTool(token.isToken() ? PointerTool.class : StampTool.class); setActiveLayer(token.getLayer()); selectToken(token.getId()); requestFocusInWindow(); } public ZonePoint getCenterPoint() { return new ScreenPoint(getSize().width / 2, getSize().height / 2).convertToZone(this); } public boolean isPathShowing(Token token) { return showPathList.contains(token); } public void clearShowPaths() { showPathList.clear(); repaint(); } public Scale getZoneScale() { return zoneScale; } public void setZoneScale(Scale scale) { zoneScale = scale; invalidateCurrentViewCache(); scale.addPropertyChangeListener(new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { if (Scale.PROPERTY_SCALE.equals(evt.getPropertyName())) { tokenLocationCache.clear(); flushFog = true; } if (Scale.PROPERTY_OFFSET.equals(evt.getPropertyName())) { // flushFog = true; } visibleScreenArea = null; repaint(); } }); } /** * I _hate_ this method. But couldn't think of a better way to tell the drawable renderer that a new image had * arrived TODO: FIX THIS ! 
Perhaps add a new app listener for when new images show up, add the drawable renderer as * a listener */ public void flushDrawableRenderer() { backgroundDrawableRenderer.flush(); objectDrawableRenderer.flush(); tokenDrawableRenderer.flush(); gmDrawableRenderer.flush(); } public ScreenPoint getPointUnderMouse() { return pointUnderMouse; } public void setMouseOver(Token token) { if (tokenUnderMouse == token) { return; } tokenUnderMouse = token; repaint(); } @Override public boolean isOpaque() { return false; } public void addMoveSelectionSet(String playerId, GUID keyToken, Set<GUID> tokenList, boolean clearLocalSelected) { // I'm not supposed to be moving a token when someone else is already moving it if (clearLocalSelected) { for (GUID guid : tokenList) { selectedTokenSet.remove(guid); } } selectionSetMap.put(keyToken, new SelectionSet(playerId, keyToken, tokenList)); repaint(); } public boolean hasMoveSelectionSetMoved(GUID keyToken, ZonePoint point) { SelectionSet set = selectionSetMap.get(keyToken); if (set == null) { return false; } Token token = zone.getToken(keyToken); int x = point.x - token.getX(); int y = point.y - token.getY(); return set.offsetX != x || set.offsetY != y; } public void updateMoveSelectionSet(GUID keyToken, ZonePoint offset) { SelectionSet set = selectionSetMap.get(keyToken); if (set == null) { return; } Token token = zone.getToken(keyToken); set.setOffset(offset.x - token.getX(), offset.y - token.getY()); repaint(); } public void toggleMoveSelectionSetWaypoint(GUID keyToken, ZonePoint location) { SelectionSet set = selectionSetMap.get(keyToken); if (set == null) { return; } set.toggleWaypoint(location); repaint(); } public ZonePoint getLastWaypoint(GUID keyToken) { SelectionSet set = selectionSetMap.get(keyToken); if (set == null) { return null; } return set.getLastWaypoint(); } public void removeMoveSelectionSet(GUID keyToken) { SelectionSet set = selectionSetMap.remove(keyToken); if (set == null) { return; } repaint(); } @SuppressWarnings("unchecked") // this is for Path<?> public void commitMoveSelectionSet(GUID keyTokenId) { // TODO: Quick hack to handle updating server state SelectionSet set = selectionSetMap.get(keyTokenId); if (set == null) { return; } removeMoveSelectionSet(keyTokenId); MapTool.serverCommand().stopTokenMove(getZone().getId(), keyTokenId); Token keyToken = zone.getToken(keyTokenId); CellPoint originPoint = zone.getGrid().convert(new ZonePoint(keyToken.getX(), keyToken.getY())); Path<? extends AbstractPoint> path = set.getWalker() != null ? set.getWalker().getPath() : set.gridlessPath; Set<GUID> selectionSet = set.getTokens(); List<GUID> filteredTokens = new ArrayList<GUID>(); BigDecimal tmc = null; for (GUID tokenGUID : selectionSet) { Token token = zone.getToken(tokenGUID); // If the token has been deleted, the GUID will still be in the set but getToken() will return null. if (token == null) continue; CellPoint tokenCell = zone.getGrid().convert(new ZonePoint(token.getX(), token.getY())); int cellOffX = originPoint.x - tokenCell.x; int cellOffY = originPoint.y - tokenCell.y; token.applyMove(set.getOffsetX(), set.getOffsetY(), path != null ? path.derive(cellOffX, cellOffY) : null); flush(token); MapTool.serverCommand().putToken(zone.getId(), token); zone.putToken(token); // No longer need this version replacementImageMap.remove(token); // Only add certain tokens to the list to process in the move // Macro function(s). 
if (token.isToken() && token.isVisible()) { filteredTokens.add(tokenGUID); } } if (filteredTokens != null) { // run onTokenMove for each token in the // filtered selection list, canceling if for (GUID tokenGUID : filteredTokens) { Token token = zone.getToken(tokenGUID); tmc = TokenMoveFunctions.tokenMoved(token, path, filteredTokens); if (tmc != null && tmc == BigDecimal.ONE) { denyMovement(token); } } } // Multiple tokens, the list of tokens and call // onMultipleTokensMove macro function. if (filteredTokens != null && filteredTokens.size() > 1) { tmc = TokenMoveFunctions.multipleTokensMoved(filteredTokens); // now determine if the macro returned false and if so // revert each token's move to the last path. if (tmc != null && tmc == BigDecimal.ONE) { for (GUID tokenGUID : filteredTokens) { Token token = zone.getToken(tokenGUID); denyMovement(token); } } } MapTool.getFrame().updateTokenTree(); } /** * @param token */ @SuppressWarnings("unchecked") private void denyMovement(final Token token) { Path<?> path = token.getLastPath(); if (path != null) { ZonePoint zp = null; if (path.getCellPath().get(0) instanceof CellPoint) { zp = zone.getGrid().convert((CellPoint) path.getCellPath().get(0)); } else { zp = (ZonePoint) path.getCellPath().get(0); } // Relocate token.setX(zp.x); token.setY(zp.y); // Do it again to cancel out the last move position token.setX(zp.x); token.setY(zp.y); // No more last path token.setLastPath(null); MapTool.serverCommand().putToken(zone.getId(), token); // Cache clearing flush(token); } } public boolean isTokenMoving(Token token) { for (SelectionSet set : selectionSetMap.values()) { if (set.contains(token)) { return true; } } return false; } protected void setViewOffset(int x, int y) { zoneScale.setOffset(x, y); } public void centerOn(ZonePoint point) { int x = point.x; int y = point.y; x = getSize().width / 2 - (int) (x * getScale()) - 1; y = getSize().height / 2 - (int) (y * getScale()) - 1; setViewOffset(x, y); repaint(); } public void centerOn(CellPoint point) { centerOn(zone.getGrid().convert(point)); } public void flush(Token token) { tokenLocationCache.remove(token); flipImageMap.remove(token); replacementImageMap.remove(token); labelRenderingCache.remove(token.getId()); // This should be smarter, but whatever visibleScreenArea = null; // This could also be smarter tokenStackMap = null; flushFog = true; renderedLightMap = null; renderedAuraMap = null; zoneView.flush(token); } public ZoneView getZoneView() { return zoneView; } /** * Clear internal caches and backbuffers */ public void flush() { if (zone.getBackgroundPaint() instanceof DrawableTexturePaint) { ImageManager.flushImage(((DrawableTexturePaint) zone.getBackgroundPaint()).getAssetId()); } ImageManager.flushImage(zone.getMapAssetId()); //MCL: I think these should be added, but I'm not sure so I'm not doing it. 
// tokenLocationMap.clear(); // tokenLocationCache.clear(); flushDrawableRenderer(); replacementImageMap.clear(); flipImageMap.clear(); fogBuffer = null; renderedLightMap = null; renderedAuraMap = null; isLoaded = false; } public void flushLight() { renderedLightMap = null; renderedAuraMap = null; zoneView.flush(); repaint(); } public void flushFog() { flushFog = true; visibleScreenArea = null; repaint(); } public Zone getZone() { return zone; } public void addOverlay(ZoneOverlay overlay) { overlayList.add(overlay); } public void removeOverlay(ZoneOverlay overlay) { overlayList.remove(overlay); } public void moveViewBy(int dx, int dy) { setViewOffset(getViewOffsetX() + dx, getViewOffsetY() + dy); } public void zoomReset(int x, int y) { zoneScale.zoomReset(x, y); MapTool.getFrame().getZoomStatusBar().update(); } public void zoomIn(int x, int y) { zoneScale.zoomIn(x, y); MapTool.getFrame().getZoomStatusBar().update(); } public void zoomOut(int x, int y) { zoneScale.zoomOut(x, y); MapTool.getFrame().getZoomStatusBar().update(); } public void setView(int x, int y, double scale) { setViewOffset(x, y); zoneScale.setScale(scale); MapTool.getFrame().getZoomStatusBar().update(); } public void enforceView(int x, int y, double scale, int gmWidth, int gmHeight) { int width = getWidth(); int height = getHeight(); // if (((double) width / height) < ((double) gmWidth / gmHeight)) if ((width * gmHeight) < (height * gmWidth)) { // Our aspect ratio is narrower than server's, so fit to width scale = scale * width / gmWidth; } else { // Our aspect ratio is shorter than server's, so fit to height scale = scale * height / gmHeight; } setScale(scale); centerOn(new ZonePoint(x, y)); } public void forcePlayersView() { ZonePoint zp = new ScreenPoint(getWidth() / 2, getHeight() / 2).convertToZone(this); MapTool.serverCommand().enforceZoneView(getZone().getId(), zp.x, zp.y, getScale(), getWidth(), getHeight()); } public void maybeForcePlayersView() { if (AppState.isPlayerViewLinked() && MapTool.getPlayer().isGM()) { forcePlayersView(); } } public BufferedImage getMiniImage(int size) { // if (miniImage == null && getTileImage() != // ImageManager.UNKNOWN_IMAGE) { // miniImage = new BufferedImage(size, size, Transparency.OPAQUE); // Graphics2D g = miniImage.createGraphics(); // g.setPaint(new TexturePaint(getTileImage(), new Rectangle(0, 0, // miniImage.getWidth(), miniImage.getHeight()))); // g.fillRect(0, 0, size, size); // g.dispose(); // } return miniImage; } @Override public void paintComponent(Graphics g) { Graphics2D g2d = (Graphics2D) g; renderZone(g2d, getPlayerView()); int noteVPos = 20; if (!zone.isVisible()) { GraphicsUtil.drawBoxedString(g2d, "Map not visible to players", getSize().width / 2, noteVPos); noteVPos += 20; } if (AppState.isShowAsPlayer()) { GraphicsUtil.drawBoxedString(g2d, "Player View", getSize().width / 2, noteVPos); } } public PlayerView getPlayerView() { Player.Role role = MapTool.getPlayer().getRole(); if (role == Player.Role.GM && AppState.isShowAsPlayer()) { role = Player.Role.PLAYER; } return getPlayerView(role); } public PlayerView getPlayerView(Player.Role role) { List<Token> selectedTokens = null; if (getSelectedTokenSet() != null && !getSelectedTokenSet().isEmpty()) { selectedTokens = getSelectedTokensList(); for (ListIterator<Token> iter = selectedTokens.listIterator(); iter.hasNext();) { Token token = iter.next(); if (!token.getHasSight() || !AppUtil.playerOwns(token)) { iter.remove(); } } } return new PlayerView(role, selectedTokens); } public Rectangle fogExtents() { return 
zone.getExposedArea().getBounds(); } /** * Get a bounding box, in Zone coordinates, of all the elements in the zone. This method was created by copying * renderZone() and then replacing each bit of rendering with a routine to simply aggregate the extents of the * object that would have been rendered. * * @return a new Rectangle with the bounding box of all the elements in the Zone */ public Rectangle zoneExtents(PlayerView view) { // Can't initialize extents to any set x/y values, because // we don't know if the actual map contains that x/y. // So we need a flag to say extents is 'unset', and the best I // could come up with is checking for 'null' on each loop iteration. Rectangle extents = null; // We don't iterate over the layers in the same order as rendering // because its cleaner to group them by type and the order doesn't matter. // First background image extents // TODO: when the background image can be resized, fix this! if (zone.getMapAssetId() != null) { extents = new Rectangle(zone.getBoardX(), zone.getBoardY(), ImageManager.getImage(zone.getMapAssetId(), this).getWidth(), ImageManager.getImage(zone.getMapAssetId(), this).getHeight()); } // next, extents of drawing objects List<DrawnElement> drawableList = new LinkedList<DrawnElement>(); drawableList.addAll(zone.getBackgroundDrawnElements()); drawableList.addAll(zone.getObjectDrawnElements()); drawableList.addAll(zone.getDrawnElements()); if (view.isGMView()) { drawableList.addAll(zone.getGMDrawnElements()); } for (DrawnElement element : drawableList) { Drawable drawable = element.getDrawable(); Rectangle drawnBounds = new Rectangle(drawable.getBounds()); // Handle pen size // This slightly over-estimates the size of the pen, but we want to // make sure to include the anti-aliased edges. Pen pen = element.getPen(); int penSize = (int) Math.ceil((pen.getThickness() / 2) + 1); drawnBounds.setBounds(drawnBounds.x - penSize, drawnBounds.y - penSize, drawnBounds.width + (penSize * 2), drawnBounds.height + (penSize * 2)); if (extents == null) extents = drawnBounds; else extents.add(drawnBounds); } // now, add the stamps/tokens // tokens and stamps are the same thing, just treated differently // This loop structure is a hack: but the getStamps-type methods return unmodifiable lists, // so we can't concat them, and there are a fixed number of layers, so its not really extensible anyway. for (int layer = 0; layer < 4; layer++) { List<Token> stampList = null; switch (layer) { case 0: stampList = zone.getBackgroundStamps(); break; // background layer case 1: stampList = zone.getStampTokens(); break; // object layer case 2: if (!view.isGMView()) { // hidden layer continue; } else { stampList = zone.getGMStamps(); break; } case 3: stampList = zone.getTokens(); break; // token layer } for (Token element : stampList) { Rectangle drawnBounds = element.getBounds(zone); if (element.hasFacing()) { // Get the facing and do a quick fix to make the math easier: -90 is 'unrotated' for some reason Integer facing = element.getFacing() + 90; if (facing > 180) { facing -= 360; } // if 90 degrees, just swap w and h // also swap them if rotated more than 90 (optimization for non-90deg rotations) if (facing != 0 && facing != 180) { if (Math.abs(facing) >= 90) { drawnBounds.setSize(drawnBounds.height, drawnBounds.width); // swapping h and w } // if rotated to non-axis direction, assume the worst case 45 deg // also assumes the rectangle rotates around its center // This will usually makes the bounds bigger than necessary, but its quick. 
// Also, for quickness, we assume its a square token using the larger dimension // At 45 deg, the bounds of the square will be sqrt(2) bigger, and the UL corner will // shift by 1/2 of the length. // The size increase is: (sqrt*(2) - 1) * size ~= 0.42 * size. if (facing != 0 && facing != 180 && facing != 90 && facing != -90) { Integer size = Math.max(drawnBounds.width, drawnBounds.height); Integer x = drawnBounds.x - (int) (0.21 * size); Integer y = drawnBounds.y - (int) (0.21 * size); Integer w = drawnBounds.width + (int) (0.42 * size); Integer h = drawnBounds.height + (int) (0.42 * size); drawnBounds.setBounds(x, y, w, h); } } } // TODO: Handle auras here? if (extents == null) extents = drawnBounds; else extents.add(drawnBounds); } } if (zone.hasFog()) { if (extents == null) extents = fogExtents(); else extents.add(fogExtents()); } // TODO: What are token templates? //renderTokenTemplates(g2d, view); // TODO: Do lights make the area of interest larger? // see: renderLights(g2d, view); // TODO: Do auras make the area of interest larger? // see: renderAuras(g2d, view); return extents; } /** * This method clears {@link #renderedAuraMap}, {@link #renderedLightMap}, {@link #visibleScreenArea}, and * {@link #lastView}. It also flushes the {@link #zoneView} and sets the {@link #flushFog} flag so that fog will be * recalculated. */ public void invalidateCurrentViewCache() { flushFog = true; renderedLightMap = null; renderedAuraMap = null; visibleScreenArea = null; lastView = null; if (zoneView != null) { zoneView.flush(); } } /** * This is the top-level method of the rendering pipeline that coordinates all other calls. * {@link #paintComponent(Graphics)} calls this method, then adds the two optional strings, * "Map not visible to players" and "Player View" as appropriate. * * @param g2d * Graphics2D object normally passed in by {@link #paintComponent(Graphics)} * @param view * PlayerView object that describes whether the view is a Player or GM view */ public void renderZone(Graphics2D g2d, PlayerView view) { timer = new CodeTimer("ZoneRenderer.renderZone"); timer.setEnabled(AppState.isCollectProfilingData() || log.isDebugEnabled()); timer.setThreshold(10); timer.start("setup"); g2d.setFont(AppStyle.labelFont); Object oldAA = SwingUtil.useAntiAliasing(g2d); // much of the raster code assumes the user clip is set boolean resetClip = false; if (g2d.getClipBounds() == null) { g2d.setClip(0, 0, getBounds().width, getBounds().height); resetClip = true; } // Are we still waiting to show the zone ? 
if (isLoading()) { Dimension size = getSize(); g2d.setColor(Color.black); g2d.fillRect(0, 0, size.width, size.height); GraphicsUtil.drawBoxedString(g2d, loadingProgress, size.width / 2, size.height / 2); return; } if (MapTool.getCampaign().isBeingSerialized()) { Dimension size = getSize(); g2d.setColor(Color.black); g2d.fillRect(0, 0, size.width, size.height); GraphicsUtil.drawBoxedString(g2d, " Please Wait ", size.width / 2, size.height / 2); return; } if (zone == null) { return; } if (lastView != null && !lastView.equals(view)) { invalidateCurrentViewCache(); } lastView = view; // Clear internal state tokenLocationMap.clear(); markerLocationList.clear(); itemRenderList.clear(); timer.stop("setup"); // Calculations timer.start("calcs"); if (zoneView.isUsingVision() && zoneView.getVisibleArea(view) != null && visibleScreenArea == null) { AffineTransform af = new AffineTransform(); af.translate(zoneScale.getOffsetX(), zoneScale.getOffsetY()); af.scale(getScale(), getScale()); visibleScreenArea = zoneView.getVisibleArea(view).createTransformedArea(af); } exposedFogArea = new Area(zone.getExposedArea()); if (exposedFogArea != null) { AffineTransform af = new AffineTransform(); af.translate(getViewOffsetX(), getViewOffsetY()); af.scale(getScale(), getScale()); exposedFogArea.transform(af); } else { // fully exposed (screen area) exposedFogArea = new Area(new Rectangle(0, 0, getSize().width, getSize().height)); } timer.stop("calcs"); // Rendering pipeline if (zone.drawBoard()) { timer.start("board"); renderBoard(g2d, view); timer.stop("board"); } if (Zone.Layer.BACKGROUND.isEnabled()) { timer.start("drawableBackground"); renderDrawableOverlay(g2d, backgroundDrawableRenderer, view, zone.getBackgroundDrawnElements()); timer.stop("drawableBackground"); timer.start("tokensBackground"); renderTokens(g2d, zone.getBackgroundStamps(), view); timer.stop("tokensBackground"); } if (Zone.Layer.OBJECT.isEnabled()) { // Drawables on the object layer are always below the grid, and... timer.start("drawableObjects"); renderDrawableOverlay(g2d, objectDrawableRenderer, view, zone.getObjectDrawnElements()); timer.stop("drawableObjects"); } timer.start("grid"); renderGrid(g2d, view); timer.stop("grid"); if (Zone.Layer.OBJECT.isEnabled()) { // ... Images on the object layer are always ABOVE the grid. timer.start("tokensStamp"); renderTokens(g2d, zone.getStampTokens(), view); timer.stop("tokensStamp"); } if (Zone.Layer.TOKEN.isEnabled()) { timer.start("lights"); renderLights(g2d, view); timer.stop("lights"); timer.start("auras"); renderAuras(g2d, view); timer.stop("auras"); } /** * The following sections used to handle rendering of the Hidden (i.e. "GM") layer followed by the Token layer. * The problem was that we want all drawables to appear below all tokens, and the old configuration performed * the rendering in the following order: * <ol> * <li>Render Hidden-layer tokens * <li>Render Hidden-layer drawables * <li>Render Token-layer drawables * <li>Render Token-layer tokens * </ol> * That's fine for players, but clearly wrong if the view is for the GM. We now use: * <ol> * <li>Render Token-layer drawables // Player-drawn images shouldn't obscure GM's images? * <li>Render Hidden-layer drawables // GM could always use "View As Player" if needed? 
* <li>Render Hidden-layer tokens * <li>Render Token-layer tokens * </ol> */ if (Zone.Layer.TOKEN.isEnabled()) { timer.start("drawableTokens"); renderDrawableOverlay(g2d, tokenDrawableRenderer, view, zone.getDrawnElements()); timer.stop("drawableTokens"); } if (view.isGMView()) { if (Zone.Layer.GM.isEnabled()) { timer.start("drawableGM"); renderDrawableOverlay(g2d, gmDrawableRenderer, view, zone.getGMDrawnElements()); timer.stop("drawableGM"); timer.start("tokensGM"); renderTokens(g2d, zone.getGMStamps(), view); timer.stop("tokensGM"); } } if (Zone.Layer.TOKEN.isEnabled()) { timer.start("tokens"); renderTokens(g2d, zone.getTokens(), view); timer.stop("tokens"); timer.start("unowned movement"); renderMoveSelectionSets(g2d, view, getUnOwnedMovementSet(view)); timer.stop("unowned movement"); timer.start("owned movement"); renderMoveSelectionSets(g2d, view, getOwnedMovementSet(view)); timer.stop("owned movement"); // Text associated with tokens being moved is added to a list to be drawn after, i.e. on top of, the tokens themselves. // So if one moving token is on top of another moving token, at least the textual identifiers will be visible. timer.start("token name/labels"); renderRenderables(g2d); timer.stop("token name/labels"); } /** * FJE It's probably not appropriate for labels to be above everything, including tokens. Above drawables, yes. * Above tokens, no. */ timer.start("labels"); renderLabels(g2d, view); timer.stop("labels"); // this check is redundant for now, since renderFog checks also if (zone.hasFog()) { timer.start("fog"); renderFog(g2d, view); timer.stop("fog"); } // if (zone.visionType ...) if (view.isGMView()) { timer.start("visionOverlayGM"); renderGMVisionOverlay(g2d, view); timer.stop("visionOverlayGM"); } else { timer.start("visionOverlayPlayer"); renderPlayerVisionOverlay(g2d, view); timer.stop("visionOverlayPlayer"); } timer.start("overlays"); for (int i = 0; i < overlayList.size(); i++) { ZoneOverlay overlay = overlayList.get(i); overlay.paintOverlay(this, g2d); } timer.stop("overlays"); renderCoordinates(g2d, view); if (Zone.Layer.TOKEN.isEnabled()) { if (view.isGMView() && AppState.isShowLightSources()) { lightSourceIconOverlay.paintOverlay(this, g2d); } } // g2d.setColor(Color.red); // for (AreaMeta meta : getTopologyAreaData().getAreaList()) { // Area area = new Area(meta.getArea().getBounds()).createTransformedArea(AffineTransform.getScaleInstance(getScale(), getScale())); // area = area.createTransformedArea(AffineTransform.getTranslateInstance(zoneScale.getOffsetX(), zoneScale.getOffsetY())); // g2d.draw(area); // } SwingUtil.restoreAntiAliasing(g2d, oldAA); if (AppState.isCollectProfilingData() || log.isDebugEnabled()) { String results = timer.toString(); MapTool.getProfilingNoteFrame().addText(results); if (log.isDebugEnabled()) log.debug(results); } if (resetClip) { g2d.setClip(null); } } private void delayRendering(ItemRenderer renderer) { itemRenderList.add(renderer); } private void renderRenderables(Graphics2D g) { for (ItemRenderer renderer : itemRenderList) { renderer.render(g); } } public CodeTimer getCodeTimer() { return timer; } private Map<Paint, List<Area>> renderedLightMap; private void renderLights(Graphics2D g, PlayerView view) { // Setup timer.start("lights-1"); Graphics2D newG = (Graphics2D) g.create(); if (!view.isGMView() && visibleScreenArea != null) { Area clip = new Area(g.getClip()); clip.intersect(visibleScreenArea); newG.setClip(clip); } SwingUtil.useAntiAliasing(newG); timer.stop("lights-1"); timer.start("lights-2"); AffineTransform af 
= g.getTransform(); af.translate(getViewOffsetX(), getViewOffsetY()); af.scale(getScale(), getScale()); newG.setTransform(af); newG.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, AppPreferences.getLightOverlayOpacity() / 255.0f)); timer.stop("lights-2"); if (renderedLightMap == null) { timer.start("lights-3"); // Organize Map<Paint, List<Area>> colorMap = new HashMap<Paint, List<Area>>(); List<DrawableLight> otherLightList = new LinkedList<DrawableLight>(); for (DrawableLight light : zoneView.getDrawableLights()) { if (light.getType() == LightSource.Type.NORMAL) { if (zone.getVisionType() == Zone.VisionType.NIGHT && light.getPaint() != null) { List<Area> areaList = colorMap.get(light.getPaint().getPaint()); if (areaList == null) { areaList = new ArrayList<Area>(); colorMap.put(light.getPaint().getPaint(), areaList); } areaList.add(new Area(light.getArea())); } } else { // I'm not a huge fan of this hard wiring, but I haven't thought of a better way yet, so this'll work fine for now otherLightList.add(light); } } timer.stop("lights-3"); timer.start("lights-4"); // Combine same colors to avoid ugly overlap // Avoid combining _all_ of the lights as the area adds are very expensive, just combine those that overlap for (List<Area> areaList : colorMap.values()) { List<Area> sourceList = new LinkedList<Area>(areaList); areaList.clear(); outter: while (sourceList.size() > 0) { Area area = sourceList.remove(0); for (ListIterator<Area> iter = sourceList.listIterator(); iter.hasNext();) { Area currArea = iter.next(); if (currArea.getBounds().intersects(area.getBounds())) { iter.remove(); area.add(currArea); sourceList.add(area); continue outter; } } // If we are here, we didn't find any other area to merge // with areaList.add(area); } // Cut out the bright light if (areaList.size() > 0) { for (Area area : areaList) { for (Area brightArea : zoneView.getBrightLights()) { area.subtract(brightArea); } } } } renderedLightMap = new LinkedHashMap<Paint, List<Area>>(); for (Entry<Paint, List<Area>> entry : colorMap.entrySet()) { renderedLightMap.put(entry.getKey(), entry.getValue()); } timer.stop("lights-4"); } // Draw timer.start("lights-5"); for (Entry<Paint, List<Area>> entry : renderedLightMap.entrySet()) { newG.setPaint(entry.getKey()); for (Area area : entry.getValue()) { newG.fill(area); } } timer.stop("lights-5"); newG.dispose(); } private Map<Paint, Area> renderedAuraMap; private void renderAuras(Graphics2D g, PlayerView view) { // Setup timer.start("auras-1"); Graphics2D newG = (Graphics2D) g.create(); if (!view.isGMView() && visibleScreenArea != null) { Area clip = new Area(g.getClip()); clip.intersect(visibleScreenArea); newG.setClip(clip); } SwingUtil.useAntiAliasing(newG); timer.stop("auras-1"); timer.start("auras-2"); AffineTransform af = g.getTransform(); af.translate(getViewOffsetX(), getViewOffsetY()); af.scale(getScale(), getScale()); newG.setTransform(af); newG.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, AppPreferences.getAuraOverlayOpacity() / 255.0f)); timer.stop("auras-2"); if (renderedAuraMap == null) { // Organize Map<Paint, List<Area>> colorMap = new HashMap<Paint, List<Area>>(); timer.start("auras-4"); Color paintColor = new Color(255, 255, 255, 150); for (DrawableLight light : zoneView.getLights(LightSource.Type.AURA)) { Paint paint = light.getPaint() != null ? 
light.getPaint().getPaint() : paintColor; List<Area> list = colorMap.get(paint); if (list == null) { list = new LinkedList<Area>(); list.add(new Area(light.getArea())); colorMap.put(paint, list); } else { list.get(0).add(new Area(light.getArea())); } } renderedAuraMap = new LinkedHashMap<Paint, Area>(); for (Entry<Paint, List<Area>> entry : colorMap.entrySet()) { renderedAuraMap.put(entry.getKey(), entry.getValue().get(0)); } timer.stop("auras-4"); } // Draw timer.start("auras-5"); for (Entry<Paint, Area> entry : renderedAuraMap.entrySet()) { newG.setPaint(entry.getKey()); newG.fill(entry.getValue()); } timer.stop("auras-5"); newG.dispose(); } /** * This outlines the area visible to the token under the cursor, clipped to the current fog-of-war. This is * appropriate for the player view, but the GM sees everything. */ private void renderPlayerVisionOverlay(Graphics2D g, PlayerView view) { Graphics2D g2 = (Graphics2D) g.create(); if (zone.hasFog() && exposedFogArea != null) { Area clip = new Area(new Rectangle(getSize().width, getSize().height)); Area viewArea = new Area(); if (view.getTokens() != null && !view.getTokens().isEmpty()) { for (Token tok : view.getTokens()) { if (!AppUtil.playerOwns(tok)) { continue; } ExposedAreaMetaData exposedMeta = zone.getExposedAreaMetaData(tok.getExposedAreaGUID()); viewArea.add(exposedMeta.getExposedAreaHistory()); } } viewArea.add(zone.getExposedArea()); clip.intersect(viewArea); AffineTransform af = new AffineTransform(); //af.translate(zoneScale.getOffsetX(), zoneScale.getOffsetY()); af.scale(+1, +1); Area newClip = clip.createTransformedArea(af); g2.setClip(newClip); } renderVisionOverlay(g2, view); g2.dispose(); } /** * Render the vision overlay as though the view were the GM. */ private void renderGMVisionOverlay(Graphics2D g, PlayerView view) { renderVisionOverlay(g, view); } /** * This outlines the area visible to the token under the cursor. */ private void renderVisionOverlay(Graphics2D g, PlayerView view) { Area currentTokenVisionArea = zoneView.getVisibleArea(tokenUnderMouse); if (currentTokenVisionArea == null) return; Area combined = new Area(currentTokenVisionArea); ExposedAreaMetaData meta = zone.getExposedAreaMetaData(tokenUnderMouse.getExposedAreaGUID()); Area tmpArea = new Area(meta.getExposedAreaHistory()); tmpArea.add(zone.getExposedArea()); combined.intersect(tmpArea); boolean isOwner = AppUtil.playerOwns(tokenUnderMouse); boolean tokenIsPC = tokenUnderMouse.getType() == Token.Type.PC; boolean strictOwnership = MapTool.getServerPolicy() == null ? false : MapTool.getServerPolicy().useStrictTokenManagement(); // String player = MapTool.getPlayer().getName(); // System.err.print("tokenUnderMouse.ownedBy(" + player + "): " + isOwner); // System.err.print(", tokenIsPC: " + tokenIsPC); // System.err.print(", isGMView(): " + view.isGMView()); // System.err.println(", strictOwnership: " + strictOwnership); /* * The vision arc and optional halo-filled visible area shouldn't be shown to everyone. If we are in GM view, or * if we are the owner of the token in question, or if the token is a PC and strict token ownership is off... * then the vision arc should be displayed. 
*/ if (isOwner || view.isGMView() || (tokenIsPC && !strictOwnership)) { AffineTransform af = new AffineTransform(); af.translate(zoneScale.getOffsetX(), zoneScale.getOffsetY()); af.scale(getScale(), getScale()); Area area = combined.createTransformedArea(af); g.setClip(this.getBounds()); SwingUtil.useAntiAliasing(g); //g.setStroke(new BasicStroke(2)); g.setColor(new Color(255, 255, 255)); // outline around visible area g.draw(area); boolean useHaloColor = tokenUnderMouse.getHaloColor() != null && AppPreferences.getUseHaloColorOnVisionOverlay(); if (tokenUnderMouse.getVisionOverlayColor() != null || useHaloColor) { Color visionColor = useHaloColor ? tokenUnderMouse.getHaloColor() : tokenUnderMouse.getVisionOverlayColor(); g.setColor(new Color(visionColor.getRed(), visionColor.getGreen(), visionColor.getBlue(), AppPreferences.getHaloOverlayOpacity())); g.fill(area); } } } private void renderLabels(Graphics2D g, PlayerView view) { labelLocationList.clear(); for (Label label : zone.getLabels()) { ZonePoint zp = new ZonePoint(label.getX(), label.getY()); if (!zone.isPointVisible(zp, view)) { continue; } ScreenPoint sp = ScreenPoint.fromZonePointRnd(this, zp.x, zp.y); Rectangle bounds = null; if (label.isShowBackground()) { bounds = GraphicsUtil.drawBoxedString(g, label.getLabel(), (int) sp.x, (int) sp.y, SwingUtilities.CENTER, GraphicsUtil.GREY_LABEL, label.getForegroundColor()); } else { FontMetrics fm = g.getFontMetrics(); int strWidth = SwingUtilities.computeStringWidth(fm, label.getLabel()); int x = (int) (sp.x - strWidth / 2); int y = (int) (sp.y - fm.getAscent()); g.setColor(label.getForegroundColor()); g.drawString(label.getLabel(), x, y + fm.getAscent()); bounds = new Rectangle(x, y, strWidth, fm.getHeight()); } labelLocationList.add(new LabelLocation(bounds, label)); } } Integer fogX = null; Integer fogY = null; private void renderFog(Graphics2D g, PlayerView view) { if (!zone.hasFog()) { return; } Dimension size = getSize(); // Optimization for panning Area fogClip = null; if (!flushFog && fogX != null && fogY != null && (fogX != getViewOffsetX() || fogY != getViewOffsetY())) { // This optimization does not seem to keep the alpha channel correctly, and sometimes leaves // lines on some graphics boards, we'll leave it out for now // if (Math.abs(fogX - getViewOffsetX()) < size.width && Math.abs(fogY - getViewOffsetY()) < size.height) { // int deltaX = getViewOffsetX() - fogX; // int deltaY = getViewOffsetY() - fogY; // // Graphics2D buffG = fogBuffer.createGraphics(); // // buffG.setComposite(AlphaComposite.Src); // buffG.copyArea(0, 0, size.width, size.height, deltaX, deltaY); // // buffG.dispose(); // // fogClip = new Area(); // if (deltaX < 0) { // fogClip.add(new Area(new Rectangle(size.width+deltaX, 0, -deltaX, size.height))); // } else if (deltaX > 0){ // fogClip.add(new Area(new Rectangle(0, 0, deltaX, size.height))); // } // // if (deltaY < 0) { // fogClip.add(new Area(new Rectangle(0, size.height + deltaY, size.width, -deltaY))); // } else if (deltaY > 0) { // fogClip.add(new Area(new Rectangle(0, 0, size.width, deltaY))); // } // // } flushFog = true; } if (flushFog || fogBuffer == null || fogBuffer.getWidth() != size.width || fogBuffer.getHeight() != size.height) { fogX = getViewOffsetX(); fogY = getViewOffsetY(); boolean newImage = false; if (fogBuffer == null || fogBuffer.getWidth() != size.width || fogBuffer.getHeight() != size.height) { newImage = true; fogBuffer = new BufferedImage(size.width, size.height, view.isGMView() ? 
Transparency.TRANSLUCENT : Transparency.BITMASK); } Graphics2D buffG = fogBuffer.createGraphics(); buffG.setClip(fogClip != null ? fogClip : new Rectangle(0, 0, size.width, size.height)); SwingUtil.useAntiAliasing(buffG); if (!newImage) { Composite oldComposite = buffG.getComposite(); buffG.setComposite(AlphaComposite.Clear); buffG.fillRect(0, 0, size.width, size.height); buffG.setComposite(oldComposite); } // Fill buffG.setPaint(zone.getFogPaint().getPaint(getViewOffsetX(), getViewOffsetY(), getScale())); buffG.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC, view.isGMView() ? .6f : 1f)); // JFJ this fixes the GM exposed area view. buffG.fillRect(0, 0, size.width, size.height); // Cut out the exposed area AffineTransform af = new AffineTransform(); af.translate(getViewOffsetX(), getViewOffsetY()); af.scale(getScale(), getScale()); buffG.setTransform(af); buffG.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC, view.isGMView() ? .6f : 1f)); buffG.setComposite(AlphaComposite.getInstance(AlphaComposite.CLEAR)); Area visibleArea = zoneView.getVisibleArea(view); Area combined = zone.getExposedArea(view); renderFogArea(buffG, view, combined, visibleArea); renderFogOutline(buffG, view, combined); buffG.dispose(); flushFog = false; } g.drawImage(fogBuffer, 0, 0, this); } private void renderFogArea(final Graphics2D buffG, final PlayerView view, Area softFog, Area visibleArea) { if (zoneView.isUsingVision()) { buffG.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC)); if (visibleArea != null) { buffG.setColor(new Color(0, 0, 0, AppPreferences.getFogOverlayOpacity())); if (zone.hasFog()) { // Fill in the exposed area buffG.fill(softFog); buffG.setComposite(AlphaComposite.getInstance(AlphaComposite.CLEAR)); Shape oldClip = buffG.getClip(); buffG.setClip(softFog); buffG.fill(visibleArea); buffG.setClip(oldClip); } else { buffG.setColor(new Color(255, 255, 255, 40)); // was 255,255,255,40 buffG.fill(visibleArea); } } else { if (zone.hasFog()) { buffG.setColor(new Color(0, 0, 0, 80)); buffG.fill(softFog); } } } } private void renderFogOutline(final Graphics2D buffG, PlayerView view, Area softFog) { float alpha = view.isGMView() ? AppPreferences.getFogOverlayOpacity() / 255.0f : 1f; if (false && AppPreferences.getUseSoftFogEdges()) { GraphicsUtil.renderSoftClipping(buffG, softFog, (int) (zone.getGrid().getSize() * getScale() * .25), alpha); } else { if (visibleScreenArea != null) { Shape oldClip = buffG.getClip(); // buffG.setClip(softFog); buffG.setTransform(new AffineTransform()); buffG.setComposite(AlphaComposite.Src); buffG.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); buffG.setStroke(new BasicStroke(1)); buffG.setColor(Color.BLACK); buffG.draw(visibleScreenArea); // buffG.setClip(oldClip); } } } public Area getVisibleArea(Token token) { return zoneView.getVisibleArea(token); } public boolean isLoading() { if (isLoaded) { // We're done, until the cache is cleared return false; } // Get a list of all the assets in the zone Set<MD5Key> assetSet = zone.getAllAssetIds(); assetSet.remove(null); // remove bad data // Make sure they are loaded int downloadCount = 0; int cacheCount = 0; boolean loaded = true; for (MD5Key id : assetSet) { // Have we gotten the actual data yet ? Asset asset = AssetManager.getAsset(id); if (asset == null) { AssetManager.getAssetAsynchronously(id); loaded = false; continue; } downloadCount++; // Have we loaded the image into memory yet ? 
Image image = ImageManager.getImage(asset.getId(), this); if (image == null || image == ImageManager.TRANSFERING_IMAGE) { loaded = false; continue; } cacheCount++; } loadingProgress = String.format(" Loading Map '%s' - %d/%d Loaded %d/%d Cached", zone.getName(), downloadCount, assetSet.size(), cacheCount, assetSet.size()); isLoaded = loaded; if (isLoaded) { // Notify the token tree that it should update MapTool.getFrame().updateTokenTree(); } return !isLoaded; } protected void renderDrawableOverlay(Graphics g, DrawableRenderer renderer, PlayerView view, List<DrawnElement> drawnElements) { Rectangle viewport = new Rectangle(zoneScale.getOffsetX(), zoneScale.getOffsetY(), getSize().width, getSize().height); List<DrawnElement> list = new ArrayList<DrawnElement>(); list.addAll(drawnElements); renderer.renderDrawables(g, list, viewport, getScale()); } protected void renderBoard(Graphics2D g, PlayerView view) { Dimension size = getSize(); if (backbuffer == null || backbuffer.getWidth() != size.width || backbuffer.getHeight() != size.height) { backbuffer = new BufferedImage(size.width, size.height, Transparency.OPAQUE); drawBackground = true; } Scale scale = getZoneScale(); if (scale.getOffsetX() != lastX || scale.getOffsetY() != lastY || scale.getScale() != lastScale) { drawBackground = true; } if (zone.isBoardChanged()) { drawBackground = true; zone.setBoardChanged(false); } if (drawBackground) { Graphics2D bbg = backbuffer.createGraphics(); // Background texture Paint paint = zone.getBackgroundPaint().getPaint(getViewOffsetX(), getViewOffsetY(), getScale(), this); bbg.setPaint(paint); bbg.fillRect(0, 0, size.width, size.height); // Map if (zone.getMapAssetId() != null) { BufferedImage mapImage = ImageManager.getImage(zone.getMapAssetId(), this); double scaleFactor = getScale(); bbg.drawImage(mapImage, getViewOffsetX() + (int) (zone.getBoardX() * scaleFactor), getViewOffsetY() + (int) (zone.getBoardY() * scaleFactor), (int) (mapImage.getWidth() * scaleFactor), (int) (mapImage.getHeight() * scaleFactor), null); } bbg.dispose(); drawBackground = false; } lastX = scale.getOffsetX(); lastY = scale.getOffsetY(); lastScale = scale.getScale(); g.drawImage(backbuffer, 0, 0, this); } protected void renderGrid(Graphics2D g, PlayerView view) { int gridSize = (int) (zone.getGrid().getSize() * getScale()); if (!AppState.isShowGrid() || gridSize < MIN_GRID_SIZE) { return; } zone.getGrid().draw(this, g, g.getClipBounds()); } protected void renderCoordinates(Graphics2D g, PlayerView view) { if (AppState.isShowCoordinates()) { zone.getGrid().drawCoordinatesOverlay(g, this); } } private Set<SelectionSet> getOwnedMovementSet(PlayerView view) { Set<SelectionSet> movementSet = new HashSet<SelectionSet>(); for (SelectionSet selection : selectionSetMap.values()) { if (selection.getPlayerId().equals(MapTool.getPlayer().getName())) { movementSet.add(selection); } } return movementSet; } private Set<SelectionSet> getUnOwnedMovementSet(PlayerView view) { Set<SelectionSet> movementSet = new HashSet<SelectionSet>(); for (SelectionSet selection : selectionSetMap.values()) { if (!selection.getPlayerId().equals(MapTool.getPlayer().getName())) { movementSet.add(selection); } } return movementSet; } protected void renderMoveSelectionSets(Graphics2D g, PlayerView view, Set<SelectionSet> movementSet) { if (selectionSetMap.isEmpty()) { return; } double scale = zoneScale.getScale(); boolean clipInstalled = false; for (SelectionSet set : movementSet) { Token keyToken = zone.getToken(set.getKeyToken()); if (keyToken == null) { // It 
was removed ? selectionSetMap.remove(set.getKeyToken()); continue; } // Hide the hidden layer if (keyToken.getLayer() == Zone.Layer.GM && !view.isGMView()) { continue; } ZoneWalker walker = set.getWalker(); for (GUID tokenGUID : set.getTokens()) { Token token = zone.getToken(tokenGUID); // Perhaps deleted? if (token == null) continue; // Don't bother if it's not visible if (!token.isVisible() && !view.isGMView()) continue; // ... or if it's visible only to the owner and that's not us! if (token.isVisibleOnlyToOwner() && !AppUtil.playerOwns(token)) continue; // ... or if it doesn't have an image to display. (Hm, should still show *something*?) Asset asset = AssetManager.getAsset(token.getImageAssetId()); if (asset == null) continue; // OPTIMIZE: combine this with the code in renderTokens() Rectangle footprintBounds = token.getBounds(zone); ScreenPoint newScreenPoint = ScreenPoint.fromZonePoint(this, footprintBounds.x + set.getOffsetX(), footprintBounds.y + set.getOffsetY()); BufferedImage image = ImageManager.getImage(token.getImageAssetId()); int scaledWidth = (int) (footprintBounds.width * scale); int scaledHeight = (int) (footprintBounds.height * scale); // Tokens are centered on the image center point int x = (int) (newScreenPoint.x); int y = (int) (newScreenPoint.y); // Vision visibility boolean isOwner = view.isGMView() || set.getPlayerId().equals(MapTool.getPlayer().getName()); if (!view.isGMView() && visibleScreenArea != null && !isOwner) { // FJE Um, why not just assign the clipping area at the top of the routine? if (!clipInstalled) { // Only show the part of the path that is visible Area visibleArea = new Area(g.getClipBounds()); visibleArea.intersect(visibleScreenArea); g = (Graphics2D) g.create(); g.setClip(new GeneralPath(visibleArea)); clipInstalled = true; // System.out.println("Adding Clip: " + MapTool.getPlayer().getName()); } } // Show path only on the key token if (token == keyToken) { if (!token.isStamp()) { renderPath(g, walker != null ? walker.getPath() : set.gridlessPath, token.getFootprint(zone.getGrid())); } } // handle flipping BufferedImage workImage = image; if (token.isFlippedX() || token.isFlippedY()) { workImage = new BufferedImage(image.getWidth(), image.getHeight(), image.getTransparency()); int workW = image.getWidth() * (token.isFlippedX() ? -1 : 1); int workH = image.getHeight() * (token.isFlippedY() ? -1 : 1); int workX = token.isFlippedX() ? image.getWidth() : 0; int workY = token.isFlippedY() ? image.getHeight() : 0; Graphics2D wig = workImage.createGraphics(); wig.drawImage(image, workX, workY, workW, workH, null); wig.dispose(); } // Draw token Dimension imgSize = new Dimension(workImage.getWidth(), workImage.getHeight()); SwingUtil.constrainTo(imgSize, footprintBounds.width, footprintBounds.height); int offsetx = 0; int offsety = 0; if (token.isSnapToScale()) { offsetx = (int) (imgSize.width < footprintBounds.width ? (footprintBounds.width - imgSize.width) / 2 * getScale() : 0); offsety = (int) (imgSize.height < footprintBounds.height ? 
(footprintBounds.height - imgSize.height) / 2 * getScale() : 0); } int tx = x + offsetx; int ty = y + offsety; AffineTransform at = new AffineTransform(); at.translate(tx, ty); if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { at.rotate(Math.toRadians(-token.getFacing() - 90), scaledWidth / 2 - token.getAnchor().x * scale - offsetx, scaledHeight / 2 - token.getAnchor().y * scale - offsety); // facing defaults to down, or -90 degrees } if (token.isSnapToScale()) { at.scale((double) imgSize.width / workImage.getWidth(), (double) imgSize.height / workImage.getHeight()); at.scale(getScale(), getScale()); } else { at.scale((double) scaledWidth / workImage.getWidth(), (double) scaledHeight / workImage.getHeight()); } g.drawImage(workImage, at, this); // Other details if (token == keyToken) { Rectangle bounds = new Rectangle(tx, ty, imgSize.width, imgSize.height); bounds.width *= getScale(); bounds.height *= getScale(); Grid grid = zone.getGrid(); boolean checkForFog = MapTool.getServerPolicy().isUseIndividualFOW() && zoneView.isUsingVision(); boolean showLabels = view.isGMView() || set.getPlayerId().equals(MapTool.getPlayer().getName()); if (checkForFog) { Path<? extends AbstractPoint> path = set.getWalker() != null ? set.getWalker().getPath() : set.gridlessPath; List<? extends AbstractPoint> thePoints = path.getCellPath(); /* * now that we have the last point, we can check to see if it's gridless or not. If not * gridless, get the last point the token was at and see if the token's footprint is inside the * visible area to show the label. */ if (thePoints.isEmpty()) { showLabels = false; } else { AbstractPoint lastPoint = thePoints.get(thePoints.size() - 1); Rectangle tokenRectangle = null; if (lastPoint instanceof CellPoint) { tokenRectangle = token.getFootprint(grid).getBounds(grid, (CellPoint) lastPoint); } else { Rectangle tokBounds = token.getBounds(zone); tokenRectangle = new Rectangle(); tokenRectangle.setBounds(lastPoint.x, lastPoint.y, (int) tokBounds.getWidth(), (int) tokBounds.getHeight()); } showLabels = showLabels || zoneView.getVisibleArea(view).intersects(tokenRectangle); } } else { // !isUseIndividualFOW() showLabels = showLabels || (visibleScreenArea == null && !zone.hasFog()); // no vision - fog showLabels = showLabels || (visibleScreenArea == null && zone.hasFog() && exposedFogArea.intersects(bounds)); // no vision + fog showLabels = showLabels || (visibleScreenArea != null && visibleScreenArea.intersects(bounds) && exposedFogArea.intersects(bounds)); // vision } if (showLabels) { // if the token is visible on the screen it will be in the location cache if (tokenLocationCache.containsKey(token)) { y += 10 + scaledHeight; x += scaledWidth / 2; if (!token.isStamp()) { if (AppState.getShowMovementMeasurements()) { String distance = ""; if (walker != null) { // This wouldn't be true unless token.isSnapToGrid() && grid.isPathingSupported() int distanceTraveled = walker.getDistance(); if (distanceTraveled >= 1) { distance = Integer.toString(distanceTraveled); } } else { double c = 0; ZonePoint lastPoint = null; for (ZonePoint zp : set.gridlessPath.getCellPath()) { if (lastPoint == null) { lastPoint = zp; continue; } int a = lastPoint.x - zp.x; int b = lastPoint.y - zp.y; c += Math.hypot(a, b); lastPoint = zp; } c /= zone.getGrid().getSize(); // Number of "cells" c *= zone.getUnitsPerCell(); // "actual" distance traveled distance = String.format("%.1f", c); } if (distance.length() > 0) { delayRendering(new LabelRenderer(distance, x, y)); y += 20; } } } if 
(set.getPlayerId() != null && set.getPlayerId().length() >= 1) { delayRendering(new LabelRenderer(set.getPlayerId(), x, y)); } } // !token.isStamp() } // showLabels } // token == keyToken } } } @SuppressWarnings("unchecked") public void renderPath(Graphics2D g, Path path, TokenFootprint footprint) { Object oldRendering = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING); g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); if (path.getCellPath().size() == 0) { return; } Grid grid = zone.getGrid(); double scale = getScale(); Rectangle footprintBounds = footprint.getBounds(grid); if (path.getCellPath().get(0) instanceof CellPoint) { timer.start("renderPath-1"); CellPoint previousPoint = null; Point previousHalfPoint = null; List<CellPoint> cellPath = path.getCellPath(); Set<CellPoint> pathSet = new HashSet<CellPoint>(); List<ZonePoint> waypointList = new LinkedList<ZonePoint>(); for (CellPoint p : cellPath) { pathSet.addAll(footprint.getOccupiedCells(p)); if (path.isWaypoint(p) && previousPoint != null) { ZonePoint zp = grid.convert(p); zp.x += footprintBounds.width / 2; zp.y += footprintBounds.height / 2; waypointList.add(zp); } previousPoint = p; } // Don't show the final path point as a waypoint, it's redundant, and ugly if (waypointList.size() > 0) { waypointList.remove(waypointList.size() - 1); } timer.stop("renderPath-1"); timer.start("renderPath-2"); Dimension cellOffset = zone.getGrid().getCellOffset(); for (CellPoint p : pathSet) { ZonePoint zp = grid.convert(p); zp.x += grid.getCellWidth() / 2 + cellOffset.width; zp.y += grid.getCellHeight() / 2 + cellOffset.height; highlightCell(g, zp, grid.getCellHighlight(), 1.0f); } for (ZonePoint p : waypointList) { ZonePoint zp = new ZonePoint(p.x + cellOffset.width, p.y + cellOffset.height); highlightCell(g, zp, AppStyle.cellWaypointImage, .333f); } // Line path if (grid.getCapabilities().isPathLineSupported()) { ZonePoint lineOffset = new ZonePoint(footprintBounds.x + footprintBounds.width / 2 - grid.getOffsetX(), footprintBounds.y + footprintBounds.height / 2 - grid.getOffsetY()); int xOffset = (int) (lineOffset.x * scale); int yOffset = (int) (lineOffset.y * scale); g.setColor(Color.blue); previousPoint = null; for (CellPoint p : cellPath) { if (previousPoint != null) { ZonePoint ozp = grid.convert(previousPoint); int ox = ozp.x; int oy = ozp.y; ZonePoint dzp = grid.convert(p); int dx = dzp.x; int dy = dzp.y; ScreenPoint origin = ScreenPoint.fromZonePoint(this, ox, oy); ScreenPoint destination = ScreenPoint.fromZonePoint(this, dx, dy); int halfx = (int) ((origin.x + destination.x) / 2); int halfy = (int) ((origin.y + destination.y) / 2); Point halfPoint = new Point(halfx, halfy); if (previousHalfPoint != null) { int x1 = previousHalfPoint.x + xOffset; int y1 = previousHalfPoint.y + yOffset; int x2 = (int) origin.x + xOffset; int y2 = (int) origin.y + yOffset; int xh = halfPoint.x + xOffset; int yh = halfPoint.y + yOffset; QuadCurve2D curve = new QuadCurve2D.Float(x1, y1, x2, y2, xh, yh); g.draw(curve); } previousHalfPoint = halfPoint; } previousPoint = p; } } timer.stop("renderPath-2"); } else { timer.start("renderPath-3"); // Zone point/gridless path int scaledWidth = (int) (footprintBounds.width * scale); int scaledHeight = (int) (footprintBounds.height * scale); // Line Color highlight = new Color(255, 255, 255, 80); Stroke highlightStroke = new BasicStroke(9); Stroke oldStroke = g.getStroke(); Object oldAA = SwingUtil.useAntiAliasing(g); ScreenPoint lastPoint = null; List<ZonePoint> pathList = 
path.getCellPath(); for (ZonePoint zp : pathList) { if (lastPoint == null) { lastPoint = ScreenPoint.fromZonePointRnd(this, zp.x + (footprintBounds.width / 2) * footprint.getScale(), zp.y + (footprintBounds.height / 2) * footprint.getScale()); continue; } ScreenPoint nextPoint = ScreenPoint.fromZonePoint(this, zp.x + (footprintBounds.width / 2) * footprint.getScale(), zp.y + (footprintBounds.height / 2) * footprint.getScale()); g.setColor(highlight); g.setStroke(highlightStroke); g.drawLine((int) lastPoint.x, (int) lastPoint.y, (int) nextPoint.x, (int) nextPoint.y); g.setStroke(oldStroke); g.setColor(Color.blue); g.drawLine((int) lastPoint.x, (int) lastPoint.y, (int) nextPoint.x, (int) nextPoint.y); lastPoint = nextPoint; } SwingUtil.restoreAntiAliasing(g, oldAA); // Waypoints boolean originPoint = true; for (ZonePoint p : pathList) { // Skip the first point (it's the path origin) if (originPoint) { originPoint = false; continue; } // Skip the final point if (p == pathList.get(pathList.size() - 1)) { continue; } p = new ZonePoint((int) (p.x + (footprintBounds.width / 2) * footprint.getScale()), (int) (p.y + (footprintBounds.height / 2) * footprint.getScale())); highlightCell(g, p, AppStyle.cellWaypointImage, .333f); } timer.stop("renderPath-3"); } g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, oldRendering); } public void highlightCell(Graphics2D g, ZonePoint point, BufferedImage image, float size) { Grid grid = zone.getGrid(); double cwidth = grid.getCellWidth() * getScale(); double cheight = grid.getCellHeight() * getScale(); double iwidth = cwidth * size; double iheight = cheight * size; ScreenPoint sp = ScreenPoint.fromZonePoint(this, point); g.drawImage(image, (int) (sp.x - iwidth / 2), (int) (sp.y - iheight / 2), (int) iwidth, (int) iheight, this); } /** * Get a list of tokens currently visible on the screen. The list is ordered by location starting in the top left * and going to the bottom right. * * @return */ public List<Token> getTokensOnScreen() { List<Token> list = new ArrayList<Token>(); // Always assume tokens, for now List<TokenLocation> tokenLocationListCopy = new ArrayList<TokenLocation>(); tokenLocationListCopy.addAll(getTokenLocations(Zone.Layer.TOKEN)); for (TokenLocation location : tokenLocationListCopy) { list.add(location.token); } // Sort by location on screen, top left to bottom right Collections.sort(list, new Comparator<Token>() { public int compare(Token o1, Token o2) { if (o1.getY() < o2.getY()) { return -1; } if (o1.getY() > o2.getY()) { return 1; } if (o1.getX() < o2.getX()) { return -1; } if (o1.getX() > o2.getX()) { return 1; } return 0; } }); return list; } public Zone.Layer getActiveLayer() { return activeLayer != null ? 
activeLayer : Zone.Layer.TOKEN; } public void setActiveLayer(Zone.Layer layer) { activeLayer = layer; selectedTokenSet.clear(); repaint(); } /** * Get the token locations for the given layer, creating an empty list if there are no locations for the given layer */ private List<TokenLocation> getTokenLocations(Zone.Layer layer) { List<TokenLocation> list = tokenLocationMap.get(layer); if (list == null) { list = new LinkedList<TokenLocation>(); tokenLocationMap.put(layer, list); } return list; } // TODO: I don't like this hardwiring protected Shape getCircleFacingArrow(int angle, int size) { int base = (int) (size * .75); int width = (int) (size * .35); facingArrow = new GeneralPath(); facingArrow.moveTo(base, -width); facingArrow.lineTo(size, 0); facingArrow.lineTo(base, width); facingArrow.lineTo(base, -width); GeneralPath gp = (GeneralPath) facingArrow.createTransformedShape(AffineTransform.getRotateInstance(-Math.toRadians(angle))); return gp.createTransformedShape(AffineTransform.getScaleInstance(getScale(), getScale())); } // TODO: I don't like this hardwiring protected Shape getSquareFacingArrow(int angle, int size) { int base = (int) (size * .75); int width = (int) (size * .35); facingArrow = new GeneralPath(); facingArrow.moveTo(0, 0); facingArrow.lineTo(-(size - base), -width); facingArrow.lineTo(-(size - base), width); facingArrow.lineTo(0, 0); GeneralPath gp = (GeneralPath) facingArrow.createTransformedShape(AffineTransform.getRotateInstance(-Math.toRadians(angle))); return gp.createTransformedShape(AffineTransform.getScaleInstance(getScale(), getScale())); } protected void renderTokens(Graphics2D g, List<Token> tokenList, PlayerView view) { Graphics2D clippedG = g; timer.start("createClip"); if (!view.isGMView() && visibleScreenArea != null && !tokenList.isEmpty() && tokenList.get(0).isToken()) { clippedG = (Graphics2D) g.create(); Area visibleArea = new Area(g.getClipBounds()); visibleArea.intersect(visibleScreenArea); clippedG.setClip(new GeneralPath(visibleArea)); } timer.stop("createClip"); // This is in screen coordinates Rectangle viewport = new Rectangle(0, 0, getSize().width, getSize().height); Rectangle clipBounds = g.getClipBounds(); double scale = zoneScale.getScale(); Set<GUID> tempVisTokens = new HashSet<GUID>(); // calculations boolean calculateStacks = !tokenList.isEmpty() && !tokenList.get(0).isStamp() && tokenStackMap == null; if (calculateStacks) { tokenStackMap = new HashMap<Token, Set<Token>>(); } List<Token> tokenPostProcessing = new ArrayList<Token>(tokenList.size()); for (Token token : tokenList) { timer.start("tokenlist-1"); if (token.isStamp() && isTokenMoving(token)) { continue; } timer.stop("tokenlist-1"); timer.start("tokenlist-1.1"); TokenLocation location = tokenLocationCache.get(token); if (location != null && !location.maybeOnscreen(viewport)) { continue; } timer.stop("tokenlist-1.1"); timer.start("tokenlist-1a"); // Don't bother if it's not visible // NOTE: Not going to use zone.isTokenVisible as it is very slow.
In fact, it's faster // to just draw the tokens and let them be clipped if (!token.isVisible() && !view.isGMView()) { continue; } if (token.isVisibleOnlyToOwner() && !AppUtil.playerOwns(token)) { continue; } Rectangle footprintBounds = token.getBounds(zone); timer.stop("tokenlist-1a"); timer.start("tokenlist-1b"); BufferedImage image = ImageManager.getImage(token.getImageAssetId(), this); timer.stop("tokenlist-1b"); timer.start("tokenlist-1c"); double scaledWidth = (footprintBounds.width * scale); double scaledHeight = (footprintBounds.height * scale); // if (!token.isStamp()) { // // Fit inside the grid // scaledWidth --; // scaledHeight --; // } ScreenPoint tokenScreenLocation = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); timer.stop("tokenlist-1c"); timer.start("tokenlist-1d"); // Tokens are centered on the image center point double x = tokenScreenLocation.x; double y = tokenScreenLocation.y; Rectangle2D origBounds = new Rectangle2D.Double(x, y, scaledWidth, scaledHeight); Area tokenBounds = new Area(origBounds); if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { double sx = scaledWidth / 2 + x - (token.getAnchor().x * scale); double sy = scaledHeight / 2 + y - (token.getAnchor().y * scale); tokenBounds.transform(AffineTransform.getRotateInstance(Math.toRadians(-token.getFacing() - 90), sx, sy)); // facing defaults to down, or -90 degrees } timer.stop("tokenlist-1d"); timer.start("tokenlist-1e"); location = new TokenLocation(tokenBounds, origBounds, token, x, y, footprintBounds.width, footprintBounds.height, scaledWidth, scaledHeight); tokenLocationCache.put(token, location); // Too small ? if (location.scaledHeight < 1 || location.scaledWidth < 1) { continue; } // Vision visibility if (!view.isGMView() && token.isToken() && zoneView.isUsingVision()) { if (!GraphicsUtil.intersects(visibleScreenArea, location.bounds)) { continue; } } timer.stop("tokenlist-1e"); // Markers timer.start("renderTokens:Markers"); if (token.isMarker() && canSeeMarker(token)) { markerLocationList.add(location); } timer.stop("renderTokens:Markers"); // Stacking check if (calculateStacks) { timer.start("tokenStack"); // System.out.println(token.getName() + " - " + location.boundsCache); Set<Token> tokenStackSet = null; for (TokenLocation currLocation : getTokenLocations(Zone.Layer.TOKEN)) { // Are we covering anyone ? // System.out.println("\t" + currLocation.token.getName() + " - " + location.boundsCache.contains(currLocation.boundsCache)); if (location.boundsCache.contains(currLocation.boundsCache)) { if (tokenStackSet == null) { tokenStackSet = new HashSet<Token>(); tokenStackMap.put(token, tokenStackSet); tokenStackSet.add(token); } tokenStackSet.add(currLocation.token); if (tokenStackMap.get(currLocation.token) != null) { tokenStackSet.addAll(tokenStackMap.get(currLocation.token)); tokenStackMap.remove(currLocation.token); } } } timer.stop("tokenStack"); } // Keep track of the location on the screen // Note the order: the topmost token is at the end of the list timer.start("renderTokens:Locations"); List<TokenLocation> locationList = null; // TODO: Why not just call token.getLayer() ???
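// A possible answer to the TODO above, as a sketch (assuming Token.getLayer() reports the
// Zone.Layer implied by the stamp flags, which its use further down suggests):
//     locationList = getTokenLocations(token.getLayer());
// That would collapse the isStamp()/isObjectStamp()/isBackgroundStamp()/isGMStamp() cascade
// below into a single lookup.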
if (!token.isStamp()) { locationList = getTokenLocations(Zone.Layer.TOKEN); } else { if (token.isObjectStamp()) { locationList = getTokenLocations(Zone.Layer.OBJECT); } if (token.isBackgroundStamp()) { locationList = getTokenLocations(Zone.Layer.BACKGROUND); } if (token.isGMStamp()) { locationList = getTokenLocations(Zone.Layer.GM); } } if (locationList != null) { locationList.add(location); } timer.stop("renderTokens:Locations"); // Add the token to our visible set. tempVisTokens.add(token.getId()); // Only draw if we're visible // NOTE: this takes place AFTER resizing the image, that's so that the user // suffers a pause only once while scaling, and not as new tokens are // scrolled onto the screen timer.start("renderTokens:OnscreenCheck"); if (!location.bounds.intersects(clipBounds)) { timer.stop("renderTokens:OnscreenCheck"); continue; } timer.stop("renderTokens:OnscreenCheck"); // Moving ? timer.start("renderTokens:ShowMovement"); if (isTokenMoving(token)) { BufferedImage replacementImage = replacementImageMap.get(token); if (replacementImage == null) { replacementImage = ImageUtil.rgbToGrayscale(image); replacementImageMap.put(token, replacementImage); } image = replacementImage; } timer.stop("renderTokens:ShowMovement"); // Previous path timer.start("renderTokens:ShowPath"); if (showPathList.contains(token) && token.getLastPath() != null) { renderPath(g, token.getLastPath(), token.getFootprint(zone.getGrid())); } timer.stop("renderTokens:ShowPath"); timer.start("tokenlist-4"); // Halo (TOPDOWN, CIRCLE) if (token.hasHalo() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.getShape() == Token.TokenShape.CIRCLE)) { Stroke oldStroke = clippedG.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } timer.stop("tokenlist-4"); timer.start("tokenlist-5"); // handle flipping BufferedImage workImage = image; if (token.isFlippedX() || token.isFlippedY()) { workImage = flipImageMap.get(token); if (workImage == null) { workImage = new BufferedImage(image.getWidth(), image.getHeight(), image.getTransparency()); int workW = image.getWidth() * (token.isFlippedX() ? -1 : 1); int workH = image.getHeight() * (token.isFlippedY() ? -1 : 1); int workX = token.isFlippedX() ? image.getWidth() : 0; int workY = token.isFlippedY() ? image.getHeight() : 0; Graphics2D wig = workImage.createGraphics(); wig.drawImage(image, workX, workY, workW, workH, null); wig.dispose(); flipImageMap.put(token, workImage); } } timer.stop("tokenlist-5"); timer.start("tokenlist-6"); // Position Dimension imgSize = new Dimension(workImage.getWidth(), workImage.getHeight()); SwingUtil.constrainTo(imgSize, footprintBounds.width, footprintBounds.height); int offsetx = 0; int offsety = 0; if (token.isSnapToScale()) { offsetx = (int) (imgSize.width < footprintBounds.width ? (footprintBounds.width - imgSize.width) / 2 * getScale() : 0); offsety = (int) (imgSize.height < footprintBounds.height ? 
(footprintBounds.height - imgSize.height) / 2 * getScale() : 0); } double tx = location.x + offsetx; double ty = location.y + offsety; AffineTransform at = new AffineTransform(); at.translate(tx, ty); // Rotated if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { at.rotate(Math.toRadians(-token.getFacing() - 90), location.scaledWidth / 2 - (token.getAnchor().x * scale) - offsetx, location.scaledHeight / 2 - (token.getAnchor().y * scale) - offsety); // facing defaults to down, or -90 degrees } // Draw the token if (token.isSnapToScale()) { at.scale(((double) imgSize.width) / workImage.getWidth(), ((double) imgSize.height) / workImage.getHeight()); at.scale(getScale(), getScale()); } else { at.scale((scaledWidth) / workImage.getWidth(), (scaledHeight) / workImage.getHeight()); } timer.stop("tokenlist-6"); timer.start("tokenlist-7"); clippedG.drawImage(workImage, at, this); timer.stop("tokenlist-7"); timer.start("tokenlist-8"); // Halo (SQUARE) // XXX Why are square halos drawn separately?! if (token.hasHalo() && token.getShape() == Token.TokenShape.SQUARE) { Stroke oldStroke = g.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } // Facing ? // TODO: Optimize this by doing it once per token per facing if (token.hasFacing()) { Token.TokenShape tokenType = token.getShape(); switch (tokenType) { case CIRCLE: Shape arrow = getCircleFacingArrow(token.getFacing(), footprintBounds.width / 2); double cx = location.x + location.scaledWidth / 2; double cy = location.y + location.scaledHeight / 2; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; case SQUARE: int facing = token.getFacing(); while (facing < 0) { facing += 360; } // TODO: this should really be done in Token.setFacing() but I didn't want to take the chance of breaking something, so change this when it's safe to break stuff facing %= 360; arrow = getSquareFacingArrow(facing, footprintBounds.width / 2); cx = location.x + location.scaledWidth / 2; cy = location.y + location.scaledHeight / 2; // Find the edge of the image // TODO: Man, this is horrible, there's gotta be a better way to do this double xp = location.scaledWidth / 2; double yp = location.scaledHeight / 2; if (facing >= 45 && facing <= 135 || facing >= 225 && facing <= 315) { xp = (int) (yp / Math.tan(Math.toRadians(facing))); if (facing > 180) { xp = -xp; yp = -yp; } } else { yp = (int) (xp * Math.tan(Math.toRadians(facing))); if (facing > 90 && facing < 270) { xp = -xp; yp = -yp; } } cx += xp; cy -= yp; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; } } timer.stop("tokenlist-8"); timer.start("tokenlist-9"); // Set up the graphics so that the overlay can just be painted. 
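// Graphics.create(x, y, w, h) returns a context translated to the token's top-left corner and
// clipped to its scaled footprint, so each state/bar overlay below can paint in local
// (0,0)-based coordinates without knowing where the token sits on screen.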
Graphics2D locg = (Graphics2D) clippedG.create((int) location.x, (int) location.y, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); Rectangle bounds = new Rectangle(0, 0, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); // Check each of the set values for (String state : MapTool.getCampaign().getTokenStatesMap().keySet()) { Object stateValue = token.getState(state); AbstractTokenOverlay overlay = MapTool.getCampaign().getTokenStatesMap().get(state); if (stateValue instanceof AbstractTokenOverlay) { overlay = (AbstractTokenOverlay) stateValue; } if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, stateValue); } timer.stop("tokenlist-9"); timer.start("tokenlist-10"); for (String bar : MapTool.getCampaign().getTokenBarsMap().keySet()) { Object barValue = token.getState(bar); BarTokenOverlay overlay = MapTool.getCampaign().getTokenBarsMap().get(bar); if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, barValue); } // endfor locg.dispose(); timer.stop("tokenlist-10"); // Keep track of which tokens have been drawn so we can perform post-processing on them later // (such as selection borders and names/labels) if (getActiveLayer().equals(token.getLayer())) tokenPostProcessing.add(token); // DEBUGGING // ScreenPoint tmpsp = ScreenPoint.fromZonePoint(this, new ZonePoint(token.getX(), token.getY())); // g.setColor(Color.red); // g.drawLine(tmpsp.x, 0, tmpsp.x, getSize().height); // g.drawLine(0, tmpsp.y, getSize().width, tmpsp.y); } timer.start("tokenlist-12"); boolean useIF = MapTool.getServerPolicy().isUseIndividualFOW(); // Selection and labels for (Token token : tokenPostProcessing) { TokenLocation location = tokenLocationCache.get(token); + if (location == null) + continue; Area bounds = location.bounds; // TODO: This isn't entirely accurate as it doesn't account for the actual text // to be in the clipping bounds, but I'll fix that later if (!bounds.getBounds().intersects(clipBounds)) { continue; } Rectangle footprintBounds = token.getBounds(zone); boolean isSelected = selectedTokenSet.contains(token.getId()); if (isSelected) { ScreenPoint sp = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); double width = footprintBounds.width * getScale(); double height = footprintBounds.height * getScale(); ImageBorder selectedBorder = token.isStamp() ? AppStyle.selectedStampBorder : AppStyle.selectedBorder; if (highlightCommonMacros.contains(token)) { selectedBorder = AppStyle.commonMacroBorder; } if (!AppUtil.playerOwns(token)) { selectedBorder = AppStyle.selectedUnownedBorder; } if (useIF && !token.isStamp() && zoneView.isUsingVision()) { Tool tool = MapTool.getFrame().getToolbox().getSelectedTool(); if (tool instanceof RectangleExposeTool // XXX Change to use marker interface such as ExposeTool? 
|| tool instanceof OvalExposeTool || tool instanceof FreehandExposeTool || tool instanceof PolygonExposeTool) selectedBorder = AppConstants.FOW_TOOLS_BORDER; } if (token.hasFacing() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.isStamp())) { AffineTransform oldTransform = clippedG.getTransform(); // Rotated clippedG.translate(sp.x, sp.y); clippedG.rotate(Math.toRadians(-token.getFacing() - 90), width / 2 - (token.getAnchor().x * scale), height / 2 - (token.getAnchor().y * scale)); // facing defaults to down, or -90 degrees selectedBorder.paintAround(clippedG, 0, 0, (int) width, (int) height); clippedG.setTransform(oldTransform); } else { selectedBorder.paintAround(clippedG, (int) sp.x, (int) sp.y, (int) width, (int) height); } // Remove labels from the cache if the corresponding tokens are deselected } else if (!AppState.isShowTokenNames() && labelRenderingCache.containsKey(token.getId())) { labelRenderingCache.remove(token.getId()); } // Token names and labels boolean showCurrentTokenLabel = AppState.isShowTokenNames() || token == tokenUnderMouse; if (showCurrentTokenLabel) { GUID tokId = token.getId(); int offset = 3; // Keep it from tramping on the token border. ImageLabel background; Color foreground; if (token.isVisible()) { if (token.getType() == Token.Type.NPC) { background = GraphicsUtil.BLUE_LABEL; foreground = Color.WHITE; } else { background = GraphicsUtil.GREY_LABEL; foreground = Color.BLACK; } } else { background = GraphicsUtil.DARK_GREY_LABEL; foreground = Color.WHITE; } String name = token.getName(); if (view.isGMView() && token.getGMName() != null && !StringUtil.isEmpty(token.getGMName())) { name += " (" + token.getGMName() + ")"; } if (!view.equals(lastView) || !labelRenderingCache.containsKey(tokId)) { // if ((lastView != null && !lastView.equals(view)) || !labelRenderingCache.containsKey(tokId)) { boolean hasLabel = false; // Calculate image dimensions FontMetrics fm = g.getFontMetrics(); Font f = g.getFont(); int strWidth = SwingUtilities.computeStringWidth(fm, name); int width = strWidth + GraphicsUtil.BOX_PADDINGX * 2; int height = fm.getHeight() + GraphicsUtil.BOX_PADDINGY * 2; int labelHeight = height; // If token has a label (in addition to name). if (token.getLabel() != null && token.getLabel().trim().length() > 0) { hasLabel = true; height = height * 2; // Double the image height for two boxed strings. int labelWidth = SwingUtilities.computeStringWidth(fm, token.getLabel()) + GraphicsUtil.BOX_PADDINGX * 2; width = (width > labelWidth) ? width : labelWidth; } // Set up the image BufferedImage labelRender = new BufferedImage(width, height, Transparency.TRANSLUCENT); Graphics2D gLabelRender = labelRender.createGraphics(); gLabelRender.setFont(f); // Match font used in the main graphics context. gLabelRender.setRenderingHints(g.getRenderingHints()); // Match rendering style. // Draw name and label to image if (hasLabel) { GraphicsUtil.drawBoxedString(gLabelRender, token.getLabel(), width / 2, height - (labelHeight / 2), SwingUtilities.CENTER, background, foreground); } GraphicsUtil.drawBoxedString(gLabelRender, name, width / 2, labelHeight / 2, SwingUtilities.CENTER, background, foreground); // Add image to cache labelRenderingCache.put(tokId, labelRender); } // Create LabelRenderer using cached label. 
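// Rendering is deferred via delayRendering() so labels paint on top of every token drawn in
// this pass; the LabelRenderer pulls the pre-rendered image out of labelRenderingCache by
// token GUID (rebuilt above whenever the view changes or the cache entry is missing).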
delayRendering(new LabelRenderer(name, bounds.getBounds().x + bounds.getBounds().width / 2, bounds.getBounds().y + bounds.getBounds().height + offset, SwingUtilities.CENTER, background, foreground, tokId)); } } timer.stop("tokenlist-12"); timer.start("tokenlist-13"); // Stacks if (!tokenList.isEmpty() && !tokenList.get(0).isStamp()) { // TODO: find a cleaner way to indicate token layer if (tokenStackMap != null) { // FIXME Needed to prevent NPE but how can it be null? for (Token token : tokenStackMap.keySet()) { Area bounds = getTokenBounds(token); if (bounds == null) { // token is offscreen continue; } BufferedImage stackImage = AppStyle.stackImage; clippedG.drawImage(stackImage, bounds.getBounds().x + bounds.getBounds().width - stackImage.getWidth() + 2, bounds.getBounds().y - 2, null); } } } // Markers // for (TokenLocation location : getMarkerLocations()) { // BufferedImage stackImage = AppStyle.markerImage; // g.drawImage(stackImage, location.bounds.getBounds().x, location.bounds.getBounds().y, null); // } if (clippedG != g) { clippedG.dispose(); } timer.stop("tokenlist-13"); visibleTokenSet = Collections.unmodifiableSet(tempVisTokens); } private boolean canSeeMarker(Token token) { return MapTool.getPlayer().isGM() || !StringUtil.isEmpty(token.getNotes()); } public Set<GUID> getSelectedTokenSet() { return selectedTokenSet; } /** * Convenience method to return a set of tokens filtered by ownership * * @param tokenSet * the set of GUIDs to filter */ public Set<GUID> getOwnedTokens(Set<GUID> tokenSet) { Set<GUID> ownedTokens = new LinkedHashSet<GUID>(); if (tokenSet != null) { for (GUID guid : tokenSet) { if (!AppUtil.playerOwns(zone.getToken(guid))) { continue; } ownedTokens.add(guid); } } return ownedTokens; } /** * A convenience method to get the selected tokens, in selection order * * @return the selected tokens as a List<Token> */ public List<Token> getSelectedTokensList() { List<Token> tokenList = new ArrayList<Token>(); for (GUID g : selectedTokenSet) { if (zone.getToken(g) != null) { tokenList.add(zone.getToken(g)); } } // Commented out to preserve selection order // Collections.sort(tokenList, Token.NAME_COMPARATOR); return tokenList; } public boolean isTokenSelectable(GUID tokenGUID) { if (tokenGUID == null) { return false; } Token token = zone.getToken(tokenGUID); if (token == null) { return false; } if (!zone.isTokenVisible(token)) { if (AppUtil.playerOwns(token)) { return true; } return false; } return true; } public void deselectToken(GUID tokenGUID) { addToSelectionHistory(selectedTokenSet); selectedTokenSet.remove(tokenGUID); MapTool.getFrame().resetTokenPanels(); HTMLFrameFactory.selectedListChanged(); repaint(); } public boolean selectToken(GUID tokenGUID) { if (!isTokenSelectable(tokenGUID)) { return false; } addToSelectionHistory(selectedTokenSet); selectedTokenSet.add(tokenGUID); repaint(); MapTool.getFrame().resetTokenPanels(); HTMLFrameFactory.selectedListChanged(); return true; } public void selectTokens(Collection<GUID> tokens) { for (GUID tokenGUID : tokens) { if (!isTokenSelectable(tokenGUID)) { continue; } selectedTokenSet.add(tokenGUID); } addToSelectionHistory(selectedTokenSet); repaint(); MapTool.getFrame().resetTokenPanels(); HTMLFrameFactory.selectedListChanged(); } /** * Select every token on the active layer whose bounds intersect the given screen-space rectangle */ public void selectTokens(Rectangle rect) { List<GUID> selectedList = new LinkedList<GUID>(); for (TokenLocation location : getTokenLocations(getActiveLayer())) { if (rect.intersects(location.bounds.getBounds())) { selectedList.add(location.token.getId()); } } selectTokens(selectedList); }
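// Illustrative caller for the screen-space overload above (hypothetical; the real selection
// tools live elsewhere): normalize a mouse drag into a Rectangle and hand it over, e.g.
//     Rectangle band = new Rectangle(Math.min(x1, x2), Math.min(y1, y2),
//             Math.abs(x1 - x2), Math.abs(y1 - y2));
//     renderer.selectTokens(band);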
public void clearSelectedTokens() { addToSelectionHistory(selectedTokenSet); clearShowPaths(); selectedTokenSet.clear(); MapTool.getFrame().resetTokenPanels(); HTMLFrameFactory.selectedListChanged(); repaint(); } public void undoSelectToken() { // System.out.println("num history items: " + selectedTokenSetHistory.size()); /* * for (Set<GUID> set : selectedTokenSetHistory) { System.out.println("history item"); for (GUID guid : set) { * System.out.println(zone.getToken(guid).getName()); } } */ if (selectedTokenSetHistory.size() > 0) { selectedTokenSet = selectedTokenSetHistory.remove(0); // user may have deleted some of the tokens that are contained in // the selection history. // find them and filter them otherwise the selectionSet will have // orphaned GUIDs and // they will cause NPE Set<GUID> invalidTokenSet = new HashSet<GUID>(); for (GUID guid : selectedTokenSet) { if (zone.getToken(guid) == null) { invalidTokenSet.add(guid); } } selectedTokenSet.removeAll(invalidTokenSet); // if there is no token left in the set, undo again if (selectedTokenSet.size() == 0) { undoSelectToken(); } } // TODO: if selection history is empty, notify the selection panel to // disable the undo button. MapTool.getFrame().resetTokenPanels(); HTMLFrameFactory.selectedListChanged(); repaint(); } private void addToSelectionHistory(Set<GUID> selectionSet) { // don't add empty selections to history if (selectionSet.size() == 0) { return; } Set<GUID> history = new HashSet<GUID>(selectionSet); selectedTokenSetHistory.add(0, history); // limit the history to a certain size if (selectedTokenSetHistory.size() > 20) { selectedTokenSetHistory.subList(20, selectedTokenSetHistory.size()).clear(); } } public void cycleSelectedToken(int direction) { List<Token> visibleTokens = getTokensOnScreen(); Set<GUID> selectedTokenSet = getSelectedTokenSet(); Integer newSelection = null; if (visibleTokens.size() == 0) { return; } if (selectedTokenSet.size() == 0) { newSelection = 0; } else { // Find the first selected token on the screen for (int i = 0; i < visibleTokens.size(); i++) { Token token = visibleTokens.get(i); if (!isTokenSelectable(token.getId())) { continue; } if (getSelectedTokenSet().contains(token.getId())) { newSelection = i; break; } } // Pick the next; guard against a selection that is not on screen at all if (newSelection == null) { newSelection = 0; } else { newSelection += direction; } } if (newSelection < 0) { newSelection = visibleTokens.size() - 1; } if (newSelection >= visibleTokens.size()) { newSelection = 0; } // Make the selection clearSelectedTokens(); selectToken(visibleTokens.get(newSelection).getId()); } /** * Convenience function to check if a player owns all the tokens in the selection set * * @return true if every token in selectedTokenSet is owned by the player */ public boolean playerOwnsAllSelected() { if (selectedTokenSet.isEmpty()) { return false; } for (GUID tokenGUID : selectedTokenSet) { if (!AppUtil.playerOwns(zone.getToken(tokenGUID))) { return false; } } return true; } public Area getTokenBounds(Token token) { TokenLocation location = tokenLocationCache.get(token); if (location != null && !location.maybeOnscreen(new Rectangle(0, 0, getSize().width, getSize().height))) { location = null; } return location != null ?
location.bounds : null; } public Area getMarkerBounds(Token token) { for (TokenLocation location : markerLocationList) { if (location.token == token) { return location.bounds; } } return null; } public Rectangle getLabelBounds(Label label) { for (LabelLocation location : labelLocationList) { if (location.label == label) { return location.bounds; } } return null; } /** * Returns the token at screen location x, y (not cell location). * <p> * TODO: Add a check so that tokens owned by the current player are given priority. * * @param x * @param y * @return */ public Token getTokenAt(int x, int y) { List<TokenLocation> locationList = new ArrayList<TokenLocation>(); locationList.addAll(getTokenLocations(getActiveLayer())); Collections.reverse(locationList); for (TokenLocation location : locationList) { if (location.bounds.contains(x, y)) { return location.token; } } return null; } public Token getMarkerAt(int x, int y) { List<TokenLocation> locationList = new ArrayList<TokenLocation>(); locationList.addAll(markerLocationList); Collections.reverse(locationList); for (TokenLocation location : locationList) { if (location.bounds.contains(x, y)) { return location.token; } } return null; } public List<Token> getTokenStackAt(int x, int y) { Token token = getTokenAt(x, y); if (token == null || tokenStackMap == null || !tokenStackMap.containsKey(token)) { return null; } List<Token> tokenList = new ArrayList<Token>(tokenStackMap.get(token)); Collections.sort(tokenList, Token.COMPARE_BY_NAME); return tokenList; } /** * Returns the label at screen location x, y (not cell location). To get the token at a cell location, use * getGameMap() and use that. * * @param x * @param y * @return */ public Label getLabelAt(int x, int y) { List<LabelLocation> labelList = new ArrayList<LabelLocation>(); labelList.addAll(labelLocationList); Collections.reverse(labelList); for (LabelLocation location : labelList) { if (location.bounds.contains(x, y)) { return location.label; } } return null; } public int getViewOffsetX() { return zoneScale.getOffsetX(); } public int getViewOffsetY() { return zoneScale.getOffsetY(); } public void adjustGridSize(int delta) { zone.getGrid().setSize(Math.max(0, zone.getGrid().getSize() + delta)); repaint(); } public void moveGridBy(int dx, int dy) { int gridOffsetX = zone.getGrid().getOffsetX(); int gridOffsetY = zone.getGrid().getOffsetY(); gridOffsetX += dx; gridOffsetY += dy; if (gridOffsetY > 0) { gridOffsetY = gridOffsetY - (int) zone.getGrid().getCellHeight(); } if (gridOffsetX > 0) { gridOffsetX = gridOffsetX - (int) zone.getGrid().getCellWidth(); } zone.getGrid().setOffset(gridOffsetX, gridOffsetY); repaint(); } /** * Since the map can be scaled, this is a convenience method to find out what cell is at this location. * * @param screenPoint * Find the cell for this point. * @return The cell coordinates of the passed screen point. */ public CellPoint getCellAt(ScreenPoint screenPoint) { ZonePoint zp = screenPoint.convertToZone(this); return zone.getGrid().convert(zp); } public void setScale(double scale) { if (zoneScale.getScale() != scale) { /* * MCL: I think it is correct to clear these caches (if not more). 
*/ tokenLocationCache.clear(); invalidateCurrentViewCache(); zoneScale.zoomScale(getWidth() / 2, getHeight() / 2, scale); MapTool.getFrame().getZoomStatusBar().update(); } } public double getScale() { return zoneScale.getScale(); } public double getScaledGridSize() { // Optimize: only need to calc this when grid size or scale changes return getScale() * zone.getGrid().getSize(); } /** * This makes sure that any image updates get refreshed. This could be a little smarter. */ @Override public boolean imageUpdate(Image img, int infoflags, int x, int y, int w, int h) { repaint(); return super.imageUpdate(img, infoflags, x, y, w, h); } private interface ItemRenderer { public void render(Graphics2D g); } /** * Represents a delayed label render */ private class LabelRenderer implements ItemRenderer { private final String text; private int x; private final int y; private final int align; private final Color foreground; private final ImageLabel background; // Used for drawing from label cache. private final GUID tokenId; private int width, height; public LabelRenderer(String text, int x, int y) { this(text, x, y, null); } public LabelRenderer(String text, int x, int y, GUID tId) { this.text = text; this.x = x; this.y = y; // Defaults this.align = SwingUtilities.CENTER; this.background = GraphicsUtil.GREY_LABEL; this.foreground = Color.black; tokenId = tId; if (tokenId != null) { width = labelRenderingCache.get(tokenId).getWidth(); height = labelRenderingCache.get(tokenId).getHeight(); } } public LabelRenderer(String text, int x, int y, int align, ImageLabel background, Color foreground) { this(text, x, y, align, background, foreground, null); } public LabelRenderer(String text, int x, int y, int align, ImageLabel background, Color foreground, GUID tId) { this.text = text; this.x = x; this.y = y; this.align = align; this.foreground = foreground; this.background = background; tokenId = tId; if (tokenId != null) { width = labelRenderingCache.get(tokenId).getWidth(); height = labelRenderingCache.get(tokenId).getHeight(); } } public void render(Graphics2D g) { if (tokenId != null) { // Use cached image. switch (align) { case SwingUtilities.CENTER: x = x - width / 2; break; case SwingUtilities.RIGHT: x = x - width; break; case SwingUtilities.LEFT: break; } BufferedImage img = labelRenderingCache.get(tokenId); if (img != null) { g.drawImage(img, x, y, width, height, null); } else { // Draw as normal GraphicsUtil.drawBoxedString(g, text, x, y, align, background, foreground); } } else { // Draw as normal. 
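// No token id means there is no cached image for this label, so it is drawn directly.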
GraphicsUtil.drawBoxedString(g, text, x, y, align, background, foreground); } } } /** * Represents a movement set */ private class SelectionSet { private final HashSet<GUID> selectionSet = new HashSet<GUID>(); private final GUID keyToken; private final String playerId; private ZoneWalker walker; private final Token token; private Path<ZonePoint> gridlessPath; // Pixel distance from keyToken's origin private int offsetX; private int offsetY; public SelectionSet(String playerId, GUID tokenGUID, Set<GUID> selectionList) { selectionSet.addAll(selectionList); keyToken = tokenGUID; this.playerId = playerId; token = zone.getToken(tokenGUID); if (token.isSnapToGrid() && zone.getGrid().getCapabilities().isSnapToGridSupported()) { if (zone.getGrid().getCapabilities().isPathingSupported()) { CellPoint tokenPoint = zone.getGrid().convert(new ZonePoint(token.getX(), token.getY())); walker = zone.getGrid().createZoneWalker(); walker.setWaypoints(tokenPoint, tokenPoint); } } else { gridlessPath = new Path<ZonePoint>(); gridlessPath.addPathCell(new ZonePoint(token.getX(), token.getY())); } } public ZoneWalker getWalker() { return walker; } public GUID getKeyToken() { return keyToken; } public Set<GUID> getTokens() { return selectionSet; } public boolean contains(Token token) { return selectionSet.contains(token.getId()); } public void setOffset(int x, int y) { offsetX = x; offsetY = y; ZonePoint zp = new ZonePoint(token.getX() + x, token.getY() + y); if (ZoneRenderer.this.zone.getGrid().getCapabilities().isPathingSupported() && token.isSnapToGrid()) { CellPoint point = zone.getGrid().convert(zp); walker.replaceLastWaypoint(point); } else { if (gridlessPath.getCellPath().size() > 1) { gridlessPath.replaceLastPoint(zp); } else { gridlessPath.addPathCell(zp); } } } /** * Add the waypoint if it is a new waypoint. If it is an old waypoint remove it. * * @param location * The point where the waypoint is toggled. */ public void toggleWaypoint(ZonePoint location) { // CellPoint cp = renderer.getZone().getGrid().convert(new ZonePoint(dragStartX, dragStartY)); if (walker != null && token.isSnapToGrid() && getZone().getGrid() != null) { walker.toggleWaypoint(getZone().getGrid().convert(location)); } else { gridlessPath.addWayPoint(location); gridlessPath.addPathCell(location); } } /** * Retrieves the last waypoint, or if there isn't one then the start point of the first path segment. 
* * @return the last waypoint, or the start point of the path if no waypoint has been set */ public ZonePoint getLastWaypoint() { ZonePoint zp; if (walker != null && token.isSnapToGrid() && getZone().getGrid() != null) { CellPoint cp = walker.getLastPoint(); zp = getZone().getGrid().convert(cp); } else { zp = gridlessPath.getLastJunctionPoint(); } return zp; } public int getOffsetX() { return offsetX; } public int getOffsetY() { return offsetY; } public String getPlayerId() { return playerId; } } private class TokenLocation { public Area bounds; public Rectangle2D origBounds; public Token token; public Rectangle boundsCache; public int height; public int width; public double scaledHeight; public double scaledWidth; public double x; public double y; public int offsetX; public int offsetY; public TokenLocation(Area bounds, Rectangle2D origBounds, Token token, double x, double y, int width, int height, double scaledWidth, double scaledHeight) { this.bounds = bounds; this.token = token; this.origBounds = origBounds; this.width = width; this.height = height; this.scaledWidth = scaledWidth; this.scaledHeight = scaledHeight; this.x = x; this.y = y; offsetX = getViewOffsetX(); offsetY = getViewOffsetY(); boundsCache = bounds.getBounds(); } public boolean maybeOnscreen(Rectangle viewport) { int deltaX = getViewOffsetX() - offsetX; int deltaY = getViewOffsetY() - offsetY; boundsCache.x += deltaX; boundsCache.y += deltaY; offsetX = getViewOffsetX(); offsetY = getViewOffsetY(); timer.start("maybeOnscreen"); if (!boundsCache.intersects(viewport)) { timer.stop("maybeOnscreen"); return false; } timer.stop("maybeOnscreen"); return true; } } private static class LabelLocation { public Rectangle bounds; public Label label; public LabelLocation(Rectangle bounds, Label label) { this.bounds = bounds; this.label = label; } } // // DROP TARGET LISTENER /* * (non-Javadoc) * * @see java.awt.dnd.DropTargetListener#dragEnter(java.awt.dnd.DropTargetDragEvent) */ public void dragEnter(DropTargetDragEvent dtde) { } /* * (non-Javadoc) * * @see java.awt.dnd.DropTargetListener#dragExit(java.awt.dnd.DropTargetEvent) */ public void dragExit(DropTargetEvent dte) { } /* * (non-Javadoc) * * @see java.awt.dnd.DropTargetListener#dragOver(java.awt.dnd.DropTargetDragEvent) */ public void dragOver(DropTargetDragEvent dtde) { } private void addTokens(List<Token> tokens, ZonePoint zp, List<Boolean> configureTokens, boolean showDialog) { GridCapabilities gridCaps = zone.getGrid().getCapabilities(); boolean isGM = MapTool.getPlayer().isGM(); List<String> failedPaste = new ArrayList<String>(tokens.size()); List<GUID> selectThese = new ArrayList<GUID>(tokens.size()); ScreenPoint sp = ScreenPoint.fromZonePoint(this, zp); Point dropPoint = new Point((int) sp.x, (int) sp.y); SwingUtilities.convertPointToScreen(dropPoint, this); int tokenIndex = 0; for (Token token : tokens) { boolean configureToken = configureTokens.get(tokenIndex++); // Get the snap to grid value for the current prefs and abilities token.setSnapToGrid(gridCaps.isSnapToGridSupported() && AppPreferences.getTokensStartSnapToGrid()); if (gridCaps.isSnapToGridSupported() && token.isSnapToGrid()) { zp = zone.getGrid().convert(zone.getGrid().convert(zp)); } token.setX(zp.x); token.setY(zp.y); // Set the image properties if (configureToken) { BufferedImage image = ImageManager.getImageAndWait(token.getImageAssetId()); token.setShape(TokenUtil.guessTokenType(image)); token.setWidth(image.getWidth(null)); token.setHeight(image.getHeight(null)); token.setFootprint(zone.getGrid(), zone.getGrid().getDefaultFootprint()); } // Always set the layer
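// (The layer must be assigned before MapToolUtil.nextTokenId() runs below: generated token
// names depend on the layer, as the GM branch notes.)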
token.setLayer(getActiveLayer()); // He who drops, owns: if no owners are set yet, the dropping player becomes one; // if owners are already set, add the current player to the list. // (Cannot use AppUtil.playerOwns() since that checks 'isStrictTokenManagement' and we want real ownership here.) if (!isGM && (!token.hasOwners() || !token.isOwner(MapTool.getPlayer().getName()))) { token.addOwner(MapTool.getPlayer().getName()); } // Token type Rectangle size = token.getBounds(zone); switch (getActiveLayer()) { case TOKEN: // Players can't drop invisible tokens token.setVisible(!isGM || AppPreferences.getNewTokensVisible()); if (AppPreferences.getTokensStartFreesize()) { token.setSnapToScale(false); } break; case BACKGROUND: token.setShape(Token.TokenShape.TOP_DOWN); token.setSnapToScale(!AppPreferences.getBackgroundsStartFreesize()); token.setSnapToGrid(AppPreferences.getBackgroundsStartSnapToGrid()); token.setVisible(AppPreferences.getNewBackgroundsVisible()); // Center on drop point if (!token.isSnapToScale() && !token.isSnapToGrid()) { token.setX(token.getX() - size.width / 2); token.setY(token.getY() - size.height / 2); } break; case OBJECT: token.setShape(Token.TokenShape.TOP_DOWN); token.setSnapToScale(!AppPreferences.getObjectsStartFreesize()); token.setSnapToGrid(AppPreferences.getObjectsStartSnapToGrid()); token.setVisible(AppPreferences.getNewObjectsVisible()); // Center on drop point if (!token.isSnapToScale() && !token.isSnapToGrid()) { token.setX(token.getX() - size.width / 2); token.setY(token.getY() - size.height / 2); } break; } // FJE Yes, this looks redundant. But calling getType() retrieves the type of // the Token and returns NPC if the type can't be determined (raw image, // corrupted token file, etc). So retrieving it and then turning around and // setting it ensures it has a valid value without necessarily changing what // it was. :) Token.Type type = token.getType(); token.setType(type); // Token type if (isGM) { // Check the name (after Token layer is set as name relies on layer) token.setName(MapToolUtil.nextTokenId(zone, token)); if (getActiveLayer() == Zone.Layer.TOKEN) { if (AppPreferences.getShowDialogOnNewToken() || showDialog) { NewTokenDialog dialog = new NewTokenDialog(token, dropPoint.x, dropPoint.y); dialog.showDialog(); if (!dialog.isSuccess()) { continue; } } } } else { // Player dropped, ensure it's a PC token // (Why? Couldn't a Player drop an RPTOK that represents an NPC, such as for a summoned monster? // Unfortunately, we can't know at this point whether the original input was an RPTOK or not.) token.setType(Token.Type.PC); // For Players, check to see if the name is already in use. If it is already in use, make sure the current Player // owns the token being duplicated (to avoid subtle ways of manipulating someone else's token!).
Token tokenNameUsed = zone.getTokenByName(token.getName()); if (tokenNameUsed != null) { if (!AppUtil.playerOwns(tokenNameUsed)) { failedPaste.add(token.getName()); continue; } String newName = MapToolUtil.nextTokenId(zone, token); token.setName(newName); } } // Make sure all the assets are transferred for (MD5Key id : token.getAllImageAssets()) { Asset asset = AssetManager.getAsset(id); if (asset == null) { log.error("Could not find image for asset: " + id); continue; } MapToolUtil.uploadAsset(asset); } // Save the token and tell everybody about it zone.putToken(token); MapTool.serverCommand().putToken(zone.getId(), token); selectThese.add(token.getId()); } // For convenience, select them clearSelectedTokens(); selectTokens(selectThese); if (!isGM) MapTool.addMessage(TextMessage.gm(null, "Tokens dropped onto map '" + zone.getName() + "'")); if (!failedPaste.isEmpty()) { String mesg = "Failed to paste token(s) with duplicate name(s): " + failedPaste; TextMessage msg = TextMessage.gm(null, mesg); MapTool.addMessage(msg); // msg.setChannel(Channel.ME); // MapTool.addMessage(msg); } // Copy them to the clipboard so that we can quickly copy them onto the map AppActions.copyTokens(tokens); requestFocusInWindow(); repaint(); } /* * (non-Javadoc) * * @see java.awt.dnd.DropTargetListener#drop(java.awt.dnd.DropTargetDropEvent) */ public void drop(DropTargetDropEvent dtde) { ZonePoint zp = new ScreenPoint((int) dtde.getLocation().getX(), (int) dtde.getLocation().getY()).convertToZone(this); TransferableHelper th = (TransferableHelper) getTransferHandler(); List<Token> tokens = th.getTokens(); if (tokens != null && !tokens.isEmpty()) addTokens(tokens, zp, th.getConfigureTokens(), false); } public Set<GUID> getVisibleTokenSet() { return visibleTokenSet; } /* * (non-Javadoc) * * @see java.awt.dnd.DropTargetListener#dropActionChanged(java.awt.dnd.DropTargetDragEvent) */ public void dropActionChanged(DropTargetDragEvent dtde) { } // // ZONE MODEL CHANGE LISTENER private class ZoneModelChangeListener implements ModelChangeListener { public void modelChanged(ModelChangeEvent event) { Object evt = event.getEvent(); if (evt == Zone.Event.TOPOLOGY_CHANGED) { flushFog(); flushLight(); } if (evt == Zone.Event.TOKEN_CHANGED || evt == Zone.Event.TOKEN_REMOVED || evt == Zone.Event.TOKEN_ADDED) { flush((Token) event.getArg()); } if (evt == Zone.Event.FOG_CHANGED) { flushFog = true; } MapTool.getFrame().updateTokenTree(); repaint(); } } // // COMPARABLE public int compareTo(Object o) { if (!(o instanceof ZoneRenderer)) { return 0; } return zone.getCreationTime() < ((ZoneRenderer) o).zone.getCreationTime() ?
-1 : 1; } // Begin token common macro identification private List<Token> highlightCommonMacros = new ArrayList<Token>(); public List<Token> getHighlightCommonMacros() { return highlightCommonMacros; } public void setHighlightCommonMacros(List<Token> affectedTokens) { highlightCommonMacros = affectedTokens; repaint(); } // End token common macro identification // // IMAGE OBSERVER private final ImageObserver drawableObserver = new ImageObserver() { public boolean imageUpdate(Image img, int infoflags, int x, int y, int width, int height) { ZoneRenderer.this.flushDrawableRenderer(); MapTool.getFrame().refresh(); return true; } }; /* * (non-Javadoc) * * @see java.awt.Component#setCursor(java.awt.Cursor) */ @Override public void setCursor(Cursor cursor) { // System.out.println("Setting cursor on ZoneRenderer: " + cursor.toString()); if (false && cursor == Cursor.getDefaultCursor()) { // Intentionally disabled custom-cursor experiment // if (custom == null) custom = createCustomCursor("image/cursor.png", "Group"); cursor = custom; } super.setCursor(cursor); } private Cursor custom = null; public Cursor createCustomCursor(String resource, String tokenName) { Cursor c = null; try { // Dimension d = Toolkit.getDefaultToolkit().getBestCursorSize(16, 16); // On OSX returns any size up to 1/2 of (screen width, screen height) // System.out.println("Best cursor size: " + d); BufferedImage img = ImageIO.read(MapTool.class.getResourceAsStream(resource)); Font font = AppStyle.labelFont; Graphics2D z = (Graphics2D) this.getGraphics(); z.setFont(font); FontRenderContext frc = z.getFontRenderContext(); TextLayout tl = new TextLayout(tokenName, font, frc); Rectangle textbox = tl.getPixelBounds(null, 0, 0); // Now create a larger BufferedImage that will hold both the existing cursor and a token name // Use the larger of the image width or string width, and the height of the image + the height of the string // to represent the bounding box of the 'arrow+tokenName' Rectangle bounds = new Rectangle(Math.max(img.getWidth(), textbox.width), img.getHeight() + textbox.height); BufferedImage cursor = new BufferedImage(bounds.width, bounds.height, Transparency.TRANSLUCENT); Graphics2D g2d = cursor.createGraphics(); g2d.setFont(font); g2d.setComposite(z.getComposite()); g2d.setStroke(z.getStroke()); g2d.setPaintMode(); z.dispose(); Object oldAA = SwingUtil.useAntiAliasing(g2d); // g2d.setTransform( ((Graphics2D)this.getGraphics()).getTransform() ); // g2d.drawImage(img, null, 0, 0); g2d.drawImage(img, new AffineTransform(1f, 0f, 0f, 1f, 0, 0), null); // Draw the arrow at 1:1 resolution g2d.translate(0, img.getHeight() + textbox.height / 2); // g2d.transform(new AffineTransform(0.5f, 0f, 0f, 0.5f, 0, 0)); // Why do I need this to scale down the text?? g2d.setColor(Color.BLACK); GraphicsUtil.drawBoxedString(g2d, tokenName, 0, 0, SwingUtilities.LEFT); // The text draw here is not nearly as nice looking as normal // g2d.setBackground(Color.BLACK); // g2d.setColor(Color.WHITE); // g2d.fillRect(0, bounds.height-textbox.height, textbox.width, textbox.height); // g2d.drawString(tokenName, 0F, bounds.height - descent); SwingUtil.restoreAntiAliasing(g2d, oldAA); // Restore the AA hint before the context is disposed g2d.dispose(); c = Toolkit.getDefaultToolkit().createCustomCursor(cursor, new Point(0, 0), tokenName); img.flush(); // Try to be friendly about memory usage. ;-) cursor.flush(); } catch (Exception e) { } return c; } }
true
true
protected void renderTokens(Graphics2D g, List<Token> tokenList, PlayerView view) { Graphics2D clippedG = g; timer.start("createClip"); if (!view.isGMView() && visibleScreenArea != null && !tokenList.isEmpty() && tokenList.get(0).isToken()) { clippedG = (Graphics2D) g.create(); Area visibleArea = new Area(g.getClipBounds()); visibleArea.intersect(visibleScreenArea); clippedG.setClip(new GeneralPath(visibleArea)); } timer.stop("createClip"); // This is in screen coordinates Rectangle viewport = new Rectangle(0, 0, getSize().width, getSize().height); Rectangle clipBounds = g.getClipBounds(); double scale = zoneScale.getScale(); Set<GUID> tempVisTokens = new HashSet<GUID>(); // calculations boolean calculateStacks = !tokenList.isEmpty() && !tokenList.get(0).isStamp() && tokenStackMap == null; if (calculateStacks) { tokenStackMap = new HashMap<Token, Set<Token>>(); } List<Token> tokenPostProcessing = new ArrayList<Token>(tokenList.size()); for (Token token : tokenList) { timer.start("tokenlist-1"); if (token.isStamp() && isTokenMoving(token)) { continue; } timer.stop("tokenlist-1"); timer.start("tokenlist-1.1"); TokenLocation location = tokenLocationCache.get(token); if (location != null && !location.maybeOnscreen(viewport)) { continue; } timer.stop("tokenlist-1.1"); timer.start("tokenlist-1a"); // Don't bother if it's not visible // NOTE: Not going to use zone.isTokenVisible as it is very slow. In fact, it's faster // to just draw the tokens and let them be clipped if (!token.isVisible() && !view.isGMView()) { continue; } if (token.isVisibleOnlyToOwner() && !AppUtil.playerOwns(token)) { continue; } Rectangle footprintBounds = token.getBounds(zone); timer.stop("tokenlist-1a"); timer.start("tokenlist-1b"); BufferedImage image = ImageManager.getImage(token.getImageAssetId(), this); timer.stop("tokenlist-1b"); timer.start("tokenlist-1c"); double scaledWidth = (footprintBounds.width * scale); double scaledHeight = (footprintBounds.height * scale); // if (!token.isStamp()) { // // Fit inside the grid // scaledWidth --; // scaledHeight --; // } ScreenPoint tokenScreenLocation = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); timer.stop("tokenlist-1c"); timer.start("tokenlist-1d"); // Tokens are centered on the image center point double x = tokenScreenLocation.x; double y = tokenScreenLocation.y; Rectangle2D origBounds = new Rectangle2D.Double(x, y, scaledWidth, scaledHeight); Area tokenBounds = new Area(origBounds); if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { double sx = scaledWidth / 2 + x - (token.getAnchor().x * scale); double sy = scaledHeight / 2 + y - (token.getAnchor().y * scale); tokenBounds.transform(AffineTransform.getRotateInstance(Math.toRadians(-token.getFacing() - 90), sx, sy)); // facing defaults to down, or -90 degrees } timer.stop("tokenlist-1d"); timer.start("tokenlist-1e"); location = new TokenLocation(tokenBounds, origBounds, token, x, y, footprintBounds.width, footprintBounds.height, scaledWidth, scaledHeight); tokenLocationCache.put(token, location); // Too small ?
if (location.scaledHeight < 1 || location.scaledWidth < 1) { continue; } // Vision visibility if (!view.isGMView() && token.isToken() && zoneView.isUsingVision()) { if (!GraphicsUtil.intersects(visibleScreenArea, location.bounds)) { continue; } } timer.stop("tokenlist-1e"); // Markers timer.start("renderTokens:Markers"); if (token.isMarker() && canSeeMarker(token)) { markerLocationList.add(location); } timer.stop("renderTokens:Markers"); // Stacking check if (calculateStacks) { timer.start("tokenStack"); // System.out.println(token.getName() + " - " + location.boundsCache); Set<Token> tokenStackSet = null; for (TokenLocation currLocation : getTokenLocations(Zone.Layer.TOKEN)) { // Are we covering anyone ? // System.out.println("\t" + currLocation.token.getName() + " - " + location.boundsCache.contains(currLocation.boundsCache)); if (location.boundsCache.contains(currLocation.boundsCache)) { if (tokenStackSet == null) { tokenStackSet = new HashSet<Token>(); tokenStackMap.put(token, tokenStackSet); tokenStackSet.add(token); } tokenStackSet.add(currLocation.token); if (tokenStackMap.get(currLocation.token) != null) { tokenStackSet.addAll(tokenStackMap.get(currLocation.token)); tokenStackMap.remove(currLocation.token); } } } timer.stop("tokenStack"); } // Keep track of the location on the screen // Note the order where the top most token is at the end of the list timer.start("renderTokens:Locations"); List<TokenLocation> locationList = null; // TODO: Why not just call token.getLayer() ??? if (!token.isStamp()) { locationList = getTokenLocations(Zone.Layer.TOKEN); } else { if (token.isObjectStamp()) { locationList = getTokenLocations(Zone.Layer.OBJECT); } if (token.isBackgroundStamp()) { locationList = getTokenLocations(Zone.Layer.BACKGROUND); } if (token.isGMStamp()) { locationList = getTokenLocations(Zone.Layer.GM); } } if (locationList != null) { locationList.add(location); } timer.stop("renderTokens:Locations"); // Add the token to our visible set. tempVisTokens.add(token.getId()); // Only draw if we're visible // NOTE: this takes place AFTER resizing the image, that's so that the user // suffers a pause only once while scaling, and not as new tokens are // scrolled onto the screen timer.start("renderTokens:OnscreenCheck"); if (!location.bounds.intersects(clipBounds)) { timer.stop("renderTokens:OnscreenCheck"); continue; } timer.stop("renderTokens:OnscreenCheck"); // Moving ? 
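// Tokens that are mid-move are swapped for a cached grayscale copy (ImageUtil.rgbToGrayscale),
// so the stationary copy reads as a ghost while the move-selection pass draws the live preview.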
timer.start("renderTokens:ShowMovement"); if (isTokenMoving(token)) { BufferedImage replacementImage = replacementImageMap.get(token); if (replacementImage == null) { replacementImage = ImageUtil.rgbToGrayscale(image); replacementImageMap.put(token, replacementImage); } image = replacementImage; } timer.stop("renderTokens:ShowMovement"); // Previous path timer.start("renderTokens:ShowPath"); if (showPathList.contains(token) && token.getLastPath() != null) { renderPath(g, token.getLastPath(), token.getFootprint(zone.getGrid())); } timer.stop("renderTokens:ShowPath"); timer.start("tokenlist-4"); // Halo (TOPDOWN, CIRCLE) if (token.hasHalo() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.getShape() == Token.TokenShape.CIRCLE)) { Stroke oldStroke = clippedG.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } timer.stop("tokenlist-4"); timer.start("tokenlist-5"); // handle flipping BufferedImage workImage = image; if (token.isFlippedX() || token.isFlippedY()) { workImage = flipImageMap.get(token); if (workImage == null) { workImage = new BufferedImage(image.getWidth(), image.getHeight(), image.getTransparency()); int workW = image.getWidth() * (token.isFlippedX() ? -1 : 1); int workH = image.getHeight() * (token.isFlippedY() ? -1 : 1); int workX = token.isFlippedX() ? image.getWidth() : 0; int workY = token.isFlippedY() ? image.getHeight() : 0; Graphics2D wig = workImage.createGraphics(); wig.drawImage(image, workX, workY, workW, workH, null); wig.dispose(); flipImageMap.put(token, workImage); } } timer.stop("tokenlist-5"); timer.start("tokenlist-6"); // Position Dimension imgSize = new Dimension(workImage.getWidth(), workImage.getHeight()); SwingUtil.constrainTo(imgSize, footprintBounds.width, footprintBounds.height); int offsetx = 0; int offsety = 0; if (token.isSnapToScale()) { offsetx = (int) (imgSize.width < footprintBounds.width ? (footprintBounds.width - imgSize.width) / 2 * getScale() : 0); offsety = (int) (imgSize.height < footprintBounds.height ? (footprintBounds.height - imgSize.height) / 2 * getScale() : 0); } double tx = location.x + offsetx; double ty = location.y + offsety; AffineTransform at = new AffineTransform(); at.translate(tx, ty); // Rotated if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { at.rotate(Math.toRadians(-token.getFacing() - 90), location.scaledWidth / 2 - (token.getAnchor().x * scale) - offsetx, location.scaledHeight / 2 - (token.getAnchor().y * scale) - offsety); // facing defaults to down, or -90 degrees } // Draw the token if (token.isSnapToScale()) { at.scale(((double) imgSize.width) / workImage.getWidth(), ((double) imgSize.height) / workImage.getHeight()); at.scale(getScale(), getScale()); } else { at.scale((scaledWidth) / workImage.getWidth(), (scaledHeight) / workImage.getHeight()); } timer.stop("tokenlist-6"); timer.start("tokenlist-7"); clippedG.drawImage(workImage, at, this); timer.stop("tokenlist-7"); timer.start("tokenlist-8"); // Halo (SQUARE) // XXX Why are square halos drawn separately?! 
if (token.hasHalo() && token.getShape() == Token.TokenShape.SQUARE) { Stroke oldStroke = g.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } // Facing ? // TODO: Optimize this by doing it once per token per facing if (token.hasFacing()) { Token.TokenShape tokenType = token.getShape(); switch (tokenType) { case CIRCLE: Shape arrow = getCircleFacingArrow(token.getFacing(), footprintBounds.width / 2); double cx = location.x + location.scaledWidth / 2; double cy = location.y + location.scaledHeight / 2; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; case SQUARE: int facing = token.getFacing(); while (facing < 0) { facing += 360; } // TODO: this should really be done in Token.setFacing() but I didn't want to take the chance of breaking something, so change this when it's safe to break stuff facing %= 360; arrow = getSquareFacingArrow(facing, footprintBounds.width / 2); cx = location.x + location.scaledWidth / 2; cy = location.y + location.scaledHeight / 2; // Find the edge of the image // TODO: Man, this is horrible, there's gotta be a better way to do this double xp = location.scaledWidth / 2; double yp = location.scaledHeight / 2; if (facing >= 45 && facing <= 135 || facing >= 225 && facing <= 315) { xp = (int) (yp / Math.tan(Math.toRadians(facing))); if (facing > 180) { xp = -xp; yp = -yp; } } else { yp = (int) (xp * Math.tan(Math.toRadians(facing))); if (facing > 90 && facing < 270) { xp = -xp; yp = -yp; } } cx += xp; cy -= yp; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; } } timer.stop("tokenlist-8"); timer.start("tokenlist-9"); // Set up the graphics so that the overlay can just be painted. 
Graphics2D locg = (Graphics2D) clippedG.create((int) location.x, (int) location.y, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); Rectangle bounds = new Rectangle(0, 0, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); // Check each of the set values for (String state : MapTool.getCampaign().getTokenStatesMap().keySet()) { Object stateValue = token.getState(state); AbstractTokenOverlay overlay = MapTool.getCampaign().getTokenStatesMap().get(state); if (stateValue instanceof AbstractTokenOverlay) { overlay = (AbstractTokenOverlay) stateValue; } if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, stateValue); } timer.stop("tokenlist-9"); timer.start("tokenlist-10"); for (String bar : MapTool.getCampaign().getTokenBarsMap().keySet()) { Object barValue = token.getState(bar); BarTokenOverlay overlay = MapTool.getCampaign().getTokenBarsMap().get(bar); if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, barValue); } // endfor locg.dispose(); timer.stop("tokenlist-10"); // Keep track of which tokens have been drawn so we can perform post-processing on them later // (such as selection borders and names/labels) if (getActiveLayer().equals(token.getLayer())) tokenPostProcessing.add(token); // DEBUGGING // ScreenPoint tmpsp = ScreenPoint.fromZonePoint(this, new ZonePoint(token.getX(), token.getY())); // g.setColor(Color.red); // g.drawLine(tmpsp.x, 0, tmpsp.x, getSize().height); // g.drawLine(0, tmpsp.y, getSize().width, tmpsp.y); } timer.start("tokenlist-12"); boolean useIF = MapTool.getServerPolicy().isUseIndividualFOW(); // Selection and labels for (Token token : tokenPostProcessing) { TokenLocation location = tokenLocationCache.get(token); Area bounds = location.bounds; // TODO: This isn't entirely accurate as it doesn't account for the actual text // to be in the clipping bounds, but I'll fix that later if (!bounds.getBounds().intersects(clipBounds)) { continue; } Rectangle footprintBounds = token.getBounds(zone); boolean isSelected = selectedTokenSet.contains(token.getId()); if (isSelected) { ScreenPoint sp = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); double width = footprintBounds.width * getScale(); double height = footprintBounds.height * getScale(); ImageBorder selectedBorder = token.isStamp() ? AppStyle.selectedStampBorder : AppStyle.selectedBorder; if (highlightCommonMacros.contains(token)) { selectedBorder = AppStyle.commonMacroBorder; } if (!AppUtil.playerOwns(token)) { selectedBorder = AppStyle.selectedUnownedBorder; } if (useIF && !token.isStamp() && zoneView.isUsingVision()) { Tool tool = MapTool.getFrame().getToolbox().getSelectedTool(); if (tool instanceof RectangleExposeTool // XXX Change to use marker interface such as ExposeTool? 
|| tool instanceof OvalExposeTool || tool instanceof FreehandExposeTool || tool instanceof PolygonExposeTool) selectedBorder = AppConstants.FOW_TOOLS_BORDER; } if (token.hasFacing() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.isStamp())) { AffineTransform oldTransform = clippedG.getTransform(); // Rotated clippedG.translate(sp.x, sp.y); clippedG.rotate(Math.toRadians(-token.getFacing() - 90), width / 2 - (token.getAnchor().x * scale), height / 2 - (token.getAnchor().y * scale)); // facing defaults to down, or -90 degrees selectedBorder.paintAround(clippedG, 0, 0, (int) width, (int) height); clippedG.setTransform(oldTransform); } else { selectedBorder.paintAround(clippedG, (int) sp.x, (int) sp.y, (int) width, (int) height); } // Remove labels from the cache if the corresponding tokens are deselected } else if (!AppState.isShowTokenNames() && labelRenderingCache.containsKey(token.getId())) { labelRenderingCache.remove(token.getId()); } // Token names and labels boolean showCurrentTokenLabel = AppState.isShowTokenNames() || token == tokenUnderMouse; if (showCurrentTokenLabel) { GUID tokId = token.getId(); int offset = 3; // Keep it from tramping on the token border. ImageLabel background; Color foreground; if (token.isVisible()) { if (token.getType() == Token.Type.NPC) { background = GraphicsUtil.BLUE_LABEL; foreground = Color.WHITE; } else { background = GraphicsUtil.GREY_LABEL; foreground = Color.BLACK; } } else { background = GraphicsUtil.DARK_GREY_LABEL; foreground = Color.WHITE; } String name = token.getName(); if (view.isGMView() && token.getGMName() != null && !StringUtil.isEmpty(token.getGMName())) { name += " (" + token.getGMName() + ")"; } if (!view.equals(lastView) || !labelRenderingCache.containsKey(tokId)) { // if ((lastView != null && !lastView.equals(view)) || !labelRenderingCache.containsKey(tokId)) { boolean hasLabel = false; // Calculate image dimensions FontMetrics fm = g.getFontMetrics(); Font f = g.getFont(); int strWidth = SwingUtilities.computeStringWidth(fm, name); int width = strWidth + GraphicsUtil.BOX_PADDINGX * 2; int height = fm.getHeight() + GraphicsUtil.BOX_PADDINGY * 2; int labelHeight = height; // If token has a label (in addition to name). if (token.getLabel() != null && token.getLabel().trim().length() > 0) { hasLabel = true; height = height * 2; // Double the image height for two boxed strings. int labelWidth = SwingUtilities.computeStringWidth(fm, token.getLabel()) + GraphicsUtil.BOX_PADDINGX * 2; width = (width > labelWidth) ? width : labelWidth; } // Set up the image BufferedImage labelRender = new BufferedImage(width, height, Transparency.TRANSLUCENT); Graphics2D gLabelRender = labelRender.createGraphics(); gLabelRender.setFont(f); // Match font used in the main graphics context. gLabelRender.setRenderingHints(g.getRenderingHints()); // Match rendering style. // Draw name and label to image if (hasLabel) { GraphicsUtil.drawBoxedString(gLabelRender, token.getLabel(), width / 2, height - (labelHeight / 2), SwingUtilities.CENTER, background, foreground); } GraphicsUtil.drawBoxedString(gLabelRender, name, width / 2, labelHeight / 2, SwingUtilities.CENTER, background, foreground); // Add image to cache labelRenderingCache.put(tokId, labelRender); } // Create LabelRenderer using cached label. 
delayRendering(new LabelRenderer(name, bounds.getBounds().x + bounds.getBounds().width / 2, bounds.getBounds().y + bounds.getBounds().height + offset, SwingUtilities.CENTER, background, foreground, tokId)); } } timer.stop("tokenlist-12"); timer.start("tokenlist-13"); // Stacks if (!tokenList.isEmpty() && !tokenList.get(0).isStamp()) { // TODO: find a cleaner way to indicate token layer if (tokenStackMap != null) { // FIXME Needed to prevent NPE but how can it be null? for (Token token : tokenStackMap.keySet()) { Area bounds = getTokenBounds(token); if (bounds == null) { // token is offscreen continue; } BufferedImage stackImage = AppStyle.stackImage; clippedG.drawImage(stackImage, bounds.getBounds().x + bounds.getBounds().width - stackImage.getWidth() + 2, bounds.getBounds().y - 2, null); } } } // Markers // for (TokenLocation location : getMarkerLocations()) { // BufferedImage stackImage = AppStyle.markerImage; // g.drawImage(stackImage, location.bounds.getBounds().x, location.bounds.getBounds().y, null); // } if (clippedG != g) { clippedG.dispose(); } timer.stop("tokenlist-13"); visibleTokenSet = Collections.unmodifiableSet(tempVisTokens); }
protected void renderTokens(Graphics2D g, List<Token> tokenList, PlayerView view) { Graphics2D clippedG = g; timer.start("createClip"); if (!view.isGMView() && visibleScreenArea != null && !tokenList.isEmpty() && tokenList.get(0).isToken()) { clippedG = (Graphics2D) g.create(); Area visibleArea = new Area(g.getClipBounds()); visibleArea.intersect(visibleScreenArea); clippedG.setClip(new GeneralPath(visibleArea)); } timer.stop("createClip"); // This is in screen coordinates Rectangle viewport = new Rectangle(0, 0, getSize().width, getSize().height); Rectangle clipBounds = g.getClipBounds(); double scale = zoneScale.getScale(); Set<GUID> tempVisTokens = new HashSet<GUID>(); // calculations boolean calculateStacks = !tokenList.isEmpty() && !tokenList.get(0).isStamp() && tokenStackMap == null; if (calculateStacks) { tokenStackMap = new HashMap<Token, Set<Token>>(); } List<Token> tokenPostProcessing = new ArrayList<Token>(tokenList.size()); for (Token token : tokenList) { timer.start("tokenlist-1"); if (token.isStamp() && isTokenMoving(token)) { continue; } timer.stop("tokenlist-1"); timer.start("tokenlist-1.1"); TokenLocation location = tokenLocationCache.get(token); if (location != null && !location.maybeOnscreen(viewport)) { continue; } timer.stop("tokenlist-1.1"); timer.start("tokenlist-1a"); // Don't bother if it's not visible // NOTE: Not going to use zone.isTokenVisible as it is very slow. In fact, it's faster // to just draw the tokens and let them be clipped if (!token.isVisible() && !view.isGMView()) { continue; } if (token.isVisibleOnlyToOwner() && !AppUtil.playerOwns(token)) { continue; } Rectangle footprintBounds = token.getBounds(zone); timer.stop("tokenlist-1a"); timer.start("tokenlist-1b"); BufferedImage image = ImageManager.getImage(token.getImageAssetId(), this); timer.stop("tokenlist-1b"); timer.start("tokenlist-1c"); double scaledWidth = (footprintBounds.width * scale); double scaledHeight = (footprintBounds.height * scale); // if (!token.isStamp()) { // // Fit inside the grid // scaledWidth --; // scaledHeight --; // } ScreenPoint tokenScreenLocation = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); timer.stop("tokenlist-1c"); timer.start("tokenlist-1d"); // Tokens are centered on the image center point double x = tokenScreenLocation.x; double y = tokenScreenLocation.y; Rectangle2D origBounds = new Rectangle2D.Double(x, y, scaledWidth, scaledHeight); Area tokenBounds = new Area(origBounds); if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { double sx = scaledWidth / 2 + x - (token.getAnchor().x * scale); double sy = scaledHeight / 2 + y - (token.getAnchor().x * scale); tokenBounds.transform(AffineTransform.getRotateInstance(Math.toRadians(-token.getFacing() - 90), sx, sy)); // facing defaults to down, or -90 degrees } timer.stop("tokenlist-1d"); timer.start("tokenlist-1e"); location = new TokenLocation(tokenBounds, origBounds, token, x, y, footprintBounds.width, footprintBounds.height, scaledWidth, scaledHeight); tokenLocationCache.put(token, location); // Too small ? 
if (location.scaledHeight < 1 || location.scaledWidth < 1) { continue; } // Vision visibility if (!view.isGMView() && token.isToken() && zoneView.isUsingVision()) { if (!GraphicsUtil.intersects(visibleScreenArea, location.bounds)) { continue; } } timer.stop("tokenlist-1e"); // Markers timer.start("renderTokens:Markers"); if (token.isMarker() && canSeeMarker(token)) { markerLocationList.add(location); } timer.stop("renderTokens:Markers"); // Stacking check if (calculateStacks) { timer.start("tokenStack"); // System.out.println(token.getName() + " - " + location.boundsCache); Set<Token> tokenStackSet = null; for (TokenLocation currLocation : getTokenLocations(Zone.Layer.TOKEN)) { // Are we covering anyone ? // System.out.println("\t" + currLocation.token.getName() + " - " + location.boundsCache.contains(currLocation.boundsCache)); if (location.boundsCache.contains(currLocation.boundsCache)) { if (tokenStackSet == null) { tokenStackSet = new HashSet<Token>(); tokenStackMap.put(token, tokenStackSet); tokenStackSet.add(token); } tokenStackSet.add(currLocation.token); if (tokenStackMap.get(currLocation.token) != null) { tokenStackSet.addAll(tokenStackMap.get(currLocation.token)); tokenStackMap.remove(currLocation.token); } } } timer.stop("tokenStack"); } // Keep track of the location on the screen // Note the order where the top most token is at the end of the list timer.start("renderTokens:Locations"); List<TokenLocation> locationList = null; // TODO: Why not just call token.getLayer() ??? if (!token.isStamp()) { locationList = getTokenLocations(Zone.Layer.TOKEN); } else { if (token.isObjectStamp()) { locationList = getTokenLocations(Zone.Layer.OBJECT); } if (token.isBackgroundStamp()) { locationList = getTokenLocations(Zone.Layer.BACKGROUND); } if (token.isGMStamp()) { locationList = getTokenLocations(Zone.Layer.GM); } } if (locationList != null) { locationList.add(location); } timer.stop("renderTokens:Locations"); // Add the token to our visible set. tempVisTokens.add(token.getId()); // Only draw if we're visible // NOTE: this takes place AFTER resizing the image, that's so that the user // suffers a pause only once while scaling, and not as new tokens are // scrolled onto the screen timer.start("renderTokens:OnscreenCheck"); if (!location.bounds.intersects(clipBounds)) { timer.stop("renderTokens:OnscreenCheck"); continue; } timer.stop("renderTokens:OnscreenCheck"); // Moving ? 
timer.start("renderTokens:ShowMovement"); if (isTokenMoving(token)) { BufferedImage replacementImage = replacementImageMap.get(token); if (replacementImage == null) { replacementImage = ImageUtil.rgbToGrayscale(image); replacementImageMap.put(token, replacementImage); } image = replacementImage; } timer.stop("renderTokens:ShowMovement"); // Previous path timer.start("renderTokens:ShowPath"); if (showPathList.contains(token) && token.getLastPath() != null) { renderPath(g, token.getLastPath(), token.getFootprint(zone.getGrid())); } timer.stop("renderTokens:ShowPath"); timer.start("tokenlist-4"); // Halo (TOPDOWN, CIRCLE) if (token.hasHalo() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.getShape() == Token.TokenShape.CIRCLE)) { Stroke oldStroke = clippedG.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } timer.stop("tokenlist-4"); timer.start("tokenlist-5"); // handle flipping BufferedImage workImage = image; if (token.isFlippedX() || token.isFlippedY()) { workImage = flipImageMap.get(token); if (workImage == null) { workImage = new BufferedImage(image.getWidth(), image.getHeight(), image.getTransparency()); int workW = image.getWidth() * (token.isFlippedX() ? -1 : 1); int workH = image.getHeight() * (token.isFlippedY() ? -1 : 1); int workX = token.isFlippedX() ? image.getWidth() : 0; int workY = token.isFlippedY() ? image.getHeight() : 0; Graphics2D wig = workImage.createGraphics(); wig.drawImage(image, workX, workY, workW, workH, null); wig.dispose(); flipImageMap.put(token, workImage); } } timer.stop("tokenlist-5"); timer.start("tokenlist-6"); // Position Dimension imgSize = new Dimension(workImage.getWidth(), workImage.getHeight()); SwingUtil.constrainTo(imgSize, footprintBounds.width, footprintBounds.height); int offsetx = 0; int offsety = 0; if (token.isSnapToScale()) { offsetx = (int) (imgSize.width < footprintBounds.width ? (footprintBounds.width - imgSize.width) / 2 * getScale() : 0); offsety = (int) (imgSize.height < footprintBounds.height ? (footprintBounds.height - imgSize.height) / 2 * getScale() : 0); } double tx = location.x + offsetx; double ty = location.y + offsety; AffineTransform at = new AffineTransform(); at.translate(tx, ty); // Rotated if (token.hasFacing() && token.getShape() == Token.TokenShape.TOP_DOWN) { at.rotate(Math.toRadians(-token.getFacing() - 90), location.scaledWidth / 2 - (token.getAnchor().x * scale) - offsetx, location.scaledHeight / 2 - (token.getAnchor().y * scale) - offsety); // facing defaults to down, or -90 degrees } // Draw the token if (token.isSnapToScale()) { at.scale(((double) imgSize.width) / workImage.getWidth(), ((double) imgSize.height) / workImage.getHeight()); at.scale(getScale(), getScale()); } else { at.scale((scaledWidth) / workImage.getWidth(), (scaledHeight) / workImage.getHeight()); } timer.stop("tokenlist-6"); timer.start("tokenlist-7"); clippedG.drawImage(workImage, at, this); timer.stop("tokenlist-7"); timer.start("tokenlist-8"); // Halo (SQUARE) // XXX Why are square halos drawn separately?! 
if (token.hasHalo() && token.getShape() == Token.TokenShape.SQUARE) { Stroke oldStroke = g.getStroke(); clippedG.setStroke(new BasicStroke(AppPreferences.getHaloLineWidth())); clippedG.setColor(token.getHaloColor()); clippedG.draw(new Rectangle2D.Double(location.x, location.y, location.scaledWidth, location.scaledHeight)); clippedG.setStroke(oldStroke); } // Facing ? // TODO: Optimize this by doing it once per token per facing if (token.hasFacing()) { Token.TokenShape tokenType = token.getShape(); switch (tokenType) { case CIRCLE: Shape arrow = getCircleFacingArrow(token.getFacing(), footprintBounds.width / 2); double cx = location.x + location.scaledWidth / 2; double cy = location.y + location.scaledHeight / 2; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; case SQUARE: int facing = token.getFacing(); while (facing < 0) { facing += 360; } // TODO: this should really be done in Token.setFacing() but I didn't want to take the chance of breaking something, so change this when it's safe to break stuff facing %= 360; arrow = getSquareFacingArrow(facing, footprintBounds.width / 2); cx = location.x + location.scaledWidth / 2; cy = location.y + location.scaledHeight / 2; // Find the edge of the image // TODO: Man, this is horrible, there's gotta be a better way to do this double xp = location.scaledWidth / 2; double yp = location.scaledHeight / 2; if (facing >= 45 && facing <= 135 || facing >= 225 && facing <= 315) { xp = (int) (yp / Math.tan(Math.toRadians(facing))); if (facing > 180) { xp = -xp; yp = -yp; } } else { yp = (int) (xp * Math.tan(Math.toRadians(facing))); if (facing > 90 && facing < 270) { xp = -xp; yp = -yp; } } cx += xp; cy -= yp; clippedG.translate(cx, cy); clippedG.setColor(Color.yellow); clippedG.fill(arrow); clippedG.setColor(Color.darkGray); clippedG.draw(arrow); clippedG.translate(-cx, -cy); break; } } timer.stop("tokenlist-8"); timer.start("tokenlist-9"); // Set up the graphics so that the overlay can just be painted. 
Graphics2D locg = (Graphics2D) clippedG.create((int) location.x, (int) location.y, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); Rectangle bounds = new Rectangle(0, 0, (int) Math.ceil(location.scaledWidth), (int) Math.ceil(location.scaledHeight)); // Check each of the set values for (String state : MapTool.getCampaign().getTokenStatesMap().keySet()) { Object stateValue = token.getState(state); AbstractTokenOverlay overlay = MapTool.getCampaign().getTokenStatesMap().get(state); if (stateValue instanceof AbstractTokenOverlay) { overlay = (AbstractTokenOverlay) stateValue; } if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, stateValue); } timer.stop("tokenlist-9"); timer.start("tokenlist-10"); for (String bar : MapTool.getCampaign().getTokenBarsMap().keySet()) { Object barValue = token.getState(bar); BarTokenOverlay overlay = MapTool.getCampaign().getTokenBarsMap().get(bar); if (overlay == null || overlay.isMouseover() && token != tokenUnderMouse || !overlay.showPlayer(token, MapTool.getPlayer())) { continue; } overlay.paintOverlay(locg, token, bounds, barValue); } // endfor locg.dispose(); timer.stop("tokenlist-10"); // Keep track of which tokens have been drawn so we can perform post-processing on them later // (such as selection borders and names/labels) if (getActiveLayer().equals(token.getLayer())) tokenPostProcessing.add(token); // DEBUGGING // ScreenPoint tmpsp = ScreenPoint.fromZonePoint(this, new ZonePoint(token.getX(), token.getY())); // g.setColor(Color.red); // g.drawLine(tmpsp.x, 0, tmpsp.x, getSize().height); // g.drawLine(0, tmpsp.y, getSize().width, tmpsp.y); } timer.start("tokenlist-12"); boolean useIF = MapTool.getServerPolicy().isUseIndividualFOW(); // Selection and labels for (Token token : tokenPostProcessing) { TokenLocation location = tokenLocationCache.get(token); if (location == null) continue; Area bounds = location.bounds; // TODO: This isn't entirely accurate as it doesn't account for the actual text // to be in the clipping bounds, but I'll fix that later if (!bounds.getBounds().intersects(clipBounds)) { continue; } Rectangle footprintBounds = token.getBounds(zone); boolean isSelected = selectedTokenSet.contains(token.getId()); if (isSelected) { ScreenPoint sp = ScreenPoint.fromZonePoint(this, footprintBounds.x, footprintBounds.y); double width = footprintBounds.width * getScale(); double height = footprintBounds.height * getScale(); ImageBorder selectedBorder = token.isStamp() ? AppStyle.selectedStampBorder : AppStyle.selectedBorder; if (highlightCommonMacros.contains(token)) { selectedBorder = AppStyle.commonMacroBorder; } if (!AppUtil.playerOwns(token)) { selectedBorder = AppStyle.selectedUnownedBorder; } if (useIF && !token.isStamp() && zoneView.isUsingVision()) { Tool tool = MapTool.getFrame().getToolbox().getSelectedTool(); if (tool instanceof RectangleExposeTool // XXX Change to use marker interface such as ExposeTool? 
|| tool instanceof OvalExposeTool || tool instanceof FreehandExposeTool || tool instanceof PolygonExposeTool) selectedBorder = AppConstants.FOW_TOOLS_BORDER; } if (token.hasFacing() && (token.getShape() == Token.TokenShape.TOP_DOWN || token.isStamp())) { AffineTransform oldTransform = clippedG.getTransform(); // Rotated clippedG.translate(sp.x, sp.y); clippedG.rotate(Math.toRadians(-token.getFacing() - 90), width / 2 - (token.getAnchor().x * scale), height / 2 - (token.getAnchor().y * scale)); // facing defaults to down, or -90 degrees selectedBorder.paintAround(clippedG, 0, 0, (int) width, (int) height); clippedG.setTransform(oldTransform); } else { selectedBorder.paintAround(clippedG, (int) sp.x, (int) sp.y, (int) width, (int) height); } // Remove labels from the cache if the corresponding tokens are deselected } else if (!AppState.isShowTokenNames() && labelRenderingCache.containsKey(token.getId())) { labelRenderingCache.remove(token.getId()); } // Token names and labels boolean showCurrentTokenLabel = AppState.isShowTokenNames() || token == tokenUnderMouse; if (showCurrentTokenLabel) { GUID tokId = token.getId(); int offset = 3; // Keep it from tramping on the token border. ImageLabel background; Color foreground; if (token.isVisible()) { if (token.getType() == Token.Type.NPC) { background = GraphicsUtil.BLUE_LABEL; foreground = Color.WHITE; } else { background = GraphicsUtil.GREY_LABEL; foreground = Color.BLACK; } } else { background = GraphicsUtil.DARK_GREY_LABEL; foreground = Color.WHITE; } String name = token.getName(); if (view.isGMView() && token.getGMName() != null && !StringUtil.isEmpty(token.getGMName())) { name += " (" + token.getGMName() + ")"; } if (!view.equals(lastView) || !labelRenderingCache.containsKey(tokId)) { // if ((lastView != null && !lastView.equals(view)) || !labelRenderingCache.containsKey(tokId)) { boolean hasLabel = false; // Calculate image dimensions FontMetrics fm = g.getFontMetrics(); Font f = g.getFont(); int strWidth = SwingUtilities.computeStringWidth(fm, name); int width = strWidth + GraphicsUtil.BOX_PADDINGX * 2; int height = fm.getHeight() + GraphicsUtil.BOX_PADDINGY * 2; int labelHeight = height; // If token has a label (in addition to name). if (token.getLabel() != null && token.getLabel().trim().length() > 0) { hasLabel = true; height = height * 2; // Double the image height for two boxed strings. int labelWidth = SwingUtilities.computeStringWidth(fm, token.getLabel()) + GraphicsUtil.BOX_PADDINGX * 2; width = (width > labelWidth) ? width : labelWidth; } // Set up the image BufferedImage labelRender = new BufferedImage(width, height, Transparency.TRANSLUCENT); Graphics2D gLabelRender = labelRender.createGraphics(); gLabelRender.setFont(f); // Match font used in the main graphics context. gLabelRender.setRenderingHints(g.getRenderingHints()); // Match rendering style. // Draw name and label to image if (hasLabel) { GraphicsUtil.drawBoxedString(gLabelRender, token.getLabel(), width / 2, height - (labelHeight / 2), SwingUtilities.CENTER, background, foreground); } GraphicsUtil.drawBoxedString(gLabelRender, name, width / 2, labelHeight / 2, SwingUtilities.CENTER, background, foreground); // Add image to cache labelRenderingCache.put(tokId, labelRender); } // Create LabelRenderer using cached label. 
delayRendering(new LabelRenderer(name, bounds.getBounds().x + bounds.getBounds().width / 2, bounds.getBounds().y + bounds.getBounds().height + offset, SwingUtilities.CENTER, background, foreground, tokId)); } } timer.stop("tokenlist-12"); timer.start("tokenlist-13"); // Stacks if (!tokenList.isEmpty() && !tokenList.get(0).isStamp()) { // TODO: find a cleaner way to indicate token layer if (tokenStackMap != null) { // FIXME Needed to prevent NPE but how can it be null? for (Token token : tokenStackMap.keySet()) { Area bounds = getTokenBounds(token); if (bounds == null) { // token is offscreen continue; } BufferedImage stackImage = AppStyle.stackImage; clippedG.drawImage(stackImage, bounds.getBounds().x + bounds.getBounds().width - stackImage.getWidth() + 2, bounds.getBounds().y - 2, null); } } } // Markers // for (TokenLocation location : getMarkerLocations()) { // BufferedImage stackImage = AppStyle.markerImage; // g.drawImage(stackImage, location.bounds.getBounds().x, location.bounds.getBounds().y, null); // } if (clippedG != g) { clippedG.dispose(); } timer.stop("tokenlist-13"); visibleTokenSet = Collections.unmodifiableSet(tempVisTokens); }
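Note on the fix above: the only behavioral difference between the two renderTokens() versions is the "if (location == null) continue;" guard in the selection-and-label pass. Entries in tokenLocationCache can apparently be invalidated between the draw pass and the post-processing pass, so the second lookup may return null. Below is a minimal, self-contained sketch of that two-pass pattern (names are illustrative, not MapTool's API) showing why the guard is needed.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Two-pass rendering sketch: pass 1 fills a location cache, pass 2 re-reads it.
// If the cache can be flushed between the passes, every pass-2 lookup needs a
// null guard exactly like the one added in the fixed renderTokens().
public class TwoPassCacheDemo {
	public static void main(String[] args) {
		Map<String, int[]> locationCache = new HashMap<String, int[]>();
		List<String> postProcessing = new ArrayList<String>();

		// Pass 1: compute and cache a screen location for each token.
		for (String token : new String[] { "hero", "goblin" }) {
			locationCache.put(token, new int[] { token.length(), 0 });
			postProcessing.add(token);
		}
		// Something invalidates part of the cache between the passes
		// (e.g. a flush triggered on another thread).
		locationCache.remove("goblin");

		// Pass 2: without the null check this would throw a NullPointerException.
		for (String token : postProcessing) {
			int[] location = locationCache.get(token);
			if (location == null) {
				continue; // the defensive guard introduced by the fix
			}
			System.out.println(token + " at x=" + location[0]);
		}
	}
}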
diff --git a/gwiki/src/main/java/de/micromata/genome/gwiki/page/impl/wiki/macros/GWikiPageTreeMacro.java b/gwiki/src/main/java/de/micromata/genome/gwiki/page/impl/wiki/macros/GWikiPageTreeMacro.java index 9f02b213..bf56c98f 100644 --- a/gwiki/src/main/java/de/micromata/genome/gwiki/page/impl/wiki/macros/GWikiPageTreeMacro.java +++ b/gwiki/src/main/java/de/micromata/genome/gwiki/page/impl/wiki/macros/GWikiPageTreeMacro.java @@ -1,162 +1,162 @@ //////////////////////////////////////////////////////////////////////////// // // Copyright (C) 2010 Micromata GmbH // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // //////////////////////////////////////////////////////////////////////////// package de.micromata.genome.gwiki.page.impl.wiki.macros; import org.apache.commons.lang.StringUtils; import de.micromata.genome.gwiki.model.GWikiElement; import de.micromata.genome.gwiki.page.GWikiContext; import de.micromata.genome.gwiki.page.impl.wiki.GWikiMacroBean; import de.micromata.genome.gwiki.page.impl.wiki.GWikiWithHeaderPrepare; import de.micromata.genome.gwiki.page.impl.wiki.MacroAttributes; /** * @author Christian Claus ([email protected]) * */ public class GWikiPageTreeMacro extends GWikiMacroBean implements GWikiWithHeaderPrepare { private static final long serialVersionUID = 4865948210393357947L; private String rootPageId; /** * height of the container */ private String height; /** * width of the container */ private String width; @Override public boolean renderImpl(final GWikiContext ctx, MacroAttributes attrs) { ctx.append("<div id='filechooser' style='margin-top: 20px; font-family: verdana; font-size: 10px;"); if (StringUtils.isNotEmpty(width)) { ctx.append("width: " + width + "; "); } if (StringUtils.isNotBlank(height)) { ctx.append("height: " + height + "; "); } final String path = ctx.getServlet().getServletContext().getContextPath() + ctx.getRequest().getServletPath(); ctx.append("'></div>"); ctx.append("<script type='text/javascript'>"); ctx.append("$.jstree._themes = '" + path + "/static/js/jstree/themes/';"); - ctx.append("$(function () {"); + ctx.append("$(document).ready(function () {"); ctx.append(" $(\"#filechooser\").jstree({"); ctx.append(" \"themes\" : { \"theme\" : \"classic\", \"dots\" : true, \"icons\" : true },"); ctx.append(" \"plugins\" : [ \"themes\", \"html_data\", \"ui\" ],"); ctx.append(" \"html_data\" : {\n"); ctx.append(" \"ajax\" : {"); ctx.append(" \"url\" : "); ctx.append("\"").append(path); ctx.append("/edit/TreeChildren\",\n"); ctx.append(" \"data\" : function(n) { return { \"method_onLoadAsync\" : \"true\", " + "\"id\" : n.attr ? 
n.attr(\"id\") : \"" + getRootPage(ctx) + "\"," + "\"target\" : \"true\"" + " }; }\n"); ctx.append(" }"); ctx.append(" }\n"); ctx.append(" });\n"); ctx.append("});\n"); ctx.append("</script>"); ctx.flush(); return true; } public void prepareHeader(final GWikiContext ctx, MacroAttributes attrs) { ctx.getRequiredJs().add("/static/js/jstree/jquery.jstree.js"); } /** * @param rootPageId the rootPageId to set */ public void setRootPageId(String rootPageId) { this.rootPageId = rootPageId; } /** * @return the rootPageId */ public String getRootPageId() { return rootPageId; } /** * @return the rootPage */ private String getRootPage(final GWikiContext ctx) { if (StringUtils.isBlank(rootPageId)) { GWikiElement home = ctx.getWikiWeb().getHomeElement(ctx); if (home != null) { rootPageId = home.getElementInfo().getId(); } } return rootPageId; } /** * @param height the height to set */ public void setHeight(String height) { this.height = height; } /** * @return the height */ public String getHeight() { return height; } /** * @param width the width to set */ public void setWidth(String width) { this.width = width; } /** * @return the width */ public String getWidth() { return width; } }
true
true
public boolean renderImpl(final GWikiContext ctx, MacroAttributes attrs) { ctx.append("<div id='filechooser' style='margin-top: 20px; font-family: verdana; font-size: 10px;"); if (StringUtils.isNotEmpty(width)) { ctx.append("width: " + width + "; "); } if (StringUtils.isNotBlank(height)) { ctx.append("height: " + height + "; "); } final String path = ctx.getServlet().getServletContext().getContextPath() + ctx.getRequest().getServletPath(); ctx.append("'></div>"); ctx.append("<script type='text/javascript'>"); ctx.append("$.jstree._themes = '" + path + "/static/js/jstree/themes/';"); ctx.append("$(function () {"); ctx.append(" $(\"#filechooser\").jstree({"); ctx.append(" \"themes\" : { \"theme\" : \"classic\", \"dots\" : true, \"icons\" : true },"); ctx.append(" \"plugins\" : [ \"themes\", \"html_data\", \"ui\" ],"); ctx.append(" \"html_data\" : {\n"); ctx.append(" \"ajax\" : {"); ctx.append(" \"url\" : "); ctx.append("\"").append(path); ctx.append("/edit/TreeChildren\",\n"); ctx.append(" \"data\" : function(n) { return { \"method_onLoadAsync\" : \"true\", " + "\"id\" : n.attr ? n.attr(\"id\") : \"" + getRootPage(ctx) + "\"," + "\"target\" : \"true\"" + " }; }\n"); ctx.append(" }"); ctx.append(" }\n"); ctx.append(" });\n"); ctx.append("});\n"); ctx.append("</script>"); ctx.flush(); return true; }
public boolean renderImpl(final GWikiContext ctx, MacroAttributes attrs) { ctx.append("<div id='filechooser' style='margin-top: 20px; font-family: verdana; font-size: 10px;"); if (StringUtils.isNotEmpty(width)) { ctx.append("width: " + width + "; "); } if (StringUtils.isNotBlank(height)) { ctx.append("height: " + height + "; "); } final String path = ctx.getServlet().getServletContext().getContextPath() + ctx.getRequest().getServletPath(); ctx.append("'></div>"); ctx.append("<script type='text/javascript'>"); ctx.append("$.jstree._themes = '" + path + "/static/js/jstree/themes/';"); ctx.append("$(document).ready(function () {"); ctx.append(" $(\"#filechooser\").jstree({"); ctx.append(" \"themes\" : { \"theme\" : \"classic\", \"dots\" : true, \"icons\" : true },"); ctx.append(" \"plugins\" : [ \"themes\", \"html_data\", \"ui\" ],"); ctx.append(" \"html_data\" : {\n"); ctx.append(" \"ajax\" : {"); ctx.append(" \"url\" : "); ctx.append("\"").append(path); ctx.append("/edit/TreeChildren\",\n"); ctx.append(" \"data\" : function(n) { return { \"method_onLoadAsync\" : \"true\", " + "\"id\" : n.attr ? n.attr(\"id\") : \"" + getRootPage(ctx) + "\"," + "\"target\" : \"true\"" + " }; }\n"); ctx.append(" }"); ctx.append(" }\n"); ctx.append(" });\n"); ctx.append("});\n"); ctx.append("</script>"); ctx.flush(); return true; }
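Note on the fix above: the change only rewrites the emitted jQuery bootstrap from the $(function () {...}) shorthand to the explicit $(document).ready(function () {...}) form. In standard jQuery the two forms are documented as equivalent, so the rewrite is presumably for explicitness or for an embedding where the shorthand proved unreliable. A minimal sketch of the emission follows; the helper name is hypothetical and not part of the GWiki API.

// Hypothetical helper, not part of GWiki: wraps a script body in the explicit
// DOM-ready handler that the fixed renderImpl() emits. In standard jQuery,
// $(fn) and $(document).ready(fn) register the same callback.
public class ReadyWrapperDemo {
	static String wrapInReadyHandler(String scriptBody) {
		return "$(document).ready(function () {" + scriptBody + "});";
	}

	public static void main(String[] args) {
		System.out.println(wrapInReadyHandler("$(\"#filechooser\").jstree({});"));
	}
}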
diff --git a/xjc/src/com/sun/tools/xjc/reader/xmlschema/RawTypeSetBuilder.java b/xjc/src/com/sun/tools/xjc/reader/xmlschema/RawTypeSetBuilder.java index 1f66a063..e23a5a63 100644 --- a/xjc/src/com/sun/tools/xjc/reader/xmlschema/RawTypeSetBuilder.java +++ b/xjc/src/com/sun/tools/xjc/reader/xmlschema/RawTypeSetBuilder.java @@ -1,384 +1,389 @@ /* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved. * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common Development * and Distribution License("CDDL") (collectively, the "License"). You * may not use this file except in compliance with the License. You can obtain * a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html * or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific * language governing permissions and limitations under the License. * * When distributing the software, include this License Header Notice in each * file and include the License file at glassfish/bootstrap/legal/LICENSE.txt. * Sun designates this particular file as subject to the "Classpath" exception * as provided by Sun in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the License * Header, with the fields enclosed by brackets [] replaced by your own * identifying information: "Portions Copyrighted [year] * [name of copyright owner]" * * Contributor(s): * * If you wish your version of this file to be governed by only the CDDL or * only the GPL Version 2, indicate your decision by adding "[Contributor] * elects to include this software in this distribution under the [CDDL or GPL * Version 2] license." If you don't indicate a single choice of license, a * recipient has the option to distribute your version of this file under * either the CDDL, the GPL Version 2 or to extend the choice of license to * its licensees as provided above. However, if you add GPL Version 2 code * and therefore, elected the GPL Version 2 license, then the option applies * only if the new code is made subject to such option by the copyright * holder. 
*/ package com.sun.tools.xjc.reader.xmlschema; import java.util.HashSet; import java.util.Set; import javax.activation.MimeType; import javax.xml.namespace.QName; import com.sun.tools.xjc.model.CAdapter; import com.sun.tools.xjc.model.CClass; import com.sun.tools.xjc.model.CClassInfo; import com.sun.tools.xjc.model.CCustomizations; import com.sun.tools.xjc.model.CElement; import com.sun.tools.xjc.model.CElementInfo; import com.sun.tools.xjc.model.CElementPropertyInfo; import com.sun.tools.xjc.model.CReferencePropertyInfo; import com.sun.tools.xjc.model.CTypeRef; import com.sun.tools.xjc.model.Model; import com.sun.tools.xjc.model.Multiplicity; import com.sun.tools.xjc.model.TypeUse; import com.sun.tools.xjc.reader.RawTypeSet; import com.sun.tools.xjc.reader.Ring; import com.sun.tools.xjc.reader.xmlschema.bindinfo.BIDom; import com.sun.tools.xjc.reader.xmlschema.bindinfo.BIGlobalBinding; import com.sun.tools.xjc.reader.xmlschema.bindinfo.BIXSubstitutable; import com.sun.xml.bind.v2.model.core.ID; import com.sun.xml.bind.v2.model.core.WildcardMode; import com.sun.xml.xsom.XSElementDecl; import com.sun.xml.xsom.XSModelGroup; import com.sun.xml.xsom.XSModelGroupDecl; import com.sun.xml.xsom.XSParticle; import com.sun.xml.xsom.XSWildcard; import com.sun.xml.xsom.visitor.XSTermVisitor; /** * Builds {@link RawTypeSet} for XML Schema. * * @author Kohsuke Kawaguchi */ public class RawTypeSetBuilder implements XSTermVisitor { /** * @param optional * if this whole property is optional due to the * occurence constraints on ancestors, set this to true. * this will prevent the primitive types to be generated. */ public static RawTypeSet build( XSParticle p, boolean optional ) { RawTypeSetBuilder rtsb = new RawTypeSetBuilder(); rtsb.particle(p); Multiplicity mul = MultiplicityCounter.theInstance.particle(p); if(optional) mul = mul.makeOptional(); return new RawTypeSet(rtsb.refs,mul); } /** * To avoid declaring the same element twice for a content model like * (A,A), we keep track of element names here while we are building up * this instance. */ private final Set<QName> elementNames = new HashSet<QName>(); private final Set<RawTypeSet.Ref> refs = new HashSet<RawTypeSet.Ref>(); protected final BGMBuilder builder = Ring.get(BGMBuilder.class); public RawTypeSetBuilder() {} /** * Gets the {@link RawTypeSet.Ref}s that were built. */ public Set<RawTypeSet.Ref> getRefs() { return refs; } /** * Build up {@link #refs} and compute the total multiplicity of this {@link RawTypeSet.Ref} set. */ private void particle( XSParticle p ) { // if the DOM customization is present, bind it like a wildcard BIDom dom = builder.getLocalDomCustomization(p); if(dom!=null) { dom.markAsAcknowledged(); refs.add(new WildcardRef(WildcardMode.SKIP)); } else { p.getTerm().visit(this); } } public void wildcard(XSWildcard wc) { refs.add(new WildcardRef(wc)); } public void modelGroupDecl(XSModelGroupDecl decl) { modelGroup(decl.getModelGroup()); } public void modelGroup(XSModelGroup group) { for( XSParticle p : group.getChildren()) particle(p); } public void elementDecl(XSElementDecl decl) { QName n = BGMBuilder.getName(decl); if(elementNames.add(n)) { CElement elementBean = Ring.get(ClassSelector.class).bindToType(decl,null); if(elementBean==null) refs.add(new XmlTypeRef(decl)); else { // yikes! if(elementBean instanceof CClass) refs.add(new CClassRef(decl,(CClass)elementBean)); else refs.add(new CElementInfoRef(decl,(CElementInfo)elementBean)); } } } /** * Reference to a wildcard. 
*/ public static final class WildcardRef extends RawTypeSet.Ref { private final WildcardMode mode; WildcardRef(XSWildcard wildcard) { this.mode = getMode(wildcard); } WildcardRef(WildcardMode mode) { this.mode = mode; } private static WildcardMode getMode(XSWildcard wildcard) { switch(wildcard.getMode()) { case XSWildcard.LAX: return WildcardMode.LAX; case XSWildcard.STRTICT: return WildcardMode.STRICT; case XSWildcard.SKIP: return WildcardMode.SKIP; default: throw new IllegalStateException(); } } protected CTypeRef toTypeRef(CElementPropertyInfo ep) { // we don't allow a mapping to typeRef if the wildcard is present throw new IllegalStateException(); } protected void toElementRef(CReferencePropertyInfo prop) { prop.setWildcard(mode); } protected RawTypeSet.Mode canBeType(RawTypeSet parent) { return RawTypeSet.Mode.MUST_BE_REFERENCE; } protected boolean isListOfValues() { return false; } protected ID id() { return ID.NONE; } } /** * Reference to a class that maps from an element. */ public static final class CClassRef extends RawTypeSet.Ref { public final CClass target; public final XSElementDecl decl; CClassRef(XSElementDecl decl, CClass target) { this.decl = decl; this.target = target; } protected CTypeRef toTypeRef(CElementPropertyInfo ep) { return new CTypeRef(target,decl); } protected void toElementRef(CReferencePropertyInfo prop) { prop.getElements().add(target); } protected RawTypeSet.Mode canBeType(RawTypeSet parent) { // if element substitution can occur, no way it can be mapped to a list of types if(decl.getSubstitutables().size()>1) return RawTypeSet.Mode.MUST_BE_REFERENCE; return RawTypeSet.Mode.SHOULD_BE_TYPEREF; } protected boolean isListOfValues() { return false; } protected ID id() { return ID.NONE; } } /** * Reference to a class that maps from an element. */ public final class CElementInfoRef extends RawTypeSet.Ref { public final CElementInfo target; public final XSElementDecl decl; CElementInfoRef(XSElementDecl decl, CElementInfo target) { this.decl = decl; this.target = target; } protected CTypeRef toTypeRef(CElementPropertyInfo ep) { assert !target.isCollection(); CAdapter a = target.getProperty().getAdapter(); if(a!=null && ep!=null) ep.setAdapter(a); return new CTypeRef(target.getContentType(),decl); } protected void toElementRef(CReferencePropertyInfo prop) { prop.getElements().add(target); } protected RawTypeSet.Mode canBeType(RawTypeSet parent) { // if element substitution can occur, no way it can be mapped to a list of types if(decl.getSubstitutables().size()>1) return RawTypeSet.Mode.MUST_BE_REFERENCE; // BIXSubstitutable also simulates this effect. Useful for separate compilation BIXSubstitutable subst = builder.getBindInfo(decl).get(BIXSubstitutable.class); if(subst!=null) { subst.markAsAcknowledged(); return RawTypeSet.Mode.MUST_BE_REFERENCE; } // we have no place to put an adater if this thing maps to a type CElementPropertyInfo p = target.getProperty(); // if we have an adapter or IDness, which requires special // annotation, and there's more than one element, // we have no place to put the special annotation, so we need JAXBElement. 
if((parent.refs.size()>1 || !parent.mul.isAtMostOnce()) && p.id()!=ID.NONE) return RawTypeSet.Mode.MUST_BE_REFERENCE; if(parent.refs.size() > 1 && p.getAdapter() != null) return RawTypeSet.Mode.MUST_BE_REFERENCE; - return RawTypeSet.Mode.SHOULD_BE_TYPEREF; + if(target.hasClass()) + // if the CElementInfo was explicitly bound to a class (which happen if and only if + // the user requested so, then map that to reference property so that the user sees a class + return RawTypeSet.Mode.CAN_BE_TYPEREF; + else + return RawTypeSet.Mode.SHOULD_BE_TYPEREF; } protected boolean isListOfValues() { return target.getProperty().isValueList(); } protected ID id() { return target.getProperty().id(); } protected MimeType getExpectedMimeType() { return target.getProperty().getExpectedMimeType(); } } /** * References to a type. Could be global or local. */ public static final class XmlTypeRef extends RawTypeSet.Ref { private final XSElementDecl decl; private final TypeUse target; public XmlTypeRef(XSElementDecl decl) { this.decl = decl; SimpleTypeBuilder stb = Ring.get(SimpleTypeBuilder.class); stb.refererStack.push(decl); TypeUse r = Ring.get(ClassSelector.class).bindToType(decl.getType(),decl); stb.refererStack.pop(); target = r; } protected CTypeRef toTypeRef(CElementPropertyInfo ep) { if(ep!=null && target.getAdapterUse()!=null) ep.setAdapter(target.getAdapterUse()); return new CTypeRef(target.getInfo(),decl); } /** * The whole type set can be later bound to a reference property, * in which case we need to generate additional code to wrap this * type reference into an element class. * * This method generates such an element class and returns it. */ protected void toElementRef(CReferencePropertyInfo prop) { CClassInfo scope = Ring.get(ClassSelector.class).getCurrentBean(); Model model = Ring.get(Model.class); CCustomizations custs = Ring.get(BGMBuilder.class).getBindInfo(decl).toCustomizationList(); if(target instanceof CClassInfo && Ring.get(BIGlobalBinding.class).isSimpleMode()) { CClassInfo bean = new CClassInfo(model,scope, model.getNameConverter().toClassName(decl.getName()), decl.getLocator(), null, BGMBuilder.getName(decl), decl, custs); bean.setBaseClass((CClassInfo)target); prop.getElements().add(bean); } else { CElementInfo e = new CElementInfo(model,BGMBuilder.getName(decl),scope,target, decl.getDefaultValue(), decl, custs, decl.getLocator()); prop.getElements().add(e); } } protected RawTypeSet.Mode canBeType(RawTypeSet parent) { // if we have an adapter or IDness, which requires special // annotation, and there's more than one element, // we have no place to put the special annotation, so we need JAXBElement. if((parent.refs.size()>1 || !parent.mul.isAtMostOnce()) && target.idUse()!=ID.NONE) return RawTypeSet.Mode.MUST_BE_REFERENCE; if(parent.refs.size() > 1 && target.getAdapterUse() != null) return RawTypeSet.Mode.MUST_BE_REFERENCE; // nillable and optional at the same time. needs an element wrapper to distinguish those // two states. But this is not a hard requirement. if(decl.isNillable() && parent.mul.isOptional()) return RawTypeSet.Mode.CAN_BE_TYPEREF; return RawTypeSet.Mode.SHOULD_BE_TYPEREF; } protected boolean isListOfValues() { return target.isCollection(); } protected ID id() { return target.idUse(); } protected MimeType getExpectedMimeType() { return target.getExpectedMimeType(); } } }
true
true
protected RawTypeSet.Mode canBeType(RawTypeSet parent) { // if element substitution can occur, no way it can be mapped to a list of types if(decl.getSubstitutables().size()>1) return RawTypeSet.Mode.MUST_BE_REFERENCE; // BIXSubstitutable also simulates this effect. Useful for separate compilation BIXSubstitutable subst = builder.getBindInfo(decl).get(BIXSubstitutable.class); if(subst!=null) { subst.markAsAcknowledged(); return RawTypeSet.Mode.MUST_BE_REFERENCE; } // we have no place to put an adater if this thing maps to a type CElementPropertyInfo p = target.getProperty(); // if we have an adapter or IDness, which requires special // annotation, and there's more than one element, // we have no place to put the special annotation, so we need JAXBElement. if((parent.refs.size()>1 || !parent.mul.isAtMostOnce()) && p.id()!=ID.NONE) return RawTypeSet.Mode.MUST_BE_REFERENCE; if(parent.refs.size() > 1 && p.getAdapter() != null) return RawTypeSet.Mode.MUST_BE_REFERENCE; return RawTypeSet.Mode.SHOULD_BE_TYPEREF; }
protected RawTypeSet.Mode canBeType(RawTypeSet parent) { // if element substitution can occur, no way it can be mapped to a list of types if(decl.getSubstitutables().size()>1) return RawTypeSet.Mode.MUST_BE_REFERENCE; // BIXSubstitutable also simulates this effect. Useful for separate compilation BIXSubstitutable subst = builder.getBindInfo(decl).get(BIXSubstitutable.class); if(subst!=null) { subst.markAsAcknowledged(); return RawTypeSet.Mode.MUST_BE_REFERENCE; } // we have no place to put an adater if this thing maps to a type CElementPropertyInfo p = target.getProperty(); // if we have an adapter or IDness, which requires special // annotation, and there's more than one element, // we have no place to put the special annotation, so we need JAXBElement. if((parent.refs.size()>1 || !parent.mul.isAtMostOnce()) && p.id()!=ID.NONE) return RawTypeSet.Mode.MUST_BE_REFERENCE; if(parent.refs.size() > 1 && p.getAdapter() != null) return RawTypeSet.Mode.MUST_BE_REFERENCE; if(target.hasClass()) // if the CElementInfo was explicitly bound to a class (which happen if and only if // the user requested so, then map that to reference property so that the user sees a class return RawTypeSet.Mode.CAN_BE_TYPEREF; else return RawTypeSet.Mode.SHOULD_BE_TYPEREF; }
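Note on the fix above: the added branch consults target.hasClass(). When the user explicitly bound the element to a class, the result is downgraded from SHOULD_BE_TYPEREF to CAN_BE_TYPEREF, leaving the reference binding (which exposes the generated element class to the user) available. A simplified, runnable model of that decision ladder follows; it is illustrative only, and the real method also weighs substitution groups, adapters and ID-ness as shown above.

// Illustrative model of the canBeType() decision ladder; not the XJC API.
enum Mode { MUST_BE_REFERENCE, CAN_BE_TYPEREF, SHOULD_BE_TYPEREF }

public class CanBeTypeDemo {
	static Mode canBeType(boolean substitutable, boolean needsJaxbElement, boolean boundToClass) {
		if (substitutable || needsJaxbElement) {
			return Mode.MUST_BE_REFERENCE;
		}
		// The fix: an explicitly class-bound element is merely *allowed* to be
		// a type ref, so the reference binding can still win and the user
		// still sees the class that was requested.
		if (boundToClass) {
			return Mode.CAN_BE_TYPEREF;
		}
		return Mode.SHOULD_BE_TYPEREF;
	}

	public static void main(String[] args) {
		System.out.println(canBeType(false, false, true));  // CAN_BE_TYPEREF
		System.out.println(canBeType(false, false, false)); // SHOULD_BE_TYPEREF
	}
}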
diff --git a/szoftlab4/src/hu/miracle/workers/Main.java b/szoftlab4/src/hu/miracle/workers/Main.java index 7fab0cc..93284e1 100644 --- a/szoftlab4/src/hu/miracle/workers/Main.java +++ b/szoftlab4/src/hu/miracle/workers/Main.java @@ -1,152 +1,151 @@ package hu.miracle.workers; import java.awt.Point; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; public class Main { private static Game game; private static Scene scene; private static Timer timer; public static int showMenu() { int result = 0; boolean success = false; String menu[] = { "Hangya szuletese", "Hangya etelfelvetele", "Hangya mereg altali pusztulasa", "Hangyaszsun szuletese", "Hangyairto spray fujasa", "Szagtalanito spray fujasa", "Idozito tick", "Hangya akadalyelkerulese", "Hangya hangyalesobe lepese", "Jatek szuneteltetese", "Jatek folytatasa", "Jatek nehezsegenek beallitasa", "Toplista mentese", "Kilepes" }; BufferedReader bfread = new BufferedReader(new InputStreamReader(System.in)); System.out.println("Jatek szimulalasa\nValasszon az alabbi menupontok kozul:\n"); for (int i = 0; i < menu.length; i++) { String scenario = menu[i]; System.out.println(String.format("%2d. %s", i + 1, scenario)); } while (!success) { try { System.out.print("\nValasztott menupont: "); result = Integer.parseInt(bfread.readLine().trim()); if (0 < result && result <= menu.length) { success = true; } else { System.out.println("Nem megfelelo ertek! Kerem a menupontok kozul valasszon!"); } } catch (NumberFormatException e) { System.out.println("Nem megfelelo ertek! Kerem egy egesz szamot adjon meg!"); } catch (IOException e) { } } return result; } public static void main(String[] args) { System.out.println(Main.class.getCanonicalName() + ".main()"); scene = new Scene(); game = new Game(scene); timer = new Timer(game, 1000); game.setTimer(timer); timer.start(); - //timer.stopTimer(); while (true) { int menuresult = showMenu(); switch (menuresult) { case 1: // TODO: hangya utnak inditasa break; case 2: // TODO: hangya etelfelvetele break; case 3: // TODO: hangya mereg miatt elpusztul break; case 4: // TODO: hangyaszsun elinditasa break; case 5: // TODO: mereg spray fujas break; case 6: // TODO: szagtalanito spray fujas break; case 7: timer.tick(); break; case 8: // TODO: hangya kikeruli az akadalyt break; case 9: // Inicializálás Point c9pos = new Point(0, 0); AntHill c9hill = new AntHill(c9pos, scene, 1, 1); AntSinker c9sink = new AntSinker(c9pos); Ant c9ant = new Ant(c9pos, scene, c9hill); scene.getObstacles().add(c9sink); scene.getAnts().add(c9ant); // Tick System.out.println("<START>"); c9ant.handleTick(); System.out.println("<END>"); break; case 10: timer.stopTimer(); break; case 11: timer.startTimer(); break; case 12: try { System.out.println("Kérem adjon meg egy nehézségi szintet (1-3):"); BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); game.setDifficulty(Integer.parseInt(br.readLine())); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 13: try { BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); int score; score = Integer.parseInt(br.readLine()); game.writeTopList(score); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 14: System.exit(0); break; default: break; } } } }
true
true
public static void main(String[] args) { System.out.println(Main.class.getCanonicalName() + ".main()"); scene = new Scene(); game = new Game(scene); timer = new Timer(game, 1000); game.setTimer(timer); timer.start(); //timer.stopTimer(); while (true) { int menuresult = showMenu(); switch (menuresult) { case 1: // TODO: hangya utnak inditasa break; case 2: // TODO: hangya etelfelvetele break; case 3: // TODO: hangya mereg miatt elpusztul break; case 4: // TODO: hangyaszsun elinditasa break; case 5: // TODO: mereg spray fujas break; case 6: // TODO: szagtalanito spray fujas break; case 7: timer.tick(); break; case 8: // TODO: hangya kikeruli az akadalyt break; case 9: // Inicializálás Point c9pos = new Point(0, 0); AntHill c9hill = new AntHill(c9pos, scene, 1, 1); AntSinker c9sink = new AntSinker(c9pos); Ant c9ant = new Ant(c9pos, scene, c9hill); scene.getObstacles().add(c9sink); scene.getAnts().add(c9ant); // Tick System.out.println("<START>"); c9ant.handleTick(); System.out.println("<END>"); break; case 10: timer.stopTimer(); break; case 11: timer.startTimer(); break; case 12: try { System.out.println("Kérem adjon meg egy nehézségi szintet (1-3):"); BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); game.setDifficulty(Integer.parseInt(br.readLine())); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 13: try { BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); int score; score = Integer.parseInt(br.readLine()); game.writeTopList(score); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 14: System.exit(0); break; default: break; } } }
public static void main(String[] args) { System.out.println(Main.class.getCanonicalName() + ".main()"); scene = new Scene(); game = new Game(scene); timer = new Timer(game, 1000); game.setTimer(timer); timer.start(); while (true) { int menuresult = showMenu(); switch (menuresult) { case 1: // TODO: hangya utnak inditasa break; case 2: // TODO: hangya etelfelvetele break; case 3: // TODO: hangya mereg miatt elpusztul break; case 4: // TODO: hangyaszsun elinditasa break; case 5: // TODO: mereg spray fujas break; case 6: // TODO: szagtalanito spray fujas break; case 7: timer.tick(); break; case 8: // TODO: hangya kikeruli az akadalyt break; case 9: // Inicializálás Point c9pos = new Point(0, 0); AntHill c9hill = new AntHill(c9pos, scene, 1, 1); AntSinker c9sink = new AntSinker(c9pos); Ant c9ant = new Ant(c9pos, scene, c9hill); scene.getObstacles().add(c9sink); scene.getAnts().add(c9ant); // Tick System.out.println("<START>"); c9ant.handleTick(); System.out.println("<END>"); break; case 10: timer.stopTimer(); break; case 11: timer.startTimer(); break; case 12: try { System.out.println("Kérem adjon meg egy nehézségi szintet (1-3):"); BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); game.setDifficulty(Integer.parseInt(br.readLine())); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 13: try { BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); int score; score = Integer.parseInt(br.readLine()); game.writeTopList(score); } catch (NumberFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } break; case 14: System.exit(0); break; default: break; } } }
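The menu loop above drives a custom hu.miracle.workers.Timer through start(), tick(), stopTimer() and startTimer() (menu cases 7, 10 and 11). The Timer internals are not part of this row, so the following is only a minimal sketch, assuming a pausable fixed-period timer and reducing the Game dependency to a Runnable for self-containment; every name below is illustrative, not from the original.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

// Hypothetical stand-in for hu.miracle.workers.Timer, matching the calls
// made in main(): new Timer(game, 1000), start(), tick(), stopTimer(),
// startTimer(). The real class is not shown in the diff.
class TimerSketch {
    private final Runnable onTick;      // stands in for the Game update hook
    private final long periodMillis;
    private final ScheduledExecutorService scheduler =
            Executors.newSingleThreadScheduledExecutor();
    private ScheduledFuture<?> task;

    TimerSketch(Runnable onTick, long periodMillis) {
        this.onTick = onTick;
        this.periodMillis = periodMillis;
    }

    void start() {
        startTimer();                   // begin ticking immediately
    }

    void tick() {
        onTick.run();                   // manual tick, as menu case 7 does
    }

    synchronized void stopTimer() {     // pause, as menu case 10 does
        if (task != null) {
            task.cancel(false);
            task = null;
        }
    }

    synchronized void startTimer() {    // resume, as menu case 11 does
        if (task == null) {
            task = scheduler.scheduleAtFixedRate(
                    onTick, periodMillis, periodMillis, TimeUnit.MILLISECONDS);
        }
    }
}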
diff --git a/contentconnector/contentconnector-lucene/src/main/java/com/gentics/cr/lucene/indexer/index/LuceneIndexUpdateChecker.java b/contentconnector/contentconnector-lucene/src/main/java/com/gentics/cr/lucene/indexer/index/LuceneIndexUpdateChecker.java index 93f2bfe8..ba0f6a6f 100644 --- a/contentconnector/contentconnector-lucene/src/main/java/com/gentics/cr/lucene/indexer/index/LuceneIndexUpdateChecker.java +++ b/contentconnector/contentconnector-lucene/src/main/java/com/gentics/cr/lucene/indexer/index/LuceneIndexUpdateChecker.java @@ -1,179 +1,175 @@ package com.gentics.cr.lucene.indexer.index; import java.io.IOException; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Vector; import org.apache.log4j.Logger; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermDocs; import org.apache.lucene.store.Directory; import com.gentics.api.lib.resolving.Resolvable; import com.gentics.cr.lucene.indexaccessor.IndexAccessor; import com.gentics.cr.monitoring.MonitorFactory; import com.gentics.cr.monitoring.UseCase; import com.gentics.cr.util.indexing.IndexUpdateChecker; /** * Lucene Implementation of IndexUpdateChecker. * Walks an Index and compares Identifyer/Timestamp pairs to the Objects in the Index * * Last changed: $Date: 2009-09-02 17:57:48 +0200 (Mi, 02 Sep 2009) $ * @version $Revision: 180 $ * @author $Author: [email protected] $ */ public class LuceneIndexUpdateChecker extends IndexUpdateChecker { LuceneIndexLocation indexLocation; IndexAccessor indexAccessor; LinkedHashMap<String, Integer> docs; Iterator<String> docIT; Vector<String> checkedDocuments; private static final Logger log = Logger.getLogger(LuceneIndexUpdateChecker.class); /** * Initializes the Lucene Implementation of {@link IndexUpdateChecker}. * @param indexLocation * @param termKey - Key under wich the termValue is stored in the Index e.g. * CRID * @param termValue - Value wich to use for iteration e.g. CRID_1 * @param idAttribute - ID-Attribute key that will be used for Identifyer * comparison. This has to represent the field where the identifyer in the * method {@link #checkUpToDate(String, int)} is present.
* @throws IOException */ public LuceneIndexUpdateChecker(final LuceneIndexLocation indexLocation, final String termKey, final String termValue, final String idAttribute) throws IOException { this.indexLocation = indexLocation; indexAccessor = indexLocation.getAccessor(); IndexReader reader = indexAccessor.getReader(true); TermDocs termDocs = reader.termDocs(new Term(termKey, termValue)); log.debug("Fetching sorted documents from index..."); docs = fetchSortedDocs(termDocs, reader, idAttribute); log.debug("Fetched sorted docs from index"); docIT = docs.keySet().iterator(); checkedDocuments = new Vector<String>(100); //TODO CONTINUE HERE PREPARE TO USE ITERATOR IN CHECK METHOD indexAccessor.release(reader, true); } @Override protected final boolean checkUpToDate(final String identifyer, final Object timestamp, final String timestampattribute, final Resolvable object) { String timestampString; if (timestamp == null) { return false; } else { timestampString = timestamp.toString(); } if ("".equals(timestampString)) { return false; } boolean readerWithWritePermissions = false; if (docs.containsKey(identifyer)) { Integer documentId = docs.get(identifyer); try { IndexReader reader = indexAccessor.getReader(readerWithWritePermissions); Document document = reader.document(documentId); checkedDocuments.add(identifyer); Object documentUpdateTimestamp = null; try { documentUpdateTimestamp = document.get(timestampattribute); } catch (NumberFormatException e) { log.debug("Got an error getting the document for " + identifyer + " from index", e); } indexAccessor.release(reader, readerWithWritePermissions); //Use strings to compare the attributes if (documentUpdateTimestamp != null && !(documentUpdateTimestamp instanceof String)) { documentUpdateTimestamp = documentUpdateTimestamp.toString(); } if (documentUpdateTimestamp == null || !documentUpdateTimestamp.equals(timestampString)) { - if (log.isTraceEnabled()) { - log.debug(identifyer + ": object is not up to date."); - } + log.debug(identifyer + ": object is not up to date."); return false; } - if (log.isTraceEnabled()) { - log.debug(identifyer + ": object is up to date."); - } + log.debug(identifyer + ": object is up to date."); return true; } catch (IOException e) { //TODO specify witch index is not readable String directories = ""; Directory[] dirs = indexLocation.getDirectories(); for (Directory dir : dirs) { directories += dir.toString() + '\n'; } log.error("Cannot open index for reading. (Directory: " + directories + ")", e); return true; } } else { //object is not yet in the index => it is not up to date return false; } } @Override public void deleteStaleObjects() { log.debug(checkedDocuments.size() + " objects checked, " + docs.size() + " objects already in the index."); IndexReader writeReader = null; boolean readerNeedsWrite = true; UseCase deleteStale = MonitorFactory.startUseCase("LuceneIndexUpdateChecker.deleteStaleObjects(" + indexLocation.getName() + ")"); try { boolean objectsDeleted = false; for (String contentId : docs.keySet()) { if (!checkedDocuments.contains(contentId)) { log.debug("Object " + contentId + " wasn't checked in the last run.
So i will delete it."); if (writeReader == null) { writeReader = indexAccessor.getReader(readerNeedsWrite); } writeReader.deleteDocument(docs.get(contentId)); objectsDeleted = true; } } if (objectsDeleted) { indexLocation.createReopenFile(); } } catch (IOException e) { log.error("Cannot delete objects from index.", e); } finally { //always release writeReader it blocks other threads if you don't if (writeReader != null) { indexAccessor.release(writeReader, readerNeedsWrite); } log.debug("Finished cleaning stale documents"); deleteStale.stop(); } checkedDocuments.clear(); } private LinkedHashMap<String, Integer> fetchSortedDocs(TermDocs termDocs, IndexReader reader, String idAttribute) throws IOException { LinkedHashMap<String, Integer> tmp = new LinkedHashMap<String, Integer>(); while (termDocs.next()) { Document doc = reader.document(termDocs.doc()); String docID = doc.get(idAttribute); tmp.put(docID, termDocs.doc()); } LinkedHashMap<String, Integer> ret = new LinkedHashMap<String, Integer>(tmp.size()); Vector<String> v = new Vector<String>(tmp.keySet()); Collections.sort(v); for (String id : v) { ret.put(id, tmp.get(id)); } return ret; } }
false
true
protected final boolean checkUpToDate(final String identifyer, final Object timestamp, final String timestampattribute, final Resolvable object) { String timestampString; if (timestamp == null) { return false; } else { timestampString = timestamp.toString(); } if ("".equals(timestampString)) { return false; } boolean readerWithWritePermissions = false; if (docs.containsKey(identifyer)) { Integer documentId = docs.get(identifyer); try { IndexReader reader = indexAccessor.getReader(readerWithWritePermissions); Document document = reader.document(documentId); checkedDocuments.add(identifyer); Object documentUpdateTimestamp = null; try { documentUpdateTimestamp = document.get(timestampattribute); } catch (NumberFormatException e) { log.debug("Got an error getting the document for " + identifyer + " from index", e); } indexAccessor.release(reader, readerWithWritePermissions); //Use strings to compare the attributes if (documentUpdateTimestamp != null && !(documentUpdateTimestamp instanceof String)) { documentUpdateTimestamp = documentUpdateTimestamp.toString(); } if (documentUpdateTimestamp == null || !documentUpdateTimestamp.equals(timestampString)) { if (log.isTraceEnabled()) { log.debug(identifyer + ": object is not up to date."); } return false; } if (log.isTraceEnabled()) { log.debug(identifyer + ": object is up to date."); } return true; } catch (IOException e) { //TODO specify witch index is not readable String directories = ""; Directory[] dirs = indexLocation.getDirectories(); for (Directory dir : dirs) { directories += dir.toString() + '\n'; } log.error("Cannot open index for reading. (Directory: " + directories + ")", e); return true; } } else { //object is not yet in the index => it is not up to date return false; } }
protected final boolean checkUpToDate(final String identifyer, final Object timestamp, final String timestampattribute, final Resolvable object) { String timestampString; if (timestamp == null) { return false; } else { timestampString = timestamp.toString(); } if ("".equals(timestampString)) { return false; } boolean readerWithWritePermissions = false; if (docs.containsKey(identifyer)) { Integer documentId = docs.get(identifyer); try { IndexReader reader = indexAccessor.getReader(readerWithWritePermissions); Document document = reader.document(documentId); checkedDocuments.add(identifyer); Object documentUpdateTimestamp = null; try { documentUpdateTimestamp = document.get(timestampattribute); } catch (NumberFormatException e) { log.debug("Got an error getting the document for " + identifyer + " from index", e); } indexAccessor.release(reader, readerWithWritePermissions); //Use strings to compare the attributes if (documentUpdateTimestamp != null && !(documentUpdateTimestamp instanceof String)) { documentUpdateTimestamp = documentUpdateTimestamp.toString(); } if (documentUpdateTimestamp == null || !documentUpdateTimestamp.equals(timestampString)) { log.debug(identifyer + ": object is not up to date."); return false; } log.debug(identifyer + ": object is up to date."); return true; } catch (IOException e) { //TODO specify witch index is not readable String directories = ""; Directory[] dirs = indexLocation.getDirectories(); for (Directory dir : dirs) { directories += dir.toString() + '\n'; } log.error("Cannot open index for reading. (Directory: " + directories + ")", e); return true; } } else { //object is not yet in the index => it is not up to date return false; } }
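This row's change removes log.isTraceEnabled() guards that wrapped log.debug(...) calls. The guard checked a finer level than the statement logged at, so with the logger running at DEBUG (but not TRACE) the messages were silently skipped. A minimal sketch of the matched-level idiom with log4j 1.x, the Logger API the class already imports:

import org.apache.log4j.Logger;

class GuardedLoggingSketch {
    private static final Logger log = Logger.getLogger(GuardedLoggingSketch.class);

    void report(String identifier, boolean upToDate) {
        // Guard with the same level you log at; the guard is only worth
        // keeping when building the message itself is expensive.
        if (log.isDebugEnabled()) {
            log.debug(identifier + (upToDate
                    ? ": object is up to date."
                    : ": object is not up to date."));
        }
    }
}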
diff --git a/src/main/java/org/generationcp/ibpworkbench/util/DatasetExporter.java b/src/main/java/org/generationcp/ibpworkbench/util/DatasetExporter.java index 70e1eb2..828a988 100644 --- a/src/main/java/org/generationcp/ibpworkbench/util/DatasetExporter.java +++ b/src/main/java/org/generationcp/ibpworkbench/util/DatasetExporter.java @@ -1,503 +1,511 @@ package org.generationcp.ibpworkbench.util; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.CellStyle; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; import org.apache.poi.ss.util.CellRangeAddress; import org.generationcp.commons.util.PoiUtil; import org.generationcp.middleware.exceptions.MiddlewareQueryException; import org.generationcp.middleware.manager.api.StudyDataManager; import org.generationcp.middleware.manager.api.TraitDataManager; import org.generationcp.middleware.pojos.CharacterDataElement; import org.generationcp.middleware.pojos.CharacterLevelElement; import org.generationcp.middleware.pojos.DatasetCondition; import org.generationcp.middleware.pojos.Factor; import org.generationcp.middleware.pojos.NumericDataElement; import org.generationcp.middleware.pojos.NumericLevelElement; import org.generationcp.middleware.pojos.Scale; import org.generationcp.middleware.pojos.Study; import org.generationcp.middleware.pojos.Trait; import org.generationcp.middleware.pojos.TraitMethod; import org.generationcp.middleware.pojos.Variate; public class DatasetExporter { private static final int conditionListHeaderRowIndex = 8; private StudyDataManager studyDataManager; private TraitDataManager traitDataManager; private Integer studyId; private Integer representationId; public DatasetExporter(StudyDataManager studyDataManager, TraitDataManager traitDataManager, Integer studyId, Integer representationId) { this.studyDataManager = studyDataManager; this.traitDataManager = traitDataManager; this.studyId = studyId; this.representationId = representationId; } public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException { //create workbook Workbook workbook = new HSSFWorkbook(); CellStyle cellStyle = workbook.createCellStyle(); CellStyle cellStyleForObservationSheet = workbook.createCellStyle(); //create two sheets, one for description and nother for measurements Sheet descriptionSheet = workbook.createSheet("Description"); Sheet observationSheet = workbook.createSheet("Observation"); //this map is for mapping the columns names of the dataset to their column index in the excel sheet Map<String, Integer> columnsMap = new HashMap<String, Integer>(); int observationSheetColumnIndex = 0; //write the details on the first sheet - description //get the study first Study study = null; try { study = this.studyDataManager.getStudyByID(this.studyId); } catch (MiddlewareQueryException ex) { throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex); } if(study != null) { //get the needed study details String name = study.getName(); String title = study.getTitle(); Integer pmkey = study.getProjectKey(); String objective = study.getObjective(); Integer startDate = study.getStartDate(); Integer endDate = study.getEndDate(); String type = study.getType(); //add to the sheet Row row0 = 
descriptionSheet.createRow(0); row0.createCell(0).setCellValue("STUDY"); row0.createCell(1).setCellValue(name); Row row1 = descriptionSheet.createRow(1); row1.createCell(0).setCellValue("TITLE"); row1.createCell(1).setCellValue(title); Row row2 = descriptionSheet.createRow(2); row2.createCell(0).setCellValue("PMKEY"); Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); pmKeyCell.setCellValue(pmkey); Row row3 = descriptionSheet.createRow(3); row3.createCell(0).setCellValue("OBJECTIVE"); row3.createCell(1).setCellValue(objective); Row row4 = descriptionSheet.createRow(4); row4.createCell(0).setCellValue("START DATE"); Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); startDateCell.setCellValue(startDate); Row row5 = descriptionSheet.createRow(5); row5.createCell(0).setCellValue("END DATE"); Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); endDateCell.setCellValue(endDate); Row row6 = descriptionSheet.createRow(6); row6.createCell(0).setCellValue("STUDY TYPE"); row6.createCell(1).setCellValue(type); //merge cells for the study details for(int ctr = 0; ctr < 7; ctr++) { descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7)); } //empty row Row row7 = descriptionSheet.createRow(7); //row with headings for condition list Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex); conditionHeaderRow.createCell(0).setCellValue("CONDITION"); conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION"); conditionHeaderRow.createCell(2).setCellValue("PROPERTY"); conditionHeaderRow.createCell(3).setCellValue("SCALE"); conditionHeaderRow.createCell(4).setCellValue("METHOD"); conditionHeaderRow.createCell(5).setCellValue("DATA TYPE"); conditionHeaderRow.createCell(6).setCellValue("VALUE"); conditionHeaderRow.createCell(7).setCellValue("LABEL"); //get the conditions and their details List<DatasetCondition> conditions = new ArrayList<DatasetCondition>(); try { conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting conditions of study - " + name + ", representation - " + this.representationId, ex); } int conditionRowIndex = this.conditionListHeaderRowIndex + 1; for(DatasetCondition condition : conditions) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId()); String conditionName = condition.getName(); if(conditionName != null) { conditionName = conditionName.trim(); } String conditionType = condition.getType(); String conditionLabel = ""; try { conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId()); } catch (MiddlewareQueryException ex) { conditionLabel = ""; } Row conditionRow = descriptionSheet.createRow(conditionRowIndex); conditionRow.createCell(0).setCellValue(conditionName); conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); conditionRow.createCell(5).setCellValue(conditionType); if(conditionType.equals("N")) { Double thevalue = (Double) condition.getValue(); conditionRow.createCell(6).setCellValue(thevalue); } else { 
conditionRow.createCell(6).setCellValue(condition.getValue().toString()); } conditionRow.createCell(7).setCellValue(conditionLabel); //add entry to columns mapping //we set the value to -1 to signify that this should not be a column in the observation sheet if(!conditionName.equals("STUDY")) { columnsMap.put(conditionName, Integer.valueOf(-1)); } conditionRowIndex++; } //empty row Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex); //row with headings for factor list int factorRowHeaderIndex = conditionRowIndex + 1; Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex); factorHeaderRow.createCell(0).setCellValue("FACTOR"); factorHeaderRow.createCell(1).setCellValue("DESCRIPTION"); factorHeaderRow.createCell(2).setCellValue("PROPERTY"); factorHeaderRow.createCell(3).setCellValue("SCALE"); factorHeaderRow.createCell(4).setCellValue("METHOD"); factorHeaderRow.createCell(5).setCellValue("DATA TYPE"); factorHeaderRow.createCell(6).setCellValue(""); factorHeaderRow.createCell(7).setCellValue("LABEL"); //get the factors and their details List<Factor> factors = new ArrayList<Factor>(); try { factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting factors of study - " + name + ", representation - " + this.representationId, ex); } int factorRowIndex = factorRowHeaderIndex + 1; for(Factor factor : factors) { String dataType = factor.getDataType(); String factorName = factor.getName(); if(factorName != null) { factorName = factorName.trim(); } //check if factor is already written as a condition Integer temp = columnsMap.get(factorName); if(temp == null && !factorName.equals("STUDY")) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId()); String factorLabel = ""; try { factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId()); } catch (MiddlewareQueryException ex) { factorLabel = ""; } Row factorRow = descriptionSheet.createRow(factorRowIndex); factorRow.createCell(0).setCellValue(factorName); factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); factorRow.createCell(5).setCellValue(dataType); factorRow.createCell(6).setCellValue(""); factorRow.createCell(7).setCellValue(factorLabel); //add entry to columns mapping columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; factorRowIndex++; } } //empty row Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex); //row with headings for variate list int variateHeaderRowIndex = factorRowIndex + 1; Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex); variateHeaderRow.createCell(0).setCellValue("VARIATE"); variateHeaderRow.createCell(1).setCellValue("DESCRIPTION"); variateHeaderRow.createCell(2).setCellValue("PROPERTY"); variateHeaderRow.createCell(3).setCellValue("SCALE"); variateHeaderRow.createCell(4).setCellValue("METHOD"); variateHeaderRow.createCell(5).setCellValue("DATA TYPE"); //get the variates and their details List<Variate> variates = new ArrayList<Variate>(); try { variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error 
with getting variates of study - " + name + ", representation - " + this.representationId, ex); } int variateRowIndex = variateHeaderRowIndex + 1; for(Variate variate : variates) { String dataType = variate.getDataType(); String variateName = variate.getName(); if(variateName != null) { variateName = variateName.trim(); } String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId()); Row variateRow = descriptionSheet.createRow(variateRowIndex); variateRow.createCell(0).setCellValue(variateName); variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); variateRow.createCell(5).setCellValue(dataType); //add entry to columns mapping columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; variateRowIndex++; } //populate the measurements sheet //establish the columns of the dataset first Row datasetHeaderRow = observationSheet.createRow(0); for(String columnName : columnsMap.keySet()) { short columnIndex = columnsMap.get(columnName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); cell.setCellValue(columnName); } } //then work with the data //do it by 50 rows at a time int pageSize = 50; long totalNumberOfRows = 0; int sheetRowIndex = 1; try { totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId); } catch(Exception ex) { throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name + ", representation - " + this.representationId, ex); } for(int start = 0; start < totalNumberOfRows; start = start + pageSize) { List<Integer> ounitIds = new ArrayList<Integer>(); try { //first get the ounit ids, these are the ids of the rows in the dataset ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting ounit ids of study - " + name + ", representation - " + this.representationId, ex); } if(!ounitIds.isEmpty()) { //map each ounit id into a row in the observation sheet Map<Integer, Row> rowMap = new HashMap<Integer, Row>(); for(Integer ounitId : ounitIds) { Row row = observationSheet.createRow(sheetRowIndex); sheetRowIndex++; rowMap.put(ounitId, row); } //then get the data for each of the observation units (ounits) List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>(); try { charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character level values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterLevelElement elem : charLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = 
value.trim(); } cell.setCellValue(value); } } } } List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>(); try { numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric level values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericLevelElement elem : numericLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); - cell.setCellValue(elem.getValue()); + double elemValue = 0; + if(elem.getValue() != null){ + elemValue = elem.getValue().doubleValue(); + } + cell.setCellValue(elemValue); } } } } List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>(); try { charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character data values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterDataElement elem : charDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName = elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = value.trim(); } cell.setCellValue(value); } } List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>(); try { numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric data values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericDataElement elem : numericDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName = elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); - cell.setCellValue(elem.getValue()); + double elemValue = 0; + if(elem.getValue() != null){ + elemValue = elem.getValue().doubleValue(); + } + cell.setCellValue(elemValue); } } } } } //adjust column widths of description sheet to fit contents for(int ctr = 0; ctr < 8; ctr++) { if(ctr != 1) { descriptionSheet.autoSizeColumn(ctr); } } //adjust column widths of observation sheet to fit contents for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) { observationSheet.autoSizeColumn(ctr); } try { //write the excel file FileOutputStream fileOutputStream = new FileOutputStream(filename); workbook.write(fileOutputStream); fileOutputStream.close(); return fileOutputStream; } catch(Exception ex) { throw new DatasetExporterException("Error with writing to: " + filename, ex); } } private String[] getTraitScaleMethodInfo(Integer traitId, Integer scaleId, Integer methodId) throws DatasetExporterException { 
String toreturn[] = new String[4]; try { Trait trait = this.traitDataManager.getTraitById(traitId); Scale scale = this.traitDataManager.getScaleByID(scaleId); TraitMethod method = this.traitDataManager.getTraitMethodById(methodId); if(trait != null){ toreturn[0] = trait.getDescripton(); toreturn[1] = trait.getName(); } else { toreturn[0] = "Not specified"; toreturn[1] = "Not specified"; } if(scale != null){ toreturn[2] = scale.getName(); } else { toreturn[2] = "Not specified"; } if(method != null){ toreturn[3] = method.getName(); } else { toreturn[3] = "Not specified"; } } catch(Exception ex) { throw new DatasetExporterException("Error with getting trait, scale, and method information for " + "trait id = " + traitId + " scale id = " + scaleId + " method id = " + methodId, ex); } return toreturn; } }
false
true
public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException { //create workbook Workbook workbook = new HSSFWorkbook(); CellStyle cellStyle = workbook.createCellStyle(); CellStyle cellStyleForObservationSheet = workbook.createCellStyle(); //create two sheets, one for description and nother for measurements Sheet descriptionSheet = workbook.createSheet("Description"); Sheet observationSheet = workbook.createSheet("Observation"); //this map is for mapping the columns names of the dataset to their column index in the excel sheet Map<String, Integer> columnsMap = new HashMap<String, Integer>(); int observationSheetColumnIndex = 0; //write the details on the first sheet - description //get the study first Study study = null; try { study = this.studyDataManager.getStudyByID(this.studyId); } catch (MiddlewareQueryException ex) { throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex); } if(study != null) { //get the needed study details String name = study.getName(); String title = study.getTitle(); Integer pmkey = study.getProjectKey(); String objective = study.getObjective(); Integer startDate = study.getStartDate(); Integer endDate = study.getEndDate(); String type = study.getType(); //add to the sheet Row row0 = descriptionSheet.createRow(0); row0.createCell(0).setCellValue("STUDY"); row0.createCell(1).setCellValue(name); Row row1 = descriptionSheet.createRow(1); row1.createCell(0).setCellValue("TITLE"); row1.createCell(1).setCellValue(title); Row row2 = descriptionSheet.createRow(2); row2.createCell(0).setCellValue("PMKEY"); Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); pmKeyCell.setCellValue(pmkey); Row row3 = descriptionSheet.createRow(3); row3.createCell(0).setCellValue("OBJECTIVE"); row3.createCell(1).setCellValue(objective); Row row4 = descriptionSheet.createRow(4); row4.createCell(0).setCellValue("START DATE"); Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); startDateCell.setCellValue(startDate); Row row5 = descriptionSheet.createRow(5); row5.createCell(0).setCellValue("END DATE"); Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); endDateCell.setCellValue(endDate); Row row6 = descriptionSheet.createRow(6); row6.createCell(0).setCellValue("STUDY TYPE"); row6.createCell(1).setCellValue(type); //merge cells for the study details for(int ctr = 0; ctr < 7; ctr++) { descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7)); } //empty row Row row7 = descriptionSheet.createRow(7); //row with headings for condition list Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex); conditionHeaderRow.createCell(0).setCellValue("CONDITION"); conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION"); conditionHeaderRow.createCell(2).setCellValue("PROPERTY"); conditionHeaderRow.createCell(3).setCellValue("SCALE"); conditionHeaderRow.createCell(4).setCellValue("METHOD"); conditionHeaderRow.createCell(5).setCellValue("DATA TYPE"); conditionHeaderRow.createCell(6).setCellValue("VALUE"); conditionHeaderRow.createCell(7).setCellValue("LABEL"); //get the conditions and their details List<DatasetCondition> conditions = new ArrayList<DatasetCondition>(); try { conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new 
DatasetExporterException("Error with getting conditions of study - " + name + ", representation - " + this.representationId, ex); } int conditionRowIndex = this.conditionListHeaderRowIndex + 1; for(DatasetCondition condition : conditions) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId()); String conditionName = condition.getName(); if(conditionName != null) { conditionName = conditionName.trim(); } String conditionType = condition.getType(); String conditionLabel = ""; try { conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId()); } catch (MiddlewareQueryException ex) { conditionLabel = ""; } Row conditionRow = descriptionSheet.createRow(conditionRowIndex); conditionRow.createCell(0).setCellValue(conditionName); conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); conditionRow.createCell(5).setCellValue(conditionType); if(conditionType.equals("N")) { Double thevalue = (Double) condition.getValue(); conditionRow.createCell(6).setCellValue(thevalue); } else { conditionRow.createCell(6).setCellValue(condition.getValue().toString()); } conditionRow.createCell(7).setCellValue(conditionLabel); //add entry to columns mapping //we set the value to -1 to signify that this should not be a column in the observation sheet if(!conditionName.equals("STUDY")) { columnsMap.put(conditionName, Integer.valueOf(-1)); } conditionRowIndex++; } //empty row Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex); //row with headings for factor list int factorRowHeaderIndex = conditionRowIndex + 1; Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex); factorHeaderRow.createCell(0).setCellValue("FACTOR"); factorHeaderRow.createCell(1).setCellValue("DESCRIPTION"); factorHeaderRow.createCell(2).setCellValue("PROPERTY"); factorHeaderRow.createCell(3).setCellValue("SCALE"); factorHeaderRow.createCell(4).setCellValue("METHOD"); factorHeaderRow.createCell(5).setCellValue("DATA TYPE"); factorHeaderRow.createCell(6).setCellValue(""); factorHeaderRow.createCell(7).setCellValue("LABEL"); //get the factors and their details List<Factor> factors = new ArrayList<Factor>(); try { factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting factors of study - " + name + ", representation - " + this.representationId, ex); } int factorRowIndex = factorRowHeaderIndex + 1; for(Factor factor : factors) { String dataType = factor.getDataType(); String factorName = factor.getName(); if(factorName != null) { factorName = factorName.trim(); } //check if factor is already written as a condition Integer temp = columnsMap.get(factorName); if(temp == null && !factorName.equals("STUDY")) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId()); String factorLabel = ""; try { factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId()); } catch (MiddlewareQueryException ex) { factorLabel = ""; } Row factorRow = descriptionSheet.createRow(factorRowIndex); factorRow.createCell(0).setCellValue(factorName); factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); 
factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); factorRow.createCell(5).setCellValue(dataType); factorRow.createCell(6).setCellValue(""); factorRow.createCell(7).setCellValue(factorLabel); //add entry to columns mapping columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; factorRowIndex++; } } //empty row Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex); //row with headings for variate list int variateHeaderRowIndex = factorRowIndex + 1; Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex); variateHeaderRow.createCell(0).setCellValue("VARIATE"); variateHeaderRow.createCell(1).setCellValue("DESCRIPTION"); variateHeaderRow.createCell(2).setCellValue("PROPERTY"); variateHeaderRow.createCell(3).setCellValue("SCALE"); variateHeaderRow.createCell(4).setCellValue("METHOD"); variateHeaderRow.createCell(5).setCellValue("DATA TYPE"); //get the variates and their details List<Variate> variates = new ArrayList<Variate>(); try { variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting variates of study - " + name + ", representation - " + this.representationId, ex); } int variateRowIndex = variateHeaderRowIndex + 1; for(Variate variate : variates) { String dataType = variate.getDataType(); String variateName = variate.getName(); if(variateName != null) { variateName = variateName.trim(); } String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId()); Row variateRow = descriptionSheet.createRow(variateRowIndex); variateRow.createCell(0).setCellValue(variateName); variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); variateRow.createCell(5).setCellValue(dataType); //add entry to columns mapping columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; variateRowIndex++; } //populate the measurements sheet //establish the columns of the dataset first Row datasetHeaderRow = observationSheet.createRow(0); for(String columnName : columnsMap.keySet()) { short columnIndex = columnsMap.get(columnName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); cell.setCellValue(columnName); } } //then work with the data //do it by 50 rows at a time int pageSize = 50; long totalNumberOfRows = 0; int sheetRowIndex = 1; try { totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId); } catch(Exception ex) { throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name + ", representation - " + this.representationId, ex); } for(int start = 0; start < totalNumberOfRows; start = start + pageSize) { List<Integer> ounitIds = new ArrayList<Integer>(); try { //first get the ounit ids, these are the ids of the rows in the dataset ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize)); } catch(Exception ex) { throw new 
DatasetExporterException("Error with getting ounit ids of study - " + name + ", representation - " + this.representationId, ex); } if(!ounitIds.isEmpty()) { //map each ounit id into a row in the observation sheet Map<Integer, Row> rowMap = new HashMap<Integer, Row>(); for(Integer ounitId : ounitIds) { Row row = observationSheet.createRow(sheetRowIndex); sheetRowIndex++; rowMap.put(ounitId, row); } //then get the data for each of the observation units (ounits) List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>(); try { charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character level values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterLevelElement elem : charLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = value.trim(); } cell.setCellValue(value); } } } } List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>(); try { numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric level values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericLevelElement elem : numericLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); cell.setCellValue(elem.getValue()); } } } } List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>(); try { charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character data values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterDataElement elem : charDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName = elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = value.trim(); } cell.setCellValue(value); } } List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>(); try { numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric data values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericDataElement elem : numericDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName 
= elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); cell.setCellValue(elem.getValue()); } } } } } //adjust column widths of description sheet to fit contents for(int ctr = 0; ctr < 8; ctr++) { if(ctr != 1) { descriptionSheet.autoSizeColumn(ctr); } } //adjust column widths of observation sheet to fit contents for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) { observationSheet.autoSizeColumn(ctr); } try { //write the excel file FileOutputStream fileOutputStream = new FileOutputStream(filename); workbook.write(fileOutputStream); fileOutputStream.close(); return fileOutputStream; } catch(Exception ex) { throw new DatasetExporterException("Error with writing to: " + filename, ex); } }
public FileOutputStream exportToFieldBookExcel(String filename) throws DatasetExporterException { //create workbook Workbook workbook = new HSSFWorkbook(); CellStyle cellStyle = workbook.createCellStyle(); CellStyle cellStyleForObservationSheet = workbook.createCellStyle(); //create two sheets, one for description and nother for measurements Sheet descriptionSheet = workbook.createSheet("Description"); Sheet observationSheet = workbook.createSheet("Observation"); //this map is for mapping the columns names of the dataset to their column index in the excel sheet Map<String, Integer> columnsMap = new HashMap<String, Integer>(); int observationSheetColumnIndex = 0; //write the details on the first sheet - description //get the study first Study study = null; try { study = this.studyDataManager.getStudyByID(this.studyId); } catch (MiddlewareQueryException ex) { throw new DatasetExporterException("Error with getting Study with id: " + this.studyId, ex); } if(study != null) { //get the needed study details String name = study.getName(); String title = study.getTitle(); Integer pmkey = study.getProjectKey(); String objective = study.getObjective(); Integer startDate = study.getStartDate(); Integer endDate = study.getEndDate(); String type = study.getType(); //add to the sheet Row row0 = descriptionSheet.createRow(0); row0.createCell(0).setCellValue("STUDY"); row0.createCell(1).setCellValue(name); Row row1 = descriptionSheet.createRow(1); row1.createCell(0).setCellValue("TITLE"); row1.createCell(1).setCellValue(title); Row row2 = descriptionSheet.createRow(2); row2.createCell(0).setCellValue("PMKEY"); Cell pmKeyCell = PoiUtil.createCell(cellStyle, row2, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); pmKeyCell.setCellValue(pmkey); Row row3 = descriptionSheet.createRow(3); row3.createCell(0).setCellValue("OBJECTIVE"); row3.createCell(1).setCellValue(objective); Row row4 = descriptionSheet.createRow(4); row4.createCell(0).setCellValue("START DATE"); Cell startDateCell = PoiUtil.createCell(cellStyle, row4, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); startDateCell.setCellValue(startDate); Row row5 = descriptionSheet.createRow(5); row5.createCell(0).setCellValue("END DATE"); Cell endDateCell = PoiUtil.createCell(cellStyle, row5, (short) 1, CellStyle.ALIGN_LEFT, CellStyle.ALIGN_JUSTIFY); endDateCell.setCellValue(endDate); Row row6 = descriptionSheet.createRow(6); row6.createCell(0).setCellValue("STUDY TYPE"); row6.createCell(1).setCellValue(type); //merge cells for the study details for(int ctr = 0; ctr < 7; ctr++) { descriptionSheet.addMergedRegion(new CellRangeAddress(ctr, ctr, 1, 7)); } //empty row Row row7 = descriptionSheet.createRow(7); //row with headings for condition list Row conditionHeaderRow = descriptionSheet.createRow(this.conditionListHeaderRowIndex); conditionHeaderRow.createCell(0).setCellValue("CONDITION"); conditionHeaderRow.createCell(1).setCellValue("DESCRIPTION"); conditionHeaderRow.createCell(2).setCellValue("PROPERTY"); conditionHeaderRow.createCell(3).setCellValue("SCALE"); conditionHeaderRow.createCell(4).setCellValue("METHOD"); conditionHeaderRow.createCell(5).setCellValue("DATA TYPE"); conditionHeaderRow.createCell(6).setCellValue("VALUE"); conditionHeaderRow.createCell(7).setCellValue("LABEL"); //get the conditions and their details List<DatasetCondition> conditions = new ArrayList<DatasetCondition>(); try { conditions.addAll(this.studyDataManager.getConditionsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new 
DatasetExporterException("Error with getting conditions of study - " + name + ", representation - " + this.representationId, ex); } int conditionRowIndex = this.conditionListHeaderRowIndex + 1; for(DatasetCondition condition : conditions) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(condition.getTraitId(), condition.getScaleId(), condition.getMethodId()); String conditionName = condition.getName(); if(conditionName != null) { conditionName = conditionName.trim(); } String conditionType = condition.getType(); String conditionLabel = ""; try { conditionLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(condition.getFactorId()); } catch (MiddlewareQueryException ex) { conditionLabel = ""; } Row conditionRow = descriptionSheet.createRow(conditionRowIndex); conditionRow.createCell(0).setCellValue(conditionName); conditionRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); conditionRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); conditionRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); conditionRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); conditionRow.createCell(5).setCellValue(conditionType); if(conditionType.equals("N")) { Double thevalue = (Double) condition.getValue(); conditionRow.createCell(6).setCellValue(thevalue); } else { conditionRow.createCell(6).setCellValue(condition.getValue().toString()); } conditionRow.createCell(7).setCellValue(conditionLabel); //add entry to columns mapping //we set the value to -1 to signify that this should not be a column in the observation sheet if(!conditionName.equals("STUDY")) { columnsMap.put(conditionName, Integer.valueOf(-1)); } conditionRowIndex++; } //empty row Row emptyRowBeforeFactors = descriptionSheet.createRow(conditionRowIndex); //row with headings for factor list int factorRowHeaderIndex = conditionRowIndex + 1; Row factorHeaderRow = descriptionSheet.createRow(factorRowHeaderIndex); factorHeaderRow.createCell(0).setCellValue("FACTOR"); factorHeaderRow.createCell(1).setCellValue("DESCRIPTION"); factorHeaderRow.createCell(2).setCellValue("PROPERTY"); factorHeaderRow.createCell(3).setCellValue("SCALE"); factorHeaderRow.createCell(4).setCellValue("METHOD"); factorHeaderRow.createCell(5).setCellValue("DATA TYPE"); factorHeaderRow.createCell(6).setCellValue(""); factorHeaderRow.createCell(7).setCellValue("LABEL"); //get the factors and their details List<Factor> factors = new ArrayList<Factor>(); try { factors.addAll(this.studyDataManager.getFactorsByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting factors of study - " + name + ", representation - " + this.representationId, ex); } int factorRowIndex = factorRowHeaderIndex + 1; for(Factor factor : factors) { String dataType = factor.getDataType(); String factorName = factor.getName(); if(factorName != null) { factorName = factorName.trim(); } //check if factor is already written as a condition Integer temp = columnsMap.get(factorName); if(temp == null && !factorName.equals("STUDY")) { String traitScaleMethodInfo[] = getTraitScaleMethodInfo(factor.getTraitId(), factor.getScaleId(), factor.getMethodId()); String factorLabel = ""; try { factorLabel = this.studyDataManager.getMainLabelOfFactorByFactorId(factor.getFactorId()); } catch (MiddlewareQueryException ex) { factorLabel = ""; } Row factorRow = descriptionSheet.createRow(factorRowIndex); factorRow.createCell(0).setCellValue(factorName); factorRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); 
factorRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); factorRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); factorRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); factorRow.createCell(5).setCellValue(dataType); factorRow.createCell(6).setCellValue(""); factorRow.createCell(7).setCellValue(factorLabel); //add entry to columns mapping columnsMap.put(factorName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; factorRowIndex++; } } //empty row Row emptyRowBeforeVariate = descriptionSheet.createRow(factorRowIndex); //row with headings for variate list int variateHeaderRowIndex = factorRowIndex + 1; Row variateHeaderRow = descriptionSheet.createRow(variateHeaderRowIndex); variateHeaderRow.createCell(0).setCellValue("VARIATE"); variateHeaderRow.createCell(1).setCellValue("DESCRIPTION"); variateHeaderRow.createCell(2).setCellValue("PROPERTY"); variateHeaderRow.createCell(3).setCellValue("SCALE"); variateHeaderRow.createCell(4).setCellValue("METHOD"); variateHeaderRow.createCell(5).setCellValue("DATA TYPE"); //get the variates and their details List<Variate> variates = new ArrayList<Variate>(); try { variates.addAll(this.studyDataManager.getVariatesByRepresentationId(this.representationId)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting variates of study - " + name + ", representation - " + this.representationId, ex); } int variateRowIndex = variateHeaderRowIndex + 1; for(Variate variate : variates) { String dataType = variate.getDataType(); String variateName = variate.getName(); if(variateName != null) { variateName = variateName.trim(); } String traitScaleMethodInfo[] = getTraitScaleMethodInfo(variate.getTraitId(), variate.getScaleId(), variate.getMethodId()); Row variateRow = descriptionSheet.createRow(variateRowIndex); variateRow.createCell(0).setCellValue(variateName); variateRow.createCell(1).setCellValue(traitScaleMethodInfo[0]); variateRow.createCell(2).setCellValue(traitScaleMethodInfo[1]); variateRow.createCell(3).setCellValue(traitScaleMethodInfo[2]); variateRow.createCell(4).setCellValue(traitScaleMethodInfo[3]); variateRow.createCell(5).setCellValue(dataType); //add entry to columns mapping columnsMap.put(variateName, Integer.valueOf(observationSheetColumnIndex)); observationSheetColumnIndex++; variateRowIndex++; } //populate the measurements sheet //establish the columns of the dataset first Row datasetHeaderRow = observationSheet.createRow(0); for(String columnName : columnsMap.keySet()) { short columnIndex = columnsMap.get(columnName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, datasetHeaderRow, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); cell.setCellValue(columnName); } } //then work with the data //do it by 50 rows at a time int pageSize = 50; long totalNumberOfRows = 0; int sheetRowIndex = 1; try { totalNumberOfRows = this.studyDataManager.countOunitIDsByRepresentationId(this.representationId); } catch(Exception ex) { throw new DatasetExporterException("Error with getting count of ounit ids for study - " + name + ", representation - " + this.representationId, ex); } for(int start = 0; start < totalNumberOfRows; start = start + pageSize) { List<Integer> ounitIds = new ArrayList<Integer>(); try { //first get the ounit ids, these are the ids of the rows in the dataset ounitIds.addAll(this.studyDataManager.getOunitIDsByRepresentationId(this.representationId, start, pageSize)); } catch(Exception ex) { throw new 
DatasetExporterException("Error with getting ounit ids of study - " + name + ", representation - " + this.representationId, ex); } if(!ounitIds.isEmpty()) { //map each ounit id into a row in the observation sheet Map<Integer, Row> rowMap = new HashMap<Integer, Row>(); for(Integer ounitId : ounitIds) { Row row = observationSheet.createRow(sheetRowIndex); sheetRowIndex++; rowMap.put(ounitId, row); } //then get the data for each of the observation units (ounits) List<CharacterLevelElement> charLevels = new ArrayList<CharacterLevelElement>(); try { charLevels.addAll(this.studyDataManager.getCharacterLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character level values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterLevelElement elem : charLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = value.trim(); } cell.setCellValue(value); } } } } List<NumericLevelElement> numericLevels = new ArrayList<NumericLevelElement>(); try { numericLevels.addAll(this.studyDataManager.getNumericLevelValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric level values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericLevelElement elem : numericLevels) { String factorName = elem.getFactorName(); if(factorName != null) { factorName = factorName.trim(); } if(!factorName.equals("STUDY")) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { short columnIndex = columnsMap.get(factorName).shortValue(); if(columnIndex >= 0) { Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); double elemValue = 0; if(elem.getValue() != null){ elemValue = elem.getValue().doubleValue(); } cell.setCellValue(elemValue); } } } } List<CharacterDataElement> charDatas = new ArrayList<CharacterDataElement>(); try { charDatas.addAll(this.studyDataManager.getCharacterDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting character data values of study - " + name + ", representation - " + this.representationId, ex); } for(CharacterDataElement elem : charDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName = elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); String value = elem.getValue(); if(value != null) { value = value.trim(); } cell.setCellValue(value); } } List<NumericDataElement> numericDatas = new ArrayList<NumericDataElement>(); try { numericDatas.addAll(this.studyDataManager.getNumericDataValuesByOunitIdList(ounitIds)); } catch(Exception ex) { throw new DatasetExporterException("Error with getting numeric data values of study - " + name + ", representation - " + this.representationId, ex); } for(NumericDataElement elem : 
numericDatas) { Row row = rowMap.get(elem.getOunitId()); if(row != null) { String variateName = elem.getVariateName(); if(variateName != null) { variateName = variateName.trim(); } short columnIndex = columnsMap.get(variateName).shortValue(); Cell cell = PoiUtil.createCell(cellStyleForObservationSheet, row, columnIndex, CellStyle.ALIGN_CENTER, CellStyle.ALIGN_CENTER); double elemValue = 0; if(elem.getValue() != null){ elemValue = elem.getValue().doubleValue(); } cell.setCellValue(elemValue); } } } } } //adjust column widths of description sheet to fit contents for(int ctr = 0; ctr < 8; ctr++) { if(ctr != 1) { descriptionSheet.autoSizeColumn(ctr); } } //adjust column widths of observation sheet to fit contents for(int ctr = 0; ctr < observationSheetColumnIndex; ctr++) { observationSheet.autoSizeColumn(ctr); } try { //write the excel file FileOutputStream fileOutputStream = new FileOutputStream(filename); workbook.write(fileOutputStream); fileOutputStream.close(); return fileOutputStream; } catch(Exception ex) { throw new DatasetExporterException("Error with writing to: " + filename, ex); } }
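The export above walks the observation units in fixed pages of 50 ids per query rather than materialising the whole dataset at once: countOunitIDsByRepresentationId supplies the total, and each iteration fetches one page of ounit ids, maps them to sheet rows, and fills in the character and numeric values for that page. A minimal sketch of that paging pattern, assuming nothing beyond the page size visible in the code; the Pager interface is a hypothetical stand-in for studyDataManager.getOunitIDsByRepresentationId:

import java.util.ArrayList;
import java.util.List;

public class PagingSketch {
    private static final int PAGE_SIZE = 50; // matches the exporter's page size

    /** Hypothetical stand-in for getOunitIDsByRepresentationId(id, start, size). */
    interface Pager {
        List<Integer> fetchPage(int start, int size);
    }

    /** Walks [0, totalRows) in fixed-size pages and collects every id. */
    static List<Integer> collectAll(long totalRows, Pager pager) {
        List<Integer> all = new ArrayList<Integer>();
        for (int start = 0; start < totalRows; start += PAGE_SIZE) {
            // the final page may hold fewer than PAGE_SIZE ids
            all.addAll(pager.fetchPage(start, PAGE_SIZE));
        }
        return all;
    }
}

One detail worth noting in the original: the method closes fileOutputStream and then returns it, so callers receive a stream that can no longer be written to.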
diff --git a/src/java/com/trekker/controller/EditMedia.java b/src/java/com/trekker/controller/EditMedia.java index f0ff368..6e7d386 100644 --- a/src/java/com/trekker/controller/EditMedia.java +++ b/src/java/com/trekker/controller/EditMedia.java @@ -1,86 +1,87 @@ package com.trekker.controller; import com.trekker.model.Media; import com.trekker.model.Trip; import com.trekker.model.User; import com.trekker.service.MediaService; import com.trekker.service.TripService; import com.trekker.service.UserService; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Map; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.enterprise.context.RequestScoped; import javax.faces.bean.ManagedBean; import javax.faces.bean.ManagedProperty; import org.omnifaces.util.Faces; import org.omnifaces.util.Messages; @ManagedBean @RequestScoped public class EditMedia { @ManagedProperty(value="#{param.id}") private int id; private Map<Integer, Boolean> checked = new HashMap<Integer, Boolean>(); private Trip trip; private User user; @EJB private TripService tripService; @EJB private MediaService mediaService; @EJB private UserService userService; @PostConstruct public void init() { trip = tripService.find(id); user = userService.currentUser(); } public int getId() { return id; } public void setId(int id) { this.id = id; } public Trip getTrip() { return trip; } public Map<Integer, Boolean> getChecked() { return checked; } public void delete() throws IOException { for(Integer id : checked.keySet()) { if (checked.get(id)) { Media mediaToDelete = mediaService.find(id); trip.getMediaCollection().remove(mediaToDelete); mediaService.delete(mediaToDelete); - String path = "uploads/" + user.getId() + "/" + this.id + "/"; + String instanceRoot = System.getProperty("com.sun.aas.instanceRoot"); + String path = instanceRoot + "uploads/" + user.getId() + "/" + this.id + "/"; File mediaFile = new File(path + mediaToDelete.getFilename()); File mediaThumbFile = new File(path + "t/" + mediaToDelete.getFilename()); mediaFile.delete(); mediaThumbFile.delete(); } } tripService.update(trip); Messages.addFlashGlobalInfo("<div class=\"alert alert-success\">Media successfully updated</div>"); Faces.redirect("editmedia.xhtml?id=%s", Integer.toString(this.id)); } public void submit() throws IOException { tripService.update(trip); Messages.addFlashGlobalInfo("<div class=\"alert alert-success\">Media successfully updated</div>"); Faces.redirect("editmedia.xhtml?id=%s", Integer.toString(this.id)); } }
true
true
public void delete() throws IOException { for(Integer id : checked.keySet()) { if (checked.get(id)) { Media mediaToDelete = mediaService.find(id); trip.getMediaCollection().remove(mediaToDelete); mediaService.delete(mediaToDelete); String path = "uploads/" + user.getId() + "/" + this.id + "/"; File mediaFile = new File(path + mediaToDelete.getFilename()); File mediaThumbFile = new File(path + "t/" + mediaToDelete.getFilename()); mediaFile.delete(); mediaThumbFile.delete(); } } tripService.update(trip); Messages.addFlashGlobalInfo("<div class=\"alert alert-success\">Media successfully updated</div>"); Faces.redirect("editmedia.xhtml?id=%s", Integer.toString(this.id)); }
public void delete() throws IOException { for(Integer id : checked.keySet()) { if (checked.get(id)) { Media mediaToDelete = mediaService.find(id); trip.getMediaCollection().remove(mediaToDelete); mediaService.delete(mediaToDelete); String instanceRoot = System.getProperty("com.sun.aas.instanceRoot"); String path = instanceRoot + "uploads/" + user.getId() + "/" + this.id + "/"; File mediaFile = new File(path + mediaToDelete.getFilename()); File mediaThumbFile = new File(path + "t/" + mediaToDelete.getFilename()); mediaFile.delete(); mediaThumbFile.delete(); } } tripService.update(trip); Messages.addFlashGlobalInfo("<div class=\"alert alert-success\">Media successfully updated</div>"); Faces.redirect("editmedia.xhtml?id=%s", Integer.toString(this.id)); }
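The fix in this record anchors the uploads directory to the GlassFish instance root (the com.sun.aas.instanceRoot system property) instead of resolving "uploads/..." against the JVM's working directory, so the File.delete calls actually target the files the upload code wrote. A small sketch of that resolution, assuming a GlassFish deployment; resolveUploadDir and its parameters are illustrative, mirroring user.getId() and this.id in the controller:

import java.io.File;

public class UploadPathSketch {
    /** Resolves the uploads directory against the GlassFish instance root. */
    static File resolveUploadDir(int userId, int tripId) {
        // set by GlassFish at startup; default to "." so the sketch runs anywhere
        String instanceRoot = System.getProperty("com.sun.aas.instanceRoot", ".");
        return new File(instanceRoot, "uploads/" + userId + "/" + tripId);
    }

    public static void main(String[] args) {
        System.out.println(resolveUploadDir(42, 7).getAbsolutePath());
    }
}

Note that the patched line concatenates instanceRoot and "uploads/" with no separator between them; unless the property value happens to end with a file separator, the resulting path is still wrong, which is why the sketch joins the two with the two-argument File constructor instead.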
diff --git a/src/ServerThread.java b/src/ServerThread.java index 048a634..984e44a 100644 --- a/src/ServerThread.java +++ b/src/ServerThread.java @@ -1,195 +1,198 @@ import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.net.InetAddress; import java.net.Socket; import java.util.ArrayList; import java.util.Hashtable; public abstract class ServerThread extends Thread{ private Socket socket; private ObjectInputStream inputStream; private ObjectOutputStream outputStream; private int currentAcceptNum; //this containts the last accepted ballot number private String acceptValue; //this contains the current value known to this server (what was last accepted) private Server parentServer; public ServerThread(Server psrv, Socket skt){ this.socket = skt; this.acceptValue = null; currentAcceptNum = 0; parentServer = psrv; try { // create output first outputStream = new ObjectOutputStream(socket.getOutputStream()); //needs this or it wont work inputStream = new ObjectInputStream(socket.getInputStream()); } catch (IOException e) { System.out.println("Error Creating Streams: " + e); return; } } public void run() { //check for message client read/append // if read, return values // if append start 2pc protocol then paxos protocol ServerMessage msg; try { msg = (ServerMessage) inputStream.readObject(); System.out.println("RECIEVED:" + msg); switch (msg.getType()) { case ServerMessage.CLIENT_READ: //read the file case ServerMessage.CLIENT_APPEND: //create a new ballot by incrementing current ballot by 1 parentServer.setCurrentBallotNumber(parentServer.getCurrentBallotNumber()+1); ServerMessage ballot = new ServerMessage(ServerMessage.PAXOS_PREPARE, parentServer.getCurrentBallotNumber() + "," + parentServer.getProcessId(), socket.getLocalAddress().getHostAddress() ); System.out.println("My address:" + socket.getLocalAddress().getHostAddress() ); //send to all other stat or grade servers for (int i = 0; i < Server.StatServers.size(); i++){ - System.out.println("SENDING: PAXOS_PREPARE to " + Server.StatServers.get(i) + " MSG:" + ballot); + System.out.println("SENDING: PAXOS_PREPARE to " + Server.StatServers.get(i)); sendMessage(Server.StatServers.get(i), 3000, ballot); } break; case ServerMessage.PAXOS_PREPARE: //contents of the incoming prepare message are ballotnum,processesid. int proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); int proposedprocessID = Integer.parseInt(msg.getMessage().split(",")[1]); //for tie breakers //if the incoming ballot is newer than my ballot, update my ballot and send an ack, otherwise the incoming //ballot is old and we can ignore it if (proposedBallot > parentServer.getCurrentBallotNumber() || (proposedBallot == parentServer.getCurrentBallotNumber() && proposedprocessID > parentServer.getProcessId()) ){ parentServer.setCurrentBallotNumber(proposedBallot); //send the ack message with the current ballot, the last accepted ballot, the current value. 
ServerMessage ackMessage = new ServerMessage(ServerMessage.PAXOS_ACK, parentServer.getCurrentBallotNumber() + ","+ currentAcceptNum + "," + this.acceptValue, socket.getInetAddress().getHostName() ); - System.out.println("SENDING: PAXOS_ACK to " + socket.getInetAddress().getHostName() + " MSG:" + ackMessage); + System.out.println("SENDING: PAXOS_ACK to " + socket.getInetAddress().getHostName()); sendMessage(socket.getInetAddress().getHostName(), 3000, ackMessage); } break; case ServerMessage.PAXOS_ACK: //contents of the incoming ack message are current ballot, the last accepted ballot, the current value proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); Hashtable<Integer,ArrayList<ServerMessage> > hash = parentServer.getMessageHash(); ArrayList<ServerMessage> ballot_msgs = hash.get(proposedBallot); //add the incoming message to a collection of responses for this ballot + if (ballot_msgs == null){ + ballot_msgs = new ArrayList<ServerMessage>(); + } ballot_msgs.add(msg); hash.put(proposedBallot, ballot_msgs); parentServer.setMessageHash(hash); //check to see if we have gotten a majority of responses... if not, do nothing if(ballot_msgs.size() > Server.StatServers.size()/2) { boolean all_null = true; int highest_accept_num = -1; String highest_accept_val = null; //if we have, loop through the acks to see if we have an initial value. for (int i = 0; i < ballot_msgs.size(); i++){ ServerMessage temp_msg = (ServerMessage)ballot_msgs.get(i); int proposedacceptnum = Integer.parseInt(temp_msg.getMessage().split(",")[1]); String proposedVal = temp_msg.getMessage().split(",")[2]; if (proposedacceptnum > highest_accept_num ) { highest_accept_num = proposedacceptnum; highest_accept_val = proposedVal; } if (!proposedVal.equals(null)){ all_null = false; } } if (all_null) { //write line of grades / stats into file parentServer.appendFile("TEST WRITING SHIT"); //tell all other servers to accept my values for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ this.acceptValue ,socket.getLocalAddress().getHostAddress() ); - System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) + " MSG:" + acceptMsg); + System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) ); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } else { for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ highest_accept_val ,socket.getLocalAddress().getHostAddress() ); - System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) + " MSG:" + acceptMsg); + System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) ); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } } break; case ServerMessage.PAXOS_DECIDE: //do something case ServerMessage.PAXOS_ACCEPT: //do something case ServerMessage.TWOPHASE_VOTE_REQUEST: //reply yes or no case ServerMessage.TWOPHASE_VOTE_YES: //tally yes vote case ServerMessage.TWOPHASE_VOTE_NO: //send abort case ServerMessage.TWOPHASE_ABORT: //cancel the write changes case ServerMessage.TWOPHASE_COMMIT: //write any changes } } catch (IOException e) { System.out.println(" Exception reading Streams: " + e); System.exit(1); } catch(ClassNotFoundException ex) { //this shouldnt be a problem, only ServerMessages should be sent. 
System.exit(1); } } private void sendMessage(String host, int port, ServerMessage msg){ try { InetAddress address = InetAddress.getByName(host); System.out.print("Connecting to Server..."); // open socket, then input and output streams to it Socket socket = new Socket(address,port); ObjectOutputStream to_server = new ObjectOutputStream(socket.getOutputStream()); System.out.println("Connected"); // send command to server, then read and print lines until // the server closes the connection System.out.print("Sending Message to Server..."); to_server.writeObject(msg); to_server.flush(); System.out.println("Sent: " + msg); } catch (IOException e){ System.out.println("Server failed sending message:" + e.getMessage()); } } }
false
true
public void run() { //check for message client read/append // if read, return values // if append start 2pc protocol then paxos protocol ServerMessage msg; try { msg = (ServerMessage) inputStream.readObject(); System.out.println("RECIEVED:" + msg); switch (msg.getType()) { case ServerMessage.CLIENT_READ: //read the file case ServerMessage.CLIENT_APPEND: //create a new ballot by incrementing current ballot by 1 parentServer.setCurrentBallotNumber(parentServer.getCurrentBallotNumber()+1); ServerMessage ballot = new ServerMessage(ServerMessage.PAXOS_PREPARE, parentServer.getCurrentBallotNumber() + "," + parentServer.getProcessId(), socket.getLocalAddress().getHostAddress() ); System.out.println("My address:" + socket.getLocalAddress().getHostAddress() ); //send to all other stat or grade servers for (int i = 0; i < Server.StatServers.size(); i++){ System.out.println("SENDING: PAXOS_PREPARE to " + Server.StatServers.get(i) + " MSG:" + ballot); sendMessage(Server.StatServers.get(i), 3000, ballot); } break; case ServerMessage.PAXOS_PREPARE: //contents of the incoming prepare message are ballotnum,processesid. int proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); int proposedprocessID = Integer.parseInt(msg.getMessage().split(",")[1]); //for tie breakers //if the incoming ballot is newer than my ballot, update my ballot and send an ack, otherwise the incoming //ballot is old and we can ignore it if (proposedBallot > parentServer.getCurrentBallotNumber() || (proposedBallot == parentServer.getCurrentBallotNumber() && proposedprocessID > parentServer.getProcessId()) ){ parentServer.setCurrentBallotNumber(proposedBallot); //send the ack message with the current ballot, the last accepted ballot, the current value. ServerMessage ackMessage = new ServerMessage(ServerMessage.PAXOS_ACK, parentServer.getCurrentBallotNumber() + ","+ currentAcceptNum + "," + this.acceptValue, socket.getInetAddress().getHostName() ); System.out.println("SENDING: PAXOS_ACK to " + socket.getInetAddress().getHostName() + " MSG:" + ackMessage); sendMessage(socket.getInetAddress().getHostName(), 3000, ackMessage); } break; case ServerMessage.PAXOS_ACK: //contents of the incoming ack message are current ballot, the last accepted ballot, the current value proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); Hashtable<Integer,ArrayList<ServerMessage> > hash = parentServer.getMessageHash(); ArrayList<ServerMessage> ballot_msgs = hash.get(proposedBallot); //add the incoming message to a collection of responses for this ballot ballot_msgs.add(msg); hash.put(proposedBallot, ballot_msgs); parentServer.setMessageHash(hash); //check to see if we have gotten a majority of responses... if not, do nothing if(ballot_msgs.size() > Server.StatServers.size()/2) { boolean all_null = true; int highest_accept_num = -1; String highest_accept_val = null; //if we have, loop through the acks to see if we have an initial value. 
for (int i = 0; i < ballot_msgs.size(); i++){ ServerMessage temp_msg = (ServerMessage)ballot_msgs.get(i); int proposedacceptnum = Integer.parseInt(temp_msg.getMessage().split(",")[1]); String proposedVal = temp_msg.getMessage().split(",")[2]; if (proposedacceptnum > highest_accept_num ) { highest_accept_num = proposedacceptnum; highest_accept_val = proposedVal; } if (!proposedVal.equals(null)){ all_null = false; } } if (all_null) { //write line of grades / stats into file parentServer.appendFile("TEST WRITING SHIT"); //tell all other servers to accept my values for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ this.acceptValue ,socket.getLocalAddress().getHostAddress() ); System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) + " MSG:" + acceptMsg); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } else { for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ highest_accept_val ,socket.getLocalAddress().getHostAddress() ); System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) + " MSG:" + acceptMsg); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } } break; case ServerMessage.PAXOS_DECIDE: //do something case ServerMessage.PAXOS_ACCEPT: //do something case ServerMessage.TWOPHASE_VOTE_REQUEST: //reply yes or no case ServerMessage.TWOPHASE_VOTE_YES: //tally yes vote case ServerMessage.TWOPHASE_VOTE_NO: //send abort case ServerMessage.TWOPHASE_ABORT: //cancel the write changes case ServerMessage.TWOPHASE_COMMIT: //write any changes } } catch (IOException e) { System.out.println(" Exception reading Streams: " + e); System.exit(1); } catch(ClassNotFoundException ex) { //this shouldnt be a problem, only ServerMessages should be sent. System.exit(1); } }
public void run() { //check for message client read/append // if read, return values // if append start 2pc protocol then paxos protocol ServerMessage msg; try { msg = (ServerMessage) inputStream.readObject(); System.out.println("RECIEVED:" + msg); switch (msg.getType()) { case ServerMessage.CLIENT_READ: //read the file case ServerMessage.CLIENT_APPEND: //create a new ballot by incrementing current ballot by 1 parentServer.setCurrentBallotNumber(parentServer.getCurrentBallotNumber()+1); ServerMessage ballot = new ServerMessage(ServerMessage.PAXOS_PREPARE, parentServer.getCurrentBallotNumber() + "," + parentServer.getProcessId(), socket.getLocalAddress().getHostAddress() ); System.out.println("My address:" + socket.getLocalAddress().getHostAddress() ); //send to all other stat or grade servers for (int i = 0; i < Server.StatServers.size(); i++){ System.out.println("SENDING: PAXOS_PREPARE to " + Server.StatServers.get(i)); sendMessage(Server.StatServers.get(i), 3000, ballot); } break; case ServerMessage.PAXOS_PREPARE: //contents of the incoming prepare message are ballotnum,processesid. int proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); int proposedprocessID = Integer.parseInt(msg.getMessage().split(",")[1]); //for tie breakers //if the incoming ballot is newer than my ballot, update my ballot and send an ack, otherwise the incoming //ballot is old and we can ignore it if (proposedBallot > parentServer.getCurrentBallotNumber() || (proposedBallot == parentServer.getCurrentBallotNumber() && proposedprocessID > parentServer.getProcessId()) ){ parentServer.setCurrentBallotNumber(proposedBallot); //send the ack message with the current ballot, the last accepted ballot, the current value. ServerMessage ackMessage = new ServerMessage(ServerMessage.PAXOS_ACK, parentServer.getCurrentBallotNumber() + ","+ currentAcceptNum + "," + this.acceptValue, socket.getInetAddress().getHostName() ); System.out.println("SENDING: PAXOS_ACK to " + socket.getInetAddress().getHostName()); sendMessage(socket.getInetAddress().getHostName(), 3000, ackMessage); } break; case ServerMessage.PAXOS_ACK: //contents of the incoming ack message are current ballot, the last accepted ballot, the current value proposedBallot = Integer.parseInt(msg.getMessage().split(",")[0]); Hashtable<Integer,ArrayList<ServerMessage> > hash = parentServer.getMessageHash(); ArrayList<ServerMessage> ballot_msgs = hash.get(proposedBallot); //add the incoming message to a collection of responses for this ballot if (ballot_msgs == null){ ballot_msgs = new ArrayList<ServerMessage>(); } ballot_msgs.add(msg); hash.put(proposedBallot, ballot_msgs); parentServer.setMessageHash(hash); //check to see if we have gotten a majority of responses... if not, do nothing if(ballot_msgs.size() > Server.StatServers.size()/2) { boolean all_null = true; int highest_accept_num = -1; String highest_accept_val = null; //if we have, loop through the acks to see if we have an initial value. 
for (int i = 0; i < ballot_msgs.size(); i++){ ServerMessage temp_msg = (ServerMessage)ballot_msgs.get(i); int proposedacceptnum = Integer.parseInt(temp_msg.getMessage().split(",")[1]); String proposedVal = temp_msg.getMessage().split(",")[2]; if (proposedacceptnum > highest_accept_num ) { highest_accept_num = proposedacceptnum; highest_accept_val = proposedVal; } if (!proposedVal.equals(null)){ all_null = false; } } if (all_null) { //write line of grades / stats into file parentServer.appendFile("TEST WRITING SHIT"); //tell all other servers to accept my values for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ this.acceptValue ,socket.getLocalAddress().getHostAddress() ); System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) ); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } else { for (int i = 0; i < Server.StatServers.size(); i++){ ServerMessage acceptMsg = new ServerMessage(ServerMessage.PAXOS_ACCEPT, parentServer.getCurrentBallotNumber() +","+ highest_accept_val ,socket.getLocalAddress().getHostAddress() ); System.out.println("SENDING: PAXOS_ACCEPT to " + Server.StatServers.get(i) ); sendMessage(Server.StatServers.get(i), 3000, acceptMsg); } } } break; case ServerMessage.PAXOS_DECIDE: //do something case ServerMessage.PAXOS_ACCEPT: //do something case ServerMessage.TWOPHASE_VOTE_REQUEST: //reply yes or no case ServerMessage.TWOPHASE_VOTE_YES: //tally yes vote case ServerMessage.TWOPHASE_VOTE_NO: //send abort case ServerMessage.TWOPHASE_ABORT: //cancel the write changes case ServerMessage.TWOPHASE_COMMIT: //write any changes } } catch (IOException e) { System.out.println(" Exception reading Streams: " + e); System.exit(1); } catch(ClassNotFoundException ex) { //this shouldnt be a problem, only ServerMessages should be sent. System.exit(1); } }
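Apart from trimming the log lines, the functional change in this patch is the lazy initialisation of the per-ballot message list: the first PAXOS_ACK for a ballot previously hit a NullPointerException, because Hashtable.get returns null for an unseen key. The get-or-create shape in isolation, with Integer keys and String values as illustrative stand-ins for ballot numbers and ServerMessages:

import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;

public class GetOrCreateSketch {
    /** Look the list up, create it on first use, append, and store it back. */
    static void record(Hashtable<Integer, List<String>> byBallot, int ballot, String ack) {
        List<String> msgs = byBallot.get(ballot);
        if (msgs == null) {            // first ack seen for this ballot
            msgs = new ArrayList<String>();
        }
        msgs.add(ack);
        byBallot.put(ballot, msgs);    // redundant after the first put, but harmless
    }

    public static void main(String[] args) {
        Hashtable<Integer, List<String>> acks = new Hashtable<Integer, List<String>>();
        record(acks, 1, "ack-from-a");
        record(acks, 1, "ack-from-b");
        System.out.println(acks);      // {1=[ack-from-a, ack-from-b]}
    }
}

On Java 8 and later the same thing is usually written byBallot.computeIfAbsent(ballot, k -> new ArrayList<>()).add(ack). A separate bug survives the patch: proposedVal.equals(null) is specified to return false for any non-null receiver, so all_null is cleared on every ack and the "no prior value" branch can never run; since a null acceptValue is string-concatenated into the message as the literal text "null", the comparison presumably intended was against the string "null".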
diff --git a/src/main/java/me/eccentric_nz/TARDIS/listeners/TARDISButtonListener.java b/src/main/java/me/eccentric_nz/TARDIS/listeners/TARDISButtonListener.java index 1d8a9683f..54328cb97 100644 --- a/src/main/java/me/eccentric_nz/TARDIS/listeners/TARDISButtonListener.java +++ b/src/main/java/me/eccentric_nz/TARDIS/listeners/TARDISButtonListener.java @@ -1,480 +1,484 @@ /* * Copyright (C) 2013 eccentric_nz * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package me.eccentric_nz.TARDIS.listeners; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import me.eccentric_nz.TARDIS.ARS.TARDISARSInventory; import me.eccentric_nz.TARDIS.TARDIS; import me.eccentric_nz.TARDIS.advanced.TARDISCircuitChecker; import me.eccentric_nz.TARDIS.advanced.TARDISSerializeInventory; import me.eccentric_nz.TARDIS.builders.TARDISEmergencyRelocation; import me.eccentric_nz.TARDIS.database.QueryFactory; import me.eccentric_nz.TARDIS.database.ResultSetBackLocation; import me.eccentric_nz.TARDIS.database.ResultSetControls; import me.eccentric_nz.TARDIS.database.ResultSetCurrentLocation; import me.eccentric_nz.TARDIS.database.ResultSetDiskStorage; import me.eccentric_nz.TARDIS.database.ResultSetLamps; import me.eccentric_nz.TARDIS.database.ResultSetRepeaters; import me.eccentric_nz.TARDIS.database.ResultSetTardis; import me.eccentric_nz.TARDIS.database.ResultSetTravellers; import me.eccentric_nz.TARDIS.enumeration.COMPASS; import me.eccentric_nz.TARDIS.enumeration.MESSAGE; import me.eccentric_nz.TARDIS.enumeration.STORAGE; import me.eccentric_nz.TARDIS.info.TARDISInfoMenu; import me.eccentric_nz.TARDIS.rooms.TARDISExteriorRenderer; import me.eccentric_nz.TARDIS.travel.TARDISTemporalLocatorInventory; import me.eccentric_nz.TARDIS.travel.TARDISTerminalInventory; import me.eccentric_nz.TARDIS.travel.TARDISTimeTravel; import org.bukkit.ChatColor; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.block.Block; import org.bukkit.entity.Player; import org.bukkit.event.EventHandler; import org.bukkit.event.EventPriority; import org.bukkit.event.Listener; import org.bukkit.event.block.Action; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.ItemStack; /** * The various systems of the console room are fairly well-understood. According * to one account, each of the six panels controls a discrete function. The * navigation panel contains a time and space forward/back control, directional * pointer, atom accelerator and the spatial location input. 
* * @author eccentric_nz */ public class TARDISButtonListener implements Listener { private final TARDIS plugin; private final List<Material> validBlocks = new ArrayList<Material>(); private final List<Integer> onlythese = Arrays.asList(new Integer[]{1, 8, 9, 10, 11, 12, 13, 14, 16, 17}); public ItemStack[] items; private final ItemStack[] tars; private final ItemStack[] clocks; public TARDISButtonListener(TARDIS plugin) { this.plugin = plugin; validBlocks.add(Material.WOOD_BUTTON); validBlocks.add(Material.REDSTONE_COMPARATOR_OFF); validBlocks.add(Material.REDSTONE_COMPARATOR_ON); validBlocks.add(Material.STONE_BUTTON); validBlocks.add(Material.LEVER); validBlocks.add(Material.WALL_SIGN); validBlocks.add(Material.NOTE_BLOCK); validBlocks.add(Material.JUKEBOX); this.items = new TARDISTerminalInventory().getTerminal(); this.tars = new TARDISARSInventory().getTerminal(); this.clocks = new TARDISTemporalLocatorInventory().getTerminal(); } /** * Listens for player interaction with the TARDIS console button. If the * button is clicked it will return a random destination based on the * settings of the four TARDIS console repeaters. * * @param event the player clicking a block */ @SuppressWarnings("deprecation") @EventHandler(priority = EventPriority.MONITOR) public void onButtonInteract(PlayerInteractEvent event) { final Player player = event.getPlayer(); Block block = event.getClickedBlock(); if (block != null) { Material blockType = block.getType(); Action action = event.getAction(); if (action == Action.RIGHT_CLICK_BLOCK) { // only proceed if they are clicking a type of a button or a lever! if (validBlocks.contains(blockType)) { // get clicked block location String buttonloc = block.getLocation().toString(); // get tardis from saved button location HashMap<String, Object> where = new HashMap<String, Object>(); where.put("location", buttonloc); ResultSetControls rsc = new ResultSetControls(plugin, where, false); if (rsc.resultSet()) { int id = rsc.getTardis_id(); int type = rsc.getType(); if (!onlythese.contains(Integer.valueOf(type))) { return; } HashMap<String, Object> whereid = new HashMap<String, Object>(); whereid.put("tardis_id", id); ResultSetTardis rs = new ResultSetTardis(plugin, whereid, "", false); if (rs.resultSet()) { // check they initialised if (!rs.isTardis_init()) { player.sendMessage(plugin.pluginName + ChatColor.RED + "The TARDIS Artron Energy Capacitor has not been initialised!"); return; } int level = rs.getArtron_level(); boolean hb = rs.isHandbrake_on(); boolean set_dest = false; String comps = rs.getCompanions(); String owner = rs.getOwner(); TARDISCircuitChecker tcc = null; if (plugin.getConfig().getString("preferences.difficulty").equals("hard")) { tcc = new TARDISCircuitChecker(plugin, id); tcc.getCircuits(); } QueryFactory qf = new QueryFactory(plugin); HashMap<String, Object> set = new HashMap<String, Object>(); switch (type) { case 1: // random location button if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } HashMap<String, Object> wherecl = new HashMap<String, Object>(); wherecl.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscl = new ResultSetCurrentLocation(plugin, wherecl); if (!rscl.resultSet()) { // emergency TARDIS relocation new TARDISEmergencyRelocation(plugin).relocate(id, player); return; } COMPASS dir = 
rscl.getDirection(); Location cl = new Location(rscl.getWorld(), rscl.getX(), rscl.getY(), rscl.getZ()); if (player.hasPermission("tardis.exile") && plugin.getConfig().getBoolean("travel.exile")) { // get the exile area String permArea = plugin.ta.getExileArea(player); player.sendMessage(plugin.pluginName + ChatColor.RED + " Notice:" + ChatColor.RESET + " Your travel has been restricted to the [" + permArea + "] area!"); Location l = plugin.ta.getNextSpot(permArea); if (l == null) { player.sendMessage(plugin.pluginName + "All available parking spots are taken in this area!"); } else { set.put("world", l.getWorld().getName()); set.put("x", l.getBlockX()); set.put("y", l.getBlockY()); set.put("z", l.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", 0); player.sendMessage(plugin.pluginName + "Your TARDIS was approved for parking in [" + permArea + "]!"); set_dest = true; } } else { ResultSetRepeaters rsr = new ResultSetRepeaters(plugin, id, rsc.getSecondary()); if (rsr.resultSet()) { String environment = "THIS"; int nether_min = plugin.getArtronConfig().getInt("nether_min"); int the_end_min = plugin.getArtronConfig().getInt("the_end_min"); byte[] repeaters = rsr.getRepeaters(); if (repeaters[0] <= 3) { // first position environment = "THIS"; } if (repeaters[0] >= 4 && repeaters[0] <= 7) { // second position environment = "NORMAL"; } if (repeaters[0] >= 8 && repeaters[0] <= 11) { // third position if (plugin.getConfig().getBoolean("travel.nether") && player.hasPermission("tardis.nether")) { // check they have enough artron energy to travel to the NETHER if (level < nether_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + nether_min + " Artron Energy to travel to the Nether! Overworld selected."); } else { environment = "NETHER"; } } else { String message = (player.hasPermission("tardis.nether")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to the Nether" : "You do not have permission to time travel to the Nether"; player.sendMessage(plugin.pluginName + message); } } if (repeaters[0] >= 12 && repeaters[0] <= 15) { // last position if (plugin.getConfig().getBoolean("travel.the_end") && player.hasPermission("tardis.end")) { // check they have enough artron energy to travel to THE_END if (level < the_end_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + the_end_min + " Artron Energy to travel to The End! Overworld selected."); } else { environment = "THE_END"; } } else { String message = (player.hasPermission("tardis.end")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to The End" : "You do not have permission to time travel to The End"; player.sendMessage(plugin.pluginName + message); } } // create a random destination TARDISTimeTravel tt = new TARDISTimeTravel(plugin); Location rand = tt.randomDestination(player, repeaters[1], repeaters[2], repeaters[3], dir, environment, rscl.getWorld(), false, cl); if (rand != null) { set.put("world", rand.getWorld().getName()); set.put("x", rand.getBlockX()); set.put("y", rand.getBlockY()); set.put("z", rand.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", (plugin.trackSubmarine.contains(Integer.valueOf(id))) ? 
1 : 0); set_dest = true; plugin.trackSubmarine.remove(Integer.valueOf(id)); String dchat = rand.getWorld().getName() + " at x: " + rand.getBlockX() + " y: " + rand.getBlockY() + " z: " + rand.getBlockZ(); boolean isTL = true; if (comps != null && !comps.isEmpty()) { String[] companions = comps.split(":"); for (String c : companions) { // are they online - AND are they travelling if (plugin.getServer().getPlayer(c) != null) { // are they travelling HashMap<String, Object> wherec = new HashMap<String, Object>(); wherec.put("tardis_id", id); wherec.put("player", c); ResultSetTravellers rsv = new ResultSetTravellers(plugin, wherec, false); if (rsv.resultSet()) { plugin.getServer().getPlayer(c).sendMessage(plugin.pluginName + "Destination: " + dchat); } } if (c.equalsIgnoreCase(player.getName())) { isTL = false; } } } if (isTL == true) { player.sendMessage(plugin.pluginName + "Destination: " + dchat); } else { if (plugin.getServer().getPlayer(owner) != null) { plugin.getServer().getPlayer(owner).sendMessage(plugin.pluginName + "Destination: " + dchat); } } } else { player.sendMessage(plugin.pluginName + "Could not find a suitable location within the current settings, the area may be protected."); } } } break; case 8: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } // fast return button HashMap<String, Object> wherebl = new HashMap<String, Object>(); wherebl.put("tardis_id", rs.getTardis_id()); ResultSetBackLocation rsb = new ResultSetBackLocation(plugin, wherebl); if (rsb.resultSet()) { HashMap<String, Object> wherecu = new HashMap<String, Object>(); wherecu.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscu = new ResultSetCurrentLocation(plugin, wherecu); if (rscu.resultSet()) { if (!compareCurrentToBack(rscu, rsb)) { set.put("world", rsb.getWorld().getName()); set.put("x", rsb.getX()); set.put("y", rsb.getY()); set.put("z", rsb.getZ()); set.put("direction", rsb.getDirection().toString()); set.put("submarine", (rsb.isSubmarine()) ? 1 : 0); set_dest = true; player.sendMessage(plugin.pluginName + "Previous location selected. Please release the handbrake!"); } else { player.sendMessage(plugin.pluginName + "You are already at the previous location. 
You need to travel somewhere else first!"); } } } else { player.sendMessage(plugin.pluginName + "Could not get the previous location!"); } break; case 9: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } if (tcc != null && !tcc.hasInput()) { player.sendMessage(plugin.pluginName + "The Input Circuit is missing from the console!"); return; } // terminal sign Inventory aec = plugin.getServer().createInventory(player, 54, "§4Destination Terminal"); aec.setContents(items); player.openInventory(aec); break; case 10: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + "You cannot reconfigure rooms while the TARDIS is travelling!"); return; } // check they have permission to grow rooms if (!player.hasPermission("tardis.ars")) { player.sendMessage(plugin.pluginName + "You do not have permission to grow rooms!"); return; } // check they're in a compatible world if (!plugin.utils.canGrowRooms(rs.getChunk())) { player.sendMessage(plugin.pluginName + "You cannot grow rooms unless your TARDIS was created in its own world!"); return; } if (tcc != null && !tcc.hasARS()) { player.sendMessage(plugin.pluginName + "The ARS Circuit is missing from the console!"); return; } // ARS sign Inventory ars = plugin.getServer().createInventory(player, 54, "§4Architectural Reconfiguration"); ars.setContents(tars); player.openInventory(ars); break; case 11: // Temporal Locator sign if (tcc != null && !tcc.hasTemporal()) { player.sendMessage(plugin.pluginName + "The Temporal Circuit is missing from the console!"); return; } if (player.hasPermission("tardis.temporal")) { Inventory tmpl = plugin.getServer().createInventory(player, 27, "§4Temporal Locator"); tmpl.setContents(clocks); player.openInventory(tmpl); } break; case 12: // Control room light switch HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); ResultSetLamps rsl = new ResultSetLamps(plugin, wherel, true); List<Block> lamps = new ArrayList<Block>(); if (rsl.resultSet()) { // get lamp locations ArrayList<HashMap<String, String>> data = rsl.getData(); for (HashMap<String, String> map : data) { Location loc = plugin.utils.getLocationFromDB(map.get("location"), 0.0F, 0.0F); lamps.add(loc.getBlock()); } } for (Block b : lamps) { if (b.getTypeId() == 124) { b.setTypeId(19); } else if (b.getTypeId() == 19) { b.setTypeId(124); } } break; case 13: // TIS plugin.trackInfoMenu.put(player.getName(), TARDISInfoMenu.TIS); player.sendMessage(ChatColor.GOLD + "-----------TARDIS Information System-----------"); player.sendMessage(ChatColor.GOLD + "---*Please type a white letter to proceed*---"); player.sendMessage("§6> TARDIS §fM§6anual"); player.sendMessage("§6> §fI§6tems"); player.sendMessage("§6> §fC§6omponents"); player.sendMessage("§6> §fD§6isks"); player.sendMessage("§6> C§fo§6mmands"); player.sendMessage("§6> §fT§6ARDIS Types"); player.sendMessage("§6> §fR§6ooms"); player.sendMessage("§6> §fE§6xit"); break; case 14: // Disk Storage String name = player.getName(); // only the time lord of this tardis if (!owner.equals(name)) { player.sendMessage(plugin.pluginName + MESSAGE.NOT_OWNER.getText()); return; } // do they have a storage record? 
HashMap<String, Object> wherestore = new HashMap<String, Object>(); wherestore.put("owner", name); ResultSetDiskStorage rsstore = new ResultSetDiskStorage(plugin, wherestore); ItemStack[] stack = new ItemStack[54]; if (rsstore.resultSet()) { try { if (!rsstore.getSavesOne().isEmpty()) { stack = TARDISSerializeInventory.itemStacksFromString(rsstore.getSavesOne()); } else { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } } catch (IOException ex) { plugin.debug("Could not get Storage Inventory: " + ex.getMessage()); } } else { try { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } catch (IOException ex) { plugin.debug("Could not get default Storage Inventory: " + ex.getMessage()); } // make a record HashMap<String, Object> setstore = new HashMap<String, Object>(); setstore.put("owner", player.getName()); setstore.put("tardis_id", id); qf.doInsert("storage", setstore); } Inventory inv = plugin.getServer().createInventory(player, 54, STORAGE.SAVE_1.getTitle()); inv.setContents(stack); player.openInventory(inv); break; case 16: // enter zero room int zero_amount = plugin.getArtronConfig().getInt("zero"); if (level < zero_amount) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ZERO_ENERGY.getText()); return; } - player.sendMessage(plugin.pluginName + "Zero room ready, stand by for transmat..."); final Location zero = plugin.utils.getLocationFromDB(rs.getZero(), 0.0F, 0.0F); - plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { - @Override - public void run() { - new TARDISExteriorRenderer(plugin).transmat(player, COMPASS.SOUTH, zero); - } - }, 20L); - plugin.zeroRoomOccupants.add(player.getName()); - HashMap<String, Object> wherez = new HashMap<String, Object>(); - wherez.put("tardis_id", id); - qf.alterEnergyLevel("tardis", -zero_amount, wherez, player); + if (zero != null) { + player.sendMessage(plugin.pluginName + "Zero room ready, stand by for transmat..."); + plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { + @Override + public void run() { + new TARDISExteriorRenderer(plugin).transmat(player, COMPASS.SOUTH, zero); + } + }, 20L); + plugin.zeroRoomOccupants.add(player.getName()); + HashMap<String, Object> wherez = new HashMap<String, Object>(); + wherez.put("tardis_id", id); + qf.alterEnergyLevel("tardis", -zero_amount, wherez, player); + } else { + player.sendMessage(plugin.pluginName + "You don't have a Zero room!"); + } break; case 17: // exit zero room plugin.zeroRoomOccupants.remove(player.getName()); plugin.rendererListener.transmat(player); break; default: break; } if (set_dest) { HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); qf.doUpdate("next", set, wherel); plugin.tardisHasDestination.put(id, plugin.getArtronConfig().getInt("random")); if (plugin.trackRescue.containsKey(Integer.valueOf(id))) { plugin.trackRescue.remove(Integer.valueOf(id)); } } } } } } } } private boolean compareCurrentToBack(ResultSetCurrentLocation c, ResultSetBackLocation b) { return (c.getWorld().equals(b.getWorld()) && c.getX() == b.getX() && c.getY() == b.getY() && c.getZ() == b.getZ()); } }
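The change in case 16 of this diff is a guard: the original announced the transmat, scheduled it, registered the player as a zero-room occupant, and deducted artron energy before checking whether getLocationFromDB had actually produced a zero-room location, so a player whose TARDIS lacked a zero room was still charged and scheduled for a transmat to a possibly null target. The patch moves every side effect inside a zero != null check and reports the missing room otherwise. The shape of that reordering, with illustrative stand-ins for the plugin's lookup and side effects:

public class ZeroRoomGuardSketch {
    /** Hypothetical stand-ins for the plugin's lookup and actions. */
    interface Console {
        Object lookupZeroRoom();                // null when no zero room exists
        void tell(String message);
        void scheduleTransmat(Object zeroRoom); // the 20-tick delayed teleport
        void chargeArtron(int amount);
    }

    /** The patched shape: validate the location before any side effect runs. */
    static void enterZeroRoom(Console c, int zeroCost) {
        Object zero = c.lookupZeroRoom();
        if (zero != null) {
            c.tell("Zero room ready, stand by for transmat...");
            c.scheduleTransmat(zero);
            c.chargeArtron(-zeroCost);          // energy deducted only on success
        } else {
            c.tell("You don't have a Zero room!");
        }
    }
}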
false
true
public void onButtonInteract(PlayerInteractEvent event) { final Player player = event.getPlayer(); Block block = event.getClickedBlock(); if (block != null) { Material blockType = block.getType(); Action action = event.getAction(); if (action == Action.RIGHT_CLICK_BLOCK) { // only proceed if they are clicking a type of a button or a lever! if (validBlocks.contains(blockType)) { // get clicked block location String buttonloc = block.getLocation().toString(); // get tardis from saved button location HashMap<String, Object> where = new HashMap<String, Object>(); where.put("location", buttonloc); ResultSetControls rsc = new ResultSetControls(plugin, where, false); if (rsc.resultSet()) { int id = rsc.getTardis_id(); int type = rsc.getType(); if (!onlythese.contains(Integer.valueOf(type))) { return; } HashMap<String, Object> whereid = new HashMap<String, Object>(); whereid.put("tardis_id", id); ResultSetTardis rs = new ResultSetTardis(plugin, whereid, "", false); if (rs.resultSet()) { // check they initialised if (!rs.isTardis_init()) { player.sendMessage(plugin.pluginName + ChatColor.RED + "The TARDIS Artron Energy Capacitor has not been initialised!"); return; } int level = rs.getArtron_level(); boolean hb = rs.isHandbrake_on(); boolean set_dest = false; String comps = rs.getCompanions(); String owner = rs.getOwner(); TARDISCircuitChecker tcc = null; if (plugin.getConfig().getString("preferences.difficulty").equals("hard")) { tcc = new TARDISCircuitChecker(plugin, id); tcc.getCircuits(); } QueryFactory qf = new QueryFactory(plugin); HashMap<String, Object> set = new HashMap<String, Object>(); switch (type) { case 1: // random location button if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } HashMap<String, Object> wherecl = new HashMap<String, Object>(); wherecl.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscl = new ResultSetCurrentLocation(plugin, wherecl); if (!rscl.resultSet()) { // emergency TARDIS relocation new TARDISEmergencyRelocation(plugin).relocate(id, player); return; } COMPASS dir = rscl.getDirection(); Location cl = new Location(rscl.getWorld(), rscl.getX(), rscl.getY(), rscl.getZ()); if (player.hasPermission("tardis.exile") && plugin.getConfig().getBoolean("travel.exile")) { // get the exile area String permArea = plugin.ta.getExileArea(player); player.sendMessage(plugin.pluginName + ChatColor.RED + " Notice:" + ChatColor.RESET + " Your travel has been restricted to the [" + permArea + "] area!"); Location l = plugin.ta.getNextSpot(permArea); if (l == null) { player.sendMessage(plugin.pluginName + "All available parking spots are taken in this area!"); } else { set.put("world", l.getWorld().getName()); set.put("x", l.getBlockX()); set.put("y", l.getBlockY()); set.put("z", l.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", 0); player.sendMessage(plugin.pluginName + "Your TARDIS was approved for parking in [" + permArea + "]!"); set_dest = true; } } else { ResultSetRepeaters rsr = new ResultSetRepeaters(plugin, id, rsc.getSecondary()); if (rsr.resultSet()) { String environment = "THIS"; int nether_min = plugin.getArtronConfig().getInt("nether_min"); int the_end_min = plugin.getArtronConfig().getInt("the_end_min"); byte[] repeaters = rsr.getRepeaters(); if (repeaters[0] <= 3) { // first position environment = "THIS"; 
} if (repeaters[0] >= 4 && repeaters[0] <= 7) { // second position environment = "NORMAL"; } if (repeaters[0] >= 8 && repeaters[0] <= 11) { // third position if (plugin.getConfig().getBoolean("travel.nether") && player.hasPermission("tardis.nether")) { // check they have enough artron energy to travel to the NETHER if (level < nether_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + nether_min + " Artron Energy to travel to the Nether! Overworld selected."); } else { environment = "NETHER"; } } else { String message = (player.hasPermission("tardis.nether")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to the Nether" : "You do not have permission to time travel to the Nether"; player.sendMessage(plugin.pluginName + message); } } if (repeaters[0] >= 12 && repeaters[0] <= 15) { // last position if (plugin.getConfig().getBoolean("travel.the_end") && player.hasPermission("tardis.end")) { // check they have enough artron energy to travel to THE_END if (level < the_end_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + the_end_min + " Artron Energy to travel to The End! Overworld selected."); } else { environment = "THE_END"; } } else { String message = (player.hasPermission("tardis.end")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to The End" : "You do not have permission to time travel to The End"; player.sendMessage(plugin.pluginName + message); } } // create a random destination TARDISTimeTravel tt = new TARDISTimeTravel(plugin); Location rand = tt.randomDestination(player, repeaters[1], repeaters[2], repeaters[3], dir, environment, rscl.getWorld(), false, cl); if (rand != null) { set.put("world", rand.getWorld().getName()); set.put("x", rand.getBlockX()); set.put("y", rand.getBlockY()); set.put("z", rand.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", (plugin.trackSubmarine.contains(Integer.valueOf(id))) ? 
1 : 0); set_dest = true; plugin.trackSubmarine.remove(Integer.valueOf(id)); String dchat = rand.getWorld().getName() + " at x: " + rand.getBlockX() + " y: " + rand.getBlockY() + " z: " + rand.getBlockZ(); boolean isTL = true; if (comps != null && !comps.isEmpty()) { String[] companions = comps.split(":"); for (String c : companions) { // are they online - AND are they travelling if (plugin.getServer().getPlayer(c) != null) { // are they travelling HashMap<String, Object> wherec = new HashMap<String, Object>(); wherec.put("tardis_id", id); wherec.put("player", c); ResultSetTravellers rsv = new ResultSetTravellers(plugin, wherec, false); if (rsv.resultSet()) { plugin.getServer().getPlayer(c).sendMessage(plugin.pluginName + "Destination: " + dchat); } } if (c.equalsIgnoreCase(player.getName())) { isTL = false; } } } if (isTL == true) { player.sendMessage(plugin.pluginName + "Destination: " + dchat); } else { if (plugin.getServer().getPlayer(owner) != null) { plugin.getServer().getPlayer(owner).sendMessage(plugin.pluginName + "Destination: " + dchat); } } } else { player.sendMessage(plugin.pluginName + "Could not find a suitable location within the current settings, the area may be protected."); } } } break; case 8: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } // fast return button HashMap<String, Object> wherebl = new HashMap<String, Object>(); wherebl.put("tardis_id", rs.getTardis_id()); ResultSetBackLocation rsb = new ResultSetBackLocation(plugin, wherebl); if (rsb.resultSet()) { HashMap<String, Object> wherecu = new HashMap<String, Object>(); wherecu.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscu = new ResultSetCurrentLocation(plugin, wherecu); if (rscu.resultSet()) { if (!compareCurrentToBack(rscu, rsb)) { set.put("world", rsb.getWorld().getName()); set.put("x", rsb.getX()); set.put("y", rsb.getY()); set.put("z", rsb.getZ()); set.put("direction", rsb.getDirection().toString()); set.put("submarine", (rsb.isSubmarine()) ? 1 : 0); set_dest = true; player.sendMessage(plugin.pluginName + "Previous location selected. Please release the handbrake!"); } else { player.sendMessage(plugin.pluginName + "You are already at the previous location. 
You need to travel somewhere else first!"); } } } else { player.sendMessage(plugin.pluginName + "Could not get the previous location!"); } break; case 9: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } if (tcc != null && !tcc.hasInput()) { player.sendMessage(plugin.pluginName + "The Input Circuit is missing from the console!"); return; } // terminal sign Inventory aec = plugin.getServer().createInventory(player, 54, "§4Destination Terminal"); aec.setContents(items); player.openInventory(aec); break; case 10: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + "You cannot reconfigure rooms while the TARDIS is travelling!"); return; } // check they have permission to grow rooms if (!player.hasPermission("tardis.ars")) { player.sendMessage(plugin.pluginName + "You do not have permission to grow rooms!"); return; } // check they're in a compatible world if (!plugin.utils.canGrowRooms(rs.getChunk())) { player.sendMessage(plugin.pluginName + "You cannot grow rooms unless your TARDIS was created in its own world!"); return; } if (tcc != null && !tcc.hasARS()) { player.sendMessage(plugin.pluginName + "The ARS Circuit is missing from the console!"); return; } // ARS sign Inventory ars = plugin.getServer().createInventory(player, 54, "§4Architectural Reconfiguration"); ars.setContents(tars); player.openInventory(ars); break; case 11: // Temporal Locator sign if (tcc != null && !tcc.hasTemporal()) { player.sendMessage(plugin.pluginName + "The Temporal Circuit is missing from the console!"); return; } if (player.hasPermission("tardis.temporal")) { Inventory tmpl = plugin.getServer().createInventory(player, 27, "§4Temporal Locator"); tmpl.setContents(clocks); player.openInventory(tmpl); } break; case 12: // Control room light switch HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); ResultSetLamps rsl = new ResultSetLamps(plugin, wherel, true); List<Block> lamps = new ArrayList<Block>(); if (rsl.resultSet()) { // get lamp locations ArrayList<HashMap<String, String>> data = rsl.getData(); for (HashMap<String, String> map : data) { Location loc = plugin.utils.getLocationFromDB(map.get("location"), 0.0F, 0.0F); lamps.add(loc.getBlock()); } } for (Block b : lamps) { if (b.getTypeId() == 124) { b.setTypeId(19); } else if (b.getTypeId() == 19) { b.setTypeId(124); } } break; case 13: // TIS plugin.trackInfoMenu.put(player.getName(), TARDISInfoMenu.TIS); player.sendMessage(ChatColor.GOLD + "-----------TARDIS Information System-----------"); player.sendMessage(ChatColor.GOLD + "---*Please type a white letter to proceed*---"); player.sendMessage("§6> TARDIS §fM§6anual"); player.sendMessage("§6> §fI§6tems"); player.sendMessage("§6> §fC§6omponents"); player.sendMessage("§6> §fD§6isks"); player.sendMessage("§6> C§fo§6mmands"); player.sendMessage("§6> §fT§6ARDIS Types"); player.sendMessage("§6> §fR§6ooms"); player.sendMessage("§6> §fE§6xit"); break; case 14: // Disk Storage String name = player.getName(); // only the time lord of this tardis if (!owner.equals(name)) { player.sendMessage(plugin.pluginName + MESSAGE.NOT_OWNER.getText()); return; } // do they have a storage record? 
HashMap<String, Object> wherestore = new HashMap<String, Object>(); wherestore.put("owner", name); ResultSetDiskStorage rsstore = new ResultSetDiskStorage(plugin, wherestore); ItemStack[] stack = new ItemStack[54]; if (rsstore.resultSet()) { try { if (!rsstore.getSavesOne().isEmpty()) { stack = TARDISSerializeInventory.itemStacksFromString(rsstore.getSavesOne()); } else { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } } catch (IOException ex) { plugin.debug("Could not get Storage Inventory: " + ex.getMessage()); } } else { try { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } catch (IOException ex) { plugin.debug("Could not get default Storage Inventory: " + ex.getMessage()); } // make a record HashMap<String, Object> setstore = new HashMap<String, Object>(); setstore.put("owner", player.getName()); setstore.put("tardis_id", id); qf.doInsert("storage", setstore); } Inventory inv = plugin.getServer().createInventory(player, 54, STORAGE.SAVE_1.getTitle()); inv.setContents(stack); player.openInventory(inv); break; case 16: // enter zero room int zero_amount = plugin.getArtronConfig().getInt("zero"); if (level < zero_amount) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ZERO_ENERGY.getText()); return; } player.sendMessage(plugin.pluginName + "Zero room ready, stand by for transmat..."); final Location zero = plugin.utils.getLocationFromDB(rs.getZero(), 0.0F, 0.0F); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { new TARDISExteriorRenderer(plugin).transmat(player, COMPASS.SOUTH, zero); } }, 20L); plugin.zeroRoomOccupants.add(player.getName()); HashMap<String, Object> wherez = new HashMap<String, Object>(); wherez.put("tardis_id", id); qf.alterEnergyLevel("tardis", -zero_amount, wherez, player); break; case 17: // exit zero room plugin.zeroRoomOccupants.remove(player.getName()); plugin.rendererListener.transmat(player); break; default: break; } if (set_dest) { HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); qf.doUpdate("next", set, wherel); plugin.tardisHasDestination.put(id, plugin.getArtronConfig().getInt("random")); if (plugin.trackRescue.containsKey(Integer.valueOf(id))) { plugin.trackRescue.remove(Integer.valueOf(id)); } } } } } } } }
public void onButtonInteract(PlayerInteractEvent event) { final Player player = event.getPlayer(); Block block = event.getClickedBlock(); if (block != null) { Material blockType = block.getType(); Action action = event.getAction(); if (action == Action.RIGHT_CLICK_BLOCK) { // only proceed if they are clicking a type of a button or a lever! if (validBlocks.contains(blockType)) { // get clicked block location String buttonloc = block.getLocation().toString(); // get tardis from saved button location HashMap<String, Object> where = new HashMap<String, Object>(); where.put("location", buttonloc); ResultSetControls rsc = new ResultSetControls(plugin, where, false); if (rsc.resultSet()) { int id = rsc.getTardis_id(); int type = rsc.getType(); if (!onlythese.contains(Integer.valueOf(type))) { return; } HashMap<String, Object> whereid = new HashMap<String, Object>(); whereid.put("tardis_id", id); ResultSetTardis rs = new ResultSetTardis(plugin, whereid, "", false); if (rs.resultSet()) { // check they initialised if (!rs.isTardis_init()) { player.sendMessage(plugin.pluginName + ChatColor.RED + "The TARDIS Artron Energy Capacitor has not been initialised!"); return; } int level = rs.getArtron_level(); boolean hb = rs.isHandbrake_on(); boolean set_dest = false; String comps = rs.getCompanions(); String owner = rs.getOwner(); TARDISCircuitChecker tcc = null; if (plugin.getConfig().getString("preferences.difficulty").equals("hard")) { tcc = new TARDISCircuitChecker(plugin, id); tcc.getCircuits(); } QueryFactory qf = new QueryFactory(plugin); HashMap<String, Object> set = new HashMap<String, Object>(); switch (type) { case 1: // random location button if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } HashMap<String, Object> wherecl = new HashMap<String, Object>(); wherecl.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscl = new ResultSetCurrentLocation(plugin, wherecl); if (!rscl.resultSet()) { // emergency TARDIS relocation new TARDISEmergencyRelocation(plugin).relocate(id, player); return; } COMPASS dir = rscl.getDirection(); Location cl = new Location(rscl.getWorld(), rscl.getX(), rscl.getY(), rscl.getZ()); if (player.hasPermission("tardis.exile") && plugin.getConfig().getBoolean("travel.exile")) { // get the exile area String permArea = plugin.ta.getExileArea(player); player.sendMessage(plugin.pluginName + ChatColor.RED + " Notice:" + ChatColor.RESET + " Your travel has been restricted to the [" + permArea + "] area!"); Location l = plugin.ta.getNextSpot(permArea); if (l == null) { player.sendMessage(plugin.pluginName + "All available parking spots are taken in this area!"); } else { set.put("world", l.getWorld().getName()); set.put("x", l.getBlockX()); set.put("y", l.getBlockY()); set.put("z", l.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", 0); player.sendMessage(plugin.pluginName + "Your TARDIS was approved for parking in [" + permArea + "]!"); set_dest = true; } } else { ResultSetRepeaters rsr = new ResultSetRepeaters(plugin, id, rsc.getSecondary()); if (rsr.resultSet()) { String environment = "THIS"; int nether_min = plugin.getArtronConfig().getInt("nether_min"); int the_end_min = plugin.getArtronConfig().getInt("the_end_min"); byte[] repeaters = rsr.getRepeaters(); if (repeaters[0] <= 3) { // first position environment = "THIS"; 
} if (repeaters[0] >= 4 && repeaters[0] <= 7) { // second position environment = "NORMAL"; } if (repeaters[0] >= 8 && repeaters[0] <= 11) { // third position if (plugin.getConfig().getBoolean("travel.nether") && player.hasPermission("tardis.nether")) { // check they have enough artron energy to travel to the NETHER if (level < nether_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + nether_min + " Artron Energy to travel to the Nether! Overworld selected."); } else { environment = "NETHER"; } } else { String message = (player.hasPermission("tardis.nether")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to the Nether" : "You do not have permission to time travel to the Nether"; player.sendMessage(plugin.pluginName + message); } } if (repeaters[0] >= 12 && repeaters[0] <= 15) { // last position if (plugin.getConfig().getBoolean("travel.the_end") && player.hasPermission("tardis.end")) { // check they have enough artron energy to travel to THE_END if (level < the_end_min) { environment = "NORMAL"; player.sendMessage(plugin.pluginName + "You need at least " + the_end_min + " Artron Energy to travel to The End! Overworld selected."); } else { environment = "THE_END"; } } else { String message = (player.hasPermission("tardis.end")) ? "The ancient, dusty senators of Gallifrey have disabled time travel to The End" : "You do not have permission to time travel to The End"; player.sendMessage(plugin.pluginName + message); } } // create a random destination TARDISTimeTravel tt = new TARDISTimeTravel(plugin); Location rand = tt.randomDestination(player, repeaters[1], repeaters[2], repeaters[3], dir, environment, rscl.getWorld(), false, cl); if (rand != null) { set.put("world", rand.getWorld().getName()); set.put("x", rand.getBlockX()); set.put("y", rand.getBlockY()); set.put("z", rand.getBlockZ()); set.put("direction", dir.toString()); set.put("submarine", (plugin.trackSubmarine.contains(Integer.valueOf(id))) ? 
1 : 0); set_dest = true; plugin.trackSubmarine.remove(Integer.valueOf(id)); String dchat = rand.getWorld().getName() + " at x: " + rand.getBlockX() + " y: " + rand.getBlockY() + " z: " + rand.getBlockZ(); boolean isTL = true; if (comps != null && !comps.isEmpty()) { String[] companions = comps.split(":"); for (String c : companions) { // are they online - AND are they travelling if (plugin.getServer().getPlayer(c) != null) { // are they travelling HashMap<String, Object> wherec = new HashMap<String, Object>(); wherec.put("tardis_id", id); wherec.put("player", c); ResultSetTravellers rsv = new ResultSetTravellers(plugin, wherec, false); if (rsv.resultSet()) { plugin.getServer().getPlayer(c).sendMessage(plugin.pluginName + "Destination: " + dchat); } } if (c.equalsIgnoreCase(player.getName())) { isTL = false; } } } if (isTL == true) { player.sendMessage(plugin.pluginName + "Destination: " + dchat); } else { if (plugin.getServer().getPlayer(owner) != null) { plugin.getServer().getPlayer(owner).sendMessage(plugin.pluginName + "Destination: " + dchat); } } } else { player.sendMessage(plugin.pluginName + "Could not find a suitable location within the current settings, the area may be protected."); } } } break; case 8: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } // fast return button HashMap<String, Object> wherebl = new HashMap<String, Object>(); wherebl.put("tardis_id", rs.getTardis_id()); ResultSetBackLocation rsb = new ResultSetBackLocation(plugin, wherebl); if (rsb.resultSet()) { HashMap<String, Object> wherecu = new HashMap<String, Object>(); wherecu.put("tardis_id", rs.getTardis_id()); ResultSetCurrentLocation rscu = new ResultSetCurrentLocation(plugin, wherecu); if (rscu.resultSet()) { if (!compareCurrentToBack(rscu, rsb)) { set.put("world", rsb.getWorld().getName()); set.put("x", rsb.getX()); set.put("y", rsb.getY()); set.put("z", rsb.getZ()); set.put("direction", rsb.getDirection().toString()); set.put("submarine", (rsb.isSubmarine()) ? 1 : 0); set_dest = true; player.sendMessage(plugin.pluginName + "Previous location selected. Please release the handbrake!"); } else { player.sendMessage(plugin.pluginName + "You are already at the previous location. 
You need to travel somewhere else first!"); } } } else { player.sendMessage(plugin.pluginName + "Could not get the previous location!"); } break; case 9: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_WHILE_TRAVELLING.getText()); return; } if (level < plugin.getArtronConfig().getInt("random")) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ENERGY.getText()); return; } if (tcc != null && !tcc.hasInput()) { player.sendMessage(plugin.pluginName + "The Input Circuit is missing from the console!"); return; } // terminal sign Inventory aec = plugin.getServer().createInventory(player, 54, "§4Destination Terminal"); aec.setContents(items); player.openInventory(aec); break; case 10: if (!hb) { player.sendMessage(plugin.pluginName + ChatColor.RED + "You cannot reconfigure rooms while the TARDIS is travelling!"); return; } // check they have permission to grow rooms if (!player.hasPermission("tardis.ars")) { player.sendMessage(plugin.pluginName + "You do not have permission to grow rooms!"); return; } // check they're in a compatible world if (!plugin.utils.canGrowRooms(rs.getChunk())) { player.sendMessage(plugin.pluginName + "You cannot grow rooms unless your TARDIS was created in its own world!"); return; } if (tcc != null && !tcc.hasARS()) { player.sendMessage(plugin.pluginName + "The ARS Circuit is missing from the console!"); return; } // ARS sign Inventory ars = plugin.getServer().createInventory(player, 54, "§4Architectural Reconfiguration"); ars.setContents(tars); player.openInventory(ars); break; case 11: // Temporal Locator sign if (tcc != null && !tcc.hasTemporal()) { player.sendMessage(plugin.pluginName + "The Temporal Circuit is missing from the console!"); return; } if (player.hasPermission("tardis.temporal")) { Inventory tmpl = plugin.getServer().createInventory(player, 27, "§4Temporal Locator"); tmpl.setContents(clocks); player.openInventory(tmpl); } break; case 12: // Control room light switch HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); ResultSetLamps rsl = new ResultSetLamps(plugin, wherel, true); List<Block> lamps = new ArrayList<Block>(); if (rsl.resultSet()) { // get lamp locations ArrayList<HashMap<String, String>> data = rsl.getData(); for (HashMap<String, String> map : data) { Location loc = plugin.utils.getLocationFromDB(map.get("location"), 0.0F, 0.0F); lamps.add(loc.getBlock()); } } for (Block b : lamps) { if (b.getTypeId() == 124) { b.setTypeId(19); } else if (b.getTypeId() == 19) { b.setTypeId(124); } } break; case 13: // TIS plugin.trackInfoMenu.put(player.getName(), TARDISInfoMenu.TIS); player.sendMessage(ChatColor.GOLD + "-----------TARDIS Information System-----------"); player.sendMessage(ChatColor.GOLD + "---*Please type a white letter to proceed*---"); player.sendMessage("§6> TARDIS §fM§6anual"); player.sendMessage("§6> §fI§6tems"); player.sendMessage("§6> §fC§6omponents"); player.sendMessage("§6> §fD§6isks"); player.sendMessage("§6> C§fo§6mmands"); player.sendMessage("§6> §fT§6ARDIS Types"); player.sendMessage("§6> §fR§6ooms"); player.sendMessage("§6> §fE§6xit"); break; case 14: // Disk Storage String name = player.getName(); // only the time lord of this tardis if (!owner.equals(name)) { player.sendMessage(plugin.pluginName + MESSAGE.NOT_OWNER.getText()); return; } // do they have a storage record? 
HashMap<String, Object> wherestore = new HashMap<String, Object>(); wherestore.put("owner", name); ResultSetDiskStorage rsstore = new ResultSetDiskStorage(plugin, wherestore); ItemStack[] stack = new ItemStack[54]; if (rsstore.resultSet()) { try { if (!rsstore.getSavesOne().isEmpty()) { stack = TARDISSerializeInventory.itemStacksFromString(rsstore.getSavesOne()); } else { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } } catch (IOException ex) { plugin.debug("Could not get Storage Inventory: " + ex.getMessage()); } } else { try { stack = TARDISSerializeInventory.itemStacksFromString(STORAGE.SAVE_1.getEmpty()); } catch (IOException ex) { plugin.debug("Could not get default Storage Inventory: " + ex.getMessage()); } // make a record HashMap<String, Object> setstore = new HashMap<String, Object>(); setstore.put("owner", player.getName()); setstore.put("tardis_id", id); qf.doInsert("storage", setstore); } Inventory inv = plugin.getServer().createInventory(player, 54, STORAGE.SAVE_1.getTitle()); inv.setContents(stack); player.openInventory(inv); break; case 16: // enter zero room int zero_amount = plugin.getArtronConfig().getInt("zero"); if (level < zero_amount) { player.sendMessage(plugin.pluginName + ChatColor.RED + MESSAGE.NOT_ENOUGH_ZERO_ENERGY.getText()); return; } final Location zero = plugin.utils.getLocationFromDB(rs.getZero(), 0.0F, 0.0F); if (zero != null) { player.sendMessage(plugin.pluginName + "Zero room ready, stand by for transmat..."); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { new TARDISExteriorRenderer(plugin).transmat(player, COMPASS.SOUTH, zero); } }, 20L); plugin.zeroRoomOccupants.add(player.getName()); HashMap<String, Object> wherez = new HashMap<String, Object>(); wherez.put("tardis_id", id); qf.alterEnergyLevel("tardis", -zero_amount, wherez, player); } else { player.sendMessage(plugin.pluginName + "You don't have a Zero room!"); } break; case 17: // exit zero room plugin.zeroRoomOccupants.remove(player.getName()); plugin.rendererListener.transmat(player); break; default: break; } if (set_dest) { HashMap<String, Object> wherel = new HashMap<String, Object>(); wherel.put("tardis_id", id); qf.doUpdate("next", set, wherel); plugin.tardisHasDestination.put(id, plugin.getArtronConfig().getInt("random")); if (plugin.trackRescue.containsKey(Integer.valueOf(id))) { plugin.trackRescue.remove(Integer.valueOf(id)); } } } } } } } }
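The handler above follows one pattern throughout: the clicked block's serialized location is looked up in the controls table, the matching TARDIS id and numeric control type are read back, and a switch on that type routes to the behaviour (1 random location, 8 fast return, 9 destination terminal, 10 ARS, 11 temporal locator, 12 light switch, 13 info system, 14 disk storage, 16/17 zero room). A minimal sketch of that lookup-and-dispatch shape, with illustrative names rather than the plugin's real API:

```java
import java.util.HashMap;
import java.util.Map;

// Illustrative only (not the plugin's API): the shape used by
// onButtonInteract -- serialized location -> control type -> handler.
class ControlDispatcher {
    private final Map<String, Integer> controlTypeByLocation = new HashMap<>();

    void register(String serializedLocation, int type) {
        controlTypeByLocation.put(serializedLocation, type);
    }

    void dispatch(String serializedLocation) {
        Integer type = controlTypeByLocation.get(serializedLocation);
        if (type == null) {
            return; // clicked block is not a registered control
        }
        switch (type) {
            case 1:  handleRandomLocation(); break; // random destination button
            case 8:  handleFastReturn();     break; // fast return button
            default: break;                         // other controls ignored here
        }
    }

    private void handleRandomLocation() { /* ... */ }
    private void handleFastReturn()     { /* ... */ }
}
```

Keeping the dispatch in one switch means each new console control only needs a case label and a handler.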
diff --git a/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/decorators/DecoratableResourceHelper.java b/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/decorators/DecoratableResourceHelper.java index bfe3bdbc..89b85679 100644 --- a/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/decorators/DecoratableResourceHelper.java +++ b/org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/decorators/DecoratableResourceHelper.java @@ -1,202 +1,204 @@ /******************************************************************************* * Copyright (C) 2011, Philipp Thun <[email protected]> * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.eclipse.egit.ui.internal.decorators; import java.io.IOException; import java.util.ArrayList; import org.eclipse.core.resources.IResource; import org.eclipse.egit.core.ContainerTreeIterator; import org.eclipse.egit.core.ContainerTreeIterator.ResourceEntry; import org.eclipse.egit.core.IteratorService; import org.eclipse.egit.core.project.RepositoryMapping; import org.eclipse.egit.ui.internal.decorators.IDecoratableResource.Staged; import org.eclipse.jgit.dircache.DirCacheEntry; import org.eclipse.jgit.dircache.DirCacheIterator; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.FileMode; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.EmptyTreeIterator; import org.eclipse.jgit.treewalk.TreeWalk; import org.eclipse.jgit.treewalk.filter.PathFilterGroup; /** * Helper class to create decoratable resources * * @see IDecoratableResource */ class DecoratableResourceHelper { static final int T_HEAD = 0; static final int T_INDEX = 1; static final int T_WORKSPACE = 2; static IDecoratableResource[] createDecoratableResources( final IResource[] resources) throws IOException { // Use first (available) resource to get repository mapping int i = 0; while (resources[i] == null) { i++; if (i >= resources.length) // Array only contains nulls return null; } final RepositoryMapping mapping = RepositoryMapping .getMapping(resources[i]); final IDecoratableResource[] decoratableResources = new IDecoratableResource[resources.length]; ArrayList<String> resourcePaths = new ArrayList<String>(); for (i = 0; i < resources.length; i++) { final IResource resource = resources[i]; if (resource != null && resource.getProject().isOpen()) { switch (resource.getType()) { case IResource.FILE: // Add file path to list used for bulk decoration resourcePaths.add(mapping.getRepoRelativePath(resource)); break; case IResource.FOLDER: case IResource.PROJECT: // Decorate folder and project node separately try { decoratableResources[i] = new DecoratableResourceAdapter( resource); } catch (IOException e) { // Ignore - decoratableResources[i] is null } resourcePaths.add(null); break; } } else { resourcePaths.add(null); } } // Check resource paths before proceeding with bulk decoration boolean containsAtLeastOnePath = false; for (final String p : resourcePaths) { if (p != null) { containsAtLeastOnePath = true; break; } } if (!containsAtLeastOnePath) return decoratableResources; final TreeWalk treeWalk = createThreeWayTreeWalk(mapping, resourcePaths); if (treeWalk != null) while (treeWalk.next()) { i = 
resourcePaths.indexOf(treeWalk.getPathString()); if (i != -1) { try { - decoratableResources[i] = decorateResource( - new DecoratableResource(resources[i]), treeWalk); + if (decoratableResources[i] == null) + decoratableResources[i] = decorateResource( + new DecoratableResource(resources[i]), + treeWalk); } catch (IOException e) { // Ignore - decoratableResources[i] is null } } } return decoratableResources; } private static TreeWalk createThreeWayTreeWalk( final RepositoryMapping mapping, final ArrayList<String> resourcePaths) throws IOException { final Repository repository = mapping.getRepository(); final TreeWalk treeWalk = new TreeWalk(repository); // Copy path list... final ArrayList<String> paths = new ArrayList<String>(resourcePaths); while (paths.remove(null)) { // ... and remove nulls } treeWalk.setFilter(PathFilterGroup.createFromStrings(paths)); treeWalk.setRecursive(true); treeWalk.reset(); // Repository final ObjectId headId = repository.resolve(Constants.HEAD); if (headId != null) treeWalk.addTree(new RevWalk(repository).parseTree(headId)); else treeWalk.addTree(new EmptyTreeIterator()); // Index treeWalk.addTree(new DirCacheIterator(repository.readDirCache())); // Working directory treeWalk.addTree(IteratorService.createInitialIterator(repository)); return treeWalk; } static DecoratableResource decorateResource( final DecoratableResource decoratableResource, final TreeWalk treeWalk) throws IOException { final ContainerTreeIterator workspaceIterator = treeWalk.getTree( T_WORKSPACE, ContainerTreeIterator.class); final ResourceEntry resourceEntry = workspaceIterator != null ? workspaceIterator .getResourceEntry() : null; if (resourceEntry == null) return null; if (workspaceIterator != null && workspaceIterator.isEntryIgnored()) { decoratableResource.ignored = true; return decoratableResource; } final int mHead = treeWalk.getRawMode(T_HEAD); final int mIndex = treeWalk.getRawMode(T_INDEX); if (mHead == FileMode.MISSING.getBits() && mIndex == FileMode.MISSING.getBits()) return decoratableResource; decoratableResource.tracked = true; if (mHead == FileMode.MISSING.getBits()) { decoratableResource.staged = Staged.ADDED; } else if (mIndex == FileMode.MISSING.getBits()) { decoratableResource.staged = Staged.REMOVED; } else if (mHead != mIndex || (mIndex != FileMode.TREE.getBits() && !treeWalk.idEqual( T_HEAD, T_INDEX))) { decoratableResource.staged = Staged.MODIFIED; } else { decoratableResource.staged = Staged.NOT_STAGED; } final DirCacheIterator indexIterator = treeWalk.getTree(T_INDEX, DirCacheIterator.class); final DirCacheEntry indexEntry = indexIterator != null ? indexIterator .getDirCacheEntry() : null; if (indexEntry == null) return decoratableResource; if (indexEntry.getStage() > 0) decoratableResource.conflicts = true; if (indexEntry.isAssumeValid()) { decoratableResource.dirty = false; decoratableResource.assumeValid = true; } else { if (workspaceIterator != null && workspaceIterator.isModified(indexEntry, true)) decoratableResource.dirty = true; } return decoratableResource; } }
true
true
static IDecoratableResource[] createDecoratableResources( final IResource[] resources) throws IOException { // Use first (available) resource to get repository mapping int i = 0; while (resources[i] == null) { i++; if (i >= resources.length) // Array only contains nulls return null; } final RepositoryMapping mapping = RepositoryMapping .getMapping(resources[i]); final IDecoratableResource[] decoratableResources = new IDecoratableResource[resources.length]; ArrayList<String> resourcePaths = new ArrayList<String>(); for (i = 0; i < resources.length; i++) { final IResource resource = resources[i]; if (resource != null && resource.getProject().isOpen()) { switch (resource.getType()) { case IResource.FILE: // Add file path to list used for bulk decoration resourcePaths.add(mapping.getRepoRelativePath(resource)); break; case IResource.FOLDER: case IResource.PROJECT: // Decorate folder and project node separately try { decoratableResources[i] = new DecoratableResourceAdapter( resource); } catch (IOException e) { // Ignore - decoratableResources[i] is null } resourcePaths.add(null); break; } } else { resourcePaths.add(null); } } // Check resource paths before proceeding with bulk decoration boolean containsAtLeastOnePath = false; for (final String p : resourcePaths) { if (p != null) { containsAtLeastOnePath = true; break; } } if (!containsAtLeastOnePath) return decoratableResources; final TreeWalk treeWalk = createThreeWayTreeWalk(mapping, resourcePaths); if (treeWalk != null) while (treeWalk.next()) { i = resourcePaths.indexOf(treeWalk.getPathString()); if (i != -1) { try { decoratableResources[i] = decorateResource( new DecoratableResource(resources[i]), treeWalk); } catch (IOException e) { // Ignore - decoratableResources[i] is null } } } return decoratableResources; }
static IDecoratableResource[] createDecoratableResources( final IResource[] resources) throws IOException { // Use first (available) resource to get repository mapping int i = 0; while (resources[i] == null) { i++; if (i >= resources.length) // Array only contains nulls return null; } final RepositoryMapping mapping = RepositoryMapping .getMapping(resources[i]); final IDecoratableResource[] decoratableResources = new IDecoratableResource[resources.length]; ArrayList<String> resourcePaths = new ArrayList<String>(); for (i = 0; i < resources.length; i++) { final IResource resource = resources[i]; if (resource != null && resource.getProject().isOpen()) { switch (resource.getType()) { case IResource.FILE: // Add file path to list used for bulk decoration resourcePaths.add(mapping.getRepoRelativePath(resource)); break; case IResource.FOLDER: case IResource.PROJECT: // Decorate folder and project node separately try { decoratableResources[i] = new DecoratableResourceAdapter( resource); } catch (IOException e) { // Ignore - decoratableResources[i] is null } resourcePaths.add(null); break; } } else { resourcePaths.add(null); } } // Check resource paths before proceeding with bulk decoration boolean containsAtLeastOnePath = false; for (final String p : resourcePaths) { if (p != null) { containsAtLeastOnePath = true; break; } } if (!containsAtLeastOnePath) return decoratableResources; final TreeWalk treeWalk = createThreeWayTreeWalk(mapping, resourcePaths); if (treeWalk != null) while (treeWalk.next()) { i = resourcePaths.indexOf(treeWalk.getPathString()); if (i != -1) { try { if (decoratableResources[i] == null) decoratableResources[i] = decorateResource( new DecoratableResource(resources[i]), treeWalk); } catch (IOException e) { // Ignore - decoratableResources[i] is null } } } return decoratableResources; }
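The whole fix is the `decoratableResources[i] == null` guard. Folders and projects are decorated eagerly in the first loop via `DecoratableResourceAdapter`, and the bulk tree-walk pass could previously overwrite those entries with file-oriented results; filling only empty slots preserves the earlier, more specific decoration. A minimal sketch of the fill-if-absent pattern (the `IntFunction` is Java 8 shorthand for brevity here, not EGit API):

```java
import java.util.function.IntFunction;

// Fill-if-absent: a second, bulk pass must not clobber slots that an
// earlier, more specific pass already populated.
class Passes {
    static <T> void fillAbsent(T[] slots, IntFunction<T> compute) {
        for (int i = 0; i < slots.length; i++) {
            if (slots[i] == null) {
                slots[i] = compute.apply(i);
            }
        }
    }
}
```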
diff --git a/Graph.java b/Graph.java index cb2ffc9..b5b1b9b 100644 --- a/Graph.java +++ b/Graph.java @@ -1,75 +1,76 @@ package project; import java.util.*; public class Graph { private boolean array[][]; private int size; protected Graph(int s) { size = s*s; // s is the 'side length' of the graph, which represents a 2D square quoridor board for our purposes array = new boolean[size][size]; int n=0; while (n < size) { //place edges between adjacent squares if (n % s < s - 1) { array[n][n+1] = true; array[n+1][n] = true; } if (n / s < s - 1) { array[n][n+s] = true; array[n+s][n] = true; } n++; } } public void removeEdge (int n1, int n2) { //remove edge between nodes n1 and n2 array[n1][n2] = false; array[n2][n1] = false; } public void insertEdge (int n1, int n2) { array[n1][n2] = true; array[n2][n1] = true; } public boolean isEdge (int n1, int n2) { return array[n1][n2]; } public boolean search(int n1, int n2) { Queue<Integer> q = new LinkedList<Integer>(); int t; int c; int count = 0; int visited[] = new int[size]; boolean isV; q.add(n1); visited[count] = n1; count++; while (q.size() > 0) { //BFS t = q.remove(); if (t==n2) { return true; } c = 0; while (c < size) { if (array[t][c]) { isV = false; for (int v : visited) { if (v == c) { isV = true; } } if (!isV) { visited[count] = c; count++; q.add(c); } } + c++; } } return false; } }
true
true
public boolean search(int n1, int n2) { Queue<Integer> q = new LinkedList<Integer>(); int t; int c; int count = 0; int visited[] = new int[size]; boolean isV; q.add(n1); visited[count] = n1; count++; while (q.size() > 0) { //BFS t = q.remove(); if (t==n2) { return true; } c = 0; while (c < size) { if (array[t][c]) { isV = false; for (int v : visited) { if (v == c) { isV = true; } } if (!isV) { visited[count] = c; count++; q.add(c); } } } } return false; }
public boolean search(int n1, int n2) { Queue<Integer> q = new LinkedList<Integer>(); int t; int c; int count = 0; int visited[] = new int[size]; boolean isV; q.add(n1); visited[count] = n1; count++; while (q.size() > 0) { //BFS t = q.remove(); if (t==n2) { return true; } c = 0; while (c < size) { if (array[t][c]) { isV = false; for (int v : visited) { if (v == c) { isV = true; } } if (!isV) { visited[count] = c; count++; q.add(c); } } c++; } } return false; }
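The one-line fix restores the `c++` at the bottom of the neighbour scan; without it the inner `while` never advances past column 0, so the first dequeued node spins forever unless it is already the target. Moving the increment into a `for` header makes it impossible to drop; a hedged rewrite of the same BFS under that change, with a `boolean[]` visited set swapped in for the linear scan:

```java
import java.util.ArrayDeque;
import java.util.Queue;

class GraphSearch {
    // Same BFS as above, with the increment in the for-loop header (so it
    // cannot be dropped) and an O(1) visited check instead of a linear scan.
    static boolean connected(boolean[][] adjacency, int from, int to) {
        int size = adjacency.length;
        boolean[] visited = new boolean[size];
        Queue<Integer> queue = new ArrayDeque<>();
        queue.add(from);
        visited[from] = true;
        while (!queue.isEmpty()) {
            int node = queue.remove();
            if (node == to) {
                return true;
            }
            for (int c = 0; c < size; c++) {
                if (adjacency[node][c] && !visited[c]) {
                    visited[c] = true;
                    queue.add(c);
                }
            }
        }
        return false;
    }
}
```

The `visited` array also removes the quadratic membership test against the partially filled `int[]` in the original.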
diff --git a/features/src/main/java/org/apache/karaf/cellar/features/shell/UninstallFeatureCommand.java b/features/src/main/java/org/apache/karaf/cellar/features/shell/UninstallFeatureCommand.java index 5bb7db5f..2faf0409 100644 --- a/features/src/main/java/org/apache/karaf/cellar/features/shell/UninstallFeatureCommand.java +++ b/features/src/main/java/org/apache/karaf/cellar/features/shell/UninstallFeatureCommand.java @@ -1,81 +1,81 @@ /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.karaf.cellar.features.shell; import org.apache.karaf.cellar.core.Group; import org.apache.karaf.cellar.core.control.SwitchStatus; import org.apache.karaf.cellar.core.event.EventProducer; import org.apache.karaf.cellar.features.RemoteFeaturesEvent; import org.apache.karaf.features.FeatureEvent; import org.apache.karaf.shell.commands.Argument; import org.apache.karaf.shell.commands.Command; @Command(scope = "cluster", name = "feature-uninstall", description = "Uninstall a feature assigned to a cluster group.") public class UninstallFeatureCommand extends FeatureCommandSupport { @Argument(index = 0, name = "group", description = "The cluster group name.", required = true, multiValued = false) String groupName; @Argument(index = 1, name = "feature", description = "The feature name.", required = true, multiValued = false) String feature; @Argument(index = 2, name = "version", description = "The feature version.", required = false, multiValued = false) String version; private EventProducer eventProducer; @Override protected Object doExecute() throws Exception { // check if the group exists Group group = groupManager.findGroupByName(groupName); if (group == null) { System.err.println("Cluster group " + groupName + " doesn't exist"); return null; } // check if the producer is ON if (eventProducer.getSwitch().getStatus().equals(SwitchStatus.OFF)) { System.err.println("Cluster event producer is OFF for this node"); return null; } // check if the feature exists in the map if (!featureExists(groupName, feature, version)) { if (version != null) System.err.println("Feature " + feature + "/" + version + " doesn't exist for the cluster group " + groupName); else System.err.println("Feature " + feature + " doesn't exist for the cluster group " + groupName); return null; } - // update the distributed map - updateFeatureStatus(groupName, feature, version, true); + // update distributed set + updateFeatureStatus(groupName, feature, version, false); // broadcast the cluster event RemoteFeaturesEvent event = new RemoteFeaturesEvent(feature, version, FeatureEvent.EventType.FeatureUninstalled); event.setForce(true); event.setSourceGroup(group); eventProducer.produce(event); return null; } public EventProducer getEventProducer() { return eventProducer; } public void setEventProducer(EventProducer eventProducer) { this.eventProducer = eventProducer; } }
true
true
protected Object doExecute() throws Exception { // check if the group exists Group group = groupManager.findGroupByName(groupName); if (group == null) { System.err.println("Cluster group " + groupName + " doesn't exist"); return null; } // check if the producer is ON if (eventProducer.getSwitch().getStatus().equals(SwitchStatus.OFF)) { System.err.println("Cluster event producer is OFF for this node"); return null; } // check if the feature exists in the map if (!featureExists(groupName, feature, version)) { if (version != null) System.err.println("Feature " + feature + "/" + version + " doesn't exist for the cluster group " + groupName); else System.err.println("Feature " + feature + " doesn't exist for the cluster group " + groupName); return null; } // update the distributed map updateFeatureStatus(groupName, feature, version, true); // broadcast the cluster event RemoteFeaturesEvent event = new RemoteFeaturesEvent(feature, version, FeatureEvent.EventType.FeatureUninstalled); event.setForce(true); event.setSourceGroup(group); eventProducer.produce(event); return null; }
protected Object doExecute() throws Exception { // check if the group exists Group group = groupManager.findGroupByName(groupName); if (group == null) { System.err.println("Cluster group " + groupName + " doesn't exist"); return null; } // check if the producer is ON if (eventProducer.getSwitch().getStatus().equals(SwitchStatus.OFF)) { System.err.println("Cluster event producer is OFF for this node"); return null; } // check if the feature exists in the map if (!featureExists(groupName, feature, version)) { if (version != null) System.err.println("Feature " + feature + "/" + version + " doesn't exist for the cluster group " + groupName); else System.err.println("Feature " + feature + " doesn't exist for the cluster group " + groupName); return null; } // update distributed set updateFeatureStatus(groupName, feature, version, false); // broadcast the cluster event RemoteFeaturesEvent event = new RemoteFeaturesEvent(feature, version, FeatureEvent.EventType.FeatureUninstalled); event.setForce(true); event.setSourceGroup(group); eventProducer.produce(event); return null; }
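The behavioural change is the final argument to `updateFeatureStatus`: the uninstall command was recording the feature as installed (`true`) in the distributed set, and the fix records `false`; the comment rewrite ("distributed map" to "distributed set") just brings the prose in line with the backing structure. Bare boolean arguments are easy to get backwards at a call site; a hedged alternative (not Cellar's actual API) makes the state explicit:

```java
// Hedged alternative (not Cellar's actual API): replace the bare boolean
// with an explicit state so the call site cannot be read backwards.
enum FeatureState { INSTALLED, UNINSTALLED }

class ClusterFeatures {
    static void updateFeatureStatus(String group, String feature,
                                    String version, FeatureState state) {
        boolean installed = (state == FeatureState.INSTALLED);
        // ... write `installed` to the distributed set for (group, feature, version)
    }
}

// The call site now reads unambiguously:
//     updateFeatureStatus(groupName, feature, version, FeatureState.UNINSTALLED);
```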
diff --git a/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/provisional/p2/artifact/repository/ArtifactDescriptor.java b/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/provisional/p2/artifact/repository/ArtifactDescriptor.java index 3ff85074a..33c526325 100644 --- a/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/provisional/p2/artifact/repository/ArtifactDescriptor.java +++ b/bundles/org.eclipse.equinox.p2.artifact.repository/src/org/eclipse/equinox/internal/provisional/p2/artifact/repository/ArtifactDescriptor.java @@ -1,190 +1,191 @@ /******************************************************************************* * Copyright (c) 2007, 2008 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.equinox.internal.provisional.p2.artifact.repository; import java.util.Arrays; import java.util.Map; import org.eclipse.equinox.internal.p2.core.helpers.OrderedProperties; import org.eclipse.equinox.internal.provisional.p2.artifact.repository.processing.ProcessingStepDescriptor; import org.eclipse.equinox.internal.provisional.p2.metadata.IArtifactKey; /** * This represents information about a given artifact stored on a particular byte server. */ public class ArtifactDescriptor implements IArtifactDescriptor { public static final String ARTIFACT_REFERENCE = "artifact.reference"; //$NON-NLS-1$ private static final ProcessingStepDescriptor[] EMPTY_STEPS = new ProcessingStepDescriptor[0]; protected IArtifactKey key; // The key associated with this artifact // The list of post processing steps that must be applied one the artifact once it // has been downloaded (e.g, unpack, then md5 checksum, then...) protected ProcessingStepDescriptor[] processingSteps = EMPTY_STEPS; protected Map properties = new OrderedProperties(); protected Map repositoryProperties = new OrderedProperties(); protected transient IArtifactRepository repository; // QUESTION: Do we need any description or user readable name public ArtifactDescriptor(IArtifactDescriptor base) { super(); key = base.getArtifactKey(); processingSteps = base.getProcessingSteps(); properties.putAll(base.getProperties()); repository = base.getRepository(); // TODO this property is hardcoded for the blob store. // setProperty("artifact.uuid", base.getProperty("artifact.uuid")); } public ArtifactDescriptor(ArtifactDescriptor base) { super(); key = base.key; processingSteps = base.processingSteps; properties = base.properties; repository = base.repository; } public ArtifactDescriptor(IArtifactKey key) { super(); this.key = key; } public IArtifactKey getArtifactKey() { return key; } public String getProperty(String propertyKey) { return (String) properties.get(propertyKey); } public void setProperty(String key, String value) { if (value == null) properties.remove(key); else properties.put(key, value); } public void addProperties(Map additionalProperties) { properties.putAll(additionalProperties); } /** * Returns a read-only collection of the properties of the artifact descriptor. * @return the properties of this artifact descriptor. 
*/ public Map getProperties() { return OrderedProperties.unmodifiableProperties(properties); } public String getRepositoryProperty(String propertyKey) { return (String) repositoryProperties.get(propertyKey); } public void setRepositoryProperty(String key, String value) { if (value == null) repositoryProperties.remove(key); else repositoryProperties.put(key, value); } public void addRepositoryProperties(Map additionalProperties) { repositoryProperties.putAll(additionalProperties); } /** * Returns a read-only collection of the repository properties of the artifact descriptor. * @return the repository properties of this artifact descriptor. */ public Map getRepositoryProperties() { return OrderedProperties.unmodifiableProperties(repositoryProperties); } public ProcessingStepDescriptor[] getProcessingSteps() { return processingSteps; } public void setProcessingSteps(ProcessingStepDescriptor[] value) { processingSteps = value == null ? EMPTY_STEPS : value; } // Implementation of both equals and hash depends on the implementation of // SimpleArtifactRepository#getOutputStream(IArtifactDescriptor) public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ArtifactDescriptor other = (ArtifactDescriptor) obj; if (key == null) { if (other.key != null) return false; } else if (!key.equals(other.key)) return false; if (!Arrays.equals(processingSteps, other.processingSteps)) return false; //Properties affecting SimpleArtifactRepository#getLocation String locationProperty = getRepositoryProperty(ARTIFACT_REFERENCE); String otherProperty = other.getRepositoryProperty(ARTIFACT_REFERENCE); - if ((locationProperty != null && !locationProperty.equals(otherProperty)) || locationProperty != otherProperty) + // want not null and the same, or both null + if (locationProperty != null ? !locationProperty.equals(otherProperty) : otherProperty != null) return false; locationProperty = getProperty(FORMAT); otherProperty = other.getProperty(FORMAT); - if ((locationProperty != null && !locationProperty.equals(otherProperty)) || locationProperty != otherProperty) + if (locationProperty != null ? !locationProperty.equals(otherProperty) : otherProperty != null) return false; return true; } private int hashCode(Object[] array) { int prime = 31; if (array == null) return 0; int result = 1; for (int index = 0; index < array.length; index++) { result = prime * result + (array[index] == null ? 0 : array[index].hashCode()); } return result; } public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((key == null) ? 0 : key.hashCode()); result = prime * result + hashCode(processingSteps); String[] hashProperties = new String[] {getRepositoryProperty(ARTIFACT_REFERENCE), getProperty(FORMAT)}; result = prime * result + hashCode(hashProperties); return result; } public IArtifactRepository getRepository() { return repository; } public void setRepository(IArtifactRepository value) { repository = value; } public String toString() { String format = getProperty(IArtifactDescriptor.FORMAT); if (format == null) return "canonical: " + key.toString(); //$NON-NLS-1$ return format + ": " + key.toString(); //$NON-NLS-1$ } }
false
true
public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ArtifactDescriptor other = (ArtifactDescriptor) obj; if (key == null) { if (other.key != null) return false; } else if (!key.equals(other.key)) return false; if (!Arrays.equals(processingSteps, other.processingSteps)) return false; //Properties affecting SimpleArtifactRepository#getLocation String locationProperty = getRepositoryProperty(ARTIFACT_REFERENCE); String otherProperty = other.getRepositoryProperty(ARTIFACT_REFERENCE); if ((locationProperty != null && !locationProperty.equals(otherProperty)) || locationProperty != otherProperty) return false; locationProperty = getProperty(FORMAT); otherProperty = other.getProperty(FORMAT); if ((locationProperty != null && !locationProperty.equals(otherProperty)) || locationProperty != otherProperty) return false; return true; }
public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; ArtifactDescriptor other = (ArtifactDescriptor) obj; if (key == null) { if (other.key != null) return false; } else if (!key.equals(other.key)) return false; if (!Arrays.equals(processingSteps, other.processingSteps)) return false; //Properties affecting SimpleArtifactRepository#getLocation String locationProperty = getRepositoryProperty(ARTIFACT_REFERENCE); String otherProperty = other.getRepositoryProperty(ARTIFACT_REFERENCE); // want not null and the same, or both null if (locationProperty != null ? !locationProperty.equals(otherProperty) : otherProperty != null) return false; locationProperty = getProperty(FORMAT); otherProperty = other.getProperty(FORMAT); if (locationProperty != null ? !locationProperty.equals(otherProperty) : otherProperty != null) return false; return true; }
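The buggy comparison `(a != null && !a.equals(b)) || a != b` lets reference equality leak through: when both properties are non-null, distinct objects, and `equals`-equal, the first clause is false but `a != b` is true, so two descriptors with identical values compared unequal, disagreeing with the value-based `hashCode`. The fix is the classic ternary null-safe idiom, spelled out below; since Java 7 the JDK ships the same check as `java.util.Objects.equals`:

```java
class NullSafe {
    // Null-safe equality as used in the fix: equal when both are null,
    // or when the first is non-null and equals() the second.
    static boolean nullSafeEquals(Object a, Object b) {
        return a != null ? a.equals(b) : b == null;
    }
}

// Java 7+ equivalent from the JDK:
//     java.util.Objects.equals(a, b)
```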
diff --git a/base/src/edu/berkeley/cs/cs162/ChatServer.java b/base/src/edu/berkeley/cs/cs162/ChatServer.java index 2e72dc7..6960602 100644 --- a/base/src/edu/berkeley/cs/cs162/ChatServer.java +++ b/base/src/edu/berkeley/cs/cs162/ChatServer.java @@ -1,663 +1,664 @@ package edu.berkeley.cs.cs162; import java.io.BufferedReader; import java.io.EOFException; import java.io.IOException; import java.io.InputStreamReader; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketException; import java.sql.ResultSet; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * This is the core of the chat server. Put the management of groups * and users in here. You will need to control all of the threads, * and respond to requests from the test harness. * * It must implement the ChatServerInterface Interface, and you should * not modify that interface; it is necessary for testing. */ public class ChatServer extends Thread implements ChatServerInterface { private BlockingQueue<User> waiting_users; private Map<String, User> users; private Map<String, ChatGroup> groups; private Set<String> onlineNames; private Set<String> registeredUsers; private ReentrantReadWriteLock lock; private volatile boolean isDown; private final static int MAX_USERS = 100; private final static int MAX_WAITING_USERS = 10; private final static long TIMEOUT = 20; private ServerSocket mySocket; private ExecutorService pool; public ChatServer() { users = new HashMap<String, User>(); groups = new HashMap<String, ChatGroup>(); onlineNames = new HashSet<String>(); lock = new ReentrantReadWriteLock(true); waiting_users = new ArrayBlockingQueue<User>(MAX_WAITING_USERS); isDown = false; } public ChatServer(int port) throws IOException { users = new HashMap<String, User>(); groups = new HashMap<String, ChatGroup>(); onlineNames = new HashSet<String>(); lock = new ReentrantReadWriteLock(true); waiting_users = new ArrayBlockingQueue<User>(MAX_WAITING_USERS); isDown = false; pool = Executors.newFixedThreadPool(1000); try { mySocket = new ServerSocket(port); } catch (Exception e) { throw new IOException("Server socket creation failed"); } try { initStructures(); } catch (Exception e){ e.printStackTrace(); return; } this.start(); } private void initStructures() throws Exception { //initialize registeredUsers ResultSet Usernames = DBHandler.getUsers(); while(Usernames.next()) { String s = Usernames.getString("username"); registeredUsers.add(s); } //initialize groups as well as add group names to onlineNames ResultSet Groupnames = DBHandler.getGroups(); while(Groupnames.next()) { String g = Groupnames.getString("gname"); onlineNames.add(g); groups.put(g, new ChatGroup(g)); } ResultSet Members = DBHandler.getMemberships(); while(Members.next()) { String u = Members.getString("username"); String g = Members.getString("gname"); ChatGroup group = groups.get(g); 
if(group != null) group.addUser(u); } } public boolean isDown() { return isDown; } @Override public BaseUser getUser(String username) { BaseUser u; lock.readLock().lock(); u = users.get(username); lock.readLock().unlock(); return u; } public ChatGroup getGroup(String groupname) { ChatGroup group; lock.readLock().lock(); group = groups.get(groupname); lock.readLock().unlock(); return group; } public Set<String> getGroups() { Set<String> groupNames; lock.readLock().lock(); groupNames = this.groups.keySet(); lock.readLock().unlock(); return groupNames; } public Set<String> getActiveUsers() { Set<String> userNames; lock.readLock().lock(); userNames = users.keySet(); lock.readLock().unlock(); return userNames; } public Set<String> getAllUsers() { return registeredUsers; } public int getNumUsers(){ int num; lock.readLock().lock(); num = users.size(); lock.readLock().unlock(); return num; } public int getNumGroups(){ int num; lock.readLock().lock(); num = groups.size(); lock.readLock().unlock(); return num; } private void initUserGroups(User u){ ResultSet rs = DBHandler.getUserMemberships(u.getUsername()); try { while(rs.next()){ ChatGroup group = groups.get(rs.getString("gname")); group.addLoggedInUser(u.getUsername(), u); u.addToGroups(group.getName()); } } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } } public ServerReply addUser(String username, String password){ lock.writeLock().lock(); Set<String> allNames = new HashSet<String>(); allNames.addAll(onlineNames); allNames.addAll(registeredUsers); if(allNames.contains(username)) { lock.writeLock().unlock(); return ServerReply.REJECTED; } SecureRandom random = null; byte bytes[] = null; try { random = SecureRandom.getInstance("SHA1PRNG"); bytes = new byte[100]; random.nextBytes(bytes); } catch (NoSuchAlgorithmException e1) { System.err.println("no PRNG algorithm"); } String salt = bytes.toString(); String hash = hashPassword(password, salt); try { DBHandler.addUser(username, salt, hash); } catch(Exception e) { lock.writeLock().unlock(); return ServerReply.REJECTED; } registeredUsers.add(username); lock.writeLock().unlock(); return ServerReply.OK; } public void readlog(String username) throws SQLException{ lock.readLock().lock(); List<Message> unsentMessages = DBHandler.readAndClearLog(username); for (Message message : unsentMessages) { users.get(username).acceptMsg(message); } lock.readLock().unlock(); } @Override public LoginError login(String username) { return null; } public LoginError login(String username, String password) { lock.writeLock().lock(); if (isDown || onlineNames.contains(username) || !registeredUsers.contains(username)) { TestChatServer.logUserLoginFailed(username, new Date(), LoginError.USER_REJECTED); lock.writeLock().unlock(); return LoginError.USER_REJECTED; } LoginError error = loginAttempt(username, password); lock.writeLock().unlock(); return error; } public LoginError loginAttempt(String username, String password) { String salt; try { salt = DBHandler.getSalt(username); String hash = hashPassword(password, salt); if (hash == null || !hash.equals(DBHandler.getHashedPassword(username))) { TestChatServer.logUserLoginFailed(username, new Date(), LoginError.USER_REJECTED); return LoginError.USER_REJECTED; } } catch (SQLException e) { e.printStackTrace(); } if (users.size() >= MAX_USERS) { //exceeds capacity User newUser = new User(this, username); if(waiting_users.offer(newUser)) { //attempt to add to waiting queue onlineNames.add(username); return LoginError.USER_QUEUED; } else { //else 
drop user TestChatServer.logUserLoginFailed(username, new Date(), LoginError.USER_DROPPED); return LoginError.USER_DROPPED; } } User newUser = new User(this, username); users.put(username, newUser); onlineNames.add(username); newUser.connected(); TestChatServer.logUserLogin(username, new Date()); initUserGroups(newUser); return LoginError.USER_ACCEPTED; } public String hashPassword(String password, String salt) { String hashed = null; try { MessageDigest md = MessageDigest.getInstance("SHA-256"); String toHash = password + salt; md.update(toHash.getBytes()); MessageDigest tc1 = (MessageDigest) md.clone(); hashed = tc1.digest().toString(); } catch (Exception e) { System.err.println("oops"); } return hashed; } @Override public boolean logoff(String username) { lock.writeLock().lock(); if (!users.containsKey(username)){ User toRemove = null; for (User u : waiting_users) { if(u.getUsername().equals(username)) { u.logoff(); toRemove = u; } } if (toRemove != null) { waiting_users.remove(toRemove); onlineNames.remove(toRemove.getUsername()); lock.writeLock().unlock(); return true; } lock.writeLock().unlock(); return false; } ResultSet rs = DBHandler.getUserMemberships(username); try { while(rs.next()) { String g = rs.getString("gname"); ChatGroup c = groups.get(g); if(c != null) c.removeLoggedInUser(username); } } catch (SQLException e) { } users.get(username).logoff(); onlineNames.remove(username); users.remove(username); // Check for waiting users User newUser = waiting_users.poll(); if (newUser != null) { //add to ChatServer String newUsername = newUser.getUsername(); users.put(newUsername, newUser); TransportObject reply = new TransportObject(Command.login, ServerReply.OK); newUser.queueReply(reply); newUser.connected(); TestChatServer.logUserLogin(newUsername, new Date()); initUserGroups(newUser); } lock.writeLock().unlock(); return true; } public void joinAck(User user, String gname, ServerReply reply) { TransportObject toSend = new TransportObject(Command.join,gname,reply); user.queueReply(toSend); } public void leaveAck(User user, String gname, ServerReply reply) { TransportObject toSend = new TransportObject(Command.leave,gname,reply); user.queueReply(toSend); } public void startNewTimer(SocketParams params) throws IOException { List<Handler> task = new ArrayList<Handler>(); try { task.add(new Handler(params)); ObjectOutputStream sent = params.getOutputStream(); List<Future<Handler>> futures = pool.invokeAll(task, TIMEOUT, TimeUnit.SECONDS); if (futures.get(0).isCancelled()) { TransportObject sendObject = new TransportObject(ServerReply.timeout); sent.writeObject(sendObject); } } catch (Exception e) { e.printStackTrace(); } } @Override public boolean joinGroup(BaseUser baseUser, String groupname) { lock.writeLock().lock(); ChatGroup group; User user = (User) baseUser; boolean success = false; if (!users.keySet().contains(user.getUsername())) { lock.writeLock().unlock(); return false; } if (groups.containsKey(groupname)) { group = groups.get(groupname); success = group.joinGroup(user.getUsername(), user); if(user.getUserGroups().contains(groupname)){ joinAck(user,groupname,ServerReply.ALREADY_MEMBER); lock.writeLock().unlock(); return false; } if (success){ user.addToGroups(groupname); joinAck(user,groupname,ServerReply.OK_JOIN); TestChatServer.logUserJoinGroup(groupname, user.getUsername(), new Date()); } else joinAck(user,groupname,ServerReply.FAIL_FULL); lock.writeLock().unlock(); return success; } else { if (onlineNames.contains(groupname)){ 
joinAck(user,groupname,ServerReply.BAD_GROUP); lock.writeLock().unlock(); return false; } group = new ChatGroup(groupname); groups.put(groupname, group); success = group.joinGroup(user.getUsername(), user); user.addToGroups(groupname); TestChatServer.logUserJoinGroup(groupname, user.getUsername(), new Date()); if(success) joinAck(user,groupname,ServerReply.OK_CREATE); lock.writeLock().unlock(); return success; } } @Override public boolean leaveGroup(BaseUser baseUser, String groupname) { User user = (User) baseUser; lock.writeLock().lock(); ChatGroup group = groups.get(groupname); if (group == null){ leaveAck(user,groupname,ServerReply.BAD_GROUP); lock.writeLock().unlock(); return false; } if (group.leaveGroup(user.getUsername())) { leaveAck(user,groupname,ServerReply.OK); if(group.getNumUsers() <= 0) { groups.remove(group.getName()); onlineNames.remove(group.getName()); try { DBHandler.removeGroup(groupname); } catch (SQLException e) { System.err.println("unsuccessful group removal from database"); } } user.removeFromGroups(groupname); TestChatServer.logUserLeaveGroup(groupname, user.getUsername(), new Date()); String username = user.getUsername(); try { DBHandler.removeFromGroup(username, groupname); } catch (SQLException e) { System.err.println("unsuccessful membership deletion in database"); } lock.writeLock().unlock(); return true; } else { leaveAck(user,groupname,ServerReply.NOT_MEMBER); } lock.writeLock().unlock(); return false; } @Override public void shutdown() { lock.writeLock().lock(); Set<String> userNames = users.keySet(); for(String name: userNames){ users.get(name).logoff(); } users.clear(); groups.clear(); isDown = true; lock.writeLock().unlock(); } public MsgSendError processMessage(String source, String dest, String msg, int sqn, String timestamp) { Message message = new Message(timestamp, source, dest, msg); message.setSQN(sqn); lock.readLock().lock(); if (users.containsKey(source)) { //Valid destination user if (users.containsKey(dest)) { User destUser = users.get(dest); destUser.acceptMsg(message); } else if (registeredUsers.contains(dest)) { //Registered offline user try { DBHandler.writeLog(message, dest); } catch (SQLException e) { e.printStackTrace(); } } else if (groups.containsKey(dest)) { //Group destination message.setIsFromGroup(); ChatGroup group = groups.get(dest); MsgSendError sendError = group.forwardMessage(message); if (sendError == MsgSendError.NOT_IN_GROUP) { TestChatServer.logChatServerDropMsg(message.toString(), new Date()); lock.readLock().unlock(); return sendError; } else if (sendError == MsgSendError.MESSAGE_FAILED){ lock.readLock().unlock(); return sendError; } } else { TestChatServer.logChatServerDropMsg(message.toString(), new Date()); lock.readLock().unlock(); return MsgSendError.INVALID_DEST; } } else { TestChatServer.logChatServerDropMsg(message.toString(), new Date()); lock.readLock().unlock(); return MsgSendError.INVALID_SOURCE; } lock.readLock().unlock(); return MsgSendError.MESSAGE_SENT; } @Override public void run(){ while(!isDown){ List<Handler> task = new ArrayList<Handler>(); Socket newSocket; try { newSocket = mySocket.accept(); Handler handler = new Handler(newSocket); task.add(handler); Thread t = new FirstThread(task, handler); t.start(); } catch (IOException e) { e.printStackTrace(); } } } class FirstThread extends Thread { private List<Handler> task; private Handler handler; public FirstThread(List<Handler> task, Handler handler) { this.task = task; this.handler = handler; } public void run() { try { List<Future<Handler>> 
futures = pool.invokeAll(task, TIMEOUT, TimeUnit.SECONDS); if (futures.get(0).isCancelled()) { ObjectOutputStream sent = handler.sent; TransportObject sendObject = new TransportObject(ServerReply.timeout); sent.writeObject(sendObject); handler.socket.close(); } } catch (Exception e){ e.printStackTrace(); } } } class Handler implements Callable<ChatServer.Handler>, Runnable { private final Socket socket; Handler(Socket socket) throws IOException { this.socket = socket; received = new ObjectInputStream(socket.getInputStream()); sent = new ObjectOutputStream(socket.getOutputStream()); } Handler(SocketParams params) { this.socket = params.getMySocket(); received = params.getInputStream(); sent = params.getOutputStream(); } private ObjectInputStream received; private ObjectOutputStream sent; public void run() { } @Override public Handler call() throws Exception { TransportObject recObject = null; while(recObject == null) { try { recObject = (TransportObject) received.readObject(); } catch (EOFException e) { //System.err.println("user connection dropped/finished"); return null; } catch (SocketException e) { //System.err.println("user socket exception"); return null; } catch (Exception e) { e.printStackTrace(); return null; } if (recObject != null) { Command type = recObject.getCommand(); if (type == Command.login) { String username = recObject.getUsername(); - LoginError loginError = login(username); + String password = recObject.getPassword(); + LoginError loginError = login(username,password); TransportObject sendObject; if (loginError == LoginError.USER_ACCEPTED) { sendObject = new TransportObject(Command.login, ServerReply.OK); User newUser = (User) getUser(username); newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_QUEUED) { sendObject = new TransportObject(Command.login, ServerReply.QUEUED); User newUser = null; for(User u : waiting_users) { if(u.getUsername().equals(username)) newUser = u; } if(newUser != null) newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_DROPPED || loginError == LoginError.USER_REJECTED){ sendObject = new TransportObject(Command.login, ServerReply.REJECTED); recObject = null; } else { sendObject = new TransportObject(ServerReply.error); recObject = null; } try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } else if (type == Command.adduser) { String username = recObject.getUsername(); String password = recObject.getPassword(); ServerReply reply = addUser(username,password); TransportObject sendObject = new TransportObject(type,reply); try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } } } return null; } } public static void main(String[] args) throws Exception{ if (args.length != 1) { throw new Exception("Invalid number of args to command"); } int port = Integer.parseInt(args[0]); ChatServer chatServer = new ChatServer(port); BufferedReader commands = new BufferedReader(new InputStreamReader(System.in)); while(!chatServer.isDown()){ String line = commands.readLine(); String[] tokens = line.split(" "); if(tokens[0].equals("users")){ if(tokens.length==1) // get users System.out.println(chatServer.getAllUsers()); else { // get users from a specific group ChatGroup group = chatServer.getGroup(tokens[1]); if(group==null) System.out.println("no such group: " + tokens[1]); else{ Map<String,User> userList = group.getUserList(); System.out.println(userList.keySet()); } } } else if(tokens[0].equals("groups")){ 
System.out.println(chatServer.getGroups()); } else if (tokens[0].equals("active-users")){ System.out.println(chatServer.getActiveUsers()); } else if (tokens[0].equals("shutdown")){ chatServer.shutdown(); } } } }
true
true
public Handler call() throws Exception { TransportObject recObject = null; while(recObject == null) { try { recObject = (TransportObject) received.readObject(); } catch (EOFException e) { //System.err.println("user connection dropped/finished"); return null; } catch (SocketException e) { //System.err.println("user socket exception"); return null; } catch (Exception e) { e.printStackTrace(); return null; } if (recObject != null) { Command type = recObject.getCommand(); if (type == Command.login) { String username = recObject.getUsername(); LoginError loginError = login(username); TransportObject sendObject; if (loginError == LoginError.USER_ACCEPTED) { sendObject = new TransportObject(Command.login, ServerReply.OK); User newUser = (User) getUser(username); newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_QUEUED) { sendObject = new TransportObject(Command.login, ServerReply.QUEUED); User newUser = null; for(User u : waiting_users) { if(u.getUsername().equals(username)) newUser = u; } if(newUser != null) newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_DROPPED || loginError == LoginError.USER_REJECTED){ sendObject = new TransportObject(Command.login, ServerReply.REJECTED); recObject = null; } else { sendObject = new TransportObject(ServerReply.error); recObject = null; } try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } else if (type == Command.adduser) { String username = recObject.getUsername(); String password = recObject.getPassword(); ServerReply reply = addUser(username,password); TransportObject sendObject = new TransportObject(type,reply); try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } } } return null; }
public Handler call() throws Exception { TransportObject recObject = null; while(recObject == null) { try { recObject = (TransportObject) received.readObject(); } catch (EOFException e) { //System.err.println("user connection dropped/finished"); return null; } catch (SocketException e) { //System.err.println("user socket exception"); return null; } catch (Exception e) { e.printStackTrace(); return null; } if (recObject != null) { Command type = recObject.getCommand(); if (type == Command.login) { String username = recObject.getUsername(); String password = recObject.getPassword(); LoginError loginError = login(username,password); TransportObject sendObject; if (loginError == LoginError.USER_ACCEPTED) { sendObject = new TransportObject(Command.login, ServerReply.OK); User newUser = (User) getUser(username); newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_QUEUED) { sendObject = new TransportObject(Command.login, ServerReply.QUEUED); User newUser = null; for(User u : waiting_users) { if(u.getUsername().equals(username)) newUser = u; } if(newUser != null) newUser.setSocket(socket, received, sent); } else if (loginError == LoginError.USER_DROPPED || loginError == LoginError.USER_REJECTED){ sendObject = new TransportObject(Command.login, ServerReply.REJECTED); recObject = null; } else { sendObject = new TransportObject(ServerReply.error); recObject = null; } try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } else if (type == Command.adduser) { String username = recObject.getUsername(); String password = recObject.getPassword(); ServerReply reply = addUser(username,password); TransportObject sendObject = new TransportObject(type,reply); try { sent.writeObject(sendObject); } catch (IOException e) { e.printStackTrace(); } } } } return null; }
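The fix threads the client-supplied password through to `login(String, String)`, which performs the salt lookup and hash comparison. The interface-mandated single-argument overload is a stub that returns null, so before the change every login fell through the enum comparisons into the generic `ServerReply.error` branch. A hedged sketch of how the two overloads relate (placeholder body mirroring the class above, not the real implementation):

```java
// Hedged sketch: the one-argument overload exists only to satisfy
// ChatServerInterface and always returns null.
class LoginApi {
    enum LoginError { USER_ACCEPTED, USER_QUEUED, USER_DROPPED, USER_REJECTED }

    @Deprecated // interface stub -- callers must use the two-argument form
    public LoginError login(String username) {
        return null;
    }

    public LoginError login(String username, String password) {
        // ... salt lookup and hash comparison, as in loginAttempt(...) ...
        return LoginError.USER_ACCEPTED; // placeholder result
    }
}
```

Deprecating the stub makes the hazard visible at every call site instead of only at runtime.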
diff --git a/Chat/src/Communications/UDPReceiver.java b/Chat/src/Communications/UDPReceiver.java index 9a05340..eea61f6 100644 --- a/Chat/src/Communications/UDPReceiver.java +++ b/Chat/src/Communications/UDPReceiver.java @@ -1,81 +1,84 @@ package Communications; import java.io.*; import java.net.SocketException; import java.util.concurrent.*; import Messages.Message; import Utilities.Parser; public class UDPReceiver { protected PipedInputStream data=null; protected UDPReceiverThread thread=null; protected Thread t; protected ConcurrentLinkedQueue<Byte> queue=new ConcurrentLinkedQueue<Byte>(); protected boolean needsMore=false; int size=132; int moreNeeded=Integer.MAX_VALUE; byte[] current=null; byte[] body=null; Parser parser=new Parser(); public UDPReceiver() throws SocketException{ data=new PipedInputStream(); thread=new UDPReceiverThread(queue); t=(new Thread(thread)); t.start(); } // public byte[] getPacket(){ // if(queue.size()<size){ // return new byte[0]; // } // else{ // byte[] packet = new byte[size]; // for(int i=0;i<size;i++){ // packet[i]=queue.poll(); // } // } // } public Message read(){ if (!needsMore) { if (queue.size() >= size) { + System.out.println(queue.size()); System.out.println("getting first"); current = new byte[size]; for (int i = 0; i < size; i++) { current[i] = queue.poll(); } + System.out.println(queue.size()); moreNeeded=parser.parse(current); if(queue.size()>=moreNeeded){ System.out.println("Second is there alredy"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } + System.out.println(queue.size()); needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } needsMore=true; return null; } else { return null; } } else if(queue.size()>=moreNeeded){ System.out.println("getting second"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } return null; } public void stop() throws InterruptedException{ thread.setRunning(false); t.join(); } }
false
true
public Message read(){ if (!needsMore) { if (queue.size() >= size) { System.out.println("getting first"); current = new byte[size]; for (int i = 0; i < size; i++) { current[i] = queue.poll(); } moreNeeded=parser.parse(current); if(queue.size()>=moreNeeded){ System.out.println("Second is there alredy"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } needsMore=true; return null; } else { return null; } } else if(queue.size()>=moreNeeded){ System.out.println("getting second"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } return null; }
public Message read(){ if (!needsMore) { if (queue.size() >= size) { System.out.println(queue.size()); System.out.println("getting first"); current = new byte[size]; for (int i = 0; i < size; i++) { current[i] = queue.poll(); } System.out.println(queue.size()); moreNeeded=parser.parse(current); if(queue.size()>=moreNeeded){ System.out.println("Second is there alredy"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } System.out.println(queue.size()); needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } needsMore=true; return null; } else { return null; } } else if(queue.size()>=moreNeeded){ System.out.println("getting second"); for (int i = 0; i < moreNeeded; i++) { body[i] = queue.poll(); } needsMore=false; moreNeeded=Integer.MAX_VALUE; return parser.addBody(body); } return null; }
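Editor's note: the change in this record is purely diagnostic, adding three System.out.println(queue.size()) calls to trace how many bytes remain queued around each drain. Worth flagging while reading: both versions poll into `body` without ever allocating it (the field is declared as byte[] body=null), so reaching body[i] = queue.poll() would throw a NullPointerException; a working reader would need body = new byte[moreNeeded] first. A small sketch of the header-then-body framing under that assumption:

```java
import java.util.concurrent.ConcurrentLinkedQueue;

public class FramingSketch {
    // Drain exactly n bytes from the queue into a freshly allocated buffer
    // (the allocation is the step the original read() is missing for `body`).
    static byte[] drain(ConcurrentLinkedQueue<Byte> q, int n) {
        byte[] out = new byte[n];
        for (int i = 0; i < n; i++) {
            out[i] = q.poll();
        }
        return out;
    }

    public static void main(String[] args) {
        ConcurrentLinkedQueue<Byte> q = new ConcurrentLinkedQueue<>();
        for (int i = 0; i < 8; i++) {
            q.add((byte) i);
        }
        byte[] header = drain(q, 4); // fixed-size header, like the 132-byte header above
        System.out.println(header.length + " header bytes read, " + q.size() + " bytes left");
    }
}
```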
diff --git a/Chess/src/com/nanu/chess/gui/BoardPanel.java b/Chess/src/com/nanu/chess/gui/BoardPanel.java index be0349e..ab330de 100644 --- a/Chess/src/com/nanu/chess/gui/BoardPanel.java +++ b/Chess/src/com/nanu/chess/gui/BoardPanel.java @@ -1,105 +1,106 @@ package com.nanu.chess.gui; import java.awt.Graphics; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import javax.swing.JPanel; import com.nanu.chess.board.Board; import com.nanu.chess.board.Square; import com.nanu.chess.gui.GUIConstants; import com.nanu.chess.support.Team; @SuppressWarnings("serial") public class BoardPanel extends JPanel { Board _board; Team _team; public BoardPanel (Board board) { _board = board; _team = Team.WHITE; } public void setTeam(Team team) { _team = team; repaint(); } public void paintComponent(Graphics g) { for ( int i = 0; i < 8; i++ ) { for ( int j = 0; j < 8; j++ ) { if ( ((i+j)%2 == 0 && _team.equals(Team.WHITE)) || ((i+j)%2 == 1 && _team.equals(Team.BLACK)) ) g.setColor(GUIConstants.square.BLACK); if ( ((i+j)%2 == 0 && _team.equals(Team.BLACK)) || ((i+j)%2 == 1 && _team.equals(Team.WHITE)) ) g.setColor(GUIConstants.square.WHITE); if ( _board.getSquare(i, j) == curSquare ) g.setColor(GUIConstants.square.SELECTED); g.fillRect(GUIConstants.PADDING+i*GUIConstants.SQUARE_WIDTH, GUIConstants.PADDING+j*GUIConstants.SQUARE_HEIGHT, GUIConstants.SQUARE_WIDTH, GUIConstants.SQUARE_HEIGHT); if ( _board.getSquare(i,j).getPiece() != null ) { GUIConstants.piece.getIcon(_board.getSquare(i,j).getPiece()).paintIcon( this, g, GUIConstants.PADDING+i*GUIConstants.SQUARE_WIDTH+ (GUIConstants.SQUARE_WIDTH-GUIConstants.piece.ICON_WIDTH)/2, GUIConstants.PADDING+j*GUIConstants.SQUARE_HEIGHT+ (GUIConstants.SQUARE_HEIGHT-GUIConstants.piece.ICON_HEIGHT)/2); } } } } public String getMove() { boolean validMove = false; ClickListener click = new ClickListener(); this.addMouseListener(click); Square start = null, end = null; while( !validMove ) { synchronized(lock) { try { lock.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } if ( curSquare.getPiece() != null && curSquare.getPiece().getTeam().equals(_team) ) { start = curSquare; repaint(); } else if ( start != null && start.getPiece().getLegalMoves(_board, start).contains(curSquare) ) { end = curSquare; curSquare = null; validMove = true; } else { + start = null; curSquare = null; repaint(); } } this.removeMouseListener(click); end.setPiece(start.getPiece()); start.setPiece(null); repaint(); return (-start.getX()+7)+""+(-start.getY()+7)+","+(-end.getX()+7)+""+(-end.getY()+7); } public class ClickListener extends MouseAdapter { public void mouseClicked(MouseEvent e) { int x = e.getX(); int y = e.getY(); if ( x > GUIConstants.PADDING && x < GUIConstants.DISPLAY_WIDTH - GUIConstants.PADDING && y > GUIConstants.PADDING && y < GUIConstants.DISPLAY_HEIGHT - GUIConstants.PADDING ) { x = (x - GUIConstants.PADDING)/GUIConstants.SQUARE_WIDTH; y = (y - GUIConstants.PADDING)/GUIConstants.SQUARE_HEIGHT; curSquare = _board.getSquare(x,y); synchronized(lock) { lock.notify(); } } } } private Object lock = new Object(); private Square curSquare; @SuppressWarnings("unused") private Square hoverSquare; }
true
true
public String getMove() { boolean validMove = false; ClickListener click = new ClickListener(); this.addMouseListener(click); Square start = null, end = null; while( !validMove ) { synchronized(lock) { try { lock.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } if ( curSquare.getPiece() != null && curSquare.getPiece().getTeam().equals(_team) ) { start = curSquare; repaint(); } else if ( start != null && start.getPiece().getLegalMoves(_board, start).contains(curSquare) ) { end = curSquare; curSquare = null; validMove = true; } else { curSquare = null; repaint(); } } this.removeMouseListener(click); end.setPiece(start.getPiece()); start.setPiece(null); repaint(); return (-start.getX()+7)+""+(-start.getY()+7)+","+(-end.getX()+7)+""+(-end.getY()+7); }
public String getMove() { boolean validMove = false; ClickListener click = new ClickListener(); this.addMouseListener(click); Square start = null, end = null; while( !validMove ) { synchronized(lock) { try { lock.wait(); } catch (InterruptedException e) { e.printStackTrace(); } } if ( curSquare.getPiece() != null && curSquare.getPiece().getTeam().equals(_team) ) { start = curSquare; repaint(); } else if ( start != null && start.getPiece().getLegalMoves(_board, start).contains(curSquare) ) { end = curSquare; curSquare = null; validMove = true; } else { start = null; curSquare = null; repaint(); } } this.removeMouseListener(click); end.setPiece(start.getPiece()); start.setPiece(null); repaint(); return (-start.getX()+7)+""+(-start.getY()+7)+","+(-end.getX()+7)+""+(-end.getY()+7); }
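Editor's note: the one-line fix adds start = null to the rejection branch. Without it, clicking an invalid square leaves the previous selection live, so a later click on a legal destination silently completes a move the player had abandoned. A reduced sketch of that selection state machine; the square indices and boolean flags stand in for the real Square and piece checks:

```java
public class SelectionSketch {
    static Integer start = null; // currently selected square, if any

    static String click(int square, boolean ownPiece, boolean legalTarget) {
        if (ownPiece) {
            start = square;
            return "selected " + square;
        }
        if (start != null && legalTarget) {
            String move = start + "->" + square;
            start = null;
            return move;
        }
        start = null; // the fix: drop the stale selection on an invalid click
        return "selection cleared";
    }

    public static void main(String[] args) {
        System.out.println(click(12, true, false));  // selected 12
        System.out.println(click(40, false, false)); // selection cleared (old code kept 12 live)
        System.out.println(click(20, false, true));  // cleared again: no accidental 12->20 move
    }
}
```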
diff --git a/src/main/java/kr/co/vcnc/haeinsa/HaeinsaTransactionManager.java b/src/main/java/kr/co/vcnc/haeinsa/HaeinsaTransactionManager.java index b82cffa..be42c1a 100644 --- a/src/main/java/kr/co/vcnc/haeinsa/HaeinsaTransactionManager.java +++ b/src/main/java/kr/co/vcnc/haeinsa/HaeinsaTransactionManager.java @@ -1,178 +1,178 @@ /** * Copyright (C) 2013 VCNC, inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kr.co.vcnc.haeinsa; import java.io.IOException; import java.nio.ByteBuffer; import javax.annotation.Nullable; import kr.co.vcnc.haeinsa.thrift.generated.TRowKey; import kr.co.vcnc.haeinsa.thrift.generated.TRowLock; import kr.co.vcnc.haeinsa.thrift.generated.TRowLockState; /** * Manager class of {@link HaeinsaTransaction}. * This class contains {@link HaeinsaTablePool} inside to provide tablePool when user want to access * HBase through {@link HaeinsaTransaction} with {@link HaeinsaTable} and execute transaction. * <p> * HaeinsaTransactionManager also provides method to recover failed transaction from TRowLock in HBase * which can be used to clear it up or complete it. */ public class HaeinsaTransactionManager { private final HaeinsaTablePool tablePool; /** * Constructor for TransactionManager * * @param tablePool HaeinsaTablePool to access HBase. */ public HaeinsaTransactionManager(HaeinsaTablePool tablePool) { this.tablePool = tablePool; } /** * Get {@link HaeinsaTransaction} instance which can be used to start new * transaction. * <p> * This method is thread-safe. * * @return new Transaction instance have reference to this manager instance. */ public HaeinsaTransaction begin() { return new HaeinsaTransaction(this); } /** * Make new {@link HaeinsaTransaction} instance which can be used to recover * other failed/uncompleted transaction. Also read and recover primaryRowKey and primaryRowLock * from failed transaction on HBase. * <p> * This method is thread-safe. * * @param tableName TableName of Transaction to recover. * @param row Row of Transaction to recover. * @return Transaction instance if there is any ongoing Transaction on row, * return null otherwise. * @throws IOException */ @Nullable protected HaeinsaTransaction getTransaction(byte[] tableName, byte[] row) throws IOException { - TRowLock startUnstableRowLock = getUnstableRowLock(tableName, row); + TRowLock unstableRowLock = getUnstableRowLock(tableName, row); - if (startUnstableRowLock == null) { + if (unstableRowLock == null) { // There is no on-going transaction on row. return null; } TRowLock primaryRowLock = null; TRowKey primaryRowKey = null; - if (!startUnstableRowLock.isSetPrimary()) { + if (!unstableRowLock.isSetPrimary()) { // this row is primary row, because primary field is not set. 
primaryRowKey = new TRowKey(ByteBuffer.wrap(tableName), ByteBuffer.wrap(row)); - primaryRowLock = startUnstableRowLock; + primaryRowLock = unstableRowLock; } else { - primaryRowKey = startUnstableRowLock.getPrimary(); + primaryRowKey = unstableRowLock.getPrimary(); primaryRowLock = getUnstableRowLock(primaryRowKey.getTableName(), primaryRowKey.getRow()); } if (primaryRowLock == null) { return null; } return getTransactionFromPrimary(primaryRowKey, primaryRowLock); } /** * @param tableName * @param row * @return null if TRowLock is {@link TRowLockState#STABLE}, otherwise * return rowLock from HBase. * @throws IOException */ private TRowLock getUnstableRowLock(byte[] tableName, byte[] row) throws IOException { TRowLock rowLock = null; try (HaeinsaTableIfaceInternal table = tablePool.getTableInternal(tableName)) { // access to HBase rowLock = table.getRowLock(row); } if (rowLock.getState() == TRowLockState.STABLE) { return null; } else { return rowLock; } } /** * Recover TRowLocks of failed HaeinsaTransaction from primary row on HBase. * Transaction information about secondary rows are recovered with {@link #addSecondaryRowLock()}. * HaeinsaTransaction made by this method do not assign proper values on mutations variable. * * @param rowKey * @param primaryRowLock * @return * @throws IOException */ private HaeinsaTransaction getTransactionFromPrimary(TRowKey rowKey, TRowLock primaryRowLock) throws IOException { HaeinsaTransaction transaction = new HaeinsaTransaction(this); transaction.setPrimary(rowKey); transaction.setCommitTimestamp(primaryRowLock.getCommitTimestamp()); HaeinsaTableTransaction primaryTableTxState = transaction.createOrGetTableState(rowKey.getTableName()); HaeinsaRowTransaction primaryRowTxState = primaryTableTxState.createOrGetRowState(rowKey.getRow()); primaryRowTxState.setCurrent(primaryRowLock); if (primaryRowLock.getSecondariesSize() > 0) { for (TRowKey secondaryRow : primaryRowLock.getSecondaries()) { addSecondaryRowLock(transaction, secondaryRow); } } return transaction; } /** * Recover TRowLock of secondary row inferred from {@link TRowLock#secondaries} field of primary row lock. * <p> * If target secondary row is in stable state, the row does not included in recovered HaeinsaTransaction * because it suggest that this secondary row is already stabled by previous failed transaction. * <p> * Secondary row is not included in recovered transaction neither when commitTimestamp is different with primary row's, * because it implicates that the row is locked by other transaction. * <p> * As similar to {@link #getTransactionFromPrimary()}, rowTransaction added by this method do not have * proper mutations variable. * * @param transaction * @param rowKey * @throws IOException */ private void addSecondaryRowLock(HaeinsaTransaction transaction, TRowKey rowKey) throws IOException { TRowLock unstableRowLock = getUnstableRowLock(rowKey.getTableName(), rowKey.getRow()); if (unstableRowLock == null) { return; } // if the commitTimestamp differs, this row belongs to another transaction and must not be added if (unstableRowLock.getCommitTimestamp() != transaction.getCommitTimestamp()) { return; } HaeinsaTableTransaction tableState = transaction.createOrGetTableState(rowKey.getTableName()); HaeinsaRowTransaction rowState = tableState.createOrGetRowState(rowKey.getRow()); rowState.setCurrent(unstableRowLock); } /** * @return HaeinsaTablePool contained in TransactionManager */ public HaeinsaTablePool getTablePool() { return tablePool; } }
false
true
protected HaeinsaTransaction getTransaction(byte[] tableName, byte[] row) throws IOException { TRowLock startUnstableRowLock = getUnstableRowLock(tableName, row); if (startUnstableRowLock == null) { // There is no on-going transaction on row. return null; } TRowLock primaryRowLock = null; TRowKey primaryRowKey = null; if (!startUnstableRowLock.isSetPrimary()) { // this row is primary row, because primary field is not set. primaryRowKey = new TRowKey(ByteBuffer.wrap(tableName), ByteBuffer.wrap(row)); primaryRowLock = startUnstableRowLock; } else { primaryRowKey = startUnstableRowLock.getPrimary(); primaryRowLock = getUnstableRowLock(primaryRowKey.getTableName(), primaryRowKey.getRow()); } if (primaryRowLock == null) { return null; } return getTransactionFromPrimary(primaryRowKey, primaryRowLock); }
protected HaeinsaTransaction getTransaction(byte[] tableName, byte[] row) throws IOException { TRowLock unstableRowLock = getUnstableRowLock(tableName, row); if (unstableRowLock == null) { // There is no on-going transaction on row. return null; } TRowLock primaryRowLock = null; TRowKey primaryRowKey = null; if (!unstableRowLock.isSetPrimary()) { // this row is primary row, because primary field is not set. primaryRowKey = new TRowKey(ByteBuffer.wrap(tableName), ByteBuffer.wrap(row)); primaryRowLock = unstableRowLock; } else { primaryRowKey = unstableRowLock.getPrimary(); primaryRowLock = getUnstableRowLock(primaryRowKey.getTableName(), primaryRowKey.getRow()); } if (primaryRowLock == null) { return null; } return getTransactionFromPrimary(primaryRowKey, primaryRowLock); }
diff --git a/src/com/dmdirc/addons/ui_swing/dialogs/aliases/AliasManagerLinker.java b/src/com/dmdirc/addons/ui_swing/dialogs/aliases/AliasManagerLinker.java index bd370f79..f05f309a 100644 --- a/src/com/dmdirc/addons/ui_swing/dialogs/aliases/AliasManagerLinker.java +++ b/src/com/dmdirc/addons/ui_swing/dialogs/aliases/AliasManagerLinker.java @@ -1,318 +1,327 @@ /* * Copyright (c) 2006-2014 DMDirc Developers * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.dmdirc.addons.ui_swing.dialogs.aliases; import com.dmdirc.addons.ui_swing.UIUtilities; import com.dmdirc.addons.ui_swing.components.GenericListModel; import com.dmdirc.addons.ui_swing.components.renderers.PropertyListCellRenderer; import com.dmdirc.addons.ui_swing.components.vetoable.VetoableListSelectionModel; import com.dmdirc.addons.ui_swing.dialogs.StandardInputDialog; import com.dmdirc.commandparser.aliases.Alias; import com.dmdirc.interfaces.ui.AliasDialogModel; import com.dmdirc.ui.IconManager; import com.dmdirc.ui.core.aliases.AliasDialogModelAdapter; import com.google.common.base.Optional; import java.awt.Dialog; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyVetoException; import java.beans.VetoableChangeListener; import javax.swing.JButton; import javax.swing.JList; import javax.swing.JScrollPane; import javax.swing.JSpinner; import javax.swing.JTextArea; import javax.swing.JTextField; import javax.swing.SpinnerNumberModel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; /** * Links the Alias Manager Dialog with its controller and model. 
*/ public class AliasManagerLinker { private final AliasDialogModel model; private final AliasManagerDialog dialog; private final IconManager iconManager; public AliasManagerLinker( final AliasDialogModel model, final AliasManagerDialog dialog, final IconManager iconManager) { this.model = model; this.dialog = dialog; this.iconManager = iconManager; } public void bindCommandList(final JList<Alias> commandList) { final GenericListModel<Alias> commandModel = new GenericListModel<>(); final VetoableListSelectionModel selectionModel = new VetoableListSelectionModel(); commandList.setCellRenderer(new PropertyListCellRenderer<>(commandList.getCellRenderer(), Alias.class, "name")); commandList.setModel(commandModel); commandList.setSelectionModel(selectionModel); commandList.getSelectionModel().addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(final ListSelectionEvent e) { if (e.getValueIsAdjusting()) { return; } final int index = commandList.getSelectedIndex(); if (index == -1) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (commandModel.getSize() == 0) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (index >= commandModel.getSize()) { model.setSelectedAlias(Optional.fromNullable(commandModel. getElementAt(index - 1))); } else { model.setSelectedAlias(Optional.fromNullable(commandModel.getElementAt(index))); } } }); selectionModel.addVetoableSelectionListener(new VetoableChangeListener() { @Override public void vetoableChange(final PropertyChangeEvent evt) throws PropertyVetoException { if (!model.isChangeAliasAllowed()) { throw new PropertyVetoException("Currently selected alias is invalid.", evt); } } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasRenamed(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasEdited(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasRemoved(final Alias alias) { + final int index = commandModel.indexOf(alias); commandModel.remove(alias); + if (index >= commandModel.getSize()) { + model.setSelectedAlias(Optional.fromNullable( + commandModel.getElementAt(commandModel.getSize() - 1))); + } else if (index == -1 && !commandModel.isEmpty()) { + model.setSelectedAlias(Optional.fromNullable(commandModel.get(0))); + } else { + model.setSelectedAlias(Optional.fromNullable(commandModel.get(index))); + } } @Override public void aliasAdded(final Alias alias) { commandModel.add(alias); commandList.getSelectionModel().setSelectionInterval( commandModel.indexOf(alias), commandModel.indexOf(alias)); } @Override public void aliasSelectionChanged(final Optional<Alias> alias) { final int index; if (alias.isPresent()) { index = commandModel.indexOf(alias.get()); } else { index = -1; } if (index != selectionModel.getLeadSelectionIndex()) { selectionModel.setLeadSelectionIndex(index); } } }); } public void bindCommand(final JTextField command) { command.setEnabled(false); command.getDocument().addDocumentListener(new DocumentListener() { private void update() { model.setSelectedAliasName(command.getText()); } @Override public void insertUpdate(final DocumentEvent e) { update(); } @Override public void removeUpdate(final DocumentEvent e) { update(); } @Override public void changedUpdate(final DocumentEvent e) { update(); } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasSelectionChanged(final Optional<Alias> alias) { 
command.setEnabled(model.isCommandValid()); command.setText(model.getSelectedAliasName()); } }); } public void bindArgumentsNumber(final JSpinner argumentsNumber) { argumentsNumber.setEnabled(false); argumentsNumber.setModel(new SpinnerNumberModel(0, 0, Integer.MAX_VALUE, 1)); argumentsNumber.addChangeListener(new ChangeListener() { @Override public void stateChanged(final ChangeEvent e) { model.setSelectedAliasMinimumArguments((Integer) argumentsNumber.getValue()); } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasSelectionChanged(final Optional<Alias> alias) { argumentsNumber.setEnabled(model.isMinimumArgumentsValid()); argumentsNumber.setValue(model.getSelectedAliasMininumArguments()); } }); } public void bindResponse(final JTextArea response, final JScrollPane responseScroll) { response.setEnabled(false); response.getDocument().addDocumentListener(new DocumentListener() { private void update() { model.setSelectedAliasSubstitution(response.getText()); } @Override public void insertUpdate(final DocumentEvent e) { update(); } @Override public void removeUpdate(final DocumentEvent e) { update(); } @Override public void changedUpdate(final DocumentEvent e) { update(); } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasSelectionChanged(final Optional<Alias> alias) { response.setEnabled(model.isSubstitutionValid()); response.setText(model.getSelectedAliasSubstitution()); UIUtilities.resetScrollPane(responseScroll); } }); } public void bindAddAlias(final JButton addAlias) { addAlias.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { new StandardInputDialog(dialog, Dialog.ModalityType.DOCUMENT_MODAL, iconManager, "Add Alias", "Enter the alias name", model.getNewCommandValidator()) { private static final long serialVersionUID = 3; @Override public boolean save() { model.addAlias(getText(), 0, getText()); return true; } @Override public void cancelled() { } }.display(); } }); } public void bindDeleteAlias(final JButton deleteAlias) { deleteAlias.setEnabled(false); deleteAlias.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { final Optional<Alias> alias = model.getSelectedAlias(); if (alias.isPresent()) { model.removeAlias(alias.get().getName()); } } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasSelectionChanged(final Optional<Alias> alias) { deleteAlias.setEnabled(model.getSelectedAlias().isPresent()); } }); } public void bindOKButton(final JButton okButton) { okButton.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { model.save(); dialog.dispose(); } }); model.addListener(new AliasDialogModelAdapter() { @Override public void selectedAliasEdited(String name, int minArgs, String sub) { okButton.setEnabled(model.isSelectedAliasValid()); } }); } public void bindCancelButton(final JButton cancelButton) { cancelButton.setEnabled(true); cancelButton.addActionListener(new ActionListener() { @Override public void actionPerformed(final ActionEvent e) { dialog.dispose(); } }); } }
false
true
public void bindCommandList(final JList<Alias> commandList) { final GenericListModel<Alias> commandModel = new GenericListModel<>(); final VetoableListSelectionModel selectionModel = new VetoableListSelectionModel(); commandList.setCellRenderer(new PropertyListCellRenderer<>(commandList.getCellRenderer(), Alias.class, "name")); commandList.setModel(commandModel); commandList.setSelectionModel(selectionModel); commandList.getSelectionModel().addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(final ListSelectionEvent e) { if (e.getValueIsAdjusting()) { return; } final int index = commandList.getSelectedIndex(); if (index == -1) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (commandModel.getSize() == 0) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (index >= commandModel.getSize()) { model.setSelectedAlias(Optional.fromNullable(commandModel. getElementAt(index - 1))); } else { model.setSelectedAlias(Optional.fromNullable(commandModel.getElementAt(index))); } } }); selectionModel.addVetoableSelectionListener(new VetoableChangeListener() { @Override public void vetoableChange(final PropertyChangeEvent evt) throws PropertyVetoException { if (!model.isChangeAliasAllowed()) { throw new PropertyVetoException("Currently selected alias is invalid.", evt); } } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasRenamed(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasEdited(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasRemoved(final Alias alias) { commandModel.remove(alias); } @Override public void aliasAdded(final Alias alias) { commandModel.add(alias); commandList.getSelectionModel().setSelectionInterval( commandModel.indexOf(alias), commandModel.indexOf(alias)); } @Override public void aliasSelectionChanged(final Optional<Alias> alias) { final int index; if (alias.isPresent()) { index = commandModel.indexOf(alias.get()); } else { index = -1; } if (index != selectionModel.getLeadSelectionIndex()) { selectionModel.setLeadSelectionIndex(index); } } }); }
public void bindCommandList(final JList<Alias> commandList) { final GenericListModel<Alias> commandModel = new GenericListModel<>(); final VetoableListSelectionModel selectionModel = new VetoableListSelectionModel(); commandList.setCellRenderer(new PropertyListCellRenderer<>(commandList.getCellRenderer(), Alias.class, "name")); commandList.setModel(commandModel); commandList.setSelectionModel(selectionModel); commandList.getSelectionModel().addListSelectionListener(new ListSelectionListener() { @Override public void valueChanged(final ListSelectionEvent e) { if (e.getValueIsAdjusting()) { return; } final int index = commandList.getSelectedIndex(); if (index == -1) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (commandModel.getSize() == 0) { model.setSelectedAlias(Optional.<Alias>absent()); } else if (index >= commandModel.getSize()) { model.setSelectedAlias(Optional.fromNullable(commandModel. getElementAt(index - 1))); } else { model.setSelectedAlias(Optional.fromNullable(commandModel.getElementAt(index))); } } }); selectionModel.addVetoableSelectionListener(new VetoableChangeListener() { @Override public void vetoableChange(final PropertyChangeEvent evt) throws PropertyVetoException { if (!model.isChangeAliasAllowed()) { throw new PropertyVetoException("Currently selected alias is invalid.", evt); } } }); model.addListener(new AliasDialogModelAdapter() { @Override public void aliasRenamed(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasEdited(final Alias oldAlias, final Alias newAlias) { commandModel.replace(oldAlias, newAlias); } @Override public void aliasRemoved(final Alias alias) { final int index = commandModel.indexOf(alias); commandModel.remove(alias); if (index >= commandModel.getSize()) { model.setSelectedAlias(Optional.fromNullable( commandModel.getElementAt(commandModel.getSize() - 1))); } else if (index == -1 && !commandModel.isEmpty()) { model.setSelectedAlias(Optional.fromNullable(commandModel.get(0))); } else { model.setSelectedAlias(Optional.fromNullable(commandModel.get(index))); } } @Override public void aliasAdded(final Alias alias) { commandModel.add(alias); commandList.getSelectionModel().setSelectionInterval( commandModel.indexOf(alias), commandModel.indexOf(alias)); } @Override public void aliasSelectionChanged(final Optional<Alias> alias) { final int index; if (alias.isPresent()) { index = commandModel.indexOf(alias.get()); } else { index = -1; } if (index != selectionModel.getLeadSelectionIndex()) { selectionModel.setLeadSelectionIndex(index); } } }); }
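Editor's note: the fix makes aliasRemoved reselect a sensible entry instead of leaving the selection dangling: the new last element when the removed index fell off the end, the first element when the alias was not found, otherwise the successor that now occupies the same index. One caveat visible in the patch as shown: removing the last remaining alias leaves the model empty, so getElementAt(getSize() - 1) would be getElementAt(-1) and presumably throw. A list-based sketch of the rule, with that empty case guarded; the List stands in for the dialog's GenericListModel:

```java
import java.util.ArrayList;
import java.util.List;

public class ReselectSketch {
    // Returns the alias that should be selected after `removed` is deleted.
    static String selectionAfterRemove(List<String> model, String removed) {
        int index = model.indexOf(removed);
        model.remove(removed);
        if (model.isEmpty()) {
            return null; // guard the patch as shown lacks
        }
        if (index >= model.size()) {
            return model.get(model.size() - 1); // removed the tail: select new last
        }
        if (index == -1) {
            return model.get(0);                // wasn't present: select first
        }
        return model.get(index);                // successor slid into the slot
    }

    public static void main(String[] args) {
        List<String> aliases = new ArrayList<>(List.of("a", "b", "c"));
        System.out.println(selectionAfterRemove(aliases, "c")); // b (tail case)
        System.out.println(selectionAfterRemove(aliases, "a")); // b (successor case)
    }
}
```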
diff --git a/cpa/src/main/java/org/castor/cpa/persistence/sql/driver/SQLServerQueryExpression.java b/cpa/src/main/java/org/castor/cpa/persistence/sql/driver/SQLServerQueryExpression.java index 412ba80f..57e85920 100644 --- a/cpa/src/main/java/org/castor/cpa/persistence/sql/driver/SQLServerQueryExpression.java +++ b/cpa/src/main/java/org/castor/cpa/persistence/sql/driver/SQLServerQueryExpression.java @@ -1,147 +1,147 @@ /** * Redistribution and use of this software and associated documentation * ("Software"), with or without modification, are permitted provided * that the following conditions are met: * * 1. Redistributions of source code must retain copyright * statements and notices. Redistributions must also contain a * copy of this document. * * 2. Redistributions in binary form must reproduce the * above copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other * materials provided with the distribution. * * 3. The name "Exolab" must not be used to endorse or promote * products derived from this Software without prior written * permission of Intalio, Inc. For written permission, * please contact [email protected]. * * 4. Products derived from this Software may not be called "Exolab" * nor may "Exolab" appear in their names without prior written * permission of Intalio, Inc. Exolab is a registered * trademark of Intalio, Inc. * * 5. Due credit should be given to the Exolab Project * (http://www.exolab.org/). * * THIS SOFTWARE IS PROVIDED BY INTALIO, INC. AND CONTRIBUTORS * ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * INTALIO, INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * * Copyright 1999 (C) Intalio, Inc. All Rights Reserved. * * $Id$ */ package org.castor.cpa.persistence.sql.driver; import java.util.Enumeration; import org.exolab.castor.jdo.engine.JDBCSyntax; import org.exolab.castor.persist.spi.PersistenceFactory; /** * QueryExpression for MS SQL Server. * * @author <a href="[email protected]">Oleg Nitz</a> * @version $Revision$ $Date: 2004-10-08 02:58:33 -0600 (Fri, 08 Oct 2004) $ */ public final class SQLServerQueryExpression extends JDBCQueryExpression { public SQLServerQueryExpression(final PersistenceFactory factory) { super(factory); } public String getStatement(final boolean lock) { StringBuffer sql; boolean first; sql = new StringBuffer(); sql.append(JDBCSyntax.SELECT); if (_distinct) { sql.append(JDBCSyntax.DISTINCT); } if (_limit != null) { if (!_limit.equals("")) { sql.append("TOP ").append(_limit).append(" "); } } sql.append(getColumnList()); sql.append(JDBCSyntax.FROM); // Use HOLDLOCK to lock selected tables. 
Enumeration<String> enumeration = _tables.keys(); while (enumeration.hasMoreElements()) { String tableAlias = enumeration.nextElement(); String tableName = _tables.get(tableAlias); if (tableAlias.equals(tableName)) { sql.append(_factory.quoteName(tableName)); } else { sql.append(_factory.quoteName(tableName) + " " + _factory.quoteName(tableAlias)); } if (lock) { - sql.append(" HOLDLOCK "); + sql.append(" WITH (HOLDLOCK) "); } if (enumeration.hasMoreElements()) { sql.append(JDBCSyntax.TABLE_SEPARATOR); } } first = true; // Use asterisk notation to denote a left outer join // and equals to denote an inner join for (int i = 0; i < _joins.size(); ++i) { Join join; if (first) { sql.append(JDBCSyntax.WHERE); first = false; } else { sql.append(JDBCSyntax.AND); } join = _joins.elementAt(i); for (int j = 0; j < join._leftColumns.length; ++j) { if (j > 0) { sql.append(JDBCSyntax.AND); } sql.append(_factory.quoteName(join._leftTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._leftColumns[j])); if (join._outer) { sql.append("*="); } else { sql.append(OP_EQUALS); } sql.append(_factory.quoteName(join._rightTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._rightColumns[j])); } } first = addWhereClause(sql, first); if (_order != null) { sql.append(JDBCSyntax.ORDER_BY).append(_order); } return sql.toString(); } public boolean isLimitClauseSupported() { return true; } }
true
true
public String getStatement(final boolean lock) { StringBuffer sql; boolean first; sql = new StringBuffer(); sql.append(JDBCSyntax.SELECT); if (_distinct) { sql.append(JDBCSyntax.DISTINCT); } if (_limit != null) { if (!_limit.equals("")) { sql.append("TOP ").append(_limit).append(" "); } } sql.append(getColumnList()); sql.append(JDBCSyntax.FROM); // Use HOLDLOCK to lock selected tables. Enumeration<String> enumeration = _tables.keys(); while (enumeration.hasMoreElements()) { String tableAlias = enumeration.nextElement(); String tableName = _tables.get(tableAlias); if (tableAlias.equals(tableName)) { sql.append(_factory.quoteName(tableName)); } else { sql.append(_factory.quoteName(tableName) + " " + _factory.quoteName(tableAlias)); } if (lock) { sql.append(" HOLDLOCK "); } if (enumeration.hasMoreElements()) { sql.append(JDBCSyntax.TABLE_SEPARATOR); } } first = true; // Use asterisk notation to denote a left outer join // and equals to denote an inner join for (int i = 0; i < _joins.size(); ++i) { Join join; if (first) { sql.append(JDBCSyntax.WHERE); first = false; } else { sql.append(JDBCSyntax.AND); } join = _joins.elementAt(i); for (int j = 0; j < join._leftColumns.length; ++j) { if (j > 0) { sql.append(JDBCSyntax.AND); } sql.append(_factory.quoteName(join._leftTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._leftColumns[j])); if (join._outer) { sql.append("*="); } else { sql.append(OP_EQUALS); } sql.append(_factory.quoteName(join._rightTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._rightColumns[j])); } } first = addWhereClause(sql, first); if (_order != null) { sql.append(JDBCSyntax.ORDER_BY).append(_order); } return sql.toString(); }
public String getStatement(final boolean lock) { StringBuffer sql; boolean first; sql = new StringBuffer(); sql.append(JDBCSyntax.SELECT); if (_distinct) { sql.append(JDBCSyntax.DISTINCT); } if (_limit != null) { if (!_limit.equals("")) { sql.append("TOP ").append(_limit).append(" "); } } sql.append(getColumnList()); sql.append(JDBCSyntax.FROM); // Use HOLDLOCK to lock selected tables. Enumeration<String> enumeration = _tables.keys(); while (enumeration.hasMoreElements()) { String tableAlias = enumeration.nextElement(); String tableName = _tables.get(tableAlias); if (tableAlias.equals(tableName)) { sql.append(_factory.quoteName(tableName)); } else { sql.append(_factory.quoteName(tableName) + " " + _factory.quoteName(tableAlias)); } if (lock) { sql.append(" WITH (HOLDLOCK) "); } if (enumeration.hasMoreElements()) { sql.append(JDBCSyntax.TABLE_SEPARATOR); } } first = true; // Use asterisk notation to denote a left outer join // and equals to denote an inner join for (int i = 0; i < _joins.size(); ++i) { Join join; if (first) { sql.append(JDBCSyntax.WHERE); first = false; } else { sql.append(JDBCSyntax.AND); } join = _joins.elementAt(i); for (int j = 0; j < join._leftColumns.length; ++j) { if (j > 0) { sql.append(JDBCSyntax.AND); } sql.append(_factory.quoteName(join._leftTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._leftColumns[j])); if (join._outer) { sql.append("*="); } else { sql.append(OP_EQUALS); } sql.append(_factory.quoteName(join._rightTable + JDBCSyntax.TABLE_COLUMN_SEPARATOR + join._rightColumns[j])); } } first = addWhereClause(sql, first); if (_order != null) { sql.append(JDBCSyntax.ORDER_BY).append(_order); } return sql.toString(); }
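Editor's note: the single functional change swaps the bare HOLDLOCK keyword for WITH (HOLDLOCK). On current SQL Server the parenthesized WITH (...) form is the documented table-hint syntax, and the bare keyword trailing a table alias is legacy usage that newer versions can reject. A sketch of the strings the two versions emit; the table name and alias are illustrative:

```java
public class HintSketch {
    static String fromClause(String table, boolean lock, boolean fixedSyntax) {
        String hint = fixedSyntax ? " WITH (HOLDLOCK)" : " HOLDLOCK";
        return "SELECT * FROM " + table + (lock ? hint : "");
    }

    public static void main(String[] args) {
        System.out.println(fromClause("person t0", true, false)); // old: ... person t0 HOLDLOCK
        System.out.println(fromClause("person t0", true, true));  // new: ... person t0 WITH (HOLDLOCK)
    }
}
```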
diff --git a/src/de/kumpelblase2/dragonslair/commanddialogs/trigger/TriggerDeleteDialog.java b/src/de/kumpelblase2/dragonslair/commanddialogs/trigger/TriggerDeleteDialog.java index 642effa..2bbabc5 100755 --- a/src/de/kumpelblase2/dragonslair/commanddialogs/trigger/TriggerDeleteDialog.java +++ b/src/de/kumpelblase2/dragonslair/commanddialogs/trigger/TriggerDeleteDialog.java @@ -1,81 +1,82 @@ package de.kumpelblase2.dragonslair.commanddialogs.trigger; import org.bukkit.ChatColor; import org.bukkit.conversations.*; import de.kumpelblase2.dragonslair.DragonsLairMain; public class TriggerDeleteDialog extends ValidatingPrompt { @Override public String getPromptText(final ConversationContext arg0) { if(arg0.getSessionData("trigger_id") == null) return ChatColor.GREEN + "Please enter the id of the trigger to delete:"; else return ChatColor.YELLOW + "Are you sure you want to delete this trigger? Type 'delete' to confirm."; } @Override protected Prompt acceptValidatedInput(final ConversationContext arg0, final String arg1) { if(arg1.equals("cancel")) { arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } if(arg0.getSessionData("trigger_id") == null) { if(arg1.equals("back")) return new TriggerManageDialog(); arg0.setSessionData("trigger_id", Integer.parseInt(arg1)); return this; } else { if(arg1.equals("back")) { arg0.setSessionData("trigger_id", null); return this; } if(arg1.equals("delete")) { final int id = (Integer)arg0.getSessionData("trigger_id"); DragonsLairMain.debugLog("Deleted trigger with id '" + id + "'"); + DragonsLairMain.getSettings().getTriggers().get(id).remove(); DragonsLairMain.getSettings().getTriggers().remove(id); DragonsLairMain.getInstance().getEventHandler().reloadTriggers(); } arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } } @Override protected boolean isInputValid(final ConversationContext arg0, final String arg1) { if(arg1.equals("back") || arg1.equals("cancel")) return true; if(arg0.getSessionData("trigger_id") == null) try { final int id = Integer.parseInt(arg1); if(!DragonsLairMain.getSettings().getTriggers().containsKey(id)) { arg0.getForWhom().sendRawMessage(ChatColor.RED + "That trigger does not exist."); return false; } else return true; } catch(final Exception e) { arg0.getForWhom().sendRawMessage(ChatColor.RED + "Not a valid number."); return false; } else { if(arg1.length() > 0) return true; return false; } } }
true
true
protected Prompt acceptValidatedInput(final ConversationContext arg0, final String arg1) { if(arg1.equals("cancel")) { arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } if(arg0.getSessionData("trigger_id") == null) { if(arg1.equals("back")) return new TriggerManageDialog(); arg0.setSessionData("trigger_id", Integer.parseInt(arg1)); return this; } else { if(arg1.equals("back")) { arg0.setSessionData("trigger_id", null); return this; } if(arg1.equals("delete")) { final int id = (Integer)arg0.getSessionData("trigger_id"); DragonsLairMain.debugLog("Deleted trigger with id '" + id + "'"); DragonsLairMain.getSettings().getTriggers().remove(id); DragonsLairMain.getInstance().getEventHandler().reloadTriggers(); } arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } }
protected Prompt acceptValidatedInput(final ConversationContext arg0, final String arg1) { if(arg1.equals("cancel")) { arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } if(arg0.getSessionData("trigger_id") == null) { if(arg1.equals("back")) return new TriggerManageDialog(); arg0.setSessionData("trigger_id", Integer.parseInt(arg1)); return this; } else { if(arg1.equals("back")) { arg0.setSessionData("trigger_id", null); return this; } if(arg1.equals("delete")) { final int id = (Integer)arg0.getSessionData("trigger_id"); DragonsLairMain.debugLog("Deleted trigger with id '" + id + "'"); DragonsLairMain.getSettings().getTriggers().get(id).remove(); DragonsLairMain.getSettings().getTriggers().remove(id); DragonsLairMain.getInstance().getEventHandler().reloadTriggers(); } arg0.setSessionData("trigger_id", null); return new TriggerManageDialog(); } }
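Editor's note: the fix inserts getTriggers().get(id).remove() before the map removal, so the trigger object tears itself down before the settings forget it; dropping the map entry alone would leak whatever the trigger registered elsewhere. A generic sketch of that order of operations; the Trigger interface here is a stand-in, not the plugin's real type:

```java
import java.util.HashMap;
import java.util.Map;

public class CleanupSketch {
    // Hypothetical dispose hook standing in for the trigger's remove() method.
    interface Trigger { void remove(); }

    public static void main(String[] args) {
        Map<Integer, Trigger> triggers = new HashMap<>();
        triggers.put(7, () -> System.out.println("trigger 7 unregistered itself"));

        int id = 7;
        triggers.get(id).remove(); // the added line: dispose before dropping the entry
        triggers.remove(id);       // only then forget it

        System.out.println(triggers.containsKey(id)); // false
    }
}
```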
diff --git a/src/org/newdawn/slick/geom/Vector2f.java b/src/org/newdawn/slick/geom/Vector2f.java index ab84b5f..e038734 100644 --- a/src/org/newdawn/slick/geom/Vector2f.java +++ b/src/org/newdawn/slick/geom/Vector2f.java @@ -1,346 +1,346 @@ package org.newdawn.slick.geom; import org.newdawn.slick.util.FastTrig; /** * A two dimensional vector * * @author Kevin Glass */ public strictfp class Vector2f { /** The x component of this vector */ public float x; /** The y component of this vector */ public float y; /** * Create an empty vector */ public Vector2f() { } /** * Create a new vector based on an angle * * @param theta The angle of the vector in degrees */ public Vector2f(double theta) { x = 1; y = 0; setTheta(theta); } /** * Calculate the components of the vectors based on a angle * * @param theta The angle to calculate the components from (in degrees) */ public void setTheta(double theta) { // Next lines are to prevent numbers like -1.8369701E-16 // when working with negative numbers if ((theta < -360) || (theta > 360)) { theta = theta % 360; } if (theta < 0) { theta = 360 + theta; } double oldTheta = getTheta(); if ((theta < -360) || (theta > 360)) { oldTheta = oldTheta % 360; } if (theta < 0) { oldTheta = 360 + oldTheta; } float len = length(); - x = -len * (float) FastTrig.cos(StrictMath.toRadians(theta)); + x = len * (float) FastTrig.cos(StrictMath.toRadians(theta)); y = len * (float) FastTrig.sin(StrictMath.toRadians(theta)); // x = x / (float) FastTrig.cos(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.cos(StrictMath.toRadians(theta)); // y = x / (float) FastTrig.sin(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.sin(StrictMath.toRadians(theta)); } /** * Adjust this vector by a given angle * * @param theta * The angle to adjust the angle by (in degrees) * @return This vector - useful for chaining operations * */ public Vector2f add(double theta) { setTheta(getTheta() + theta); return this; } /** * Adjust this vector by a given angle * * @param theta The angle to adjust the angle by (in degrees) * @return This vector - useful for chaining operations */ public Vector2f sub(double theta) { setTheta(getTheta() - theta); return this; } /** * Get the angle this vector is at * * @return The angle this vector is at (in degrees) */ public double getTheta() { double theta = StrictMath.toDegrees(StrictMath.atan2(y, x)); if ((theta < -360) || (theta > 360)) { theta = theta % 360; } if (theta < 0) { theta = 360 + theta; } return theta; } /** * Get the x component * * @return The x component */ public float getX() { return x; } /** * Get the y component * * @return The y component */ public float getY() { return y; } /** * Create a new vector based on another * * @param other The other vector to copy into this one */ public Vector2f(Vector2f other) { this(other.getX(),other.getY()); } /** * Create a new vector * * @param x The x component to assign * @param y The y component to assign */ public Vector2f(float x, float y) { this.x = x; this.y = y; } /** * Set the value of this vector * * @param other The values to set into the vector */ public void set(Vector2f other) { set(other.getX(),other.getY()); } /** * Dot this vector against another * * @param other The other vector dot agianst * @return The dot product of the two vectors */ public float dot(Vector2f other) { return (x * other.getX()) + (y * other.getY()); } /** * Set the values in this vector * * @param x The x component to set * @param y The y component to set * @return This vector - useful for chaning operations */ public 
Vector2f set(float x, float y) { this.x = x; this.y = y; return this; } /** * Negate this vector * * @return A copy of this vector negated */ public Vector2f negate() { return new Vector2f(-x, -y); } /** * Negate this vector without creating a new copy * * @return This vector - useful for chaning operations */ public Vector2f negateLocal() { x = -x; y = -y; return this; } /** * Add a vector to this vector * * @param v The vector to add * @return This vector - useful for chaning operations */ public Vector2f add(Vector2f v) { x += v.getX(); y += v.getY(); return this; } /** * Subtract a vector from this vector * * @param v The vector subtract * @return This vector - useful for chaining operations */ public Vector2f sub(Vector2f v) { x -= v.getX(); y -= v.getY(); return this; } /** * Scale this vector by a value * * @param a The value to scale this vector by * @return This vector - useful for chaining operations */ public Vector2f scale(float a) { x *= a; y *= a; return this; } /** * Normalise the vector * * @return This vector - useful for chaning operations */ public Vector2f normalise() { float l = length(); x /= l; y /= l; return this; } /** * The normal of the vector * * @return A unit vector with the same direction as the vector */ public Vector2f getNormal() { Vector2f cp = copy(); cp.normalise(); return cp; } /** * The length of the vector squared * * @return The length of the vector squared */ public float lengthSquared() { return (x * x) + (y * y); } /** * Get the length of this vector * * @return The length of this vector */ public float length() { return (float) Math.sqrt(lengthSquared()); } /** * Project this vector onto another * * @param b The vector to project onto * @param result The projected vector */ public void projectOntoUnit(Vector2f b, Vector2f result) { float dp = b.dot(this); result.x = dp * b.getX(); result.y = dp * b.getY(); } /** * Return a copy of this vector * * @return The new instance that copies this vector */ public Vector2f copy() { return new Vector2f(x,y); } /** * @see java.lang.Object#toString() */ public String toString() { return "[Vector2f "+x+","+y+" ("+length()+")]"; } /** * Get the distance from this point to another * * @param other The other point we're measuring to * @return The distance to the other point */ public float distance(Vector2f other) { float dx = other.getX() - getX(); float dy = other.getY() - getY(); return (float) Math.sqrt((dx*dx)+(dy*dy)); } /** * @see java.lang.Object#hashCode() */ public int hashCode() { return 997 * ((int)x) ^ 991 * ((int)y); //large primes! } /** * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object other) { if (other instanceof Vector2f) { Vector2f o = ((Vector2f) other); return (o.x == x) && (o.y == y); } return false; } }
true
true
public void setTheta(double theta) { // Next lines are to prevent numbers like -1.8369701E-16 // when working with negative numbers if ((theta < -360) || (theta > 360)) { theta = theta % 360; } if (theta < 0) { theta = 360 + theta; } double oldTheta = getTheta(); if ((theta < -360) || (theta > 360)) { oldTheta = oldTheta % 360; } if (theta < 0) { oldTheta = 360 + oldTheta; } float len = length(); x = -len * (float) FastTrig.cos(StrictMath.toRadians(theta)); y = len * (float) FastTrig.sin(StrictMath.toRadians(theta)); // x = x / (float) FastTrig.cos(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.cos(StrictMath.toRadians(theta)); // y = x / (float) FastTrig.sin(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.sin(StrictMath.toRadians(theta)); }
public void setTheta(double theta) { // Next lines are to prevent numbers like -1.8369701E-16 // when working with negative numbers if ((theta < -360) || (theta > 360)) { theta = theta % 360; } if (theta < 0) { theta = 360 + theta; } double oldTheta = getTheta(); if ((theta < -360) || (theta > 360)) { oldTheta = oldTheta % 360; } if (theta < 0) { oldTheta = 360 + oldTheta; } float len = length(); x = len * (float) FastTrig.cos(StrictMath.toRadians(theta)); y = len * (float) FastTrig.sin(StrictMath.toRadians(theta)); // x = x / (float) FastTrig.cos(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.cos(StrictMath.toRadians(theta)); // y = x / (float) FastTrig.sin(StrictMath.toRadians(oldTheta)) // * (float) FastTrig.sin(StrictMath.toRadians(theta)); }
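Editor's note: the fix drops a stray minus sign. With x = -len * cos(theta) the vector is mirrored across the y-axis, so setTheta(30) produces a vector whose getTheta() reads back 150. A standalone check of the two forms, using java.lang.Math in place of the library's FastTrig:

```java
public class ThetaSketch {
    public static void main(String[] args) {
        double theta = 30.0, len = 2.0;
        double y = len * Math.sin(Math.toRadians(theta));

        double xFixed = len * Math.cos(Math.toRadians(theta));  // patched form
        double xBuggy = -len * Math.cos(Math.toRadians(theta)); // old negated form

        // Read the angle back the same way getTheta() does, via atan2:
        System.out.println(Math.toDegrees(Math.atan2(y, xFixed))); // ~30.0
        System.out.println(Math.toDegrees(Math.atan2(y, xBuggy))); // ~150.0 (mirrored)
    }
}
```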
diff --git a/src/main/java/com/cnaude/purpleirc/Commands/Nick.java b/src/main/java/com/cnaude/purpleirc/Commands/Nick.java index 5006800..3eb833d 100644 --- a/src/main/java/com/cnaude/purpleirc/Commands/Nick.java +++ b/src/main/java/com/cnaude/purpleirc/Commands/Nick.java @@ -1,46 +1,46 @@ /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.cnaude.purpleirc.Commands; import com.cnaude.purpleirc.PurpleBot; import com.cnaude.purpleirc.PurpleIRC; import org.bukkit.ChatColor; import org.bukkit.command.CommandSender; /** * * @author cnaude */ public class Nick { private final PurpleIRC plugin; public Nick(PurpleIRC plugin) { this.plugin = plugin; } public void dispatch(CommandSender sender, String[] args) { if (args.length == 3) { String bot = args[1]; String nick = args[2]; if (plugin.ircBots.containsKey(bot)) { if (plugin.ircBots.containsKey(nick)) { sender.sendMessage(ChatColor.RED + "There is already a bot with that nick!"); } else { plugin.ircBots.get(bot).changeNick(sender, nick); PurpleBot ircBot = plugin.ircBots.remove(bot); plugin.ircBots.put(nick, ircBot); boolean isConnected = plugin.botConnected.remove(bot); plugin.botConnected.put(nick, isConnected); - plugin.ircBots.get(bot).botNick = nick; + ircBot.botNick = nick; } } else { sender.sendMessage(plugin.invalidBotName.replace("%BOT%", bot)); } } else { sender.sendMessage(ChatColor.WHITE + "Usage: " + ChatColor.GOLD + "/irc nick [bot] [nick]"); } } }
true
true
public void dispatch(CommandSender sender, String[] args) { if (args.length == 3) { String bot = args[1]; String nick = args[2]; if (plugin.ircBots.containsKey(bot)) { if (plugin.ircBots.containsKey(nick)) { sender.sendMessage(ChatColor.RED + "There is already a bot with that nick!"); } else { plugin.ircBots.get(bot).changeNick(sender, nick); PurpleBot ircBot = plugin.ircBots.remove(bot); plugin.ircBots.put(nick, ircBot); boolean isConnected = plugin.botConnected.remove(bot); plugin.botConnected.put(nick, isConnected); plugin.ircBots.get(bot).botNick = nick; } } else { sender.sendMessage(plugin.invalidBotName.replace("%BOT%", bot)); } } else { sender.sendMessage(ChatColor.WHITE + "Usage: " + ChatColor.GOLD + "/irc nick [bot] [nick]"); } }
public void dispatch(CommandSender sender, String[] args) { if (args.length == 3) { String bot = args[1]; String nick = args[2]; if (plugin.ircBots.containsKey(bot)) { if (plugin.ircBots.containsKey(nick)) { sender.sendMessage(ChatColor.RED + "There is already a bot with that nick!"); } else { plugin.ircBots.get(bot).changeNick(sender, nick); PurpleBot ircBot = plugin.ircBots.remove(bot); plugin.ircBots.put(nick, ircBot); boolean isConnected = plugin.botConnected.remove(bot); plugin.botConnected.put(nick, isConnected); ircBot.botNick = nick; } } else { sender.sendMessage(plugin.invalidBotName.replace("%BOT%", bot)); } } else { sender.sendMessage(ChatColor.WHITE + "Usage: " + ChatColor.GOLD + "/irc nick [bot] [nick]"); } }
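Editor's note: the bot is re-keyed with ircBots.remove(bot) followed by ircBots.put(nick, ircBot), but the buggy line then looked the bot up under the old key, plugin.ircBots.get(bot), which returns null after the remove, so the field write threw a NullPointerException. The fix writes through the reference already in hand. Reduced to a map sketch with stand-in types:

```java
import java.util.HashMap;
import java.util.Map;

public class RekeySketch {
    static class Bot { String botNick; }

    public static void main(String[] args) {
        Map<String, Bot> bots = new HashMap<>();
        bots.put("old", new Bot());

        Bot bot = bots.remove("old"); // keep the reference while re-keying...
        bots.put("new", bot);
        bot.botNick = "new";          // ...and update it directly (the fix)

        System.out.println(bots.get("old")); // null: why get("old").botNick NPE'd
    }
}
```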
diff --git a/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java b/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java index a96ba698a..321160cb9 100644 --- a/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java +++ b/svnkit/src/main/java/org/tmatesoft/svn/core/internal/wc2/remote/SvnRemoteRemoteDelete.java @@ -1,150 +1,150 @@ package org.tmatesoft.svn.core.internal.wc2.remote; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.tmatesoft.svn.core.SVNCommitInfo; import org.tmatesoft.svn.core.SVNErrorCode; import org.tmatesoft.svn.core.SVNErrorMessage; import org.tmatesoft.svn.core.SVNException; import org.tmatesoft.svn.core.SVNNodeKind; import org.tmatesoft.svn.core.SVNURL; import org.tmatesoft.svn.core.internal.util.SVNHashMap; import org.tmatesoft.svn.core.internal.util.SVNPathUtil; import org.tmatesoft.svn.core.internal.wc.ISVNCommitPathHandler; import org.tmatesoft.svn.core.internal.wc.SVNCommitUtil; import org.tmatesoft.svn.core.internal.wc.SVNErrorManager; import org.tmatesoft.svn.core.internal.wc.SVNEventFactory; import org.tmatesoft.svn.core.internal.wc.SVNPropertiesManager; import org.tmatesoft.svn.core.internal.wc17.SVNWCUtils; import org.tmatesoft.svn.core.internal.wc2.SvnRemoteOperationRunner; import org.tmatesoft.svn.core.io.ISVNEditor; import org.tmatesoft.svn.core.io.SVNRepository; import org.tmatesoft.svn.core.wc.ISVNEventHandler; import org.tmatesoft.svn.core.wc.SVNEventAction; import org.tmatesoft.svn.core.wc2.SvnCommitItem; import org.tmatesoft.svn.core.wc2.SvnRemoteDelete; import org.tmatesoft.svn.core.wc2.SvnTarget; import org.tmatesoft.svn.util.SVNLogType; public class SvnRemoteRemoteDelete extends SvnRemoteOperationRunner<SVNCommitInfo, SvnRemoteDelete> { @Override protected SVNCommitInfo run() throws SVNException { if (getOperation().getTargets().size() == 0) { return SVNCommitInfo.NULL; } SVNHashMap reposInfo = new SVNHashMap(); SVNHashMap relPathInfo = new SVNHashMap(); for (SvnTarget target : getOperation().getTargets()) { SVNURL url = target.getURL(); SVNRepository repository = null; SVNURL reposRoot = null; String reposRelPath = null; ArrayList<String> relPaths; SVNNodeKind kind; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { reposRoot = (SVNURL) rootUrls.next(); reposRelPath = SVNWCUtils.isChild(reposRoot, url); if (reposRelPath != null) { repository = (SVNRepository)reposInfo.get(reposRoot); relPaths = (ArrayList<String>)relPathInfo.get(reposRoot); relPaths.add(reposRelPath); } } if (repository == null) { repository = getRepositoryAccess().createRepository(url, null, false); reposRoot = repository.getRepositoryRoot(true); repository.setLocation(reposRoot, false); reposInfo.put(reposRoot, repository); reposRelPath = SVNWCUtils.isChild(reposRoot, url); relPaths = new ArrayList<String>(); relPathInfo.put(reposRoot, relPaths); relPaths.add(reposRelPath); } if (reposRelPath == null) { - SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.RA_ILLEGAL_URL, "URL '{0}' not within a repository", url); + SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.RA_ILLEGAL_URL, "URL ''{0}'' not within a repository", url); SVNErrorManager.error(err, SVNLogType.WC); } kind = repository.checkPath(reposRelPath, -1); if (kind == SVNNodeKind.NONE) { - SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '{0}' does not exist", url); + SVNErrorMessage err = 
SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL ''{0}'' does not exist", url); SVNErrorManager.error(err, SVNLogType.WC); } } SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties()); SVNCommitInfo info = null; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { SVNURL reposRoot = (SVNURL) rootUrls.next(); SVNRepository repository = (SVNRepository) reposInfo.get(reposRoot); List<String> paths = (List<String>) relPathInfo.get(reposRoot); info = singleRepositoryDelete(repository, reposRoot, paths); if (info != null) { getOperation().receive(SvnTarget.fromURL(reposRoot), info); } } return info != null ? info : SVNCommitInfo.NULL; } private SVNCommitInfo singleRepositoryDelete(SVNRepository repository, SVNURL rootURL, List<String> paths) throws SVNException { if (paths.isEmpty()) { paths.add(SVNPathUtil.tail(rootURL.getURIEncodedPath())); rootURL = rootURL.removePathTail(); } String commitMessage; if (getOperation().getCommitHandler() != null) { SvnCommitItem[] commitItems = new SvnCommitItem[paths.size()]; for (int i = 0; i < commitItems.length; i++) { String path = (String) paths.get(i); SvnCommitItem item = new SvnCommitItem(); item.setKind(SVNNodeKind.NONE); item.setUrl(rootURL.appendPath(path, true)); item.setFlags(SvnCommitItem.DELETE); commitItems[i] = item; } commitMessage = getOperation().getCommitHandler().getCommitMessage(getOperation().getCommitMessage(), commitItems); if (commitMessage == null) { return SVNCommitInfo.NULL; } commitMessage = SVNCommitUtil.validateCommitMessage(commitMessage); } else { commitMessage = ""; } ISVNEditor commitEditor = repository.getCommitEditor(commitMessage, null, false, getOperation().getRevisionProperties(), null); ISVNCommitPathHandler deleter = new ISVNCommitPathHandler() { public boolean handleCommitPath(String commitPath, ISVNEditor commitEditor) throws SVNException { commitEditor.deleteEntry(commitPath, -1); return false; } }; SVNCommitInfo info; try { SVNCommitUtil.driveCommitEditor(deleter, paths, commitEditor, -1); info = commitEditor.closeEdit(); } catch (SVNException e) { try { commitEditor.abortEdit(); } catch (SVNException inner) { } throw e; } if (info != null && info.getNewRevision() >= 0) { handleEvent(SVNEventFactory.createSVNEvent(null, SVNNodeKind.NONE, null, info.getNewRevision(), SVNEventAction.COMMIT_COMPLETED, null, null, null), ISVNEventHandler.UNKNOWN); } return info != null ? info : SVNCommitInfo.NULL; } }
false
true
protected SVNCommitInfo run() throws SVNException { if (getOperation().getTargets().size() == 0) { return SVNCommitInfo.NULL; } SVNHashMap reposInfo = new SVNHashMap(); SVNHashMap relPathInfo = new SVNHashMap(); for (SvnTarget target : getOperation().getTargets()) { SVNURL url = target.getURL(); SVNRepository repository = null; SVNURL reposRoot = null; String reposRelPath = null; ArrayList<String> relPaths; SVNNodeKind kind; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { reposRoot = (SVNURL) rootUrls.next(); reposRelPath = SVNWCUtils.isChild(reposRoot, url); if (reposRelPath != null) { repository = (SVNRepository)reposInfo.get(reposRoot); relPaths = (ArrayList<String>)relPathInfo.get(reposRoot); relPaths.add(reposRelPath); } } if (repository == null) { repository = getRepositoryAccess().createRepository(url, null, false); reposRoot = repository.getRepositoryRoot(true); repository.setLocation(reposRoot, false); reposInfo.put(reposRoot, repository); reposRelPath = SVNWCUtils.isChild(reposRoot, url); relPaths = new ArrayList<String>(); relPathInfo.put(reposRoot, relPaths); relPaths.add(reposRelPath); } if (reposRelPath == null) { SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.RA_ILLEGAL_URL, "URL '{0}' not within a repository", url); SVNErrorManager.error(err, SVNLogType.WC); } kind = repository.checkPath(reposRelPath, -1); if (kind == SVNNodeKind.NONE) { SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL '{0}' does not exist", url); SVNErrorManager.error(err, SVNLogType.WC); } } SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties()); SVNCommitInfo info = null; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { SVNURL reposRoot = (SVNURL) rootUrls.next(); SVNRepository repository = (SVNRepository) reposInfo.get(reposRoot); List<String> paths = (List<String>) relPathInfo.get(reposRoot); info = singleRepositoryDelete(repository, reposRoot, paths); if (info != null) { getOperation().receive(SvnTarget.fromURL(reposRoot), info); } } return info != null ? info : SVNCommitInfo.NULL; }
protected SVNCommitInfo run() throws SVNException { if (getOperation().getTargets().size() == 0) { return SVNCommitInfo.NULL; } SVNHashMap reposInfo = new SVNHashMap(); SVNHashMap relPathInfo = new SVNHashMap(); for (SvnTarget target : getOperation().getTargets()) { SVNURL url = target.getURL(); SVNRepository repository = null; SVNURL reposRoot = null; String reposRelPath = null; ArrayList<String> relPaths; SVNNodeKind kind; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { reposRoot = (SVNURL) rootUrls.next(); reposRelPath = SVNWCUtils.isChild(reposRoot, url); if (reposRelPath != null) { repository = (SVNRepository)reposInfo.get(reposRoot); relPaths = (ArrayList<String>)relPathInfo.get(reposRoot); relPaths.add(reposRelPath); } } if (repository == null) { repository = getRepositoryAccess().createRepository(url, null, false); reposRoot = repository.getRepositoryRoot(true); repository.setLocation(reposRoot, false); reposInfo.put(reposRoot, repository); reposRelPath = SVNWCUtils.isChild(reposRoot, url); relPaths = new ArrayList<String>(); relPathInfo.put(reposRoot, relPaths); relPaths.add(reposRelPath); } if (reposRelPath == null) { SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.RA_ILLEGAL_URL, "URL ''{0}'' not within a repository", url); SVNErrorManager.error(err, SVNLogType.WC); } kind = repository.checkPath(reposRelPath, -1); if (kind == SVNNodeKind.NONE) { SVNErrorMessage err = SVNErrorMessage.create(SVNErrorCode.FS_NOT_FOUND, "URL ''{0}'' does not exist", url); SVNErrorManager.error(err, SVNLogType.WC); } } SVNPropertiesManager.validateRevisionProperties(getOperation().getRevisionProperties()); SVNCommitInfo info = null; for (Iterator rootUrls = reposInfo.keySet().iterator(); rootUrls.hasNext();) { SVNURL reposRoot = (SVNURL) rootUrls.next(); SVNRepository repository = (SVNRepository) reposInfo.get(reposRoot); List<String> paths = (List<String>) relPathInfo.get(reposRoot); info = singleRepositoryDelete(repository, reposRoot, paths); if (info != null) { getOperation().receive(SvnTarget.fromURL(reposRoot), info); } } return info != null ? info : SVNCommitInfo.NULL; }
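The only change in this fix is doubling the single quotes in the two error-message patterns. SVNErrorMessage.create() appears to take java.text.MessageFormat-style patterns, where a lone ' opens a quoted section that suppresses placeholder substitution, while '' escapes a literal quote. A standalone demonstration of the difference:

import java.text.MessageFormat;

public class QuoteEscapingDemo {
    public static void main(String[] args) {
        String url = "http://host/repo/missing";
        // Buggy pattern: the quoted section swallows the {0} placeholder.
        System.out.println(MessageFormat.format("URL '{0}' does not exist", url));
        // prints: URL {0} does not exist
        // Fixed pattern: '' yields literal quotes and {0} is substituted.
        System.out.println(MessageFormat.format("URL ''{0}'' does not exist", url));
        // prints: URL 'http://host/repo/missing' does not exist
    }
}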
diff --git a/src/org/clapper/curn/output/FileOutputHandler.java b/src/org/clapper/curn/output/FileOutputHandler.java index 740125b..88633c9 100644 --- a/src/org/clapper/curn/output/FileOutputHandler.java +++ b/src/org/clapper/curn/output/FileOutputHandler.java @@ -1,545 +1,545 @@ /*---------------------------------------------------------------------------*\ $Id$ --------------------------------------------------------------------------- This software is released under a Berkeley-style license: Copyright (c) 2004-2005 Brian M. Clapper. All rights reserved. Redistribution and use in source and binary forms are permitted provided that: (1) source distributions retain this entire copyright notice and comment; and (2) modifications made to the software are prominently mentioned, and a copy of the original software (or a pointer to its location) are included. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. Effectively, this means you can do what you want with the software except remove this notice or take advantage of the author's name. If you modify the software and redistribute your modified version, you must indicate that your version is a modification of the original, and you must provide either a pointer to or a copy of the original. \*---------------------------------------------------------------------------*/ package org.clapper.curn.output; import org.clapper.curn.ConfigFile; import org.clapper.curn.ConfiguredOutputHandler; import org.clapper.curn.CurnException; import org.clapper.curn.FeedInfo; import org.clapper.curn.OutputHandler; import org.clapper.curn.util.Util; import org.clapper.curn.parser.RSSChannel; import org.clapper.curn.parser.RSSItem; import org.clapper.util.config.ConfigurationException; import org.clapper.util.config.NoSuchSectionException; import org.clapper.util.io.FileUtil; import org.clapper.util.io.IOExceptionExt; import org.clapper.util.io.RollingFileWriter; import org.clapper.util.logging.Logger; import org.clapper.util.text.HTMLUtil; import org.clapper.util.text.Unicode; import java.io.IOException; import java.io.FileOutputStream; import java.io.File; import java.io.FileWriter; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; /** * <p><tt>FileOutputHandler</tt> is an abstract base class for * <tt>OutputHandler</tt> subclasses that write RSS feed summaries to a * file. It consolidates common logic and configuration handling for such * classes, providing both consistent implementation and configuration. * It handles two additional output handler-specific configuration items:</p> * * <ul> * <li><tt>SaveAs</tt> takes a file name argument and specifies a file * where the handler should save its output permanently. It's useful * if the user wants to keep a copy of the output the handler generates, * in addition to having the output reported by <i>curn</i>. * <li><tt>SaveOnly</tt> instructs the handler to save the output in the * <tt>SaveAs</tt> file, but not report the output to <i>curn</i>. * From <i>curn</i>'s perspective, the handler generates no output * at all. 
* </ul> * * @see OutputHandler * @see org.clapper.curn.Curn * @see org.clapper.curn.parser.RSSChannel * * @version <tt>$Revision$</tt> */ public abstract class FileOutputHandler implements OutputHandler { /*----------------------------------------------------------------------*\ Public Constants \*----------------------------------------------------------------------*/ /** * Configuration variable: encoding */ public static final String CFG_ENCODING = "Encoding"; /** * Whether or not to show curn information */ public static final String CFG_SHOW_CURN_INFO = "ShowCurnInfo"; /** * Where to save the output, if any */ public static final String CFG_SAVE_AS = "SaveAs"; /** * Whether we're ONLY saving output */ public static final String CFG_SAVE_ONLY = "SaveOnly"; /** * Number of backups of saved files to keep. */ public static final String CFG_SAVED_BACKUPS = "SavedBackups"; /*----------------------------------------------------------------------*\ Public Constants \*----------------------------------------------------------------------*/ /** * Default encoding value */ private static final String DEFAULT_CHARSET_ENCODING = "utf-8"; /*----------------------------------------------------------------------*\ Private Instance Data \*----------------------------------------------------------------------*/ private File outputFile = null; private ConfigFile config = null; private boolean saveOnly = false; private String name = null; private boolean showToolInfo = true; private int savedBackups = 0; private String encoding = null; /** * For logging */ private Logger log = null; /*----------------------------------------------------------------------*\ Constructor \*----------------------------------------------------------------------*/ /** * Construct a new <tt>FileOutputHandler</tt> */ public FileOutputHandler() { } /*----------------------------------------------------------------------*\ Public Methods \*----------------------------------------------------------------------*/ /** * Initializes the output handler for another set of RSS channels. * * @param config the parsed <i>curn</i> configuration data * @param cfgHandler the <tt>ConfiguredOutputHandler</tt> wrapper * containing this object; the wrapper has some useful * metadata, such as the object's configuration section * name and extra variables. * * @throws ConfigurationException configuration error * @throws CurnException some other initialization error */ public final void init (ConfigFile config, ConfiguredOutputHandler cfgHandler) throws ConfigurationException, CurnException { String saveAs = null; String sectionName = null; this.config = config; sectionName = cfgHandler.getSectionName(); this.name = sectionName; log = new Logger (FileOutputHandler.class.getName() + "[" + name + "]"); try { if (sectionName != null) { saveAs = config.getOptionalStringValue (sectionName, CFG_SAVE_AS, null); savedBackups = config.getOptionalCardinalValue (sectionName, CFG_SAVED_BACKUPS, 0); saveOnly = config.getOptionalBooleanValue (sectionName, CFG_SAVE_ONLY, false); showToolInfo = config.getOptionalBooleanValue (sectionName, CFG_SHOW_CURN_INFO, true); encoding = config.getOptionalStringValue (sectionName, CFG_ENCODING, DEFAULT_CHARSET_ENCODING); // saveOnly cannot be set unless saveAs is non-null. The // ConfigFile class is supposed to trap for this, so an // assertion is fine here. assert ((! 
saveOnly) || (saveAs != null)); } } catch (NoSuchSectionException ex) { throw new ConfigurationException (ex); } if (saveAs != null) outputFile = new File (saveAs); else { try { outputFile = File.createTempFile ("curn", null); outputFile.deleteOnExit(); } catch (IOException ex) { throw new CurnException (Util.BUNDLE_NAME, "FileOutputHandler.cantMakeTempFile", "Cannot create temporary file", ex); } } log.debug ("Calling " + this.getClass().getName() - + "initOutputHandler()"); + + ".initOutputHandler()"); initOutputHandler (config, cfgHandler); } /** * Perform any subclass-specific initialization. Subclasses must * override this method. * * @param config the parsed <i>curn</i> configuration data * @param cfgHandler the <tt>ConfiguredOutputHandler</tt> wrapper * containing this object; the wrapper has some useful * metadata, such as the object's configuration section * name and extra variables. * * @throws ConfigurationException configuration error * @throws CurnException some other initialization error */ public abstract void initOutputHandler (ConfigFile config, ConfiguredOutputHandler cfgHandler) throws ConfigurationException, CurnException; /** * Display the list of <tt>RSSItem</tt> news items to whatever output * is defined for the underlying class. Output is written to the * <tt>PrintWriter</tt> that was passed to the {@link #init init()} * method. * * @param channel The channel containing the items to emit. The method * should emit all the items in the channel; the caller * is responsible for clearing out any items that should * not be seen. * @param feedInfo Information about the feed, from the configuration * * @throws CurnException unable to write output */ public abstract void displayChannel (RSSChannel channel, FeedInfo feedInfo) throws CurnException; /** * Flush any buffered-up output. * * @throws CurnException unable to write output */ public abstract void flush() throws CurnException; /** * Get the content (i.e., MIME) type for output produced by this output * handler. * * @return the content type */ public abstract String getContentType(); /** * Get the <tt>File</tt> that represents the output produced by the * handler, if applicable. (Use of a <tt>File</tt>, rather than an * <tt>InputStream</tt>, is more efficient when mailing the output, * since the email API ultimately wants files and will create * temporary files for <tt>InputStream</tt>s.) * * @return the output file, or null if no suitable output was produced * * @throws CurnException an error occurred */ public final File getGeneratedOutput() throws CurnException { return hasGeneratedOutput() ? outputFile : null; } /** * Determine whether this handler has produced any actual output (i.e., * whether {@link #getGeneratedOutput()} will return a non-null * <tt>File</tt> if called). * * @return <tt>true</tt> if the handler has produced output, * <tt>false</tt> if not * * @see #getGeneratedOutput * @see #getContentType */ public final boolean hasGeneratedOutput() { boolean hasOutput = false; if ((! saveOnly) && (outputFile != null)) { long len = outputFile.length(); log.debug ("outputFile=" + outputFile.getPath() + ", size=" + len); hasOutput = (len > 0); } log.debug ("hasGeneratedOutput? " + hasOutput); return hasOutput; } /*----------------------------------------------------------------------*\ Protected Methods \*----------------------------------------------------------------------*/ /** * Get the output file. 
* * @return the output file, or none if not created yet */ protected final File getOutputFile() { return outputFile; } /** * Open the output file, returning a <tt>PrintWriter</tt>. Handles * whether or not to roll the saved file, etc. * * @return the <tt>PrintWriter</tt> * * @throws CurnException unable to open file */ protected PrintWriter openOutputFile() throws CurnException { PrintWriter w = null; try { log.debug ("Opening output file \"" + outputFile + "\""); // For the output handler output file, the index marker between // the file name and the extension, rather than at the end of // the file (since the extension is likely to matter). w = Util.openOutputFile (outputFile, encoding, Util.IndexMarker.BEFORE_EXTENSION, savedBackups); } catch (IOExceptionExt ex) { throw new CurnException (ex); } return w; } /** * Determine whether the handler is saving output only, or also reporting * output to <i>curn</i>. * * @return <tt>true</tt> if saving output only, <tt>false</tt> if also * reporting output to <i>curn</i> */ protected final boolean savingOutputOnly() { return saveOnly; } /** * Get the configured encoding. * * @return the encoding, or null if not configured * * @see #setEncoding */ protected final String getEncoding() { return encoding; } /** * Override the encoding specified by the {@link #CFG_ENCODING} * configuration parameter. To have any effect, this method must be * called before {@link #openOutputFile} * * @param newEncoding the new encoding, or null to use the default * * @see #getEncoding */ protected final void setEncoding (String newEncoding) { this.encoding = newEncoding; } /** * Determine whether or not to display curn tool-related information in * the generated output. Subclasses are not required to display * tool-related information in the generated output, but if they do, * they are strongly encouraged to do so conditionally, based on the * value of this configuration item. * * @return <tt>true</tt> if tool-related information is to be displayed * (assuming the output handler supports it), or <tt>false</tt> * if tool-related information should be suppressed. */ protected final boolean displayToolInfo() { return this.showToolInfo; } /** * Convert certain Unicode characters in a string to plain text * sequences. Also strips embedded HTML tags from the string. Useful * primarily for handlers that produce plain text. * * @param s the string to convert * * @return the possibly converted string */ protected String convert (String s) { StringBuffer buf = new StringBuffer(); char[] ch; if (s == null) return ""; s = HTMLUtil.textFromHTML (s); ch = s.toCharArray(); buf.setLength (0); for (int i = 0; i < ch.length; i++) { switch (ch[i]) { case Unicode.LEFT_SINGLE_QUOTE: case Unicode.RIGHT_SINGLE_QUOTE: buf.append ('\''); break; case Unicode.LEFT_DOUBLE_QUOTE: case Unicode.RIGHT_DOUBLE_QUOTE: buf.append ('"'); break; case Unicode.EM_DASH: buf.append ("--"); break; case Unicode.EN_DASH: buf.append ('-'); break; case Unicode.TRADEMARK: buf.append ("[TM]"); break; default: buf.append (ch[i]); break; } } return buf.toString(); } /** * Convert various fields in a channel and its subitems by invoking the * {@link #convert} method on them. Intended primarily for output handlers * that produce plain text. Produces a copy of the channel, so that the * original channel isn't modified (since it might be used by subsequent * handlers that don't want the data to be converted). * * @param channel the channel * * @return a copy of the channel, with possibly converted data. 
* * @throws CurnException on error */ protected RSSChannel convertChannelText (RSSChannel channel) throws CurnException { RSSChannel channelCopy = channel.makeCopy(); Collection<RSSItem> items = channelCopy.getItems(); if ((items != null) && (items.size() > 0)) { for (Iterator it = items.iterator(); it.hasNext(); ) { RSSItem item = (RSSItem) it.next(); item.setTitle (convert (item.getTitle())); Collection<String> authors = item.getAuthors(); if ((authors != null) && (authors.size() > 0)) { Collection<String> cvtAuthors = new ArrayList<String>(); for (String author : authors) cvtAuthors.add (convert (author)); item.setAuthors (cvtAuthors); } String s = item.getSummary(); if (s != null) item.setSummary (convert (s)); } } return channelCopy; } }
true
true
public final void init (ConfigFile config, ConfiguredOutputHandler cfgHandler) throws ConfigurationException, CurnException { String saveAs = null; String sectionName = null; this.config = config; sectionName = cfgHandler.getSectionName(); this.name = sectionName; log = new Logger (FileOutputHandler.class.getName() + "[" + name + "]"); try { if (sectionName != null) { saveAs = config.getOptionalStringValue (sectionName, CFG_SAVE_AS, null); savedBackups = config.getOptionalCardinalValue (sectionName, CFG_SAVED_BACKUPS, 0); saveOnly = config.getOptionalBooleanValue (sectionName, CFG_SAVE_ONLY, false); showToolInfo = config.getOptionalBooleanValue (sectionName, CFG_SHOW_CURN_INFO, true); encoding = config.getOptionalStringValue (sectionName, CFG_ENCODING, DEFAULT_CHARSET_ENCODING); // saveOnly cannot be set unless saveAs is non-null. The // ConfigFile class is supposed to trap for this, so an // assertion is fine here. assert ((! saveOnly) || (saveAs != null)); } } catch (NoSuchSectionException ex) { throw new ConfigurationException (ex); } if (saveAs != null) outputFile = new File (saveAs); else { try { outputFile = File.createTempFile ("curn", null); outputFile.deleteOnExit(); } catch (IOException ex) { throw new CurnException (Util.BUNDLE_NAME, "FileOutputHandler.cantMakeTempFile", "Cannot create temporary file", ex); } } log.debug ("Calling " + this.getClass().getName() + "initOutputHandler()"); initOutputHandler (config, cfgHandler); }
public final void init (ConfigFile config, ConfiguredOutputHandler cfgHandler) throws ConfigurationException, CurnException { String saveAs = null; String sectionName = null; this.config = config; sectionName = cfgHandler.getSectionName(); this.name = sectionName; log = new Logger (FileOutputHandler.class.getName() + "[" + name + "]"); try { if (sectionName != null) { saveAs = config.getOptionalStringValue (sectionName, CFG_SAVE_AS, null); savedBackups = config.getOptionalCardinalValue (sectionName, CFG_SAVED_BACKUPS, 0); saveOnly = config.getOptionalBooleanValue (sectionName, CFG_SAVE_ONLY, false); showToolInfo = config.getOptionalBooleanValue (sectionName, CFG_SHOW_CURN_INFO, true); encoding = config.getOptionalStringValue (sectionName, CFG_ENCODING, DEFAULT_CHARSET_ENCODING); // saveOnly cannot be set unless saveAs is non-null. The // ConfigFile class is supposed to trap for this, so an // assertion is fine here. assert ((! saveOnly) || (saveAs != null)); } } catch (NoSuchSectionException ex) { throw new ConfigurationException (ex); } if (saveAs != null) outputFile = new File (saveAs); else { try { outputFile = File.createTempFile ("curn", null); outputFile.deleteOnExit(); } catch (IOException ex) { throw new CurnException (Util.BUNDLE_NAME, "FileOutputHandler.cantMakeTempFile", "Cannot create temporary file", ex); } } log.debug ("Calling " + this.getClass().getName() + ".initOutputHandler()"); initOutputHandler (config, cfgHandler); }
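The fix here is a single missing dot in a debug message: without it, the class name returned by getClass().getName() runs straight into the method name in the log output. A trivial illustration (the subclass name below is hypothetical):

public class LogMessageDemo {
    public static void main(String[] args) {
        String cls = "org.example.HTMLOutputHandler"; // hypothetical subclass name
        // Buggy concatenation: "Calling org.example.HTMLOutputHandlerinitOutputHandler()"
        System.out.println("Calling " + cls + "initOutputHandler()");
        // Fixed concatenation: "Calling org.example.HTMLOutputHandler.initOutputHandler()"
        System.out.println("Calling " + cls + ".initOutputHandler()");
    }
}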
diff --git a/src/main/java/org/nuxeo/ecm/webapp/diff/PropertyDiffDisplayHelperBean.java b/src/main/java/org/nuxeo/ecm/webapp/diff/PropertyDiffDisplayHelperBean.java index d2b07d3..b4665d1 100644 --- a/src/main/java/org/nuxeo/ecm/webapp/diff/PropertyDiffDisplayHelperBean.java +++ b/src/main/java/org/nuxeo/ecm/webapp/diff/PropertyDiffDisplayHelperBean.java @@ -1,154 +1,160 @@ /* * (C) Copyright 2012 Nuxeo SA (http://nuxeo.com/) and contributors. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the GNU Lesser General Public License * (LGPL) version 2.1 which accompanies this distribution, and is available at * http://www.gnu.org/licenses/lgpl.html * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * Contributors: * ataillefer */ package org.nuxeo.ecm.webapp.diff; import static org.jboss.seam.ScopeType.APPLICATION; import java.io.Serializable; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.List; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.nuxeo.ecm.core.api.ClientException; import org.nuxeo.ecm.core.api.DocumentModel; import org.nuxeo.ecm.platform.diff.helpers.ComplexPropertyHelper; import org.nuxeo.ecm.platform.diff.model.PropertyDiff; import org.nuxeo.ecm.platform.diff.model.PropertyType; import org.nuxeo.ecm.platform.diff.model.impl.ListPropertyDiff; import org.nuxeo.ecm.webapp.helpers.ResourcesAccessor; /** * Helps handling property diff display. * * @author <a href="mailto:[email protected]">Antoine Taillefer</a> */ @Name("propertyDiffDisplayHelper") @Scope(APPLICATION) public class PropertyDiffDisplayHelperBean implements Serializable { private static final long serialVersionUID = -7995476720750309928L; @In(create = true) protected transient ResourcesAccessor resourcesAccessor; public static Serializable getSimplePropertyValue(DocumentModel doc, String schemaName, String fieldName) throws ClientException { return ComplexPropertyHelper.getSimplePropertyValue(doc, schemaName, fieldName); } public List<String> getComplexItemNames(String schemaName, String fieldName) throws Exception { return ComplexPropertyHelper.getComplexItemNames(schemaName, fieldName); } public Serializable getComplexItemValue(DocumentModel doc, String schemaName, String fieldName, String complexItemName) throws ClientException { return ComplexPropertyHelper.getComplexItemValue(doc, schemaName, fieldName, complexItemName); } public List<Integer> getListItemIndexes(ListPropertyDiff listPropertyDiff) throws ClientException { return ComplexPropertyHelper.getListItemIndexes(listPropertyDiff); } public Serializable getListItemValue(DocumentModel doc, String schemaName, String fieldName, int itemIndex) throws ClientException { return ComplexPropertyHelper.getListItemValue(doc, schemaName, fieldName, itemIndex); } public List<String> getComplexListItemNames(String schemaName, String fieldName) throws Exception { return ComplexPropertyHelper.getComplexListItemNames(schemaName, fieldName); } public Serializable getComplexListItemValue(DocumentModel doc, String schemaName, String fieldName, int itemIndex, String complexItemName) throws ClientException { return ComplexPropertyHelper.getComplexListItemValue(doc, schemaName, 
fieldName, itemIndex, complexItemName); } public boolean isSimpleProperty(Serializable prop) { return ComplexPropertyHelper.isSimpleProperty(prop); } public boolean isComplexProperty(Serializable prop) { return ComplexPropertyHelper.isComplexProperty(prop); } public boolean isListProperty(Serializable prop) { return ComplexPropertyHelper.isListProperty(prop); } /** * Gets the property display. * * @param propertyValue the property value * @param propertyDiff the property diff * @return the property display */ public String getPropertyDisplay(Serializable propertyValue, PropertyDiff propertyDiff) { String propertyDisplay; - String propertyType = propertyDiff.getPropertyType(); + // TODO: propertyDiff should never be null, see the 'files' schema case. + String propertyType; + if (propertyDiff == null) { + propertyType = PropertyType.STRING; + } else { + propertyType = propertyDiff.getPropertyType(); + } // Boolean if (PropertyType.BOOLEAN.equals(propertyType)) { propertyDisplay = resourcesAccessor.getMessages().get( "property.boolean." + propertyValue); } // Date else if (PropertyType.DATE.equals(propertyType)) { DateFormat sdf = new SimpleDateFormat("dd MMMM yyyy - hh:mm"); if (propertyValue instanceof Calendar) { propertyDisplay = sdf.format(((Calendar) propertyValue).getTime()); } else { // Date propertyDisplay = sdf.format(propertyValue); } } // Default: we consider property value is a String. // Works fine for PropertyType.STRING, PropertyType.INTEGER, // PropertyType.LONG, PropertyType.DOUBLE. else { propertyDisplay = propertyValue.toString(); } // TODO: Directory! return propertyDisplay; } }
true
true
public String getPropertyDisplay(Serializable propertyValue, PropertyDiff propertyDiff) { String propertyDisplay; String propertyType = propertyDiff.getPropertyType(); // Boolean if (PropertyType.BOOLEAN.equals(propertyType)) { propertyDisplay = resourcesAccessor.getMessages().get( "property.boolean." + propertyValue); } // Date else if (PropertyType.DATE.equals(propertyType)) { DateFormat sdf = new SimpleDateFormat("dd MMMM yyyy - hh:mm"); if (propertyValue instanceof Calendar) { propertyDisplay = sdf.format(((Calendar) propertyValue).getTime()); } else { // Date propertyDisplay = sdf.format(propertyValue); } } // Default: we consider property value is a String. // Works fine for PropertyType.STRING, PropertyType.INTEGER, // PropertyType.LONG, PropertyType.DOUBLE. else { propertyDisplay = propertyValue.toString(); } // TODO: Directory! return propertyDisplay; }
public String getPropertyDisplay(Serializable propertyValue, PropertyDiff propertyDiff) { String propertyDisplay; // TODO: propertyDiff should never be null, see the 'files' schema case. String propertyType; if (propertyDiff == null) { propertyType = PropertyType.STRING; } else { propertyType = propertyDiff.getPropertyType(); } // Boolean if (PropertyType.BOOLEAN.equals(propertyType)) { propertyDisplay = resourcesAccessor.getMessages().get( "property.boolean." + propertyValue); } // Date else if (PropertyType.DATE.equals(propertyType)) { DateFormat sdf = new SimpleDateFormat("dd MMMM yyyy - hh:mm"); if (propertyValue instanceof Calendar) { propertyDisplay = sdf.format(((Calendar) propertyValue).getTime()); } else { // Date propertyDisplay = sdf.format(propertyValue); } } // Default: we consider property value is a String. // Works fine for PropertyType.STRING, PropertyType.INTEGER, // PropertyType.LONG, PropertyType.DOUBLE. else { propertyDisplay = propertyValue.toString(); } // TODO: Directory! return propertyDisplay; }
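This fix guards against a null PropertyDiff by falling back to the string type before dispatching on it; the added TODO notes the null should eventually become impossible. A minimal sketch of the defaulting pattern, with a stand-in enum rather than the project's PropertyType constants:

public class NullGuardDemo {
    enum PropertyType { STRING, BOOLEAN, DATE }

    static class PropertyDiff {
        final PropertyType type;
        PropertyDiff(PropertyType type) { this.type = type; }
    }

    // Fall back to a safe default instead of dereferencing a possibly-null diff.
    static PropertyType typeOf(PropertyDiff diff) {
        return (diff == null) ? PropertyType.STRING : diff.type;
    }

    public static void main(String[] args) {
        System.out.println(typeOf(null));                                // STRING
        System.out.println(typeOf(new PropertyDiff(PropertyType.DATE))); // DATE
    }
}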
diff --git a/plugins/org.eclipse.acceleo.tutorial/src/org/eclipse/acceleo/tutorial/AcceleoProjectUnzipper.java b/plugins/org.eclipse.acceleo.tutorial/src/org/eclipse/acceleo/tutorial/AcceleoProjectUnzipper.java index f10c0e3..6593e45 100644 --- a/plugins/org.eclipse.acceleo.tutorial/src/org/eclipse/acceleo/tutorial/AcceleoProjectUnzipper.java +++ b/plugins/org.eclipse.acceleo.tutorial/src/org/eclipse/acceleo/tutorial/AcceleoProjectUnzipper.java @@ -1,150 +1,152 @@ /******************************************************************************* * Copyright (c) 2008, 2012 Obeo. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Obeo - initial API and implementation *******************************************************************************/ package org.eclipse.acceleo.tutorial; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.URL; import java.util.Enumeration; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import org.eclipse.core.commands.AbstractHandler; import org.eclipse.core.commands.ExecutionEvent; import org.eclipse.core.commands.ExecutionException; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Platform; import org.osgi.framework.Bundle; /** * Utility class to unzip one or more projects contained in plugins. * * @author <a href="mailto:[email protected]">Stephane Begaudeau</a> * @since 3.2 */ public class AcceleoProjectUnzipper extends AbstractHandler { /** * {@inheritDoc} * * @see org.eclipse.core.commands.IHandler#execute(org.eclipse.core.commands.ExecutionEvent) */ public Object execute(ExecutionEvent event) throws ExecutionException { String parameter = event.getParameter("org.eclipse.acceleo.tutorial.projectUnzipperPath"); String path = "invalid"; if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-1/".equals(parameter)) { path = "step-1"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-2/".equals(parameter)) { path = "step-2"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-3/".equals(parameter)) { path = "step-3"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-4/".equals(parameter)) { path = "step-4"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-5/".equals(parameter)) { path = "step-5"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-6/".equals(parameter)) { path = "step-6"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-7/".equals(parameter)) { path = "step-7"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-8/".equals(parameter)) { path = "step-8"; } Bundle bundle = Platform.getBundle("org.eclipse.acceleo.tutorial"); Enumeration<URL> entries = bundle.findEntries(path, "*.zip", false); while (entries.hasMoreElements()) { URL nextElement = entries.nextElement(); String projectName = nextElement.toString(); projectName = projectName.substring(projectName.lastIndexOf("/")); if (projectName.endsWith(".zip")) { projectName = projectName.substring(0, projectName.length() - ".zip".length()); } try { final IProject project = 
ResourcesPlugin.getWorkspace().getRoot().getProject(projectName); if (project.exists()) { return null; } try { project.create(new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } final String regexedProjectName = projectName.replaceAll("\\.", "\\."); //$NON-NLS-1$ //$NON-NLS-2$ final ZipInputStream zipFileStream = new ZipInputStream(nextElement.openStream()); ZipEntry zipEntry = zipFileStream.getNextEntry(); while (zipEntry != null) { // We will construct the new file but we will strip off the project // directory from the beginning of the path because we have already // created the destination project for this zip. final File file = new File(project.getLocation().toString(), zipEntry.getName() .replaceFirst("^" + regexedProjectName + "/", "")); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$ if (!zipEntry.isDirectory()) { /* * Copy files (and make sure parent directory exist) */ final File parentFile = file.getParentFile(); if (null != parentFile && !parentFile.exists()) { parentFile.mkdirs(); } OutputStream os = null; try { os = new FileOutputStream(file); final int bufferSize = 102400; final byte[] buffer = new byte[bufferSize]; while (true) { final int len = zipFileStream.read(buffer); if (zipFileStream.available() == 0) { break; } os.write(buffer, 0, len); } } finally { if (null != os) { os.close(); } } + } else { + file.mkdir(); } zipFileStream.closeEntry(); zipEntry = zipFileStream.getNextEntry(); } try { project.open(new NullProgressMonitor()); project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } } catch (IOException e) { e.printStackTrace(); } } return null; } }
true
true
public Object execute(ExecutionEvent event) throws ExecutionException { String parameter = event.getParameter("org.eclipse.acceleo.tutorial.projectUnzipperPath"); String path = "invalid"; if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-1/".equals(parameter)) { path = "step-1"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-2/".equals(parameter)) { path = "step-2"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-3/".equals(parameter)) { path = "step-3"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-4/".equals(parameter)) { path = "step-4"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-5/".equals(parameter)) { path = "step-5"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-6/".equals(parameter)) { path = "step-6"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-7/".equals(parameter)) { path = "step-7"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-8/".equals(parameter)) { path = "step-8"; } Bundle bundle = Platform.getBundle("org.eclipse.acceleo.tutorial"); Enumeration<URL> entries = bundle.findEntries(path, "*.zip", false); while (entries.hasMoreElements()) { URL nextElement = entries.nextElement(); String projectName = nextElement.toString(); projectName = projectName.substring(projectName.lastIndexOf("/")); if (projectName.endsWith(".zip")) { projectName = projectName.substring(0, projectName.length() - ".zip".length()); } try { final IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(projectName); if (project.exists()) { return null; } try { project.create(new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } final String regexedProjectName = projectName.replaceAll("\\.", "\\."); //$NON-NLS-1$ //$NON-NLS-2$ final ZipInputStream zipFileStream = new ZipInputStream(nextElement.openStream()); ZipEntry zipEntry = zipFileStream.getNextEntry(); while (zipEntry != null) { // We will construct the new file but we will strip off the project // directory from the beginning of the path because we have already // created the destination project for this zip. final File file = new File(project.getLocation().toString(), zipEntry.getName() .replaceFirst("^" + regexedProjectName + "/", "")); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$ if (!zipEntry.isDirectory()) { /* * Copy files (and make sure parent directory exist) */ final File parentFile = file.getParentFile(); if (null != parentFile && !parentFile.exists()) { parentFile.mkdirs(); } OutputStream os = null; try { os = new FileOutputStream(file); final int bufferSize = 102400; final byte[] buffer = new byte[bufferSize]; while (true) { final int len = zipFileStream.read(buffer); if (zipFileStream.available() == 0) { break; } os.write(buffer, 0, len); } } finally { if (null != os) { os.close(); } } } zipFileStream.closeEntry(); zipEntry = zipFileStream.getNextEntry(); } try { project.open(new NullProgressMonitor()); project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } } catch (IOException e) { e.printStackTrace(); } } return null; }
public Object execute(ExecutionEvent event) throws ExecutionException { String parameter = event.getParameter("org.eclipse.acceleo.tutorial.projectUnzipperPath"); String path = "invalid"; if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-1/".equals(parameter)) { path = "step-1"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-2/".equals(parameter)) { path = "step-2"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-3/".equals(parameter)) { path = "step-3"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-4/".equals(parameter)) { path = "step-4"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-5/".equals(parameter)) { path = "step-5"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-6/".equals(parameter)) { path = "step-6"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-7/".equals(parameter)) { path = "step-7"; } else if ("platform:/plugin/org.eclipse.acceleo.tutorial/step-8/".equals(parameter)) { path = "step-8"; } Bundle bundle = Platform.getBundle("org.eclipse.acceleo.tutorial"); Enumeration<URL> entries = bundle.findEntries(path, "*.zip", false); while (entries.hasMoreElements()) { URL nextElement = entries.nextElement(); String projectName = nextElement.toString(); projectName = projectName.substring(projectName.lastIndexOf("/")); if (projectName.endsWith(".zip")) { projectName = projectName.substring(0, projectName.length() - ".zip".length()); } try { final IProject project = ResourcesPlugin.getWorkspace().getRoot().getProject(projectName); if (project.exists()) { return null; } try { project.create(new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } final String regexedProjectName = projectName.replaceAll("\\.", "\\."); //$NON-NLS-1$ //$NON-NLS-2$ final ZipInputStream zipFileStream = new ZipInputStream(nextElement.openStream()); ZipEntry zipEntry = zipFileStream.getNextEntry(); while (zipEntry != null) { // We will construct the new file but we will strip off the project // directory from the beginning of the path because we have already // created the destination project for this zip. final File file = new File(project.getLocation().toString(), zipEntry.getName() .replaceFirst("^" + regexedProjectName + "/", "")); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$ if (!zipEntry.isDirectory()) { /* * Copy files (and make sure parent directory exist) */ final File parentFile = file.getParentFile(); if (null != parentFile && !parentFile.exists()) { parentFile.mkdirs(); } OutputStream os = null; try { os = new FileOutputStream(file); final int bufferSize = 102400; final byte[] buffer = new byte[bufferSize]; while (true) { final int len = zipFileStream.read(buffer); if (zipFileStream.available() == 0) { break; } os.write(buffer, 0, len); } } finally { if (null != os) { os.close(); } } } else { file.mkdir(); } zipFileStream.closeEntry(); zipEntry = zipFileStream.getNextEntry(); } try { project.open(new NullProgressMonitor()); project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor()); } catch (CoreException e) { e.printStackTrace(); } } catch (IOException e) { e.printStackTrace(); } } return null; }
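The added else branch materializes directory entries with mkdir(); without it, directories that contain no files are silently dropped during extraction. A self-contained sketch of an extraction loop with that handling (this version also reads each entry until read() returns -1, rather than polling available() as the original does; a production extractor would additionally validate entry names against path traversal):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

public class UnzipSketch {
    static void unzip(InputStream in, File destDir) throws IOException {
        ZipInputStream zis = new ZipInputStream(in);
        try {
            for (ZipEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
                File out = new File(destDir, entry.getName());
                if (entry.isDirectory()) {
                    out.mkdirs(); // the case the fix adds: keep empty directories
                } else {
                    File parent = out.getParentFile();
                    if (parent != null && !parent.exists()) {
                        parent.mkdirs();
                    }
                    OutputStream os = new FileOutputStream(out);
                    try {
                        byte[] buf = new byte[8192];
                        int len;
                        while ((len = zis.read(buf)) != -1) {
                            os.write(buf, 0, len); // copy until end of entry
                        }
                    } finally {
                        os.close();
                    }
                }
                zis.closeEntry();
            }
        } finally {
            zis.close();
        }
    }
}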
diff --git a/library/src/org/wordpress/passcodelock/DefaultAppLock.java b/library/src/org/wordpress/passcodelock/DefaultAppLock.java index 998431f..cca9a96 100644 --- a/library/src/org/wordpress/passcodelock/DefaultAppLock.java +++ b/library/src/org/wordpress/passcodelock/DefaultAppLock.java @@ -1,158 +1,159 @@ package org.wordpress.passcodelock; import java.util.Arrays; import java.util.Date; import android.app.Activity; import android.app.Application; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.PreferenceManager; public class DefaultAppLock extends AbstractAppLock { private Application currentApp; //Keep a reference to the app that invoked the locker private SharedPreferences settings; private Date lostFocusDate; private static final String PASSWORD_SALT = "sadasauidhsuyeuihdahdiauhs"; public DefaultAppLock(Application currentApp) { super(); SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(currentApp); this.settings = settings; this.currentApp = currentApp; } public void enable(){ if (android.os.Build.VERSION.SDK_INT < 14) return; if( isPasswordLocked() ) { currentApp.unregisterActivityLifecycleCallbacks(this); currentApp.registerActivityLifecycleCallbacks(this); } } public void disable( ){ if (android.os.Build.VERSION.SDK_INT < 14) return; currentApp.unregisterActivityLifecycleCallbacks(this); } public void forcePasswordLock(){ lostFocusDate = null; } public boolean verifyPassword( String password ){ String storedPassword = settings.getString(APP_LOCK_PASSWORD_PREF_KEY, ""); password = PASSWORD_SALT + password + PASSWORD_SALT; password = StringUtils.getMd5Hash(password); if( password.equalsIgnoreCase(storedPassword) ) { lostFocusDate = new Date(); return true; } else { return false; } } public boolean setPassword(String password){ SharedPreferences.Editor editor = settings.edit(); if(password == null) { editor.remove(APP_LOCK_PASSWORD_PREF_KEY); editor.commit(); this.disable(); } else { password = PASSWORD_SALT + password + PASSWORD_SALT; editor.putString(APP_LOCK_PASSWORD_PREF_KEY, StringUtils.getMd5Hash(password)); editor.commit(); this.enable(); } return true; } public boolean isPasswordLocked(){ //Check if we need to show the lock screen at startup if( settings.getString(APP_LOCK_PASSWORD_PREF_KEY, "").equals("") ) return false; return true; } private boolean mustShowUnlockSceen() { if( isPasswordLocked() == false) return false; if( lostFocusDate == null ) return true; //first startup or when we forced to show the password int currentTimeOut = lockTimeOut; //get a reference to the current password timeout and reset it to default lockTimeOut = DEFAULT_TIMEOUT; Date now = new Date(); long now_ms = now.getTime(); long lost_focus_ms = lostFocusDate.getTime(); int secondsPassed = (int) (now_ms - lost_focus_ms)/(1000); + secondsPassed = Math.abs(secondsPassed); //Make sure changing the clock on the device to a time in the past doesn't by-pass PIN Lock if (secondsPassed >= currentTimeOut) { lostFocusDate = null; return true; } return false; } @Override public void onActivityPaused(Activity arg0) { if( arg0.getClass() == PasscodeUnlockActivity.class ) return; if( ( this.appLockDisabledActivities != null ) && Arrays.asList(this.appLockDisabledActivities).contains( arg0.getClass().getName() ) ) return; lostFocusDate = new Date(); } @Override public void onActivityResumed(Activity arg0) { if( arg0.getClass() == PasscodeUnlockActivity.class ) return; if( ( this.appLockDisabledActivities != 
null ) && Arrays.asList(this.appLockDisabledActivities).contains( arg0.getClass().getName() ) ) return; if(mustShowUnlockSceen()) { //uhhh ohhh! Intent i = new Intent(arg0.getApplicationContext(), PasscodeUnlockActivity.class); i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); arg0.getApplication().startActivity(i); return; } } @Override public void onActivityCreated(Activity arg0, Bundle arg1) { } @Override public void onActivityDestroyed(Activity arg0) { } @Override public void onActivitySaveInstanceState(Activity arg0, Bundle arg1) { } @Override public void onActivityStarted(Activity arg0) { } @Override public void onActivityStopped(Activity arg0) { } }
true
true
private boolean mustShowUnlockSceen() { if( isPasswordLocked() == false) return false; if( lostFocusDate == null ) return true; //first startup or when we forced to show the password int currentTimeOut = lockTimeOut; //get a reference to the current password timeout and reset it to default lockTimeOut = DEFAULT_TIMEOUT; Date now = new Date(); long now_ms = now.getTime(); long lost_focus_ms = lostFocusDate.getTime(); int secondsPassed = (int) (now_ms - lost_focus_ms)/(1000); if (secondsPassed >= currentTimeOut) { lostFocusDate = null; return true; } return false; }
private boolean mustShowUnlockSceen() { if( isPasswordLocked() == false) return false; if( lostFocusDate == null ) return true; //first startup or when we forced to show the password int currentTimeOut = lockTimeOut; //get a reference to the current password timeout and reset it to default lockTimeOut = DEFAULT_TIMEOUT; Date now = new Date(); long now_ms = now.getTime(); long lost_focus_ms = lostFocusDate.getTime(); int secondsPassed = (int) (now_ms - lost_focus_ms)/(1000); secondsPassed = Math.abs(secondsPassed); //Make sure changing the clock on the device to a time in the past doesn't by-pass PIN Lock if (secondsPassed >= currentTimeOut) { lostFocusDate = null; return true; } return false; }
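The one-line fix takes the absolute value of the elapsed interval, so winding the device clock backwards still registers as time passed instead of producing a negative interval that never reaches the timeout. A compact model of the check:

public class TimeoutDemo {
    static boolean mustLock(long lostFocusMs, long nowMs, int timeoutSeconds) {
        int secondsPassed = (int) ((nowMs - lostFocusMs) / 1000);
        secondsPassed = Math.abs(secondsPassed); // a rolled-back clock still counts
        return secondsPassed >= timeoutSeconds;
    }

    public static void main(String[] args) {
        long lostFocus = 1000000000L;
        System.out.println(mustLock(lostFocus, lostFocus + 5000L, 60));   // false: only 5s
        System.out.println(mustLock(lostFocus, lostFocus + 61000L, 60));  // true: 61s elapsed
        System.out.println(mustLock(lostFocus, lostFocus - 120000L, 60)); // true: clock set back 2min
    }
}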
diff --git a/modules/plugin/arcsde/sde-dummy/src/main/java/com/esri/sde/sdk/client/SeRasterColumn.java b/modules/plugin/arcsde/sde-dummy/src/main/java/com/esri/sde/sdk/client/SeRasterColumn.java index 180b6c23..ec52e118 100644 --- a/modules/plugin/arcsde/sde-dummy/src/main/java/com/esri/sde/sdk/client/SeRasterColumn.java +++ b/modules/plugin/arcsde/sde-dummy/src/main/java/com/esri/sde/sdk/client/SeRasterColumn.java @@ -1,27 +1,27 @@ package com.esri.sde.sdk.client; public class SeRasterColumn { public SeRasterColumn(SeConnection conn, SeObjectId id) throws SeException {} public SeRasterColumn(SeConnection conn) throws SeException {} public SeCoordinateReference getCoordRef() { return null; } public String getName() { return null; } public String getQualifiedTableName() { return null; } public void setTableName(String name) {} public void setDescription(String desc) {} public void setRasterColumnName(String rColName) {} public void setCoordRef(SeCoordinateReference coordref) {} public void setConfigurationKeyword(String s) {} public void create() {} public String getTableName() { return null; } - public Integer getID() throws SeException{ + public SeObjectId getID() throws SeException{ // TODO Auto-generated method stub return null; } }
true
true
public Integer getID() throws SeException{ // TODO Auto-generated method stub return null; }
public SeObjectId getID() throws SeException{ // TODO Auto-generated method stub return null; }
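This change only corrects the stub's return type: the dummy SDK class must declare getID() as returning SeObjectId so that code compiled against the stub type-checks the same way as against the real ArcSDE API. A minimal illustration with invented stand-in types:

class FakeObjectId { }

class FakeRasterColumn {
    // The stub must mirror the real signature, or callers fail to compile.
    FakeObjectId getID() { return new FakeObjectId(); }
}

public class StubSignatureDemo {
    public static void main(String[] args) {
        FakeObjectId id = new FakeRasterColumn().getID(); // compiles only with
        System.out.println(id != null);                   // the matching return type
    }
}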
diff --git a/stanford-sw/src/edu/stanford/Item.java b/stanford-sw/src/edu/stanford/Item.java index c498439..f554f92 100644 --- a/stanford-sw/src/edu/stanford/Item.java +++ b/stanford-sw/src/edu/stanford/Item.java @@ -1,517 +1,517 @@ package edu.stanford; import java.util.regex.Pattern; import org.marc4j.marc.DataField; import org.solrmarc.tools.CallNumUtils; import org.solrmarc.tools.MarcUtils; import edu.stanford.enumValues.CallNumberType; /** * Item object for Stanford SolrMarc * @author Naomi Dushay */ public class Item { /** call number for SUL online items */ public final static String ECALLNUM = "INTERNET RESOURCE"; /** location code for online items */ public final static String ELOC = "INTERNET"; /** temporary call numbers (in process, on order ..) should start with this prefix */ public final static String TMP_CALLNUM_PREFIX = "XX"; /* immutable instance variables */ private final String recId; private final String barcode; private final String library; private final String itemType; private final boolean shouldBeSkipped; private final boolean hasGovDocLoc; private final boolean isOnline; private final boolean hasShelbyLoc; private final boolean hasBizShelbyLoc; /* normal instance variables */ private CallNumberType callnumType; private String homeLoc; private String currLoc; private String normCallnum; private boolean isOnOrder = false; private boolean isInProcess = false; private boolean hasIgnoredCallnum = false; private boolean hasBadLcLaneCallnum = false; private boolean isMissingLost = false; private boolean hasSeparateBrowseCallnum = false; /** call number with volume suffix lopped off the end. Used to remove * noise in search results and in browsing */ private String loppedCallnum = null; /** set when there is a callnumber for browsing different from that in the 999 */ private String browseCallnum = null; /** sortable version of lopped call number */ private String loppedShelfkey = null; /** reverse sorted version of loppedShelfkey - the last call number shall * be first, etc. */ private String reverseLoppedShelfkey = null; /** sortable full call number, where, for serials, any volume suffix will sort * in descending order. Non-serial volumes will sort in ascending order. 
*/ private String callnumVolSort = null; /** * initialize object from 999 DataField, which has the following subfields * <ul> * <li>a - call num</li> * <li>i - barcode</li> * <li>k - current location</li> * <li>l - home location</li> * <li>m - library code</li> * <li>o - public note</li> * <li>t - item type</li> * <li>w - call num scheme</li> * </ul> */ public Item(DataField f999, String recId) { // set all the immutable variables this.recId = recId; barcode = MarcUtils.getSubfieldTrimmed(f999, 'i'); currLoc = MarcUtils.getSubfieldTrimmed(f999, 'k'); homeLoc = MarcUtils.getSubfieldTrimmed(f999, 'l'); library = MarcUtils.getSubfieldTrimmed(f999, 'm'); itemType = MarcUtils.getSubfieldTrimmed(f999, 't'); String scheme = MarcUtils.getSubfieldTrimmed(f999, 'w'); String rawCallnum = MarcUtils.getSubfieldTrimmed(f999, 'a'); if (StanfordIndexer.SKIPPED_LOCS.contains(currLoc) || StanfordIndexer.SKIPPED_LOCS.contains(homeLoc) || itemType.equals("EDI-REMOVE")) shouldBeSkipped = true; else shouldBeSkipped = false; if (StanfordIndexer.GOV_DOC_LOCS.contains(currLoc) || StanfordIndexer.GOV_DOC_LOCS.contains(homeLoc) ) hasGovDocLoc = true; else hasGovDocLoc = false; if (StanfordIndexer.MISSING_LOCS.contains(currLoc) || StanfordIndexer.MISSING_LOCS.contains(homeLoc) ) isMissingLost = true; else isMissingLost = false; - if (library == "BUSINESS" + if (library.equals("BUSINESS") && (StanfordIndexer.BIZ_SHELBY_LOCS.contains(currLoc) || StanfordIndexer.BIZ_SHELBY_LOCS.contains(homeLoc) ) ) hasBizShelbyLoc = true; else hasBizShelbyLoc = false; if (StanfordIndexer.SHELBY_LOCS.contains(currLoc) || StanfordIndexer.SHELBY_LOCS.contains(homeLoc) ) hasShelbyLoc = true; else if (hasBizShelbyLoc) hasShelbyLoc = true; else hasShelbyLoc = false; if (StanfordIndexer.SKIPPED_CALLNUMS.contains(rawCallnum) || rawCallnum.startsWith(ECALLNUM) || rawCallnum.startsWith(TMP_CALLNUM_PREFIX)) hasIgnoredCallnum = true; else hasIgnoredCallnum = false; assignCallnumType(scheme); if (!hasIgnoredCallnum) { if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) normCallnum = CallNumUtils.normalizeCallnum(rawCallnum); else normCallnum = rawCallnum.trim(); validateCallnum(recId); } else normCallnum = rawCallnum.trim(); // isOnline is immutable so must be set here if (StanfordIndexer.ONLINE_LOCS.contains(currLoc) || StanfordIndexer.ONLINE_LOCS.contains(homeLoc) //) { || normCallnum.startsWith(ECALLNUM) ) { isOnline = true; } else isOnline = false; dealWithXXCallnums(recId); } public String getBarcode() { return barcode; } public String getLibrary() { return library; } public String getHomeLoc() { return homeLoc; } public String getCurrLoc() { return currLoc; } public String getType() { return itemType; } public String getCallnum() { return normCallnum; } public CallNumberType getCallnumType() { return callnumType; } public void setCallnumType(CallNumberType callnumType) { this.callnumType = callnumType; } /** * @return true if this item has a current or home location indicating it * should be skipped (e.g. 
"WITHDRAWN" or a shadowed location) or has * a type of "EDI-REMOVE") */ public boolean shouldBeSkipped() { return shouldBeSkipped; } /** * @return true if item location indicating it is missing or lost */ public boolean isMissingOrLost() { return isMissingLost; } /** * @return true if item has a government doc location */ public boolean hasGovDocLoc() { return hasGovDocLoc; } /** * return true if item has a callnumber or location code indicating it is online */ public boolean isOnline() { if (normCallnum.startsWith(ECALLNUM) || homeLoc.equals(ELOC) || currLoc.equals(ELOC)) return true; else return isOnline; } /** * @return true if item is on order */ public boolean isOnOrder() { return isOnOrder; } /** * @return true if item is in process */ public boolean isInProcess() { return isInProcess; } /** * return true if item has a shelby location (current or home) */ public boolean hasShelbyLoc() { return hasShelbyLoc; } /** * return true if item has a business library only shelby location (current or home) */ public boolean hasBizShelbyLoc() { return hasBizShelbyLoc; } /** * @return true if call number is to be ignored in some contexts * (e.g. "NO CALL NUMBER" or "XX(blah)") */ public boolean hasIgnoredCallnum() { return hasIgnoredCallnum; } /** * @return true if call number is Lane (Law) invalid LC callnum */ public boolean hasBadLcLaneCallnum() { return hasBadLcLaneCallnum; } /** * @return true if item has a call number from the bib fields */ public boolean hasSeparateBrowseCallnum() { return hasSeparateBrowseCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will NOT set the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum() { if (hasSeparateBrowseCallnum) return browseCallnum; else return loppedCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will SET the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum(boolean isSerial) { if (hasSeparateBrowseCallnum) return browseCallnum; else return getLoppedCallnum(isSerial); } /** * for resources that have items without browsable call numbers * (SUL INTERNET RESOURCE), we look for a call number in the bib record * fields (050, 090, 086 ...) for browse nearby and for call number facets. * If one is found, this method is used. */ public void setBrowseCallnum(String callnum) { hasSeparateBrowseCallnum = true; if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) browseCallnum = CallNumUtils.normalizeCallnum(callnum); else browseCallnum = callnum.trim(); } /** * get the lopped call number (any volume suffix is lopped off the end.) * This will remove noise in search results and in browsing. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. */ public String getLoppedCallnum(boolean isSerial) { if (loppedCallnum == null) setLoppedCallnum(isSerial); return loppedCallnum; } /** * sets the private field loppedCallnum to contain the call number without * any volume suffix information. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. 
*/ private void setLoppedCallnum(boolean isSerial) { loppedCallnum = edu.stanford.CallNumUtils.getLoppedCallnum(normCallnum, callnumType, isSerial); if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) loppedCallnum = loppedCallnum + " ..."; } /** * sets the private field loppedCallnum to the passed value. Used when * lopping must be dictated elsewhere. */ void setLoppedCallnum(String loppedCallnum) { this.loppedCallnum = loppedCallnum; if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) this.loppedCallnum = loppedCallnum + " ..."; } /** * get the sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); return loppedShelfkey; } /** * sets the private field loppedShelfkey (and loppedCallnum if it's not * already set). loppedShelfkey will contain the sortable version of the * lopped call number * @param isSerial - true if item is for a serial. */ private void setShelfkey(boolean isSerial) { if (loppedShelfkey == null) { String skeyCallnum = getBrowseCallnum(isSerial); if (skeyCallnum != null && skeyCallnum.length() > 0 && !StanfordIndexer.SKIPPED_CALLNUMS.contains(skeyCallnum) && !skeyCallnum.startsWith(ECALLNUM) && !skeyCallnum.startsWith(TMP_CALLNUM_PREFIX) ) loppedShelfkey = edu.stanford.CallNumUtils.getShelfKey(skeyCallnum, callnumType, recId); } } /** * get the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getReverseShelfkey(boolean isSerial) { if (reverseLoppedShelfkey == null) setReverseShelfkey(isSerial); return reverseLoppedShelfkey; } /** * sets the private field reverseLoppedShelfkey (and loppedShelfkey and * loppedCallnum if they're not already set). reverseLoppedShelfkey will * contain the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ private void setReverseShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) reverseLoppedShelfkey = CallNumUtils.getReverseShelfKey(loppedShelfkey); } /** * get the sortable full call number, where, for serials, any volume suffix * will sort in descending order. Non-serial volumes will sort in ascending * order. * @param isSerial - true if item is for a serial. */ public String getCallnumVolSort(boolean isSerial) { if (callnumVolSort == null) setCallnumVolSort(isSerial); return callnumVolSort; } /** * sets the private field callnumVolSort (and loppedShelfkey and * loppedCallnum if they're not already set.) callnumVolSort will contain * the sortable full call number, where, for serials, any volume suffix * will sort in descending order. * @param isSerial - true if item is for a serial. 
*/ private void setCallnumVolSort(boolean isSerial) { if (loppedShelfkey == null) // note: setting loppedShelfkey will also set loppedCallnum loppedShelfkey = getShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) callnumVolSort = edu.stanford.CallNumUtils.getVolumeSortCallnum( normCallnum, loppedCallnum, loppedShelfkey, callnumType, isSerial, recId); } /** call numbers must start with a letter or digit */ private static final Pattern STRANGE_CALLNUM_START_CHARS = Pattern.compile("^\\p{Alnum}"); /** * output an error message if the call number is supposed to be LC or DEWEY * but is invalid * @param recId the id of the record, used in error message */ private void validateCallnum(String recId) { if (callnumType == CallNumberType.LC && !CallNumUtils.isValidLC(normCallnum)) { if (!library.equals("LANE-MED")) System.err.println("record " + recId + " has invalid LC callnumber: " + normCallnum); adjustLCCallnumType(recId); } if (callnumType == CallNumberType.DEWEY && !CallNumUtils.isValidDeweyWithCutter(normCallnum)) { System.err.println("record " + recId + " has invalid DEWEY callnumber: " + normCallnum); callnumType = CallNumberType.OTHER; } else if (STRANGE_CALLNUM_START_CHARS.matcher(normCallnum).matches()) System.err.println("record " + recId + " has strange callnumber: " + normCallnum); } /** * LC is default call number scheme assigned; change it if assigned * incorrectly to a Dewey or ALPHANUM call number. Called after * printMsgIfInvalidCallnum has already found invalid LC call number */ private void adjustLCCallnumType(String id) { if (callnumType == CallNumberType.LC) { if (CallNumUtils.isValidDeweyWithCutter(normCallnum)) callnumType = CallNumberType.DEWEY; else { // FIXME: this is no good if the call number is SUDOC but mislabeled LC ... callnumType = CallNumberType.OTHER; if (library.equals("LANE-MED")) hasBadLcLaneCallnum = true; } } } /** * if item has XX call number * if home location or current location is INPROCESS or ON-ORDER, do * the obvious thing * if no home or current location, item is on order. * o.w. if the current location isn't shadowed, it is an error; * print an error message and fake "ON-ORDER" * @param recId - for error message */ private void dealWithXXCallnums(String recId) { if (normCallnum.startsWith(TMP_CALLNUM_PREFIX)) { if (currLoc.equals("ON-ORDER")) isOnOrder = true; else if (currLoc.equals("INPROCESS")) isInProcess = true; else if (shouldBeSkipped || currLoc.equals("LAC") || homeLoc.equals("LAC")) ; // we're okay else if (currLoc.length() > 0) { System.err.println("record " + recId + " has XX callnumber but current location is not ON-ORDER or INPROCESS or shadowy"); if (homeLoc.equals("ON-ORDER") || homeLoc.equals("INPROCESS")) { currLoc = homeLoc; homeLoc = ""; if (currLoc.equals("ON-ORDER")) isOnOrder = true; else isInProcess = true; } else { currLoc = "ON-ORDER"; isOnOrder = true; } } } } /** * assign a value to callnumType based on scheme ... * LCPER --> LC; DEWEYPER --> DEWEY */ private void assignCallnumType(String scheme) { if (scheme.startsWith("LC")) callnumType = CallNumberType.LC; else if (scheme.startsWith("DEWEY")) callnumType = CallNumberType.DEWEY; else if (scheme.equals("SUDOC")) callnumType = CallNumberType.SUDOC; else callnumType = CallNumberType.OTHER; } }
true
true
public Item(DataField f999, String recId) { // set all the immutable variables this.recId = recId; barcode = MarcUtils.getSubfieldTrimmed(f999, 'i'); currLoc = MarcUtils.getSubfieldTrimmed(f999, 'k'); homeLoc = MarcUtils.getSubfieldTrimmed(f999, 'l'); library = MarcUtils.getSubfieldTrimmed(f999, 'm'); itemType = MarcUtils.getSubfieldTrimmed(f999, 't'); String scheme = MarcUtils.getSubfieldTrimmed(f999, 'w'); String rawCallnum = MarcUtils.getSubfieldTrimmed(f999, 'a'); if (StanfordIndexer.SKIPPED_LOCS.contains(currLoc) || StanfordIndexer.SKIPPED_LOCS.contains(homeLoc) || itemType.equals("EDI-REMOVE")) shouldBeSkipped = true; else shouldBeSkipped = false; if (StanfordIndexer.GOV_DOC_LOCS.contains(currLoc) || StanfordIndexer.GOV_DOC_LOCS.contains(homeLoc) ) hasGovDocLoc = true; else hasGovDocLoc = false; if (StanfordIndexer.MISSING_LOCS.contains(currLoc) || StanfordIndexer.MISSING_LOCS.contains(homeLoc) ) isMissingLost = true; else isMissingLost = false; if (library == "BUSINESS" && (StanfordIndexer.BIZ_SHELBY_LOCS.contains(currLoc) || StanfordIndexer.BIZ_SHELBY_LOCS.contains(homeLoc) ) ) hasBizShelbyLoc = true; else hasBizShelbyLoc = false; if (StanfordIndexer.SHELBY_LOCS.contains(currLoc) || StanfordIndexer.SHELBY_LOCS.contains(homeLoc) ) hasShelbyLoc = true; else if (hasBizShelbyLoc) hasShelbyLoc = true; else hasShelbyLoc = false; if (StanfordIndexer.SKIPPED_CALLNUMS.contains(rawCallnum) || rawCallnum.startsWith(ECALLNUM) || rawCallnum.startsWith(TMP_CALLNUM_PREFIX)) hasIgnoredCallnum = true; else hasIgnoredCallnum = false; assignCallnumType(scheme); if (!hasIgnoredCallnum) { if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) normCallnum = CallNumUtils.normalizeCallnum(rawCallnum); else normCallnum = rawCallnum.trim(); validateCallnum(recId); } else normCallnum = rawCallnum.trim(); // isOnline is immutable so must be set here if (StanfordIndexer.ONLINE_LOCS.contains(currLoc) || StanfordIndexer.ONLINE_LOCS.contains(homeLoc) //) { || normCallnum.startsWith(ECALLNUM) ) { isOnline = true; } else isOnline = false; dealWithXXCallnums(recId); } public String getBarcode() { return barcode; } public String getLibrary() { return library; } public String getHomeLoc() { return homeLoc; } public String getCurrLoc() { return currLoc; } public String getType() { return itemType; } public String getCallnum() { return normCallnum; } public CallNumberType getCallnumType() { return callnumType; } public void setCallnumType(CallNumberType callnumType) { this.callnumType = callnumType; } /** * @return true if this item has a current or home location indicating it * should be skipped (e.g. 
"WITHDRAWN" or a shadowed location) or has * a type of "EDI-REMOVE") */ public boolean shouldBeSkipped() { return shouldBeSkipped; } /** * @return true if item location indicating it is missing or lost */ public boolean isMissingOrLost() { return isMissingLost; } /** * @return true if item has a government doc location */ public boolean hasGovDocLoc() { return hasGovDocLoc; } /** * return true if item has a callnumber or location code indicating it is online */ public boolean isOnline() { if (normCallnum.startsWith(ECALLNUM) || homeLoc.equals(ELOC) || currLoc.equals(ELOC)) return true; else return isOnline; } /** * @return true if item is on order */ public boolean isOnOrder() { return isOnOrder; } /** * @return true if item is in process */ public boolean isInProcess() { return isInProcess; } /** * return true if item has a shelby location (current or home) */ public boolean hasShelbyLoc() { return hasShelbyLoc; } /** * return true if item has a business library only shelby location (current or home) */ public boolean hasBizShelbyLoc() { return hasBizShelbyLoc; } /** * @return true if call number is to be ignored in some contexts * (e.g. "NO CALL NUMBER" or "XX(blah)") */ public boolean hasIgnoredCallnum() { return hasIgnoredCallnum; } /** * @return true if call number is Lane (Law) invalid LC callnum */ public boolean hasBadLcLaneCallnum() { return hasBadLcLaneCallnum; } /** * @return true if item has a call number from the bib fields */ public boolean hasSeparateBrowseCallnum() { return hasSeparateBrowseCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will NOT set the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum() { if (hasSeparateBrowseCallnum) return browseCallnum; else return loppedCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will SET the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum(boolean isSerial) { if (hasSeparateBrowseCallnum) return browseCallnum; else return getLoppedCallnum(isSerial); } /** * for resources that have items without browsable call numbers * (SUL INTERNET RESOURCE), we look for a call number in the bib record * fields (050, 090, 086 ...) for browse nearby and for call number facets. * If one is found, this method is used. */ public void setBrowseCallnum(String callnum) { hasSeparateBrowseCallnum = true; if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) browseCallnum = CallNumUtils.normalizeCallnum(callnum); else browseCallnum = callnum.trim(); } /** * get the lopped call number (any volume suffix is lopped off the end.) * This will remove noise in search results and in browsing. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. */ public String getLoppedCallnum(boolean isSerial) { if (loppedCallnum == null) setLoppedCallnum(isSerial); return loppedCallnum; } /** * sets the private field loppedCallnum to contain the call number without * any volume suffix information. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. 
*/ private void setLoppedCallnum(boolean isSerial) { loppedCallnum = edu.stanford.CallNumUtils.getLoppedCallnum(normCallnum, callnumType, isSerial); if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) loppedCallnum = loppedCallnum + " ..."; } /** * sets the private field loppedCallnum to the passed value. Used when * lopping must be dictated elsewhere. */ void setLoppedCallnum(String loppedCallnum) { this.loppedCallnum = loppedCallnum; if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) this.loppedCallnum = loppedCallnum + " ..."; } /** * get the sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); return loppedShelfkey; } /** * sets the private field loppedShelfkey (and loppedCallnum if it's not * already set). loppedShelfkey will contain the sortable version of the * lopped call number * @param isSerial - true if item is for a serial. */ private void setShelfkey(boolean isSerial) { if (loppedShelfkey == null) { String skeyCallnum = getBrowseCallnum(isSerial); if (skeyCallnum != null && skeyCallnum.length() > 0 && !StanfordIndexer.SKIPPED_CALLNUMS.contains(skeyCallnum) && !skeyCallnum.startsWith(ECALLNUM) && !skeyCallnum.startsWith(TMP_CALLNUM_PREFIX) ) loppedShelfkey = edu.stanford.CallNumUtils.getShelfKey(skeyCallnum, callnumType, recId); } } /** * get the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getReverseShelfkey(boolean isSerial) { if (reverseLoppedShelfkey == null) setReverseShelfkey(isSerial); return reverseLoppedShelfkey; } /** * sets the private field reverseLoppedShelfkey (and loppedShelfkey and * loppedCallnum if they're not already set). reverseLoppedShelfkey will * contain the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ private void setReverseShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) reverseLoppedShelfkey = CallNumUtils.getReverseShelfKey(loppedShelfkey); } /** * get the sortable full call number, where, for serials, any volume suffix * will sort in descending order. Non-serial volumes will sort in ascending * order. * @param isSerial - true if item is for a serial. */ public String getCallnumVolSort(boolean isSerial) { if (callnumVolSort == null) setCallnumVolSort(isSerial); return callnumVolSort; } /** * sets the private field callnumVolSort (and loppedShelfkey and * loppedCallnum if they're not already set.) callnumVolSort will contain * the sortable full call number, where, for serials, any volume suffix * will sort in descending order. * @param isSerial - true if item is for a serial. 
*/ private void setCallnumVolSort(boolean isSerial) { if (loppedShelfkey == null) // note: setting loppedShelfkey will also set loppedCallnum loppedShelfkey = getShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) callnumVolSort = edu.stanford.CallNumUtils.getVolumeSortCallnum( normCallnum, loppedCallnum, loppedShelfkey, callnumType, isSerial, recId); } /** call numbers must start with a letter or digit */ private static final Pattern STRANGE_CALLNUM_START_CHARS = Pattern.compile("^\\p{Alnum}"); /** * output an error message if the call number is supposed to be LC or DEWEY * but is invalid * @param recId the id of the record, used in error message */ private void validateCallnum(String recId) { if (callnumType == CallNumberType.LC && !CallNumUtils.isValidLC(normCallnum)) { if (!library.equals("LANE-MED")) System.err.println("record " + recId + " has invalid LC callnumber: " + normCallnum); adjustLCCallnumType(recId); } if (callnumType == CallNumberType.DEWEY && !CallNumUtils.isValidDeweyWithCutter(normCallnum)) { System.err.println("record " + recId + " has invalid DEWEY callnumber: " + normCallnum); callnumType = CallNumberType.OTHER; } else if (STRANGE_CALLNUM_START_CHARS.matcher(normCallnum).matches()) System.err.println("record " + recId + " has strange callnumber: " + normCallnum); } /** * LC is default call number scheme assigned; change it if assigned * incorrectly to a Dewey or ALPHANUM call number. Called after * printMsgIfInvalidCallnum has already found invalid LC call number */ private void adjustLCCallnumType(String id) { if (callnumType == CallNumberType.LC) { if (CallNumUtils.isValidDeweyWithCutter(normCallnum)) callnumType = CallNumberType.DEWEY; else { // FIXME: this is no good if the call number is SUDOC but mislabeled LC ... callnumType = CallNumberType.OTHER; if (library.equals("LANE-MED")) hasBadLcLaneCallnum = true; } } } /** * if item has XX call number * if home location or current location is INPROCESS or ON-ORDER, do * the obvious thing * if no home or current location, item is on order. * o.w. if the current location isn't shadowed, it is an error; * print an error message and fake "ON-ORDER" * @param recId - for error message */ private void dealWithXXCallnums(String recId) { if (normCallnum.startsWith(TMP_CALLNUM_PREFIX)) { if (currLoc.equals("ON-ORDER")) isOnOrder = true; else if (currLoc.equals("INPROCESS")) isInProcess = true; else if (shouldBeSkipped || currLoc.equals("LAC") || homeLoc.equals("LAC")) ; // we're okay else if (currLoc.length() > 0) { System.err.println("record " + recId + " has XX callnumber but current location is not ON-ORDER or INPROCESS or shadowy"); if (homeLoc.equals("ON-ORDER") || homeLoc.equals("INPROCESS")) { currLoc = homeLoc; homeLoc = ""; if (currLoc.equals("ON-ORDER")) isOnOrder = true; else isInProcess = true; } else { currLoc = "ON-ORDER"; isOnOrder = true; } } } } /** * assign a value to callnumType based on scheme ... * LCPER --> LC; DEWEYPER --> DEWEY */ private void assignCallnumType(String scheme) { if (scheme.startsWith("LC")) callnumType = CallNumberType.LC; else if (scheme.startsWith("DEWEY")) callnumType = CallNumberType.DEWEY; else if (scheme.equals("SUDOC")) callnumType = CallNumberType.SUDOC; else callnumType = CallNumberType.OTHER; } }
public Item(DataField f999, String recId) { // set all the immutable variables this.recId = recId; barcode = MarcUtils.getSubfieldTrimmed(f999, 'i'); currLoc = MarcUtils.getSubfieldTrimmed(f999, 'k'); homeLoc = MarcUtils.getSubfieldTrimmed(f999, 'l'); library = MarcUtils.getSubfieldTrimmed(f999, 'm'); itemType = MarcUtils.getSubfieldTrimmed(f999, 't'); String scheme = MarcUtils.getSubfieldTrimmed(f999, 'w'); String rawCallnum = MarcUtils.getSubfieldTrimmed(f999, 'a'); if (StanfordIndexer.SKIPPED_LOCS.contains(currLoc) || StanfordIndexer.SKIPPED_LOCS.contains(homeLoc) || itemType.equals("EDI-REMOVE")) shouldBeSkipped = true; else shouldBeSkipped = false; if (StanfordIndexer.GOV_DOC_LOCS.contains(currLoc) || StanfordIndexer.GOV_DOC_LOCS.contains(homeLoc) ) hasGovDocLoc = true; else hasGovDocLoc = false; if (StanfordIndexer.MISSING_LOCS.contains(currLoc) || StanfordIndexer.MISSING_LOCS.contains(homeLoc) ) isMissingLost = true; else isMissingLost = false; if (library.equals("BUSINESS") && (StanfordIndexer.BIZ_SHELBY_LOCS.contains(currLoc) || StanfordIndexer.BIZ_SHELBY_LOCS.contains(homeLoc) ) ) hasBizShelbyLoc = true; else hasBizShelbyLoc = false; if (StanfordIndexer.SHELBY_LOCS.contains(currLoc) || StanfordIndexer.SHELBY_LOCS.contains(homeLoc) ) hasShelbyLoc = true; else if (hasBizShelbyLoc) hasShelbyLoc = true; else hasShelbyLoc = false; if (StanfordIndexer.SKIPPED_CALLNUMS.contains(rawCallnum) || rawCallnum.startsWith(ECALLNUM) || rawCallnum.startsWith(TMP_CALLNUM_PREFIX)) hasIgnoredCallnum = true; else hasIgnoredCallnum = false; assignCallnumType(scheme); if (!hasIgnoredCallnum) { if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) normCallnum = CallNumUtils.normalizeCallnum(rawCallnum); else normCallnum = rawCallnum.trim(); validateCallnum(recId); } else normCallnum = rawCallnum.trim(); // isOnline is immutable so must be set here if (StanfordIndexer.ONLINE_LOCS.contains(currLoc) || StanfordIndexer.ONLINE_LOCS.contains(homeLoc) //) { || normCallnum.startsWith(ECALLNUM) ) { isOnline = true; } else isOnline = false; dealWithXXCallnums(recId); } public String getBarcode() { return barcode; } public String getLibrary() { return library; } public String getHomeLoc() { return homeLoc; } public String getCurrLoc() { return currLoc; } public String getType() { return itemType; } public String getCallnum() { return normCallnum; } public CallNumberType getCallnumType() { return callnumType; } public void setCallnumType(CallNumberType callnumType) { this.callnumType = callnumType; } /** * @return true if this item has a current or home location indicating it * should be skipped (e.g. 
"WITHDRAWN" or a shadowed location) or has * a type of "EDI-REMOVE") */ public boolean shouldBeSkipped() { return shouldBeSkipped; } /** * @return true if item location indicating it is missing or lost */ public boolean isMissingOrLost() { return isMissingLost; } /** * @return true if item has a government doc location */ public boolean hasGovDocLoc() { return hasGovDocLoc; } /** * return true if item has a callnumber or location code indicating it is online */ public boolean isOnline() { if (normCallnum.startsWith(ECALLNUM) || homeLoc.equals(ELOC) || currLoc.equals(ELOC)) return true; else return isOnline; } /** * @return true if item is on order */ public boolean isOnOrder() { return isOnOrder; } /** * @return true if item is in process */ public boolean isInProcess() { return isInProcess; } /** * return true if item has a shelby location (current or home) */ public boolean hasShelbyLoc() { return hasShelbyLoc; } /** * return true if item has a business library only shelby location (current or home) */ public boolean hasBizShelbyLoc() { return hasBizShelbyLoc; } /** * @return true if call number is to be ignored in some contexts * (e.g. "NO CALL NUMBER" or "XX(blah)") */ public boolean hasIgnoredCallnum() { return hasIgnoredCallnum; } /** * @return true if call number is Lane (Law) invalid LC callnum */ public boolean hasBadLcLaneCallnum() { return hasBadLcLaneCallnum; } /** * @return true if item has a call number from the bib fields */ public boolean hasSeparateBrowseCallnum() { return hasSeparateBrowseCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will NOT set the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum() { if (hasSeparateBrowseCallnum) return browseCallnum; else return loppedCallnum; } /** * return the call number for browsing - it could be a call number provided * outside of the item record. This method will SET the lopped call * number if the raw call number is from the item record and no * lopped call number has been set yet. */ public String getBrowseCallnum(boolean isSerial) { if (hasSeparateBrowseCallnum) return browseCallnum; else return getLoppedCallnum(isSerial); } /** * for resources that have items without browsable call numbers * (SUL INTERNET RESOURCE), we look for a call number in the bib record * fields (050, 090, 086 ...) for browse nearby and for call number facets. * If one is found, this method is used. */ public void setBrowseCallnum(String callnum) { hasSeparateBrowseCallnum = true; if (callnumType == CallNumberType.LC || callnumType == CallNumberType.DEWEY) browseCallnum = CallNumUtils.normalizeCallnum(callnum); else browseCallnum = callnum.trim(); } /** * get the lopped call number (any volume suffix is lopped off the end.) * This will remove noise in search results and in browsing. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. */ public String getLoppedCallnum(boolean isSerial) { if (loppedCallnum == null) setLoppedCallnum(isSerial); return loppedCallnum; } /** * sets the private field loppedCallnum to contain the call number without * any volume suffix information. * @param isSerial - true if item is for a serial. Used to determine if * year suffix should be lopped in addition to regular volume lopping. 
*/ private void setLoppedCallnum(boolean isSerial) { loppedCallnum = edu.stanford.CallNumUtils.getLoppedCallnum(normCallnum, callnumType, isSerial); if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) loppedCallnum = loppedCallnum + " ..."; } /** * sets the private field loppedCallnum to the passed value. Used when * lopping must be dictated elsewhere. */ void setLoppedCallnum(String loppedCallnum) { this.loppedCallnum = loppedCallnum; if (!loppedCallnum.endsWith(" ...") && !loppedCallnum.equals(normCallnum)) this.loppedCallnum = loppedCallnum + " ..."; } /** * get the sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); return loppedShelfkey; } /** * sets the private field loppedShelfkey (and loppedCallnum if it's not * already set). loppedShelfkey will contain the sortable version of the * lopped call number * @param isSerial - true if item is for a serial. */ private void setShelfkey(boolean isSerial) { if (loppedShelfkey == null) { String skeyCallnum = getBrowseCallnum(isSerial); if (skeyCallnum != null && skeyCallnum.length() > 0 && !StanfordIndexer.SKIPPED_CALLNUMS.contains(skeyCallnum) && !skeyCallnum.startsWith(ECALLNUM) && !skeyCallnum.startsWith(TMP_CALLNUM_PREFIX) ) loppedShelfkey = edu.stanford.CallNumUtils.getShelfKey(skeyCallnum, callnumType, recId); } } /** * get the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ public String getReverseShelfkey(boolean isSerial) { if (reverseLoppedShelfkey == null) setReverseShelfkey(isSerial); return reverseLoppedShelfkey; } /** * sets the private field reverseLoppedShelfkey (and loppedShelfkey and * loppedCallnum if they're not already set). reverseLoppedShelfkey will * contain the reverse sortable version of the lopped call number. * @param isSerial - true if item is for a serial. */ private void setReverseShelfkey(boolean isSerial) { if (loppedShelfkey == null) setShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) reverseLoppedShelfkey = CallNumUtils.getReverseShelfKey(loppedShelfkey); } /** * get the sortable full call number, where, for serials, any volume suffix * will sort in descending order. Non-serial volumes will sort in ascending * order. * @param isSerial - true if item is for a serial. */ public String getCallnumVolSort(boolean isSerial) { if (callnumVolSort == null) setCallnumVolSort(isSerial); return callnumVolSort; } /** * sets the private field callnumVolSort (and loppedShelfkey and * loppedCallnum if they're not already set.) callnumVolSort will contain * the sortable full call number, where, for serials, any volume suffix * will sort in descending order. * @param isSerial - true if item is for a serial. 
*/ private void setCallnumVolSort(boolean isSerial) { if (loppedShelfkey == null) // note: setting loppedShelfkey will also set loppedCallnum loppedShelfkey = getShelfkey(isSerial); if (loppedShelfkey != null && loppedShelfkey.length() > 0) callnumVolSort = edu.stanford.CallNumUtils.getVolumeSortCallnum( normCallnum, loppedCallnum, loppedShelfkey, callnumType, isSerial, recId); } /** call numbers must start with a letter or digit */ private static final Pattern STRANGE_CALLNUM_START_CHARS = Pattern.compile("^\\p{Alnum}"); /** * output an error message if the call number is supposed to be LC or DEWEY * but is invalid * @param recId the id of the record, used in error message */ private void validateCallnum(String recId) { if (callnumType == CallNumberType.LC && !CallNumUtils.isValidLC(normCallnum)) { if (!library.equals("LANE-MED")) System.err.println("record " + recId + " has invalid LC callnumber: " + normCallnum); adjustLCCallnumType(recId); } if (callnumType == CallNumberType.DEWEY && !CallNumUtils.isValidDeweyWithCutter(normCallnum)) { System.err.println("record " + recId + " has invalid DEWEY callnumber: " + normCallnum); callnumType = CallNumberType.OTHER; } else if (STRANGE_CALLNUM_START_CHARS.matcher(normCallnum).matches()) System.err.println("record " + recId + " has strange callnumber: " + normCallnum); } /** * LC is default call number scheme assigned; change it if assigned * incorrectly to a Dewey or ALPHANUM call number. Called after * printMsgIfInvalidCallnum has already found invalid LC call number */ private void adjustLCCallnumType(String id) { if (callnumType == CallNumberType.LC) { if (CallNumUtils.isValidDeweyWithCutter(normCallnum)) callnumType = CallNumberType.DEWEY; else { // FIXME: this is no good if the call number is SUDOC but mislabeled LC ... callnumType = CallNumberType.OTHER; if (library.equals("LANE-MED")) hasBadLcLaneCallnum = true; } } } /** * if item has XX call number * if home location or current location is INPROCESS or ON-ORDER, do * the obvious thing * if no home or current location, item is on order. * o.w. if the current location isn't shadowed, it is an error; * print an error message and fake "ON-ORDER" * @param recId - for error message */ private void dealWithXXCallnums(String recId) { if (normCallnum.startsWith(TMP_CALLNUM_PREFIX)) { if (currLoc.equals("ON-ORDER")) isOnOrder = true; else if (currLoc.equals("INPROCESS")) isInProcess = true; else if (shouldBeSkipped || currLoc.equals("LAC") || homeLoc.equals("LAC")) ; // we're okay else if (currLoc.length() > 0) { System.err.println("record " + recId + " has XX callnumber but current location is not ON-ORDER or INPROCESS or shadowy"); if (homeLoc.equals("ON-ORDER") || homeLoc.equals("INPROCESS")) { currLoc = homeLoc; homeLoc = ""; if (currLoc.equals("ON-ORDER")) isOnOrder = true; else isInProcess = true; } else { currLoc = "ON-ORDER"; isOnOrder = true; } } } } /** * assign a value to callnumType based on scheme ... * LCPER --> LC; DEWEYPER --> DEWEY */ private void assignCallnumType(String scheme) { if (scheme.startsWith("LC")) callnumType = CallNumberType.LC; else if (scheme.startsWith("DEWEY")) callnumType = CallNumberType.DEWEY; else if (scheme.equals("SUDOC")) callnumType = CallNumberType.SUDOC; else callnumType = CallNumberType.OTHER; } }
diff --git a/components/EJB/src/main/java/org/dejava/component/ejb/businessrule/AbstractGenericEntityBusinessRuleSet.java b/components/EJB/src/main/java/org/dejava/component/ejb/businessrule/AbstractGenericEntityBusinessRuleSet.java index c6ff5c5af..dc0b02538 100644 --- a/components/EJB/src/main/java/org/dejava/component/ejb/businessrule/AbstractGenericEntityBusinessRuleSet.java +++ b/components/EJB/src/main/java/org/dejava/component/ejb/businessrule/AbstractGenericEntityBusinessRuleSet.java @@ -1,74 +1,74 @@ package org.dejava.component.ejb.businessrule; import java.util.Collection; import java.util.HashSet; import java.util.Set; import javax.validation.ConstraintViolation; import javax.validation.Validation; import org.dejava.component.validation.object.ValidationException; /** * An implementation generic entity business rule set. * * @param <Entity> * Any entity. */ public abstract class AbstractGenericEntityBusinessRuleSet<Entity> implements GenericEntityBusinessRuleSet<Entity> { /** * Validates an entity in a given context. * * @param entity * Entity to be validated. * @param context * Context from within the entity should be validated. * @return The violations for the entity in the context. */ private Set<ConstraintViolation<Entity>> validateNoExceptions(final Entity entity, final Object... context) { - // If the entity is not null. - if (entity != null) { + // If the entity is null. + if (entity == null) { // Returns an empty set of violations. FIXME Think about. return new HashSet<>(); } // If the entity is not null. else { // Validates the current entity (and returns the found violations). return Validation.buildDefaultValidatorFactory().getValidator() .validate(entity, (Class<?>[]) context); } } /** * @see org.dejava.component.ejb.businessrule.GenericEntityBusinessRuleSet#validate(java.lang.Object, * java.lang.Object[]) */ @Override public void validate(final Entity entity, final Object... context) { // Validates the current entity (and throws an exception for the found violations). ValidationException.throwViolationExceptions(validateNoExceptions(entity, context)); } /** * @see org.dejava.component.ejb.businessrule.GenericEntityBusinessRuleSet#validate(java.util.Collection, * java.lang.Object[]) */ @Override public void validate(final Collection<Entity> entities, final Object... context) { // If there are entities to be added. if (entities != null) { // Creates a new violation set. final HashSet<ConstraintViolation<?>> violations = new HashSet<>(); // For each entity. for (final Entity currentEntity : entities) { // Validates the current entity (and add the violations to the complete set). validateNoExceptions(currentEntity, context); } // Throws an exception for the found violations. ValidationException.throwViolationExceptions(violations); } } }
true
true
private Set<ConstraintViolation<Entity>> validateNoExceptions(final Entity entity, final Object... context) { // If the entity is not null. if (entity != null) { // Returns an empty set of violations. FIXME Think about. return new HashSet<>(); } // If the entity is not null. else { // Validates the current entity (and returns the found violations). return Validation.buildDefaultValidatorFactory().getValidator() .validate(entity, (Class<?>[]) context); } }
private Set<ConstraintViolation<Entity>> validateNoExceptions(final Entity entity, final Object... context) { // If the entity is null. if (entity == null) { // Returns an empty set of violations. FIXME Think about. return new HashSet<>(); } // If the entity is not null. else { // Validates the current entity (and returns the found violations). return Validation.buildDefaultValidatorFactory().getValidator() .validate(entity, (Class<?>[]) context); } }
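This record's fix inverts a null guard: the buggy version returned the empty violation set for non-null entities, so the validator never ran. A small sketch of the corrected guard-clause shape, with a stand-in check in place of the real Bean Validation call (the types and messages are hypothetical):

import java.util.Collections;
import java.util.Set;

public class NullGuardDemo {
    // Returns violation messages; the empty set means "valid" or "nothing to check".
    static Set<String> validateNoExceptions(Object entity) {
        if (entity == null) {
            // Null input: nothing to validate, report no violations.
            return Collections.emptySet();
        }
        // Non-null input actually reaches the check here (the buggy version
        // returned the empty set on this path and never validated anything).
        return entity.toString().isBlank()
                ? Collections.singleton("entity is blank")
                : Collections.emptySet();
    }

    public static void main(String[] args) {
        System.out.println(validateNoExceptions(null)); // []
        System.out.println(validateNoExceptions(""));   // [entity is blank]
        System.out.println(validateNoExceptions("ok")); // []
    }
}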
diff --git a/src/org/opensolaris/opengrok/analysis/TextAnalyzer.java b/src/org/opensolaris/opengrok/analysis/TextAnalyzer.java index 63fe7af..a946cf3 100644 --- a/src/org/opensolaris/opengrok/analysis/TextAnalyzer.java +++ b/src/org/opensolaris/opengrok/analysis/TextAnalyzer.java @@ -1,64 +1,64 @@ /* * CDDL HEADER START * * The contents of this file are subject to the terms of the * Common Development and Distribution License (the "License"). * You may not use this file except in compliance with the License. * * See LICENSE.txt included in this distribution for the specific * language governing permissions and limitations under the License. * * When distributing Covered Code, include this CDDL HEADER in each * file and include the License file at LICENSE.txt. * If applicable, add the following below this CDDL HEADER, with the * fields enclosed by brackets "[]" replaced with your own identifying * information: Portions Copyright [yyyy] [name of copyright owner] * * CDDL HEADER END */ package org.opensolaris.opengrok.analysis; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.nio.charset.Charset; import org.apache.lucene.document.Document; public abstract class TextAnalyzer extends FileAnalyzer { public TextAnalyzer(FileAnalyzerFactory factory) { super(factory); } public final void analyze(Document doc, InputStream in) throws IOException { String charset = null; in.mark(3); byte[] head = new byte[3]; int br = in.read(head, 0, 3); if (br >= 2 && (head[0] == (byte)0xFE && head[1] == (byte)0xFF) || (head[0] == (byte)0xFF && head[1] == (byte)0xFE)) { charset = "UTF-16"; in.reset(); - } else if (br >= 3 & head[0] == (byte)0xEF && head[1] == (byte)0xBB && + } else if (br >= 3 && head[0] == (byte)0xEF && head[1] == (byte)0xBB && head[2] == (byte)0xBF) { // InputStreamReader does not properly discard BOM on UTF8 streams, // so don't reset the stream. charset = "UTF-8"; } if (charset == null) { in.reset(); charset = Charset.defaultCharset().name(); } analyze(doc, new InputStreamReader(in, charset)); } protected abstract void analyze(Document doc, Reader reader) throws IOException; }
true
true
public final void analyze(Document doc, InputStream in) throws IOException { String charset = null; in.mark(3); byte[] head = new byte[3]; int br = in.read(head, 0, 3); if (br >= 2 && (head[0] == (byte)0xFE && head[1] == (byte)0xFF) || (head[0] == (byte)0xFF && head[1] == (byte)0xFE)) { charset = "UTF-16"; in.reset(); } else if (br >= 3 & head[0] == (byte)0xEF && head[1] == (byte)0xBB && head[2] == (byte)0xBF) { // InputStreamReader does not properly discard BOM on UTF8 streams, // so don't reset the stream. charset = "UTF-8"; } if (charset == null) { in.reset(); charset = Charset.defaultCharset().name(); } analyze(doc, new InputStreamReader(in, charset)); }
public final void analyze(Document doc, InputStream in) throws IOException { String charset = null; in.mark(3); byte[] head = new byte[3]; int br = in.read(head, 0, 3); if (br >= 2 && (head[0] == (byte)0xFE && head[1] == (byte)0xFF) || (head[0] == (byte)0xFF && head[1] == (byte)0xFE)) { charset = "UTF-16"; in.reset(); } else if (br >= 3 && head[0] == (byte)0xEF && head[1] == (byte)0xBB && head[2] == (byte)0xBF) { // InputStreamReader does not properly discard BOM on UTF8 streams, // so don't reset the stream. charset = "UTF-8"; } if (charset == null) { in.reset(); charset = Charset.defaultCharset().name(); } analyze(doc, new InputStreamReader(in, charset)); }
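The one-character fix here turns a non-short-circuiting bitwise & into a logical &&. In TextAnalyzer the head array always has length 3, so the buggy & merely compared unread (zeroed) bytes; in the general case, though, & evaluates both operands and defeats a bounds guard, as this standalone sketch shows:

public class ShortCircuitDemo {
    public static void main(String[] args) {
        int[] data = {7};
        int i = 1; // out of bounds

        // && short-circuits: data[i] is never evaluated once the guard fails.
        if (i < data.length && data[i] > 0) {
            System.out.println("unreached");
        }

        try {
            // & evaluates both operands, so the access runs despite the guard.
            if (i < data.length & data[i] > 0) {
                System.out.println("unreached");
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            System.out.println("& evaluated data[" + i + "] anyway");
        }
    }
}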
diff --git a/kres/eu.iksproject.kres.semion.refactorer/src/test/java/eu/iksproject/kres/semion/refactorer/SemionRefactoringTest.java b/kres/eu.iksproject.kres.semion.refactorer/src/test/java/eu/iksproject/kres/semion/refactorer/SemionRefactoringTest.java index 77b6b2ce2..8ec4983d9 100644 --- a/kres/eu.iksproject.kres.semion.refactorer/src/test/java/eu/iksproject/kres/semion/refactorer/SemionRefactoringTest.java +++ b/kres/eu.iksproject.kres.semion.refactorer/src/test/java/eu/iksproject/kres/semion/refactorer/SemionRefactoringTest.java @@ -1,259 +1,259 @@ package eu.iksproject.kres.semion.refactorer; import static org.junit.Assert.fail; import java.io.InputStream; import java.util.Dictionary; import java.util.Hashtable; import java.util.Set; import org.apache.clerezza.rdf.core.access.TcManager; import org.apache.clerezza.rdf.core.serializedform.Serializer; import org.junit.BeforeClass; import org.junit.Test; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLDataProperty; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyStorageException; import eu.iksproject.kres.api.manager.KReSONManager; import eu.iksproject.kres.api.rules.KReSRule; import eu.iksproject.kres.api.rules.NoSuchRecipeException; import eu.iksproject.kres.api.rules.Recipe; import eu.iksproject.kres.api.rules.RuleStore; import eu.iksproject.kres.api.rules.util.KReSRuleList; import eu.iksproject.kres.api.semion.SemionRefactorer; import eu.iksproject.kres.api.semion.SemionRefactoringException; import eu.iksproject.kres.api.semion.util.RecipeList; import eu.iksproject.kres.manager.ONManager; import eu.iksproject.kres.reasoners.KReSReasonerImpl; import eu.iksproject.kres.rules.manager.RecipeImpl; import eu.iksproject.kres.rules.parser.KReSRuleParser; import eu.iksproject.kres.semion.manager.SemionManagerImpl; public class SemionRefactoringTest { static RuleStore ruleStore; static OWLOntology ontology; static IRI recipeIRI; @BeforeClass public static void setup(){ recipeIRI = IRI .create("http://kres.iks-project.eu/ontology/meta/rmi_config.owl#MyTestRecipe"); InputStream ontologyStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/testKReSOnt.owl"); InputStream recipeStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/rmi.owl"); try { final OWLOntology recipeModel = OWLManager .createOWLOntologyManager() .loadOntologyFromOntologyDocument(recipeStream); ontology = OWLManager.createOWLOntologyManager() .loadOntologyFromOntologyDocument(ontologyStream); ruleStore = new RuleStore() { @Override public void setStore(OWLOntology owl) { // TODO Auto-generated method stub } @Override public void saveOntology() throws OWLOntologyStorageException { // TODO Auto-generated method stub } @Override public RecipeList listRecipes() { // TODO Auto-generated method stub return null; } @Override public Set<IRI> listIRIRecipes() { // TODO Auto-generated method stub return null; } @Override public String getRuleStoreNamespace() { // TODO Auto-generated method stub return null; } @Override public Recipe getRecipe(IRI recipeIRI) throws NoSuchRecipeException { Recipe recipe = null; if(recipeIRI!=null){ OWLDataFactory 
factory = OWLManager.getOWLDataFactory(); OWLIndividual recipeIndividual = factory .getOWLNamedIndividual(recipeIRI); if(recipeIndividual != null){ String ruleNS = "http://kres.iks-project.eu/ontology/meta/rmi.owl#"; /** * First get the recipe description in the * rule/recipe ontology. */ OWLDataProperty hasDescription = factory .getOWLDataProperty(IRI.create(ruleNS + "hasDescription")); String recipeDescription = null; Set<OWLLiteral> descriptions = recipeIndividual .getDataPropertyValues(hasDescription, recipeModel); for(OWLLiteral description : descriptions){ recipeDescription = description.getLiteral(); } /** * Then retrieve the rules associated to the recipe * in the rule store. */ OWLObjectProperty objectProperty = factory .getOWLObjectProperty(IRI.create(ruleNS + "hasRule")); Set<OWLIndividual> rules = recipeIndividual .getObjectPropertyValues(objectProperty, - ontology); + recipeModel); String kReSRulesInKReSSyntax = ""; /** * Fetch the rule content expressed as a literal in * KReSRule Syntax. */ OWLDataProperty hasBodyAndHead = factory .getOWLDataProperty(IRI.create(ruleNS + "hasBodyAndHead")); for(OWLIndividual rule : rules){ Set<OWLLiteral> kReSRuleLiterals = rule .getDataPropertyValues(hasBodyAndHead, - ontology); + recipeModel); for(OWLLiteral kReSRuleLiteral : kReSRuleLiterals){ String ruleTmp = kReSRuleLiteral .getLiteral().replace("&lt;", "<"); ruleTmp = ruleTmp.replace("&gt;", ">"); kReSRulesInKReSSyntax += ruleTmp + System .getProperty("line.separator"); } } /** * Create the Recipe object. */ KReSRuleList ruleList = KReSRuleParser.parse( kReSRulesInKReSSyntax).getkReSRuleList(); recipe = new RecipeImpl(recipeIRI, recipeDescription, ruleList); } else { throw new NoSuchRecipeException(recipeIRI); } } return recipe; } @Override public OWLOntology getOntology() { // TODO Auto-generated method stub return null; } @Override public String getFilePath() { // TODO Auto-generated method stub return null; } @Override public boolean addRecipe(IRI recipeIRI, String recipeDescription) { // TODO Auto-generated method stub return false; } @Override public Recipe addRuleToRecipe(String recipeID, String kReSRuleInKReSSyntax) throws NoSuchRecipeException { return null; } @Override public Recipe addRuleToRecipe(Recipe recipe, String kReSRuleInKReSSyntax) { return null; // TODO Auto-generated method stub } @Override public void createRecipe(String recipeID, String rulesInKReSSyntax) { // TODO Auto-generated method stub } @Override public boolean removeRecipe(Recipe recipe) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRecipe(IRI recipeIRI) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRule(KReSRule rule) { throw new UnsupportedOperationException( "Not supported yet."); } }; } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Test public void refactoringTest() throws Exception { Dictionary<String, Object> emptyConfig = new Hashtable<String, Object>(); KReSONManager onm = new ONManager(null, emptyConfig); SemionRefactorer refactorer = new SemionRefactorerImpl(null, new Serializer(), new TcManager(), onm, new SemionManagerImpl( onm), ruleStore, new KReSReasonerImpl(emptyConfig), emptyConfig); try { refactorer.ontologyRefactoring(ontology, recipeIRI); } catch (SemionRefactoringException e) { fail("Error while refactoring."); } catch (NoSuchRecipeException e) { fail("Error while refactoring: no such recipe"); } } }
false
true
public static void setup(){ recipeIRI = IRI .create("http://kres.iks-project.eu/ontology/meta/rmi_config.owl#MyTestRecipe"); InputStream ontologyStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/testKReSOnt.owl"); InputStream recipeStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/rmi.owl"); try { final OWLOntology recipeModel = OWLManager .createOWLOntologyManager() .loadOntologyFromOntologyDocument(recipeStream); ontology = OWLManager.createOWLOntologyManager() .loadOntologyFromOntologyDocument(ontologyStream); ruleStore = new RuleStore() { @Override public void setStore(OWLOntology owl) { // TODO Auto-generated method stub } @Override public void saveOntology() throws OWLOntologyStorageException { // TODO Auto-generated method stub } @Override public RecipeList listRecipes() { // TODO Auto-generated method stub return null; } @Override public Set<IRI> listIRIRecipes() { // TODO Auto-generated method stub return null; } @Override public String getRuleStoreNamespace() { // TODO Auto-generated method stub return null; } @Override public Recipe getRecipe(IRI recipeIRI) throws NoSuchRecipeException { Recipe recipe = null; if(recipeIRI!=null){ OWLDataFactory factory = OWLManager.getOWLDataFactory(); OWLIndividual recipeIndividual = factory .getOWLNamedIndividual(recipeIRI); if(recipeIndividual != null){ String ruleNS = "http://kres.iks-project.eu/ontology/meta/rmi.owl#"; /** * First get the recipe description in the * rule/recipe ontology. */ OWLDataProperty hasDescription = factory .getOWLDataProperty(IRI.create(ruleNS + "hasDescription")); String recipeDescription = null; Set<OWLLiteral> descriptions = recipeIndividual .getDataPropertyValues(hasDescription, recipeModel); for(OWLLiteral description : descriptions){ recipeDescription = description.getLiteral(); } /** * Then retrieve the rules associated to the recipe * in the rule store. */ OWLObjectProperty objectProperty = factory .getOWLObjectProperty(IRI.create(ruleNS + "hasRule")); Set<OWLIndividual> rules = recipeIndividual .getObjectPropertyValues(objectProperty, ontology); String kReSRulesInKReSSyntax = ""; /** * Fetch the rule content expressed as a literal in * KReSRule Syntax. */ OWLDataProperty hasBodyAndHead = factory .getOWLDataProperty(IRI.create(ruleNS + "hasBodyAndHead")); for(OWLIndividual rule : rules){ Set<OWLLiteral> kReSRuleLiterals = rule .getDataPropertyValues(hasBodyAndHead, ontology); for(OWLLiteral kReSRuleLiteral : kReSRuleLiterals){ String ruleTmp = kReSRuleLiteral .getLiteral().replace("&lt;", "<"); ruleTmp = ruleTmp.replace("&gt;", ">"); kReSRulesInKReSSyntax += ruleTmp + System .getProperty("line.separator"); } } /** * Create the Recipe object. 
*/ KReSRuleList ruleList = KReSRuleParser.parse( kReSRulesInKReSSyntax).getkReSRuleList(); recipe = new RecipeImpl(recipeIRI, recipeDescription, ruleList); } else { throw new NoSuchRecipeException(recipeIRI); } } return recipe; } @Override public OWLOntology getOntology() { // TODO Auto-generated method stub return null; } @Override public String getFilePath() { // TODO Auto-generated method stub return null; } @Override public boolean addRecipe(IRI recipeIRI, String recipeDescription) { // TODO Auto-generated method stub return false; } @Override public Recipe addRuleToRecipe(String recipeID, String kReSRuleInKReSSyntax) throws NoSuchRecipeException { return null; } @Override public Recipe addRuleToRecipe(Recipe recipe, String kReSRuleInKReSSyntax) { return null; // TODO Auto-generated method stub } @Override public void createRecipe(String recipeID, String rulesInKReSSyntax) { // TODO Auto-generated method stub } @Override public boolean removeRecipe(Recipe recipe) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRecipe(IRI recipeIRI) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRule(KReSRule rule) { throw new UnsupportedOperationException( "Not supported yet."); } }; } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
public static void setup(){ recipeIRI = IRI .create("http://kres.iks-project.eu/ontology/meta/rmi_config.owl#MyTestRecipe"); InputStream ontologyStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/testKReSOnt.owl"); InputStream recipeStream = SemionRefactoringTest.class .getResourceAsStream("/META-INF/test/rmi.owl"); try { final OWLOntology recipeModel = OWLManager .createOWLOntologyManager() .loadOntologyFromOntologyDocument(recipeStream); ontology = OWLManager.createOWLOntologyManager() .loadOntologyFromOntologyDocument(ontologyStream); ruleStore = new RuleStore() { @Override public void setStore(OWLOntology owl) { // TODO Auto-generated method stub } @Override public void saveOntology() throws OWLOntologyStorageException { // TODO Auto-generated method stub } @Override public RecipeList listRecipes() { // TODO Auto-generated method stub return null; } @Override public Set<IRI> listIRIRecipes() { // TODO Auto-generated method stub return null; } @Override public String getRuleStoreNamespace() { // TODO Auto-generated method stub return null; } @Override public Recipe getRecipe(IRI recipeIRI) throws NoSuchRecipeException { Recipe recipe = null; if(recipeIRI!=null){ OWLDataFactory factory = OWLManager.getOWLDataFactory(); OWLIndividual recipeIndividual = factory .getOWLNamedIndividual(recipeIRI); if(recipeIndividual != null){ String ruleNS = "http://kres.iks-project.eu/ontology/meta/rmi.owl#"; /** * First get the recipe description in the * rule/recipe ontology. */ OWLDataProperty hasDescription = factory .getOWLDataProperty(IRI.create(ruleNS + "hasDescription")); String recipeDescription = null; Set<OWLLiteral> descriptions = recipeIndividual .getDataPropertyValues(hasDescription, recipeModel); for(OWLLiteral description : descriptions){ recipeDescription = description.getLiteral(); } /** * Then retrieve the rules associated to the recipe * in the rule store. */ OWLObjectProperty objectProperty = factory .getOWLObjectProperty(IRI.create(ruleNS + "hasRule")); Set<OWLIndividual> rules = recipeIndividual .getObjectPropertyValues(objectProperty, recipeModel); String kReSRulesInKReSSyntax = ""; /** * Fetch the rule content expressed as a literal in * KReSRule Syntax. */ OWLDataProperty hasBodyAndHead = factory .getOWLDataProperty(IRI.create(ruleNS + "hasBodyAndHead")); for(OWLIndividual rule : rules){ Set<OWLLiteral> kReSRuleLiterals = rule .getDataPropertyValues(hasBodyAndHead, recipeModel); for(OWLLiteral kReSRuleLiteral : kReSRuleLiterals){ String ruleTmp = kReSRuleLiteral .getLiteral().replace("&lt;", "<"); ruleTmp = ruleTmp.replace("&gt;", ">"); kReSRulesInKReSSyntax += ruleTmp + System .getProperty("line.separator"); } } /** * Create the Recipe object. 
*/ KReSRuleList ruleList = KReSRuleParser.parse( kReSRulesInKReSSyntax).getkReSRuleList(); recipe = new RecipeImpl(recipeIRI, recipeDescription, ruleList); } else { throw new NoSuchRecipeException(recipeIRI); } } return recipe; } @Override public OWLOntology getOntology() { // TODO Auto-generated method stub return null; } @Override public String getFilePath() { // TODO Auto-generated method stub return null; } @Override public boolean addRecipe(IRI recipeIRI, String recipeDescription) { // TODO Auto-generated method stub return false; } @Override public Recipe addRuleToRecipe(String recipeID, String kReSRuleInKReSSyntax) throws NoSuchRecipeException { return null; } @Override public Recipe addRuleToRecipe(Recipe recipe, String kReSRuleInKReSSyntax) { return null; // TODO Auto-generated method stub } @Override public void createRecipe(String recipeID, String rulesInKReSSyntax) { // TODO Auto-generated method stub } @Override public boolean removeRecipe(Recipe recipe) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRecipe(IRI recipeIRI) { throw new UnsupportedOperationException( "Not supported yet."); } @Override public boolean removeRule(KReSRule rule) { throw new UnsupportedOperationException( "Not supported yet."); } }; } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
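The fix in this record is a wrong-variable repair: two OWL models (ontology and recipeModel) are in scope, and the hasRule/hasBodyAndHead lookups must query the one that actually contains the recipe assertions. A toy sketch of the failure mode, using plain maps as stand-ins for the two models (the data is hypothetical):

import java.util.Map;

public class WrongModelDemo {
    public static void main(String[] args) {
        // Stand-ins for the two models: the domain ontology has no rules.
        Map<String, String> ontology    = Map.of("SomeClass", "domain axiom");
        Map<String, String> recipeModel = Map.of("MyTestRecipe", "hasRule -> rule1");

        String key = "MyTestRecipe";
        // Buggy lookup: right key, wrong model.
        System.out.println(ontology.get(key));    // null
        // Fixed lookup: the recipe model holds the hasRule assertions.
        System.out.println(recipeModel.get(key)); // hasRule -> rule1
    }
}

Because the wrong model simply yields an empty result set rather than throwing, the buggy test built a recipe with no rules and the error surfaced far from its cause.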
diff --git a/src/org/eclipse/jface/util/Policy.java b/src/org/eclipse/jface/util/Policy.java index af43f038..762e34b3 100644 --- a/src/org/eclipse/jface/util/Policy.java +++ b/src/org/eclipse/jface/util/Policy.java @@ -1,242 +1,239 @@ /******************************************************************************* * Copyright (c) 2004, 2006 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Chris Gross ([email protected]) - support for ILogger added * (bug 49497 [RCP] JFace dependency on org.eclipse.core.runtime enlarges standalone JFace applications) *******************************************************************************/ package org.eclipse.jface.util; import java.util.Comparator; import org.eclipse.core.runtime.IStatus; import org.eclipse.jface.dialogs.AnimatorFactory; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.swt.widgets.Shell; /** * The Policy class handles settings for behaviour, debug flags and logging * within JFace. * * @since 3.0 */ public class Policy { /** * Constant for the the default setting for debug options. */ public static final boolean DEFAULT = false; /** * The unique identifier of the JFace plug-in. */ public static final String JFACE = "org.eclipse.jface";//$NON-NLS-1$ private static ILogger log; private static Comparator viewerComparator; private static AnimatorFactory animatorFactory; /** * A flag to indicate whether unparented dialogs should be checked. */ public static boolean DEBUG_DIALOG_NO_PARENT = DEFAULT; /** * A flag to indicate whether actions are being traced. */ public static boolean TRACE_ACTIONS = DEFAULT; /** * A flag to indicate whether toolbars are being traced. */ public static boolean TRACE_TOOLBAR = DEFAULT; private static ILogDialog logDialog; /** * Returns the dummy log to use if none has been set */ private static ILogger getDummyLog() { return new ILogger() { public void log(IStatus status) { System.err.println(status.getMessage()); } }; } /** * Get the default dialog for JFace errors and warnings. * * @return {@link ILogDialog} */ private static ILogDialog getJFaceLogDialog() { return new ILogDialog() { /* * (non-Javadoc) * * @see org.eclipse.jface.util.ILogger#log(org.eclipse.core.runtime.IStatus) */ public void log(Shell parent, String title, IStatus status) { - MessageDialog dialog = new MessageDialog( - parent, - title, - null, // accept - // the - // default - // window - // icon - status.getMessage(), status.getSeverity(), - new String[] { IDialogConstants.OK_LABEL }, 0); // ok - // is + int dialogConstant = MessageDialog.ERROR; + if (status.getSeverity() == IStatus.WARNING) { + dialogConstant = MessageDialog.WARNING; + } + MessageDialog dialog = new MessageDialog(parent, title, + null, // accept the default window icon + status.getMessage(), dialogConstant, + new String[] { IDialogConstants.OK_LABEL }, 0); // ok is // the // default dialog.open(); } }; } /** * Sets the logger used by JFace to log errors. * * @param logger * the logger to use, or <code>null</code> to use the default * logger * @since 3.1 */ public static void setLog(ILogger logger) { log = logger; } /** * Sets the dialog used by JFace to show errors and warnings. 
* * @param dialog * the dialog to use, or <code>null</code> to use the default * dialog * @since 3.3 */ public static void setLogDialog(ILogDialog dialog) { logDialog = dialog; } /** * Returns the logger used by JFace to log errors. * <p> * The default logger prints the status to <code>System.err</code>. * </p> * * @return the logger * @since 3.1 */ public static ILogger getLog() { if (log == null) { log = getDummyLog(); } return log; } /** * Returns the dialog used by JFace to show errors and warnings. * <p> * The default dialog shows the status in JFace MessageDialog * </p> * * @return the dialog * @since 3.3 */ public static ILogDialog getLogDialog() { if (logDialog == null) { logDialog = getJFaceLogDialog(); } return logDialog; } /** * Return the default comparator used by JFace to sort strings. * * @return a default comparator used by JFace to sort strings */ private static Comparator getDefaultComparator() { return new Comparator() { /** * Compares string s1 to string s2. * * @param s1 * string 1 * @param s2 * string 2 * @return Returns an integer value. Value is less than zero if * source is less than target, value is zero if source and * target are equal, value is greater than zero if source is * greater than target. * @exception ClassCastException * the arguments cannot be cast to Strings. */ public int compare(Object s1, Object s2) { return ((String) s1).compareTo((String) s2); } }; } /** * Return the comparator used by JFace to sort strings. * * @return the comparator used by JFace to sort strings * @since 3.2 */ public static Comparator getComparator() { if (viewerComparator == null) { viewerComparator = getDefaultComparator(); } return viewerComparator; } /** * Sets the comparator used by JFace to sort strings. * * @param comparator * comparator used by JFace to sort strings * @since 3.2 */ public static void setComparator(Comparator comparator) { org.eclipse.core.runtime.Assert.isTrue(viewerComparator == null); viewerComparator = comparator; } /** * Sets the animator factory used by JFace to create control animator * instances. * * @param factory * the AnimatorFactory to use. * @since 3.2 * @deprecated this is no longer in use as of 3.3 */ public static void setAnimatorFactory(AnimatorFactory factory) { animatorFactory = factory; } /** * Returns the animator factory used by JFace to create control animator * instances. * * @return the animator factory used to create control animator instances. * @since 3.2 * @deprecated this is no longer in use as of 3.3 */ public static AnimatorFactory getAnimatorFactory() { if (animatorFactory == null) animatorFactory = new AnimatorFactory(); return animatorFactory; } }
is_single_chunk: true
is_single_function: true
private static ILogDialog getJFaceLogDialog() { return new ILogDialog() { /* * (non-Javadoc) * * @see org.eclipse.jface.util.ILogger#log(org.eclipse.core.runtime.IStatus) */ public void log(Shell parent, String title, IStatus status) { MessageDialog dialog = new MessageDialog( parent, title, null, // accept // the // default // window // icon status.getMessage(), status.getSeverity(), new String[] { IDialogConstants.OK_LABEL }, 0); // ok // is // the // default dialog.open(); } }; }
private static ILogDialog getJFaceLogDialog() { return new ILogDialog() { /* * (non-Javadoc) * * @see org.eclipse.jface.util.ILogger#log(org.eclipse.core.runtime.IStatus) */ public void log(Shell parent, String title, IStatus status) { int dialogConstant = MessageDialog.ERROR; if (status.getSeverity() == IStatus.WARNING) { dialogConstant = MessageDialog.WARNING; } MessageDialog dialog = new MessageDialog(parent, title, null, // accept the default window icon status.getMessage(), dialogConstant, new String[] { IDialogConstants.OK_LABEL }, 0); // ok is // the // default dialog.open(); } }; }
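The heart of this fix is that IStatus severities and MessageDialog image constants are two different constant sets, so the severity can no longer be passed through raw: IStatus.ERROR is 4, which MessageDialog reads as its WARNING image. A minimal sketch of the mapping as a standalone helper — the INFO branch is my illustrative extension, not part of the patch, which only special-cases WARNING:

import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.dialogs.MessageDialog;

final class SeverityMapping {
    // Translate an IStatus severity into the MessageDialog kind constant.
    static int toDialogKind(int severity) {
        switch (severity) {
            case IStatus.WARNING:
                return MessageDialog.WARNING;
            case IStatus.INFO:
                return MessageDialog.INFORMATION; // illustrative extension
            default:
                return MessageDialog.ERROR;       // the patch's fallback
        }
    }
}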
diff --git a/build/maven/javadoc/src/test/java/org/geotools/maven/taglet/SourceTest.java b/build/maven/javadoc/src/test/java/org/geotools/maven/taglet/SourceTest.java index b6d62b010..9ea3c3f87 100644 --- a/build/maven/javadoc/src/test/java/org/geotools/maven/taglet/SourceTest.java +++ b/build/maven/javadoc/src/test/java/org/geotools/maven/taglet/SourceTest.java @@ -1,104 +1,108 @@ /* * GeoTools - OpenSource mapping toolkit * http://geotools.org * (C) 2005-2006, GeoTools Project Managment Committee (PMC) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package org.geotools.maven.taglet; // J2SE dependencies import java.util.regex.Matcher; // JUnit dependencies import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * Tests the {@link Source} taglet. * * @source $URL$ * @version $Id$ * @author Martin Desruisseaux */ public class SourceTest extends TestCase { /** * Run the suite from the command line. */ public static void main(final String[] args) { junit.textui.TestRunner.run(suite()); } /** * Returns the test suite. */ public static Test suite() { return new TestSuite(SourceTest.class); } /** * Constructs a test case with the given name. */ public SourceTest(final String name) { super(name); } /** * Tests the regular expression validity using the tag for this source file. */ public void testCurrentTag() { +/* See http://jira.codehaus.org/browse/GEOT-1812 for a patch about this issue. --adrian */ +/* Note the Source file will have to change also since .../gt/... no longer exists. */ +/* Source s = new Source(); Matcher m; String tag, url, group, category, module; tag = "$URL$"; m = s.findURL.matcher(tag); assertTrue(m.matches()); // Try to match the URL provided by SVN. url = m.group(1).trim(); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("build", group); assertEquals("maven", category); assertEquals("javadoc", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.4-M0/modules/library/api/src/main/java/org/geotools/catalog/ResolveChangeListener.java"; m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("api", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.2-RC4/modules/library/referencing/src/main/java/org/geotools/referencing/CRS.java"; tag = Source.SVN_KEYWORD_DELIMITER + "URL: " + url + ' ' + Source.SVN_KEYWORD_DELIMITER; m = s.findURL.matcher(tag); assertTrue(m.matches()); assertEquals(url, m.group(1).trim()); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("referencing", module); +*/ } }
is_single_chunk: false
is_single_function: true
public void testCurrentTag() { Source s = new Source(); Matcher m; String tag, url, group, category, module; tag = "$URL$"; m = s.findURL.matcher(tag); assertTrue(m.matches()); // Try to match the URL provided by SVN. url = m.group(1).trim(); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("build", group); assertEquals("maven", category); assertEquals("javadoc", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.4-M0/modules/library/api/src/main/java/org/geotools/catalog/ResolveChangeListener.java"; m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("api", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.2-RC4/modules/library/referencing/src/main/java/org/geotools/referencing/CRS.java"; tag = Source.SVN_KEYWORD_DELIMITER + "URL: " + url + ' ' + Source.SVN_KEYWORD_DELIMITER; m = s.findURL.matcher(tag); assertTrue(m.matches()); assertEquals(url, m.group(1).trim()); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("referencing", module); }
public void testCurrentTag() { /* See http://jira.codehaus.org/browse/GEOT-1812 for a patch about this issue. --adrian */ /* Note the Source file will have to change also since .../gt/... no longer exists. */ /* Source s = new Source(); Matcher m; String tag, url, group, category, module; tag = "$URL$"; m = s.findURL.matcher(tag); assertTrue(m.matches()); // Try to match the URL provided by SVN. url = m.group(1).trim(); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("build", group); assertEquals("maven", category); assertEquals("javadoc", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.4-M0/modules/library/api/src/main/java/org/geotools/catalog/ResolveChangeListener.java"; m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("api", module); // Try an other URL from a tag. url = "http://svn.geotools.org/geotools/tags/2.2-RC4/modules/library/referencing/src/main/java/org/geotools/referencing/CRS.java"; tag = Source.SVN_KEYWORD_DELIMITER + "URL: " + url + ' ' + Source.SVN_KEYWORD_DELIMITER; m = s.findURL.matcher(tag); assertTrue(m.matches()); assertEquals(url, m.group(1).trim()); m = s.findModule.matcher(url); assertTrue(m.matches()); group = m.group(1); category = m.group(2); module = m.group(3); assertEquals("modules", group); assertEquals("library", category); assertEquals("referencing", module); */ }
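Everything the commented-out test asserted boils down to extracting the group/category/module triple from an SVN URL with a regular expression. The real patterns live in the Source taglet (findURL, findModule); the pattern below is an illustrative stand-in written for this note, not the taglet's own regex, showing the same three-group capture:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class ModulePathDemo {
    // Stand-in for Source.findModule: capture the three path segments
    // (group/category/module) between the trunk/tag root and src/.
    private static final Pattern MODULE = Pattern.compile(
            ".*/geotools/(?:trunk|tags/[^/]+)/([^/]+)/([^/]+)/([^/]+)/src/.*");

    public static void main(String[] args) {
        String url = "http://svn.geotools.org/geotools/tags/2.4-M0/modules/library/api"
                + "/src/main/java/org/geotools/catalog/ResolveChangeListener.java";
        Matcher m = MODULE.matcher(url);
        if (m.matches()) {
            // prints: modules / library / api
            System.out.println(m.group(1) + " / " + m.group(2) + " / " + m.group(3));
        }
    }
}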
diff --git a/src/java/org/deuce/transform/asm/AtomicMethod.java b/src/java/org/deuce/transform/asm/AtomicMethod.java index c7f2beb..f0f4512 100644 --- a/src/java/org/deuce/transform/asm/AtomicMethod.java +++ b/src/java/org/deuce/transform/asm/AtomicMethod.java @@ -1,266 +1,265 @@ package org.deuce.transform.asm; import org.deuce.objectweb.asm.AnnotationVisitor; import org.deuce.objectweb.asm.Attribute; import org.deuce.objectweb.asm.Label; import org.deuce.objectweb.asm.MethodAdapter; import org.deuce.objectweb.asm.MethodVisitor; import org.deuce.objectweb.asm.Opcodes; import org.deuce.objectweb.asm.Type; import org.deuce.objectweb.asm.commons.Method; import org.deuce.transaction.AbstractContext; import org.deuce.transform.asm.type.TypeCodeResolver; import org.deuce.transform.asm.type.TypeCodeResolverFactory; public class AtomicMethod extends MethodAdapter implements Opcodes{ private int retries = 7; // TODO set default final private String className; final private String methodName; final private TypeCodeResolver returnReolver; final private TypeCodeResolver[] argumentReolvers; final private boolean isStatic; final private int variablesSize; final private Method newMethod; public AtomicMethod(MethodVisitor mv, String className, String methodName, String descriptor, Method newMethod, boolean isStatic) { super(mv); this.className = className; this.methodName = methodName; this.newMethod = newMethod; this.isStatic = isStatic; Type returnType = Type.getReturnType(descriptor); Type[] argumentTypes = Type.getArgumentTypes(descriptor); returnReolver = TypeCodeResolverFactory.getReolver(returnType); argumentReolvers = new TypeCodeResolver[ argumentTypes.length]; for( int i=0; i< argumentTypes.length ; ++i) { argumentReolvers[ i] = TypeCodeResolverFactory.getReolver( argumentTypes[ i]); } variablesSize = variablesSize( argumentReolvers, isStatic); } @Override public AnnotationVisitor visitAnnotation(String desc, boolean visible) { return super.visitAnnotation(desc, visible); } @Override public AnnotationVisitor visitAnnotationDefault() { return super.visitAnnotationDefault(); } @Override public AnnotationVisitor visitParameterAnnotation(int parameter, String desc, boolean visible) { return super.visitParameterAnnotation(parameter, desc, visible); } @Override public void visitAttribute(Attribute attr) { super.visitAttribute(attr); } @Override public void visitCode() { final int indexIndex = variablesSize; // i final int contextIndex = indexIndex + 1; // context final int resultIndex = returnReolver == null ? contextIndex : contextIndex + 1; final int throwableIndex = resultIndex + (returnReolver == null ? 0 : returnReolver.extendLocals()) + 1; Label l0 = new Label(); Label l1 = new Label(); mv.visitTryCatchBlock(l0, l1, l1, null); // try{ mv.visitIntInsn(SIPUSH, retries); // for( int i =retries; ; ...) mv.visitVarInsn(ISTORE, indexIndex); Label l2 = new Label(); mv.visitJumpInsn(GOTO, l2); Label l3 = getContext(contextIndex); // AbstractContext context = AbstractContext.getInstance(); if( returnReolver != null) { // result = null; mv.visitInsn(returnReolver.nullValueCode()); mv.visitVarInsn(returnReolver.storeCode(), resultIndex); } // -------------- result = foo( context, ...) --------------- mv.visitLabel(l0); - if( !isStatic) // load this id not static + if( !isStatic) // load this id if not static mv.visitVarInsn(ALOAD, 0); // load the rest of the arguments int local = isStatic ? 
0 : 1; for( int i=0 ; i < argumentReolvers.length ; ++i) { mv.visitVarInsn(argumentReolvers[i].loadCode(), local); - local += argumentReolvers[i].extendLocals(); + local += (argumentReolvers[i].extendLocals() + 1); // move to the next argument } mv.visitVarInsn(ALOAD, contextIndex); // load the context if( isStatic) mv.visitMethodInsn(INVOKESTATIC, className, methodName, newMethod.getDescriptor()); // ... = foo( ... else mv.visitMethodInsn(INVOKEVIRTUAL, className, methodName, newMethod.getDescriptor()); // ... = foo( ... - if( returnReolver != null) { + if( returnReolver != null) mv.visitVarInsn(returnReolver.storeCode(), resultIndex); // result = ... - } Label l4 = new Label(); mv.visitJumpInsn(GOTO, l4); mv.visitLabel(l1); mv.visitVarInsn(ASTORE, throwableIndex); // store the throwable mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l5 = new Label(); mv.visitJumpInsn(IFNE, l5); //if( !context.commit()) Label l6 = new Label(); mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l5); mv.visitVarInsn(ALOAD, throwableIndex); // load the throwable for re-throw mv.visitInsn(ATHROW); mv.visitLabel(l4); mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l7 = new Label(); mv.visitJumpInsn(IFNE, l7); //if( !context.commit()) mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l7); if( returnReolver == null) { mv.visitInsn( RETURN); // return; } else { mv.visitVarInsn(returnReolver.loadCode(), resultIndex); // return result; mv.visitInsn(returnReolver.returnCode()); } mv.visitLabel(l6); mv.visitIincInsn(indexIndex, -1); // for( ... ; ... ; --i) // for( ... ; i>0 ... 
mv.visitLabel(l2); mv.visitVarInsn(ILOAD, indexIndex); mv.visitJumpInsn(IFGT, l3); // throw new TransactionException("Failed to commit ..."); throwTransactionException(); mv.visitMaxs(5 + variablesSize, throwableIndex + 1); mv.visitEnd(); } private Label getContext(final int contextIndex) { Label l3 = new Label(); mv.visitLabel(l3); // AbstractContext context = AbstractContext.getInstance(); mv.visitMethodInsn(INVOKESTATIC, AbstractContext.ABSTRACT_CONTEXT_NAME, "getInstance", "()Lorg/deuce/transaction/AbstractContext;"); mv.visitVarInsn(ASTORE, contextIndex); return l3; } private void throwTransactionException() { mv.visitTypeInsn(NEW, "org/deuce/transaction/TransactionException"); mv.visitInsn(DUP); mv.visitLdcInsn("Failed to commit the transaction in the defined retries."); mv.visitMethodInsn(INVOKESPECIAL, "org/deuce/transaction/TransactionException", "<init>", "(Ljava/lang/String;)V"); mv.visitInsn(ATHROW); } @Override public void visitFrame(int type, int local, Object[] local2, int stack, Object[] stack2) { } @Override public void visitIincInsn(int var, int increment) { } @Override public void visitInsn(int opcode) { } @Override public void visitIntInsn(int opcode, int operand) { } @Override public void visitJumpInsn(int opcode, Label label) { } @Override public void visitLabel(Label label) { } @Override public void visitEnd() { } @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { } @Override public void visitLdcInsn(Object cst) { } @Override public void visitLineNumber(int line, Label start) { } @Override public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) { } @Override public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) { } @Override public void visitMaxs(int maxStack, int maxLocals) { } @Override public void visitMethodInsn(int opcode, String owner, String name, String desc) { } @Override public void visitMultiANewArrayInsn(String desc, int dims) { } @Override public void visitTableSwitchInsn(int min, int max, Label dflt, Label[] labels) { } @Override public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { } @Override public void visitTypeInsn(int opcode, String type) { } @Override public void visitVarInsn(int opcode, int var) { } public void setRetries(int retries) { this.retries = retries; } private int variablesSize( TypeCodeResolver[] types, boolean isStatic) { int i = isStatic ? 0 : 1; for( TypeCodeResolver type : types) { ++i; i += type.extendLocals(); } return i; } }
is_single_chunk: false
is_single_function: true
public void visitCode() { final int indexIndex = variablesSize; // i final int contextIndex = indexIndex + 1; // context final int resultIndex = returnReolver == null ? contextIndex : contextIndex + 1; final int throwableIndex = resultIndex + (returnReolver == null ? 0 : returnReolver.extendLocals()) + 1; Label l0 = new Label(); Label l1 = new Label(); mv.visitTryCatchBlock(l0, l1, l1, null); // try{ mv.visitIntInsn(SIPUSH, retries); // for( int i =retries; ; ...) mv.visitVarInsn(ISTORE, indexIndex); Label l2 = new Label(); mv.visitJumpInsn(GOTO, l2); Label l3 = getContext(contextIndex); // AbstractContext context = AbstractContext.getInstance(); if( returnReolver != null) { // result = null; mv.visitInsn(returnReolver.nullValueCode()); mv.visitVarInsn(returnReolver.storeCode(), resultIndex); } // -------------- result = foo( context, ...) --------------- mv.visitLabel(l0); if( !isStatic) // load this id not static mv.visitVarInsn(ALOAD, 0); // load the rest of the arguments int local = isStatic ? 0 : 1; for( int i=0 ; i < argumentReolvers.length ; ++i) { mv.visitVarInsn(argumentReolvers[i].loadCode(), local); local += argumentReolvers[i].extendLocals(); } mv.visitVarInsn(ALOAD, contextIndex); // load the context if( isStatic) mv.visitMethodInsn(INVOKESTATIC, className, methodName, newMethod.getDescriptor()); // ... = foo( ... else mv.visitMethodInsn(INVOKEVIRTUAL, className, methodName, newMethod.getDescriptor()); // ... = foo( ... if( returnReolver != null) { mv.visitVarInsn(returnReolver.storeCode(), resultIndex); // result = ... } Label l4 = new Label(); mv.visitJumpInsn(GOTO, l4); mv.visitLabel(l1); mv.visitVarInsn(ASTORE, throwableIndex); // store the throwable mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l5 = new Label(); mv.visitJumpInsn(IFNE, l5); //if( !context.commit()) Label l6 = new Label(); mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l5); mv.visitVarInsn(ALOAD, throwableIndex); // load the throwable for re-throw mv.visitInsn(ATHROW); mv.visitLabel(l4); mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l7 = new Label(); mv.visitJumpInsn(IFNE, l7); //if( !context.commit()) mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l7); if( returnReolver == null) { mv.visitInsn( RETURN); // return; } else { mv.visitVarInsn(returnReolver.loadCode(), resultIndex); // return result; mv.visitInsn(returnReolver.returnCode()); } mv.visitLabel(l6); mv.visitIincInsn(indexIndex, -1); // for( ... ; ... ; --i) // for( ... ; i>0 ... 
mv.visitLabel(l2); mv.visitVarInsn(ILOAD, indexIndex); mv.visitJumpInsn(IFGT, l3); // throw new TransactionException("Failed to commit ..."); throwTransactionException(); mv.visitMaxs(5 + variablesSize, throwableIndex + 1); mv.visitEnd(); } private Label getContext(final int contextIndex) { Label l3 = new Label(); mv.visitLabel(l3); // AbstractContext context = AbstractContext.getInstance(); mv.visitMethodInsn(INVOKESTATIC, AbstractContext.ABSTRACT_CONTEXT_NAME, "getInstance", "()Lorg/deuce/transaction/AbstractContext;"); mv.visitVarInsn(ASTORE, contextIndex); return l3; } private void throwTransactionException() { mv.visitTypeInsn(NEW, "org/deuce/transaction/TransactionException"); mv.visitInsn(DUP); mv.visitLdcInsn("Failed to commit the transaction in the defined retries."); mv.visitMethodInsn(INVOKESPECIAL, "org/deuce/transaction/TransactionException", "<init>", "(Ljava/lang/String;)V"); mv.visitInsn(ATHROW); } @Override public void visitFrame(int type, int local, Object[] local2, int stack, Object[] stack2) { } @Override public void visitIincInsn(int var, int increment) { } @Override public void visitInsn(int opcode) { } @Override public void visitIntInsn(int opcode, int operand) { } @Override public void visitJumpInsn(int opcode, Label label) { } @Override public void visitLabel(Label label) { } @Override public void visitEnd() { } @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { } @Override public void visitLdcInsn(Object cst) { } @Override public void visitLineNumber(int line, Label start) { } @Override public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) { } @Override public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) { } @Override public void visitMaxs(int maxStack, int maxLocals) { } @Override public void visitMethodInsn(int opcode, String owner, String name, String desc) { } @Override public void visitMultiANewArrayInsn(String desc, int dims) { } @Override public void visitTableSwitchInsn(int min, int max, Label dflt, Label[] labels) { } @Override public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { } @Override public void visitTypeInsn(int opcode, String type) { } @Override public void visitVarInsn(int opcode, int var) { } public void setRetries(int retries) { this.retries = retries; } private int variablesSize( TypeCodeResolver[] types, boolean isStatic) { int i = isStatic ? 0 : 1; for( TypeCodeResolver type : types) { ++i; i += type.extendLocals(); } return i; } }
public void visitCode() { final int indexIndex = variablesSize; // i final int contextIndex = indexIndex + 1; // context final int resultIndex = returnReolver == null ? contextIndex : contextIndex + 1; final int throwableIndex = resultIndex + (returnReolver == null ? 0 : returnReolver.extendLocals()) + 1; Label l0 = new Label(); Label l1 = new Label(); mv.visitTryCatchBlock(l0, l1, l1, null); // try{ mv.visitIntInsn(SIPUSH, retries); // for( int i =retries; ; ...) mv.visitVarInsn(ISTORE, indexIndex); Label l2 = new Label(); mv.visitJumpInsn(GOTO, l2); Label l3 = getContext(contextIndex); // AbstractContext context = AbstractContext.getInstance(); if( returnReolver != null) { // result = null; mv.visitInsn(returnReolver.nullValueCode()); mv.visitVarInsn(returnReolver.storeCode(), resultIndex); } // -------------- result = foo( context, ...) --------------- mv.visitLabel(l0); if( !isStatic) // load this id if not static mv.visitVarInsn(ALOAD, 0); // load the rest of the arguments int local = isStatic ? 0 : 1; for( int i=0 ; i < argumentReolvers.length ; ++i) { mv.visitVarInsn(argumentReolvers[i].loadCode(), local); local += (argumentReolvers[i].extendLocals() + 1); // move to the next argument } mv.visitVarInsn(ALOAD, contextIndex); // load the context if( isStatic) mv.visitMethodInsn(INVOKESTATIC, className, methodName, newMethod.getDescriptor()); // ... = foo( ... else mv.visitMethodInsn(INVOKEVIRTUAL, className, methodName, newMethod.getDescriptor()); // ... = foo( ... if( returnReolver != null) mv.visitVarInsn(returnReolver.storeCode(), resultIndex); // result = ... Label l4 = new Label(); mv.visitJumpInsn(GOTO, l4); mv.visitLabel(l1); mv.visitVarInsn(ASTORE, throwableIndex); // store the throwable mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l5 = new Label(); mv.visitJumpInsn(IFNE, l5); //if( !context.commit()) Label l6 = new Label(); mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l5); mv.visitVarInsn(ALOAD, throwableIndex); // load the throwable for re-throw mv.visitInsn(ATHROW); mv.visitLabel(l4); mv.visitVarInsn(ALOAD, contextIndex); // context.commit() mv.visitMethodInsn(INVOKEVIRTUAL, "org/deuce/transaction/AbstractContext", "commit", "()Z"); Label l7 = new Label(); mv.visitJumpInsn(IFNE, l7); //if( !context.commit()) mv.visitJumpInsn(GOTO, l6); // continue; mv.visitLabel(l7); if( returnReolver == null) { mv.visitInsn( RETURN); // return; } else { mv.visitVarInsn(returnReolver.loadCode(), resultIndex); // return result; mv.visitInsn(returnReolver.returnCode()); } mv.visitLabel(l6); mv.visitIincInsn(indexIndex, -1); // for( ... ; ... ; --i) // for( ... ; i>0 ... 
mv.visitLabel(l2); mv.visitVarInsn(ILOAD, indexIndex); mv.visitJumpInsn(IFGT, l3); // throw new TransactionException("Failed to commit ..."); throwTransactionException(); mv.visitMaxs(5 + variablesSize, throwableIndex + 1); mv.visitEnd(); } private Label getContext(final int contextIndex) { Label l3 = new Label(); mv.visitLabel(l3); // AbstractContext context = AbstractContext.getInstance(); mv.visitMethodInsn(INVOKESTATIC, AbstractContext.ABSTRACT_CONTEXT_NAME, "getInstance", "()Lorg/deuce/transaction/AbstractContext;"); mv.visitVarInsn(ASTORE, contextIndex); return l3; } private void throwTransactionException() { mv.visitTypeInsn(NEW, "org/deuce/transaction/TransactionException"); mv.visitInsn(DUP); mv.visitLdcInsn("Failed to commit the transaction in the defined retries."); mv.visitMethodInsn(INVOKESPECIAL, "org/deuce/transaction/TransactionException", "<init>", "(Ljava/lang/String;)V"); mv.visitInsn(ATHROW); } @Override public void visitFrame(int type, int local, Object[] local2, int stack, Object[] stack2) { } @Override public void visitIincInsn(int var, int increment) { } @Override public void visitInsn(int opcode) { } @Override public void visitIntInsn(int opcode, int operand) { } @Override public void visitJumpInsn(int opcode, Label label) { } @Override public void visitLabel(Label label) { } @Override public void visitEnd() { } @Override public void visitFieldInsn(int opcode, String owner, String name, String desc) { } @Override public void visitLdcInsn(Object cst) { } @Override public void visitLineNumber(int line, Label start) { } @Override public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) { } @Override public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) { } @Override public void visitMaxs(int maxStack, int maxLocals) { } @Override public void visitMethodInsn(int opcode, String owner, String name, String desc) { } @Override public void visitMultiANewArrayInsn(String desc, int dims) { } @Override public void visitTableSwitchInsn(int min, int max, Label dflt, Label[] labels) { } @Override public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { } @Override public void visitTypeInsn(int opcode, String type) { } @Override public void visitVarInsn(int opcode, int var) { } public void setRetries(int retries) { this.retries = retries; } private int variablesSize( TypeCodeResolver[] types, boolean isStatic) { int i = isStatic ? 0 : 1; for( TypeCodeResolver type : types) { ++i; i += type.extendLocals(); } return i; } }
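The decisive line in this patch is local += (argumentReolvers[i].extendLocals() + 1): on the JVM every argument occupies one local-variable slot plus one extra slot for long and double, and the old code only added the extra. extendLocals() evidently returns that extra slot count (the variablesSize helper uses it the same way), so extendLocals() + 1 plays the role ASM's Type.getSize() plays. A self-contained sketch of the slot walk, written against the plain org.objectweb.asm package rather than Deuce's shaded copy, with a made-up descriptor:

import org.objectweb.asm.Type;

final class LocalSlotDemo {
    public static void main(String[] args) {
        // Hypothetical descriptor: (int, long, Object, double) -> void
        String descriptor = "(IJLjava/lang/Object;D)V";
        int local = 1; // slot 0 holds 'this' for an instance method
        for (Type arg : Type.getArgumentTypes(descriptor)) {
            System.out.println(arg.getClassName() + " loads from slot " + local);
            local += arg.getSize(); // 2 for long/double, 1 for everything else
        }
        // prints slots 1, 2, 4, 5 -- the long at slot 2 also occupies slot 3
    }
}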
diff --git a/android/src/com/google/zxing/client/android/PreferencesFragment.java b/android/src/com/google/zxing/client/android/PreferencesFragment.java index 95b387bc..d934edb9 100644 --- a/android/src/com/google/zxing/client/android/PreferencesFragment.java +++ b/android/src/com/google/zxing/client/android/PreferencesFragment.java @@ -1,126 +1,126 @@ /* * Copyright (C) 2013 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.android; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import android.app.AlertDialog; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.CheckBoxPreference; import android.preference.EditTextPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceScreen; public final class PreferencesFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { private CheckBoxPreference[] checkBoxPrefs; @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); addPreferencesFromResource(R.xml.preferences); PreferenceScreen preferences = getPreferenceScreen(); preferences.getSharedPreferences().registerOnSharedPreferenceChangeListener(this); checkBoxPrefs = findDecodePrefs(preferences, PreferencesActivity.KEY_DECODE_1D_PRODUCT, PreferencesActivity.KEY_DECODE_1D_INDUSTRIAL, PreferencesActivity.KEY_DECODE_QR, PreferencesActivity.KEY_DECODE_DATA_MATRIX, PreferencesActivity.KEY_DECODE_AZTEC, PreferencesActivity.KEY_DECODE_PDF417); disableLastCheckedPref(); EditTextPreference customProductSearch = (EditTextPreference) preferences.findPreference(PreferencesActivity.KEY_CUSTOM_PRODUCT_SEARCH); customProductSearch.setOnPreferenceChangeListener(new CustomSearchURLValidator()); } private static CheckBoxPreference[] findDecodePrefs(PreferenceScreen preferences, String... 
keys) { CheckBoxPreference[] prefs = new CheckBoxPreference[keys.length]; for (int i = 0; i < keys.length; i++) { prefs[i] = (CheckBoxPreference) preferences.findPreference(keys[i]); } return prefs; } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { disableLastCheckedPref(); } private void disableLastCheckedPref() { Collection<CheckBoxPreference> checked = new ArrayList<>(checkBoxPrefs.length); for (CheckBoxPreference pref : checkBoxPrefs) { if (pref.isChecked()) { checked.add(pref); } } boolean disable = checked.size() <= 1; for (CheckBoxPreference pref : checkBoxPrefs) { pref.setEnabled(!(disable && checked.contains(pref))); } } private class CustomSearchURLValidator implements Preference.OnPreferenceChangeListener { @Override public boolean onPreferenceChange(Preference preference, Object newValue) { if (!isValid(newValue)) { AlertDialog.Builder builder = new AlertDialog.Builder(PreferencesFragment.this.getActivity()); builder.setTitle(R.string.msg_error); builder.setMessage(R.string.msg_invalid_value); builder.setCancelable(true); builder.show(); return false; } return true; } private boolean isValid(Object newValue) { // Allow empty/null value if (newValue == null) { return true; } String valueString = newValue.toString(); if (valueString.isEmpty()) { return true; } // Before validating, remove custom placeholders, which will not // be considered valid parts of the URL in some locations: - // Blank %d and %s: - valueString = valueString.replaceAll("%[sd]", ""); + // Blank %t and %s: + valueString = valueString.replaceAll("%[st]", ""); // Blank %f but not if followed by digit or a-f as it may be a hex sequence valueString = valueString.replaceAll("%f(?![0-9a-f])", ""); // Require a scheme otherwise: try { URI uri = new URI(valueString); return uri.getScheme() != null; } catch (URISyntaxException use) { return false; } } } }
is_single_chunk: true
is_single_function: true
private boolean isValid(Object newValue) { // Allow empty/null value if (newValue == null) { return true; } String valueString = newValue.toString(); if (valueString.isEmpty()) { return true; } // Before validating, remove custom placeholders, which will not // be considered valid parts of the URL in some locations: // Blank %d and %s: valueString = valueString.replaceAll("%[sd]", ""); // Blank %f but not if followed by digit or a-f as it may be a hex sequence valueString = valueString.replaceAll("%f(?![0-9a-f])", ""); // Require a scheme otherwise: try { URI uri = new URI(valueString); return uri.getScheme() != null; } catch (URISyntaxException use) { return false; } }
private boolean isValid(Object newValue) { // Allow empty/null value if (newValue == null) { return true; } String valueString = newValue.toString(); if (valueString.isEmpty()) { return true; } // Before validating, remove custom placeholders, which will not // be considered valid parts of the URL in some locations: // Blank %t and %s: valueString = valueString.replaceAll("%[st]", ""); // Blank %f but not if followed by digit or a-f as it may be a hex sequence valueString = valueString.replaceAll("%f(?![0-9a-f])", ""); // Require a scheme otherwise: try { URI uri = new URI(valueString); return uri.getScheme() != null; } catch (URISyntaxException use) { return false; } }
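The patch only swaps which placeholders get blanked (%s and %t instead of %s and %d), while the validation recipe stays the same: strip the placeholders, then ask java.net.URI whether what remains parses and carries a scheme. The same recipe as a standalone class, with hypothetical sample URLs of my own:

import java.net.URI;
import java.net.URISyntaxException;

final class SearchUrlCheck {
    static boolean isValid(String value) {
        // Blank the %s and %t placeholders, mirroring the fixed code above.
        String stripped = value.replaceAll("%[st]", "");
        // Blank %f unless it starts what may be a hex escape sequence.
        stripped = stripped.replaceAll("%f(?![0-9a-f])", "");
        try {
            return new URI(stripped).getScheme() != null;
        } catch (URISyntaxException use) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(isValid("http://example.com/search?q=%s&type=%t")); // true
        System.out.println(isValid("no scheme here %s"));                      // false
    }
}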
diff --git a/src/org/nutz/ioc/loader/combo/ComboIocLoader.java b/src/org/nutz/ioc/loader/combo/ComboIocLoader.java index 86d0748c0..619a84540 100644 --- a/src/org/nutz/ioc/loader/combo/ComboIocLoader.java +++ b/src/org/nutz/ioc/loader/combo/ComboIocLoader.java @@ -1,148 +1,152 @@ package org.nutz.ioc.loader.combo; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.nutz.ioc.IocLoader; import org.nutz.ioc.IocLoading; import org.nutz.ioc.ObjectLoadException; import org.nutz.ioc.loader.annotation.AnnotationIocLoader; import org.nutz.ioc.loader.json.JsonLoader; import org.nutz.ioc.loader.xml.XmlIocLoader; import org.nutz.ioc.meta.IocObject; import org.nutz.json.Json; import org.nutz.lang.Lang; import org.nutz.lang.Mirror; import org.nutz.log.Log; import org.nutz.log.Logs; /** * 融化多种IocLoader * * @author wendal([email protected]) * */ public class ComboIocLoader implements IocLoader { private static final Log log = Logs.get(); private List<IocLoader> iocLoaders = new ArrayList<IocLoader>(); /** * 这个构造方法需要一组特殊的参数 * <p/> * 第一种,以*开头,后面接类名, 如 <code>*org.nutz.ioc.loader.json.JsonLoader</code> * <p/>1.b.45版开始支持类别名: js , json, xml, annotation 分别对应其加载类 * <p/> * 第二种,为具体的参数 * <p/> * 处理规律, 当遇到第一种参数(*),则认为接下来的一个或多个参数为这一个IocLoader的参数,直至遇到另外一个*开头的参数 * <p/> * <p/> * 例子: * <p/> * <code>{"*org.nutz.ioc.loader.json.JsonLoader","dao.js","service.js","*org.nutz.ioc.loader.xml.XmlIocLoader","config.xml"}</code> * <p/> * 这样的参数, 会生成一个以{"dao.js","service.js"}作为参数的JsonLoader,一个以{"dao.xml"} * 作为参数的XmlIocLoader * * @throws ClassNotFoundException * 如果*开头的参数所指代的类不存在 */ public ComboIocLoader(String... args) throws ClassNotFoundException { ArrayList<String> argsList = null; String currentClassName = null; for (String str : args) { if (str.length() > 0 && str.charAt(0) == '*') { if (argsList != null) createIocLoader(currentClassName, argsList); currentClassName = str.substring(1); argsList = new ArrayList<String>(); - } else - argsList.add(str); + } else { + if (argsList == null) { + throw new IllegalArgumentException("ioc args without Loader ClassName. " + args); + } + argsList.add(str); + } } if (currentClassName != null) createIocLoader(currentClassName, argsList); Set<String> beanNames = new HashSet<String>(); for (IocLoader loader : iocLoaders) { for (String beanName : loader.getName()) { if (!beanNames.add(beanName) && log.isWarnEnabled()) log.warnf("Found Duplicate beanName=%s, pls check you config!", beanName); } } } @SuppressWarnings("unchecked") private void createIocLoader(String className, List<String> args) throws ClassNotFoundException { Class<? extends IocLoader> klass = loaders.get(className); if (klass == null) klass = (Class<? extends IocLoader>) Lang.loadClass(className); iocLoaders.add((IocLoader) Mirror.me(klass).born(args.toArray(new Object[args.size()]))); } public ComboIocLoader(IocLoader... 
loaders) { for (IocLoader iocLoader : loaders) if (iocLoader != null) iocLoaders.add(iocLoader); } public String[] getName() { ArrayList<String> list = new ArrayList<String>(); for (IocLoader iocLoader : iocLoaders) { for (String name : iocLoader.getName()) list.add(name); } return list.toArray(new String[list.size()]); } public boolean has(String name) { for (IocLoader iocLoader : iocLoaders) if (iocLoader.has(name)) return true; return false; } public IocObject load(IocLoading loading, String name) throws ObjectLoadException { for (IocLoader iocLoader : iocLoaders) if (iocLoader.has(name)) { IocObject iocObject = iocLoader.load(loading, name); if (log.isDebugEnabled()) log.debugf("Found IocObject(%s) in IocLoader(%s)", name, iocLoader.getClass().getSimpleName() + "@" + iocLoader.hashCode()); return iocObject; } throw new ObjectLoadException("Object '" + name + "' without define!"); } /** * 类别名 */ private static Map<String, Class<? extends IocLoader>> loaders = new HashMap<String, Class<? extends IocLoader>>(); static { loaders.put("js", JsonLoader.class); loaders.put("json", JsonLoader.class); loaders.put("xml", XmlIocLoader.class); loaders.put("annotation", AnnotationIocLoader.class); } // TODO 这个方法好好整理一下 ... public String toString() { StringBuilder sb = new StringBuilder(); sb.append("/*ComboIocLoader*/\n{"); for (IocLoader loader : iocLoaders) { String str = Json.toJson(loader); str = str.replaceFirst("[{]", ""); // 肯定有!! int index = str.lastIndexOf("}"); // 肯定有!! StringBuilder sb2 = new StringBuilder(str); sb2.setCharAt(index, ' '); sb.append(sb2).append("\n"); } sb.append("}"); return sb.toString(); } }
is_single_chunk: true
is_single_function: true
public ComboIocLoader(String... args) throws ClassNotFoundException { ArrayList<String> argsList = null; String currentClassName = null; for (String str : args) { if (str.length() > 0 && str.charAt(0) == '*') { if (argsList != null) createIocLoader(currentClassName, argsList); currentClassName = str.substring(1); argsList = new ArrayList<String>(); } else argsList.add(str); } if (currentClassName != null) createIocLoader(currentClassName, argsList); Set<String> beanNames = new HashSet<String>(); for (IocLoader loader : iocLoaders) { for (String beanName : loader.getName()) { if (!beanNames.add(beanName) && log.isWarnEnabled()) log.warnf("Found Duplicate beanName=%s, pls check you config!", beanName); } } }
public ComboIocLoader(String... args) throws ClassNotFoundException { ArrayList<String> argsList = null; String currentClassName = null; for (String str : args) { if (str.length() > 0 && str.charAt(0) == '*') { if (argsList != null) createIocLoader(currentClassName, argsList); currentClassName = str.substring(1); argsList = new ArrayList<String>(); } else { if (argsList == null) { throw new IllegalArgumentException("ioc args without Loader ClassName. " + args); } argsList.add(str); } } if (currentClassName != null) createIocLoader(currentClassName, argsList); Set<String> beanNames = new HashSet<String>(); for (IocLoader loader : iocLoaders) { for (String beanName : loader.getName()) { if (!beanNames.add(beanName) && log.isWarnEnabled()) log.warnf("Found Duplicate beanName=%s, pls check you config!", beanName); } } }
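What the new guard buys is a fail-fast, descriptive error when loader arguments appear before any *ClassName marker; previously that input died with a NullPointerException on argsList.add(str). A usage sketch under the assumption that the referenced .js/.xml config files exist on the classpath:

import org.nutz.ioc.loader.combo.ComboIocLoader;

final class ComboLoaderDemo {
    public static void main(String[] args) throws ClassNotFoundException {
        // Well-formed: each *-prefixed entry opens a loader; what follows are its args.
        new ComboIocLoader(
                "*org.nutz.ioc.loader.json.JsonLoader", "dao.js", "service.js",
                "*org.nutz.ioc.loader.xml.XmlIocLoader", "config.xml");

        // Malformed: "dao.js" arrives before any *Loader marker.
        try {
            new ComboIocLoader("dao.js", "*org.nutz.ioc.loader.json.JsonLoader");
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}

One nit the patch keeps: the exception message concatenates the varargs array itself, so it prints an array identity string such as [Ljava.lang.String;@1b6d3586 rather than the offending arguments; Arrays.toString(args) would read better.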
diff --git a/ModJam.java b/ModJam.java index 856a9bb..36c038c 100644 --- a/ModJam.java +++ b/ModJam.java @@ -1,305 +1,305 @@ package modJam; import java.util.logging.Level; import net.minecraft.block.Block; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.item.EnumArmorMaterial; import net.minecraft.item.EnumToolMaterial; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.item.crafting.CraftingManager; import net.minecraft.item.crafting.FurnaceRecipes; import net.minecraftforge.common.Configuration; import net.minecraftforge.common.EnumHelper; import net.minecraftforge.common.ForgeDirection; import net.minecraftforge.oredict.OreDictionary; import net.minecraftforge.oredict.ShapedOreRecipe; import cpw.mods.fml.common.FMLLog; import cpw.mods.fml.common.Mod; import cpw.mods.fml.common.Mod.Init; import cpw.mods.fml.common.Mod.PreInit; import cpw.mods.fml.common.SidedProxy; import cpw.mods.fml.common.event.FMLInitializationEvent; import cpw.mods.fml.common.event.FMLPreInitializationEvent; import cpw.mods.fml.common.network.NetworkMod; import cpw.mods.fml.common.registry.GameRegistry; import cpw.mods.fml.common.registry.LanguageRegistry; @Mod(modid="fuj1n.modJam", name=CommonProxyModJam.modName, version=CommonProxyModJam.version) @NetworkMod(clientSideRequired=true, serverSideRequired=false) public class ModJam { @SidedProxy(serverSide="modJam.CommonProxyModJam", clientSide="modJam.ClientProxyModJam") public static CommonProxyModJam proxy; public static Configuration config; //Config values //Blocks public static int oreAwesomeID = 1024; public static int[] woodChairIDs = { 1025, 1026, 1027, 1028 }; public static int[] stoneChairIDs = { 1029, 1030, 1031, 1032 }; public static int woodTableID = 1033; public static int stoneTableID = 1034; //Items public static int ingotAwesomeID = 3240; public static int woodChairID = 3241; public static int stoneChairID = 3242; public static int awesomeArmorHelmetID = 3243; public static int awesomeArmorChestplateID = 3244; public static int awesomeArmorLeggingsID = 3245; public static int awesomeArmorBootsID = 3246; public static int awesomeToolPickaxeID = 3247; public static int awesomeToolShovelID = 3248; public static int awesomeToolSwordID = 3249; public static int awesomeToolAxeID = 3250; public static int awesomeToolHoeID = 3251; public static int darkExtractID = 3252; //End Config values //Blocks public static Block awesomeOre; public static Block woodChairNorth; public static Block woodChairEast; public static Block woodChairSouth; public static Block woodChairWest; public static Block stoneChairNorth; public static Block stoneChairEast; public static Block stoneChairSouth; public static Block stoneChairWest; public static Block woodTable; public static Block stoneTable; //Items public static Item awesomeIngot; public static Item woodChair; public static Item stoneChair; public static Item awesomeHelmet; public static Item awesomeChestplate; public static Item awesomeLeggings; public static Item awesomeBoots; public static Item awesomePickaxe; public static Item awesomeShovel; public static Item awesomeSword; public static Item awesomeAxe; public static Item awesomeHoe; public static Item darkExtract; //Materials public static EnumArmorMaterial awesomeArmorMaterial; public static EnumToolMaterial awesomeToolMaterial; //CreativeTabs public static CreativeTabs modJamCreativeTab; //Sub Names private static final String[] awesomeColors = { "White", "Orange", "Magenta", "Light-Blue", "Yellow", "Lime", 
"Pink", "Gray", "Light-Gray", "Cyan", "Purple", "Blue", "Brown", "Green", "Red", "Black" }; @PreInit public void PreInit(FMLPreInitializationEvent event){ config = new Configuration(event.getSuggestedConfigurationFile()); config.load(); //Blocks oreAwesomeID = config.getBlock("Awesome Ore ID", oreAwesomeID).getInt(); woodChairIDs[0] = config.getBlock("Wooden Chair ID Set(of 4)", woodChairIDs[0]).getInt(); stoneChairIDs[0] = config.getBlock("Stone Chair ID Set(of 4)", stoneChairIDs[0]).getInt(); woodTableID = config.getBlock("Wooden Table ID", woodTableID).getInt(); stoneTableID = config.getBlock("Stone Table ID", stoneTableID).getInt(); refreshChairIDs(); //Items ingotAwesomeID = config.getItem("Awesome Ingot ID", ingotAwesomeID).getInt(); woodChairID = config.getItem("Wooden Chair Item ID", woodChairID).getInt(); stoneChairID = config.getItem("Stone Chair Item ID", stoneChairID).getInt(); awesomeArmorHelmetID = config.getItem("Awesome Helmet ID", awesomeArmorHelmetID).getInt(); awesomeArmorChestplateID = config.getItem("Awesome Chestplate ID", awesomeArmorChestplateID).getInt(); awesomeArmorLeggingsID = config.getItem("Awesome Leggings ID", awesomeArmorLeggingsID).getInt(); awesomeArmorBootsID = config.getItem("Awesome Boots ID", awesomeArmorBootsID).getInt(); awesomeToolPickaxeID = config.getItem("Awesome Pickaxe ID", awesomeToolPickaxeID).getInt(); awesomeToolShovelID = config.getItem("Awesome Shovel ID", awesomeToolShovelID).getInt(); awesomeToolSwordID = config.getItem("Awesome Sword ID", awesomeToolSwordID).getInt(); awesomeToolAxeID = config.getItem("Awesome Axe ID", awesomeToolAxeID).getInt(); awesomeToolHoeID = config.getItem("Awesome Hoe ID", awesomeToolHoeID).getInt(); darkExtractID = config.getItem("Dark Extract ID", darkExtractID).getInt(); config.save(); } public void refreshChairIDs(){ woodChairIDs[1] = woodChairIDs[0] + 1; woodChairIDs[2] = woodChairIDs[1] + 1; woodChairIDs[3] = woodChairIDs[2] + 1; stoneChairIDs[1] = stoneChairIDs[0] + 1; stoneChairIDs[2] = stoneChairIDs[1] + 1; stoneChairIDs[3] = stoneChairIDs[2] + 1; } @Init public void Init(FMLInitializationEvent event){ proxy.handler(); registerCreativeTab(); initAllMaterials(); initAllItems(); initAllBlocks(); registerAllBlocks(); addAllNames(); addAllCrafting(); addAllSmelting(); registerAllOreDictionary(); registerAllWorldGenerators(); } public void initAllMaterials(){ awesomeArmorMaterial = EnumHelper.addArmorMaterial("AWESOME", 13, new int[]{3, 7, 5, 2}, 25); awesomeToolMaterial = EnumHelper.addToolMaterial("AWESOME", 1300, 250, 7.0F, 2, 22); } public void initAllBlocks(){ awesomeOre = new BlockAwesomeOre(oreAwesomeID).setHardness(5F).setResistance(5F).setCreativeTab(modJamCreativeTab).setUnlocalizedName("fuj1n.modJam.AwesomeOre"); woodChairNorth = new BlockChair(woodChairIDs[0], ForgeDirection.NORTH, Block.planks, woodChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); woodChairEast = new BlockChair(woodChairIDs[1], ForgeDirection.EAST, Block.planks, woodChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); woodChairSouth = new BlockChair(woodChairIDs[2], ForgeDirection.SOUTH, Block.planks, woodChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); woodChairWest = new BlockChair(woodChairIDs[3], ForgeDirection.WEST, Block.planks, woodChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); stoneChairNorth = new BlockChair(stoneChairIDs[0], ForgeDirection.NORTH, Block.stone, 
stoneChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); stoneChairEast = new BlockChair(stoneChairIDs[1], ForgeDirection.EAST, Block.stone, stoneChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); stoneChairSouth = new BlockChair(stoneChairIDs[2], ForgeDirection.SOUTH, Block.stone, stoneChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); stoneChairWest = new BlockChair(stoneChairIDs[3], ForgeDirection.WEST, Block.stone, stoneChair.itemID).setHardness(0.3F).setUnlocalizedName("fuj1n.modJam.tileChair"); woodTable = new BlockTable(woodTableID, Block.planks).setHardness(0.3F).setCreativeTab(modJamCreativeTab).setUnlocalizedName("fuj1n.modJam.woodTable"); stoneTable = new BlockTable(stoneTableID, Block.stone).setHardness(0.3F).setCreativeTab(modJamCreativeTab).setUnlocalizedName("fuj1n.modJam.stoneTable"); } public void initAllItems(){ awesomeIngot = new ItemAwesomeIngot(ingotAwesomeID).setCreativeTab(modJamCreativeTab); woodChair = new ItemChair(woodChairID, 0, this.woodChairIDs[0]).setCreativeTab(modJamCreativeTab).setUnlocalizedName("woodChair"); stoneChair = new ItemChair(stoneChairID, 1, this.stoneChairIDs[0]).setCreativeTab(modJamCreativeTab).setUnlocalizedName("stoneChair"); awesomeHelmet = new ItemAwesomeArmor(awesomeArmorHelmetID, awesomeArmorMaterial, CommonProxyModJam.awesomeArmorID, 0, "awesomeMod:fuj1n.AwesomeMod.awesomeArmor").setUnlocalizedName("fuj1n.AwesomeMod.awesomeArmor"); awesomeChestplate = new ItemAwesomeArmor(awesomeArmorChestplateID, awesomeArmorMaterial, CommonProxyModJam.awesomeArmorID, 1, "awesomeMod:fuj1n.AwesomeMod.awesomeArmor").setUnlocalizedName("fuj1n.AwesomeMod.awesomeArmor"); awesomeLeggings = new ItemAwesomeArmor(awesomeArmorLeggingsID, awesomeArmorMaterial, CommonProxyModJam.awesomeArmorID, 2, "awesomeMod:fuj1n.AwesomeMod.awesomeArmor").setUnlocalizedName("fuj1n.AwesomeMod.awesomeArmor"); awesomeBoots = new ItemAwesomeArmor(awesomeArmorBootsID, awesomeArmorMaterial, CommonProxyModJam.awesomeArmorID, 3, "awesomeMod:fuj1n.AwesomeMod.awesomeArmor").setUnlocalizedName("fuj1n.AwesomeMod.awesomeArmor"); awesomePickaxe = new ItemAwesomePickaxe(awesomeToolPickaxeID, awesomeToolMaterial, "awesomeMod:fuj1n.AwesomeMod.awesomePickaxe").setUnlocalizedName("fuj1n.AwesomeMod.awesomePickaxe"); awesomeShovel = new ItemAwesomeShovel(awesomeToolShovelID, awesomeToolMaterial, "awesomeMod:fuj1n.AwesomeMod.awesomeShovel").setUnlocalizedName("fuj1n.AwesomeMod.awesomeShovel"); awesomeSword = new ItemAwesomeSword(awesomeToolSwordID, awesomeToolMaterial, "awesomeMod:fuj1n.AwesomeMod.awesomeSword").setUnlocalizedName("fuj1n.AwesomeMod.awesomeSword"); awesomeAxe = new ItemAwesomeAxe(awesomeToolAxeID, awesomeToolMaterial, "awesomeMod:fuj1n.AwesomeMod.awesomeAxe").setUnlocalizedName("fuj1n.AwesomeMod.awesomeAxe"); awesomeHoe = new ItemAwesomeHoe(awesomeToolHoeID, awesomeToolMaterial, "awesomeMod:fuj1n.AwesomeMod.awesomeHoe").setUnlocalizedName("fuj1n.AwesomeMod.awesomeHoe"); darkExtract = new ItemDarkExtract(darkExtractID).setCreativeTab(modJamCreativeTab).setUnlocalizedName("fuj1n.AwesomeMod.darkExtract"); } public void registerAllBlocks(){ GameRegistry.registerBlock(awesomeOre, ItemAwesomeOre.class, "fuj1n.modJam.awesomeOre"); GameRegistry.registerBlock(woodChairNorth, "fuj1n.modJam.woodChairNorth"); GameRegistry.registerBlock(woodChairEast, "fuj1n.modJam.woodChairEast"); GameRegistry.registerBlock(woodChairSouth, "fuj1n.modJam.woodChairSouth"); GameRegistry.registerBlock(woodChairWest, 
"fuj1n.modJam.woodChairWest"); GameRegistry.registerBlock(stoneChairNorth, "fuj1n.modJam.stoneChairNorth"); GameRegistry.registerBlock(stoneChairEast, "fuj1n.modJam.stoneChairEast"); GameRegistry.registerBlock(stoneChairSouth, "fuj1n.modJam.stoneChairSouth"); GameRegistry.registerBlock(stoneChairWest, "fuj1n.modJam.stoneChairWest"); GameRegistry.registerBlock(woodTable, ItemTable.class, "fuj1n.modJam.woodTable"); GameRegistry.registerBlock(stoneTable, ItemTable.class, "fuj1n.modJam.stoneTable"); } public void addAllNames(){ for (int i = 0; i < 16; i++) { LanguageRegistry.addName(new ItemStack(awesomeOre, 1, i), awesomeColors[new ItemStack(awesomeOre, 1, i).getItemDamage()] + " Awesome Ore"); LanguageRegistry.addName(new ItemStack(awesomeIngot, 1, i), awesomeColors[new ItemStack(awesomeIngot, 1, i).getItemDamage()] + " Awesome Ingot"); LanguageRegistry.addName(new ItemStack(woodChair, 1, i), awesomeColors[new ItemStack(woodChair, 1, i).getItemDamage()] + " Glowing Wooden Chair"); LanguageRegistry.addName(new ItemStack(stoneChair, 1, i), awesomeColors[new ItemStack(stoneChair, 1, i).getItemDamage()] + " Glowing Stone Chair"); LanguageRegistry.addName(new ItemStack(woodTable, 1, i), awesomeColors[new ItemStack(woodTable, 1, i).getItemDamage()] + " Glowing Wooden Table"); LanguageRegistry.addName(new ItemStack(stoneTable, 1, i), awesomeColors[new ItemStack(stoneTable, 1, i).getItemDamage()] + " Glowing Stone Table"); //LanguageRegistry.instance().addStringLocalization(awesomeHelmet.getUnlocalizedName(ItemAwesomeArmor.getItemStackForNaming(awesomeHelmet.itemID, i)), awesomeColors[i] + " Awesome Helmet"); } LanguageRegistry.addName(new ItemStack(awesomeHelmet), "Awesome Helmet"); LanguageRegistry.addName(new ItemStack(awesomeChestplate), "Awesome Chestplate"); LanguageRegistry.addName(new ItemStack(awesomeLeggings), "Awesome Leggings"); LanguageRegistry.addName(new ItemStack(awesomeBoots), "Awesome Boots"); LanguageRegistry.addName(awesomePickaxe, "Awesome Pickaxe"); LanguageRegistry.addName(awesomeShovel, "Awesome Shovel"); LanguageRegistry.addName(awesomeSword, "Awesome Sword"); LanguageRegistry.addName(awesomeAxe, "Awesome Axe"); LanguageRegistry.addName(awesomeHoe, "Awesome Hoe"); LanguageRegistry.addName(darkExtract, "Dark Extract"); } public void registerCreativeTab(){ modJamCreativeTab = new CreativeTabModJam("fuj1n.modJam"); LanguageRegistry.instance().addStringLocalization("itemGroup." 
+ modJamCreativeTab.getTabLabel(), CommonProxyModJam.modName); } public void addAllCrafting(){ for(int i = 0; i < 15; i++){ CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "PXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "XXP", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "SXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "XXS", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodTable, 1, i), new Object[]{ "XXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneTable, 1, i), new Object[]{ "XXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); - CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(darkExtract, 1), new Object[]{ + CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeIngot, 1, 15), new Object[]{ "XXX", "XDX", "XXX", Character.valueOf('D'), "extractDark", Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); } CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHelmet, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.helmetSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeChestplate, 1, 0), new Object[]{ " X ", "XCX", " X ", Character.valueOf('C'), Item.plateSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeLeggings, 1, 0), new Object[]{ " X ", "XLX", " X ", Character.valueOf('L'), Item.legsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeBoots, 1, 0), new Object[]{ " X ", "XBX", " X ", Character.valueOf('B'), Item.bootsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomePickaxe, 1, 0), new Object[]{ " X ", "XPX", " X ", Character.valueOf('P'), Item.pickaxeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeShovel, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.shovelSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeSword, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.swordSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); 
CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeAxe, 1, 0), new Object[]{ " X ", "XAX", " X ", Character.valueOf('A'), Item.axeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHoe, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.hoeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); } public void addAllSmelting(){ for (int i = 0; i < 15; i++){ FurnaceRecipes.smelting().addSmelting(oreAwesomeID, i, new ItemStack(awesomeIngot, 1, i), 0.1F); } FurnaceRecipes.smelting().addSmelting(Item.coal.itemID, new ItemStack(darkExtract, 3, 0), 0.1F); } public void registerAllOreDictionary(){ for (int i = 0; i < 16; i++) { OreDictionary.registerOre("oreAwesome" + awesomeColors[i], new ItemStack(awesomeOre, 1, i)); OreDictionary.registerOre("ingotAwesome" + awesomeColors[i], new ItemStack(awesomeIngot, 1, i)); } OreDictionary.registerOre("extractDark", darkExtract); } public void registerAllWorldGenerators(){ GameRegistry.registerWorldGenerator(new WorldGeneratorModJam()); } public static <var> void log(var s, Level level){ FMLLog.log(level, "[Awesome Mod] %s", s); } }
is_single_chunk: true
is_single_function: true
public void addAllCrafting(){ for(int i = 0; i < 15; i++){ CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "PXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "XXP", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "SXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "XXS", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodTable, 1, i), new Object[]{ "XXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneTable, 1, i), new Object[]{ "XXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(darkExtract, 1), new Object[]{ "XXX", "XDX", "XXX", Character.valueOf('D'), "extractDark", Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); } CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHelmet, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.helmetSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeChestplate, 1, 0), new Object[]{ " X ", "XCX", " X ", Character.valueOf('C'), Item.plateSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeLeggings, 1, 0), new Object[]{ " X ", "XLX", " X ", Character.valueOf('L'), Item.legsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeBoots, 1, 0), new Object[]{ " X ", "XBX", " X ", Character.valueOf('B'), Item.bootsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomePickaxe, 1, 0), new Object[]{ " X ", "XPX", " X ", Character.valueOf('P'), Item.pickaxeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeShovel, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.shovelSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeSword, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.swordSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeAxe, 1, 0), new Object[]{ " X ", "XAX", " X ", Character.valueOf('A'), Item.axeSteel, Character.valueOf('X'), 
"ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHoe, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.hoeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); }
public void addAllCrafting(){ for(int i = 0; i < 15; i++){ CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "PXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodChair, 1, i), new Object[]{ "XXP", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "SXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneChair, 1, i), new Object[]{ "XXS", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(woodTable, 1, i), new Object[]{ "XXX", "PPP", "PXP", Character.valueOf('P'), Block.planks, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(stoneTable, 1, i), new Object[]{ "XXX", "SSS", "SXS", Character.valueOf('S'), Block.stone, Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeIngot, 1, 15), new Object[]{ "XXX", "XDX", "XXX", Character.valueOf('D'), "extractDark", Character.valueOf('X'), "ingotAwesome" + awesomeColors[i] })); } CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHelmet, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.helmetSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeChestplate, 1, 0), new Object[]{ " X ", "XCX", " X ", Character.valueOf('C'), Item.plateSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeLeggings, 1, 0), new Object[]{ " X ", "XLX", " X ", Character.valueOf('L'), Item.legsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeBoots, 1, 0), new Object[]{ " X ", "XBX", " X ", Character.valueOf('B'), Item.bootsSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomePickaxe, 1, 0), new Object[]{ " X ", "XPX", " X ", Character.valueOf('P'), Item.pickaxeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeShovel, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.shovelSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeSword, 1, 0), new Object[]{ " X ", "XSX", " X ", Character.valueOf('S'), Item.swordSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeAxe, 1, 0), new Object[]{ " X ", "XAX", " X ", Character.valueOf('A'), Item.axeSteel, 
Character.valueOf('X'), "ingotAwesomeBlack" })); CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(new ItemStack(awesomeHoe, 1, 0), new Object[]{ " X ", "XHX", " X ", Character.valueOf('H'), Item.hoeSteel, Character.valueOf('X'), "ingotAwesomeBlack" })); }
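The buggy/fixed pair above differs in a single token inside the color loop: the buggy registration outputs another darkExtract, while the fix outputs awesomeIngot with metadata 15, presumably the black variant that the tool and armor recipes consume as "ingotAwesomeBlack". Every registration repeats the same CraftingManager.getInstance().getRecipeList().add(new ShapedOreRecipe(...)) boilerplate, which is what makes a one-token output mistake easy to miss; a small helper cuts that noise. This is a minimal sketch meant to live inside the mod class, assuming only the old Forge ShapedOreRecipe API already used above; addShaped is a hypothetical name, not part of the mod.

    // Hypothetical helper: wraps the registration boilerplate repeated in
    // addAllCrafting() so each recipe is one readable call.
    private static void addShaped(ItemStack output, Object... recipe) {
        CraftingManager.getInstance().getRecipeList()
                       .add(new ShapedOreRecipe(output, recipe));
    }

    // The corrected dark-extract recipe inside the loop then reads:
    //   addShaped(new ItemStack(awesomeIngot, 1, 15),
    //             "XXX", "XDX", "XXX",
    //             'D', "extractDark",
    //             'X', "ingotAwesome" + awesomeColors[i]);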
diff --git a/src/org/openstreetmap/josm/gui/tagging/ac/AutoCompletionCache.java b/src/org/openstreetmap/josm/gui/tagging/ac/AutoCompletionCache.java index 8d996c08..8ba67652 100644 --- a/src/org/openstreetmap/josm/gui/tagging/ac/AutoCompletionCache.java +++ b/src/org/openstreetmap/josm/gui/tagging/ac/AutoCompletionCache.java @@ -1,250 +1,254 @@ package org.openstreetmap.josm.gui.tagging.ac; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Logger; import org.openstreetmap.josm.data.osm.OsmPrimitive; import org.openstreetmap.josm.data.osm.OsmUtils; import org.openstreetmap.josm.data.osm.Relation; import org.openstreetmap.josm.data.osm.RelationMember; import org.openstreetmap.josm.gui.MapView; import org.openstreetmap.josm.gui.layer.Layer; import org.openstreetmap.josm.gui.layer.OsmDataLayer; import org.openstreetmap.josm.gui.tagging.TaggingPreset; import org.openstreetmap.josm.tools.MultiMap; /** * AutoCompletionCache temporarily holds a cache of keys with a list of * possible auto completion values for each key. * * The cache can initialize itself from the current JOSM data set such that * <ol> * <li>any key used in a tag in the data set is part of the key list in the cache</li> * <li>any value used in a tag for a specific key is part of the autocompletion list of * this key</li> * </ol> * * Building up auto completion lists should not * slow down tabbing from input field to input field. Looping through the complete * data set in order to build up the auto completion list for a specific input * field is not efficient enough, hence this cache. * */ public class AutoCompletionCache { static private final Logger logger = Logger.getLogger(AutoCompletionCache.class.getName()); private static HashMap<OsmDataLayer, AutoCompletionCache> caches; static { caches = new HashMap<OsmDataLayer, AutoCompletionCache>(); MapView.addLayerChangeListener(new MapView.LayerChangeListener() { public void activeLayerChange(Layer oldLayer, Layer newLayer) { // do nothing } public void layerAdded(Layer newLayer) { // do noting } public void layerRemoved(Layer oldLayer) { if (oldLayer instanceof OsmDataLayer) { caches.remove(oldLayer); } } } ); } static public AutoCompletionCache getCacheForLayer(OsmDataLayer layer) { AutoCompletionCache cache = caches.get(layer); if (cache == null) { cache = new AutoCompletionCache(layer); caches.put(layer, cache); } return cache; } /** the cached tags given by a tag key and a list of values for this tag */ private MultiMap<String, String> tagCache; /** the layer this cache is built for */ private OsmDataLayer layer; /** the same as tagCache but for the preset keys and values */ private static MultiMap<String, String> presetTagCache = new MultiMap<String, String>(); /** the cached list of member roles */ private Set<String> roleCache; /** * constructor */ public AutoCompletionCache(OsmDataLayer layer) { tagCache = new MultiMap<String, String>(); roleCache = new HashSet<String>(); this.layer = layer; } public AutoCompletionCache() { this(null); } /** * make sure, the keys and values of all tags held by primitive are * in the auto completion cache * * @param primitive an OSM primitive */ protected void cachePrimitive(OsmPrimitive primitive) { for (String key: primitive.keySet()) { String value = primitive.get(key); tagCache.put(key, value); } } /** * Caches all member roles of the relation <code>relation</code> * * @param relation the relation */ protected 
void cacheRelationMemberRoles(Relation relation){ for (RelationMember m: relation.getMembers()) { if (m.hasRole() && !roleCache.contains(m.getRole())) { roleCache.add(m.getRole()); } } } /** * initializes the cache from the primitives in the dataset of * {@see #layer} * */ public void initFromDataSet() { tagCache = new MultiMap<String, String>(); if (layer == null) return; Collection<OsmPrimitive> ds = layer.data.allNonDeletedPrimitives(); for (OsmPrimitive primitive : ds) { cachePrimitive(primitive); } for (Relation relation : layer.data.getRelations()) { if (relation.isIncomplete() || relation.isDeleted()) { continue; } cacheRelationMemberRoles(relation); } } /** * Initialize the cache for presets. This is done only once. */ public static void cachePresets(Collection<TaggingPreset> presets) { for (final TaggingPreset p : presets) { for (TaggingPreset.Item item : p.data) { if (item instanceof TaggingPreset.Check) { TaggingPreset.Check ch = (TaggingPreset.Check) item; + if (ch.key == null) continue; presetTagCache.put(ch.key, OsmUtils.falseval); presetTagCache.put(ch.key, OsmUtils.trueval); } else if (item instanceof TaggingPreset.Combo) { TaggingPreset.Combo co = (TaggingPreset.Combo) item; + if (co.key == null || co.values == null) continue; for (String value : co.values.split(",")) { presetTagCache.put(co.key, value); } } else if (item instanceof TaggingPreset.Key) { TaggingPreset.Key ky = (TaggingPreset.Key) item; + if (ky.key == null || ky.value == null) continue; presetTagCache.put(ky.key, ky.value); } else if (item instanceof TaggingPreset.Text) { TaggingPreset.Text tt = (TaggingPreset.Text) item; + if (tt.key == null) continue; presetTagCache.putVoid(tt.key); if (tt.default_ != null && !tt.default_.equals("")) { presetTagCache.put(tt.key, tt.default_); } } } } } /** * replies the keys held by the cache * * @return the list of keys held by the cache */ protected List<String> getDataKeys() { return new ArrayList<String>(tagCache.keySet()); } protected List<String> getPresetKeys() { return new ArrayList<String>(presetTagCache.keySet()); } /** * replies the auto completion values allowed for a specific key. Replies * an empty list if key is null or if key is not in {@link #getKeys()}. * * @param key * @return the list of auto completion values */ protected List<String> getDataValues(String key) { return new ArrayList<String>(tagCache.getValues(key)); } protected static List<String> getPresetValues(String key) { return new ArrayList<String>(presetTagCache.getValues(key)); } /** * Replies the list of member roles * * @return the list of member roles */ public List<String> getMemberRoles() { return new ArrayList<String>(roleCache); } /** * Populates the an {@see AutoCompletionList} with the currently cached * member roles. 
* * @param list the list to populate */ public void populateWithMemberRoles(AutoCompletionList list) { list.clear(); list.add(roleCache, AutoCompletionItemPritority.IS_IN_DATASET); } /** * Populates the an {@see AutoCompletionList} with the currently cached * values for a tag * * @param list the list to populate * @param key the tag key * @param append true to add the values to the list; false, to replace the values * in the list by the tag values */ public void populateWithTagValues(AutoCompletionList list, String key, boolean append) { if (!append) { list.clear(); } list.add(getDataValues(key), AutoCompletionItemPritority.IS_IN_DATASET); list.add(getPresetValues(key), AutoCompletionItemPritority.IS_IN_STANDARD); } /** * Populates the an {@see AutoCompletionList} with the currently cached * tag keys * * @param list the list to populate * @param append true to add the keys to the list; false, to replace the keys * in the list by the keys in the cache */ public void populateWithKeys(AutoCompletionList list, boolean append) { if (!append) { list.clear(); } list.add(getDataKeys(), AutoCompletionItemPritority.IS_IN_DATASET); list.add(getPresetKeys(), AutoCompletionItemPritority.IS_IN_STANDARD); } }
is_single_chunk: false
is_single_function: true
public static void cachePresets(Collection<TaggingPreset> presets) { for (final TaggingPreset p : presets) { for (TaggingPreset.Item item : p.data) { if (item instanceof TaggingPreset.Check) { TaggingPreset.Check ch = (TaggingPreset.Check) item; presetTagCache.put(ch.key, OsmUtils.falseval); presetTagCache.put(ch.key, OsmUtils.trueval); } else if (item instanceof TaggingPreset.Combo) { TaggingPreset.Combo co = (TaggingPreset.Combo) item; for (String value : co.values.split(",")) { presetTagCache.put(co.key, value); } } else if (item instanceof TaggingPreset.Key) { TaggingPreset.Key ky = (TaggingPreset.Key) item; presetTagCache.put(ky.key, ky.value); } else if (item instanceof TaggingPreset.Text) { TaggingPreset.Text tt = (TaggingPreset.Text) item; presetTagCache.putVoid(tt.key); if (tt.default_ != null && !tt.default_.equals("")) { presetTagCache.put(tt.key, tt.default_); } } } } }
public static void cachePresets(Collection<TaggingPreset> presets) { for (final TaggingPreset p : presets) { for (TaggingPreset.Item item : p.data) { if (item instanceof TaggingPreset.Check) { TaggingPreset.Check ch = (TaggingPreset.Check) item; if (ch.key == null) continue; presetTagCache.put(ch.key, OsmUtils.falseval); presetTagCache.put(ch.key, OsmUtils.trueval); } else if (item instanceof TaggingPreset.Combo) { TaggingPreset.Combo co = (TaggingPreset.Combo) item; if (co.key == null || co.values == null) continue; for (String value : co.values.split(",")) { presetTagCache.put(co.key, value); } } else if (item instanceof TaggingPreset.Key) { TaggingPreset.Key ky = (TaggingPreset.Key) item; if (ky.key == null || ky.value == null) continue; presetTagCache.put(ky.key, ky.value); } else if (item instanceof TaggingPreset.Text) { TaggingPreset.Text tt = (TaggingPreset.Text) item; if (tt.key == null) continue; presetTagCache.putVoid(tt.key); if (tt.default_ != null && !tt.default_.equals("")) { presetTagCache.put(tt.key, tt.default_); } } } } }
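The fix in this pair is a set of null guards: tagging presets can be defined without a key, value, or values list, and the unguarded cachePresets then fails, most obviously at co.values.split(",") when values is null. Below is a self-contained sketch of that failure mode and the guard, using a plain HashMap in place of JOSM's MultiMap; all names are illustrative, not JOSM's.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PresetGuardSketch {
        // Minimal stand-in for JOSM's MultiMap<String, String>.
        private static final Map<String, List<String>> cache = new HashMap<>();

        private static void put(String key, String value) {
            cache.computeIfAbsent(key, k -> new ArrayList<>()).add(value);
        }

        // Same guard the fix adds before touching the cache: an incomplete
        // preset item is skipped instead of throwing a NullPointerException.
        private static void cacheComboItem(String key, String values) {
            if (key == null || values == null) return;
            for (String v : values.split(",")) {
                put(key, v);
            }
        }

        public static void main(String[] args) {
            cacheComboItem("surface", "asphalt,gravel"); // complete item
            cacheComboItem(null, null);                  // incomplete item, skipped
            System.out.println(cache); // {surface=[asphalt, gravel]}
        }
    }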
diff --git a/src/org/pentaho/agilebi/pdi/modeler/AbstractMetaDataModelNode.java b/src/org/pentaho/agilebi/pdi/modeler/AbstractMetaDataModelNode.java index 773e85b..b0b5812 100644 --- a/src/org/pentaho/agilebi/pdi/modeler/AbstractMetaDataModelNode.java +++ b/src/org/pentaho/agilebi/pdi/modeler/AbstractMetaDataModelNode.java @@ -1,138 +1,139 @@ package org.pentaho.agilebi.pdi.modeler; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.List; import org.pentaho.ui.xul.util.AbstractModelNode; public abstract class AbstractMetaDataModelNode<T extends AbstractMetaDataModelNode> extends AbstractModelNode<T> { private static final long serialVersionUID = 1547202580713108254L; protected boolean valid = true; protected List<String> validationMessages = new ArrayList<String>(); protected String image; protected PropertyChangeListener validListener = new PropertyChangeListener(){ public void propertyChange(PropertyChangeEvent arg0) { validateNode(); } }; protected PropertyChangeListener nameListener = new PropertyChangeListener(){ public void propertyChange(PropertyChangeEvent arg0) { validateNode(); } }; protected transient PropertyChangeListener childrenListener = new PropertyChangeListener(){ public void propertyChange(PropertyChangeEvent evt) { fireCollectionChanged(); } }; public AbstractMetaDataModelNode() { this.image = getValidImage(); } @Override public void onAdd(T child) { child.addPropertyChangeListener("name", nameListener); child.addPropertyChangeListener("valid", validListener); child.addPropertyChangeListener("children", childrenListener); validateNode(); } @Override public void onRemove(T child) { child.removePropertyChangeListener(validListener); child.removePropertyChangeListener(nameListener); child.removePropertyChangeListener(childrenListener); validateNode(); } public String getValidationMessagesString() { String str = ""; //$NON-NLS-1$ for (int i = 0 ; i < validationMessages.size(); i++) { if (i > 0) { str += ", "; //$NON-NLS-1$ } str += validationMessages.get(i); } return str; } public List<String> getValidationMessages() { return validationMessages; } @Override protected void fireCollectionChanged() { super.fireCollectionChanged(); validateNode(); } public void setImage(String image) { if (this.image == null || !this.image.equals(image)) { String oldimg = this.image; this.image = image; this.firePropertyChange("image", oldimg, image); //$NON-NLS-1$ } } public abstract String getValidImage(); public final String getInvalidImage() { return "images/warning.png"; //$NON-NLS-1$ } public String getImage() { return (this.valid)? getValidImage():getInvalidImage(); } public abstract void validate(); public void validateNode() { + boolean prevValid = valid; String prevMessages = getValidationMessagesString(); validate(); - this.firePropertyChange("valid", null, valid); + this.firePropertyChange("valid", prevValid, valid); this.firePropertyChange("validationMessagesString", prevMessages, getValidationMessagesString()); if (valid) { setImage(getValidImage()); } else { setImage(getInvalidImage()); } } @SuppressWarnings("unchecked") public void validateTree() { for (T t : this) { ((AbstractMetaDataModelNode)t).validateTree(); } validateNode(); }; @SuppressWarnings("unchecked") public boolean isTreeValid() { if (!isValid()) { return false; } for (T t : this) { if (!((AbstractMetaDataModelNode)t).isValid()) { return false; } } return true; } public boolean isValid() { return valid; } public abstract Class<? 
extends ModelerNodePropertiesForm> getPropertiesForm(); }
is_single_chunk: false
is_single_function: true
public void validateNode() { String prevMessages = getValidationMessagesString(); validate(); this.firePropertyChange("valid", null, valid); this.firePropertyChange("validationMessagesString", prevMessages, getValidationMessagesString()); if (valid) { setImage(getValidImage()); } else { setImage(getInvalidImage()); } }
public void validateNode() { boolean prevValid = valid; String prevMessages = getValidationMessagesString(); validate(); this.firePropertyChange("valid", prevValid, valid); this.firePropertyChange("validationMessagesString", prevMessages, getValidationMessagesString()); if (valid) { setImage(getValidImage()); } else { setImage(getInvalidImage()); } }
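The one-line fix here is about java.beans semantics: firePropertyChange suppresses the event when old and new values are equal, but a null old value defeats that check, so the buggy version notified "valid" listeners on every validation pass even when validity had not changed. Capturing prevValid before validate() restores accurate change events. A self-contained sketch of the behavior, with illustrative names rather than the Pentaho classes:

    import java.beans.PropertyChangeSupport;

    public class ValidFlagSketch {
        private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
        private boolean valid = true;

        void revalidate(boolean newValid) {
            boolean prevValid = valid; // capture before mutating, as in the fix
            valid = newValid;
            // Fires only when validity actually changed; passing null as the
            // old value (the buggy form) would notify listeners every time.
            pcs.firePropertyChange("valid", prevValid, valid);
        }

        public static void main(String[] args) {
            ValidFlagSketch node = new ValidFlagSketch();
            node.pcs.addPropertyChangeListener(evt ->
                System.out.println(evt.getPropertyName() + ": "
                    + evt.getOldValue() + " -> " + evt.getNewValue()));
            node.revalidate(true);  // no event: value unchanged
            node.revalidate(false); // prints "valid: true -> false"
        }
    }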
diff --git a/test/functional/test/java/lang/ClassTest.java b/test/functional/test/java/lang/ClassTest.java index fa22849f..998c98bd 100644 --- a/test/functional/test/java/lang/ClassTest.java +++ b/test/functional/test/java/lang/ClassTest.java @@ -1,119 +1,119 @@ package test.java.lang; import jvm.TestCase; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.Arrays; import java.util.List; public class ClassTest extends TestCase { public static void testGetAnnotation() { Tag tag = TaggedClass.class.getAnnotation(Tag.class); assertEquals(Byte.MAX_VALUE, tag.byteValue()); assertEquals(Character.MAX_VALUE, tag.charValue()); assertEquals(Short.MAX_VALUE, tag.shortValue()); assertEquals(Integer.MAX_VALUE, tag.intValue()); assertEquals(Long.MAX_VALUE, tag.longValue()); assertEquals(Float.MAX_VALUE, tag.floatValue()); assertEquals(Double.MAX_VALUE, tag.doubleValue()); assertEquals("hello, world", tag.stringValue()); // assertEquals(Required.YES, tag.enumValue()); assertEquals(Object.class, tag.classValue()); assertArrayEquals(new byte[] { Byte.MIN_VALUE, Byte.MAX_VALUE }, tag.byteArrayValue()); assertArrayEquals(new char[] { Character.MIN_VALUE, Character.MAX_VALUE }, tag.charArrayValue()); assertArrayEquals(new short[] { Short.MIN_VALUE, Short.MAX_VALUE }, tag.shortArrayValue()); assertArrayEquals(new int[] { Integer.MIN_VALUE, Integer.MAX_VALUE }, tag.intArrayValue()); assertArrayEquals(new long[] { Long.MIN_VALUE, Long.MAX_VALUE }, tag.longArrayValue()); assertArrayEquals(new float[] { Float.MIN_VALUE, Float.MAX_VALUE }, tag.floatArrayValue()); assertArrayEquals(new double[] { Double.MIN_VALUE, Double.MAX_VALUE }, tag.doubleArrayValue()); assertArrayEquals(new String[] { "hello, world", "Hello, World!" }, tag.stringArrayValue()); // assertArrayEquals(new Required[] { Required.YES, Required.NO }, tag.enumArrayValue()); -// assertArrayEquals(new Class<?>[] { Integer.class, Long.class }, tag.classArrayValue()); + assertArrayEquals(new Class<?>[] { Integer.class, Long.class }, tag.classArrayValue()); } @Tag( byteValue = Byte.MAX_VALUE, charValue = Character.MAX_VALUE, shortValue = Short.MAX_VALUE, intValue = Integer.MAX_VALUE, longValue = Long.MAX_VALUE, floatValue = Float.MAX_VALUE, doubleValue = Double.MAX_VALUE, stringValue = "hello, world", enumValue = Required.YES, classValue = Object.class, byteArrayValue = { Byte.MIN_VALUE, Byte.MAX_VALUE }, charArrayValue = { Character.MIN_VALUE, Character.MAX_VALUE }, shortArrayValue = { Short.MIN_VALUE, Short.MAX_VALUE }, intArrayValue = { Integer.MIN_VALUE, Integer.MAX_VALUE }, longArrayValue = { Long.MIN_VALUE, Long.MAX_VALUE }, floatArrayValue = { Float.MIN_VALUE, Float.MAX_VALUE }, doubleArrayValue = { Double.MIN_VALUE, Double.MAX_VALUE }, stringArrayValue = { "hello, world", "Hello, World!" 
}, enumArrayValue = { Required.YES, Required.NO }, classArrayValue = { Integer.class, Long.class } ) public static class TaggedClass { } @Retention(RetentionPolicy.RUNTIME) public @interface Tag { byte byteValue(); char charValue(); short shortValue(); int intValue(); long longValue(); float floatValue(); double doubleValue(); String stringValue(); Required enumValue(); Class<?> classValue(); byte[] byteArrayValue(); char[] charArrayValue(); short[] shortArrayValue(); int[] intArrayValue(); long[] longArrayValue(); float[] floatArrayValue(); double[] doubleArrayValue(); String[] stringArrayValue(); Required[] enumArrayValue(); Class<?>[] classArrayValue(); } public static enum Required { YES, NO } public static void testGetClasses() { List<String> classNames = sort(map(Arrays.asList(Type.class.getClasses()), new Function<Class<?>, String>() { public String apply(Class<?> klass) { return klass.getName(); } })); assertEquals(Arrays.asList("test.java.lang.ClassTest$Type$InnerInstance", "test.java.lang.ClassTest$Type$InnerStatic"), classNames); } public static void testGetDeclaredClasses() { List<String> classNames = sort(map(Arrays.asList(Type.class.getDeclaredClasses()), new Function<Class<?>, String>() { public String apply(Class<?> klass) { return klass.getName(); } })); assertEquals(Arrays.asList("test.java.lang.ClassTest$Type$InnerInstance", "test.java.lang.ClassTest$Type$InnerInstancePrivate", "test.java.lang.ClassTest$Type$InnerStatic"), classNames); } public static class Type { public class InnerInstance { } public static class InnerStatic { } @SuppressWarnings("unused") private class InnerInstancePrivate { } } public static void main(String[] args) { testGetAnnotation(); testGetClasses(); testGetDeclaredClasses(); } }
is_single_chunk: true
is_single_function: true
public static void testGetAnnotation() { Tag tag = TaggedClass.class.getAnnotation(Tag.class); assertEquals(Byte.MAX_VALUE, tag.byteValue()); assertEquals(Character.MAX_VALUE, tag.charValue()); assertEquals(Short.MAX_VALUE, tag.shortValue()); assertEquals(Integer.MAX_VALUE, tag.intValue()); assertEquals(Long.MAX_VALUE, tag.longValue()); assertEquals(Float.MAX_VALUE, tag.floatValue()); assertEquals(Double.MAX_VALUE, tag.doubleValue()); assertEquals("hello, world", tag.stringValue()); // assertEquals(Required.YES, tag.enumValue()); assertEquals(Object.class, tag.classValue()); assertArrayEquals(new byte[] { Byte.MIN_VALUE, Byte.MAX_VALUE }, tag.byteArrayValue()); assertArrayEquals(new char[] { Character.MIN_VALUE, Character.MAX_VALUE }, tag.charArrayValue()); assertArrayEquals(new short[] { Short.MIN_VALUE, Short.MAX_VALUE }, tag.shortArrayValue()); assertArrayEquals(new int[] { Integer.MIN_VALUE, Integer.MAX_VALUE }, tag.intArrayValue()); assertArrayEquals(new long[] { Long.MIN_VALUE, Long.MAX_VALUE }, tag.longArrayValue()); assertArrayEquals(new float[] { Float.MIN_VALUE, Float.MAX_VALUE }, tag.floatArrayValue()); assertArrayEquals(new double[] { Double.MIN_VALUE, Double.MAX_VALUE }, tag.doubleArrayValue()); assertArrayEquals(new String[] { "hello, world", "Hello, World!" }, tag.stringArrayValue()); // assertArrayEquals(new Required[] { Required.YES, Required.NO }, tag.enumArrayValue()); // assertArrayEquals(new Class<?>[] { Integer.class, Long.class }, tag.classArrayValue()); }
public static void testGetAnnotation() { Tag tag = TaggedClass.class.getAnnotation(Tag.class); assertEquals(Byte.MAX_VALUE, tag.byteValue()); assertEquals(Character.MAX_VALUE, tag.charValue()); assertEquals(Short.MAX_VALUE, tag.shortValue()); assertEquals(Integer.MAX_VALUE, tag.intValue()); assertEquals(Long.MAX_VALUE, tag.longValue()); assertEquals(Float.MAX_VALUE, tag.floatValue()); assertEquals(Double.MAX_VALUE, tag.doubleValue()); assertEquals("hello, world", tag.stringValue()); // assertEquals(Required.YES, tag.enumValue()); assertEquals(Object.class, tag.classValue()); assertArrayEquals(new byte[] { Byte.MIN_VALUE, Byte.MAX_VALUE }, tag.byteArrayValue()); assertArrayEquals(new char[] { Character.MIN_VALUE, Character.MAX_VALUE }, tag.charArrayValue()); assertArrayEquals(new short[] { Short.MIN_VALUE, Short.MAX_VALUE }, tag.shortArrayValue()); assertArrayEquals(new int[] { Integer.MIN_VALUE, Integer.MAX_VALUE }, tag.intArrayValue()); assertArrayEquals(new long[] { Long.MIN_VALUE, Long.MAX_VALUE }, tag.longArrayValue()); assertArrayEquals(new float[] { Float.MIN_VALUE, Float.MAX_VALUE }, tag.floatArrayValue()); assertArrayEquals(new double[] { Double.MIN_VALUE, Double.MAX_VALUE }, tag.doubleArrayValue()); assertArrayEquals(new String[] { "hello, world", "Hello, World!" }, tag.stringArrayValue()); // assertArrayEquals(new Required[] { Required.YES, Required.NO }, tag.enumArrayValue()); assertArrayEquals(new Class<?>[] { Integer.class, Long.class }, tag.classArrayValue()); }
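The change in this pair simply re-enables the classArrayValue assertion, so the test now also exercises Class<?>[] annotation members, which the JVM under test presumably could not materialize when the line was first commented out. The reflective lookup it relies on is plain java.lang.annotation machinery; a self-contained sketch, independent of the project's TestCase helpers:

    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;
    import java.util.Arrays;

    public class ClassArrayMemberSketch {
        @Retention(RetentionPolicy.RUNTIME)
        @interface Tag {
            Class<?>[] classArrayValue();
        }

        @Tag(classArrayValue = { Integer.class, Long.class })
        static class Tagged { }

        public static void main(String[] args) {
            Tag tag = Tagged.class.getAnnotation(Tag.class);
            Class<?>[] expected = { Integer.class, Long.class };
            // The same comparison assertArrayEquals performs in the test.
            System.out.println(Arrays.equals(expected, tag.classArrayValue())); // true
        }
    }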
diff --git a/srcj/com/sun/electric/tool/user/tecEditWizard/TechEditWizardData.java b/srcj/com/sun/electric/tool/user/tecEditWizard/TechEditWizardData.java index 639c6537e..1247f4ff7 100644 --- a/srcj/com/sun/electric/tool/user/tecEditWizard/TechEditWizardData.java +++ b/srcj/com/sun/electric/tool/user/tecEditWizard/TechEditWizardData.java @@ -1,3673 +1,3678 @@ /* -*- tab-width: 4 -*- * * Electric(tm) VLSI Design System * * File: TechEditWizardData.java * Create an Electric XML Technology from a simple numeric description of design rules * Written in Perl by Andrew Wewist, translated to Java by Steven Rubin. * * Copyright (c) 2008 Sun Microsystems and Static Free Software * * Electric(tm) is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * Electric(tm) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Electric(tm); see the file COPYING. If not, write to * the Free Software Foundation, Inc., 59 Temple Place, Suite 330, * Boston, Mass 02111-1307, USA. */ package com.sun.electric.tool.user.tecEditWizard; import com.sun.electric.database.text.TextUtils; import com.sun.electric.database.geometry.*; import com.sun.electric.database.geometry.Poly; import com.sun.electric.tool.Job; import com.sun.electric.tool.io.FileType; import com.sun.electric.tool.io.IOTool; import com.sun.electric.tool.user.dialogs.OpenFile; import com.sun.electric.tool.user.User; import com.sun.electric.technology.*; import java.awt.Color; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.io.PrintWriter; import java.net.URL; import java.net.URLConnection; import java.util.*; /** * Class to handle the "Technology Creation Wizard" dialog. */ public class TechEditWizardData { /************************************** THE DATA **************************************/ private String tech_name; private String tech_description; private int num_metal_layers; private int stepsize; // value in nm private int resolution; // technology resolution in the same scale as stepsize private boolean pSubstrateProcess = false; // to control if process is a pwell or psubstrate process or not. If true, Tech Creation Wizard will not create pwell layers private boolean horizontalFlag = true; // to control if transistor gates are aligned horizontally. True by default . If transistors are horizontal -> M1 is horizontal? private boolean extraInfoFlag = false; // to control if protection polys are added to transistors. False by default // DIFFUSION RULES private WizardField diff_width = new WizardField(); private WizardField diff_poly_overhang = new WizardField(); // min. diff overhang from gate edge private WizardField diff_contact_overhang = new WizardField(); // min. diff overhang contact private WizardField diff_contact_overhang_min_short = new WizardField(); // diff overhang contact. It should hold the min short value private WizardField diff_contact_overhang_min_long = new WizardField(); // diff overhang contact. 
It should hold the min long value private WizardField diff_spacing = new WizardField(); // POLY RULES private WizardField poly_width = new WizardField(); private WizardField poly_endcap = new WizardField(); // min. poly gate extension from edge of diffusion private WizardField poly_spacing = new WizardField(); private WizardField poly_diff_spacing = new WizardField(); // min. spacing between poly and diffusion private WizardField poly_protection_spacing = new WizardField(); // min. spacing between poly and dummy poly // GATE RULES private WizardField gate_length = new WizardField(); // min. transistor gate length private WizardField gate_width = new WizardField(); // min. transistor gate width private WizardField gate_spacing = new WizardField(); // min. gate to gate spacing on diffusion private WizardField gate_contact_spacing = new WizardField(); // min. spacing from gate edge to contact inside diffusion // Special rules for OD18 transistors if specified. private WizardField gate_od18_length = new WizardField(); // transistor gate length for OD18 transistors private WizardField gate_od18_width = new WizardField(); // transistor gate width for OD18 transistors private WizardField[] od18_diff_overhang = new WizardField[]{new WizardField(), new WizardField()}; // OD18 X and Y overhang // Special rules for native transistors if specified private WizardField gate_nt_length = new WizardField(); // transistor gate length for native transistors private WizardField gate_nt_width = new WizardField(); // transistor gate width for OD18 transistors private WizardField poly_nt_endcap = new WizardField(); // gate extension from edge of diffusion for native transistors private WizardField nt_diff_overhang = new WizardField(); // extension from OD // Special rules for vth/vtl transistors if specified. private WizardField vthl_diff_overhang = new WizardField(); // Overhang of VTH/VTL with respecto to OD private WizardField vthl_poly_overhang = new WizardField(); // Overhang of VTH/VTL with respecto to the gate // Special rules for resistors private WizardField[] extraVariables = new WizardField[]{ new WizardField("poly_resistor_length"), // Poly resistor length new WizardField("poly_resistor_width"), // Poly resistor width new WizardField("rpo_contact_spacing"), // Spacing btw rpo edge and contact cut new WizardField("rpo_odpoly_overhang"), // RPO overhang from poly/OD new WizardField("rh_odpoly_overhang"), // RH overhang from poly/OD new WizardField("well_resistor_length"), // Well resistor length new WizardField("well_resistor_width"), // Well resistor width new WizardField("rpo_select_overlap"), // RPO overlap in select from center new WizardField("rpo_co_space_in_nwrod"), // rpo co distance new WizardField("co_nwrod_overhang"), // overhang of co in nwrod new WizardField("od_nwrod_overhang"), // overhang of od in nwrod new WizardField("rpo_nwrod_space"), // rpo nwrod space }; // CONTACT RULES private WizardField contact_size = new WizardField(); private WizardField contact_spacing = new WizardField(); private WizardField contact_array_spacing = new WizardField(); private WizardField contact_metal_overhang_inline_only = new WizardField(); // metal overhang when overhanging contact from two sides only private WizardField contact_metal_overhang_all_sides = new WizardField(); // metal overhang when surrounding contact private WizardField contact_poly_overhang = new WizardField(); // poly overhang contact. 
It should hold the recommended value private WizardField polycon_diff_spacing = new WizardField(); // spacing between poly-metal contact edge and diffusion // WELL AND IMPLANT RULES private WizardField nplus_width = new WizardField(); private WizardField nplus_overhang_diff = new WizardField(); private WizardField nplus_overhang_strap = new WizardField(); // for well/substrate contact private WizardField nplus_overhang_poly = new WizardField(); private WizardField nplus_spacing = new WizardField(); private WizardField pplus_width = new WizardField(); private WizardField pplus_overhang_diff = new WizardField(); private WizardField pplus_overhang_strap = new WizardField(); // for well/substrate contact private WizardField pplus_overhang_poly = new WizardField(); private WizardField pplus_spacing = new WizardField(); private WizardField nwell_width = new WizardField(); private WizardField nwell_overhang_diff_p = new WizardField(); private WizardField nwell_overhang_diff_n = new WizardField(); private WizardField nwell_spacing = new WizardField(); // METAL RULES private WizardField [] metal_width; private WizardField [] metal_spacing; private List<WideWizardField> wide_metal_spacing = new ArrayList<WideWizardField>(); // For all wide spacing rules not displayed in graphcs // VIA RULES private WizardField [] via_size; private WizardField [] via_inline_spacing; private WizardField [] via_array_spacing; private WizardField [] via_overhang; // generic cross contacts private static class ContactNode { String layer; WizardField overX; // overhang X value WizardField overY; // overhang Y value ContactNode(String l, double overXV, String overXS, double overYV, String overYS) { layer = l; overX = new WizardField(overXV, overXS); overY = new WizardField(overYV, overYS); } } private static class Contact { // some primitives might not have prefix. 
"-" should not be in the prefix to avoid // being displayed in the palette String prefix; List<ContactNode> layers; // odd metals go vertical Contact (String p) { prefix = p; layers = new ArrayList<ContactNode>(); } } private Map<String,List<Contact>> metalContacts; private Map<String,List<Contact>> otherContacts; private static class PaletteGroup { String name; List<Xml.ArcProto> arcs; List<Xml.MenuNodeInst> pins; List<Xml.MenuNodeInst> elements; // contact or transistor void addArc(Xml.ArcProto arc) { if (arcs == null) { arcs = new ArrayList<Xml.ArcProto>(); } arcs.add(arc); } private void add(List<Xml.MenuNodeInst> list, Xml.PrimitiveNodeGroup element, String shortName) { assert element.isSingleton; Xml.PrimitiveNode pn = element.nodes.get(0); Xml.MenuNodeInst n = new Xml.MenuNodeInst(); n.protoName = pn.name; n.function = pn.function; if (shortName != null) { n.text = shortName; } list.add(n); } void addPinOrResistor(Xml.PrimitiveNodeGroup pin, String shortName) { if (pins == null) { pins = new ArrayList<Xml.MenuNodeInst>(); } add(pins, pin, shortName); } void addElement(Xml.PrimitiveNodeGroup element, String shortName) { if (elements == null) { elements = new ArrayList<Xml.MenuNodeInst>(); } add(elements, element, shortName); } } // ANTENNA RULES private double poly_antenna_ratio; private double [] metal_antenna_ratio; // GDS-II LAYERS public static class LayerInfo { String name; int value; // normal value int type; // datatype of the normal value int pin; // pin value int pinType; // pin datatype int text; // text value int textType; // text datatype String graphicsTemplate; // uses other template for the graphics Color graphicsColor; // uses this color with no fill EGraphics.Outline graphicsOutline; // uses this outline with graphicsColor int [] graphicsPattern; // uses this pattern with graphicsColor LayerInfo(String n) { name = n; } String getValueWithType() {return (type != 0) ? value + "/" + type : value + "";} String getPinWithType() {return (pinType != 0) ? pin + "/" + pinType : pin + "";} String getTextWithType() {return (textType != 0) ? 
text + "/" + textType : text + "";} void setGDSData(int[] vals) { assert(vals.length == 6); value = vals[0]; type = vals[1]; pin = vals[2]; pinType = vals[3]; text = vals[4]; textType = vals[5]; } void setGraphicsTemplate(String s) { if (s.startsWith("[")) // color { StringTokenizer p = new StringTokenizer(s, ", []", false); int[] colors = new int[3]; String outlineOrPattern = null; // EGraphics.Outline.NOPAT.name(); // default int itemCount = 0; while (p.hasMoreTokens()) { String str = p.nextToken(); if (itemCount < 3) colors[itemCount++] = Integer.parseInt(str); else outlineOrPattern = str; assert(itemCount < 4); } EGraphics.Outline outline = EGraphics.Outline.findOutline(EGraphics.Outline.NOPAT.name()); int[] pattern = new int[16]; graphicsColor = new Color(colors[0], colors[1], colors[2]); if (outlineOrPattern != null) { EGraphics.Outline out = EGraphics.Outline.findOutline(outlineOrPattern); if (out != null) // manages to parse a valid Outline outline = out; else { assert(outlineOrPattern.startsWith("{")); // Pattern information StringTokenizer pat = new StringTokenizer(outlineOrPattern, "/ {}", false); int count = 0; while (pat.hasMoreTokens()) { String str = pat.nextToken(); int num = Integer.parseInt(str); assert(count < 16); pattern[count++] = num; } if (count != 16) assert(count == 16); } } graphicsOutline = outline; graphicsPattern = pattern; } else graphicsTemplate = s; } public String toString() { String val = getValueWithType(); // useful datatype if (pin != 0) { val = val + "," + getPinWithType() + "p"; } if (text != 0) { val = val + "," + getTextWithType() + "t"; } return val; } } private LayerInfo diff_layer = new LayerInfo("Diff"); private LayerInfo poly_layer = new LayerInfo("Poly"); private LayerInfo nplus_layer = new LayerInfo("NPlus"); private LayerInfo pplus_layer = new LayerInfo("PPlus"); private LayerInfo nwell_layer = new LayerInfo("N-Well"); private LayerInfo contact_layer = new LayerInfo("Contact"); private LayerInfo [] metal_layers; private LayerInfo [] via_layers; private LayerInfo marking_layer = new LayerInfo("Marking"); // Device marking layer // extra layers private List<LayerInfo> extraLayers; LayerInfo[] getBasicLayers() { List<LayerInfo> layers = new ArrayList<LayerInfo>(); layers.add(diff_layer); layers.add(poly_layer); if (getExtraInfoFlag()) layers.addAll(extraLayers); layers.add(nplus_layer); layers.add(pplus_layer); layers.add(nwell_layer); layers.add(contact_layer); layers.add(marking_layer); LayerInfo[] array = new LayerInfo[layers.size()]; layers.toArray(array); return array; } public TechEditWizardData() { stepsize = 100; num_metal_layers = 2; metal_width = new WizardField[num_metal_layers]; metal_spacing = new WizardField[num_metal_layers]; via_size = new WizardField[num_metal_layers-1]; via_inline_spacing = new WizardField[num_metal_layers-1]; via_array_spacing = new WizardField[num_metal_layers-1]; via_overhang = new WizardField[num_metal_layers-1]; metal_antenna_ratio = new double[num_metal_layers]; metalContacts = new HashMap<String,List<Contact>>(); otherContacts = new HashMap<String,List<Contact>>(); metal_layers = new LayerInfo[num_metal_layers]; via_layers = new LayerInfo[num_metal_layers-1]; for(int i=0; i<num_metal_layers; i++) { metal_width[i] = new WizardField(); metal_spacing[i] = new WizardField(); metal_layers[i] = new LayerInfo("Metal-"+(i+1)); } for(int i=0; i<num_metal_layers-1; i++) { via_size[i] = new WizardField(); via_inline_spacing[i] = new WizardField(); via_array_spacing[i] = new WizardField(); via_overhang[i] = new 
WizardField(); via_layers[i] = new LayerInfo("Via-"+(i+1)); } // extra layers extraLayers = new ArrayList<LayerInfo>(); } /************************************** ACCESSOR METHODS **************************************/ public String getTechName() { return tech_name; } public void setTechName(String s) { tech_name = s; } public String getTechDescription() { return tech_description; } public void setTechDescription(String s) { tech_description = s; } public int getStepSize() { return stepsize; } public void setStepSize(int n) { stepsize = n; } public int getResolution() { return resolution; } public void setResolution(int n) { resolution = n; } public int getNumMetalLayers() { return num_metal_layers; } public void setNumMetalLayers(int n) { int smallest = Math.min(n, num_metal_layers); WizardField [] new_metal_width = new WizardField[n]; for(int i=0; i<smallest; i++) new_metal_width[i] = metal_width[i]; for(int i=smallest; i<n; i++) new_metal_width[i] = new WizardField(); metal_width = new_metal_width; WizardField [] new_metal_spacing = new WizardField[n]; for(int i=0; i<smallest; i++) new_metal_spacing[i] = metal_spacing[i]; for(int i=smallest; i<n; i++) new_metal_spacing[i] = new WizardField(); metal_spacing = new_metal_spacing; WizardField [] new_via_size = new WizardField[n-1]; for(int i=0; i<smallest-1; i++) new_via_size[i] = via_size[i]; for(int i=smallest-1; i<n-1; i++) new_via_size[i] = new WizardField(); via_size = new_via_size; WizardField [] new_via_spacing = new WizardField[n-1]; for(int i=0; i<smallest-1; i++) new_via_spacing[i] = via_inline_spacing[i]; for(int i=smallest-1; i<n-1; i++) new_via_spacing[i] = new WizardField(); via_inline_spacing = new_via_spacing; WizardField [] new_via_array_spacing = new WizardField[n-1]; for(int i=0; i<smallest-1; i++) new_via_array_spacing[i] = via_array_spacing[i]; for(int i=smallest-1; i<n-1; i++) new_via_array_spacing[i] = new WizardField(); via_array_spacing = new_via_array_spacing; WizardField [] new_via_overhang_inline = new WizardField[n-1]; for(int i=0; i<smallest-1; i++) new_via_overhang_inline[i] = via_overhang[i]; for(int i=smallest-1; i<n-1; i++) new_via_overhang_inline[i] = new WizardField(); via_overhang = new_via_overhang_inline; double [] new_metal_antenna_ratio = new double[n]; for(int i=0; i<smallest; i++) new_metal_antenna_ratio[i] = metal_antenna_ratio[i]; metal_antenna_ratio = new_metal_antenna_ratio; LayerInfo [] new_gds_metal_layer = new LayerInfo[n]; for(int i=0; i<smallest; i++) { new_gds_metal_layer[i] = metal_layers[i]; } for(int i=smallest-1; i<n; i++) { new_gds_metal_layer[i] = new LayerInfo("Metal-"+(i+1)); } metal_layers = new_gds_metal_layer; LayerInfo [] new_gds_via_layer = new LayerInfo[n-1]; for(int i=0; i<smallest-1; i++) new_gds_via_layer[i] = via_layers[i]; for(int i=smallest-1; i<n-1; i++) new_gds_via_layer[i] = new LayerInfo("Via-"+(i+1)); via_layers = new_gds_via_layer; num_metal_layers = n; } // Flags boolean getPSubstratelProcess() { return pSubstrateProcess;} void setPSubstratelProcess(boolean b) { pSubstrateProcess = b; } boolean getHorizontalTransistors() { return horizontalFlag;} void setHorizontalTransistors(boolean b) { horizontalFlag = b; } boolean getExtraInfoFlag() { return extraInfoFlag;} void setExtraInfoFlag(boolean b) { extraInfoFlag = b; } // DIFFUSION RULES WizardField getDiffWidth() { return diff_width; } void setDiffWidth(WizardField v) { diff_width = v; } WizardField getDiffPolyOverhang() { return diff_poly_overhang; } void setDiffPolyOverhang(WizardField v) { diff_poly_overhang = 
v; } WizardField getDiffContactOverhang() { return diff_contact_overhang; } void setDiffContactOverhang(WizardField v) { diff_contact_overhang = v; } WizardField getDiffSpacing() { return diff_spacing; } void setDiffSpacing(WizardField v) { diff_spacing = v; } // POLY RULES WizardField getPolyWidth() { return poly_width; } void setPolyWidth(WizardField v) { poly_width = v; } WizardField getPolyEndcap() { return poly_endcap; } void setPolyEndcap(WizardField v) { poly_endcap = v; } WizardField getPolySpacing() { return poly_spacing; } void setPolySpacing(WizardField v) { poly_spacing = v; } WizardField getPolyDiffSpacing() { return poly_diff_spacing; } void setPolyDiffSpacing(WizardField v) { poly_diff_spacing = v; } WizardField getPolyProtectionSpacing() { return poly_protection_spacing; } void setPolyProtectionSpacing(WizardField v) { poly_protection_spacing = v; } // GATE RULES WizardField getGateLength() { return gate_length; } void setGateLength(WizardField v) { gate_length = v; } WizardField getGateWidth() { return gate_width; } void setGateWidth(WizardField v) { gate_width = v; } WizardField getGateSpacing() { return gate_spacing; } void setGateSpacing(WizardField v) { gate_spacing = v; } WizardField getGateContactSpacing() { return gate_contact_spacing; } void setGateContactSpacing(WizardField v) { gate_contact_spacing = v; } // CONTACT RULES WizardField getContactSize() { return contact_size; } void setContactSize(WizardField v) { contact_size = v; } WizardField getContactSpacing() { return contact_spacing; } void setContactSpacing(WizardField v) { contact_spacing = v; } WizardField getContactArraySpacing() { return contact_array_spacing; } void setContactArraySpacing(WizardField v) { contact_array_spacing = v; } WizardField getContactMetalOverhangInlineOnly() { return contact_metal_overhang_inline_only; } void setContactMetalOverhangInlineOnly(WizardField v) { contact_metal_overhang_inline_only = v; } WizardField getContactMetalOverhangAllSides() { return contact_metal_overhang_all_sides; } void setContactMetalOverhangAllSides(WizardField v) { contact_metal_overhang_all_sides = v; } WizardField getContactPolyOverhang() { return contact_poly_overhang; } void setContactPolyOverhang(WizardField v) { contact_poly_overhang = v; } WizardField getPolyconDiffSpacing() { return polycon_diff_spacing; } void setPolyconDiffSpacing(WizardField v) { polycon_diff_spacing = v; } // WELL AND IMPLANT RULES WizardField getNPlusWidth() { return nplus_width; } void setNPlusWidth(WizardField v) { nplus_width = v; } WizardField getNPlusOverhangDiff() { return nplus_overhang_diff; } void setNPlusOverhangDiff(WizardField v) { nplus_overhang_diff = v; } WizardField getNPlusOverhangStrap() { return nplus_overhang_strap; } void setNPlusOverhangStrap(WizardField v) { nplus_overhang_strap = v; } WizardField getNPlusOverhangPoly() { return nplus_overhang_poly; } void setNPlusOverhangPoly(WizardField v) { nplus_overhang_poly = v; } WizardField getNPlusSpacing() { return nplus_spacing; } void setNPlusSpacing(WizardField v) { nplus_spacing = v; } WizardField getPPlusWidth() { return pplus_width; } void setPPlusWidth(WizardField v) { pplus_width = v; } WizardField getPPlusOverhangDiff() { return pplus_overhang_diff; } void setPPlusOverhangDiff(WizardField v) { pplus_overhang_diff = v; } WizardField getPPlusOverhangStrap() { return pplus_overhang_strap; } void setPPlusOverhangStrap(WizardField v) { pplus_overhang_strap = v; } WizardField getPPlusOverhangPoly() { return pplus_overhang_poly; } void 
setPPlusOverhangPoly(WizardField v) { pplus_overhang_poly = v; } WizardField getPPlusSpacing() { return pplus_spacing; } void setPPlusSpacing(WizardField v) { pplus_spacing = v; } WizardField getNWellWidth() { return nwell_width; } void setNWellWidth(WizardField v) { nwell_width = v; } WizardField getNWellOverhangDiffP() { return nwell_overhang_diff_p; } void setNWellOverhangDiffP(WizardField v) { nwell_overhang_diff_p = v; } WizardField getNWellOverhangDiffN() { return nwell_overhang_diff_n; } void setNWellOverhangDiffN(WizardField v) { nwell_overhang_diff_n = v; } WizardField getNWellSpacing() { return nwell_spacing; } void setNWellSpacing(WizardField v) { nwell_spacing = v; } // METAL RULES WizardField [] getMetalWidth() { return metal_width; } void setMetalWidth(int met, WizardField value) { metal_width[met] = value; } WizardField [] getMetalSpacing() { return metal_spacing; } void setMetalSpacing(int met, WizardField value) { metal_spacing[met] = value; } // VIA RULES WizardField [] getViaSize() { return via_size; } void setViaSize(int via, WizardField value) { via_size[via] = value; } WizardField [] getViaSpacing() { return via_inline_spacing; } void setViaSpacing(int via, WizardField value) { via_inline_spacing[via] = value; } WizardField [] getViaArraySpacing() { return via_array_spacing; } void setViaArraySpacing(int via, WizardField value) { via_array_spacing[via] = value; } WizardField [] getViaOverhangInline() { return via_overhang; } void setViaOverhangInline(int via, WizardField value) { via_overhang[via] = value; } // ANTENNA RULES public double getPolyAntennaRatio() { return poly_antenna_ratio; } void setPolyAntennaRatio(double v) { poly_antenna_ratio = v; } public double [] getMetalAntennaRatio() { return metal_antenna_ratio; } void setMetalAntennaRatio(int met, double value) { metal_antenna_ratio[met] = value; } // GDS-II LAYERS static int[] getGDSValuesFromString(String s) { int[] vals = new int[6]; StringTokenizer parse = new StringTokenizer(s, ",", false); while (parse.hasMoreTokens()) { String v = parse.nextToken(); int pos = 0; int index = v.indexOf("/"); if (v.contains("p")) // pin section { pos = 2; } else if (v.contains("t")) // text section { pos = 4; } if (index != -1) // datatype value { vals[pos] = TextUtils.atoi(v.substring(0, index)); vals[pos+1] = TextUtils.atoi(v.substring(index+1)); } else vals[pos] = TextUtils.atoi(v); } return vals; } TechEditWizardData.LayerInfo [] getGDSMetal() { return metal_layers; } TechEditWizardData.LayerInfo [] getGDSVia() { return via_layers; } private String errorInData() { // check the General data if (tech_name == null || tech_name.length() == 0) return "General panel: No technology name"; if (stepsize == 0) return "General panel: Invalid unit size"; // check the Active data if (diff_width.value == 0) return "Active panel: Invalid width"; // check the Poly data if (poly_width.value == 0) return "Poly panel: Invalid width"; // check the Gate data if (gate_width.value == 0) return "Gate panel: Invalid width"; if (gate_length.value == 0) return "Gate panel: Invalid length"; // check the Contact data if (contact_size.value == 0) return "Contact panel: Invalid size"; // check the Well/Implant data if (nplus_width.value == 0) return "Well/Implant panel: Invalid NPlus width"; if (pplus_width.value == 0) return "Well/Implant panel: Invalid PPlus width"; if (nwell_width.value == 0) return "Well/Implant panel: Invalid NWell width"; // check the Metal data for(int i=0; i<num_metal_layers; i++) if (metal_width[i].value == 0) return 
"Metal panel: Invalid Metal-" + (i+1) + " width"; // check the Via data for(int i=0; i<num_metal_layers-1; i++) if (via_size[i].value == 0) return "Via panel: Invalid Via-" + (i+1) + " size"; return null; } /************************************** IMPORT RAW NUMBERS FROM DISK **************************************/ /** * Method to import data from a file to this object. * @return true on success; false on failure. */ boolean importData() { String fileName = OpenFile.chooseInputFile(FileType.ANY, "Technology Wizard File"); if (fileName == null) return false; return importData(fileName); } /** * Method to import data from a given file to this object. It is also in the regression so * keep the access. * @param fileName the name of the file to import. * @return true on success; false on failure. */ public boolean importData(String fileName) { URL url = TextUtils.makeURLToFile(fileName); // clean arrays first metalContacts.clear(); otherContacts.clear(); extraLayers.clear(); try { URLConnection urlCon = url.openConnection(); InputStreamReader is = new InputStreamReader(urlCon.getInputStream()); LineNumberReader lineReader = new LineNumberReader(is); for(;;) { String buf = lineReader.readLine(); if (buf == null) break; buf = buf.trim(); if (buf.length() == 0 || buf.startsWith("#")) continue; // parse the assignment if (buf.startsWith("$") || buf.startsWith("@")) { int spacePos = buf.indexOf(' '); int equalsPos = buf.indexOf('='); if (equalsPos < 0) { Job.getUserInterface().showErrorMessage("Missing '=' on line " + lineReader.getLineNumber(), "Syntax Error In Technology File"); break; } if (spacePos < 0) spacePos = equalsPos; else spacePos = Math.min(spacePos, equalsPos); String varName = buf.substring(1, spacePos); int semiPos = buf.indexOf(';'); if (semiPos < 0) { Job.getUserInterface().showErrorMessage("Missing ';' on line " + lineReader.getLineNumber(), "Syntax Error In Technology File"); break; } equalsPos++; while (equalsPos < semiPos && buf.charAt(equalsPos) == ' ') equalsPos++; String varValue = buf.substring(equalsPos, semiPos); // now figure out what to assign if (varName.equalsIgnoreCase("tech_libname")) { } else if (varName.equalsIgnoreCase("tech_name")) setTechName(stripQuotes(varValue)); else if (varName.equalsIgnoreCase("tech_description")) setTechDescription(stripQuotes(varValue)); else if (varName.equalsIgnoreCase("num_metal_layers")) setNumMetalLayers(TextUtils.atoi(varValue)); else if (varName.equalsIgnoreCase("psubstrate_process")) setPSubstratelProcess(Boolean.valueOf(varValue)); else if (varName.equalsIgnoreCase("horizontal_transistors")) setHorizontalTransistors(Boolean.valueOf(varValue)); else if (varName.equalsIgnoreCase("extra_info")) setExtraInfoFlag(Boolean.valueOf(varValue)); else if (varName.equalsIgnoreCase("stepsize")) setStepSize(TextUtils.atoi(varValue)); else if (varName.equalsIgnoreCase("resolution")) setResolution(TextUtils.atoi(varValue)); else if (varName.equalsIgnoreCase("diff_width")) diff_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_width_rule")) diff_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("diff_poly_overhang")) diff_poly_overhang.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_poly_overhang_rule")) diff_poly_overhang.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang")) diff_contact_overhang.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang_rule")) diff_contact_overhang.rule = 
stripQuotes(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang_short_min")) diff_contact_overhang_min_short.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang_short_min_rule")) diff_contact_overhang_min_short.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang_long_min")) diff_contact_overhang_min_long.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_contact_overhang_long_min_rule")) diff_contact_overhang_min_long.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("diff_spacing")) diff_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("diff_spacing_rule")) diff_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("poly_width")) poly_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("poly_width_rule")) poly_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("poly_endcap")) poly_endcap.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("poly_endcap_rule")) poly_endcap.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("poly_spacing")) poly_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("poly_spacing_rule")) poly_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("poly_diff_spacing")) poly_diff_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("poly_diff_spacing_rule")) poly_diff_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("poly_protection_spacing")) poly_protection_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("poly_protection_spacing_rule")) poly_protection_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("gate_length")) gate_length.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("gate_length_rule")) gate_length.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("gate_width")) gate_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("gate_width_rule")) gate_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("gate_spacing")) gate_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("gate_spacing_rule")) gate_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("gate_contact_spacing")) gate_contact_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("gate_contact_spacing_rule")) gate_contact_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("gate_od18_length")) fillRule(varValue, gate_od18_length); else if (varName.equalsIgnoreCase("gate_od18_width")) fillRule(varValue, gate_od18_width); else if (varName.equalsIgnoreCase("od18_diff_overhang")) fillRule(varValue, od18_diff_overhang); else if (varName.equalsIgnoreCase("gate_nt_length")) fillRule(varValue, gate_nt_length); else if (varName.equalsIgnoreCase("gate_nt_width")) fillRule(varValue, gate_nt_width); else if (varName.equalsIgnoreCase("poly_nt_endcap")) fillRule(varValue, poly_nt_endcap); else if (varName.equalsIgnoreCase("nt_diff_overhang")) fillRule(varValue, nt_diff_overhang); else if (varName.equalsIgnoreCase("vthl_diff_overhang")) fillRule(varValue, vthl_diff_overhang); else if (varName.equalsIgnoreCase("vthl_poly_overhang")) fillRule(varValue, vthl_poly_overhang); else if (varName.equalsIgnoreCase("contact_size")) contact_size.value = TextUtils.atof(varValue); else if 
(varName.equalsIgnoreCase("contact_size_rule")) contact_size.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("contact_spacing")) contact_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("contact_spacing_rule")) contact_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("contact_array_spacing")) contact_array_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("contact_array_spacing_rule")) contact_array_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("contact_metal_overhang_inline_only")) contact_metal_overhang_inline_only.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("contact_metal_overhang_inline_only_rule")) contact_metal_overhang_inline_only.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("contact_metal_overhang_all_sides")) contact_metal_overhang_all_sides.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("contact_metal_overhang_all_sides_rule")) contact_metal_overhang_all_sides.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("contact_poly_overhang")) contact_poly_overhang.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("contact_poly_overhang_rule")) contact_poly_overhang.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("polycon_diff_spacing")) polycon_diff_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("polycon_diff_spacing_rule")) polycon_diff_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nplus_width")) nplus_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nplus_width_rule")) nplus_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_diff")) nplus_overhang_diff.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_diff_rule")) nplus_overhang_diff.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_strap")) nplus_overhang_strap.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_strap_rule")) nplus_overhang_strap.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_poly")) nplus_overhang_poly.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nplus_overhang_poly_rule")) nplus_overhang_poly.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nplus_spacing")) nplus_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nplus_spacing_rule")) nplus_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("pplus_width")) pplus_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("pplus_width_rule")) pplus_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_diff")) pplus_overhang_diff.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_diff_rule")) pplus_overhang_diff.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_strap")) pplus_overhang_strap.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_strap_rule")) pplus_overhang_strap.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_poly")) pplus_overhang_poly.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("pplus_overhang_poly_rule")) pplus_overhang_poly.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("pplus_spacing")) 
pplus_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("pplus_spacing_rule")) pplus_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nwell_width")) nwell_width.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nwell_width_rule")) nwell_width.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nwell_overhang_diff_p")) nwell_overhang_diff_p.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nwell_overhang_diff_rule_p")) nwell_overhang_diff_p.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nwell_overhang_diff_n")) nwell_overhang_diff_n.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nwell_overhang_diff_rule_n")) nwell_overhang_diff_n.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("nwell_spacing")) nwell_spacing.value = TextUtils.atof(varValue); else if (varName.equalsIgnoreCase("nwell_spacing_rule")) nwell_spacing.rule = stripQuotes(varValue); else if (varName.equalsIgnoreCase("metal_width")) fillWizardArray(varValue, metal_width, num_metal_layers, false); else if (varName.equalsIgnoreCase("metal_width_rule")) fillWizardArray(varValue, metal_width, num_metal_layers, true); else if (varName.equalsIgnoreCase("metal_spacing")) fillWizardArray(varValue, metal_spacing, num_metal_layers, false); else if (varName.equalsIgnoreCase("metal_spacing_rule")) fillWizardArray(varValue, metal_spacing, num_metal_layers, true); else if (varName.equalsIgnoreCase("wide_metal_spacing_rules")) fillWizardWideArray(varValue, wide_metal_spacing); else if (varName.equalsIgnoreCase("via_size")) fillWizardArray(varValue, via_size, num_metal_layers-1, false); else if (varName.equalsIgnoreCase("via_size_rule")) fillWizardArray(varValue, via_size, num_metal_layers-1, true); else if (varName.equalsIgnoreCase("via_spacing")) fillWizardArray(varValue, via_inline_spacing, num_metal_layers-1, false); else if (varName.equalsIgnoreCase("via_spacing_rule")) fillWizardArray(varValue, via_inline_spacing, num_metal_layers-1, true); else if (varName.equalsIgnoreCase("via_array_spacing")) fillWizardArray(varValue, via_array_spacing, num_metal_layers-1, false); else if (varName.equalsIgnoreCase("via_array_spacing_rule")) fillWizardArray(varValue, via_array_spacing, num_metal_layers-1, true); else if (varName.equalsIgnoreCase("via_overhang_inline")) fillWizardArray(varValue, via_overhang, num_metal_layers-1, false); else if (varName.equalsIgnoreCase("via_overhang_inline_rule")) fillWizardArray(varValue, via_overhang, num_metal_layers-1, true); else if (varName.equalsIgnoreCase("metal_contacts_series")) fillContactSeries(varValue, metalContacts); else if (varName.equalsIgnoreCase("contacts_series")) fillContactSeries(varValue, otherContacts); else // Special layers if (varName.equalsIgnoreCase("extra_layers")) fillLayerSeries(varValue, extraLayers); else if (varName.equalsIgnoreCase("poly_antenna_ratio")) setPolyAntennaRatio(TextUtils.atof(varValue)); else if (varName.equalsIgnoreCase("metal_antenna_ratio")) metal_antenna_ratio = makeDoubleArray(varValue); else if (varName.equalsIgnoreCase("gds_diff_layer")) diff_layer.setGDSData(getGDSValuesFromString(varValue)); else if (varName.equalsIgnoreCase("gds_poly_layer")) poly_layer.setGDSData(getGDSValuesFromString(varValue)); else if (varName.equalsIgnoreCase("gds_nplus_layer")) nplus_layer.setGDSData(getGDSValuesFromString(varValue)); else if (varName.equalsIgnoreCase("gds_pplus_layer")) 
					pplus_layer.setGDSData(getGDSValuesFromString(varValue));
					else if (varName.equalsIgnoreCase("gds_nwell_layer")) nwell_layer.setGDSData(getGDSValuesFromString(varValue));
					else if (varName.equalsIgnoreCase("gds_contact_layer")) contact_layer.setGDSData(getGDSValuesFromString(varValue));
					else if (varName.equalsIgnoreCase("gds_metal_layer")) metal_layers = setGDSDataArray(varValue, num_metal_layers, "Metal-");
					else if (varName.equalsIgnoreCase("gds_via_layer")) via_layers = setGDSDataArray(varValue, num_metal_layers - 1, "Via-");
					else if (varName.equalsIgnoreCase("gds_marking_layer")) marking_layer.setGDSData(getGDSValuesFromString(varValue));
					else
					{
						WizardField wf = findWizardField(varName);
						if (wf != null)
						{
							fillRule(varValue, wf);
						} else
						{
							Job.getUserInterface().showErrorMessage("Unknown keyword '" + varName + "' on line " + lineReader.getLineNumber(),
								"Syntax Error In Technology File");
							break;
						}
					}
				}
			}
			lineReader.close();
		} catch (IOException e)
		{
			System.out.println("Error reading " + fileName);
			return false;
		}
		return true;
	}

	private WizardField findWizardField(String varName)
	{
		for (WizardField wf : extraVariables)
		{
			if (wf.name.equals(varName)) return wf;
		}
		return null;
	}

	private String stripQuotes(String str)
	{
		if (str.startsWith("\"") && str.endsWith("\""))
			return str.substring(1, str.length()-1);
		return str;
	}

	private LayerInfo [] setGDSDataArray(String str, int len, String extra)
	{
		LayerInfo [] foundArray = new LayerInfo[len];
		for(int i=0; i<len; i++) foundArray[i] = new LayerInfo(extra + (i+1));
		StringTokenizer parse = new StringTokenizer(str, "( \")", false);
		int count = 0;
		while (parse.hasMoreTokens())
		{
			if (count >= len)
			{
				System.out.println("More GDS values than metal layers in TechEditWizardData");
				break;
			} else
			{
				String value = parse.nextToken();
				// array delimiters must be discarded here because the GDS string may
				// contain "," for the pin/text definition ("," can't be used in the StringTokenizer)
				if (!value.equals(","))
					foundArray[count++].setGDSData(getGDSValuesFromString(value));
			}
		}
		return foundArray;
	}

	private double [] makeDoubleArray(String str)
	{
		WizardField [] foundArray = new WizardField[num_metal_layers];
		for(int i=0; i<num_metal_layers; i++) foundArray[i] = new WizardField();
		fillWizardArray(str, foundArray, num_metal_layers, false);
		double [] retArray = new double[foundArray.length];
		for(int i=0; i<foundArray.length; i++) retArray[i] = foundArray[i].value;
		return retArray;
	}

	private void fillWizardWideArray(String str, List<WideWizardField> wideList)
	{
		StringTokenizer parse = new StringTokenizer(str, "[]", false);
		int blocks = 0;
		WideWizardField tmp = new WideWizardField();

		while (parse.hasMoreTokens())
		{
			String value = parse.nextToken();
			if (value.equals(";")) continue; // end of line

			// first block: [value, maxW, minLen, rule name]
			if (blocks == 0)
			{
				StringTokenizer p = new StringTokenizer(value, ", ", false);
				int count = 0;
				while (p.hasMoreTokens())
				{
					String v = p.nextToken();
					switch (count)
					{
						case 0: // value
							tmp.value = Double.parseDouble(v); break;
						case 1: // maxW
							tmp.maxW = Double.parseDouble(v); break;
						case 2: // minLen
							tmp.minLen = Double.parseDouble(v); break;
						case 3: // rule name
							tmp.rule = stripQuotes(v); break;
						default:
							assert(false); // only 4 values
					}
					count++;
				}
				blocks++;
			} else
			{
				// layers involved
				StringTokenizer p = new StringTokenizer(value, ",", false);
				while (p.hasMoreTokens())
				{
					String s = p.nextToken();
					tmp.names.add(s);
				}
			}
		}
		wideList.add(tmp);
	}
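	/*
	 * A sketch of the input fillWizardArray() below accepts, inferred from its
	 * parsing loop (the literals here are hypothetical, not taken from a real
	 * techfile): with getRule false the entries are bare numbers, e.g. (170, 200);
	 * with getRule true they are quoted rule names, e.g. ("M1.W", "M2.W").
	 */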
	private void fillWizardArray(String str, WizardField [] fieldArray, int expectedLength, boolean getRule)
	{
		if (!str.startsWith("("))
		{
			Job.getUserInterface().showErrorMessage("Array does not start with '(' on " + str,
				"Syntax Error In Technology File");
			return;
		}

		int pos = 1;
		int index = 0;
		for(;;)
		{
			while (pos < str.length() && str.charAt(pos) == ' ') pos++;
			if (index >= fieldArray.length)
			{
//				Job.getUserInterface().showErrorMessage("Invalid metal index: " + index,
//					"Syntax Error In Technology File");
				return;
			}
			if (getRule)
			{
				if (str.charAt(pos) != '"')
				{
					Job.getUserInterface().showErrorMessage("Rule element does not start with quote on " + str,
						"Syntax Error In Technology File");
					return;
				}
				pos++;
				int end = pos;
				while (end < str.length() && str.charAt(end) != '"') end++;
				if (str.charAt(end) != '"')
				{
					Job.getUserInterface().showErrorMessage("Rule element does not end with quote on " + str,
						"Syntax Error In Technology File");
					return;
				}
				fieldArray[index++].rule = str.substring(pos, end);
				pos = end+1;
			} else
			{
				double v = TextUtils.atof(str.substring(pos));
				fieldArray[index++].value = v;
			}
			while (pos < str.length() && str.charAt(pos) != ',' && str.charAt(pos) != ')') pos++;
			if (str.charAt(pos) != ',') break;
			pos++;
		}
	}

	// fillRule
	private void fillRule(String str, WizardField... rules)
	{
		StringTokenizer parse = new StringTokenizer(str, "(,)", false);
		int count = 0;
		int pos = 0;

		while (parse.hasMoreTokens())
		{
			String value = parse.nextToken();
			switch (count)
			{
				case 0:
				case 2:
					rules[pos].value = Double.parseDouble(value);
					break;
				case 1:
				case 3:
					rules[pos].rule = value;
					break;
				default:
					assert(false); // at most two (value, rule) pairs
			}
			count++;
			if (count == 2) pos++;
		}
	}

	// fillLayerSeries
	private void fillLayerSeries(String str, List<LayerInfo> layersList)
	{
		StringTokenizer parse = new StringTokenizer(str, "()", false);

		while (parse.hasMoreTokens())
		{
			String value = parse.nextToken();
			if (!value.contains(",")) continue; // only white space

			// Sequence ("layer name", "GDS value")
			StringTokenizer p = new StringTokenizer(value, " \"", false);
			int itemCount = 0; // up to 3 items: layer name, GDS value and optional graphics template
			LayerInfo layer = null;

			while (p.hasMoreTokens())
			{
				String s = p.nextToken();
				if (s.startsWith(",")) continue; // skip the comma; it is not a tokenizer delimiter because of the color string
				switch (itemCount)
				{
					case 0:
						layer = new LayerInfo(s);
						layersList.add(layer);
						break;
					case 1:
						layer.setGDSData(getGDSValuesFromString(s));
						break;
					case 2:
						layer.setGraphicsTemplate(s);
						break;
					default:
						assert(false);
				}
				itemCount++;
			}
			assert(itemCount == 2 || itemCount == 3); // layer name and GDS value, plus optional graphics template
		}
	}
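	/*
	 * For reference, a sketch of the input fillContactSeries() below accepts.
	 * The names and numbers are hypothetical and this is inferred from the
	 * parsing code, not a formal specification:
	 *   [(0.5, "R.1", 0.5, "R.1")(0.2, "R.2", 0.2, "R.2") C{Metal-1,Poly}]
	 * Each (...) group carries one layer's overhang-X/Y values and rule names;
	 * each prefix{...} group lists the layer names of one contact to build,
	 * matched positionally against the (...) groups.
	 */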
	// to get general contact
	private void fillContactSeries(String str, Map<String,List<Contact>> contactMap)
	{
		StringTokenizer parse = new StringTokenizer(str, "[]", false);
		List<ContactNode> nodeList = new ArrayList<ContactNode>();

		while (parse.hasMoreTokens())
		{
			String value = parse.nextToken();
			if (value.equals(";")) continue; // end of line

			// checking the metal pair lists; the overhang values should be in by now
			if (value.contains("{"))
			{
				assert(nodeList.size() > 0);
				int index = value.indexOf("{");
				assert(index != -1); // it should come with a prefix name
				String prefix = value.substring(0, index);
				String v = value.substring(index);
				StringTokenizer p = new StringTokenizer(v, "{}", false);
				while (p.hasMoreTokens())
				{
					String pair = p.nextToken();
					// getting metal numbers {a,b,c}
					StringTokenizer n = new StringTokenizer(pair, ", ", false);
					// getting the layer names
					List<String> layerNames = new ArrayList<String>();
					while (n.hasMoreTokens())
					{
						String l = n.nextToken();
						layerNames.add(l);
					}
					assert (nodeList.size() == layerNames.size());
					Contact cont = new Contact(prefix);
					for (int i = 0; i < layerNames.size(); i++)
					{
						String name = layerNames.get(i);
						ContactNode tmp = nodeList.get(i);
						ContactNode node = new ContactNode(name, tmp.overX.value, tmp.overX.rule, tmp.overY.value, tmp.overY.rule);
						cont.layers.add(node);
					}
					String layer1 = layerNames.get(0);
					String layer2 = layerNames.get(1); // n/p plus regions should go at the end
					// always store them as lowLayer-highLayer when applicable
					if (layer1.compareToIgnoreCase(layer2) > 0) // layer1 name sorts second
					{
						String temp = layer1;
						layer1 = layer2;
						layer2 = temp;
					}
					String key = layer1 + "-" + layer2;
					List<Contact> l = contactMap.get(key);
					if (l == null)
					{
						l = new ArrayList<Contact>();
						contactMap.put(key, l);
					}
					l.add(cont);
				}
			} else
			{
				// syntax: A(overX, overXS, overY, overYS)(Layer2, overX, overXS, overY, overYS)
				// pair of layers found
				StringTokenizer p = new StringTokenizer(value, "()", false);
				while (p.hasMoreTokens())
				{
					String s = p.nextToken();
					// layer info
					int itemCount = 0; // 4 items: overhang X value, overhang X rule, overhang Y value, overhang Y rule
					StringTokenizer x = new StringTokenizer(s, ", ", false);
					double overX = 0, overY = 0;
					String overXS = null, overYS = null;

					while (x.hasMoreTokens() && itemCount < 4)
					{
						String item = x.nextToken();
						switch (itemCount)
						{
							case 0: // overhang X value
								overX = Double.valueOf(item); break;
							case 1: // overhang X rule name
								overXS = item; break;
							case 2: // overhang Y value
								overY = Double.valueOf(item); break;
							case 3: // overhang Y rule name
								overYS = item; break;
						}
						itemCount++;
					}
					assert(itemCount == 4);
					ContactNode node = new ContactNode("", overX, overXS, overY, overYS);
					nodeList.add(node);
				}
			}
		}
	}

	/************************************** EXPORT RAW NUMBERS TO DISK **************************************/

	void exportData()
	{
		String fileName = OpenFile.chooseOutputFile(FileType.TEXT, "Technology Wizard File", "Technology.txt");
		if (fileName == null) return;
		try
		{
			PrintWriter printWriter = new PrintWriter(new BufferedWriter(new FileWriter(fileName)));
			dumpNumbers(printWriter);
			printWriter.close();
		} catch (IOException e)
		{
			System.out.println("Error writing technology file"); // this method writes the wizard text file, not XML
			return;
		}
	}

	private void dumpNumbers(PrintWriter pw)
	{
		pw.print("#### Electric(tm) VLSI Design System, version ");
		if (User.isIncludeDateAndVersionInOutput())
		{
			pw.println(com.sun.electric.database.text.Version.getVersion());
		} else
		{
			pw.println();
		}
		pw.println("#### ");
		pw.println("#### Technology wizard data file");
		pw.println("####");
		pw.println("#### All dimensions in nanometers.");

		if (IOTool.isUseCopyrightMessage())
		{
			String str = IOTool.getCopyrightMessage();
			int start = 0;
			while (start < str.length())
			{
				int endPos = str.indexOf('\n', start);
				if (endPos < 0) endPos = str.length();
				String oneLine = str.substring(start, endPos);
				pw.println("#### " + oneLine);
				start = endPos+1;
			}
		}
		pw.println();
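		// What follows mirrors the syntax that importData() parses back in: '$' scalar
		// assignments, '@' per-layer arrays in parentheses, '#' comments, and a ';'
		// terminator on every assignment. A hypothetical two-metal excerpt:
		//   $tech_name = "example";
		//   $num_metal_layers = 2;
		//   @metal_width = (170, 200);
		//   @metal_width_rule = ("M1.W", "M2.W");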
pw.println("$tech_name = \"" + tech_name + "\";"); pw.println("$tech_description = \"" + tech_description + "\";"); pw.println("$num_metal_layers = " + num_metal_layers + ";"); pw.println("$psubstrate_process = " + pSubstrateProcess + ";"); pw.println("$horizontal_transistors = " + horizontalFlag + ";"); pw.println("$extra_info = " + extraInfoFlag + ";"); pw.println(); pw.println("## stepsize is minimum granularity that will be used as movement grid"); pw.println("## set to manufacturing grid or lowest common denominator with design rules"); pw.println("$stepsize = " + stepsize + ";"); pw.println(); pw.println("###### DIFFUSION RULES #####"); pw.println("$diff_width = " + TextUtils.formatDouble(diff_width.value) + ";"); pw.println("$diff_width_rule = \"" + diff_width.rule + "\";"); pw.println("$diff_poly_overhang = " + TextUtils.formatDouble(diff_poly_overhang.value) + "; # min. diff overhang from gate edge"); pw.println("$diff_poly_overhang_rule = \"" + diff_poly_overhang.rule + "\"; # min. diff overhang from gate edge"); pw.println("$diff_contact_overhang = " + TextUtils.formatDouble(diff_contact_overhang.value) + "; # min. diff overhang contact"); pw.println("$diff_contact_overhang_rule = \"" + diff_contact_overhang.rule + "\"; # min. diff overhang contact"); pw.println("$diff_spacing = " + TextUtils.formatDouble(diff_spacing.value) + ";"); pw.println("$diff_spacing_rule = \"" + diff_spacing.rule + "\";"); pw.println(); pw.println("###### POLY RULES #####"); pw.println("$poly_width = " + TextUtils.formatDouble(poly_width.value) + ";"); pw.println("$poly_width_rule = \"" + poly_width.rule + "\";"); pw.println("$poly_endcap = " + TextUtils.formatDouble(poly_endcap.value) + "; # min. poly gate extension from edge of diffusion"); pw.println("$poly_endcap_rule = \"" + poly_endcap.rule + "\"; # min. poly gate extension from edge of diffusion"); pw.println("$poly_spacing = " + TextUtils.formatDouble(poly_spacing.value) + ";"); pw.println("$poly_spacing_rule = \"" + poly_spacing.rule + "\";"); pw.println("$poly_diff_spacing = " + TextUtils.formatDouble(poly_diff_spacing.value) + "; # min. spacing between poly and diffusion"); pw.println("$poly_diff_spacing_rule = \"" + poly_diff_spacing.rule + "\"; # min. spacing between poly and diffusion"); pw.println("$poly_protection_spacing = " + TextUtils.formatDouble(poly_protection_spacing.value) + "; # min. spacing between poly and dummy poly"); pw.println("$poly_protection_spacing_rule = \"" + poly_protection_spacing.rule + "\"; # min. spacing between poly and dummy poly"); pw.println(); pw.println("###### GATE RULES #####"); pw.println("$gate_length = " + TextUtils.formatDouble(gate_length.value) + "; # min. transistor gate length"); pw.println("$gate_length_rule = \"" + gate_length.rule + "\"; # min. transistor gate length"); pw.println("$gate_width = " + TextUtils.formatDouble(gate_width.value) + "; # min. transistor gate width"); pw.println("$gate_width_rule = \"" + gate_width.rule + "\"; # min. transistor gate width"); pw.println("$gate_spacing = " + TextUtils.formatDouble(gate_spacing.value) + "; # min. gate to gate spacing on diffusion"); pw.println("$gate_spacing_rule = \"" + gate_spacing.rule + "\"; # min. gate to gate spacing on diffusion"); pw.println("$gate_contact_spacing = " + TextUtils.formatDouble(gate_contact_spacing.value) + "; # min. spacing from gate edge to contact inside diffusion"); pw.println("$gate_contact_spacing_rule = \"" + gate_contact_spacing.rule + "\"; # min. 
spacing from gate edge to contact inside diffusion"); pw.println(); pw.println("###### CONTACT RULES #####"); pw.println("$contact_size = " + TextUtils.formatDouble(contact_size.value) + ";"); pw.println("$contact_size_rule = \"" + contact_size.rule + "\";"); pw.println("$contact_spacing = " + TextUtils.formatDouble(contact_spacing.value) + ";"); pw.println("$contact_spacing_rule = \"" + contact_spacing.rule + "\";"); pw.println("$contact_array_spacing = " + TextUtils.formatDouble(contact_array_spacing.value) + ";"); pw.println("$contact_array_spacing_rule = \"" + contact_array_spacing.rule + "\";"); pw.println("$contact_metal_overhang_inline_only = " + TextUtils.formatDouble(contact_metal_overhang_inline_only.value) + "; # metal overhang when overhanging contact from two sides only"); pw.println("$contact_metal_overhang_inline_only_rule = \"" + contact_metal_overhang_inline_only.rule + "\"; # metal overhang when overhanging contact from two sides only"); pw.println("$contact_metal_overhang_all_sides = " + TextUtils.formatDouble(contact_metal_overhang_all_sides.value) + "; # metal overhang when surrounding contact"); pw.println("$contact_metal_overhang_all_sides_rule = \"" + contact_metal_overhang_all_sides.rule + "\"; # metal overhang when surrounding contact"); pw.println("$contact_poly_overhang = " + TextUtils.formatDouble(contact_poly_overhang.value) + "; # poly overhang contact, recommended value"); pw.println("$contact_poly_overhang_rule = \"" + contact_poly_overhang.rule + "\"; # poly overhang contact, recommended value"); pw.println("$polycon_diff_spacing = " + TextUtils.formatDouble(polycon_diff_spacing.value) + "; # spacing between poly-metal contact edge and diffusion"); pw.println("$polycon_diff_spacing_rule = \"" + polycon_diff_spacing.rule + "\"; # spacing between poly-metal contact edge and diffusion"); pw.println(); pw.println("###### WELL AND IMPLANT RULES #####"); pw.println("$nplus_width = " + TextUtils.formatDouble(nplus_width.value) + ";"); pw.println("$nplus_width_rule = \"" + nplus_width.rule + "\";"); pw.println("$nplus_overhang_diff = " + TextUtils.formatDouble(nplus_overhang_diff.value) + ";"); pw.println("$nplus_overhang_diff_rule = \"" + nplus_overhang_diff.rule + "\";"); pw.println("$nplus_overhang_strap = " + TextUtils.formatDouble(nplus_overhang_strap.value) + ";"); pw.println("$nplus_overhang_strap_rule = \"" + nplus_overhang_strap.rule + "\";"); pw.println("$nplus_overhang_poly = " + TextUtils.formatDouble(nplus_overhang_poly.value) + ";"); pw.println("$nplus_overhang_poly_rule = \"" + nplus_overhang_poly.rule + "\";"); pw.println("$nplus_spacing = " + TextUtils.formatDouble(nplus_spacing.value) + ";"); pw.println("$nplus_spacing_rule = \"" + nplus_spacing.rule + "\";"); pw.println(); pw.println("$pplus_width = " + TextUtils.formatDouble(pplus_width.value) + ";"); pw.println("$pplus_width_rule = \"" + pplus_width.rule + "\";"); pw.println("$pplus_overhang_diff = " + TextUtils.formatDouble(pplus_overhang_diff.value) + ";"); pw.println("$pplus_overhang_diff_rule = \"" + pplus_overhang_diff.rule + "\";"); pw.println("$pplus_overhang_strap = " + TextUtils.formatDouble(pplus_overhang_strap.value) + ";"); pw.println("$pplus_overhang_strap_rule = \"" + pplus_overhang_strap.rule + "\";"); pw.println("$pplus_overhang_poly = " + TextUtils.formatDouble(pplus_overhang_poly.value) + ";"); pw.println("$pplus_overhang_poly_rule = \"" + pplus_overhang_poly.rule + "\";"); pw.println("$pplus_spacing = " + TextUtils.formatDouble(pplus_spacing.value) + ";"); 
pw.println("$pplus_spacing_rule = \"" + pplus_spacing.rule + "\";"); pw.println(); pw.println("$nwell_width = " + TextUtils.formatDouble(nwell_width.value) + ";"); pw.println("$nwell_width_rule = \"" + nwell_width.rule + "\";"); pw.println("$nwell_overhang_diff_p = " + TextUtils.formatDouble(nwell_overhang_diff_p.value) + ";"); pw.println("$nwell_overhang_diff_rule_p = \"" + nwell_overhang_diff_p.rule + "\";"); pw.println("$nwell_overhang_diff_n = " + TextUtils.formatDouble(nwell_overhang_diff_n.value) + ";"); pw.println("$nwell_overhang_diff_rule_n = \"" + nwell_overhang_diff_n.rule + "\";"); pw.println("$nwell_spacing = " + TextUtils.formatDouble(nwell_spacing.value) + ";"); pw.println("$nwell_spacing_rule = \"" + nwell_spacing.rule + "\";"); pw.println(); pw.println("###### METAL RULES #####"); pw.print("@metal_width = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(metal_width[i].value)); } pw.println(");"); pw.print("@metal_width_rule = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print("\"" + metal_width[i].rule + "\""); } pw.println(");"); pw.print("@metal_spacing = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(metal_spacing[i].value)); } pw.println(");"); pw.print("@metal_spacing_rule = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print("\"" + metal_spacing[i].rule + "\""); } pw.println(");"); pw.println(); pw.println("###### VIA RULES #####"); pw.print("@via_size = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(via_size[i].value)); } pw.println(");"); pw.print("@via_size_rule = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print("\"" + via_size[i].rule + "\""); } pw.println(");"); pw.print("@via_spacing = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(via_inline_spacing[i].value)); } pw.println(");"); pw.print("@via_spacing_rule = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print("\"" + via_inline_spacing[i].rule + "\""); } pw.println(");"); pw.println(); pw.println("## \"sep2d\" spacing, close proximity via array spacing"); pw.print("@via_array_spacing = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(via_array_spacing[i].value)); } pw.println(");"); pw.print("@via_array_spacing_rule = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print("\"" + via_array_spacing[i].rule + "\""); } pw.println(");"); pw.print("@via_overhang_inline = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(via_overhang[i].value)); } pw.println(");"); pw.print("@via_overhang_inline_rule = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print("\"" + via_overhang[i].rule + "\""); } pw.println(");"); pw.println(); pw.println("###### ANTENNA RULES #####"); pw.println("$poly_antenna_ratio = " + TextUtils.formatDouble(poly_antenna_ratio) + ";"); pw.print("@metal_antenna_ratio = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print(TextUtils.formatDouble(metal_antenna_ratio[i])); } pw.println(");"); pw.println(); pw.println("###### GDS-II LAYERS #####"); pw.println("$gds_diff_layer = " + diff_layer + ";"); pw.println("$gds_poly_layer = " + poly_layer + ";"); 
pw.println("$gds_nplus_layer = " + nplus_layer + ";"); pw.println("$gds_pplus_layer = " + pplus_layer + ";"); pw.println("$gds_nwell_layer = " + nwell_layer + ";"); pw.println("$gds_contact_layer = " + contact_layer + ";"); pw.print("@gds_metal_layer = ("); for(int i=0; i<num_metal_layers; i++) { if (i > 0) pw.print(", "); pw.print(metal_layers[i]); } pw.println(");"); pw.print("@gds_via_layer = ("); for(int i=0; i<num_metal_layers-1; i++) { if (i > 0) pw.print(", "); pw.print(via_layers[i]); } pw.println(");"); pw.println(); pw.println("## Device marking layer"); pw.println("$gds_marking_layer = " + marking_layer + ";"); pw.println(); pw.println("# End of techfile"); } /************************************** WRITE XML FILE **************************************/ void writeXML() { String errorMessage = errorInData(); if (errorMessage != null) { Job.getUserInterface().showErrorMessage("ERROR: " + errorMessage, "Missing Technology Data"); return; } String suggestedName = getTechName() + ".xml"; String fileName = OpenFile.chooseOutputFile(FileType.XML, "Technology XML File", suggestedName); //"Technology.xml"); if (fileName == null) return; try { dumpXMLFile(fileName); } catch (IOException e) { System.out.println("Error writing XML file"); return; } } /** * Method to create the XML version of a PrimitiveNode representing a pin * @return */ private Xml.PrimitiveNodeGroup makeXmlPrimitivePin(Xml.Technology t, String name, double size, SizeOffset so, List<String> portNames, Xml.NodeLayer... list) { List<Xml.NodeLayer> nodesList = new ArrayList<Xml.NodeLayer>(list.length); List<Xml.PrimitivePort> nodePorts = new ArrayList<Xml.PrimitivePort>(); for (Xml.NodeLayer lb : list) { if (lb == null) continue; // in case the pwell layer off nodesList.add(lb); } // default uses the same name from the pin node if (portNames == null) { portNames = new ArrayList<String>(); portNames.add(name); } nodePorts.add(makeXmlPrimitivePort(name.toLowerCase(), 0, 180, 0, null, 0, -1, 0, 1, 0, -1, 0, 1, portNames)); Xml.PrimitiveNodeGroup n = makeXmlPrimitive(t.nodeGroups, name + "-Pin", PrimitiveNode.Function.PIN, size, size, 0, 0, so, nodesList, nodePorts, null, true); return n; } /** * Method to creat the XML version of a PrimitiveNode representing a contact * @return */ private Xml.PrimitiveNodeGroup makeXmlPrimitiveCon(List<Xml.PrimitiveNodeGroup> nodeGroups, String name, PrimitiveNode.Function function, double sizeX, double sizeY, SizeOffset so, List<String> portNames, Xml.NodeLayer... 
list) { List<Xml.NodeLayer> nodesList = new ArrayList<Xml.NodeLayer>(list.length); List<Xml.PrimitivePort> nodePorts = new ArrayList<Xml.PrimitivePort>(); for (Xml.NodeLayer lb : list) { if (lb == null) continue; // in case the pwell layer off nodesList.add(lb); } nodePorts.add(makeXmlPrimitivePort(name.toLowerCase(), 0, 180, 0, null, 0, -1, 0, 1, 0, -1, 0, 1, portNames)); return makeXmlPrimitive(nodeGroups, name + "-Con", function, sizeX, sizeY, 0, 0, so, nodesList, nodePorts, null, false); } /** * Method to create the XML version of a PrimitiveNode * @return */ private Xml.PrimitiveNodeGroup makeXmlPrimitive(List<Xml.PrimitiveNodeGroup> nodeGroups, String name, PrimitiveNode.Function function, double width, double height, double ppLeft, double ppBottom, SizeOffset so, List<Xml.NodeLayer> nodeLayers, List<Xml.PrimitivePort> nodePorts, PrimitiveNode.NodeSizeRule nodeSizeRule, boolean isArcsShrink) { Xml.PrimitiveNodeGroup ng = new Xml.PrimitiveNodeGroup(); ng.isSingleton = true; Xml.PrimitiveNode n = new Xml.PrimitiveNode(); n.name = name; n.function = function; ng.nodes.add(n); ng.shrinkArcs = isArcsShrink; // n.square = isSquare(); // n.canBeZeroSize = isCanBeZeroSize(); // n.wipes = isWipeOn1or2(); // n.lockable = isLockedPrim(); // n.edgeSelect = isEdgeSelect(); // n.skipSizeInPalette = isSkipSizeInPalette(); // n.notUsed = isNotUsed(); // n.lowVt = isNodeBitOn(PrimitiveNode.LOWVTBIT); // n.highVt = isNodeBitOn(PrimitiveNode.HIGHVTBIT); // n.nativeBit = isNodeBitOn(PrimitiveNode.NATIVEBIT); // n.od18 = isNodeBitOn(PrimitiveNode.OD18BIT); // n.od25 = isNodeBitOn(PrimitiveNode.OD25BIT); // n.od33 = isNodeBitOn(PrimitiveNode.OD33BIT); // PrimitiveNode.NodeSizeRule nodeSizeRule = getMinSizeRule(); // EPoint minFullSize = nodeSizeRule != null ? // EPoint.fromLambda(0.5*nodeSizeRule.getWidth(), 0.5*nodeSizeRule.getHeight()) : // EPoint.fromLambda(0.5*getDefWidth(), 0.5*getDefHeight()); // EPoint minFullSize = EPoint.fromLambda(0.5*width, 0.5*height); EPoint topLeft = EPoint.fromLambda(ppLeft, ppBottom + height); EPoint size = EPoint.fromLambda(width, height); double getDefWidth = width, getDefHeight = height; if (function == PrimitiveNode.Function.PIN && isArcsShrink) { // assert getNumPorts() == 1; // assert nodeSizeRule == null; // PrimitivePort pp = getPort(0); // assert pp.getLeft().getMultiplier() == -0.5 && pp.getRight().getMultiplier() == 0.5 && pp.getBottom().getMultiplier() == -0.5 && pp.getTop().getMultiplier() == 0.5; // assert pp.getLeft().getAdder() == -pp.getRight().getAdder() && pp.getBottom().getAdder() == -pp.getTop().getAdder(); // minFullSize = EPoint.fromLambda(ppLeft, ppBottom); } // DRCTemplate nodeSize = xmlRules.getRule(pnp.getPrimNodeIndexInTech(), DRCTemplate.DRCRuleType.NODSIZ); // SizeOffset so = getProtoSizeOffset(); if (so != null && (so.getLowXOffset() == 0 && so.getHighXOffset() == 0 && so.getLowYOffset() == 0 && so.getHighYOffset() == 0)) so = null; ERectangle base = calcBaseRectangle(so, nodeLayers, nodeSizeRule); ng.baseLX.value = base.getLambdaMinX(); ng.baseHX.value = base.getLambdaMaxX(); ng.baseLY.value = base.getLambdaMinY(); ng.baseHY.value = base.getLambdaMaxY(); // n.sizeOffset = so; // if (!minFullSize.equals(EPoint.ORIGIN)) // n.diskOffset = minFullSize; // if (so != null) { // EPoint p2 = EPoint.fromGrid( // minFullSize.getGridX() - ((so.getLowXGridOffset() + so.getHighXGridOffset()) >> 1), // minFullSize.getGridY() - ((so.getLowYGridOffset() + so.getHighYGridOffset()) >> 1)); // n.diskOffset.put(Integer.valueOf(1), minFullSize); // 
n.diskOffset.put(Integer.valueOf(2), p2); // n.diskOffset.put(Integer.valueOf(2), minFullSize); // } // n.defaultWidth.addLambda(DBMath.round(getDefWidth)); // - 2*minFullSize.getLambdaX()); // n.defaultHeight.addLambda(DBMath.round(getDefHeight)); // - 2*minFullSize.getLambdaY()); ERectangle baseRectangle = ERectangle.fromGrid(topLeft.getGridX(), topLeft.getGridY(), size.getGridX(), size.getGridY()); /* n.nodeBase = baseRectangle;*/ // List<Technology.NodeLayer> nodeLayers = Arrays.asList(getLayers()); // List<Technology.NodeLayer> electricalNodeLayers = nodeLayers; // if (getElectricalLayers() != null) // electricalNodeLayers = Arrays.asList(getElectricalLayers()); boolean isSerp = false; //getSpecialType() == PrimitiveNode.SERPTRANS; if (nodeLayers != null) ng.nodeLayers.addAll(nodeLayers); // int m = 0; // for (Technology.NodeLayer nld: electricalNodeLayers) { // int j = nodeLayers.indexOf(nld); // if (j < 0) { // n.nodeLayers.add(nld.makeXml(isSerp, minFullSize, false, true)); // continue; // } // while (m < j) // n.nodeLayers.add(nodeLayers.get(m++).makeXml(isSerp, minFullSize, true, false)); // n.nodeLayers.add(nodeLayers.get(m++).makeXml(isSerp, minFullSize, true, true)); // } // while (m < nodeLayers.size()) // n.nodeLayers.add(nodeLayers.get(m++).makeXml(isSerp, minFullSize, true, false)); // for (Iterator<PrimitivePort> pit = getPrimitivePorts(); pit.hasNext(); ) { // PrimitivePort pp = pit.next(); // n.ports.add(pp.makeXml(minFullSize)); // } ng.specialType = PrimitiveNode.NORMAL; // getSpecialType(); // if (getSpecialValues() != null) // n.specialValues = getSpecialValues().clone(); if (nodeSizeRule != null) { ng.nodeSizeRule = new Xml.NodeSizeRule(); ng.nodeSizeRule.width = nodeSizeRule.getWidth(); ng.nodeSizeRule.height = nodeSizeRule.getHeight(); ng.nodeSizeRule.rule = nodeSizeRule.getRuleName(); } // n.spiceTemplate = "";//getSpiceTemplate(); // ports ng.ports.addAll(nodePorts); nodeGroups.add(ng); return ng; } private ERectangle calcBaseRectangle(SizeOffset so, List<Xml.NodeLayer> nodeLayers, PrimitiveNode.NodeSizeRule nodeSizeRule) { long lx, hx, ly, hy; if (nodeSizeRule != null) { hx = DBMath.lambdaToGrid(0.5*nodeSizeRule.getWidth()); lx = -hx; hy = DBMath.lambdaToGrid(0.5*nodeSizeRule.getHeight()); ly = -hy; } else { lx = Long.MAX_VALUE; hx = Long.MIN_VALUE; ly = Long.MAX_VALUE; hy = Long.MIN_VALUE; for (int i = 0; i < nodeLayers.size(); i++) { Xml.NodeLayer nl = nodeLayers.get(i); long x, y; if (nl.representation == Technology.NodeLayer.BOX || nl.representation == Technology.NodeLayer.MULTICUTBOX) { x = DBMath.lambdaToGrid(nl.lx.value); lx = Math.min(lx, x); hx = Math.max(hx, x); x = DBMath.lambdaToGrid(nl.hx.value); lx = Math.min(lx, x); hx = Math.max(hx, x); y = DBMath.lambdaToGrid(nl.ly.value); ly = Math.min(ly, y); hy = Math.max(hy, y); y = DBMath.lambdaToGrid(nl.hy.value); ly = Math.min(ly, y); hy = Math.max(hy, y); } else { for (Technology.TechPoint p: nl.techPoints) { x = p.getX().getGridAdder(); lx = Math.min(lx, x); hx = Math.max(hx, x); y = p.getY().getGridAdder(); ly = Math.min(ly, y); hy = Math.max(hy, y); } } } } if (so != null) { lx += so.getLowXGridOffset(); hx -= so.getHighXGridOffset(); ly += so.getLowYGridOffset(); hy -= so.getHighYGridOffset(); } return ERectangle.fromGrid(lx, ly, hx - lx, hy - ly); } /** * Method to create the XML version of a ArcProto * @param name * @param function * @return */ private Xml.ArcProto makeXmlArc(Xml.Technology t, String name, com.sun.electric.technology.ArcProto.Function function, double ant, Xml.ArcLayer ... 
		arcLayers)
	{
		Xml.ArcProto a = new Xml.ArcProto();
		a.name = name;
		a.function = function;
		a.wipable = true;
//		a.curvable = false;
//		a.special = false;
//		a.notUsed = false;
//		a.skipSizeInPalette = false;
//		a.elibWidthOffset = getLambdaElibWidthOffset();
		a.extended = true;
		a.fixedAngle = true;
		a.angleIncrement = 90;
		a.antennaRatio = DBMath.round(ant);

		for (Xml.ArcLayer al: arcLayers)
		{
			if (al == null) continue; // in case the pwell layer is off
			a.arcLayers.add(al);
		}
		t.arcs.add(a);
		return a;
	}

	private Xml.Layer makeXmlLayer(List<Xml.Layer> layers, Map<Xml.Layer, WizardField> layer_width, String name,
		Layer.Function function, int extraf, EGraphics graph, WizardField width, boolean pureLayerNode,
		boolean pureLayerPortArc, String... portArcNames)
	{
		Xml.Layer l = makeXmlLayer(layers, name, function, extraf, graph, width.value, pureLayerNode, pureLayerPortArc, portArcNames);
		layer_width.put(l, width);
		return l;
	}

	/**
	 * Method to create the XML version of a Layer.
	 * @return
	 */
	private Xml.Layer makeXmlLayer(List<Xml.Layer> layers, String name, Layer.Function function, int extraf,
		EGraphics graph, double width, boolean pureLayerNode, boolean pureLayerPortArc, String... portArcNames)
	{
		Xml.Layer l = new Xml.Layer();
		l.name = name;
		l.function = function;
		l.extraFunction = extraf;
		graph = graph.withTransparencyMode(EGraphics.J3DTransparencyOption.NONE);
		graph = graph.withTransparencyFactor(1);
		l.desc = graph;
		l.thick3D = 1;
		l.height3D = 1;
		l.cif = "Not set"; //"C" + cifLetter + cifLetter;
		l.skill = name;
		l.resistance = 1;
		l.capacitance = 0;
		l.edgeCapacitance = 0;
//		if (layer.getPseudoLayer() != null)
//			l.pseudoLayer = layer.getPseudoLayer().getName();

		// if pureLayerNode is false, pureLayerPortArc must be false
		assert(pureLayerNode || !pureLayerPortArc);

		if (pureLayerNode)
		{
			l.pureLayerNode = new Xml.PureLayerNode();
			l.pureLayerNode.name = name + "-Node";
			l.pureLayerNode.style = Poly.Type.FILLED;
			l.pureLayerNode.size.addLambda(scaledValue(width));
			l.pureLayerNode.port = "Port_" + name;
/*			l.pureLayerNode.size.addRule(width.rule, 1);*/
			if (pureLayerPortArc)
			{
				if (portArcNames.length == 0) // only 1 port
					l.pureLayerNode.portArcs.add(name);
				else
				{
					for (String s : portArcNames)
						l.pureLayerNode.portArcs.add(s);
				}
			}
//			for (ArcProto ap: pureLayerNode.getPort(0).getConnections())
//			{
//				if (ap.getTechnology() != tech) continue;
//				l.pureLayerNode.portArcs.add(ap.getName());
//			}
		}
		layers.add(l);
		return l;
	}

	/**
	 * Method to create the XML version of NodeLayer
	 */
	private Xml.NodeLayer makeXmlNodeLayer(double lx, double hx, double ly, double hy, Xml.Layer lb, Poly.Type style)
	{
		return makeXmlNodeLayer(lx, hx, ly, hy, lb, style, true, true, 0);
	}

	/**
	 * Method to create the XML version of NodeLayer, either graphical or electrical.
	 * makeXmlNodeLayer is the default one where the layer is available in both modes.
	 */
	private Xml.NodeLayer makeXmlNodeLayer(double lx, int lxk, double hx, int hxk, double ly, int lyk, double hy, int hyk,
		Xml.Layer lb, Poly.Type style, boolean inLayers, boolean electricalLayers, int port)
	{
		Xml.NodeLayer nl = new Xml.NodeLayer();
		nl.layer = lb.name;
		nl.style = style;
		nl.portNum = port;
		nl.inLayers = inLayers;
		nl.inElectricalLayers = electricalLayers;
		nl.representation = Technology.NodeLayer.BOX;
		nl.lx.k = lxk; nl.hx.k = hxk; nl.ly.k = lyk; nl.hy.k = hyk;
		nl.lx.addLambda(-lx); nl.hx.addLambda(hx);
		nl.ly.addLambda(-ly); nl.hy.addLambda(hy);
		return nl;
	}

	/**
	 * Method to create the XML version of NodeLayer, either graphical or electrical.
	 * makeXmlNodeLayer is the default one where the layer is available in both modes.
	 */
	private Xml.NodeLayer makeXmlNodeLayer(double lx, double hx, double ly, double hy, Xml.Layer lb, Poly.Type style,
		boolean inLayers, boolean electricalLayers, int port)
	{
		return makeXmlNodeLayer(lx, -1, hx, 1, ly, -1, hy, 1, lb, style, inLayers, electricalLayers, port);
	}

	/**
	 * Method to create the default XML version of a MultiCut NodeLayer
	 * @return
	 */
	private Xml.NodeLayer makeXmlMulticut(Xml.Layer lb, double sizeRule, double sepRule, double sepRule2D)
	{
		return makeXmlMulticut(0, 0, 0, 0, lb, sizeRule, sepRule, sepRule2D);
	}

	/**
	 * Method to create the default XML version of a MultiCut NodeLayer
	 * @return
	 */
	private Xml.NodeLayer makeXmlMulticut(double lx, double hx, double ly, double hy, Xml.Layer lb, double sizeRule,
		double sepRule, double sepRule2D)
	{
		return makeXmlMulticut(lx, -1, hx, 1, ly, -1, hy, 1, lb, sizeRule, sepRule, sepRule2D);
	}

	/**
	 * Method to create the default XML version of a MultiCut NodeLayer
	 * @return
	 */
	private Xml.NodeLayer makeXmlMulticut(double lx, int lxk, double hx, int hxk, double ly, int lyk, double hy, int hyk,
		Xml.Layer lb, double sizeRule, double sepRule, double sepRule2D)
	{
		Xml.NodeLayer nl = new Xml.NodeLayer();
		nl.layer = lb.name;
		nl.style = Poly.Type.FILLED;
		nl.inLayers = nl.inElectricalLayers = true;
		nl.representation = Technology.NodeLayer.MULTICUTBOX;
		nl.lx.k = lxk; nl.hx.k = hxk; nl.ly.k = lyk; nl.hy.k = hyk;
		nl.lx.addLambda(-lx); nl.hx.addLambda(hx);
		nl.ly.addLambda(-ly); nl.hy.addLambda(hy);
//		nl.sizeRule = sizeRule;
		nl.sizex = sizeRule;
		nl.sizey = sizeRule;
		nl.sep1d = sepRule;
		nl.sep2d = sepRule2D;
		return nl;
	}

	/**
	 * Method to create the XML version of PrimitivePort
	 * @return New Xml.PrimitivePort
	 */
	private Xml.PrimitivePort makeXmlPrimitivePort(String name, int portAngle, int portRange, int portTopology,
		EPoint minFullSize, double lx, int slx, double hx, int shx, double ly, int sly, double hy, int shy, List<String> portArcs)
	{
		Xml.PrimitivePort ppd = new Xml.PrimitivePort();
		double lambdaX = (minFullSize != null) ? minFullSize.getLambdaX() : 0;
		double lambdaY = (minFullSize != null) ? minFullSize.getLambdaY() : 0;
		ppd.name = name;
		ppd.portAngle = portAngle;
		ppd.portRange = portRange;
		ppd.portTopology = portTopology;

		ppd.lx.k = slx;//-1; //getLeft().getMultiplier()*2;
		ppd.lx.addLambda(DBMath.round(lx + lambdaX*ppd.lx.k));
		ppd.hx.k = shx;//1; //getRight().getMultiplier()*2;
		ppd.hx.addLambda(DBMath.round(hx + lambdaX*ppd.hx.k));
		ppd.ly.k = sly;//-1; // getBottom().getMultiplier()*2;
		ppd.ly.addLambda(DBMath.round(ly + lambdaY*ppd.ly.k));
		ppd.hy.k = shy;//1; // getTop().getMultiplier()*2;
		ppd.hy.addLambda(DBMath.round(hy + lambdaY*ppd.hy.k));

		if (portArcs != null)
		{
			for (String s: portArcs)
			{
				ppd.portArcs.add(s);
			}
		}
		return ppd;
	}
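	/*
	 * Note on the k factors threaded through the helpers above: the convenience
	 * overloads pass (-1, 1, -1, 1), anchoring the low edge to the node's low side
	 * and the high edge to its high side; addLambda() then applies the lx/hx/ly/hy
	 * distances (negated for the low edges) as offsets from those anchors. This is
	 * a reading of the code above, not a specification.
	 */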
	/**
	 * To create zero, cross, aligned and squared contacts from the same set of rules
	 */
	private Xml.PrimitiveNodeGroup makeContactSeries(List<Xml.PrimitiveNodeGroup> nodeGroups, String composeName,
		double contSize, Xml.Layer conLayer, double spacing, double arraySpacing,
		double extLayer1, Xml.Layer layer1, double extLayer2, Xml.Layer layer2)
	{
		List<String> portNames = new ArrayList<String>();

		portNames.add(layer1.name);
		portNames.add(layer2.name);

		// align contact
		double hlaLong1 = DBMath.round(contSize/2 + extLayer1);
		double hlaLong2 = DBMath.round(contSize/2 + extLayer2);
		double longD = DBMath.isGreaterThan(extLayer1, extLayer2) ? extLayer1 : extLayer2;

		// long square contact. Standard ones
		return (makeXmlPrimitiveCon(nodeGroups, composeName, PrimitiveNode.Function.CONTACT, -1, -1, null,
			/*new SizeOffset(longD, longD, longD, longD),*/ portNames,
			makeXmlNodeLayer(hlaLong1, hlaLong1, hlaLong1, hlaLong1, layer1, Poly.Type.FILLED), // layer1
			makeXmlNodeLayer(hlaLong2, hlaLong2, hlaLong2, hlaLong2, layer2, Poly.Type.FILLED), // layer2
			makeXmlMulticut(conLayer, contSize, spacing, arraySpacing))); // contact
	}

	/**
	 * Leave as public for the regression tests.
	 * @param fileName
	 * @throws IOException
	 */
	public void dumpXMLFile(String fileName) throws IOException
	{
		Xml.Technology t = new Xml.Technology();

		t.techName = getTechName();
		t.shortTechName = getTechName();
		t.description = getTechDescription();
		t.minNumMetals = t.maxNumMetals = t.defaultNumMetals = getNumMetalLayers();
		t.scaleValue = getStepSize();
		t.scaleRelevant = true;
		t.resolutionValue = getResolution();
//		t.scaleRelevant = isScaleRelevant();
		t.defaultFoundry = "NONE";
		t.minResistance = 1.0;
		t.minCapacitance = 0.1;

		// menus
		t.menuPalette = new Xml.MenuPalette();
		t.menuPalette.numColumns = 3;

		/** RULES **/
		Xml.Foundry f = new Xml.Foundry();
		f.name = Foundry.Type.NONE.getName();
		t.foundries.add(f);

		// LAYER COLOURS
		Color [] metal_colour = new Color[] {
			new Color(0,150,255),   // cyan/blue
			new Color(148,0,211),   // purple
			new Color(255,215,0),   // yellow
			new Color(132,112,255), // mauve
			new Color(255,160,122), // salmon
			new Color(34,139,34),   // dull green
			new Color(178,34,34),   // dull red
			new Color(34,34,178),   // dull blue
			new Color(153,153,153), // light gray
			new Color(102,102,102)  // dark gray
		};
		Color poly_colour = new Color(255,155,192);    // pink
		Color diff_colour = new Color(107,226,96);     // light green
		Color via_colour = new Color(205,205,205);     // lighter gray
		Color contact_colour = new Color(100,100,100); // darker gray
		Color nplus_colour = new Color(224,238,224);
		Color pplus_colour = new Color(224,224,120);
		Color nwell_colour = new Color(140,140,140);

		// Five transparent colors: poly_colour, diff_colour, metal_colour[0->2]
		Color[] colorMap = {poly_colour, diff_colour, metal_colour[0], metal_colour[1], metal_colour[2]};
		for (int i = 0; i < colorMap.length; i++)
		{
			Color transparentColor = colorMap[i];
			t.transparentLayers.add(transparentColor);
		}

		// Layers
		List<Xml.Layer> metalLayers = new ArrayList<Xml.Layer>();
		List<Xml.Layer> dummyMetalLayers = new ArrayList<Xml.Layer>();
		List<Xml.Layer> exclusionMetalLayers = new ArrayList<Xml.Layer>();
		List<Xml.Layer> viaLayers = new ArrayList<Xml.Layer>();
		Map<Xml.Layer,WizardField> layer_width = new LinkedHashMap<Xml.Layer,WizardField>();
		int[] nullPattern = new int[] {0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
			0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000};
		int[] dexclPattern = new int[] {
			0x1010, //    X       X
			0x2020, //   X       X
			0x4040, //  X       X
			0x8080, // X       X
			0x4040, //  X       X
			0x2020, //   X       X
			0x1010, //    X       X
			0x0808, //     X       X
			0x1010, //    X       X
			0x2020, //   X       X
			0x4040, //  X       X
			0x8080, // X       X
			0x4040, //  X       X
			0x2020, //   X       X
			0x1010, //    X       X
			0x0808  //     X       X
		};

		for (int i = 0; i < num_metal_layers; i++)
		{
			// Adding the metal
			int metalNum = i + 1;
			double opacity = (75 - metalNum * 5)/100.0;
			int metLayHigh = i / 10;
			int metLayDig = i % 10;
			int r = metal_colour[metLayDig].getRed() * (10-metLayHigh) / 10;
			int g = metal_colour[metLayDig].getGreen() * (10-metLayHigh) / 10;
			int b = metal_colour[metLayDig].getBlue() * (10-metLayHigh) / 10;
			int tcol = 0;
			int[] pattern = null;

			switch (metLayDig)
			{
				case 0: tcol = 3; break;
				case 1: tcol = 4; break;
				case 2: tcol = 5; break;
				case 3: pattern
= new int[] {0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000}; break; case 4: pattern = new int[] { 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444}; break; case 5: pattern = new int[] { 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555}; break; case 6: pattern = new int[] { 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111}; break; case 7: pattern = new int[] { 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000}; break; case 8: pattern = new int[] {0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888}; // X X X X break; case 9: pattern = new int[] { 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555}; break; } boolean onDisplay = true, onPrinter = true; if (pattern == null) { pattern = nullPattern; onDisplay = false; onPrinter = false; } EGraphics graph = new EGraphics(onDisplay, onPrinter, null, tcol, r, g, b, opacity, true, pattern); Layer.Function fun = Layer.Function.getMetal(metalNum); if (fun == null) throw new IOException("invalid number of metals"); String metalName = "Metal-"+metalNum; Xml.Layer layer = makeXmlLayer(t.layers, layer_width, metalName, fun, 0, graph, metal_width[i], true, true); metalLayers.add(layer); if (getExtraInfoFlag()) { // dummy layers graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, false, nullPattern); layer = makeXmlLayer(t.layers, "DMY-"+metalName, Layer.Function.getDummyMetal(metalNum), 0, graph, 5*metal_width[i].value, true, false); dummyMetalLayers.add(layer); // exclusion layers for metals graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, true, dexclPattern); layer = makeXmlLayer(t.layers, "DEXCL-"+metalName, Layer.Function.getDummyExclMetal(i), 0, graph, 2*metal_width[i].value, true, false); exclusionMetalLayers.add(layer); } } // Vias for (int i = 0; i < num_metal_layers - 1; i++) { // 
Adding the metal int metalNum = i + 1; // adding the via int r = via_colour.getRed(); int g = via_colour.getGreen(); int b = via_colour.getBlue(); double opacity = 0.7; EGraphics graph = new EGraphics(false, false, null, 0, r, g, b, opacity, true, nullPattern); Layer.Function fun = Layer.Function.getContact(metalNum+1); //via contact starts with CONTACT2 if (fun == null) throw new IOException("invalid number of vias"); viaLayers.add(makeXmlLayer(t.layers, layer_width, "Via-"+metalNum, fun, Layer.Function.CONMETAL, graph, via_size[i], true, false)); } // Poly String polyN = poly_layer.name; EGraphics graph = new EGraphics(false, false, null, 1, 0, 0, 0, 1, true, nullPattern); Xml.Layer polyLayer = makeXmlLayer(t.layers, layer_width, polyN, Layer.Function.POLY1, 0, graph, poly_width, true, true); // PolyGate Xml.Layer polyGateLayer = makeXmlLayer(t.layers, layer_width, polyN+"Gate", Layer.Function.GATE, 0, graph, poly_width, true, false); // false for the port otherwise it won't find any type if (getExtraInfoFlag()) { // exclusion layer poly graph = new EGraphics(true, true, null, 1, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionPolyLayer = makeXmlLayer(t.layers, "DEXCL-"+polyN, Layer.Function.DEXCLPOLY1, 0, graph, 2*poly_width.value, true, false); makeLayerGDS(t, exclusionPolyLayer, "150/21"); } // PolyCon and DiffCon graph = new EGraphics(false, false, null, 0, contact_colour.getRed(), contact_colour.getGreen(), contact_colour.getBlue(), 0.5, true, nullPattern); // PolyCon Xml.Layer polyConLayer = makeXmlLayer(t.layers, layer_width, "Poly-Cut", Layer.Function.CONTACT1, Layer.Function.CONPOLY, graph, contact_size, true, false); // DiffCon Xml.Layer diffConLayer = makeXmlLayer(t.layers, layer_width, diff_layer.name+"-Cut", Layer.Function.CONTACT1, Layer.Function.CONDIFF, graph, contact_size, true, false); List<String> portNames = new ArrayList<String>(); // P-Diff and N-Diff graph = new EGraphics(false, false, null, 2, 0, 0, 0, 1, true, nullPattern); // N-Diff Xml.Layer diffNLayer = makeXmlLayer(t.layers, layer_width, "N-"+ diff_layer.name, Layer.Function.DIFFN, 0, graph, diff_width, true, true, "N-"+ diff_layer.name, "N-Well", "S-N-Well"); // P-Diff dd Xml.Layer diffPLayer = makeXmlLayer(t.layers, layer_width, "P-"+ diff_layer.name, Layer.Function.DIFFP, 0, graph, diff_width, true, true, "P-"+ diff_layer.name, "P-Well", "S-P-Well"); if (getExtraInfoFlag()) { // exclusion layer N/P diff graph = new EGraphics(true, true, null, 2, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionDiffPLayer = makeXmlLayer(t.layers, "DEXCL-P-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); Xml.Layer exclusionDiffNLayer = makeXmlLayer(t.layers, "DEXCL-N-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); makeLayerGDS(t, exclusionDiffPLayer, "150/20"); makeLayerGDS(t, exclusionDiffNLayer, "150/20"); } // NPlus and PPlus int [] patternSlash = new int[] { 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808, // X X 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808}; int [] patternBackSlash = new int[] { 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404, // X X 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404}; int[] patternDots = new int[] { 0x0202, // X X 0x0000, // 0x2020, // X X 
0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000}; // int[] patternDotsShift = new int[] { 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202}; // X X // NPlus graph = new EGraphics(true, true, null, 0, nplus_colour.getRed(), nplus_colour.getGreen(), nplus_colour.getBlue(), 1, true, patternSlash); Xml.Layer nplusLayer = makeXmlLayer(t.layers, layer_width, nplus_layer.name, Layer.Function.IMPLANTN, 0, graph, nplus_width, true, false); // PPlus graph = new EGraphics(true, true, null, 0, pplus_colour.getRed(), pplus_colour.getGreen(), pplus_colour.getBlue(), 1, true, patternDots); Xml.Layer pplusLayer = makeXmlLayer(t.layers, layer_width, pplus_layer.name, Layer.Function.IMPLANTP, 0, graph, pplus_width, true, false); // N-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternDotsShift); Xml.Layer nwellLayer = makeXmlLayer(t.layers, layer_width, nwell_layer.name, Layer.Function.WELLN, 0, graph, nwell_width, true, false); // P-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternBackSlash); Xml.Layer pwellLayer = makeXmlLayer(t.layers, layer_width, "P-Well", Layer.Function.WELLP, 0, graph, nwell_width, true, false); // DeviceMark graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer deviceMarkLayer = makeXmlLayer(t.layers, layer_width, "DeviceMark", Layer.Function.CONTROL, 0, graph, nplus_width, true, false); // Extra layers if (getExtraInfoFlag()) { for (LayerInfo info : extraLayers) { graph = null; // either color or template assert (info.graphicsTemplate == null || info.graphicsColor == null); if (info.graphicsTemplate != null) { // look for layer name and get its EGraphics for (Xml.Layer l : t.layers) { if (l.name.equals(info.graphicsTemplate)) { graph = l.desc; break; } } if (graph == null) System.out.println("No template layer " + info.graphicsTemplate + " found"); } else if (info.graphicsColor != null) { graph = new EGraphics(true, true, info.graphicsOutline, 0, info.graphicsColor.getRed(), info.graphicsColor.getGreen(), info.graphicsColor.getBlue(), 1, true, info.graphicsPattern); } if (graph == null) graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer layer = makeXmlLayer(t.layers, layer_width, info.name, Layer.Function.ART, 0, graph, nplus_width, true, false); makeLayerGDS(t, layer, String.valueOf(info)); } } // Palette elements should be added at the end so they will appear in groups PaletteGroup[] metalPalette = new PaletteGroup[num_metal_layers]; // write arcs // metal arcs for(int i=1; i<=num_metal_layers; i++) { double ant = (int)Math.round(metal_antenna_ratio[i-1]) | 200; PaletteGroup group = new PaletteGroup(); metalPalette[i-1] = group; group.addArc(makeXmlArc(t, "Metal-"+i, ArcProto.Function.getContact(i), ant, makeXmlArcLayer(metalLayers.get(i-1), metal_width[i-1]))); } /**************************** POLY Nodes/Arcs ***********************************************/ // poly arc double ant = (int)Math.round(poly_antenna_ratio) | 200; PaletteGroup polyGroup = new PaletteGroup(); polyGroup.addArc(makeXmlArc(t, polyLayer.name, ArcProto.Function.getPoly(1), ant, makeXmlArcLayer(polyLayer, 
poly_width))); // poly pin double hla = scaledValue(poly_width.value / 2); polyGroup.addPinOrResistor(makeXmlPrimitivePin(t, polyLayer.name, hla, null, // new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, polyLayer, Poly.Type.CROSSED)), null); // poly contact portNames.clear(); portNames.add(polyLayer.name); portNames.add(metalLayers.get(0).name); hla = scaledValue((contact_size.value /2 + contact_poly_overhang.value)); Xml.Layer m1Layer = metalLayers.get(0); double contSize = scaledValue(contact_size.value); double contSpacing = scaledValue(contact_spacing.value); double contArraySpacing = scaledValue(contact_array_spacing.value); double metal1Over = scaledValue(contact_size.value /2 + contact_metal_overhang_all_sides.value); // only for standard cases when getProtectionPoly() is false if (!getExtraInfoFlag()) { polyGroup.addElement(makeContactSeries(t.nodeGroups, polyLayer.name, contSize, polyConLayer, contSpacing, contArraySpacing, scaledValue(contact_poly_overhang.value), polyLayer, scaledValue(via_overhang[0].value), m1Layer), null); } /**************************** N/P-Diff Nodes/Arcs/Group ***********************************************/ PaletteGroup[] diffPalette = new PaletteGroup[2]; diffPalette[0] = new PaletteGroup(); diffPalette[1] = new PaletteGroup(); PaletteGroup[] wellPalette = new PaletteGroup[2]; wellPalette[0] = new PaletteGroup(); wellPalette[1] = new PaletteGroup(); // ndiff/pdiff pins hla = scaledValue((contact_size.value /2 + diff_contact_overhang.value)); double nsel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_diff.value); double psel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_diff.value); double nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_p.value); double nso = scaledValue(nwell_overhang_diff_p.value /*+ diff_contact_overhang.v*/); // valid for elements that have nwell layers double pso = (!pSubstrateProcess)?nso:scaledValue(nplus_overhang_diff.value/* + diff_contact_overhang.v*/); // ndiff/pdiff contacts String[] diffNames = {"P", "N"}; double[] sos = {nso, pso}; double[] sels = {psel, nsel}; Xml.Layer[] diffLayers = {diffPLayer, diffNLayer}; Xml.Layer[] plusLayers = {pplusLayer, nplusLayer}; // Active and poly contacts. They are defined first that the Full types for (Map.Entry<String,List<Contact>> e : otherContacts.entrySet()) { // generic contacts String name = null; for (Contact c : e.getValue()) { Xml.Layer ly = null, lx = null; Xml.Layer conLay = diffConLayer; PaletteGroup g = null; ContactNode metalLayer = c.layers.get(0); ContactNode otherLayer = c.layers.get(1); + String extraName = ""; if (!TextUtils.isANumber(metalLayer.layer)) // horizontal must be! { assert (TextUtils.isANumber(otherLayer.layer)); metalLayer = c.layers.get(1); otherLayer = c.layers.get(0); } int m1 = Integer.valueOf(metalLayer.layer); ly = metalLayers.get(m1-1); String layerName = otherLayer.layer; if (layerName.equals(diffLayers[0].name)) { lx = diffLayers[0]; g = diffPalette[0]; + extraName = "P"; } else if (layerName.equals(diffLayers[1].name)) { lx = diffLayers[1]; g = diffPalette[1]; + extraName = "N"; } else if (layerName.equals(polyLayer.name)) { lx = polyLayer; conLay = polyConLayer; g = polyGroup; +// extraName = "Poly"; // Poly as name is too long! 
} else assert(false); // it should not happen double h1x = scaledValue(contact_size.value /2 + metalLayer.overX.value); double h1y = scaledValue(contact_size.value /2 + metalLayer.overY.value); double h2x = scaledValue(contact_size.value /2 + otherLayer.overX.value); double h2y = scaledValue(contact_size.value /2 + otherLayer.overY.value); double longX = (Math.abs(metalLayer.overX.value - otherLayer.overX.value)); double longY = (Math.abs(metalLayer.overY.value - otherLayer.overY.value)); PrimitiveNode.Function func = PrimitiveNode.Function.CONTACT; // Xml.NodeLayer extraN = null; Xml.NodeLayer[] nodes = new Xml.NodeLayer[c.layers.size() + 1]; // all plus cut int count = 0; // cut nodes[count++] = makeXmlMulticut(conLay, contSize, contSpacing, contArraySpacing); // metal nodes[count++] = makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED); // layer1 // active or poly nodes[count++] = makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED); // layer2 Xml.Layer otherLayerPort = lx; for (int i = 2; i < c.layers.size(); i++) // rest of layers. Either select or well. { ContactNode node = c.layers.get(i); Xml.Layer lz = t.findLayer(node.layer); if ((lz == pwellLayer && lx == diffLayers[0]) || (lz == nwellLayer && lx == diffLayers[1])) // well contact { otherLayerPort = lz; -// name = ly.name + "-" + lz.name; // not clean if (lz == pwellLayer) { g = wellPalette[0]; func = getWellContactFunction(Technology.P_TYPE); + extraName = "PW"; // W for well } else // nwell { g = wellPalette[1]; func = getWellContactFunction(Technology.N_TYPE); + extraName = "NW"; // W for well } } if (pSubstrateProcess && lz == pwellLayer) continue; // skip this layer double h3x = scaledValue(contact_size.value /2 + node.overX.value); double h3y = scaledValue(contact_size.value /2 + node.overY.value); nodes[count++] = makeXmlNodeLayer(h3x, h3x, h3y, h3y, lz, Poly.Type.FILLED); // This assumes no well is defined double longXLocal = (Math.abs(node.overX.value - otherLayer.overX.value)); double longYLocal = (Math.abs(node.overY.value - otherLayer.overY.value)); if (DBMath.isGreaterThan(longXLocal, longX)) longX = longXLocal; if (DBMath.isGreaterThan(longYLocal, longY)) longY = longYLocal; } longX = scaledValue(longX); longY = scaledValue(longY); // port names now, after determining whether this is a diff or well contact portNames.clear(); // if (!pSubstrateProcess || otherLayerPort == pwellLayer) portNames.add(otherLayerPort.name); portNames.add(ly.name); // should always represent metal1 name = ly.name + "-" + otherLayerPort.name; // some primitives might not have a prefix. "-" should not be in the prefix to avoid // being displayed in the palette String p = (c.prefix == null || c.prefix.equals("")) ?
"" : c.prefix + "-"; g.addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, func, -1, -1, new SizeOffset(longX, longX, longY, longY), portNames, - nodes), c.prefix); // contact + nodes), p + extraName); // contact } } // ndiff/pdiff contact for (int i = 0; i < 2; i++) { portNames.clear(); portNames.add(diffLayers[i].name); portNames.add(m1Layer.name); String composeName = diffNames[i] + "-" + diff_layer.name; //Diff"; Xml.NodeLayer wellNode, wellNodePin; ArcProto.Function arcF; Xml.ArcLayer arcL; WizardField arcVal; if (i == Technology.P_TYPE) { wellNodePin = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED); wellNode = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED); arcF = ArcProto.Function.DIFFP; arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p); arcVal = pplus_overhang_diff; } else { wellNodePin = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED):null; wellNode = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED):null; arcF = ArcProto.Function.DIFFN; arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null; arcVal = nplus_overhang_diff; } PaletteGroup diffG = diffPalette[i]; // active arc diffG.addArc(makeXmlArc(t, composeName, arcF, 0, makeXmlArcLayer(diffLayers[i], diff_width), makeXmlArcLayer(plusLayers[i], diff_width, arcVal), arcL)); // active pin diffG.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla, new SizeOffset(sos[i], sos[i], sos[i], sos[i]), null, makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED), makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED), wellNodePin), null); // F stands for full (all layers) diffG.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, PrimitiveNode.Function.CONTACT, hla, hla, new SizeOffset(sos[i], sos[i], sos[i], sos[i]), portNames, makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // meta1 layer makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.FILLED), // select layer wellNode, // well layer makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-" + diffNames[i]); // contact } /**************************** N/P-Well Contacts ***********************************************/ nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_n.value); nso = scaledValue(/*diff_contact_overhang.v +*/ nwell_overhang_diff_n.value); // valid for elements that have nwell layers pso = (!pSubstrateProcess)?nso:scaledValue(/*diff_contact_overhang.v +*/ nplus_overhang_diff.value); double[] wellSos = {pso, nso}; Xml.Layer[] wellLayers = {pwellLayer, nwellLayer}; double nselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_strap.value); double pselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_strap.value); double[] wellSels = {pselW, nselW}; // nwell/pwell contact for (int i = 0; i < 2; i++) { String composeName = diffNames[i] + "-Well"; Xml.NodeLayer wellNodeLayer = null, wellNodePinLayer = null; PaletteGroup g = wellPalette[i]; PrimitiveNode.Function func = getWellContactFunction(i); Xml.ArcLayer arcL; WizardField arcVal; portNames.clear(); if (i == Technology.P_TYPE) { if (!pSubstrateProcess) { 
portNames.add(pwellLayer.name); wellNodePinLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED); wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED); } arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null; arcVal = pplus_overhang_diff; } else { portNames.add(nwellLayer.name); wellNodePinLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED); wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED); arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p); arcVal = nplus_overhang_diff; } portNames.add(m1Layer.name); // three layers arcs. This is the first port defined so it will be the default in the palette g.addArc(makeXmlArc(t, composeName, ArcProto.Function.WELL, 0, makeXmlArcLayer(diffLayers[i], diff_width), makeXmlArcLayer(plusLayers[i], diff_width, arcVal), arcL)); // simple arc. S for simple g.addArc(makeXmlArc(t, "S-"+composeName, ArcProto.Function.WELL, 0, makeXmlArcLayer(wellLayers[i], diff_width, nwell_overhang_diff_p))); // well pin List<String> arcNames = new ArrayList<String>(); arcNames.add(composeName); arcNames.add("S-"+composeName); g.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla, new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), arcNames, makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED), makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED), wellNodePinLayer), null); // well contact // F stands for full g.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, func, hla, hla, new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), portNames, makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // meta1 layer makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer makeXmlNodeLayer(wellSels[i], wellSels[i], wellSels[i], wellSels[i], plusLayers[i], Poly.Type.FILLED), // select layer wellNodeLayer, // well layer makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-"+diffNames[i] + "W"); // contact } /**************************** Metals Nodes/Arcs ***********************************************/ // Pins and contacts for(int i=1; i<num_metal_layers; i++) { hla = scaledValue(metal_width[i-1].value / 2); Xml.Layer lb = metalLayers.get(i-1); Xml.Layer lt = metalLayers.get(i); PaletteGroup group = metalPalette[i-1]; // structure created by the arc definition // Pin bottom metal group.addPinOrResistor(makeXmlPrimitivePin(t, lb.name, hla, null, //new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, lb, Poly.Type.CROSSED)), null); if (i == num_metal_layers - 1) // last pin! 
{ metalPalette[i].addPinOrResistor(makeXmlPrimitivePin(t, lt.name, hla, null, //new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, lt, Poly.Type.CROSSED)), null); } if (!getExtraInfoFlag()) { // original contact Square // via Xml.Layer via = viaLayers.get(i-1); double viaSize = scaledValue(via_size[i-1].value); double viaSpacing = scaledValue(via_inline_spacing[i-1].value); double viaArraySpacing = scaledValue(via_array_spacing[i-1].value); String name = lb.name + "-" + lt.name; double longDist = scaledValue(via_overhang[i-1].value); group.addElement(makeContactSeries(t.nodeGroups, name, viaSize, via, viaSpacing, viaArraySpacing, longDist, lt, longDist, lb), null); } } // metal contacts for (Map.Entry<String,List<Contact>> e : metalContacts.entrySet()) { // generic contacts for (Contact c : e.getValue()) { // We know those layer names are numbers! assert(c.layers.size() == 2); ContactNode verticalLayer = c.layers.get(0); ContactNode horizontalLayer = c.layers.get(1); int i = Integer.valueOf(verticalLayer.layer); int j = Integer.valueOf(horizontalLayer.layer); Xml.Layer ly = metalLayers.get(i-1); Xml.Layer lx = metalLayers.get(j-1); String name = (j>i)?ly.name + "-" + lx.name:lx.name + "-" + ly.name; int via = (j>i)?i:j; double metalContSize = scaledValue(via_size[via-1].value); double spacing = scaledValue(via_inline_spacing[via-1].value); double arraySpacing = scaledValue(via_array_spacing[via-1].value); Xml.Layer metalConLayer = viaLayers.get(via-1); double h1x = scaledValue(via_size[via-1].value /2 + verticalLayer.overX.value); double h1y = scaledValue(via_size[via-1].value /2 + verticalLayer.overY.value); double h2x = scaledValue(via_size[via-1].value /2 + horizontalLayer.overX.value); double h2y = scaledValue(via_size[via-1].value /2 + horizontalLayer.overY.value); // double longX = scaledValue(DBMath.isGreaterThan(verticalLayer.overX.v, horizontalLayer.overX.v) ? verticalLayer.overX.v : horizontalLayer.overX.v); // double longY = scaledValue(DBMath.isGreaterThan(verticalLayer.overY.v, horizontalLayer.overY.v) ? verticalLayer.overY.v : horizontalLayer.overY.v); double longX = scaledValue(Math.abs(verticalLayer.overX.value - horizontalLayer.overX.value)); double longY = scaledValue(Math.abs(verticalLayer.overY.value - horizontalLayer.overY.value)); portNames.clear(); portNames.add(lx.name); portNames.add(ly.name); // some primitives might not have prefix. "-" should not be in the prefix to avoid // being displayed in the palette String p = (c.prefix == null || c.prefix.equals("")) ? 
"" : c.prefix + "-"; metalPalette[via-1].addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, PrimitiveNode.Function.CONTACT, -1, -1, new SizeOffset(longX, longX, longY, longY), portNames, makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED), // layer1 makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED), // layer2 makeXmlMulticut(metalConLayer, metalContSize, spacing, arraySpacing)), c.prefix); // contact } } /**************************** Transistors ***********************************************/ /** Transistors **/ // write the transistors List<Xml.NodeLayer> nodesList = new ArrayList<Xml.NodeLayer>(); List<Xml.PrimitivePort> nodePorts = new ArrayList<Xml.PrimitivePort>(); EPoint minFullSize = null; //EPoint.fromLambda(0, 0); // default zero horizontalFlag PaletteGroup[] transPalette = new PaletteGroup[2]; for(int i = 0; i < 2; i++) { String name; double selecty = 0, selectx = 0; Xml.Layer wellLayer = null, activeLayer, selectLayer; double sox = 0, soy = 0; double impx = scaledValue((gate_width.value)/2); double impy = scaledValue((gate_length.value +diff_poly_overhang.value *2)/2); double nwell_overhangX = 0, nwell_overhangY = 0; PaletteGroup g = new PaletteGroup(); transPalette[i] = g; double protectDist = scaledValue(poly_protection_spacing.value); double extraSelX = 0, extraSelY = 0; PrimitiveNode.Function func = null, prFunc = null; if (i==Technology.P_TYPE) { name = "P"; nwell_overhangY = nwell_overhangX = nwell_overhang_diff_n.value; wellLayer = nwellLayer; activeLayer = diffPLayer; selectLayer = pplusLayer; extraSelX = pplus_overhang_poly.value; extraSelY = pplus_overhang_diff.value; func = PrimitiveNode.Function.TRAPMOS; prFunc = PrimitiveNode.Function.RESPPOLY; } else { name = "N"; activeLayer = diffNLayer; selectLayer = nplusLayer; extraSelX = nplus_overhang_poly.value; extraSelY = nplus_overhang_diff.value; func = PrimitiveNode.Function.TRANMOS; prFunc = PrimitiveNode.Function.RESNPOLY; if (!pSubstrateProcess) { nwell_overhangY = nwell_overhangX = nwell_overhang_diff_p.value; wellLayer = pwellLayer; } else { nwell_overhangX = poly_endcap.value +extraSelX; nwell_overhangY = extraSelY; } } selectx = scaledValue(gate_width.value /2+poly_endcap.value +extraSelX); selecty = scaledValue(gate_length.value /2+diff_poly_overhang.value +extraSelY); // Using P values in transistors double wellx = scaledValue((gate_width.value /2+nwell_overhangX)); double welly = scaledValue((gate_length.value /2+diff_poly_overhang.value +nwell_overhangY)); sox = scaledValue(nwell_overhangX); soy = scaledValue(diff_poly_overhang.value +nwell_overhangY); if (DBMath.isLessThan(wellx, selectx)) { sox = scaledValue(poly_endcap.value +extraSelX); wellx = selectx; } if (DBMath.isLessThan(welly, selecty)) { soy = scaledValue(diff_poly_overhang.value +extraSelY); welly = selecty; } nodesList.clear(); nodePorts.clear(); portNames.clear(); // Gate layer Electrical double gatey = scaledValue(gate_length.value /2); double gatex = impx; // Poly layers // left electrical double endPolyx = scaledValue((gate_width.value +poly_endcap.value *2)/2); double endPolyy = gatey; double endLeftOrRight = -impx; // for horizontal transistors. Default double endTopOrBotton = endPolyy; // for horizontal transistors. 
Default double diffX = 0, diffY = scaledValue(gate_length.value /2+gate_contact_spacing.value +contact_size.value /2); // impy double xSign = 1, ySign = -1; double polyX = endPolyx, polyY = 0; if (!horizontalFlag) // swap the numbers to get vertical transistors { double tmp; tmp = impx; impx = impy; impy = tmp; tmp = wellx; wellx = welly; welly = tmp; tmp = sox; sox = soy; soy = tmp; tmp = selectx; selectx = selecty; selecty = tmp; tmp = gatex; gatex = gatey; gatey = tmp; tmp = endPolyx; endPolyx = endPolyy; endPolyy = tmp; tmp = diffX; diffX = diffY; diffY = tmp; tmp = polyX; polyX = polyY; polyY = tmp; tmp = xSign; xSign = ySign; ySign = tmp; endLeftOrRight = endPolyx; endTopOrBotton = -impx; } // Well layer Xml.NodeLayer xTranWellLayer = null; if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } // Active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, impy, activeLayer, Poly.Type.FILLED, true, false, -1)); // electrical active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, 0, activeLayer, Poly.Type.FILLED, false, true, 3)); // bottom nodesList.add(makeXmlNodeLayer(impx, impx, 0, impy, activeLayer, Poly.Type.FILLED, false, true, 1)); // top // Diff port portNames.clear(); portNames.add(activeLayer.name); Xml.PrimitivePort diffTopPort = makeXmlPrimitivePort("diff-top", 90, 90, 1, minFullSize, diffX, -1, diffX, 1, diffY, 1, diffY, 1, portNames); // bottom port Xml.PrimitivePort diffBottomPort = makeXmlPrimitivePort("diff-bottom", 270, 90, 2, minFullSize, xSign*diffX, -1, xSign*diffX, 1, ySign*diffY, -1, ySign*diffY, -1, portNames); // Electric layers // Gate layer Electrical nodesList.add(makeXmlNodeLayer(gatex, gatex, gatey, gatey, polyGateLayer, Poly.Type.FILLED, false, true, -1)); // Poly layers // left electrical nodesList.add(makeXmlNodeLayer(endPolyx, endLeftOrRight, endPolyy, endTopOrBotton, polyLayer, Poly.Type.FILLED, false, true, 0)); // right electrical nodesList.add(makeXmlNodeLayer(endLeftOrRight, endPolyx, endTopOrBotton, endPolyy, polyLayer, Poly.Type.FILLED, false, true, 2)); // non-electrical poly (just one poly layer) nodesList.add(makeXmlNodeLayer(endPolyx, endPolyx, endPolyy, endPolyy, polyLayer, Poly.Type.FILLED, true, false, -1)); // Poly port portNames.clear(); portNames.add(polyLayer.name); Xml.PrimitivePort polyLeftPort = makeXmlPrimitivePort("poly-left", 180, 90, 0, minFullSize, ySign*polyX, -1, ySign*polyX, -1, xSign*polyY, -1, xSign*polyY, 1, portNames); // right port Xml.PrimitivePort polyRightPort = makeXmlPrimitivePort("poly-right", 0, 180, 0, minFullSize, polyX, 1, polyX, 1, polyY, -1, polyY, 1, portNames); // Select layer Xml.NodeLayer xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); //One (undocumented) requirement of transistors is that the ports must appear in the //order: Poly-left, Diff-top, Poly-right, Diff-bottom. This requirement is //because of the methods Technology.getTransistorGatePort(), //Technology.getTransistorAltGatePort(), Technology.getTransistorSourcePort(), //and Technology.getTransistorDrainPort(). 
// diff-top = 1, diff-bottom = 2, polys=0 // ports in the correct order: Poly-left, Diff-top, Poly-right, Diff-bottom nodePorts.add(polyLeftPort); nodePorts.add(diffTopPort); nodePorts.add(polyRightPort); nodePorts.add(diffBottomPort); // Standard Transistor Xml.PrimitiveNodeGroup n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name); // Extra transistors which don't have select nor well // Extra protection poly. No ports are necessary. if (getExtraInfoFlag()) { // removing well and select for simplicity // nodesList.remove(xTranSelLayer); // nodesList.remove(xTranWellLayer); // // new sox and soy // sox = scaledValue(poly_endcap.v); // soy = scaledValue(diff_poly_overhang.v); // n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", PrimitiveNode.Function.TRANMOS, 0, 0, 0, 0, // new SizeOffset(sox, sox, soy, soy), nodesListW, nodePorts, null, false); // g.addElement(n); /*************************************/ // Short transistors // Adding extra transistors whose select and well are aligned with poly along the X axis nodesList.remove(xTranSelLayer); double shortSelectX = scaledValue(gate_width.value /2+poly_endcap.value); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); double shortSox = sox; shortSox = scaledValue(poly_endcap.value); if (wellLayer != null) { nodesList.remove(xTranWellLayer); xTranWellLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-S"); /*************************************/ // Short transistors with VTH and VTL double vthlx = scaledValue(gate_width.value /2+vthl_diff_overhang.value); double vthly = scaledValue(gate_length.value /2+ vthl_poly_overhang.value); // VTH Transistor String tmp = "VTH-" + name; Xml.Layer vthLayer = t.findLayer(tmp); Xml.NodeLayer nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED); nodesList.add(nl); n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, tmp + "-S"); // VTL Transistor nodesList.remove(nl); tmp = "VTL-" + name; vthLayer = t.findLayer(tmp); nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED); nodesList.add(nl); n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, tmp + "-S"); /*************************************/ // Transistors with extra polys // different select for those with extra protection layers nodesList.remove(xTranSelLayer); double endOfProtectionY = gate_length.value + poly_protection_spacing.value; double selectExtraY = scaledValue(gate_length.value /2 + endOfProtectionY + extraSelX); // actually is extraSelX because of the poly distance! xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selectExtraY, selectExtraY, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // not sure which condition to apply. 
It doesn't apply nwell_overhang_diff due to the extra poly if (DBMath.isLessThan(welly, selectExtraY)) { welly = selectExtraY; soy = scaledValue(endOfProtectionY + extraSelX); } if (wellLayer != null) { nodesList.remove(xTranWellLayer); xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } if (!horizontalFlag) { System.out.println("Not working with !horizontal"); assert(false); } portNames.clear(); portNames.add(polyLayer.name); // bottom or left Xml.NodeLayer bOrL = (makeXmlNodeLayer(gatex, gatex, DBMath.round((protectDist + 3*endPolyy)), -DBMath.round(endPolyy + protectDist), polyLayer, Poly.Type.FILLED, true, false, -1/*3*/)); // port 3 for left/bottom extra poly lb=left bottom // Adding left nodesList.add(bOrL); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-B", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-B"); // top or right Xml.NodeLayer tOrR = (makeXmlNodeLayer(gatex, gatex, -DBMath.round(endPolyy + protectDist), DBMath.round((protectDist + 3*endPolyy)), polyLayer, Poly.Type.FILLED, true, false, -1/*4*/)); // port 4 for right/top extra poly rt=right top // Adding both nodesList.add(tOrR); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-TB", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-TB"); // Adding right nodesList.remove(bOrL); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-T", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name +"-T"); /*************************************/ // Short transistors woth OD18 double od18x = scaledValue(gate_od18_width.value /2+od18_diff_overhang[0].value); double od18y = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +od18_diff_overhang[1].value); nodePorts.clear(); nodesList.clear(); prepareTransistor(gate_od18_width.value, gate_od18_length.value, poly_endcap.value, diff_poly_overhang.value, gate_contact_spacing.value, contact_size.value, activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts); // OD18 Xml.Layer od18Layer = t.findLayer("OD_18"); nodesList.add(makeXmlNodeLayer(od18x, od18x, od18y, od18y, od18Layer, Poly.Type.FILLED)); // adding short select shortSelectX = scaledValue(gate_od18_width.value /2+poly_endcap.value); selecty = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +extraSelY); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // adding well if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(od18x, od18x, od18y, od18y, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } sox = scaledValue(od18_diff_overhang[0].value); soy = scaledValue(diff_poly_overhang.value +od18_diff_overhang[1].value); n = makeXmlPrimitive(t.nodeGroups, "OD18-" + name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, "18-" + name + "-S"); /*************************************/ // Short transistors with native if (i==Technology.N_TYPE) { double ntx = scaledValue(gate_nt_width.value /2+nt_diff_overhang.value); double nty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +nt_diff_overhang.value); nodePorts.clear(); nodesList.clear(); prepareTransistor(gate_nt_width.value, gate_nt_length.value, poly_nt_endcap.value, 
diff_poly_overhang.value, gate_contact_spacing.value, contact_size.value, activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts); // NT-N Xml.Layer ntLayer = t.findLayer("NT-N"); nodesList.add(makeXmlNodeLayer(ntx, ntx, nty, nty, ntLayer, Poly.Type.FILLED)); // adding short select shortSelectX = scaledValue(gate_nt_width.value /2+poly_nt_endcap.value); selecty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +extraSelY); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // adding well if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(ntx, ntx, nty, nty, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } sox = scaledValue(poly_nt_endcap.value); soy = scaledValue(diff_poly_overhang.value +nt_diff_overhang.value); n = makeXmlPrimitive(t.nodeGroups, "NT-" + name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, "NT-" + name + "-S"); } /*************************************/ // Poly Resistors nodesList.clear(); nodePorts.clear(); WizardField polyRL = findWizardField("poly_resistor_length"); WizardField polyRW = findWizardField("poly_resistor_width"); WizardField rpoS = findWizardField("rpo_contact_spacing"); WizardField rpoODPolyEx = findWizardField("rpo_odpoly_overhang"); WizardField rhOverhang = findWizardField("rh_odpoly_overhang"); double resistorSpacing = contact_array_spacing.value; // using array value to guarantee proper spacing in nD cases // poly double soxNoScaled = (rpoS.value + contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value); double halfTotalL = scaledValue(polyRL.value /2 + soxNoScaled); double halfTotalW = scaledValue(polyRW.value /2); nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, polyLayer, Poly.Type.FILLED, true, true, -1)); // RPO Xml.Layer rpoLayer = t.findLayer("RPO"); double rpoY = scaledValue(polyRW.value /2 + rpoODPolyEx.value); double rpoX = scaledValue(polyRL.value /2); nodesList.add(makeXmlNodeLayer(rpoX, rpoX, rpoY, rpoY, rpoLayer, Poly.Type.FILLED, true, true, -1)); // left cuts double cutDistance = scaledValue(rpoS.value + polyRL.value /2); // M1 and Poly overhang will be the same for now // double absVal = (contact_poly_overhang.v - via_overhang[0].v); double m1Distance = cutDistance - scaledValue(contact_poly_overhang.value); double m1Y = scaledValue(polyRW.value /2); // - absVal); double m1W = scaledValue(2 * contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value); double cutSizeHalf = scaledValue(contact_size.value /2); double cutEnd = cutDistance+contSize; double cutSpacing = scaledValue(resistorSpacing); double cutEnd2 = cutEnd+contSize+cutSpacing; portNames.clear(); portNames.add(m1Layer.name); // left port Xml.PrimitivePort port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize, -(cutEnd + cutSpacing), -1, -cutEnd, -1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames); nodePorts.add(port); // right port port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize, cutEnd, 1, (cutEnd + cutSpacing), 1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames); nodePorts.add(port); // metal left nodesList.add(makeXmlNodeLayer((m1Distance + m1W), -1, -m1Distance, -1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 0)); // right metal nodesList.add(makeXmlNodeLayer(-m1Distance, 1, (m1Distance + m1W), 1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 1)); // select double 
selectY = scaledValue(polyRW.value /2 + rhOverhang.value); double selectX = scaledValue(polyRL.value /2 + soxNoScaled + extraSelX); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, selectLayer, Poly.Type.FILLED, true, true, -1)); // RH Xml.Layer rhLayer = t.findLayer("RH"); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rhLayer, Poly.Type.FILLED, true, true, -1)); // RPDMY Xml.Layer rPLayer = t.findLayer("RPDMY"); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rPLayer, Poly.Type.FILLED, true, true, -1)); // cuts nodesList.add(makeXmlMulticut(cutEnd2, -1, -cutDistance, -1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing)); nodesList.add(makeXmlMulticut(-cutDistance, 1, cutEnd2, 1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing)); sox = scaledValue(soxNoScaled + extraSelX); soy = scaledValue(rpoODPolyEx.value); n = makeXmlPrimitive(t.nodeGroups, name + "-Poly-RPO-Resistor", prFunc, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addPinOrResistor(n, name + "-RPoly"); /*************************************/ // Well Resistors nodesList.clear(); nodePorts.clear(); WizardField wellRL = findWizardField("well_resistor_length"); WizardField wellRW = findWizardField("well_resistor_width"); WizardField rpoSelO = findWizardField("rpo_select_overlap"); // F WizardField rpoCoS = findWizardField("rpo_co_space_in_nwrod"); // G WizardField coNwrodO = findWizardField("co_nwrod_overhang"); // E WizardField odNwrodO = findWizardField("od_nwrod_overhang"); // D // Total values define RPO dimensions double cutEndNoScaled = /*F*/rpoSelO.value + /*G*/rpoCoS.value; double cutSpacingNoScaled = /*2xCut + spacing*/resistorSpacing + 2*contact_size.value; double activeXNoScaled = /*F+G*/cutEndNoScaled + /*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value + /*D*/odNwrodO.value; soxNoScaled = activeXNoScaled + rpoODPolyEx.value; double soyNoScaled = /*D*/odNwrodO.value + rpoODPolyEx.value; halfTotalL = scaledValue(wellRL.value /2 + soxNoScaled); halfTotalW = scaledValue(wellRW.value /2 + soyNoScaled); double activeWX = scaledValue(activeXNoScaled); double activeWY = scaledValue(wellRW.value /2 + /*D*/odNwrodO.value); // rpo. 
It has two holes nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, rpoLayer, Poly.Type.FILLED, true, true, -1)); // active nodesList.add(makeXmlNodeLayer(activeWX, activeWX, activeWY, activeWY, activeLayer, Poly.Type.FILLED, true, true, -1)); // well double halfW = scaledValue(wellRW.value /2); double halfWellL = scaledValue(wellRL.value /2+/*F+G*/cutEndNoScaled+/*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value); if (i==Technology.N_TYPE) { nodesList.add(makeXmlNodeLayer(halfWellL, halfWellL, halfW, halfW, nwellLayer, Poly.Type.FILLED, true, true, -1)); } // NWDMY-LVS double halfL = scaledValue(wellRL.value /2); Xml.Layer nwdmyLayer = t.findLayer("NWDMY-LVS"); nodesList.add(makeXmlNodeLayer(halfL, halfL, halfTotalW, halfTotalW, nwdmyLayer, Poly.Type.FILLED, true, true, -1)); cutEnd = scaledValue(wellRL.value /2+cutEndNoScaled); cutSpacing = scaledValue(cutSpacingNoScaled); // Metal1 m1Distance = scaledValue(wellRL.value /2 + /*F*/rpoSelO.value); // metal left nodesList.add(makeXmlNodeLayer(halfWellL, -1, -m1Distance, -1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 0)); // right metal nodesList.add(makeXmlNodeLayer(-m1Distance, 1, halfWellL, 1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 1)); // left port port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize, -(cutEnd + cutSpacing), -1, -cutEnd, -1, -halfW, -1, halfW, 1, portNames); nodePorts.add(port); // right port port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize, cutEnd, 1, (cutEnd + cutSpacing), 1, -halfW, -1, halfW, 1, portNames); nodePorts.add(port); sox = scaledValue(soxNoScaled); soy = scaledValue(soyNoScaled); // n = makeXmlPrimitive(t.nodeGroups, name + "-Well-RPO-Resistor", prFunc, 0, 0, 0, 0, // new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); // g.addPinOrResistor(n, name + "-RWell"); } } // Aggregating all palette groups into one List<PaletteGroup> allGroups = new ArrayList<PaletteGroup>(); allGroups.add(transPalette[0]); allGroups.add(transPalette[1]); allGroups.add(diffPalette[0]); allGroups.add(diffPalette[1]); allGroups.add(wellPalette[0]); allGroups.add(wellPalette[1]); allGroups.add(polyGroup); for (PaletteGroup g : metalPalette) allGroups.add(g); // Adding elements in palette for (PaletteGroup o : allGroups) { t.menuPalette.menuBoxes.add(o.arcs); // arcs t.menuPalette.menuBoxes.add(o.pins); // pins t.menuPalette.menuBoxes.add(o.elements); // contacts } // Writting GDS values makeLayerGDS(t, diffPLayer, String.valueOf(diff_layer)); makeLayerGDS(t, diffNLayer, String.valueOf(diff_layer)); makeLayerGDS(t, pplusLayer, String.valueOf(pplus_layer)); makeLayerGDS(t, nplusLayer, String.valueOf(nplus_layer)); makeLayerGDS(t, nwellLayer, String.valueOf(nwell_layer)); makeLayerGDS(t, deviceMarkLayer, String.valueOf(marking_layer)); makeLayerGDS(t, polyConLayer, String.valueOf(contact_layer)); makeLayerGDS(t, diffConLayer, String.valueOf(contact_layer)); makeLayerGDS(t, polyLayer, String.valueOf(poly_layer)); makeLayerGDS(t, polyGateLayer, String.valueOf(poly_layer)); for (int i = 0; i < num_metal_layers; i++) { Xml.Layer met = metalLayers.get(i); makeLayerGDS(t, met, String.valueOf(metal_layers[i])); if (getExtraInfoFlag()) { // Type is always 1 makeLayerGDS(t, dummyMetalLayers.get(i), metal_layers[i].value + "/1"); // exclusion always takes 150 makeLayerGDS(t, exclusionMetalLayers.get(i), "150/" + (i + 1)); } if (i > num_metal_layers - 2) continue; Xml.Layer via = viaLayers.get(i); makeLayerGDS(t, via, 
String.valueOf(via_layers[i])); } // Writting Layer Rules for (Xml.Layer l : diffLayers) { makeLayerRuleMinWid(t, l, diff_width); makeLayersRule(t, l, DRCTemplate.DRCRuleType.SPACING, diff_spacing.rule, diff_spacing.value); } WizardField[] plus_diff = {pplus_overhang_diff, nplus_overhang_diff}; WizardField[] plus_width = {pplus_width, nplus_width}; WizardField[] plus_spacing = {pplus_spacing, nplus_spacing}; for (int i = 0; i < plusLayers.length; i++) { makeLayerRuleMinWid(t, plusLayers[i], plus_width[i]); makeLayersRuleSurround(t, plusLayers[i], diffLayers[i], plus_diff[i].rule, plus_diff[i].value); makeLayersRule(t, plusLayers[i], DRCTemplate.DRCRuleType.SPACING, plus_spacing[i].rule, plus_spacing[i].value); } Xml.Layer[] wells = {pwellLayer, nwellLayer}; for (Xml.Layer w : wells) { makeLayerRuleMinWid(t, w, nwell_width); makeLayersRuleSurround(t, w, diffPLayer, nwell_overhang_diff_p.rule, nwell_overhang_diff_p.value); makeLayersRuleSurround(t, w, diffNLayer, nwell_overhang_diff_n.rule, nwell_overhang_diff_n.value); makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, nwell_spacing.rule, nwell_spacing.value); } Xml.Layer[] polys = {polyLayer, polyGateLayer}; for (Xml.Layer w : polys) { makeLayerRuleMinWid(t, w, poly_width); makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, poly_spacing.rule, poly_spacing.value); } // Simple spacing rules included here for (int i = 0; i < num_metal_layers; i++) { Xml.Layer met = metalLayers.get(i); makeLayerRuleMinWid(t, met, metal_width[i]); makeLayersRule(t, met, DRCTemplate.DRCRuleType.SPACING, metal_spacing[i].rule, metal_spacing[i].value); if (i >= num_metal_layers - 1) continue; Xml.Layer via = viaLayers.get(i); makeLayerRuleMinWid(t, via, via_size[i]); makeLayersRule(t, via, DRCTemplate.DRCRuleType.SPACING, via_inline_spacing[i].rule, via_inline_spacing[i].value); // makeLayersRule(t, via, DRCTemplate.DRCRuleType.UCONSPA2D, via_array_spacing[i]); } // wide metal rules for (WideWizardField w : wide_metal_spacing) { for (String layerName : w.names) { Xml.Layer layer = t.findLayer(layerName); assert(layer != null); makeLayersWideRule(t, layer, DRCTemplate.DRCRuleType.SPACING, w.rule, w.value, w.maxW, w.minLen); } } // Finish menu with Pure, Misc and Cell List<Object> l = new ArrayList<Object>(); l.add(new String("Pure")); t.menuPalette.menuBoxes.add(l); l = new ArrayList<Object>(); l.add(new String("Misc.")); t.menuPalette.menuBoxes.add(l); l = new ArrayList<Object>(); l.add(new String("Cell")); t.menuPalette.menuBoxes.add(l); // Sort before writing data. We might need to sort primitive nodes in group before... Collections.sort(t.nodeGroups, primitiveNodeGroupSort); for (Xml.PrimitiveNodeGroup nodeGroup: t.nodeGroups) { // sort NodeLayer before writing them Collections.sort(nodeGroup.nodeLayers, nodeLayerSort); } // write finally the file boolean includeDateAndVersion = User.isIncludeDateAndVersionInOutput(); String copyrightMessage = IOTool.isUseCopyrightMessage() ? IOTool.getCopyrightMessage() : null; t.writeXml(fileName, includeDateAndVersion, copyrightMessage); } private PrimitiveNode.Function getWellContactFunction(int i) { if (i == Technology.P_TYPE) return (pSubstrateProcess) ? PrimitiveNode.Function.SUBSTRATE : PrimitiveNode.Function.WELL; return (pSubstrateProcess) ? 
PrimitiveNode.Function.WELL : PrimitiveNode.Function.SUBSTRATE; } private void prepareTransistor(double gateWidth, double gateLength, double polyEndcap, double diffPolyOverhang, double gateContactSpacing, double contactSize, Xml.Layer activeLayer, Xml.Layer polyLayer, Xml.Layer polyGateLayer, List<Xml.NodeLayer> nodesList, List<Xml.PrimitivePort> nodePorts) { double impx = scaledValue((gateWidth)/2); double impy = scaledValue((gateLength+diffPolyOverhang*2)/2); double diffY = scaledValue(gateLength/2+gateContactSpacing+contactSize/2); // impy double diffX = 0; double xSign = 1, ySign = -1; // Active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, impy, activeLayer, Poly.Type.FILLED, true, false, -1)); // electrical active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, 0, activeLayer, Poly.Type.FILLED, false, true, 3)); // bottom nodesList.add(makeXmlNodeLayer(impx, impx, 0, impy, activeLayer, Poly.Type.FILLED, false, true, 1)); // top // Diff port List<String> portNames = new ArrayList<String>(); portNames.add(activeLayer.name); // top port Xml.PrimitivePort diffTopPort = makeXmlPrimitivePort("diff-top", 90, 90, 1, null, diffX, -1, diffX, 1, diffY, 1, diffY, 1, portNames); // bottom port Xml.PrimitivePort diffBottomPort = makeXmlPrimitivePort("diff-bottom", 270, 90, 2, null, xSign*diffX, -1, xSign*diffX, 1, ySign*diffY, -1, ySign*diffY, -1, portNames); // Electric layers // Gate layer Electrical double gatey = scaledValue(gateLength/2); double gatex = impx; double endPolyx = scaledValue((gateWidth+polyEndcap*2)/2); double endPolyy = gatey; double endLeftOrRight = -impx; double endTopOrBotton = endPolyy; double polyX = endPolyx; double polyY = 0; nodesList.add(makeXmlNodeLayer(gatex, gatex, gatey, gatey, polyGateLayer, Poly.Type.FILLED, false, true, -1)); // Poly layers // left electrical nodesList.add(makeXmlNodeLayer(endPolyx, endLeftOrRight, endPolyy, endTopOrBotton, polyLayer, Poly.Type.FILLED, false, true, 0)); // right electrical nodesList.add(makeXmlNodeLayer(endLeftOrRight, endPolyx, endTopOrBotton, endPolyy, polyLayer, Poly.Type.FILLED, false, true, 2)); // non-electrical poly (just one poly layer) nodesList.add(makeXmlNodeLayer(endPolyx, endPolyx, endPolyy, endPolyy, polyLayer, Poly.Type.FILLED, true, false, -1)); // Poly port portNames.clear(); portNames.add(polyLayer.name); Xml.PrimitivePort polyLeftPort = makeXmlPrimitivePort("poly-left", 180, 90, 0, null, ySign*polyX, -1, ySign*polyX, -1, xSign*polyY, -1, xSign*polyY, 1, portNames); // right port Xml.PrimitivePort polyRightPort = makeXmlPrimitivePort("poly-right", 0, 180, 0, null, polyX, 1, polyX, 1, polyY, -1, polyY, 1, portNames); nodePorts.clear(); nodePorts.add(polyLeftPort); nodePorts.add(diffTopPort); nodePorts.add(polyRightPort); nodePorts.add(diffBottomPort); } private Xml.ArcLayer makeXmlArcLayer(Xml.Layer layer, WizardField ... flds) { Xml.ArcLayer al = new Xml.ArcLayer(); al.layer = layer.name; al.style = Poly.Type.FILLED; for (int i = 0; i < flds.length; i++) al.extend.addLambda(scaledValue(flds[i].value /2)); return al; } // private Technology.Distance makeXmlDistance(WizardField ... 
flds) { // Technology.Distance dist = new Technology.Distance(); // dist.addRule(flds[0].rule, 0.5); // for (int i = 1; i < flds.length; i++) // dist.addRule(flds[i].rule, 1); // return dist; // } private void makeLayerGDS(Xml.Technology t, Xml.Layer l, String gdsVal) { for (Xml.Foundry f: t.foundries) { f.layerGds.put(l.name, gdsVal); } } private void makeLayerRuleMinWid(Xml.Technology t, Xml.Layer l, WizardField fld) { for (Xml.Foundry f: t.foundries) { f.rules.add(new DRCTemplate(fld.rule, DRCTemplate.DRCMode.ALL.mode(), DRCTemplate.DRCRuleType.MINWID, l.name, null, new double[] {scaledValue(fld.value)}, null, null)); } } private void makeLayersWideRule(Xml.Technology t, Xml.Layer l, DRCTemplate.DRCRuleType ruleType, String ruleName, double ruleValue, double maxW, double minLen) { for (Xml.Foundry f: t.foundries) { f.rules.add(new DRCTemplate(ruleName, DRCTemplate.DRCMode.ALL.mode(), ruleType, maxW, minLen, l.name, l.name, new double[] {scaledValue(ruleValue)}, -1)); } } private void makeLayersRule(Xml.Technology t, Xml.Layer l, DRCTemplate.DRCRuleType ruleType, String ruleName, double ruleValue) { for (Xml.Foundry f: t.foundries) { f.rules.add(new DRCTemplate(ruleName, DRCTemplate.DRCMode.ALL.mode(), ruleType, l.name, l.name, new double[] {scaledValue(ruleValue)}, null, null)); } } private void makeLayersRuleSurround(Xml.Technology t, Xml.Layer l1, Xml.Layer l2, String ruleName, double ruleValue) { double value = scaledValue(ruleValue); for (Xml.Foundry f: t.foundries) { f.rules.add(new DRCTemplate(ruleName, DRCTemplate.DRCMode.ALL.mode(), DRCTemplate.DRCRuleType.SURROUND, l1.name, l2.name, new double[] {value, value}, null, null)); } } private double scaledValue(double val) { return DBMath.round(val / stepsize); } /*************************************************************************************************** * PrimitiveNodeGroup Comparator ***************************************************************************************************/ /** * A comparator object for sorting NodeGroups. * Created once because it is used often. */ private static final PrimitiveNodeGroupSort primitiveNodeGroupSort = new PrimitiveNodeGroupSort(); /** * Comparator class for sorting PrimitiveNodeGroups by their name. */ public static class PrimitiveNodeGroupSort implements Comparator<Xml.PrimitiveNodeGroup> { /** * Method to compare two PrimitiveNodeGroups by their name. * @param l1 one PrimitiveNodeGroup. * @param l2 another PrimitiveNodeGroup. * @return an integer indicating their sorting order. */ public int compare(Xml.PrimitiveNodeGroup l1, Xml.PrimitiveNodeGroup l2) { // Sorting by first element Xml.PrimitiveNode n1 = l1.nodes.get(0); Xml.PrimitiveNode n2 = l2.nodes.get(0); return n1.name.compareTo(n2.name); } } /*************************************************************************************************** * NodeLayer Comparator ***************************************************************************************************/ /** * A comparator object for sorting NodeLayers. * Created once because it is used often. */ private static final NodeLayerSort nodeLayerSort = new NodeLayerSort(); /** * Comparator class for sorting NodeLayers by their name. */ public static class NodeLayerSort implements Comparator<Xml.NodeLayer> { /** * Method to compare two NodeLayers by their name. * @param l1 one NodeLayer. * @param l2 another NodeLayer. * @return an integer indicating their sorting order.
*/ public int compare(Xml.NodeLayer l1, Xml.NodeLayer l2) { return l1.layer.compareTo(l2.layer); } } }
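The two comparators above exist so that dumpXMLFile writes its output in a fixed order: node groups are sorted by the name of their first primitive node, and the node layers inside each group by layer name, before writeXml runs, presumably so the generated technology XML is stable across runs. Below is a minimal, self-contained sketch of that sort-before-serialize step; it is plain Java, and the Group stand-in class is invented for illustration, not an Electric class.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class SortSketch {
    // Stand-in for Xml.PrimitiveNodeGroup: the name of its first node plus its layer names.
    static class Group {
        final String firstNodeName;
        final List<String> layers;
        Group(String firstNodeName, String... layers) {
            this.firstNodeName = firstNodeName;
            this.layers = new ArrayList<>(Arrays.asList(layers));
        }
    }

    public static void main(String[] args) {
        List<Group> groups = new ArrayList<>(Arrays.asList(
                new Group("P-Transistor", "Poly", "N-Well", "P-Active"),
                new Group("Metal-1-Pin", "Metal-1")));

        // Like Collections.sort(t.nodeGroups, primitiveNodeGroupSort):
        // order groups by the name of their first node.
        groups.sort(Comparator.comparing(g -> g.firstNodeName));

        // Like Collections.sort(nodeGroup.nodeLayers, nodeLayerSort):
        // order each group's layers by layer name.
        for (Group g : groups) Collections.sort(g.layers);

        for (Group g : groups) System.out.println(g.firstNodeName + " -> " + g.layers);
    }
}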
false
true
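Both copies of dumpXMLFile build their EGraphics layer appearances from 16-entry stipple arrays, where each int encodes one 16-pixel row of the fill pattern and the trailing comments draw the set bits as X columns. The following small, runnable sketch decodes one such array back into its X/space picture; the values are the patternSlash rows used for the NPlus layer in the function below, and treating bit 15 as the leftmost pixel is an assumption about the rendering convention, not something the source states.

public class PatternSketch {
    public static void main(String[] args) {
        // One 16x16 stipple: each int is a 16-bit row, a set bit meaning a drawn pixel.
        int[] patternSlash = {
            0x1010, 0x2020, 0x4040, 0x8080, 0x0101, 0x0202, 0x0404, 0x0808,
            0x1010, 0x2020, 0x4040, 0x8080, 0x0101, 0x0202, 0x0404, 0x0808};
        for (int row : patternSlash) {
            StringBuilder sb = new StringBuilder(16);
            for (int bit = 15; bit >= 0; bit--)   // assumed: bit 15 = leftmost pixel
                sb.append(((row >> bit) & 1) != 0 ? 'X' : ' ');
            System.out.println(sb);               // prints the diagonal "slash" fill
        }
    }
}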
public void dumpXMLFile(String fileName) throws IOException { Xml.Technology t = new Xml.Technology(); t.techName = getTechName(); t.shortTechName = getTechName(); t.description = getTechDescription(); t.minNumMetals = t.maxNumMetals = t.defaultNumMetals = getNumMetalLayers(); t.scaleValue = getStepSize(); t.scaleRelevant = true; t.resolutionValue = getResolution(); // t.scaleRelevant = isScaleRelevant(); t.defaultFoundry = "NONE"; t.minResistance = 1.0; t.minCapacitance = 0.1; // menus t.menuPalette = new Xml.MenuPalette(); t.menuPalette.numColumns = 3; /** RULES **/ Xml.Foundry f = new Xml.Foundry(); f.name = Foundry.Type.NONE.getName(); t.foundries.add(f); // LAYER COLOURS Color [] metal_colour = new Color[] { new Color(0,150,255), // cyan/blue new Color(148,0,211), // purple new Color(255,215,0), // yellow new Color(132,112,255), // mauve new Color(255,160,122), // salmon new Color(34,139,34), // dull green new Color(178,34,34), // dull red new Color(34,34,178), // dull blue new Color(153,153,153), // light gray new Color(102,102,102) // dark gray }; Color poly_colour = new Color(255,155,192); // pink Color diff_colour = new Color(107,226,96); // light green Color via_colour = new Color(205,205,205); // lighter gray Color contact_colour = new Color(100,100,100); // darker gray Color nplus_colour = new Color(224,238,224); Color pplus_colour = new Color(224,224,120); Color nwell_colour = new Color(140,140,140); // Five transparent colors: poly_colour, diff_colour, metal_colour[0->2] Color[] colorMap = {poly_colour, diff_colour, metal_colour[0], metal_colour[1], metal_colour[2]}; for (int i = 0; i < colorMap.length; i++) { Color transparentColor = colorMap[i]; t.transparentLayers.add(transparentColor); } // Layers List<Xml.Layer> metalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> dummyMetalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> exclusionMetalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> viaLayers = new ArrayList<Xml.Layer>(); Map<Xml.Layer,WizardField> layer_width = new LinkedHashMap<Xml.Layer,WizardField>(); int[] nullPattern = new int[] {0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000}; int[] dexclPattern = new int[] { 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808}; // X X for (int i = 0; i < num_metal_layers; i++) { // Adding the metal int metalNum = i + 1; double opacity = (75 - metalNum * 5)/100.0; int metLayHigh = i / 10; int metLayDig = i % 10; int r = metal_colour[metLayDig].getRed() * (10-metLayHigh) / 10; int g = metal_colour[metLayDig].getGreen() * (10-metLayHigh) / 10; int b = metal_colour[metLayDig].getBlue() * (10-metLayHigh) / 10; int tcol = 0; int[] pattern = null; switch (metLayDig) { case 0: tcol = 3; break; case 1: tcol = 4; break; case 2: tcol = 5; break; case 3: pattern = new int[] {0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000}; break; case 4: pattern = new int[] { 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, 
// X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444}; break; case 5: pattern = new int[] { 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555}; break; case 6: pattern = new int[] { 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111}; break; case 7: pattern = new int[] { 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000}; break; case 8: pattern = new int[] {0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888}; // X X X X break; case 9: pattern = new int[] { 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555}; break; } boolean onDisplay = true, onPrinter = true; if (pattern == null) { pattern = nullPattern; onDisplay = false; onPrinter = false; } EGraphics graph = new EGraphics(onDisplay, onPrinter, null, tcol, r, g, b, opacity, true, pattern); Layer.Function fun = Layer.Function.getMetal(metalNum); if (fun == null) throw new IOException("invalid number of metals"); String metalName = "Metal-"+metalNum; Xml.Layer layer = makeXmlLayer(t.layers, layer_width, metalName, fun, 0, graph, metal_width[i], true, true); metalLayers.add(layer); if (getExtraInfoFlag()) { // dummy layers graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, false, nullPattern); layer = makeXmlLayer(t.layers, "DMY-"+metalName, Layer.Function.getDummyMetal(metalNum), 0, graph, 5*metal_width[i].value, true, false); dummyMetalLayers.add(layer); // exclusion layers for metals graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, true, dexclPattern); layer = makeXmlLayer(t.layers, "DEXCL-"+metalName, Layer.Function.getDummyExclMetal(i), 0, graph, 2*metal_width[i].value, true, false); exclusionMetalLayers.add(layer); } } // Vias for (int i = 0; i < num_metal_layers - 1; i++) { // Adding the metal int metalNum = i + 1; // adding the via int r = via_colour.getRed(); int g = via_colour.getGreen(); int b = via_colour.getBlue(); double opacity = 0.7; EGraphics graph = new EGraphics(false, false, null, 0, r, g, b, opacity, true, nullPattern); Layer.Function fun = Layer.Function.getContact(metalNum+1); //via contact starts with CONTACT2 if (fun == null) throw new IOException("invalid number of vias"); viaLayers.add(makeXmlLayer(t.layers, layer_width, "Via-"+metalNum, fun, Layer.Function.CONMETAL, 
graph, via_size[i], true, false)); } // Poly String polyN = poly_layer.name; EGraphics graph = new EGraphics(false, false, null, 1, 0, 0, 0, 1, true, nullPattern); Xml.Layer polyLayer = makeXmlLayer(t.layers, layer_width, polyN, Layer.Function.POLY1, 0, graph, poly_width, true, true); // PolyGate Xml.Layer polyGateLayer = makeXmlLayer(t.layers, layer_width, polyN+"Gate", Layer.Function.GATE, 0, graph, poly_width, true, false); // false for the port otherwise it won't find any type if (getExtraInfoFlag()) { // exclusion layer poly graph = new EGraphics(true, true, null, 1, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionPolyLayer = makeXmlLayer(t.layers, "DEXCL-"+polyN, Layer.Function.DEXCLPOLY1, 0, graph, 2*poly_width.value, true, false); makeLayerGDS(t, exclusionPolyLayer, "150/21"); } // PolyCon and DiffCon graph = new EGraphics(false, false, null, 0, contact_colour.getRed(), contact_colour.getGreen(), contact_colour.getBlue(), 0.5, true, nullPattern); // PolyCon Xml.Layer polyConLayer = makeXmlLayer(t.layers, layer_width, "Poly-Cut", Layer.Function.CONTACT1, Layer.Function.CONPOLY, graph, contact_size, true, false); // DiffCon Xml.Layer diffConLayer = makeXmlLayer(t.layers, layer_width, diff_layer.name+"-Cut", Layer.Function.CONTACT1, Layer.Function.CONDIFF, graph, contact_size, true, false); List<String> portNames = new ArrayList<String>(); // P-Diff and N-Diff graph = new EGraphics(false, false, null, 2, 0, 0, 0, 1, true, nullPattern); // N-Diff Xml.Layer diffNLayer = makeXmlLayer(t.layers, layer_width, "N-"+ diff_layer.name, Layer.Function.DIFFN, 0, graph, diff_width, true, true, "N-"+ diff_layer.name, "N-Well", "S-N-Well"); // P-Diff dd Xml.Layer diffPLayer = makeXmlLayer(t.layers, layer_width, "P-"+ diff_layer.name, Layer.Function.DIFFP, 0, graph, diff_width, true, true, "P-"+ diff_layer.name, "P-Well", "S-P-Well"); if (getExtraInfoFlag()) { // exclusion layer N/P diff graph = new EGraphics(true, true, null, 2, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionDiffPLayer = makeXmlLayer(t.layers, "DEXCL-P-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); Xml.Layer exclusionDiffNLayer = makeXmlLayer(t.layers, "DEXCL-N-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); makeLayerGDS(t, exclusionDiffPLayer, "150/20"); makeLayerGDS(t, exclusionDiffNLayer, "150/20"); } // NPlus and PPlus int [] patternSlash = new int[] { 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808, // X X 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808}; int [] patternBackSlash = new int[] { 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404, // X X 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404}; int[] patternDots = new int[] { 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000}; // int[] patternDotsShift = new int[] { 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202}; // X X // NPlus graph = new EGraphics(true, true, null, 0, nplus_colour.getRed(), nplus_colour.getGreen(), 
nplus_colour.getBlue(), 1, true, patternSlash); Xml.Layer nplusLayer = makeXmlLayer(t.layers, layer_width, nplus_layer.name, Layer.Function.IMPLANTN, 0, graph, nplus_width, true, false); // PPlus graph = new EGraphics(true, true, null, 0, pplus_colour.getRed(), pplus_colour.getGreen(), pplus_colour.getBlue(), 1, true, patternDots); Xml.Layer pplusLayer = makeXmlLayer(t.layers, layer_width, pplus_layer.name, Layer.Function.IMPLANTP, 0, graph, pplus_width, true, false); // N-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternDotsShift); Xml.Layer nwellLayer = makeXmlLayer(t.layers, layer_width, nwell_layer.name, Layer.Function.WELLN, 0, graph, nwell_width, true, false); // P-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternBackSlash); Xml.Layer pwellLayer = makeXmlLayer(t.layers, layer_width, "P-Well", Layer.Function.WELLP, 0, graph, nwell_width, true, false); // DeviceMark graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer deviceMarkLayer = makeXmlLayer(t.layers, layer_width, "DeviceMark", Layer.Function.CONTROL, 0, graph, nplus_width, true, false); // Extra layers if (getExtraInfoFlag()) { for (LayerInfo info : extraLayers) { graph = null; // either color or template assert (info.graphicsTemplate == null || info.graphicsColor == null); if (info.graphicsTemplate != null) { // look for layer name and get its EGraphics for (Xml.Layer l : t.layers) { if (l.name.equals(info.graphicsTemplate)) { graph = l.desc; break; } } if (graph == null) System.out.println("No template layer " + info.graphicsTemplate + " found"); } else if (info.graphicsColor != null) { graph = new EGraphics(true, true, info.graphicsOutline, 0, info.graphicsColor.getRed(), info.graphicsColor.getGreen(), info.graphicsColor.getBlue(), 1, true, info.graphicsPattern); } if (graph == null) graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer layer = makeXmlLayer(t.layers, layer_width, info.name, Layer.Function.ART, 0, graph, nplus_width, true, false); makeLayerGDS(t, layer, String.valueOf(info)); } } // Palette elements should be added at the end so they will appear in groups PaletteGroup[] metalPalette = new PaletteGroup[num_metal_layers]; // write arcs // metal arcs for(int i=1; i<=num_metal_layers; i++) { double ant = (int)Math.round(metal_antenna_ratio[i-1]) | 200; PaletteGroup group = new PaletteGroup(); metalPalette[i-1] = group; group.addArc(makeXmlArc(t, "Metal-"+i, ArcProto.Function.getContact(i), ant, makeXmlArcLayer(metalLayers.get(i-1), metal_width[i-1]))); } /**************************** POLY Nodes/Arcs ***********************************************/ // poly arc double ant = (int)Math.round(poly_antenna_ratio) | 200; PaletteGroup polyGroup = new PaletteGroup(); polyGroup.addArc(makeXmlArc(t, polyLayer.name, ArcProto.Function.getPoly(1), ant, makeXmlArcLayer(polyLayer, poly_width))); // poly pin double hla = scaledValue(poly_width.value / 2); polyGroup.addPinOrResistor(makeXmlPrimitivePin(t, polyLayer.name, hla, null, // new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, polyLayer, Poly.Type.CROSSED)), null); // poly contact portNames.clear(); portNames.add(polyLayer.name); portNames.add(metalLayers.get(0).name); hla = scaledValue((contact_size.value /2 + contact_poly_overhang.value)); Xml.Layer m1Layer = metalLayers.get(0); double contSize = 
scaledValue(contact_size.value);
        double contSpacing = scaledValue(contact_spacing.value);
        double contArraySpacing = scaledValue(contact_array_spacing.value);
        double metal1Over = scaledValue(contact_size.value /2 + contact_metal_overhang_all_sides.value);

        // only for standard cases when getProtectionPoly() is false
        if (!getExtraInfoFlag())
        {
            polyGroup.addElement(makeContactSeries(t.nodeGroups, polyLayer.name, contSize, polyConLayer, contSpacing, contArraySpacing,
                scaledValue(contact_poly_overhang.value), polyLayer,
                scaledValue(via_overhang[0].value), m1Layer), null);
        }

        /**************************** N/P-Diff Nodes/Arcs/Group ***********************************************/
        PaletteGroup[] diffPalette = new PaletteGroup[2];
        diffPalette[0] = new PaletteGroup(); diffPalette[1] = new PaletteGroup();
        PaletteGroup[] wellPalette = new PaletteGroup[2];
        wellPalette[0] = new PaletteGroup(); wellPalette[1] = new PaletteGroup();

        // ndiff/pdiff pins
        hla = scaledValue((contact_size.value /2 + diff_contact_overhang.value));
        double nsel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_diff.value);
        double psel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_diff.value);
        double nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_p.value);
        double nso = scaledValue(nwell_overhang_diff_p.value /*+ diff_contact_overhang.v*/); // valid for elements that have nwell layers
        double pso = (!pSubstrateProcess)?nso:scaledValue(nplus_overhang_diff.value/* + diff_contact_overhang.v*/);

        // ndiff/pdiff contacts
        String[] diffNames = {"P", "N"};
        double[] sos = {nso, pso};
        double[] sels = {psel, nsel};
        Xml.Layer[] diffLayers = {diffPLayer, diffNLayer};
        Xml.Layer[] plusLayers = {pplusLayer, nplusLayer};

        // Active and poly contacts. They are defined before the Full types
        for (Map.Entry<String,List<Contact>> e : otherContacts.entrySet())
        {
            // generic contacts
            String name = null;

            for (Contact c : e.getValue())
            {
                Xml.Layer ly = null, lx = null;
                Xml.Layer conLay = diffConLayer;
                PaletteGroup g = null;
                ContactNode metalLayer = c.layers.get(0);
                ContactNode otherLayer = c.layers.get(1);
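                /* Layer-naming convention used by these contact descriptions (an
                 * inference from the code around this point, not from documentation):
                 * metal layers are referred to by a purely numeric string ("1" for
                 * Metal-1, "2" for Metal-2, ...), while diffusion/poly layers carry
                 * real layer names. The test below detects when layers.get(0) is not
                 * numeric and swaps the pair, so that metalLayer always ends up
                 * holding the numeric entry; Integer.valueOf(metalLayer.layer)
                 * further down relies on exactly that. */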
                if (!TextUtils.isANumber(metalLayer.layer)) // horizontal must be!
                {
                    assert (TextUtils.isANumber(otherLayer.layer));
                    metalLayer = c.layers.get(1);
                    otherLayer = c.layers.get(0);
                }

                int m1 = Integer.valueOf(metalLayer.layer);
                ly = metalLayers.get(m1-1);
                String layerName = otherLayer.layer;
                if (layerName.equals(diffLayers[0].name))
                {
                    lx = diffLayers[0];
                    g = diffPalette[0];
                }
                else if (layerName.equals(diffLayers[1].name))
                {
                    lx = diffLayers[1];
                    g = diffPalette[1];
                }
                else if (layerName.equals(polyLayer.name))
                {
                    lx = polyLayer;
                    conLay = polyConLayer;
                    g = polyGroup;
                }
                else
                    assert(false); // it should not happen

                double h1x = scaledValue(contact_size.value /2 + metalLayer.overX.value);
                double h1y = scaledValue(contact_size.value /2 + metalLayer.overY.value);
                double h2x = scaledValue(contact_size.value /2 + otherLayer.overX.value);
                double h2y = scaledValue(contact_size.value /2 + otherLayer.overY.value);
                double longX = (Math.abs(metalLayer.overX.value - otherLayer.overX.value));
                double longY = (Math.abs(metalLayer.overY.value - otherLayer.overY.value));
                PrimitiveNode.Function func = PrimitiveNode.Function.CONTACT;
                // Xml.NodeLayer extraN = null;
                Xml.NodeLayer[] nodes = new Xml.NodeLayer[c.layers.size() + 1]; // all plus cut
                int count = 0;

                // cut
                nodes[count++] = makeXmlMulticut(conLay, contSize, contSpacing, contArraySpacing);
                // metal
                nodes[count++] = makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED); // layer1
                // active or poly
                nodes[count++] = makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED); // layer2

                Xml.Layer otherLayerPort = lx;
                for (int i = 2; i < c.layers.size(); i++) // rest of layers. Either select or well.
                {
                    ContactNode node = c.layers.get(i);
                    Xml.Layer lz = t.findLayer(node.layer);
                    if ((lz == pwellLayer && lx == diffLayers[0]) || (lz == nwellLayer && lx == diffLayers[1])) // well contact
                    {
                        otherLayerPort = lz;
                        // name = ly.name + "-" + lz.name; // not clean
                        if (lz == pwellLayer)
                        {
                            g = wellPalette[0];
                            func = getWellContactFunction(Technology.P_TYPE);
                        }
                        else // nwell
                        {
                            g = wellPalette[1];
                            func = getWellContactFunction(Technology.N_TYPE);
                        }
                    }
                    if (pSubstrateProcess && lz == pwellLayer)
                        continue; // skip this layer

                    double h3x = scaledValue(contact_size.value /2 + node.overX.value);
                    double h3y = scaledValue(contact_size.value /2 + node.overY.value);
                    nodes[count++] = makeXmlNodeLayer(h3x, h3x, h3y, h3y, lz, Poly.Type.FILLED);

                    // This assumes no well is defined
                    double longXLocal = (Math.abs(node.overX.value - otherLayer.overX.value));
                    double longYLocal = (Math.abs(node.overY.value - otherLayer.overY.value));
                    if (DBMath.isGreaterThan(longXLocal, longX))
                        longX = longXLocal;
                    if (DBMath.isGreaterThan(longYLocal, longY))
                        longY = longYLocal;
                }
                longX = scaledValue(longX);
                longY = scaledValue(longY);

                // port names now, after determining whether it is a diff or well contact
                portNames.clear();
                // if (!pSubstrateProcess || otherLayerPort == pwellLayer)
                portNames.add(otherLayerPort.name);
                portNames.add(ly.name); // always should represent the metal1
                name = ly.name + "-" + otherLayerPort.name;

                // some primitives might not have prefix. "-" should not be in the prefix to avoid
                // being displayed in the palette
                String p = (c.prefix == null || c.prefix.equals("")) ?
                    "" : c.prefix + "-";
                g.addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, func, -1, -1,
                    new SizeOffset(longX, longX, longY, longY), portNames, nodes), c.prefix); // contact
            }
        }

        // ndiff/pdiff contact
        for (int i = 0; i < 2; i++)
        {
            portNames.clear();
            portNames.add(diffLayers[i].name);
            portNames.add(m1Layer.name);
            String composeName = diffNames[i] + "-" + diff_layer.name; //Diff";
            Xml.NodeLayer wellNode, wellNodePin;
            ArcProto.Function arcF;
            Xml.ArcLayer arcL;
            WizardField arcVal;

            if (i == Technology.P_TYPE)
            {
                wellNodePin = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED);
                wellNode = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED);
                arcF = ArcProto.Function.DIFFP;
                arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p);
                arcVal = pplus_overhang_diff;
            }
            else
            {
                wellNodePin = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED):null;
                wellNode = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED):null;
                arcF = ArcProto.Function.DIFFN;
                arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null;
                arcVal = nplus_overhang_diff;
            }
            PaletteGroup diffG = diffPalette[i];

            // active arc
            diffG.addArc(makeXmlArc(t, composeName, arcF, 0,
                makeXmlArcLayer(diffLayers[i], diff_width),
                makeXmlArcLayer(plusLayers[i], diff_width, arcVal),
                arcL));
            // active pin
            diffG.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla,
                new SizeOffset(sos[i], sos[i], sos[i], sos[i]), null,
                makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED),
                makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED),
                wellNodePin), null);
            // F stands for full (all layers)
            diffG.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, PrimitiveNode.Function.CONTACT, hla, hla,
                new SizeOffset(sos[i], sos[i], sos[i], sos[i]), portNames,
                makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // metal1 layer
                makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer
                makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.FILLED), // select layer
                wellNode, // well layer
                makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-" + diffNames[i]); // contact
        }

        /**************************** N/P-Well Contacts ***********************************************/
        nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_n.value);
        nso = scaledValue(/*diff_contact_overhang.v +*/ nwell_overhang_diff_n.value); // valid for elements that have nwell layers
        pso = (!pSubstrateProcess)?nso:scaledValue(/*diff_contact_overhang.v +*/ nplus_overhang_diff.value);
        double[] wellSos = {pso, nso};
        Xml.Layer[] wellLayers = {pwellLayer, nwellLayer};
        double nselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_strap.value);
        double pselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_strap.value);
        double[] wellSels = {pselW, nselW};

        // nwell/pwell contact
        for (int i = 0; i < 2; i++)
        {
            String composeName = diffNames[i] + "-Well";
            Xml.NodeLayer wellNodeLayer = null, wellNodePinLayer = null;
            PaletteGroup g = wellPalette[i];
            PrimitiveNode.Function func = getWellContactFunction(i);
            Xml.ArcLayer arcL;
            WizardField arcVal;
            portNames.clear();

            if (i == Technology.P_TYPE)
            {
                if (!pSubstrateProcess)
                {
                    portNames.add(pwellLayer.name);
                    wellNodePinLayer
                        = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED);
                    wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED);
                }
                arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null;
                arcVal = pplus_overhang_diff;
            }
            else
            {
                portNames.add(nwellLayer.name);
                wellNodePinLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED);
                wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED);
                arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p);
                arcVal = nplus_overhang_diff;
            }
            portNames.add(m1Layer.name);

            // three layers arcs. This is the first port defined so it will be the default in the palette
            g.addArc(makeXmlArc(t, composeName, ArcProto.Function.WELL, 0,
                makeXmlArcLayer(diffLayers[i], diff_width),
                makeXmlArcLayer(plusLayers[i], diff_width, arcVal),
                arcL));
            // simple arc. S for simple
            g.addArc(makeXmlArc(t, "S-"+composeName, ArcProto.Function.WELL, 0,
                makeXmlArcLayer(wellLayers[i], diff_width, nwell_overhang_diff_p)));

            // well pin
            List<String> arcNames = new ArrayList<String>();
            arcNames.add(composeName);
            arcNames.add("S-"+composeName);
            g.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla,
                new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), arcNames,
                makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED),
                makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED),
                wellNodePinLayer), null);

            // well contact
            // F stands for full
            g.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, func, hla, hla,
                new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), portNames,
                makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // metal1 layer
                makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer
                makeXmlNodeLayer(wellSels[i], wellSels[i], wellSels[i], wellSels[i], plusLayers[i], Poly.Type.FILLED), // select layer
                wellNodeLayer, // well layer
                makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-"+diffNames[i] + "W"); // contact
        }

        /**************************** Metals Nodes/Arcs ***********************************************/
        // Pins and contacts
        for(int i=1; i<num_metal_layers; i++)
        {
            hla = scaledValue(metal_width[i-1].value / 2);
            Xml.Layer lb = metalLayers.get(i-1);
            Xml.Layer lt = metalLayers.get(i);
            PaletteGroup group = metalPalette[i-1]; // structure created by the arc definition

            // Pin bottom metal
            group.addPinOrResistor(makeXmlPrimitivePin(t, lb.name, hla, null, //new SizeOffset(hla, hla, hla, hla),
                null, makeXmlNodeLayer(hla, hla, hla, hla, lb, Poly.Type.CROSSED)), null);
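            /* Each pass of this loop publishes a pin for the bottom metal (lb);
             * because the loop stops at num_metal_layers-1, the top-most metal only
             * gets its pin in the last-iteration branch below. In the plain case
             * (extra-info flag off) a square "lb-lt" via contact is also emitted
             * from via_size/via_inline_spacing/via_array_spacing. Hypothetical
             * example, with num_metal_layers = 3: pins Metal-1, Metal-2, Metal-3
             * and contacts Metal-1-Metal-2 and Metal-2-Metal-3 would be produced. */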
            if (i == num_metal_layers - 1) // last pin!
            {
                metalPalette[i].addPinOrResistor(makeXmlPrimitivePin(t, lt.name, hla, null, //new SizeOffset(hla, hla, hla, hla),
                    null, makeXmlNodeLayer(hla, hla, hla, hla, lt, Poly.Type.CROSSED)), null);
            }

            if (!getExtraInfoFlag())
            {
                // original contact Square
                // via
                Xml.Layer via = viaLayers.get(i-1);
                double viaSize = scaledValue(via_size[i-1].value);
                double viaSpacing = scaledValue(via_inline_spacing[i-1].value);
                double viaArraySpacing = scaledValue(via_array_spacing[i-1].value);
                String name = lb.name + "-" + lt.name;
                double longDist = scaledValue(via_overhang[i-1].value);
                group.addElement(makeContactSeries(t.nodeGroups, name, viaSize, via, viaSpacing, viaArraySpacing,
                    longDist, lt, longDist, lb), null);
            }
        }

        // metal contacts
        for (Map.Entry<String,List<Contact>> e : metalContacts.entrySet())
        {
            // generic contacts
            for (Contact c : e.getValue())
            {
                // We know those layer names are numbers!
                assert(c.layers.size() == 2);
                ContactNode verticalLayer = c.layers.get(0);
                ContactNode horizontalLayer = c.layers.get(1);
                int i = Integer.valueOf(verticalLayer.layer);
                int j = Integer.valueOf(horizontalLayer.layer);
                Xml.Layer ly = metalLayers.get(i-1);
                Xml.Layer lx = metalLayers.get(j-1);
                String name = (j>i)?ly.name + "-" + lx.name:lx.name + "-" + ly.name;
                int via = (j>i)?i:j;
                double metalContSize = scaledValue(via_size[via-1].value);
                double spacing = scaledValue(via_inline_spacing[via-1].value);
                double arraySpacing = scaledValue(via_array_spacing[via-1].value);
                Xml.Layer metalConLayer = viaLayers.get(via-1);
                double h1x = scaledValue(via_size[via-1].value /2 + verticalLayer.overX.value);
                double h1y = scaledValue(via_size[via-1].value /2 + verticalLayer.overY.value);
                double h2x = scaledValue(via_size[via-1].value /2 + horizontalLayer.overX.value);
                double h2y = scaledValue(via_size[via-1].value /2 + horizontalLayer.overY.value);
                // double longX = scaledValue(DBMath.isGreaterThan(verticalLayer.overX.v, horizontalLayer.overX.v) ? verticalLayer.overX.v : horizontalLayer.overX.v);
                // double longY = scaledValue(DBMath.isGreaterThan(verticalLayer.overY.v, horizontalLayer.overY.v) ? verticalLayer.overY.v : horizontalLayer.overY.v);
                double longX = scaledValue(Math.abs(verticalLayer.overX.value - horizontalLayer.overX.value));
                double longY = scaledValue(Math.abs(verticalLayer.overY.value - horizontalLayer.overY.value));

                portNames.clear();
                portNames.add(lx.name);
                portNames.add(ly.name);
                // some primitives might not have prefix. "-" should not be in the prefix to avoid
                // being displayed in the palette
                String p = (c.prefix == null || c.prefix.equals("")) ?
                    "" : c.prefix + "-";
                metalPalette[via-1].addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, PrimitiveNode.Function.CONTACT, -1, -1,
                    new SizeOffset(longX, longX, longY, longY), portNames,
                    makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED), // layer1
                    makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED), // layer2
                    makeXmlMulticut(metalConLayer, metalContSize, spacing, arraySpacing)), c.prefix); // contact
            }
        }

        /**************************** Transistors ***********************************************/
        /** Transistors **/
        // write the transistors
        List<Xml.NodeLayer> nodesList = new ArrayList<Xml.NodeLayer>();
        List<Xml.PrimitivePort> nodePorts = new ArrayList<Xml.PrimitivePort>();
        EPoint minFullSize = null; //EPoint.fromLambda(0, 0); // default zero horizontalFlag
        PaletteGroup[] transPalette = new PaletteGroup[2];
        for(int i = 0; i < 2; i++)
        {
            String name;
            double selecty = 0, selectx = 0;
            Xml.Layer wellLayer = null, activeLayer, selectLayer;
            double sox = 0, soy = 0;
            double impx = scaledValue((gate_width.value)/2);
            double impy = scaledValue((gate_length.value +diff_poly_overhang.value *2)/2);
            double nwell_overhangX = 0, nwell_overhangY = 0;
            PaletteGroup g = new PaletteGroup();
            transPalette[i] = g;
            double protectDist = scaledValue(poly_protection_spacing.value);
            double extraSelX = 0, extraSelY = 0;
            PrimitiveNode.Function func = null, prFunc = null;

            if (i==Technology.P_TYPE)
            {
                name = "P";
                nwell_overhangY = nwell_overhangX = nwell_overhang_diff_n.value;
                wellLayer = nwellLayer;
                activeLayer = diffPLayer;
                selectLayer = pplusLayer;
                extraSelX = pplus_overhang_poly.value;
                extraSelY = pplus_overhang_diff.value;
                func = PrimitiveNode.Function.TRAPMOS;
                prFunc = PrimitiveNode.Function.RESPPOLY;
            }
            else
            {
                name = "N";
                activeLayer = diffNLayer;
                selectLayer = nplusLayer;
                extraSelX = nplus_overhang_poly.value;
                extraSelY = nplus_overhang_diff.value;
                func = PrimitiveNode.Function.TRANMOS;
                prFunc = PrimitiveNode.Function.RESNPOLY;
                if (!pSubstrateProcess)
                {
                    nwell_overhangY = nwell_overhangX = nwell_overhang_diff_p.value;
                    wellLayer = pwellLayer;
                }
                else
                {
                    nwell_overhangX = poly_endcap.value +extraSelX;
                    nwell_overhangY = extraSelY;
                }
            }

            selectx = scaledValue(gate_width.value /2+poly_endcap.value +extraSelX);
            selecty = scaledValue(gate_length.value /2+diff_poly_overhang.value +extraSelY);

            // Using P values in transistors
            double wellx = scaledValue((gate_width.value /2+nwell_overhangX));
            double welly = scaledValue((gate_length.value /2+diff_poly_overhang.value +nwell_overhangY));
            sox = scaledValue(nwell_overhangX);
            soy = scaledValue(diff_poly_overhang.value +nwell_overhangY);
            if (DBMath.isLessThan(wellx, selectx))
            {
                sox = scaledValue(poly_endcap.value +extraSelX);
                wellx = selectx;
            }
            if (DBMath.isLessThan(welly, selecty))
            {
                soy = scaledValue(diff_poly_overhang.value +extraSelY);
                welly = selecty;
            }

            nodesList.clear();
            nodePorts.clear();
            portNames.clear();

            // Gate layer Electrical
            double gatey = scaledValue(gate_length.value /2);
            double gatex = impx;
            // Poly layers
            // left electrical
            double endPolyx = scaledValue((gate_width.value +poly_endcap.value *2)/2);
            double endPolyy = gatey;
            double endLeftOrRight = -impx;    // for horizontal transistors. Default
            double endTopOrBotton = endPolyy; // for horizontal transistors. Default
            double diffX = 0, diffY = scaledValue(gate_length.value /2+gate_contact_spacing.value +contact_size.value /2); // impy
            double xSign = 1, ySign = -1;
            double polyX = endPolyx, polyY = 0;

            if (!horizontalFlag) // swap the numbers to get vertical transistors
            {
                double tmp;
                tmp = impx; impx = impy; impy = tmp;
                tmp = wellx; wellx = welly; welly = tmp;
                tmp = sox; sox = soy; soy = tmp;
                tmp = selectx; selectx = selecty; selecty = tmp;
                tmp = gatex; gatex = gatey; gatey = tmp;
                tmp = endPolyx; endPolyx = endPolyy; endPolyy = tmp;
                tmp = diffX; diffX = diffY; diffY = tmp;
                tmp = polyX; polyX = polyY; polyY = tmp;
                tmp = xSign; xSign = ySign; ySign = tmp;
                endLeftOrRight = endPolyx;
                endTopOrBotton = -impx;
            }

            // Well layer
            Xml.NodeLayer xTranWellLayer = null;
            if (wellLayer != null)
            {
                xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED));
                nodesList.add(xTranWellLayer);
            }

            // Active layers
            nodesList.add(makeXmlNodeLayer(impx, impx, impy, impy, activeLayer, Poly.Type.FILLED, true, false, -1));
            // electrical active layers
            nodesList.add(makeXmlNodeLayer(impx, impx, impy, 0, activeLayer, Poly.Type.FILLED, false, true, 3)); // bottom
            nodesList.add(makeXmlNodeLayer(impx, impx, 0, impy, activeLayer, Poly.Type.FILLED, false, true, 1)); // top

            // Diff port
            portNames.clear();
            portNames.add(activeLayer.name);
            Xml.PrimitivePort diffTopPort = makeXmlPrimitivePort("diff-top", 90, 90, 1, minFullSize,
                diffX, -1, diffX, 1, diffY, 1, diffY, 1, portNames);
            // bottom port
            Xml.PrimitivePort diffBottomPort = makeXmlPrimitivePort("diff-bottom", 270, 90, 2, minFullSize,
                xSign*diffX, -1, xSign*diffX, 1, ySign*diffY, -1, ySign*diffY, -1, portNames);

            // Electric layers
            // Gate layer Electrical
            nodesList.add(makeXmlNodeLayer(gatex, gatex, gatey, gatey, polyGateLayer, Poly.Type.FILLED, false, true, -1));

            // Poly layers
            // left electrical
            nodesList.add(makeXmlNodeLayer(endPolyx, endLeftOrRight, endPolyy, endTopOrBotton, polyLayer, Poly.Type.FILLED, false, true, 0));
            // right electrical
            nodesList.add(makeXmlNodeLayer(endLeftOrRight, endPolyx, endTopOrBotton, endPolyy, polyLayer, Poly.Type.FILLED, false, true, 2));
            // non-electrical poly (just one poly layer)
            nodesList.add(makeXmlNodeLayer(endPolyx, endPolyx, endPolyy, endPolyy, polyLayer, Poly.Type.FILLED, true, false, -1));

            // Poly port
            portNames.clear();
            portNames.add(polyLayer.name);
            Xml.PrimitivePort polyLeftPort = makeXmlPrimitivePort("poly-left", 180, 90, 0, minFullSize,
                ySign*polyX, -1, ySign*polyX, -1, xSign*polyY, -1, xSign*polyY, 1, portNames);
            // right port
            Xml.PrimitivePort polyRightPort = makeXmlPrimitivePort("poly-right", 0, 180, 0, minFullSize,
                polyX, 1, polyX, 1, polyY, -1, polyY, 1, portNames);

            // Select layer
            Xml.NodeLayer xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selecty, selecty, selectLayer, Poly.Type.FILLED));
            nodesList.add(xTranSelLayer);

            //One (undocumented) requirement of transistors is that the ports must appear in the
            //order: Poly-left, Diff-top, Poly-right, Diff-bottom. This requirement is
            //because of the methods Technology.getTransistorGatePort(),
            //Technology.getTransistorAltGatePort(), Technology.getTransistorSourcePort(),
            //and Technology.getTransistorDrainPort().
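            /* Sketch of the resulting port list, matching the requirement stated
             * above (the numeric port-topology arguments used in the
             * makeXmlPrimitivePort calls are polys = 0, diff-top = 1, diff-bottom = 2):
             *
             *   nodePorts = [poly-left, diff-top, poly-right, diff-bottom]
             *
             * Technology.getTransistorGatePort() and friends resolve ports purely
             * by position in this list, so changing the insertion order below
             * would silently break those accessors. */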
            // diff-top = 1, diff-bottom = 2, polys=0
            // ports in the correct order: Poly-left, Diff-top, Poly-right, Diff-bottom
            nodePorts.add(polyLeftPort);
            nodePorts.add(diffTopPort);
            nodePorts.add(polyRightPort);
            nodePorts.add(diffBottomPort);

            // Standard Transistor
            Xml.PrimitiveNodeGroup n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor", func, 0, 0, 0, 0,
                new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
            g.addElement(n, name);

            // Extra transistors which don't have select nor well
            // Extra protection poly. No ports are necessary.
            if (getExtraInfoFlag())
            {
                // removing well and select for simplicity
                // nodesList.remove(xTranSelLayer);
                // nodesList.remove(xTranWellLayer);
                // // new sox and soy
                // sox = scaledValue(poly_endcap.v);
                // soy = scaledValue(diff_poly_overhang.v);
                // n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", PrimitiveNode.Function.TRANMOS, 0, 0, 0, 0,
                //     new SizeOffset(sox, sox, soy, soy), nodesListW, nodePorts, null, false);
                // g.addElement(n);

                /*************************************/
                // Short transistors
                // Adding extra transistors whose select and well are aligned with poly along the X axis
                nodesList.remove(xTranSelLayer);
                double shortSelectX = scaledValue(gate_width.value /2+poly_endcap.value);
                xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED));
                nodesList.add(xTranSelLayer);
                double shortSox = sox;
                shortSox = scaledValue(poly_endcap.value);
                if (wellLayer != null)
                {
                    nodesList.remove(xTranWellLayer);
                    xTranWellLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, welly, welly, wellLayer, Poly.Type.FILLED));
                    nodesList.add(xTranWellLayer);
                }
                n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", func, 0, 0, 0, 0,
                    new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, name + "-S");

                /*************************************/
                // Short transistors with VTH and VTL
                double vthlx = scaledValue(gate_width.value /2+vthl_diff_overhang.value);
                double vthly = scaledValue(gate_length.value /2+ vthl_poly_overhang.value);

                // VTH Transistor
                String tmp = "VTH-" + name;
                Xml.Layer vthLayer = t.findLayer(tmp);
                Xml.NodeLayer nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED);
                nodesList.add(nl);
                n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0,
                    new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, tmp + "-S");

                // VTL Transistor
                nodesList.remove(nl);
                tmp = "VTL-" + name;
                vthLayer = t.findLayer(tmp);
                nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED);
                nodesList.add(nl);
                n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0,
                    new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, tmp + "-S");

                /*************************************/
                // Transistors with extra polys
                // different select for those with extra protection layers
                nodesList.remove(xTranSelLayer);
                double endOfProtectionY = gate_length.value + poly_protection_spacing.value;
                double selectExtraY = scaledValue(gate_length.value /2 + endOfProtectionY + extraSelX); // actually is extraSelX because of the poly distance!
                xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selectExtraY, selectExtraY, selectLayer, Poly.Type.FILLED));
                nodesList.add(xTranSelLayer);

                // not sure which condition to apply.
                // It doesn't apply nwell_overhang_diff due to the extra poly
                if (DBMath.isLessThan(welly, selectExtraY))
                {
                    welly = selectExtraY;
                    soy = scaledValue(endOfProtectionY + extraSelX);
                }
                if (wellLayer != null)
                {
                    nodesList.remove(xTranWellLayer);
                    xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED));
                    nodesList.add(xTranWellLayer);
                }
                if (!horizontalFlag)
                {
                    System.out.println("Not working with !horizontal");
                    assert(false);
                }
                portNames.clear();
                portNames.add(polyLayer.name);

                // bottom or left
                Xml.NodeLayer bOrL = (makeXmlNodeLayer(gatex, gatex, DBMath.round((protectDist + 3*endPolyy)),
                    -DBMath.round(endPolyy + protectDist), polyLayer, Poly.Type.FILLED, true, false, -1/*3*/)); // port 3 for left/bottom extra poly lb=left bottom
                // Adding left
                nodesList.add(bOrL);
                n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-B", func, 0, 0, 0, 0,
                    new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, name + "-B");

                // top or right
                Xml.NodeLayer tOrR = (makeXmlNodeLayer(gatex, gatex, -DBMath.round(endPolyy + protectDist),
                    DBMath.round((protectDist + 3*endPolyy)), polyLayer, Poly.Type.FILLED, true, false, -1/*4*/)); // port 4 for right/top extra poly rt=right top
                // Adding both
                nodesList.add(tOrR);
                n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-TB", func, 0, 0, 0, 0,
                    new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, name + "-TB");

                // Adding right
                nodesList.remove(bOrL);
                n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-T", func, 0, 0, 0, 0,
                    new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, name +"-T");

                /*************************************/
                // Short transistors with OD18
                double od18x = scaledValue(gate_od18_width.value /2+od18_diff_overhang[0].value);
                double od18y = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +od18_diff_overhang[1].value);
                nodePorts.clear();
                nodesList.clear();
                prepareTransistor(gate_od18_width.value, gate_od18_length.value, poly_endcap.value, diff_poly_overhang.value,
                    gate_contact_spacing.value, contact_size.value, activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts);

                // OD18
                Xml.Layer od18Layer = t.findLayer("OD_18");
                nodesList.add(makeXmlNodeLayer(od18x, od18x, od18y, od18y, od18Layer, Poly.Type.FILLED));

                // adding short select
                shortSelectX = scaledValue(gate_od18_width.value /2+poly_endcap.value);
                selecty = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +extraSelY);
                xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED));
                nodesList.add(xTranSelLayer);

                // adding well
                if (wellLayer != null)
                {
                    xTranWellLayer = (makeXmlNodeLayer(od18x, od18x, od18y, od18y, wellLayer, Poly.Type.FILLED));
                    nodesList.add(xTranWellLayer);
                }
                sox = scaledValue(od18_diff_overhang[0].value);
                soy = scaledValue(diff_poly_overhang.value +od18_diff_overhang[1].value);
                n = makeXmlPrimitive(t.nodeGroups, "OD18-" + name + "-Transistor-S", func, 0, 0, 0, 0,
                    new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                g.addElement(n, "18-" + name + "-S");

                /*************************************/
                // Short transistors with native
                if (i==Technology.N_TYPE)
                {
                    double ntx = scaledValue(gate_nt_width.value /2+nt_diff_overhang.value);
                    double nty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +nt_diff_overhang.value);
                    nodePorts.clear();
                    nodesList.clear();
                    prepareTransistor(gate_nt_width.value, gate_nt_length.value, poly_nt_endcap.value,
                        diff_poly_overhang.value, gate_contact_spacing.value, contact_size.value,
                        activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts);

                    // NT-N
                    Xml.Layer ntLayer = t.findLayer("NT-N");
                    nodesList.add(makeXmlNodeLayer(ntx, ntx, nty, nty, ntLayer, Poly.Type.FILLED));

                    // adding short select
                    shortSelectX = scaledValue(gate_nt_width.value /2+poly_nt_endcap.value);
                    selecty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +extraSelY);
                    xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED));
                    nodesList.add(xTranSelLayer);

                    // adding well
                    if (wellLayer != null)
                    {
                        xTranWellLayer = (makeXmlNodeLayer(ntx, ntx, nty, nty, wellLayer, Poly.Type.FILLED));
                        nodesList.add(xTranWellLayer);
                    }
                    sox = scaledValue(poly_nt_endcap.value);
                    soy = scaledValue(diff_poly_overhang.value +nt_diff_overhang.value);
                    n = makeXmlPrimitive(t.nodeGroups, "NT-" + name + "-Transistor-S", func, 0, 0, 0, 0,
                        new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                    g.addElement(n, "NT-" + name + "-S");
                }

                /*************************************/
                // Poly Resistors
                nodesList.clear();
                nodePorts.clear();
                WizardField polyRL = findWizardField("poly_resistor_length");
                WizardField polyRW = findWizardField("poly_resistor_width");
                WizardField rpoS = findWizardField("rpo_contact_spacing");
                WizardField rpoODPolyEx = findWizardField("rpo_odpoly_overhang");
                WizardField rhOverhang = findWizardField("rh_odpoly_overhang");
                double resistorSpacing = contact_array_spacing.value; // using array value to guarantee proper spacing in nD cases

                // poly
                double soxNoScaled = (rpoS.value + contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value);
                double halfTotalL = scaledValue(polyRL.value /2 + soxNoScaled);
                double halfTotalW = scaledValue(polyRW.value /2);
                nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, polyLayer, Poly.Type.FILLED, true, true, -1));

                // RPO
                Xml.Layer rpoLayer = t.findLayer("RPO");
                double rpoY = scaledValue(polyRW.value /2 + rpoODPolyEx.value);
                double rpoX = scaledValue(polyRL.value /2);
                nodesList.add(makeXmlNodeLayer(rpoX, rpoX, rpoY, rpoY, rpoLayer, Poly.Type.FILLED, true, true, -1));

                // left cuts
                double cutDistance = scaledValue(rpoS.value + polyRL.value /2);
                // M1 and Poly overhang will be the same for now
                // double absVal = (contact_poly_overhang.v - via_overhang[0].v);
                double m1Distance = cutDistance - scaledValue(contact_poly_overhang.value);
                double m1Y = scaledValue(polyRW.value /2); // - absVal);
                double m1W = scaledValue(2 * contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value);
                double cutSizeHalf = scaledValue(contact_size.value /2);
                double cutEnd = cutDistance+contSize;
                double cutSpacing = scaledValue(resistorSpacing);
                double cutEnd2 = cutEnd+contSize+cutSpacing;

                portNames.clear();
                portNames.add(m1Layer.name);
                // left port
                Xml.PrimitivePort port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize,
                    -(cutEnd + cutSpacing), -1, -cutEnd, -1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames);
                nodePorts.add(port);
                // right port
                port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize,
                    cutEnd, 1, (cutEnd + cutSpacing), 1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames);
                nodePorts.add(port);
                // metal left
                nodesList.add(makeXmlNodeLayer((m1Distance + m1W), -1, -m1Distance, -1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 0));
                // right metal
                nodesList.add(makeXmlNodeLayer(-m1Distance, 1, (m1Distance + m1W), 1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 1));
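                /* Worked example of the cut-window arithmetic above, assuming
                 * scaledValue() is the identity (scale factor 1) and hypothetical
                 * inputs polyRL = 10, rpoS = 2, contact_size = 1, resistorSpacing = 1:
                 *
                 *   cutDistance = 2 + 10/2        = 7
                 *   cutEnd      = cutDistance + 1 = 8
                 *   cutEnd2     = 8 + 1 + 1       = 10
                 *
                 * so each resistor end carries a two-column contact window between
                 * cutDistance and cutEnd2, and the left/right ports sit in the gap
                 * between the columns, spanning [cutEnd, cutEnd + cutSpacing]. */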
                // select
                double selectY = scaledValue(polyRW.value /2 + rhOverhang.value);
                double selectX = scaledValue(polyRL.value /2 + soxNoScaled + extraSelX);
                nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, selectLayer, Poly.Type.FILLED, true, true, -1));

                // RH
                Xml.Layer rhLayer = t.findLayer("RH");
                nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rhLayer, Poly.Type.FILLED, true, true, -1));

                // RPDMY
                Xml.Layer rPLayer = t.findLayer("RPDMY");
                nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rPLayer, Poly.Type.FILLED, true, true, -1));

                // cuts
                nodesList.add(makeXmlMulticut(cutEnd2, -1, -cutDistance, -1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing));
                nodesList.add(makeXmlMulticut(-cutDistance, 1, cutEnd2, 1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing));

                sox = scaledValue(soxNoScaled + extraSelX);
                soy = scaledValue(rpoODPolyEx.value);
                n = makeXmlPrimitive(t.nodeGroups, name + "-Poly-RPO-Resistor", prFunc, 0, 0, 0, 0,
                    new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                g.addPinOrResistor(n, name + "-RPoly");

                /*************************************/
                // Well Resistors
                nodesList.clear();
                nodePorts.clear();
                WizardField wellRL = findWizardField("well_resistor_length");
                WizardField wellRW = findWizardField("well_resistor_width");
                WizardField rpoSelO = findWizardField("rpo_select_overlap");     // F
                WizardField rpoCoS = findWizardField("rpo_co_space_in_nwrod");   // G
                WizardField coNwrodO = findWizardField("co_nwrod_overhang");     // E
                WizardField odNwrodO = findWizardField("od_nwrod_overhang");     // D

                // Total values define RPO dimensions
                double cutEndNoScaled = /*F*/rpoSelO.value + /*G*/rpoCoS.value;
                double cutSpacingNoScaled = /*2xCut + spacing*/resistorSpacing + 2*contact_size.value;
                double activeXNoScaled = /*F+G*/cutEndNoScaled + /*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value + /*D*/odNwrodO.value;
                soxNoScaled = activeXNoScaled + rpoODPolyEx.value;
                double soyNoScaled = /*D*/odNwrodO.value + rpoODPolyEx.value;
                halfTotalL = scaledValue(wellRL.value /2 + soxNoScaled);
                halfTotalW = scaledValue(wellRW.value /2 + soyNoScaled);
                double activeWX = scaledValue(activeXNoScaled);
                double activeWY = scaledValue(wellRW.value /2 + /*D*/odNwrodO.value);

                // rpo.
                // It has two holes
                nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, rpoLayer, Poly.Type.FILLED, true, true, -1));

                // active
                nodesList.add(makeXmlNodeLayer(activeWX, activeWX, activeWY, activeWY, activeLayer, Poly.Type.FILLED, true, true, -1));

                // well
                double halfW = scaledValue(wellRW.value /2);
                double halfWellL = scaledValue(wellRL.value /2+/*F+G*/cutEndNoScaled+/*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value);
                if (i==Technology.N_TYPE)
                {
                    nodesList.add(makeXmlNodeLayer(halfWellL, halfWellL, halfW, halfW, nwellLayer, Poly.Type.FILLED, true, true, -1));
                }

                // NWDMY-LVS
                double halfL = scaledValue(wellRL.value /2);
                Xml.Layer nwdmyLayer = t.findLayer("NWDMY-LVS");
                nodesList.add(makeXmlNodeLayer(halfL, halfL, halfTotalW, halfTotalW, nwdmyLayer, Poly.Type.FILLED, true, true, -1));

                cutEnd = scaledValue(wellRL.value /2+cutEndNoScaled);
                cutSpacing = scaledValue(cutSpacingNoScaled);

                // Metal1
                m1Distance = scaledValue(wellRL.value /2 + /*F*/rpoSelO.value);
                // metal left
                nodesList.add(makeXmlNodeLayer(halfWellL, -1, -m1Distance, -1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 0));
                // right metal
                nodesList.add(makeXmlNodeLayer(-m1Distance, 1, halfWellL, 1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 1));

                // left port
                port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize,
                    -(cutEnd + cutSpacing), -1, -cutEnd, -1, -halfW, -1, halfW, 1, portNames);
                nodePorts.add(port);
                // right port
                port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize,
                    cutEnd, 1, (cutEnd + cutSpacing), 1, -halfW, -1, halfW, 1, portNames);
                nodePorts.add(port);

                sox = scaledValue(soxNoScaled);
                soy = scaledValue(soyNoScaled);
                // n = makeXmlPrimitive(t.nodeGroups, name + "-Well-RPO-Resistor", prFunc, 0, 0, 0, 0,
                //     new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false);
                // g.addPinOrResistor(n, name + "-RWell");
            }
        }

        // Aggregating all palette groups into one
        List<PaletteGroup> allGroups = new ArrayList<PaletteGroup>();
        allGroups.add(transPalette[0]); allGroups.add(transPalette[1]);
        allGroups.add(diffPalette[0]); allGroups.add(diffPalette[1]);
        allGroups.add(wellPalette[0]); allGroups.add(wellPalette[1]);
        allGroups.add(polyGroup);
        for (PaletteGroup g : metalPalette)
            allGroups.add(g);

        // Adding elements in palette
        for (PaletteGroup o : allGroups)
        {
            t.menuPalette.menuBoxes.add(o.arcs);     // arcs
            t.menuPalette.menuBoxes.add(o.pins);     // pins
            t.menuPalette.menuBoxes.add(o.elements); // contacts
        }

        // Writing GDS values
        makeLayerGDS(t, diffPLayer, String.valueOf(diff_layer));
        makeLayerGDS(t, diffNLayer, String.valueOf(diff_layer));
        makeLayerGDS(t, pplusLayer, String.valueOf(pplus_layer));
        makeLayerGDS(t, nplusLayer, String.valueOf(nplus_layer));
        makeLayerGDS(t, nwellLayer, String.valueOf(nwell_layer));
        makeLayerGDS(t, deviceMarkLayer, String.valueOf(marking_layer));
        makeLayerGDS(t, polyConLayer, String.valueOf(contact_layer));
        makeLayerGDS(t, diffConLayer, String.valueOf(contact_layer));
        makeLayerGDS(t, polyLayer, String.valueOf(poly_layer));
        makeLayerGDS(t, polyGateLayer, String.valueOf(poly_layer));

        for (int i = 0; i < num_metal_layers; i++)
        {
            Xml.Layer met = metalLayers.get(i);
            makeLayerGDS(t, met, String.valueOf(metal_layers[i]));
            if (getExtraInfoFlag())
            {
                // Type is always 1
                makeLayerGDS(t, dummyMetalLayers.get(i), metal_layers[i].value + "/1");
                // exclusion always takes 150
                makeLayerGDS(t, exclusionMetalLayers.get(i), "150/" + (i + 1));
            }
            if (i > num_metal_layers - 2) continue;
            Xml.Layer via = viaLayers.get(i);
            makeLayerGDS(t, via,
                String.valueOf(via_layers[i]));
        }

        // Writing Layer Rules
        for (Xml.Layer l : diffLayers)
        {
            makeLayerRuleMinWid(t, l, diff_width);
            makeLayersRule(t, l, DRCTemplate.DRCRuleType.SPACING, diff_spacing.rule, diff_spacing.value);
        }
        WizardField[] plus_diff = {pplus_overhang_diff, nplus_overhang_diff};
        WizardField[] plus_width = {pplus_width, nplus_width};
        WizardField[] plus_spacing = {pplus_spacing, nplus_spacing};
        for (int i = 0; i < plusLayers.length; i++)
        {
            makeLayerRuleMinWid(t, plusLayers[i], plus_width[i]);
            makeLayersRuleSurround(t, plusLayers[i], diffLayers[i], plus_diff[i].rule, plus_diff[i].value);
            makeLayersRule(t, plusLayers[i], DRCTemplate.DRCRuleType.SPACING, plus_spacing[i].rule, plus_spacing[i].value);
        }
        Xml.Layer[] wells = {pwellLayer, nwellLayer};
        for (Xml.Layer w : wells)
        {
            makeLayerRuleMinWid(t, w, nwell_width);
            makeLayersRuleSurround(t, w, diffPLayer, nwell_overhang_diff_p.rule, nwell_overhang_diff_p.value);
            makeLayersRuleSurround(t, w, diffNLayer, nwell_overhang_diff_n.rule, nwell_overhang_diff_n.value);
            makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, nwell_spacing.rule, nwell_spacing.value);
        }
        Xml.Layer[] polys = {polyLayer, polyGateLayer};
        for (Xml.Layer w : polys)
        {
            makeLayerRuleMinWid(t, w, poly_width);
            makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, poly_spacing.rule, poly_spacing.value);
        }

        // Simple spacing rules included here
        for (int i = 0; i < num_metal_layers; i++)
        {
            Xml.Layer met = metalLayers.get(i);
            makeLayerRuleMinWid(t, met, metal_width[i]);
            makeLayersRule(t, met, DRCTemplate.DRCRuleType.SPACING, metal_spacing[i].rule, metal_spacing[i].value);
            if (i >= num_metal_layers - 1) continue;
            Xml.Layer via = viaLayers.get(i);
            makeLayerRuleMinWid(t, via, via_size[i]);
            makeLayersRule(t, via, DRCTemplate.DRCRuleType.SPACING, via_inline_spacing[i].rule, via_inline_spacing[i].value);
            // makeLayersRule(t, via, DRCTemplate.DRCRuleType.UCONSPA2D, via_array_spacing[i]);
        }

        // wide metal rules
        for (WideWizardField w : wide_metal_spacing)
        {
            for (String layerName : w.names)
            {
                Xml.Layer layer = t.findLayer(layerName);
                assert(layer != null);
                makeLayersWideRule(t, layer, DRCTemplate.DRCRuleType.SPACING, w.rule, w.value, w.maxW, w.minLen);
            }
        }

        // Finish menu with Pure, Misc and Cell
        List<Object> l = new ArrayList<Object>();
        l.add(new String("Pure")); t.menuPalette.menuBoxes.add(l);
        l = new ArrayList<Object>();
        l.add(new String("Misc.")); t.menuPalette.menuBoxes.add(l);
        l = new ArrayList<Object>();
        l.add(new String("Cell")); t.menuPalette.menuBoxes.add(l);

        // Sort before writing data. We might need to sort primitive nodes in group before...
        Collections.sort(t.nodeGroups, primitiveNodeGroupSort);
        for (Xml.PrimitiveNodeGroup nodeGroup: t.nodeGroups)
        {
            // sort NodeLayer before writing them
            Collections.sort(nodeGroup.nodeLayers, nodeLayerSort);
        }

        // finally write the file
        boolean includeDateAndVersion = User.isIncludeDateAndVersionInOutput();
        String copyrightMessage = IOTool.isUseCopyrightMessage() ? IOTool.getCopyrightMessage() : null;
        t.writeXml(fileName, includeDateAndVersion, copyrightMessage);
    }
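    /* The method above finishes by sorting t.nodeGroups (and each group's
     * nodeLayers) and handing the assembled Xml.Technology to t.writeXml().
     * A minimal usage sketch, assuming an already-populated wizard-data object
     * (variable name hypothetical; dumpXMLFile(String) throws IOException,
     * as declared below):
     *
     *   try {
     *       wizardData.dumpXMLFile("/tmp/mytech.xml");
     *   } catch (IOException e) {
     *       System.out.println("Technology XML dump failed: " + e.getMessage());
     *   }
     */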
public void dumpXMLFile(String fileName) throws IOException { Xml.Technology t = new Xml.Technology(); t.techName = getTechName(); t.shortTechName = getTechName(); t.description = getTechDescription(); t.minNumMetals = t.maxNumMetals = t.defaultNumMetals = getNumMetalLayers(); t.scaleValue = getStepSize(); t.scaleRelevant = true; t.resolutionValue = getResolution(); // t.scaleRelevant = isScaleRelevant(); t.defaultFoundry = "NONE"; t.minResistance = 1.0; t.minCapacitance = 0.1; // menus t.menuPalette = new Xml.MenuPalette(); t.menuPalette.numColumns = 3; /** RULES **/ Xml.Foundry f = new Xml.Foundry(); f.name = Foundry.Type.NONE.getName(); t.foundries.add(f); // LAYER COLOURS Color [] metal_colour = new Color[] { new Color(0,150,255), // cyan/blue new Color(148,0,211), // purple new Color(255,215,0), // yellow new Color(132,112,255), // mauve new Color(255,160,122), // salmon new Color(34,139,34), // dull green new Color(178,34,34), // dull red new Color(34,34,178), // dull blue new Color(153,153,153), // light gray new Color(102,102,102) // dark gray }; Color poly_colour = new Color(255,155,192); // pink Color diff_colour = new Color(107,226,96); // light green Color via_colour = new Color(205,205,205); // lighter gray Color contact_colour = new Color(100,100,100); // darker gray Color nplus_colour = new Color(224,238,224); Color pplus_colour = new Color(224,224,120); Color nwell_colour = new Color(140,140,140); // Five transparent colors: poly_colour, diff_colour, metal_colour[0->2] Color[] colorMap = {poly_colour, diff_colour, metal_colour[0], metal_colour[1], metal_colour[2]}; for (int i = 0; i < colorMap.length; i++) { Color transparentColor = colorMap[i]; t.transparentLayers.add(transparentColor); } // Layers List<Xml.Layer> metalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> dummyMetalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> exclusionMetalLayers = new ArrayList<Xml.Layer>(); List<Xml.Layer> viaLayers = new ArrayList<Xml.Layer>(); Map<Xml.Layer,WizardField> layer_width = new LinkedHashMap<Xml.Layer,WizardField>(); int[] nullPattern = new int[] {0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000}; int[] dexclPattern = new int[] { 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808}; // X X for (int i = 0; i < num_metal_layers; i++) { // Adding the metal int metalNum = i + 1; double opacity = (75 - metalNum * 5)/100.0; int metLayHigh = i / 10; int metLayDig = i % 10; int r = metal_colour[metLayDig].getRed() * (10-metLayHigh) / 10; int g = metal_colour[metLayDig].getGreen() * (10-metLayHigh) / 10; int b = metal_colour[metLayDig].getBlue() * (10-metLayHigh) / 10; int tcol = 0; int[] pattern = null; switch (metLayDig) { case 0: tcol = 3; break; case 1: tcol = 4; break; case 2: tcol = 5; break; case 3: pattern = new int[] {0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000, // 0xFFFF, // XXXXXXXXXXXXXXXX 0x0000}; break; case 4: pattern = new int[] { 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, 
// X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444, // X X X X 0x8888, // X X X X 0x1111, // X X X X 0x2222, // X X X X 0x4444}; break; case 5: pattern = new int[] { 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555, // X X X X X X X X 0x1111, // X X X X 0xFFFF, // XXXXXXXXXXXXXXXX 0x1111, // X X X X 0x5555}; break; case 6: pattern = new int[] { 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111, // X X X X 0x8888, // X X X X 0x4444, // X X X X 0x2222, // X X X X 0x1111}; break; case 7: pattern = new int[] { 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000}; break; case 8: pattern = new int[] {0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888, // X X X X 0x0000, // 0x2222, // X X X X 0x0000, // 0x8888}; // X X X X break; case 9: pattern = new int[] { 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555, // X X X X X X X X 0x5555}; break; } boolean onDisplay = true, onPrinter = true; if (pattern == null) { pattern = nullPattern; onDisplay = false; onPrinter = false; } EGraphics graph = new EGraphics(onDisplay, onPrinter, null, tcol, r, g, b, opacity, true, pattern); Layer.Function fun = Layer.Function.getMetal(metalNum); if (fun == null) throw new IOException("invalid number of metals"); String metalName = "Metal-"+metalNum; Xml.Layer layer = makeXmlLayer(t.layers, layer_width, metalName, fun, 0, graph, metal_width[i], true, true); metalLayers.add(layer); if (getExtraInfoFlag()) { // dummy layers graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, false, nullPattern); layer = makeXmlLayer(t.layers, "DMY-"+metalName, Layer.Function.getDummyMetal(metalNum), 0, graph, 5*metal_width[i].value, true, false); dummyMetalLayers.add(layer); // exclusion layers for metals graph = new EGraphics(true, true, null, tcol, r, g, b, opacity, true, dexclPattern); layer = makeXmlLayer(t.layers, "DEXCL-"+metalName, Layer.Function.getDummyExclMetal(i), 0, graph, 2*metal_width[i].value, true, false); exclusionMetalLayers.add(layer); } } // Vias for (int i = 0; i < num_metal_layers - 1; i++) { // Adding the metal int metalNum = i + 1; // adding the via int r = via_colour.getRed(); int g = via_colour.getGreen(); int b = via_colour.getBlue(); double opacity = 0.7; EGraphics graph = new EGraphics(false, false, null, 0, r, g, b, opacity, true, nullPattern); Layer.Function fun = Layer.Function.getContact(metalNum+1); //via contact starts with CONTACT2 if (fun == null) throw new IOException("invalid number of vias"); viaLayers.add(makeXmlLayer(t.layers, layer_width, "Via-"+metalNum, fun, Layer.Function.CONMETAL, 
graph, via_size[i], true, false)); } // Poly String polyN = poly_layer.name; EGraphics graph = new EGraphics(false, false, null, 1, 0, 0, 0, 1, true, nullPattern); Xml.Layer polyLayer = makeXmlLayer(t.layers, layer_width, polyN, Layer.Function.POLY1, 0, graph, poly_width, true, true); // PolyGate Xml.Layer polyGateLayer = makeXmlLayer(t.layers, layer_width, polyN+"Gate", Layer.Function.GATE, 0, graph, poly_width, true, false); // false for the port otherwise it won't find any type if (getExtraInfoFlag()) { // exclusion layer poly graph = new EGraphics(true, true, null, 1, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionPolyLayer = makeXmlLayer(t.layers, "DEXCL-"+polyN, Layer.Function.DEXCLPOLY1, 0, graph, 2*poly_width.value, true, false); makeLayerGDS(t, exclusionPolyLayer, "150/21"); } // PolyCon and DiffCon graph = new EGraphics(false, false, null, 0, contact_colour.getRed(), contact_colour.getGreen(), contact_colour.getBlue(), 0.5, true, nullPattern); // PolyCon Xml.Layer polyConLayer = makeXmlLayer(t.layers, layer_width, "Poly-Cut", Layer.Function.CONTACT1, Layer.Function.CONPOLY, graph, contact_size, true, false); // DiffCon Xml.Layer diffConLayer = makeXmlLayer(t.layers, layer_width, diff_layer.name+"-Cut", Layer.Function.CONTACT1, Layer.Function.CONDIFF, graph, contact_size, true, false); List<String> portNames = new ArrayList<String>(); // P-Diff and N-Diff graph = new EGraphics(false, false, null, 2, 0, 0, 0, 1, true, nullPattern); // N-Diff Xml.Layer diffNLayer = makeXmlLayer(t.layers, layer_width, "N-"+ diff_layer.name, Layer.Function.DIFFN, 0, graph, diff_width, true, true, "N-"+ diff_layer.name, "N-Well", "S-N-Well"); // P-Diff dd Xml.Layer diffPLayer = makeXmlLayer(t.layers, layer_width, "P-"+ diff_layer.name, Layer.Function.DIFFP, 0, graph, diff_width, true, true, "P-"+ diff_layer.name, "P-Well", "S-P-Well"); if (getExtraInfoFlag()) { // exclusion layer N/P diff graph = new EGraphics(true, true, null, 2, 0, 0, 0, 1, true, dexclPattern); Xml.Layer exclusionDiffPLayer = makeXmlLayer(t.layers, "DEXCL-P-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); Xml.Layer exclusionDiffNLayer = makeXmlLayer(t.layers, "DEXCL-N-"+ diff_layer.name, Layer.Function.DEXCLDIFF, 0, graph, 2*diff_width.value, true, false); makeLayerGDS(t, exclusionDiffPLayer, "150/20"); makeLayerGDS(t, exclusionDiffNLayer, "150/20"); } // NPlus and PPlus int [] patternSlash = new int[] { 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808, // X X 0x1010, // X X 0x2020, // X X 0x4040, // X X 0x8080, // X X 0x0101, // X X 0x0202, // X X 0x0404, // X X 0x0808}; int [] patternBackSlash = new int[] { 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404, // X X 0x0202, // X X 0x0101, // X X 0x8080, // X X 0x4040, // X X 0x2020, // X X 0x1010, // X X 0x0808, // X X 0x0404}; int[] patternDots = new int[] { 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000}; // int[] patternDotsShift = new int[] { 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202, // X X 0x0000, // 0x2020, // X X 0x0000, // 0x0202}; // X X // NPlus graph = new EGraphics(true, true, null, 0, nplus_colour.getRed(), nplus_colour.getGreen(), 
nplus_colour.getBlue(), 1, true, patternSlash); Xml.Layer nplusLayer = makeXmlLayer(t.layers, layer_width, nplus_layer.name, Layer.Function.IMPLANTN, 0, graph, nplus_width, true, false); // PPlus graph = new EGraphics(true, true, null, 0, pplus_colour.getRed(), pplus_colour.getGreen(), pplus_colour.getBlue(), 1, true, patternDots); Xml.Layer pplusLayer = makeXmlLayer(t.layers, layer_width, pplus_layer.name, Layer.Function.IMPLANTP, 0, graph, pplus_width, true, false); // N-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternDotsShift); Xml.Layer nwellLayer = makeXmlLayer(t.layers, layer_width, nwell_layer.name, Layer.Function.WELLN, 0, graph, nwell_width, true, false); // P-Well graph = new EGraphics(true, true, null, 0, nwell_colour.getRed(), nwell_colour.getGreen(), nwell_colour.getBlue(), 1, true, patternBackSlash); Xml.Layer pwellLayer = makeXmlLayer(t.layers, layer_width, "P-Well", Layer.Function.WELLP, 0, graph, nwell_width, true, false); // DeviceMark graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer deviceMarkLayer = makeXmlLayer(t.layers, layer_width, "DeviceMark", Layer.Function.CONTROL, 0, graph, nplus_width, true, false); // Extra layers if (getExtraInfoFlag()) { for (LayerInfo info : extraLayers) { graph = null; // either color or template assert (info.graphicsTemplate == null || info.graphicsColor == null); if (info.graphicsTemplate != null) { // look for layer name and get its EGraphics for (Xml.Layer l : t.layers) { if (l.name.equals(info.graphicsTemplate)) { graph = l.desc; break; } } if (graph == null) System.out.println("No template layer " + info.graphicsTemplate + " found"); } else if (info.graphicsColor != null) { graph = new EGraphics(true, true, info.graphicsOutline, 0, info.graphicsColor.getRed(), info.graphicsColor.getGreen(), info.graphicsColor.getBlue(), 1, true, info.graphicsPattern); } if (graph == null) graph = new EGraphics(false, false, null, 0, 255, 0, 0, 0.4, true, nullPattern); Xml.Layer layer = makeXmlLayer(t.layers, layer_width, info.name, Layer.Function.ART, 0, graph, nplus_width, true, false); makeLayerGDS(t, layer, String.valueOf(info)); } } // Palette elements should be added at the end so they will appear in groups PaletteGroup[] metalPalette = new PaletteGroup[num_metal_layers]; // write arcs // metal arcs for(int i=1; i<=num_metal_layers; i++) { double ant = (int)Math.round(metal_antenna_ratio[i-1]) | 200; PaletteGroup group = new PaletteGroup(); metalPalette[i-1] = group; group.addArc(makeXmlArc(t, "Metal-"+i, ArcProto.Function.getContact(i), ant, makeXmlArcLayer(metalLayers.get(i-1), metal_width[i-1]))); } /**************************** POLY Nodes/Arcs ***********************************************/ // poly arc double ant = (int)Math.round(poly_antenna_ratio) | 200; PaletteGroup polyGroup = new PaletteGroup(); polyGroup.addArc(makeXmlArc(t, polyLayer.name, ArcProto.Function.getPoly(1), ant, makeXmlArcLayer(polyLayer, poly_width))); // poly pin double hla = scaledValue(poly_width.value / 2); polyGroup.addPinOrResistor(makeXmlPrimitivePin(t, polyLayer.name, hla, null, // new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, polyLayer, Poly.Type.CROSSED)), null); // poly contact portNames.clear(); portNames.add(polyLayer.name); portNames.add(metalLayers.get(0).name); hla = scaledValue((contact_size.value /2 + contact_poly_overhang.value)); Xml.Layer m1Layer = metalLayers.get(0); double contSize = 
scaledValue(contact_size.value); double contSpacing = scaledValue(contact_spacing.value); double contArraySpacing = scaledValue(contact_array_spacing.value); double metal1Over = scaledValue(contact_size.value /2 + contact_metal_overhang_all_sides.value); // only for standard cases, when getExtraInfoFlag() is false if (!getExtraInfoFlag()) { polyGroup.addElement(makeContactSeries(t.nodeGroups, polyLayer.name, contSize, polyConLayer, contSpacing, contArraySpacing, scaledValue(contact_poly_overhang.value), polyLayer, scaledValue(via_overhang[0].value), m1Layer), null); } /**************************** N/P-Diff Nodes/Arcs/Group ***********************************************/ PaletteGroup[] diffPalette = new PaletteGroup[2]; diffPalette[0] = new PaletteGroup(); diffPalette[1] = new PaletteGroup(); PaletteGroup[] wellPalette = new PaletteGroup[2]; wellPalette[0] = new PaletteGroup(); wellPalette[1] = new PaletteGroup(); // ndiff/pdiff pins hla = scaledValue((contact_size.value /2 + diff_contact_overhang.value)); double nsel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_diff.value); double psel = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_diff.value); double nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_p.value); double nso = scaledValue(nwell_overhang_diff_p.value /*+ diff_contact_overhang.v*/); // valid for elements that have nwell layers double pso = (!pSubstrateProcess)?nso:scaledValue(nplus_overhang_diff.value/* + diff_contact_overhang.v*/); // ndiff/pdiff contacts String[] diffNames = {"P", "N"}; double[] sos = {nso, pso}; double[] sels = {psel, nsel}; Xml.Layer[] diffLayers = {diffPLayer, diffNLayer}; Xml.Layer[] plusLayers = {pplusLayer, nplusLayer}; // Active and poly contacts. They are defined before the Full types for (Map.Entry<String,List<Contact>> e : otherContacts.entrySet()) { // generic contacts String name = null; for (Contact c : e.getValue()) { Xml.Layer ly = null, lx = null; Xml.Layer conLay = diffConLayer; PaletteGroup g = null; ContactNode metalLayer = c.layers.get(0); ContactNode otherLayer = c.layers.get(1); String extraName = ""; if (!TextUtils.isANumber(metalLayer.layer)) // horizontal must be! { assert (TextUtils.isANumber(otherLayer.layer)); metalLayer = c.layers.get(1); otherLayer = c.layers.get(0); } int m1 = Integer.valueOf(metalLayer.layer); ly = metalLayers.get(m1-1); String layerName = otherLayer.layer; if (layerName.equals(diffLayers[0].name)) { lx = diffLayers[0]; g = diffPalette[0]; extraName = "P"; } else if (layerName.equals(diffLayers[1].name)) { lx = diffLayers[1]; g = diffPalette[1]; extraName = "N"; } else if (layerName.equals(polyLayer.name)) { lx = polyLayer; conLay = polyConLayer; g = polyGroup; // extraName = "Poly"; // Poly as name is too long!
} else assert(false); // it should not happen double h1x = scaledValue(contact_size.value /2 + metalLayer.overX.value); double h1y = scaledValue(contact_size.value /2 + metalLayer.overY.value); double h2x = scaledValue(contact_size.value /2 + otherLayer.overX.value); double h2y = scaledValue(contact_size.value /2 + otherLayer.overY.value); double longX = (Math.abs(metalLayer.overX.value - otherLayer.overX.value)); double longY = (Math.abs(metalLayer.overY.value - otherLayer.overY.value)); PrimitiveNode.Function func = PrimitiveNode.Function.CONTACT; // Xml.NodeLayer extraN = null; Xml.NodeLayer[] nodes = new Xml.NodeLayer[c.layers.size() + 1]; // all plus cut int count = 0; // cut nodes[count++] = makeXmlMulticut(conLay, contSize, contSpacing, contArraySpacing); // metal nodes[count++] = makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED); // layer1 // active or poly nodes[count++] = makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED); // layer2 Xml.Layer otherLayerPort = lx; for (int i = 2; i < c.layers.size(); i++) // rest of layers. Either select or well. { ContactNode node = c.layers.get(i); Xml.Layer lz = t.findLayer(node.layer); if ((lz == pwellLayer && lx == diffLayers[0]) || (lz == nwellLayer && lx == diffLayers[1])) // well contact { otherLayerPort = lz; if (lz == pwellLayer) { g = wellPalette[0]; func = getWellContactFunction(Technology.P_TYPE); extraName = "PW"; // W for well } else // nwell { g = wellPalette[1]; func = getWellContactFunction(Technology.N_TYPE); extraName = "NW"; // W for well } } if (pSubstrateProcess && lz == pwellLayer) continue; // skip this layer double h3x = scaledValue(contact_size.value /2 + node.overX.value); double h3y = scaledValue(contact_size.value /2 + node.overY.value); nodes[count++] = makeXmlNodeLayer(h3x, h3x, h3y, h3y, lz, Poly.Type.FILLED); // This assumes no well is defined double longXLocal = (Math.abs(node.overX.value - otherLayer.overX.value)); double longYLocal = (Math.abs(node.overY.value - otherLayer.overY.value)); if (DBMath.isGreaterThan(longXLocal, longX)) longX = longXLocal; if (DBMath.isGreaterThan(longYLocal, longY)) longY = longYLocal; } longX = scaledValue(longX); longY = scaledValue(longY); // port names now, after determining whether it is a diff or well contact portNames.clear(); // if (!pSubstrateProcess || otherLayerPort == pwellLayer) portNames.add(otherLayerPort.name); portNames.add(ly.name); // should always represent the metal1 name = ly.name + "-" + otherLayerPort.name; // some primitives might not have prefix. "-" should not be in the prefix to avoid // being displayed in the palette String p = (c.prefix == null || c.prefix.equals("")) ?
"" : c.prefix + "-"; g.addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, func, -1, -1, new SizeOffset(longX, longX, longY, longY), portNames, nodes), p + extraName); // contact } } // ndiff/pdiff contact for (int i = 0; i < 2; i++) { portNames.clear(); portNames.add(diffLayers[i].name); portNames.add(m1Layer.name); String composeName = diffNames[i] + "-" + diff_layer.name; //Diff"; Xml.NodeLayer wellNode, wellNodePin; ArcProto.Function arcF; Xml.ArcLayer arcL; WizardField arcVal; if (i == Technology.P_TYPE) { wellNodePin = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED); wellNode = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED); arcF = ArcProto.Function.DIFFP; arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p); arcVal = pplus_overhang_diff; } else { wellNodePin = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED):null; wellNode = (!pSubstrateProcess)?makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED):null; arcF = ArcProto.Function.DIFFN; arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null; arcVal = nplus_overhang_diff; } PaletteGroup diffG = diffPalette[i]; // active arc diffG.addArc(makeXmlArc(t, composeName, arcF, 0, makeXmlArcLayer(diffLayers[i], diff_width), makeXmlArcLayer(plusLayers[i], diff_width, arcVal), arcL)); // active pin diffG.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla, new SizeOffset(sos[i], sos[i], sos[i], sos[i]), null, makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED), makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED), wellNodePin), null); // F stands for full (all layers) diffG.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, PrimitiveNode.Function.CONTACT, hla, hla, new SizeOffset(sos[i], sos[i], sos[i], sos[i]), portNames, makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // meta1 layer makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.FILLED), // select layer wellNode, // well layer makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-" + diffNames[i]); // contact } /**************************** N/P-Well Contacts ***********************************************/ nwell = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nwell_overhang_diff_n.value); nso = scaledValue(/*diff_contact_overhang.v +*/ nwell_overhang_diff_n.value); // valid for elements that have nwell layers pso = (!pSubstrateProcess)?nso:scaledValue(/*diff_contact_overhang.v +*/ nplus_overhang_diff.value); double[] wellSos = {pso, nso}; Xml.Layer[] wellLayers = {pwellLayer, nwellLayer}; double nselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + nplus_overhang_strap.value); double pselW = scaledValue(contact_size.value /2 + diff_contact_overhang.value + pplus_overhang_strap.value); double[] wellSels = {pselW, nselW}; // nwell/pwell contact for (int i = 0; i < 2; i++) { String composeName = diffNames[i] + "-Well"; Xml.NodeLayer wellNodeLayer = null, wellNodePinLayer = null; PaletteGroup g = wellPalette[i]; PrimitiveNode.Function func = getWellContactFunction(i); Xml.ArcLayer arcL; WizardField arcVal; portNames.clear(); if (i == Technology.P_TYPE) { if (!pSubstrateProcess) { portNames.add(pwellLayer.name); 
wellNodePinLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.CROSSED); wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, pwellLayer, Poly.Type.FILLED); } arcL = (!pSubstrateProcess)?makeXmlArcLayer(pwellLayer, diff_width, nwell_overhang_diff_p):null; arcVal = pplus_overhang_diff; } else { portNames.add(nwellLayer.name); wellNodePinLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.CROSSED); wellNodeLayer = makeXmlNodeLayer(nwell, nwell, nwell, nwell, nwellLayer, Poly.Type.FILLED); arcL = makeXmlArcLayer(nwellLayer, diff_width, nwell_overhang_diff_p); arcVal = nplus_overhang_diff; } portNames.add(m1Layer.name); // three-layer arcs. This is the first port defined so it will be the default in the palette g.addArc(makeXmlArc(t, composeName, ArcProto.Function.WELL, 0, makeXmlArcLayer(diffLayers[i], diff_width), makeXmlArcLayer(plusLayers[i], diff_width, arcVal), arcL)); // simple arc. S for simple g.addArc(makeXmlArc(t, "S-"+composeName, ArcProto.Function.WELL, 0, makeXmlArcLayer(wellLayers[i], diff_width, nwell_overhang_diff_p))); // well pin List<String> arcNames = new ArrayList<String>(); arcNames.add(composeName); arcNames.add("S-"+composeName); g.addPinOrResistor(makeXmlPrimitivePin(t, composeName, hla, new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), arcNames, makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.CROSSED), makeXmlNodeLayer(sels[i], sels[i], sels[i], sels[i], plusLayers[i], Poly.Type.CROSSED), wellNodePinLayer), null); // well contact // F stands for full g.addElement(makeXmlPrimitiveCon(t.nodeGroups, "F-"+composeName, func, hla, hla, new SizeOffset(wellSos[i], wellSos[i], wellSos[i], wellSos[i]), portNames, makeXmlNodeLayer(metal1Over, metal1Over, metal1Over, metal1Over, m1Layer, Poly.Type.FILLED), // metal1 layer makeXmlNodeLayer(hla, hla, hla, hla, diffLayers[i], Poly.Type.FILLED), // active layer makeXmlNodeLayer(wellSels[i], wellSels[i], wellSels[i], wellSels[i], plusLayers[i], Poly.Type.FILLED), // select layer wellNodeLayer, // well layer makeXmlMulticut(diffConLayer, contSize, contSpacing, contArraySpacing)), "Full-"+diffNames[i] + "W"); // contact } /**************************** Metals Nodes/Arcs ***********************************************/ // Pins and contacts for(int i=1; i<num_metal_layers; i++) { hla = scaledValue(metal_width[i-1].value / 2); Xml.Layer lb = metalLayers.get(i-1); Xml.Layer lt = metalLayers.get(i); PaletteGroup group = metalPalette[i-1]; // structure created by the arc definition // Pin bottom metal group.addPinOrResistor(makeXmlPrimitivePin(t, lb.name, hla, null, //new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, lb, Poly.Type.CROSSED)), null); if (i == num_metal_layers - 1) // last pin!
{ metalPalette[i].addPinOrResistor(makeXmlPrimitivePin(t, lt.name, hla, null, //new SizeOffset(hla, hla, hla, hla), null, makeXmlNodeLayer(hla, hla, hla, hla, lt, Poly.Type.CROSSED)), null); } if (!getExtraInfoFlag()) { // original contact Square // via Xml.Layer via = viaLayers.get(i-1); double viaSize = scaledValue(via_size[i-1].value); double viaSpacing = scaledValue(via_inline_spacing[i-1].value); double viaArraySpacing = scaledValue(via_array_spacing[i-1].value); String name = lb.name + "-" + lt.name; double longDist = scaledValue(via_overhang[i-1].value); group.addElement(makeContactSeries(t.nodeGroups, name, viaSize, via, viaSpacing, viaArraySpacing, longDist, lt, longDist, lb), null); } } // metal contacts for (Map.Entry<String,List<Contact>> e : metalContacts.entrySet()) { // generic contacts for (Contact c : e.getValue()) { // We know those layer names are numbers! assert(c.layers.size() == 2); ContactNode verticalLayer = c.layers.get(0); ContactNode horizontalLayer = c.layers.get(1); int i = Integer.valueOf(verticalLayer.layer); int j = Integer.valueOf(horizontalLayer.layer); Xml.Layer ly = metalLayers.get(i-1); Xml.Layer lx = metalLayers.get(j-1); String name = (j>i)?ly.name + "-" + lx.name:lx.name + "-" + ly.name; int via = (j>i)?i:j; double metalContSize = scaledValue(via_size[via-1].value); double spacing = scaledValue(via_inline_spacing[via-1].value); double arraySpacing = scaledValue(via_array_spacing[via-1].value); Xml.Layer metalConLayer = viaLayers.get(via-1); double h1x = scaledValue(via_size[via-1].value /2 + verticalLayer.overX.value); double h1y = scaledValue(via_size[via-1].value /2 + verticalLayer.overY.value); double h2x = scaledValue(via_size[via-1].value /2 + horizontalLayer.overX.value); double h2y = scaledValue(via_size[via-1].value /2 + horizontalLayer.overY.value); // double longX = scaledValue(DBMath.isGreaterThan(verticalLayer.overX.v, horizontalLayer.overX.v) ? verticalLayer.overX.v : horizontalLayer.overX.v); // double longY = scaledValue(DBMath.isGreaterThan(verticalLayer.overY.v, horizontalLayer.overY.v) ? verticalLayer.overY.v : horizontalLayer.overY.v); double longX = scaledValue(Math.abs(verticalLayer.overX.value - horizontalLayer.overX.value)); double longY = scaledValue(Math.abs(verticalLayer.overY.value - horizontalLayer.overY.value)); portNames.clear(); portNames.add(lx.name); portNames.add(ly.name); // some primitives might not have prefix. "-" should not be in the prefix to avoid // being displayed in the palette String p = (c.prefix == null || c.prefix.equals("")) ? 
"" : c.prefix + "-"; metalPalette[via-1].addElement(makeXmlPrimitiveCon(t.nodeGroups, p + name, PrimitiveNode.Function.CONTACT, -1, -1, new SizeOffset(longX, longX, longY, longY), portNames, makeXmlNodeLayer(h1x, h1x, h1y, h1y, ly, Poly.Type.FILLED), // layer1 makeXmlNodeLayer(h2x, h2x, h2y, h2y, lx, Poly.Type.FILLED), // layer2 makeXmlMulticut(metalConLayer, metalContSize, spacing, arraySpacing)), c.prefix); // contact } } /**************************** Transistors ***********************************************/ /** Transistors **/ // write the transistors List<Xml.NodeLayer> nodesList = new ArrayList<Xml.NodeLayer>(); List<Xml.PrimitivePort> nodePorts = new ArrayList<Xml.PrimitivePort>(); EPoint minFullSize = null; //EPoint.fromLambda(0, 0); // default zero horizontalFlag PaletteGroup[] transPalette = new PaletteGroup[2]; for(int i = 0; i < 2; i++) { String name; double selecty = 0, selectx = 0; Xml.Layer wellLayer = null, activeLayer, selectLayer; double sox = 0, soy = 0; double impx = scaledValue((gate_width.value)/2); double impy = scaledValue((gate_length.value +diff_poly_overhang.value *2)/2); double nwell_overhangX = 0, nwell_overhangY = 0; PaletteGroup g = new PaletteGroup(); transPalette[i] = g; double protectDist = scaledValue(poly_protection_spacing.value); double extraSelX = 0, extraSelY = 0; PrimitiveNode.Function func = null, prFunc = null; if (i==Technology.P_TYPE) { name = "P"; nwell_overhangY = nwell_overhangX = nwell_overhang_diff_n.value; wellLayer = nwellLayer; activeLayer = diffPLayer; selectLayer = pplusLayer; extraSelX = pplus_overhang_poly.value; extraSelY = pplus_overhang_diff.value; func = PrimitiveNode.Function.TRAPMOS; prFunc = PrimitiveNode.Function.RESPPOLY; } else { name = "N"; activeLayer = diffNLayer; selectLayer = nplusLayer; extraSelX = nplus_overhang_poly.value; extraSelY = nplus_overhang_diff.value; func = PrimitiveNode.Function.TRANMOS; prFunc = PrimitiveNode.Function.RESNPOLY; if (!pSubstrateProcess) { nwell_overhangY = nwell_overhangX = nwell_overhang_diff_p.value; wellLayer = pwellLayer; } else { nwell_overhangX = poly_endcap.value +extraSelX; nwell_overhangY = extraSelY; } } selectx = scaledValue(gate_width.value /2+poly_endcap.value +extraSelX); selecty = scaledValue(gate_length.value /2+diff_poly_overhang.value +extraSelY); // Using P values in transistors double wellx = scaledValue((gate_width.value /2+nwell_overhangX)); double welly = scaledValue((gate_length.value /2+diff_poly_overhang.value +nwell_overhangY)); sox = scaledValue(nwell_overhangX); soy = scaledValue(diff_poly_overhang.value +nwell_overhangY); if (DBMath.isLessThan(wellx, selectx)) { sox = scaledValue(poly_endcap.value +extraSelX); wellx = selectx; } if (DBMath.isLessThan(welly, selecty)) { soy = scaledValue(diff_poly_overhang.value +extraSelY); welly = selecty; } nodesList.clear(); nodePorts.clear(); portNames.clear(); // Gate layer Electrical double gatey = scaledValue(gate_length.value /2); double gatex = impx; // Poly layers // left electrical double endPolyx = scaledValue((gate_width.value +poly_endcap.value *2)/2); double endPolyy = gatey; double endLeftOrRight = -impx; // for horizontal transistors. Default double endTopOrBotton = endPolyy; // for horizontal transistors. 
Default double diffX = 0, diffY = scaledValue(gate_length.value /2+gate_contact_spacing.value +contact_size.value /2); // impy double xSign = 1, ySign = -1; double polyX = endPolyx, polyY = 0; if (!horizontalFlag) // swap the numbers to get vertical transistors { double tmp; tmp = impx; impx = impy; impy = tmp; tmp = wellx; wellx = welly; welly = tmp; tmp = sox; sox = soy; soy = tmp; tmp = selectx; selectx = selecty; selecty = tmp; tmp = gatex; gatex = gatey; gatey = tmp; tmp = endPolyx; endPolyx = endPolyy; endPolyy = tmp; tmp = diffX; diffX = diffY; diffY = tmp; tmp = polyX; polyX = polyY; polyY = tmp; tmp = xSign; xSign = ySign; ySign = tmp; endLeftOrRight = endPolyx; endTopOrBotton = -impx; } // Well layer Xml.NodeLayer xTranWellLayer = null; if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } // Active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, impy, activeLayer, Poly.Type.FILLED, true, false, -1)); // electrical active layers nodesList.add(makeXmlNodeLayer(impx, impx, impy, 0, activeLayer, Poly.Type.FILLED, false, true, 3)); // bottom nodesList.add(makeXmlNodeLayer(impx, impx, 0, impy, activeLayer, Poly.Type.FILLED, false, true, 1)); // top // Diff port portNames.clear(); portNames.add(activeLayer.name); Xml.PrimitivePort diffTopPort = makeXmlPrimitivePort("diff-top", 90, 90, 1, minFullSize, diffX, -1, diffX, 1, diffY, 1, diffY, 1, portNames); // bottom port Xml.PrimitivePort diffBottomPort = makeXmlPrimitivePort("diff-bottom", 270, 90, 2, minFullSize, xSign*diffX, -1, xSign*diffX, 1, ySign*diffY, -1, ySign*diffY, -1, portNames); // Electric layers // Gate layer Electrical nodesList.add(makeXmlNodeLayer(gatex, gatex, gatey, gatey, polyGateLayer, Poly.Type.FILLED, false, true, -1)); // Poly layers // left electrical nodesList.add(makeXmlNodeLayer(endPolyx, endLeftOrRight, endPolyy, endTopOrBotton, polyLayer, Poly.Type.FILLED, false, true, 0)); // right electrical nodesList.add(makeXmlNodeLayer(endLeftOrRight, endPolyx, endTopOrBotton, endPolyy, polyLayer, Poly.Type.FILLED, false, true, 2)); // non-electrical poly (just one poly layer) nodesList.add(makeXmlNodeLayer(endPolyx, endPolyx, endPolyy, endPolyy, polyLayer, Poly.Type.FILLED, true, false, -1)); // Poly port portNames.clear(); portNames.add(polyLayer.name); Xml.PrimitivePort polyLeftPort = makeXmlPrimitivePort("poly-left", 180, 90, 0, minFullSize, ySign*polyX, -1, ySign*polyX, -1, xSign*polyY, -1, xSign*polyY, 1, portNames); // right port Xml.PrimitivePort polyRightPort = makeXmlPrimitivePort("poly-right", 0, 180, 0, minFullSize, polyX, 1, polyX, 1, polyY, -1, polyY, 1, portNames); // Select layer Xml.NodeLayer xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); //One (undocumented) requirement of transistors is that the ports must appear in the //order: Poly-left, Diff-top, Poly-right, Diff-bottom. This requirement is //because of the methods Technology.getTransistorGatePort(), //Technology.getTransistorAltGatePort(), Technology.getTransistorSourcePort(), //and Technology.getTransistorDrainPort(). 
// diff-top = 1, diff-bottom = 2, polys=0 // ports in the correct order: Poly-left, Diff-top, Poly-right, Diff-bottom nodePorts.add(polyLeftPort); nodePorts.add(diffTopPort); nodePorts.add(polyRightPort); nodePorts.add(diffBottomPort); // Standard Transistor Xml.PrimitiveNodeGroup n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name); // Extra transistors which don't have select nor well // Extra protection poly. No ports are necessary. if (getExtraInfoFlag()) { // removing well and select for simplicity // nodesList.remove(xTranSelLayer); // nodesList.remove(xTranWellLayer); // // new sox and soy // sox = scaledValue(poly_endcap.v); // soy = scaledValue(diff_poly_overhang.v); // n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", PrimitiveNode.Function.TRANMOS, 0, 0, 0, 0, // new SizeOffset(sox, sox, soy, soy), nodesListW, nodePorts, null, false); // g.addElement(n); /*************************************/ // Short transistors // Adding extra transistors whose select and well are aligned with poly along the X axis nodesList.remove(xTranSelLayer); double shortSelectX = scaledValue(gate_width.value /2+poly_endcap.value); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); double shortSox = sox; shortSox = scaledValue(poly_endcap.value); if (wellLayer != null) { nodesList.remove(xTranWellLayer); xTranWellLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-S"); /*************************************/ // Short transistors with VTH and VTL double vthlx = scaledValue(gate_width.value /2+vthl_diff_overhang.value); double vthly = scaledValue(gate_length.value /2+ vthl_poly_overhang.value); // VTH Transistor String tmp = "VTH-" + name; Xml.Layer vthLayer = t.findLayer(tmp); Xml.NodeLayer nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED); nodesList.add(nl); n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, tmp + "-S"); // VTL Transistor nodesList.remove(nl); tmp = "VTL-" + name; vthLayer = t.findLayer(tmp); nl = makeXmlNodeLayer(vthlx, vthlx, vthly, vthly, vthLayer, Poly.Type.FILLED); nodesList.add(nl); n = makeXmlPrimitive(t.nodeGroups, tmp + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(shortSox, shortSox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, tmp + "-S"); /*************************************/ // Transistors with extra polys // different select for those with extra protection layers nodesList.remove(xTranSelLayer); double endOfProtectionY = gate_length.value + poly_protection_spacing.value; double selectExtraY = scaledValue(gate_length.value /2 + endOfProtectionY + extraSelX); // actually is extraSelX because of the poly distance! xTranSelLayer = (makeXmlNodeLayer(selectx, selectx, selectExtraY, selectExtraY, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // not sure which condition to apply. 
It doesn't apply nwell_overhang_diff due to the extra poly if (DBMath.isLessThan(welly, selectExtraY)) { welly = selectExtraY; soy = scaledValue(endOfProtectionY + extraSelX); } if (wellLayer != null) { nodesList.remove(xTranWellLayer); xTranWellLayer = (makeXmlNodeLayer(wellx, wellx, welly, welly, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } if (!horizontalFlag) { System.out.println("Not working with !horizontal"); assert(false); } portNames.clear(); portNames.add(polyLayer.name); // bottom or left Xml.NodeLayer bOrL = (makeXmlNodeLayer(gatex, gatex, DBMath.round((protectDist + 3*endPolyy)), -DBMath.round(endPolyy + protectDist), polyLayer, Poly.Type.FILLED, true, false, -1/*3*/)); // port 3 for left/bottom extra poly lb=left bottom // Adding left nodesList.add(bOrL); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-B", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-B"); // top or right Xml.NodeLayer tOrR = (makeXmlNodeLayer(gatex, gatex, -DBMath.round(endPolyy + protectDist), DBMath.round((protectDist + 3*endPolyy)), polyLayer, Poly.Type.FILLED, true, false, -1/*4*/)); // port 4 for right/top extra poly rt=right top // Adding both nodesList.add(tOrR); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-TB", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name + "-TB"); // Adding right nodesList.remove(bOrL); n = makeXmlPrimitive(t.nodeGroups, name + "-Transistor-T", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, name +"-T"); /*************************************/ // Short transistors with OD18 double od18x = scaledValue(gate_od18_width.value /2+od18_diff_overhang[0].value); double od18y = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +od18_diff_overhang[1].value); nodePorts.clear(); nodesList.clear(); prepareTransistor(gate_od18_width.value, gate_od18_length.value, poly_endcap.value, diff_poly_overhang.value, gate_contact_spacing.value, contact_size.value, activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts); // OD18 Xml.Layer od18Layer = t.findLayer("OD_18"); nodesList.add(makeXmlNodeLayer(od18x, od18x, od18y, od18y, od18Layer, Poly.Type.FILLED)); // adding short select shortSelectX = scaledValue(gate_od18_width.value /2+poly_endcap.value); selecty = scaledValue(gate_od18_length.value /2+diff_poly_overhang.value +extraSelY); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // adding well if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(od18x, od18x, od18y, od18y, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } sox = scaledValue(od18_diff_overhang[0].value); soy = scaledValue(diff_poly_overhang.value +od18_diff_overhang[1].value); n = makeXmlPrimitive(t.nodeGroups, "OD18-" + name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, "18-" + name + "-S"); /*************************************/ // Short transistors with native if (i==Technology.N_TYPE) { double ntx = scaledValue(gate_nt_width.value /2+nt_diff_overhang.value); double nty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +nt_diff_overhang.value); nodePorts.clear(); nodesList.clear(); prepareTransistor(gate_nt_width.value, gate_nt_length.value, poly_nt_endcap.value,
diff_poly_overhang.value, gate_contact_spacing.value, contact_size.value, activeLayer, polyLayer, polyGateLayer, nodesList, nodePorts); // NT-N Xml.Layer ntLayer = t.findLayer("NT-N"); nodesList.add(makeXmlNodeLayer(ntx, ntx, nty, nty, ntLayer, Poly.Type.FILLED)); // adding short select shortSelectX = scaledValue(gate_nt_width.value /2+poly_nt_endcap.value); selecty = scaledValue(gate_nt_length.value /2+diff_poly_overhang.value +extraSelY); xTranSelLayer = (makeXmlNodeLayer(shortSelectX, shortSelectX, selecty, selecty, selectLayer, Poly.Type.FILLED)); nodesList.add(xTranSelLayer); // adding well if (wellLayer != null) { xTranWellLayer = (makeXmlNodeLayer(ntx, ntx, nty, nty, wellLayer, Poly.Type.FILLED)); nodesList.add(xTranWellLayer); } sox = scaledValue(poly_nt_endcap.value); soy = scaledValue(diff_poly_overhang.value +nt_diff_overhang.value); n = makeXmlPrimitive(t.nodeGroups, "NT-" + name + "-Transistor-S", func, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addElement(n, "NT-" + name + "-S"); } /*************************************/ // Poly Resistors nodesList.clear(); nodePorts.clear(); WizardField polyRL = findWizardField("poly_resistor_length"); WizardField polyRW = findWizardField("poly_resistor_width"); WizardField rpoS = findWizardField("rpo_contact_spacing"); WizardField rpoODPolyEx = findWizardField("rpo_odpoly_overhang"); WizardField rhOverhang = findWizardField("rh_odpoly_overhang"); double resistorSpacing = contact_array_spacing.value; // using array value to guarantee proper spacing in nD cases // poly double soxNoScaled = (rpoS.value + contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value); double halfTotalL = scaledValue(polyRL.value /2 + soxNoScaled); double halfTotalW = scaledValue(polyRW.value /2); nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, polyLayer, Poly.Type.FILLED, true, true, -1)); // RPO Xml.Layer rpoLayer = t.findLayer("RPO"); double rpoY = scaledValue(polyRW.value /2 + rpoODPolyEx.value); double rpoX = scaledValue(polyRL.value /2); nodesList.add(makeXmlNodeLayer(rpoX, rpoX, rpoY, rpoY, rpoLayer, Poly.Type.FILLED, true, true, -1)); // left cuts double cutDistance = scaledValue(rpoS.value + polyRL.value /2); // M1 and Poly overhang will be the same for now // double absVal = (contact_poly_overhang.v - via_overhang[0].v); double m1Distance = cutDistance - scaledValue(contact_poly_overhang.value); double m1Y = scaledValue(polyRW.value /2); // - absVal); double m1W = scaledValue(2 * contact_poly_overhang.value + resistorSpacing + 2 * contact_size.value); double cutSizeHalf = scaledValue(contact_size.value /2); double cutEnd = cutDistance+contSize; double cutSpacing = scaledValue(resistorSpacing); double cutEnd2 = cutEnd+contSize+cutSpacing; portNames.clear(); portNames.add(m1Layer.name); // left port Xml.PrimitivePort port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize, -(cutEnd + cutSpacing), -1, -cutEnd, -1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames); nodePorts.add(port); // right port port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize, cutEnd, 1, (cutEnd + cutSpacing), 1, -cutSizeHalf, -1, cutSizeHalf, 1, portNames); nodePorts.add(port); // metal left nodesList.add(makeXmlNodeLayer((m1Distance + m1W), -1, -m1Distance, -1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 0)); // right metal nodesList.add(makeXmlNodeLayer(-m1Distance, 1, (m1Distance + m1W), 1, m1Y, -1, m1Y, 1, m1Layer, Poly.Type.FILLED, true, true, 1)); // select double 
selectY = scaledValue(polyRW.value /2 + rhOverhang.value); double selectX = scaledValue(polyRL.value /2 + soxNoScaled + extraSelX); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, selectLayer, Poly.Type.FILLED, true, true, -1)); // RH Xml.Layer rhLayer = t.findLayer("RH"); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rhLayer, Poly.Type.FILLED, true, true, -1)); // RPDMY Xml.Layer rPLayer = t.findLayer("RPDMY"); nodesList.add(makeXmlNodeLayer(selectX, selectX, selectY, selectY, rPLayer, Poly.Type.FILLED, true, true, -1)); // cuts nodesList.add(makeXmlMulticut(cutEnd2, -1, -cutDistance, -1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing)); nodesList.add(makeXmlMulticut(-cutDistance, 1, cutEnd2, 1, cutSizeHalf, -1, cutSizeHalf, 1, polyConLayer, contSize, contArraySpacing, contArraySpacing)); sox = scaledValue(soxNoScaled + extraSelX); soy = scaledValue(rpoODPolyEx.value); n = makeXmlPrimitive(t.nodeGroups, name + "-Poly-RPO-Resistor", prFunc, 0, 0, 0, 0, new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); g.addPinOrResistor(n, name + "-RPoly"); /*************************************/ // Well Resistors nodesList.clear(); nodePorts.clear(); WizardField wellRL = findWizardField("well_resistor_length"); WizardField wellRW = findWizardField("well_resistor_width"); WizardField rpoSelO = findWizardField("rpo_select_overlap"); // F WizardField rpoCoS = findWizardField("rpo_co_space_in_nwrod"); // G WizardField coNwrodO = findWizardField("co_nwrod_overhang"); // E WizardField odNwrodO = findWizardField("od_nwrod_overhang"); // D // Total values define RPO dimensions double cutEndNoScaled = /*F*/rpoSelO.value + /*G*/rpoCoS.value; double cutSpacingNoScaled = /*2xCut + spacing*/resistorSpacing + 2*contact_size.value; double activeXNoScaled = /*F+G*/cutEndNoScaled + /*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value + /*D*/odNwrodO.value; soxNoScaled = activeXNoScaled + rpoODPolyEx.value; double soyNoScaled = /*D*/odNwrodO.value + rpoODPolyEx.value; halfTotalL = scaledValue(wellRL.value /2 + soxNoScaled); halfTotalW = scaledValue(wellRW.value /2 + soyNoScaled); double activeWX = scaledValue(activeXNoScaled); double activeWY = scaledValue(wellRW.value /2 + /*D*/odNwrodO.value); // rpo. 
It has two holes nodesList.add(makeXmlNodeLayer(halfTotalL, halfTotalL, halfTotalW, halfTotalW, rpoLayer, Poly.Type.FILLED, true, true, -1)); // active nodesList.add(makeXmlNodeLayer(activeWX, activeWX, activeWY, activeWY, activeLayer, Poly.Type.FILLED, true, true, -1)); // well double halfW = scaledValue(wellRW.value /2); double halfWellL = scaledValue(wellRL.value /2+/*F+G*/cutEndNoScaled+/*cut spacing+2xcuts*/cutSpacingNoScaled + /*E*/coNwrodO.value); if (i==Technology.N_TYPE) { nodesList.add(makeXmlNodeLayer(halfWellL, halfWellL, halfW, halfW, nwellLayer, Poly.Type.FILLED, true, true, -1)); } // NWDMY-LVS double halfL = scaledValue(wellRL.value /2); Xml.Layer nwdmyLayer = t.findLayer("NWDMY-LVS"); nodesList.add(makeXmlNodeLayer(halfL, halfL, halfTotalW, halfTotalW, nwdmyLayer, Poly.Type.FILLED, true, true, -1)); cutEnd = scaledValue(wellRL.value /2+cutEndNoScaled); cutSpacing = scaledValue(cutSpacingNoScaled); // Metal1 m1Distance = scaledValue(wellRL.value /2 + /*F*/rpoSelO.value); // metal left nodesList.add(makeXmlNodeLayer(halfWellL, -1, -m1Distance, -1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 0)); // right metal nodesList.add(makeXmlNodeLayer(-m1Distance, 1, halfWellL, 1, halfW, -1, halfW, 1, m1Layer, Poly.Type.FILLED, true, true, 1)); // left port port = makeXmlPrimitivePort("left-rpo", 0, 180, 0, minFullSize, -(cutEnd + cutSpacing), -1, -cutEnd, -1, -halfW, -1, halfW, 1, portNames); nodePorts.add(port); // right port port = makeXmlPrimitivePort("right-rpo", 0, 180, 1, minFullSize, cutEnd, 1, (cutEnd + cutSpacing), 1, -halfW, -1, halfW, 1, portNames); nodePorts.add(port); sox = scaledValue(soxNoScaled); soy = scaledValue(soyNoScaled); // n = makeXmlPrimitive(t.nodeGroups, name + "-Well-RPO-Resistor", prFunc, 0, 0, 0, 0, // new SizeOffset(sox, sox, soy, soy), nodesList, nodePorts, null, false); // g.addPinOrResistor(n, name + "-RWell"); } } // Aggregating all palette groups into one List<PaletteGroup> allGroups = new ArrayList<PaletteGroup>(); allGroups.add(transPalette[0]); allGroups.add(transPalette[1]); allGroups.add(diffPalette[0]); allGroups.add(diffPalette[1]); allGroups.add(wellPalette[0]); allGroups.add(wellPalette[1]); allGroups.add(polyGroup); for (PaletteGroup g : metalPalette) allGroups.add(g); // Adding elements in palette for (PaletteGroup o : allGroups) { t.menuPalette.menuBoxes.add(o.arcs); // arcs t.menuPalette.menuBoxes.add(o.pins); // pins t.menuPalette.menuBoxes.add(o.elements); // contacts } // Writing GDS values makeLayerGDS(t, diffPLayer, String.valueOf(diff_layer)); makeLayerGDS(t, diffNLayer, String.valueOf(diff_layer)); makeLayerGDS(t, pplusLayer, String.valueOf(pplus_layer)); makeLayerGDS(t, nplusLayer, String.valueOf(nplus_layer)); makeLayerGDS(t, nwellLayer, String.valueOf(nwell_layer)); makeLayerGDS(t, deviceMarkLayer, String.valueOf(marking_layer)); makeLayerGDS(t, polyConLayer, String.valueOf(contact_layer)); makeLayerGDS(t, diffConLayer, String.valueOf(contact_layer)); makeLayerGDS(t, polyLayer, String.valueOf(poly_layer)); makeLayerGDS(t, polyGateLayer, String.valueOf(poly_layer)); for (int i = 0; i < num_metal_layers; i++) { Xml.Layer met = metalLayers.get(i); makeLayerGDS(t, met, String.valueOf(metal_layers[i])); if (getExtraInfoFlag()) { // Type is always 1 makeLayerGDS(t, dummyMetalLayers.get(i), metal_layers[i].value + "/1"); // exclusion always takes 150 makeLayerGDS(t, exclusionMetalLayers.get(i), "150/" + (i + 1)); } if (i > num_metal_layers - 2) continue; Xml.Layer via = viaLayers.get(i); makeLayerGDS(t, via,
String.valueOf(via_layers[i])); } // Writing Layer Rules for (Xml.Layer l : diffLayers) { makeLayerRuleMinWid(t, l, diff_width); makeLayersRule(t, l, DRCTemplate.DRCRuleType.SPACING, diff_spacing.rule, diff_spacing.value); } WizardField[] plus_diff = {pplus_overhang_diff, nplus_overhang_diff}; WizardField[] plus_width = {pplus_width, nplus_width}; WizardField[] plus_spacing = {pplus_spacing, nplus_spacing}; for (int i = 0; i < plusLayers.length; i++) { makeLayerRuleMinWid(t, plusLayers[i], plus_width[i]); makeLayersRuleSurround(t, plusLayers[i], diffLayers[i], plus_diff[i].rule, plus_diff[i].value); makeLayersRule(t, plusLayers[i], DRCTemplate.DRCRuleType.SPACING, plus_spacing[i].rule, plus_spacing[i].value); } Xml.Layer[] wells = {pwellLayer, nwellLayer}; for (Xml.Layer w : wells) { makeLayerRuleMinWid(t, w, nwell_width); makeLayersRuleSurround(t, w, diffPLayer, nwell_overhang_diff_p.rule, nwell_overhang_diff_p.value); makeLayersRuleSurround(t, w, diffNLayer, nwell_overhang_diff_n.rule, nwell_overhang_diff_n.value); makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, nwell_spacing.rule, nwell_spacing.value); } Xml.Layer[] polys = {polyLayer, polyGateLayer}; for (Xml.Layer w : polys) { makeLayerRuleMinWid(t, w, poly_width); makeLayersRule(t, w, DRCTemplate.DRCRuleType.SPACING, poly_spacing.rule, poly_spacing.value); } // Simple spacing rules included here for (int i = 0; i < num_metal_layers; i++) { Xml.Layer met = metalLayers.get(i); makeLayerRuleMinWid(t, met, metal_width[i]); makeLayersRule(t, met, DRCTemplate.DRCRuleType.SPACING, metal_spacing[i].rule, metal_spacing[i].value); if (i >= num_metal_layers - 1) continue; Xml.Layer via = viaLayers.get(i); makeLayerRuleMinWid(t, via, via_size[i]); makeLayersRule(t, via, DRCTemplate.DRCRuleType.SPACING, via_inline_spacing[i].rule, via_inline_spacing[i].value); // makeLayersRule(t, via, DRCTemplate.DRCRuleType.UCONSPA2D, via_array_spacing[i]); } // wide metal rules for (WideWizardField w : wide_metal_spacing) { for (String layerName : w.names) { Xml.Layer layer = t.findLayer(layerName); assert(layer != null); makeLayersWideRule(t, layer, DRCTemplate.DRCRuleType.SPACING, w.rule, w.value, w.maxW, w.minLen); } } // Finish menu with Pure, Misc and Cell List<Object> l = new ArrayList<Object>(); l.add(new String("Pure")); t.menuPalette.menuBoxes.add(l); l = new ArrayList<Object>(); l.add(new String("Misc.")); t.menuPalette.menuBoxes.add(l); l = new ArrayList<Object>(); l.add(new String("Cell")); t.menuPalette.menuBoxes.add(l); // Sort before writing data. We might need to sort primitive nodes in group before... Collections.sort(t.nodeGroups, primitiveNodeGroupSort); for (Xml.PrimitiveNodeGroup nodeGroup: t.nodeGroups) { // sort NodeLayers before writing them Collections.sort(nodeGroup.nodeLayers, nodeLayerSort); } // finally, write the file boolean includeDateAndVersion = User.isIncludeDateAndVersionInOutput(); String copyrightMessage = IOTool.isUseCopyrightMessage() ? IOTool.getCopyrightMessage() : null; t.writeXml(fileName, includeDateAndVersion, copyrightMessage); }
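The wizard method above ends by sorting t.nodeGroups with primitiveNodeGroupSort before calling t.writeXml(fileName, ...), so the emitted technology XML is deterministic from run to run. The comparator itself is defined outside this excerpt; the sketch below is a hypothetical, name-based stand-in (Group and BY_NAME are illustrative names, not Electric's real Xml.PrimitiveNodeGroup type or its actual comparator), showing the kind of stable ordering such a sort provides.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public final class NodeGroupSortSketch {
    // Minimal stand-in for a primitive-node group: only the name matters for ordering.
    static final class Group {
        final String name;
        Group(String name) { this.name = name; }
    }

    // Case-insensitive alphabetical order keeps the written XML stable across runs.
    static final Comparator<Group> BY_NAME =
            (a, b) -> a.name.compareToIgnoreCase(b.name);

    public static void main(String[] args) {
        List<Group> groups = new ArrayList<Group>();
        groups.add(new Group("P-Transistor"));
        groups.add(new Group("Metal-1-Metal-2"));
        Collections.sort(groups, BY_NAME);
        System.out.println(groups.get(0).name); // prints Metal-1-Metal-2
    }
}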
diff --git a/org.iucn.sis.client/src/org/iucn/sis/client/panels/workingsets/WorkingSetAddAssessmentsPanel.java b/org.iucn.sis.client/src/org/iucn/sis/client/panels/workingsets/WorkingSetAddAssessmentsPanel.java index 34584c7d..a6c4ecec 100644 --- a/org.iucn.sis.client/src/org/iucn/sis/client/panels/workingsets/WorkingSetAddAssessmentsPanel.java +++ b/org.iucn.sis.client/src/org/iucn/sis/client/panels/workingsets/WorkingSetAddAssessmentsPanel.java @@ -1,238 +1,237 @@ package org.iucn.sis.client.panels.workingsets; import java.util.ArrayList; import java.util.List; import org.iucn.sis.client.api.caches.RegionCache; import org.iucn.sis.client.api.caches.WorkingSetCache; import org.iucn.sis.client.panels.utils.RefreshLayoutContainer; import org.iucn.sis.client.tabs.WorkingSetPage; import org.iucn.sis.shared.api.models.AssessmentFilter; import org.iucn.sis.shared.api.models.Relationship; import org.iucn.sis.shared.api.models.WorkingSet; import org.iucn.sis.shared.api.utils.AssessmentUtils; import com.extjs.gxt.ui.client.Style.Scroll; import com.extjs.gxt.ui.client.event.ButtonEvent; import com.extjs.gxt.ui.client.event.SelectionListener; import com.extjs.gxt.ui.client.widget.Html; import com.extjs.gxt.ui.client.widget.button.Button; import com.extjs.gxt.ui.client.widget.button.ButtonBar; import com.extjs.gxt.ui.client.widget.layout.RowData; import com.extjs.gxt.ui.client.widget.layout.RowLayout; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.RadioButton; import com.google.gwt.user.client.ui.VerticalPanel; import com.solertium.lwxml.shared.GenericCallback; import com.solertium.util.events.ComplexListener; import com.solertium.util.extjs.client.WindowUtils; public class WorkingSetAddAssessmentsPanel extends RefreshLayoutContainer { private HTML instructions; private Button add; private Button cancel; private ButtonBar buttons; private RadioButton addToSelected; private RadioButton addToEntireWorkingSet; private RadioButton published; private RadioButton empty; private final WorkingSetPage parent; private WorkingSetTaxaPanel workingSetTaxaPanel; private List<Integer> speciesIDs; public WorkingSetAddAssessmentsPanel(WorkingSetPage parent) { this.parent = parent; workingSetTaxaPanel = new WorkingSetTaxaPanel(); speciesIDs = new ArrayList<Integer>(); build(); } @SuppressWarnings("deprecation") private void build() { RowLayout layout = new RowLayout(); - // layout.setSpacing(10); - // layout.setMargin(6); instructions = new HTML(); add = new Button("Create Assessments", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { if (addToSelected.getValue()){ workingSetTaxaPanel.setSaveListener(new ComplexListener<List<Integer>>() { public void handleEvent(List<Integer> eventData) { speciesIDs.clear(); for(int i = 0; i < eventData.size(); i++) speciesIDs.add(eventData.get(i)); createNewAssessmentsIfNotExist(); } }); + workingSetTaxaPanel.updateStore(); workingSetTaxaPanel.show(); }else{ createNewAssessmentsIfNotExist(); } } }); cancel = new Button("Cancel", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { cancel(); } }); addToEntireWorkingSet = new RadioButton("type", "Entire working set"); addToEntireWorkingSet.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Create Assessments"); } }); addToEntireWorkingSet.setValue(true); addToSelected = 
new RadioButton("type", "Selected taxa (List of taxa may take a while to load)"); addToSelected.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Choose Taxa and Create Assessments"); } }); VerticalPanel vp = new VerticalPanel(); vp.add(new HTML("Would you like to add draft assessments to the entire working set, or selected species in the working set?")); VerticalPanel inner = new VerticalPanel(); inner.setSpacing(10); inner.add(addToEntireWorkingSet); inner.add(addToSelected); vp.add(inner); buttons = new ButtonBar(); buttons.add(add); buttons.add(cancel); addStyleName("gwt-background"); published = new RadioButton("published", "Most Recently Published Assessment for Working " + "Set's defined region, or most recent global if no published exists for said region."); published.setChecked(true); empty = new RadioButton("published", "Empty Assessment"); VerticalPanel vp2 = new VerticalPanel(); vp2.add(new HTML("What template should the new draft assessments be based upon?")); VerticalPanel inner2 = new VerticalPanel(); inner2.setSpacing(10); inner2.add(published); inner2.add(empty); vp2.add(inner2); setLayout(layout); add(instructions, new RowData(1d, -1)); //add(type, new RowData(1d, -1)); add(vp2, new RowData(1d,-1)); add(vp, new RowData(1d,-1)); //add(list, new RowData(1d, 1d)); add(buttons, new RowData(1d, -1)); layout(); hideList(); } private void cancel() { parent.setManagerTab(); } private void createNewAssessmentsIfNotExist() { add.disable(); boolean useTemplate = published.getValue(); AssessmentFilter filter = WorkingSetCache.impl.getCurrentWorkingSet().getFilter().deepCopy(); filter.setRecentPublished(true); filter.setDraft(false); filter.setAllPublished(false); if (filter.getRegionType().equalsIgnoreCase(Relationship.OR)) { WindowUtils.errorAlert("Unable to create draft assessements for a working set with assessment scope \"ANY\". Please temporarily change your assessment scope to \"ALL\"."); return; } if (!addToSelected.getValue()) { speciesIDs = WorkingSetCache.impl.getCurrentWorkingSet().getSpeciesIDs(); } WindowUtils.showLoadingAlert("Please wait..."); AssessmentUtils.createGlobalDraftAssessments(speciesIDs, useTemplate, filter, new GenericCallback<String>() { public void onFailure(Throwable caught) { WindowUtils.hideLoadingAlert(); WindowUtils.errorAlert("Unable to complete request, please try again later."); add.enable(); hideList(); } public void onSuccess(String arg0) { WindowUtils.hideLoadingAlert(); if (arg0 != null) { com.extjs.gxt.ui.client.widget.Window w = WindowUtils.newWindow("Batch Create Results", null, false, true); w.setScrollMode(Scroll.AUTOY); w.setSize(400, 500); w.add(new Html(arg0)); w.show(); } WorkingSetCache.impl.uncacheAssessmentsForWorkingSet(WorkingSetCache.impl.getCurrentWorkingSet()); cancel(); add.enable(); hideList(); } }); } private void hideList() { buttons.setVisible(true); } @Override public void refresh() { final WorkingSet ws = WorkingSetCache.impl.getCurrentWorkingSet(); if (ws == null) { instructions.setHTML("<b>Instructions:</b> Please select a working set from the navigator which you would like" + " to add draft assessments to.<br/><br/><br/>"); add.setEnabled(false); } else if (ws.getFilter().getRegionType().equalsIgnoreCase(Relationship.OR)) { instructions.setHTML("<b>Instructions:</b> Please change your working set assessment region scope to \"ALL\" before continuing. 
" + "This operation does not support the \"ANY\" working set region scope.<br/><br/><br/>"); add.setEnabled(false); } else { instructions .setHTML("<b>Instructions:</b> This operation will add draft assessments for the species in this working set. The created " + "assessments will have a region of " + RegionCache.impl.getRegionNamesAsReadable(ws.getFilter()) + ". " + "Please either choose to create draft assessments for all " + " taxa in the working set " + ws.getWorkingSetName() + " or select to add draft assessments individually to " + "taxa. If you choose to create draft assessments to the entire working set, a draft assessment " + "will be created for each taxa in the working set. However, if a draft assessment already exists, " + "the current draft assessment will <i>not</i> be overwritten. <br/><br/><br/>"); add.setEnabled(true); } layout(); } }
false
true
private void build() { RowLayout layout = new RowLayout(); // layout.setSpacing(10); // layout.setMargin(6); instructions = new HTML(); add = new Button("Create Assessments", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { if (addToSelected.getValue()){ workingSetTaxaPanel.setSaveListener(new ComplexListener<List<Integer>>() { public void handleEvent(List<Integer> eventData) { speciesIDs.clear(); for(int i = 0; i < eventData.size(); i++) speciesIDs.add(eventData.get(i)); createNewAssessmentsIfNotExist(); } }); workingSetTaxaPanel.show(); }else{ createNewAssessmentsIfNotExist(); } } }); cancel = new Button("Cancel", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { cancel(); } }); addToEntireWorkingSet = new RadioButton("type", "Entire working set"); addToEntireWorkingSet.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Create Assessments"); } }); addToEntireWorkingSet.setValue(true); addToSelected = new RadioButton("type", "Selected taxa (List of taxa may take a while to load)"); addToSelected.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Choose Taxa and Create Assessments"); } }); VerticalPanel vp = new VerticalPanel(); vp.add(new HTML("Would you like to add draft assessments to the entire working set, or selected species in the working set?")); VerticalPanel inner = new VerticalPanel(); inner.setSpacing(10); inner.add(addToEntireWorkingSet); inner.add(addToSelected); vp.add(inner); buttons = new ButtonBar(); buttons.add(add); buttons.add(cancel); addStyleName("gwt-background"); published = new RadioButton("published", "Most Recently Published Assessment for Working " + "Set's defined region, or most recent global if no published exists for said region."); published.setChecked(true); empty = new RadioButton("published", "Empty Assessment"); VerticalPanel vp2 = new VerticalPanel(); vp2.add(new HTML("What template should the new draft assessments be based upon?")); VerticalPanel inner2 = new VerticalPanel(); inner2.setSpacing(10); inner2.add(published); inner2.add(empty); vp2.add(inner2); setLayout(layout); add(instructions, new RowData(1d, -1)); //add(type, new RowData(1d, -1)); add(vp2, new RowData(1d,-1)); add(vp, new RowData(1d,-1)); //add(list, new RowData(1d, 1d)); add(buttons, new RowData(1d, -1)); layout(); hideList(); }
private void build() { RowLayout layout = new RowLayout(); instructions = new HTML(); add = new Button("Create Assessments", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { if (addToSelected.getValue()){ workingSetTaxaPanel.setSaveListener(new ComplexListener<List<Integer>>() { public void handleEvent(List<Integer> eventData) { speciesIDs.clear(); for(int i = 0; i < eventData.size(); i++) speciesIDs.add(eventData.get(i)); createNewAssessmentsIfNotExist(); } }); workingSetTaxaPanel.updateStore(); workingSetTaxaPanel.show(); }else{ createNewAssessmentsIfNotExist(); } } }); cancel = new Button("Cancel", new SelectionListener<ButtonEvent>() { @Override public void componentSelected(ButtonEvent ce) { cancel(); } }); addToEntireWorkingSet = new RadioButton("type", "Entire working set"); addToEntireWorkingSet.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Create Assessments"); } }); addToEntireWorkingSet.setValue(true); addToSelected = new RadioButton("type", "Selected taxa (List of taxa may take a while to load)"); addToSelected.addClickHandler(new ClickHandler() { public void onClick(ClickEvent sender) { add.setText("Choose Taxa and Create Assessments"); } }); VerticalPanel vp = new VerticalPanel(); vp.add(new HTML("Would you like to add draft assessments to the entire working set, or selected species in the working set?")); VerticalPanel inner = new VerticalPanel(); inner.setSpacing(10); inner.add(addToEntireWorkingSet); inner.add(addToSelected); vp.add(inner); buttons = new ButtonBar(); buttons.add(add); buttons.add(cancel); addStyleName("gwt-background"); published = new RadioButton("published", "Most Recently Published Assessment for Working " + "Set's defined region, or most recent global if no published exists for said region."); published.setChecked(true); empty = new RadioButton("published", "Empty Assessment"); VerticalPanel vp2 = new VerticalPanel(); vp2.add(new HTML("What template should the new draft assessments be based upon?")); VerticalPanel inner2 = new VerticalPanel(); inner2.setSpacing(10); inner2.add(published); inner2.add(empty); vp2.add(inner2); setLayout(layout); add(instructions, new RowData(1d, -1)); //add(type, new RowData(1d, -1)); add(vp2, new RowData(1d,-1)); add(vp, new RowData(1d,-1)); //add(list, new RowData(1d, 1d)); add(buttons, new RowData(1d, -1)); layout(); hideList(); }
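The one-line fix in this row calls workingSetTaxaPanel.updateStore() before show(), so the taxa chooser repopulates from the current working set instead of displaying the list cached from a previously selected one. A minimal, framework-free sketch of that refresh-before-show pattern follows; TaxaDialogSketch and its members are illustrative names, not SIS classes.

import java.util.ArrayList;
import java.util.List;

// Illustrative only: a dialog that caches its list must reload it before each show().
final class TaxaDialogSketch {
    private final List<Integer> store = new ArrayList<Integer>();

    // Analogue of updateStore(): reload the backing data from the current selection.
    void updateStore(List<Integer> currentSpeciesIds) {
        store.clear();
        store.addAll(currentSpeciesIds);
    }

    // Analogue of show(): renders whatever is in the store, stale or fresh.
    void show() {
        System.out.println("Showing " + store.size() + " taxa");
    }
}

Calling show() alone, as the buggy version does, leaves the previous working set's taxa on screen.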
diff --git a/user/src/com/google/gwt/xhr/client/XMLHttpRequest.java b/user/src/com/google/gwt/xhr/client/XMLHttpRequest.java index 08adeb60d..a84e8a367 100644 --- a/user/src/com/google/gwt/xhr/client/XMLHttpRequest.java +++ b/user/src/com/google/gwt/xhr/client/XMLHttpRequest.java @@ -1,272 +1,275 @@ /* * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.gwt.xhr.client; import com.google.gwt.core.client.JavaScriptObject; /** * The native XMLHttpRequest object. Most applications should use the higher- * level {@link RequestBuilder} class unless they need specific functionality * provided by the XMLHttpRequest object. * * @see http://www.w3.org/TR/XMLHttpRequest/ */ public class XMLHttpRequest extends JavaScriptObject { /* * NOTE: Testing discovered that for some bizarre reason, on Mozilla, the * JavaScript <code>XmlHttpRequest.onreadystatechange</code> handler * function may still be called after it is deleted. The theory is that the * callback is cached somewhere. Setting it to null or an empty function does * seem to work properly, though. * * On IE, there are two problems: Setting onreadystatechange to null (as * opposed to an empty function) sometimes throws an exception. With * particular (rare) versions of jscript.dll, setting onreadystatechange from * within onreadystatechange causes a crash. Setting it from within a timeout * fixes this bug (see issue 1610). * * End result: *always* set onreadystatechange to an empty function (never to * null). Never set onreadystatechange from within onreadystatechange (always * in a setTimeout()). */ /** * When constructed, the XMLHttpRequest object must be in the UNSENT state. */ public static final int UNSENT = 0; /** * The OPENED state is the state of the object when the open() method has been * successfully invoked. During this state request headers can be set using * setRequestHeader() and the request can be made using send(). */ public static final int OPENED = 1; /** * The HEADERS_RECEIVED state is the state of the object when all response * headers have been received. */ public static final int HEADERS_RECEIVED = 2; /** * The LOADING state is the state of the object when the response entity body * is being received. */ public static final int LOADING = 3; /** * The DONE state is the state of the object when either the data transfer has * been completed or something went wrong during the transfer (infinite * redirects for instance). */ public static final int DONE = 4; /** * Creates an XMLHttpRequest object. * * @return the created object */ public static native XMLHttpRequest create() /*-{ + // Don't check window.XMLHttpRequest, because it can + // cause cross-site problems on IE8 if window's URL + // is javascript:'' . 
if ($wnd.XMLHttpRequest) { - return new XMLHttpRequest(); + return new $wnd.XMLHttpRequest(); } else { try { - return new ActiveXObject('MSXML2.XMLHTTP.3.0'); + return new $wnd.ActiveXObject('MSXML2.XMLHTTP.3.0'); } catch (e) { - return new ActiveXObject("Microsoft.XMLHTTP"); + return new $wnd.ActiveXObject("Microsoft.XMLHTTP"); } } }-*/; protected XMLHttpRequest() { } /** * Aborts the current request. * * @see http://www.w3.org/TR/XMLHttpRequest/#abort */ public final native void abort() /*-{ this.abort(); }-*/; /** * Clears the {@link ReadyStateChangeHandler}. * * @see #clearOnReadyStateChange() * @see http://www.w3.org/TR/XMLHttpRequest/#onreadystatechange */ public final native void clearOnReadyStateChange() /*-{ var self = this; $wnd.setTimeout(function() { // Using a function literal here leaks memory on ie6 // Using the same function object kills HtmlUnit self.onreadystatechange = new Function(); }, 0); }-*/; /** * Gets all the HTTP response headers, as a single string. * * @return the response headers. * @see http://www.w3.org/TR/XMLHttpRequest/#getallresponseheaders */ public final native String getAllResponseHeaders() /*-{ return this.getAllResponseHeaders(); }-*/; /** * Gets the current ready-state. * * @return the ready-state constant * @see http://www.w3.org/TR/XMLHttpRequest/#readystate */ public final native int getReadyState() /*-{ return this.readyState; }-*/; /** * Gets an HTTP response header. * * @param header the response header to be retrieved * @return the header value * @see http://www.w3.org/TR/XMLHttpRequest/#getresponseheader */ public final native String getResponseHeader(String header) /*-{ return this.getResponseHeader(header); }-*/; /** * Gets the response text. * * @return the response text * @see http://www.w3.org/TR/XMLHttpRequest/#responsetext */ public final native String getResponseText() /*-{ return this.responseText; }-*/; /** * Gets the status code. * * @return the status code * @see http://www.w3.org/TR/XMLHttpRequest/#status */ public final native int getStatus() /*-{ return this.status; }-*/; /** * Gets the status text. * * @return the status text * @see http://www.w3.org/TR/XMLHttpRequest/#statustext */ public final native String getStatusText() /*-{ return this.statusText; }-*/; /** * Opens an asynchronous connection. * * @param httpMethod the HTTP method to use * @param url the URL to be opened * @see http://www.w3.org/TR/XMLHttpRequest/#open */ public final native void open(String httpMethod, String url) /*-{ this.open(httpMethod, url, true); }-*/; /** * Opens an asynchronous connection. * * @param httpMethod the HTTP method to use * @param url the URL to be opened * @param user user to use in the URL * @see http://www.w3.org/TR/XMLHttpRequest/#open */ public final native void open(String httpMethod, String url, String user) /*-{ this.open(httpMethod, url, true, user); }-*/; /** * Opens an asynchronous connection. * * @param httpMethod the HTTP method to use * @param url the URL to be opened * @param user user to use in the URL * @param password password to use in the URL * @see http://www.w3.org/TR/XMLHttpRequest/#open */ public final native void open(String httpMethod, String url, String user, String password) /*-{ this.open(httpMethod, url, true, user, password); }-*/; /** * Initiates a request with no request data. This simply calls * {@link #send(String)} with <code>null</code> as an argument, because the * no-argument <code>send()</code> method is unavailable on Firefox. 
*/ public final void send() { send(null); } /** * Initiates a request with data. If there is no data, specify null. * * @param requestData the data to be sent with the request * @see http://www.w3.org/TR/XMLHttpRequest/#send */ public final native void send(String requestData) /*-{ this.send(requestData); }-*/; /** * Sets the {@link ReadyStateChangeHandler} to be notified when the object's * ready-state changes. * * <p> * Note: Applications <em>must</em> call {@link #clearOnReadyStateChange()} * when they no longer need this object, to ensure that it is cleaned up * properly. Failure to do so will result in memory leaks on some browsers. * </p> * * @param handler the handler to be called when the ready state changes * @see #clearOnReadyStateChange() * @see http://www.w3.org/TR/XMLHttpRequest/#onreadystatechange */ public final native void setOnReadyStateChange(ReadyStateChangeHandler handler) /*-{ // The 'this' context is always supposed to point to the xhr object in the // onreadystatechange handler, but we reference it via closure to be extra sure. var _this = this; this.onreadystatechange = $entry(function() { [email protected]::onReadyStateChange(Lcom/google/gwt/xhr/client/XMLHttpRequest;)(_this); }); }-*/; /** * Sets a request header. * * @param header the header to be set * @param value the header's value * @see http://www.w3.org/TR/XMLHttpRequest/#setrequestheader */ public final native void setRequestHeader(String header, String value) /*-{ this.setRequestHeader(header, value); }-*/; }
false
true
public static native XMLHttpRequest create() /*-{ if ($wnd.XMLHttpRequest) { return new XMLHttpRequest(); } else { try { return new ActiveXObject('MSXML2.XMLHTTP.3.0'); } catch (e) { return new ActiveXObject("Microsoft.XMLHTTP"); } } }-*/;
public static native XMLHttpRequest create() /*-{ // Don't check window.XMLHttpRequest, because it can // cause cross-site problems on IE8 if window's URL // is javascript:'' . if ($wnd.XMLHttpRequest) { return new $wnd.XMLHttpRequest(); } else { try { return new $wnd.ActiveXObject('MSXML2.XMLHTTP.3.0'); } catch (e) { return new $wnd.ActiveXObject("Microsoft.XMLHTTP"); } } }-*/;
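Why this fix works: GWT's JSNI code runs inside a hidden iframe, so a bare global such as XMLHttpRequest resolves against that iframe's window rather than the host page's window ($wnd). The hunk above qualifies all three constructor calls with $wnd, and the added comment records why the old window.XMLHttpRequest feature test was unsafe on IE8 when the host window's URL is javascript:''. Below is a minimal sketch of the same $wnd-scoping idiom, assuming a standard GWT overlay type; the class name HostPageXhr and its package are hypothetical, while the constructor names and fallback order are taken from the fixed_function above.

package example.xhr; // hypothetical package, for illustration only

import com.google.gwt.core.client.JavaScriptObject;

/** Minimal overlay type demonstrating the $wnd-qualified XHR creation idiom. */
public class HostPageXhr extends JavaScriptObject {

  // Overlay types require a protected no-arg constructor.
  protected HostPageXhr() {
  }

  /**
   * Creates the XHR on the host page's window. Inside JSNI, bare globals
   * resolve against GWT's hidden iframe; $wnd is the host page's window,
   * which is why the fix qualifies both the native constructor and the
   * ActiveX fallbacks with it.
   */
  public static native HostPageXhr create() /*-{
    if ($wnd.XMLHttpRequest) {
      return new $wnd.XMLHttpRequest();
    }
    try {
      // Legacy IE: prefer the MSXML 3.0 ProgID, then the oldest fallback.
      return new $wnd.ActiveXObject('MSXML2.XMLHTTP.3.0');
    } catch (e) {
      return new $wnd.ActiveXObject('Microsoft.XMLHTTP');
    }
  }-*/;
}

Note that only the new expressions needed the qualifier; the if ($wnd.XMLHttpRequest) test was already host-page-scoped, which is why the hunk contains exactly three -/+ pairs.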
diff --git a/opentaps/opentaps-common/src/common/org/opentaps/common/domain/party/PartyMergeService.java b/opentaps/opentaps-common/src/common/org/opentaps/common/domain/party/PartyMergeService.java index 2f5e0ed32..50b77de4f 100644 --- a/opentaps/opentaps-common/src/common/org/opentaps/common/domain/party/PartyMergeService.java +++ b/opentaps/opentaps-common/src/common/org/opentaps/common/domain/party/PartyMergeService.java @@ -1,243 +1,243 @@ /* * Copyright (c) 2009 - 2010 Open Source Strategies, Inc. * * Opentaps is free software: you can redistribute it and/or modify it * under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Opentaps is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with Opentaps. If not, see <http://www.gnu.org/licenses/>. */ package org.opentaps.common.domain.party; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.ofbiz.base.util.Debug; import org.ofbiz.base.util.UtilMisc; import org.ofbiz.entity.Delegator; import org.ofbiz.entity.GenericEntityException; import org.ofbiz.entity.GenericValue; import org.ofbiz.entity.model.ModelEntity; import org.ofbiz.entity.model.ModelField; import org.ofbiz.entity.model.ModelRelation; import org.ofbiz.entity.model.ModelViewEntity; import org.opentaps.base.constants.RoleTypeConstants; import org.opentaps.common.party.PartyHelper; import org.opentaps.common.security.OpentapsSecurity; import org.opentaps.common.util.UtilMessage; import org.opentaps.domain.DomainService; import org.opentaps.domain.party.PartyMergeServiceInterface; import org.opentaps.foundation.service.ServiceException; /** * Party merge services implementation. */ public class PartyMergeService extends DomainService implements PartyMergeServiceInterface { public static final String MODULE = PartyMergeService.class.getName(); private String partyIdFrom = null; private String partyIdTo = null; private boolean validate = true; /** {@inheritDoc} */ public void setPartyIdFrom(String partyId) { partyIdFrom = partyId; } /** {@inheritDoc} */ public void setPartyIdTo(String partyId) { partyIdTo = partyId; } /** {@inheritDoc} */ public void setValidate(String s) { validate = "Y".equalsIgnoreCase(s) ? true : "N".equalsIgnoreCase(s) ? false : true; } /** {@inheritDoc} */ public void validateMergeParties() throws ServiceException { Delegator delegator = getInfrastructure().getDelegator(); try { // ensure that merging parties are the same type (ACCOUNT, CONTACT, PROSPECT, SUPPLIER) String fromPartyType = PartyHelper.getFirstValidRoleTypeId(partyIdFrom, PartyHelper.MERGE_PARTY_ROLES, delegator); String toPartyType = PartyHelper.getFirstValidRoleTypeId(partyIdTo, PartyHelper.MERGE_PARTY_ROLES, delegator); if ((fromPartyType == null) || !fromPartyType.equals(toPartyType)) { throw new ServiceException(String.format("Cannot merge party [%1$s] of type [%2$s] with party [%3$s] of type [%4$s] because they are not the same type. 
%5$s", partyIdFrom, fromPartyType, partyIdTo, toPartyType, UtilMessage.expandLabel("OpentapsError_MergePartiesFail", locale))); } if (partyIdFrom.equals(partyIdTo)) { throw new ServiceException(String.format("Cannot merge party [%1$s] to itself! %2$s", partyIdFrom, UtilMessage.expandLabel("CrmErrorMergeParties", locale))); } // convert ACCOUNT/CONTACT/PROSPECT/SUPPLIER to ACCOUNT/CONTACT/LEAD/SUPPLIER String partyType = (fromPartyType.equals(RoleTypeConstants.PROSPECT) ? RoleTypeConstants.LEAD : fromPartyType); // make sure user has CRMSFA_${partyType}_UPDATE (or PRCH_SPLR_UPDATE for SUPPLIER) permission for both parties // TODO: and delete, check security config if (RoleTypeConstants.SUPPLIER.equals(partyType)) { OpentapsSecurity s = new OpentapsSecurity(getSecurity(), getUser().getOfbizUserLogin()); if (!s.hasPartyRelationSecurity("PRCH_SPLR", "_UPDATE", partyIdFrom) || !s.hasPartyRelationSecurity("PRCH_SPLR", "_UPDATE", partyIdTo)) { throw new ServiceException(UtilMessage.expandLabel("CrmErrorPermissionDenied", locale) + ": PRCH_SPLR_UPDATE"); } } else { OpentapsSecurity s = new OpentapsSecurity(getSecurity(), getUser().getOfbizUserLogin()); if (!s.hasPartyRelationSecurity(String.format("CRMSFA_%1$s", partyType), "_UPDATE", partyIdFrom) || !s.hasPartyRelationSecurity(String.format("CRMSFA_%1$s", partyType), "_UPDATE", partyIdTo)) { throw new ServiceException(UtilMessage.expandLabel("CrmErrorPermissionDenied", locale) + ": CRMSFA_" + partyType + "_UPDATE"); } } } catch (GenericEntityException e) { throw new ServiceException("OpentapsError_MergePartiesFail", null); } } /** {@inheritDoc} */ public void mergeParties() throws ServiceException { Delegator delegator = getInfrastructure().getDelegator(); try { if (validate) { // validate again validateMergeParties(); } // merge the party objects mergeTwoValues("PartySupplementalData", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Person", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("PartyGroup", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Party", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); List<GenericValue> toRemove = new ArrayList<GenericValue>(); // Get a list of entities related to the Party entity, in descending order by relation List<ModelEntity> relatedEntities = getRelatedEntities("Party", delegator); // Go through the related entities in forward order - this makes sure that parent records are created before child records Iterator<ModelEntity> reit = relatedEntities.iterator(); while (reit.hasNext()) { ModelEntity modelEntity = reit.next(); // Examine each field of the entity Iterator<ModelField> mefit = modelEntity.getFieldsIterator(); while (mefit.hasNext()) { ModelField modelField = mefit.next(); if (modelField.getName().matches(".*[pP]artyId.*")) { // If the name of the field has something to do with a partyId, get all the existing records from that entity which have the // partyIdFrom in that particular field List<GenericValue> existingRecords = delegator.findByAnd(modelEntity.getEntityName(), UtilMisc.toMap(modelField.getName(), partyIdFrom)); if (existingRecords.size() > 0) { Iterator<GenericValue> eit = existingRecords.iterator(); while (eit.hasNext()) { GenericValue existingRecord = eit.next(); if (modelField.getIsPk()) { // If the partyId field is part of a primary key, create a new record with the 
partyIdTo in place of the partyIdFrom GenericValue newRecord = delegator.makeValue(modelEntity.getEntityName(), existingRecord.getAllFields()); newRecord.set(modelField.getName(), partyIdTo); // Create the new record if a record with the same primary key doesn't already exist - if (delegator.findByPrimaryKey(newRecord.getPrimaryKey()) == null) { + if (delegator.findOne(newRecord.getPrimaryKey().getEntityName(), newRecord.getPrimaryKey(), false) == null) { newRecord.create(); } // Add the old record to the list of records to remove toRemove.add(existingRecord); } else { // If the partyId field is not party of a primary key, simply update the field with the new value and store it existingRecord.set(modelField.getName(), partyIdTo); existingRecord.store(); } } } } } } // Go through the list of records to remove in REVERSE order! Since they're still in descending order of relation to the Party // entity, reversing makes sure that child records are removed before parent records, all the way back to the original Party record ListIterator<GenericValue> rit = toRemove.listIterator(toRemove.size()); while (rit.hasPrevious()) { GenericValue existingRecord = (GenericValue) rit.previous(); Debug.logError(existingRecord.toString(), MODULE); existingRecord.remove(); } } catch (GenericEntityException e) { new ServiceException(UtilMessage.expandLabel("OpentapsError_MergePartiesFail", locale) + e.getMessage()); } } /** * Merging function for two unique <code>GenericValues</code>. * @param entityName the name of the <code>GenericValue</code> entity * @param fromKeys <code>Map</code> representing the primary key of the entity to merge from * @param toKeys <code>Map</code> representing the primary key of the entity to merge to * @param delegator a <code>Delegator</code> value * @exception GenericEntityException if an error occurs */ private static void mergeTwoValues(String entityName, Map<String, String> fromKeys, Map<String, String> toKeys, Delegator delegator) throws GenericEntityException { GenericValue from = delegator.findByPrimaryKey(entityName, fromKeys); GenericValue to = delegator.findByPrimaryKey(entityName, toKeys); if (from == null || to == null) { return; } from.setNonPKFields(to.getAllFields()); to.setNonPKFields(from.getAllFields()); to.store(); } private static List<ModelEntity> getRelatedEntities(String parentEntityName, Delegator delegator) { ModelEntity parentEntity = delegator.getModelEntity(parentEntityName); // Start the recursion return getRelatedEntities(new ArrayList<ModelEntity>(), parentEntity, delegator); } /** * Recursive method to map relations from a single entity. 
* @param relatedEntities List of related ModelEntity objects in descending order of relation from the parent entity * @param parentEntity Root ModelEntity for deriving relations * @param delegator Delegator * @return List of ModelEntity objects in descending order of relation from the original parent entity */ private static List<ModelEntity> getRelatedEntities(List<ModelEntity> relatedEntities, ModelEntity parentEntity, Delegator delegator) { // Do nothing if the parent entity has already been mapped if (relatedEntities.contains(parentEntity)) { return relatedEntities; } relatedEntities.add(parentEntity); Iterator<ModelRelation> reit = parentEntity.getRelationsIterator(); // Recurse for each relation from the parent entity that doesn't refer to a view-entity while (reit.hasNext()) { ModelRelation relation = reit.next(); String relatedEntityName = relation.getRelEntityName(); ModelEntity relatedEntity = delegator.getModelEntity(relatedEntityName); if (!(relatedEntity instanceof ModelViewEntity)) { relatedEntities = getRelatedEntities(relatedEntities, relatedEntity, delegator); } } return relatedEntities; } }
true
true
public void mergeParties() throws ServiceException { Delegator delegator = getInfrastructure().getDelegator(); try { if (validate) { // validate again validateMergeParties(); } // merge the party objects mergeTwoValues("PartySupplementalData", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Person", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("PartyGroup", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Party", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); List<GenericValue> toRemove = new ArrayList<GenericValue>(); // Get a list of entities related to the Party entity, in descending order by relation List<ModelEntity> relatedEntities = getRelatedEntities("Party", delegator); // Go through the related entities in forward order - this makes sure that parent records are created before child records Iterator<ModelEntity> reit = relatedEntities.iterator(); while (reit.hasNext()) { ModelEntity modelEntity = reit.next(); // Examine each field of the entity Iterator<ModelField> mefit = modelEntity.getFieldsIterator(); while (mefit.hasNext()) { ModelField modelField = mefit.next(); if (modelField.getName().matches(".*[pP]artyId.*")) { // If the name of the field has something to do with a partyId, get all the existing records from that entity which have the // partyIdFrom in that particular field List<GenericValue> existingRecords = delegator.findByAnd(modelEntity.getEntityName(), UtilMisc.toMap(modelField.getName(), partyIdFrom)); if (existingRecords.size() > 0) { Iterator<GenericValue> eit = existingRecords.iterator(); while (eit.hasNext()) { GenericValue existingRecord = eit.next(); if (modelField.getIsPk()) { // If the partyId field is part of a primary key, create a new record with the partyIdTo in place of the partyIdFrom GenericValue newRecord = delegator.makeValue(modelEntity.getEntityName(), existingRecord.getAllFields()); newRecord.set(modelField.getName(), partyIdTo); // Create the new record if a record with the same primary key doesn't already exist if (delegator.findByPrimaryKey(newRecord.getPrimaryKey()) == null) { newRecord.create(); } // Add the old record to the list of records to remove toRemove.add(existingRecord); } else { // If the partyId field is not party of a primary key, simply update the field with the new value and store it existingRecord.set(modelField.getName(), partyIdTo); existingRecord.store(); } } } } } } // Go through the list of records to remove in REVERSE order! Since they're still in descending order of relation to the Party // entity, reversing makes sure that child records are removed before parent records, all the way back to the original Party record ListIterator<GenericValue> rit = toRemove.listIterator(toRemove.size()); while (rit.hasPrevious()) { GenericValue existingRecord = (GenericValue) rit.previous(); Debug.logError(existingRecord.toString(), MODULE); existingRecord.remove(); } } catch (GenericEntityException e) { new ServiceException(UtilMessage.expandLabel("OpentapsError_MergePartiesFail", locale) + e.getMessage()); } }
public void mergeParties() throws ServiceException { Delegator delegator = getInfrastructure().getDelegator(); try { if (validate) { // validate again validateMergeParties(); } // merge the party objects mergeTwoValues("PartySupplementalData", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Person", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("PartyGroup", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); mergeTwoValues("Party", UtilMisc.toMap("partyId", partyIdFrom), UtilMisc.toMap("partyId", partyIdTo), delegator); List<GenericValue> toRemove = new ArrayList<GenericValue>(); // Get a list of entities related to the Party entity, in descending order by relation List<ModelEntity> relatedEntities = getRelatedEntities("Party", delegator); // Go through the related entities in forward order - this makes sure that parent records are created before child records Iterator<ModelEntity> reit = relatedEntities.iterator(); while (reit.hasNext()) { ModelEntity modelEntity = reit.next(); // Examine each field of the entity Iterator<ModelField> mefit = modelEntity.getFieldsIterator(); while (mefit.hasNext()) { ModelField modelField = mefit.next(); if (modelField.getName().matches(".*[pP]artyId.*")) { // If the name of the field has something to do with a partyId, get all the existing records from that entity which have the // partyIdFrom in that particular field List<GenericValue> existingRecords = delegator.findByAnd(modelEntity.getEntityName(), UtilMisc.toMap(modelField.getName(), partyIdFrom)); if (existingRecords.size() > 0) { Iterator<GenericValue> eit = existingRecords.iterator(); while (eit.hasNext()) { GenericValue existingRecord = eit.next(); if (modelField.getIsPk()) { // If the partyId field is part of a primary key, create a new record with the partyIdTo in place of the partyIdFrom GenericValue newRecord = delegator.makeValue(modelEntity.getEntityName(), existingRecord.getAllFields()); newRecord.set(modelField.getName(), partyIdTo); // Create the new record if a record with the same primary key doesn't already exist if (delegator.findOne(newRecord.getPrimaryKey().getEntityName(), newRecord.getPrimaryKey(), false) == null) { newRecord.create(); } // Add the old record to the list of records to remove toRemove.add(existingRecord); } else { // If the partyId field is not party of a primary key, simply update the field with the new value and store it existingRecord.set(modelField.getName(), partyIdTo); existingRecord.store(); } } } } } } // Go through the list of records to remove in REVERSE order! Since they're still in descending order of relation to the Party // entity, reversing makes sure that child records are removed before parent records, all the way back to the original Party record ListIterator<GenericValue> rit = toRemove.listIterator(toRemove.size()); while (rit.hasPrevious()) { GenericValue existingRecord = (GenericValue) rit.previous(); Debug.logError(existingRecord.toString(), MODULE); existingRecord.remove(); } } catch (GenericEntityException e) { new ServiceException(UtilMessage.expandLabel("OpentapsError_MergePartiesFail", locale) + e.getMessage()); } }
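The functional change in this record is a single call-site migration: delegator.findByPrimaryKey(newRecord.getPrimaryKey()) becomes delegator.findOne(entityName, fields, useCache). A GenericPK can serve directly as the fields argument because it behaves as a Map, and passing false keeps the lookup uncached, matching the old behavior. Here is a minimal sketch of the new call shape, assuming the same OFBiz Delegator API imported above; the PartyLookupSketch class and the "Party"/partyId values are illustrative only.

import java.util.HashMap;
import java.util.Map;

import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;

/** Illustrative helper showing the findOne call shape used in the fix. */
public class PartyLookupSketch {

    /**
     * Old form (as in the buggy_function):
     *   delegator.findByPrimaryKey(newRecord.getPrimaryKey())
     * New form (as in the fixed_function): entity name, primary-key
     * fields as a Map, and an explicit useCache flag.
     */
    public static GenericValue findParty(Delegator delegator, String partyId)
        throws GenericEntityException {
        Map<String, Object> pk = new HashMap<String, Object>();
        pk.put("partyId", partyId);
        // 'false' requests an uncached read, matching the behavior of the
        // replaced findByPrimaryKey call.
        return delegator.findOne("Party", pk, false);
    }
}

The other findByPrimaryKey calls in this file (inside mergeTwoValues) are left untouched by the diff; only the duplicate-record check inside mergeParties was migrated.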
diff --git a/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java b/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java index b646c4c76..46657558d 100644 --- a/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java +++ b/framework/pull-agent/src/main/java/org/apache/manifoldcf/crawler/jobs/JobManager.java @@ -1,8810 +1,8810 @@ /* $Id: JobManager.java 998576 2010-09-19 01:11:02Z kwright $ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.crawler.jobs; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.crawler.interfaces.*; import java.util.*; import java.util.regex.*; import org.apache.manifoldcf.crawler.system.Logging; import org.apache.manifoldcf.crawler.system.ManifoldCF; /** This is the main job manager. It provides methods that support both job definition, and the threads that execute the jobs. */ public class JobManager implements IJobManager { public static final String _rcsid = "@(#)$Id: JobManager.java 998576 2010-09-19 01:11:02Z kwright $"; protected static final String hopLock = "_HOPLOCK_"; // Member variables protected IDBInterface database; protected IOutputConnectionManager outputMgr; protected IRepositoryConnectionManager connectionMgr; protected ILockManager lockManager; protected IThreadContext threadContext; protected JobQueue jobQueue; protected Jobs jobs; protected HopCount hopCount; protected Carrydown carryDown; protected EventManager eventManager; protected static Random random = new Random(); /** Constructor. *@param threadContext is the thread context. *@param database is the database. */ public JobManager(IThreadContext threadContext, IDBInterface database) throws ManifoldCFException { this.database = database; this.threadContext = threadContext; jobs = new Jobs(threadContext,database); jobQueue = new JobQueue(threadContext,database); hopCount = new HopCount(threadContext,database); carryDown = new Carrydown(database); eventManager = new EventManager(database); outputMgr = OutputConnectionManagerFactory.make(threadContext); connectionMgr = RepositoryConnectionManagerFactory.make(threadContext); lockManager = LockManagerFactory.make(threadContext); } /** Install. */ public void install() throws ManifoldCFException { jobs.install(outputMgr.getTableName(),outputMgr.getConnectionNameColumn(),connectionMgr.getTableName(),connectionMgr.getConnectionNameColumn()); jobQueue.install(jobs.getTableName(),jobs.idField); hopCount.install(jobs.getTableName(),jobs.idField); carryDown.install(jobs.getTableName(),jobs.idField); eventManager.install(); } /** Uninstall. 
*/ public void deinstall() throws ManifoldCFException { eventManager.deinstall(); carryDown.deinstall(); hopCount.deinstall(); jobQueue.deinstall(); jobs.deinstall(); } /** Export configuration */ public void exportConfiguration(java.io.OutputStream os) throws java.io.IOException, ManifoldCFException { // Write a version indicator ManifoldCF.writeDword(os,3); // Get the job list IJobDescription[] list = getAllJobs(); // Write the number of authorities ManifoldCF.writeDword(os,list.length); // Loop through the list and write the individual repository connection info int i = 0; while (i < list.length) { IJobDescription job = list[i++]; ManifoldCF.writeString(os,job.getConnectionName()); ManifoldCF.writeString(os,job.getOutputConnectionName()); ManifoldCF.writeString(os,job.getDescription()); ManifoldCF.writeDword(os,job.getType()); ManifoldCF.writeDword(os,job.getStartMethod()); ManifoldCF.writeLong(os,job.getInterval()); ManifoldCF.writeLong(os,job.getExpiration()); ManifoldCF.writeLong(os,job.getReseedInterval()); ManifoldCF.writeDword(os,job.getPriority()); ManifoldCF.writeDword(os,job.getHopcountMode()); ManifoldCF.writeString(os,job.getSpecification().toXML()); ManifoldCF.writeString(os,job.getOutputSpecification().toXML()); // Write schedule int recCount = job.getScheduleRecordCount(); ManifoldCF.writeDword(os,recCount); int j = 0; while (j < recCount) { ScheduleRecord sr = job.getScheduleRecord(j++); writeEnumeratedValues(os,sr.getDayOfWeek()); writeEnumeratedValues(os,sr.getMonthOfYear()); writeEnumeratedValues(os,sr.getDayOfMonth()); writeEnumeratedValues(os,sr.getYear()); writeEnumeratedValues(os,sr.getHourOfDay()); writeEnumeratedValues(os,sr.getMinutesOfHour()); ManifoldCF.writeString(os,sr.getTimezone()); ManifoldCF.writeLong(os,sr.getDuration()); ManifoldCF.writeByte(os,sr.getRequestMinimum()?1:0); } // Write hop count filters Map filters = job.getHopCountFilters(); ManifoldCF.writeDword(os,filters.size()); Iterator iter = filters.keySet().iterator(); while (iter.hasNext()) { String linkType = (String)iter.next(); Long hopcount = (Long)filters.get(linkType); ManifoldCF.writeString(os,linkType); ManifoldCF.writeLong(os,hopcount); } } } protected static void writeEnumeratedValues(java.io.OutputStream os, EnumeratedValues ev) throws java.io.IOException { if (ev == null) { ManifoldCF.writeSdword(os,-1); return; } int size = ev.size(); ManifoldCF.writeSdword(os,size); Iterator iter = ev.getValues(); while (iter.hasNext()) { ManifoldCF.writeDword(os,((Integer)iter.next()).intValue()); } } /** Import configuration */ public void importConfiguration(java.io.InputStream is) throws java.io.IOException, ManifoldCFException { int version = ManifoldCF.readDword(is); if (version != 2 && version != 3) throw new java.io.IOException("Unknown job configuration version: "+Integer.toString(version)); int count = ManifoldCF.readDword(is); int i = 0; while (i < count) { IJobDescription job = createJob(); job.setConnectionName(ManifoldCF.readString(is)); job.setOutputConnectionName(ManifoldCF.readString(is)); job.setDescription(ManifoldCF.readString(is)); job.setType(ManifoldCF.readDword(is)); job.setStartMethod(ManifoldCF.readDword(is)); job.setInterval(ManifoldCF.readLong(is)); job.setExpiration(ManifoldCF.readLong(is)); job.setReseedInterval(ManifoldCF.readLong(is)); job.setPriority(ManifoldCF.readDword(is)); job.setHopcountMode(ManifoldCF.readDword(is)); job.getSpecification().fromXML(ManifoldCF.readString(is)); job.getOutputSpecification().fromXML(ManifoldCF.readString(is)); // Read schedule int 
recCount = ManifoldCF.readDword(is); int j = 0; while (j < recCount) { EnumeratedValues dayOfWeek = readEnumeratedValues(is); EnumeratedValues monthOfYear = readEnumeratedValues(is); EnumeratedValues dayOfMonth = readEnumeratedValues(is); EnumeratedValues year = readEnumeratedValues(is); EnumeratedValues hourOfDay = readEnumeratedValues(is); EnumeratedValues minutesOfHour = readEnumeratedValues(is); String timezone = ManifoldCF.readString(is); Long duration = ManifoldCF.readLong(is); boolean requestMinimum; if (version >= 3) requestMinimum = (ManifoldCF.readByte(is) != 0); else requestMinimum = false; ScheduleRecord sr = new ScheduleRecord(dayOfWeek, monthOfYear, dayOfMonth, year, hourOfDay, minutesOfHour, timezone, duration, requestMinimum); job.addScheduleRecord(sr); j++; } // Read hop count filters int hopFilterCount = ManifoldCF.readDword(is); j = 0; while (j < hopFilterCount) { String linkType = ManifoldCF.readString(is); Long hopcount = ManifoldCF.readLong(is); job.addHopCountFilter(linkType,hopcount); j++; } // Attempt to save this job save(job); i++; } } protected EnumeratedValues readEnumeratedValues(java.io.InputStream is) throws java.io.IOException { int size = ManifoldCF.readSdword(is); if (size == -1) return null; int[] values = new int[size]; int i = 0; while (i < size) { values[i++] = ManifoldCF.readDword(is); } return new EnumeratedValues(values); } /** Note the deregistration of a connector used by the specified connections. * This method will be called when the connector is deregistered. Jobs that use these connections * must therefore enter appropriate states. *@param connectionNames is the set of connection names. */ public void noteConnectorDeregistration(String[] connectionNames) throws ManifoldCFException { // For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status. ArrayList list = new ArrayList(); int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{}); int currentCount = 0; int i = 0; while (i < connectionNames.length) { if (currentCount == maxCount) { noteConnectionDeregistration(list); list.clear(); currentCount = 0; } list.add(connectionNames[i++]); currentCount++; } if (currentCount > 0) noteConnectionDeregistration(list); } /** Note deregistration for a batch of connection names. */ protected void noteConnectionDeregistration(ArrayList list) throws ManifoldCFException { ArrayList newList = new ArrayList(); String query = database.buildConjunctionClause(newList,new ClauseDescription[]{ new MultiClause(jobs.connectionNameField,list)}); // Query for the matching jobs, and then for each job potentially adjust the state IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+ jobs.getTableName()+" WHERE "+query+" FOR UPDATE", newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField)); jobs.noteConnectorDeregistration(jobID,statusValue); } } /** Note the registration of a connector used by the specified connections. * This method will be called when a connector is registered, on which the specified * connections depend. *@param connectionNames is the set of connection names. */ public void noteConnectorRegistration(String[] connectionNames) throws ManifoldCFException { // For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status. 
ArrayList list = new ArrayList(); int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{}); int currentCount = 0; int i = 0; while (i < connectionNames.length) { if (currentCount == maxCount) { noteConnectionRegistration(list); list.clear(); currentCount = 0; } list.add(connectionNames[i++]); currentCount++; } if (currentCount > 0) noteConnectionRegistration(list); } /** Note registration for a batch of connection names. */ protected void noteConnectionRegistration(ArrayList list) throws ManifoldCFException { // Query for the matching jobs, and then for each job potentially adjust the state ArrayList newList = new ArrayList(); String query = database.buildConjunctionClause(newList,new ClauseDescription[]{ new MultiClause(jobs.connectionNameField,list)}); IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+ jobs.getTableName()+" WHERE "+query+" FOR UPDATE", newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField)); jobs.noteConnectorRegistration(jobID,statusValue); } } /** Note a change in connection configuration. * This method will be called whenever a connection's configuration is modified, or when an external repository change * is signalled. */ public void noteConnectionChange(String connectionName) throws ManifoldCFException { jobs.noteConnectionChange(connectionName); } /** Note the deregistration of an output connector used by the specified connections. * This method will be called when the connector is deregistered. Jobs that use these connections * must therefore enter appropriate states. *@param connectionNames is the set of connection names. */ public void noteOutputConnectorDeregistration(String[] connectionNames) throws ManifoldCFException { // For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status. ArrayList list = new ArrayList(); int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{}); int currentCount = 0; int i = 0; while (i < connectionNames.length) { if (currentCount == maxCount) { noteOutputConnectionDeregistration(list); list.clear(); currentCount = 0; } list.add(connectionNames[i++]); currentCount++; } if (currentCount > 0) noteOutputConnectionDeregistration(list); } /** Note deregistration for a batch of output connection names. */ protected void noteOutputConnectionDeregistration(ArrayList list) throws ManifoldCFException { ArrayList newList = new ArrayList(); String query = database.buildConjunctionClause(newList,new ClauseDescription[]{ new MultiClause(jobs.outputNameField,list)}); // Query for the matching jobs, and then for each job potentially adjust the state IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+ jobs.getTableName()+" WHERE "+query+" FOR UPDATE", newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField)); jobs.noteOutputConnectorDeregistration(jobID,statusValue); } } /** Note the registration of an output connector used by the specified connections. * This method will be called when a connector is registered, on which the specified * connections depend. *@param connectionNames is the set of connection names. 
*/ public void noteOutputConnectorRegistration(String[] connectionNames) throws ManifoldCFException { // For each connection, find the corresponding list of jobs. From these jobs, we want the job id and the status. ArrayList list = new ArrayList(); int maxCount = database.findConjunctionClauseMax(new ClauseDescription[]{}); int currentCount = 0; int i = 0; while (i < connectionNames.length) { if (currentCount == maxCount) { noteOutputConnectionRegistration(list); list.clear(); currentCount = 0; } list.add(connectionNames[i++]); currentCount++; } if (currentCount > 0) noteOutputConnectionRegistration(list); } /** Note registration for a batch of output connection names. */ protected void noteOutputConnectionRegistration(ArrayList list) throws ManifoldCFException { ArrayList newList = new ArrayList(); String query = database.buildConjunctionClause(newList,new ClauseDescription[]{ new MultiClause(jobs.outputNameField,list)}); // Query for the matching jobs, and then for each job potentially adjust the state IResultSet set = database.performQuery("SELECT "+jobs.idField+","+jobs.statusField+" FROM "+ jobs.getTableName()+" WHERE "+query+" FOR UPDATE", newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); int statusValue = jobs.stringToStatus((String)row.getValue(jobs.statusField)); jobs.noteOutputConnectorRegistration(jobID,statusValue); } } /** Note a change in output connection configuration. * This method will be called whenever a connection's configuration is modified, or when an external target config change * is signalled. */ public void noteOutputConnectionChange(String connectionName) throws ManifoldCFException { jobs.noteOutputConnectionChange(connectionName); } /** Load a sorted list of job descriptions. *@return the list, sorted by description. */ public IJobDescription[] getAllJobs() throws ManifoldCFException { return jobs.getAll(); } /** Create a new job. *@return the new job. */ public IJobDescription createJob() throws ManifoldCFException { return jobs.create(); } /** Get the hoplock for a given job ID */ protected String getHopLockName(Long jobID) { return hopLock + jobID; } /** Delete a job. *@param id is the job's identifier. This method will purge all the records belonging to the job from the database, as * well as remove all documents indexed by the job from the index. 
*/ public void deleteJob(Long id) throws ManifoldCFException { database.beginTransaction(); try { // If the job is running, throw an error ArrayList list = new ArrayList(); String query = database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,id)}); IResultSet set = database.performQuery("SELECT "+jobs.statusField+" FROM "+ jobs.getTableName()+" WHERE "+query+" FOR UPDATE",list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("Attempting to delete a job that doesn't exist: "+id); IResultRow row = set.getRow(0); int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); if (status == jobs.STATUS_ACTIVE || status == jobs.STATUS_ACTIVESEEDING || status == jobs.STATUS_ACTIVE_UNINSTALLED || status == jobs.STATUS_ACTIVESEEDING_UNINSTALLED || status == jobs.STATUS_ACTIVE_NOOUTPUT || status == jobs.STATUS_ACTIVESEEDING_NOOUTPUT || status == jobs.STATUS_ACTIVE_NEITHER || status == jobs.STATUS_ACTIVESEEDING_NEITHER) throw new ManifoldCFException("Job "+id+" is active; you must shut it down before deleting it"); if (status != jobs.STATUS_INACTIVE) throw new ManifoldCFException("Job "+id+" is busy; you must wait and/or shut it down before deleting it"); jobs.writeStatus(id,jobs.STATUS_READYFORDELETE); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+id+" marked for deletion"); } catch (ManifoldCFException e) { database.signalRollback(); throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } /** Load a job for editing. *@param id is the job's identifier. *@return null if the job doesn't exist. */ public IJobDescription load(Long id) throws ManifoldCFException { return jobs.load(id,false); } /** Load a job. *@param id is the job's identifier. *@param readOnly is true if a read-only object is desired. *@return null if the job doesn't exist. */ public IJobDescription load(Long id, boolean readOnly) throws ManifoldCFException { return jobs.load(id,readOnly); } /** Save a job. *@param jobDescription is the job description. */ public void save(IJobDescription jobDescription) throws ManifoldCFException { ManifoldCF.noteConfigurationChange(); jobs.save(jobDescription); } /** See if there's a reference to a connection name. *@param connectionName is the name of the connection. *@return true if there is a reference, false otherwise. */ public boolean checkIfReference(String connectionName) throws ManifoldCFException { return jobs.checkIfReference(connectionName); } /** See if there's a reference to an output connection name. *@param connectionName is the name of the connection. *@return true if there is a reference, false otherwise. */ public boolean checkIfOutputReference(String connectionName) throws ManifoldCFException { return jobs.checkIfOutputReference(connectionName); } /** Get the job IDs associated with a given connection name. *@param connectionName is the name of the connection. *@return the set of job id's associated with that connection. */ public IJobDescription[] findJobsForConnection(String connectionName) throws ManifoldCFException { return jobs.findJobsForConnection(connectionName); } // These methods cover activities that require interaction with the job queue. // The job queue is maintained underneath this interface, and all threads that perform // job activities need to go through this layer. /** Reset the job queue immediately after starting up. 
* If the system was shut down in the middle of a job, sufficient information should * be around in the database to allow it to restart. However, BEFORE all the job threads * are spun up, there needs to be a pass over the queue to bring things back to a "normal" * state. * Also, if a job's status is in a state that indicates it was being processed by a thread * (which is now dead), then we have to set that status back to previous value. */ public void prepareForStart() throws ManifoldCFException { Logging.jobs.debug("Resetting due to restart"); while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Clean up events eventManager.restart(); // Clean up job queue jobQueue.restart(); // Clean up jobs jobs.restart(); // Clean up hopcount stuff hopCount.reset(); // Clean up carrydown stuff carryDown.reset(); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); Logging.jobs.debug("Reset complete"); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset as part of restoring document worker threads. */ public void resetDocumentWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting document active status"); while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobQueue.resetDocumentWorkerStatus(); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting document active status: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring seeding threads. */ public void resetSeedingWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting seeding status"); jobs.resetSeedingWorkerStatus(); Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring doc delete threads. */ public void resetDocDeleteWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting doc deleting status"); TrackerClass.notePrecommit(); jobQueue.resetDocDeleteWorkerStatus(); TrackerClass.noteCommit(); Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring doc cleanup threads. */ public void resetDocCleanupWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting doc cleaning status"); TrackerClass.notePrecommit(); jobQueue.resetDocCleanupWorkerStatus(); TrackerClass.noteCommit(); Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring delete startup threads. */ public void resetDeleteStartupWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting job delete starting up status"); jobs.resetDeleteStartupWorkerStatus(); Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring notification threads. 
*/ public void resetNotificationWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting notification up status"); jobs.resetNotificationWorkerStatus(); Logging.jobs.debug("Reset complete"); } /** Reset as part of restoring startup threads. */ public void resetStartupWorkerStatus() throws ManifoldCFException { Logging.jobs.debug("Resetting job starting up status"); jobs.resetStartupWorkerStatus(); Logging.jobs.debug("Reset complete"); } // These methods support job delete threads /** Delete ingested document identifiers (as part of deleting the owning job). * The number of identifiers specified is guaranteed to be less than the maxInClauseCount * for the database. *@param identifiers is the set of document identifiers. */ public void deleteIngestedDocumentIdentifiers(DocumentDescription[] identifiers) throws ManifoldCFException { jobQueue.deleteIngestedDocumentIdentifiers(identifiers); // Hopcount rows get removed when the job itself is removed. // carrydown records get removed when the job itself is removed. } /** Get list of cleanable document descriptions. This list will take into account * multiple jobs that may own the same document. All documents for which a description * is returned will be transitioned to the "beingcleaned" state. Documents which are * not in transition and are eligible, but are owned by other jobs, will have their * jobqueue entries deleted by this method. *@param maxCount is the maximum number of documents to return. *@param currentTime is the current time; some fetches do not occur until a specific time. *@return the document descriptions for these documents. */ public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which belong to a job that's in a "shutting down" state and are in // a "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. // // SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM // jobs t3 WHERE t0.jobid=t3.id AND t3.status='X') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.cleaningJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the cleaning queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned". 
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ") .append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name and output connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. 
HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. 
String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()]; boolean[] rvalBoolean = new boolean[documentIDMap.size()]; i = 0; while (i < compositeIDArray.length) { String compositeDocID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID); // Determine whether we can delete it from the index or not rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null); // Set the record status to "being cleaned" and return it rval[i++] = dd; jobQueue.setCleaningStatus(dd.getID()); } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return new DocumentSetAndFlags(rval,rvalBoolean); } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Create a composite document hash key. This consists of the document id hash plus the * connection name. */ protected static String makeCompositeID(String docIDHash, String connectionName) { return docIDHash + ":" + connectionName; } /** Get list of deletable document descriptions. This list will take into account * multiple jobs that may own the same document. All documents for which a description * is returned will be transitioned to the "beingdeleted" state. Documents which are * not in transition and are eligible, but are owned by other jobs, will have their * jobqueue entries deleted by this method. *@param maxCount is the maximum number of documents to return. *@param currentTime is the current time; some fetches do not occur until a specific time. *@return the document descriptions for these documents. */ public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which either belong to a job that's in a "delete pending" state and are in // a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job // that's in a "shutting down" state and are in the "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. 
// // SELECT id,jobid,docid FROM jobqueue t0 WHERE (t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM // jobs t1 WHERE t0.jobid=t1.id AND t1.status='D') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.deletingJobsPresent()) return new DocumentDescription[0]; long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the delete queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted". // If FOR UPDATE was included, deadlock happened a lot. ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ") .append("t0.").append(jobQueue.checkTimeField).append("<=? AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) 
AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. 
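        // Side note (sketch; sample values assumed): the failTime/failCount handling above
        // follows the pattern used in this method - a NULL column becomes the sentinel
        // -1L for the fail time, and 0 for the fail count.
        {
          Long sketchFailTimeValue = null;                 // column was NULL in jobqueue
          long sketchFailTime = (sketchFailTimeValue == null) ? -1L : sketchFailTimeValue.longValue();
          Long sketchFailCountValue = new Long(3L);
          int sketchFailCount = (sketchFailCountValue == null) ? 0 : (int)sketchFailCountValue.longValue();
          // sketchFailTime == -1L (meaning "never failed"); sketchFailCount == 3
        }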
HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash to reduce chances of deadlock. String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()]; int j = 0; i = 0; while (i < compositeIDArray.length) { String compositeDocumentID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID); if (allowedDocIds.get(compositeDocumentID) == null) { // Delete this record and do NOT return it. jobQueue.deleteRecord(dd.getID()); // What should we do about hopcount here? // We are deleting a record which belongs to a job that is being // cleaned up. The job itself will go away when this is done, // and so will all the hopcount stuff pertaining to it. So, the // treatment I've chosen here is to leave the hopcount alone and // let the job cleanup get rid of it at the right time. // Note: carrydown records handled in the same manner... //carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()}); } else { // Set the record status to "being deleted" and return it rval[j++] = dd; jobQueue.setDeletingStatus(dd.getID()); } i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return rval; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get a list of document identifiers that should actually be deleted from the index, from a list that * might contain identifiers that are shared with other jobs, which are targeted to the same output connection. * The input list is guaranteed to be smaller in size than maxInClauseCount for the database. *@param documentIdentifiers is the set of document identifiers to consider. *@param connectionName is the connection name for ALL the document identifiers. 
*@param outputConnectionName is the output connection name for ALL the document identifiers. *@return the set of documents which should be removed from the index. */ protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName) throws ManifoldCFException { // This is where we will count the individual document id's HashMap countMap = new HashMap(); // First thing: Compute the set of document identifier hash values to query against HashMap map = new HashMap(); int i = 0; while (i < documentIdentifiers.length) { String hash = documentIdentifiers[i++].getDocumentIdentifierHash(); map.put(hash,hash); countMap.put(hash,new MutableInteger(0)); } if (map.size() == 0) return new String[0]; // Build a query StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); ArrayList docList = new ArrayList(); Iterator iter = map.keySet().iterator(); while (iter.hasNext()) { docList.add(iter.next()); } // Note: There is a potential race condition here. One job may be running while another is in process of // being deleted. If they share a document, then the delete task could decide to delete the document and do so right // after the ingestion takes place in the running job, but right before the document's status is updated // in the job queue [which would have prevented the deletion]. // Unless a transaction is thrown around the time ingestion is taking place (which is a very bad idea) // we are stuck with the possibility of this condition, which will essentially lead to a document being // missing from the index. // One way of dealing with this is to treat "active" documents as already ingested, for the purpose of // reference counting. Then these documents will not be deleted. The risk then becomes that the "active" // document entry will not be completed (say, because of a restart), and thus the corresponding document // will never be removed from the index. // // Instead, the only solution is to not queue a document for any activity that is inconsistent with activities // that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED" // and "BEING_CLEANED" state // for a document. These states will allow the various queries that queue up activities to avoid documents that // are currently being processed elsewhere. sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ") .append("t1.").append(jobs.connectionNameField).append("=? AND ") .append("t1.").append(jobs.outputNameField).append("=?)"); list.add(connectionName); list.add(outputConnectionName); // Do the query, and then count the number of times each document identifier occurs. 
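    // Sketch of the counting step that follows (sample values assumed): each returned
    // row bumps a per-hash counter, and a final count of exactly 1 means this
    // connection/output pair is the sole owner, so removing the document from the
    // index is safe.
    {
      MutableInteger sketchCounter = new MutableInteger(0);
      sketchCounter.increment();                         // one matching jobqueue row seen
      boolean sketchSoleOwner = (sketchCounter.intValue() == 1);
    }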
IResultSet results = database.performQuery(sb.toString(),list,null,null); i = 0; while (i < results.getRowCount()) { IResultRow row = results.getRow(i++); String docIDHash = (String)row.getValue(jobQueue.docHashField); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi != null) mi.increment(); } // Go through and count only those that have a count of 1. int count = 0; iter = countMap.keySet().iterator(); while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) count++; } String[] rval = new String[count]; iter = countMap.keySet().iterator(); count = 0; while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) rval[count++] = docIDHash; } return rval; } // These methods support the reprioritization thread. /** Get a list of already-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); // The desired query is: // SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n) sb.append("SELECT ") .append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_COMPLETE), jobQueue.statusToString(JobQueue.STATUS_UNCHANGED), jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" "); sb.append(database.constructOffsetLimitClause(0,n)); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); // This query MUST return only documents that are in a pending state which belong to an active job!!! 
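    // Sketch (sample values assumed): both reprioritization queries use a strict "<"
    // against prioritySetField, so a row stamped with this very pass's timestamp is
    // not picked up again by the same pass.
    {
      long sketchPassTime = 1700000000000L;         // assumed current-pass timestamp
      long sketchRowStamp = sketchPassTime;         // row just stamped by this pass
      boolean sketchEligible = (sketchRowStamp < sketchPassTime);  // false: skipped
    }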
sb.append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ JobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED), JobQueue.statusToString(jobQueue.STATUS_PENDING), JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ") .append(jobQueue.checkActionField).append("=?").append(" AND "); list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN)); // Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them, // so this will be changed to not include jobs where the priorities have been bashed to null. // // I've included ALL states that might have non-null doc priorities. This includes states // corresponding to uninstalled connectors, since there is no transition that cleans out the // document priorities in these states. The time during which a connector is uninstalled is // expected to be short, because typically this state is the result of an installation procedure // rather than willful action on the part of a user. sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(Jobs.STATUS_STARTINGUP), Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL), Jobs.statusToString(Jobs.STATUS_ACTIVE), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER) }), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(database.constructOffsetLimitClause(0,n)); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Save a set of document priorities. In the case where a document was eligible to have its * priority set, but it no longer is eligible, then the provided priority will not be written. *@param currentTime is the time in milliseconds since epoch. *@param documentDescriptions are the document descriptions. *@param priorities are the desired priorities. 
*/ public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities) throws ManifoldCFException { // Retry loop - in case we get a deadlock despite our best efforts while (true) { // This should be ordered by document identifier hash in order to prevent potential deadlock conditions HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":"+documentDescriptions[i].getJobID(); docIDHashes[i] = documentIDHash; indexMap.put(documentIDHash,new Integer(i)); i++; } java.util.Arrays.sort(docIDHashes); long sleepAmt = 0L; // Start the transaction now database.beginTransaction(); try { // Need to order the writes by doc id. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); DocumentDescription dd = documentDescriptions[index]; double priority = priorities[index]; jobQueue.writeDocPriority(currentTime,dd.getID(),priorities[index]); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString()); i++; } database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get up to the next n documents to be expired. * This method marks the documents whose descriptions have been returned as "being processed", or active. * The same marking is used as is used for documents that have been queued for worker threads. The model * is thus identical. * *@param n is the maximum number of records desired. *@param currentTime is the current time. *@return the array of document descriptions to expire. */ public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime) throws ManifoldCFException { // Screening query // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up // for an extended period of time. if (!jobs.activeJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Beginning query to look for documents to expire"); } // Put together a query with a limit of n // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query. 
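    // Aside - a sketch of the sort-then-lookup pattern used by writeDocumentPriorities()
    // above and again by the assembly code later in this method (sample values assumed):
    // keys are sorted for a stable lock order, while a map remembers each key's original
    // array slot; remove() doubles as duplicate detection.
    {
      HashMap sketchIndexMap = new HashMap();
      String[] sketchKeys = new String[]{"b0c4:101","a7d2:100"};
      sketchIndexMap.put("b0c4:101", new Integer(0));
      sketchIndexMap.put("a7d2:100", new Integer(1));
      java.util.Arrays.sort(sketchKeys);                  // update order: a7d2:100, then b0c4:101
      Integer sketchSlot = (Integer)sketchIndexMap.remove(sketchKeys[0]);
      // sketchSlot.intValue() == 1: the first row to update came from input position 1;
      // a second remove() of the same key would return null, flagging a duplicate.
    }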
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.jobIDField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField).append(",") .append("t0.").append(jobQueue.statusField).append(",") .append("t0.").append(jobQueue.failTimeField).append(",") .append("t0.").append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,n)); String query = sb.toString(); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); ArrayList answers = new ArrayList(); int repeatCount = 0; while (true) { long sleepAmt = 0L; if (Logging.perf.isDebugEnabled()) { repeatCount++; Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+ new Long(System.currentTimeMillis() - startTime)+" ms"); } database.beginTransaction(); try { IResultSet set = database.performQuery(query,list,null,null,n,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
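        // Sketch of the eligibility predicate encoded in the query above (sample values
        // assumed): a row expires when its scheduled action is REMOVE and its check time
        // has passed; rows scheduled in the future are left alone.
        {
          long sketchNow = System.currentTimeMillis();
          long sketchCheckTime = sketchNow - 1000L;     // scheduled one second ago
          boolean sketchIsRemoveAction = true;          // checkaction == ACTION_REMOVE
          boolean sketchExpires = sketchIsRemoveAction && (sketchCheckTime <= sketchNow);
        }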
HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); statusMap.put(compositeDocumentID,new Integer(status)); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. 
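          // Sketch of the two-level bucketing performed above (sample values assumed):
          // descriptions are grouped by repository connection and then by output
          // connection, so each (connection, output) bucket can be screened with a
          // single getUnindexableDocumentIdentifiers() call.
          {
            HashMap sketchByConnection = new HashMap();
            HashMap sketchByOutput = new HashMap();
            ArrayList sketchBucket = new ArrayList();
            sketchBucket.add("documentDescriptionPlaceholder");
            sketchByOutput.put("outputConnection1", sketchBucket);
            sketchByConnection.put("repoConnection1", sketchByOutput);
            // lookup path: connection name, then output name, then the bucket itself
            ArrayList sketchFound = (ArrayList)((Map)sketchByConnection.get("repoConnection1")).get("outputConnection1");
          }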
          String[] compositeIDArray = new String[documentIDMap.size()];
          i = 0;
          iter = documentIDMap.keySet().iterator();
          while (iter.hasNext())
          {
            compositeIDArray[i++] = (String)iter.next();
          }

          java.util.Arrays.sort(compositeIDArray);

          DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
          boolean[] rvalBoolean = new boolean[documentIDMap.size()];
          i = 0;
          while (i < compositeIDArray.length)
          {
            String compositeDocID = compositeIDArray[i];
            DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
            // Determine whether we can delete it from the index or not
            rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
            // Restore the record to the appropriate "active" status and return it
            rval[i++] = dd;
            jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
          }

          TrackerClass.notePrecommit();
          database.performCommit();
          TrackerClass.noteCommit();

          return new DocumentSetAndFlags(rval, rvalBoolean);
        }
        catch (ManifoldCFException e)
        {
          database.signalRollback();
          TrackerClass.noteRollback();
          if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
          {
            if (Logging.perf.isDebugEnabled())
              Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
            sleepAmt = getRandomAmount();
            continue;
          }
          throw e;
        }
        catch (Error e)
        {
          database.signalRollback();
          TrackerClass.noteRollback();
          throw e;
        }
        finally
        {
          database.endTransaction();
          sleepFor(sleepAmt);
        }
      }
    }

  // This method supports the "queue stuffer" thread

  /** Get up to the next n document(s) to be fetched and processed.
  * This fetch returns records that contain the document identifier, plus all instructions
  * pertaining to the document's handling (e.g. whether it should be refetched if the version
  * has not changed).
  * This method also marks the documents whose descriptions have been returned as "being processed".
  *@param n is the maximum number of records desired.
  *@param currentTime is the current time; some fetches do not occur until a specific time.
  *@param interval is the number of milliseconds that this set of documents should represent (for throttling).
  *@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
  * but could not be queued due to throttling considerations.
  *@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
  * so that individual connections are not overwhelmed.
  *@param scanRecord retains the bins from all documents encountered from the query, even those that were skipped due
  * to being overcommitted.
  *@return the array of document descriptions to fetch and process.
  */
  public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
    BlockingDocuments blockingDocuments, PerformanceStatistics statistics,
    DepthStatistics scanRecord)
    throws ManifoldCFException
  {
    // NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
    // a given interval.  Therefore, the returned result has the following constraints on it:
    // 1) There must be no more than n documents returned total;
    // 2) For any given job that is throttled, the total number of documents returned must be
    //    consistent with the time interval provided.
    // In general, this requires the database layer to perform fairly advanced filtering on
    // the result, far in excess of a simple count.
    // An implementation of an interface is therefore going to need to be passed into the
    // performQuery() operation, which prunes the resultset as it is being read into memory.
    // That's a new feature that will need to be added to the database layer.

    // Screening query
    // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
    // for an extended period of time.
    if (!jobs.activeJobsPresent())
      return new DocumentDescription[0];

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to find documents to queue");
    }

    // Below there used to be one large transaction, with multiple read sections and multiple write sections.
    // As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
    // transaction up.  However, the transaction depended for its correctness in throttling on making sure
    // that the throttles that were built were based on the same active jobs that the subsequent queries
    // that did the stuffing relied upon.  This made reorganization impossible until I realized that with
    // Postgresql's way of doing transaction isolation this was going to happen anyway, so I needed a more
    // robust solution.
    //
    // Specifically, I chose to change the way documents were queued so that only documents from properly
    // throttled jobs could be queued.  That meant I needed to add stuff to the ThrottleLimit class to track
    // the very knowledge of an active job.  This had the additional benefit of meaning there was no chance of
    // a query occurring from inside a resultset filter.
    //
    // But, after I did this, it was no longer necessary to have such a large transaction either.

    // Anything older than 10 minutes ago is considered eligible for reprioritization.
    long prioritizationTime = currentTime - 60000L * 10L;

    ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);

    IResultSet jobconnections = jobs.getActiveJobConnections();
    HashMap connectionSet = new HashMap();
    int i = 0;
    while (i < jobconnections.getRowCount())
    {
      IResultRow row = jobconnections.getRow(i++);
      Long jobid = (Long)row.getValue("jobid");
      String connectionName = (String)row.getValue("connectionname");
      vList.addJob(jobid,connectionName);
      connectionSet.put(connectionName,connectionName);
    }

    // Find the active connection names.  We'll load these, and then get throttling info
    // from each one.
    String[] activeConnectionNames = new String[connectionSet.size()];
    Iterator iter = connectionSet.keySet().iterator();
    i = 0;
    while (iter.hasNext())
    {
      activeConnectionNames[i++] = (String)iter.next();
    }
    IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);

    // Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
    // factor and set the connection limits.
    HashMap rawFetchCounts = new HashMap();
    double rawFetchCountTotal = 0.0;
    i = 0;
    while (i < connections.length)
    {
      IRepositoryConnection connection = connections[i++];
      String connectionName = connection.getName();
      int maxConnections = connection.getMaxConnections();
      double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
      double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
      // Keep the avg rate for later use, since it may get updated before the next time we need it.
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount)); rawFetchCountTotal += weightedRawFetchCount; } // Calculate an adjustment factor double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal; // For each job, we must amortize the maximum number of fetches per ms to the actual interval, // and also randomly select an extra fetch based on the fractional probability. (This latter is // necessary for the case where the maximum fetch rate is specified to be pretty low.) // i = 0; while (i < connections.length) { IRepositoryConnection connection = connections[i++]; String connectionName = connection.getName(); // Check if throttled... String[] throttles = connection.getThrottles(); int k = 0; while (k < throttles.length) { // The key is the regexp value itself String throttle = throttles[k++]; float throttleValue = connection.getThrottleValue(throttle); // For the given connection, set the fetch limit per bin. This is calculated using the time interval // and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch" // on a weighted random basis. // // In the future, the connection may specify tuples which pair a regexp describing a set of bins against // a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum // count. double fetchesPerTimeInterval = (double)throttleValue * (double)interval; // Actual amount will be the integer value of this, plus an additional 1 if the random number aligns int fetches = (int)fetchesPerTimeInterval; fetchesPerTimeInterval -= (double)fetches; if (random.nextDouble() <= fetchesPerTimeInterval) fetches++; // Save the limit in the ThrottleLimit structure vList.addLimit(connectionName,throttle,fetches); } // For the overall connection, we also have a limit which is based on the number of connections there are actually available. Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName); double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor; // Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing // the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots // of available connection handles, but the bulk of the activity is on slow speed/highly handle limited connections, but I honestly can't think of a better way at the moment. // One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - then we won't loop as much. // // Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum. int fetchCount = ((int)adjustedFetchCount) + 5; vList.setConnectionLimit(connectionName,fetchCount); } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue"); // System.out.println("Done building throttle structure"); // Locate records. // Note that we do NOT want to get everything there is to know about the job // using this query, since the file specification may be large and expensive // to parse. We will load a (cached) copy of the job description for that purpose. // // NOTE: This query deliberately excludes documents which may be being processed by another job. 
// (It actually excludes a bit more than that, because the exact query is impossible to write given // the fact that document id's cannot be compared.) These are documents where there is ANOTHER // document entry with the same hash value, a different job id, and a status which is either "active", // "activepurgatory", or "beingdeleted". (It does not check whether the jobs have the same connection or // whether the document id's are in fact the same, and therefore may temporarily block legitimate document // activity under rare circumstances.) // // The query I want is: // SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx // AND EXISTS(SELECT 'x' FROM // jobs t1 WHERE t0.jobid=t1.id AND t1.status='A') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','D')) // ORDER BY docpriority ASC LIMIT xxx // // NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried. Then, because every // document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at // one point. Why? Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents // without working with a monster resultset. // // I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index // to pull its results in an ordered fashion // // // Another subtlety is that I *must* mark the documents active as I find them, so that they do not // have any chance of getting returned twice. // Accumulate the answers here ArrayList answers = new ArrayList(); // The current time value Long currentTimeValue = new Long(currentTime); // Always analyze jobqueue before this query. Otherwise stuffing may get a bad plan, interfering with performance. // This turned out to be needed in postgresql 8.3, even though 8.2 worked fine. //jobQueue.unconditionallyAnalyzeTables(); // Loop through priority values int currentPriority = 1; boolean isDone = false; while (!isDone && currentPriority <= 10) { if (jobs.hasPriorityJobs(currentPriority)) { Long currentPriorityValue = new Long((long)currentPriority); fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections); isDone = !vList.checkContinue(); } currentPriority++; } // Assert the blocking documents we discovered vList.tallyBlockingDocuments(blockingDocuments); // Convert the saved answers to an array DocumentDescription[] rval = new DocumentDescription[answers.size()]; i = 0; while (i < rval.length) { rval[i] = (DocumentDescription)answers.get(i); i++; } // After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment. // This done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are // higher than the current level that is currently being dequeued. // // The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current // document priority level. For example, documents could be classed by job, which might make sense because there is a possibility that two jobs' // job priorities may differ. Also, because of document fetch scheduling, each time frame may represent a class in its own right as well. // These classes would have to be associated with independent bin counts, if we were to make any use of them. 
Then, it would be also necessary // to know what classes a document belonged to in order to be able to calculate its priority. // // An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc. // That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal. // Find the one row from a live job that has the best document priority, which is available within the current time window. // Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even // germane at the moment. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause(jobQueue.statusField, new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField}, true)).append(" ") .append(database.constructOffsetLimitClause(0,1,true)); IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null); if (set.getRowCount() > 0) { IResultRow row = set.getRow(0); Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue) scanRecord.addBins(docPriority); } return rval; } /** Fetch and process documents matching the passed-in criteria */ protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue, ThrottleLimit vList, IRepositoryConnection[] connections) throws ManifoldCFException { // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query. 
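    // Aside - a sketch of the LIMIT-1 "priority floor" probe that getNextDocuments()
    // runs above (sample values assumed): the single best-priority eligible row is read,
    // and its bins are recorded only when its priority is a real value rather than the
    // "no priority" sentinel.
    {
      Double sketchProbedPriority = new Double(0.37);    // from the ORDER BY ... LIMIT 1 probe
      double sketchNoPrioritySentinel = 1e9;             // stands in for jobQueue.noDocPriorityValue
      if (sketchProbedPriority != null && sketchProbedPriority.doubleValue() < sketchNoPrioritySentinel)
      {
        // scanRecord.addBins(sketchProbedPriority) would record the floor here
      }
    }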
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT t0."); sb.append(jobQueue.idField).append(",t0."); if (Logging.scheduling.isDebugEnabled()) sb.append(jobQueue.docPriorityField).append(",t0."); sb.append(jobQueue.jobIDField).append(",t0.") .append(jobQueue.docHashField).append(",t0.") .append(jobQueue.docIDField).append(",t0.") .append(jobQueue.statusField).append(",t0.") .append(jobQueue.failTimeField).append(",t0.") .append(jobQueue.failCountField).append(",t0.") .append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField), new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); // Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name) sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.") .append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.") .append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField) .append(")"); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ "t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField}, true)).append(" "); // Before entering the transaction, we must provide the throttlelimit object with all the connector // instances it could possibly need. The purpose for doing this is to prevent a deadlock where // connector starvation causes database lockup. // // The preallocation of multiple connector instances is certainly a worry. 
If any other part // of the code allocates multiple connector instances also, the potential exists for this to cause // deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple" // at the connector factory level to make sure these requests are properly ordered. String[] orderingKeys = new String[connections.length]; String[] classNames = new String[connections.length]; ConfigParams[] configParams = new ConfigParams[connections.length]; int[] maxConnections = new int[connections.length]; int k = 0; while (k < connections.length) { IRepositoryConnection connection = connections[k]; orderingKeys[k] = connection.getName(); classNames[k] = connection.getClassName(); configParams[k] = connection.getConfigParams(); maxConnections[k] = connection.getMaxConnections(); k++; } IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections); try { // Hand the connectors off to the ThrottleLimit instance k = 0; while (k < connections.length) { vList.addConnectionName(connections[k].getName(),connectors[k]); k++; } // Now we can tack the limit onto the query. Before this point, remainingDocuments would be crap int limitValue = vList.getRemainingDocuments(); sb.append(database.constructOffsetLimitClause(0,limitValue,true)); if (Logging.perf.isDebugEnabled()) { Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+ " (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
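            // Sketch (sample values assumed): the sort keys built just below suffix the
            // job ID, making each key unique per jobqueue row, whereas makeCompositeID()
            // elsewhere suffixes the connection name to identify the same logical
            // document across jobs.
            {
              String sketchPerRowKey = "0a1f" + ":" + new Long(12L).toString();      // "0a1f:12" - one jobqueue row
              String sketchPerDocKey = makeCompositeID("0a1f","repoConnection1");    // "0a1f:repoConnection1" - cross-job identity
            }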
String[] docIDHashes = new String[set.getRowCount()]; Map storageMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long id = (Long)row.getValue(jobQueue.idField); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String docIDHash = (String)row.getValue(jobQueue.docHashField); String docID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = -1; else failCount = (int)failCountValue.longValue(); DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount); docIDHashes[i] = docIDHash + ":" + jobID; storageMap.put(docIDHashes[i],dd); statusMap.put(docIDHashes[i],new Integer(status)); if (Logging.scheduling.isDebugEnabled()) { Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list"); } i++; } // No duplicates are possible here java.util.Arrays.sort(docIDHashes); i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash); Long id = dd.getID(); int status = ((Integer)statusMap.get(docIDHash)).intValue(); // Set status to "ACTIVE". jobQueue.updateActiveRecord(id,status); answers.add(dd); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } finally { RepositoryConnectorFactory.releaseMultiple(connectors); } } // These methods support the individual fetch/process threads. /** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy. * The query done within MUST be cached in order to not cause undue performance degradation. *@param jobID is the job identifier. *@return true if the job is in one of the "active" states. */ public boolean checkJobActive(Long jobID) throws ManifoldCFException { return jobs.checkJobActive(jobID); } /** Verify if a job is still processing documents, or no longer has any outstanding active documents */ public boolean checkJobBusy(Long jobID) throws ManifoldCFException { return jobQueue.checkJobBusy(jobID); } /** Note completion of document processing by a job thread of a document. * This method causes the state of the document to be marked as "completed". *@param documentDescriptions are the description objects for the documents that were processed. */ public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions) throws ManifoldCFException { // Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE // transaction is needed in order to complete these documents correctly. 
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
    //

    HashMap indexMap = new HashMap();
    String[] docIDHashes = new String[documentDescriptions.length];

    int i = 0;
    while (i < documentDescriptions.length)
    {
      String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      docIDHashes[i] = documentIDHash;
      indexMap.put(documentIDHash,new Integer(i));
      i++;
    }

    java.util.Arrays.sort(docIDHashes);

    // Retry loop - in case we get a deadlock despite our best efforts
    while (true)
    {
      long sleepAmt = 0L;

      // Start the transaction now
      database.beginTransaction();
      try
      {
        // Do one row at a time, to avoid deadlocking things
        i = 0;
        while (i < docIDHashes.length)
        {
          String docIDHash = docIDHashes[i];

          // Get the DocumentDescription object
          DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];

          // Query for the status
          ArrayList list = new ArrayList();
          String query = database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobQueue.idField,dd.getID())});
          TrackerClass.notePreread(dd.getID());
          IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
            query+" FOR UPDATE",list,null,null);
          TrackerClass.noteRead(dd.getID());
          if (set.getRowCount() > 0)
          {
            IResultRow row = set.getRow(0);
            // Grab the status
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            // Update the jobqueue table
            jobQueue.updateCompletedRecord(dd.getID(),status);
          }
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
            " docs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Note completion of document processing by a job thread of a document.
  * This method causes the state of the document to be marked as "completed".
  *@param documentDescription is the description object for the document that was processed.
  */
  public void markDocumentCompleted(DocumentDescription documentDescription)
    throws ManifoldCFException
  {
    markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.
  * These documents are likely to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // It's no longer an issue to have to deal with documents being conditionally deleted; that's been
    // taken over by the hopcountremoval method below.  So just use the simple 'delete' functionality.
    return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Mark hopcount removal from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // For each record, we're going to have to choose between marking it as "hopcount removed", and marking
    // it for rescan.  So the basic flow will involve changing a document's status.

    // Before we can change a document status, we need to know the *current* status.  Therefore, a SELECT xxx FOR UPDATE/UPDATE
    // transaction is needed in order to complete these documents correctly.
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
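    // Sketch of the bookkeeping done below (sample values assumed): hashes of rows that
    // were actually hopcount-removed are collected, and that list then drives the search
    // for carrydown children that must be requeued.
    {
      List<String> sketchDeleteList = new ArrayList<String>();
      boolean sketchDidDelete = true;              // as reported by updateOrHopcountRemoveRecord()
      if (sketchDidDelete)
        sketchDeleteList.add("sampleDocHash");
      String[] sketchAffectedInput = new String[sketchDeleteList.size()];
      for (int sketchJ = 0; sketchJ < sketchAffectedInput.length; sketchJ++)
      {
        sketchAffectedInput[sketchJ] = sketchDeleteList.get(sketchJ);
      }
      // sketchAffectedInput would be handed to calculateAffectedDeleteCarrydownChildren()
    }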
    //
    HashMap indexMap = new HashMap();
    String[] docIDHashes = new String[documentDescriptions.length];
    int i = 0;
    while (i < documentDescriptions.length)
    {
      String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      docIDHashes[i] = documentIDHash;
      indexMap.put(documentIDHash,new Integer(i));
      i++;
    }

    java.util.Arrays.sort(docIDHashes);

    // Retry loop - in case we get a deadlock despite our best efforts
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        // Do one row at a time, to avoid deadlocking things
        List<String> deleteList = new ArrayList<String>();
        i = 0;
        while (i < docIDHashes.length)
        {
          String docIDHash = docIDHashes[i];

          // Get the DocumentDescription object
          DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];

          // Query for the status
          ArrayList list = new ArrayList();
          String query = database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobQueue.idField,dd.getID())});
          TrackerClass.notePreread(dd.getID());
          IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
            query+" FOR UPDATE",list,null,null);
          TrackerClass.noteRead(dd.getID());
          if (set.getRowCount() > 0)
          {
            IResultRow row = set.getRow(0);
            // Grab the status
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            // Update the jobqueue table
            boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status);
            if (didDelete)
            {
              deleteList.add(dd.getDocumentIdentifierHash());
            }
          }
          i++;
        }

        String[] docIDSimpleHashes = new String[deleteList.size()];
        for (int j = 0; j < docIDSimpleHashes.length; j++)
        {
          docIDSimpleHashes[j] = deleteList.get(j);
        }

        // Next, find the documents that are affected by carrydown deletion.
        DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);

        // Since hopcount inheritance and prerequisites came from the addDocument() method,
        // we don't delete them here.

        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        return rval;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction marking hopcount removal for "+Integer.toString(docIDHashes.length)+
            " docs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Mark hopcount removal from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Delete from queue as a result of expiration of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  Since the document expired,
  * no special activity takes place as a result of the document being in a RESCAN state.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
  }

  /** Delete from queue as a result of expiration of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  Since the document expired,
  * no special activity takes place as a result of the document being in a RESCAN state.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Delete from queue as a result of cleaning up an unreachable document.
  * The document is expected to be in the PURGATORY state.  There is never any need to reprocess the
  * document.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
  }
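  // --- Illustrative caller-side sketch; variable names and the ACTION_RESCAN constant's
  // location are assumptions, not confirmed by this file ---
  // Documents whose carrydown data changed as a side effect of an expiration are
  // candidates for requeueing, e.g.:
  //
  //   DocumentDescription[] affected =
  //     jobManager.markDocumentExpired(jobID,legalLinkTypes,dd,hopcountMethod);
  //   for (int q = 0; q < affected.length; q++)
  //     jobManager.requeueDocument(affected[q],new Long(System.currentTimeMillis()),ACTION_RESCAN);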
  /** Delete from queue as a result of cleaning up an unreachable document.
  * The document is expected to be in the PURGATORY state.  There is never any need to reprocess the
  * document.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Delete documents with no repercussions.  We don't have to worry about the current state of each document,
  * since the document is definitely going away.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    if (documentDescriptions.length == 0)
      return new DocumentDescription[0];

    // Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else.
    // In all cases, the state of the document excludes other activity.
    // The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add
    // a reference for the very same document to the queue.  Then, order of locking matters, so the deletions should happen in a specific order to avoid
    // the possibility of deadlock.  This is enough of a risk that I've chosen to order the deletions by document id hash, just like everywhere
    // else.

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString());
    }

    HashMap indexMap = new HashMap();
    String[] docIDHashes = new String[documentDescriptions.length];
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort by doc hash, to establish non-blocking lock order
    java.util.Arrays.sort(docIDHashes);

    DocumentDescription[] rval;
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+
          " docs and clean up hopcount for job "+jobID.toString());

        String[] docIDSimpleHashes = new String[docIDHashes.length];

        // Delete jobqueue rows FIRST.  Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these
        // rows that might get affected by carrydown data deletion, not the rows themselves!
        i = 0;
        while (i < docIDHashes.length)
        {
          String docIDHash = docIDHashes[i];
          DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];
          // Individual operations are necessary so order can be controlled.
          jobQueue.deleteRecord(dd.getID());
          docIDSimpleHashes[i] = dd.getDocumentIdentifierHash();
          i++;
        }

        // Next, find the documents that are affected by carrydown deletion.
        rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes);

        // Finally, delete the carrydown records in question.
        carryDown.deleteRecords(jobID,docIDSimpleHashes);
        if (legalLinkTypes.length > 0)
          hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod);

        database.performCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+
          " docs and clean up hopcount for job "+jobID.toString());
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+
            " docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    return rval;
  }

  /** Helper method: Find the document descriptions that will be affected due to carrydown row deletions.
  */
  protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes)
    throws ManifoldCFException
  {
    // Break the request into pieces, as needed, and throw everything into a hash for uniqueness.
    // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes.
    // The goal is to throw all the children into a hash, to make them unique at the end.
    HashMap resultHash = new HashMap();
    ArrayList list = new ArrayList();
    int maxCount = maxClauseProcessDeleteHashSet();
    int i = 0;
    int z = 0;
    while (i < docIDHashes.length)
    {
      if (z == maxCount)
      {
        processDeleteHashSet(jobID,resultHash,list);
        list.clear();
        z = 0;
      }
      list.add(docIDHashes[i]);
      i++;
      z++;
    }

    if (z > 0)
      processDeleteHashSet(jobID,resultHash,list);

    // Now, put together the result document list from the hash.
    DocumentDescription[] rval = new DocumentDescription[resultHash.size()];
    i = 0;
    Iterator iter = resultHash.keySet().iterator();
    while (iter.hasNext())
    {
      Long id = (Long)iter.next();
      DocumentDescription dd = (DocumentDescription)resultHash.get(id);
      rval[i++] = dd;
    }
    return rval;
  }

  /** Get maximum count.
  */
  protected int maxClauseProcessDeleteHashSet()
  {
    return database.findConjunctionClauseMax(new ClauseDescription[]{
      new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
      new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)});
  }
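  // --- Illustrative sketch; "processBatch" is an invented stand-in name ---
  // The batching idiom used by calculateAffectedDeleteCarrydownChildren() above:
  // accumulate up to maxCount hashes, flush each full batch as one bounded IN-clause
  // query, then flush the remainder after the loop.
  //
  //   ArrayList batch = new ArrayList();
  //   int z = 0;
  //   for (int q = 0; q < docIDHashes.length; q++)
  //   {
  //     if (z == maxCount)
  //     {
  //       processBatch(jobID,resultHash,batch);
  //       batch.clear();
  //       z = 0;
  //     }
  //     batch.add(docIDHashes[q]);
  //     z++;
  //   }
  //   if (z > 0)
  //     processBatch(jobID,resultHash,batch);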
  /** Helper method: look up rows affected by a deleteRecords operation.
  */
  protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list)
    throws ManifoldCFException
  {
    // The query here mirrors the carrydown.restoreRecords() delete query!  However, it also fetches enough information to build a DocumentDescription
    // object for return, and so a join is necessary against the jobqueue table.
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList newList = new ArrayList();

    sb.append("t0.").append(jobQueue.idField).append(",")
      .append("t0.").append(jobQueue.docHashField).append(",")
      .append("t0.").append(jobQueue.docIDField)
      .append(" FROM ").append(carryDown.getTableName()).append(" t1, ")
      .append(jobQueue.getTableName()).append(" t0 WHERE ");

    sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
      new UnitaryClause("t1."+carryDown.jobIDField,jobID),
      new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND ");

    sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{
      new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField),
      new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)}));

    /*
    sb.append("t0.").append(jobQueue.idField).append(",")
      .append("t0.").append(jobQueue.docHashField).append(",")
      .append("t0.").append(jobQueue.docIDField)
      .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ")
      .append(database.buildConjunctionClause(newList,new ClauseDescription[]{
        new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND ");

    sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ")
      .append(database.buildConjunctionClause(newList,new ClauseDescription[]{
        new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField),
        new MultiClause("t1."+carryDown.parentIDHashField,list),
        new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}))
      .append(")");
    */

    IResultSet set = database.performQuery(sb.toString(),newList,null,null);
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i++);
      Long id = (Long)row.getValue(jobQueue.idField);
      String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField);
      String documentIdentifier = (String)row.getValue(jobQueue.docIDField);
      resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier));
    }
  }
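  // --- Illustrative sketch; the literal table and column names are assumptions ---
  // The query assembled by processDeleteHashSet() above has this general shape:
  //
  //   SELECT t0.id, t0.dochash, t0.docid
  //   FROM carrydown t1, jobqueue t0
  //   WHERE t1.jobid = ? AND t1.parentidhash IN (?,...)
  //     AND t0.dochash = t1.childidhash AND t0.jobid = t1.jobid
  //
  // i.e., fetch the jobqueue row of every child that inherited carrydown data from
  // any of the listed parents, within the given job.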
  /** Requeue a document for further processing in the future.
  * This method is called after a document is processed, when the job is a "continuous" one.
  * It is essentially equivalent to noting that the document processing is complete, except the
  * document remains on the queue.
  *@param documentDescriptions is the set of description objects for the document that was processed.
  *@param executeTimes are the times that the documents should be rescanned.  Null indicates "never".
  *@param actions are what should be done when the time arrives.  Choices are ACTION_RESCAN or ACTION_REMOVE.
  */
  public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes,
    int[] actions)
    throws ManifoldCFException
  {
    String[] docIDHashes = new String[documentDescriptions.length];
    Long[] ids = new Long[documentDescriptions.length];
    Long[] executeTimesNew = new Long[documentDescriptions.length];
    int[] actionsNew = new int[documentDescriptions.length];

    // First loop maps document identifier back to an index.
    HashMap indexMap = new HashMap();
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort!
    java.util.Arrays.sort(docIDHashes);

    // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
    i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i];
      Integer x = (Integer)indexMap.remove(docIDHash);
      if (x == null)
        throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
      int index = x.intValue();
      ids[i] = documentDescriptions[index].getID();
      executeTimesNew[i] = executeTimes[index];
      actionsNew[i] = actions[index];
      i++;
    }

    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring.  We thus need to pay attention to the sorted order.
        i = 0;
        while (i < ids.length)
        {
          jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1);
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Requeue a document for further processing in the future.
  * This method is called after a document is processed, when the job is a "continuous" one.
  * It is essentially equivalent to noting that the document processing is complete, except the
  * document remains on the queue.
  *@param documentDescription is the description object for the document that was processed.
  *@param executeTime is the time that the document should be rescanned.  Null indicates "never".
  *@param action is what should be done when the time arrives.  Choices include ACTION_RESCAN or ACTION_REMOVE.
  */
  public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action)
    throws ManifoldCFException
  {
    requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action});
  }
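  // --- Illustrative caller-side sketch; names and the ACTION_RESCAN constant's
  // location are assumptions ---
  // Continuous-crawl usage of the requeue entry point above: after processing, put the
  // document back on the queue for another look in an hour.
  //
  //   jobManager.requeueDocument(dd,
  //     new Long(System.currentTimeMillis() + 60L * 60L * 1000L),ACTION_RESCAN);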
  /** Reset a set of documents for further processing in the future.
  * This method is called after some unknown number of the documents were processed, but then a service interruption occurred.
  * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
  * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
  * current status and decide what the new status ought to be, based on a true rollback scenario.  Such cases, however, are rare enough so that
  * special logic is probably not worth it.
  *@param documentDescriptions is the set of description objects for the document that was processed.
  *@param executeTime is the time that the documents should be rescanned.
  *@param action is what should be done when the time arrives.  Choices include ACTION_RESCAN or ACTION_REMOVE.
  *@param failTime is the time beyond which a service interruption will be considered a hard failure.
  *@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
  */
  public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime,
    int action, long failTime, int failCount)
    throws ManifoldCFException
  {
    Long executeTimeLong = new Long(executeTime);
    Long[] ids = new Long[documentDescriptions.length];
    String[] docIDHashes = new String[documentDescriptions.length];
    Long[] executeTimes = new Long[documentDescriptions.length];
    int[] actions = new int[documentDescriptions.length];
    long[] failTimes = new long[documentDescriptions.length];
    int[] failCounts = new int[documentDescriptions.length];

    // First loop maps document identifier back to an index.
    HashMap indexMap = new HashMap();
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort!
    java.util.Arrays.sort(docIDHashes);

    // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
    i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i];
      Integer x = (Integer)indexMap.remove(docIDHash);
      if (x == null)
        throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
      int index = x.intValue();
      ids[i] = documentDescriptions[index].getID();
      executeTimes[i] = executeTimeLong;
      actions[i] = action;
      long oldFailTime = documentDescriptions[index].getFailTime();
      if (oldFailTime == -1L)
        oldFailTime = failTime;
      failTimes[i] = oldFailTime;
      int oldFailCount = documentDescriptions[index].getFailRetryCount();
      if (oldFailCount == -1)
        oldFailCount = failCount;
      else
      {
        oldFailCount--;
        if (failCount != -1 && oldFailCount > failCount)
          oldFailCount = failCount;
      }
      failCounts[i] = oldFailCount;
      i++;
    }

    // Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether
    // an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring.  We thus need to pay attention to the sorted order.
        i = 0;
        while (i < ids.length)
        {
          jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],(failTimes==null)?-1L:failTimes[i],(failCounts==null)?-1:failCounts[i]);
          i++;
        }
        database.performCommit();
        break;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }
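  // --- Worked example of the retry bookkeeping above (exposition only) ---
  // A document whose stored failRetryCount is -1 adopts the caller's failCount;
  // otherwise the stored count is decremented, then clamped to the caller's failCount:
  //
  //   stored = -1, failCount = 5  ->  5   (first interruption seen)
  //   stored =  3, failCount = 5  ->  2   (3 - 1)
  //   stored =  9, failCount = 5  ->  5   (9 - 1 = 8, clamped to 5)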
  /** Reset a set of cleaning documents for further processing in the future.
  * This method is called after some unknown number of the documents were cleaned, but then an ingestion service interruption occurred.
  * Note well: The logic here basically presumes that we cannot know whether the documents were indeed cleaned or not.
  * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
  * current status and decide what the new status ought to be, based on a true rollback scenario.  Such cases, however, are rare enough so that
  * special logic is probably not worth it.
  *@param documentDescriptions is the set of description objects for the document that was cleaned.
  *@param checkTime is the minimum time for the next cleaning attempt.
  */
  public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
    throws ManifoldCFException
  {
    Long[] ids = new Long[documentDescriptions.length];
    String[] docIDHashes = new String[documentDescriptions.length];

    // First loop maps document identifier back to an index.
    HashMap indexMap = new HashMap();
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort!
    java.util.Arrays.sort(docIDHashes);

    // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
    i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i];
      Integer x = (Integer)indexMap.remove(docIDHash);
      if (x == null)
        throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
      int index = x.intValue();
      ids[i] = documentDescriptions[index].getID();
      i++;
    }

    // Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring.  We thus need to pay attention to the sorted order.
        i = 0;
        while (i < ids.length)
        {
          jobQueue.setUncleaningStatus(ids[i],checkTime);
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a cleaning document back to its former state.
  * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
  *@param documentDescription is the description of the document that was cleaned.
  *@param checkTime is the minimum time for the next cleaning attempt.
  */
  public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime)
    throws ManifoldCFException
  {
    resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
  }

  /** Reset a set of deleting documents for further processing in the future.
  * This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred.
  * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not.
  * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's
  * current status and decide what the new status ought to be, based on a true rollback scenario.  Such cases, however, are rare enough so that
  * special logic is probably not worth it.
  *@param documentDescriptions is the set of description objects for the document that was processed.
  *@param checkTime is the minimum time for the next cleaning attempt.
  */
  public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
    throws ManifoldCFException
  {
    Long[] ids = new Long[documentDescriptions.length];
    String[] docIDHashes = new String[documentDescriptions.length];

    // First loop maps document identifier back to an index.
    HashMap indexMap = new HashMap();
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort!
    java.util.Arrays.sort(docIDHashes);

    // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
    i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i];
      Integer x = (Integer)indexMap.remove(docIDHash);
      if (x == null)
        throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
      int index = x.intValue();
      ids[i] = documentDescriptions[index].getID();
      i++;
    }

    // Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring.  We thus need to pay attention to the sorted order.
        i = 0;
        while (i < ids.length)
        {
          jobQueue.setUndeletingStatus(ids[i],checkTime);
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a deleting document back to its former state.
  * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
  *@param documentDescription is the description object for the document that was cleaned.
  *@param checkTime is the minimum time for the next cleaning attempt.
  */
  public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime)
    throws ManifoldCFException
  {
    resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
  }

  /** Reset an active document back to its former state.
  * This gets done when there's a service interruption and the document cannot be processed yet.
  * Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any
  * processing activity occurring before it got called.  That assumption appears incorrect, however, so I've opted to now
  * presume that processing has perhaps occurred.  Perfect rollback is thus no longer possible.
  *@param documentDescription is the description object for the document that was processed.
  *@param executeTime is the time that the document should be rescanned.
  *@param action is what should be done when the time arrives.  Choices include ACTION_RESCAN or ACTION_REMOVE.
  *@param failTime is the time by which the document should be considered to have failed, if it has not been
  * successfully read by then.
  *@param failCount is the number of retries beyond which a service interruption will be considered a hard failure.
  */
  public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime,
    int failCount)
    throws ManifoldCFException
  {
    resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount);
  }

  /** Eliminate duplicates, and sort.
  */
  protected static String[] eliminateDuplicates(String[] docIDHashes)
  {
    HashMap map = new HashMap();
    int i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i++];
      map.put(docIDHash,docIDHash);
    }
    String[] rval = new String[map.size()];
    i = 0;
    Iterator iter = map.keySet().iterator();
    while (iter.hasNext())
    {
      rval[i++] = (String)iter.next();
    }
    java.util.Arrays.sort(rval);
    return rval;
  }

  /** Build a reorder map, describing how to convert an original index into a reordered index.
  */
  protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes)
  {
    HashMap reorderSet = new HashMap();
    int i = 0;
    while (i < reorderedIDHashes.length)
    {
      String reorderedIDHash = reorderedIDHashes[i];
      Integer position = new Integer(i);
      reorderSet.put(reorderedIDHash,position);
      i++;
    }

    HashMap map = new HashMap();
    int j = 0;
    while (j < originalIDHashes.length)
    {
      String originalIDHash = originalIDHashes[j];
      Integer position = (Integer)reorderSet.get(originalIDHash);
      if (position != null)
      {
        map.put(new Integer(j),position);
        // Remove, so that only one of each duplicate will have a place in the map
        reorderSet.remove(originalIDHash);
      }
      j++;
    }

    return map;
  }
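  // --- Illustrative sketch; method name invented, not part of the original class ---
  // How eliminateDuplicates() and buildReorderMap() cooperate: the map sends each
  // original index to its slot in the sorted, de-duplicated order, and only the first
  // occurrence of a duplicate gets a slot.
  protected static void reorderMapExample()
  {
    String[] original = new String[]{"b","a","b","c"};
    String[] reordered = eliminateDuplicates(original);      // yields {"a","b","c"}
    HashMap map = buildReorderMap(original,reordered);
    // map now holds: 0 -> 1 (the first "b"), 1 -> 0 ("a"), 3 -> 2 ("c");
    // original index 2 (the duplicate "b") has no entry, so callers take its
    // result from the first occurrence.
  }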
  /** Add an initial set of documents to the queue.
  * This method is called during job startup, when the queue is being loaded.
  * A set of document references is passed to this method, which updates the status of the document
  * in the specified job's queue, according to specific state rules.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param docIDHashes are the local document identifier hashes.
  *@param docIDs are the local document identifiers.
  *@param overrideSchedule is true if any existing document schedule should be overridden.
  *@param hopcountMethod is either accurate, nodelete, or neverdelete.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param documentPriorities are the document priorities corresponding to the document identifiers.
  *@param prereqEventNames are the events that must be completed before each document can be processed.
  *@return true if the priority value(s) were used, false otherwise.
  */
  public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes,
    String[] docIDHashes, String[] docIDs, boolean overrideSchedule,
    int hopcountMethod, long currentTime, double[] documentPriorities,
    String[][] prereqEventNames)
    throws ManifoldCFException
  {
    if (docIDHashes.length == 0)
      return new boolean[0];

    // The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead.
    // But, the documentPriorities and the return booleans need to correspond to the initial array.  So, after we come up with
    // our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index.
    String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
    HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
    double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
    String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
    String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
    boolean[] rval = new boolean[docIDHashes.length];
    int i = 0;
    while (i < docIDHashes.length)
    {
      Integer newPosition = (Integer)reorderMap.get(new Integer(i));
      if (newPosition != null)
      {
        reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
        if (prereqEventNames != null)
          reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
        else
          reorderedDocumentPrerequisites[newPosition.intValue()] = null;
        reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
      }
      rval[i] = false;
      i++;
    }

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString());
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized
    // transactions are used.  But serialized transactions may require a retry in order
    // to resolve transaction conflicts.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
          " initial docs and hopcounts for job "+jobID.toString());

        // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible.  Search for any existing row in jobqueue first (for update)
        boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];

        int z = 0;
        while (z < reorderedDocIDHashes.length)
        {
          String docIDHash = reorderedDocIDHashes[z];
          double docPriority = reorderedDocumentPriorities[z];
          String docID = reorderedDocumentIdentifiers[z];
          String[] docPrereqs = reorderedDocumentPrerequisites[z];

          StringBuilder sb = new StringBuilder("SELECT ");
          ArrayList list = new ArrayList();

          sb.append(jobQueue.idField).append(",")
            .append(jobQueue.statusField).append(",")
            .append(jobQueue.checkTimeField)
            .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
            .append(database.buildConjunctionClause(list,new ClauseDescription[]{
              new UnitaryClause(jobQueue.docHashField,docIDHash),
              new UnitaryClause(jobQueue.jobIDField,jobID)}));

          sb.append(" FOR UPDATE");

          IResultSet set = database.performQuery(sb.toString(),list,null,null);

          boolean priorityUsed;
          long executeTime = overrideSchedule?0L:-1L;

          if (set.getRowCount() > 0)
          {
            // Found a row, and it is now locked.
            IResultRow row = set.getRow(0);

            // Decode the row
            Long rowID = (Long)row.getValue(jobQueue.idField);
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);

            priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs);
          }
          else
          {
            // Not found.  Attempt an insert instead.  This may fail due to constraints, but if this happens, the whole transaction will be retried.
            jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs);
            priorityUsed = true;
          }

          reorderedRval[z++] = priorityUsed;
        }

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " initial docs for job "+jobID.toString());

        if (legalLinkTypes.length > 0)
          hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);

        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " initial docs and hopcounts for job "+jobID.toString());

        // Rejigger to correspond with calling order
        i = 0;
        while (i < docIDs.length)
        {
          Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
          if (finalPosition != null)
            rval[i] = reorderedRval[finalPosition.intValue()];
          i++;
        }

        return rval;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
            " initial docs for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }
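  // --- Illustrative sketch; the literal table and column names are assumptions ---
  // The per-document upsert performed inside addDocumentsInitial() above:
  //
  //   SELECT id,status,checktime FROM jobqueue
  //     WHERE dochash = ? AND jobid = ?  FOR UPDATE
  //
  // On a hit, the locked row is updated in place; on a miss, an INSERT is attempted.
  // A unique-constraint collision with a racing thread aborts the serialized
  // transaction, which the surrounding retry loop then replays from the top.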
  /** Add an initial set of remaining documents to the queue.
  * This method is called during job startup, when the queue is being loaded, to list documents that
  * were NOT included by calling addDocumentsInitial().  Documents listed here are simply designed to
  * enable the framework to get rid of old, invalid seeds.  They are not queued for processing.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param docIDHashes are the local document identifier hashes.
  *@param hopcountMethod is either accurate, nodelete, or neverdelete.
  */
  public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes,
    int hopcountMethod)
    throws ManifoldCFException
  {
    if (docIDHashes.length == 0)
      return;

    String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString());
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
    // because otherwise one transaction can see the effects of another transaction before it's been committed.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
          " remaining docs and hopcounts for job "+jobID.toString());

        jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes);
        if (legalLinkTypes.length > 0)
          hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);

        database.performCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " remaining docs and hopcounts for job "+jobID.toString());
        // Without this break, a successful commit would loop forever.
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
            " remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Signal that a seeding pass has been done.
  * Call this method at the end of a seeding pass.  It is used to perform the bookkeeping necessary to
  * maintain the hopcount table.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param isPartial is set if the seeds provided are only a partial list.  Some connectors cannot
  * supply a full list of seeds on every seeding iteration; this acknowledges that limitation.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  */
  public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial,
    int hopcountMethod)
    throws ManifoldCFException
  {
    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString());
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
    // because otherwise one transaction can see the effects of another transaction before it's been committed.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+
          " ms to start finishing initial docs and hopcounts for job "+jobID.toString());

        jobQueue.doneDocumentsInitial(jobID,isPartial);

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
          " ms to finish initial docs for job "+jobID.toString());

        if (legalLinkTypes.length > 0)
          hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod);

        database.performCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+
          " ms to finish initial docs and hopcounts for job "+jobID.toString());
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Get the specified hop counts, with the limit as described.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param docIDHashes are the hashes for the set of documents to find the hopcount for.
  *@param linkType is the kind of link to find the hopcount for.
  *@param limit is the limit, beyond which a negative distance may be returned.
  *@param hopcountMethod is the method for managing hopcounts that is in effect.
  *@return a vector of booleans corresponding to the documents requested.  A true value is returned
  * if the document is within the specified limit, false otherwise.
  */
  public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit,
    int hopcountMethod)
    throws ManifoldCFException
  {
    if (docIDHashes.length == 0)
      return new boolean[0];

    if (legalLinkTypes.length == 0)
      throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept");

    // The idea is to delay queue processing as much as possible, because that avoids having to wait
    // on locks and having to repeat our evaluations.
    //
    // Luckily, we can glean a lot of information from what's hanging around.  Specifically, whatever value
    // we find in the table is an upper bound on the true hop distance value.  So, only if we have documents
    // that are outside the limit does the queue need to be processed.
    //
    // It is therefore really helpful to write in an estimated value for any newly created record, if possible.  Even if the
    // estimate is possibly greater than the true value, a great deal of locking and queue processing will be
    // avoided.

    // The flow here is to:
    // - grab the right hoplock
    // - process the queue
    // - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString());
    }

    // Make an answer array.
    boolean[] rval = new boolean[docIDHashes.length];

    // Make a hash of what we still need a definitive answer for.
    HashMap badAnswers = new HashMap();
    int i = 0;
    while (i < rval.length)
    {
      String docIDHash = docIDHashes[i];
      rval[i] = false;
      badAnswers.put(docIDHash,new Integer(i));
      i++;
    }

    int iterationCount = 0;
    while (true)
    {
      // Ask only about documents we don't have a definitive answer for yet.
      String[] askDocIDHashes = new String[badAnswers.size()];
      i = 0;
      Iterator iter = badAnswers.keySet().iterator();
      while (iter.hasNext())
      {
        askDocIDHashes[i++] = (String)iter.next();
      }

      int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
      i = 0;
      while (i < distances.length)
      {
        int distance = distances[i];
        String docIDHash = askDocIDHashes[i];
        if (distance != -1 && distance <= limit)
        {
          // Found a usable value
          rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
        }
        i++;
      }

      if (Logging.perf.isDebugEnabled())
        Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+
        " hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+
        " ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");

      if (badAnswers.size() == 0)
        return rval;

      // It appears we need to process the queue.  We need to enter the hoplock section
      // to make sure only one player is updating values at a time.  Then, before we exit, we get the
      // remaining values.

      askDocIDHashes = new String[badAnswers.size()];
      i = 0;
      iter = badAnswers.keySet().iterator();
      while (iter.hasNext())
      {
        askDocIDHashes[i++] = (String)iter.next();
      }

      // Currently, only one thread can possibly process any of the queue at a given time.  This is because the queue marks are not set to something
      // other than the "in queue" value during processing.  My instinct is that queue processing is likely to interfere with other queue processing,
      // so I've taken the route of prohibiting more than one batch of queue processing at a time, for now.

      String hopLockName = getHopLockName(jobID);
      long sleepAmt = 0L;
      lockManager.enterWriteLock(hopLockName);
      try
      {
        database.beginTransaction(database.TRANSACTION_SERIALIZED);
        try
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");

          // The internal queue processing only does 200 at a time.  This is a compromise between maximum efficiency (bigger number)
          // and the requirement that database writes are effectively blocked for a while (which argues for a smaller number).
          boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod);

          // If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were
          // interested in.  If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that
          // until we really ARE done.
          if (!definitive)
          {
            // Sleep a little bit so another thread can have a whack at things
            sleepAmt = 100L;
            database.performCommit();
            continue;
          }

          // Definitive answers found; continue through.
          distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
          database.performCommit();
        }
        catch (ManifoldCFException e)
        {
          database.signalRollback();
          if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
          {
            if (Logging.perf.isDebugEnabled())
              Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage());
            sleepAmt = getRandomAmount();
            continue;
          }
          throw e;
        }
        catch (Error e)
        {
          database.signalRollback();
          throw e;
        }
        finally
        {
          database.endTransaction();
        }
      }
      finally
      {
        lockManager.leaveWriteLock(hopLockName);
        sleepFor(sleepAmt);
      }

      if (Logging.perf.isDebugEnabled())
        Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+
        " hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");

      // All answers are guaranteed to be accurate now.
      i = 0;
      while (i < distances.length)
      {
        int distance = distances[i];
        String docIDHash = askDocIDHashes[i];
        if (distance != -1 && distance <= limit)
        {
          // Found a usable value
          rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
        }
        i++;
      }
      return rval;
    }
  }

  /** Get all the current seeds.
  * Returns the seed document identifiers for a job.
  *@param jobID is the job identifier.
  *@return the document identifiers that are currently considered to be seeds.
  */
  public String[] getAllSeeds(Long jobID)
    throws ManifoldCFException
  {
    return jobQueue.getAllSeeds(jobID);
  }
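  // --- Illustrative caller-side sketch; variable names are invented ---
  // The contract of findHopCounts() above: one boolean per input hash, true when the
  // document lies within the hop limit along the given link type.
  //
  //   boolean[] within = jobManager.findHopCounts(jobID,legalLinkTypes,
  //     new String[]{hashA,hashB},"link",5,hopcountMethod);
  //   // within[0] == true  =>  document hashA is reachable in at most 5 "link" hops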
  /** Add documents to the queue in bulk.
  * This method is called during document processing, when a set of document references are discovered.
  * The document references are passed to this method, which updates the status of the document(s)
  * in the specified job's queue, according to specific state rules.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param docIDHashes are the local document identifier hashes.
  *@param docIDs are the local document identifiers.
  *@param parentIdentifierHash is the optional parent identifier hash of this document.  Pass null if none.
  * MUST be present in the case of carrydown information.
  *@param relationshipType is the optional link type between this document and its parent.  Pass null if there
  * is no relationship with a parent.
  *@param hopcountMethod is the desired method for managing hopcounts.
  *@param dataNames are the names of the data to carry down to the child from this parent.
  *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above.  If CharacterInput objects are passed in here,
  * it is the caller's responsibility to clean these up.
  *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.
  *@param documentPriorities are the desired document priorities for the documents.
  *@param prereqEventNames are the events that must be completed before a document can be queued.
  *@return an array of boolean values indicating whether or not the passed-in priority value was used or not for each doc id (true if used).
  */
  public boolean[] addDocuments(Long jobID, String[] legalLinkTypes,
    String[] docIDHashes, String[] docIDs,
    String parentIdentifierHash, String relationshipType,
    int hopcountMethod, String[][] dataNames, Object[][][] dataValues,
    long currentTime, double[] documentPriorities,
    String[][] prereqEventNames)
    throws ManifoldCFException
  {
    if (docIDs.length == 0)
      return new boolean[0];

    // Sort the id hashes and eliminate duplicates.  This will help avoid deadlock conditions.
    // However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are
    // duplicate document identifiers.
    HashMap nameMap = new HashMap();
    int k = 0;
    while (k < docIDHashes.length)
    {
      String docIDHash = docIDHashes[k];

      // If there are duplicates, we need to merge them.
      HashMap names = (HashMap)nameMap.get(docIDHash);
      if (names == null)
      {
        names = new HashMap();
        nameMap.put(docIDHash,names);
      }

      String[] nameList = dataNames[k];
      Object[][] dataList = dataValues[k];

      int z = 0;
      while (z < nameList.length)
      {
        String name = nameList[z];
        Object[] values = dataList[z];
        HashMap valueMap = (HashMap)names.get(name);
        if (valueMap == null)
        {
          valueMap = new HashMap();
          names.put(name,valueMap);
        }
        int y = 0;
        while (y < values.length)
        {
          // Calculate the value hash; that's the true key, and the one that cannot be duplicated.
          String valueHash;
          if (values[y] instanceof CharacterInput)
          {
            // It's a CharacterInput object.
            valueHash = ((CharacterInput)values[y]).getHashValue();
          }
          else
          {
            // It better be a String.
            valueHash = ManifoldCF.hash((String)values[y]);
          }
          valueMap.put(valueHash,values[y]);
          y++;
        }
        z++;
      }
      k++;
    }

    String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);
    HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes);
    double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length];
    String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][];
    String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length];
    boolean[] rval = new boolean[docIDHashes.length];
    int i = 0;
    while (i < docIDHashes.length)
    {
      Integer newPosition = (Integer)reorderMap.get(new Integer(i));
      if (newPosition != null)
      {
        reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i];
        if (prereqEventNames != null)
          reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i];
        else
          reorderedDocumentPrerequisites[newPosition.intValue()] = null;
        reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i];
      }
      rval[i] = false;
      i++;
    }

    dataNames = new String[reorderedDocIDHashes.length][];
    String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][];
    dataValues = new Object[reorderedDocIDHashes.length][][];

    k = 0;
    while (k < reorderedDocIDHashes.length)
    {
      String docIDHash = reorderedDocIDHashes[k];
      HashMap names = (HashMap)nameMap.get(docIDHash);
      dataNames[k] = new String[names.size()];
      dataHashValues[k] = new String[names.size()][];
      dataValues[k] = new Object[names.size()][];
      Iterator iter = names.keySet().iterator();
      int z = 0;
      while (iter.hasNext())
      {
        String dataName = (String)iter.next();
        (dataNames[k])[z] = dataName;
        HashMap values = (HashMap)names.get(dataName);
        (dataHashValues[k])[z] = new String[values.size()];
        (dataValues[k])[z] = new Object[values.size()];
        Iterator iter2 = values.keySet().iterator();
        int y = 0;
        while (iter2.hasNext())
        {
          String dataValueHash = (String)iter2.next();
          Object dataValue = values.get(dataValueHash);
          ((dataHashValues[k])[z])[y] = dataValueHash;
          ((dataValues[k])[z])[y] = dataValue;
          y++;
        }
        z++;
      }
      k++;
    }

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash);
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code,
    // because it allows one transaction to see the effects of another transaction before it's been committed.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
          " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash);

        // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible.  Search for any existing row in jobqueue first (for update)
        HashMap existingRows = new HashMap();

        for (int z = 0; z < reorderedDocIDHashes.length; z++)
        {
          String docIDHash = reorderedDocIDHashes[z];

          StringBuilder sb = new StringBuilder("SELECT ");
          ArrayList list = new ArrayList();

          sb.append(jobQueue.idField).append(",")
            .append(jobQueue.statusField).append(",")
            .append(jobQueue.checkTimeField)
            .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
            .append(database.buildConjunctionClause(list,new ClauseDescription[]{
              new UnitaryClause(jobQueue.docHashField,docIDHash),
              new UnitaryClause(jobQueue.jobIDField,jobID)}));

          sb.append(" FOR UPDATE");

          IResultSet set = database.performQuery(sb.toString(),list,null,null);

          boolean priorityUsed;

          if (set.getRowCount() > 0)
          {
            // Found a row, and it is now locked.
            IResultRow row = set.getRow(0);

            // Decode the row
            Long rowID = (Long)row.getValue(jobQueue.idField);
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);

            existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
          }
          else
          {
            // Not found.  Attempt an insert instead.  This may fail due to constraints, but if this happens, the whole transaction will be retried.
            jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]);
          }
        }

        // Update all the carrydown data at once, for greatest efficiency.
        boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues);

        // Same with hopcount.
        boolean[] hopcountChangesSeen = null;
        if (parentIdentifierHash != null && relationshipType != null)
          hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod);

        // Loop through the document id's again, and perform updates where needed
        boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length];

        boolean reactivateRemovedHopcountRecords = false;

        for (int z = 0; z < reorderedDocIDHashes.length; z++)
        {
          String docIDHash = reorderedDocIDHashes[z];
          JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
          if (jr == null)
            // It was an insert
            reorderedRval[z] = true;
          else
          {
            // It was an existing row; do the update logic.
            // The hopcountChangesSeen array describes whether each reference is a new one.  This
            // helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents
            // to the PENDING state.  If the new link ended in an existing record, THEN we need to flip them all!
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(), 0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]), reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]); // Signal if we need to perform the flip if (hopcountChangesSeen != null && hopcountChangesSeen[z]) reactivateRemovedHopcountRecords = true; } } if (reactivateRemovedHopcountRecords) jobQueue.reactivateHopcountRemovedRecords(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash); i = 0; while (i < docIDHashes.length) { Integer finalPosition = (Integer)reorderMap.get(new Integer(i)); if (finalPosition != null) rval[i] = reorderedRval[finalPosition.intValue()]; i++; } return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { sleepAmt = getRandomAmount(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Add a document to the queue. * This method is called during document processing, when a document reference is discovered. * The document reference is passed to this method, which updates the status of the document * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHash is the local document identifier hash value. *@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none. * MUST be present in the case of carrydown information. *@param relationshipType is the optional link type between this document and its parent. Pass null if there * is no relationship with a parent. *@param hopcountMethod is the desired method for managing hopcounts. *@param dataNames are the names of the data to carry down to the child from this parent. *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation. *@param priority is the desired document priority for the document. *@param prereqEventNames are the events that must be completed before the document can be processed. *@return true if the priority value was used, false otherwise. 
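*@param docID is the local document identifier.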
*/ public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID, String parentIdentifierHash, String relationshipType, int hopcountMethod, String[] dataNames, Object[][] dataValues, long currentTime, double priority, String[] prereqEventNames) throws ManifoldCFException { return addDocuments(jobID,legalLinkTypes, new String[]{docIDHash},new String[]{docID}, parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames}, new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0]; } /** Complete adding child documents to the queue, for a set of documents. * This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod) throws ManifoldCFException { if (parentIdentifierHashes.length == 0) return new DocumentDescription[0]; DocumentDescription[] rval; if (legalLinkTypes.length == 0) { // Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional // deadlock is possible when a document shares multiple parents, so do the whole retry drill while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } else { long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, // and allows one transaction to see the effects of another transaction before it's been committed. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! 
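// The order of the next two calls matters: the affected-children calculation must run before
// restoreRecords(), because restoreRecords() deletes exactly the carrydown rows that the
// calculation matches; once they are gone, the set of children to requeue cannot be rebuilt.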
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } return rval; } /** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation. */ protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes) throws ManifoldCFException { // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes. // The goal is to throw all the children into a hash, to make them unique at the end. HashMap resultHash = new HashMap(); ArrayList list = new ArrayList(); int maxCount = database.getMaxOrClause(); int i = 0; int z = 0; while (i < parentIDHashes.length) { if (z == maxCount) { processParentHashSet(jobID,resultHash,list); list.clear(); z = 0; } list.add(parentIDHashes[i]); i++; z++; } if (z > 0) processParentHashSet(jobID,resultHash,list); // Now, put together the result document list from the hash. DocumentDescription[] rval = new DocumentDescription[resultHash.size()]; i = 0; Iterator iter = resultHash.keySet().iterator(); while (iter.hasNext()) { Long id = (Long)iter.next(); DocumentDescription dd = (DocumentDescription)resultHash.get(id); rval[i++] = dd; } return rval; } /** Helper method: look up rows affected by a restoreRecords operation. */ protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list) throws ManifoldCFException { // The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription // object for return, and so a join is necessary against the jobqueue table. 
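// Sketch of the SQL this builds (t0 = jobqueue, t1 = carrydown; names stand for the field
// constants used below):
//   SELECT t0.id, t0.dochash, t0.docid
//   FROM <carrydown> t1, <jobqueue> t0
//   WHERE t1.jobid=? AND t1.parentidhash IN (...)
//     AND t0.dochash=t1.childidhash AND t0.jobid=t1.jobid
//     AND t1.isnew=?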
StringBuilder sb = new StringBuilder("SELECT "); ArrayList newlist = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(carryDown.getTableName()).append(" t1, ") .append(jobQueue.getTableName()).append(" t0 WHERE "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t1."+carryDown.jobIDField,jobID), new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField), new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND "); sb.append("t1.").append(carryDown.newField).append("=?"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); /* sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new MultiClause("t1."+carryDown.parentIDHashField,list), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t1.").append(carryDown.newField).append("=?") .append(")"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); */ IResultSet set = database.performQuery(sb.toString(),newlist,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long id = (Long)row.getValue(jobQueue.idField); String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField); String documentIdentifier = (String)row.getValue(jobQueue.docIDField); resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier)); } } /** Begin an event sequence. *@param eventName is the name of the event. *@return true if the event could be created, or false if it's already there. */ public boolean beginEventSequence(String eventName) throws ManifoldCFException { try { eventManager.createEvent(eventName); return true; } catch (ManifoldCFException e) { if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) return false; throw e; } } /** Complete an event sequence. *@param eventName is the name of the event. */ public void completeEventSequence(String eventName) throws ManifoldCFException { eventManager.destroyEvent(eventName); } /** Requeue a document set because of carrydown changes. * This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the * extent that if one is *already* being processed, it will need to be done over again. *@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed. *@param docPriorities are the document priorities to assign to the documents, if needed. *@return a flag for each document priority, true if it was used, false otherwise. 
*/
public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities)
  throws ManifoldCFException
{
  if (documentDescriptions.length == 0)
    return new boolean[0];

  // Order the updates by document hash, to prevent deadlock as much as possible.

  // This map contains the original index of the document id hash.
  HashMap docHashMap = new HashMap();

  String[] docIDHashes = new String[documentDescriptions.length];
  int i = 0;
  while (i < documentDescriptions.length)
  {
    docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
    docHashMap.put(docIDHashes[i],new Integer(i));
    i++;
  }

  // Sort the hashes
  java.util.Arrays.sort(docIDHashes);

  boolean[] rval = new boolean[docIDHashes.length];

  // Enter transaction and prepare to look up document states in dochash order
  while (true)
  {
    long sleepAmt = 0L;
    database.beginTransaction(database.TRANSACTION_SERIALIZED);
    try
    {
      // This is the map that will contain the rows we found, keyed by docIDHash.
      HashMap existingRows = new HashMap();

      // Loop through hashes in order
      int j = 0;
      while (j < docIDHashes.length)
      {
        String docIDHash = docIDHashes[j];

        // Get the index
        int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();

        // Lookup document description
        DocumentDescription dd = documentDescriptions[originalIndex];

        // Do the query.  We can base this on the id column since we have that.
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();
        sb.append(jobQueue.idField).append(",")
          .append(jobQueue.statusField).append(",")
          .append(jobQueue.checkTimeField)
          .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE");

        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // If the row is there, we use its current info to requeue it properly.
        if (set.getRowCount() > 0)
        {
          // Found a row, and it is now locked.
          IResultRow row = set.getRow(0);

          // Decode the row
          Long rowID = (Long)row.getValue(jobQueue.idField);
          int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
          Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);

          existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
        }
        j++;
      }

      // Ok, existingRows contains all the rows we want to try to update.  Go through these and update.
      // The cursor must be reset here; the lookup loop above left j at docIDHashes.length, and without
      // this reset the update pass would never execute and no document would ever be requeued.
      j = 0;
      while (j < docIDHashes.length)
      {
        String docIDHash = docIDHashes[j];
        int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();
        JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
        if (jr == null)
          // It wasn't found, so the doc priority wasn't used.
          rval[originalIndex] = false;
        else
          // It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
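          // Passing true for the carrydown-changes flag forces a requeue even when the document is
          // being processed at this very moment; the worker notices the state change and does the
          // document over again, which is the contract stated in this method's javadoc.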
rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(), 0L,currentTime,true,docPriorities[originalIndex],null); j++; } database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } return rval; } /** Requeue a document because of carrydown changes. * This method is called when carrydown data is modified for a document. The document must be requeued for immediate reprocessing, even to the * extent that if it is *already* being processed, it will need to be done over again. *@param documentDescription is the description object for the document that has had its parent carrydown information changed. *@param docPriority is the document priority to assign to the document, if needed. *@return a flag for the document priority, true if it was used, false otherwise. */ public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority) throws ManifoldCFException { return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0]; } /** Sleep a random amount of time after a transaction abort. */ protected long getRandomAmount() { return database.getSleepAmt(); } protected void sleepFor(long amt) throws ManifoldCFException { database.sleepFor(amt); } /** Retrieve specific parent data for a given document. *@param jobID is the job identifier. *@param docIDHash is the document identifier hash value. *@param dataName is the kind of data to retrieve. *@return the unique data values. */ public String[] retrieveParentData(Long jobID, String docIDHash, String dataName) throws ManifoldCFException { return carryDown.getDataValues(jobID,docIDHash,dataName); } /** Retrieve specific parent data for a given document. *@param jobID is the job identifier. *@param docIDHash is the document identifier hash value. *@param dataName is the kind of data to retrieve. *@return the unique data values. */ public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName) throws ManifoldCFException { return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName); } // These methods support the job threads (which start jobs and end jobs) // There is one thread that starts jobs. It simply looks for jobs which are ready to // start, and changes their state accordingly. // There is also a pool of threads that end jobs. These threads wait for a job that // looks like it is done, and do completion processing if it is. /** Start all jobs in need of starting. * This method marks all the appropriate jobs as "in progress", which is all that should be * needed to start them. * It's also the case that the start event should be logged in the event log. In order to make it possible for * the caller to do this logging, a set of job ID's will be returned containing the jobs that * were started. *@param currentTime is the current time in milliseconds since epoch. *@param unwaitList is filled in with the set of job ID objects that were resumed. 
*/ public void startJobs(long currentTime, ArrayList unwaitList) throws ManifoldCFException { // This method should compare the lasttime field against the current time, for all // "not active" jobs, and see if a job should be started. // // If a job is to be started, then the following occurs: // (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to // PURGATORY. // (2) The job is labeled as "ACTIVE". // (3) The starttime field is set. // (4) The endtime field is nulled out. // // This method also assesses jobs that are ACTIVE or PAUSED to see if they should be // converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded // the value in the "windowend" field for the job. // // Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become // ACTIVE or PAUSED. This will occur if we have entered a new window for the job. // Note well: We can't combine locks across both our lock manager and the database unless we do it consistently. The // consistent practice throughout CF is to do the external locks first, then the database locks. This particular method // thus cannot use cached job description information, because it must throw database locks first against the jobs table. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.lastTimeField).append(",") .append(jobs.statusField).append(",") .append(jobs.startMethodField).append(",") .append(jobs.outputNameField).append(",") .append(jobs.connectionNameField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_INACTIVE), jobs.statusToString(jobs.STATUS_ACTIVEWAIT), jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING), jobs.statusToString(jobs.STATUS_PAUSEDWAIT), jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ") .append(jobs.startMethodField).append("!=? FOR UPDATE"); list.add(jobs.startMethodToString(IJobDescription.START_DISABLE)); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Next, we query for the schedule information. In order to do that, we amass a list of job identifiers that we want schedule info // for. 
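// Two passes over the same result set: the first pass amasses jobIDSet so that
// readScheduleRecords() can fetch every job's schedule rows in one batched query;
// the second pass walks the rows again with the schedule data already in hand.
// This trades a little memory for avoiding one schedule query per job.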
Long[] jobIDSet = new Long[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); jobIDSet[i++] = (Long)row.getValue(jobs.idField); } ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet); i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField)); String outputName = (String)row.getValue(jobs.outputNameField); String connectionName = (String)row.getValue(jobs.connectionNameField); ScheduleRecord[] thisSchedule = srSet[i++]; // Run at specific times // We need to start with the start time as given, plus one long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1; if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+ new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString()); // Proceed to the current time, and find a match if there is one to be found. // If not -> continue // We go through *all* the schedule records. The one that matches that has the latest // end time is the one we take. Long matchTime = null; Long duration = null; boolean requestMinimum = false; for (int l = 0; l < thisSchedule.length; l++) { long trialStartInterval = startInterval; ScheduleRecord sr = thisSchedule[l]; Long thisDuration = sr.getDuration(); if (startMethod == IJobDescription.START_WINDOWINSIDE && thisDuration != null) { // Bump the start interval back before the beginning of the current interval. // This will guarantee a start as long as there is time in the window. long trialStart = currentTime - thisDuration.longValue(); if (trialStart < trialStartInterval) trialStartInterval = trialStart; } Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime, sr.getDayOfWeek(), sr.getDayOfMonth(), sr.getMonthOfYear(), sr.getYear(), sr.getHourOfDay(), sr.getMinutesOfHour(), sr.getTimezone(), thisDuration); if (thisMatchTime == null) { if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+ " to "+new Long(currentTime).toString()); continue; } if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+ " to "+new Long(currentTime).toString()); if (matchTime == null || thisDuration == null || (duration != null && thisMatchTime.longValue() + thisDuration.longValue() > matchTime.longValue() + duration.longValue())) { matchTime = thisMatchTime; duration = thisDuration; requestMinimum = sr.getRequestMinimum(); } } if (matchTime == null) { jobs.updateLastTime(jobID,currentTime); continue; } int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); // Calculate the end of the window Long windowEnd = null; if (duration != null) { windowEnd = new Long(matchTime.longValue()+duration.longValue()); } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+ matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")"); } int newJobState; switch (status) { case Jobs.STATUS_INACTIVE: // If job was formerly "inactive", do the full startup. // Start this job! but with no end time. // This does not get logged because the startup thread does the logging. 
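// A rough sketch of what the full-startup path does, in terms of the state machine used
// throughout this class:
//   jobs.startJob(jobID,windowEnd,requestMinimum) - queues the job for startup; the separate
//     startup thread (see getJobsReadyForStartup()) performs the actual start and the logging
//   jobQueue.clearFailTimes(jobID)                - clears per-document retry backoff left over
//     from earlier runs, so no document is needlessly delayed in the new run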
jobs.startJob(jobID,windowEnd,requestMinimum); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Signalled for job start for job "+jobID); } break; case Jobs.STATUS_ACTIVEWAIT: unwaitList.add(jobID); jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited job "+jobID); } break; case Jobs.STATUS_ACTIVEWAITSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited job "+jobID); } break; case Jobs.STATUS_PAUSEDWAIT: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSEDWAITSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSINGWAITING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSINGWAITINGSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; default: break; } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Put active or paused jobs in wait state, if they've exceeded their window. *@param currentTime is the current time in milliseconds since epoch. *@param waitList is filled in with the set of job ID's that were put into a wait state. */ public void waitJobs(long currentTime, ArrayList waitList) throws ManifoldCFException { // This method assesses jobs that are ACTIVE or PAUSED to see if they should be // converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded // the value in the "windowend" field for the job. // database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.statusField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVESEEDING), jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER), jobs.statusToString(jobs.STATUS_PAUSED), jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ") .append(jobs.windowEndField).append("<? 
FOR UPDATE"); list.add(new Long(currentTime)); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); waitList.add(jobID); int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); // Make the job wait. switch (status) { case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end"); } break; case Jobs.STATUS_ACTIVESEEDING: case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: case Jobs.STATUS_ACTIVESEEDING_NEITHER: jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end"); } break; case Jobs.STATUS_PAUSED: jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSEDSEEDING: jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSING: jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSINGSEEDING: jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; default: break; } } } catch (ManifoldCFException e) { database.signalRollback(); throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } /** Reset job schedule. This re-evaluates whether the job should be started now. This method would typically * be called after a job's scheduling window has been changed. *@param jobID is the job identifier. */ public void resetJobSchedule(Long jobID) throws ManifoldCFException { // Note: This is problematic; the expected behavior is for the job to start if "we are within the window", // but not to start if the transition to active status was long enough ago. // Since there's no "right" way to do this, do nothing for now. // This explicitly did NOT work - it caused the job to refire every time it was saved. // jobs.updateLastTime(jobID,0L); } /** Check if the specified job parameters have a 'hit' within the specified interval. *@param startTime is the start time. *@param currentTimestamp is the end time. *@param daysOfWeek is the enumerated days of the week, or null. *@param daysOfMonth is the enumerated days of the month, or null. *@param months is the enumerated months, or null. *@param years is the enumerated years, or null. *@param hours is the enumerated hours, or null. *@param minutes is the enumerated minutes, or null. *@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds * from epoch is returned. 
*/ protected static Long checkTimeMatch(long startTime, long currentTimestamp, EnumeratedValues daysOfWeek, EnumeratedValues daysOfMonth, EnumeratedValues months, EnumeratedValues years, EnumeratedValues hours, EnumeratedValues minutes, String timezone, Long duration) { // What we do here is start with the previous timestamp, and advance until we // either encounter a match, or we exceed the current timestamp. Calendar c; if (timezone == null) { c = Calendar.getInstance(); } else { c = Calendar.getInstance(TimeZone.getTimeZone(timezone)); } // Get the current starting time c.setTimeInMillis(startTime); // If there's a duration value, we can't match unless we're within the window. // That means we find a match, and then we verify that the end time is greater than the currenttimestamp. // If not, we move on (by incrementing) // The main loop works off of the calendar and these values. while (c.getTimeInMillis() < currentTimestamp) { // Round up to the nearest minute, unless at 0 already int x = c.get(Calendar.MILLISECOND); if (x != c.getMinimum(Calendar.MILLISECOND)) { int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MILLISECOND,amtToAdd); continue; } x = c.get(Calendar.SECOND); if (x != c.getMinimum(Calendar.SECOND)) { int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.SECOND,amtToAdd); continue; } boolean startedToCareYet = false; x = c.get(Calendar.MINUTE); // If we care about minutes, round up, otherwise go to the 0 value if (minutes == null) { if (x != c.getMinimum(Calendar.MINUTE)) { int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MINUTE,amtToAdd); continue; } } else { // See if it is a legit value. if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE))) { // Advance to next legit value // We could be clever, but we just advance one c.add(Calendar.MINUTE,1); continue; } startedToCareYet = true; } // Hours x = c.get(Calendar.HOUR_OF_DAY); if (hours == null) { if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY)) { int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.HOUR_OF_DAY,amtToAdd); continue; } } else { if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY))) { // next hour c.add(Calendar.HOUR_OF_DAY,1); continue; } startedToCareYet = true; } // Days of month and days of week are at the same level; // these advance concurrently. However, if NEITHER is specified, and nothing // earlier was, then we do the 1st of the month. x = c.get(Calendar.DAY_OF_WEEK); if (daysOfWeek != null) { if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK))) { // next day c.add(Calendar.DAY_OF_WEEK,1); continue; } startedToCareYet = true; } x = c.get(Calendar.DAY_OF_MONTH); if (daysOfMonth == null) { // If nothing is specified but the month or the year, do it on the 1st. if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH)) { // Move as rapidly as possible towards the first of the month. But in no case, increment // less than one day. 
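// Worked example of the cascade, assuming nothing at all is enumerated: starting from
// 2015-03-10 14:23:07.250, the loop rounds up to the next whole second, then the next whole
// minute, then 15:00, then midnight of March 11, then marches to April 1, then to January 1
// of the following year, where the match finally lands.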
int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.DAY_OF_MONTH,amtToAdd); continue; } } else { if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH))) { // next day c.add(Calendar.DAY_OF_MONTH,1); continue; } startedToCareYet = true; } x = c.get(Calendar.MONTH); if (months == null) { if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH)) { int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MONTH,amtToAdd); continue; } } else { if (!months.checkValue(x-c.getMinimum(Calendar.MONTH))) { c.add(Calendar.MONTH,1); continue; } startedToCareYet = true; } x = c.get(Calendar.YEAR); if (years != null) { if (!years.checkValue(x)) { c.add(Calendar.YEAR,1); continue; } startedToCareYet = true; } // Looks like a match. // Last check is to be sure we are in the window, if any. If we are outside the window, // must skip forward. if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp) { c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND)); continue; } return new Long(c.getTimeInMillis()); } return null; } /** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and * will not cease until complete. If the job is already running, this operation will assure that * the job does not pause when its window ends. The job can be manually paused, or manually aborted. *@param jobID is the ID of the job to start. */ public void manualStart(Long jobID) throws ManifoldCFException { manualStart(jobID,false); } /** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and * will not cease until complete. If the job is already running, this operation will assure that * the job does not pause when its window ends. The job can be manually paused, or manually aborted. *@param jobID is the ID of the job to start. *@param requestMinimum is true if a minimal job run is requested. */ public void manualStart(Long jobID, boolean requestMinimum) throws ManifoldCFException { database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() < 1) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); if (status != Jobs.STATUS_INACTIVE) throw new ManifoldCFException("Job "+jobID+" is already running"); IJobDescription jobDescription = jobs.load(jobID,true); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually starting job "+jobID); } // Start this job! but with no end time. jobs.startJob(jobID,null,requestMinimum); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent"); } } catch (ManifoldCFException e) { database.signalRollback(); throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } /** Note job delete started. *@param jobID is the job id. *@param startTime is the job delete start time. 
*/ public void noteJobDeleteStarted(Long jobID, long startTime) throws ManifoldCFException { jobs.noteJobDeleteStarted(jobID,startTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" delete is now started"); } /** Note job started. *@param jobID is the job id. *@param startTime is the job start time. */ public void noteJobStarted(Long jobID, long startTime) throws ManifoldCFException { jobs.noteJobStarted(jobID,startTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" is now started"); } /** Note job seeded. *@param jobID is the job id. *@param seedTime is the job seed time. */ public void noteJobSeeded(Long jobID, long seedTime) throws ManifoldCFException { jobs.noteJobSeeded(jobID,seedTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" has been successfully reseeded"); } /** Prepare for a delete scan. *@param jobID is the job id. */ public void prepareDeleteScan(Long jobID) throws ManifoldCFException { // No special treatment needed for hopcount or carrydown, since these all get deleted at once // at the end of the job delete process. TrackerClass.notePrecommit(); jobQueue.prepareDeleteScan(jobID); TrackerClass.noteCommit(); } /** Prepare a job to be run. * This method is called regardless of the details of the job; what differs is only the flags that are passed in. * The code inside will determine the appropriate procedures. * (This method replaces prepareFullScan() and prepareIncrementalScan(). ) *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@param connectorModel is the model used by the connector for the job. *@param continuousJob is true if the job is a continuous one. *@param fromBeginningOfTime is true if the job is running starting from time 0. *@param requestMinimum is true if the minimal amount of work is requested for the job run. */ public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod, int connectorModel, boolean continuousJob, boolean fromBeginningOfTime, boolean requestMinimum) throws ManifoldCFException { // (1) If the connector has MODEL_ADD_CHANGE_DELETE, then // we let the connector run the show; there's no purge phase, and therefore the // documents are left in a COMPLETED state if they don't show up in the list // of seeds that require the attention of the connector. However, we do need to // preload the queue with all the existing documents, if there was any change to the // specification information (which will mean that fromBeginningOfTime is set). // // (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so // we do a full scan initialization. // // (3) If the connector has some other model, we look at the start time. A start // time of 0 implies a full scan, while any other start time implies an incremental // scan. // Complete connector model is told everything, so no delete phase. if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE) { if (fromBeginningOfTime) queueAllExisting(jobID,legalLinkTypes); return; } // If the connector model is complete via chaining, then we just need to make // sure discovery works to queue the changes. 
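// Summary of the dispatch in this method (derived from the branches above and below):
//   MODEL_ADD_CHANGE_DELETE         -> queueAllExisting() iff fromBeginningOfTime; no purge phase
//   MODEL_CHAINED_ADD_CHANGE_DELETE -> queueAllExisting() iff fromBeginningOfTime,
//                                      else preparePartialScan(); no purge phase
//   requestMinimum, not MODEL_ALL,
//   not fromBeginningOfTime         -> preparePartialScan() for the chained-add models, else nothing
//   everything else                 -> prepareFullScan() when MODEL_ALL or fromBeginningOfTime
//                                      (and the job is not continuous and the model is not
//                                      MODEL_PARTIAL), otherwise prepareIncrementalScan()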
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE) { if (fromBeginningOfTime) queueAllExisting(jobID,legalLinkTypes); else jobQueue.preparePartialScan(jobID); return; } // Similarly, minimal crawl attempts no delete phase unless the connector explicitly forbids it, or unless // the job criteria have changed. if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime) { // If it is a chained model, do the partial prep. if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD || connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE) jobQueue.preparePartialScan(jobID); return; } if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL && (connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime)) prepareFullScan(jobID,legalLinkTypes,hopcountMethod); else jobQueue.prepareIncrementalScan(jobID); } /** Queue all existing. *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. */ protected void queueAllExisting(Long jobID, String[] legalLinkTypes) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (legalLinkTypes.length > 0) { jobQueue.reactivateHopcountRemovedRecords(jobID); } jobQueue.queueAllExisting(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Prepare for a full scan. *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. *@param hopcountMethod describes how to handle deletions for hopcount purposes. */ protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod) throws ManifoldCFException { while (true) { long sleepAmt = 0L; // Since we delete documents here, we need to manage the hopcount part of the world too. database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // Delete the documents we have never fetched, including any hopcount records we've calculated. 
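// "Never fetched" is expressed below as status PENDING or HOPCOUNTREMOVED; removing the
// hopcount contributions of those rows inside the same serialized transaction keeps the
// hop graph consistent with the queue before the full-scan reset of the remaining rows.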
if (legalLinkTypes.length > 0) { ArrayList list = new ArrayList(); String query = database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t99."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}); hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99", "t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField, query,list, hopcountMethod); } jobQueue.prepareFullScan(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Manually abort a running job. The job will be permanently stopped, and will not run again until * automatically started based on schedule, or manually started. *@param jobID is the job to abort. */ public void manualAbort(Long jobID) throws ManifoldCFException { // Just whack status back to "INACTIVE". The active documents will continue to be processed until done, // but that's fine. There will be no finishing stage, obviously. if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually aborting job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.abortJob(jobID,null); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" abort signal successfully sent"); } } /** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost, * until the job finishes on its own. *@param jobID is the job to abort. *@param requestMinimum is true if a minimal job run is requested. */ public void manualAbortRestart(Long jobID, boolean requestMinimum) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually restarting job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.abortRestartJob(jobID,requestMinimum); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" restart signal successfully sent"); } } /** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost, * until the job finishes on its own. *@param jobID is the job to abort. 
*/ public void manualAbortRestart(Long jobID) throws ManifoldCFException { manualAbortRestart(jobID,false); } /** Abort a running job due to a fatal error condition. *@param jobID is the job to abort. *@param errorText is the error text. *@return true if this is the first logged abort request for this job. */ public boolean errorAbort(Long jobID, String errorText) throws ManifoldCFException { // Just whack status back to "INACTIVE". The active documents will continue to be processed until done, // but that's fine. There will be no finishing stage, obviously. if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'"); } boolean rval; while (true) { long sleepAmt = 0L; database.beginTransaction(); try { rval = jobs.abortJob(jobID,errorText); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (rval && Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" abort signal successfully sent"); } return rval; } /** Pause a job. *@param jobID is the job identifier to pause. */ public void pauseJob(Long jobID) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually pausing job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.pauseJob(jobID); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" successfully paused"); } } /** Restart a paused job. *@param jobID is the job identifier to restart. */ public void restartJob(Long jobID) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually restarting paused job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.restartJob(jobID); jobQueue.clearFailTimes(jobID); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction restarting pausing job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" successfully restarted"); } } /** Get the list of jobs that are ready for seeding. *@return jobs that are active and are running in adaptive mode. These will be seeded * based on what the connector says should be added to the queue. 
*/ public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.lastCheckTimeField).append(",") .append(jobs.reseedIntervalField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ") .append(jobs.typeField).append("=? AND ") .append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)") .append(" FOR UPDATE"); list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS)); list.add(new Long(currentTime)); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Update them all JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); Long x = (Long)row.getValue(jobs.lastCheckTimeField); long synchTime = 0; if (x != null) synchTime = x.longValue(); Long r = (Long)row.getValue(jobs.reseedIntervalField); Long reseedTime; if (r != null) reseedTime = new Long(currentTime + r.longValue()); else reseedTime = null; // Mark status of job as "active/seeding". Special status is needed so that abort // will not complete until seeding is completed. jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for seeding"); } rval[i] = new JobSeedingRecord(jobID,synchTime); i++; } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get the list of jobs that are ready for deletion. *@return jobs that were in the "readyfordelete" state. 
*/
public JobDeleteRecord[] getJobsReadyForDelete()
  throws ManifoldCFException
{
  while (true)
  {
    long sleepAmt = 0L;
    database.beginTransaction();
    try
    {
      // Do the query
      StringBuilder sb = new StringBuilder("SELECT ");
      ArrayList list = new ArrayList();
      sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))}))
        .append(" FOR UPDATE");

      IResultSet set = database.performQuery(sb.toString(),list,null,null);

      // Update them all
      JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()];
      int i = 0;
      while (i < rval.length)
      {
        IResultRow row = set.getRow(i);
        Long jobID = (Long)row.getValue(jobs.idField);

        // Mark status of job as "starting delete"
        jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP);
        if (Logging.jobs.isDebugEnabled())
        {
          Logging.jobs.debug("Marked job "+jobID+" for delete startup");
        }

        rval[i] = new JobDeleteRecord(jobID);
        i++;
      }
      database.performCommit();
      return rval;
    }
    catch (ManifoldCFException e)
    {
      database.signalRollback();
      if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Aborted getting jobs ready for delete: "+e.getMessage());
        sleepAmt = getRandomAmount();
        continue;
      }
      throw e;
    }
    catch (Error e)
    {
      database.signalRollback();
      throw e;
    }
    finally
    {
      database.endTransaction();
      sleepFor(sleepAmt);
    }
  }
}

/** Get the list of jobs that are ready for startup.
*@return jobs that were in the "readyforstartup" state.  These will be marked as being in the "starting up" state.
*/
public JobStartRecord[] getJobsReadyForStartup()
  throws ManifoldCFException
{
  while (true)
  {
    long sleepAmt = 0L;
    database.beginTransaction();
    try
    {
      // Do the query
      StringBuilder sb = new StringBuilder("SELECT ");
      ArrayList list = new ArrayList();
      sb.append(jobs.idField).append(",")
        .append(jobs.lastCheckTimeField).append(",")
        .append(jobs.statusField)
        .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(jobs.statusField,new Object[]{
            jobs.statusToString(jobs.STATUS_READYFORSTARTUP),
            jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})}))
        .append(" FOR UPDATE");

      IResultSet set = database.performQuery(sb.toString(),list,null,null);

      // Update them all
      JobStartRecord[] rval = new JobStartRecord[set.getRowCount()];
      int i = 0;
      while (i < rval.length)
      {
        IResultRow row = set.getRow(i);
        Long jobID = (Long)row.getValue(jobs.idField);
        Long x = (Long)row.getValue(jobs.lastCheckTimeField);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
        boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL);
        long synchTime = 0;
        if (x != null)
          synchTime = x.longValue();

        // Mark status of job as "starting"
        jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP);
        if (Logging.jobs.isDebugEnabled())
        {
          Logging.jobs.debug("Marked job "+jobID+" for startup");
        }

        rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum);
        i++;
      }
      database.performCommit();
      return rval;
    }
    catch (ManifoldCFException e)
    {
      database.signalRollback();
      if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
        sleepAmt = getRandomAmount();
        continue;
      }
      throw e;
    }
    catch (Error e)
    {
      database.signalRollback();
      throw e;
    }
    finally
    {
      database.endTransaction();
sleepFor(sleepAmt); } } } /** Inactivate a job, from the notification state. *@param jobID is the ID of the job to inactivate. */ public void inactivateJob(Long jobID) throws ManifoldCFException { // While there is no flow that can cause a job to be in the wrong state when this gets called, as a precaution // it might be a good idea to put this in a transaction and have the state get checked first. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_NOTIFYINGOFCOMPLETION: jobs.notificationComplete(jobID); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a job starting for delete back to "ready for delete" * state. *@param jobID is the job id. */ public void resetStartDeleteJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_DELETESTARTINGUP: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state"); // Set the state of the job back to "ReadyForDelete" jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a job that is notifying back to "ready for notify" * state. *@param jobID is the job id.
*/ public void resetNotifyJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_NOTIFYINGOFCOMPLETION: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state"); // Set the state of the job back to "ReadyForNotify" jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting notify job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a starting job back to "ready for startup" state. *@param jobID is the job id. */ public void resetStartupJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_STARTINGUP: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state"); // Set the state of the job back to "ReadyForStartup" jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP); break; case Jobs.STATUS_STARTINGUPMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state"); // Set the state of the job back to "ReadyForStartupMinimal" jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL); break; case Jobs.STATUS_ABORTINGSTARTINGUP: case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state"); jobs.writeStatus(jobID,jobs.STATUS_ABORTING); break; case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state"); jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART); break; case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state"); 
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL); break; case Jobs.STATUS_READYFORSTARTUP: case Jobs.STATUS_READYFORSTARTUPMINIMAL: case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: // ok break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting startup job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a seeding job back to "active" state. *@param jobID is the job id. */ public void resetSeedJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state"); // Set the state of the job back to "Active_Uninstalled" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED); break; case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state"); // Set the state of the job back to "Active_NoOutput" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT); break; case Jobs.STATUS_ACTIVESEEDING_NEITHER: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state"); // Set the state of the job back to "Active_Neither" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER); break; case Jobs.STATUS_ACTIVESEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE); break; case Jobs.STATUS_ACTIVEWAITSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state"); // Set the state of the job back to "ActiveWait" jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT); break; case Jobs.STATUS_PAUSEDSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state"); // Set the state of the job back to "Paused" jobs.writeStatus(jobID,jobs.STATUS_PAUSED); break; case Jobs.STATUS_PAUSEDWAITSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state"); // Set the state of the job back to "PausedWait" jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT); break; case Jobs.STATUS_ABORTINGSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state"); // Set the state of the job back to "Aborting"
jobs.writeStatus(jobID,jobs.STATUS_ABORTING); break; case Jobs.STATUS_ABORTINGFORRESTARTSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state"); // Set the state of the job back to "AbortingForRestart" jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART); break; case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state"); // Set the state of the job back to "AbortingForRestartMinimal" jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL); break; case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: case Jobs.STATUS_PAUSED: case Jobs.STATUS_ACTIVEWAIT: case Jobs.STATUS_PAUSEDWAIT: // ok break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Delete jobs in need of being deleted (which are marked "ready for delete"). * This method is meant to be called periodically to perform delete processing on jobs. */ public void deleteJobsReadyForDelete() throws ManifoldCFException { while (true) { long sleepAmt = 0L; // This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested // document. Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other // threads, so eventually a job will become eligible. This happens when there are no records that have an ingested // status: complete, purgatory, being-cleaned, being-deleted, or pending purgatory. database.beginTransaction(); try { // The original query was: // // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND // t1.status IN ('C', 'F', 'G')) // // However, this did not work well with Postgres when the tables got big. So I revised things to do the following multi-stage process: // (1) The query should be broken up, such that n queries are done: // (a) the first one should get all candidate jobs (those that have the right state) // (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1 // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow // early exit!! // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Now, loop through this list.
For each one, verify that it's okay to delete it int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); list.clear(); sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE), jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; ManifoldCF.noteConfigurationChange(); // Remove documents from job queue jobQueue.deleteAllJobRecords(jobID); // Remove carrydowns for the job carryDown.deleteOwner(jobID); // Nothing is in a critical section - so this should be OK. hopCount.deleteOwner(jobID); jobs.delete(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Removed job "+jobID); } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Put all eligible jobs in the "shutting down" state. */ public void finishJobs() throws ManifoldCFException { while (true) { long sleepAmt = 0L; // The jobs we should transition: // - are active // - have no ACTIVE, PENDING, ACTIVEPURGATORY, or PENDINGPURGATORY records database.beginTransaction(); try { // The query I used to emit was: // SELECT jobid FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status IN ('A','P','F','G')) // This did not get along well with Postgresql, so instead this is what is now done: // (1) The query should be broken up, such that n queries are done: // (a) the first one should get all candidate jobs (those that have the right state) // (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1 // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow // early exit!! 
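// Illustrative shape of the two-stage replacement (a sketch only; the literal SQL is an assumption, since the real text is assembled through the database abstraction below):
// (1) SELECT id FROM jobs WHERE status IN (<active statuses>) FOR UPDATE
// (2) for each candidate: SELECT id FROM jobqueue WHERE jobid=? AND status IN ('A','P','F','G',...) LIMIT 1
// A single row returned by (2) disqualifies that job for this pass; an empty result lets it move to the shutting-down state.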
// Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVEWAIT), jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // Check to be sure the job is a candidate for shutdown sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // Mark status of job as "finishing" jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for shutdown"); } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted finishing jobs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Find the list of jobs that need to have their connectors notified of job completion. *@return the ID's of jobs that need their output connectors notified in order to become inactive. 
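* Illustrative usage (a sketch; the jobManager variable is an assumption, not something defined here):
* <pre>
* JobNotifyRecord[] toNotify = jobManager.getJobsReadyForInactivity();
* // every record returned is now in the "notifying of completion" state
* </pre>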
*/ public JobNotifyRecord[] getJobsReadyForInactivity() throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Return them all JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); // Mark status of job as "notifying of completion" jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Found job "+jobID+" in need of notification"); } rval[i++] = new JobNotifyRecord(jobID); } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Complete the sequence that resumes jobs, either from a pause or from a scheduling window * wait. The logic will restore the job to an active state (many possibilities depending on * connector status), and will record the jobs that have been so modified. *@param timestamp is the current time in milliseconds since epoch. *@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed. */ public void finishJobResumes(long timestamp, ArrayList modifiedJobs) throws ManifoldCFException { // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_RESUMING), jobs.statusToString(jobs.STATUS_RESUMINGSEEDING) })})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // There are no secondary checks that need to be made; just resume IJobDescription jobDesc = jobs.load(jobID,true); modifiedJobs.add(jobDesc); jobs.finishResumeJob(jobID,timestamp); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Resumed job "+jobID); } } } /** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling * window. The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT), * and will record the jobs that have been so modified. *@param timestamp is the current time in milliseconds since epoch. *@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
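* Illustrative usage (a sketch; the caller-side names are assumptions):
* <pre>
* ArrayList stopped = new ArrayList();
* jobManager.finishJobStops(System.currentTimeMillis(), stopped);
* // "stopped" now holds an IJobDescription for each job moved to its post-stop state
* </pre>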
*/ public void finishJobStops(long timestamp, ArrayList modifiedJobs) throws ManifoldCFException { // The query I used to emit was: // SELECT jobid FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status IN ('A','F')) // Now the query is broken up so that Postgresql behaves more efficiently. // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ABORTING), jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART), jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL), jobs.statusToString(jobs.STATUS_PAUSING), jobs.statusToString(jobs.STATUS_PAUSINGSEEDING), jobs.statusToString(jobs.STATUS_ACTIVEWAITING), jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING), jobs.statusToString(jobs.STATUS_PAUSINGWAITING), jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING) })})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index. // See CONNECTORS-290. // We do this BEFORE updating the job state. jobQueue.clearDocPriorities(jobID); IJobDescription jobDesc = jobs.load(jobID,true); modifiedJobs.add(jobDesc); jobs.finishStopJob(jobID,timestamp); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Stopped job "+jobID); } } } /** Reset eligible jobs either back to the "inactive" state, or make them active again. The * latter will occur if the cleanup phase of the job generated more pending documents. * * This method is used to pick up all jobs in the shutting down state * whose purgatory or being-cleaned records have been all processed. * *@param currentTime is the current time in milliseconds since epoch. *@param resetJobs is filled in with the set of IJobDescription objects that were reset. 
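* Illustrative usage (a sketch; the caller-side names are assumptions):
* <pre>
* ArrayList completed = new ArrayList();
* jobManager.resetJobs(System.currentTimeMillis(), completed);
* // jobs whose cleanup left no pending documents are finished; the rest re-enter the active state
* </pre>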
*/ public void resetJobs(long currentTime, ArrayList resetJobs) throws ManifoldCFException { // Query for all jobs that fulfill the criteria // The query used to look like: // // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status='P') // // Now, the query is broken up, for performance // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // Check to be sure the job's cleanup phase is complete: no purgatory or being-cleaned records may remain sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PURGATORY), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // The shutting-down phase is complete. However, we need to check if there are any outstanding // PENDING or PENDINGPURGATORY records before we can decide what to do. sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) { // This job needs to re-enter the active state. Make that happen. jobs.returnJobToActive(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" is re-entering active state"); } } else { // This job should be marked as finished. IJobDescription jobDesc = jobs.load(jobID,true); resetJobs.add(jobDesc); jobs.finishJob(jobID,currentTime); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now completed"); } } } } // Status reports /** Get the status of a job. *@param jobID is the job ID. *@return the status object for the specified job. */ @Override public JobStatus getStatus(Long jobID) throws ManifoldCFException { return getStatus(jobID,true); } /** Get a list of all jobs, and their status information. *@return an ordered array of job status objects. */ @Override public JobStatus[] getAllStatus() throws ManifoldCFException { return getAllStatus(true); } /** Get a list of running jobs. This is for status reporting. *@return an array of the job status objects. */ @Override public JobStatus[] getRunningJobs() throws ManifoldCFException { return getRunningJobs(true); } /** Get a list of completed jobs, and their statistics.
*@return an array of the job status objects. */ @Override public JobStatus[] getFinishedJobs() throws ManifoldCFException { return getFinishedJobs(true); } /** Get the status of a job. *@param jobID is the job ID. *@param includeCounts is true if document counts should be included. *@return the status object for the specified job. */ public JobStatus getStatus(Long jobID, boolean includeCounts) throws ManifoldCFException { return getStatus(jobID, includeCounts, Integer.MAX_VALUE); } /** Get a list of all jobs, and their status information. *@param includeCounts is true if document counts should be included. *@return an ordered array of job status objects. */ public JobStatus[] getAllStatus(boolean includeCounts) throws ManifoldCFException { return getAllStatus(includeCounts, Integer.MAX_VALUE); } /** Get a list of running jobs. This is for status reporting. *@param includeCounts is true if document counts should be included. *@return an array of the job status objects. */ public JobStatus[] getRunningJobs(boolean includeCounts) throws ManifoldCFException { return getRunningJobs(includeCounts, Integer.MAX_VALUE); } /** Get a list of completed jobs, and their statistics. *@param includeCounts is true if document counts should be included. *@return an array of the job status objects. */ public JobStatus[] getFinishedJobs(boolean includeCounts) throws ManifoldCFException { return getFinishedJobs(includeCounts, Integer.MAX_VALUE); } /** Get the status of a job. *@param jobID is the job ID. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return the status object for the specified job. */ @Override public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount) throws ManifoldCFException { ArrayList list = new ArrayList(); String whereClause = Jobs.idField+"=?"; list.add(jobID); JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount); if (records.length == 0) return null; return records[0]; } /** Get a list of all jobs, and their status information. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an ordered array of job status objects. */ public JobStatus[] getAllStatus(boolean includeCounts, int maxCount) throws ManifoldCFException { return makeJobStatus(null,null,includeCounts,maxCount); } /** Get a list of running jobs. This is for status reporting. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an array of the job status objects.
*/ @Override public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount) throws ManifoldCFException { ArrayList whereParams = new ArrayList(); String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{ new MultiClause(Jobs.statusField,new Object[]{ Jobs.statusToString(Jobs.STATUS_ACTIVE), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER), Jobs.statusToString(Jobs.STATUS_PAUSED), Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT), Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSING), Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING), Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING), Jobs.statusToString(Jobs.STATUS_RESUMING), Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING) })}); return makeJobStatus(whereClause,whereParams,includeCounts,maxCount); } /** Get a list of completed jobs, and their statistics. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an array of the job status objects. */ @Override public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount) throws ManifoldCFException { StringBuilder sb = new StringBuilder(); ArrayList whereParams = new ArrayList(); sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{ new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ") .append(Jobs.endTimeField).append(" IS NOT NULL"); return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount); } // Protected methods and classes /** Make a job status array from a query result. *@param whereClause is the where clause for the jobs we are interested in. *@param whereParams are the parameters for the where clause. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return the status array. */ protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount) throws ManifoldCFException { IResultSet set = database.performQuery("SELECT t0."+ Jobs.idField+",t0."+ Jobs.descriptionField+",t0."+ Jobs.statusField+",t0."+ Jobs.startTimeField+",t0."+ Jobs.endTimeField+",t0."+ Jobs.errorField+ " FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC", whereParams,null,null); // Build hashes for set2, set3, and set4 Map<Long,Long> set2Hash = new HashMap<Long,Long>(); Map<Long,Long> set3Hash = new HashMap<Long,Long>(); Map<Long,Long> set4Hash = new HashMap<Long,Long>(); Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>(); Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>(); Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>(); if (includeCounts) { // If we are counting all of them anyway, do this via GROUP BY since it will be the fastest. But // otherwise, fire off an individual query at a time.
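/* Sketch of the choice being made here (illustrative; the literal SQL is an assumption -- the real statements are built through the database abstraction below): the cheap path is one grouped count, roughly SELECT jobid, COUNT(dochash) AS doccount FROM jobqueue GROUP BY jobid; the guarded path first probes the total with a count limited to maxCount+1 rows, and only when that probe exceeds maxCount does it fall back to one limited query per job. */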
if (maxCount == Integer.MAX_VALUE) { buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } else { // Check if the total matching jobqueue rows exceeds the limit. If not, we can still use the cheaper query. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1"); addWhereClause(sb,list,whereClause,whereParams,false); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet countResult = database.performQuery(sb.toString(),list,null,null); if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount) { // Too many items in queue; do it the hard way buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } else { // Cheap way should still work. buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } } } JobStatus[] rval = new JobStatus[set.getRowCount()]; for (int i = 0; i < rval.length; i++) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(Jobs.idField); String description = row.getValue(Jobs.descriptionField).toString(); int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString()); Long startTimeValue = (Long)row.getValue(Jobs.startTimeField); long startTime = -1; if (startTimeValue != null) startTime = startTimeValue.longValue(); Long endTimeValue = (Long)row.getValue(Jobs.endTimeField); long endTime = -1; if (endTimeValue != null) endTime = endTimeValue.longValue(); String errorText = (String)row.getValue(Jobs.errorField); if (errorText != null && errorText.length() == 0) errorText = null; int rstatus = JobStatus.JOBSTATUS_NOTYETRUN; switch (status) { case Jobs.STATUS_INACTIVE: if (errorText != null) rstatus = JobStatus.JOBSTATUS_ERROR; else { if (startTime >= 0) rstatus = JobStatus.JOBSTATUS_COMPLETED; else rstatus = JobStatus.JOBSTATUS_NOTYETRUN; } break; case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: case Jobs.STATUS_ACTIVESEEDING_NEITHER: rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED; break; case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVESEEDING: rstatus = JobStatus.JOBSTATUS_RUNNING; break; case Jobs.STATUS_SHUTTINGDOWN: rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP; break; case Jobs.STATUS_READYFORNOTIFY: case Jobs.STATUS_NOTIFYINGOFCOMPLETION: rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION; break; case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGSEEDING: case Jobs.STATUS_ABORTINGSTARTINGUP: case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL: rstatus = JobStatus.JOBSTATUS_ABORTING; break; case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: case Jobs.STATUS_ABORTINGFORRESTARTSEEDING: case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL: case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART: case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL: rstatus = JobStatus.JOBSTATUS_RESTARTING; break; case Jobs.STATUS_PAUSING: case Jobs.STATUS_PAUSINGSEEDING: case Jobs.STATUS_ACTIVEWAITING: case Jobs.STATUS_ACTIVEWAITINGSEEDING: case Jobs.STATUS_PAUSINGWAITING: case Jobs.STATUS_PAUSINGWAITINGSEEDING: rstatus = JobStatus.JOBSTATUS_STOPPING; break; case Jobs.STATUS_RESUMING: 
case Jobs.STATUS_RESUMINGSEEDING: rstatus = JobStatus.JOBSTATUS_RESUMING; break; case Jobs.STATUS_PAUSED: case Jobs.STATUS_PAUSEDSEEDING: rstatus = JobStatus.JOBSTATUS_PAUSED; break; case Jobs.STATUS_ACTIVEWAIT: case Jobs.STATUS_ACTIVEWAITSEEDING: rstatus = JobStatus.JOBSTATUS_WINDOWWAIT; break; case Jobs.STATUS_PAUSEDWAIT: case Jobs.STATUS_PAUSEDWAITSEEDING: rstatus = JobStatus.JOBSTATUS_PAUSED; break; case Jobs.STATUS_STARTINGUP: case Jobs.STATUS_STARTINGUPMINIMAL: case Jobs.STATUS_READYFORSTARTUP: case Jobs.STATUS_READYFORSTARTUPMINIMAL: rstatus = JobStatus.JOBSTATUS_STARTING; break; case Jobs.STATUS_DELETESTARTINGUP: case Jobs.STATUS_READYFORDELETE: case Jobs.STATUS_DELETING: case Jobs.STATUS_DELETING_NOOUTPUT: rstatus = JobStatus.JOBSTATUS_DESTRUCTING; break; default: break; } Long set2Value = set2Hash.get(jobID); Long set3Value = set3Hash.get(jobID); Long set4Value = set4Hash.get(jobID); Boolean set2ExactValue = set2Exact.get(jobID); Boolean set3ExactValue = set3Exact.get(jobID); Boolean set4ExactValue = set4Exact.get(jobID); rval[i] = new JobStatus(jobID.toString(),description,rstatus,((set2Value==null)?0L:set2Value.longValue()), ((set3Value==null)?0L:set3Value.longValue()), ((set4Value==null)?0L:set4Value.longValue()), ((set2ExactValue==null)?true:set2ExactValue.booleanValue()), ((set3ExactValue==null)?true:set3ExactValue.booleanValue()), ((set4ExactValue==null)?true:set4ExactValue.booleanValue()), startTime,endTime,errorText); } return rval; } protected static ClauseDescription buildOutstandingClause() throws ManifoldCFException { return new MultiClause(JobQueue.statusField,new Object[]{ JobQueue.statusToString(JobQueue.STATUS_ACTIVE), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN), JobQueue.statusToString(JobQueue.STATUS_PENDING), JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}); } protected static ClauseDescription buildProcessedClause() throws ManifoldCFException { return new MultiClause(JobQueue.statusField,new Object[]{ JobQueue.statusToString(JobQueue.STATUS_COMPLETE), JobQueue.statusToString(JobQueue.STATUS_UNCHANGED), JobQueue.statusToString(JobQueue.STATUS_PURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}); } protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount, Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash, Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact) throws ManifoldCFException { // Fire off an individual query with a limit for each job // First, get the list of jobs that we are interested in. 
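/* Per-job counting sketch (illustrative; the literal SQL is an assumption): for each job id, three limited counts of the form SELECT COUNT(dochash) AS doccount FROM jobqueue WHERE jobid=? [AND status IN (...)] limited to maxCount+1 rows; when doccount exceeds maxCount, the stored count is clamped to maxCount and the corresponding "exact" flag is set false. */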
StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0"); if (whereClause != null) { sb.append(" WHERE ") .append(whereClause); if (whereParams != null) list.addAll(whereParams); } IResultSet jobSet = database.performQuery(sb.toString(),list,null,null); // Scan the set of jobs for (int i = 0; i < jobSet.getRowCount(); i++) { IResultRow row = jobSet.getRow(i); Long jobID = (Long)row.getValue(Jobs.idField); // Now, for each job, fire off a separate, limited, query for each count we care about sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); - sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(Jobs.idField,jobID)})); + sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet totalSet = database.performQuery(sb.toString(),list,null,null); if (totalSet.getRowCount() > 0) { long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set2Hash.put(jobID,new Long(maxCount)); set2Exact.put(jobID,new Boolean(false)); } else { set2Hash.put(jobID,new Long(rowCount)); set2Exact.put(jobID,new Boolean(true)); } } sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); - sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(Jobs.idField,jobID)})); + sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" AND "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null); if (outstandingSet.getRowCount() > 0) { long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set3Hash.put(jobID,new Long(maxCount)); set3Exact.put(jobID,new Boolean(false)); } else { set3Hash.put(jobID,new Long(rowCount)); set3Exact.put(jobID,new Boolean(true)); } } sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") - .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE "); - sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(Jobs.idField,jobID)})); + .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); + sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" AND "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet processedSet = database.performQuery(sb.toString(),list,null,null); if (processedSet.getRowCount() > 0) { long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set4Hash.put(jobID,new Long(maxCount)); 
set4Exact.put(jobID,new Boolean(false)); } else { set4Hash.put(jobID,new Long(rowCount)); set4Exact.put(jobID,new Boolean(true)); } } } } protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams, Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash, Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact) throws ManifoldCFException { StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1"); addWhereClause(sb,list,whereClause,whereParams,false); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set2 = database.performQuery(sb.toString(),list,null,null); sb = new StringBuilder("SELECT "); list.clear(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()})); addWhereClause(sb,list,whereClause,whereParams,true); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set3 = database.performQuery(sb.toString(),list,null,null); sb = new StringBuilder("SELECT "); list.clear(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()})); addWhereClause(sb,list,whereClause,whereParams,true); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set4 = database.performQuery(sb.toString(),list,null,null); for (int j = 0; j < set2.getRowCount(); j++) { IResultRow row = set2.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set2Hash.put(jobID,(Long)row.getValue("doccount")); set2Exact.put(jobID,new Boolean(true)); } for (int j = 0; j < set3.getRowCount(); j++) { IResultRow row = set3.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set3Hash.put(jobID,(Long)row.getValue("doccount")); set3Exact.put(jobID,new Boolean(true)); } for (int j = 0; j < set4.getRowCount(); j++) { IResultRow row = set4.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set4Hash.put(jobID,(Long)row.getValue("doccount")); set4Exact.put(jobID,new Boolean(true)); } } protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent) { if (whereClause != null) { if (wherePresent) sb.append(" AND"); else sb.append(" WHERE"); sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ") .append(whereClause) .append(")"); if (whereParams != null) list.addAll(whereParams); } } // These methods generate reports for direct display in the UI. /** Run a 'document status' report. *@param connectionName is the name of the connection. *@param filterCriteria are the criteria used to limit the records considered for the report. *@param sortOrder is the specified sort order of the final report. *@param startRow is the first row to include. 
*@param rowCount is the number of rows to include. *@return the results, with the following columns: identifier, job, state, status, scheduled, action, retrycount, retrylimit. The "scheduled" column and the * "retrylimit" column are long values representing a time; all other values will be user-friendly strings. */ public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder, int startRow, int rowCount) throws ManifoldCFException { // Build the query. Long currentTime = new Long(System.currentTimeMillis()); StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(" AS id,") .append("t0.").append(jobQueue.docIDField).append(" AS identifier,") .append("t1.").append(jobs.descriptionField).append(" AS job,") .append("CASE") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? 
THEN 'Out of scope'") .append(" ELSE 'Unknown'") .append(" END AS state,") .append("CASE") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 'Inactive'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 'Ready for processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 'Ready for expiration'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 'Waiting for processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 'Waiting for expiration'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL") .append(" THEN 'Waiting forever'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append("=?") .append(" THEN 'Hopcount exceeded'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 'Deleting'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" THEN 'Processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" THEN 'Expiring'") .append(" ELSE 'Unknown'") .append(" END AS status,") .append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,") .append("CASE") .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'") .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? 
THEN 'Expire'") .append(" ELSE 'Unknown'") .append(" END AS action,") .append("t0.").append(jobQueue.failCountField).append(" AS retrycount,") .append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit") .append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)})); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); addCriteria(sb,list,"t0.",connectionName,filterCriteria,true); // The intrinsic ordering is provided by the "id" 
column, and nothing else. addOrdering(sb,new String[]{"id"},sortOrder); addLimits(sb,startRow,rowCount); return database.performQuery(sb.toString(),list,null,null,rowCount,null); } /** Run a 'queue status' report. *@param connectionName is the name of the connection. *@param filterCriteria are the criteria used to limit the records considered for the report. *@param sortOrder is the specified sort order of the final report. *@param idBucketDescription is the bucket description for generating the identifier class. *@param startRow is the first row to include. *@param rowCount is the number of rows to include. *@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting, processready, expireready, processwaiting, expirewaiting */ public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder, BucketDescription idBucketDescription, int startRow, int rowCount) throws ManifoldCFException { // SELECT substring(docid FROM '<id_regexp>') AS idbucket, // substring(entityidentifier FROM '<id_regexp>') AS idbucket, // SUM(CASE WHEN status='C' then 1 else 0 end)) AS inactive FROM jobqueue WHERE <criteria> // GROUP BY idbucket Long currentTime = new Long(System.currentTimeMillis()); StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,") .append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,") .append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT "); addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription); sb.append(" AS idbucket,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 1 ELSE 0") .append(" END") .append(" AS inactive,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as processing,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as expiring,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 1 ELSE 0") .append(" END") .append(" as deleting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as processready,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as expireready,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as 
processwaiting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as expirewaiting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL") .append(" THEN 1 ELSE 0") .append(" END") .append(" as waitingforever,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as hopcountexceeded"); sb.append(" FROM ").append(jobQueue.getTableName()); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); addCriteria(sb,list,"",connectionName,filterCriteria,false); sb.append(") t1 GROUP BY idbucket"); addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder); addLimits(sb,startRow,rowCount); return database.performQuery(sb.toString(),list,null,null,rowCount,null); } // Protected methods for report generation /** Turn a bucket description into a return column. * This is complicated by the fact that the extraction code is inherently case sensitive. So if case insensitive is * desired, that means we whack the whole thing to lower case before doing the match. 
*/ protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc) { boolean isSensitive = bucketDesc.isSensitive(); list.add(bucketDesc.getRegexp()); sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive)); } /** Add criteria clauses to query. */ protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted) throws ManifoldCFException { Long[] matchingJobs = criteria.getJobs(); if (matchingJobs != null) { whereEmitted = emitClauseStart(sb,whereEmitted); if (matchingJobs.length == 0) { sb.append("0>1"); } else { sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)})); } } RegExpCriteria identifierRegexp = criteria.getIdentifierMatch(); if (identifierRegexp != null) { whereEmitted = emitClauseStart(sb,whereEmitted); list.add(identifierRegexp.getRegexpString()); sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive())); } Long nowTime = new Long(criteria.getNowTime()); int[] states = criteria.getMatchingStates(); int[] statuses = criteria.getMatchingStatuses(); if (states.length == 0 || statuses.length == 0) { whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("0>1"); return whereEmitted; } // Iterate through the specified states, and emit a series of OR clauses, one for each state. The contents of the clause will be complex. whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("("); int k = 0; while (k < states.length) { int stateValue = states[k]; if (k > 0) sb.append(" OR "); switch (stateValue) { case DOCSTATE_NEVERPROCESSED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})})); break; case DOCSTATE_PREVIOUSLYPROCESSED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE), jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED), jobQueue.statusToString(jobQueue.STATUS_COMPLETE), jobQueue.statusToString(jobQueue.STATUS_UNCHANGED), jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})})); break; case DOCSTATE_OUTOFSCOPE: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})})); break; } k++; } sb.append(")"); whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("("); k = 0; while (k < statuses.length) { int stateValue = statuses[k]; if (k > 0) sb.append(" OR "); switch (stateValue) { case DOCSTATUS_INACTIVE: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_COMPLETE), jobQueue.statusToString(jobQueue.STATUS_UNCHANGED), 
jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})})); break; case DOCSTATUS_PROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))})); break; case DOCSTATUS_EXPIRING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))})); break; case DOCSTATUS_DELETING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED), jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})})); break; case DOCSTATUS_READYFORPROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)})); break; case DOCSTATUS_READYFOREXPIRATION: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)})); break; case DOCSTATUS_WAITINGFORPROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)})); break; case DOCSTATUS_WAITINGFOREXPIRATION: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)})); break; case DOCSTATUS_WAITINGFOREVER: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), 
jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL"); break; case DOCSTATUS_HOPCOUNTEXCEEDED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})})); break; } k++; } sb.append(")"); return whereEmitted; } /** Emit a WHERE or an AND, depending... */ protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted) { if (whereEmitted) sb.append(" AND "); else sb.append(" WHERE "); return true; } /** Add ordering. */ protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort) { // Keep track of the fields we've seen Map hash = new HashMap(); // Emit the "Order by" sb.append(" ORDER BY "); // Go through the specified list int i = 0; int count = sort.getCount(); while (i < count) { if (i > 0) sb.append(","); String column = sort.getColumn(i); sb.append(column); if (sort.getDirection(i) == sort.SORT_ASCENDING) sb.append(" ASC"); else sb.append(" DESC"); hash.put(column,column); i++; } // Now, go through the complete field list, and emit sort criteria for everything // not actually specified. This is so LIMIT and OFFSET give consistent results. int j = 0; while (j < completeFieldList.length) { String field = completeFieldList[j]; if (hash.get(field) == null) { if (i > 0) sb.append(","); sb.append(field); sb.append(" DESC"); //if (j == 0) // sb.append(" DESC"); //else // sb.append(" ASC"); i++; } j++; } } /** Add limit and offset. */ protected void addLimits(StringBuilder sb, int startRow, int maxRowCount) { sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount)); } /** Class for tracking existing jobqueue row data */ protected static class JobqueueRecord { protected Long recordID; protected int status; protected Long checkTimeValue; public JobqueueRecord(Long recordID, int status, Long checkTimeValue) { this.recordID = recordID; this.status = status; this.checkTimeValue = checkTimeValue; } public Long getRecordID() { return recordID; } public int getStatus() { return status; } public Long getCheckTimeValue() { return checkTimeValue; } } /** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */ private static int EXTRA_FACTOR = 2; /** This class provides the throttling limits for the job queueing query. */ protected static class ThrottleLimit implements ILimitChecker { // For each connection, there is (a) a number (which is the maximum per bin), and (b) // a current running count per bin. These are stored as elements in a hash map. protected HashMap connectionMap = new HashMap(); // The maximum number of jobs that have reached their chunk size limit that we // need protected int n; // This is the hash table that maps a job ID to the object that tracks the number // of documents already accumulated for this resultset. The count of the number // of queue records we have is tallied by going through each job in this table // and adding the records outstanding for it. protected HashMap jobQueueHash = new HashMap(); // This is the map from jobid to connection name protected HashMap jobConnection = new HashMap(); // This is the set of allowed connection names. We discard all documents that are // not from that set. protected HashMap activeConnections = new HashMap(); // This is the number of documents per set per connection. 
protected HashMap setSizes = new HashMap(); // These are the individual connection maximums, keyed by connection name. protected HashMap maxConnectionCounts = new HashMap(); // This is the maximum number of documents per set over all the connections we are looking at. This helps us establish a sanity limit. protected int maxSetSize = 0; // This is the number of documents processed so far protected int documentsProcessed = 0; // This is where we accumulate blocking documents. This is an arraylist of DocumentDescription objects. protected ArrayList blockingDocumentArray = new ArrayList(); // Cutoff time for documents eligible for prioritization protected long prioritizationTime; /** Constructor. * This class is built up piecemeal, so the constructor does nothing. *@param n is the maximum number of full job descriptions we want at this time. */ public ThrottleLimit(int n, long prioritizationTime) { this.n = n; this.prioritizationTime = prioritizationTime; Logging.perf.debug("Limit instance created"); } /** Transfer blocking documents discovered to BlockingDocuments object */ public void tallyBlockingDocuments(BlockingDocuments blockingDocuments) { int i = 0; while (i < blockingDocumentArray.size()) { DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++); blockingDocuments.addBlockingDocument(dd); } blockingDocumentArray.clear(); } /** Add a job/connection name map entry. *@param jobID is the job id. *@param connectionName is the connection name. */ public void addJob(Long jobID, String connectionName) { jobConnection.put(jobID,connectionName); } /** Add an active connection. This is the pool of active connections that will be used for the lifetime of this operation. *@param connectionName is the connection name. */ public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance) throws ManifoldCFException { activeConnections.put(connectionName,connectorInstance); int setSize = connectorInstance.getMaxDocumentRequest(); setSizes.put(connectionName,new Integer(setSize)); if (setSize > maxSetSize) maxSetSize = setSize; } /** Add a document limit for a specified connection. This is the limit across all matching bins; if any * individual matching bin exceeds that limit, then documents that belong to that bin will be excluded. *@param connectionName is the connection name. *@param regexp is the regular expression, which we will match against various bins. *@param upperLimit is the maximum count associated with the specified job. */ public void addLimit(String connectionName, String regexp, int upperLimit) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'"); ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName); if (ji == null) { ji = new ThrottleJobItem(); connectionMap.put(connectionName,ji); } ji.addLimit(regexp,upperLimit); } /** Set a connection-based total document limit. */ public void setConnectionLimit(String connectionName, int maxDocuments) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName); maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments)); } /** See if this class can be legitimately compared against another of * the same type. *@return true if comparisons will ever return "true". 
*/ public boolean doesCompareWork() { return false; } /** Create a duplicate of this class instance. All current state should be preserved. * NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot * be cached, and therefore duplicate() is never called from the query executor. But it can * be called from other places. *@return the duplicate. */ public ILimitChecker duplicate() { return makeDeepCopy(); } /** Make a deep copy */ public ThrottleLimit makeDeepCopy() { ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime); // Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes) // do not need a deep copy. rval.activeConnections = activeConnections; rval.setSizes = setSizes; rval.maxConnectionCounts = maxConnectionCounts; rval.maxSetSize = maxSetSize; rval.jobConnection = jobConnection; // The structures where counts are maintained DO need a deep copy. rval.documentsProcessed = documentsProcessed; Iterator iter; iter = connectionMap.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate()); } iter = jobQueueHash.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate()); } return rval; } /** Find the hashcode for this class. This will only ever be used if * doesCompareWork() returns true. *@return the hashcode. */ public int hashCode() { return 0; } /** Compare two objects and see if equal. This will only ever be used * if doesCompareWork() returns true. *@param object is the object to compare against. *@return true if equal. */ public boolean equals(Object object) { return false; } /** Get the remaining documents we should query for. *@return the maximal remaining count. */ public int getRemainingDocuments() { return EXTRA_FACTOR * n * maxSetSize - documentsProcessed; } /** See if a result row should be included in the final result set. *@param row is the result row to check. *@return true if it should be included, false otherwise. */ public boolean checkInclude(IResultRow row) throws ManifoldCFException { // Note: This method does two things: First, it insures that the number of documents per job per bin does // not exceed the calculated throttle number. Second, it keeps track of how many document queue items // will be needed, so we can stop when we've got enough for the moment. Logging.perf.debug("Checking if row should be included"); // This is the end that does the work. 
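// (Summary of the checks that follow:) checkInclude applies four successive gates before a row is counted: the row's job must map to a known connection; that connection must have an active connector instance; the per-connection document limit, if one was set, must not be exhausted; and every bin the document hashes to must still have fetches available. A row that fails any gate is excluded.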
// The row passed in has the following jobqueue columns: idField, jobIDField, docIDField, and statusField Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField); // Get the connection name for this row String connectionName = (String)jobConnection.get(jobIDValue); if (connectionName == null) { Logging.perf.debug(" Row does not have an eligible job - excluding"); return false; } IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName); if (connectorInstance == null) { Logging.perf.debug(" Row does not have an eligible connector instance - excluding"); return false; } // Find the connection limit for this document MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName); if (connectionLimit != null) { if (connectionLimit.intValue() == 0) { Logging.perf.debug(" Row exceeds its connection limit - excluding"); return false; } connectionLimit.decrement(); } // Tally this item in the job queue hash, so we can detect when to stop QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue); if (queueItem == null) { // Need to talk to the connector to get a max number of docs per chunk int maxCount = ((Integer)setSizes.get(connectionName)).intValue(); queueItem = new QueueHashItem(maxCount); jobQueueHash.put(jobIDValue,queueItem); } String docIDHash = (String)row.getValue(JobQueue.docHashField); String docID = (String)row.getValue(JobQueue.docIDField); // Figure out what the right bins are, given the data we have. // This will involve a call to the connector. String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID); // Keep the running count, so we can abort without going through the whole set. documentsProcessed++; //scanRecord.addBins(binNames); ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName); // If there is no schedule-based throttling on this connection, we're done. if (item == null) { queueItem.addDocument(); Logging.perf.debug(" Row has no throttling - including"); return true; } int j = 0; while (j < binNames.length) { if (item.isEmpty(binNames[j])) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding"); Object o = row.getValue(JobQueue.prioritySetField); if (o == null || ((Long)o).longValue() <= prioritizationTime) { // Need to add a document descriptor based on this row to the blockingDocuments object! // This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't // be there. Long id = (Long)row.getValue(JobQueue.idField); Long jobID = (Long)row.getValue(JobQueue.jobIDField); DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID); blockingDocumentArray.add(dd); } return false; } j++; } j = 0; while (j < binNames.length) { item.decrement(binNames[j++]); } queueItem.addDocument(); Logging.perf.debug(" Including!"); return true; } /** See if we should examine another row. *@return true if we need to keep going, or false if we are done. 
*/ public boolean checkContinue() throws ManifoldCFException { if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize) return false; // If the number of chunks exceeds n, we are done Iterator iter = jobQueueHash.keySet().iterator(); int count = 0; while (iter.hasNext()) { Long jobID = (Long)iter.next(); QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID); count += item.getChunkCount(); if (count > n) return false; } return true; } } /** This class contains information per job on how many queue items have so far been accumulated. */ protected static class QueueHashItem { // The number of items per chunk for this job int itemsPerChunk; // The number of chunks so far, INCLUDING incomplete chunks int chunkCount = 0; // The number of documents in the current incomplete chunk int currentDocumentCount = 0; /** Construct. *@param itemsPerChunk is the number of items per chunk for this job. */ public QueueHashItem(int itemsPerChunk) { this.itemsPerChunk = itemsPerChunk; } /** Duplicate. */ public QueueHashItem duplicate() { QueueHashItem rval = new QueueHashItem(itemsPerChunk); rval.chunkCount = chunkCount; rval.currentDocumentCount = currentDocumentCount; return rval; } /** Add a document to this job. */ public void addDocument() { currentDocumentCount++; if (currentDocumentCount == 1) chunkCount++; if (currentDocumentCount == itemsPerChunk) currentDocumentCount = 0; } /** Get the number of chunks. *@return the number of chunks. */ public int getChunkCount() { return chunkCount; } } /** This class represents the information stored PER JOB in the throttling structure. * In this structure, "remaining" counts are kept for each bin. When the bin becomes empty, * then no more documents that would map to that bin will be returned, for this query. * * The way in which the maximum count per bin is determined is not part of this class. */ protected static class ThrottleJobItem { /** These are the bin limits. This is an array of ThrottleLimitSpec objects. */ protected ArrayList throttleLimits = new ArrayList(); /** This is a map of the bins and their current counts. If an entry doesn't exist, it's considered to be * the same as maxBinCount. */ protected HashMap binCounts = new HashMap(); /** Constructor. */ public ThrottleJobItem() { } /** Add a bin limit. *@param regexp is the regular expression describing the bins to which the limit applies. *@param maxCount is the maximum number of fetches allowed for that bin. */ public void addLimit(String regexp, int maxCount) { try { throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount)); } catch (PatternSyntaxException e) { // Ignore the bad entry; it just won't contribute any throttling. } } /** Create a duplicate of this item. *@return the duplicate. */ public ThrottleJobItem duplicate() { ThrottleJobItem rval = new ThrottleJobItem(); rval.throttleLimits = throttleLimits; Iterator iter = binCounts.keySet().iterator(); while (iter.hasNext()) { String key = (String)iter.next(); rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate()); } return rval; } /** Check if the specified bin is empty. *@param binName is the bin name. *@return true if empty. */ public boolean isEmpty(String binName) { MutableInteger value = (MutableInteger)binCounts.get(binName); int remaining; if (value == null) { int x = findMaxCount(binName); if (x == -1) return false; remaining = x; } else remaining = value.intValue(); return (remaining == 0); } /** Decrement specified bin. *@param binName is the bin name.
*/ public void decrement(String binName) { MutableInteger value = (MutableInteger)binCounts.get(binName); if (value == null) { int x = findMaxCount(binName); if (x == -1) return; value = new MutableInteger(x); binCounts.put(binName,value); } value.decrement(); } /** Given a bin name, find the max value for it using the regexps that are in place. *@param binName is the bin name. *@return the max count for that bin, or -1 if infinite. */ protected int findMaxCount(String binName) { // Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com" // // We want to be able to do a couple of different kinds of things easily. For example, we want to: // - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains // - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we // can establish a faster rate for .com than for foo.metacarta.com // // The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number: // ^[^\.] = 8 // // To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate // value be chosen when more than one regexp match is found: // ^[^\.] = 8 // ^foo\.metacarta\.com = 4 // // To apply different rates for different levels: // ^[^\.] = 8 // ^\.[^\.]*\.[^\.]*$ = 20 // ^\.[^\.]*$ = 40 // // If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be // more what the world wants to do (restrict, rather than increase, fetch rates). int maxCount = -1; int i = 0; while (i < throttleLimits.size()) { ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++); Pattern p = spec.getRegexp(); Matcher m = p.matcher(binName); if (m.find()) { int limit = spec.getMaxCount(); if (maxCount == -1 || limit < maxCount) maxCount = limit; } } return maxCount; } } /** This is a class which describes an individual throttle limit, in fetches. */ protected static class ThrottleLimitSpec { /** Regexp */ protected Pattern regexp; /** The fetch limit for all bins matching that regexp */ protected int maxCount; /** Constructor */ public ThrottleLimitSpec(String regexp, int maxCount) throws PatternSyntaxException { this.regexp = Pattern.compile(regexp); this.maxCount = maxCount; } /** Get the regexp. */ public Pattern getRegexp() { return regexp; } /** Get the max count */ public int getMaxCount() { return maxCount; } } /** Mutable integer class. */ protected static class MutableInteger { int value; /** Construct. */ public MutableInteger(int value) { this.value = value; } /** Duplicate */ public MutableInteger duplicate() { return new MutableInteger(value); } /** Decrement. */ public void decrement() { value--; } /** Increment. */ public void increment() { value++; } /** Get value. */ public int intValue() { return value; } } }
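/* A minimal, self-contained sketch of the "minimum match wins" rule that
   ThrottleJobItem.findMaxCount implements above: every regexp that matches a bin name
   contributes its limit, and the smallest matching limit is kept, so a specific
   restriction overrides a broader default. The class and method names below are
   illustrative only; this is not part of JobManager. */
import java.util.regex.Pattern;

class FetchLimitRuleDemo {
  // Returns the effective fetch limit for a bin, or -1 if no rule matches (unlimited).
  static int effectiveLimit(String binName, String[] regexps, int[] limits) {
    int best = -1;
    for (int i = 0; i < regexps.length; i++) {
      if (Pattern.compile(regexps[i]).matcher(binName).find()) {
        if (best == -1 || limits[i] < best)
          best = limits[i];
      }
    }
    return best;
  }

  public static void main(String[] args) {
    // The two rules from the findMaxCount comment: a terminal-domain default of 8,
    // plus a tighter per-domain restriction of 4.
    String[] regexps = {"^[^\\.]", "^foo\\.metacarta\\.com"};
    int[] limits = {8, 4};
    System.out.println(effectiveLimit("foo.metacarta.com", regexps, limits)); // 4: both rules match, minimum wins
    System.out.println(effectiveLimit("bar.metacarta.com", regexps, limits)); // 8: only the default matches
  }
}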
public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which belong to a job that's in a "shutting down" state and are in // a "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. // // SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM // jobs t3 WHERE t0.jobid=t3.id AND t3.status='X') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.cleaningJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the cleaning queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned". ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ") .append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) 
AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name and output connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. 
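// Note on the structures consumed below: documentIDMap is keyed by the composite "docIDHash:connectionName" string, while connectionNameMap is a two-level map (connection name -> output connection name -> ArrayList of DocumentDescription), so the getUnindexableDocumentIdentifiers filter can be issued once per (connection, output connection) pair rather than once per document.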
HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()]; boolean[] rvalBoolean = new boolean[documentIDMap.size()]; i = 0; while (i < compositeIDArray.length) { String compositeDocID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID); // Determine whether we can delete it from the index or not rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null); // Set the record status to "being cleaned" and return it rval[i++] = dd; jobQueue.setCleaningStatus(dd.getID()); } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return new DocumentSetAndFlags(rval,rvalBoolean); } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Create a composite document hash key. This consists of the document id hash plus the * connection name. */ protected static String makeCompositeID(String docIDHash, String connectionName) { return docIDHash + ":" + connectionName; } /** Get list of deletable document descriptions. This list will take into account * multiple jobs that may own the same document. All documents for which a description * is returned will be transitioned to the "beingdeleted" state. Documents which are * not in transition and are eligible, but are owned by other jobs, will have their * jobqueue entries deleted by this method. *@param maxCount is the maximum number of documents to return. *@param currentTime is the current time; some fetches do not occur until a specific time. *@return the document descriptions for these documents. 
*/ public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which either belong to a job that's in a "delete pending" state and are in // a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job // that's in a "shutting down" state and are in the "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. // // SELECT id,jobid,docid FROM jobqueue t0 WHERE (t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM // jobs t1 WHERE t0.jobid=t1.id AND t1.status='D') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.deletingJobsPresent()) return new DocumentDescription[0]; long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the delete queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted". // If FOR UPDATE was included, deadlock happened a lot. ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ") .append("t0.").append(jobQueue.checkTimeField).append("<=? AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) 
AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. 
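// Unlike getNextCleanableDocuments above, which returns every candidate together with a boolean "safe to remove from index" flag, this method deletes multiply-owned jobqueue rows outright (see the deleteRecord call below) and returns descriptions only for documents whose index entries can actually be removed.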
HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash to reduce chances of deadlock. String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()]; int j = 0; i = 0; while (i < compositeIDArray.length) { String compositeDocumentID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID); if (allowedDocIds.get(compositeDocumentID) == null) { // Delete this record and do NOT return it. jobQueue.deleteRecord(dd.getID()); // What should we do about hopcount here? // We are deleting a record which belongs to a job that is being // cleaned up. The job itself will go away when this is done, // and so will all the hopcount stuff pertaining to it. So, the // treatment I've chosen here is to leave the hopcount alone and // let the job cleanup get rid of it at the right time. // Note: carrydown records handled in the same manner... //carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()}); } else { // Set the record status to "being deleted" and return it rval[j++] = dd; jobQueue.setDeletingStatus(dd.getID()); } i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return rval; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get a list of document identifiers that should actually be deleted from the index, from a list that * might contain identifiers that are shared with other jobs, which are targeted to the same output connection. * The input list is guaranteed to be smaller in size than maxInClauseCount for the database. *@param documentIdentifiers is the set of document identifiers to consider. *@param connectionName is the connection name for ALL the document identifiers. 
*@param outputConnectionName is the output connection name for ALL the document identifiers. *@return the set of documents which should be removed from the index. */ protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName) throws ManifoldCFException { // This is where we will count the individual document id's HashMap countMap = new HashMap(); // First thing: Compute the set of document identifier hash values to query against HashMap map = new HashMap(); int i = 0; while (i < documentIdentifiers.length) { String hash = documentIdentifiers[i++].getDocumentIdentifierHash(); map.put(hash,hash); countMap.put(hash,new MutableInteger(0)); } if (map.size() == 0) return new String[0]; // Build a query StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); ArrayList docList = new ArrayList(); Iterator iter = map.keySet().iterator(); while (iter.hasNext()) { docList.add(iter.next()); } // Note: There is a potential race condition here. One job may be running while another is in process of // being deleted. If they share a document, then the delete task could decide to delete the document and do so right // after the ingestion takes place in the running job, but right before the document's status is updated // in the job queue [which would have prevented the deletion]. // Unless a transaction is thrown around the time ingestion is taking place (which is a very bad idea) // we are stuck with the possibility of this condition, which will essentially lead to a document being // missing from the index. // One way of dealing with this is to treat "active" documents as already ingested, for the purpose of // reference counting. Then these documents will not be deleted. The risk then becomes that the "active" // document entry will not be completed (say, because of a restart), and thus the corresponding document // will never be removed from the index. // // Instead, the only solution is to not queue a document for any activity that is inconsistent with activities // that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED" // and "BEING_CLEANED" state // for a document. These states will allow the various queries that queue up activities to avoid documents that // are currently being processed elsewhere. sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ") .append("t1.").append(jobs.connectionNameField).append("=? AND ") .append("t1.").append(jobs.outputNameField).append("=?)"); list.add(connectionName); list.add(outputConnectionName); // Do the query, and then count the number of times each document identifier occurs. 
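// Counting rule applied below: a document hash that shows up exactly once among the matching rows is referenced only by the job being torn down, so its index entry can be removed; a count of two or more means another job sharing the same connection and output connection still holds it. For example, if hash H has matching rows under jobs J1 and J2, its count is 2 and H is left in the index. (J1/J2 are illustrative names.)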
IResultSet results = database.performQuery(sb.toString(),list,null,null); i = 0; while (i < results.getRowCount()) { IResultRow row = results.getRow(i++); String docIDHash = (String)row.getValue(jobQueue.docHashField); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi != null) mi.increment(); } // Go through and count only those that have a count of 1. int count = 0; iter = countMap.keySet().iterator(); while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) count++; } String[] rval = new String[count]; iter = countMap.keySet().iterator(); count = 0; while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) rval[count++] = docIDHash; } return rval; } // These methods support the reprioritization thread. /** Get a list of already-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); // The desired query is: // SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n) sb.append("SELECT ") .append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_COMPLETE), jobQueue.statusToString(JobQueue.STATUS_UNCHANGED), jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" "); sb.append(database.constructOffsetLimitClause(0,n)); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); // This query MUST return only documents that are in a pending state which belong to an active job!!! 
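// In addition to the pending-state and priority-set-time filters, the clause below pins the check action to ACTION_RESCAN, so documents that are queued only for expiration are not swept up in reprioritization.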
sb.append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ JobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED), JobQueue.statusToString(jobQueue.STATUS_PENDING), JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ") .append(jobQueue.checkActionField).append("=?").append(" AND "); list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN)); // Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them, // so this will be changed to not include jobs where the priorities have been bashed to null. // // I've included ALL states that might have non-null doc priorities. This includes states // corresponding to uninstalled connectors, since there is no transition that cleans out the // document priorities in these states. The time during which a connector is uninstalled is // expected to be short, because typically this state is the result of an installation procedure // rather than willful action on the part of a user. sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(Jobs.STATUS_STARTINGUP), Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL), Jobs.statusToString(Jobs.STATUS_ACTIVE), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER) }), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(database.constructOffsetLimitClause(0,n)); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Save a set of document priorities. In the case where a document was eligible to have its * priority set, but it no longer is eligible, then the provided priority will not be written. *@param currentTime is the time in milliseconds since epoch. *@param documentDescriptions are the document descriptions. *@param priorities are the desired priorities. 
*/ public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities) throws ManifoldCFException { // Retry loop - in case we get a deadlock despite our best efforts while (true) { // This should be ordered by document identifier hash in order to prevent potential deadlock conditions HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":"+documentDescriptions[i].getJobID(); docIDHashes[i] = documentIDHash; indexMap.put(documentIDHash,new Integer(i)); i++; } java.util.Arrays.sort(docIDHashes); long sleepAmt = 0L; // Start the transaction now database.beginTransaction(); try { // Need to order the writes by doc id. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); DocumentDescription dd = documentDescriptions[index]; double priority = priorities[index]; jobQueue.writeDocPriority(currentTime,dd.getID(),priorities[index]); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString()); i++; } database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get up to the next n documents to be expired. * This method marks the documents whose descriptions have been returned as "being processed", or active. * The same marking is used as is used for documents that have been queued for worker threads. The model * is thus identical. * *@param n is the maximum number of records desired. *@param currentTime is the current time. *@return the array of document descriptions to expire. */ public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime) throws ManifoldCFException { // Screening query // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up // for an extended period of time. if (!jobs.activeJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Beginning query to look for documents to expire"); } // Put together a query with a limit of n // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query. 
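// ---------------------------------------------------------------------------
// Editor's aside: writeDocumentPriorities() above follows a pattern that
// recurs throughout this class: (1) sort the work items by document hash so
// every thread takes row locks in the same global order, and (2) retry the
// whole transaction with a randomized backoff whenever the database signals a
// transaction abort (deadlock or serialization failure). A self-contained
// sketch of just that skeleton; the tx* hooks and isTransactionAbort() are
// hypothetical stand-ins for the database layer, not ManifoldCF API.

import java.util.Arrays;
import java.util.Random;

abstract class SortedRetrySketch
{
  private final Random random = new Random();

  /** Touch one row per key, in sorted order, inside a retried transaction. */
  public void process(String[] keys) throws Exception
  {
    Arrays.sort(keys);             // fixed lock order prevents most deadlocks
    while (true)
    {
      long sleepAmt = 0L;
      txBegin();
      try
      {
        for (String key : keys)
          txTouchRow(key);         // e.g. an UPDATE against a single row
        txCommit();
        break;                     // success: leave the retry loop
      }
      catch (Exception e)
      {
        txRollback();
        if (isTransactionAbort(e)) // deadlock/serialization failure: retry
        {
          sleepAmt = (long)(random.nextDouble() * 1000.0);
          continue;
        }
        throw e;
      }
      finally
      {
        txEnd();
        Thread.sleep(sleepAmt);    // randomized backoff before retrying
      }
    }
  }

  // Hypothetical database hooks; real code delegates to the database layer.
  protected abstract void txBegin() throws Exception;
  protected abstract void txTouchRow(String key) throws Exception;
  protected abstract void txCommit() throws Exception;
  protected abstract void txRollback() throws Exception;
  protected abstract void txEnd() throws Exception;
  protected abstract boolean isTransactionAbort(Exception e);
}
// ---------------------------------------------------------------------------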
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.jobIDField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField).append(",") .append("t0.").append(jobQueue.statusField).append(",") .append("t0.").append(jobQueue.failTimeField).append(",") .append("t0.").append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,n)); String query = sb.toString(); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); ArrayList answers = new ArrayList(); int repeatCount = 0; while (true) { long sleepAmt = 0L; if (Logging.perf.isDebugEnabled()) { repeatCount++; Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+ new Long(System.currentTimeMillis() - startTime)+" ms"); } database.beginTransaction(); try { IResultSet set = database.performQuery(query,list,null,null,n,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); statusMap.put(compositeDocumentID,new Integer(status)); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. 
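// ---------------------------------------------------------------------------
// Editor's aside: the grouping just built above -- documents keyed first by
// repository connection, then by output connection, plus a composite
// "hash + connection" key for uniqueness -- reduces to a nested map. A
// hypothetical sketch; the real composite key layout is whatever
// makeCompositeID() produces, which is not shown in this section.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class GroupingSketch
{
  /** Illustrative composite key; a length prefix keeps the concatenation of
  * two free-form strings unambiguous. */
  static String compositeID(String docIDHash, String connectionName)
  {
    return docIDHash.length() + ":" + docIDHash + connectionName;
  }

  /** File one document hash under connection -> output connection. */
  static void add(Map<String,Map<String,List<String>>> byConnection,
    String connectionName, String outputConnectionName, String docIDHash)
  {
    Map<String,List<String>> byOutput = byConnection.get(connectionName);
    if (byOutput == null)
    {
      byOutput = new HashMap<String,List<String>>();
      byConnection.put(connectionName,byOutput);
    }
    List<String> docs = byOutput.get(outputConnectionName);
    if (docs == null)
    {
      docs = new ArrayList<String>();
      byOutput.put(outputConnectionName,docs);
    }
    docs.add(docIDHash);
  }
}
// ---------------------------------------------------------------------------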
        String[] compositeIDArray = new String[documentIDMap.size()];
        i = 0;
        iter = documentIDMap.keySet().iterator();
        while (iter.hasNext())
        {
          compositeIDArray[i++] = (String)iter.next();
        }

        java.util.Arrays.sort(compositeIDArray);

        DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
        boolean[] rvalBoolean = new boolean[documentIDMap.size()];
        i = 0;
        while (i < compositeIDArray.length)
        {
          String compositeDocID = compositeIDArray[i];
          DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
          // Determine whether we can delete it from the index or not
          rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
          // Set the record status to "being cleaned" and return it
          rval[i++] = dd;
          jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
        }

        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();

        return new DocumentSetAndFlags(rval, rvalBoolean);
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  // This method supports the "queue stuffer" thread

  /** Get up to the next n document(s) to be fetched and processed.
  * This fetch returns records that contain the document identifier, plus all instructions
  * pertaining to the document's handling (e.g. whether it should be refetched if the version
  * has not changed).
  * This method also marks the documents whose descriptions have been returned as "being processed".
  *@param n is the maximum number of records desired.
  *@param currentTime is the current time; some fetches do not occur until a specific time.
  *@param interval is the number of milliseconds that this set of documents should represent (for throttling).
  *@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
  * but could not be queued due to throttling considerations.
  *@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
  * so that individual connections are not overwhelmed.
  *@param scanRecord retains the bins from all documents encountered from the query, even those that were skipped due
  * to being overcommitted.
  *@return the array of document descriptions to fetch and process.
  */
  public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
    BlockingDocuments blockingDocuments, PerformanceStatistics statistics, DepthStatistics scanRecord)
    throws ManifoldCFException
  {
    // NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
    // a given interval.  Therefore, the returned result has the following constraints on it:
    // 1) There must be no more than n documents returned total;
    // 2) For any given job that is throttled, the total number of documents returned must be
    //    consistent with the time interval provided.
    // In general, this requires the database layer to perform fairly advanced filtering on the
    // result, far in excess of a simple count.  An implementation of an interface is therefore
    // going to need to be passed into the performQuery() operation, which prunes the resultset
    // as it is being read into memory.  That's a new feature that will need to be added to the
    // database layer.

    // Screening query
    // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
    // for an extended period of time.
    if (!jobs.activeJobsPresent())
      return new DocumentDescription[0];

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to find documents to queue");
    }

    // Below there used to be one large transaction, with multiple read sections and multiple write sections.
    // As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
    // transaction up.  However, the transaction depended for its correctness in throttling on making sure
    // that the throttles that were built were based on the same active jobs that the subsequent queries
    // that did the stuffing relied upon.  This made reorganization impossible until I realized that with
    // Postgresql's way of doing transaction isolation this was going to happen anyway, so I needed a more
    // robust solution.
    //
    // Specifically, I chose to change the way documents were queued so that only documents from properly
    // throttled jobs could be queued.  That meant I needed to add stuff to the ThrottleLimit class to track
    // the very knowledge of an active job.  This had the additional benefit of meaning there was no chance of
    // a query occurring from inside a resultset filter.
    //
    // But, after I did this, it was no longer necessary to have such a large transaction either.

    // Anything older than 10 minutes ago is considered eligible for reprioritization.
    long prioritizationTime = currentTime - 60000L * 10L;

    ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);

    IResultSet jobconnections = jobs.getActiveJobConnections();
    HashMap connectionSet = new HashMap();
    int i = 0;
    while (i < jobconnections.getRowCount())
    {
      IResultRow row = jobconnections.getRow(i++);
      Long jobid = (Long)row.getValue("jobid");
      String connectionName = (String)row.getValue("connectionname");
      vList.addJob(jobid,connectionName);
      connectionSet.put(connectionName,connectionName);
    }

    // Find the active connection names.  We'll load these, and then get throttling info
    // from each one.
    String[] activeConnectionNames = new String[connectionSet.size()];
    Iterator iter = connectionSet.keySet().iterator();
    i = 0;
    while (iter.hasNext())
    {
      activeConnectionNames[i++] = (String)iter.next();
    }
    IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);

    // Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
    // factor and set the connection limits.
    HashMap rawFetchCounts = new HashMap();
    double rawFetchCountTotal = 0.0;
    i = 0;
    while (i < connections.length)
    {
      IRepositoryConnection connection = connections[i++];
      String connectionName = connection.getName();
      int maxConnections = connection.getMaxConnections();
      double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
      double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
      // Keep the avg rate for later use, since it may get updated before next time we need it.
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount)); rawFetchCountTotal += weightedRawFetchCount; } // Calculate an adjustment factor double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal; // For each job, we must amortize the maximum number of fetches per ms to the actual interval, // and also randomly select an extra fetch based on the fractional probability. (This latter is // necessary for the case where the maximum fetch rate is specified to be pretty low.) // i = 0; while (i < connections.length) { IRepositoryConnection connection = connections[i++]; String connectionName = connection.getName(); // Check if throttled... String[] throttles = connection.getThrottles(); int k = 0; while (k < throttles.length) { // The key is the regexp value itself String throttle = throttles[k++]; float throttleValue = connection.getThrottleValue(throttle); // For the given connection, set the fetch limit per bin. This is calculated using the time interval // and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch" // on a weighted random basis. // // In the future, the connection may specify tuples which pair a regexp describing a set of bins against // a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum // count. double fetchesPerTimeInterval = (double)throttleValue * (double)interval; // Actual amount will be the integer value of this, plus an additional 1 if the random number aligns int fetches = (int)fetchesPerTimeInterval; fetchesPerTimeInterval -= (double)fetches; if (random.nextDouble() <= fetchesPerTimeInterval) fetches++; // Save the limit in the ThrottleLimit structure vList.addLimit(connectionName,throttle,fetches); } // For the overall connection, we also have a limit which is based on the number of connections there are actually available. Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName); double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor; // Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing // the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots // of available connection handles, but the bulk of the activity is on slow speed/highly handle limited connections, but I honestly can't think of a better way at the moment. // One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - then we won't loop as much. // // Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum. int fetchCount = ((int)adjustedFetchCount) + 5; vList.setConnectionLimit(connectionName,fetchCount); } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue"); // System.out.println("Done building throttle structure"); // Locate records. // Note that we do NOT want to get everything there is to know about the job // using this query, since the file specification may be large and expensive // to parse. We will load a (cached) copy of the job description for that purpose. // // NOTE: This query deliberately excludes documents which may be being processed by another job. 
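// ---------------------------------------------------------------------------
// Editor's aside: the fractional-fetch trick above, isolated into one
// self-contained method (hypothetical class name). For example, a throttle of
// 0.00003 fetches/ms over a 30000 ms interval yields an expected 0.9 fetches;
// rather than always rounding down to zero, the single fetch is granted with
// probability 0.9.

import java.util.Random;

final class FetchBudgetSketch
{
  private static final Random random = new Random();

  /** Convert a fetch rate (fetches per millisecond) and an interval (ms) into
  * an integer fetch budget, granting the fractional remainder as one extra
  * fetch on a weighted random basis. */
  static int fetchesForInterval(double fetchesPerMs, long intervalMs)
  {
    double expected = fetchesPerMs * (double)intervalMs;
    int fetches = (int)expected;           // guaranteed whole fetches
    double remainder = expected - (double)fetches;
    if (random.nextDouble() <= remainder)  // weighted chance of one more
      fetches++;
    return fetches;
  }
}
// ---------------------------------------------------------------------------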
    // (It actually excludes a bit more than that, because the exact query is impossible to write given
    // the fact that document id's cannot be compared.)  These are documents where there is ANOTHER
    // document entry with the same hash value, a different job id, and a status which is either "active",
    // "activepurgatory", or "beingdeleted".  (It does not check whether the jobs have the same connection or
    // whether the document id's are in fact the same, and therefore may temporarily block legitimate document
    // activity under rare circumstances.)
    //
    // The query I want is:
    // SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx
    //   AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t0.jobid=t1.id AND t1.status='A')
    //   AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
    //     AND t2.status IN ('A','F','D'))
    // ORDER BY docpriority ASC LIMIT xxx
    //
    // NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried.  Then, because every
    // document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at
    // one point.  Why?  Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents
    // without working with a monster resultset.
    //
    // I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index
    // to pull its results in an ordered fashion.
    //
    // Another subtlety is that I *must* mark the documents active as I find them, so that they do not
    // have any chance of getting returned twice.

    // Accumulate the answers here
    ArrayList answers = new ArrayList();

    // The current time value
    Long currentTimeValue = new Long(currentTime);

    // Always analyze jobqueue before this query.  Otherwise stuffing may get a bad plan, interfering with performance.
    // This turned out to be needed in postgresql 8.3, even though 8.2 worked fine.
    //jobQueue.unconditionallyAnalyzeTables();

    // Loop through priority values
    int currentPriority = 1;
    boolean isDone = false;
    while (!isDone && currentPriority <= 10)
    {
      if (jobs.hasPriorityJobs(currentPriority))
      {
        Long currentPriorityValue = new Long((long)currentPriority);
        fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections);
        isDone = !vList.checkContinue();
      }
      currentPriority++;
    }

    // Assert the blocking documents we discovered
    vList.tallyBlockingDocuments(blockingDocuments);

    // Convert the saved answers to an array
    DocumentDescription[] rval = new DocumentDescription[answers.size()];
    i = 0;
    while (i < rval.length)
    {
      rval[i] = (DocumentDescription)answers.get(i);
      i++;
    }

    // After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment.
    // This is done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are
    // higher than the current level that is currently being dequeued.
    //
    // The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current
    // document priority level.  For example, documents could be classed by job, which might make sense because there is a possibility that two jobs'
    // job priorities may differ.  Also, because of document fetch scheduling, each time frame may represent a class in its own right as well.
    // These classes would have to be associated with independent bin counts, if we were to make any use of them.
Then, it would be also necessary // to know what classes a document belonged to in order to be able to calculate its priority. // // An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc. // That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal. // Find the one row from a live job that has the best document priority, which is available within the current time window. // Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even // germane at the moment. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause(jobQueue.statusField, new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField}, true)).append(" ") .append(database.constructOffsetLimitClause(0,1,true)); IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null); if (set.getRowCount() > 0) { IResultRow row = set.getRow(0); Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue) scanRecord.addBins(docPriority); } return rval; } /** Fetch and process documents matching the passed-in criteria */ protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue, ThrottleLimit vList, IRepositoryConnection[] connections) throws ManifoldCFException { // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query. 
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT t0."); sb.append(jobQueue.idField).append(",t0."); if (Logging.scheduling.isDebugEnabled()) sb.append(jobQueue.docPriorityField).append(",t0."); sb.append(jobQueue.jobIDField).append(",t0.") .append(jobQueue.docHashField).append(",t0.") .append(jobQueue.docIDField).append(",t0.") .append(jobQueue.statusField).append(",t0.") .append(jobQueue.failTimeField).append(",t0.") .append(jobQueue.failCountField).append(",t0.") .append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField), new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); // Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name) sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.") .append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.") .append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField) .append(")"); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ "t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField}, true)).append(" "); // Before entering the transaction, we must provide the throttlelimit object with all the connector // instances it could possibly need. The purpose for doing this is to prevent a deadlock where // connector starvation causes database lockup. // // The preallocation of multiple connector instances is certainly a worry. 
If any other part // of the code allocates multiple connector instances also, the potential exists for this to cause // deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple" // at the connector factory level to make sure these requests are properly ordered. String[] orderingKeys = new String[connections.length]; String[] classNames = new String[connections.length]; ConfigParams[] configParams = new ConfigParams[connections.length]; int[] maxConnections = new int[connections.length]; int k = 0; while (k < connections.length) { IRepositoryConnection connection = connections[k]; orderingKeys[k] = connection.getName(); classNames[k] = connection.getClassName(); configParams[k] = connection.getConfigParams(); maxConnections[k] = connection.getMaxConnections(); k++; } IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections); try { // Hand the connectors off to the ThrottleLimit instance k = 0; while (k < connections.length) { vList.addConnectionName(connections[k].getName(),connectors[k]); k++; } // Now we can tack the limit onto the query. Before this point, remainingDocuments would be crap int limitValue = vList.getRemainingDocuments(); sb.append(database.constructOffsetLimitClause(0,limitValue,true)); if (Logging.perf.isDebugEnabled()) { Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+ " (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
String[] docIDHashes = new String[set.getRowCount()]; Map storageMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long id = (Long)row.getValue(jobQueue.idField); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String docIDHash = (String)row.getValue(jobQueue.docHashField); String docID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = -1; else failCount = (int)failCountValue.longValue(); DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount); docIDHashes[i] = docIDHash + ":" + jobID; storageMap.put(docIDHashes[i],dd); statusMap.put(docIDHashes[i],new Integer(status)); if (Logging.scheduling.isDebugEnabled()) { Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list"); } i++; } // No duplicates are possible here java.util.Arrays.sort(docIDHashes); i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash); Long id = dd.getID(); int status = ((Integer)statusMap.get(docIDHash)).intValue(); // Set status to "ACTIVE". jobQueue.updateActiveRecord(id,status); answers.add(dd); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } finally { RepositoryConnectorFactory.releaseMultiple(connectors); } } // These methods support the individual fetch/process threads. /** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy. * The query done within MUST be cached in order to not cause undue performance degradation. *@param jobID is the job identifier. *@return true if the job is in one of the "active" states. */ public boolean checkJobActive(Long jobID) throws ManifoldCFException { return jobs.checkJobActive(jobID); } /** Verify if a job is still processing documents, or no longer has any outstanding active documents */ public boolean checkJobBusy(Long jobID) throws ManifoldCFException { return jobQueue.checkJobBusy(jobID); } /** Note completion of document processing by a job thread of a document. * This method causes the state of the document to be marked as "completed". *@param documentDescriptions are the description objects for the documents that were processed. */ public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions) throws ManifoldCFException { // Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE // transaction is needed in order to complete these documents correctly. 
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
    //
    HashMap indexMap = new HashMap();
    String[] docIDHashes = new String[documentDescriptions.length];
    int i = 0;
    while (i < documentDescriptions.length)
    {
      String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      docIDHashes[i] = documentIDHash;
      indexMap.put(documentIDHash,new Integer(i));
      i++;
    }

    java.util.Arrays.sort(docIDHashes);

    // Retry loop - in case we get a deadlock despite our best efforts
    while (true)
    {
      long sleepAmt = 0L;

      // Start the transaction now
      database.beginTransaction();
      try
      {
        // Do one row at a time, to avoid deadlocking things
        i = 0;
        while (i < docIDHashes.length)
        {
          String docIDHash = docIDHashes[i];

          // Get the DocumentDescription object
          DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];

          // Query for the status
          ArrayList list = new ArrayList();
          String query = database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobQueue.idField,dd.getID())});
          TrackerClass.notePreread(dd.getID());
          IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
            query+" FOR UPDATE",list,null,null);
          TrackerClass.noteRead(dd.getID());
          if (set.getRowCount() > 0)
          {
            IResultRow row = set.getRow(0);
            // Grab the status
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            // Update the jobqueue table
            jobQueue.updateCompletedRecord(dd.getID(),status);
          }
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
            " docs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Note completion of document processing by a job thread of a document.
  * This method causes the state of the document to be marked as "completed".
  *@param documentDescription is the description object for the document that was processed.
  */
  public void markDocumentCompleted(DocumentDescription documentDescription)
    throws ManifoldCFException
  {
    markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // It's no longer an issue to have to deal with documents being conditionally deleted; that's been
    // taken over by the hopcountremoval method below.  So just use the simple 'delete' functionality.
    return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Mark hopcount removal from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // For each record, we're going to have to choose between marking it as "hopcount removed", and marking
    // it for rescan.  So the basic flow will involve changing a document's status.

    // Before we can change a document status, we need to know the *current* status.  Therefore, a SELECT xxx FOR UPDATE/UPDATE
    // transaction is needed in order to complete these documents correctly.
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
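// ---------------------------------------------------------------------------
// Editor's aside: the "read the current status under a row lock, then write
// the new status" step used by the completion and hopcount-removal methods
// above, reduced to its plain JDBC shape. Table and column names here are
// illustrative only; the real code goes through ManifoldCF's own database
// abstraction rather than raw JDBC.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

final class RowLockSketch
{
  /** Lock one jobqueue row, read its status, and rewrite it, all inside the
  * caller's already-open transaction. */
  static void completeRecord(Connection conn, long id, String newStatus)
    throws SQLException
  {
    PreparedStatement select = conn.prepareStatement(
      "SELECT status FROM jobqueue WHERE id = ? FOR UPDATE");
    try
    {
      select.setLong(1,id);
      ResultSet rs = select.executeQuery();
      if (!rs.next())
        return;                            // row vanished; nothing to do
      String oldStatus = rs.getString(1);  // the real code branches on this
      PreparedStatement update = conn.prepareStatement(
        "UPDATE jobqueue SET status = ? WHERE id = ?");
      try
      {
        update.setString(1,newStatus);
        update.setLong(2,id);
        update.executeUpdate();
      }
      finally
      {
        update.close();
      }
    }
    finally
    {
      select.close();
    }
  }
}
// ---------------------------------------------------------------------------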
// HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); docIDHashes[i] = documentIDHash; indexMap.put(documentIDHash,new Integer(i)); i++; } java.util.Arrays.sort(docIDHashes); // Retry loop - in case we get a deadlock despite our best efforts while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // Do one row at a time, to avoid deadlocking things List<String> deleteList = new ArrayList<String>(); i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; // Get the DocumentDescription object DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()]; // Query for the status ArrayList list = new ArrayList(); String query = database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.idField,dd.getID())}); TrackerClass.notePreread(dd.getID()); IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+ query+" FOR UPDATE",list,null,null); TrackerClass.noteRead(dd.getID()); if (set.getRowCount() > 0) { IResultRow row = set.getRow(0); // Grab the status int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); // Update the jobqueue table boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status); if (didDelete) { deleteList.add(dd.getDocumentIdentifierHash()); } } i++; } String[] docIDSimpleHashes = new String[deleteList.size()]; for (int j = 0; j < docIDSimpleHashes.length; j++) { docIDSimpleHashes[j] = deleteList.get(j); } // Next, find the documents that are affected by carrydown deletion. DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes); // Since hopcount inheritance and prerequisites came from the addDocument() method, // we don't delete them here. TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+ " docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Mark hopcount removal from queue as a result of processing of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for * a repeat processing attempt. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. 
*/ public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete from queue as a result of expiration of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired, * no special activity takes place as a result of the document being in a RESCAN state. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod); } /** Delete from queue as a result of expiration of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired, * no special activity takes place as a result of the document being in a RESCAN state. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete from queue as a result of cleaning up an unreachable document. * The document is expected to be in the PURGATORY state. There is never any need to reprocess the * document. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod); } /** Delete from queue as a result of cleaning up an unreachable document. * The document is expected to be in the PURGATORY state. There is never any need to reprocess the * document. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. 
*/ public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete documents with no repercussions. We don't have to worry about the current state of each document, * since the document is definitely going away. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { if (documentDescriptions.length == 0) return new DocumentDescription[0]; // Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else. // In all cases, the state of the document excludes other activity. // The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add // a reference for the very same document to the queue. Then, order of locking matters, so the deletions should happen in a specific order to avoid // the possibility of deadlock. Nevertheless, this is enough of a risk that I've chosen to order the deletions by document id hash order, just like everywhere // else. long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString()); } HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort by doc hash, to establish non-blocking lock order java.util.Arrays.sort(docIDHashes); DocumentDescription[] rval; while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()); String[] docIDSimpleHashes = new String[docIDHashes.length]; // Delete jobqueue rows FIRST. Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these // rows that might get affected by carrydown data deletion, not the rows themselves! i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()]; // Individual operations are necessary so order can be controlled. jobQueue.deleteRecord(dd.getID()); docIDSimpleHashes[i] = dd.getDocumentIdentifierHash(); i++; } // Next, find the documents that are affected by carrydown deletion. 
rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes); // Finally, delete the carrydown records in question. carryDown.deleteRecords(jobID,docIDSimpleHashes); if (legalLinkTypes.length > 0) hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } return rval; } /** Helper method: Find the document descriptions that will be affected due to carrydown row deletions. */ protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes) throws ManifoldCFException { // Break the request into pieces, as needed, and throw everything into a hash for uniqueness. // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes. // The goal is to throw all the children into a hash, to make them unique at the end. HashMap resultHash = new HashMap(); ArrayList list = new ArrayList(); int maxCount = maxClauseProcessDeleteHashSet(); int i = 0; int z = 0; while (i < docIDHashes.length) { if (z == maxCount) { processDeleteHashSet(jobID,resultHash,list); list.clear(); z = 0; } list.add(docIDHashes[i]); i++; z++; } if (z > 0) processDeleteHashSet(jobID,resultHash,list); // Now, put together the result document list from the hash. DocumentDescription[] rval = new DocumentDescription[resultHash.size()]; i = 0; Iterator iter = resultHash.keySet().iterator(); while (iter.hasNext()) { Long id = (Long)iter.next(); DocumentDescription dd = (DocumentDescription)resultHash.get(id); rval[i++] = dd; } return rval; } /** Get maximum count. */ protected int maxClauseProcessDeleteHashSet() { return database.findConjunctionClauseMax(new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}); } /** Helper method: look up rows affected by a deleteRecords operation. */ protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list) throws ManifoldCFException { // The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription // object for return, and so a join is necessary against the jobqueue table. 
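// ---------------------------------------------------------------------------
// Editor's aside: the maxCount/z bookkeeping in
// calculateAffectedDeleteCarrydownChildren() above is the usual "split a
// large IN-clause into database-friendly chunks" pattern. A generic sketch
// with hypothetical names:

import java.util.ArrayList;
import java.util.List;

final class ChunkSketch
{
  interface ChunkHandler
  {
    void handle(List<String> chunk) throws Exception;
  }

  /** Feed items to the handler in chunks of at most maxCount entries. */
  static void inChunks(String[] items, int maxCount, ChunkHandler handler)
    throws Exception
  {
    List<String> chunk = new ArrayList<String>(maxCount);
    for (String item : items)
    {
      chunk.add(item);
      if (chunk.size() == maxCount)
      {
        handler.handle(chunk);   // flush a full chunk
        chunk.clear();
      }
    }
    if (!chunk.isEmpty())
      handler.handle(chunk);     // flush the remainder
  }
}
// ---------------------------------------------------------------------------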
StringBuilder sb = new StringBuilder("SELECT "); ArrayList newList = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(carryDown.getTableName()).append(" t1, ") .append(jobQueue.getTableName()).append(" t0 WHERE "); sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new UnitaryClause("t1."+carryDown.jobIDField,jobID), new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND "); sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField), new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})); /* sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new MultiClause("t1."+carryDown.parentIDHashField,list), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})) .append(")"); */ IResultSet set = database.performQuery(sb.toString(),newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long id = (Long)row.getValue(jobQueue.idField); String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField); String documentIdentifier = (String)row.getValue(jobQueue.docIDField); resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier)); } } /** Requeue a document for further processing in the future. * This method is called after a document is processed, when the job is a "continuous" one. * It is essentially equivalent to noting that the document processing is complete, except the * document remains on the queue. *@param documentDescriptions is the set of description objects for the document that was processed. *@param executeTimes are the times that the documents should be rescanned. Null indicates "never". *@param actions are what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE. */ public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes, int[] actions) throws ManifoldCFException { String[] docIDHashes = new String[documentDescriptions.length]; Long[] ids = new Long[documentDescriptions.length]; Long[] executeTimesNew = new Long[documentDescriptions.length]; int[] actionsNew = new int[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. 
i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); executeTimesNew[i] = executeTimes[index]; actionsNew[i] = actions[index]; i++; } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Requeue a document for further processing in the future. * This method is called after a document is processed, when the job is a "continuous" one. * It is essentially equivalent to noting that the document processing is complete, except the * document remains on the queue. *@param documentDescription is the description object for the document that was processed. *@param executeTime is the time that the document should be rescanned. Null indicates "never". *@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE. */ public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action) throws ManifoldCFException { requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action}); } /** Reset a set of documents for further processing in the future. * This method is called after some unknown number of the documents were processed, but then a service interruption occurred. * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not. * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's * current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that * special logic is probably not worth it. *@param documentDescriptions is the set of description objects for the document that was processed. *@param executeTime is the time that the documents should be rescanned. *@param failTime is the time beyond which a service interruption will be considered a hard failure. *@param failCount is the number of retries beyond which a service interruption will be considered a hard failure. 
*@param action is what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE. */ public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime, int action, long failTime, int failCount) throws ManifoldCFException { Long executeTimeLong = new Long(executeTime); Long[] ids = new Long[documentDescriptions.length]; String[] docIDHashes = new String[documentDescriptions.length]; Long[] executeTimes = new Long[documentDescriptions.length]; int[] actions = new int[documentDescriptions.length]; long[] failTimes = new long[documentDescriptions.length]; int[] failCounts = new int[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); executeTimes[i] = executeTimeLong; actions[i] = action; long oldFailTime = documentDescriptions[index].getFailTime(); if (oldFailTime == -1L) oldFailTime = failTime; failTimes[i] = oldFailTime; int oldFailCount = documentDescriptions[index].getFailRetryCount(); if (oldFailCount == -1) oldFailCount = failCount; else { oldFailCount--; if (failCount != -1 && oldFailCount > failCount) oldFailCount = failCount; } failCounts[i] = oldFailCount; i++; } // Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether // an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],(failTimes==null)?-1L:failTimes[i],(failCounts==null)?-1:failCounts[i]); i++; } database.performCommit(); break; } catch (Error e) { database.signalRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } }
*@param documentDescriptions is the set of description objects for the document that was cleaned. *@param checkTime is the minimum time for the next cleaning attempt. */ public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime) throws ManifoldCFException { Long[] ids = new Long[documentDescriptions.length]; String[] docIDHashes = new String[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); i++; } // Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setUncleaningStatus(ids[i],checkTime); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a cleaning document back to its former state. * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system. *@param documentDescription is the description of the document that was cleaned. *@param checkTime is the minimum time for the next cleaning attempt. */ public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime) throws ManifoldCFException { resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime); } /** Reset a set of deleting documents for further processing in the future. * This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred. * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not. * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's * current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that * special logic is probably not worth it. *@param documentDescriptions is the set of description objects for the document that was processed. *@param checkTime is the minimum time for the next cleaning attempt. 
*/ public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime) throws ManifoldCFException { Long[] ids = new Long[documentDescriptions.length]; String[] docIDHashes = new String[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); i++; } // Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setUndeletingStatus(ids[i],checkTime); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a deleting document back to its former state. * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system. *@param documentDescription is the description object for the document that was cleaned. *@param checkTime is the minimum time for the next cleaning attempt. */ public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime) throws ManifoldCFException { resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime); } /** Reset an active document back to its former state. * This gets done when there's a service interruption and the document cannot be processed yet. * Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any * processing activity occuring before it got called. That assumption appears incorrect, however, so I've opted to now * presume that processing has perhaps occurred. Perfect rollback is thus no longer possible. *@param documentDescription is the description object for the document that was processed. *@param executeTime is the time that the document should be rescanned. *@param failTime is the time that the document should be considered to have failed, if it has not been * successfully read until then. 
*@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE. *@param failCount is the number of retries beyond which a service interruption will be considered a hard failure. */ public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime, int failCount) throws ManifoldCFException { resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount); } /** Eliminate duplicates, and sort */ protected static String[] eliminateDuplicates(String[] docIDHashes) { HashMap map = new HashMap(); int i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i++]; map.put(docIDHash,docIDHash); } String[] rval = new String[map.size()]; i = 0; Iterator iter = map.keySet().iterator(); while (iter.hasNext()) { rval[i++] = (String)iter.next(); } java.util.Arrays.sort(rval); return rval; } /** Build a reorder map, describing how to convert an original index into a reordered index. */ protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes) { HashMap reorderSet = new HashMap(); int i = 0; while (i < reorderedIDHashes.length) { String reorderedIDHash = reorderedIDHashes[i]; Integer position = new Integer(i); reorderSet.put(reorderedIDHash,position); i++; } HashMap map = new HashMap(); int j = 0; while (j < originalIDHashes.length) { String originalIDHash = originalIDHashes[j]; Integer position = (Integer)reorderSet.get(originalIDHash); if (position != null) { map.put(new Integer(j),position); // Remove, so that only one of each duplicate will have a place in the map reorderSet.remove(originalIDHash); } j++; } return map; } /** Add an initial set of documents to the queue. * This method is called during job startup, when the queue is being loaded. * A set of document references is passed to this method, which updates the status of the document * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the local document identifier hashes. *@param docIDs are the local document identifiers. *@param overrideSchedule is true if any existing document schedule should be overridden. *@param hopcountMethod is either accurate, nodelete, or neverdelete. *@param currentTime is the current time in milliseconds since epoch. *@param documentPriorities are the document priorities corresponding to the document identifiers. *@param prereqEventNames are the events that must be completed before each document can be processed. *@return true if the priority value(s) were used, false otherwise. */ public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String[] docIDs, boolean overrideSchedule, int hopcountMethod, long currentTime, double[] documentPriorities, String[][] prereqEventNames) throws ManifoldCFException { if (docIDHashes.length == 0) return new boolean[0]; // The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead. // But, the documentPriorities and the return booleans need to correspond to the initial array. So, after we come up with // our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index.
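// (Editor's sketch) A worked example of the two helpers above, using made-up hashes:
/*
  String[] original  = { "c", "a", "c", "b" };
  String[] reordered = eliminateDuplicates(original);      // { "a", "b", "c" }
  HashMap map        = buildReorderMap(original,reordered);
  // map: 0 -> 2, 1 -> 0, 3 -> 1; original index 2 is absent because the first "c" already
  // claimed reordered position 2, so duplicate entries simply drop out of the map.
*/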
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes); HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes); double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length]; String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][]; String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length]; boolean[] rval = new boolean[docIDHashes.length]; int i = 0; while (i < docIDHashes.length) { Integer newPosition = (Integer)reorderMap.get(new Integer(i)); if (newPosition != null) { reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i]; if (prereqEventNames != null) reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i]; else reorderedDocumentPrerequisites[newPosition.intValue()] = null; reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i]; } rval[i] = false; i++; } long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized // transactions are used. But serialized transactions may require a retry in order // to resolve transaction conflicts. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+ " initial docs and hopcounts for job "+jobID.toString()); // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update) boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length]; int z = 0; while (z < reorderedDocIDHashes.length) { String docIDHash = reorderedDocIDHashes[z]; double docPriority = reorderedDocumentPriorities[z]; String docID = reorderedDocumentIdentifiers[z]; String[] docPrereqs = reorderedDocumentPrerequisites[z]; StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.idField).append(",") .append(jobQueue.statusField).append(",") .append(jobQueue.checkTimeField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.docHashField,docIDHash), new UnitaryClause(jobQueue.jobIDField,jobID)})); sb.append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); boolean priorityUsed; long executeTime = overrideSchedule?0L:-1L; if (set.getRowCount() > 0) { // Found a row, and it is now locked. IResultRow row = set.getRow(0); // Decode the row Long rowID = (Long)row.getValue(jobQueue.idField); int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField); priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs); } else { // Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried. 
jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs); priorityUsed = true; } reorderedRval[z++] = priorityUsed; } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " initial docs for job "+jobID.toString()); if (legalLinkTypes.length > 0) hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " initial docs and hopcounts for job "+jobID.toString()); // Rejigger to correspond with calling order i = 0; while (i < docIDs.length) { Integer finalPosition = (Integer)reorderMap.get(new Integer(i)); if (finalPosition != null) rval[i] = reorderedRval[finalPosition.intValue()]; i++; } return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+ " initial docs for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Add an initial set of remaining documents to the queue. * This method is called during job startup, when the queue is being loaded, to list documents that * were NOT included by calling addDocumentsInitial(). Documents listed here are simply designed to * enable the framework to get rid of old, invalid seeds. They are not queued for processing. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the local document identifier hashes. *@param hopcountMethod is either accurate, nodelete, or neverdelete. */ public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes, int hopcountMethod) throws ManifoldCFException { if (docIDHashes.length == 0) return; String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless the transactions are serialized, // and allows one transaction to see the effects of another transaction before it's been committed. 
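// (Editor's note) The methods in this class share one transaction-retry idiom; the skeleton below
// is a distillation of that pattern as it appears throughout this file, not the exact code of any one method:
/*
  while (true)
  {
    long sleepAmt = 0L;
    database.beginTransaction(database.TRANSACTION_SERIALIZED);
    try
    {
      // ... perform the queue/hopcount work ...
      database.performCommit();
      break;                                    // success: leave the retry loop
    }
    catch (ManifoldCFException e)
    {
      database.signalRollback();
      if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
      {
        sleepAmt = getRandomAmount();           // back off a random amount, then retry
        continue;
      }
      throw e;                                  // anything else is fatal
    }
    catch (Error e)
    {
      database.signalRollback();
      throw e;
    }
    finally
    {
      database.endTransaction();
      sleepFor(sleepAmt);
    }
  }
*/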
while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+ " remaining docs and hopcounts for job "+jobID.toString()); jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes); if (legalLinkTypes.length > 0) hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " remaining docs and hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+ " remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Signal that a seeding pass has been done. * Call this method at the end of a seeding pass. It is used to perform the bookkeeping necessary to * maintain the hopcount table. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param isPartial is set if the seeds provided are only a partial list. Some connectors cannot * supply a full list of seeds on every seeding iteration; this acknowledges that limitation. *@param hopcountMethod describes how to handle deletions for hopcount purposes. */ public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial, int hopcountMethod) throws ManifoldCFException { long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized transactions are used; // otherwise it allows one transaction to see the effects of another transaction before it's been committed.
while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to start finishing initial docs and hopcounts for job "+jobID.toString()); jobQueue.doneDocumentsInitial(jobID,isPartial); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to finish initial docs for job "+jobID.toString()); if (legalLinkTypes.length > 0) hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to finish initial docs and hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get the specified hop counts, with the limit as described. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the hashes for the set of documents to find the hopcount for. *@param linkType is the kind of link to find the hopcount for. *@param limit is the limit, beyond which a negative distance may be returned. *@param hopcountMethod is the method for managing hopcounts that is in effect. *@return a vector of booleans corresponding to the documents requested. A true value is returned * if the document is within the specified limit, false otherwise. */ public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit, int hopcountMethod) throws ManifoldCFException { if (docIDHashes.length == 0) return new boolean[0]; if (legalLinkTypes.length == 0) throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept"); // The idea is to delay queue processing as much as possible, because that avoids having to wait // on locks and having to repeat our evaluations. // // Luckily, we can glean a lot of information from what's hanging around. Specifically, whatever value // we find in the table is an upper bound on the true hop distance value. So, only if we have documents // that are outside the limit does the queue need to be processed. // // It is therefore really helpful to write in an estimated value for any newly created record, if possible. Even if the // estimate is possibly greater than the true value, a great deal of locking and queue processing will be // avoided. // The flow here is to: // - grab the right hoplock // - process the queue // - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString()); } // Make an answer array. boolean[] rval = new boolean[docIDHashes.length]; // Make a hash of what we still need a definitive answer for. 
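// (Editor's outline) The loop that follows, condensed; this is a paraphrase of the control flow
// below rather than new logic:
/*
  mark every requested document "unanswered";
  while (true)
  {
    ask hopCount.findHopCounts() about the unanswered set;   // cached values are upper bounds
    accept any answer already within the limit;
    if (no unanswered documents remain) return;
    under the job's hoplock, inside a serialized transaction:
      if (hopCount.processQueue(...) was not definitive)
        commit, sleep briefly, and loop around again;
      else
        re-fetch the distances (now exact), commit, record the answers, and return;
  }
*/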
HashMap badAnswers = new HashMap(); int i = 0; while (i < rval.length) { String docIDHash = docIDHashes[i]; rval[i] = false; badAnswers.put(docIDHash,new Integer(i)); i++; } int iterationCount = 0; while (true) { // Ask only about documents we don't have a definitive answer for yet. String[] askDocIDHashes = new String[badAnswers.size()]; i = 0; Iterator iter = badAnswers.keySet().iterator(); while (iter.hasNext()) { askDocIDHashes[i++] = (String)iter.next(); } int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType); i = 0; while (i < distances.length) { int distance = distances[i]; String docIDHash = askDocIDHashes[i]; if (distance != -1 && distance <= limit) { // Found a usable value rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true; } i++; } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+ " hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+ " ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)"); if (badAnswers.size() == 0) return rval; // It appears we need to process the queue. We need to enter the hoplock section // to make sure only one player is updating values at a time. Then, before we exit, we get the // remaining values. askDocIDHashes = new String[badAnswers.size()]; i = 0; iter = badAnswers.keySet().iterator(); while (iter.hasNext()) { askDocIDHashes[i++] = (String)iter.next(); } // Currently, only one thread can possibly process any of the queue at a given time. This is because the queue marks are not set to something // other than the "in queue" value during processing. My instinct is that queue processing is likely to interfere with other queue processing, // so I've taken the route of prohibiting more than one batch of queue processing at a time, for now. String hopLockName = getHopLockName(jobID); long sleepAmt = 0L; lockManager.enterWriteLock(hopLockName); try { database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)"); // The internal queue processing only does 200 at a time. This is a compromise between maximum efficiency (bigger number) // and the requirement that database writes are effectively blocked for a while (which argues for a smaller number). boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod); // If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were // interested in. If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that // until we really ARE done. if (!definitive) { // Sleep a little bit so another thread can have a whack at things sleepAmt = 100L; database.performCommit(); continue; } // Definitive answers found; continue through.
distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType); database.performCommit(); } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } finally { lockManager.leaveWriteLock(hopLockName); sleepFor(sleepAmt); } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+ " hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)"); // All answers are guaranteed to be accurate now. i = 0; while (i < distances.length) { int distance = distances[i]; String docIDHash = askDocIDHashes[i]; if (distance != -1 && distance <= limit) { // Found a usable value rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true; } i++; } return rval; } } /** Get all the current seeds. * Returns the seed document identifiers for a job. *@param jobID is the job identifier. *@return the document identifiers that are currently considered to be seeds. */ public String[] getAllSeeds(Long jobID) throws ManifoldCFException { return jobQueue.getAllSeeds(jobID); } /** Add documents to the queue in bulk. * This method is called during document processing, when a set of document references are discovered. * The document references are passed to this method, which updates the status of the document(s) * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the local document identifier hashes. *@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none. * MUST be present in the case of carrydown information. *@param relationshipType is the optional link type between this document and its parent. Pass null if there * is no relationship with a parent. *@param hopcountMethod is the desired method for managing hopcounts. *@param dataNames are the names of the data to carry down to the child from this parent. *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. If CharacterInput objects are passed in here, * it is the caller's responsibility to clean these up. *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation. *@param documentPriorities are the desired document priorities for the documents. *@param prereqEventNames are the events that must be completed before a document can be queued. *@return an array of boolean values indicating whether or not the passed-in priority value was used or not for each doc id (true if used). */ public boolean[] addDocuments(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String[] docIDs, String parentIdentifierHash, String relationshipType, int hopcountMethod, String[][] dataNames, Object[][][] dataValues, long currentTime, double[] documentPriorities, String[][] prereqEventNames) throws ManifoldCFException { if (docIDs.length == 0) return new boolean[0]; // Sort the id hashes and eliminate duplicates. This will help avoid deadlock conditions. 
// However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are // duplicate document identifiers. HashMap nameMap = new HashMap(); int k = 0; while (k < docIDHashes.length) { String docIDHash = docIDHashes[k]; // If there are duplicates, we need to merge them. HashMap names = (HashMap)nameMap.get(docIDHash); if (names == null) { names = new HashMap(); nameMap.put(docIDHash,names); } String[] nameList = dataNames[k]; Object[][] dataList = dataValues[k]; int z = 0; while (z < nameList.length) { String name = nameList[z]; Object[] values = dataList[z]; HashMap valueMap = (HashMap)names.get(name); if (valueMap == null) { valueMap = new HashMap(); names.put(name,valueMap); } int y = 0; while (y < values.length) { // Calculate the value hash; that's the true key, and the one that cannot be duplicated. String valueHash; if (values[y] instanceof CharacterInput) { // It's a CharacterInput object. valueHash = ((CharacterInput)values[y]).getHashValue(); } else { // It better be a String. valueHash = ManifoldCF.hash((String)values[y]); } valueMap.put(valueHash,values[y]); y++; } z++; } k++; } String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes); HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes); double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length]; String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][]; String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length]; boolean[] rval = new boolean[docIDHashes.length]; int i = 0; while (i < docIDHashes.length) { Integer newPosition = (Integer)reorderMap.get(new Integer(i)); if (newPosition != null) { reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i]; if (prereqEventNames != null) reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i]; else reorderedDocumentPrerequisites[newPosition.intValue()] = null; reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i]; } rval[i] = false; i++; } dataNames = new String[reorderedDocIDHashes.length][]; String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][]; dataValues = new Object[reorderedDocIDHashes.length][][]; k = 0; while (k < reorderedDocIDHashes.length) { String docIDHash = reorderedDocIDHashes[k]; HashMap names = (HashMap)nameMap.get(docIDHash); dataNames[k] = new String[names.size()]; dataHashValues[k] = new String[names.size()][]; dataValues[k] = new Object[names.size()][]; Iterator iter = names.keySet().iterator(); int z = 0; while (iter.hasNext()) { String dataName = (String)iter.next(); (dataNames[k])[z] = dataName; HashMap values = (HashMap)names.get(dataName); (dataHashValues[k])[z] = new String[values.size()]; (dataValues[k])[z] = new Object[values.size()]; Iterator iter2 = values.keySet().iterator(); int y = 0; while (iter2.hasNext()) { String dataValueHash = (String)iter2.next(); Object dataValue = values.get(dataValueHash); ((dataHashValues[k])[z])[y] = dataValueHash; ((dataValues[k])[z])[y] = dataValue; y++; } z++; } k++; } long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, // and allows one transaction to see the effects of another transaction before it's been committed.
while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash); // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update) HashMap existingRows = new HashMap(); for (int z = 0; z < reorderedDocIDHashes.length; z++) { String docIDHash = reorderedDocIDHashes[z]; StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.idField).append(",") .append(jobQueue.statusField).append(",") .append(jobQueue.checkTimeField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.docHashField,docIDHash), new UnitaryClause(jobQueue.jobIDField,jobID)})); sb.append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() > 0) { // Found a row, and it is now locked. IResultRow row = set.getRow(0); // Decode the row Long rowID = (Long)row.getValue(jobQueue.idField); int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField); existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue)); } else { // Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried. jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]); } } // Update all the carrydown data at once, for greatest efficiency. boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues); // Same with hopcount. boolean[] hopcountChangesSeen = null; if (parentIdentifierHash != null && relationshipType != null) hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod); // Loop through the document id's again, and perform updates where needed boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length]; boolean reactivateRemovedHopcountRecords = false; for (int z = 0; z < reorderedDocIDHashes.length; z++) { String docIDHash = reorderedDocIDHashes[z]; JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash); if (jr == null) // It was an insert reorderedRval[z] = true; else { // It was an existing row; do the update logic // The hopcountChangesSeen array describes whether each reference is a new one. This // helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents // to the PENDING state. If the new link ended in an existing record, THEN we need to flip them all!
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(), 0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]), reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]); // Signal if we need to perform the flip if (hopcountChangesSeen != null && hopcountChangesSeen[z]) reactivateRemovedHopcountRecords = true; } } if (reactivateRemovedHopcountRecords) jobQueue.reactivateHopcountRemovedRecords(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash); i = 0; while (i < docIDHashes.length) { Integer finalPosition = (Integer)reorderMap.get(new Integer(i)); if (finalPosition != null) rval[i] = reorderedRval[finalPosition.intValue()]; i++; } return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { sleepAmt = getRandomAmount(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Add a document to the queue. * This method is called during document processing, when a document reference is discovered. * The document reference is passed to this method, which updates the status of the document * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHash is the local document identifier hash value. *@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none. * MUST be present in the case of carrydown information. *@param relationshipType is the optional link type between this document and its parent. Pass null if there * is no relationship with a parent. *@param hopcountMethod is the desired method for managing hopcounts. *@param dataNames are the names of the data to carry down to the child from this parent. *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation. *@param priority is the desired document priority for the document. *@param prereqEventNames are the events that must be completed before the document can be processed. *@return true if the priority value was used, false otherwise. 
*/ public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID, String parentIdentifierHash, String relationshipType, int hopcountMethod, String[] dataNames, Object[][] dataValues, long currentTime, double priority, String[] prereqEventNames) throws ManifoldCFException { return addDocuments(jobID,legalLinkTypes, new String[]{docIDHash},new String[]{docID}, parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames}, new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0]; } /** Complete adding child documents to the queue, for a set of documents. * This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod) throws ManifoldCFException { if (parentIdentifierHashes.length == 0) return new DocumentDescription[0]; DocumentDescription[] rval; if (legalLinkTypes.length == 0) { // Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional // deadlock is possible when a document shares multiple parents, so do the whole retry drill while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } else { long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, // and allows one transaction to see the effects of another transaction before it's been committed. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! 
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } return rval; } /** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation. */ protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes) throws ManifoldCFException { // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes. // The goal is to throw all the children into a hash, to make them unique at the end. HashMap resultHash = new HashMap(); ArrayList list = new ArrayList(); int maxCount = database.getMaxOrClause(); int i = 0; int z = 0; while (i < parentIDHashes.length) { if (z == maxCount) { processParentHashSet(jobID,resultHash,list); list.clear(); z = 0; } list.add(parentIDHashes[i]); i++; z++; } if (z > 0) processParentHashSet(jobID,resultHash,list); // Now, put together the result document list from the hash. DocumentDescription[] rval = new DocumentDescription[resultHash.size()]; i = 0; Iterator iter = resultHash.keySet().iterator(); while (iter.hasNext()) { Long id = (Long)iter.next(); DocumentDescription dd = (DocumentDescription)resultHash.get(id); rval[i++] = dd; } return rval; } /** Helper method: look up rows affected by a restoreRecords operation. */ protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list) throws ManifoldCFException { // The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription // object for return, and so a join is necessary against the jobqueue table. 
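// (Editor's illustration) The expected shape of the statement built below, with illustrative
// names; it is the same join used for the delete path, plus the restriction to "new" carrydown rows:
/*
  SELECT t0.id, t0.dochash, t0.docid
  FROM carrydown t1, jobqueue t0
  WHERE t1.jobid = ? AND t1.parentidhash IN (?, ..., ?)
    AND t0.dochash = t1.childidhash AND t0.jobid = t1.jobid
    AND t1.isnew = ?                               -- bound to ISNEW_BASE
*/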
StringBuilder sb = new StringBuilder("SELECT "); ArrayList newlist = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(carryDown.getTableName()).append(" t1, ") .append(jobQueue.getTableName()).append(" t0 WHERE "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t1."+carryDown.jobIDField,jobID), new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField), new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND "); sb.append("t1.").append(carryDown.newField).append("=?"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); /* sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new MultiClause("t1."+carryDown.parentIDHashField,list), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t1.").append(carryDown.newField).append("=?") .append(")"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); */ IResultSet set = database.performQuery(sb.toString(),newlist,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long id = (Long)row.getValue(jobQueue.idField); String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField); String documentIdentifier = (String)row.getValue(jobQueue.docIDField); resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier)); } } /** Begin an event sequence. *@param eventName is the name of the event. *@return true if the event could be created, or false if it's already there. */ public boolean beginEventSequence(String eventName) throws ManifoldCFException { try { eventManager.createEvent(eventName); return true; } catch (ManifoldCFException e) { if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) return false; throw e; } } /** Complete an event sequence. *@param eventName is the name of the event. */ public void completeEventSequence(String eventName) throws ManifoldCFException { eventManager.destroyEvent(eventName); } /** Requeue a document set because of carrydown changes. * This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the * extent that if one is *already* being processed, it will need to be done over again. *@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed. *@param docPriorities are the document priorities to assign to the documents, if needed. *@return a flag for each document priority, true if it was used, false otherwise. 
*/ public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities) throws ManifoldCFException { if (documentDescriptions.length == 0) return new boolean[0]; // Order the updates by document hash, to prevent deadlock as much as possible. // This map contains the original index of the document id hash. HashMap docHashMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); docHashMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort the hashes java.util.Arrays.sort(docIDHashes); boolean[] rval = new boolean[docIDHashes.length]; // Enter transaction and prepare to look up document states in dochash order while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // This is the map that will contain the rows we found, keyed by docIDHash. HashMap existingRows = new HashMap(); // Loop through hashes in order int j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j]; // Get the index int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue(); // Lookup document description DocumentDescription dd = documentDescriptions[originalIndex]; // Do the query. We can base this on the id column since we have that. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.idField).append(",") .append(jobQueue.statusField).append(",") .append(jobQueue.checkTimeField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // If the row is there, we use its current info to requeue it properly. if (set.getRowCount() > 0) { // Found a row, and it is now locked. IResultRow row = set.getRow(0); // Decode the row Long rowID = (Long)row.getValue(jobQueue.idField); int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField); existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue)); } j++; } // Ok, existingRows contains all the rows we want to try to update. Go through these and update. j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j]; int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue(); JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash); if (jr == null) // It wasn't found, so the doc priority wasn't used. rval[originalIndex] = false; else // It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
            rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(),
              0L,currentTime,true,docPriorities[originalIndex],null);
          j++;
        }

        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    return rval;
  }

  /** Requeue a document because of carrydown changes.
  * This method is called when carrydown data is modified for a document.  The document must be requeued for immediate reprocessing, even to the
  * extent that if it is *already* being processed, it will need to be done over again.
  *@param documentDescription is the description object for the document that has had its parent carrydown information changed.
  *@param docPriority is the document priority to assign to the document, if needed.
  *@return a flag for the document priority, true if it was used, false otherwise.
  */
  public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority)
    throws ManifoldCFException {
    return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0];
  }

  /** Sleep a random amount of time after a transaction abort.
  */
  protected long getRandomAmount() {
    return database.getSleepAmt();
  }

  protected void sleepFor(long amt)
    throws ManifoldCFException {
    database.sleepFor(amt);
  }

  /** Retrieve specific parent data for a given document.
  *@param jobID is the job identifier.
  *@param docIDHash is the document identifier hash value.
  *@param dataName is the kind of data to retrieve.
  *@return the unique data values.
  */
  public String[] retrieveParentData(Long jobID, String docIDHash, String dataName)
    throws ManifoldCFException {
    return carryDown.getDataValues(jobID,docIDHash,dataName);
  }

  /** Retrieve specific parent data for a given document.
  *@param jobID is the job identifier.
  *@param docIDHash is the document identifier hash value.
  *@param dataName is the kind of data to retrieve.
  *@return the unique data values.
  */
  public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName)
    throws ManifoldCFException {
    return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName);
  }

  // These methods support the job threads (which start jobs and end jobs)
  // There is one thread that starts jobs.  It simply looks for jobs which are ready to
  // start, and changes their state accordingly.
  // There is also a pool of threads that end jobs.  These threads wait for a job that
  // looks like it is done, and do completion processing if it is.

  /** Start all jobs in need of starting.
  * This method marks all the appropriate jobs as "in progress", which is all that should be
  * needed to start them.
  * It's also the case that the start event should be logged in the event log.  In order to make it possible for
  * the caller to do this logging, a set of job ID's will be returned containing the jobs that
  * were started.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param unwaitList is filled in with the set of job ID objects that were resumed.
  */
  public void startJobs(long currentTime, ArrayList unwaitList)
    throws ManifoldCFException {
    // This method should compare the lasttime field against the current time, for all
    // "not active" jobs, and see if a job should be started.
    //
    // If a job is to be started, then the following occurs:
    // (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to
    //     PURGATORY.
    // (2) The job is labeled as "ACTIVE".
    // (3) The starttime field is set.
    // (4) The endtime field is nulled out.
    //
    // This method also assesses jobs that are ACTIVE or PAUSED to see if they should be
    // converted to ACTIVEWAIT or PAUSEDWAIT.  This would happen if the current time exceeded
    // the value in the "windowend" field for the job.
    //
    // Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become
    // ACTIVE or PAUSED.  This will occur if we have entered a new window for the job.

    // Note well: We can't combine locks across both our lock manager and the database unless we do it consistently.  The
    // consistent practice throughout CF is to do the external locks first, then the database locks.  This particular method
    // thus cannot use cached job description information, because it must throw database locks first against the jobs table.
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // First, query the appropriate fields of all jobs.
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.idField).append(",")
          .append(jobs.lastTimeField).append(",")
          .append(jobs.statusField).append(",")
          .append(jobs.startMethodField).append(",")
          .append(jobs.outputNameField).append(",")
          .append(jobs.connectionNameField)
          .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new MultiClause(jobs.statusField,new Object[]{
              jobs.statusToString(jobs.STATUS_INACTIVE),
              jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
              jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING),
              jobs.statusToString(jobs.STATUS_PAUSEDWAIT),
              jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ")
          .append(jobs.startMethodField).append("!=? FOR UPDATE");
        list.add(jobs.startMethodToString(IJobDescription.START_DISABLE));
        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // Next, we query for the schedule information.  In order to do that, we amass a list of job identifiers that we want schedule info
        // for.
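        // (For reference, the statement assembled above is roughly:
        //    SELECT id,lasttime,status,startmethod,outputname,connectionname FROM jobs
        //      WHERE status IN (...) AND startmethod!=? FOR UPDATE
        //  -- illustrative only; the literal table and column names come from the Jobs class.)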
        Long[] jobIDSet = new Long[set.getRowCount()];
        int i = 0;
        while (i < set.getRowCount()) {
          IResultRow row = set.getRow(i);
          jobIDSet[i++] = (Long)row.getValue(jobs.idField);
        }

        ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet);

        i = 0;
        while (i < set.getRowCount()) {
          IResultRow row = set.getRow(i);

          Long jobID = (Long)row.getValue(jobs.idField);
          int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField));
          String outputName = (String)row.getValue(jobs.outputNameField);
          String connectionName = (String)row.getValue(jobs.connectionNameField);
          ScheduleRecord[] thisSchedule = srSet[i++];

          // Run at specific times
          // We need to start with the start time as given, plus one
          long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1;

          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+
              new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString());

          // Proceed to the current time, and find a match if there is one to be found.
          // If not -> continue
          // We go through *all* the schedule records.  The one that matches that has the latest
          // end time is the one we take.
          Long matchTime = null;
          Long duration = null;
          boolean requestMinimum = false;

          for (int l = 0; l < thisSchedule.length; l++) {
            long trialStartInterval = startInterval;
            ScheduleRecord sr = thisSchedule[l];
            Long thisDuration = sr.getDuration();
            if (startMethod == IJobDescription.START_WINDOWINSIDE &&
              thisDuration != null) {
              // Bump the start interval back before the beginning of the current interval.
              // This will guarantee a start as long as there is time in the window.
              long trialStart = currentTime - thisDuration.longValue();
              if (trialStart < trialStartInterval)
                trialStartInterval = trialStart;
            }

            Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime,
              sr.getDayOfWeek(),
              sr.getDayOfMonth(),
              sr.getMonthOfYear(),
              sr.getYear(),
              sr.getHourOfDay(),
              sr.getMinutesOfHour(),
              sr.getTimezone(),
              thisDuration);

            if (thisMatchTime == null) {
              if (Logging.jobs.isDebugEnabled())
                Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+
                  " to "+new Long(currentTime).toString());
              continue;
            }

            if (Logging.jobs.isDebugEnabled())
              Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+
                " to "+new Long(currentTime).toString());

            if (matchTime == null || thisDuration == null ||
              (duration != null && thisMatchTime.longValue() + thisDuration.longValue() >
                matchTime.longValue() + duration.longValue())) {
              matchTime = thisMatchTime;
              duration = thisDuration;
              requestMinimum = sr.getRequestMinimum();
            }
          }

          if (matchTime == null) {
            jobs.updateLastTime(jobID,currentTime);
            continue;
          }

          int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());

          // Calculate the end of the window
          Long windowEnd = null;
          if (duration != null) {
            windowEnd = new Long(matchTime.longValue()+duration.longValue());
          }

          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+
              matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")");
          }

          int newJobState;
          switch (status) {
          case Jobs.STATUS_INACTIVE:
            // If job was formerly "inactive", do the full startup.
            // Start this job! but with no end time.
            // This does not get logged because the startup thread does the logging.
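            // (Illustrative example, not from the original comments: with START_WINDOWINSIDE and a
            // one-hour duration, a job whose window opened at 02:00 still matches at 02:30, because
            // the trial interval was bumped back by the duration above; windowEnd = matchTime +
            // duration then caps the run that is started here.)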
            jobs.startJob(jobID,windowEnd,requestMinimum);
            jobQueue.clearFailTimes(jobID);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Signalled for job start for job "+jobID);
            }
            break;
          case Jobs.STATUS_ACTIVEWAIT:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd);
            jobQueue.clearFailTimes(jobID);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited job "+jobID);
            }
            break;
          case Jobs.STATUS_ACTIVEWAITSEEDING:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd);
            jobQueue.clearFailTimes(jobID);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited job "+jobID);
            }
            break;
          case Jobs.STATUS_PAUSEDWAIT:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
            }
            break;
          case Jobs.STATUS_PAUSEDWAITSEEDING:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
            }
            break;
          case Jobs.STATUS_PAUSINGWAITING:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
            }
            break;
          case Jobs.STATUS_PAUSINGWAITINGSEEDING:
            unwaitList.add(jobID);
            jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd);
            if (Logging.jobs.isDebugEnabled()) {
              Logging.jobs.debug("Un-waited (but still paused) job "+jobID);
            }
            break;
          default:
            break;
          }
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction starting jobs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Put active or paused jobs in wait state, if they've exceeded their window.
  *@param currentTime is the current time in milliseconds since epoch.
  *@param waitList is filled in with the set of job ID's that were put into a wait state.
  */
  public void waitJobs(long currentTime, ArrayList waitList)
    throws ManifoldCFException {
    // This method assesses jobs that are ACTIVE or PAUSED to see if they should be
    // converted to ACTIVEWAIT or PAUSEDWAIT.  This would happen if the current time exceeded
    // the value in the "windowend" field for the job.
    database.beginTransaction();
    try {
      // First, query the appropriate fields of all jobs.
      StringBuilder sb = new StringBuilder("SELECT ");
      ArrayList list = new ArrayList();

      sb.append(jobs.idField).append(",")
        .append(jobs.statusField)
        .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(jobs.statusField,new Object[]{
            jobs.statusToString(jobs.STATUS_ACTIVE),
            jobs.statusToString(jobs.STATUS_ACTIVESEEDING),
            jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
            jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
            jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
            jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
            jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER),
            jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER),
            jobs.statusToString(jobs.STATUS_PAUSED),
            jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ")
        .append(jobs.windowEndField).append("<? FOR UPDATE");
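      // (Roughly: SELECT id,status FROM jobs WHERE status IN (<the ten states above>) AND
      //  windowend<? FOR UPDATE -- illustrative rendering of the builder calls above.)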
      list.add(new Long(currentTime));
      IResultSet set = database.performQuery(sb.toString(),list,null,null);

      int i = 0;
      while (i < set.getRowCount()) {
        IResultRow row = set.getRow(i++);
        Long jobID = (Long)row.getValue(jobs.idField);
        waitList.add(jobID);

        int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());

        // Make the job wait.
        switch (status) {
        case Jobs.STATUS_ACTIVE:
        case Jobs.STATUS_ACTIVE_UNINSTALLED:
        case Jobs.STATUS_ACTIVE_NOOUTPUT:
        case Jobs.STATUS_ACTIVE_NEITHER:
          jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
          }
          break;
        case Jobs.STATUS_ACTIVESEEDING:
        case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
        case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
        case Jobs.STATUS_ACTIVESEEDING_NEITHER:
          jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end");
          }
          break;
        case Jobs.STATUS_PAUSED:
          jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
          }
          break;
        case Jobs.STATUS_PAUSEDSEEDING:
          jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
          }
          break;
        case Jobs.STATUS_PAUSING:
          jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
          }
          break;
        case Jobs.STATUS_PAUSINGSEEDING:
          jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end");
          }
          break;
        default:
          break;
        }
      }
    } catch (ManifoldCFException e) {
      database.signalRollback();
      throw e;
    } catch (Error e) {
      database.signalRollback();
      throw e;
    } finally {
      database.endTransaction();
    }
  }

  /** Reset job schedule.  This re-evaluates whether the job should be started now.  This method would typically
  * be called after a job's scheduling window has been changed.
  *@param jobID is the job identifier.
  */
  public void resetJobSchedule(Long jobID)
    throws ManifoldCFException {
    // Note:  This is problematic; the expected behavior is for the job to start if "we are within the window",
    // but not to start if the transition to active status was long enough ago.
    // Since there's no "right" way to do this, do nothing for now.

    // This explicitly did NOT work - it caused the job to refire every time it was saved.
    // jobs.updateLastTime(jobID,0L);
  }

  /** Check if the specified job parameters have a 'hit' within the specified interval.
  *@param startTime is the start time.
  *@param currentTimestamp is the end time.
  *@param daysOfWeek is the enumerated days of the week, or null.
  *@param daysOfMonth is the enumerated days of the month, or null.
  *@param months is the enumerated months, or null.
  *@param years is the enumerated years, or null.
  *@param hours is the enumerated hours, or null.
  *@param minutes is the enumerated minutes, or null.
  *@param timezone is the timezone to evaluate the schedule in, or null for the default.
  *@param duration is the window duration in milliseconds, or null if unbounded.
  *@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds
  * from epoch is returned.
  */
  protected static Long checkTimeMatch(long startTime, long currentTimestamp,
    EnumeratedValues daysOfWeek,
    EnumeratedValues daysOfMonth,
    EnumeratedValues months,
    EnumeratedValues years,
    EnumeratedValues hours,
    EnumeratedValues minutes,
    String timezone,
    Long duration) {
    // What we do here is start with the previous timestamp, and advance until we
    // either encounter a match, or we exceed the current timestamp.

    Calendar c;
    if (timezone == null) {
      c = Calendar.getInstance();
    } else {
      c = Calendar.getInstance(TimeZone.getTimeZone(timezone));
    }

    // Get the current starting time
    c.setTimeInMillis(startTime);

    // If there's a duration value, we can't match unless we're within the window.
    // That means we find a match, and then we verify that the end time is greater than the currenttimestamp.
    // If not, we move on (by incrementing).

    // The main loop works off of the calendar and these values.
    while (c.getTimeInMillis() < currentTimestamp) {
      // Round up to the nearest minute, unless at 0 already
      int x = c.get(Calendar.MILLISECOND);
      if (x != c.getMinimum(Calendar.MILLISECOND)) {
        int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x;
        if (amtToAdd < 1)
          amtToAdd = 1;
        c.add(Calendar.MILLISECOND,amtToAdd);
        continue;
      }
      x = c.get(Calendar.SECOND);
      if (x != c.getMinimum(Calendar.SECOND)) {
        int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x;
        if (amtToAdd < 1)
          amtToAdd = 1;
        c.add(Calendar.SECOND,amtToAdd);
        continue;
      }
      boolean startedToCareYet = false;
      x = c.get(Calendar.MINUTE);
      // If we care about minutes, round up, otherwise go to the 0 value
      if (minutes == null) {
        if (x != c.getMinimum(Calendar.MINUTE)) {
          int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x;
          if (amtToAdd < 1)
            amtToAdd = 1;
          c.add(Calendar.MINUTE,amtToAdd);
          continue;
        }
      } else {
        // See if it is a legit value.
        if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE))) {
          // Advance to next legit value
          // We could be clever, but we just advance one
          c.add(Calendar.MINUTE,1);
          continue;
        }
        startedToCareYet = true;
      }
      // Hours
      x = c.get(Calendar.HOUR_OF_DAY);
      if (hours == null) {
        if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY)) {
          int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x;
          if (amtToAdd < 1)
            amtToAdd = 1;
          c.add(Calendar.HOUR_OF_DAY,amtToAdd);
          continue;
        }
      } else {
        if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY))) {
          // next hour
          c.add(Calendar.HOUR_OF_DAY,1);
          continue;
        }
        startedToCareYet = true;
      }
      // Days of month and days of week are at the same level;
      // these advance concurrently.  However, if NEITHER is specified, and nothing
      // earlier was, then we do the 1st of the month.
      x = c.get(Calendar.DAY_OF_WEEK);
      if (daysOfWeek != null) {
        if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK))) {
          // next day
          c.add(Calendar.DAY_OF_WEEK,1);
          continue;
        }
        startedToCareYet = true;
      }
      x = c.get(Calendar.DAY_OF_MONTH);
      if (daysOfMonth == null) {
        // If nothing is specified but the month or the year, do it on the 1st.
        if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH)) {
          // Move as rapidly as possible towards the first of the month.  But in no case, increment
          // less than one day.
          int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x;
          if (amtToAdd < 1)
            amtToAdd = 1;
          c.add(Calendar.DAY_OF_MONTH,amtToAdd);
          continue;
        }
      } else {
        if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH))) {
          // next day
          c.add(Calendar.DAY_OF_MONTH,1);
          continue;
        }
        startedToCareYet = true;
      }
      x = c.get(Calendar.MONTH);
      if (months == null) {
        if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH)) {
          int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x;
          if (amtToAdd < 1)
            amtToAdd = 1;
          c.add(Calendar.MONTH,amtToAdd);
          continue;
        }
      } else {
        if (!months.checkValue(x-c.getMinimum(Calendar.MONTH))) {
          c.add(Calendar.MONTH,1);
          continue;
        }
        startedToCareYet = true;
      }
      x = c.get(Calendar.YEAR);
      if (years != null) {
        if (!years.checkValue(x)) {
          c.add(Calendar.YEAR,1);
          continue;
        }
        startedToCareYet = true;
      }

      // Looks like a match.
      // Last check is to be sure we are in the window, if any.  If we are outside the window,
      // must skip forward.
      if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp) {
        c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND));
        continue;
      }

      return new Long(c.getTimeInMillis());
    }
    return null;
  }

  /** Manually start a job.  The specified job will be run REGARDLESS of the timed windows, and
  * will not cease until complete.  If the job is already running, this operation will assure that
  * the job does not pause when its window ends.  The job can be manually paused, or manually aborted.
  *@param jobID is the ID of the job to start.
  */
  public void manualStart(Long jobID)
    throws ManifoldCFException {
    manualStart(jobID,false);
  }

  /** Manually start a job.  The specified job will be run REGARDLESS of the timed windows, and
  * will not cease until complete.  If the job is already running, this operation will assure that
  * the job does not pause when its window ends.  The job can be manually paused, or manually aborted.
  *@param jobID is the ID of the job to start.
  *@param requestMinimum is true if a minimal job run is requested.
  */
  public void manualStart(Long jobID, boolean requestMinimum)
    throws ManifoldCFException {
    database.beginTransaction();
    try {
      // First, query the appropriate fields of all jobs.
      StringBuilder sb = new StringBuilder("SELECT ");
      ArrayList list = new ArrayList();

      sb.append(jobs.statusField)
        .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new UnitaryClause(jobs.idField,jobID)}))
        .append(" FOR UPDATE");
      IResultSet set = database.performQuery(sb.toString(),list,null,null);
      if (set.getRowCount() < 1)
        throw new ManifoldCFException("No such job: "+jobID);

      IResultRow row = set.getRow(0);
      int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString());
      if (status != Jobs.STATUS_INACTIVE)
        throw new ManifoldCFException("Job "+jobID+" is already running");

      IJobDescription jobDescription = jobs.load(jobID,true);

      if (Logging.jobs.isDebugEnabled()) {
        Logging.jobs.debug("Manually starting job "+jobID);
      }

      // Start this job! but with no end time.
      jobs.startJob(jobID,null,requestMinimum);
      jobQueue.clearFailTimes(jobID);

      if (Logging.jobs.isDebugEnabled()) {
        Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent");
      }
    } catch (ManifoldCFException e) {
      database.signalRollback();
      throw e;
    } catch (Error e) {
      database.signalRollback();
      throw e;
    } finally {
      database.endTransaction();
    }
  }

  /** Note job delete started.
  *@param jobID is the job id.
  *@param startTime is the job delete start time.
  */
  public void noteJobDeleteStarted(Long jobID, long startTime)
    throws ManifoldCFException {
    jobs.noteJobDeleteStarted(jobID,startTime);
    if (Logging.jobs.isDebugEnabled())
      Logging.jobs.debug("Job "+jobID+" delete is now started");
  }

  /** Note job started.
  *@param jobID is the job id.
  *@param startTime is the job start time.
  */
  public void noteJobStarted(Long jobID, long startTime)
    throws ManifoldCFException {
    jobs.noteJobStarted(jobID,startTime);
    if (Logging.jobs.isDebugEnabled())
      Logging.jobs.debug("Job "+jobID+" is now started");
  }

  /** Note job seeded.
  *@param jobID is the job id.
  *@param seedTime is the job seed time.
  */
  public void noteJobSeeded(Long jobID, long seedTime)
    throws ManifoldCFException {
    jobs.noteJobSeeded(jobID,seedTime);
    if (Logging.jobs.isDebugEnabled())
      Logging.jobs.debug("Job "+jobID+" has been successfully reseeded");
  }

  /** Prepare for a delete scan.
  *@param jobID is the job id.
  */
  public void prepareDeleteScan(Long jobID)
    throws ManifoldCFException {
    // No special treatment needed for hopcount or carrydown, since these all get deleted at once
    // at the end of the job delete process.
    TrackerClass.notePrecommit();
    jobQueue.prepareDeleteScan(jobID);
    TrackerClass.noteCommit();
  }

  /** Prepare a job to be run.
  * This method is called regardless of the details of the job; what differs is only the flags that are passed in.
  * The code inside will determine the appropriate procedures.
  * (This method replaces prepareFullScan() and prepareIncrementalScan(). )
  *@param jobID is the job id.
  *@param legalLinkTypes are the link types allowed for the job.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@param connectorModel is the model used by the connector for the job.
  *@param continuousJob is true if the job is a continuous one.
  *@param fromBeginningOfTime is true if the job is running starting from time 0.
  *@param requestMinimum is true if the minimal amount of work is requested for the job run.
  */
  public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod,
    int connectorModel, boolean continuousJob, boolean fromBeginningOfTime,
    boolean requestMinimum)
    throws ManifoldCFException {
    // (1) If the connector has MODEL_ADD_CHANGE_DELETE, then
    // we let the connector run the show; there's no purge phase, and therefore the
    // documents are left in a COMPLETED state if they don't show up in the list
    // of seeds that require the attention of the connector.  However, we do need to
    // preload the queue with all the existing documents, if there was any change to the
    // specification information (which will mean that fromBeginningOfTime is set).
    //
    // (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so
    // we do a full scan initialization.
    //
    // (3) If the connector has some other model, we look at the start time.  A start
    // time of 0 implies a full scan, while any other start time implies an incremental
    // scan.

    // Complete connector model is told everything, so no delete phase.
    if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE) {
      if (fromBeginningOfTime)
        queueAllExisting(jobID,legalLinkTypes);
      return;
    }

    // If the connector model is complete via chaining, then we just need to make
    // sure discovery works to queue the changes.
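    // (Editorial gloss: "partial scan" below queues newly discovered changes without scheduling a
    // purge phase; the decision tree for the remaining connector models follows in this method.)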
    if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE) {
      if (fromBeginningOfTime)
        queueAllExisting(jobID,legalLinkTypes);
      else
        jobQueue.preparePartialScan(jobID);
      return;
    }

    // Similarly, minimal crawl attempts no delete phase unless the connector explicitly forbids it, or unless
    // the job criteria have changed.
    if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime) {
      // If it is a chained model, do the partial prep.
      if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD ||
        connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE)
        jobQueue.preparePartialScan(jobID);
      return;
    }

    if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL &&
      (connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime))
      prepareFullScan(jobID,legalLinkTypes,hopcountMethod);
    else
      jobQueue.prepareIncrementalScan(jobID);
  }

  /** Queue all existing.
  *@param jobID is the job id.
  *@param legalLinkTypes are the link types allowed for the job.
  */
  protected void queueAllExisting(Long jobID, String[] legalLinkTypes)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        if (legalLinkTypes.length > 0) {
          jobQueue.reactivateHopcountRemovedRecords(jobID);
        }

        jobQueue.queueAllExisting(jobID);
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Prepare for a full scan.
  *@param jobID is the job id.
  *@param legalLinkTypes are the link types allowed for the job.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  */
  protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      // Since we delete documents here, we need to manage the hopcount part of the world too.
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try {
        // Delete the documents we have never fetched, including any hopcount records we've calculated.
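        // (Illustrative reading: the clause built below matches jobqueue rows, aliased t99, whose
        // status is PENDING or HOPCOUNTREMOVED -- i.e. documents queued but never fetched -- and is
        // handed to hopCount.deleteMatchingDocuments to embed in its own statement.)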
        if (legalLinkTypes.length > 0) {
          ArrayList list = new ArrayList();
          String query = database.buildConjunctionClause(list,new ClauseDescription[]{
            new MultiClause("t99."+jobQueue.statusField,new Object[]{
              jobQueue.statusToString(jobQueue.STATUS_PENDING),
              jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})});
          hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99",
            "t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField,
            query,list,
            hopcountMethod);
        }

        jobQueue.prepareFullScan(jobID);
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Manually abort a running job.  The job will be permanently stopped, and will not run again until
  * automatically started based on schedule, or manually started.
  *@param jobID is the job to abort.
  */
  public void manualAbort(Long jobID)
    throws ManifoldCFException {
    // Just whack status back to "INACTIVE".  The active documents will continue to be processed until done,
    // but that's fine.  There will be no finishing stage, obviously.
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Manually aborting job "+jobID);
    }

    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        jobs.abortJob(jobID,null);
        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
    }
  }

  /** Manually restart a running job.  The job will be stopped and restarted.  Any schedule affinity will be lost,
  * until the job finishes on its own.
  *@param jobID is the job to restart.
  *@param requestMinimum is true if a minimal job run is requested.
  */
  public void manualAbortRestart(Long jobID, boolean requestMinimum)
    throws ManifoldCFException {
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Manually restarting job "+jobID);
    }

    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        jobs.abortRestartJob(jobID,requestMinimum);
        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Job "+jobID+" restart signal successfully sent");
    }
  }

  /** Manually restart a running job.  The job will be stopped and restarted.  Any schedule affinity will be lost,
  * until the job finishes on its own.
  *@param jobID is the job to restart.
  */
  public void manualAbortRestart(Long jobID)
    throws ManifoldCFException {
    manualAbortRestart(jobID,false);
  }

  /** Abort a running job due to a fatal error condition.
  *@param jobID is the job to abort.
  *@param errorText is the error text.
  *@return true if this is the first logged abort request for this job.
  */
  public boolean errorAbort(Long jobID, String errorText)
    throws ManifoldCFException {
    // Just whack status back to "INACTIVE".  The active documents will continue to be processed until done,
    // but that's fine.  There will be no finishing stage, obviously.
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'");
    }

    boolean rval;
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        rval = jobs.abortJob(jobID,errorText);
        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    if (rval && Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Job "+jobID+" abort signal successfully sent");
    }
    return rval;
  }

  /** Pause a job.
  *@param jobID is the job identifier to pause.
  */
  public void pauseJob(Long jobID)
    throws ManifoldCFException {
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Manually pausing job "+jobID);
    }

    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        jobs.pauseJob(jobID);
        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Job "+jobID+" successfully paused");
    }
  }

  /** Restart a paused job.
  *@param jobID is the job identifier to restart.
  */
  public void restartJob(Long jobID)
    throws ManifoldCFException {
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Manually restarting paused job "+jobID);
    }

    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        jobs.restartJob(jobID);
        jobQueue.clearFailTimes(jobID);
        database.performCommit();
        break;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction restarting paused job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
    if (Logging.jobs.isDebugEnabled()) {
      Logging.jobs.debug("Job "+jobID+" successfully restarted");
    }
  }

  /** Get the list of jobs that are ready for seeding.
  *@return jobs that are active and are running in adaptive mode.  These will be seeded
  * based on what the connector says should be added to the queue.
  */
  public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Do the query
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.idField).append(",")
          .append(jobs.lastCheckTimeField).append(",")
          .append(jobs.reseedIntervalField)
          .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ")
          .append(jobs.typeField).append("=? AND ")
          .append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)")
          .append(" FOR UPDATE");
        list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS));
        list.add(new Long(currentTime));
        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // Update them all
        JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()];
        int i = 0;
        while (i < rval.length) {
          IResultRow row = set.getRow(i);
          Long jobID = (Long)row.getValue(jobs.idField);
          Long x = (Long)row.getValue(jobs.lastCheckTimeField);
          long synchTime = 0;
          if (x != null)
            synchTime = x.longValue();

          Long r = (Long)row.getValue(jobs.reseedIntervalField);
          Long reseedTime;
          if (r != null)
            reseedTime = new Long(currentTime + r.longValue());
          else
            reseedTime = null;

          // Mark status of job as "active/seeding".  Special status is needed so that abort
          // will not complete until seeding is completed.
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Marked job "+jobID+" for seeding");
          }

          rval[i] = new JobSeedingRecord(jobID,synchTime);
          i++;
        }
        database.performCommit();
        return rval;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Get the list of jobs that are ready for deletion.
  *@return jobs that were in the "readyfordelete" state.
  */
  public JobDeleteRecord[] getJobsReadyForDelete()
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Do the query
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // Update them all
        JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()];
        int i = 0;
        while (i < rval.length) {
          IResultRow row = set.getRow(i);
          Long jobID = (Long)row.getValue(jobs.idField);

          // Mark status of job as "starting delete"
          jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Marked job "+jobID+" for delete startup");
          }

          rval[i] = new JobDeleteRecord(jobID);
          i++;
        }
        database.performCommit();
        return rval;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted getting jobs ready for delete: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Get the list of jobs that are ready for startup.
  *@return jobs that were in the "readyforstartup" state.  These will be marked as being in the "starting up" state.
  */
  public JobStartRecord[] getJobsReadyForStartup()
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Do the query
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.idField).append(",")
          .append(jobs.lastCheckTimeField).append(",")
          .append(jobs.statusField)
          .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new MultiClause(jobs.statusField,new Object[]{
              jobs.statusToString(jobs.STATUS_READYFORSTARTUP),
              jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // Update them all
        JobStartRecord[] rval = new JobStartRecord[set.getRowCount()];
        int i = 0;
        while (i < rval.length) {
          IResultRow row = set.getRow(i);
          Long jobID = (Long)row.getValue(jobs.idField);
          Long x = (Long)row.getValue(jobs.lastCheckTimeField);
          int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));
          boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL);
          long synchTime = 0;
          if (x != null)
            synchTime = x.longValue();

          // Mark status of job as "starting"
          jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP);
          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Marked job "+jobID+" for startup");
          }

          rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum);
          i++;
        }
        database.performCommit();
        return rval;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Inactivate a job, from the notification state.
  *@param jobID is the ID of the job to inactivate.
  */
  public void inactivateJob(Long jobID)
    throws ManifoldCFException {
    // While there is no flow that can cause a job to be in the wrong state when this gets called, as a precaution
    // it might be a good idea to put this in a transaction and have the state get checked first.
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Check job status
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.idField,jobID)}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        if (set.getRowCount() == 0)
          throw new ManifoldCFException("No such job: "+jobID);

        IResultRow row = set.getRow(0);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));

        switch (status) {
        case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
          jobs.notificationComplete(jobID);
          break;
        default:
          throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a job starting for delete back to "ready for delete"
  * state.
  *@param jobID is the job id.
  */
  public void resetStartDeleteJob(Long jobID)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Check job status
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.idField,jobID)}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        if (set.getRowCount() == 0)
          throw new ManifoldCFException("No such job: "+jobID);

        IResultRow row = set.getRow(0);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));

        switch (status) {
        case Jobs.STATUS_DELETESTARTINGUP:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state");

          // Set the state of the job back to "ReadyForDelete"
          jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE);
          break;
        default:
          throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a job that is notifying back to "ready for notify"
  * state.
  *@param jobID is the job id.
  */
  public void resetNotifyJob(Long jobID)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Check job status
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.idField,jobID)}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        if (set.getRowCount() == 0)
          throw new ManifoldCFException("No such job: "+jobID);

        IResultRow row = set.getRow(0);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));

        switch (status) {
        case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state");

          // Set the state of the job back to "ReadyForNotify"
          jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY);
          break;
        default:
          throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted resetting notify job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a starting job back to "ready for startup" state.
  *@param jobID is the job id.
  */
  public void resetStartupJob(Long jobID)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Check job status
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.idField,jobID)}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        if (set.getRowCount() == 0)
          throw new ManifoldCFException("No such job: "+jobID);

        IResultRow row = set.getRow(0);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));

        switch (status) {
        case Jobs.STATUS_STARTINGUP:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state");

          // Set the state of the job back to "ReadyForStartup"
          jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP);
          break;
        case Jobs.STATUS_STARTINGUPMINIMAL:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state");

          // Set the state of the job back to "ReadyForStartupMinimal"
          jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL);
          break;
        case Jobs.STATUS_ABORTINGSTARTINGUP:
        case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state");
          jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
          break;
        case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state");
          jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
          break;
        case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state");
          jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
          break;
        case Jobs.STATUS_READYFORSTARTUP:
        case Jobs.STATUS_READYFORSTARTUPMINIMAL:
        case Jobs.STATUS_ABORTING:
        case Jobs.STATUS_ABORTINGFORRESTART:
        case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
          // ok
          break;
        default:
          throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted resetting startup job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a seeding job back to "active" state.
  *@param jobID is the job id.
  */
  public void resetSeedJob(Long jobID)
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      database.beginTransaction();
      try {
        // Check job status
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.idField,jobID)}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        if (set.getRowCount() == 0)
          throw new ManifoldCFException("No such job: "+jobID);

        IResultRow row = set.getRow(0);
        int status = jobs.stringToStatus((String)row.getValue(jobs.statusField));

        switch (status) {
        case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state");

          // Set the state of the job back to "Active_Uninstalled"
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED);
          break;
        case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state");

          // Set the state of the job back to "Active_NoOutput"
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT);
          break;
        case Jobs.STATUS_ACTIVESEEDING_NEITHER:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state");

          // Set the state of the job back to "Active_Neither"
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER);
          break;
        case Jobs.STATUS_ACTIVESEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state");

          // Set the state of the job back to "Active"
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVE);
          break;
        case Jobs.STATUS_ACTIVEWAITSEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state");

          // Set the state of the job back to "ActiveWait"
          jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT);
          break;
        case Jobs.STATUS_PAUSEDSEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state");

          // Set the state of the job back to "Paused"
          jobs.writeStatus(jobID,jobs.STATUS_PAUSED);
          break;
        case Jobs.STATUS_PAUSEDWAITSEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state");

          // Set the state of the job back to "PausedWait"
          jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT);
          break;
        case Jobs.STATUS_ABORTINGSEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state");

          // Set the state of the job back to "Aborting"
          jobs.writeStatus(jobID,jobs.STATUS_ABORTING);
          break;
        case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state");

          // Set the state of the job back to "AbortingForRestart"
          jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART);
          break;
        case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
          if (Logging.jobs.isDebugEnabled())
            Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state");

          // Set the state of the job back to "AbortingForRestartMinimal"
          jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL);
          break;
        case Jobs.STATUS_ABORTING:
        case Jobs.STATUS_ABORTINGFORRESTART:
        case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
        case Jobs.STATUS_ACTIVE:
        case Jobs.STATUS_ACTIVE_UNINSTALLED:
        case Jobs.STATUS_ACTIVE_NOOUTPUT:
        case Jobs.STATUS_ACTIVE_NEITHER:
        case Jobs.STATUS_PAUSED:
        case Jobs.STATUS_ACTIVEWAIT:
        case Jobs.STATUS_PAUSEDWAIT:
          // ok
          break;
        default:
          throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status));
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Delete jobs in need of being deleted (which are marked "ready for delete").
  * This method is meant to be called periodically to perform delete processing on jobs.
  */
  public void deleteJobsReadyForDelete()
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      // This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested
      // document.  Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other
      // threads, so eventually a job will become eligible.  This happens when there are no records that have an ingested
      // status: complete, purgatory, being-cleaned, being-deleted, or pending purgatory.
      database.beginTransaction();
      try {
        // The original query was:
        //
        // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND
        //   t1.status IN ('C', 'F', 'G'))
        //
        // However, this did not work well with Postgres when the tables got big.  So I revised things to do the following multi-stage process:
        // (1) The query should be broken up, such that n queries are done:
        //     (a) the first one should get all candidate jobs (those that have the right state)
        //     (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
        // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow
        // early exit!!

        // Do the first query, getting the candidate jobs to be considered
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();

        sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);

        // Now, loop through this list.  For each one, verify that it's okay to delete it.
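        // (Roughly, per candidate job: SELECT id FROM jobqueue WHERE jobid=? AND status IN
        //  (ELIGIBLEFORDELETE, BEINGDELETED) LIMIT 1 -- any hit means the job cannot be deleted yet.
        //  Illustrative rendering of the builder calls below.)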
        int i = 0;
        while (i < set.getRowCount()) {
          IResultRow row = set.getRow(i++);
          Long jobID = (Long)row.getValue(jobs.idField);

          list.clear();
          sb = new StringBuilder("SELECT ");
          sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
            .append(database.buildConjunctionClause(list,new ClauseDescription[]{
              new UnitaryClause(jobQueue.jobIDField,jobID),
              new MultiClause(jobQueue.statusField,new Object[]{
                jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
                jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})}))
            .append(" ").append(database.constructOffsetLimitClause(0,1));

          IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);

          if (confirmSet.getRowCount() > 0)
            continue;

          ManifoldCF.noteConfigurationChange();

          // Remove documents from job queue
          jobQueue.deleteAllJobRecords(jobID);

          // Remove carrydowns for the job
          carryDown.deleteOwner(jobID);

          // Nothing is in a critical section - so this should be OK.
          hopCount.deleteOwner(jobID);

          jobs.delete(jobID);

          if (Logging.jobs.isDebugEnabled()) {
            Logging.jobs.debug("Removed job "+jobID);
          }
        }
        database.performCommit();
        return;
      } catch (ManifoldCFException e) {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      } catch (Error e) {
        database.signalRollback();
        throw e;
      } finally {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Put all eligible jobs in the "shutting down" state.
  */
  public void finishJobs()
    throws ManifoldCFException {
    while (true) {
      long sleepAmt = 0L;
      // The jobs we should transition:
      // - are active
      // - have no ACTIVE, PENDING, ACTIVEPURGATORY, or PENDINGPURGATORY records
      database.beginTransaction();
      try {
        // The query I used to emit was:
        // SELECT jobid FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
        //   t0.id=t1.jobid AND t1.status IN ('A','P','F','G'))
        // This did not get along well with Postgresql, so instead this is what is now done:
        // (1) The query should be broken up, such that n queries are done:
        //     (a) the first one should get all candidate jobs (those that have the right state)
        //     (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1
        // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow
        // early exit!!
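        // (The LIMIT 1 probe per job is the early exit the NOT EXISTS form failed to deliver: one
        //  cheap bounded lookup per candidate instead of a scan of the whole jobqueue.  Editorial
        //  gloss on the note above.)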
        // Do the first query, getting the candidate jobs to be considered
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();
        sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new MultiClause(jobs.statusField,new Object[]{
              jobs.statusToString(jobs.STATUS_ACTIVE),
              jobs.statusToString(jobs.STATUS_ACTIVEWAIT),
              jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED),
              jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT),
              jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        int i = 0;
        while (i < set.getRowCount())
        {
          IResultRow row = set.getRow(i++);
          Long jobID = (Long)row.getValue(jobs.idField);

          // Check to be sure the job is a candidate for shutdown
          sb = new StringBuilder("SELECT ");
          list.clear();
          sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
            .append(database.buildConjunctionClause(list,new ClauseDescription[]{
              new UnitaryClause(jobQueue.jobIDField,jobID),
              new MultiClause(jobQueue.statusField,new Object[]{
                jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
                jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
                jobQueue.statusToString(jobQueue.STATUS_PENDING),
                jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
                jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
                jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
            .append(" ").append(database.constructOffsetLimitClause(0,1));

          IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
          if (confirmSet.getRowCount() > 0)
            continue;

          // Mark status of job as "finishing"
          jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN);
          if (Logging.jobs.isDebugEnabled())
          {
            Logging.jobs.debug("Marked job "+jobID+" for shutdown");
          }
        }
        database.performCommit();
        return;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted finishing jobs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Find the list of jobs that need to have their connectors notified of job completion.
  *@return the ID's of jobs that need their output connectors notified in order to become inactive.
  */
  public JobNotifyRecord[] getJobsReadyForInactivity()
    throws ManifoldCFException
  {
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Do the query
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();
        sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
          .append(database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))}))
          .append(" FOR UPDATE");
        IResultSet set = database.performQuery(sb.toString(),list,null,null);
        // Return them all
        JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()];
        int i = 0;
        while (i < rval.length)
        {
          IResultRow row = set.getRow(i);
          Long jobID = (Long)row.getValue(jobs.idField);
          // Mark status of job as "notifying of completion"
          jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION);
          if (Logging.jobs.isDebugEnabled())
          {
            Logging.jobs.debug("Found job "+jobID+" in need of notification");
          }
          rval[i++] = new JobNotifyRecord(jobID);
        }
        database.performCommit();
        return rval;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Complete the sequence that resumes jobs, either from a pause or from a scheduling window
  * wait.  The logic will restore the job to an active state (many possibilities depending on
  * connector status), and will record the jobs that have been so modified.
  *@param timestamp is the current time in milliseconds since epoch.
  *@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed.
  */
  public void finishJobResumes(long timestamp, ArrayList modifiedJobs)
    throws ManifoldCFException
  {
    // Do the first query, getting the candidate jobs to be considered
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append(jobs.idField)
      .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{
        new MultiClause(jobs.statusField,new Object[]{
          jobs.statusToString(jobs.STATUS_RESUMING),
          jobs.statusToString(jobs.STATUS_RESUMINGSEEDING)
          })}));
    IResultSet set = database.performQuery(sb.toString(),list,null,null);
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i++);
      Long jobID = (Long)row.getValue(jobs.idField);

      // There are no secondary checks that need to be made; just resume
      IJobDescription jobDesc = jobs.load(jobID,true);
      modifiedJobs.add(jobDesc);
      jobs.finishResumeJob(jobID,timestamp);
      if (Logging.jobs.isDebugEnabled())
      {
        Logging.jobs.debug("Resumed job "+jobID);
      }
    }
  }

  /** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling
  * window.  The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT),
  * and will record the jobs that have been so modified.
  *@param timestamp is the current time in milliseconds since epoch.
  *@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
  */
  public void finishJobStops(long timestamp, ArrayList modifiedJobs)
    throws ManifoldCFException
  {
    // The query I used to emit was:
    // SELECT jobid FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
    //      t0.id=t1.jobid AND t1.status IN ('A','F'))
    // Now the query is broken up so that Postgresql behaves more efficiently.

    // Do the first query, getting the candidate jobs to be considered
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append(jobs.idField)
      .append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{
        new MultiClause(jobs.statusField,new Object[]{
          jobs.statusToString(jobs.STATUS_ABORTING),
          jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART),
          jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL),
          jobs.statusToString(jobs.STATUS_PAUSING),
          jobs.statusToString(jobs.STATUS_PAUSINGSEEDING),
          jobs.statusToString(jobs.STATUS_ACTIVEWAITING),
          jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING),
          jobs.statusToString(jobs.STATUS_PAUSINGWAITING),
          jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING)
          })}));
    IResultSet set = database.performQuery(sb.toString(),list,null,null);
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i++);
      Long jobID = (Long)row.getValue(jobs.idField);

      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new UnitaryClause(jobQueue.jobIDField,jobID),
          new MultiClause(jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})}))
        .append(" ").append(database.constructOffsetLimitClause(0,1));

      IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
      if (confirmSet.getRowCount() > 0)
        continue;

      // All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index.
      // See CONNECTORS-290.
      // We do this BEFORE updating the job state.
      jobQueue.clearDocPriorities(jobID);

      IJobDescription jobDesc = jobs.load(jobID,true);
      modifiedJobs.add(jobDesc);
      jobs.finishStopJob(jobID,timestamp);
      if (Logging.jobs.isDebugEnabled())
      {
        Logging.jobs.debug("Stopped job "+jobID);
      }
    }
  }

  /** Reset eligible jobs either back to the "inactive" state, or make them active again.  The
  * latter will occur if the cleanup phase of the job generated more pending documents.
  *
  * This method is used to pick up all jobs in the shutting down state
  * whose purgatory or being-cleaned records have been all processed.
  *
  *@param currentTime is the current time in milliseconds since epoch.
  *@param resetJobs is filled in with the set of IJobDescription objects that were reset.
  */
  public void resetJobs(long currentTime, ArrayList resetJobs)
    throws ManifoldCFException
  {
    // Query for all jobs that fulfill the criteria
    // The query used to look like:
    //
    // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE
    //      t0.id=t1.jobid AND t1.status='P')
    //
    // Now, the query is broken up, for performance

    // Do the first query, getting the candidate jobs to be considered
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{
        new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))}));
    IResultSet set = database.performQuery(sb.toString(),list,null,null);
    int i = 0;
    while (i < set.getRowCount())
    {
      IResultRow row = set.getRow(i++);
      Long jobID = (Long)row.getValue(jobs.idField);

      // Check to be sure the job's cleanup phase is complete (no purgatory or being-cleaned records remain)
      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new UnitaryClause(jobQueue.jobIDField,jobID),
          new MultiClause(jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})}))
        .append(" ").append(database.constructOffsetLimitClause(0,1));

      IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
      if (confirmSet.getRowCount() > 0)
        continue;

      // The shutting-down phase is complete.  However, we need to check if there are any outstanding
      // PENDING or PENDINGPURGATORY records before we can decide what to do.
      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new UnitaryClause(jobQueue.jobIDField,jobID),
          new MultiClause(jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
        .append(" ").append(database.constructOffsetLimitClause(0,1));

      confirmSet = database.performQuery(sb.toString(),list,null,null,1,null);
      if (confirmSet.getRowCount() > 0)
      {
        // This job needs to re-enter the active state.  Make that happen.
        jobs.returnJobToActive(jobID);
        if (Logging.jobs.isDebugEnabled())
        {
          Logging.jobs.debug("Job "+jobID+" is re-entering active state");
        }
      }
      else
      {
        // This job should be marked as finished.
        IJobDescription jobDesc = jobs.load(jobID,true);
        resetJobs.add(jobDesc);
        jobs.finishJob(jobID,currentTime);
        if (Logging.jobs.isDebugEnabled())
        {
          Logging.jobs.debug("Job "+jobID+" now completed");
        }
      }
    }
  }

  // Status reports

  /** Get the status of a job.
  *@return the status object for the specified job.
  */
  @Override
  public JobStatus getStatus(Long jobID)
    throws ManifoldCFException
  {
    return getStatus(jobID,true);
  }

  /** Get a list of all jobs, and their status information.
  *@return an ordered array of job status objects.
  */
  @Override
  public JobStatus[] getAllStatus()
    throws ManifoldCFException
  {
    return getAllStatus(true);
  }

  /** Get a list of running jobs.  This is for status reporting.
  *@return an array of the job status objects.
  */
  @Override
  public JobStatus[] getRunningJobs()
    throws ManifoldCFException
  {
    return getRunningJobs(true);
  }

  /** Get a list of completed jobs, and their statistics.
  *@return an array of the job status objects.
  */
  @Override
  public JobStatus[] getFinishedJobs()
    throws ManifoldCFException
  {
    return getFinishedJobs(true);
  }

  /** Get the status of a job.
  *@param jobID is the job ID.
  *@param includeCounts is true if document counts should be included.
  *@return the status object for the specified job.
  */
  public JobStatus getStatus(Long jobID, boolean includeCounts)
    throws ManifoldCFException
  {
    return getStatus(jobID, includeCounts, Integer.MAX_VALUE);
  }

  /** Get a list of all jobs, and their status information.
  *@param includeCounts is true if document counts should be included.
  *@return an ordered array of job status objects.
  */
  public JobStatus[] getAllStatus(boolean includeCounts)
    throws ManifoldCFException
  {
    return getAllStatus(includeCounts, Integer.MAX_VALUE);
  }

  /** Get a list of running jobs.  This is for status reporting.
  *@param includeCounts is true if document counts should be included.
  *@return an array of the job status objects.
  */
  public JobStatus[] getRunningJobs(boolean includeCounts)
    throws ManifoldCFException
  {
    return getRunningJobs(includeCounts, Integer.MAX_VALUE);
  }

  /** Get a list of completed jobs, and their statistics.
  *@param includeCounts is true if document counts should be included.
  *@return an array of the job status objects.
  */
  public JobStatus[] getFinishedJobs(boolean includeCounts)
    throws ManifoldCFException
  {
    return getFinishedJobs(includeCounts, Integer.MAX_VALUE);
  }

  /** Get the status of a job.
  *@param jobID is the job ID.
  *@param includeCounts is true if document counts should be included.
  *@param maxCount is the maximum number of documents we want to count for each status.
  *@return the status object for the specified job.
  */
  @Override
  public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount)
    throws ManifoldCFException
  {
    ArrayList list = new ArrayList();
    String whereClause = Jobs.idField+"=?";
    list.add(jobID);
    JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount);
    if (records.length == 0)
      return null;
    return records[0];
  }

  /** Get a list of all jobs, and their status information.
  *@param includeCounts is true if document counts should be included.
  *@param maxCount is the maximum number of documents we want to count for each status.
  *@return an ordered array of job status objects.
  */
  public JobStatus[] getAllStatus(boolean includeCounts, int maxCount)
    throws ManifoldCFException
  {
    return makeJobStatus(null,null,includeCounts,maxCount);
  }

  /** Get a list of running jobs.  This is for status reporting.
  *@param includeCounts is true if document counts should be included.
  *@param maxCount is the maximum number of documents we want to count for each status.
  *@return an array of the job status objects.
  */
  @Override
  public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount)
    throws ManifoldCFException
  {
    ArrayList whereParams = new ArrayList();
    String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{
      new MultiClause(Jobs.statusField,new Object[]{
        Jobs.statusToString(Jobs.STATUS_ACTIVE),
        Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING),
        Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED),
        Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED),
        Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT),
        Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT),
        Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER),
        Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER),
        Jobs.statusToString(Jobs.STATUS_PAUSED),
        Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING),
        Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT),
        Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING),
        Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT),
        Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING),
        Jobs.statusToString(Jobs.STATUS_PAUSING),
        Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING),
        Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING),
        Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING),
        Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING),
        Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING),
        Jobs.statusToString(Jobs.STATUS_RESUMING),
        Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING)
        })});
    return makeJobStatus(whereClause,whereParams,includeCounts,maxCount);
  }

  /** Get a list of completed jobs, and their statistics.
  *@param includeCounts is true if document counts should be included.
  *@param maxCount is the maximum number of documents we want to count for each status.
  *@return an array of the job status objects.
  */
  @Override
  public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount)
    throws ManifoldCFException
  {
    StringBuilder sb = new StringBuilder();
    ArrayList whereParams = new ArrayList();
    sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{
      new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ")
      .append(Jobs.endTimeField).append(" IS NOT NULL");
    return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount);
  }

  // Protected methods and classes

  /** Make a job status array from a query result.
  *@param whereClause is the where clause for the jobs we are interested in.
  *@return the status array.
  */
  protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount)
    throws ManifoldCFException
  {
    IResultSet set = database.performQuery("SELECT t0."+
      Jobs.idField+",t0."+
      Jobs.descriptionField+",t0."+
      Jobs.statusField+",t0."+
      Jobs.startTimeField+",t0."+
      Jobs.endTimeField+",t0."+
      Jobs.errorField+
      " FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC",
      whereParams,null,null);

    // Build hashes for set2, set3, and set4
    Map<Long,Long> set2Hash = new HashMap<Long,Long>();
    Map<Long,Long> set3Hash = new HashMap<Long,Long>();
    Map<Long,Long> set4Hash = new HashMap<Long,Long>();
    Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>();
    Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>();
    Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>();
    if (includeCounts)
    {
      // If we are counting all of them anyway, do this via GROUP BY since it will be the fastest.  But
      // otherwise, fire off an individual query at a time.
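      // In outline, the decision below (a restatement of the code that follows, not extra logic) is:
      //   maxCount == Integer.MAX_VALUE -> one GROUP BY query per count type;
      //   otherwise, probe the total queue size with LIMIT maxCount+1:
      //     <= maxCount rows -> GROUP BY is still cheap enough;
      //     >  maxCount rows -> fall back to per-job queries, each capped at maxCount+1 rows.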
      if (maxCount == Integer.MAX_VALUE)
      {
        buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
      }
      else
      {
        // Check if the total matching jobqueue rows exceeds the limit.  If not, we can still use the cheaper query.
        StringBuilder sb = new StringBuilder("SELECT ");
        ArrayList list = new ArrayList();
        sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
          .append(" FROM ").append(jobQueue.getTableName()).append(" t1");
        addWhereClause(sb,list,whereClause,whereParams,false);
        sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));
        IResultSet countResult = database.performQuery(sb.toString(),list,null,null);
        if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount)
        {
          // Too many items in queue; do it the hard way
          buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
        }
        else
        {
          // Cheap way should still work.
          buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact);
        }
      }
    }

    JobStatus[] rval = new JobStatus[set.getRowCount()];
    for (int i = 0; i < rval.length; i++)
    {
      IResultRow row = set.getRow(i);
      Long jobID = (Long)row.getValue(Jobs.idField);
      String description = row.getValue(Jobs.descriptionField).toString();
      int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString());
      Long startTimeValue = (Long)row.getValue(Jobs.startTimeField);
      long startTime = -1;
      if (startTimeValue != null)
        startTime = startTimeValue.longValue();
      Long endTimeValue = (Long)row.getValue(Jobs.endTimeField);
      long endTime = -1;
      if (endTimeValue != null)
        endTime = endTimeValue.longValue();
      String errorText = (String)row.getValue(Jobs.errorField);
      if (errorText != null && errorText.length() == 0)
        errorText = null;

      int rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
      switch (status)
      {
      case Jobs.STATUS_INACTIVE:
        if (errorText != null)
          rstatus = JobStatus.JOBSTATUS_ERROR;
        else
        {
          if (startTime >= 0)
            rstatus = JobStatus.JOBSTATUS_COMPLETED;
          else
            rstatus = JobStatus.JOBSTATUS_NOTYETRUN;
        }
        break;
      case Jobs.STATUS_ACTIVE_UNINSTALLED:
      case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED:
      case Jobs.STATUS_ACTIVE_NOOUTPUT:
      case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT:
      case Jobs.STATUS_ACTIVE_NEITHER:
      case Jobs.STATUS_ACTIVESEEDING_NEITHER:
        rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED;
        break;
      case Jobs.STATUS_ACTIVE:
      case Jobs.STATUS_ACTIVESEEDING:
        rstatus = JobStatus.JOBSTATUS_RUNNING;
        break;
      case Jobs.STATUS_SHUTTINGDOWN:
        rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP;
        break;
      case Jobs.STATUS_READYFORNOTIFY:
      case Jobs.STATUS_NOTIFYINGOFCOMPLETION:
        rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION;
        break;
      case Jobs.STATUS_ABORTING:
      case Jobs.STATUS_ABORTINGSEEDING:
      case Jobs.STATUS_ABORTINGSTARTINGUP:
      case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL:
        rstatus = JobStatus.JOBSTATUS_ABORTING;
        break;
      case Jobs.STATUS_ABORTINGFORRESTART:
      case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL:
      case Jobs.STATUS_ABORTINGFORRESTARTSEEDING:
      case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL:
      case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART:
      case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL:
        rstatus = JobStatus.JOBSTATUS_RESTARTING;
        break;
      case Jobs.STATUS_PAUSING:
      case Jobs.STATUS_PAUSINGSEEDING:
      case Jobs.STATUS_ACTIVEWAITING:
      case Jobs.STATUS_ACTIVEWAITINGSEEDING:
      case Jobs.STATUS_PAUSINGWAITING:
      case Jobs.STATUS_PAUSINGWAITINGSEEDING:
        rstatus = JobStatus.JOBSTATUS_STOPPING;
        break;
      case Jobs.STATUS_RESUMING:
      case Jobs.STATUS_RESUMINGSEEDING:
        rstatus = JobStatus.JOBSTATUS_RESUMING;
        break;
      case Jobs.STATUS_PAUSED:
      case Jobs.STATUS_PAUSEDSEEDING:
        rstatus = JobStatus.JOBSTATUS_PAUSED;
        break;
      case Jobs.STATUS_ACTIVEWAIT:
      case Jobs.STATUS_ACTIVEWAITSEEDING:
        rstatus = JobStatus.JOBSTATUS_WINDOWWAIT;
        break;
      case Jobs.STATUS_PAUSEDWAIT:
      case Jobs.STATUS_PAUSEDWAITSEEDING:
        rstatus = JobStatus.JOBSTATUS_PAUSED;
        break;
      case Jobs.STATUS_STARTINGUP:
      case Jobs.STATUS_STARTINGUPMINIMAL:
      case Jobs.STATUS_READYFORSTARTUP:
      case Jobs.STATUS_READYFORSTARTUPMINIMAL:
        rstatus = JobStatus.JOBSTATUS_STARTING;
        break;
      case Jobs.STATUS_DELETESTARTINGUP:
      case Jobs.STATUS_READYFORDELETE:
      case Jobs.STATUS_DELETING:
      case Jobs.STATUS_DELETING_NOOUTPUT:
        rstatus = JobStatus.JOBSTATUS_DESTRUCTING;
        break;
      default:
        break;
      }

      Long set2Value = set2Hash.get(jobID);
      Long set3Value = set3Hash.get(jobID);
      Long set4Value = set4Hash.get(jobID);
      Boolean set2ExactValue = set2Exact.get(jobID);
      Boolean set3ExactValue = set3Exact.get(jobID);
      Boolean set4ExactValue = set4Exact.get(jobID);

      rval[i] = new JobStatus(jobID.toString(),description,rstatus,
        ((set2Value==null)?0L:set2Value.longValue()),
        ((set3Value==null)?0L:set3Value.longValue()),
        ((set4Value==null)?0L:set4Value.longValue()),
        ((set2ExactValue==null)?true:set2ExactValue.booleanValue()),
        ((set3ExactValue==null)?true:set3ExactValue.booleanValue()),
        ((set4ExactValue==null)?true:set4ExactValue.booleanValue()),
        startTime,endTime,errorText);
    }
    return rval;
  }

  protected static ClauseDescription buildOutstandingClause()
    throws ManifoldCFException
  {
    return new MultiClause(JobQueue.statusField,new Object[]{
      JobQueue.statusToString(JobQueue.STATUS_ACTIVE),
      JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN),
      JobQueue.statusToString(JobQueue.STATUS_PENDING),
      JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
      JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
      JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
  }

  protected static ClauseDescription buildProcessedClause()
    throws ManifoldCFException
  {
    return new MultiClause(JobQueue.statusField,new Object[]{
      JobQueue.statusToString(JobQueue.STATUS_COMPLETE),
      JobQueue.statusToString(JobQueue.STATUS_UNCHANGED),
      JobQueue.statusToString(JobQueue.STATUS_PURGATORY),
      JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY),
      JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
      JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)});
  }

  protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount,
    Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
    Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
    throws ManifoldCFException
  {
    // Fire off an individual query with a limit for each job

    // First, get the list of jobs that we are interested in.
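    // Note on the LIMIT maxCount+1 pattern used below: each count query scans at most
    // maxCount+1 rows, so a result greater than maxCount only means "more than maxCount".
    // In that case the count is recorded as maxCount with its "exact" flag set to false;
    // otherwise the true count is recorded with the flag set to true.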
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0");
    if (whereClause != null)
    {
      sb.append(" WHERE ")
        .append(whereClause);
      if (whereParams != null)
        list.addAll(whereParams);
    }
    IResultSet jobSet = database.performQuery(sb.toString(),list,null,null);

    // Scan the set of jobs
    for (int i = 0; i < jobSet.getRowCount(); i++)
    {
      IResultRow row = jobSet.getRow(i);
      Long jobID = (Long)row.getValue(Jobs.idField);

      // Now, for each job, fire off a separate, limited, query for each count we care about
      // (note: these probes select from jobqueue, so the restriction must be on the jobqueue
      // jobid column, not on the jobs id column)
      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
        .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
      sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
      sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));

      IResultSet totalSet = database.performQuery(sb.toString(),list,null,null);
      if (totalSet.getRowCount() > 0)
      {
        long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue();
        if (rowCount > maxCount)
        {
          set2Hash.put(jobID,new Long(maxCount));
          set2Exact.put(jobID,new Boolean(false));
        }
        else
        {
          set2Hash.put(jobID,new Long(rowCount));
          set2Exact.put(jobID,new Boolean(true));
        }
      }

      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
        .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ");
      sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
      sb.append(" AND ");
      sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
      sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));

      IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null);
      if (outstandingSet.getRowCount() > 0)
      {
        long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue();
        if (rowCount > maxCount)
        {
          set3Hash.put(jobID,new Long(maxCount));
          set3Exact.put(jobID,new Boolean(false));
        }
        else
        {
          set3Hash.put(jobID,new Long(rowCount));
          set3Exact.put(jobID,new Boolean(true));
        }
      }

      sb = new StringBuilder("SELECT ");
      list.clear();
      sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
        .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ");
      sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)}));
      sb.append(" AND ");
      sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
      sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false));

      IResultSet processedSet = database.performQuery(sb.toString(),list,null,null);
      if (processedSet.getRowCount() > 0)
      {
        long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue();
        if (rowCount > maxCount)
        {
          set4Hash.put(jobID,new Long(maxCount));
          set4Exact.put(jobID,new Boolean(false));
        }
        else
        {
          set4Hash.put(jobID,new Long(rowCount));
          set4Exact.put(jobID,new Boolean(true));
        }
      }
    }
  }

  protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams,
    Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash,
    Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact)
    throws ManifoldCFException
  {
    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append(JobQueue.jobIDField).append(",")
      .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
      .append(" FROM ").append(jobQueue.getTableName()).append(" t1");
    addWhereClause(sb,list,whereClause,whereParams,false);
    sb.append(" GROUP BY ").append(JobQueue.jobIDField);
    IResultSet set2 = database.performQuery(sb.toString(),list,null,null);

    sb = new StringBuilder("SELECT ");
    list.clear();
    sb.append(JobQueue.jobIDField).append(",")
      .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
      .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()}));
    addWhereClause(sb,list,whereClause,whereParams,true);
    sb.append(" GROUP BY ").append(JobQueue.jobIDField);
    IResultSet set3 = database.performQuery(sb.toString(),list,null,null);

    sb = new StringBuilder("SELECT ");
    list.clear();
    sb.append(JobQueue.jobIDField).append(",")
      .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount")
      .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()}));
    addWhereClause(sb,list,whereClause,whereParams,true);
    sb.append(" GROUP BY ").append(JobQueue.jobIDField);
    IResultSet set4 = database.performQuery(sb.toString(),list,null,null);

    for (int j = 0; j < set2.getRowCount(); j++)
    {
      IResultRow row = set2.getRow(j);
      Long jobID = (Long)row.getValue(JobQueue.jobIDField);
      set2Hash.put(jobID,(Long)row.getValue("doccount"));
      set2Exact.put(jobID,new Boolean(true));
    }
    for (int j = 0; j < set3.getRowCount(); j++)
    {
      IResultRow row = set3.getRow(j);
      Long jobID = (Long)row.getValue(JobQueue.jobIDField);
      set3Hash.put(jobID,(Long)row.getValue("doccount"));
      set3Exact.put(jobID,new Boolean(true));
    }
    for (int j = 0; j < set4.getRowCount(); j++)
    {
      IResultRow row = set4.getRow(j);
      Long jobID = (Long)row.getValue(JobQueue.jobIDField);
      set4Hash.put(jobID,(Long)row.getValue("doccount"));
      set4Exact.put(jobID,new Boolean(true));
    }
  }

  protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent)
  {
    if (whereClause != null)
    {
      if (wherePresent)
        sb.append(" AND");
      else
        sb.append(" WHERE");

      sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ")
        .append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ")
        .append(whereClause)
        .append(")");
      if (whereParams != null)
        list.addAll(whereParams);
    }
  }

  // These methods generate reports for direct display in the UI.

  /** Run a 'document status' report.
  *@param connectionName is the name of the connection.
  *@param filterCriteria are the criteria used to limit the records considered for the report.
  *@param sortOrder is the specified sort order of the final report.
  *@param startRow is the first row to include.
  *@param rowCount is the number of rows to include.
  *@return the results, with the following columns: identifier, job, state, status, scheduled, action, retrycount, retrylimit.  The "scheduled" column and the
  * "retrylimit" column are long values representing a time; all other values will be user-friendly strings.
  */
  public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
    int startRow, int rowCount)
    throws ManifoldCFException
  {
    // Build the query.
    Long currentTime = new Long(System.currentTimeMillis());

    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append("t0.").append(jobQueue.idField).append(" AS id,")
      .append("t0.").append(jobQueue.docIDField).append(" AS identifier,")
      .append("t1.").append(jobs.descriptionField).append(" AS job,")
      .append("CASE")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Out of scope'")
      .append(" ELSE 'Unknown'")
      .append(" END AS state,")
      .append("CASE")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
      .append(" THEN 'Inactive'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
      .append(" THEN 'Ready for processing'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
      .append(" THEN 'Ready for expiration'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
      .append(" THEN 'Waiting for processing'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
      .append(" THEN 'Waiting for expiration'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL")
      .append(" THEN 'Waiting forever'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append("=?")
      .append(" THEN 'Hopcount exceeded'")
      .append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)")
      .append(" THEN 'Deleting'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" THEN 'Processing'")
      .append(" WHEN ")
      .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)")
      .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?")
      .append(" THEN 'Expiring'")
      .append(" ELSE 'Unknown'")
      .append(" END AS status,")
      .append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,")
      .append("CASE")
      .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'")
      .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Expire'")
      .append(" ELSE 'Unknown'")
      .append(" END AS action,")
      .append("t0.").append(jobQueue.failCountField).append(" AS retrycount,")
      .append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit")
      .append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ")
      .append(database.buildConjunctionClause(list,new ClauseDescription[]{
        new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)}));

    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));

    addCriteria(sb,list,"t0.",connectionName,filterCriteria,true);
    // The intrinsic ordering is provided by the "id" column, and nothing else.
    addOrdering(sb,new String[]{"id"},sortOrder);
    addLimits(sb,startRow,rowCount);
    return database.performQuery(sb.toString(),list,null,null,rowCount,null);
  }

  /** Run a 'queue status' report.
  *@param connectionName is the name of the connection.
  *@param filterCriteria are the criteria used to limit the records considered for the report.
  *@param sortOrder is the specified sort order of the final report.
  *@param idBucketDescription is the bucket description for generating the identifier class.
  *@param startRow is the first row to include.
  *@param rowCount is the number of rows to include.
  *@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting, processready, expireready, processwaiting, expirewaiting
  */
  public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder,
    BucketDescription idBucketDescription, int startRow, int rowCount)
    throws ManifoldCFException
  {
    // SELECT substring(docid FROM '<id_regexp>') AS idbucket,
    //        substring(entityidentifier FROM '<id_regexp>') AS idbucket,
    //        SUM(CASE WHEN status='C' then 1 else 0 end)) AS inactive FROM jobqueue WHERE <criteria>
    //        GROUP BY idbucket

    Long currentTime = new Long(System.currentTimeMillis());

    StringBuilder sb = new StringBuilder("SELECT ");
    ArrayList list = new ArrayList();
    sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,")
      .append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,")
      .append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT ");
    addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription);
    sb.append(" AS idbucket,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?,?)")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" AS inactive,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?,?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as processing,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?,?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as expiring,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?,?)")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as deleting,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as processready,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString())
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as expireready,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as processwaiting,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append(jobQueue.checkActionField).append("=?")
      .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString())
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as expirewaiting,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append(" IN (?,?)")
      .append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as waitingforever,")
      .append("CASE")
      .append(" WHEN ")
      .append(jobQueue.statusField).append("=?")
      .append(" THEN 1 ELSE 0")
      .append(" END")
      .append(" as hopcountexceeded");
    sb.append(" FROM ").append(jobQueue.getTableName());

    list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED));
    list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING));
    list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY));
    list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED));
    addCriteria(sb,list,"",connectionName,filterCriteria,false);
    sb.append(") t1 GROUP BY idbucket");
    addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder);
    addLimits(sb,startRow,rowCount);
    return database.performQuery(sb.toString(),list,null,null,rowCount,null);
  }

  // Protected methods for report generation

  /** Turn a bucket description into a return column.
  * This is complicated by the fact that the extraction code is inherently case sensitive.  So if case insensitive is
  * desired, that means we whack the whole thing to lower case before doing the match.
  */
  protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc)
  {
    boolean isSensitive = bucketDesc.isSensitive();
    list.add(bucketDesc.getRegexp());
    sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive));
  }

  /** Add criteria clauses to query.
  */
  protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted)
    throws ManifoldCFException
  {
    Long[] matchingJobs = criteria.getJobs();

    if (matchingJobs != null)
    {
      whereEmitted = emitClauseStart(sb,whereEmitted);
      if (matchingJobs.length == 0)
      {
        sb.append("0>1");
      }
      else
      {
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)}));
      }
    }

    RegExpCriteria identifierRegexp = criteria.getIdentifierMatch();
    if (identifierRegexp != null)
    {
      whereEmitted = emitClauseStart(sb,whereEmitted);
      list.add(identifierRegexp.getRegexpString());
      sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive()));
    }

    Long nowTime = new Long(criteria.getNowTime());
    int[] states = criteria.getMatchingStates();
    int[] statuses = criteria.getMatchingStatuses();
    if (states.length == 0 || statuses.length == 0)
    {
      whereEmitted = emitClauseStart(sb,whereEmitted);
      sb.append("0>1");
      return whereEmitted;
    }

    // Iterate through the specified states, and emit a series of OR clauses, one for each state.  The contents of the clause will be complex.
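    // Sketch of the fragment emitted below:
    //   ... AND (<state clause> OR <state clause> ...) AND (<status clause> OR <status clause> ...)
    // where each inner clause constrains the status field (and, for some statuses, the
    // check action and check time fields as well).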
    whereEmitted = emitClauseStart(sb,whereEmitted);
    sb.append("(");
    int k = 0;
    while (k < states.length)
    {
      int stateValue = states[k];
      if (k > 0)
        sb.append(" OR ");
      switch (stateValue)
      {
      case DOCSTATE_NEVERPROCESSED:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})}));
        break;
      case DOCSTATE_PREVIOUSLYPROCESSED:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE),
            jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
            jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
            jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
            jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
            jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
        break;
      case DOCSTATE_OUTOFSCOPE:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
        break;
      }
      k++;
    }
    sb.append(")");

    whereEmitted = emitClauseStart(sb,whereEmitted);
    sb.append("(");
    k = 0;
    while (k < statuses.length)
    {
      int stateValue = statuses[k];
      if (k > 0)
        sb.append(" OR ");
      switch (stateValue)
      {
      case DOCSTATUS_INACTIVE:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_COMPLETE),
            jobQueue.statusToString(jobQueue.STATUS_UNCHANGED),
            jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})}));
        break;
      case DOCSTATUS_PROCESSING:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))}));
        break;
      case DOCSTATUS_EXPIRING:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_ACTIVE),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY),
            jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))}));
        break;
      case DOCSTATUS_DELETING:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED),
            jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED),
            jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})}));
        break;
      case DOCSTATUS_READYFORPROCESSING:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
          new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
        break;
      case DOCSTATUS_READYFOREXPIRATION:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
          new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)}));
        break;
      case DOCSTATUS_WAITINGFORPROCESSING:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)),
          new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
        break;
      case DOCSTATUS_WAITINGFOREXPIRATION:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}),
          new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)),
          new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)}));
        break;
      case DOCSTATUS_WAITINGFOREVER:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_PENDING),
            jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})}))
          .append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL");
        break;
      case DOCSTATUS_HOPCOUNTEXCEEDED:
        sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{
          new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{
            jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}));
        break;
      }
      k++;
    }
    sb.append(")");

    return whereEmitted;
  }

  /** Emit a WHERE or an AND, depending... */
  protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted)
  {
    if (whereEmitted)
      sb.append(" AND ");
    else
      sb.append(" WHERE ");
    return true;
  }

  /** Add ordering.
  */
  protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort)
  {
    // Keep track of the fields we've seen
    Map hash = new HashMap();

    // Emit the "Order by"
    sb.append(" ORDER BY ");

    // Go through the specified list
    int i = 0;
    int count = sort.getCount();
    while (i < count)
    {
      if (i > 0)
        sb.append(",");
      String column = sort.getColumn(i);
      sb.append(column);
      if (sort.getDirection(i) == sort.SORT_ASCENDING)
        sb.append(" ASC");
      else
        sb.append(" DESC");
      hash.put(column,column);
      i++;
    }

    // Now, go through the complete field list, and emit sort criteria for everything
    // not actually specified.  This is so LIMIT and OFFSET give consistent results.
    int j = 0;
    while (j < completeFieldList.length)
    {
      String field = completeFieldList[j];
      if (hash.get(field) == null)
      {
        if (i > 0)
          sb.append(",");
        sb.append(field);
        sb.append(" DESC");
        //if (j == 0)
        //  sb.append(" DESC");
        //else
        //  sb.append(" ASC");
        i++;
      }
      j++;
    }
  }

  /** Add limit and offset.
  */
  protected void addLimits(StringBuilder sb, int startRow, int maxRowCount)
  {
    sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount));
  }

  /** Class for tracking existing jobqueue row data */
  protected static class JobqueueRecord
  {
    protected Long recordID;
    protected int status;
    protected Long checkTimeValue;

    public JobqueueRecord(Long recordID, int status, Long checkTimeValue)
    {
      this.recordID = recordID;
      this.status = status;
      this.checkTimeValue = checkTimeValue;
    }

    public Long getRecordID()
    {
      return recordID;
    }

    public int getStatus()
    {
      return status;
    }

    public Long getCheckTimeValue()
    {
      return checkTimeValue;
    }
  }

  /** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */
  private static int EXTRA_FACTOR = 2;

  /** This class provides the throttling limits for the job queueing query.
  */
  protected static class ThrottleLimit implements ILimitChecker
  {
    // For each connection, there is (a) a number (which is the maximum per bin), and (b)
    // a current running count per bin.  These are stored as elements in a hash map.
    protected HashMap connectionMap = new HashMap();

    // The maximum number of jobs that have reached their chunk size limit that we
    // need
    protected int n;

    // This is the hash table that maps a job ID to the object that tracks the number
    // of documents already accumulated for this resultset.  The count of the number
    // of queue records we have is tallied by going through each job in this table
    // and adding the records outstanding for it.
    protected HashMap jobQueueHash = new HashMap();

    // This is the map from jobid to connection name
    protected HashMap jobConnection = new HashMap();

    // This is the set of allowed connection names.  We discard all documents that are
    // not from that set.
    protected HashMap activeConnections = new HashMap();

    // This is the number of documents per set per connection.
    protected HashMap setSizes = new HashMap();

    // These are the individual connection maximums, keyed by connection name.
    protected HashMap maxConnectionCounts = new HashMap();

    // This is the maximum number of documents per set over all the connections we are looking at.  This helps us establish a sanity limit.
    protected int maxSetSize = 0;

    // This is the number of documents processed so far
    protected int documentsProcessed = 0;

    // This is where we accumulate blocking documents.  This is an arraylist of DocumentDescription objects.
    protected ArrayList blockingDocumentArray = new ArrayList();

    // Cutoff time for documents eligible for prioritization
    protected long prioritizationTime;

    /** Constructor.
    * This class is built up piecemeal, so the constructor does nothing.
    *@param n is the maximum number of full job descriptions we want at this time.
    */
    public ThrottleLimit(int n, long prioritizationTime)
    {
      this.n = n;
      this.prioritizationTime = prioritizationTime;
      Logging.perf.debug("Limit instance created");
    }

    /** Transfer blocking documents discovered to BlockingDocuments object */
    public void tallyBlockingDocuments(BlockingDocuments blockingDocuments)
    {
      int i = 0;
      while (i < blockingDocumentArray.size())
      {
        DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++);
        blockingDocuments.addBlockingDocument(dd);
      }
      blockingDocumentArray.clear();
    }

    /** Add a job/connection name map entry.
    *@param jobID is the job id.
    *@param connectionName is the connection name.
    */
    public void addJob(Long jobID, String connectionName)
    {
      jobConnection.put(jobID,connectionName);
    }

    /** Add an active connection.
This is the pool of active connections that will be used for the lifetime of this operation. *@param connectionName is the connection name. */ public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance) throws ManifoldCFException { activeConnections.put(connectionName,connectorInstance); int setSize = connectorInstance.getMaxDocumentRequest(); setSizes.put(connectionName,new Integer(setSize)); if (setSize > maxSetSize) maxSetSize = setSize; } /** Add a document limit for a specified connection. This is the limit across all matching bins; if any * individual matching bin exceeds that limit, then documents that belong to that bin will be excluded. *@param connectionName is the connection name. *@param regexp is the regular expression, which we will match against various bins. *@param upperLimit is the maximum count associated with the specified job. */ public void addLimit(String connectionName, String regexp, int upperLimit) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'"); ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName); if (ji == null) { ji = new ThrottleJobItem(); connectionMap.put(connectionName,ji); } ji.addLimit(regexp,upperLimit); } /** Set a connection-based total document limit. */ public void setConnectionLimit(String connectionName, int maxDocuments) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName); maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments)); } /** See if this class can be legitimately compared against another of * the same type. *@return true if comparisons will ever return "true". */ public boolean doesCompareWork() { return false; } /** Create a duplicate of this class instance. All current state should be preserved. * NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot * be cached, and therefore duplicate() is never called from the query executor. But it can * be called from other places. *@return the duplicate. */ public ILimitChecker duplicate() { return makeDeepCopy(); } /** Make a deep copy */ public ThrottleLimit makeDeepCopy() { ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime); // Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes) // do not need a deep copy. rval.activeConnections = activeConnections; rval.setSizes = setSizes; rval.maxConnectionCounts = maxConnectionCounts; rval.maxSetSize = maxSetSize; rval.jobConnection = jobConnection; // The structures where counts are maintained DO need a deep copy. rval.documentsProcessed = documentsProcessed; Iterator iter; iter = connectionMap.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate()); } iter = jobQueueHash.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate()); } return rval; } /** Find the hashcode for this class. This will only ever be used if * doesCompareWork() returns true. *@return the hashcode. */ public int hashCode() { return 0; } /** Compare two objects and see if equal. This will only ever be used * if doesCompareWork() returns true. 
*@param object is the object to compare against. *@return true if equal. */ public boolean equals(Object object) { return false; } /** Get the remaining documents we should query for. *@return the maximal remaining count. */ public int getRemainingDocuments() { return EXTRA_FACTOR * n * maxSetSize - documentsProcessed; } /** See if a result row should be included in the final result set. *@param row is the result row to check. *@return true if it should be included, false otherwise. */ public boolean checkInclude(IResultRow row) throws ManifoldCFException { // Note: This method does two things: First, it insures that the number of documents per job per bin does // not exceed the calculated throttle number. Second, it keeps track of how many document queue items // will be needed, so we can stop when we've got enough for the moment. Logging.perf.debug("Checking if row should be included"); // This is the end that does the work. // The row passed in has the following jobqueue columns: idField, jobIDField, docIDField, and statusField Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField); // Get the connection name for this row String connectionName = (String)jobConnection.get(jobIDValue); if (connectionName == null) { Logging.perf.debug(" Row does not have an eligible job - excluding"); return false; } IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName); if (connectorInstance == null) { Logging.perf.debug(" Row does not have an eligible connector instance - excluding"); return false; } // Find the connection limit for this document MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName); if (connectionLimit != null) { if (connectionLimit.intValue() == 0) { Logging.perf.debug(" Row exceeds its connection limit - excluding"); return false; } connectionLimit.decrement(); } // Tally this item in the job queue hash, so we can detect when to stop QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue); if (queueItem == null) { // Need to talk to the connector to get a max number of docs per chunk int maxCount = ((Integer)setSizes.get(connectionName)).intValue(); queueItem = new QueueHashItem(maxCount); jobQueueHash.put(jobIDValue,queueItem); } String docIDHash = (String)row.getValue(JobQueue.docHashField); String docID = (String)row.getValue(JobQueue.docIDField); // Figure out what the right bins are, given the data we have. // This will involve a call to the connector. String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID); // Keep the running count, so we can abort without going through the whole set. documentsProcessed++; //scanRecord.addBins(binNames); ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName); // If there is no schedule-based throttling on this connection, we're done. if (item == null) { queueItem.addDocument(); Logging.perf.debug(" Row has no throttling - including"); return true; } int j = 0; while (j < binNames.length) { if (item.isEmpty(binNames[j])) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding"); Object o = row.getValue(JobQueue.prioritySetField); if (o == null || ((Long)o).longValue() <= prioritizationTime) { // Need to add a document descriptor based on this row to the blockingDocuments object! // This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't // be there. 
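          // (Rows whose priority was stamped at or before prioritizationTime - i.e. more than
          // ten minutes ago - or never stamped at all are the ones worth recording here;
          // anything stamped since then was already handled on this prioritization pass.)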
          Long id = (Long)row.getValue(JobQueue.idField);
          Long jobID = (Long)row.getValue(JobQueue.jobIDField);
          DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID);
          blockingDocumentArray.add(dd);
        }
        return false;
      }
      j++;
    }
    j = 0;
    while (j < binNames.length)
    {
      item.decrement(binNames[j++]);
    }
    queueItem.addDocument();
    Logging.perf.debug(" Including!");
    return true;
  }

  /** See if we should examine another row.
  *@return true if we need to keep going, or false if we are done.
  */
  public boolean checkContinue()
    throws ManifoldCFException
  {
    if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize)
      return false;
    // If the number of chunks exceeds n, we are done
    Iterator iter = jobQueueHash.keySet().iterator();
    int count = 0;
    while (iter.hasNext())
    {
      Long jobID = (Long)iter.next();
      QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID);
      count += item.getChunkCount();
      if (count > n)
        return false;
    }
    return true;
  }
}

/** This class contains information per job on how many queue items have so far been accumulated.
*/
protected static class QueueHashItem
{
  // The number of items per chunk for this job
  int itemsPerChunk;
  // The number of chunks so far, INCLUDING incomplete chunks
  int chunkCount = 0;
  // The number of documents in the current incomplete chunk
  int currentDocumentCount = 0;

  /** Construct.
  *@param itemsPerChunk is the number of items per chunk for this job.
  */
  public QueueHashItem(int itemsPerChunk)
  {
    this.itemsPerChunk = itemsPerChunk;
  }

  /** Duplicate. */
  public QueueHashItem duplicate()
  {
    QueueHashItem rval = new QueueHashItem(itemsPerChunk);
    rval.chunkCount = chunkCount;
    rval.currentDocumentCount = currentDocumentCount;
    return rval;
  }

  /** Add a document to this job. */
  public void addDocument()
  {
    currentDocumentCount++;
    if (currentDocumentCount == 1)
      chunkCount++;
    if (currentDocumentCount == itemsPerChunk)
      currentDocumentCount = 0;
  }

  /** Get the number of chunks.
  *@return the number of chunks.
  */
  public int getChunkCount()
  {
    return chunkCount;
  }
}

/** This class represents the information stored PER JOB in the throttling structure.
* In this structure, "remaining" counts are kept for each bin.  When the bin becomes empty,
* then no more documents that would map to that bin will be returned, for this query.
*
* The way in which the maximum count per bin is determined is not part of this class.
*/
protected static class ThrottleJobItem
{
  /** These are the bin limits.  This is an array of ThrottleLimitSpec objects. */
  protected ArrayList throttleLimits = new ArrayList();
  /** This is a map of the bins and their current counts.  If an entry doesn't exist, it's considered to be
  * the same as the maximum count for that bin. */
  protected HashMap binCounts = new HashMap();

  /** Constructor. */
  public ThrottleJobItem()
  {
  }

  /** Add a bin limit.
  *@param regexp is the regular expression describing the bins to which the limit applies.
  *@param maxCount is the maximum number of fetches allowed for that bin.
  */
  public void addLimit(String regexp, int maxCount)
  {
    try
    {
      throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount));
    }
    catch (PatternSyntaxException e)
    {
      // Ignore the bad entry; it just won't contribute any throttling.
    }
  }

  /** Create a duplicate of this item.
  *@return the duplicate.
  */
  public ThrottleJobItem duplicate()
  {
    ThrottleJobItem rval = new ThrottleJobItem();
    rval.throttleLimits = throttleLimits;
    Iterator iter = binCounts.keySet().iterator();
    while (iter.hasNext())
    {
      String key = (String)iter.next();
      // Copy each count into the duplicate (NOT back into this object)
      rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate());
    }
    return rval;
  }

  /** Check if the specified bin is empty.
  *@param binName is the bin name.
  *@return true if empty.
  */
  public boolean isEmpty(String binName)
  {
    MutableInteger value = (MutableInteger)binCounts.get(binName);
    int remaining;
    if (value == null)
    {
      int x = findMaxCount(binName);
      if (x == -1)
        return false;
      remaining = x;
    }
    else
      remaining = value.intValue();
    return (remaining == 0);
  }

  /** Decrement specified bin.
  *@param binName is the bin name.
  */
  public void decrement(String binName)
  {
    MutableInteger value = (MutableInteger)binCounts.get(binName);
    if (value == null)
    {
      int x = findMaxCount(binName);
      if (x == -1)
        return;
      value = new MutableInteger(x);
      binCounts.put(binName,value);
    }
    value.decrement();
  }

  /** Given a bin name, find the max value for it using the regexps that are in place.
  *@param binName is the bin name.
  *@return the max count for that bin, or -1 if infinite.
  */
  protected int findMaxCount(String binName)
  {
    // Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com"
    //
    // We want to be able to do a couple of different kinds of things easily.  For example, we want to:
    // - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains
    // - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we
    //   can establish a faster rate for .com than for foo.metacarta.com
    //
    // The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number:
    //    ^[^\.] = 8
    //
    // To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate
    // value be chosen when more than one regexp match is found:
    //    ^[^\.] = 8
    //    ^foo\.metacarta\.com = 4
    //
    // To apply different rates for different levels:
    //    ^[^\.] = 8
    //    ^\.[^\.]*\.[^\.]*$ = 20
    //    ^\.[^\.]*$ = 40
    //
    // If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be
    // more what the world wants to do (restrict, rather than increase, fetch rates).
    int maxCount = -1;
    int i = 0;
    while (i < throttleLimits.size())
    {
      ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++);
      Pattern p = spec.getRegexp();
      Matcher m = p.matcher(binName);
      if (m.find())
      {
        int limit = spec.getMaxCount();
        if (maxCount == -1 || limit < maxCount)
          maxCount = limit;
      }
    }
    return maxCount;
  }
}

/** This is a class which describes an individual throttle limit, in fetches. */
protected static class ThrottleLimitSpec
{
  /** Regexp */
  protected Pattern regexp;
  /** The fetch limit for all bins matching that regexp */
  protected int maxCount;

  /** Constructor */
  public ThrottleLimitSpec(String regexp, int maxCount)
    throws PatternSyntaxException
  {
    this.regexp = Pattern.compile(regexp);
    this.maxCount = maxCount;
  }

  /** Get the regexp. */
  public Pattern getRegexp()
  {
    return regexp;
  }

  /** Get the max count */
  public int getMaxCount()
  {
    return maxCount;
  }
}

/** Mutable integer class.
*/
protected static class MutableInteger
{
  int value;

  /** Construct. */
  public MutableInteger(int value)
  {
    this.value = value;
  }

  /** Duplicate */
  public MutableInteger duplicate()
  {
    return new MutableInteger(value);
  }

  /** Decrement.
*/ public void decrement() { value--; } /** Increment. */ public void increment() { value++; } /** Get value. */ public int intValue() { return value; } } }
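// Illustrative sketch of how the minimum rule in findMaxCount() plays out, using
// hypothetical limits mirroring the examples in its comment:
//
//   ThrottleJobItem item = new ThrottleJobItem();
//   item.addLimit("^[^\\.]",8);                // all terminal domains: 8 fetches
//   item.addLimit("^foo\\.metacarta\\.com",4); // tighter limit for one domain
//
// For bin "foo.metacarta.com" both regexps match, so findMaxCount() returns the
// minimum, 4.  After four decrement("foo.metacarta.com") calls, isEmpty() reports
// true and checkInclude() stops returning documents that map to that bin.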
public DocumentSetAndFlags getNextCleanableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which belong to a job that's in a "shutting down" state and are in // a "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. // // SELECT id,jobid,docid FROM jobqueue t0 WHERE t0.status='P' AND EXISTS(SELECT 'x' FROM // jobs t3 WHERE t0.jobid=t3.id AND t3.status='X') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.cleaningJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0],new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the cleaning queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on cleaning queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being cleaned". ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_PURGATORY))})).append(" AND ") .append("(t0.").append(jobQueue.checkTimeField).append(" IS NULL OR t0.").append(jobQueue.checkTimeField).append("<=?) AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) 
AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to cleaning queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name and output connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. 
HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()]; boolean[] rvalBoolean = new boolean[documentIDMap.size()]; i = 0; while (i < compositeIDArray.length) { String compositeDocID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID); // Determine whether we can delete it from the index or not rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null); // Set the record status to "being cleaned" and return it rval[i++] = dd; jobQueue.setCleaningStatus(dd.getID()); } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return new DocumentSetAndFlags(rval,rvalBoolean); } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Create a composite document hash key. This consists of the document id hash plus the * connection name. */ protected static String makeCompositeID(String docIDHash, String connectionName) { return docIDHash + ":" + connectionName; } /** Get list of deletable document descriptions. This list will take into account * multiple jobs that may own the same document. All documents for which a description * is returned will be transitioned to the "beingdeleted" state. Documents which are * not in transition and are eligible, but are owned by other jobs, will have their * jobqueue entries deleted by this method. *@param maxCount is the maximum number of documents to return. *@param currentTime is the current time; some fetches do not occur until a specific time. *@return the document descriptions for these documents. 
*/ public DocumentDescription[] getNextDeletableDocuments(int maxCount, long currentTime) throws ManifoldCFException { // The query will be built here, because it joins the jobs table against the jobqueue // table. // // This query must only pick up documents that are not active in any job and // which either belong to a job that's in a "delete pending" state and are in // a "complete", "purgatory", or "pendingpurgatory" state, OR belong to a job // that's in a "shutting down" state and are in the "purgatory" state. // // We are in fact more conservative in this query than we need to be; the documents // excluded will include some that simply match our criteria, which is designed to // be fast rather than perfect. The match we make is: hashvalue against hashvalue, and // different job id's. // // SELECT id,jobid,docid FROM jobqueue t0 WHERE (t0.status IN ('C','P','G') AND EXISTS(SELECT 'x' FROM // jobs t1 WHERE t0.jobid=t1.id AND t1.status='D') // AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid // AND t2.status IN ('A','F','B')) // // Do a simple preliminary query, since the big query is currently slow, so that we don't waste time during stasis or // ingestion. // Moved outside of transaction, so we have no chance of locking up job status cache key for an extended period of time. if (!jobs.deletingJobsPresent()) return new DocumentDescription[0]; long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to find documents to put on the delete queue"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to put on delete queue"); // Note: This query does not do "FOR UPDATE", because it is running under the only thread that can possibly change the document's state to "being deleted". // If FOR UPDATE was included, deadlock happened a lot. ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(",") .append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.failTimeField).append(",") .append(jobQueue.failCountField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.statusField,jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE))})).append(" AND ") .append("t0.").append(jobQueue.checkTimeField).append("<=? AND "); list.add(new Long(currentTime)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause("t1."+jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING)), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) 
AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,maxCount)); // The checktime is null field check is for backwards compatibility IResultSet set = database.performQuery(sb.toString(),list,null,null,maxCount,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done getting docs to delete queue after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); // We need to organize the returned set by connection name, so that we can efficiently // use getUnindexableDocumentIdentifiers. // This is a table keyed by connection name and containing an ArrayList, which in turn contains DocumentDescription // objects. HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. 
HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash to reduce chances of deadlock. String[] compositeIDArray = new String[documentIDMap.size()]; i = 0; iter = documentIDMap.keySet().iterator(); while (iter.hasNext()) { compositeIDArray[i++] = (String)iter.next(); } java.util.Arrays.sort(compositeIDArray); DocumentDescription[] rval = new DocumentDescription[allowedDocIds.size()]; int j = 0; i = 0; while (i < compositeIDArray.length) { String compositeDocumentID = compositeIDArray[i]; DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocumentID); if (allowedDocIds.get(compositeDocumentID) == null) { // Delete this record and do NOT return it. jobQueue.deleteRecord(dd.getID()); // What should we do about hopcount here? // We are deleting a record which belongs to a job that is being // cleaned up. The job itself will go away when this is done, // and so will all the hopcount stuff pertaining to it. So, the // treatment I've chosen here is to leave the hopcount alone and // let the job cleanup get rid of it at the right time. // Note: carrydown records handled in the same manner... //carryDown.deleteRecords(dd.getJobID(),new String[]{dd.getDocumentIdentifier()}); } else { // Set the record status to "being deleted" and return it rval[j++] = dd; jobQueue.setDeletingStatus(dd.getID()); } i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Done pruning unindexable docs after "+new Long(System.currentTimeMillis()-startTime).toString()+" ms."); return rval; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding deleteable docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get a list of document identifiers that should actually be deleted from the index, from a list that * might contain identifiers that are shared with other jobs, which are targeted to the same output connection. * The input list is guaranteed to be smaller in size than maxInClauseCount for the database. *@param documentIdentifiers is the set of document identifiers to consider. *@param connectionName is the connection name for ALL the document identifiers. 
*@param outputConnectionName is the output connection name for ALL the document identifiers. *@return the set of documents which should be removed from the index. */ protected String[] getUnindexableDocumentIdentifiers(DocumentDescription[] documentIdentifiers, String connectionName, String outputConnectionName) throws ManifoldCFException { // This is where we will count the individual document id's HashMap countMap = new HashMap(); // First thing: Compute the set of document identifier hash values to query against HashMap map = new HashMap(); int i = 0; while (i < documentIdentifiers.length) { String hash = documentIdentifiers[i++].getDocumentIdentifierHash(); map.put(hash,hash); countMap.put(hash,new MutableInteger(0)); } if (map.size() == 0) return new String[0]; // Build a query StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); ArrayList docList = new ArrayList(); Iterator iter = map.keySet().iterator(); while (iter.hasNext()) { docList.add(iter.next()); } // Note: There is a potential race condition here. One job may be running while another is in process of // being deleted. If they share a document, then the delete task could decide to delete the document and do so right // after the ingestion takes place in the running job, but right before the document's status is updated // in the job queue [which would have prevented the deletion]. // Unless a transaction is thrown around the time ingestion is taking place (which is a very bad idea) // we are stuck with the possibility of this condition, which will essentially lead to a document being // missing from the index. // One way of dealing with this is to treat "active" documents as already ingested, for the purpose of // reference counting. Then these documents will not be deleted. The risk then becomes that the "active" // document entry will not be completed (say, because of a restart), and thus the corresponding document // will never be removed from the index. // // Instead, the only solution is to not queue a document for any activity that is inconsistent with activities // that may already be ongoing for that document. For this reason, I have introduced a "BEING_DELETED" // and "BEING_CLEANED" state // for a document. These states will allow the various queries that queue up activities to avoid documents that // are currently being processed elsewhere. sb.append("SELECT t0.").append(jobQueue.docHashField).append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.docHashField,docList)})).append(" AND ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?,?) AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(" AND ") .append("t1.").append(jobs.connectionNameField).append("=? AND ") .append("t1.").append(jobs.outputNameField).append("=?)"); list.add(connectionName); list.add(outputConnectionName); // Do the query, and then count the number of times each document identifier occurs. 
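    // A hash that comes back with a count of exactly 1 is referenced only by this job
    // in the listed states, so its document is safe to remove from the index; e.g. a
    // hash that matches rows from two different jobs gets a count of 2 and is filtered
    // out below.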
IResultSet results = database.performQuery(sb.toString(),list,null,null); i = 0; while (i < results.getRowCount()) { IResultRow row = results.getRow(i++); String docIDHash = (String)row.getValue(jobQueue.docHashField); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi != null) mi.increment(); } // Go through and count only those that have a count of 1. int count = 0; iter = countMap.keySet().iterator(); while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) count++; } String[] rval = new String[count]; iter = countMap.keySet().iterator(); count = 0; while (iter.hasNext()) { String docIDHash = (String)iter.next(); MutableInteger mi = (MutableInteger)countMap.get(docIDHash); if (mi.intValue() == 1) rval[count++] = docIDHash; } return rval; } // These methods support the reprioritization thread. /** Get a list of already-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextAlreadyProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder(); ArrayList list = new ArrayList(); // The desired query is: // SELECT docid FROM jobqueue WHERE prioritysettime < (currentTime) LIMIT (n) sb.append("SELECT ") .append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_COMPLETE), jobQueue.statusToString(JobQueue.STATUS_UNCHANGED), jobQueue.statusToString(JobQueue.STATUS_PURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" "); sb.append(database.constructOffsetLimitClause(0,n)); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Get a list of not-yet-processed documents to reprioritize. Documents in all jobs will be * returned by this method. Up to n document descriptions will be returned. *@param currentTime is the current time stamp for this prioritization pass. Avoid * picking up any documents that are labeled with this timestamp or after. *@param n is the maximum number of document descriptions desired. *@return the document descriptions. */ public DocumentDescription[] getNextNotYetProcessedReprioritizationDocuments(long currentTime, int n) throws ManifoldCFException { StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); // This query MUST return only documents that are in a pending state which belong to an active job!!! 
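    // In outline, the query assembled below is:
    //   SELECT id,dochash,docid,jobid FROM jobqueue t0
    //   WHERE t0.status IN (hopcountremoved,pending,pendingpurgatory)
    //     AND t0.prioritysettime < (currentTime) AND t0.checkaction=(rescan)
    //     AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t1.status IN (...active and
    //       starting-up states...) AND t1.id=t0.jobid)
    //   LIMIT (n)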
sb.append(jobQueue.idField).append(",") .append(jobQueue.docHashField).append(",") .append(jobQueue.docIDField).append(",") .append(jobQueue.jobIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobQueue.statusField,new Object[]{ JobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED), JobQueue.statusToString(jobQueue.STATUS_PENDING), JobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.prioritySetField,"<",new Long(currentTime))})).append(" AND ") .append(jobQueue.checkActionField).append("=?").append(" AND "); list.add(jobQueue.actionToString(JobQueue.ACTION_RESCAN)); // Per CONNECTORS-290, we need to be leaving priorities blank for jobs that aren't using them, // so this will be changed to not include jobs where the priorities have been bashed to null. // // I've included ALL states that might have non-null doc priorities. This includes states // corresponding to uninstalled connectors, since there is no transition that cleans out the // document priorities in these states. The time during which a connector is uninstalled is // expected to be short, because typically this state is the result of an installation procedure // rather than willful action on the part of a user. sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(Jobs.STATUS_STARTINGUP), Jobs.statusToString(Jobs.STATUS_STARTINGUPMINIMAL), Jobs.statusToString(Jobs.STATUS_ACTIVE), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER) }), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(database.constructOffsetLimitClause(0,n)); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); IResultSet set = database.performQuery(sb.toString(),list,null,null,n,null); DocumentDescription[] rval = new DocumentDescription[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); rval[i] =new DocumentDescription((Long)row.getValue(jobQueue.idField), (Long)row.getValue(jobQueue.jobIDField), (String)row.getValue(jobQueue.docHashField), (String)row.getValue(jobQueue.docIDField)); i++; } return rval; } /** Save a set of document priorities. In the case where a document was eligible to have its * priority set, but it no longer is eligible, then the provided priority will not be written. *@param currentTime is the time in milliseconds since epoch. *@param documentDescriptions are the document descriptions. *@param priorities are the desired priorities. 
*/ public void writeDocumentPriorities(long currentTime, DocumentDescription[] documentDescriptions, double[] priorities) throws ManifoldCFException { // Retry loop - in case we get a deadlock despite our best efforts while (true) { // This should be ordered by document identifier hash in order to prevent potential deadlock conditions HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":"+documentDescriptions[i].getJobID(); docIDHashes[i] = documentIDHash; indexMap.put(documentIDHash,new Integer(i)); i++; } java.util.Arrays.sort(docIDHashes); long sleepAmt = 0L; // Start the transaction now database.beginTransaction(); try { // Need to order the writes by doc id. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); DocumentDescription dd = documentDescriptions[index]; double priority = priorities[index]; jobQueue.writeDocPriority(currentTime,dd.getID(),priorities[index]); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Setting document priority for '"+dd.getDocumentIdentifier()+"' to "+new Double(priority).toString()+", set time "+new Long(currentTime).toString()); i++; } database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction writing doc priorities: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get up to the next n documents to be expired. * This method marks the documents whose descriptions have been returned as "being processed", or active. * The same marking is used as is used for documents that have been queued for worker threads. The model * is thus identical. * *@param n is the maximum number of records desired. *@param currentTime is the current time. *@return the array of document descriptions to expire. */ public DocumentSetAndFlags getExpiredDocuments(int n, long currentTime) throws ManifoldCFException { // Screening query // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up // for an extended period of time. if (!jobs.activeJobsPresent()) return new DocumentSetAndFlags(new DocumentDescription[0], new boolean[0]); long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Beginning query to look for documents to expire"); } // Put together a query with a limit of n // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // If FOR UPDATE was included, deadlock conditions would be common because of the complexity of this query. 
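    // In outline, the query assembled below is:
    //   SELECT ... FROM jobqueue t0
    //   WHERE t0.status IN (pending,pendingpurgatory) AND t0.checkaction=(remove)
    //     AND t0.checktime<=(currentTime)
    //     AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t1.status IN (active,activeseeding)
    //       AND t1.id=t0.jobid)
    //     AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t2.dochash=t0.dochash
    //       AND t2.status IN (...active/being-deleted/being-cleaned states...)
    //       AND t2.jobid!=t0.jobid)
    //   LIMIT (n)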
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT "); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.jobIDField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField).append(",") .append("t0.").append(jobQueue.statusField).append(",") .append("t0.").append(jobQueue.failTimeField).append(",") .append("t0.").append(jobQueue.failCountField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,jobQueue.actionToString(JobQueue.ACTION_REMOVE)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",new Long(currentTime))})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})).append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?)").append(" AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField).append(") "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); sb.append(database.constructOffsetLimitClause(0,n)); String query = sb.toString(); // Analyze jobqueue tables unconditionally, since it's become much more sensitive in 8.3 than it used to be. //jobQueue.unconditionallyAnalyzeTables(); ArrayList answers = new ArrayList(); int repeatCount = 0; while (true) { long sleepAmt = 0L; if (Logging.perf.isDebugEnabled()) { repeatCount++; Logging.perf.debug(" Attempt "+Integer.toString(repeatCount)+" to expire documents, after "+ new Long(System.currentTimeMillis() - startTime)+" ms"); } database.beginTransaction(); try { IResultSet set = database.performQuery(query,list,null,null,n,null); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Expiring "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
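        // (Two threads updating the same pair of rows in opposite orders can each hold
        // one row lock while waiting for the other's; sorting the composite IDs first
        // means every thread touches rows in the same order, so that cannot happen.)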
HashMap connectionNameMap = new HashMap(); HashMap documentIDMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String documentIDHash = (String)row.getValue(jobQueue.docHashField); String documentID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); // Failtime is probably not useful in this context, but we'll bring it along for completeness long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = 0; else failCount = (int)failCountValue.longValue(); IJobDescription jobDesc = load(jobID); String connectionName = jobDesc.getConnectionName(); String outputConnectionName = jobDesc.getOutputConnectionName(); DocumentDescription dd = new DocumentDescription((Long)row.getValue(jobQueue.idField), jobID,documentIDHash,documentID,failTime,failCount); String compositeDocumentID = makeCompositeID(documentIDHash,connectionName); documentIDMap.put(compositeDocumentID,dd); statusMap.put(compositeDocumentID,new Integer(status)); Map y = (Map)connectionNameMap.get(connectionName); if (y == null) { y = new HashMap(); connectionNameMap.put(connectionName,y); } ArrayList x = (ArrayList)y.get(outputConnectionName); if (x == null) { // New entry needed x = new ArrayList(); y.put(outputConnectionName,x); } x.add(dd); i++; } // For each bin, obtain a filtered answer, and enter all answers into a hash table. // We'll then scan the result again to look up the right descriptions for return, // and delete the ones that are owned multiply. HashMap allowedDocIds = new HashMap(); Iterator iter = connectionNameMap.keySet().iterator(); while (iter.hasNext()) { String connectionName = (String)iter.next(); Map y = (Map)connectionNameMap.get(connectionName); Iterator outputIter = y.keySet().iterator(); while (outputIter.hasNext()) { String outputConnectionName = (String)outputIter.next(); ArrayList x = (ArrayList)y.get(outputConnectionName); // Do the filter query DocumentDescription[] descriptions = new DocumentDescription[x.size()]; int j = 0; while (j < descriptions.length) { descriptions[j] = (DocumentDescription)x.get(j); j++; } String[] docIDHashes = getUnindexableDocumentIdentifiers(descriptions,connectionName,outputConnectionName); j = 0; while (j < docIDHashes.length) { String docIDHash = docIDHashes[j++]; String key = makeCompositeID(docIDHash,connectionName); allowedDocIds.put(key,docIDHash); } } } // Now, assemble a result, and change the state of the records accordingly // First thing to do is order by document hash, so we reduce the risk of deadlock. 
        String[] compositeIDArray = new String[documentIDMap.size()];
        i = 0;
        iter = documentIDMap.keySet().iterator();
        while (iter.hasNext())
        {
          compositeIDArray[i++] = (String)iter.next();
        }

        java.util.Arrays.sort(compositeIDArray);

        DocumentDescription[] rval = new DocumentDescription[documentIDMap.size()];
        boolean[] rvalBoolean = new boolean[documentIDMap.size()];
        i = 0;
        while (i < compositeIDArray.length)
        {
          String compositeDocID = compositeIDArray[i];
          DocumentDescription dd = (DocumentDescription)documentIDMap.get(compositeDocID);
          // Determine whether we can delete it from the index or not
          rvalBoolean[i] = (allowedDocIds.get(compositeDocID) != null);
          // Set the record status to the appropriate active state and return it
          rval[i++] = dd;
          jobQueue.updateActiveRecord(dd.getID(),((Integer)statusMap.get(compositeDocID)).intValue());
        }

        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();

        return new DocumentSetAndFlags(rval, rvalBoolean);
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction finding docs to expire: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  // This method supports the "queue stuffer" thread

  /** Get up to the next n document(s) to be fetched and processed.
  * This fetch returns records that contain the document identifier, plus all instructions
  * pertaining to the document's handling (e.g. whether it should be refetched if the version
  * has not changed).
  * This method also marks the documents whose descriptions have been returned as "being processed".
  *@param n is the maximum number of records desired.
  *@param currentTime is the current time; some fetches do not occur until a specific time.
  *@param interval is the number of milliseconds that this set of documents should represent (for throttling).
  *@param blockingDocuments is the place to record documents that were encountered, are eligible for reprioritization,
  *  but could not be queued due to throttling considerations.
  *@param statistics are the current performance statistics per connection, which are used to balance the queue stuffing
  *  so that individual connections are not overwhelmed.
  *@param scanRecord retains the bins from all documents encountered from the query, even those that were skipped due
  *  to being overcommitted.
  *@return the array of document descriptions to fetch and process.
  */
  public DocumentDescription[] getNextDocuments(int n, long currentTime, long interval,
    BlockingDocuments blockingDocuments, PerformanceStatistics statistics,
    DepthStatistics scanRecord)
    throws ManifoldCFException
  {
    // NOTE WELL: Jobs that are throttled must control the number of documents that are fetched in
    // a given interval.  Therefore, the returned result has the following constraints on it:
    // 1) There must be no more than n documents returned total;
    // 2) For any given job that is throttled, the total number of documents returned must be
    //    consistent with the time interval provided.
    // In general, this requires the database layer to perform fairly advanced filtering on the
    // result, far in excess of a simple count.
    // An implementation of an interface is therefore going to need to be passed into the
    // performQuery() operation, which prunes the resultset as it is being read into memory.
    // That's a new feature that will need to be added to the database layer.

    // Screening query
    // Moved outside of transaction, so there's less chance of keeping jobstatus cache key tied up
    // for an extended period of time.
    if (!jobs.activeJobsPresent())
      return new DocumentDescription[0];

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to find documents to queue");
    }

    // Below there used to be one large transaction, with multiple read sections and multiple write sections.
    // As part of reducing the chance of postgresql encountering deadlock conditions, I wanted to break this
    // transaction up.  However, the transaction's correctness with respect to throttling depended on making
    // sure that the throttles that were built were based on the same active jobs that the subsequent queries
    // that did the stuffing relied upon.  This made reorganization impossible until I realized that with
    // Postgresql's way of doing transaction isolation this was going to happen anyway, so I needed a more
    // robust solution.
    //
    // Specifically, I chose to change the way documents were queued so that only documents from properly
    // throttled jobs could be queued.  That meant I needed to add stuff to the ThrottleLimit class to track
    // the very knowledge of an active job.  This had the additional benefit of meaning there was no chance of
    // a query occurring from inside a resultset filter.
    //
    // But, after I did this, it was no longer necessary to have such a large transaction either.

    // Anything older than 10 minutes ago is considered eligible for reprioritization.
    long prioritizationTime = currentTime - 60000L * 10L;

    ThrottleLimit vList = new ThrottleLimit(n,prioritizationTime);

    IResultSet jobconnections = jobs.getActiveJobConnections();
    HashMap connectionSet = new HashMap();
    int i = 0;
    while (i < jobconnections.getRowCount())
    {
      IResultRow row = jobconnections.getRow(i++);
      Long jobid = (Long)row.getValue("jobid");
      String connectionName = (String)row.getValue("connectionname");
      vList.addJob(jobid,connectionName);
      connectionSet.put(connectionName,connectionName);
    }

    // Find the active connection names.  We'll load these, and then get throttling info
    // from each one.
    String[] activeConnectionNames = new String[connectionSet.size()];
    Iterator iter = connectionSet.keySet().iterator();
    i = 0;
    while (iter.hasNext())
    {
      activeConnectionNames[i++] = (String)iter.next();
    }
    IRepositoryConnection[] connections = connectionMgr.loadMultiple(activeConnectionNames);

    // Accumulate a sum of the max_connection_count * avg_connection_rate values, so we can calculate the appropriate adjustment
    // factor and set the connection limits.
    HashMap rawFetchCounts = new HashMap();
    double rawFetchCountTotal = 0.0;
    i = 0;
    while (i < connections.length)
    {
      IRepositoryConnection connection = connections[i++];
      String connectionName = connection.getName();
      int maxConnections = connection.getMaxConnections();
      double avgFetchRate = statistics.calculateConnectionFetchRate(connectionName);
      double weightedRawFetchCount = avgFetchRate * (double)maxConnections;
      // Keep the avg rate for later use, since it may get updated before next time we need it.
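      // Worked example with illustrative numbers: two connections with weighted raw
      // fetch counts of 30.0 and 10.0 give rawFetchCountTotal=40.0.  With n=100, the
      // adjustment factor computed below is 100/40=2.5, so the per-connection limits
      // become (int)(30.0*2.5)+5=80 and (int)(10.0*2.5)+5=30 respectively.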
rawFetchCounts.put(connectionName,new Double(weightedRawFetchCount)); rawFetchCountTotal += weightedRawFetchCount; } // Calculate an adjustment factor double fetchCountAdjustmentFactor = ((double)n) / rawFetchCountTotal; // For each job, we must amortize the maximum number of fetches per ms to the actual interval, // and also randomly select an extra fetch based on the fractional probability. (This latter is // necessary for the case where the maximum fetch rate is specified to be pretty low.) // i = 0; while (i < connections.length) { IRepositoryConnection connection = connections[i++]; String connectionName = connection.getName(); // Check if throttled... String[] throttles = connection.getThrottles(); int k = 0; while (k < throttles.length) { // The key is the regexp value itself String throttle = throttles[k++]; float throttleValue = connection.getThrottleValue(throttle); // For the given connection, set the fetch limit per bin. This is calculated using the time interval // and the desired fetch rate. The fractional remainder is used to conditionally provide an "extra fetch" // on a weighted random basis. // // In the future, the connection may specify tuples which pair a regexp describing a set of bins against // a fetch rate. In that case, each fetch rate would need to be turned into a precise maximum // count. double fetchesPerTimeInterval = (double)throttleValue * (double)interval; // Actual amount will be the integer value of this, plus an additional 1 if the random number aligns int fetches = (int)fetchesPerTimeInterval; fetchesPerTimeInterval -= (double)fetches; if (random.nextDouble() <= fetchesPerTimeInterval) fetches++; // Save the limit in the ThrottleLimit structure vList.addLimit(connectionName,throttle,fetches); } // For the overall connection, we also have a limit which is based on the number of connections there are actually available. Double weightedRawFetchCount = (Double)rawFetchCounts.get(connectionName); double adjustedFetchCount = weightedRawFetchCount.doubleValue() * fetchCountAdjustmentFactor; // Note well: Queuing starvation that results from there being very few available documents for high-priority connections is dealt with here by simply allowing // the stuffer thread to keep queuing documents until there are enough. This will be pretty inefficient if there's an active connection that is fast and has lots // of available connection handles, but the bulk of the activity is on slow speed/highly handle limited connections, but I honestly can't think of a better way at the moment. // One good way to correct a bit for this problem is to set a higher document count floor for each connection - say 5 documents - then we won't loop as much. // // Be off in the higher direction rather than the lower; this also prohibits zero values and sets a minimum. int fetchCount = ((int)adjustedFetchCount) + 5; vList.setConnectionLimit(connectionName,fetchCount); } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("After "+new Long(System.currentTimeMillis()-startTime).toString()+" ms, beginning query to look for documents to queue"); // System.out.println("Done building throttle structure"); // Locate records. // Note that we do NOT want to get everything there is to know about the job // using this query, since the file specification may be large and expensive // to parse. We will load a (cached) copy of the job description for that purpose. // // NOTE: This query deliberately excludes documents which may be being processed by another job. 
    // (It actually excludes a bit more than that, because the exact query is impossible to write given
    // the fact that document id's cannot be compared.)  These are documents where there is ANOTHER
    // document entry with the same hash value, a different job id, and a status which is either "active",
    // "activepurgatory", or "beingdeleted".  (It does not check whether the jobs have the same connection or
    // whether the document id's are in fact the same, and therefore may temporarily block legitimate document
    // activity under rare circumstances.)
    //
    // The query I want is:
    // SELECT jobid,docid,status FROM jobqueue t0 WHERE status IN ('P','G') AND checktime <=xxx
    //   AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t0.jobid=t1.id AND t1.status='A')
    //   AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t0.hashval=t2.hashval AND t0.jobid!=t2.jobid
    //     AND t2.status IN ('A','F','D'))
    //   ORDER BY docpriority ASC LIMIT xxx
    //
    // NOTE WELL: The above query did just fine until adaptive recrawling was seriously tried.  Then, because every
    // document in a job was still active, it failed miserably, actually causing Postgresql to stop responding at
    // one point.  Why?  Well, the key thing is the sort criteria - there just isn't any way to sort 1M documents
    // without working with a monster resultset.
    //
    // I introduced a new index as a result - based solely on docpriority - and postgresql now correctly uses that index
    // to pull its results in an ordered fashion.
    //
    // Another subtlety is that I *must* mark the documents active as I find them, so that they do not
    // have any chance of getting returned twice.

    // Accumulate the answers here
    ArrayList answers = new ArrayList();

    // The current time value
    Long currentTimeValue = new Long(currentTime);

    // Always analyze jobqueue before this query.  Otherwise stuffing may get a bad plan, interfering with performance.
    // This turned out to be needed in postgresql 8.3, even though 8.2 worked fine.
    //jobQueue.unconditionallyAnalyzeTables();

    // Loop through priority values
    int currentPriority = 1;
    boolean isDone = false;
    while (!isDone && currentPriority <= 10)
    {
      if (jobs.hasPriorityJobs(currentPriority))
      {
        Long currentPriorityValue = new Long((long)currentPriority);
        fetchAndProcessDocuments(answers,currentTimeValue,currentPriorityValue,vList,connections);
        isDone = !vList.checkContinue();
      }
      currentPriority++;
    }

    // Assert the blocking documents we discovered
    vList.tallyBlockingDocuments(blockingDocuments);

    // Convert the saved answers to an array
    DocumentDescription[] rval = new DocumentDescription[answers.size()];
    i = 0;
    while (i < rval.length)
    {
      rval[i] = (DocumentDescription)answers.get(i);
      i++;
    }

    // After we're done pulling stuff from the queue, find the eligible row with the best priority on the queue, and save the bins for assessment.
    // This is done to decide what the "floor" bincount should be - the idea being that it is wrong to assign priorities for new documents which are
    // higher than the current level that is currently being dequeued.
    //
    // The complicating factor here is that there are indeed many potential *classes* of documents, each of which might have its own current
    // document priority level.  For example, documents could be classed by job, which might make sense because there is a possibility that two jobs'
    // job priorities may differ.  Also, because of document fetch scheduling, each time frame may represent a class in its own right as well.
    // These classes would have to be associated with independent bin counts, if we were to make any use of them.
Then, it would be also necessary // to know what classes a document belonged to in order to be able to calculate its priority. // // An alternative way to proceed is to just have ONE class, and document priorities then get assigned without regard to job, queuing time, etc. // That's the current reality. The code below works in that model, knowing full well that it is an approximation to an ideal. // Find the one row from a live job that has the best document priority, which is available within the current time window. // Note that if there is NO such document, it means we were able to queue all eligible documents, and thus prioritization is probably not even // germane at the moment. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.docPriorityField).append(",").append(jobQueue.jobIDField).append(",") .append(jobQueue.docHashField).append(",").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause(jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause(jobQueue.statusField, new Object[]{jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause(jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField)})) .append(") "); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ jobQueue.docPriorityField, jobQueue.statusField, jobQueue.checkActionField, jobQueue.checkTimeField}, true)).append(" ") .append(database.constructOffsetLimitClause(0,1,true)); IResultSet set = database.performQuery(sb.toString(),list,null,null,1,null); if (set.getRowCount() > 0) { IResultRow row = set.getRow(0); Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); if (docPriority != null && docPriority.doubleValue() < jobQueue.noDocPriorityValue) scanRecord.addBins(docPriority); } return rval; } /** Fetch and process documents matching the passed-in criteria */ protected void fetchAndProcessDocuments(ArrayList answers, Long currentTimeValue, Long currentPriorityValue, ThrottleLimit vList, IRepositoryConnection[] connections) throws ManifoldCFException { // Note well: This query does not do "FOR UPDATE". The reason is that only one thread can possibly change the document's state to active. // When FOR UPDATE was included, deadlock conditions were common because of the complexity of this query. 
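    // Editor's illustration (hedged): the StringBuilder logic below assembles, roughly, a query of
    // the following shape.  The real table and column names come from the jobQueue/jobs field
    // constants, so the identifiers used here (jobqueue, jobs, status, checkaction, checktime,
    // dochash, docpriority, prereqevents, events, ...) are placeholders for illustration only:
    //
    //   SELECT t0.id,t0.jobid,t0.dochash,t0.docid,t0.status,t0.failtime,t0.failcount,t0.priorityset
    //   FROM jobqueue t0 WHERE t0.status IN ('P','G') AND t0.checkaction=? AND t0.checktime<=?
    //     AND EXISTS(SELECT 'x' FROM jobs t1 WHERE t1.status IN (?,?) AND t1.id=t0.jobid AND t1.priority=?)
    //     AND NOT EXISTS(SELECT 'x' FROM jobqueue t2 WHERE t2.dochash=t0.dochash AND t2.status IN (?,?,?,?,?,?)
    //       AND t2.jobid!=t0.jobid)
    //     AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t0.id=t3.ownerid AND t3.name=t4.name)
    //   ORDER BY t0.docpriority,t0.status,t0.checkaction,t0.checktime LIMIT xxx
    //
    // Note that the LIMIT is not part of the text built here; it is tacked on just before execution,
    // once the remaining-document budget is known.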
ArrayList list = new ArrayList(); StringBuilder sb = new StringBuilder("SELECT t0."); sb.append(jobQueue.idField).append(",t0."); if (Logging.scheduling.isDebugEnabled()) sb.append(jobQueue.docPriorityField).append(",t0."); sb.append(jobQueue.jobIDField).append(",t0.") .append(jobQueue.docHashField).append(",t0.") .append(jobQueue.docIDField).append(",t0.") .append(jobQueue.statusField).append(",t0.") .append(jobQueue.failTimeField).append(",t0.") .append(jobQueue.failCountField).append(",t0.") .append(jobQueue.prioritySetField).append(" FROM ").append(jobQueue.getTableName()) .append(" t0 ").append(jobQueue.getGetNextDocumentsIndexHint()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ //new UnitaryClause("t0."+jobQueue.docPriorityField,">=",new Long(0L)), new MultiClause("t0."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(JobQueue.STATUS_PENDING), jobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause("t0."+jobQueue.checkActionField,"=",jobQueue.actionToString(JobQueue.ACTION_RESCAN)), new UnitaryClause("t0."+jobQueue.checkTimeField,"<=",currentTimeValue)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t1."+jobs.statusField,new Object[]{ Jobs.statusToString(jobs.STATUS_ACTIVE), Jobs.statusToString(jobs.STATUS_ACTIVESEEDING)}), new JoinClause("t1."+jobs.idField,"t0."+jobQueue.jobIDField), new UnitaryClause("t1."+jobs.priorityField,currentPriorityValue)})) .append(") AND "); sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.getTableName()).append(" t2 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t2."+jobQueue.docHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t2.").append(jobQueue.statusField).append(" IN (?,?,?,?,?,?) AND ") .append("t2.").append(jobQueue.jobIDField).append("!=t0.").append(jobQueue.jobIDField) .append(") AND "); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); // Prerequisite event clause: AND NOT EXISTS(SELECT 'x' FROM prereqevents t3,events t4 WHERE t3.ownerid=t0.id AND t3.name=t4.name) sb.append("NOT EXISTS(SELECT 'x' FROM ").append(jobQueue.prereqEventManager.getTableName()).append(" t3,").append(eventManager.getTableName()).append(" t4 WHERE t0.") .append(jobQueue.idField).append("=t3.").append(jobQueue.prereqEventManager.ownerField).append(" AND t3.") .append(jobQueue.prereqEventManager.eventNameField).append("=t4.").append(eventManager.eventNameField) .append(")"); sb.append(" ").append(database.constructIndexOrderByClause(new String[]{ "t0."+jobQueue.docPriorityField, "t0."+jobQueue.statusField, "t0."+jobQueue.checkActionField, "t0."+jobQueue.checkTimeField}, true)).append(" "); // Before entering the transaction, we must provide the throttlelimit object with all the connector // instances it could possibly need. The purpose for doing this is to prevent a deadlock where // connector starvation causes database lockup. // // The preallocation of multiple connector instances is certainly a worry. 
If any other part // of the code allocates multiple connector instances also, the potential exists for this to cause // deadlock all by itself. I've therefore built a "grab multiple" and a "release multiple" // at the connector factory level to make sure these requests are properly ordered. String[] orderingKeys = new String[connections.length]; String[] classNames = new String[connections.length]; ConfigParams[] configParams = new ConfigParams[connections.length]; int[] maxConnections = new int[connections.length]; int k = 0; while (k < connections.length) { IRepositoryConnection connection = connections[k]; orderingKeys[k] = connection.getName(); classNames[k] = connection.getClassName(); configParams[k] = connection.getConfigParams(); maxConnections[k] = connection.getMaxConnections(); k++; } IRepositoryConnector[] connectors = RepositoryConnectorFactory.grabMultiple(threadContext,orderingKeys,classNames,configParams,maxConnections); try { // Hand the connectors off to the ThrottleLimit instance k = 0; while (k < connections.length) { vList.addConnectionName(connections[k].getName(),connectors[k]); k++; } // Now we can tack the limit onto the query. Before this point, remainingDocuments would be crap int limitValue = vList.getRemainingDocuments(); sb.append(database.constructOffsetLimitClause(0,limitValue,true)); if (Logging.perf.isDebugEnabled()) { Logging.perf.debug("Queuing documents from time "+currentTimeValue.toString()+" job priority "+currentPriorityValue.toString()+ " (up to "+Integer.toString(vList.getRemainingDocuments())+" documents)"); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { IResultSet set = database.performQuery(sb.toString(),list,null,null,-1,vList); if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Queuing "+Integer.toString(set.getRowCount())+" documents"); // To avoid deadlock, we want to update the document id hashes in order. This means reading into a structure I can sort by docid hash, // before updating any rows in jobqueue. 
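          // Illustrative sketch (editor's note, hypothetical hash values): the composite sort key built
          // below is "<document id hash>:<job id>".  For instance, rows arriving in resultset order
          //   ("ffa0...", job 12), ("0b9e...", job 12), ("ffa0...", job 7)
          // would be updated in lexicographic key order
          //   "0b9e...:12", "ffa0...:12", "ffa0...:7"
          // so that any two threads touching the same rows always lock them in the same global order,
          // which removes the possibility of lock-order deadlock.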
String[] docIDHashes = new String[set.getRowCount()]; Map storageMap = new HashMap(); Map statusMap = new HashMap(); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long id = (Long)row.getValue(jobQueue.idField); Long jobID = (Long)row.getValue(jobQueue.jobIDField); String docIDHash = (String)row.getValue(jobQueue.docHashField); String docID = (String)row.getValue(jobQueue.docIDField); int status = jobQueue.stringToStatus(row.getValue(jobQueue.statusField).toString()); Long failTimeValue = (Long)row.getValue(jobQueue.failTimeField); Long failCountValue = (Long)row.getValue(jobQueue.failCountField); long failTime; if (failTimeValue == null) failTime = -1L; else failTime = failTimeValue.longValue(); int failCount; if (failCountValue == null) failCount = -1; else failCount = (int)failCountValue.longValue(); DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID,failTime,failCount); docIDHashes[i] = docIDHash + ":" + jobID; storageMap.put(docIDHashes[i],dd); statusMap.put(docIDHashes[i],new Integer(status)); if (Logging.scheduling.isDebugEnabled()) { Double docPriority = (Double)row.getValue(jobQueue.docPriorityField); Logging.scheduling.debug("Stuffing document '"+docID+"' that has priority "+docPriority.toString()+" onto active list"); } i++; } // No duplicates are possible here java.util.Arrays.sort(docIDHashes); i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; DocumentDescription dd = (DocumentDescription)storageMap.get(docIDHash); Long id = dd.getID(); int status = ((Integer)statusMap.get(docIDHash)).intValue(); // Set status to "ACTIVE". jobQueue.updateActiveRecord(id,status); answers.add(dd); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finding docs to queue: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } finally { RepositoryConnectorFactory.releaseMultiple(connectors); } } // These methods support the individual fetch/process threads. /** Verify that a specific job is indeed still active. This is used to permit abort or pause to be relatively speedy. * The query done within MUST be cached in order to not cause undue performance degradation. *@param jobID is the job identifier. *@return true if the job is in one of the "active" states. */ public boolean checkJobActive(Long jobID) throws ManifoldCFException { return jobs.checkJobActive(jobID); } /** Verify if a job is still processing documents, or no longer has any outstanding active documents */ public boolean checkJobBusy(Long jobID) throws ManifoldCFException { return jobQueue.checkJobBusy(jobID); } /** Note completion of document processing by a job thread of a document. * This method causes the state of the document to be marked as "completed". *@param documentDescriptions are the description objects for the documents that were processed. */ public void markDocumentCompletedMultiple(DocumentDescription[] documentDescriptions) throws ManifoldCFException { // Before we can change a document status, we need to know the *current* status. Therefore, a SELECT xxx FOR UPDATE/UPDATE // transaction is needed in order to complete these documents correctly. 
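    // Editor's illustration (hedged; the actual table/column names come from the jobQueue field
    // constants): the per-document locking statement issued in the loop below is effectively
    //   SELECT status FROM jobqueue WHERE id=? FOR UPDATE
    // i.e. one narrow row lock per document, acquired in sorted hash order, rather than one broad
    // lock over the whole set.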
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
    //
    HashMap indexMap = new HashMap();
    String[] docIDHashes = new String[documentDescriptions.length];
    int i = 0;
    while (i < documentDescriptions.length)
    {
      String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      docIDHashes[i] = documentIDHash;
      indexMap.put(documentIDHash,new Integer(i));
      i++;
    }

    java.util.Arrays.sort(docIDHashes);

    // Retry loop - in case we get a deadlock despite our best efforts
    while (true)
    {
      long sleepAmt = 0L;

      // Start the transaction now
      database.beginTransaction();
      try
      {
        // Do one row at a time, to avoid deadlocking things
        i = 0;
        while (i < docIDHashes.length)
        {
          String docIDHash = docIDHashes[i];

          // Get the DocumentDescription object
          DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()];

          // Query for the status
          ArrayList list = new ArrayList();
          String query = database.buildConjunctionClause(list,new ClauseDescription[]{
            new UnitaryClause(jobQueue.idField,dd.getID())});
          TrackerClass.notePreread(dd.getID());
          IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+
            query+" FOR UPDATE",list,null,null);
          TrackerClass.noteRead(dd.getID());
          if (set.getRowCount() > 0)
          {
            IResultRow row = set.getRow(0);
            // Grab the status
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            // Update the jobqueue table
            jobQueue.updateCompletedRecord(dd.getID(),status);
          }
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+
            " docs: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Note completion of document processing by a job thread of a document.
  * This method causes the state of the document to be marked as "completed".
  *@param documentDescription is the description object for the document that was processed.
  */
  public void markDocumentCompleted(DocumentDescription documentDescription)
    throws ManifoldCFException
  {
    markDocumentCompletedMultiple(new DocumentDescription[]{documentDescription});
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.
  * These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeletedMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // It's no longer an issue to have to deal with documents being conditionally deleted; that's been
    // taken over by the hopcountremoval method below.  So just use the simple 'delete' functionality.
    return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod);
  }

  /** Delete from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be deleted, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescription is the description object for the document that was processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentDeleted(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription,
    int hopcountMethod)
    throws ManifoldCFException
  {
    return markDocumentDeletedMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod);
  }

  /** Mark hopcount removal from queue as a result of processing of an active document.
  * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING,
  * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN.  The RESCAN variants are interpreted
  * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for
  * a repeat processing attempt.
  *@param documentDescriptions are the set of description objects for the documents that were processed.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  *@return the set of documents for which carrydown data was changed by this operation.  These documents are likely
  * to be requeued as a result of the change.
  */
  public DocumentDescription[] markDocumentHopcountRemovalMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions,
    int hopcountMethod)
    throws ManifoldCFException
  {
    // For each record, we're going to have to choose between marking it as "hopcount removed", and marking
    // it for rescan.  So the basic flow will involve changing a document's status.

    // Before we can change a document status, we need to know the *current* status.  Therefore, a SELECT xxx FOR UPDATE/UPDATE
    // transaction is needed in order to complete these documents correctly.
    //
    // Since we are therefore setting row locks on the jobqueue table, we need to work to avoid unnecessary deadlocking.  To do that, we have to
    // lock rows in document id hash order!!  Luckily, the DocumentDescription objects have a document identifier buried within, which we can use to
    // order the "select for update" operations appropriately.
// HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { String documentIDHash = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); docIDHashes[i] = documentIDHash; indexMap.put(documentIDHash,new Integer(i)); i++; } java.util.Arrays.sort(docIDHashes); // Retry loop - in case we get a deadlock despite our best efforts while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // Do one row at a time, to avoid deadlocking things List<String> deleteList = new ArrayList<String>(); i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; // Get the DocumentDescription object DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()]; // Query for the status ArrayList list = new ArrayList(); String query = database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.idField,dd.getID())}); TrackerClass.notePreread(dd.getID()); IResultSet set = database.performQuery("SELECT "+jobQueue.statusField+" FROM "+jobQueue.getTableName()+" WHERE "+ query+" FOR UPDATE",list,null,null); TrackerClass.noteRead(dd.getID()); if (set.getRowCount() > 0) { IResultRow row = set.getRow(0); // Grab the status int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); // Update the jobqueue table boolean didDelete = jobQueue.updateOrHopcountRemoveRecord(dd.getID(),status); if (didDelete) { deleteList.add(dd.getDocumentIdentifierHash()); } } i++; } String[] docIDSimpleHashes = new String[deleteList.size()]; for (int j = 0; j < docIDSimpleHashes.length; j++) { docIDSimpleHashes[j] = deleteList.get(j); } // Next, find the documents that are affected by carrydown deletion. DocumentDescription[] rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes); // Since hopcount inheritance and prerequisites came from the addDocument() method, // we don't delete them here. TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction marking completed "+Integer.toString(docIDHashes.length)+ " docs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Mark hopcount removal from queue as a result of processing of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. The RESCAN variants are interpreted * as meaning that the document should not be marked as removed, but should instead be popped back on the queue for * a repeat processing attempt. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. 
*/ public DocumentDescription[] markDocumentHopcountRemoval(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentHopcountRemovalMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete from queue as a result of expiration of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired, * no special activity takes place as a result of the document being in a RESCAN state. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentExpiredMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod); } /** Delete from queue as a result of expiration of an active document. * The document is expected to be in one of the active states: ACTIVE, ACTIVESEEDING, * ACTIVENEEDSRESCAN, ACTIVESEEDINGNEEDSRESCAN. Since the document expired, * no special activity takes place as a result of the document being in a RESCAN state. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentExpired(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentExpiredMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete from queue as a result of cleaning up an unreachable document. * The document is expected to be in the PURGATORY state. There is never any need to reprocess the * document. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] markDocumentCleanedUpMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { return doDeleteMultiple(jobID,legalLinkTypes,documentDescriptions,hopcountMethod); } /** Delete from queue as a result of cleaning up an unreachable document. * The document is expected to be in the PURGATORY state. There is never any need to reprocess the * document. *@param documentDescription is the description object for the document that was processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. 
*/ public DocumentDescription[] markDocumentCleanedUp(Long jobID, String[] legalLinkTypes, DocumentDescription documentDescription, int hopcountMethod) throws ManifoldCFException { return markDocumentCleanedUpMultiple(jobID,legalLinkTypes,new DocumentDescription[]{documentDescription},hopcountMethod); } /** Delete documents with no repercussions. We don't have to worry about the current state of each document, * since the document is definitely going away. *@param documentDescriptions are the set of description objects for the documents that were processed. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ protected DocumentDescription[] doDeleteMultiple(Long jobID, String[] legalLinkTypes, DocumentDescription[] documentDescriptions, int hopcountMethod) throws ManifoldCFException { if (documentDescriptions.length == 0) return new DocumentDescription[0]; // Order of locking is not normally important here, because documents that wind up being deleted are never being worked on by anything else. // In all cases, the state of the document excludes other activity. // The only tricky situation is when a thread is processing a document which happens to be getting deleted, while another thread is trying to add // a reference for the very same document to the queue. Then, order of locking matters, so the deletions should happen in a specific order to avoid // the possibility of deadlock. Nevertheless, this is enough of a risk that I've chosen to order the deletions by document id hash order, just like everywhere // else. long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to delete "+Integer.toString(documentDescriptions.length)+" docs and clean up hopcount for job "+jobID.toString()); } HashMap indexMap = new HashMap(); String[] docIDHashes = new String[documentDescriptions.length]; int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort by doc hash, to establish non-blocking lock order java.util.Arrays.sort(docIDHashes); DocumentDescription[] rval; while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start deleting "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()); String[] docIDSimpleHashes = new String[docIDHashes.length]; // Delete jobqueue rows FIRST. Even though we do this before assessing the carrydown implications, it is OK because it's the CHILDREN of these // rows that might get affected by carrydown data deletion, not the rows themselves! i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; DocumentDescription dd = documentDescriptions[((Integer)indexMap.get(docIDHash)).intValue()]; // Individual operations are necessary so order can be controlled. jobQueue.deleteRecord(dd.getID()); docIDSimpleHashes[i] = dd.getDocumentIdentifierHash(); i++; } // Next, find the documents that are affected by carrydown deletion. 
rval = calculateAffectedDeleteCarrydownChildren(jobID,docIDSimpleHashes); // Finally, delete the carrydown records in question. carryDown.deleteRecords(jobID,docIDSimpleHashes); if (legalLinkTypes.length > 0) hopCount.deleteDocumentIdentifiers(jobID,legalLinkTypes,docIDSimpleHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to delete "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction deleting "+Integer.toString(docIDHashes.length)+ " docs and clean up hopcount for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } return rval; } /** Helper method: Find the document descriptions that will be affected due to carrydown row deletions. */ protected DocumentDescription[] calculateAffectedDeleteCarrydownChildren(Long jobID, String[] docIDHashes) throws ManifoldCFException { // Break the request into pieces, as needed, and throw everything into a hash for uniqueness. // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes. // The goal is to throw all the children into a hash, to make them unique at the end. HashMap resultHash = new HashMap(); ArrayList list = new ArrayList(); int maxCount = maxClauseProcessDeleteHashSet(); int i = 0; int z = 0; while (i < docIDHashes.length) { if (z == maxCount) { processDeleteHashSet(jobID,resultHash,list); list.clear(); z = 0; } list.add(docIDHashes[i]); i++; z++; } if (z > 0) processDeleteHashSet(jobID,resultHash,list); // Now, put together the result document list from the hash. DocumentDescription[] rval = new DocumentDescription[resultHash.size()]; i = 0; Iterator iter = resultHash.keySet().iterator(); while (iter.hasNext()) { Long id = (Long)iter.next(); DocumentDescription dd = (DocumentDescription)resultHash.get(id); rval[i++] = dd; } return rval; } /** Get maximum count. */ protected int maxClauseProcessDeleteHashSet() { return database.findConjunctionClauseMax(new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)}); } /** Helper method: look up rows affected by a deleteRecords operation. */ protected void processDeleteHashSet(Long jobID, HashMap resultHash, ArrayList list) throws ManifoldCFException { // The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription // object for return, and so a join is necessary against the jobqueue table. 
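    // Editor's illustration (hedged): with placeholder table/column names standing in for the
    // carryDown/jobQueue field constants, the join built below looks roughly like
    //   SELECT t0.id,t0.dochash,t0.docid FROM carrydown t1, jobqueue t0
    //   WHERE t1.jobid=? AND t1.parentidhash IN (?,...)
    //     AND t0.dochash=t1.childidhash AND t0.jobid=t1.jobid
    // i.e. it fetches the jobqueue row of every child whose carrydown parent is in the passed-in batch.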
StringBuilder sb = new StringBuilder("SELECT "); ArrayList newList = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(carryDown.getTableName()).append(" t1, ") .append(jobQueue.getTableName()).append(" t0 WHERE "); sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new UnitaryClause("t1."+carryDown.jobIDField,jobID), new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND "); sb.append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField), new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})); /* sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(newList,new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new MultiClause("t1."+carryDown.parentIDHashField,list), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})) .append(")"); */ IResultSet set = database.performQuery(sb.toString(),newList,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long id = (Long)row.getValue(jobQueue.idField); String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField); String documentIdentifier = (String)row.getValue(jobQueue.docIDField); resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier)); } } /** Requeue a document for further processing in the future. * This method is called after a document is processed, when the job is a "continuous" one. * It is essentially equivalent to noting that the document processing is complete, except the * document remains on the queue. *@param documentDescriptions is the set of description objects for the document that was processed. *@param executeTimes are the times that the documents should be rescanned. Null indicates "never". *@param actions are what should be done when the time arrives. Choices are ACTION_RESCAN or ACTION_REMOVE. */ public void requeueDocumentMultiple(DocumentDescription[] documentDescriptions, Long[] executeTimes, int[] actions) throws ManifoldCFException { String[] docIDHashes = new String[documentDescriptions.length]; Long[] ids = new Long[documentDescriptions.length]; Long[] executeTimesNew = new Long[documentDescriptions.length]; int[] actionsNew = new int[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. 
i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); executeTimesNew[i] = executeTimes[index]; actionsNew[i] = actions[index]; i++; } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimesNew[i],actionsNew[i],-1L,-1); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction requeuing documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Requeue a document for further processing in the future. * This method is called after a document is processed, when the job is a "continuous" one. * It is essentially equivalent to noting that the document processing is complete, except the * document remains on the queue. *@param documentDescription is the description object for the document that was processed. *@param executeTime is the time that the document should be rescanned. Null indicates "never". *@param action is what should be done when the time arrives. Choices include ACTION_RESCAN or ACTION_REMOVE. */ public void requeueDocument(DocumentDescription documentDescription, Long executeTime, int action) throws ManifoldCFException { requeueDocumentMultiple(new DocumentDescription[]{documentDescription},new Long[]{executeTime},new int[]{action}); } /** Reset a set of documents for further processing in the future. * This method is called after some unknown number of the documents were processed, but then a service interruption occurred. * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not. * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's * current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that * special logic is probably not worth it. *@param documentDescriptions is the set of description objects for the document that was processed. *@param executeTime is the time that the documents should be rescanned. *@param failTime is the time beyond which a service interruption will be considered a hard failure. *@param failCount is the number of retries beyond which a service interruption will be considered a hard failure. 
*/ public void resetDocumentMultiple(DocumentDescription[] documentDescriptions, long executeTime, int action, long failTime, int failCount) throws ManifoldCFException { Long executeTimeLong = new Long(executeTime); Long[] ids = new Long[documentDescriptions.length]; String[] docIDHashes = new String[documentDescriptions.length]; Long[] executeTimes = new Long[documentDescriptions.length]; int[] actions = new int[documentDescriptions.length]; long[] failTimes = new long[documentDescriptions.length]; int[] failCounts = new int[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); executeTimes[i] = executeTimeLong; actions[i] = action; long oldFailTime = documentDescriptions[index].getFailTime(); if (oldFailTime == -1L) oldFailTime = failTime; failTimes[i] = oldFailTime; int oldFailCount = documentDescriptions[index].getFailRetryCount(); if (oldFailCount == -1) oldFailCount = failCount; else { oldFailCount--; if (failCount != -1 && oldFailCount > failCount) oldFailCount = failCount; } failCounts[i] = oldFailCount; i++; } // Documents get marked PENDINGPURGATORY regardless of their current state; this is because we can't know at this point whether // an ingestion attempt occurred or not, so we have to treat the documents as having been processed at least once. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setStatus(ids[i],jobQueue.STATUS_PENDINGPURGATORY,executeTimes[i],actions[i],(failTimes==null)?-1L:failTimes[i],(failCounts==null)?-1:failCounts[i]); i++; } database.performCommit(); break; } catch (Error e) { database.signalRollback(); throw e; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a set of cleaning documents for further processing in the future. * This method is called after some unknown number of the documents were cleaned, but then an ingestion service interruption occurred. * Note well: The logic here basically presumes that we cannot know whether the documents were indeed cleaned or not. * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's * current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that * special logic is probably not worth it. 
*@param documentDescriptions is the set of description objects for the document that was cleaned. *@param checkTime is the minimum time for the next cleaning attempt. */ public void resetCleaningDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime) throws ManifoldCFException { Long[] ids = new Long[documentDescriptions.length]; String[] docIDHashes = new String[documentDescriptions.length]; // First loop maps document identifier back to an index. HashMap indexMap = new HashMap(); int i = 0; while (i < documentDescriptions.length) { docIDHashes[i] =documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID(); indexMap.put(docIDHashes[i],new Integer(i)); i++; } // Sort! java.util.Arrays.sort(docIDHashes); // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct. i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i]; Integer x = (Integer)indexMap.remove(docIDHash); if (x == null) throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!"); int index = x.intValue(); ids[i] = documentDescriptions[index].getID(); i++; } // Documents get marked PURGATORY regardless of their current state; this is because we can't know at this point what the actual prior state was. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring. We thus need to pay attention to the sorted order. i = 0; while (i < ids.length) { jobQueue.setUncleaningStatus(ids[i],checkTime); i++; } TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting cleaning documents: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a cleaning document back to its former state. * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system. *@param documentDescription is the description of the document that was cleaned. *@param checkTime is the minimum time for the next cleaning attempt. */ public void resetCleaningDocument(DocumentDescription documentDescription, long checkTime) throws ManifoldCFException { resetCleaningDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime); } /** Reset a set of deleting documents for further processing in the future. * This method is called after some unknown number of the documents were deleted, but then an ingestion service interruption occurred. * Note well: The logic here basically presumes that we cannot know whether the documents were indeed processed or not. * If we knew for a fact that none of the documents had been handled, it would be possible to look at the document's * current status and decide what the new status ought to be, based on a true rollback scenario. Such cases, however, are rare enough so that * special logic is probably not worth it. *@param documentDescriptions is the set of description objects for the document that was processed. *@param checkTime is the minimum time for the next cleaning attempt. 
  */
  public void resetDeletingDocumentMultiple(DocumentDescription[] documentDescriptions, long checkTime)
    throws ManifoldCFException
  {
    Long[] ids = new Long[documentDescriptions.length];
    String[] docIDHashes = new String[documentDescriptions.length];

    // First loop maps document identifier back to an index.
    HashMap indexMap = new HashMap();
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      indexMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort!
    java.util.Arrays.sort(docIDHashes);

    // Next loop populates the actual arrays we use to feed the operation so that the ordering is correct.
    i = 0;
    while (i < docIDHashes.length)
    {
      String docIDHash = docIDHashes[i];
      Integer x = (Integer)indexMap.remove(docIDHash);
      if (x == null)
        throw new ManifoldCFException("Assertion failure: duplicate document identifier jobid/hash detected!");
      int index = x.intValue();
      ids[i] = documentDescriptions[index].getID();
      i++;
    }

    // Documents get marked COMPLETED regardless of their current state; this is because we can't know at this point what the actual prior state was.
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction();
      try
      {
        // Going through ids in order should greatly reduce or eliminate chances of deadlock occurring.  We thus need to pay attention to the sorted order.
        i = 0;
        while (i < ids.length)
        {
          jobQueue.setUndeletingStatus(ids[i],checkTime);
          i++;
        }
        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();
        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction resetting documents: "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Reset a deleting document back to its former state.
  * This gets done when a deleting thread sees a service interruption, etc., from the ingestion system.
  *@param documentDescription is the description object for the document that was cleaned.
  *@param checkTime is the minimum time for the next cleaning attempt.
  */
  public void resetDeletingDocument(DocumentDescription documentDescription, long checkTime)
    throws ManifoldCFException
  {
    resetDeletingDocumentMultiple(new DocumentDescription[]{documentDescription},checkTime);
  }

  /** Reset an active document back to its former state.
  * This gets done when there's a service interruption and the document cannot be processed yet.
  * Note well: This method formerly presumed that a perfect rollback was possible, and that there was zero chance of any
  * processing activity occurring before it got called.  That assumption appears incorrect, however, so I've opted to now
  * presume that processing has perhaps occurred.  Perfect rollback is thus no longer possible.
  *@param documentDescription is the description object for the document that was processed.
  *@param executeTime is the time that the document should be rescanned.
  *@param failTime is the time that the document should be considered to have failed, if it has not been
  * successfully read until then.
*/ public void resetDocument(DocumentDescription documentDescription, long executeTime, int action, long failTime, int failCount) throws ManifoldCFException { resetDocumentMultiple(new DocumentDescription[]{documentDescription},executeTime,action,failTime,failCount); } /** Eliminate duplicates, and sort */ protected static String[] eliminateDuplicates(String[] docIDHashes) { HashMap map = new HashMap(); int i = 0; while (i < docIDHashes.length) { String docIDHash = docIDHashes[i++]; map.put(docIDHash,docIDHash); } String[] rval = new String[map.size()]; i = 0; Iterator iter = map.keySet().iterator(); while (iter.hasNext()) { rval[i++] = (String)iter.next(); } java.util.Arrays.sort(rval); return rval; } /** Build a reorder map, describing how to convert an original index into a reordered index. */ protected static HashMap buildReorderMap(String[] originalIDHashes, String[] reorderedIDHashes) { HashMap reorderSet = new HashMap(); int i = 0; while (i < reorderedIDHashes.length) { String reorderedIDHash = reorderedIDHashes[i]; Integer position = new Integer(i); reorderSet.put(reorderedIDHash,position); i++; } HashMap map = new HashMap(); int j = 0; while (j < originalIDHashes.length) { String originalIDHash = originalIDHashes[j]; Integer position = (Integer)reorderSet.get(originalIDHash); if (position != null) { map.put(new Integer(j),position); // Remove, so that only one of each duplicate will have a place in the map reorderSet.remove(originalIDHash); } j++; } return map; } /** Add an initial set of documents to the queue. * This method is called during job startup, when the queue is being loaded. * A set of document references is passed to this method, which updates the status of the document * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDs are the local document identifiers. *@param overrideSchedule is true if any existing document schedule should be overridden. *@param hopcountMethod is either accurate, nodelete, or neverdelete. *@param currentTime is the current time in milliseconds since epoch. *@param documentPriorities are the document priorities corresponding to the document identifiers. *@param prereqEventNames are the events that must be completed before each document can be processed. *@return true if the priority value(s) were used, false otherwise. */ public boolean[] addDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String[] docIDs, boolean overrideSchedule, int hopcountMethod, long currentTime, double[] documentPriorities, String[][] prereqEventNames) throws ManifoldCFException { if (docIDHashes.length == 0) return new boolean[0]; // The document identifiers need to be sorted in a consistent fashion to reduce deadlock, and have duplicates removed, before going ahead. // But, the documentPriorities and the return booleans need to correspond to the initial array. So, after we come up with // our internal order, we need to construct a map that takes an original index and maps it to the reduced, reordered index. 
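    // Worked example (editor's note, hypothetical values): for docIDHashes {"b","a","b"},
    // eliminateDuplicates() yields the sorted, unique array {"a","b"}, and buildReorderMap()
    // then produces {0 -> 1, 1 -> 0}: original index 0 ("b") maps to reordered slot 1,
    // original index 1 ("a") maps to slot 0, and the duplicate at original index 2 maps
    // nowhere, so its rval entry simply stays false.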
String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes); HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes); double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length]; String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][]; String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length]; boolean[] rval = new boolean[docIDHashes.length]; int i = 0; while (i < docIDHashes.length) { Integer newPosition = (Integer)reorderMap.get(new Integer(i)); if (newPosition != null) { reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i]; if (prereqEventNames != null) reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i]; else reorderedDocumentPrerequisites[newPosition.intValue()] = null; reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i]; } rval[i] = false; i++; } long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" initial docs and hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, unless serialized // transactions are used. But serialized transactions may require a retry in order // to resolve transaction conflicts. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+ " initial docs and hopcounts for job "+jobID.toString()); // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update) boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length]; int z = 0; while (z < reorderedDocIDHashes.length) { String docIDHash = reorderedDocIDHashes[z]; double docPriority = reorderedDocumentPriorities[z]; String docID = reorderedDocumentIdentifiers[z]; String[] docPrereqs = reorderedDocumentPrerequisites[z]; StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.idField).append(",") .append(jobQueue.statusField).append(",") .append(jobQueue.checkTimeField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.docHashField,docIDHash), new UnitaryClause(jobQueue.jobIDField,jobID)})); sb.append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); boolean priorityUsed; long executeTime = overrideSchedule?0L:-1L; if (set.getRowCount() > 0) { // Found a row, and it is now locked. IResultRow row = set.getRow(0); // Decode the row Long rowID = (Long)row.getValue(jobQueue.idField); int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField); priorityUsed = jobQueue.updateExistingRecordInitial(rowID,status,checkTimeValue,executeTime,currentTime,docPriority,docPrereqs); } else { // Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried. 
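            // (Presumably the constraint involved is the uniqueness of the job id / document
            // hash combination in the job queue; two threads racing to insert the same
            // document would collide here, and the serialized-transaction retry absorbs
            // that collision.)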
jobQueue.insertNewRecordInitial(jobID,docIDHash,docID,docPriority,executeTime,currentTime,docPrereqs);
            priorityUsed = true;
          }
          reorderedRval[z++] = priorityUsed;
        }

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " initial docs for job "+jobID.toString());

        if (legalLinkTypes.length > 0)
          hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);

        TrackerClass.notePrecommit();
        database.performCommit();
        TrackerClass.noteCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " initial docs and hopcounts for job "+jobID.toString());

        // Rejigger to correspond with calling order
        i = 0;
        while (i < docIDs.length)
        {
          Integer finalPosition = (Integer)reorderMap.get(new Integer(i));
          if (finalPosition != null)
            rval[i] = reorderedRval[finalPosition.intValue()];
          i++;
        }

        return rval;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
            " initial docs for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        TrackerClass.noteRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Add an initial set of remaining documents to the queue.
  * This method is called during job startup, when the queue is being loaded, to list documents that
  * were NOT included by calling addDocumentsInitial(). Documents listed here are simply designed to
  * enable the framework to get rid of old, invalid seeds. They are not queued for processing.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param docIDHashes are the local document identifier hashes.
  *@param hopcountMethod is either accurate, nodelete, or neverdelete.
  */
  public void addRemainingDocumentsInitial(Long jobID, String[] legalLinkTypes, String[] docIDHashes,
    int hopcountMethod)
    throws ManifoldCFException
  {
    if (docIDHashes.length == 0)
      return;

    String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes);

    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" remaining docs and hopcounts for job "+jobID.toString());
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code unless the transactions are serialized,
    // because one transaction could otherwise see the effects of another transaction before it has been committed.
while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+
          " remaining docs and hopcounts for job "+jobID.toString());

        jobQueue.addRemainingDocumentsInitial(jobID,reorderedDocIDHashes);

        if (legalLinkTypes.length > 0)
          hopCount.recordSeedReferences(jobID,legalLinkTypes,reorderedDocIDHashes,hopcountMethod);

        database.performCommit();

        if (Logging.perf.isDebugEnabled())
          Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+
          " remaining docs and hopcounts for job "+jobID.toString());

        break;
      }
      catch (ManifoldCFException e)
      {
        database.signalRollback();
        if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT)
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+
            " remaining docs and hopcounts for job "+jobID.toString()+": "+e.getMessage());
          sleepAmt = getRandomAmount();
          continue;
        }
        throw e;
      }
      catch (Error e)
      {
        database.signalRollback();
        throw e;
      }
      finally
      {
        database.endTransaction();
        sleepFor(sleepAmt);
      }
    }
  }

  /** Signal that a seeding pass has been done.
  * Call this method at the end of a seeding pass. It is used to perform the bookkeeping necessary to
  * maintain the hopcount table.
  *@param jobID is the job identifier.
  *@param legalLinkTypes is the set of legal link types that this connector generates.
  *@param isPartial is set if the seeds provided are only a partial list. Some connectors cannot
  * supply a full list of seeds on every seeding iteration; this acknowledges that limitation.
  *@param hopcountMethod describes how to handle deletions for hopcount purposes.
  */
  public void doneDocumentsInitial(Long jobID, String[] legalLinkTypes, boolean isPartial,
    int hopcountMethod)
    throws ManifoldCFException
  {
    long startTime = 0L;
    if (Logging.perf.isDebugEnabled())
    {
      startTime = System.currentTimeMillis();
      Logging.perf.debug("Waiting to finish initial docs and hopcounts for job "+jobID.toString());
    }

    // Postgres gets all screwed up if we permit multiple threads into the hopcount code unless serialized transactions are used,
    // because one transaction could otherwise see the effects of another transaction before it has been committed.
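    // The loop below follows the retry idiom used throughout this class: begin a
    // serialized transaction, attempt the work, and on DATABASE_TRANSACTION_ABORT
    // roll back, sleep a random amount, and try again; any other exception is rethrown.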
while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to start finishing initial docs and hopcounts for job "+jobID.toString()); jobQueue.doneDocumentsInitial(jobID,isPartial); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to finish initial docs for job "+jobID.toString()); if (legalLinkTypes.length > 0) hopCount.finishSeedReferences(jobID,legalLinkTypes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+ " ms to finish initial docs and hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing initial docs and hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get the specified hop counts, with the limit as described. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the hashes for the set of documents to find the hopcount for. *@param linkType is the kind of link to find the hopcount for. *@param limit is the limit, beyond which a negative distance may be returned. *@param hopcountMethod is the method for managing hopcounts that is in effect. *@return a vector of booleans corresponding to the documents requested. A true value is returned * if the document is within the specified limit, false otherwise. */ public boolean[] findHopCounts(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String linkType, int limit, int hopcountMethod) throws ManifoldCFException { if (docIDHashes.length == 0) return new boolean[0]; if (legalLinkTypes.length == 0) throw new ManifoldCFException("Nonsensical request; asking for hopcounts where none are kept"); // The idea is to delay queue processing as much as possible, because that avoids having to wait // on locks and having to repeat our evaluations. // // Luckily, we can glean a lot of information from what's hanging around. Specifically, whatever value // we find in the table is an upper bound on the true hop distance value. So, only if we have documents // that are outside the limit does the queue need to be processed. // // It is therefore really helpful to write in an estimated value for any newly created record, if possible. Even if the // estimate is possibly greater than the true value, a great deal of locking and queue processing will be // avoided. // The flow here is to: // - grab the right hoplock // - process the queue // - if the queue is empty, get the hopcounts we wanted, otherwise release the lock and loop around long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Beginning work to get "+Integer.toString(docIDHashes.length)+" hopcounts for job "+jobID.toString()); } // Make an answer array. boolean[] rval = new boolean[docIDHashes.length]; // Make a hash of what we still need a definitive answer for. 
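    // (badAnswers maps each document identifier hash to its original index in rval,
    // so that a definitive answer found later can be written back to the right slot.)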
HashMap badAnswers = new HashMap();
    int i = 0;
    while (i < rval.length)
    {
      String docIDHash = docIDHashes[i];
      rval[i] = false;
      badAnswers.put(docIDHash,new Integer(i));
      i++;
    }

    int iterationCount = 0;
    while (true)
    {
      // Ask only about documents we don't have a definitive answer for yet.
      String[] askDocIDHashes = new String[badAnswers.size()];
      i = 0;
      Iterator iter = badAnswers.keySet().iterator();
      while (iter.hasNext())
      {
        askDocIDHashes[i++] = (String)iter.next();
      }

      int[] distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType);
      i = 0;
      while (i < distances.length)
      {
        int distance = distances[i];
        String docIDHash = askDocIDHashes[i];
        if (distance != -1 && distance <= limit)
        {
          // Found a usable value
          rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true;
        }
        i++;
      }

      if (Logging.perf.isDebugEnabled())
        Logging.perf.debug("Iteration "+Integer.toString(iterationCount++)+": After initial check, "+Integer.toString(badAnswers.size())+
        " hopcounts remain to be found for job "+jobID.toString()+", out of "+Integer.toString(docIDHashes.length)+
        " ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");

      if (badAnswers.size() == 0)
        return rval;

      // It appears we need to process the queue. We need to enter the hoplock section
      // to make sure only one player is updating values at a time. Then, before we exit, we get the
      // remaining values.

      askDocIDHashes = new String[badAnswers.size()];
      i = 0;
      iter = badAnswers.keySet().iterator();
      while (iter.hasNext())
      {
        askDocIDHashes[i++] = (String)iter.next();
      }

      // Currently, only one thread can possibly process any of the queue at a given time. This is because the queue marks are not set to something
      // other than the "in queue" value during processing. My instinct is that queue processing is likely to interfere with other queue processing,
      // so I've taken the route of prohibiting more than one batch of queue processing at a time, for now.
      String hopLockName = getHopLockName(jobID);
      long sleepAmt = 0L;
      lockManager.enterWriteLock(hopLockName);
      try
      {
        database.beginTransaction(database.TRANSACTION_SERIALIZED);
        try
        {
          if (Logging.perf.isDebugEnabled())
            Logging.perf.debug("Processing queue for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)");

          // The internal queue processing only does 200 at a time. This is a compromise between maximum efficiency (bigger number)
          // and the requirement that database writes are effectively blocked for a while (which argues for a smaller number).
          boolean definitive = hopCount.processQueue(jobID,legalLinkTypes,hopcountMethod);

          // If definitive answers were not found, we leave the lock and go back to check on the status of the questions we were
          // interested in. If the answers are all OK then we are done; if not, we need to process more queue, and keep doing that
          // until we really ARE done.
          if (!definitive)
          {
            // Sleep a little bit so another thread can have a whack at things
            sleepAmt = 100L;
            database.performCommit();
            continue;
          }

          // Definitive answers found; continue through.
distances = hopCount.findHopCounts(jobID,askDocIDHashes,linkType); database.performCommit(); } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction processing queue for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } finally { lockManager.leaveWriteLock(hopLockName); sleepFor(sleepAmt); } if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Definitive answers found for "+Integer.toString(docIDHashes.length)+ " hopcounts for job "+jobID.toString()+" ("+new Long(System.currentTimeMillis()-startTime).toString()+" ms)"); // All answers are guaranteed to be accurate now. i = 0; while (i < distances.length) { int distance = distances[i]; String docIDHash = askDocIDHashes[i]; if (distance != -1 && distance <= limit) { // Found a usable value rval[((Integer)badAnswers.remove(docIDHash)).intValue()] = true; } i++; } return rval; } } /** Get all the current seeds. * Returns the seed document identifiers for a job. *@param jobID is the job identifier. *@return the document identifiers that are currently considered to be seeds. */ public String[] getAllSeeds(Long jobID) throws ManifoldCFException { return jobQueue.getAllSeeds(jobID); } /** Add documents to the queue in bulk. * This method is called during document processing, when a set of document references are discovered. * The document references are passed to this method, which updates the status of the document(s) * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHashes are the local document identifier hashes. *@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none. * MUST be present in the case of carrydown information. *@param relationshipType is the optional link type between this document and its parent. Pass null if there * is no relationship with a parent. *@param hopcountMethod is the desired method for managing hopcounts. *@param dataNames are the names of the data to carry down to the child from this parent. *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. If CharacterInput objects are passed in here, * it is the caller's responsibility to clean these up. *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation. *@param documentPriorities are the desired document priorities for the documents. *@param prereqEventNames are the events that must be completed before a document can be queued. *@return an array of boolean values indicating whether or not the passed-in priority value was used or not for each doc id (true if used). */ public boolean[] addDocuments(Long jobID, String[] legalLinkTypes, String[] docIDHashes, String[] docIDs, String parentIdentifierHash, String relationshipType, int hopcountMethod, String[][] dataNames, Object[][][] dataValues, long currentTime, double[] documentPriorities, String[][] prereqEventNames) throws ManifoldCFException { if (docIDs.length == 0) return new boolean[0]; // Sort the id hashes and eliminate duplicates. This will help avoid deadlock conditions. 
// However, we also need to keep the carrydown data in synch, so track that around as well, and merge if there are // duplicate document identifiers. HashMap nameMap = new HashMap(); int k = 0; while (k < docIDHashes.length) { String docIDHash = docIDHashes[k]; // If there are duplicates, we need to merge them. HashMap names = (HashMap)nameMap.get(docIDHash); if (names == null) { names = new HashMap(); nameMap.put(docIDHash,names); } String[] nameList = dataNames[k]; Object[][] dataList = dataValues[k]; int z = 0; while (z < nameList.length) { String name = nameList[z]; Object[] values = dataList[z]; HashMap valueMap = (HashMap)names.get(name); if (valueMap == null) { valueMap = new HashMap(); names.put(name,valueMap); } int y = 0; while (y < values.length) { // Calculate the value hash; that's the true key, and the one that cannot be duplicated. String valueHash; if (values[y] instanceof CharacterInput) { // It's a CharacterInput object. valueHash = ((CharacterInput)values[y]).getHashValue(); } else { // It better be a String. valueHash = ManifoldCF.hash((String)values[y]); } valueMap.put(valueHash,values[y]); y++; } z++; } k++; } String[] reorderedDocIDHashes = eliminateDuplicates(docIDHashes); HashMap reorderMap = buildReorderMap(docIDHashes,reorderedDocIDHashes); double[] reorderedDocumentPriorities = new double[reorderedDocIDHashes.length]; String[][] reorderedDocumentPrerequisites = new String[reorderedDocIDHashes.length][]; String[] reorderedDocumentIdentifiers = new String[reorderedDocIDHashes.length]; boolean[] rval = new boolean[docIDHashes.length]; int i = 0; while (i < docIDHashes.length) { Integer newPosition = (Integer)reorderMap.get(new Integer(i)); if (newPosition != null) { reorderedDocumentPriorities[newPosition.intValue()] = documentPriorities[i]; if (prereqEventNames != null) reorderedDocumentPrerequisites[newPosition.intValue()] = prereqEventNames[i]; else reorderedDocumentPrerequisites[newPosition.intValue()] = null; reorderedDocumentIdentifiers[newPosition.intValue()] = docIDs[i]; } rval[i] = false; i++; } dataNames = new String[reorderedDocIDHashes.length][]; String[][][] dataHashValues = new String[reorderedDocIDHashes.length][][]; dataValues = new Object[reorderedDocIDHashes.length][][]; k = 0; while (k < reorderedDocIDHashes.length) { String docIDHash = reorderedDocIDHashes[k]; HashMap names = (HashMap)nameMap.get(docIDHash); dataNames[k] = new String[names.size()]; dataHashValues[k] = new String[names.size()][]; dataValues[k] = new Object[names.size()][]; Iterator iter = names.keySet().iterator(); int z = 0; while (iter.hasNext()) { String dataName = (String)iter.next(); (dataNames[k])[z] = dataName; HashMap values = (HashMap)names.get(dataName); (dataHashValues[k])[z] = new String[values.size()]; (dataValues[k])[z] = new Object[values.size()]; Iterator iter2 = values.keySet().iterator(); int y = 0; while (iter2.hasNext()) { String dataValueHash = (String)iter2.next(); Object dataValue = values.get(dataValueHash); ((dataHashValues[k])[z])[y] = dataValueHash; ((dataValues[k])[z])[y] = dataValue; y++; } z++; } k++; } long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to add "+Integer.toString(reorderedDocIDHashes.length)+" docs and hopcounts for job "+jobID.toString()+" parent identifier "+parentIdentifierHash); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, // and allows one transaction to see the effects of another transaction before it's been 
committed. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start adding "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash); // Go through document id's one at a time, in order - mainly to prevent deadlock as much as possible. Search for any existing row in jobqueue first (for update) HashMap existingRows = new HashMap(); for (int z = 0; z < reorderedDocIDHashes.length; z++) { String docIDHash = reorderedDocIDHashes[z]; StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobQueue.idField).append(",") .append(jobQueue.statusField).append(",") .append(jobQueue.checkTimeField) .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.docHashField,docIDHash), new UnitaryClause(jobQueue.jobIDField,jobID)})); sb.append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); boolean priorityUsed; if (set.getRowCount() > 0) { // Found a row, and it is now locked. IResultRow row = set.getRow(0); // Decode the row Long rowID = (Long)row.getValue(jobQueue.idField); int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField)); Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField); existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue)); } else { // Not found. Attempt an insert instead. This may fail due to constraints, but if this happens, the whole transaction will be retried. jobQueue.insertNewRecord(jobID,docIDHash,reorderedDocumentIdentifiers[z],reorderedDocumentPriorities[z],0L,currentTime,reorderedDocumentPrerequisites[z]); } } // Update all the carrydown data at once, for greatest efficiency. boolean[] carrydownChangesSeen = carryDown.recordCarrydownDataMultiple(jobID,parentIdentifierHash,reorderedDocIDHashes,dataNames,dataHashValues,dataValues); // Same with hopcount. boolean[] hopcountChangesSeen = null; if (parentIdentifierHash != null && relationshipType != null) hopcountChangesSeen = hopCount.recordReferences(jobID,legalLinkTypes,parentIdentifierHash,reorderedDocIDHashes,relationshipType,hopcountMethod); // Loop through the document id's again, and perform updates where needed boolean[] reorderedRval = new boolean[reorderedDocIDHashes.length]; boolean reactivateRemovedHopcountRecords = false; for (int z = 0; z < reorderedDocIDHashes.length; z++) { String docIDHash = reorderedDocIDHashes[z]; JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash); if (jr == null) // It was an insert reorderedRval[z] = true; else { // It was an existing row; do the update logic // The hopcountChangesSeen array describes whether each reference is a new one. This // helps us determine whether we're going to need to "flip" HOPCOUNTREMOVED documents // to the PENDING state. If the new link ended in an existing record, THEN we need to flip them all! 
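                  // (Concretely: the sixth argument to updateExistingRecord() below ORs the
                  // carrydown and hopcount change flags together, so a change of either kind
                  // is treated as a reason to requeue the existing row.)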
reorderedRval[z] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(), 0L,currentTime,carrydownChangesSeen[z] || (hopcountChangesSeen!=null && hopcountChangesSeen[z]), reorderedDocumentPriorities[z],reorderedDocumentPrerequisites[z]); // Signal if we need to perform the flip if (hopcountChangesSeen != null && hopcountChangesSeen[z]) reactivateRemovedHopcountRecords = true; } } if (reactivateRemovedHopcountRecords) jobQueue.reactivateHopcountRemovedRecords(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to add "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash); i = 0; while (i < docIDHashes.length) { Integer finalPosition = (Integer)reorderMap.get(new Integer(i)); if (finalPosition != null) rval[i] = reorderedRval[finalPosition.intValue()]; i++; } return rval; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { sleepAmt = getRandomAmount(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction adding "+Integer.toString(reorderedDocIDHashes.length)+ " docs and hopcounts for job "+jobID.toString()+" parent identifier hash "+parentIdentifierHash+": "+e.getMessage()+"; sleeping for "+new Long(sleepAmt).toString()+" ms",e); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Add a document to the queue. * This method is called during document processing, when a document reference is discovered. * The document reference is passed to this method, which updates the status of the document * in the specified job's queue, according to specific state rules. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param docIDHash is the local document identifier hash value. *@param parentIdentifierHash is the optional parent identifier hash of this document. Pass null if none. * MUST be present in the case of carrydown information. *@param relationshipType is the optional link type between this document and its parent. Pass null if there * is no relationship with a parent. *@param hopcountMethod is the desired method for managing hopcounts. *@param dataNames are the names of the data to carry down to the child from this parent. *@param dataValues are the values to carry down to the child from this parent, corresponding to dataNames above. *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation. *@param priority is the desired document priority for the document. *@param prereqEventNames are the events that must be completed before the document can be processed. *@return true if the priority value was used, false otherwise. 
*/ public boolean addDocument(Long jobID, String[] legalLinkTypes, String docIDHash, String docID, String parentIdentifierHash, String relationshipType, int hopcountMethod, String[] dataNames, Object[][] dataValues, long currentTime, double priority, String[] prereqEventNames) throws ManifoldCFException { return addDocuments(jobID,legalLinkTypes, new String[]{docIDHash},new String[]{docID}, parentIdentifierHash,relationshipType,hopcountMethod,new String[][]{dataNames}, new Object[][][]{dataValues},currentTime,new double[]{priority},new String[][]{prereqEventNames})[0]; } /** Complete adding child documents to the queue, for a set of documents. * This method is called at the end of document processing, to help the hopcount tracking engine do its bookkeeping. *@param jobID is the job identifier. *@param legalLinkTypes is the set of legal link types that this connector generates. *@param parentIdentifierHashes are the document identifier hashes for whom child link extraction just took place. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@return the set of documents for which carrydown data was changed by this operation. These documents are likely * to be requeued as a result of the change. */ public DocumentDescription[] finishDocuments(Long jobID, String[] legalLinkTypes, String[] parentIdentifierHashes, int hopcountMethod) throws ManifoldCFException { if (parentIdentifierHashes.length == 0) return new DocumentDescription[0]; DocumentDescription[] rval; if (legalLinkTypes.length == 0) { // Must at least end the carrydown transaction. By itself, this does not need a serialized transaction; however, occasional // deadlock is possible when a document shares multiple parents, so do the whole retry drill while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc carrydown records for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } else { long startTime = 0L; if (Logging.perf.isDebugEnabled()) { startTime = System.currentTimeMillis(); Logging.perf.debug("Waiting to finish "+Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); } // Postgres gets all screwed up if we permit multiple threads into the hopcount code, // and allows one transaction to see the effects of another transaction before it's been committed. while (true) { long sleepAmt = 0L; database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // A certain set of carrydown records are going to be deleted by the ensuing restoreRecords command. Calculate that set of records! 
rval = calculateAffectedRestoreCarrydownChildren(jobID,parentIdentifierHashes); carryDown.restoreRecords(jobID,parentIdentifierHashes); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Waited "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to start finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); hopCount.finishParents(jobID,legalLinkTypes,parentIdentifierHashes,hopcountMethod); database.performCommit(); if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Took "+new Long(System.currentTimeMillis()-startTime).toString()+" ms to finish "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction finishing "+ Integer.toString(parentIdentifierHashes.length)+" doc hopcounts for job "+jobID.toString()+": "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } return rval; } /** Helper method: Calculate the unique set of affected carrydown children resulting from a "restoreRecords" operation. */ protected DocumentDescription[] calculateAffectedRestoreCarrydownChildren(Long jobID, String[] parentIDHashes) throws ManifoldCFException { // We are going to need to break up this query into a number of subqueries, each covering a subset of parent id hashes. // The goal is to throw all the children into a hash, to make them unique at the end. HashMap resultHash = new HashMap(); ArrayList list = new ArrayList(); int maxCount = database.getMaxOrClause(); int i = 0; int z = 0; while (i < parentIDHashes.length) { if (z == maxCount) { processParentHashSet(jobID,resultHash,list); list.clear(); z = 0; } list.add(parentIDHashes[i]); i++; z++; } if (z > 0) processParentHashSet(jobID,resultHash,list); // Now, put together the result document list from the hash. DocumentDescription[] rval = new DocumentDescription[resultHash.size()]; i = 0; Iterator iter = resultHash.keySet().iterator(); while (iter.hasNext()) { Long id = (Long)iter.next(); DocumentDescription dd = (DocumentDescription)resultHash.get(id); rval[i++] = dd; } return rval; } /** Helper method: look up rows affected by a restoreRecords operation. */ protected void processParentHashSet(Long jobID, HashMap resultHash, ArrayList list) throws ManifoldCFException { // The query here mirrors the carrydown.restoreRecords() delete query! However, it also fetches enough information to build a DocumentDescription // object for return, and so a join is necessary against the jobqueue table. 
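    // Approximate shape of the generated SQL (illustrative only; the actual table and
    // column names come from the field constants used below):
    //   SELECT t0.id, t0.dochash, t0.docid
    //   FROM carrydown t1, jobqueue t0
    //   WHERE t1.jobid=? AND t1.parentidhash IN (...)
    //     AND t0.dochash=t1.childidhash AND t0.jobid=t1.jobid
    //     AND t1.isnew=?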
StringBuilder sb = new StringBuilder("SELECT "); ArrayList newlist = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(carryDown.getTableName()).append(" t1, ") .append(jobQueue.getTableName()).append(" t0 WHERE "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t1."+carryDown.jobIDField,jobID), new MultiClause("t1."+carryDown.parentIDHashField,list)})).append(" AND "); sb.append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.docHashField,"t1."+carryDown.childIDHashField), new JoinClause("t0."+jobQueue.jobIDField,"t1."+carryDown.jobIDField)})).append(" AND "); sb.append("t1.").append(carryDown.newField).append("=?"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); /* sb.append("t0.").append(jobQueue.idField).append(",") .append("t0.").append(jobQueue.docHashField).append(",") .append("t0.").append(jobQueue.docIDField) .append(" FROM ").append(jobQueue.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new UnitaryClause("t0."+jobQueue.jobIDField,jobID)})).append(" AND "); sb.append("EXISTS(SELECT 'x' FROM ").append(carryDown.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(newlist,new ClauseDescription[]{ new JoinClause("t1."+carryDown.jobIDField,"t0."+jobQueue.jobIDField), new MultiClause("t1."+carryDown.parentIDHashField,list), new JoinClause("t1."+carryDown.childIDHashField,"t0."+jobQueue.docHashField)})).append(" AND ") .append("t1.").append(carryDown.newField).append("=?") .append(")"); newlist.add(carryDown.statusToString(carryDown.ISNEW_BASE)); */ IResultSet set = database.performQuery(sb.toString(),newlist,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long id = (Long)row.getValue(jobQueue.idField); String documentIdentifierHash = (String)row.getValue(jobQueue.docHashField); String documentIdentifier = (String)row.getValue(jobQueue.docIDField); resultHash.put(id,new DocumentDescription(id,jobID,documentIdentifierHash,documentIdentifier)); } } /** Begin an event sequence. *@param eventName is the name of the event. *@return true if the event could be created, or false if it's already there. */ public boolean beginEventSequence(String eventName) throws ManifoldCFException { try { eventManager.createEvent(eventName); return true; } catch (ManifoldCFException e) { if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) return false; throw e; } } /** Complete an event sequence. *@param eventName is the name of the event. */ public void completeEventSequence(String eventName) throws ManifoldCFException { eventManager.destroyEvent(eventName); } /** Requeue a document set because of carrydown changes. * This method is called when carrydown data is modified for a set of documents. The documents must be requeued for immediate reprocessing, even to the * extent that if one is *already* being processed, it will need to be done over again. *@param documentDescriptions is the set of description objects for the documents that have had their parent carrydown information changed. *@param docPriorities are the document priorities to assign to the documents, if needed. *@return a flag for each document priority, true if it was used, false otherwise. 
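  *@param currentTime is the time in milliseconds since epoch that will be recorded for this operation.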
*/
  public boolean[] carrydownChangeDocumentMultiple(DocumentDescription[] documentDescriptions, long currentTime, double[] docPriorities)
    throws ManifoldCFException
  {
    if (documentDescriptions.length == 0)
      return new boolean[0];

    // Order the updates by document hash, to prevent deadlock as much as possible.

    // This map contains the original index of the document id hash.
    HashMap docHashMap = new HashMap();

    String[] docIDHashes = new String[documentDescriptions.length];
    int i = 0;
    while (i < documentDescriptions.length)
    {
      docIDHashes[i] = documentDescriptions[i].getDocumentIdentifierHash() + ":" + documentDescriptions[i].getJobID();
      docHashMap.put(docIDHashes[i],new Integer(i));
      i++;
    }

    // Sort the hashes
    java.util.Arrays.sort(docIDHashes);

    boolean[] rval = new boolean[docIDHashes.length];

    // Enter transaction and prepare to look up document states in dochash order
    while (true)
    {
      long sleepAmt = 0L;
      database.beginTransaction(database.TRANSACTION_SERIALIZED);
      try
      {
        // This is the map that will contain the rows we found, keyed by docIDHash.
        HashMap existingRows = new HashMap();

        // Loop through hashes in order
        int j = 0;
        while (j < docIDHashes.length)
        {
          String docIDHash = docIDHashes[j];

          // Get the index
          int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();

          // Lookup document description
          DocumentDescription dd = documentDescriptions[originalIndex];

          // Do the query. We can base this on the id column since we have that.
          StringBuilder sb = new StringBuilder("SELECT ");
          ArrayList list = new ArrayList();

          sb.append(jobQueue.idField).append(",")
            .append(jobQueue.statusField).append(",")
            .append(jobQueue.checkTimeField)
            .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ")
            .append(database.buildConjunctionClause(list,new ClauseDescription[]{
              new UnitaryClause(jobQueue.idField,dd.getID())})).append(" FOR UPDATE");

          IResultSet set = database.performQuery(sb.toString(),list,null,null);

          // If the row is there, we use its current info to requeue it properly.
          if (set.getRowCount() > 0)
          {
            // Found a row, and it is now locked.
            IResultRow row = set.getRow(0);

            // Decode the row
            Long rowID = (Long)row.getValue(jobQueue.idField);
            int status = jobQueue.stringToStatus((String)row.getValue(jobQueue.statusField));
            Long checkTimeValue = (Long)row.getValue(jobQueue.checkTimeField);

            existingRows.put(docIDHash,new JobqueueRecord(rowID,status,checkTimeValue));
          }
          j++;
        }

        // Ok, existingRows contains all the rows we want to try to update. Go through these and update.
        j = 0;
        while (j < docIDHashes.length)
        {
          String docIDHash = docIDHashes[j];
          int originalIndex = ((Integer)docHashMap.get(docIDHash)).intValue();

          JobqueueRecord jr = (JobqueueRecord)existingRows.get(docIDHash);
          if (jr == null)
            // It wasn't found, so the doc priority wasn't used.
            rval[originalIndex] = false;
          else
            // It was an existing row; do the update logic; use the 'carrydown changes' flag = true all the time.
rval[originalIndex] = jobQueue.updateExistingRecord(jr.getRecordID(),jr.getStatus(),jr.getCheckTimeValue(), 0L,currentTime,true,docPriorities[originalIndex],null); j++; } database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction handling "+Integer.toString(docIDHashes.length)+" carrydown changes: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } return rval; } /** Requeue a document because of carrydown changes. * This method is called when carrydown data is modified for a document. The document must be requeued for immediate reprocessing, even to the * extent that if it is *already* being processed, it will need to be done over again. *@param documentDescription is the description object for the document that has had its parent carrydown information changed. *@param docPriority is the document priority to assign to the document, if needed. *@return a flag for the document priority, true if it was used, false otherwise. */ public boolean carrydownChangeDocument(DocumentDescription documentDescription, long currentTime, double docPriority) throws ManifoldCFException { return carrydownChangeDocumentMultiple(new DocumentDescription[]{documentDescription},currentTime,new double[]{docPriority})[0]; } /** Sleep a random amount of time after a transaction abort. */ protected long getRandomAmount() { return database.getSleepAmt(); } protected void sleepFor(long amt) throws ManifoldCFException { database.sleepFor(amt); } /** Retrieve specific parent data for a given document. *@param jobID is the job identifier. *@param docIDHash is the document identifier hash value. *@param dataName is the kind of data to retrieve. *@return the unique data values. */ public String[] retrieveParentData(Long jobID, String docIDHash, String dataName) throws ManifoldCFException { return carryDown.getDataValues(jobID,docIDHash,dataName); } /** Retrieve specific parent data for a given document. *@param jobID is the job identifier. *@param docIDHash is the document identifier hash value. *@param dataName is the kind of data to retrieve. *@return the unique data values. */ public CharacterInput[] retrieveParentDataAsFiles(Long jobID, String docIDHash, String dataName) throws ManifoldCFException { return carryDown.getDataValuesAsFiles(jobID,docIDHash,dataName); } // These methods support the job threads (which start jobs and end jobs) // There is one thread that starts jobs. It simply looks for jobs which are ready to // start, and changes their state accordingly. // There is also a pool of threads that end jobs. These threads wait for a job that // looks like it is done, and do completion processing if it is. /** Start all jobs in need of starting. * This method marks all the appropriate jobs as "in progress", which is all that should be * needed to start them. * It's also the case that the start event should be logged in the event log. In order to make it possible for * the caller to do this logging, a set of job ID's will be returned containing the jobs that * were started. *@param currentTime is the current time in milliseconds since epoch. *@param unwaitList is filled in with the set of job ID objects that were resumed. 
*/ public void startJobs(long currentTime, ArrayList unwaitList) throws ManifoldCFException { // This method should compare the lasttime field against the current time, for all // "not active" jobs, and see if a job should be started. // // If a job is to be started, then the following occurs: // (1) If the job is "full scan", then all COMPLETED jobqueue entries are converted to // PURGATORY. // (2) The job is labeled as "ACTIVE". // (3) The starttime field is set. // (4) The endtime field is nulled out. // // This method also assesses jobs that are ACTIVE or PAUSED to see if they should be // converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded // the value in the "windowend" field for the job. // // Finally, jobs in ACTIVEWAIT or PAUSEDWAIT are assessed to see if they should become // ACTIVE or PAUSED. This will occur if we have entered a new window for the job. // Note well: We can't combine locks across both our lock manager and the database unless we do it consistently. The // consistent practice throughout CF is to do the external locks first, then the database locks. This particular method // thus cannot use cached job description information, because it must throw database locks first against the jobs table. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.lastTimeField).append(",") .append(jobs.statusField).append(",") .append(jobs.startMethodField).append(",") .append(jobs.outputNameField).append(",") .append(jobs.connectionNameField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_INACTIVE), jobs.statusToString(jobs.STATUS_ACTIVEWAIT), jobs.statusToString(jobs.STATUS_ACTIVEWAITSEEDING), jobs.statusToString(jobs.STATUS_PAUSEDWAIT), jobs.statusToString(jobs.STATUS_PAUSEDWAITSEEDING)})})).append(" AND ") .append(jobs.startMethodField).append("!=? FOR UPDATE"); list.add(jobs.startMethodToString(IJobDescription.START_DISABLE)); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Next, we query for the schedule information. In order to do that, we amass a list of job identifiers that we want schedule info // for. 
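        // (jobIDSet[i] corresponds to row i of the result set above, so the schedule record
        // arrays returned by readScheduleRecords() line up with the rows by index.)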
Long[] jobIDSet = new Long[set.getRowCount()]; int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); jobIDSet[i++] = (Long)row.getValue(jobs.idField); } ScheduleRecord[][] srSet = jobs.readScheduleRecords(jobIDSet); i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); int startMethod = jobs.stringToStartMethod((String)row.getValue(jobs.startMethodField)); String outputName = (String)row.getValue(jobs.outputNameField); String connectionName = (String)row.getValue(jobs.connectionNameField); ScheduleRecord[] thisSchedule = srSet[i++]; // Run at specific times // We need to start with the start time as given, plus one long startInterval = ((Long)row.getValue(jobs.lastTimeField)).longValue() + 1; if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Checking if job "+jobID.toString()+" needs to be started; it was last checked at "+ new Long(startInterval).toString()+", and now it is "+new Long(currentTime).toString()); // Proceed to the current time, and find a match if there is one to be found. // If not -> continue // We go through *all* the schedule records. The one that matches that has the latest // end time is the one we take. Long matchTime = null; Long duration = null; boolean requestMinimum = false; for (int l = 0; l < thisSchedule.length; l++) { long trialStartInterval = startInterval; ScheduleRecord sr = thisSchedule[l]; Long thisDuration = sr.getDuration(); if (startMethod == IJobDescription.START_WINDOWINSIDE && thisDuration != null) { // Bump the start interval back before the beginning of the current interval. // This will guarantee a start as long as there is time in the window. long trialStart = currentTime - thisDuration.longValue(); if (trialStart < trialStartInterval) trialStartInterval = trialStart; } Long thisMatchTime = checkTimeMatch(trialStartInterval,currentTime, sr.getDayOfWeek(), sr.getDayOfMonth(), sr.getMonthOfYear(), sr.getYear(), sr.getHourOfDay(), sr.getMinutesOfHour(), sr.getTimezone(), thisDuration); if (thisMatchTime == null) { if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug(" No time match found within interval "+new Long(trialStartInterval).toString()+ " to "+new Long(currentTime).toString()); continue; } if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug(" Time match FOUND within interval "+new Long(trialStartInterval).toString()+ " to "+new Long(currentTime).toString()); if (matchTime == null || thisDuration == null || (duration != null && thisMatchTime.longValue() + thisDuration.longValue() > matchTime.longValue() + duration.longValue())) { matchTime = thisMatchTime; duration = thisDuration; requestMinimum = sr.getRequestMinimum(); } } if (matchTime == null) { jobs.updateLastTime(jobID,currentTime); continue; } int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); // Calculate the end of the window Long windowEnd = null; if (duration != null) { windowEnd = new Long(matchTime.longValue()+duration.longValue()); } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job '"+jobID+"' is within run window at "+new Long(currentTime).toString()+" ms. (which starts at "+ matchTime.toString()+" ms."+((duration==null)?"":(" and goes for "+duration.toString()+" ms."))+")"); } int newJobState; switch (status) { case Jobs.STATUS_INACTIVE: // If job was formerly "inactive", do the full startup. // Start this job! but with no end time. // This does not get logged because the startup thread does the logging. 
jobs.startJob(jobID,windowEnd,requestMinimum); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Signalled for job start for job "+jobID); } break; case Jobs.STATUS_ACTIVEWAIT: unwaitList.add(jobID); jobs.unwaitJob(jobID,Jobs.STATUS_RESUMING,windowEnd); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited job "+jobID); } break; case Jobs.STATUS_ACTIVEWAITSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,Jobs.STATUS_RESUMINGSEEDING,windowEnd); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited job "+jobID); } break; case Jobs.STATUS_PAUSEDWAIT: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSED,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSEDWAITSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSEDSEEDING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSINGWAITING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; case Jobs.STATUS_PAUSINGWAITINGSEEDING: unwaitList.add(jobID); jobs.unwaitJob(jobID,jobs.STATUS_PAUSINGSEEDING,windowEnd); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Un-waited (but still paused) job "+jobID); } break; default: break; } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction resetting for restart: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Put active or paused jobs in wait state, if they've exceeded their window. *@param currentTime is the current time in milliseconds since epoch. *@param waitList is filled in with the set of job ID's that were put into a wait state. */ public void waitJobs(long currentTime, ArrayList waitList) throws ManifoldCFException { // This method assesses jobs that are ACTIVE or PAUSED to see if they should be // converted to ACTIVEWAIT or PAUSEDWAIT. This would happen if the current time exceeded // the value in the "windowend" field for the job. // database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.statusField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVESEEDING), jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER), jobs.statusToString(jobs.STATUS_ACTIVESEEDING_NEITHER), jobs.statusToString(jobs.STATUS_PAUSED), jobs.statusToString(jobs.STATUS_PAUSEDSEEDING)})})).append(" AND ") .append(jobs.windowEndField).append("<? 
FOR UPDATE"); list.add(new Long(currentTime)); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); waitList.add(jobID); int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); // Make the job wait. switch (status) { case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end"); } break; case Jobs.STATUS_ACTIVESEEDING: case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: case Jobs.STATUS_ACTIVESEEDING_NEITHER: jobs.waitJob(jobID,Jobs.STATUS_ACTIVEWAITINGSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait' state due to window end"); } break; case Jobs.STATUS_PAUSED: jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAIT); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSEDSEEDING: jobs.waitJob(jobID,Jobs.STATUS_PAUSEDWAITSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSING: jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; case Jobs.STATUS_PAUSINGSEEDING: jobs.waitJob(jobID,Jobs.STATUS_PAUSINGWAITINGSEEDING); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now in 'wait paused' state due to window end"); } break; default: break; } } } catch (ManifoldCFException e) { database.signalRollback(); throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } /** Reset job schedule. This re-evaluates whether the job should be started now. This method would typically * be called after a job's scheduling window has been changed. *@param jobID is the job identifier. */ public void resetJobSchedule(Long jobID) throws ManifoldCFException { // Note: This is problematic; the expected behavior is for the job to start if "we are within the window", // but not to start if the transition to active status was long enough ago. // Since there's no "right" way to do this, do nothing for now. // This explicitly did NOT work - it caused the job to refire every time it was saved. // jobs.updateLastTime(jobID,0L); } /** Check if the specified job parameters have a 'hit' within the specified interval. *@param startTime is the start time. *@param currentTimestamp is the end time. *@param daysOfWeek is the enumerated days of the week, or null. *@param daysOfMonth is the enumerated days of the month, or null. *@param months is the enumerated months, or null. *@param years is the enumerated years, or null. *@param hours is the enumerated hours, or null. *@param minutes is the enumerated minutes, or null. *@return null if there is NO hit within the interval; otherwise the actual time of the hit in milliseconds * from epoch is returned. 
*/ protected static Long checkTimeMatch(long startTime, long currentTimestamp, EnumeratedValues daysOfWeek, EnumeratedValues daysOfMonth, EnumeratedValues months, EnumeratedValues years, EnumeratedValues hours, EnumeratedValues minutes, String timezone, Long duration) { // What we do here is start with the previous timestamp, and advance until we // either encounter a match, or we exceed the current timestamp. Calendar c; if (timezone == null) { c = Calendar.getInstance(); } else { c = Calendar.getInstance(TimeZone.getTimeZone(timezone)); } // Get the current starting time c.setTimeInMillis(startTime); // If there's a duration value, we can't match unless we're within the window. // That means we find a match, and then we verify that the end time is greater than the currenttimestamp. // If not, we move on (by incrementing) // The main loop works off of the calendar and these values. while (c.getTimeInMillis() < currentTimestamp) { // Round up to the nearest minute, unless at 0 already int x = c.get(Calendar.MILLISECOND); if (x != c.getMinimum(Calendar.MILLISECOND)) { int amtToAdd = c.getLeastMaximum(Calendar.MILLISECOND)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MILLISECOND,amtToAdd); continue; } x = c.get(Calendar.SECOND); if (x != c.getMinimum(Calendar.SECOND)) { int amtToAdd = c.getLeastMaximum(Calendar.SECOND)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.SECOND,amtToAdd); continue; } boolean startedToCareYet = false; x = c.get(Calendar.MINUTE); // If we care about minutes, round up, otherwise go to the 0 value if (minutes == null) { if (x != c.getMinimum(Calendar.MINUTE)) { int amtToAdd = c.getLeastMaximum(Calendar.MINUTE)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MINUTE,amtToAdd); continue; } } else { // See if it is a legit value. if (!minutes.checkValue(x-c.getMinimum(Calendar.MINUTE))) { // Advance to next legit value // We could be clever, but we just advance one c.add(Calendar.MINUTE,1); continue; } startedToCareYet = true; } // Hours x = c.get(Calendar.HOUR_OF_DAY); if (hours == null) { if (!startedToCareYet && x != c.getMinimum(Calendar.HOUR_OF_DAY)) { int amtToAdd = c.getLeastMaximum(Calendar.HOUR_OF_DAY)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.HOUR_OF_DAY,amtToAdd); continue; } } else { if (!hours.checkValue(x-c.getMinimum(Calendar.HOUR_OF_DAY))) { // next hour c.add(Calendar.HOUR_OF_DAY,1); continue; } startedToCareYet = true; } // Days of month and days of week are at the same level; // these advance concurrently. However, if NEITHER is specified, and nothing // earlier was, then we do the 1st of the month. x = c.get(Calendar.DAY_OF_WEEK); if (daysOfWeek != null) { if (!daysOfWeek.checkValue(x-c.getMinimum(Calendar.DAY_OF_WEEK))) { // next day c.add(Calendar.DAY_OF_WEEK,1); continue; } startedToCareYet = true; } x = c.get(Calendar.DAY_OF_MONTH); if (daysOfMonth == null) { // If nothing is specified but the month or the year, do it on the 1st. if (!startedToCareYet && x != c.getMinimum(Calendar.DAY_OF_MONTH)) { // Move as rapidly as possible towards the first of the month. But in no case, increment // less than one day. 
int amtToAdd = c.getLeastMaximum(Calendar.DAY_OF_MONTH)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.DAY_OF_MONTH,amtToAdd); continue; } } else { if (!daysOfMonth.checkValue(x-c.getMinimum(Calendar.DAY_OF_MONTH))) { // next day c.add(Calendar.DAY_OF_MONTH,1); continue; } startedToCareYet = true; } x = c.get(Calendar.MONTH); if (months == null) { if (!startedToCareYet && x != c.getMinimum(Calendar.MONTH)) { int amtToAdd = c.getLeastMaximum(Calendar.MONTH)+1-x; if (amtToAdd < 1) amtToAdd = 1; c.add(Calendar.MONTH,amtToAdd); continue; } } else { if (!months.checkValue(x-c.getMinimum(Calendar.MONTH))) { c.add(Calendar.MONTH,1); continue; } startedToCareYet = true; } x = c.get(Calendar.YEAR); if (years != null) { if (!years.checkValue(x)) { c.add(Calendar.YEAR,1); continue; } startedToCareYet = true; } // Looks like a match. // Last check is to be sure we are in the window, if any. If we are outside the window, // must skip forward. if (duration != null && c.getTimeInMillis() + duration.longValue() <= currentTimestamp) { c.add(Calendar.MILLISECOND,c.getLeastMaximum(Calendar.MILLISECOND)); continue; } return new Long(c.getTimeInMillis()); } return null; } /** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and * will not cease until complete. If the job is already running, this operation will assure that * the job does not pause when its window ends. The job can be manually paused, or manually aborted. *@param jobID is the ID of the job to start. */ public void manualStart(Long jobID) throws ManifoldCFException { manualStart(jobID,false); } /** Manually start a job. The specified job will be run REGARDLESS of the timed windows, and * will not cease until complete. If the job is already running, this operation will assure that * the job does not pause when its window ends. The job can be manually paused, or manually aborted. *@param jobID is the ID of the job to start. *@param requestMinimum is true if a minimal job run is requested. */ public void manualStart(Long jobID, boolean requestMinimum) throws ManifoldCFException { database.beginTransaction(); try { // First, query the appropriate fields of all jobs. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() < 1) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus(row.getValue(jobs.statusField).toString()); if (status != Jobs.STATUS_INACTIVE) throw new ManifoldCFException("Job "+jobID+" is already running"); IJobDescription jobDescription = jobs.load(jobID,true); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually starting job "+jobID); } // Start this job! but with no end time. jobs.startJob(jobID,null,requestMinimum); jobQueue.clearFailTimes(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manual job start signal for job "+jobID+" successfully sent"); } } catch (ManifoldCFException e) { database.signalRollback(); throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); } } /** Note job delete started. *@param jobID is the job id. *@param startTime is the job delete start time. 
*/ public void noteJobDeleteStarted(Long jobID, long startTime) throws ManifoldCFException { jobs.noteJobDeleteStarted(jobID,startTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" delete is now started"); } /** Note job started. *@param jobID is the job id. *@param startTime is the job start time. */ public void noteJobStarted(Long jobID, long startTime) throws ManifoldCFException { jobs.noteJobStarted(jobID,startTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" is now started"); } /** Note job seeded. *@param jobID is the job id. *@param seedTime is the job seed time. */ public void noteJobSeeded(Long jobID, long seedTime) throws ManifoldCFException { jobs.noteJobSeeded(jobID,seedTime); if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Job "+jobID+" has been successfully reseeded"); } /** Prepare for a delete scan. *@param jobID is the job id. */ public void prepareDeleteScan(Long jobID) throws ManifoldCFException { // No special treatment needed for hopcount or carrydown, since these all get deleted at once // at the end of the job delete process. TrackerClass.notePrecommit(); jobQueue.prepareDeleteScan(jobID); TrackerClass.noteCommit(); } /** Prepare a job to be run. * This method is called regardless of the details of the job; what differs is only the flags that are passed in. * The code inside will determine the appropriate procedures. * (This method replaces prepareFullScan() and prepareIncrementalScan(). ) *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. *@param hopcountMethod describes how to handle deletions for hopcount purposes. *@param connectorModel is the model used by the connector for the job. *@param continuousJob is true if the job is a continuous one. *@param fromBeginningOfTime is true if the job is running starting from time 0. *@param requestMinimum is true if the minimal amount of work is requested for the job run. */ public void prepareJobScan(Long jobID, String[] legalLinkTypes, int hopcountMethod, int connectorModel, boolean continuousJob, boolean fromBeginningOfTime, boolean requestMinimum) throws ManifoldCFException { // (1) If the connector has MODEL_ADD_CHANGE_DELETE, then // we let the connector run the show; there's no purge phase, and therefore the // documents are left in a COMPLETED state if they don't show up in the list // of seeds that require the attention of the connector. However, we do need to // preload the queue with all the existing documents, if there was any change to the // specification information (which will mean that fromBeginningOfTime is set). // // (2) If the connector has MODEL_ALL, then it's a full crawl no matter what, so // we do a full scan initialization. // // (3) If the connector has some other model, we look at the start time. A start // time of 0 implies a full scan, while any other start time implies an incremental // scan. // Complete connector model is told everything, so no delete phase. if (connectorModel == IRepositoryConnector.MODEL_ADD_CHANGE_DELETE) { if (fromBeginningOfTime) queueAllExisting(jobID,legalLinkTypes); return; } // If the connector model is complete via chaining, then we just need to make // sure discovery works to queue the changes. 
if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE_DELETE) { if (fromBeginningOfTime) queueAllExisting(jobID,legalLinkTypes); else jobQueue.preparePartialScan(jobID); return; } // Similarly, minimal crawl attempts no delete phase unless the connector explicitly forbids it, or unless // the job criteria have changed. if (requestMinimum && connectorModel != IRepositoryConnector.MODEL_ALL && !fromBeginningOfTime) { // If it is a chained model, do the partial prep. if (connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD || connectorModel == IRepositoryConnector.MODEL_CHAINED_ADD_CHANGE) jobQueue.preparePartialScan(jobID); return; } if (!continuousJob && connectorModel != IRepositoryConnector.MODEL_PARTIAL && (connectorModel == IRepositoryConnector.MODEL_ALL || fromBeginningOfTime)) prepareFullScan(jobID,legalLinkTypes,hopcountMethod); else jobQueue.prepareIncrementalScan(jobID); } /** Queue all existing. *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. */ protected void queueAllExisting(Long jobID, String[] legalLinkTypes) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { if (legalLinkTypes.length > 0) { jobQueue.reactivateHopcountRemovedRecords(jobID); } jobQueue.queueAllExisting(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction during queueAllExisting: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Prepare for a full scan. *@param jobID is the job id. *@param legalLinkTypes are the link types allowed for the job. *@param hopcountMethod describes how to handle deletions for hopcount purposes. */ protected void prepareFullScan(Long jobID, String[] legalLinkTypes, int hopcountMethod) throws ManifoldCFException { while (true) { long sleepAmt = 0L; // Since we delete documents here, we need to manage the hopcount part of the world too. database.beginTransaction(database.TRANSACTION_SERIALIZED); try { // Delete the documents we have never fetched, including any hopcount records we've calculated. 
if (legalLinkTypes.length > 0) { ArrayList list = new ArrayList(); String query = database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause("t99."+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})}); hopCount.deleteMatchingDocuments(jobID,legalLinkTypes,jobQueue.getTableName()+" t99", "t99."+jobQueue.docHashField,"t99."+jobQueue.jobIDField, query,list, hopcountMethod); } jobQueue.prepareFullScan(jobID); TrackerClass.notePrecommit(); database.performCommit(); TrackerClass.noteCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); TrackerClass.noteRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction preparing full scan: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); TrackerClass.noteRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Manually abort a running job. The job will be permanently stopped, and will not run again until * automatically started based on schedule, or manually started. *@param jobID is the job to abort. */ public void manualAbort(Long jobID) throws ManifoldCFException { // Just whack status back to "INACTIVE". The active documents will continue to be processed until done, // but that's fine. There will be no finishing stage, obviously. if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually aborting job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.abortJob(jobID,null); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" abort signal successfully sent"); } } /** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost, * until the job finishes on its own. *@param jobID is the job to abort. *@param requestMinimum is true if a minimal job run is requested. */ public void manualAbortRestart(Long jobID, boolean requestMinimum) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually restarting job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.abortRestartJob(jobID,requestMinimum); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction restarting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" restart signal successfully sent"); } } /** Manually restart a running job. The job will be stopped and restarted. Any schedule affinity will be lost, * until the job finishes on its own. *@param jobID is the job to abort. 
*/ public void manualAbortRestart(Long jobID) throws ManifoldCFException { manualAbortRestart(jobID,false); } /** Abort a running job due to a fatal error condition. *@param jobID is the job to abort. *@param errorText is the error text. *@return true if this is the first logged abort request for this job. */ public boolean errorAbort(Long jobID, String errorText) throws ManifoldCFException { // Just whack status back to "INACTIVE". The active documents will continue to be processed until done, // but that's fine. There will be no finishing stage, obviously. if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Aborting job "+jobID+" due to error '"+errorText+"'"); } boolean rval; while (true) { long sleepAmt = 0L; database.beginTransaction(); try { rval = jobs.abortJob(jobID,errorText); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction aborting job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (rval && Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" abort signal successfully sent"); } return rval; } /** Pause a job. *@param jobID is the job identifier to pause. */ public void pauseJob(Long jobID) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually pausing job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.pauseJob(jobID); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction pausing job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" successfully paused"); } } /** Restart a paused job. *@param jobID is the job identifier to restart. */ public void restartJob(Long jobID) throws ManifoldCFException { if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Manually restarting paused job "+jobID); } while (true) { long sleepAmt = 0L; database.beginTransaction(); try { jobs.restartJob(jobID); jobQueue.clearFailTimes(jobID); database.performCommit(); break; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted transaction restarting pausing job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" successfully restarted"); } } /** Get the list of jobs that are ready for seeding. *@return jobs that are active and are running in adaptive mode. These will be seeded * based on what the connector says should be added to the queue. 
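*@param currentTime is the current time in milliseconds since epoch; jobs whose reseed time is at or before this value are returned, and it is also used to compute the next reseed time.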
*/ public JobSeedingRecord[] getJobsReadyForSeeding(long currentTime) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.lastCheckTimeField).append(",") .append(jobs.reseedIntervalField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_ACTIVE))})).append(" AND ") .append(jobs.typeField).append("=? AND ") .append("(").append(jobs.reseedTimeField).append(" IS NULL OR ").append(jobs.reseedTimeField).append("<=?)") .append(" FOR UPDATE"); list.add(jobs.typeToString(jobs.TYPE_CONTINUOUS)); list.add(new Long(currentTime)); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Update them all JobSeedingRecord[] rval = new JobSeedingRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); Long x = (Long)row.getValue(jobs.lastCheckTimeField); long synchTime = 0; if (x != null) synchTime = x.longValue(); Long r = (Long)row.getValue(jobs.reseedIntervalField); Long reseedTime; if (r != null) reseedTime = new Long(currentTime + r.longValue()); else reseedTime = null; // Mark status of job as "active/seeding". Special status is needed so that abort // will not complete until seeding is completed. jobs.writeStatus(jobID,jobs.STATUS_ACTIVESEEDING,reseedTime); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for seeding"); } rval[i] = new JobSeedingRecord(jobID,synchTime); i++; } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for seeding: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get the list of jobs that are ready for deletion. *@return jobs that were in the "readyfordelete" state. 
*/ public JobDeleteRecord[] getJobsReadyForDelete() throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORDELETE))})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Update them all JobDeleteRecord[] rval = new JobDeleteRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); // Mark status of job as "starting delete" jobs.writeStatus(jobID,jobs.STATUS_DELETESTARTINGUP); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for delete startup"); } rval[i] = new JobDeleteRecord(jobID); i++; } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for delete: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Get the list of jobs that are ready for startup. *@return jobs that were in the "readyforstartup" state. These will be marked as being in the "starting up" state. */ public JobStartRecord[] getJobsReadyForStartup() throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(",") .append(jobs.lastCheckTimeField).append(",") .append(jobs.statusField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_READYFORSTARTUP), jobs.statusToString(jobs.STATUS_READYFORSTARTUPMINIMAL)})})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Update them all JobStartRecord[] rval = new JobStartRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); Long x = (Long)row.getValue(jobs.lastCheckTimeField); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); boolean requestMinimum = (status == jobs.STATUS_READYFORSTARTUPMINIMAL); long synchTime = 0; if (x != null) synchTime = x.longValue(); // Mark status of job as "starting" jobs.writeStatus(jobID,requestMinimum?jobs.STATUS_STARTINGUPMINIMAL:jobs.STATUS_STARTINGUP); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for startup"); } rval[i] = new JobStartRecord(jobID,synchTime,requestMinimum); i++; } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for startup: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction();
sleepFor(sleepAmt); } } } /** Inactivate a job, from the notification state. *@param jobID is the ID of the job to inactivate. */ public void inactivateJob(Long jobID) throws ManifoldCFException { // While there is no flow that can cause a job to be in the wrong state when this gets called, as a precaution // it might be a good idea to put this in a transaction and have the state get checked first. while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_NOTIFYINGOFCOMPLETION: jobs.notificationComplete(jobID); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted clearing notification state for job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a job starting for delete back to "ready for delete" * state. *@param jobID is the job id. */ public void resetStartDeleteJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_DELETESTARTINGUP: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForDelete' state"); // Set the state of the job back to "ReadyForStartup" jobs.writeStatus(jobID,jobs.STATUS_READYFORDELETE); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting start delete job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a job that is notifying back to "ready for notify" * state. *@param jobID is the job id. 
*/ public void resetNotifyJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_NOTIFYINGOFCOMPLETION: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForNotify' state"); // Set the state of the job back to "ReadyForNotify" jobs.writeStatus(jobID,jobs.STATUS_READYFORNOTIFY); break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting notify job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a starting job back to "ready for startup" state. *@param jobID is the job id. */ public void resetStartupJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_STARTINGUP: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartup' state"); // Set the state of the job back to "ReadyForStartup" jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUP); break; case Jobs.STATUS_STARTINGUPMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ReadyForStartupMinimal' state"); // Set the state of the job back to "ReadyForStartupMinimal" jobs.writeStatus(jobID,jobs.STATUS_READYFORSTARTUPMINIMAL); break; case Jobs.STATUS_ABORTINGSTARTINGUP: case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'Aborting' state"); jobs.writeStatus(jobID,jobs.STATUS_ABORTING); break; case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestart' state"); jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART); break; case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" to 'AbortingForRestartMinimal' state"); 
jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL); break; case Jobs.STATUS_READYFORSTARTUP: case Jobs.STATUS_READYFORSTARTUPMINIMAL: case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: // ok break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting startup job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Reset a seeding job back to "active" state. *@param jobID is the job id. */ public void resetSeedJob(Long jobID) throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Check job status StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.statusField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.idField,jobID)})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); if (set.getRowCount() == 0) throw new ManifoldCFException("No such job: "+jobID); IResultRow row = set.getRow(0); int status = jobs.stringToStatus((String)row.getValue(jobs.statusField)); switch (status) { case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Uninstalled' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_UNINSTALLED); break; case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_NoOutput' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NOOUTPUT); break; case Jobs.STATUS_ACTIVESEEDING_NEITHER: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active_Neither' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE_NEITHER); break; case Jobs.STATUS_ACTIVESEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Active' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVE); break; case Jobs.STATUS_ACTIVEWAITSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'ActiveWait' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ACTIVEWAIT); break; case Jobs.STATUS_PAUSEDSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Paused' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_PAUSED); break; case Jobs.STATUS_PAUSEDWAITSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'PausedWait' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_PAUSEDWAIT); break; case Jobs.STATUS_ABORTINGSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'Aborting' state"); // Set the state of the job back to "Active" 
jobs.writeStatus(jobID,jobs.STATUS_ABORTING); break; case Jobs.STATUS_ABORTINGFORRESTARTSEEDING: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestart' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTART); break; case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL: if (Logging.jobs.isDebugEnabled()) Logging.jobs.debug("Setting job "+jobID+" back to 'AbortingForRestartMinimal' state"); // Set the state of the job back to "Active" jobs.writeStatus(jobID,jobs.STATUS_ABORTINGFORRESTARTMINIMAL); break; case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: case Jobs.STATUS_PAUSED: case Jobs.STATUS_ACTIVEWAIT: case Jobs.STATUS_PAUSEDWAIT: // ok break; default: throw new ManifoldCFException("Unexpected job status: "+Integer.toString(status)); } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted resetting seeding job: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Delete jobs in need of being deleted (which are marked "ready for delete"). * This method is meant to be called periodically to perform delete processing on jobs. */ public void deleteJobsReadyForDelete() throws ManifoldCFException { while (true) { long sleepAmt = 0L; // This method must find only jobs that have nothing hanging around in their jobqueue that represents an ingested // document. Any jobqueue entries which are in a state to interfere with the delete will be cleaned up by other // threads, so eventually a job will become eligible. This happens when there are no records that have an ingested // status: complete, purgatory, being-cleaned, being-deleted, or pending purgatory. database.beginTransaction(); try { // The original query was: // // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND // t1.status IN ('C', 'F', 'G')) // // However, this did not work well with Postgres when the tables got big. So I revised things to do the following multi-stage process: // (1) The query should be broken up, such that n queries are done: // (a) the first one should get all candidate jobs (those that have the right state) // (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1 // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow // early exit!! // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_DELETING))})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Now, loop through this list. 
// For each one, verify that it's okay to delete it int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); list.clear(); sb = new StringBuilder("SELECT "); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE), jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; ManifoldCF.noteConfigurationChange(); // Remove documents from job queue jobQueue.deleteAllJobRecords(jobID); // Remove carrydowns for the job carryDown.deleteOwner(jobID); // Nothing is in a critical section - so this should be OK. hopCount.deleteOwner(jobID); jobs.delete(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Removed job "+jobID); } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted deleting jobs ready for delete: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Put all eligible jobs in the "shutting down" state. */ public void finishJobs() throws ManifoldCFException { while (true) { long sleepAmt = 0L; // The jobs we should transition: // - are active // - have no ACTIVE, PENDING, ACTIVEPURGATORY, or PENDINGPURGATORY records database.beginTransaction(); try { // The query I used to emit was: // SELECT jobid FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status IN ('A','P','F','G')) // This did not get along well with Postgresql, so instead this is what is now done: // (1) The query should be broken up, such that n queries are done: // (a) the first one should get all candidate jobs (those that have the right state) // (b) there should be a query for each job of roughly this form: SELECT id FROM jobqueue WHERE jobid=xxx AND status IN (...) LIMIT 1 // This will work way better than postgresql currently works, because neither the cost-based analysis nor the actual NOT clause seem to allow // early exit!!
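// As an illustrative sketch (the literal SQL is assembled below from the jobs/jobQueue field constants, so the names here are schematic): the single query // SELECT id FROM jobs t0 WHERE t0.status='A' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE t0.id=t1.jobid AND t1.status IN ('A','P','F','G')) // becomes one candidate query, SELECT id FROM jobs WHERE status='A', followed by a per-candidate probe // SELECT id FROM jobqueue WHERE jobid=? AND status IN ('A','P','F','G') LIMIT 1 // and a job is transitioned only when its probe returns zero rows; the LIMIT 1 probe can stop at the first matching row, which is the early exit the NOT EXISTS form failed to get.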
// Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ACTIVE), jobs.statusToString(jobs.STATUS_ACTIVEWAIT), jobs.statusToString(jobs.STATUS_ACTIVE_UNINSTALLED), jobs.statusToString(jobs.STATUS_ACTIVE_NOOUTPUT), jobs.statusToString(jobs.STATUS_ACTIVE_NEITHER)})})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // Check to be sure the job is a candidate for shutdown sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // Mark status of job as "finishing" jobs.writeStatus(jobID,jobs.STATUS_SHUTTINGDOWN); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Marked job "+jobID+" for shutdown"); } } database.performCommit(); return; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted finishing jobs: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Find the list of jobs that need to have their connectors notified of job completion. *@return the ID's of jobs that need their output connectors notified in order to become inactive. 
*/ public JobNotifyRecord[] getJobsReadyForInactivity() throws ManifoldCFException { while (true) { long sleepAmt = 0L; database.beginTransaction(); try { // Do the query StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_READYFORNOTIFY))})) .append(" FOR UPDATE"); IResultSet set = database.performQuery(sb.toString(),list,null,null); // Return them all JobNotifyRecord[] rval = new JobNotifyRecord[set.getRowCount()]; int i = 0; while (i < rval.length) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(jobs.idField); // Mark status of job as "notifying of completion" jobs.writeStatus(jobID,jobs.STATUS_NOTIFYINGOFCOMPLETION); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Found job "+jobID+" in need of notification"); } rval[i++] = new JobNotifyRecord(jobID); } database.performCommit(); return rval; } catch (ManifoldCFException e) { database.signalRollback(); if (e.getErrorCode() == e.DATABASE_TRANSACTION_ABORT) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug("Aborted getting jobs ready for notify: "+e.getMessage()); sleepAmt = getRandomAmount(); continue; } throw e; } catch (Error e) { database.signalRollback(); throw e; } finally { database.endTransaction(); sleepFor(sleepAmt); } } } /** Complete the sequence that resumes jobs, either from a pause or from a scheduling window * wait. The logic will restore the job to an active state (many possibilities depending on * connector status), and will record the jobs that have been so modified. *@param timestamp is the current time in milliseconds since epoch. *@param modifiedJobs is filled in with the set of IJobDescription objects that were resumed. */ public void finishJobResumes(long timestamp, ArrayList modifiedJobs) throws ManifoldCFException { // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_RESUMING), jobs.statusToString(jobs.STATUS_RESUMINGSEEDING) })})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // There are no secondary checks that need to be made; just resume IJobDescription jobDesc = jobs.load(jobID,true); modifiedJobs.add(jobDesc); jobs.finishResumeJob(jobID,timestamp); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Resumed job "+jobID); } } } /** Complete the sequence that stops jobs, either for abort, pause, or because of a scheduling * window. The logic will move the job to its next state (INACTIVE, PAUSED, ACTIVEWAIT), * and will record the jobs that have been so modified. *@param timestamp is the current time in milliseconds since epoch. *@param modifiedJobs is filled in with the set of IJobDescription objects that were stopped.
*/ public void finishJobStops(long timestamp, ArrayList modifiedJobs) throws ManifoldCFException { // The query I used to emit was: // SELECT jobid FROM jobs t0 WHERE t0.status='X' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status IN ('A','F')) // Now the query is broken up so that Postgresql behaves more efficiently. // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField) .append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(jobs.statusField,new Object[]{ jobs.statusToString(jobs.STATUS_ABORTING), jobs.statusToString(jobs.STATUS_ABORTINGFORRESTART), jobs.statusToString(jobs.STATUS_ABORTINGFORRESTARTMINIMAL), jobs.statusToString(jobs.STATUS_PAUSING), jobs.statusToString(jobs.STATUS_PAUSINGSEEDING), jobs.statusToString(jobs.STATUS_ACTIVEWAITING), jobs.statusToString(jobs.STATUS_ACTIVEWAITINGSEEDING), jobs.statusToString(jobs.STATUS_PAUSINGWAITING), jobs.statusToString(jobs.STATUS_PAUSINGWAITINGSEEDING) })})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // All the job's documents need to have their docpriority set to null, to clear dead wood out of the docpriority index. // See CONNECTORS-290. // We do this BEFORE updating the job state. jobQueue.clearDocPriorities(jobID); IJobDescription jobDesc = jobs.load(jobID,true); modifiedJobs.add(jobDesc); jobs.finishStopJob(jobID,timestamp); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Stopped job "+jobID); } } } /** Reset eligible jobs either back to the "inactive" state, or make them active again. The * latter will occur if the cleanup phase of the job generated more pending documents. * * This method is used to pick up all jobs in the shutting down state * whose purgatory or being-cleaned records have been all processed. * *@param currentTime is the current time in milliseconds since epoch. *@param resetJobs is filled in with the set of IJobDescription objects that were reset. 
*/ public void resetJobs(long currentTime, ArrayList resetJobs) throws ManifoldCFException { // Query for all jobs that fulfill the criteria // The query used to look like: // // SELECT id FROM jobs t0 WHERE status='D' AND NOT EXISTS(SELECT 'x' FROM jobqueue t1 WHERE // t0.id=t1.jobid AND t1.status='P') // // Now, the query is broken up, for performance // Do the first query, getting the candidate jobs to be considered StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobs.statusField,jobs.statusToString(jobs.STATUS_SHUTTINGDOWN))})); IResultSet set = database.performQuery(sb.toString(),list,null,null); int i = 0; while (i < set.getRowCount()) { IResultRow row = set.getRow(i++); Long jobID = (Long)row.getValue(jobs.idField); // Check to be sure the job is a candidate for shutdown sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PURGATORY), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); IResultSet confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) continue; // The shutting-down phase is complete. However, we need to check if there are any outstanding // PENDING or PENDINGPURGATORY records before we can decide what to do. sb = new StringBuilder("SELECT "); list.clear(); sb.append(jobQueue.idField).append(" FROM ").append(jobQueue.getTableName()).append(" WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new UnitaryClause(jobQueue.jobIDField,jobID), new MultiClause(jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" ").append(database.constructOffsetLimitClause(0,1)); confirmSet = database.performQuery(sb.toString(),list,null,null,1,null); if (confirmSet.getRowCount() > 0) { // This job needs to re-enter the active state. Make that happen. jobs.returnJobToActive(jobID); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" is re-entering active state"); } } else { // This job should be marked as finished. IJobDescription jobDesc = jobs.load(jobID,true); resetJobs.add(jobDesc); jobs.finishJob(jobID,currentTime); if (Logging.jobs.isDebugEnabled()) { Logging.jobs.debug("Job "+jobID+" now completed"); } } } } // Status reports /** Get the status of a job. *@return the status object for the specified job. */ @Override public JobStatus getStatus(Long jobID) throws ManifoldCFException { return getStatus(jobID,true); } /** Get a list of all jobs, and their status information. *@return an ordered array of job status objects. */ @Override public JobStatus[] getAllStatus() throws ManifoldCFException { return getAllStatus(true); } /** Get a list of running jobs. This is for status reporting. *@return an array of the job status objects. */ @Override public JobStatus[] getRunningJobs() throws ManifoldCFException { return getRunningJobs(true); } /** Get a list of completed jobs, and their statistics. 
*@return an array of the job status objects. */ @Override public JobStatus[] getFinishedJobs() throws ManifoldCFException { return getFinishedJobs(true); } /** Get the status of a job. *@param jobID is the job ID. *@param includeCounts is true if document counts should be included. *@return the status object for the specified job. */ public JobStatus getStatus(Long jobID, boolean includeCounts) throws ManifoldCFException { return getStatus(jobID, includeCounts, Integer.MAX_VALUE); } /** Get a list of all jobs, and their status information. *@param includeCounts is true if document counts should be included. *@return an ordered array of job status objects. */ public JobStatus[] getAllStatus(boolean includeCounts) throws ManifoldCFException { return getAllStatus(includeCounts, Integer.MAX_VALUE); } /** Get a list of running jobs. This is for status reporting. *@param includeCounts is true if document counts should be included. *@return an array of the job status objects. */ public JobStatus[] getRunningJobs(boolean includeCounts) throws ManifoldCFException { return getRunningJobs(includeCounts, Integer.MAX_VALUE); } /** Get a list of completed jobs, and their statistics. *@param includeCounts is true if document counts should be included. *@return an array of the job status objects. */ public JobStatus[] getFinishedJobs(boolean includeCounts) throws ManifoldCFException { return getFinishedJobs(includeCounts, Integer.MAX_VALUE); } /** Get the status of a job. *@param includeCounts is true if document counts should be included. *@return the status object for the specified job. */ @Override public JobStatus getStatus(Long jobID, boolean includeCounts, int maxCount) throws ManifoldCFException { ArrayList list = new ArrayList(); String whereClause = Jobs.idField+"=?"; list.add(jobID); JobStatus[] records = makeJobStatus(whereClause,list,includeCounts,maxCount); if (records.length == 0) return null; return records[0]; } /** Get a list of all jobs, and their status information. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an ordered array of job status objects. */ public JobStatus[] getAllStatus(boolean includeCounts, int maxCount) throws ManifoldCFException { return makeJobStatus(null,null,includeCounts,maxCount); } /** Get a list of running jobs. This is for status reporting. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an array of the job status objects. 
*/ @Override public JobStatus[] getRunningJobs(boolean includeCounts, int maxCount) throws ManifoldCFException { ArrayList whereParams = new ArrayList(); String whereClause = database.buildConjunctionClause(whereParams,new ClauseDescription[]{ new MultiClause(Jobs.statusField,new Object[]{ Jobs.statusToString(Jobs.STATUS_ACTIVE), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVE_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_UNINSTALLED), Jobs.statusToString(Jobs.STATUS_ACTIVE_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NOOUTPUT), Jobs.statusToString(Jobs.STATUS_ACTIVE_NEITHER), Jobs.statusToString(Jobs.STATUS_ACTIVESEEDING_NEITHER), Jobs.statusToString(Jobs.STATUS_PAUSED), Jobs.statusToString(Jobs.STATUS_PAUSEDSEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAIT), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSEDWAIT), Jobs.statusToString(Jobs.STATUS_PAUSEDWAITSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSING), Jobs.statusToString(Jobs.STATUS_PAUSINGSEEDING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITING), Jobs.statusToString(Jobs.STATUS_ACTIVEWAITINGSEEDING), Jobs.statusToString(Jobs.STATUS_PAUSINGWAITING), Jobs.statusToString(Jobs.STATUS_PAUSINGWAITINGSEEDING), Jobs.statusToString(Jobs.STATUS_RESUMING), Jobs.statusToString(Jobs.STATUS_RESUMINGSEEDING) })}); return makeJobStatus(whereClause,whereParams,includeCounts,maxCount); } /** Get a list of completed jobs, and their statistics. *@param includeCounts is true if document counts should be included. *@param maxCount is the maximum number of documents we want to count for each status. *@return an array of the job status objects. */ @Override public JobStatus[] getFinishedJobs(boolean includeCounts, int maxCount) throws ManifoldCFException { StringBuilder sb = new StringBuilder(); ArrayList whereParams = new ArrayList(); sb.append(database.buildConjunctionClause(whereParams,new ClauseDescription[]{ new UnitaryClause(Jobs.statusField,Jobs.statusToString(Jobs.STATUS_INACTIVE))})).append(" AND ") .append(Jobs.endTimeField).append(" IS NOT NULL"); return makeJobStatus(sb.toString(),whereParams,includeCounts,maxCount); } // Protected methods and classes /** Make a job status array from a query result. *@param whereClause is the where clause for the jobs we are interested in. *@return the status array. */ protected JobStatus[] makeJobStatus(String whereClause, ArrayList whereParams, boolean includeCounts, int maxCount) throws ManifoldCFException { IResultSet set = database.performQuery("SELECT t0."+ Jobs.idField+",t0."+ Jobs.descriptionField+",t0."+ Jobs.statusField+",t0."+ Jobs.startTimeField+",t0."+ Jobs.endTimeField+",t0."+ Jobs.errorField+ " FROM "+jobs.getTableName()+" t0 "+((whereClause==null)?"":(" WHERE "+whereClause))+" ORDER BY "+Jobs.descriptionField+" ASC", whereParams,null,null); // Build hashes for set2 and set3 Map<Long,Long> set2Hash = new HashMap<Long,Long>(); Map<Long,Long> set3Hash = new HashMap<Long,Long>(); Map<Long,Long> set4Hash = new HashMap<Long,Long>(); Map<Long,Boolean> set2Exact = new HashMap<Long,Boolean>(); Map<Long,Boolean> set3Exact = new HashMap<Long,Boolean>(); Map<Long,Boolean> set4Exact = new HashMap<Long,Boolean>(); if (includeCounts) { // If we are counting all of them anyway, do this via GROUP BY since it will be the fastest. But // otherwise, fire off an individual query at a time. 
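// Sketch of the two counting strategies, with schematic SQL (the real statements are built from the JobQueue field constants used below): exact counts come from one grouped query shaped roughly like // SELECT jobid,status,COUNT(dochash) FROM jobqueue ... GROUP BY jobid,status // while the capped path first issues a probe limited to maxCount+1 rows; only when more than maxCount rows match does it fall back to buildCountsUsingIndividualQueries, which presumably bounds the rows examined per job and status at the cost of exactness beyond maxCount.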
if (maxCount == Integer.MAX_VALUE) { buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } else { // Check if the total matching jobqueue rows exceeds the limit. If not, we can still use the cheaper query. StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1"); addWhereClause(sb,list,whereClause,whereParams,false); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet countResult = database.performQuery(sb.toString(),list,null,null); if (countResult.getRowCount() > 0 && ((Long)countResult.getRow(0).getValue("doccount")).longValue() > maxCount) { // Too many items in queue; do it the hard way buildCountsUsingIndividualQueries(whereClause,whereParams,maxCount,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } else { // Cheap way should still work. buildCountsUsingGroupBy(whereClause,whereParams,set2Hash,set3Hash,set4Hash,set2Exact,set3Exact,set4Exact); } } } JobStatus[] rval = new JobStatus[set.getRowCount()]; for (int i = 0; i < rval.length; i++) { IResultRow row = set.getRow(i); Long jobID = (Long)row.getValue(Jobs.idField); String description = row.getValue(Jobs.descriptionField).toString(); int status = Jobs.stringToStatus(row.getValue(Jobs.statusField).toString()); Long startTimeValue = (Long)row.getValue(Jobs.startTimeField); long startTime = -1; if (startTimeValue != null) startTime = startTimeValue.longValue(); Long endTimeValue = (Long)row.getValue(Jobs.endTimeField); long endTime = -1; if (endTimeValue != null) endTime = endTimeValue.longValue(); String errorText = (String)row.getValue(Jobs.errorField); if (errorText != null && errorText.length() == 0) errorText = null; int rstatus = JobStatus.JOBSTATUS_NOTYETRUN; switch (status) { case Jobs.STATUS_INACTIVE: if (errorText != null) rstatus = JobStatus.JOBSTATUS_ERROR; else { if (startTime >= 0) rstatus = JobStatus.JOBSTATUS_COMPLETED; else rstatus = JobStatus.JOBSTATUS_NOTYETRUN; } break; case Jobs.STATUS_ACTIVE_UNINSTALLED: case Jobs.STATUS_ACTIVESEEDING_UNINSTALLED: case Jobs.STATUS_ACTIVE_NOOUTPUT: case Jobs.STATUS_ACTIVESEEDING_NOOUTPUT: case Jobs.STATUS_ACTIVE_NEITHER: case Jobs.STATUS_ACTIVESEEDING_NEITHER: rstatus = JobStatus.JOBSTATUS_RUNNING_UNINSTALLED; break; case Jobs.STATUS_ACTIVE: case Jobs.STATUS_ACTIVESEEDING: rstatus = JobStatus.JOBSTATUS_RUNNING; break; case Jobs.STATUS_SHUTTINGDOWN: rstatus = JobStatus.JOBSTATUS_JOBENDCLEANUP; break; case Jobs.STATUS_READYFORNOTIFY: case Jobs.STATUS_NOTIFYINGOFCOMPLETION: rstatus = JobStatus.JOBSTATUS_JOBENDNOTIFICATION; break; case Jobs.STATUS_ABORTING: case Jobs.STATUS_ABORTINGSEEDING: case Jobs.STATUS_ABORTINGSTARTINGUP: case Jobs.STATUS_ABORTINGSTARTINGUPMINIMAL: rstatus = JobStatus.JOBSTATUS_ABORTING; break; case Jobs.STATUS_ABORTINGFORRESTART: case Jobs.STATUS_ABORTINGFORRESTARTMINIMAL: case Jobs.STATUS_ABORTINGFORRESTARTSEEDING: case Jobs.STATUS_ABORTINGFORRESTARTSEEDINGMINIMAL: case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTART: case Jobs.STATUS_ABORTINGSTARTINGUPFORRESTARTMINIMAL: rstatus = JobStatus.JOBSTATUS_RESTARTING; break; case Jobs.STATUS_PAUSING: case Jobs.STATUS_PAUSINGSEEDING: case Jobs.STATUS_ACTIVEWAITING: case Jobs.STATUS_ACTIVEWAITINGSEEDING: case Jobs.STATUS_PAUSINGWAITING: case Jobs.STATUS_PAUSINGWAITINGSEEDING: rstatus = JobStatus.JOBSTATUS_STOPPING; break; case Jobs.STATUS_RESUMING: 
case Jobs.STATUS_RESUMINGSEEDING: rstatus = JobStatus.JOBSTATUS_RESUMING; break; case Jobs.STATUS_PAUSED: case Jobs.STATUS_PAUSEDSEEDING: rstatus = JobStatus.JOBSTATUS_PAUSED; break; case Jobs.STATUS_ACTIVEWAIT: case Jobs.STATUS_ACTIVEWAITSEEDING: rstatus = JobStatus.JOBSTATUS_WINDOWWAIT; break; case Jobs.STATUS_PAUSEDWAIT: case Jobs.STATUS_PAUSEDWAITSEEDING: rstatus = JobStatus.JOBSTATUS_PAUSED; break; case Jobs.STATUS_STARTINGUP: case Jobs.STATUS_STARTINGUPMINIMAL: case Jobs.STATUS_READYFORSTARTUP: case Jobs.STATUS_READYFORSTARTUPMINIMAL: rstatus = JobStatus.JOBSTATUS_STARTING; break; case Jobs.STATUS_DELETESTARTINGUP: case Jobs.STATUS_READYFORDELETE: case Jobs.STATUS_DELETING: case Jobs.STATUS_DELETING_NOOUTPUT: rstatus = JobStatus.JOBSTATUS_DESTRUCTING; break; default: break; } Long set2Value = set2Hash.get(jobID); Long set3Value = set3Hash.get(jobID); Long set4Value = set4Hash.get(jobID); Boolean set2ExactValue = set2Exact.get(jobID); Boolean set3ExactValue = set3Exact.get(jobID); Boolean set4ExactValue = set4Exact.get(jobID); rval[i] = new JobStatus(jobID.toString(),description,rstatus,((set2Value==null)?0L:set2Value.longValue()), ((set3Value==null)?0L:set3Value.longValue()), ((set4Value==null)?0L:set4Value.longValue()), ((set2ExactValue==null)?true:set2ExactValue.booleanValue()), ((set3ExactValue==null)?true:set3ExactValue.booleanValue()), ((set4ExactValue==null)?true:set4ExactValue.booleanValue()), startTime,endTime,errorText); } return rval; } protected static ClauseDescription buildOutstandingClause() throws ManifoldCFException { return new MultiClause(JobQueue.statusField,new Object[]{ JobQueue.statusToString(JobQueue.STATUS_ACTIVE), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCAN), JobQueue.statusToString(JobQueue.STATUS_PENDING), JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}); } protected static ClauseDescription buildProcessedClause() throws ManifoldCFException { return new MultiClause(JobQueue.statusField,new Object[]{ JobQueue.statusToString(JobQueue.STATUS_COMPLETE), JobQueue.statusToString(JobQueue.STATUS_UNCHANGED), JobQueue.statusToString(JobQueue.STATUS_PURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVEPURGATORY), JobQueue.statusToString(JobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), JobQueue.statusToString(JobQueue.STATUS_PENDINGPURGATORY)}); } protected void buildCountsUsingIndividualQueries(String whereClause, ArrayList whereParams, int maxCount, Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash, Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact) throws ManifoldCFException { // Fire off an individual query with a limit for each job // First, get the list of jobs that we are interested in. 
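// Each job then gets three limited COUNT queries (total, outstanding, and processed documents); each is limited to maxCount+1 rows so that a count past maxCount can be detected and flagged as inexact.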
StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(Jobs.idField).append(" FROM ").append(jobs.getTableName()).append(" t0"); if (whereClause != null) { sb.append(" WHERE ") .append(whereClause); if (whereParams != null) list.addAll(whereParams); } IResultSet jobSet = database.performQuery(sb.toString(),list,null,null); // Scan the set of jobs for (int i = 0; i < jobSet.getRowCount(); i++) { IResultRow row = jobSet.getRow(i); Long jobID = (Long)row.getValue(Jobs.idField); // Now, for each job, fire off a separate, limited, query for each count we care about sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet totalSet = database.performQuery(sb.toString(),list,null,null); if (totalSet.getRowCount() > 0) { long rowCount = ((Long)totalSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set2Hash.put(jobID,new Long(maxCount)); set2Exact.put(jobID,new Boolean(false)); } else { set2Hash.put(jobID,new Long(rowCount)); set2Exact.put(jobID,new Boolean(true)); } } sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" AND "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet outstandingSet = database.performQuery(sb.toString(),list,null,null); if (outstandingSet.getRowCount() > 0) { long rowCount = ((Long)outstandingSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set3Hash.put(jobID,new Long(maxCount)); set3Exact.put(jobID,new Boolean(false)); } else { set3Hash.put(jobID,new Long(rowCount)); set3Exact.put(jobID,new Boolean(true)); } } sb = new StringBuilder("SELECT "); list.clear(); sb.append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" WHERE "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{new UnitaryClause(JobQueue.jobIDField,jobID)})); sb.append(" AND "); sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()})); sb.append(" ").append(database.constructOffsetLimitClause(0,maxCount+1,false)); IResultSet processedSet = database.performQuery(sb.toString(),list,null,null); if (processedSet.getRowCount() > 0) { long rowCount = ((Long)processedSet.getRow(0).getValue("doccount")).longValue(); if (rowCount > maxCount) { set4Hash.put(jobID,new Long(maxCount)); set4Exact.put(jobID,new Boolean(false)); } else { set4Hash.put(jobID,new Long(rowCount)); set4Exact.put(jobID,new Boolean(true)); } } } } protected void buildCountsUsingGroupBy(String whereClause, ArrayList whereParams, Map<Long,Long> set2Hash, Map<Long,Long> set3Hash, Map<Long,Long> set4Hash, Map<Long,Boolean> set2Exact, Map<Long,Boolean> set3Exact, Map<Long,Boolean> set4Exact) throws ManifoldCFException { StringBuilder sb = new 
StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1"); addWhereClause(sb,list,whereClause,whereParams,false); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set2 = database.performQuery(sb.toString(),list,null,null); sb = new StringBuilder("SELECT "); list.clear(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildOutstandingClause()})); addWhereClause(sb,list,whereClause,whereParams,true); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set3 = database.performQuery(sb.toString(),list,null,null); sb = new StringBuilder("SELECT "); list.clear(); sb.append(JobQueue.jobIDField).append(",") .append(database.constructCountClause(JobQueue.docHashField)).append(" AS doccount") .append(" FROM ").append(jobQueue.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{buildProcessedClause()})); addWhereClause(sb,list,whereClause,whereParams,true); sb.append(" GROUP BY ").append(JobQueue.jobIDField); IResultSet set4 = database.performQuery(sb.toString(),list,null,null); for (int j = 0; j < set2.getRowCount(); j++) { IResultRow row = set2.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set2Hash.put(jobID,(Long)row.getValue("doccount")); set2Exact.put(jobID,new Boolean(true)); } for (int j = 0; j < set3.getRowCount(); j++) { IResultRow row = set3.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set3Hash.put(jobID,(Long)row.getValue("doccount")); set3Exact.put(jobID,new Boolean(true)); } for (int j = 0; j < set4.getRowCount(); j++) { IResultRow row = set4.getRow(j); Long jobID = (Long)row.getValue(JobQueue.jobIDField); set4Hash.put(jobID,(Long)row.getValue("doccount")); set4Exact.put(jobID,new Boolean(true)); } } protected void addWhereClause(StringBuilder sb, ArrayList list, String whereClause, ArrayList whereParams, boolean wherePresent) { if (whereClause != null) { if (wherePresent) sb.append(" AND"); else sb.append(" WHERE"); sb.append(" EXISTS(SELECT 'x' FROM ").append(jobs.getTableName()).append(" t0 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t0."+Jobs.idField,"t1."+JobQueue.jobIDField)})).append(" AND ") .append(whereClause) .append(")"); if (whereParams != null) list.addAll(whereParams); } } // These methods generate reports for direct display in the UI. /** Run a 'document status' report. *@param connectionName is the name of the connection. *@param filterCriteria are the criteria used to limit the records considered for the report. *@param sortOrder is the specified sort order of the final report. *@param startRow is the first row to include. *@param rowCount is the number of rows to include. *@return the results, with the following columns: identifier, job, state, status, scheduled, action, retrycount, retrylimit. The "scheduled" column and the * "retrylimit" column are long values representing a time; all other values will be user-friendly strings. 
*/ public IResultSet genDocumentStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder, int startRow, int rowCount) throws ManifoldCFException { // Build the query. Long currentTime = new Long(System.currentTimeMillis()); StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append("t0.").append(jobQueue.idField).append(" AS id,") .append("t0.").append(jobQueue.docIDField).append(" AS identifier,") .append("t1.").append(jobs.descriptionField).append(" AS job,") .append("CASE") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Not yet processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Processed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Being removed'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append("=? THEN 'Out of scope'") .append(" ELSE 'Unknown'") .append(" END AS state,") .append("CASE") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 'Inactive'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 'Ready for processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 'Ready for expiration'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 'Waiting for processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" AND t0.").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 'Waiting for expiration'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append("t0.").append(jobQueue.checkTimeField).append(" IS NULL") .append(" THEN 'Waiting forever'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append("=?") .append(" THEN 'Hopcount exceeded'") .append(" WHEN ").append("t0.").append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 
'Deleting'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" THEN 'Processing'") .append(" WHEN ") .append("t0.").append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append("t0.").append(jobQueue.checkActionField).append("=?") .append(" THEN 'Expiring'") .append(" ELSE 'Unknown'") .append(" END AS status,") .append("t0.").append(jobQueue.checkTimeField).append(" AS scheduled,") .append("CASE") .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Process'") .append(" WHEN ").append("t0.").append(jobQueue.checkActionField).append("=? THEN 'Expire'") .append(" ELSE 'Unknown'") .append(" END AS action,") .append("t0.").append(jobQueue.failCountField).append(" AS retrycount,") .append("t0.").append(jobQueue.failTimeField).append(" AS retrylimit") .append(" FROM ").append(jobQueue.getTableName()).append(" t0,").append(jobs.getTableName()).append(" t1 WHERE ") .append(database.buildConjunctionClause(list,new ClauseDescription[]{ new JoinClause("t0."+jobQueue.jobIDField,"t1."+jobs.idField)})); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); 
list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); addCriteria(sb,list,"t0.",connectionName,filterCriteria,true); // The intrinsic ordering is provided by the "id" column, and nothing else. addOrdering(sb,new String[]{"id"},sortOrder); addLimits(sb,startRow,rowCount); return database.performQuery(sb.toString(),list,null,null,rowCount,null); } /** Run a 'queue status' report. *@param connectionName is the name of the connection. *@param filterCriteria are the criteria used to limit the records considered for the report. *@param sortOrder is the specified sort order of the final report. *@param idBucketDescription is the bucket description for generating the identifier class. *@param startRow is the first row to include. *@param rowCount is the number of rows to include. *@return the results, with the following columns: idbucket, inactive, processing, expiring, deleting, processready, expireready, processwaiting, expirewaiting */ public IResultSet genQueueStatus(String connectionName, StatusFilterCriteria filterCriteria, SortOrder sortOrder, BucketDescription idBucketDescription, int startRow, int rowCount) throws ManifoldCFException { // SELECT substring(docid FROM '<id_regexp>') AS idbucket, // substring(entityidentifier FROM '<id_regexp>') AS idbucket, // SUM(CASE WHEN status='C' then 1 else 0 end)) AS inactive FROM jobqueue WHERE <criteria> // GROUP BY idbucket Long currentTime = new Long(System.currentTimeMillis()); StringBuilder sb = new StringBuilder("SELECT "); ArrayList list = new ArrayList(); sb.append("t1.idbucket,SUM(t1.inactive) AS inactive,SUM(t1.processing) AS processing,SUM(t1.expiring) AS expiring,SUM(t1.deleting) AS deleting,") .append("SUM(t1.processready) AS processready,SUM(t1.expireready) AS expireready,SUM(t1.processwaiting) AS processwaiting,SUM(t1.expirewaiting) AS expirewaiting,") .append("SUM(t1.waitingforever) AS waitingforever,SUM(t1.hopcountexceeded) AS hopcountexceeded FROM (SELECT "); addBucketExtract(sb,list,"",jobQueue.docIDField,idBucketDescription); sb.append(" AS idbucket,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 1 ELSE 0") .append(" END") .append(" AS inactive,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as processing,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as expiring,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?,?)") .append(" THEN 1 ELSE 0") .append(" END") .append(" as deleting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" 
AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as processready,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append("<=").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as expireready,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as processwaiting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkActionField).append("=?") .append(" AND ").append(jobQueue.checkTimeField).append(">").append(currentTime.toString()) .append(" THEN 1 ELSE 0") .append(" END") .append(" as expirewaiting,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append(" IN (?,?)") .append(" AND ").append(jobQueue.checkTimeField).append(" IS NULL") .append(" THEN 1 ELSE 0") .append(" END") .append(" as waitingforever,") .append("CASE") .append(" WHEN ") .append(jobQueue.statusField).append("=?") .append(" THEN 1 ELSE 0") .append(" END") .append(" as hopcountexceeded"); sb.append(" FROM ").append(jobQueue.getTableName()); list.add(jobQueue.statusToString(jobQueue.STATUS_COMPLETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_UNCHANGED)); list.add(jobQueue.statusToString(jobQueue.STATUS_PURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED)); list.add(jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED)); list.add(jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_RESCAN)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.actionToString(jobQueue.ACTION_REMOVE)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDING)); list.add(jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)); list.add(jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)); 
addCriteria(sb,list,"",connectionName,filterCriteria,false); sb.append(") t1 GROUP BY idbucket"); addOrdering(sb,new String[]{"idbucket","inactive","processing","expiring","deleting","processready","expireready","processwaiting","expirewaiting","waitingforever","hopcountexceeded"},sortOrder); addLimits(sb,startRow,rowCount); return database.performQuery(sb.toString(),list,null,null,rowCount,null); } // Protected methods for report generation /** Turn a bucket description into a return column. * This is complicated by the fact that the extraction code is inherently case sensitive. So if case insensitive is * desired, that means we whack the whole thing to lower case before doing the match. */ protected void addBucketExtract(StringBuilder sb, ArrayList list, String columnPrefix, String columnName, BucketDescription bucketDesc) { boolean isSensitive = bucketDesc.isSensitive(); list.add(bucketDesc.getRegexp()); sb.append(database.constructSubstringClause(columnPrefix+columnName,"?",!isSensitive)); } /** Add criteria clauses to query. */ protected boolean addCriteria(StringBuilder sb, ArrayList list, String fieldPrefix, String connectionName, StatusFilterCriteria criteria, boolean whereEmitted) throws ManifoldCFException { Long[] matchingJobs = criteria.getJobs(); if (matchingJobs != null) { whereEmitted = emitClauseStart(sb,whereEmitted); if (matchingJobs.length == 0) { sb.append("0>1"); } else { sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.jobIDField,matchingJobs)})); } } RegExpCriteria identifierRegexp = criteria.getIdentifierMatch(); if (identifierRegexp != null) { whereEmitted = emitClauseStart(sb,whereEmitted); list.add(identifierRegexp.getRegexpString()); sb.append(database.constructRegexpClause(fieldPrefix+jobQueue.docIDField,"?",identifierRegexp.isInsensitive())); } Long nowTime = new Long(criteria.getNowTime()); int[] states = criteria.getMatchingStates(); int[] statuses = criteria.getMatchingStatuses(); if (states.length == 0 || statuses.length == 0) { whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("0>1"); return whereEmitted; } // Iterate through the specified states, and emit a series of OR clauses, one for each state. The contents of the clause will be complex. 
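// The emitted clause is one parenthesized group of OR'd disjuncts, e.g. (status IN (...) OR status IN (...)), with one disjunct per requested state.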
whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("("); int k = 0; while (k < states.length) { int stateValue = states[k]; if (k > 0) sb.append(" OR "); switch (stateValue) { case DOCSTATE_NEVERPROCESSED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN)})})); break; case DOCSTATE_PREVIOUSLYPROCESSED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE), jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED), jobQueue.statusToString(jobQueue.STATUS_COMPLETE), jobQueue.statusToString(jobQueue.STATUS_UNCHANGED), jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})})); break; case DOCSTATE_OUTOFSCOPE: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})})); break; } k++; } sb.append(")"); whereEmitted = emitClauseStart(sb,whereEmitted); sb.append("("); k = 0; while (k < statuses.length) { int stateValue = statuses[k]; if (k > 0) sb.append(" OR "); switch (stateValue) { case DOCSTATUS_INACTIVE: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_COMPLETE), jobQueue.statusToString(jobQueue.STATUS_UNCHANGED), jobQueue.statusToString(jobQueue.STATUS_PURGATORY)})})); break; case DOCSTATUS_PROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN))})); break; case DOCSTATUS_EXPIRING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_ACTIVE), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCAN), jobQueue.statusToString(jobQueue.STATUS_ACTIVEPURGATORY), jobQueue.statusToString(jobQueue.STATUS_ACTIVENEEDRESCANPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE))})); break; case DOCSTATUS_DELETING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_BEINGDELETED), jobQueue.statusToString(jobQueue.STATUS_BEINGCLEANED), jobQueue.statusToString(jobQueue.STATUS_ELIGIBLEFORDELETE)})})); break; case DOCSTATUS_READYFORPROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ 
jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)})); break; case DOCSTATUS_READYFOREXPIRATION: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,"<=",nowTime)})); break; case DOCSTATUS_WAITINGFORPROCESSING: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_RESCAN)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)})); break; case DOCSTATUS_WAITINGFOREXPIRATION: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)}), new UnitaryClause(fieldPrefix+jobQueue.checkActionField,jobQueue.actionToString(jobQueue.ACTION_REMOVE)), new UnitaryClause(fieldPrefix+jobQueue.checkTimeField,">",nowTime)})); break; case DOCSTATUS_WAITINGFOREVER: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_PENDING), jobQueue.statusToString(jobQueue.STATUS_PENDINGPURGATORY)})})) .append(" AND ").append(fieldPrefix).append(jobQueue.checkTimeField).append(" IS NULL"); break; case DOCSTATUS_HOPCOUNTEXCEEDED: sb.append(database.buildConjunctionClause(list,new ClauseDescription[]{ new MultiClause(fieldPrefix+jobQueue.statusField,new Object[]{ jobQueue.statusToString(jobQueue.STATUS_HOPCOUNTREMOVED)})})); break; } k++; } sb.append(")"); return whereEmitted; } /** Emit a WHERE or an AND, depending... */ protected boolean emitClauseStart(StringBuilder sb, boolean whereEmitted) { if (whereEmitted) sb.append(" AND "); else sb.append(" WHERE "); return true; } /** Add ordering. */ protected void addOrdering(StringBuilder sb, String[] completeFieldList, SortOrder sort) { // Keep track of the fields we've seen Map hash = new HashMap(); // Emit the "Order by" sb.append(" ORDER BY "); // Go through the specified list int i = 0; int count = sort.getCount(); while (i < count) { if (i > 0) sb.append(","); String column = sort.getColumn(i); sb.append(column); if (sort.getDirection(i) == sort.SORT_ASCENDING) sb.append(" ASC"); else sb.append(" DESC"); hash.put(column,column); i++; } // Now, go through the complete field list, and emit sort criteria for everything // not actually specified. This is so LIMIT and OFFSET give consistent results. int j = 0; while (j < completeFieldList.length) { String field = completeFieldList[j]; if (hash.get(field) == null) { if (i > 0) sb.append(","); sb.append(field); sb.append(" DESC"); //if (j == 0) // sb.append(" DESC"); //else // sb.append(" ASC"); i++; } j++; } } /** Add limit and offset. 
*/ protected void addLimits(StringBuilder sb, int startRow, int maxRowCount) { sb.append(" ").append(database.constructOffsetLimitClause(startRow,maxRowCount)); } /** Class for tracking existing jobqueue row data */ protected static class JobqueueRecord { protected Long recordID; protected int status; protected Long checkTimeValue; public JobqueueRecord(Long recordID, int status, Long checkTimeValue) { this.recordID = recordID; this.status = status; this.checkTimeValue = checkTimeValue; } public Long getRecordID() { return recordID; } public int getStatus() { return status; } public Long getCheckTimeValue() { return checkTimeValue; } } /** We go through 2x the number of documents we should need if we were perfect at setting document priorities. */ private static int EXTRA_FACTOR = 2; /** This class provides the throttling limits for the job queueing query. */ protected static class ThrottleLimit implements ILimitChecker { // For each connection, there is (a) a number (which is the maximum per bin), and (b) // a current running count per bin. These are stored as elements in a hash map. protected HashMap connectionMap = new HashMap(); // The maximum number of jobs that have reached their chunk size limit that we // need protected int n; // This is the hash table that maps a job ID to the object that tracks the number // of documents already accumulated for this resultset. The count of the number // of queue records we have is tallied by going through each job in this table // and adding the records outstanding for it. protected HashMap jobQueueHash = new HashMap(); // This is the map from jobid to connection name protected HashMap jobConnection = new HashMap(); // This is the set of allowed connection names. We discard all documents that are // not from that set. protected HashMap activeConnections = new HashMap(); // This is the number of documents per set per connection. protected HashMap setSizes = new HashMap(); // These are the individual connection maximums, keyed by connection name. protected HashMap maxConnectionCounts = new HashMap(); // This is the maximum number of documents per set over all the connections we are looking at. This helps us establish a sanity limit. protected int maxSetSize = 0; // This is the number of documents processed so far protected int documentsProcessed = 0; // This is where we accumulate blocking documents. This is an arraylist of DocumentDescription objects. protected ArrayList blockingDocumentArray = new ArrayList(); // Cutoff time for documents eligible for prioritization protected long prioritizationTime; /** Constructor. * This class is built up piecemeal, so the constructor does nothing. *@param n is the maximum number of full job descriptions we want at this time. */ public ThrottleLimit(int n, long prioritizationTime) { this.n = n; this.prioritizationTime = prioritizationTime; Logging.perf.debug("Limit instance created"); } /** Transfer blocking documents discovered to BlockingDocuments object */ public void tallyBlockingDocuments(BlockingDocuments blockingDocuments) { int i = 0; while (i < blockingDocumentArray.size()) { DocumentDescription dd = (DocumentDescription)blockingDocumentArray.get(i++); blockingDocuments.addBlockingDocument(dd); } blockingDocumentArray.clear(); } /** Add a job/connection name map entry. *@param jobID is the job id. *@param connectionName is the connection name. */ public void addJob(Long jobID, String connectionName) { jobConnection.put(jobID,connectionName); } /** Add an active connection. 
This is the pool of active connections that will be used for the lifetime of this operation. *@param connectionName is the connection name. */ public void addConnectionName(String connectionName, IRepositoryConnector connectorInstance) throws ManifoldCFException { activeConnections.put(connectionName,connectorInstance); int setSize = connectorInstance.getMaxDocumentRequest(); setSizes.put(connectionName,new Integer(setSize)); if (setSize > maxSetSize) maxSetSize = setSize; } /** Add a document limit for a specified connection. This is the limit across all matching bins; if any * individual matching bin exceeds that limit, then documents that belong to that bin will be excluded. *@param connectionName is the connection name. *@param regexp is the regular expression, which we will match against various bins. *@param upperLimit is the maximum count associated with the specified job. */ public void addLimit(String connectionName, String regexp, int upperLimit) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Adding fetch limit of "+Integer.toString(upperLimit)+" fetches for expression '"+regexp+"' for connection '"+connectionName+"'"); ThrottleJobItem ji = (ThrottleJobItem)connectionMap.get(connectionName); if (ji == null) { ji = new ThrottleJobItem(); connectionMap.put(connectionName,ji); } ji.addLimit(regexp,upperLimit); } /** Set a connection-based total document limit. */ public void setConnectionLimit(String connectionName, int maxDocuments) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Setting connection limit of "+Integer.toString(maxDocuments)+" for connection "+connectionName); maxConnectionCounts.put(connectionName,new MutableInteger(maxDocuments)); } /** See if this class can be legitimately compared against another of * the same type. *@return true if comparisons will ever return "true". */ public boolean doesCompareWork() { return false; } /** Create a duplicate of this class instance. All current state should be preserved. * NOTE: Since doesCompareWork() returns false, queries using this limit checker cannot * be cached, and therefore duplicate() is never called from the query executor. But it can * be called from other places. *@return the duplicate. */ public ILimitChecker duplicate() { return makeDeepCopy(); } /** Make a deep copy */ public ThrottleLimit makeDeepCopy() { ThrottleLimit rval = new ThrottleLimit(n,prioritizationTime); // Create a true copy of all the structures in which counts are kept. The referential structures (e.g. connection hashes) // do not need a deep copy. rval.activeConnections = activeConnections; rval.setSizes = setSizes; rval.maxConnectionCounts = maxConnectionCounts; rval.maxSetSize = maxSetSize; rval.jobConnection = jobConnection; // The structures where counts are maintained DO need a deep copy. rval.documentsProcessed = documentsProcessed; Iterator iter; iter = connectionMap.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.connectionMap.put(key,((ThrottleJobItem)connectionMap.get(key)).duplicate()); } iter = jobQueueHash.keySet().iterator(); while (iter.hasNext()) { Object key = iter.next(); rval.jobQueueHash.put(key,((QueueHashItem)jobQueueHash.get(key)).duplicate()); } return rval; } /** Find the hashcode for this class. This will only ever be used if * doesCompareWork() returns true. *@return the hashcode. */ public int hashCode() { return 0; } /** Compare two objects and see if equal. This will only ever be used * if doesCompareWork() returns true. 
*@param object is the object to compare against. *@return true if equal. */ public boolean equals(Object object) { return false; } /** Get the remaining documents we should query for. *@return the maximal remaining count. */ public int getRemainingDocuments() { return EXTRA_FACTOR * n * maxSetSize - documentsProcessed; } /** See if a result row should be included in the final result set. *@param row is the result row to check. *@return true if it should be included, false otherwise. */ public boolean checkInclude(IResultRow row) throws ManifoldCFException { // Note: This method does two things: First, it insures that the number of documents per job per bin does // not exceed the calculated throttle number. Second, it keeps track of how many document queue items // will be needed, so we can stop when we've got enough for the moment. Logging.perf.debug("Checking if row should be included"); // This is the end that does the work. // The row passed in has the following jobqueue columns: idField, jobIDField, docIDField, and statusField Long jobIDValue = (Long)row.getValue(JobQueue.jobIDField); // Get the connection name for this row String connectionName = (String)jobConnection.get(jobIDValue); if (connectionName == null) { Logging.perf.debug(" Row does not have an eligible job - excluding"); return false; } IRepositoryConnector connectorInstance = (IRepositoryConnector)activeConnections.get(connectionName); if (connectorInstance == null) { Logging.perf.debug(" Row does not have an eligible connector instance - excluding"); return false; } // Find the connection limit for this document MutableInteger connectionLimit = (MutableInteger)maxConnectionCounts.get(connectionName); if (connectionLimit != null) { if (connectionLimit.intValue() == 0) { Logging.perf.debug(" Row exceeds its connection limit - excluding"); return false; } connectionLimit.decrement(); } // Tally this item in the job queue hash, so we can detect when to stop QueueHashItem queueItem = (QueueHashItem)jobQueueHash.get(jobIDValue); if (queueItem == null) { // Need to talk to the connector to get a max number of docs per chunk int maxCount = ((Integer)setSizes.get(connectionName)).intValue(); queueItem = new QueueHashItem(maxCount); jobQueueHash.put(jobIDValue,queueItem); } String docIDHash = (String)row.getValue(JobQueue.docHashField); String docID = (String)row.getValue(JobQueue.docIDField); // Figure out what the right bins are, given the data we have. // This will involve a call to the connector. String[] binNames = ManifoldCF.calculateBins(connectorInstance,docID); // Keep the running count, so we can abort without going through the whole set. documentsProcessed++; //scanRecord.addBins(binNames); ThrottleJobItem item = (ThrottleJobItem)connectionMap.get(connectionName); // If there is no schedule-based throttling on this connection, we're done. if (item == null) { queueItem.addDocument(); Logging.perf.debug(" Row has no throttling - including"); return true; } int j = 0; while (j < binNames.length) { if (item.isEmpty(binNames[j])) { if (Logging.perf.isDebugEnabled()) Logging.perf.debug(" Bin "+binNames[j]+" has no more available fetches - excluding"); Object o = row.getValue(JobQueue.prioritySetField); if (o == null || ((Long)o).longValue() <= prioritizationTime) { // Need to add a document descriptor based on this row to the blockingDocuments object! // This will cause it to be reprioritized preferentially, getting it out of the way if it shouldn't // be there. 
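// Rows whose prioritySet value is newer than prioritizationTime were prioritized after the cutoff, and are therefore not treated as blocking documents.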
Long id = (Long)row.getValue(JobQueue.idField); Long jobID = (Long)row.getValue(JobQueue.jobIDField); DocumentDescription dd = new DocumentDescription(id,jobID,docIDHash,docID); blockingDocumentArray.add(dd); } return false; } j++; } j = 0; while (j < binNames.length) { item.decrement(binNames[j++]); } queueItem.addDocument(); Logging.perf.debug(" Including!"); return true; } /** See if we should examine another row. *@return true if we need to keep going, or false if we are done. */ public boolean checkContinue() throws ManifoldCFException { if (documentsProcessed >= EXTRA_FACTOR * n * maxSetSize) return false; // If the number of chunks exceeds n, we are done Iterator iter = jobQueueHash.keySet().iterator(); int count = 0; while (iter.hasNext()) { Long jobID = (Long)iter.next(); QueueHashItem item = (QueueHashItem)jobQueueHash.get(jobID); count += item.getChunkCount(); if (count > n) return false; } return true; } } /** This class contains information per job on how many queue items have so far been accumulated. */ protected static class QueueHashItem { // The number of items per chunk for this job int itemsPerChunk; // The number of chunks so far, INCLUDING incomplete chunks int chunkCount = 0; // The number of documents in the current incomplete chunk int currentDocumentCount = 0; /** Construct. *@param itemsPerChunk is the number of items per chunk for this job. */ public QueueHashItem(int itemsPerChunk) { this.itemsPerChunk = itemsPerChunk; } /** Duplicate. */ public QueueHashItem duplicate() { QueueHashItem rval = new QueueHashItem(itemsPerChunk); rval.chunkCount = chunkCount; rval.currentDocumentCount = currentDocumentCount; return rval; } /** Add a document to this job. */ public void addDocument() { currentDocumentCount++; if (currentDocumentCount == 1) chunkCount++; if (currentDocumentCount == itemsPerChunk) currentDocumentCount = 0; } /** Get the number of chunks. *@return the number of chunks. */ public int getChunkCount() { return chunkCount; } } /** This class represents the information stored PER CONNECTION in the throttling structure (instances are keyed by connection name in connectionMap). * In this structure, "remaining" counts are kept for each bin. When the bin becomes empty, * then no more documents that would map to that bin will be returned, for this query. * * The way in which the maximum count per bin is determined is not part of this class. */ protected static class ThrottleJobItem { /** These are the bin limits. This is an array of ThrottleLimitSpec objects. */ protected ArrayList throttleLimits = new ArrayList(); /** This is a map of the bins and their current counts. If an entry doesn't exist, it's considered to be * the same as the maximum count for that bin (as computed by findMaxCount()). */ protected HashMap binCounts = new HashMap(); /** Constructor. */ public ThrottleJobItem() { } /** Add a bin limit. *@param regexp is the regular expression describing the bins to which the limit applies. *@param maxCount is the maximum number of fetches allowed for that bin. */ public void addLimit(String regexp, int maxCount) { try { throttleLimits.add(new ThrottleLimitSpec(regexp,maxCount)); } catch (PatternSyntaxException e) { // Ignore the bad entry; it just won't contribute any throttling. } } /** Create a duplicate of this item. *@return the duplicate.
*/ public ThrottleJobItem duplicate() { ThrottleJobItem rval = new ThrottleJobItem(); rval.throttleLimits = throttleLimits; // Copy the per-bin counts into the duplicate; the ThrottleLimitSpec list itself is shared, since specs are never modified. Iterator iter = binCounts.keySet().iterator(); while (iter.hasNext()) { String key = (String)iter.next(); rval.binCounts.put(key,((MutableInteger)binCounts.get(key)).duplicate()); } return rval; } /** Check if the specified bin is empty. *@param binName is the bin name. *@return true if empty. */ public boolean isEmpty(String binName) { MutableInteger value = (MutableInteger)binCounts.get(binName); int remaining; if (value == null) { int x = findMaxCount(binName); if (x == -1) return false; remaining = x; } else remaining = value.intValue(); return (remaining == 0); } /** Decrement specified bin. *@param binName is the bin name. */ public void decrement(String binName) { MutableInteger value = (MutableInteger)binCounts.get(binName); if (value == null) { int x = findMaxCount(binName); if (x == -1) return; value = new MutableInteger(x); binCounts.put(binName,value); } value.decrement(); } /** Given a bin name, find the max value for it using the regexps that are in place. *@param binName is the bin name. *@return the max count for that bin, or -1 if infinite. */ protected int findMaxCount(String binName) { // Each connector generates a set of bins per descriptor, e.g. "", ".com", ".metacarta.com", "foo.metacarta.com" // // We want to be able to do a couple of different kinds of things easily. For example, we want to: // - be able to "turn off" or restrict fetching for a given domain, to a lower value than for other domains // - be able to control fetch rates of .com, .metacarta.com, and foo.metacarta.com such that we // can establish a faster rate for .com than for foo.metacarta.com // // The standard case is to limit fetch rate for all terminal domains (e.g. foo.metacarta.com) to some number: // ^[^\.] = 8 // // To apply an additional limit restriction on a specific domain easily requires that the MINIMUM rate // value be chosen when more than one regexp match is found: // ^[^\.] = 8 // ^foo\.metacarta\.com = 4 // // To apply different rates for different levels: // ^[^\.] = 8 // ^\.[^\.]*\.[^\.]*$ = 20 // ^\.[^\.]*$ = 40 // // If the same bin is matched by more than one regexp, I now take the MINIMUM value, since this seems to be // more what the world wants to do (restrict, rather than increase, fetch rates). int maxCount = -1; int i = 0; while (i < throttleLimits.size()) { ThrottleLimitSpec spec = (ThrottleLimitSpec)throttleLimits.get(i++); Pattern p = spec.getRegexp(); Matcher m = p.matcher(binName); if (m.find()) { int limit = spec.getMaxCount(); if (maxCount == -1 || limit < maxCount) maxCount = limit; } } return maxCount; } } /** This is a class which describes an individual throttle limit, in fetches. */ protected static class ThrottleLimitSpec { /** Regexp */ protected Pattern regexp; /** The fetch limit for all bins matching that regexp */ protected int maxCount; /** Constructor */ public ThrottleLimitSpec(String regexp, int maxCount) throws PatternSyntaxException { this.regexp = Pattern.compile(regexp); this.maxCount = maxCount; } /** Get the regexp. */ public Pattern getRegexp() { return regexp; } /** Get the max count */ public int getMaxCount() { return maxCount; } } /** Mutable integer class. */ protected static class MutableInteger { int value; /** Construct. */ public MutableInteger(int value) { this.value = value; } /** Duplicate */ public MutableInteger duplicate() { return new MutableInteger(value); } /** Decrement.
*/ public void decrement() { value--; } /** Increment. */ public void increment() { value++; } /** Get value. */ public int intValue() { return value; } } }
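The findMaxCount() comment above spells out a minimum-wins rule: when several regexps match the same bin, the smallest limit applies, and -1 means unthrottled. Below is a minimal, self-contained sketch of that rule; the class and method names are illustrative only (not part of ManifoldCF), and the limits reuse the examples from that comment.

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

public class BinLimitSketch {

  // One (regexp, limit) pair, mirroring ThrottleLimitSpec above.
  static final class LimitSpec {
    final Pattern regexp;
    final int maxCount;
    LimitSpec(String regexp, int maxCount) {
      this.regexp = Pattern.compile(regexp);
      this.maxCount = maxCount;
    }
  }

  // Same rule as findMaxCount(): the MINIMUM limit over all matching regexps wins; -1 means no limit.
  static int findMaxCount(List<LimitSpec> specs, String binName) {
    int maxCount = -1;
    for (LimitSpec spec : specs) {
      if (spec.regexp.matcher(binName).find()) {
        int limit = spec.maxCount;
        if (maxCount == -1 || limit < maxCount)
          maxCount = limit;
      }
    }
    return maxCount;
  }

  public static void main(String[] args) {
    List<LimitSpec> specs = new ArrayList<LimitSpec>();
    specs.add(new LimitSpec("^[^\\.]", 8));                // all terminal domains
    specs.add(new LimitSpec("^foo\\.metacarta\\.com", 4)); // tighter limit for one specific domain
    System.out.println(findMaxCount(specs, "bar.metacarta.com")); // prints 8
    System.out.println(findMaxCount(specs, "foo.metacarta.com")); // prints 4 (minimum wins)
    System.out.println(findMaxCount(specs, ".com"));              // prints -1 (no match, so unthrottled)
  }
}

Taking the minimum rather than the maximum means a narrow regexp can only tighten, never loosen, a broader limit, which matches the rationale given in the original comment.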
diff --git a/spring-datastore-redis/src/main/java/org/springframework/datastore/mapping/redis/util/JedisTemplate.java b/spring-datastore-redis/src/main/java/org/springframework/datastore/mapping/redis/util/JedisTemplate.java index 846ce7bb..6864591c 100644 --- a/spring-datastore-redis/src/main/java/org/springframework/datastore/mapping/redis/util/JedisTemplate.java +++ b/spring-datastore-redis/src/main/java/org/springframework/datastore/mapping/redis/util/JedisTemplate.java @@ -1,1252 +1,1250 @@ /* Copyright (C) 2010 SpringSource * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.datastore.mapping.redis.util; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.transaction.NoTransactionException; import redis.clients.jedis.*; import redis.clients.jedis.exceptions.JedisConnectionException; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Set; /** * A Spring-style template for querying Redis and translating * Jedis exceptions into Spring exceptions * * @author Graeme Rocher * @since 1.0 */ @SuppressWarnings("hiding") public class JedisTemplate implements RedisTemplate<Jedis, SortingParams> { private String password; private boolean authenticated; private Jedis redis; private Transaction transaction; private JedisPool pool; public static final String QUEUED = "QUEUED"; private PipelineBlock pipeline; private String host = "localhost"; private int port; private int timeout = 2000; public JedisTemplate(String host, int port, int timeout) { this.host = host; this.port = port; this.timeout = timeout; } public JedisTemplate(Jedis jedis) throws IOException { this.redis = jedis; jedis.connect(); } @Override public boolean append(final String key, final Object val) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { String result = transaction.append(key, val.toString()); return result != null && result.equals(QUEUED); } if (pipeline != null) { pipeline.append(key, val.toString()); return false; } return redis.append(key, val.toString()) > 0; } }); } @Override public List<String> blpop(final int timeout, final String... keys) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.blpop(timeout, keys); } }); } @Override public List<String> brpop(final int timeout, final String... 
keys) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.brpop(timeout, keys); } }); } @Override public boolean decr(final String key) { return (Boolean) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.decr(key) > 0; } }); } @Override public boolean decrby(final String key, final int amount) { return (Boolean) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.decrBy(key, amount) > 0; } }); } public List<Object> pipeline(final RedisCallback<RedisTemplate<Jedis, SortingParams>> pipeline) { return (List<Object>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { return redis.pipelined(new PipelineBlock(){ @Override public void execute() { try { JedisTemplate.this.pipeline = this; pipeline.doInRedis(JedisTemplate.this); } catch (IOException e) { - JedisTemplate.this.pipeline.disconnect(); throw new DataAccessResourceFailureException("I/O exception thrown connecting to Redis: " + e.getMessage(), e); } catch (RuntimeException e) { - JedisTemplate.this.pipeline.disconnect(); throw e; } finally { JedisTemplate.this.pipeline = null; } } }); } }); } @Override public boolean persist(final String redisKey) { return (Boolean) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.persist(redisKey) > 0; } }); } public JedisTemplate(JedisPool pool) { this.pool = pool; } public JedisTemplate(JedisPool pool, int timeout) { this.timeout = timeout; this.pool = pool; } public Object execute(RedisCallback<Jedis> jedisRedisCallback) { try { if (redis == null) { redis = getNewConnection(); } if (password != null && !authenticated) { try { redis.auth(password); authenticated = true; } catch (Exception e) { throw new DataAccessResourceFailureException("I/O exception authenticating with Redis: " + e.getMessage(), e); } } return jedisRedisCallback.doInRedis(redis); } catch (IOException e) { throw new DataAccessResourceFailureException("I/O exception thrown connecting to Redis: " + e.getMessage(), e); } } protected Jedis getNewConnection() { Jedis jedis; if (pool == null) { jedis = new Jedis(host, port, timeout); } else { try { jedis = pool.getResource(); } catch (JedisConnectionException e) { throw new DataAccessResourceFailureException("Connection timeout getting Jedis connection from pool: " + e.getMessage(), e); } } try { jedis.connect(); } catch (JedisConnectionException e) { throw new DataAccessResourceFailureException("Connection failure connecting to Redis: " + e.getMessage(), e); } return jedis; } public SortParams sortParams() { return new JedisSortParams(); } public void save() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.save(); } else { redis.save(); } return null; } }); } public void bgsave() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.save(); } else { redis.bgsave(); } return null; } }); } public boolean sismember(final String redisKey, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.sismember(redisKey, o.toString()); return false; } return redis.sismember(redisKey, o.toString()); } }); } public void del(final String redisKey) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.del(redisKey); } else 
{ if (pipeline != null) { pipeline.del(redisKey); } else { redis.del(redisKey); } } return null; } }); } public long scard(final String redisKey) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { redis.scard(redisKey); return 0; } return redis.scard(redisKey); } }); } public boolean sadd(final String redisKey, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { String result = transaction.sadd(redisKey, o.toString()); return result != null && result.equals(QUEUED); } if (pipeline != null) { pipeline.sadd(redisKey, o.toString()); return false; } return redis.sadd(redisKey, o.toString()) > 0; } }); } public boolean srem(final String redisKey, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { String result = transaction.srem(redisKey, o.toString()); return result != null && result.equals(QUEUED); } if (pipeline != null) { pipeline.srem(redisKey, o.toString()); return false; } return redis.srem(redisKey, o.toString()) > 0; } }); } public Set<String> smembers(final String redisKey) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.smembers(redisKey); } }); } public void lset(final String redisKey, final int index, final Object o) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.lset(redisKey, index, o.toString()); } else { if (pipeline != null) { pipeline.lset(redisKey, index, o.toString()); } else { redis.lset(redisKey, index, o.toString()); } } return null; } }); } @Override public void ltrim(final String redisKey, final int start, final int end) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.ltrim(redisKey, start, end); } else { if (pipeline != null) { pipeline.ltrim(redisKey, start, end); } else { redis.ltrim(redisKey, start, end); } } return null; } }); } public String lindex(final String redisKey, final int index) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.lindex(redisKey, index); return null; } return redis.lindex(redisKey, index); } }); } public long llen(final String redisKey) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.llen(redisKey); return 0; } return redis.llen(redisKey); } }); } public List<String> lrange(final String redisKey, final int start, final int end) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.lrange(redisKey, start, end); return null; } return redis.lrange(redisKey, start, end); } }); } @Override public String rename(final String old, final String newKey) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.rename(old, newKey); } else if(pipeline != null) { pipeline.rename(old, newKey); } else { return redis.rename(old, newKey); } return null; } }); } @Override public String rpop(final String redisKey) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.rpop(redisKey); } else if(pipeline != null) { pipeline.rpop(redisKey); } else { 
return redis.rpop(redisKey); } return null; } }); } public void rpush(final String redisKey, final Object o) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.rpush(redisKey, o.toString()); } else { if (pipeline != null) { pipeline.rpush(redisKey, o.toString()); } else { redis.rpush(redisKey, o.toString()); } } return null; } }); } public long lrem(final String redisKey, final Object o, final int count) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.lrem(redisKey, count, o.toString()); return 0; } if (pipeline != null) { pipeline.lrem(redisKey, count, o.toString()); return 0; } return redis.lrem(redisKey, count, o.toString()); } }); } public void flushdb() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { redis.flushDB(); return null; } }); } public void flushall() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { redis.flushAll(); return null; } }); } public void select(final int index) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { redis.select(index); return null; } }); } public long dbsize() { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.dbSize(); } }); } public void lpush(final String redisKey, final Object o) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.lpush(redisKey, o.toString()); } else { if (pipeline != null) { pipeline.lpush(redisKey, o.toString()); } else { redis.lpush(redisKey, o.toString()); } } return null; } }); } @Override public void lpop(final String redisKey) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.lpop(redisKey); } else { if (pipeline != null) { pipeline.lpop(redisKey); } else { redis.lpop(redisKey); } } return null; } }); } public String hget(final String redisKey, final String entryKey) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hget(redisKey, entryKey); return null; } return redis.hget(redisKey, entryKey); } }); } public long hlen(final String redisKey) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hlen(redisKey); return 0; } return redis.hlen(redisKey); } }); } @Override public List<String> hkeys(final String redisKey) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hkeys(redisKey); return null; } return redis.hkeys(redisKey); } }); } public boolean hset(final String redisKey, final String key, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.hset(redisKey, key, o.toString()).equals(QUEUED); } if (pipeline != null) { pipeline.hset(redisKey, key, o.toString()); return false; } return redis.hset(redisKey, key, o.toString()) > 0; } }); } @Override public boolean hsetnx(final String redisKey, final String key, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.hsetnx(redisKey, key, o.toString()).equals(QUEUED); } if (pipeline != null) { pipeline.hsetnx(redisKey, key, 
o.toString()); return false; } return redis.hsetnx(redisKey, key, o.toString()) > 0; } }); } @Override public List<String> hvals(final String redisKey) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hvals(redisKey); return null; } return redis.hvals(redisKey); } }); } public boolean hdel(final String redisKey, final String entryKey) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.hdel(redisKey, entryKey).equals(QUEUED); } if (pipeline != null) { pipeline.hdel(redisKey, entryKey); return false; } return redis.hdel(redisKey, entryKey) > 0; } }); } @Override public boolean hexists(final String redisKey, final String entryKey) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.hexists(redisKey, entryKey).equals(QUEUED); } if (pipeline != null) { pipeline.hexists(redisKey, entryKey); return false; } return redis.hexists(redisKey, entryKey); } }); } public Map<String, String> hgetall(final String redisKey) { return (Map<String, String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hgetAll(redisKey); return null; } return redis.hgetAll(redisKey); } }); } @Override public boolean hincrby(final String redisKey, final String entryKey, final int amount) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.hincrBy(redisKey, entryKey, amount).equals(QUEUED); } if (pipeline != null) { pipeline.hincrBy(redisKey, entryKey, amount); return false; } return redis.hincrBy(redisKey, entryKey, amount) > 0; } }); } public List<String> hmget(final String hashKey, final String... fields) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (pipeline != null) { pipeline.hmget(hashKey, fields); return null; } return redis.hmget(hashKey, fields); } }); } public void hmset(final String key, final Map<String, String> nativeEntry) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.hmset(key, nativeEntry); } else { if (pipeline != null) { pipeline.hmset(key, nativeEntry); return null; } redis.hmset(key, nativeEntry); } return null; } }); } public long incr(final String key) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { if (transaction != null) { redis = getNewConnection(); try { return redis.incr(key); } finally { redis.disconnect(); } } return redis.incr(key); } }); } @Override public long incrby(final String key, final int amount) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { if (transaction != null) { redis = getNewConnection(); try { return redis.incrBy(key, amount); } finally { redis.disconnect(); } } return redis.incrBy(key, amount); } }); } public long del(final String... 
redisKey) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.del(redisKey); return 0; } return redis.del(redisKey); } }); } public Set<String> sinter(final String...keys) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.sinter(keys); } }); } public Set<String> sunion(final String... keys) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.sunion(keys); } }); } public void sinterstore(final String storeKey, final String... keys) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.sinterstore(storeKey, keys); } else { redis.sinterstore(storeKey, keys); } return null; } }); } public void sunionstore(final String storeKey, final String... keys) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.sunionstore(storeKey, keys); } else { redis.sunionstore(storeKey, keys); } return null; } }); } @Override public Set<String> sdiff(final String... keys) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.sdiff(keys); } }); } @Override public boolean smove(final String source, final String destination, final String member) { return (Boolean) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.smove(source, destination, member); } else { if (pipeline != null) { pipeline.smove(source, destination, member); return null; } return redis.smove(source, destination, member) > 0; } return false; } }); } public void sdiffstore(final String storeKey, final String... 
keys) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.sdiffstore(storeKey, keys); } else { redis.sdiffstore(storeKey, keys); } return null; } }); } public boolean setnx(final String redisKey, final Object o) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.setnx(redisKey, o.toString()).equals(QUEUED); } return redis.setnx(redisKey, o.toString()) > 0; } }); } @Override public long strlen(final String redisKey) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.strlen(redisKey); } }); } public boolean expire(final String key, final int timeout) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { String result = transaction.expire(key,timeout); return result != null && result.equals(QUEUED); } return redis.expire(key,timeout) > 0; } }); } public long ttl(final String key) { return (Long)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.ttl(key); } }); } @Override public String type(final String key) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.type(key); } }); } public String getset(final String redisKey, final Object o) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.getSet(redisKey, o.toString()); } }); } public Set<String> keys(final String pattern) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { if (transaction != null) { redis = getNewConnection(); try { return redis.keys(pattern); } finally { redis.disconnect(); } } return redis.keys(pattern); } }); } public void close() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { if (pool != null) { pool.returnResource(redis); } else { redis.disconnect(); } return null; } }); } public Object multi() { return execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { transaction = redis.multi(); return transaction; } }); } public Jedis getRedisClient() { return redis; } public boolean exists(final String key) { return (Boolean)execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.exists(key); } }); } public String get(final String key) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.get(key); } }); } @Override public List<String> mget(final String... 
keys) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.mget(keys); } }); } public void mset(Map<String, String> map) { final String[] keysAndValues = new String[map.size()*2]; int index = 0; for (String key : map.keySet()) { keysAndValues[index++] = key; keysAndValues[index++] = map.get(key); } execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.mset(keysAndValues); } else { redis.mset(keysAndValues); } return null; } }); } public Object[] exec() { return (Object[]) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { List<Object> results = transaction.exec(); try { return results.toArray(new Object[results.size()]); } finally { transaction = null; } } throw new NoTransactionException("No transaction started. Call multi() first!"); } }); } public void discard() { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { if (transaction != null) { transaction.discard(); transaction = null; redis.disconnect(); JedisTemplate.this.redis = getNewConnection(); } return null; } }); } public boolean zadd(final String key, final double rank, final Object o) { return (Boolean) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { return transaction.zadd(key, rank, o.toString()).equals(QUEUED); } if (pipeline != null) { pipeline.zadd(key, rank, o.toString()); return true; } return redis.zadd(key, rank, o.toString()) > 0; } }); } @Override public long zcount(final String key, final double min, final double max) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zcount(key, min, max); } }); } @Override public double zincrby(final String key, final double score, final String member) { return (Double) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zincrby(key, score, member); } }); } @Override public long zinterstore(final String destKey, final String...keys) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if(pipeline != null) { pipeline.zinterstore(destKey, keys); return 0; } return redis.zinterstore(destKey,keys); } }); } @Override public long zunionstore(final String destKey, final String... 
keys) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if(pipeline != null) { pipeline.zunionstore(destKey, keys); return 0; } return redis.zunionstore(destKey, keys); } }); } @Override public long zcard(final String key) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zcard(key); } }); } public long zrank(final String key, final Object member) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if(pipeline != null) { redis.zrank(key, member.toString()); return 0; } return redis.zrank(key, member.toString()); } }); } @Override public long zrem(final String key, final Object member) { return (Long) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if(pipeline != null) { pipeline.zrem(key, member.toString()); return 0; } return redis.zrem(key, member.toString()); } }); } public Set<String> zrange(final String key, final int fromIndex, final int toIndex) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zrange(key, fromIndex, toIndex); } }); } public Set<String> zrangebyscore(final String sortKey, final double rank1, final double rank2) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zrangeByScore(sortKey, rank1, rank2); } }); } public void set(final String key, final Object value) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.set(key, value.toString()); } else { redis.set(key, value.toString()); } return null; } }); } public void setex(final String key, final Object value, final int timeout) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { if (transaction != null) { transaction.setex(key, timeout, String.valueOf(value)); } else { redis.setex(key, timeout, String.valueOf(value)); } return null; } }); } public Double zscore(final String key, final String member) { return (Double) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zscore(key, member); } }); } public Set<String> zrevrange(final String key, final int start, final int end) { return (Set<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.zrevrange(key, start, end); } }); } public void setPassword(String pass) { this.password = pass; } public String srandmember(final String key) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.srandmember(key); } }); } public String spop(final String key) { return (String) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.spop(key); } }); } public List<String> sort(final String key, final SortParams<SortingParams> params) { return (List<String>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { return redis.sort(key, params.getParamList().get(0)); } }); } public void sortstore(final String key, final String destKey, final SortParams<SortingParams> params) { execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) { redis.sort(key, params.getParamList().get(0), destKey); return null; } }); } private class JedisSortParams extends SortParams<SortingParams> { private SortingParams nativeParams; private JedisSortParams() { this.nativeParams = new SortingParams(); getParamList().add(nativeParams); } 
@Override protected SortingParams createAlpha() { nativeParams.alpha(); return nativeParams; } @Override protected SortingParams createDesc() { nativeParams.desc(); return nativeParams; } @Override protected SortingParams createGet(String pattern) { nativeParams.get(pattern); return nativeParams; } @Override protected SortingParams createLimit(int start, int count) { nativeParams.limit(start, count); return nativeParams; } @Override protected SortingParams createAsc() { nativeParams.asc(); return nativeParams; } @Override protected SortingParams createBy(String pattern) { nativeParams.by(pattern); return nativeParams; } } }
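Throughout the JedisTemplate above, write commands branch three ways on shared state: if a MULTI transaction is active, Redis answers "QUEUED" immediately and the real result only arrives from exec(); if a pipeline is active, the command is fired and a placeholder is returned; otherwise the command runs synchronously. A condensed sketch of that dispatch pattern, using illustrative interfaces rather than the actual Jedis API:

public class DispatchSketch {
    static final String QUEUED = "QUEUED";

    interface Tx   { String sadd(String key, String member); } // queued reply
    interface Pipe { void sadd(String key, String member); }   // no reply yet
    interface Conn { long sadd(String key, String member); }   // immediate reply

    private Tx transaction;
    private Pipe pipeline;

    boolean sadd(Conn conn, String key, String member) {
        if (transaction != null) {
            String reply = transaction.sadd(key, member);
            // "QUEUED" only means the command was accepted, not that it succeeded
            return reply != null && reply.equals(QUEUED);
        }
        if (pipeline != null) {
            pipeline.sadd(key, member);
            return false; // placeholder; real results are read when the pipeline syncs
        }
        return conn.sadd(key, member) > 0;
    }
}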
is_single_chunk: false

is_single_function: true
public List<Object> pipeline(final RedisCallback<RedisTemplate<Jedis, SortingParams>> pipeline) { return (List<Object>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { return redis.pipelined(new PipelineBlock(){ @Override public void execute() { try { JedisTemplate.this.pipeline = this; pipeline.doInRedis(JedisTemplate.this); } catch (IOException e) { JedisTemplate.this.pipeline.disconnect(); throw new DataAccessResourceFailureException("I/O exception thrown connecting to Redis: " + e.getMessage(), e); } catch (RuntimeException e) { JedisTemplate.this.pipeline.disconnect(); throw e; } finally { JedisTemplate.this.pipeline = null; } } }); } }); }
public List<Object> pipeline(final RedisCallback<RedisTemplate<Jedis, SortingParams>> pipeline) { return (List<Object>) execute(new RedisCallback<Jedis>() { public Object doInRedis(Jedis redis) throws IOException { return redis.pipelined(new PipelineBlock(){ @Override public void execute() { try { JedisTemplate.this.pipeline = this; pipeline.doInRedis(JedisTemplate.this); } catch (IOException e) { throw new DataAccessResourceFailureException("I/O exception thrown connecting to Redis: " + e.getMessage(), e); } catch (RuntimeException e) { throw e; } finally { JedisTemplate.this.pipeline = null; } } }); } }); }
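The buggy_function/fixed_function pair above isolates the change from the diff: the catch blocks no longer call JedisTemplate.this.pipeline.disconnect() before rethrowing. Disconnecting there tears down the shared connection on the first error, which can leave the template (and, when a pool is in use, the connection later returned to it) unusable for subsequent callers; the finally block already restores non-pipelined mode. A distilled sketch of the corrected control flow, with illustrative types rather than the Jedis API:

public class ScopedPipelineSketch {
    interface Callback { void doInPipeline() throws Exception; }

    private Object pipeline; // visible to other methods while the block runs

    void runPipelined(Object pipelineHandle, Callback callback) {
        try {
            this.pipeline = pipelineHandle;
            callback.doInPipeline();
        } catch (Exception e) {
            // translate and propagate only; do NOT disconnect here (the old bug),
            // or the shared connection is dead for every later caller
            throw new RuntimeException("pipeline block failed: " + e.getMessage(), e);
        } finally {
            this.pipeline = null; // always restore non-pipelined mode
        }
    }
}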
diff --git a/src/com/joshondesign/treegui/modes/aminojs/AminoJSMode.java b/src/com/joshondesign/treegui/modes/aminojs/AminoJSMode.java index ddbe4f9..0f77ecc 100644 --- a/src/com/joshondesign/treegui/modes/aminojs/AminoJSMode.java +++ b/src/com/joshondesign/treegui/modes/aminojs/AminoJSMode.java @@ -1,223 +1,223 @@ package com.joshondesign.treegui.modes.aminojs; import com.joshondesign.treegui.BindingUtils; import com.joshondesign.treegui.Mode; import com.joshondesign.treegui.actions.JAction; import com.joshondesign.treegui.docmodel.Layer; import com.joshondesign.treegui.docmodel.Page; import com.joshondesign.treegui.docmodel.SketchDocument; import com.joshondesign.treegui.docmodel.SketchNode; import com.joshondesign.treegui.model.TreeNode; import com.joshondesign.treegui.modes.aminojava.DynamicNode; import com.joshondesign.treegui.modes.aminojava.Property; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.joshy.gfx.draw.FlatColor; import org.joshy.gfx.draw.Font; import org.joshy.gfx.draw.FontBuilder; import org.joshy.gfx.draw.GFX; import org.joshy.gfx.node.control.Menu; public class AminoJSMode extends Mode { public static Map<String, DynamicNode.DrawDelegate> drawMap = new HashMap<String, DynamicNode.DrawDelegate>(); public AminoJSMode() { setId("com.joshondesign.modes.aminojs"); add(new TreeNode<JAction>()); TreeNode<SketchNode> symbols = new TreeNode<SketchNode>(); symbols.setId("symbols"); DynamicNode visualBase = new DynamicNode(); visualBase .addProperty(new Property("translateX", Double.class, 0)) .addProperty(new Property("translateY", Double.class, 0)) .addProperty(new Property("width", Double.class, 80)) .addProperty(new Property("height", Double.class, 30)) ; drawMap.put("PushButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("CheckButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, h, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5 + h, 15); g.drawRect(0,0,h,h); } }); symbols.add(parse(new PushButton(), drawMap.get("PushButton"), visualBase)); symbols.add(parse(new ToggleButton(), drawMap.get("PushButton"),visualBase)); symbols.add(parse(new CheckButton(), drawMap.get("CheckButton"),visualBase)); drawMap.put("Slider", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.fillRect(0, 0, h, h); } }); drawMap.put("Image", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0 + 10, 0 + 10, w - 10 * 2, h - 10 * 2); } }); - drawMap.put("TextBox", new DynamicNode.DrawDelegate() { + drawMap.put("Textbox", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); 
g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("Label", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String text = node.getProperty("text").getStringValue(); double size = node.getProperty("fontsize").getDoubleValue(); g.setPaint(FlatColor.BLACK); Font font = new FontBuilder(Font.DEFAULT.getName()).size((float)size).resolve(); g.drawText(text, font, 5, h-5); } }); drawMap.put("ListView", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); List<String> data = (List<String>) node.getProperty("data").getRawValue(); if(data == null) { data = Arrays.asList("dummy", "dummy", "dummy"); } if(data != null) { for(int i=0; i<data.size(); i++) { g.drawText(data.get(i), Font.DEFAULT, 5, i*20+20); } } g.drawRect(0, 0, w, h); } }); symbols.add(parse(new Slider(), drawMap.get("Slider"), visualBase)); symbols.add(parse(new Image(), drawMap.get("Image"), visualBase)); - symbols.add(parse(new Textbox(), drawMap.get("TextBox"), visualBase)); + symbols.add(parse(new Textbox(), drawMap.get("Textbox"), visualBase)); symbols.add(parse(new Label(), drawMap.get("Label"), visualBase)); symbols.add(parse(new ListView(), drawMap.get("ListView"), visualBase)); drawMap.put("PlainPanel", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0,0,w,h); } }); drawMap.put("Spinner", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.BLACK); g.drawOval(10, 10, w - 20, h - 20); } }); drawMap.put("FlickrQuery", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.YELLOW); g.fillRoundRect(0, 0, 80, 80, 10, 10); g.setPaint(FlatColor.BLACK); g.drawRoundRect(0, 0, 80, 80, 10, 10); g.drawText("Flickr Query", Font.DEFAULT, 10, 15); } }); symbols.add(parse(new PlainPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new TabPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new Spinner(), drawMap.get("Spinner"), visualBase)); //symbols.add(parse(new StringListModel(), drawMap.get("FlickrQuery"), visualBase)); //symbols.add(parse(new ControlListModel(), drawMap.get("FlickrQuery"), visualBase)); DynamicNode photo = parse(new FlickrQuery.Photo("a","b"), drawMap.get("FlickrQuery"), visualBase); DynamicNode flickr = parse(new FlickrQuery(), drawMap.get("FlickrQuery"), visualBase); flickr.getProperty("results").setList(true).setItemPrototype(photo); symbols.add(flickr); add(symbols); } @Override public String getName() { return "Amino JS"; } @Override public SketchDocument createEmptyDoc() { SketchDocument doc = new SketchDocument(); doc.setModeId(this.getId()); Layer layer = new Layer(); layer.add(findSymbol("PlainPanel").duplicate(null)); Page page = new Page(); page.add(layer); doc.add(page); return doc; } @Override public void modifyFileMenu(Menu fileMenu, SketchDocument doc) { fileMenu.addItem("Test HTML", "R", new HTMLBindingExport(doc,true)); fileMenu.addItem("Export HTML", 
"E", new HTMLBindingExport(doc,false)); } @Override public Map<String,DynamicNode.DrawDelegate> getDrawMap() { return drawMap; } private static DynamicNode parse(Object o, DynamicNode.DrawDelegate del, DynamicNode base) { DynamicNode nd = BindingUtils.parseAnnotatedPOJO(o, del); if(base != null) { nd.copyPropertiesFrom(base); } return nd; } }
is_single_chunk: false
is_single_function: true
public AminoJSMode() { setId("com.joshondesign.modes.aminojs"); add(new TreeNode<JAction>()); TreeNode<SketchNode> symbols = new TreeNode<SketchNode>(); symbols.setId("symbols"); DynamicNode visualBase = new DynamicNode(); visualBase .addProperty(new Property("translateX", Double.class, 0)) .addProperty(new Property("translateY", Double.class, 0)) .addProperty(new Property("width", Double.class, 80)) .addProperty(new Property("height", Double.class, 30)) ; drawMap.put("PushButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("CheckButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, h, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5 + h, 15); g.drawRect(0,0,h,h); } }); symbols.add(parse(new PushButton(), drawMap.get("PushButton"), visualBase)); symbols.add(parse(new ToggleButton(), drawMap.get("PushButton"),visualBase)); symbols.add(parse(new CheckButton(), drawMap.get("CheckButton"),visualBase)); drawMap.put("Slider", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.fillRect(0, 0, h, h); } }); drawMap.put("Image", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0 + 10, 0 + 10, w - 10 * 2, h - 10 * 2); } }); drawMap.put("TextBox", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("Label", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String text = node.getProperty("text").getStringValue(); double size = node.getProperty("fontsize").getDoubleValue(); g.setPaint(FlatColor.BLACK); Font font = new FontBuilder(Font.DEFAULT.getName()).size((float)size).resolve(); g.drawText(text, font, 5, h-5); } }); drawMap.put("ListView", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); List<String> data = (List<String>) node.getProperty("data").getRawValue(); if(data == null) { data = Arrays.asList("dummy", "dummy", "dummy"); } if(data != null) { for(int i=0; i<data.size(); i++) { g.drawText(data.get(i), Font.DEFAULT, 5, i*20+20); } } g.drawRect(0, 0, w, h); } }); symbols.add(parse(new Slider(), drawMap.get("Slider"), visualBase)); symbols.add(parse(new Image(), drawMap.get("Image"), visualBase)); symbols.add(parse(new Textbox(), drawMap.get("TextBox"), visualBase)); symbols.add(parse(new Label(), drawMap.get("Label"), visualBase)); 
symbols.add(parse(new ListView(), drawMap.get("ListView"), visualBase)); drawMap.put("PlainPanel", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0,0,w,h); } }); drawMap.put("Spinner", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.BLACK); g.drawOval(10, 10, w - 20, h - 20); } }); drawMap.put("FlickrQuery", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.YELLOW); g.fillRoundRect(0, 0, 80, 80, 10, 10); g.setPaint(FlatColor.BLACK); g.drawRoundRect(0, 0, 80, 80, 10, 10); g.drawText("Flickr Query", Font.DEFAULT, 10, 15); } }); symbols.add(parse(new PlainPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new TabPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new Spinner(), drawMap.get("Spinner"), visualBase)); //symbols.add(parse(new StringListModel(), drawMap.get("FlickrQuery"), visualBase)); //symbols.add(parse(new ControlListModel(), drawMap.get("FlickrQuery"), visualBase)); DynamicNode photo = parse(new FlickrQuery.Photo("a","b"), drawMap.get("FlickrQuery"), visualBase); DynamicNode flickr = parse(new FlickrQuery(), drawMap.get("FlickrQuery"), visualBase); flickr.getProperty("results").setList(true).setItemPrototype(photo); symbols.add(flickr); add(symbols); }
public AminoJSMode() { setId("com.joshondesign.modes.aminojs"); add(new TreeNode<JAction>()); TreeNode<SketchNode> symbols = new TreeNode<SketchNode>(); symbols.setId("symbols"); DynamicNode visualBase = new DynamicNode(); visualBase .addProperty(new Property("translateX", Double.class, 0)) .addProperty(new Property("translateY", Double.class, 0)) .addProperty(new Property("width", Double.class, 80)) .addProperty(new Property("height", Double.class, 30)) ; drawMap.put("PushButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("CheckButton", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, h, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5 + h, 15); g.drawRect(0,0,h,h); } }); symbols.add(parse(new PushButton(), drawMap.get("PushButton"), visualBase)); symbols.add(parse(new ToggleButton(), drawMap.get("PushButton"),visualBase)); symbols.add(parse(new CheckButton(), drawMap.get("CheckButton"),visualBase)); drawMap.put("Slider", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.fillRect(0, 0, h, h); } }); drawMap.put("Image", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0 + 10, 0 + 10, w - 10 * 2, h - 10 * 2); } }); drawMap.put("Textbox", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String t = node.getProperty("text").getStringValue(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawText(t, Font.DEFAULT, 5, 15); g.drawRect(0, 0, w, h); } }); drawMap.put("Label", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); String text = node.getProperty("text").getStringValue(); double size = node.getProperty("fontsize").getDoubleValue(); g.setPaint(FlatColor.BLACK); Font font = new FontBuilder(Font.DEFAULT.getName()).size((float)size).resolve(); g.drawText(text, font, 5, h-5); } }); drawMap.put("ListView", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); List<String> data = (List<String>) node.getProperty("data").getRawValue(); if(data == null) { data = Arrays.asList("dummy", "dummy", "dummy"); } if(data != null) { for(int i=0; i<data.size(); i++) { g.drawText(data.get(i), Font.DEFAULT, 5, i*20+20); } } g.drawRect(0, 0, w, h); } }); symbols.add(parse(new Slider(), drawMap.get("Slider"), visualBase)); symbols.add(parse(new Image(), drawMap.get("Image"), visualBase)); symbols.add(parse(new Textbox(), drawMap.get("Textbox"), visualBase)); symbols.add(parse(new Label(), drawMap.get("Label"), visualBase)); 
symbols.add(parse(new ListView(), drawMap.get("ListView"), visualBase)); drawMap.put("PlainPanel", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.GRAY); g.fillRect(0, 0, w, h); g.setPaint(FlatColor.BLACK); g.drawRect(0,0,w,h); } }); drawMap.put("Spinner", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.BLACK); g.drawOval(10, 10, w - 20, h - 20); } }); drawMap.put("FlickrQuery", new DynamicNode.DrawDelegate() { public void draw(GFX g, DynamicNode node) { double w = node.getWidth(); double h = node.getHeight(); g.setPaint(FlatColor.YELLOW); g.fillRoundRect(0, 0, 80, 80, 10, 10); g.setPaint(FlatColor.BLACK); g.drawRoundRect(0, 0, 80, 80, 10, 10); g.drawText("Flickr Query", Font.DEFAULT, 10, 15); } }); symbols.add(parse(new PlainPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new TabPanel(), drawMap.get("PlainPanel"), visualBase)); symbols.add(parse(new Spinner(), drawMap.get("Spinner"), visualBase)); //symbols.add(parse(new StringListModel(), drawMap.get("FlickrQuery"), visualBase)); //symbols.add(parse(new ControlListModel(), drawMap.get("FlickrQuery"), visualBase)); DynamicNode photo = parse(new FlickrQuery.Photo("a","b"), drawMap.get("FlickrQuery"), visualBase); DynamicNode flickr = parse(new FlickrQuery(), drawMap.get("FlickrQuery"), visualBase); flickr.getProperty("results").setList(true).setItemPrototype(photo); symbols.add(flickr); add(symbols); }
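This row's fix is a one-string rename: the draw delegate was registered under "TextBox" while the widget class is Textbox, so any lookup keyed on the class's simple name (as the export path reached through getDrawMap() presumably is) would come back null. A hypothetical way to rule out that kind of mismatch is to derive the map key from the class itself; the names below are illustrative, not part of the mode framework:

import java.util.HashMap;
import java.util.Map;

public class DrawMapSketch {
    interface DrawDelegate { void draw(Object node); }

    private final Map<String, DrawDelegate> drawMap = new HashMap<String, DrawDelegate>();

    /** Register under the class's simple name so put and get can never diverge. */
    void register(Class<?> nodeClass, DrawDelegate delegate) {
        drawMap.put(nodeClass.getSimpleName(), delegate);
    }

    DrawDelegate lookup(Object node) {
        return drawMap.get(node.getClass().getSimpleName());
    }
}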
diff --git a/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/tasks/ui/editors/TaskEditor.java b/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/tasks/ui/editors/TaskEditor.java index 37a1eb380..7f97d28f4 100644 --- a/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/tasks/ui/editors/TaskEditor.java +++ b/org.eclipse.mylyn.tasks.ui/src/org/eclipse/mylyn/tasks/ui/editors/TaskEditor.java @@ -1,1193 +1,1195 @@ /******************************************************************************* * Copyright (c) 2004, 2009 Tasktop Technologies and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Tasktop Technologies - initial API and implementation * Eric Booth - initial prototype *******************************************************************************/ package org.eclipse.mylyn.tasks.ui.editors; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.ContributionManager; import org.eclipse.jface.action.ControlContribution; import org.eclipse.jface.action.GroupMarker; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.action.ToolBarManager; import org.eclipse.jface.dialogs.IMessageProvider; import org.eclipse.jface.resource.CompositeImageDescriptor; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.resource.JFaceResources; import org.eclipse.jface.text.Document; import org.eclipse.jface.text.TextViewer; import org.eclipse.jface.util.LocalSelectionTransfer; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.ISelectionProvider; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.mylyn.commons.core.StatusHandler; import org.eclipse.mylyn.internal.provisional.commons.ui.CommonImages; import org.eclipse.mylyn.internal.provisional.commons.ui.CommonTextSupport; import org.eclipse.mylyn.internal.provisional.commons.ui.CommonUiUtil; import org.eclipse.mylyn.internal.provisional.commons.ui.SelectionProviderAdapter; import org.eclipse.mylyn.internal.provisional.commons.ui.WorkbenchUtil; import org.eclipse.mylyn.internal.provisional.commons.ui.editor.EditorBusyIndicator; import org.eclipse.mylyn.internal.provisional.commons.ui.editor.IBusyEditor; import org.eclipse.mylyn.internal.tasks.core.LocalRepositoryConnector; import org.eclipse.mylyn.internal.tasks.core.TaskList; import org.eclipse.mylyn.internal.tasks.ui.TasksUiPlugin; import org.eclipse.mylyn.internal.tasks.ui.actions.TaskEditorScheduleAction; import org.eclipse.mylyn.internal.tasks.ui.actions.ToggleTaskActivationAction; import org.eclipse.mylyn.internal.tasks.ui.editors.EditorUtil; import org.eclipse.mylyn.internal.tasks.ui.editors.Messages; import org.eclipse.mylyn.internal.tasks.ui.editors.TaskEditorActionContributor; import 
org.eclipse.mylyn.internal.tasks.ui.util.PlatformUtil; import org.eclipse.mylyn.internal.tasks.ui.util.TaskDragSourceListener; import org.eclipse.mylyn.internal.tasks.ui.util.TasksUiInternal; import org.eclipse.mylyn.tasks.core.ITask; import org.eclipse.mylyn.tasks.core.TaskRepository; import org.eclipse.mylyn.tasks.ui.AbstractRepositoryConnectorUi; import org.eclipse.mylyn.tasks.ui.TasksUiImages; import org.eclipse.mylyn.tasks.ui.TasksUiUtil; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.StyledText; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.DragSource; import org.eclipse.swt.dnd.FileTransfer; import org.eclipse.swt.dnd.TextTransfer; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.ControlAdapter; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.events.DisposeEvent; import org.eclipse.swt.events.DisposeListener; import org.eclipse.swt.events.FocusAdapter; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.RowLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.ToolBar; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IEditorSite; import org.eclipse.ui.IWorkbenchActionConstants; import org.eclipse.ui.PartInitException; import org.eclipse.ui.contexts.IContextService; import org.eclipse.ui.forms.IFormColors; import org.eclipse.ui.forms.IManagedForm; import org.eclipse.ui.forms.editor.IFormPage; import org.eclipse.ui.forms.editor.SharedHeaderFormEditor; import org.eclipse.ui.forms.events.HyperlinkAdapter; import org.eclipse.ui.forms.events.HyperlinkEvent; import org.eclipse.ui.forms.events.IHyperlinkListener; import org.eclipse.ui.forms.widgets.Form; import org.eclipse.ui.forms.widgets.FormToolkit; import org.eclipse.ui.forms.widgets.ImageHyperlink; import org.eclipse.ui.handlers.IHandlerService; import org.eclipse.ui.internal.forms.widgets.BusyIndicator; import org.eclipse.ui.internal.forms.widgets.FormHeading; import org.eclipse.ui.internal.forms.widgets.TitleRegion; import org.eclipse.ui.menus.IMenuService; import org.eclipse.ui.part.WorkbenchPart; import org.eclipse.ui.progress.IWorkbenchSiteProgressService; import org.eclipse.ui.services.IDisposable; import org.eclipse.ui.views.contentoutline.IContentOutlinePage; /** * @author Mik Kersten * @author Rob Elves * @author Steffen Pingel * @author Thomas Ehrnhoefer * @since 2.0 */ public class TaskEditor extends SharedHeaderFormEditor { /** * @since 2.0 */ public static final String ID_EDITOR = "org.eclipse.mylyn.tasks.ui.editors.task"; //$NON-NLS-1$ /** * @since 3.2 */ public static final String ID_TOOLBAR_HEADER = "org.eclipse.mylyn.tasks.ui.editors.task.toolbar.header"; //$NON-NLS-1$ private static final String ID_LEFT_TOOLBAR_HEADER = "org.eclipse.mylyn.tasks.ui.editors.task.toolbar.header.left"; //$NON-NLS-1$ private static final int LEFT_TOOLBAR_HEADER_TOOLBAR_PADDING = 3; private ToggleTaskActivationAction activateAction; @Deprecated private final IEditorPart contentOutlineProvider = null; private EditorBusyIndicator editorBusyIndicator; private MenuManager menuManager; private IHyperlinkListener messageHyperLinkListener; private ITask task; private TaskEditorInput taskEditorInput; private TaskDragSourceListener titleDragSourceListener; private 
Composite editorParent; private IMenuService menuService; private IToolBarManager toolBarManager; private ToolBarManager leftToolBarManager; private ToolBar leftToolBar; private Image headerImage; // private int initialLeftToolbarSize; private boolean noExtraPadding; // private boolean headerLabelInitialized; private BusyIndicator busyLabel; private StyledText titleLabel; private CommonTextSupport textSupport; private TaskEditorScheduleAction scheduleAction; private static boolean toolBarFailureLogged; public TaskEditor() { } @Override protected Composite createPageContainer(Composite parent) { this.editorParent = parent; Composite composite = super.createPageContainer(parent); EditorUtil.initializeScrollbars(getHeaderForm().getForm()); // create left tool bar that replaces form heading label try { FormHeading heading = (FormHeading) getHeaderForm().getForm().getForm().getHead(); Field field = FormHeading.class.getDeclaredField("titleRegion"); //$NON-NLS-1$ field.setAccessible(true); TitleRegion titleRegion = (TitleRegion) field.get(heading); leftToolBarManager = new ToolBarManager(SWT.FLAT); leftToolBar = leftToolBarManager.createControl(titleRegion); leftToolBar.addControlListener(new ControlAdapter() { private boolean ignoreResizeEvents; @Override public void controlResized(ControlEvent e) { if (ignoreResizeEvents) { return; } ignoreResizeEvents = true; try { // the tool bar contents has changed, update state updateHeaderImage(); updateHeaderLabel(); } finally { ignoreResizeEvents = false; } } }); //titleLabel = new Label(titleRegion, SWT.NONE); // need a viewer for copy support TextViewer titleViewer = new TextViewer(titleRegion, SWT.READ_ONLY); // Eclipse 3.3 needs a document, otherwise an NPE is thrown titleViewer.setDocument(new Document()); titleLabel = titleViewer.getTextWidget(); titleLabel.setForeground(heading.getForeground()); titleLabel.setFont(heading.getFont()); // XXX work-around problem that causes field to maintain selection when unfocused titleLabel.addFocusListener(new FocusAdapter() { @Override public void focusLost(FocusEvent e) { titleLabel.setSelection(0); } }); titleRegion.addControlListener(new ControlAdapter() { @Override public void controlResized(ControlEvent e) { // do not create busyLabel to avoid recursion updateSizeAndLocations(); } }); IHandlerService handlerService = (IHandlerService) getSite().getService(IHandlerService.class); if (handlerService != null) { textSupport = new CommonTextSupport(handlerService); textSupport.install(titleViewer, false); } } catch (Exception e) { if (!toolBarFailureLogged) { StatusHandler.log(new Status(IStatus.ERROR, TasksUiPlugin.ID_PLUGIN, "Failed to obtain busy label toolbar", e)); //$NON-NLS-1$ } if (titleLabel != null) { titleLabel.dispose(); titleLabel = null; } if (leftToolBar != null) { leftToolBar.dispose(); leftToolBar = null; } if (leftToolBarManager != null) { leftToolBarManager.dispose(); leftToolBarManager = null; } } updateHeader(); return composite; } private BusyIndicator getBusyLabel() { if (busyLabel != null) { return busyLabel; } try { FormHeading heading = (FormHeading) getHeaderForm().getForm().getForm().getHead(); // ensure that busy label exists heading.setBusy(true); heading.setBusy(false); Field field = FormHeading.class.getDeclaredField("titleRegion"); //$NON-NLS-1$ field.setAccessible(true); TitleRegion titleRegion = (TitleRegion) field.get(heading); for (Control child : titleRegion.getChildren()) { if (child instanceof BusyIndicator) { busyLabel = (BusyIndicator) child; } } if (busyLabel == 
null) { return null; } busyLabel.addControlListener(new ControlAdapter() { @Override public void controlMoved(ControlEvent e) { updateSizeAndLocations(); } }); // the busy label may get disposed if it has no image busyLabel.addDisposeListener(new DisposeListener() { public void widgetDisposed(DisposeEvent e) { busyLabel.setMenu(null); busyLabel = null; } }); if (leftToolBar != null) { leftToolBar.moveAbove(busyLabel); } if (titleLabel != null) { titleLabel.moveAbove(busyLabel); } updateSizeAndLocations(); return busyLabel; } catch (Exception e) { if (!toolBarFailureLogged) { StatusHandler.log(new Status(IStatus.ERROR, TasksUiPlugin.ID_PLUGIN, "Failed to obtain busy label toolbar", e)); //$NON-NLS-1$ } busyLabel = null; } return busyLabel; } private void updateSizeAndLocations() { if (busyLabel == null || busyLabel.isDisposed()) { return; } Point leftToolBarSize = new Point(0, 0); if (leftToolBar != null && !leftToolBar.isDisposed()) { // bottom align tool bar in title region leftToolBarSize = leftToolBar.getSize(); int y = leftToolBar.getParent().getSize().y - leftToolBarSize.y - 2; if (!hasLeftToolBar()) { // hide tool bar to avoid overlaying busyLabel on windows leftToolBarSize.x = 0; } leftToolBar.setBounds(busyLabel.getLocation().x, y, leftToolBarSize.x, leftToolBarSize.y); } if (titleLabel != null && !titleLabel.isDisposed()) { // center align title text in title region Point size = titleLabel.computeSize(SWT.DEFAULT, SWT.DEFAULT, true); int y = (titleLabel.getParent().getSize().y - size.y) / 2; titleLabel.setBounds(busyLabel.getLocation().x + LEFT_TOOLBAR_HEADER_TOOLBAR_PADDING + leftToolBarSize.x, y, size.x, size.y); } } Composite getEditorParent() { return editorParent; } @Override protected void addPages() { initialize(); // determine factories Set<String> conflictingIds = new HashSet<String>(); ArrayList<AbstractTaskEditorPageFactory> pageFactories = new ArrayList<AbstractTaskEditorPageFactory>(); for (AbstractTaskEditorPageFactory pageFactory : TasksUiPlugin.getDefault().getTaskEditorPageFactories()) { if (pageFactory.canCreatePageFor(getTaskEditorInput()) && WorkbenchUtil.allowUseOf(pageFactory)) { pageFactories.add(pageFactory); String[] ids = pageFactory.getConflictingIds(getTaskEditorInput()); if (ids != null) { conflictingIds.addAll(Arrays.asList(ids)); } } } for (Iterator<AbstractTaskEditorPageFactory> it = pageFactories.iterator(); it.hasNext();) { if (conflictingIds.contains(it.next().getId())) { it.remove(); } } // sort by priority Collections.sort(pageFactories, new Comparator<AbstractTaskEditorPageFactory>() { public int compare(AbstractTaskEditorPageFactory o1, AbstractTaskEditorPageFactory o2) { return o1.getPriority() - o2.getPriority(); } }); // create pages for (AbstractTaskEditorPageFactory factory : pageFactories) { try { IFormPage page = factory.createPage(this); int index = addPage(page); setPageImage(index, factory.getPageImage()); setPageText(index, factory.getPageText()); if (factory.getPriority() == AbstractTaskEditorPageFactory.PRIORITY_TASK) { setActivePage(index); } if (page instanceof ISelectionProvider) { ((ISelectionProvider) page).addSelectionChangedListener(getActionBarContributor()); } } catch (Exception e) { StatusHandler.log(new Status(IStatus.ERROR, TasksUiPlugin.ID_PLUGIN, "Could not create editor via factory: " + factory, e)); //$NON-NLS-1$ } } updateTitleImage(); updateHeaderToolBar(); installTitleDrag(getHeaderForm().getForm().getForm()); // do this late to allow pages to replace the selection provider 
getEditorSite().registerContextMenu(menuManager, getEditorSite().getSelectionProvider(), true); } private void initialize() { editorBusyIndicator = new EditorBusyIndicator(new IBusyEditor() { public Image getTitleImage() { return TaskEditor.this.getTitleImage(); } public void setTitleImage(Image image) { TaskEditor.this.setTitleImage(image); } }); menuManager = new MenuManager(); configureContextMenuManager(menuManager); Menu menu = menuManager.createContextMenu(getContainer()); getContainer().setMenu(menu); // install context menu on form heading and title getHeaderForm().getForm().setMenu(menu); Composite head = getHeaderForm().getForm().getForm().getHead(); if (head != null) { CommonUiUtil.setMenu(head, menu); } } /** * @since 3.0 */ @Deprecated public void configureContextMenuManager(MenuManager manager) { if (manager == null) { return; } IMenuListener listener = new IMenuListener() { public void menuAboutToShow(IMenuManager manager) { contextMenuAboutToShow(manager); } }; manager.setRemoveAllWhenShown(true); manager.addMenuListener(listener); } @Deprecated protected void contextMenuAboutToShow(IMenuManager manager) { TaskEditorActionContributor contributor = getActionBarContributor(); if (contributor != null) { contributor.contextMenuAboutToShow(manager); } } @Override protected FormToolkit createToolkit(Display display) { // create a toolkit that shares colors between editors. return new FormToolkit(TasksUiPlugin.getDefault().getFormColors(display)); } @Override protected void createHeaderContents(IManagedForm headerForm) { getToolkit().decorateFormHeading(headerForm.getForm().getForm()); } @Override public void dispose() { disposeScheduleAction(); if (headerImage != null) { headerImage.dispose(); } if (editorBusyIndicator != null) { editorBusyIndicator.stop(); } if (activateAction != null) { activateAction.dispose(); } if (menuService != null && toolBarManager instanceof ContributionManager) { menuService.releaseContributions((ContributionManager) toolBarManager); } if (textSupport != null) { textSupport.dispose(); } if (messageHyperLinkListener instanceof IDisposable) { ((IDisposable) messageHyperLinkListener).dispose(); } super.dispose(); } @Override public void doSave(IProgressMonitor monitor) { for (IFormPage page : getPages()) { if (page.isDirty()) { page.doSave(monitor); } } editorDirtyStateChanged(); } @Override public void doSaveAs() { throw new UnsupportedOperationException(); } private TaskEditorActionContributor getActionBarContributor() { return (TaskEditorActionContributor) getEditorSite().getActionBarContributor(); } @SuppressWarnings("rawtypes") @Override public Object getAdapter(Class adapter) { if (contentOutlineProvider != null) { return contentOutlineProvider.getAdapter(adapter); } else if (IContentOutlinePage.class.equals(adapter)) { IFormPage[] pages = getPages(); for (IFormPage page : pages) { Object outlinePage = page.getAdapter(adapter); if (outlinePage != null) { return outlinePage; } } } return super.getAdapter(adapter); } /** * @since 3.0 */ public Menu getMenu() { return getContainer().getMenu(); } IFormPage[] getPages() { List<IFormPage> formPages = new ArrayList<IFormPage>(); if (pages != null) { for (int i = 0; i < pages.size(); i++) { Object page = pages.get(i); if (page instanceof IFormPage) { formPages.add((IFormPage) page); } } } return formPages.toArray(new IFormPage[formPages.size()]); } @Deprecated protected IWorkbenchSiteProgressService getProgressService() { Object siteService = getEditorSite().getAdapter(IWorkbenchSiteProgressService.class); 
if (siteService != null) { return (IWorkbenchSiteProgressService) siteService; } return null; } @Deprecated public ISelection getSelection() { if (getSite() != null && getSite().getSelectionProvider() != null) { return getSite().getSelectionProvider().getSelection(); } else { return StructuredSelection.EMPTY; } } public TaskEditorInput getTaskEditorInput() { return taskEditorInput; } @Deprecated public Form getTopForm() { return this.getHeaderForm().getForm().getForm(); } @Override public void init(IEditorSite site, IEditorInput input) throws PartInitException { if (!(input instanceof TaskEditorInput)) { throw new PartInitException("Invalid editor input \"" + input.getClass() + "\""); //$NON-NLS-1$ //$NON-NLS-2$ } super.init(site, input); this.taskEditorInput = (TaskEditorInput) input; this.task = taskEditorInput.getTask(); // initialize selection site.getSelectionProvider().setSelection(new StructuredSelection(task)); setPartName(input.getName()); // activate context IContextService contextSupport = (IContextService) site.getService(IContextService.class); if (contextSupport != null) { contextSupport.activateContext(ID_EDITOR); } } private void installTitleDrag(Form form) { if (titleDragSourceListener == null /*&& !hasLeftToolBar()*/) { Transfer[] transferTypes; if (null == task) { transferTypes = new Transfer[] { TextTransfer.getInstance() }; } else { transferTypes = new Transfer[] { LocalSelectionTransfer.getTransfer(), TextTransfer.getInstance(), FileTransfer.getInstance() }; } titleDragSourceListener = new TaskDragSourceListener(new SelectionProviderAdapter() { @Override public ISelection getSelection() { return new StructuredSelection(task); } }); if (titleLabel != null) { DragSource source = new DragSource(titleLabel, DND.DROP_MOVE | DND.DROP_LINK); source.setTransfer(transferTypes); source.addDragListener(titleDragSourceListener); } else { form.addTitleDragSupport(DND.DROP_MOVE | DND.DROP_LINK, transferTypes, titleDragSourceListener); } } } @Override public boolean isDirty() { for (IFormPage page : getPages()) { if (page.isDirty()) { return true; } } return false; } @Override public boolean isSaveAsAllowed() { return false; } @Deprecated public void markDirty() { firePropertyChange(PROP_DIRTY); } /** * Refresh editor pages with new contents. 
* * @since 3.0 */ public void refreshPages() { for (IFormPage page : getPages()) { if (page instanceof TaskFormPage) { if (page.getManagedForm() != null && !page.getManagedForm().getForm().isDisposed()) { ((TaskFormPage) page).refresh(); } } } } @Override public void setFocus() { IFormPage page = getActivePageInstance(); if (page != null) { page.setFocus(); } else { super.setFocus(); } } @Deprecated public void setFocusOfActivePage() { if (this.getActivePage() > -1) { IFormPage page = this.getPages()[this.getActivePage()]; if (page != null) { page.setFocus(); } } } public void setMessage(String message, int type) { setMessage(message, type, null); } private boolean isHeaderFormDisposed() { return getHeaderForm() == null || getHeaderForm().getForm() == null || getHeaderForm().getForm().isDisposed(); } /** * @since 2.3 */ public void setMessage(String message, int type, IHyperlinkListener listener) { if (isHeaderFormDisposed()) { return; } try { // avoid flicker of the left header toolbar getHeaderForm().getForm().setRedraw(false); Form form = getHeaderForm().getForm().getForm(); form.setMessage(message, type); if (messageHyperLinkListener != null) { form.removeMessageHyperlinkListener(messageHyperLinkListener); if (messageHyperLinkListener instanceof IDisposable) { ((IDisposable) messageHyperLinkListener).dispose(); } } if (listener != null) { form.addMessageHyperlinkListener(listener); } messageHyperLinkListener = listener; // make sure the busyLabel image is large enough to accommodate the tool bar if (hasLeftToolBar()) { BusyIndicator busyLabel = getBusyLabel(); if (message != null && busyLabel != null) { setHeaderImage(busyLabel.getImage()); } else { setHeaderImage(null); } } } finally { getHeaderForm().getForm().setRedraw(true); } } private void setHeaderImage(final Image image) { BusyIndicator busyLabel = getBusyLabel(); if (busyLabel == null) { return; } final Point size = leftToolBar.computeSize(SWT.DEFAULT, SWT.DEFAULT, true); Point titleSize = titleLabel.computeSize(SWT.DEFAULT, SWT.DEFAULT, true); size.x += titleSize.x + LEFT_TOOLBAR_HEADER_TOOLBAR_PADDING; size.y = Math.max(titleSize.y, size.y); // padding between toolbar and image, ensure image is at least one pixel wide to avoid SWT error final int padding = (size.x > 0 && !noExtraPadding) ? 10 : 1; final Rectangle imageBounds = (image != null) ? image.getBounds() : new Rectangle(0, 0, 0, 0); int tempHeight = (image != null) ? Math.max(size.y + 1, imageBounds.height) : size.y + 1; // avoid extra padding due to margin added by TitleRegion.VMARGIN final int height = (tempHeight > imageBounds.height + 5) ? 
tempHeight - 5 : tempHeight; CompositeImageDescriptor descriptor = new CompositeImageDescriptor() { @Override protected void drawCompositeImage(int width, int height) { if (image != null) { drawImage(image.getImageData(), size.x + padding, (height - image.getBounds().height) / 2); } } @Override protected Point getSize() { return new Point(size.x + padding + imageBounds.width, height); } }; Image newHeaderImage = descriptor.createImage(); // directly set on busyLabel since getHeaderForm().getForm().setImage() does not update // the image if a message is currently displayed busyLabel.setImage(newHeaderImage); if (headerImage != null) { headerImage.dispose(); } headerImage = newHeaderImage; // avoid extra padding due to large title font // TODO reset font in case tool bar is empty //leftToolBar.getParent().setFont(JFaceResources.getDefaultFont()); getHeaderForm().getForm().reflow(true); } /** * @since 3.1 */ public String getMessage() { if (getHeaderForm() != null && getHeaderForm().getForm() != null) { if (!getHeaderForm().getForm().isDisposed()) { Form form = getHeaderForm().getForm().getForm(); return form.getMessage(); } } return null; } /** * @since 3.0 */ public void setStatus(String message, final String title, final IStatus status) { setMessage(message, IMessageProvider.ERROR, new HyperlinkAdapter() { @Override public void linkActivated(HyperlinkEvent event) { TasksUiInternal.displayStatus(title, status); } }); } @Override public void showBusy(boolean busy) { if (editorBusyIndicator != null) { if (busy) { if (TasksUiInternal.isAnimationsEnabled()) { editorBusyIndicator.start(); } } else { editorBusyIndicator.stop(); } } if (!isHeaderFormDisposed()) { Form form = getHeaderForm().getForm().getForm(); if (form != null && !form.isDisposed()) { // TODO consider only disabling certain actions IToolBarManager toolBarManager = form.getToolBarManager(); if (toolBarManager instanceof ToolBarManager) { ToolBar control = ((ToolBarManager) toolBarManager).getControl(); if (control != null) { control.setEnabled(!busy); } } if (leftToolBar != null) { leftToolBar.setEnabled(!busy); } if (titleLabel != null) { titleLabel.setEnabled(!busy); } CommonUiUtil.setEnabled(form.getBody(), !busy); for (IFormPage page : getPages()) { if (page instanceof WorkbenchPart) { WorkbenchPart part = (WorkbenchPart) page; part.showBusy(busy); } } } } } private void updateHeader() { IEditorInput input = getEditorInput(); updateHeaderImage(); updateHeaderLabel(); setTitleToolTip(input.getToolTipText()); setPartName(input.getName()); } /** * @since 3.0 */ public void updateHeaderToolBar() { if (isHeaderFormDisposed()) { return; } final Form form = getHeaderForm().getForm().getForm(); toolBarManager = form.getToolBarManager(); toolBarManager.removeAll(); // toolBarManager.update(true); TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); final TaskRepository taskRepository = (outgoingNewRepository != null) ? 
outgoingNewRepository : taskEditorInput.getTaskRepository(); ControlContribution repositoryLabelControl = new ControlContribution(Messages.AbstractTaskEditorPage_Title) { @Override protected Control createControl(Composite parent) { FormToolkit toolkit = getHeaderForm().getToolkit(); Composite composite = toolkit.createComposite(parent); RowLayout layout = new RowLayout(); if (PlatformUtil.hasNarrowToolBar()) { layout.marginTop = 0; layout.marginBottom = 0; layout.center = true; } composite.setLayout(layout); composite.setBackground(null); String label = taskRepository.getRepositoryLabel(); if (label.indexOf("//") != -1) { //$NON-NLS-1$ label = label.substring((taskRepository.getRepositoryUrl().indexOf("//") + 2)); //$NON-NLS-1$ } ImageHyperlink link = new ImageHyperlink(composite, SWT.NONE); link.setText(label); link.setFont(JFaceResources.getBannerFont()); link.setForeground(toolkit.getColors().getColor(IFormColors.TITLE)); link.addHyperlinkListener(new HyperlinkAdapter() { @Override public void linkActivated(HyperlinkEvent e) { TasksUiUtil.openEditRepositoryWizard(taskRepository); } }); return composite; } }; toolBarManager.add(repositoryLabelControl); toolBarManager.add(new GroupMarker("repository")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("new")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("open")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS)); final String taskUrl = TasksUiInternal.getAuthenticatedUrl(taskRepository, task); if (taskUrl != null && taskUrl.length() > 0) { Action openWithBrowserAction = new Action() { @Override public void run() { TasksUiUtil.openWithBrowser(taskRepository, task); } }; // ImageDescriptor overlay = TasksUiPlugin.getDefault().getOverlayIcon(taskRepository.getConnectorKind()); // ImageDescriptor compositeDescriptor = new TaskListImageDescriptor(TasksUiImages.REPOSITORY_SMALL_TOP, // overlay, false, true); openWithBrowserAction.setImageDescriptor(CommonImages.WEB); //openWithBrowserAction.setImageDescriptor(CommonImages.BROWSER_OPEN_TASK); openWithBrowserAction.setToolTipText(Messages.AbstractTaskEditorPage_Open_with_Web_Browser); toolBarManager.appendToGroup("open", openWithBrowserAction); //$NON-NLS-1$ } if (activateAction == null) { activateAction = new ToggleTaskActivationAction(task) { @Override public void run() { TaskList taskList = TasksUiPlugin.getTaskList(); if (taskList.getTask(task.getRepositoryUrl(), task.getTaskId()) == null) { setMessage(Messages.TaskEditor_Task_added_to_the_Uncategorized_container, IMessageProvider.INFORMATION); } super.run(); } }; } toolBarManager.add(new Separator("planning")); //$NON-NLS-1$ disposeScheduleAction(); scheduleAction = new TaskEditorScheduleAction(task); toolBarManager.add(scheduleAction); toolBarManager.add(new GroupMarker("page")); //$NON-NLS-1$ for (IFormPage page : getPages()) { if (page instanceof TaskFormPage) { TaskFormPage taskEditorPage = (TaskFormPage) page; taskEditorPage.fillToolBar(toolBarManager); } } toolBarManager.add(new Separator("activation")); //$NON-NLS-1$ // ContributionItem spacer = new ContributionItem() { // @Override // public void fill(ToolBar toolbar, int index) { // ToolItem item = new ToolItem(toolbar, SWT.NONE); // int scaleHeight = 42; // if (PlatformUtil.needsCarbonToolBarFix()) { // scaleHeight = 32; // } // final Image image = new Image(toolbar.getDisplay(), CommonImages.getImage(CommonImages.BLANK) // .getImageData() // .scaledTo(1, scaleHeight)); // item.setImage(image); // item.addDisposeListener(new 
DisposeListener() { // public void widgetDisposed(DisposeEvent e) { // image.dispose(); // } // }); // item.setWidth(5); // item.setEnabled(false); // } // }; // toolBarManager.add(spacer); // for (IFormPage page : getPages()) { // if (page instanceof AbstractTaskEditorPage) { // AbstractTaskEditorPage taskEditorPage = (AbstractTaskEditorPage) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } else if (page instanceof TaskPlanningEditor) { // TaskPlanningEditor taskEditorPage = (TaskPlanningEditor) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } // } // add external contributions menuService = (IMenuService) getSite().getService(IMenuService.class); if (menuService != null && toolBarManager instanceof ContributionManager) { menuService.populateContributionManager((ContributionManager) toolBarManager, "toolbar:" //$NON-NLS-1$ + ID_TOOLBAR_HEADER + "." + taskRepository.getConnectorKind()); //$NON-NLS-1$ + menuService.populateContributionManager((ContributionManager) toolBarManager, "toolbar:" //$NON-NLS-1$ + + ID_TOOLBAR_HEADER); } toolBarManager.update(true); // XXX move this call updateLeftHeaderToolBar(); updateHeader(); } private void disposeScheduleAction() { if (scheduleAction != null) { scheduleAction.dispose(); scheduleAction = null; } } private void updateLeftHeaderToolBar() { leftToolBarManager.removeAll(); leftToolBarManager.add(new Separator("activation")); //$NON-NLS-1$ leftToolBarManager.add(new Separator(IWorkbenchActionConstants.MB_ADDITIONS)); // initialLeftToolbarSize = leftToolBarManager.getSize(); leftToolBarManager.add(activateAction); // for (IFormPage page : getPages()) { // if (page instanceof AbstractTaskEditorPage) { // AbstractTaskEditorPage taskEditorPage = (AbstractTaskEditorPage) page; // taskEditorPage.fillLeftHeaderToolBar(leftToolBarManager); // } else if (page instanceof TaskPlanningEditor) { // TaskPlanningEditor taskEditorPage = (TaskPlanningEditor) page; // taskEditorPage.fillLeftHeaderToolBar(leftToolBarManager); // } // } // add external contributions menuService = (IMenuService) getSite().getService(IMenuService.class); if (menuService != null && leftToolBarManager instanceof ContributionManager) { TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); TaskRepository taskRepository = (outgoingNewRepository != null) ? outgoingNewRepository : taskEditorInput.getTaskRepository(); menuService.populateContributionManager(leftToolBarManager, "toolbar:" + ID_LEFT_TOOLBAR_HEADER + "." 
//$NON-NLS-1$ //$NON-NLS-2$ + taskRepository.getConnectorKind()); } leftToolBarManager.update(true); if (hasLeftToolBar()) { // XXX work around a bug in Gtk that causes the toolbar size to be incorrect if no // tool bar buttons are contributed // if (leftToolBar != null) { // Point size = leftToolBar.computeSize(SWT.DEFAULT, SWT.DEFAULT, false); // boolean changed = false; // for (Control control : leftToolBar.getChildren()) { // final Point childSize = control.computeSize(SWT.DEFAULT, SWT.DEFAULT, false); // if (childSize.y > size.y) { // size.y = childSize.y; // changed = true; // } // } // if (changed) { // leftToolBar.setSize(size); // } // } // // if (PlatformUtil.isToolBarHeightBroken(leftToolBar)) { // ToolItem item = new ToolItem(leftToolBar, SWT.NONE); // item.setEnabled(false); // item.setImage(CommonImages.getImage(CommonImages.BLANK)); // item.setWidth(1); // noExtraPadding = true; // } else if (PlatformUtil.needsToolItemToForceToolBarHeight()) { // ToolItem item = new ToolItem(leftToolBar, SWT.NONE); // item.setEnabled(false); // int scaleHeight = 22; // if (PlatformUtil.needsCarbonToolBarFix()) { // scaleHeight = 32; // } // final Image image = new Image(item.getDisplay(), CommonImages.getImage(CommonImages.BLANK) // .getImageData() // .scaledTo(1, scaleHeight)); // item.setImage(image); // item.addDisposeListener(new DisposeListener() { // public void widgetDisposed(DisposeEvent e) { // image.dispose(); // } // }); // item.setWidth(1); // noExtraPadding = true; // } // fix size of toolbar on Gtk with Eclipse 3.3 Point size = leftToolBar.getSize(); if (size.x == 0 && size.y == 0) { size = leftToolBar.computeSize(SWT.DEFAULT, SWT.DEFAULT, true); leftToolBar.setSize(size); } } } private void updateHeaderImage() { if (hasLeftToolBar()) { setHeaderImage(null); } else { getHeaderForm().getForm().setImage(getBrandingImage()); } } private Image getBrandingImage() { String connectorKind; TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); if (outgoingNewRepository != null) { connectorKind = outgoingNewRepository.getConnectorKind(); } else { connectorKind = task.getConnectorKind(); } if (LocalRepositoryConnector.CONNECTOR_KIND.equals(connectorKind)) { return CommonImages.getImage(TasksUiImages.TASK); } else { ImageDescriptor overlay = TasksUiPlugin.getDefault().getOverlayIcon(connectorKind); Image image = CommonImages.getImageWithOverlay(TasksUiImages.REPOSITORY, overlay, false, false); return image; } } private boolean hasLeftToolBar() { return leftToolBar != null && leftToolBarManager != null; // && leftToolBarManager.getSize() > initialLeftToolbarSize; } private void updateHeaderLabel() { TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); final TaskRepository taskRepository = (outgoingNewRepository != null) ? 
outgoingNewRepository : taskEditorInput.getTaskRepository(); // if (taskRepository.getConnectorKind().equals(LocalRepositoryConnector.CONNECTOR_KIND)) { // getHeaderForm().getForm().setText(Messages.TaskEditor_Task_ + task.getSummary()); // } else { AbstractRepositoryConnectorUi connectorUi = TasksUiPlugin.getConnectorUi(taskRepository.getConnectorKind()); String kindLabel = Messages.TaskEditor_Task; if (connectorUi != null) { kindLabel = connectorUi.getTaskKindLabel(task); } String idLabel = task.getTaskKey(); if (idLabel != null) { kindLabel += " " + idLabel; //$NON-NLS-1$ } if (hasLeftToolBar() && titleLabel != null) { titleLabel.setText(kindLabel); getHeaderForm().getForm().setText(null); setHeaderImage(null); } else { getHeaderForm().getForm().setText(kindLabel); } } /** * Update the title of the editor. * * @deprecated use {@link #updateHeaderToolBar()} instead */ @Deprecated public void updateTitle(String name) { updateHeader(); } private void updateTitleImage() { if (task != null) { AbstractRepositoryConnectorUi connectorUi = TasksUiPlugin.getConnectorUi(task.getConnectorKind()); if (connectorUi != null) { ImageDescriptor overlayDescriptor = connectorUi.getTaskKindOverlay(task); setTitleImage(CommonImages.getCompositeTaskImage(TasksUiImages.TASK, overlayDescriptor, false)); } else { setTitleImage(CommonImages.getImage(TasksUiImages.TASK)); } // } else if (getEditorInput() instanceof AbstractRepositoryTaskEditorInput) { // setTitleImage(CommonImages.getImage(TasksUiImages.TASK_REMOTE)); } else { setTitleImage(CommonImages.getImage(TasksUiImages.TASK)); } } }
true
true
public void updateHeaderToolBar() { if (isHeaderFormDisposed()) { return; } final Form form = getHeaderForm().getForm().getForm(); toolBarManager = form.getToolBarManager(); toolBarManager.removeAll(); // toolBarManager.update(true); TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); final TaskRepository taskRepository = (outgoingNewRepository != null) ? outgoingNewRepository : taskEditorInput.getTaskRepository(); ControlContribution repositoryLabelControl = new ControlContribution(Messages.AbstractTaskEditorPage_Title) { @Override protected Control createControl(Composite parent) { FormToolkit toolkit = getHeaderForm().getToolkit(); Composite composite = toolkit.createComposite(parent); RowLayout layout = new RowLayout(); if (PlatformUtil.hasNarrowToolBar()) { layout.marginTop = 0; layout.marginBottom = 0; layout.center = true; } composite.setLayout(layout); composite.setBackground(null); String label = taskRepository.getRepositoryLabel(); if (label.indexOf("//") != -1) { //$NON-NLS-1$ label = label.substring((taskRepository.getRepositoryUrl().indexOf("//") + 2)); //$NON-NLS-1$ } ImageHyperlink link = new ImageHyperlink(composite, SWT.NONE); link.setText(label); link.setFont(JFaceResources.getBannerFont()); link.setForeground(toolkit.getColors().getColor(IFormColors.TITLE)); link.addHyperlinkListener(new HyperlinkAdapter() { @Override public void linkActivated(HyperlinkEvent e) { TasksUiUtil.openEditRepositoryWizard(taskRepository); } }); return composite; } }; toolBarManager.add(repositoryLabelControl); toolBarManager.add(new GroupMarker("repository")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("new")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("open")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS)); final String taskUrl = TasksUiInternal.getAuthenticatedUrl(taskRepository, task); if (taskUrl != null && taskUrl.length() > 0) { Action openWithBrowserAction = new Action() { @Override public void run() { TasksUiUtil.openWithBrowser(taskRepository, task); } }; // ImageDescriptor overlay = TasksUiPlugin.getDefault().getOverlayIcon(taskRepository.getConnectorKind()); // ImageDescriptor compositeDescriptor = new TaskListImageDescriptor(TasksUiImages.REPOSITORY_SMALL_TOP, // overlay, false, true); openWithBrowserAction.setImageDescriptor(CommonImages.WEB); //openWithBrowserAction.setImageDescriptor(CommonImages.BROWSER_OPEN_TASK); openWithBrowserAction.setToolTipText(Messages.AbstractTaskEditorPage_Open_with_Web_Browser); toolBarManager.appendToGroup("open", openWithBrowserAction); //$NON-NLS-1$ } if (activateAction == null) { activateAction = new ToggleTaskActivationAction(task) { @Override public void run() { TaskList taskList = TasksUiPlugin.getTaskList(); if (taskList.getTask(task.getRepositoryUrl(), task.getTaskId()) == null) { setMessage(Messages.TaskEditor_Task_added_to_the_Uncategorized_container, IMessageProvider.INFORMATION); } super.run(); } }; } toolBarManager.add(new Separator("planning")); //$NON-NLS-1$ disposeScheduleAction(); scheduleAction = new TaskEditorScheduleAction(task); toolBarManager.add(scheduleAction); toolBarManager.add(new GroupMarker("page")); //$NON-NLS-1$ for (IFormPage page : getPages()) { if (page instanceof TaskFormPage) { TaskFormPage taskEditorPage = (TaskFormPage) page; taskEditorPage.fillToolBar(toolBarManager); } } toolBarManager.add(new Separator("activation")); //$NON-NLS-1$ // ContributionItem spacer = new ContributionItem() { // @Override // public void 
fill(ToolBar toolbar, int index) { // ToolItem item = new ToolItem(toolbar, SWT.NONE); // int scaleHeight = 42; // if (PlatformUtil.needsCarbonToolBarFix()) { // scaleHeight = 32; // } // final Image image = new Image(toolbar.getDisplay(), CommonImages.getImage(CommonImages.BLANK) // .getImageData() // .scaledTo(1, scaleHeight)); // item.setImage(image); // item.addDisposeListener(new DisposeListener() { // public void widgetDisposed(DisposeEvent e) { // image.dispose(); // } // }); // item.setWidth(5); // item.setEnabled(false); // } // }; // toolBarManager.add(spacer); // for (IFormPage page : getPages()) { // if (page instanceof AbstractTaskEditorPage) { // AbstractTaskEditorPage taskEditorPage = (AbstractTaskEditorPage) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } else if (page instanceof TaskPlanningEditor) { // TaskPlanningEditor taskEditorPage = (TaskPlanningEditor) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } // } // add external contributions menuService = (IMenuService) getSite().getService(IMenuService.class); if (menuService != null && toolBarManager instanceof ContributionManager) { menuService.populateContributionManager((ContributionManager) toolBarManager, "toolbar:" //$NON-NLS-1$ + ID_TOOLBAR_HEADER + "." + taskRepository.getConnectorKind()); //$NON-NLS-1$ } toolBarManager.update(true); // XXX move this call updateLeftHeaderToolBar(); updateHeader(); }
public void updateHeaderToolBar() { if (isHeaderFormDisposed()) { return; } final Form form = getHeaderForm().getForm().getForm(); toolBarManager = form.getToolBarManager(); toolBarManager.removeAll(); // toolBarManager.update(true); TaskRepository outgoingNewRepository = TasksUiUtil.getOutgoingNewTaskRepository(task); final TaskRepository taskRepository = (outgoingNewRepository != null) ? outgoingNewRepository : taskEditorInput.getTaskRepository(); ControlContribution repositoryLabelControl = new ControlContribution(Messages.AbstractTaskEditorPage_Title) { @Override protected Control createControl(Composite parent) { FormToolkit toolkit = getHeaderForm().getToolkit(); Composite composite = toolkit.createComposite(parent); RowLayout layout = new RowLayout(); if (PlatformUtil.hasNarrowToolBar()) { layout.marginTop = 0; layout.marginBottom = 0; layout.center = true; } composite.setLayout(layout); composite.setBackground(null); String label = taskRepository.getRepositoryLabel(); if (label.indexOf("//") != -1) { //$NON-NLS-1$ label = label.substring((taskRepository.getRepositoryUrl().indexOf("//") + 2)); //$NON-NLS-1$ } ImageHyperlink link = new ImageHyperlink(composite, SWT.NONE); link.setText(label); link.setFont(JFaceResources.getBannerFont()); link.setForeground(toolkit.getColors().getColor(IFormColors.TITLE)); link.addHyperlinkListener(new HyperlinkAdapter() { @Override public void linkActivated(HyperlinkEvent e) { TasksUiUtil.openEditRepositoryWizard(taskRepository); } }); return composite; } }; toolBarManager.add(repositoryLabelControl); toolBarManager.add(new GroupMarker("repository")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("new")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker("open")); //$NON-NLS-1$ toolBarManager.add(new GroupMarker(IWorkbenchActionConstants.MB_ADDITIONS)); final String taskUrl = TasksUiInternal.getAuthenticatedUrl(taskRepository, task); if (taskUrl != null && taskUrl.length() > 0) { Action openWithBrowserAction = new Action() { @Override public void run() { TasksUiUtil.openWithBrowser(taskRepository, task); } }; // ImageDescriptor overlay = TasksUiPlugin.getDefault().getOverlayIcon(taskRepository.getConnectorKind()); // ImageDescriptor compositeDescriptor = new TaskListImageDescriptor(TasksUiImages.REPOSITORY_SMALL_TOP, // overlay, false, true); openWithBrowserAction.setImageDescriptor(CommonImages.WEB); //openWithBrowserAction.setImageDescriptor(CommonImages.BROWSER_OPEN_TASK); openWithBrowserAction.setToolTipText(Messages.AbstractTaskEditorPage_Open_with_Web_Browser); toolBarManager.appendToGroup("open", openWithBrowserAction); //$NON-NLS-1$ } if (activateAction == null) { activateAction = new ToggleTaskActivationAction(task) { @Override public void run() { TaskList taskList = TasksUiPlugin.getTaskList(); if (taskList.getTask(task.getRepositoryUrl(), task.getTaskId()) == null) { setMessage(Messages.TaskEditor_Task_added_to_the_Uncategorized_container, IMessageProvider.INFORMATION); } super.run(); } }; } toolBarManager.add(new Separator("planning")); //$NON-NLS-1$ disposeScheduleAction(); scheduleAction = new TaskEditorScheduleAction(task); toolBarManager.add(scheduleAction); toolBarManager.add(new GroupMarker("page")); //$NON-NLS-1$ for (IFormPage page : getPages()) { if (page instanceof TaskFormPage) { TaskFormPage taskEditorPage = (TaskFormPage) page; taskEditorPage.fillToolBar(toolBarManager); } } toolBarManager.add(new Separator("activation")); //$NON-NLS-1$ // ContributionItem spacer = new ContributionItem() { // @Override // public void 
fill(ToolBar toolbar, int index) { // ToolItem item = new ToolItem(toolbar, SWT.NONE); // int scaleHeight = 42; // if (PlatformUtil.needsCarbonToolBarFix()) { // scaleHeight = 32; // } // final Image image = new Image(toolbar.getDisplay(), CommonImages.getImage(CommonImages.BLANK) // .getImageData() // .scaledTo(1, scaleHeight)); // item.setImage(image); // item.addDisposeListener(new DisposeListener() { // public void widgetDisposed(DisposeEvent e) { // image.dispose(); // } // }); // item.setWidth(5); // item.setEnabled(false); // } // }; // toolBarManager.add(spacer); // for (IFormPage page : getPages()) { // if (page instanceof AbstractTaskEditorPage) { // AbstractTaskEditorPage taskEditorPage = (AbstractTaskEditorPage) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } else if (page instanceof TaskPlanningEditor) { // TaskPlanningEditor taskEditorPage = (TaskPlanningEditor) page; // taskEditorPage.fillLeftHeaderToolBar(toolBarManager); // } // } // add external contributions menuService = (IMenuService) getSite().getService(IMenuService.class); if (menuService != null && toolBarManager instanceof ContributionManager) { menuService.populateContributionManager((ContributionManager) toolBarManager, "toolbar:" //$NON-NLS-1$ + ID_TOOLBAR_HEADER + "." + taskRepository.getConnectorKind()); //$NON-NLS-1$ menuService.populateContributionManager((ContributionManager) toolBarManager, "toolbar:" //$NON-NLS-1$ + ID_TOOLBAR_HEADER); } toolBarManager.update(true); // XXX move this call updateLeftHeaderToolBar(); updateHeader(); }
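The fix in this row adds a second populateContributionManager call so that header tool bar contributions registered under the generic URI (toolbar:ID_TOOLBAR_HEADER) are populated in addition to those under the connector-specific URI (toolbar:ID_TOOLBAR_HEADER.connectorKind). A minimal sketch of that pattern follows, assuming an Eclipse workbench context with a workbench part site and a Form named form in scope; HEADER_TOOLBAR_ID and connectorKind are illustrative names, not the Mylyn constants.

// Hedged sketch, not the Mylyn implementation: populate one tool bar manager
// from both a type-specific and a generic contribution URI so plug-ins can
// target either scope.
IMenuService menuService = (IMenuService) getSite().getService(IMenuService.class);
IToolBarManager manager = form.getToolBarManager();
if (menuService != null && manager instanceof ContributionManager) {
    // contributions registered for this connector kind only
    menuService.populateContributionManager((ContributionManager) manager,
            "toolbar:" + HEADER_TOOLBAR_ID + "." + connectorKind);
    // contributions registered for every editor; this is the call the fix adds
    menuService.populateContributionManager((ContributionManager) manager,
            "toolbar:" + HEADER_TOOLBAR_ID);
}
manager.update(true); // render both sets of items

Releasing the populated items in dispose() via menuService.releaseContributions, as the editor above already does, keeps the populate calls balanced over the editor's lifetime.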
diff --git a/lucene/src/test/org/apache/lucene/index/TestMultiFields.java b/lucene/src/test/org/apache/lucene/index/TestMultiFields.java index 5c670dc96..e87786f04 100644 --- a/lucene/src/test/org/apache/lucene/index/TestMultiFields.java +++ b/lucene/src/test/org/apache/lucene/index/TestMultiFields.java @@ -1,118 +1,118 @@ package org.apache.lucene.index; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.store.*; import org.apache.lucene.util.*; import org.apache.lucene.document.*; import org.apache.lucene.analysis.*; import java.util.*; public class TestMultiFields extends LuceneTestCase { public void testRandom() throws Exception { for(int iter=0;iter<2*_TestUtil.getRandomMultiplier();iter++) { Directory dir = new MockRAMDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES)); Random r = new Random(); Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>(); Set<Integer> deleted = new HashSet<Integer>(); List<BytesRef> terms = new ArrayList<BytesRef>(); - int numDocs = r.nextInt(100*_TestUtil.getRandomMultiplier()); + int numDocs = _TestUtil.nextInt(r, 1, 100*_TestUtil.getRandomMultiplier()); Document doc = new Document(); Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(f); Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(id); boolean onlyUniqueTerms = r.nextBoolean(); for(int i=0;i<numDocs;i++) { if (!onlyUniqueTerms && r.nextBoolean() && terms.size() > 0) { // re-use existing term BytesRef term = terms.get(r.nextInt(terms.size())); docs.get(term).add(i); f.setValue(term.utf8ToString()); } else { String s = _TestUtil.randomUnicodeString(r, 10); BytesRef term = new BytesRef(s); if (!docs.containsKey(term)) { docs.put(term, new ArrayList<Integer>()); } docs.get(term).add(i); terms.add(term); f.setValue(s); } id.setValue(""+i); w.addDocument(doc); if (r.nextInt(4) == 1) { w.commit(); } if (i > 0 && r.nextInt(20) == 1) { int delID = r.nextInt(i); deleted.add(delID); w.deleteDocuments(new Term("id", ""+delID)); } } IndexReader reader = w.getReader(); w.close(); Bits delDocs = MultiFields.getDeletedDocs(reader); for(int delDoc : deleted) { assertTrue(delDocs.get(delDoc)); } Terms terms2 = MultiFields.getTerms(reader, "field"); for(int i=0;i<100;i++) { BytesRef term = terms.get(r.nextInt(terms.size())); DocsEnum docsEnum = terms2.docs(delDocs, term, null); int count = 0; for(int docID : docs.get(term)) { if (!deleted.contains(docID)) { assertEquals(docID, docsEnum.nextDoc()); count++; } } //System.out.println("c=" + count + " t=" + term); assertEquals(docsEnum.NO_MORE_DOCS, docsEnum.nextDoc()); } reader.close(); dir.close(); } } private void verify(IndexReader r, 
String term, List<Integer> expected) throws Exception { DocsEnum docs = MultiFields.getTermDocsEnum(r, MultiFields.getDeletedDocs(r), "field", new BytesRef(term)); for(int docID : expected) { assertEquals(docID, docs.nextDoc()); } assertEquals(docs.NO_MORE_DOCS, docs.nextDoc()); } }
true
true
public void testRandom() throws Exception { for(int iter=0;iter<2*_TestUtil.getRandomMultiplier();iter++) { Directory dir = new MockRAMDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES)); Random r = new Random(); Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>(); Set<Integer> deleted = new HashSet<Integer>(); List<BytesRef> terms = new ArrayList<BytesRef>(); int numDocs = r.nextInt(100*_TestUtil.getRandomMultiplier()); Document doc = new Document(); Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(f); Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(id); boolean onlyUniqueTerms = r.nextBoolean(); for(int i=0;i<numDocs;i++) { if (!onlyUniqueTerms && r.nextBoolean() && terms.size() > 0) { // re-use existing term BytesRef term = terms.get(r.nextInt(terms.size())); docs.get(term).add(i); f.setValue(term.utf8ToString()); } else { String s = _TestUtil.randomUnicodeString(r, 10); BytesRef term = new BytesRef(s); if (!docs.containsKey(term)) { docs.put(term, new ArrayList<Integer>()); } docs.get(term).add(i); terms.add(term); f.setValue(s); } id.setValue(""+i); w.addDocument(doc); if (r.nextInt(4) == 1) { w.commit(); } if (i > 0 && r.nextInt(20) == 1) { int delID = r.nextInt(i); deleted.add(delID); w.deleteDocuments(new Term("id", ""+delID)); } } IndexReader reader = w.getReader(); w.close(); Bits delDocs = MultiFields.getDeletedDocs(reader); for(int delDoc : deleted) { assertTrue(delDocs.get(delDoc)); } Terms terms2 = MultiFields.getTerms(reader, "field"); for(int i=0;i<100;i++) { BytesRef term = terms.get(r.nextInt(terms.size())); DocsEnum docsEnum = terms2.docs(delDocs, term, null); int count = 0; for(int docID : docs.get(term)) { if (!deleted.contains(docID)) { assertEquals(docID, docsEnum.nextDoc()); count++; } } //System.out.println("c=" + count + " t=" + term); assertEquals(docsEnum.NO_MORE_DOCS, docsEnum.nextDoc()); } reader.close(); dir.close(); } }
public void testRandom() throws Exception { for(int iter=0;iter<2*_TestUtil.getRandomMultiplier();iter++) { Directory dir = new MockRAMDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES)); Random r = new Random(); Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>(); Set<Integer> deleted = new HashSet<Integer>(); List<BytesRef> terms = new ArrayList<BytesRef>(); int numDocs = _TestUtil.nextInt(r, 1, 100*_TestUtil.getRandomMultiplier()); Document doc = new Document(); Field f = new Field("field", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(f); Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED); doc.add(id); boolean onlyUniqueTerms = r.nextBoolean(); for(int i=0;i<numDocs;i++) { if (!onlyUniqueTerms && r.nextBoolean() && terms.size() > 0) { // re-use existing term BytesRef term = terms.get(r.nextInt(terms.size())); docs.get(term).add(i); f.setValue(term.utf8ToString()); } else { String s = _TestUtil.randomUnicodeString(r, 10); BytesRef term = new BytesRef(s); if (!docs.containsKey(term)) { docs.put(term, new ArrayList<Integer>()); } docs.get(term).add(i); terms.add(term); f.setValue(s); } id.setValue(""+i); w.addDocument(doc); if (r.nextInt(4) == 1) { w.commit(); } if (i > 0 && r.nextInt(20) == 1) { int delID = r.nextInt(i); deleted.add(delID); w.deleteDocuments(new Term("id", ""+delID)); } } IndexReader reader = w.getReader(); w.close(); Bits delDocs = MultiFields.getDeletedDocs(reader); for(int delDoc : deleted) { assertTrue(delDocs.get(delDoc)); } Terms terms2 = MultiFields.getTerms(reader, "field"); for(int i=0;i<100;i++) { BytesRef term = terms.get(r.nextInt(terms.size())); DocsEnum docsEnum = terms2.docs(delDocs, term, null); int count = 0; for(int docID : docs.get(term)) { if (!deleted.contains(docID)) { assertEquals(docID, docsEnum.nextDoc()); count++; } } //System.out.println("c=" + count + " t=" + term); assertEquals(docsEnum.NO_MORE_DOCS, docsEnum.nextDoc()); } reader.close(); dir.close(); } }
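The change in this row replaces r.nextInt(100*_TestUtil.getRandomMultiplier()), whose result may be 0, with _TestUtil.nextInt(r, 1, ...), which is bounded below by 1. With numDocs == 0 the terms list stays empty, and the verification loop's terms.get(r.nextInt(terms.size())) throws IllegalArgumentException, since Random.nextInt(0) is illegal. A standalone sketch of an inclusive-range helper in the spirit of that fix; nextIntInclusive is a hypothetical stand-in, not the Lucene _TestUtil utility.

import java.util.Random;

public class BoundedRandomDemo {
    // Stand-in for _TestUtil.nextInt: uniform value in the inclusive
    // range [min, max], so a lower bound of 1 rules out the empty case.
    static int nextIntInclusive(Random r, int min, int max) {
        return min + r.nextInt(max - min + 1);
    }

    public static void main(String[] args) {
        Random r = new Random(42);
        // Random.nextInt(100) may legally return 0; the helper cannot.
        int numDocs = nextIntInclusive(r, 1, 100);
        System.out.println("numDocs = " + numDocs + " (always >= 1)");
    }
}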
diff --git a/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/misc/WebReset.java b/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/misc/WebReset.java index 861dfc8c..175fbf37 100644 --- a/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/misc/WebReset.java +++ b/cspi-webui/src/main/java/org/collectionspace/chain/csp/webui/misc/WebReset.java @@ -1,426 +1,426 @@ /* Copyright 2010 University of Cambridge * Licensed under the Educational Community License (ECL), Version 2.0. You may not use this file except in * compliance with this License. * * You may obtain a copy of the ECL 2.0 License at https://source.collectionspace.org/collection-space/LICENSE.txt */ package org.collectionspace.chain.csp.webui.misc; import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.collectionspace.chain.csp.config.ConfigException; import org.collectionspace.chain.csp.schema.Field; import org.collectionspace.chain.csp.schema.Instance; import org.collectionspace.chain.csp.schema.Option; import org.collectionspace.chain.csp.schema.Record; import org.collectionspace.chain.csp.schema.Spec; import org.collectionspace.chain.csp.webui.authorities.AuthoritiesVocabulariesInitialize; import org.collectionspace.chain.csp.webui.authorities.VocabulariesRead; import org.collectionspace.chain.csp.webui.main.Request; import org.collectionspace.chain.csp.webui.main.WebMethod; import org.collectionspace.chain.csp.webui.main.WebUI; import org.collectionspace.csp.api.persistence.ExistException; import org.collectionspace.csp.api.persistence.Storage; import org.collectionspace.csp.api.persistence.UnderlyingStorageException; import org.collectionspace.csp.api.persistence.UnimplementedException; import org.collectionspace.csp.api.ui.TTYOutputter; import org.collectionspace.csp.api.ui.UIException; import org.collectionspace.csp.api.ui.UIRequest; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class WebReset implements WebMethod { private static final Logger log=LoggerFactory.getLogger(WebReset.class); private boolean quick; private boolean populate; private Spec spec; private AuthoritiesVocabulariesInitialize avi; // HACK! This should not build services logic in this way!!! 
private static final String PERSON_TERMLIST_ELEMENT = "personTermGroup"; private static final String ORG_TERMLIST_ELEMENT = "orgTermGroup"; private static final String TERM_DISPLAYNAME_ELEMENT = "termDisplayName"; public WebReset(boolean in, boolean populate) { quick=in; this.populate = populate; } // XXX refactor private JSONObject getJSONResource(String in) throws IOException, JSONException { return new JSONObject(getResource(in)); } // XXX refactor private String getResource(String in) throws IOException, JSONException { String path=getClass().getPackage().getName().replaceAll("\\.","/"); InputStream stream=Thread.currentThread().getContextClassLoader().getResourceAsStream(path+"/"+in); log.debug(path); String data=IOUtils.toString(stream); stream.close(); return data; } private void initialiseAll(Storage storage,UIRequest request,String path) throws UIException { TTYOutputter tty=request.getTTYOutputter(); try{ log.info("Initialise vocab/auth entries"); tty.line("Initialise vocab/auth entries"); // Delete existing vocab entries JSONObject myjs = new JSONObject(); myjs.put("pageSize", "10"); myjs.put("pageNum", "0"); JSONObject data = storage.getPathsJSON("/",null); String[] paths = (String[]) data.get("listItems"); for(String dir : paths) { try{ if(this.spec.hasRecord(dir)){ Record r = this.spec.getRecord(dir); if(r.isType("authority")){ log.info("testing Authority " +dir); tty.line("testing Authority " + dir); for(Instance n : r.getAllInstances()) { avi = new AuthoritiesVocabulariesInitialize(n, populate); Option[] allOpts = n.getAllOptions(); avi.createIfMissingAuthority(storage,tty, r, n); avi.fillVocab(storage, r, n, tty, allOpts, true); //avi.initializeVocab(storage,request,path); /* String url = r.getID()+"/"+n.getTitleRef(); try{ storage.getPathsJSON(url,new JSONObject()).toString(); log.info("Instance " + n.getID()+ " Exists"); tty.line("Instance " + n.getID()+ " Exists"); } catch (UnderlyingStorageException x) { log.info("need to create Instance " + n.getID()); tty.line("need to create Instance " + n.getID()); JSONObject fields=new JSONObject("{'displayName':'"+n.getTitle()+"','shortIdentifier':'"+n.getWebURL()+"'}"); String base=r.getID(); storage.autocreateJSON(base,fields); log.info("Instance " + n.getID() + " Created"); tty.line("Instance " + n.getID() + " Created"); } */ } } } } catch(Exception e){ tty.line("that was weird but probably not a problem " + e.getMessage()); log.warn("initialiseAll() exception: " + e.getMessage()); } } } catch (ExistException e) { log.info("ExistException "+ e.getLocalizedMessage()); tty.line("ExistException "+ e.getLocalizedMessage()); throw new UIException("Existence problem",e); } catch (UnimplementedException e) { log.info("UnimplementedException "+ e.getLocalizedMessage()); tty.line("UnimplementedException "+ e.getLocalizedMessage()); throw new UIException("Unimplemented ",e); } catch (UnderlyingStorageException x) { log.info("UnderlyingStorageException "+ x.getLocalizedMessage()); tty.line("UnderlyingStorageException "+ x.getLocalizedMessage()); throw new UIException("Problem storing"+x.getLocalizedMessage(),x.getStatus(),x.getUrl(),x); } catch (JSONException e) { log.info("JSONException "+ e.getLocalizedMessage()); tty.line("JSONException "+ e.getLocalizedMessage()); throw new UIException("Invalid JSON",e); } } private static JSONObject createTrivialAuthItem(String termGroup, String name) throws JSONException { JSONObject item=new JSONObject(); JSONArray termInfoArray = new JSONArray(); JSONObject termInfo = new JSONObject(); 
termInfo.put(TERM_DISPLAYNAME_ELEMENT, name); termInfoArray.put(termInfo); item.put(termGroup, termInfoArray); return item; } private void reset(Storage storage,UIRequest request,String path) throws UIException { //remember to log into the front end before trying to run this JSONObject data = new JSONObject(); TTYOutputter tty=request.getTTYOutputter(); // Temporary hack to reset db try { data = storage.getPathsJSON("/",null); String[] paths = (String[]) data.get("listItems"); if(!path.equals("nodelete")){ // Delete existing records for(String dir : paths) { Record r = null; log.info(dir); if("direct".equals(dir)||"relations".equals(dir)) continue; try{ r = this.spec.getRecord(dir); } catch(Exception e){ continue; } if(r.isType("procedure")){ if("termlistitem".equals(dir) ||"termlist".equals(dir)) continue; // Nothing to do for the pseudo-records if(r.isType("searchall")) { continue; } } else if(r.isType("authority")){ continue; } else if(r.isType("record")){ - if("hierarchy".equals(dir) || "dimension".equals(dir) || "structureddate".equals(dir)) + if("hierarchy".equals(dir) || !r.isRealRecord()) // Filter out self-renderers, etc continue; log.info("S"); } else if(r.isType("authorizationdata")){ continue; } else if(r.isType("userdata")){ continue; } else{ //ignore - have no idea what it is continue; } //if("place".equals(dir) || "vocab".equals(dir) || "contact".equals(dir) || "location".equals(dir) || "person".equals(dir) || "organization".equals(dir) || "taxon".equals(dir)){ // continue; //} // ignore authorization //if("rolePermission".equals(dir) || "accountrole".equals(dir) || "accountroles".equals(dir) || "userperm".equals(dir)|| "permrole".equals(dir) || "permission".equals(dir) || "role".equals(dir)|| "userrole".equals(dir) || "users".equals(dir) ){ // continue; //} // ignore other - tho we do need to clean these up //if("termlistitem".equals(dir) ||"termlist".equals(dir) || "reports".equals(dir) || "reporting".equals(dir) || "output".equals(dir) ) // continue; //// ignore other - tho we do need to clean these up //if("hierarchy".equals(dir) || "dimension".equals(dir) ||"structureddate".equals(dir) ||"blobs".equals(dir) ||"relations".equals(dir) || "direct".equals(dir) || "id".equals(dir) ) // continue; log.info("Deleteing data associated with : "+dir); tty.line("Deleteing data associated with : "+dir); JSONObject data2 = storage.getPathsJSON(dir,null); String[] paths2 = (String[]) data2.get("listItems"); for(int i=0;i<paths2.length;i++) { tty.line("path : "+dir+"/"+paths2[i]); try { storage.deleteJSON(dir+"/"+paths2[i]); } catch (UnimplementedException e) { tty.line("UnimplementedException"+e); tty.line("ux"); } catch (UnderlyingStorageException e) { tty.line("UnderlyingStorageEception"+e); } tty.line("ok"); tty.flush(); } } } log.info("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); tty.line("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); // Create records anew tty.line("Create records anew"); String schedule=getResource("reset.txt"); for(String line : schedule.split("\n")) { String[] parts=line.split(" +",2); if(!parts[0].equals("")){ tty.line("Creating "+parts[0]); log.info("Creating "+parts[0]); storage.autocreateJSON(parts[0],getJSONResource(parts[1])); tty.flush(); } } log.info("Delete existing vocab/auth entries"); tty.line("Delete existing vocab/auth entries"); // Delete existing vocab entries JSONObject myjs = new JSONObject(); myjs.put("pageSize", "10"); 
myjs.put("pageNum", "0"); for(String dir : paths) { try{ if(this.spec.hasRecord(dir)){ Record r = this.spec.getRecord(dir); if(r.isType("authority")){ for(Instance n : r.getAllInstances()) { String url = r.getID()+"/"+n.getTitleRef(); try{ storage.getPathsJSON(url,new JSONObject()).toString(); } catch (UnderlyingStorageException x) { log.info("need to create Instance " + n.getID()); tty.line("need to create Instance " + n.getID()); JSONObject fields=new JSONObject("{'displayName':'"+n.getTitle()+"','shortIdentifier':'"+n.getWebURL()+"'}"); String base=r.getID(); storage.autocreateJSON(base,fields); log.info("Instance " + n.getID() + " Created"); tty.line("Instance " + n.getID() + " Created"); } deletall(n,r,url,"Deleting "+ url, storage, data, tty, myjs); } } } } catch(Exception e){ log.info("that was weird but probably not an issue " + e.getMessage()); } } log.info("Creating Dummy data"); tty.line("Creating Dummy data"); tty.flush(); // Create vocab entries String names=getResource("names.txt"); int i=0; for(String nextName : names.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(PERSON_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/person/person",entry); tty.line("Created Person "+entry); log.info("Created Person "+entry); tty.flush(); if(quick && i>20) break; } // Create vocab entries String orgs=getResource("orgs.txt"); i=0; for(String nextName : orgs.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(ORG_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/organization/organization",entry); tty.line("Created Organisation "+nextName); log.info("Created Organisation "+nextName); tty.flush(); if(quick && i>20) break; } tty.line("done"); log.info("done"); } catch (ExistException e) { log.info("ExistException "+ e.getLocalizedMessage()); tty.line("ExistException "+ e.getLocalizedMessage()); throw new UIException("Existence problem",e); } catch (UnimplementedException e) { log.info("UnimplementedException "+ e.getLocalizedMessage()); tty.line("UnimplementedException "+ e.getLocalizedMessage()); throw new UIException("Unimplemented ",e); } catch (UnderlyingStorageException x) { log.info("UnderlyingStorageException "+ x.getLocalizedMessage()); tty.line("UnderlyingStorageException "+ x.getLocalizedMessage()); throw new UIException("Problem storing"+x.getLocalizedMessage(),x.getStatus(),x.getUrl(),x); } catch (JSONException e) { log.info("JSONException "+ e.getLocalizedMessage()); tty.line("JSONException "+ e.getLocalizedMessage()); throw new UIException("Invalid JSON",e); } catch (IOException e) { log.info("IOException "+ e.getLocalizedMessage()); tty.line("IOException "+ e.getLocalizedMessage()); throw new UIException("IOException",e); } } private JSONObject deletall(Instance n,Record thisr, String path, String msg, Storage storage, JSONObject data, TTYOutputter tty, JSONObject myjs) throws JSONException, ExistException, UnimplementedException, UnderlyingStorageException, UIException { int resultsize; int check; String checkpagination; resultsize=1; check = 0; checkpagination = ""; while(resultsize >0){ myjs.put("pageNum", check); //check++; //don't increment page num as need to call page 0 as //once you delete a page worth the next page is now the current page //String url = thisr.getID()+"/"+n.getTitleRef(); if(thisr== null || n==null){ String[] bits = path.split("/"); thisr = this.spec.getRecordByWebUrl(bits[1]); n = thisr.getInstance(bits[1]+"-"+bits[2]); } try{ data = storage.getPathsJSON(path,myjs); } catch (UnderlyingStorageException x) { JSONObject fields=new 
JSONObject("{'displayName':'"+n.getTitle()+"','shortIdentifier':'"+n.getWebURL()+"'}"); if(thisr.getFieldFullList("termStatus") instanceof Field){ fields.put("termStatus", ((Field)thisr.getFieldFullList("termStatus")).getOptionDefault()); } String base=thisr.getID(); storage.autocreateJSON(base,fields); //data = storage.getPathsJSON(url,restriction); } String[] res = (String[]) data.get("listItems"); if(res.length==0 || checkpagination.equals(res[0])){ resultsize=0; break; //testing whether we have actually returned the same page or the next page - all csid returned should be unique } else{ checkpagination = res[0]; } resultsize=res.length; for(String urn : res) { try { storage.deleteJSON(path+"/"+urn); tty.line(msg+urn); log.info(msg+urn); } catch(Exception e) { /* Sometimes records are wdged */ } tty.flush(); } } return data; } public void run(Object in,String[] tail) throws UIException { Request q=(Request)in; initialiseAll(q.getStorage(),q.getUIRequest(),StringUtils.join(tail,"/")); if(this.populate){ reset(q.getStorage(),q.getUIRequest(),StringUtils.join(tail,"/")); } } public void configure() throws ConfigException {} public void configure(WebUI ui,Spec spec) { this.spec = spec; } }
true
true
private void reset(Storage storage,UIRequest request,String path) throws UIException { //remember to log into the front end before trying to run this JSONObject data = new JSONObject(); TTYOutputter tty=request.getTTYOutputter(); // Temporary hack to reset db try { data = storage.getPathsJSON("/",null); String[] paths = (String[]) data.get("listItems"); if(!path.equals("nodelete")){ // Delete existing records for(String dir : paths) { Record r = null; log.info(dir); if("direct".equals(dir)||"relations".equals(dir)) continue; try{ r = this.spec.getRecord(dir); } catch(Exception e){ continue; } if(r.isType("procedure")){ if("termlistitem".equals(dir) ||"termlist".equals(dir)) continue; // Nothing to do for the pseudo-records if(r.isType("searchall")) { continue; } } else if(r.isType("authority")){ continue; } else if(r.isType("record")){ if("hierarchy".equals(dir) || "dimension".equals(dir) || "structureddate".equals(dir)) continue; log.info("S"); } else if(r.isType("authorizationdata")){ continue; } else if(r.isType("userdata")){ continue; } else{ //ignore - have no idea what it is continue; } //if("place".equals(dir) || "vocab".equals(dir) || "contact".equals(dir) || "location".equals(dir) || "person".equals(dir) || "organization".equals(dir) || "taxon".equals(dir)){ // continue; //} // ignore authorization //if("rolePermission".equals(dir) || "accountrole".equals(dir) || "accountroles".equals(dir) || "userperm".equals(dir)|| "permrole".equals(dir) || "permission".equals(dir) || "role".equals(dir)|| "userrole".equals(dir) || "users".equals(dir) ){ // continue; //} // ignore other - tho we do need to clean these up //if("termlistitem".equals(dir) ||"termlist".equals(dir) || "reports".equals(dir) || "reporting".equals(dir) || "output".equals(dir) ) // continue; //// ignore other - tho we do need to clean these up //if("hierarchy".equals(dir) || "dimension".equals(dir) ||"structureddate".equals(dir) ||"blobs".equals(dir) ||"relations".equals(dir) || "direct".equals(dir) || "id".equals(dir) ) // continue; log.info("Deleteing data associated with : "+dir); tty.line("Deleteing data associated with : "+dir); JSONObject data2 = storage.getPathsJSON(dir,null); String[] paths2 = (String[]) data2.get("listItems"); for(int i=0;i<paths2.length;i++) { tty.line("path : "+dir+"/"+paths2[i]); try { storage.deleteJSON(dir+"/"+paths2[i]); } catch (UnimplementedException e) { tty.line("UnimplementedException"+e); tty.line("ux"); } catch (UnderlyingStorageException e) { tty.line("UnderlyingStorageEception"+e); } tty.line("ok"); tty.flush(); } } } log.info("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); tty.line("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); // Create records anew tty.line("Create records anew"); String schedule=getResource("reset.txt"); for(String line : schedule.split("\n")) { String[] parts=line.split(" +",2); if(!parts[0].equals("")){ tty.line("Creating "+parts[0]); log.info("Creating "+parts[0]); storage.autocreateJSON(parts[0],getJSONResource(parts[1])); tty.flush(); } } log.info("Delete existing vocab/auth entries"); tty.line("Delete existing vocab/auth entries"); // Delete existing vocab entries JSONObject myjs = new JSONObject(); myjs.put("pageSize", "10"); myjs.put("pageNum", "0"); for(String dir : paths) { try{ if(this.spec.hasRecord(dir)){ Record r = this.spec.getRecord(dir); if(r.isType("authority")){ for(Instance n : r.getAllInstances()) { String url = 
r.getID()+"/"+n.getTitleRef(); try{ storage.getPathsJSON(url,new JSONObject()).toString(); } catch (UnderlyingStorageException x) { log.info("need to create Instance " + n.getID()); tty.line("need to create Instance " + n.getID()); JSONObject fields=new JSONObject("{'displayName':'"+n.getTitle()+"','shortIdentifier':'"+n.getWebURL()+"'}"); String base=r.getID(); storage.autocreateJSON(base,fields); log.info("Instance " + n.getID() + " Created"); tty.line("Instance " + n.getID() + " Created"); } deletall(n,r,url,"Deleting "+ url, storage, data, tty, myjs); } } } } catch(Exception e){ log.info("that was weird but probably not an issue " + e.getMessage()); } } log.info("Creating Dummy data"); tty.line("Creating Dummy data"); tty.flush(); // Create vocab entries String names=getResource("names.txt"); int i=0; for(String nextName : names.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(PERSON_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/person/person",entry); tty.line("Created Person "+entry); log.info("Created Person "+entry); tty.flush(); if(quick && i>20) break; } // Create vocab entries String orgs=getResource("orgs.txt"); i=0; for(String nextName : orgs.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(ORG_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/organization/organization",entry); tty.line("Created Organisation "+nextName); log.info("Created Organisation "+nextName); tty.flush(); if(quick && i>20) break; } tty.line("done"); log.info("done"); } catch (ExistException e) { log.info("ExistException "+ e.getLocalizedMessage()); tty.line("ExistException "+ e.getLocalizedMessage()); throw new UIException("Existence problem",e); } catch (UnimplementedException e) { log.info("UnimplementedException "+ e.getLocalizedMessage()); tty.line("UnimplementedException "+ e.getLocalizedMessage()); throw new UIException("Unimplemented ",e); } catch (UnderlyingStorageException x) { log.info("UnderlyingStorageException "+ x.getLocalizedMessage()); tty.line("UnderlyingStorageException "+ x.getLocalizedMessage()); throw new UIException("Problem storing"+x.getLocalizedMessage(),x.getStatus(),x.getUrl(),x); } catch (JSONException e) { log.info("JSONException "+ e.getLocalizedMessage()); tty.line("JSONException "+ e.getLocalizedMessage()); throw new UIException("Invalid JSON",e); } catch (IOException e) { log.info("IOException "+ e.getLocalizedMessage()); tty.line("IOException "+ e.getLocalizedMessage()); throw new UIException("IOException",e); } }
private void reset(Storage storage,UIRequest request,String path) throws UIException { //remember to log into the front end before trying to run this JSONObject data = new JSONObject(); TTYOutputter tty=request.getTTYOutputter(); // Temporary hack to reset db try { data = storage.getPathsJSON("/",null); String[] paths = (String[]) data.get("listItems"); if(!path.equals("nodelete")){ // Delete existing records for(String dir : paths) { Record r = null; log.info(dir); if("direct".equals(dir)||"relations".equals(dir)) continue; try{ r = this.spec.getRecord(dir); } catch(Exception e){ continue; } if(r.isType("procedure")){ if("termlistitem".equals(dir) ||"termlist".equals(dir)) continue; // Nothing to do for the pseudo-records if(r.isType("searchall")) { continue; } } else if(r.isType("authority")){ continue; } else if(r.isType("record")){ if("hierarchy".equals(dir) || !r.isRealRecord()) // Filter out self-renderers, etc continue; log.info("S"); } else if(r.isType("authorizationdata")){ continue; } else if(r.isType("userdata")){ continue; } else{ //ignore - have no idea what it is continue; } //if("place".equals(dir) || "vocab".equals(dir) || "contact".equals(dir) || "location".equals(dir) || "person".equals(dir) || "organization".equals(dir) || "taxon".equals(dir)){ // continue; //} // ignore authorization //if("rolePermission".equals(dir) || "accountrole".equals(dir) || "accountroles".equals(dir) || "userperm".equals(dir)|| "permrole".equals(dir) || "permission".equals(dir) || "role".equals(dir)|| "userrole".equals(dir) || "users".equals(dir) ){ // continue; //} // ignore other - tho we do need to clean these up //if("termlistitem".equals(dir) ||"termlist".equals(dir) || "reports".equals(dir) || "reporting".equals(dir) || "output".equals(dir) ) // continue; //// ignore other - tho we do need to clean these up //if("hierarchy".equals(dir) || "dimension".equals(dir) ||"structureddate".equals(dir) ||"blobs".equals(dir) ||"relations".equals(dir) || "direct".equals(dir) || "id".equals(dir) ) // continue; log.info("Deleteing data associated with : "+dir); tty.line("Deleteing data associated with : "+dir); JSONObject data2 = storage.getPathsJSON(dir,null); String[] paths2 = (String[]) data2.get("listItems"); for(int i=0;i<paths2.length;i++) { tty.line("path : "+dir+"/"+paths2[i]); try { storage.deleteJSON(dir+"/"+paths2[i]); } catch (UnimplementedException e) { tty.line("UnimplementedException"+e); tty.line("ux"); } catch (UnderlyingStorageException e) { tty.line("UnderlyingStorageEception"+e); } tty.line("ok"); tty.flush(); } } } log.info("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); tty.line("Creating records and procedures: this might take some time, go get a cup of tea and be patient"); // Create records anew tty.line("Create records anew"); String schedule=getResource("reset.txt"); for(String line : schedule.split("\n")) { String[] parts=line.split(" +",2); if(!parts[0].equals("")){ tty.line("Creating "+parts[0]); log.info("Creating "+parts[0]); storage.autocreateJSON(parts[0],getJSONResource(parts[1])); tty.flush(); } } log.info("Delete existing vocab/auth entries"); tty.line("Delete existing vocab/auth entries"); // Delete existing vocab entries JSONObject myjs = new JSONObject(); myjs.put("pageSize", "10"); myjs.put("pageNum", "0"); for(String dir : paths) { try{ if(this.spec.hasRecord(dir)){ Record r = this.spec.getRecord(dir); if(r.isType("authority")){ for(Instance n : r.getAllInstances()) { String url = r.getID()+"/"+n.getTitleRef(); 
try{ storage.getPathsJSON(url,new JSONObject()).toString(); } catch (UnderlyingStorageException x) { log.info("need to create Instance " + n.getID()); tty.line("need to create Instance " + n.getID()); JSONObject fields=new JSONObject("{'displayName':'"+n.getTitle()+"','shortIdentifier':'"+n.getWebURL()+"'}"); String base=r.getID(); storage.autocreateJSON(base,fields); log.info("Instance " + n.getID() + " Created"); tty.line("Instance " + n.getID() + " Created"); } deletall(n,r,url,"Deleting "+ url, storage, data, tty, myjs); } } } } catch(Exception e){ log.info("that was weird but probably not an issue " + e.getMessage()); } } log.info("Creating Dummy data"); tty.line("Creating Dummy data"); tty.flush(); // Create vocab entries String names=getResource("names.txt"); int i=0; for(String nextName : names.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(PERSON_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/person/person",entry); tty.line("Created Person "+entry); log.info("Created Person "+entry); tty.flush(); if(quick && i>20) break; } // Create vocab entries String orgs=getResource("orgs.txt"); i=0; for(String nextName : orgs.split("\n")) { i++; JSONObject entry=createTrivialAuthItem(ORG_TERMLIST_ELEMENT, nextName); storage.autocreateJSON("/organization/organization",entry); tty.line("Created Organisation "+nextName); log.info("Created Organisation "+nextName); tty.flush(); if(quick && i>20) break; } tty.line("done"); log.info("done"); } catch (ExistException e) { log.info("ExistException "+ e.getLocalizedMessage()); tty.line("ExistException "+ e.getLocalizedMessage()); throw new UIException("Existence problem",e); } catch (UnimplementedException e) { log.info("UnimplementedException "+ e.getLocalizedMessage()); tty.line("UnimplementedException "+ e.getLocalizedMessage()); throw new UIException("Unimplemented ",e); } catch (UnderlyingStorageException x) { log.info("UnderlyingStorageException "+ x.getLocalizedMessage()); tty.line("UnderlyingStorageException "+ x.getLocalizedMessage()); throw new UIException("Problem storing"+x.getLocalizedMessage(),x.getStatus(),x.getUrl(),x); } catch (JSONException e) { log.info("JSONException "+ e.getLocalizedMessage()); tty.line("JSONException "+ e.getLocalizedMessage()); throw new UIException("Invalid JSON",e); } catch (IOException e) { log.info("IOException "+ e.getLocalizedMessage()); tty.line("IOException "+ e.getLocalizedMessage()); throw new UIException("IOException",e); } }
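The fix in this record replaces the hard-coded "dimension"/"structureddate" name checks with a single !r.isRealRecord() predicate (the diff comment calls these "self-renderers"). A minimal sketch of that refactor pattern, using a stand-in Record class rather than the real CollectionSpace spec API:

import java.util.Arrays;
import java.util.List;

public class RealRecordFilter {
    // Stand-in for the spec's Record type; the real isRealRecord() is
    // driven by configuration, not a constructor flag.
    static class Record {
        final String id;
        final boolean realRecord;
        Record(String id, boolean realRecord) { this.id = id; this.realRecord = realRecord; }
        boolean isRealRecord() { return realRecord; }
    }

    public static void main(String[] args) {
        List<Record> records = Arrays.asList(
                new Record("collectionobject", true),
                new Record("hierarchy", false),
                new Record("dimension", false),
                new Record("structureddate", false));
        for (Record r : records) {
            // Before: if ("hierarchy".equals(id) || "dimension".equals(id) || "structureddate".equals(id)) continue;
            // After: one predicate the spec keeps current as pseudo-records are added.
            if ("hierarchy".equals(r.id) || !r.isRealRecord()) {
                continue; // skip pseudo-records during the delete pass
            }
            System.out.println("would delete data for: " + r.id);
        }
    }
}

The advantage is that a new pseudo-record type only needs to be flagged in the spec, not enumerated at every call site.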
diff --git a/xstream/src/test/com/thoughtworks/xstream/core/util/ObjectIdDictionaryTest.java b/xstream/src/test/com/thoughtworks/xstream/core/util/ObjectIdDictionaryTest.java index c32ff3e6..ad874251 100644 --- a/xstream/src/test/com/thoughtworks/xstream/core/util/ObjectIdDictionaryTest.java +++ b/xstream/src/test/com/thoughtworks/xstream/core/util/ObjectIdDictionaryTest.java @@ -1,111 +1,112 @@ /* * Copyright (C) 2004 Joe Walnes. * Copyright (C) 2006, 2007 XStream Committers. * All rights reserved. * * The software in this package is published under the terms of the BSD * style license a copy of which has been included with this distribution in * the LICENSE.txt file. * * Created on 30. May 2004 by Joe Walnes */ package com.thoughtworks.xstream.core.util; import junit.framework.TestCase; import java.util.ArrayList; import java.util.List; public class ObjectIdDictionaryTest extends TestCase { public void testMapsIdsToObjectReferences() { ObjectIdDictionary dict = new ObjectIdDictionary(); Object a = new Object(); Object b = new Object(); Object c = new Object(); dict.associateId(a, "id a"); dict.associateId(b, "id b"); dict.associateId(c, "id c"); assertEquals("id a", dict.lookupId(a)); assertEquals("id b", dict.lookupId(b)); assertEquals("id c", dict.lookupId(c)); } public void testTreatsObjectsThatAreEqualButNotSameInstanceAsDifferentReference() { ObjectIdDictionary dict = new ObjectIdDictionary(); Integer a = new Integer(3); Integer b = new Integer(3); dict.associateId(a, "id a"); dict.associateId(b, "id b"); assertEquals("id a", dict.lookupId(a)); assertEquals("id b", dict.lookupId(b)); } public void testEnforceSameSystemHashCodeForGCedObjects() { StringBuffer memInfo = new StringBuffer("MemoryInfo:\n"); memInfo.append(memoryInfo()); memInfo.append('\n'); // create 100000 Strings and call GC after creation of 10000 final int loop = 10; final int elements = 10000; final int[] dictSizes = new int[loop * elements]; // create memory shortage to force gc - long maxMemory = Runtime.getRuntime().maxMemory(); + Runtime runtime = Runtime.getRuntime(); + long maxMemory = runtime.maxMemory(); int mem = Integer.MAX_VALUE; maxMemory -= dictSizes.length * 4; mem = (int)(maxMemory > Integer.MAX_VALUE ? Integer.MAX_VALUE : maxMemory); List blockList = new ArrayList(); byte[] block = null; while (block == null) { try { block = new byte[mem]; } catch(OutOfMemoryError error) { mem -= 1024 * 512; } // This machine has huge memory reserves, consume it! 
- if (maxMemory == Integer.MAX_VALUE) { + if (mem == Integer.MAX_VALUE) { blockList.add(block); block = null; } } block[mem - 1] = (byte)255; // run test with memory shortage ObjectIdDictionary dict = new ObjectIdDictionary(); for (int i = 0; i < loop; ++i) { System.gc(); memInfo.append(memoryInfo()); memInfo.append('\n'); for (int j = 0; j < elements; ++j) { final String s = new String("JUnit ") + j; // enforce new object dictSizes[i * elements + j] = dict.size(); assertFalse("Failed in (" + i + "/" + j + ")", dict.containsId(s)); dict.associateId(s, "X"); } } memInfo.append(memoryInfo()); memInfo.append('\n'); assertFalse("Algorithm did not reach last element", 0 == dictSizes[loop * elements - 1]); assertFalse("Dictionary did not shrink\n" + memInfo, loop * elements - 1 == dictSizes[loop * elements - 1]); // prevent compiler optimization assertEquals(-1, block[mem-1]); assertNotNull(blockList); } private String memoryInfo() { Runtime runtime = Runtime.getRuntime(); StringBuffer buffer = new StringBuffer("Memory: "); buffer.append(runtime.freeMemory()); buffer.append(" free / "); buffer.append(runtime.maxMemory()); buffer.append(" max / "); buffer.append(runtime.totalMemory()); buffer.append(" total"); return buffer.toString(); } }
false
true
public void testEnforceSameSystemHashCodeForGCedObjects() { StringBuffer memInfo = new StringBuffer("MemoryInfo:\n"); memInfo.append(memoryInfo()); memInfo.append('\n'); // create 100000 Strings and call GC after creation of 10000 final int loop = 10; final int elements = 10000; final int[] dictSizes = new int[loop * elements]; // create memory shortage to force gc long maxMemory = Runtime.getRuntime().maxMemory(); int mem = Integer.MAX_VALUE; maxMemory -= dictSizes.length * 4; mem = (int)(maxMemory > Integer.MAX_VALUE ? Integer.MAX_VALUE : maxMemory); List blockList = new ArrayList(); byte[] block = null; while (block == null) { try { block = new byte[mem]; } catch(OutOfMemoryError error) { mem -= 1024 * 512; } // This machine has huge memory reserves, consume it! if (maxMemory == Integer.MAX_VALUE) { blockList.add(block); block = null; } } block[mem - 1] = (byte)255; // run test with memory shortage ObjectIdDictionary dict = new ObjectIdDictionary(); for (int i = 0; i < loop; ++i) { System.gc(); memInfo.append(memoryInfo()); memInfo.append('\n'); for (int j = 0; j < elements; ++j) { final String s = new String("JUnit ") + j; // enforce new object dictSizes[i * elements + j] = dict.size(); assertFalse("Failed in (" + i + "/" + j + ")", dict.containsId(s)); dict.associateId(s, "X"); } } memInfo.append(memoryInfo()); memInfo.append('\n'); assertFalse("Algorithm did not reach last element", 0 == dictSizes[loop * elements - 1]); assertFalse("Dictionary did not shrink\n" + memInfo, loop * elements - 1 == dictSizes[loop * elements - 1]); // prevent compiler optimization assertEquals(-1, block[mem-1]); assertNotNull(blockList); }
public void testEnforceSameSystemHashCodeForGCedObjects() { StringBuffer memInfo = new StringBuffer("MemoryInfo:\n"); memInfo.append(memoryInfo()); memInfo.append('\n'); // create 100000 Strings and call GC after creation of 10000 final int loop = 10; final int elements = 10000; final int[] dictSizes = new int[loop * elements]; // create memory shortage to force gc Runtime runtime = Runtime.getRuntime(); long maxMemory = runtime.maxMemory(); int mem = Integer.MAX_VALUE; maxMemory -= dictSizes.length * 4; mem = (int)(maxMemory > Integer.MAX_VALUE ? Integer.MAX_VALUE : maxMemory); List blockList = new ArrayList(); byte[] block = null; while (block == null) { try { block = new byte[mem]; } catch(OutOfMemoryError error) { mem -= 1024 * 512; } // This machine has huge memory reserves, consume it! if (mem == Integer.MAX_VALUE) { blockList.add(block); block = null; } } block[mem - 1] = (byte)255; // run test with memory shortage ObjectIdDictionary dict = new ObjectIdDictionary(); for (int i = 0; i < loop; ++i) { System.gc(); memInfo.append(memoryInfo()); memInfo.append('\n'); for (int j = 0; j < elements; ++j) { final String s = new String("JUnit ") + j; // enforce new object dictSizes[i * elements + j] = dict.size(); assertFalse("Failed in (" + i + "/" + j + ")", dict.containsId(s)); dict.associateId(s, "X"); } } memInfo.append(memoryInfo()); memInfo.append('\n'); assertFalse("Algorithm did not reach last element", 0 == dictSizes[loop * elements - 1]); assertFalse("Dictionary did not shrink\n" + memInfo, loop * elements - 1 == dictSizes[loop * elements - 1]); // prevent compiler optimization assertEquals(-1, block[mem-1]); assertNotNull(blockList); }
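The fix in this record caches the Runtime instance and, more importantly, makes the "huge memory reserves" branch test the clamped int request (mem) instead of the raw long maxMemory. After the earlier subtraction, maxMemory virtually never equals Integer.MAX_VALUE, so the branch that keeps allocating blocks on large-heap machines never fired and the test could finish without real memory pressure. A minimal sketch of the clamping logic, with a pretend heap size standing in for Runtime.maxMemory():

public class ClampDemo {
    public static void main(String[] args) {
        long maxMemory = 5L * Integer.MAX_VALUE; // pretend heap, far above the int cap
        int mem = (int) (maxMemory > Integer.MAX_VALUE ? Integer.MAX_VALUE : maxMemory);

        // Buggy check: false here, so the "consume reserves" branch never runs.
        System.out.println(maxMemory == Integer.MAX_VALUE); // false

        // Fixed check: true whenever the heap exceeds what one array can hold,
        // which is exactly the case the branch was written for.
        System.out.println(mem == Integer.MAX_VALUE); // true
    }
}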
diff --git a/cobertura/src/main/java/net/sourceforge/cobertura/reporting/ComplexityCalculator.java b/cobertura/src/main/java/net/sourceforge/cobertura/reporting/ComplexityCalculator.java index b427d68..b07cf65 100644 --- a/cobertura/src/main/java/net/sourceforge/cobertura/reporting/ComplexityCalculator.java +++ b/cobertura/src/main/java/net/sourceforge/cobertura/reporting/ComplexityCalculator.java @@ -1,288 +1,287 @@ /* * Cobertura - http://cobertura.sourceforge.net/ * * Copyright (C) 2005 Mark Doliner * Copyright (C) 2005 Jeremy Thomerson * Copyright (C) 2005 Grzegorz Lukasik * Copyright (C) 2008 Tri Bao Ho * Copyright (C) 2009 John Lewis * * Cobertura is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation; either version 2 of the License, * or (at your option) any later version. * * Cobertura is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Cobertura; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA */ package net.sourceforge.cobertura.reporting; import net.sourceforge.cobertura.coveragedata.ClassData; import net.sourceforge.cobertura.coveragedata.PackageData; import net.sourceforge.cobertura.coveragedata.ProjectData; import net.sourceforge.cobertura.coveragedata.SourceFileData; import net.sourceforge.cobertura.javancss.FunctionMetric; import net.sourceforge.cobertura.javancss.Javancss; import net.sourceforge.cobertura.util.FileFinder; import net.sourceforge.cobertura.util.Source; import org.apache.log4j.Logger; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; /** * Allows complexity computing for source files, packages and a whole project. Average * McCabe's number for methods contained in the specified entity is returned. This class * depends on FileFinder which is used to map source file names to existing files. * <p/> * <p>One instance of this class should be used for the same set of source files - an * object of this class can cache computed results.</p> * * @author Grzegorz Lukasik */ public class ComplexityCalculator { private static final Logger logger = Logger .getLogger(ComplexityCalculator.class); public static final Complexity ZERO_COMPLEXITY = new Complexity(); // Finder used to map source file names to existing files private final FileFinder finder; // Contains pairs (String sourceFileName, Complexity complexity) private Map sourceFileCNNCache = new HashMap(); // Contains pairs (String packageName, Complexity complexity) private Map packageCNNCache = new HashMap(); /** * Creates new calculator. Passed {@link FileFinder} will be used to * map source file names to existing files when needed. * * @param finder {@link FileFinder} that allows to find source files * * @throws NullPointerException if finder is null */ public ComplexityCalculator(FileFinder finder) { if (finder == null) throw new NullPointerException(); this.finder = finder; } /** * Calculates the code complexity number for an input stream. * "CCN" stands for "code complexity number." This is * sometimes referred to as McCabe's number. 
This method * calculates the average cyclomatic code complexity of all * methods of all classes in a given directory. * * @param file The input stream for which you want to calculate * the complexity * * @return average complexity for the specified input stream */ private Complexity getAccumlatedCCNForSource(String sourceFileName, Source source) { if (source == null) { return ZERO_COMPLEXITY; } if (!sourceFileName.endsWith(".java")) { return ZERO_COMPLEXITY; } - Javancss javancss = new Javancss(source - .getInputStream()); + Javancss javancss = new Javancss(source.getInputStream()); if (javancss.getLastErrorMessage() != null) { //there is an error while parsing the java file. log it logger.warn("JavaNCSS got an error while parsing the java " + source.getOriginDesc() + "\n" + javancss.getLastErrorMessage()); } List methodMetrics = javancss.getFunctionMetrics(); int classCcn = 0; for (Iterator method = methodMetrics.iterator(); method.hasNext();) { FunctionMetric singleMethodMetrics = (FunctionMetric) method.next(); classCcn += singleMethodMetrics.ccn; } return new Complexity(classCcn, methodMetrics.size()); } /** * Calculates the code complexity number for single source file. * "CCN" stands for "code complexity number." This is * sometimes referred to as McCabe's number. This method * calculates the average cyclomatic code complexity of all * methods of all classes in a given directory. * * @param sourceFileName * @param file The source file for which you want to calculate * the complexity * * @return average complexity for the specified source file * * @throws IOException */ private Complexity getAccumlatedCCNForSingleFile(String sourceFileName) throws IOException { Source source = finder.getSource(sourceFileName); try { return getAccumlatedCCNForSource(sourceFileName, source); } finally { if (source != null) { source.close(); } } } /** * Computes CCN for all sources contained in the project. * CCN for whole project is an average CCN for source files. * All source files for which CCN cannot be computed are ignored. * * @param projectData project to compute CCN for * * @return CCN for project or 0 if no source files were found * * @throws NullPointerException if projectData is null */ public double getCCNForProject(ProjectData projectData) { // Sum complexity for all packages Complexity act = new Complexity(); for (Iterator it = projectData.getPackages().iterator(); it.hasNext();) { PackageData packageData = (PackageData) it.next(); act.add(getCCNForPackageInternal(packageData)); } // Return average CCN for source files return act.averageCCN(); } /** * Computes CCN for all sources contained in the specified package. * All source files that cannot be mapped to existing files are ignored. 
* * @param packageData package to compute CCN for * * @return CCN for the specified package or 0 if no source files were found * * @throws NullPointerException if <code>packageData</code> is <code>null</code> */ public double getCCNForPackage(PackageData packageData) { return getCCNForPackageInternal(packageData).averageCCN(); } private Complexity getCCNForPackageInternal(PackageData packageData) { // Return CCN if computed earlier Complexity cachedCCN = (Complexity) packageCNNCache.get(packageData .getName()); if (cachedCCN != null) { return cachedCCN; } // Compute CCN for all source files inside package Complexity act = new Complexity(); for (Iterator it = packageData.getSourceFiles().iterator(); it .hasNext();) { SourceFileData sourceData = (SourceFileData) it.next(); act.add(getCCNForSourceFileNameInternal(sourceData.getName())); } // Cache result and return it packageCNNCache.put(packageData.getName(), act); return act; } /** * Computes CCN for single source file. * * @param sourceFile source file to compute CCN for * * @return CCN for the specified source file, 0 if cannot map <code>sourceFile</code> to existing file * * @throws NullPointerException if <code>sourceFile</code> is <code>null</code> */ public double getCCNForSourceFile(SourceFileData sourceFile) { return getCCNForSourceFileNameInternal(sourceFile.getName()) .averageCCN(); } private Complexity getCCNForSourceFileNameInternal(String sourceFileName) { // Return CCN if computed earlier Complexity cachedCCN = (Complexity) sourceFileCNNCache .get(sourceFileName); if (cachedCCN != null) { return cachedCCN; } // Compute CCN and cache it for further use Complexity result = ZERO_COMPLEXITY; try { result = getAccumlatedCCNForSingleFile(sourceFileName); } catch (IOException ex) { logger .info("Cannot find source file during CCN computation, source=[" + sourceFileName + "]"); } sourceFileCNNCache.put(sourceFileName, result); return result; } /** * Computes CCN for source file the specified class belongs to. * * @param classData package to compute CCN for * * @return CCN for source file the specified class belongs to * * @throws NullPointerException if <code>classData</code> is <code>null</code> */ public double getCCNForClass(ClassData classData) { return getCCNForSourceFileNameInternal(classData.getSourceFileName()) .averageCCN(); } /** * Represents complexity of source file, package or project. Stores the number of * methods inside entity and accumlated complexity for these methods. */ private static class Complexity { private double accumlatedCCN; private int methodsNum; public Complexity(double accumlatedCCN, int methodsNum) { this.accumlatedCCN = accumlatedCCN; this.methodsNum = methodsNum; } public Complexity() { this(0, 0); } public double averageCCN() { if (methodsNum == 0) { return 0; } return accumlatedCCN / methodsNum; } public void add(Complexity second) { accumlatedCCN += second.accumlatedCCN; methodsNum += second.methodsNum; } } }
true
true
private Complexity getAccumlatedCCNForSource(String sourceFileName, Source source) { if (source == null) { return ZERO_COMPLEXITY; } if (!sourceFileName.endsWith(".java")) { return ZERO_COMPLEXITY; } Javancss javancss = new Javancss(source .getInputStream()); if (javancss.getLastErrorMessage() != null) { //there is an error while parsing the java file. log it logger.warn("JavaNCSS got an error while parsing the java " + source.getOriginDesc() + "\n" + javancss.getLastErrorMessage()); } List methodMetrics = javancss.getFunctionMetrics(); int classCcn = 0; for (Iterator method = methodMetrics.iterator(); method.hasNext();) { FunctionMetric singleMethodMetrics = (FunctionMetric) method.next(); classCcn += singleMethodMetrics.ccn; } return new Complexity(classCcn, methodMetrics.size()); }
private Complexity getAccumlatedCCNForSource(String sourceFileName, Source source) { if (source == null) { return ZERO_COMPLEXITY; } if (!sourceFileName.endsWith(".java")) { return ZERO_COMPLEXITY; } Javancss javancss = new Javancss(source.getInputStream()); if (javancss.getLastErrorMessage() != null) { //there is an error while parsing the java file. log it logger.warn("JavaNCSS got an error while parsing the java " + source.getOriginDesc() + "\n" + javancss.getLastErrorMessage()); } List methodMetrics = javancss.getFunctionMetrics(); int classCcn = 0; for (Iterator method = methodMetrics.iterator(); method.hasNext();) { FunctionMetric singleMethodMetrics = (FunctionMetric) method.next(); classCcn += singleMethodMetrics.ccn; } return new Complexity(classCcn, methodMetrics.size()); }
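The change in this record is formatting only (the split "source .getInputStream()" call is joined onto one line); behavior is unchanged. The metric itself is carried by the private Complexity helper visible at the end of the diff, which sums per-method CCN values and divides by the method count. A compact, runnable re-statement of that accumulator (field name spelled out; the original uses "accumlatedCCN"):

public class ComplexityDemo {
    static class Complexity {
        private double accumulatedCCN;
        private int methodsNum;
        Complexity(double ccn, int methods) { accumulatedCCN = ccn; methodsNum = methods; }
        Complexity() { this(0, 0); }
        void add(Complexity other) { accumulatedCCN += other.accumulatedCCN; methodsNum += other.methodsNum; }
        double averageCCN() { return methodsNum == 0 ? 0 : accumulatedCCN / methodsNum; } // 0 guards empty input
    }

    public static void main(String[] args) {
        Complexity file1 = new Complexity(12, 4); // four methods, total CCN 12
        Complexity file2 = new Complexity(3, 1);  // one method, CCN 3
        Complexity pkg = new Complexity();
        pkg.add(file1);
        pkg.add(file2);
        System.out.println(pkg.averageCCN()); // (12 + 3) / 5 = 3.0
    }
}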
diff --git a/Npr/src/org/npr/android/news/NewsStoryActivity.java b/Npr/src/org/npr/android/news/NewsStoryActivity.java index 3710790..01440da 100644 --- a/Npr/src/org/npr/android/news/NewsStoryActivity.java +++ b/Npr/src/org/npr/android/news/NewsStoryActivity.java @@ -1,275 +1,275 @@ // Copyright 2009 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package org.npr.android.news; import android.content.Intent; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.webkit.WebView; import android.widget.Button; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.TextView; import org.npr.android.util.Tracker; import org.npr.android.util.TypefaceCache; import org.npr.android.util.Tracker.LinkEvent; import org.npr.android.util.Tracker.PlayLaterEvent; import org.npr.android.util.Tracker.PlayNowEvent; import org.npr.android.util.Tracker.StoryDetailsMeasurement; import org.npr.api.Story; import org.npr.api.Story.Audio; import org.npr.api.Story.Byline; import org.npr.api.Story.Parent; import org.npr.api.Story.TextWithHtml; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Iterator; public class NewsStoryActivity extends BackAndForthActivity implements OnClickListener { private static String LOG_TAG = NewsStoryActivity.class.getName(); private String description; private Story story; private String storyId; private String title; private String topicId; private String orgId; private ImageView icon; private Drawable iconDrawable; private Handler handler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case 0: icon.setImageDrawable(iconDrawable); icon.setVisibility(View.VISIBLE); break; } } }; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); storyId = getIntent().getStringExtra(Constants.EXTRA_STORY_ID); story = NewsListActivity.getStoryFromCache(storyId); description = getIntent().getStringExtra(Constants.EXTRA_DESCRIPTION); if (story == null) { return; } Intent i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, true); sendBroadcast(i); setContentView(R.layout.news_story); orgId = story.getOrganizations().size() > 0 ? 
story.getOrganizations().get(0) .getId() : null; for (Parent p : story.getParentTopics()) { if (p.isPrimary()) { topicId = p.getId(); break; } } icon = (ImageView) findViewById(R.id.NewsStoryIcon); TextView title = (TextView) findViewById(R.id.NewsStoryTitleText); TextView dateline = (TextView) findViewById(R.id.NewsStoryDateline); Button listenNow = (Button) findViewById(R.id.NewsStoryListenNowButton); Button enqueue = (Button) findViewById(R.id.NewsStoryListenEnqueueButton); ImageButton share = (ImageButton) findViewById(R.id.NewsStoryShareButton); WebView textView = (WebView) findViewById(R.id.NewsStoryWebView); textView.setBackgroundColor(0); title.setText(story.getTitle()); title.setTypeface(TypefaceCache.getTypeface("fonts/Georgia.ttf", this)); this.title = story.getTitle(); // Sample date from api: Tue, 09 Jun 2009 15:20:00 -0400 SimpleDateFormat longDateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss"); DateFormat shortDateFormat = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.SHORT); StringBuilder datelineText = new StringBuilder(); try { datelineText.append(shortDateFormat.format(longDateFormat.parse(story .getStoryDate()))); } catch (ParseException e) { Log.e(LOG_TAG, "date format", e); } Iterator<Byline> bylines = story.getBylines().iterator(); if (bylines.hasNext()) { datelineText.append(" - by "); } while (bylines.hasNext()) { Byline b = bylines.next(); datelineText.append(b.getName()); if (bylines.hasNext()) { datelineText.append(", "); } } if (datelineText.length() == 0) { dateline.setVisibility(View.GONE); } dateline.setText(datelineText.toString()); TextWithHtml text = story.getTextWithHtml(); String textHtml; if (text != null) { StringBuilder sb = new StringBuilder(); for (String paragraph : text.getParagraphs()) { sb.append("<p>").append(paragraph).append("</p>"); } textHtml = String.format(HTML_FORMAT, sb.toString()); // WebView can't load external images, so we need to strip them or it // may not render. textHtml = textHtml.replaceAll("<img .*/>", ""); } else { // Only show the teaser if there is no full-text. textHtml = String.format(HTML_FORMAT, "<p class='teaser'>" + story.getTeaser() + "</p>"); } - textView.loadData(textHtml, "text/html", "utf-8"); + textView.loadDataWithBaseURL(null, textHtml, "text/html", "utf-8", null); if (story.getImages().size() > 0) { final String url = story.getImages().get(0).getSrc(); Thread imageInitThread = new Thread(new Runnable() { public void run() { iconDrawable = DownloadDrawable.createFromUrl(url); // if (iconDrawable.getBounds().height() > 0) { handler.sendEmptyMessage(0); // } } }); imageInitThread.start(); } listenNow.setOnClickListener(this); enqueue.setOnClickListener(this); share.setOnClickListener(this); boolean isListenable = getPlayableUrl(getPlayable()) != null; listenNow.setVisibility(isListenable ? View.VISIBLE : View.GONE); listenNow.setEnabled(isListenable); enqueue.setVisibility(isListenable ? 
View.VISIBLE : View.GONE); enqueue.setEnabled(isListenable); i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, false); sendBroadcast(i); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.NewsStoryListenNowButton: playStory(true); break; case R.id.NewsStoryListenEnqueueButton: playStory(false); break; case R.id.NewsStoryShareButton: Intent shareIntent = new Intent(android.content.Intent.ACTION_SEND); shareIntent.putExtra(Intent.EXTRA_SUBJECT, story.getTitle()); shareIntent.putExtra(Intent.EXTRA_TEXT, String.format( "%s: http://www.npr.org/%s", story.getTitle(), story.getId())); shareIntent.setType("text/plain"); startActivity(Intent.createChooser(shareIntent, getString(R.string.msg_share_story))); break; } } private void playStory(boolean playNow) { Audio a = getPlayable(); Intent i = new Intent(ListenActivity.class.getName()).putExtra( ListenActivity.EXTRA_CONTENT_URL, getPlayableUrl(a)).putExtra( ListenActivity.EXTRA_CONTENT_TITLE, story.getTitle()).putExtra( ListenActivity.EXTRA_ENQUEUE, true).putExtra(Constants.EXTRA_STORY_ID, storyId); LinkEvent e; if (playNow) { i.putExtra(ListenActivity.EXTRA_PLAY_IMMEDIATELY, true); e = new PlayNowEvent(storyId, story.getTitle(), a.getId()); } else { e = new PlayLaterEvent(storyId, story.getTitle(), a.getId()); } sendBroadcast(i); Tracker.instance(getApplication()).trackLink(e); } private Audio getPlayable() { for (Audio a : story.getAudios()) { if (a.getType().equals("primary")) { return a; } } return null; } private String getPlayableUrl(Audio a) { String url = null; if (a != null) { for (Audio.Format f : a.getFormats()) { if ((url = f.getMp3()) != null) { return url; } } } return url; } // WebView is default black text. public static final String HTML_FORMAT = "<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01//EN\">" + "<html><head><title></title>" + "<style type=\"text/css\">" + "body {color:#000; margin:0}" + "a {color:blue}" + ".teaser {font-size: 10pt}" + "</style>" + "</head>" + "<body>" + "%s" + "</body></html>"; @Override public CharSequence getMainTitle() { return description; } @Override public void trackNow() { StringBuilder pageName = new StringBuilder(storyId).append("-"); pageName.append(title); Tracker.instance(getApplication()).trackPage( new StoryDetailsMeasurement(pageName.toString(), "News", orgId, topicId, storyId)); } }
true
true
protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); storyId = getIntent().getStringExtra(Constants.EXTRA_STORY_ID); story = NewsListActivity.getStoryFromCache(storyId); description = getIntent().getStringExtra(Constants.EXTRA_DESCRIPTION); if (story == null) { return; } Intent i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, true); sendBroadcast(i); setContentView(R.layout.news_story); orgId = story.getOrganizations().size() > 0 ? story.getOrganizations().get(0) .getId() : null; for (Parent p : story.getParentTopics()) { if (p.isPrimary()) { topicId = p.getId(); break; } } icon = (ImageView) findViewById(R.id.NewsStoryIcon); TextView title = (TextView) findViewById(R.id.NewsStoryTitleText); TextView dateline = (TextView) findViewById(R.id.NewsStoryDateline); Button listenNow = (Button) findViewById(R.id.NewsStoryListenNowButton); Button enqueue = (Button) findViewById(R.id.NewsStoryListenEnqueueButton); ImageButton share = (ImageButton) findViewById(R.id.NewsStoryShareButton); WebView textView = (WebView) findViewById(R.id.NewsStoryWebView); textView.setBackgroundColor(0); title.setText(story.getTitle()); title.setTypeface(TypefaceCache.getTypeface("fonts/Georgia.ttf", this)); this.title = story.getTitle(); // Sample date from api: Tue, 09 Jun 2009 15:20:00 -0400 SimpleDateFormat longDateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss"); DateFormat shortDateFormat = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.SHORT); StringBuilder datelineText = new StringBuilder(); try { datelineText.append(shortDateFormat.format(longDateFormat.parse(story .getStoryDate()))); } catch (ParseException e) { Log.e(LOG_TAG, "date format", e); } Iterator<Byline> bylines = story.getBylines().iterator(); if (bylines.hasNext()) { datelineText.append(" - by "); } while (bylines.hasNext()) { Byline b = bylines.next(); datelineText.append(b.getName()); if (bylines.hasNext()) { datelineText.append(", "); } } if (datelineText.length() == 0) { dateline.setVisibility(View.GONE); } dateline.setText(datelineText.toString()); TextWithHtml text = story.getTextWithHtml(); String textHtml; if (text != null) { StringBuilder sb = new StringBuilder(); for (String paragraph : text.getParagraphs()) { sb.append("<p>").append(paragraph).append("</p>"); } textHtml = String.format(HTML_FORMAT, sb.toString()); // WebView can't load external images, so we need to strip them or it // may not render. textHtml = textHtml.replaceAll("<img .*/>", ""); } else { // Only show the teaser if there is no full-text. textHtml = String.format(HTML_FORMAT, "<p class='teaser'>" + story.getTeaser() + "</p>"); } textView.loadData(textHtml, "text/html", "utf-8"); if (story.getImages().size() > 0) { final String url = story.getImages().get(0).getSrc(); Thread imageInitThread = new Thread(new Runnable() { public void run() { iconDrawable = DownloadDrawable.createFromUrl(url); // if (iconDrawable.getBounds().height() > 0) { handler.sendEmptyMessage(0); // } } }); imageInitThread.start(); } listenNow.setOnClickListener(this); enqueue.setOnClickListener(this); share.setOnClickListener(this); boolean isListenable = getPlayableUrl(getPlayable()) != null; listenNow.setVisibility(isListenable ? View.VISIBLE : View.GONE); listenNow.setEnabled(isListenable); enqueue.setVisibility(isListenable ? View.VISIBLE : View.GONE); enqueue.setEnabled(isListenable); i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, false); sendBroadcast(i); }
protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); storyId = getIntent().getStringExtra(Constants.EXTRA_STORY_ID); story = NewsListActivity.getStoryFromCache(storyId); description = getIntent().getStringExtra(Constants.EXTRA_DESCRIPTION); if (story == null) { return; } Intent i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, true); sendBroadcast(i); setContentView(R.layout.news_story); orgId = story.getOrganizations().size() > 0 ? story.getOrganizations().get(0) .getId() : null; for (Parent p : story.getParentTopics()) { if (p.isPrimary()) { topicId = p.getId(); break; } } icon = (ImageView) findViewById(R.id.NewsStoryIcon); TextView title = (TextView) findViewById(R.id.NewsStoryTitleText); TextView dateline = (TextView) findViewById(R.id.NewsStoryDateline); Button listenNow = (Button) findViewById(R.id.NewsStoryListenNowButton); Button enqueue = (Button) findViewById(R.id.NewsStoryListenEnqueueButton); ImageButton share = (ImageButton) findViewById(R.id.NewsStoryShareButton); WebView textView = (WebView) findViewById(R.id.NewsStoryWebView); textView.setBackgroundColor(0); title.setText(story.getTitle()); title.setTypeface(TypefaceCache.getTypeface("fonts/Georgia.ttf", this)); this.title = story.getTitle(); // Sample date from api: Tue, 09 Jun 2009 15:20:00 -0400 SimpleDateFormat longDateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss"); DateFormat shortDateFormat = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.SHORT); StringBuilder datelineText = new StringBuilder(); try { datelineText.append(shortDateFormat.format(longDateFormat.parse(story .getStoryDate()))); } catch (ParseException e) { Log.e(LOG_TAG, "date format", e); } Iterator<Byline> bylines = story.getBylines().iterator(); if (bylines.hasNext()) { datelineText.append(" - by "); } while (bylines.hasNext()) { Byline b = bylines.next(); datelineText.append(b.getName()); if (bylines.hasNext()) { datelineText.append(", "); } } if (datelineText.length() == 0) { dateline.setVisibility(View.GONE); } dateline.setText(datelineText.toString()); TextWithHtml text = story.getTextWithHtml(); String textHtml; if (text != null) { StringBuilder sb = new StringBuilder(); for (String paragraph : text.getParagraphs()) { sb.append("<p>").append(paragraph).append("</p>"); } textHtml = String.format(HTML_FORMAT, sb.toString()); // WebView can't load external images, so we need to strip them or it // may not render. textHtml = textHtml.replaceAll("<img .*/>", ""); } else { // Only show the teaser if there is no full-text. textHtml = String.format(HTML_FORMAT, "<p class='teaser'>" + story.getTeaser() + "</p>"); } textView.loadDataWithBaseURL(null, textHtml, "text/html", "utf-8", null); if (story.getImages().size() > 0) { final String url = story.getImages().get(0).getSrc(); Thread imageInitThread = new Thread(new Runnable() { public void run() { iconDrawable = DownloadDrawable.createFromUrl(url); // if (iconDrawable.getBounds().height() > 0) { handler.sendEmptyMessage(0); // } } }); imageInitThread.start(); } listenNow.setOnClickListener(this); enqueue.setOnClickListener(this); share.setOnClickListener(this); boolean isListenable = getPlayableUrl(getPlayable()) != null; listenNow.setVisibility(isListenable ? View.VISIBLE : View.GONE); listenNow.setEnabled(isListenable); enqueue.setVisibility(isListenable ? 
View.VISIBLE : View.GONE); enqueue.setEnabled(isListenable); i = new Intent(Constants.BROADCAST_PROGRESS).putExtra( Constants.EXTRA_SHOW_PROGRESS, false); sendBroadcast(i); }
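The fix in this record swaps WebView.loadData() for loadDataWithBaseURL(). loadData() treats its argument as the body of a data: URL, so characters such as '#' and '%' must be percent-encoded or the page can render garbled; loadDataWithBaseURL() uses the string verbatim. A minimal sketch of the two calls (compiles against the Android SDK; the nulls mean no base URL and no history URL):

import android.webkit.WebView;

final class WebViewHtml {
    private WebViewHtml() {}

    static void showHtml(WebView view, String html) {
        // Fragile: html must be percent-encoded for loadData().
        // view.loadData(html, "text/html", "utf-8");

        // Robust: html is used as-is.
        view.loadDataWithBaseURL(null, html, "text/html", "utf-8", null);
    }
}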
diff --git a/src/net/juhonkoti/sharetobrowser/ShareToBrowser.java b/src/net/juhonkoti/sharetobrowser/ShareToBrowser.java index 7094f3b..5b30521 100755 --- a/src/net/juhonkoti/sharetobrowser/ShareToBrowser.java +++ b/src/net/juhonkoti/sharetobrowser/ShareToBrowser.java @@ -1,33 +1,33 @@ package net.juhonkoti.sharetobrowser; import android.app.Activity; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.widget.TextView; public class ShareToBrowser extends Activity { @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_scan_qrcode); Intent intent = getIntent(); Uri data = intent.getData(); Log.d("StartUrlSharing", "type: " + intent.getType()); if ("text/plain".equals(intent.getType())) { String url = intent.getStringExtra(Intent.EXTRA_TEXT); TextView t = (TextView) findViewById(R.id.sendToServerText); t.setText(url); - new SendUrlToServerTask(this).execute(url); + new SendUrlToServerTask(this).execute(url, TargetDatabase.instance().getDefaultTarget()); } } }
true
true
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_scan_qrcode); Intent intent = getIntent(); Uri data = intent.getData(); Log.d("StartUrlSharing", "type: " + intent.getType()); if ("text/plain".equals(intent.getType())) { String url = intent.getStringExtra(Intent.EXTRA_TEXT); TextView t = (TextView) findViewById(R.id.sendToServerText); t.setText(url); new SendUrlToServerTask(this).execute(url); } }
public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_scan_qrcode); Intent intent = getIntent(); Uri data = intent.getData(); Log.d("StartUrlSharing", "type: " + intent.getType()); if ("text/plain".equals(intent.getType())) { String url = intent.getStringExtra(Intent.EXTRA_TEXT); TextView t = (TextView) findViewById(R.id.sendToServerText); t.setText(url); new SendUrlToServerTask(this).execute(url, TargetDatabase.instance().getDefaultTarget()); } }
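The fix in this record passes a second argument, the default share target, to execute(). AsyncTask.execute() is varargs: every argument arrives in the Params array of doInBackground(). A minimal sketch of how the extra parameter is consumed; SendUrlToServerTask and TargetDatabase are the project's own classes, so this stand-in only illustrates the varargs contract:

import android.os.AsyncTask;

class SendTaskSketch extends AsyncTask<String, Void, Void> {
    @Override
    protected Void doInBackground(String... params) {
        String url = params[0];
        String target = params.length > 1 ? params[1] : null; // the new second argument
        // ... post url to target ...
        return null;
    }
}

// Call site, matching the fixed line:
// new SendTaskSketch().execute(url, defaultTarget);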
diff --git a/EssentialsChat/src/com/earth2me/essentials/chat/EssentialsChatPlayer.java b/EssentialsChat/src/com/earth2me/essentials/chat/EssentialsChatPlayer.java index 58b09479..052479ae 100644 --- a/EssentialsChat/src/com/earth2me/essentials/chat/EssentialsChatPlayer.java +++ b/EssentialsChat/src/com/earth2me/essentials/chat/EssentialsChatPlayer.java @@ -1,162 +1,165 @@ package com.earth2me.essentials.chat; import com.earth2me.essentials.ChargeException; import static com.earth2me.essentials.I18n._; import com.earth2me.essentials.IEssentials; import com.earth2me.essentials.Trade; import com.earth2me.essentials.User; import java.util.Map; import java.util.logging.Logger; import org.bukkit.Location; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.entity.Player; import org.bukkit.event.Listener; import org.bukkit.event.player.PlayerChatEvent; //TODO: Translate the local/spy tags public abstract class EssentialsChatPlayer implements Listener { protected transient IEssentials ess; protected final static Logger logger = Logger.getLogger("Minecraft"); protected final transient Map<String, IEssentialsChatListener> listeners; protected final transient Server server; protected final transient Map<PlayerChatEvent, ChatStore> chatStorage; public EssentialsChatPlayer(final Server server, final IEssentials ess, final Map<String, IEssentialsChatListener> listeners, final Map<PlayerChatEvent, ChatStore> chatStorage) { this.ess = ess; this.listeners = listeners; this.server = server; this.chatStorage = chatStorage; } public void onPlayerChat(final PlayerChatEvent event) { } public boolean isAborted(final PlayerChatEvent event) { if (event.isCancelled()) { return true; } for (IEssentialsChatListener listener : listeners.values()) { if (listener.shouldHandleThisChat(event)) { return true; } } return false; } public String getChatType(final String message) { switch (message.charAt(0)) { case '!': return "shout"; case '?': return "question"; //case '@': //return "admin"; default: return ""; } } public ChatStore getChatStore(final PlayerChatEvent event) { return chatStorage.get(event); } public void setChatStore(final PlayerChatEvent event, final ChatStore chatStore) { chatStorage.put(event, chatStore); } public ChatStore delChatStore(final PlayerChatEvent event) { return chatStorage.remove(event); } protected void charge(final User user, final Trade charge) throws ChargeException { charge.charge(user); } protected boolean charge(final PlayerChatEvent event, final ChatStore chatStore) { try { charge(chatStore.getUser(), chatStore.getCharge()); } catch (ChargeException e) { ess.showError(chatStore.getUser(), e, chatStore.getLongType()); event.setCancelled(true); return false; } return true; } protected void sendLocalChat(final PlayerChatEvent event, final ChatStore chatStore) { event.setCancelled(true); final User sender = chatStore.getUser(); logger.info(_("localFormat", sender.getName(), event.getMessage())); final Location loc = sender.getLocation(); final World world = loc.getWorld(); if (charge(event, chatStore) == false) { return; } for (Player onlinePlayer : server.getOnlinePlayers()) { String type = _("chatTypeLocal"); final User onlineUser = ess.getUser(onlinePlayer); if (onlineUser.isIgnoredPlayer(sender)) { continue; } if (!onlineUser.equals(sender)) { boolean abort = false; final Location playerLoc = onlineUser.getLocation(); if (playerLoc.getWorld() != world) { abort = true; } - final double delta = playerLoc.distanceSquared(loc); - if (delta > chatStore.getRadius()) + else { 
- abort = true; + final double delta = playerLoc.distanceSquared(loc); + if (delta > chatStore.getRadius()) + { + abort = true; + } } if (abort) { if (onlineUser.isAuthorized("essentials.chat.spy")) { type = type.concat(_("chatTypeSpy")); } else { continue; } } } String message = String.format(event.getFormat(), type.concat(sender.getDisplayName()), event.getMessage()); for (IEssentialsChatListener listener : listeners.values()) { message = listener.modifyMessage(event, onlinePlayer, message); } onlineUser.sendMessage(message); } } }
false
true
protected void sendLocalChat(final PlayerChatEvent event, final ChatStore chatStore) { event.setCancelled(true); final User sender = chatStore.getUser(); logger.info(_("localFormat", sender.getName(), event.getMessage())); final Location loc = sender.getLocation(); final World world = loc.getWorld(); if (charge(event, chatStore) == false) { return; } for (Player onlinePlayer : server.getOnlinePlayers()) { String type = _("chatTypeLocal"); final User onlineUser = ess.getUser(onlinePlayer); if (onlineUser.isIgnoredPlayer(sender)) { continue; } if (!onlineUser.equals(sender)) { boolean abort = false; final Location playerLoc = onlineUser.getLocation(); if (playerLoc.getWorld() != world) { abort = true; } final double delta = playerLoc.distanceSquared(loc); if (delta > chatStore.getRadius()) { abort = true; } if (abort) { if (onlineUser.isAuthorized("essentials.chat.spy")) { type = type.concat(_("chatTypeSpy")); } else { continue; } } } String message = String.format(event.getFormat(), type.concat(sender.getDisplayName()), event.getMessage()); for (IEssentialsChatListener listener : listeners.values()) { message = listener.modifyMessage(event, onlinePlayer, message); } onlineUser.sendMessage(message); } }
protected void sendLocalChat(final PlayerChatEvent event, final ChatStore chatStore) { event.setCancelled(true); final User sender = chatStore.getUser(); logger.info(_("localFormat", sender.getName(), event.getMessage())); final Location loc = sender.getLocation(); final World world = loc.getWorld(); if (charge(event, chatStore) == false) { return; } for (Player onlinePlayer : server.getOnlinePlayers()) { String type = _("chatTypeLocal"); final User onlineUser = ess.getUser(onlinePlayer); if (onlineUser.isIgnoredPlayer(sender)) { continue; } if (!onlineUser.equals(sender)) { boolean abort = false; final Location playerLoc = onlineUser.getLocation(); if (playerLoc.getWorld() != world) { abort = true; } else { final double delta = playerLoc.distanceSquared(loc); if (delta > chatStore.getRadius()) { abort = true; } } if (abort) { if (onlineUser.isAuthorized("essentials.chat.spy")) { type = type.concat(_("chatTypeSpy")); } else { continue; } } } String message = String.format(event.getFormat(), type.concat(sender.getDisplayName()), event.getMessage()); for (IEssentialsChatListener listener : listeners.values()) { message = listener.modifyMessage(event, onlinePlayer, message); } onlineUser.sendMessage(message); } }
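The fix in this record nests the radius check in an else branch so that Location.distanceSquared() is only called when both players are in the same world; in Bukkit that call throws IllegalArgumentException when the worlds differ, which would abort the whole chat loop. A minimal sketch of the guard pattern, with plain coordinates standing in for Bukkit Locations:

public class CrossWorldGuard {
    static boolean outOfRange(String worldA, String worldB,
                              double dx, double dz, double radiusSquared) {
        if (!worldA.equals(worldB)) {
            return true; // different world: never compute a distance
        } else {
            double delta = dx * dx + dz * dz;
            return delta > radiusSquared; // same world: safe to compare
        }
    }

    public static void main(String[] args) {
        System.out.println(outOfRange("world", "nether", 1, 1, 100)); // true, no distance math
        System.out.println(outOfRange("world", "world", 20, 0, 100)); // true: 400 > 100
        System.out.println(outOfRange("world", "world", 5, 5, 100));  // false: 50 <= 100
    }
}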
diff --git a/src/com/android/bluetooth/btservice/RemoteDevices.java b/src/com/android/bluetooth/btservice/RemoteDevices.java index 04bc1c2..a60f977 100755 --- a/src/com/android/bluetooth/btservice/RemoteDevices.java +++ b/src/com/android/bluetooth/btservice/RemoteDevices.java @@ -1,464 +1,465 @@ /* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.bluetooth.btservice; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothClass; import android.bluetooth.BluetoothDevice; import android.content.Context; import android.content.Intent; import android.os.Handler; import android.os.Message; import android.os.ParcelUuid; import android.util.Log; import com.android.bluetooth.Utils; import com.android.bluetooth.btservice.RemoteDevices.DeviceProperties; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; final class RemoteDevices { private static final boolean DBG = false; private static final String TAG = "BluetoothRemoteDevices"; private static BluetoothAdapter mAdapter; private static AdapterService mAdapterService; private static ArrayList<BluetoothDevice> mSdpTracker; private Object mObject = new Object(); private static final int UUID_INTENT_DELAY = 6000; private static final int MESSAGE_UUID_INTENT = 1; private HashMap<BluetoothDevice, DeviceProperties> mDevices; RemoteDevices(AdapterService service) { mAdapter = BluetoothAdapter.getDefaultAdapter(); mAdapterService = service; mSdpTracker = new ArrayList<BluetoothDevice>(); mDevices = new HashMap<BluetoothDevice, DeviceProperties>(); } void cleanup() { if (mSdpTracker !=null) mSdpTracker.clear(); if (mDevices != null) mDevices.clear(); } public Object Clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } DeviceProperties getDeviceProperties(BluetoothDevice device) { synchronized (mDevices) { return mDevices.get(device); } } BluetoothDevice getDevice(byte[] address) { for (BluetoothDevice dev : mDevices.keySet()) { if (dev.getAddress().equals(Utils.getAddressStringFromByte(address))) { return dev; } } return null; } DeviceProperties addDeviceProperties(byte[] address) { synchronized (mDevices) { DeviceProperties prop = new DeviceProperties(); BluetoothDevice device = mAdapter.getRemoteDevice(Utils.getAddressStringFromByte(address)); prop.mAddress = address; mDevices.put(device, prop); return prop; } } class DeviceProperties { private String mName; private byte[] mAddress; private int mBluetoothClass; private short mRssi; private ParcelUuid[] mUuids; private int mDeviceType; private String mAlias; private int mBondState; DeviceProperties() { mBondState = BluetoothDevice.BOND_NONE; } /** * @return the mName */ String getName() { synchronized (mObject) { return mName; } } /** * @return the mClass */ int getBluetoothClass() { synchronized (mObject) { return mBluetoothClass; } } /** * @return the mUuids */ ParcelUuid[] getUuids() { synchronized (mObject) { return mUuids; } } /** * @return the mAddress */ 
byte[] getAddress() { synchronized (mObject) { return mAddress; } } /** * @return mRssi */ short getRssi() { synchronized (mObject) { return mRssi; } } /** * @return mDeviceType */ int getDeviceType() { synchronized (mObject) { return mDeviceType; } } /** * @return the mAlias */ String getAlias() { synchronized (mObject) { return mAlias; } } /** * @param mAlias the mAlias to set */ void setAlias(String mAlias) { synchronized (mObject) { mAdapterService.setDevicePropertyNative(mAddress, AbstractionLayer.BT_PROPERTY_REMOTE_FRIENDLY_NAME, mAlias.getBytes()); } } /** * @param mBondState the mBondState to set */ void setBondState(int mBondState) { synchronized (mObject) { this.mBondState = mBondState; if (mBondState == BluetoothDevice.BOND_NONE) { /* Clearing the Uuids local copy when the device is unpaired. If not cleared, cachedBluetoothDevice issued a connect using the local cached copy of uuids, without waiting for the ACTION_UUID intent. This was resulting in multiple calls to connect().*/ mUuids = null; } } } /** * @return the mBondState */ int getBondState() { synchronized (mObject) { return mBondState; } } } private void sendUuidIntent(BluetoothDevice device) { DeviceProperties prop = getDeviceProperties(device); Intent intent = new Intent(BluetoothDevice.ACTION_UUID); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, device); intent.putExtra(BluetoothDevice.EXTRA_UUID, prop == null? null: prop.mUuids); mAdapterService.sendBroadcast(intent, AdapterService.BLUETOOTH_ADMIN_PERM); //Remove the outstanding UUID request mSdpTracker.remove(device); } private void sendDisplayPinIntent(byte[] address, int pin) { Intent intent = new Intent(BluetoothDevice.ACTION_PAIRING_REQUEST); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, getDevice(address)); intent.putExtra(BluetoothDevice.EXTRA_PAIRING_KEY, pin); intent.putExtra(BluetoothDevice.EXTRA_PAIRING_VARIANT, BluetoothDevice.PAIRING_VARIANT_DISPLAY_PIN); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_ADMIN_PERM); } void devicePropertyChangedCallback(byte[] address, int[] types, byte[][] values) { Intent intent; byte[] val; int type; BluetoothDevice bdDevice = getDevice(address); DeviceProperties device; if (bdDevice == null) { device = addDeviceProperties(address); bdDevice = getDevice(address); } else { device = getDeviceProperties(bdDevice); } for (int j = 0; j < types.length; j++) { type = types[j]; val = values[j]; if(val.length <= 0) errorLog("devicePropertyChangedCallback: bdDevice: " + bdDevice + ", value is empty for type: " + type); else { synchronized(mObject) { switch (type) { case AbstractionLayer.BT_PROPERTY_BDNAME: device.mName = new String(val); intent = new Intent(BluetoothDevice.ACTION_NAME_CHANGED); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_NAME, device.mName); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote Device name is: " + device.mName); break; case AbstractionLayer.BT_PROPERTY_REMOTE_FRIENDLY_NAME: if (device.mAlias != null) { System.arraycopy(val, 0, device.mAlias, 0, val.length); } else { device.mAlias = new String(val); } break; case AbstractionLayer.BT_PROPERTY_BDADDR: device.mAddress = val; debugLog("Remote Address is:" + Utils.getAddressStringFromByte(val)); break; case AbstractionLayer.BT_PROPERTY_CLASS_OF_DEVICE: device.mBluetoothClass = Utils.byteArrayToInt(val); intent = new Intent(BluetoothDevice.ACTION_CLASS_CHANGED); 
intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_CLASS, new BluetoothClass(device.mBluetoothClass)); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote class is:" + device.mBluetoothClass); break; case AbstractionLayer.BT_PROPERTY_UUIDS: int numUuids = val.length/AbstractionLayer.BT_UUID_SIZE; device.mUuids = Utils.byteArrayToUuid(val); sendUuidIntent(bdDevice); break; case AbstractionLayer.BT_PROPERTY_TYPE_OF_DEVICE: // The device type from hal layer, defined in bluetooth.h, // matches the type defined in BluetoothDevice.java device.mDeviceType = Utils.byteArrayToInt(val); break; case AbstractionLayer.BT_PROPERTY_REMOTE_RSSI: - device.mRssi = Utils.byteArrayToShort(val); + // RSSI from hal is in one byte + device.mRssi = val[0]; break; } } } } } void deviceFoundCallback(byte[] address) { // The device properties are already registered - we can send the intent // now BluetoothDevice device = getDevice(address); debugLog("deviceFoundCallback: Remote Address is:" + device); DeviceProperties deviceProp = getDeviceProperties(device); if (deviceProp == null) { errorLog("Device Properties is null for Device:" + device); return; } Intent intent = new Intent(BluetoothDevice.ACTION_FOUND); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, device); intent.putExtra(BluetoothDevice.EXTRA_CLASS, new BluetoothClass(Integer.valueOf(deviceProp.mBluetoothClass))); intent.putExtra(BluetoothDevice.EXTRA_RSSI, deviceProp.mRssi); intent.putExtra(BluetoothDevice.EXTRA_NAME, deviceProp.mName); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); } void pinRequestCallback(byte[] address, byte[] name, int cod) { //TODO(BT): Get wakelock and update name and cod BluetoothDevice bdDevice = getDevice(address); if (bdDevice == null) { addDeviceProperties(address); } BluetoothClass btClass = bdDevice.getBluetoothClass(); int btDeviceClass = btClass.getDeviceClass(); if (btDeviceClass == BluetoothClass.Device.PERIPHERAL_KEYBOARD || btDeviceClass == BluetoothClass.Device.PERIPHERAL_KEYBOARD_POINTING) { // Its a keyboard. Follow the HID spec recommendation of creating the // passkey and displaying it to the user. If the keyboard doesn't follow // the spec recommendation, check if the keyboard has a fixed PIN zero // and pair. //TODO: Add sFixedPinZerosAutoPairKeyboard() and maintain list of devices that have fixed pin /*if (mAdapterService.isFixedPinZerosAutoPairKeyboard(address)) { mAdapterService.setPin(address, BluetoothDevice.convertPinToBytes("0000")); return; }*/ // Generate a variable PIN. This is not truly random but good enough. 
int pin = (int) Math.floor(Math.random() * 1000000); sendDisplayPinIntent(address, pin); return; } infoLog("pinRequestCallback: " + address + " name:" + name + " cod:" + cod); Intent intent = new Intent(BluetoothDevice.ACTION_PAIRING_REQUEST); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, getDevice(address)); intent.putExtra(BluetoothDevice.EXTRA_PAIRING_VARIANT, BluetoothDevice.PAIRING_VARIANT_PIN); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_ADMIN_PERM); return; } void sspRequestCallback(byte[] address, byte[] name, int cod, int pairingVariant, int passkey) { //TODO(BT): Get wakelock and update name and cod BluetoothDevice bdDevice = getDevice(address); if (bdDevice == null) { addDeviceProperties(address); } infoLog("sspRequestCallback: " + address + " name: " + name + " cod: " + cod + " pairingVariant " + pairingVariant + " passkey: " + passkey); int variant; boolean displayPasskey = false; if (pairingVariant == AbstractionLayer.BT_SSP_VARIANT_PASSKEY_CONFIRMATION) { variant = BluetoothDevice.PAIRING_VARIANT_PASSKEY_CONFIRMATION; displayPasskey = true; } else if (pairingVariant == AbstractionLayer.BT_SSP_VARIANT_CONSENT) { variant = BluetoothDevice.PAIRING_VARIANT_CONSENT; } else if (pairingVariant == AbstractionLayer.BT_SSP_VARIANT_PASSKEY_ENTRY) { variant = BluetoothDevice.PAIRING_VARIANT_PASSKEY; } else if (pairingVariant == AbstractionLayer.BT_SSP_VARIANT_PASSKEY_NOTIFICATION) { variant = BluetoothDevice.PAIRING_VARIANT_DISPLAY_PASSKEY; displayPasskey = true; } else { errorLog("SSP Pairing variant not present"); return; } BluetoothDevice device = getDevice(address); if (device == null) { warnLog("Device is not known for:" + Utils.getAddressStringFromByte(address)); addDeviceProperties(address); device = getDevice(address); } Intent intent = new Intent(BluetoothDevice.ACTION_PAIRING_REQUEST); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, device); if (displayPasskey) { intent.putExtra(BluetoothDevice.EXTRA_PAIRING_KEY, passkey); } intent.putExtra(BluetoothDevice.EXTRA_PAIRING_VARIANT, variant); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_ADMIN_PERM); } void aclStateChangeCallback(int status, byte[] address, int newState) { BluetoothDevice device = getDevice(address); if (device == null) { errorLog("aclStateChangeCallback: Device is NULL"); return; } Intent intent = null; if (newState == AbstractionLayer.BT_ACL_STATE_CONNECTED) { intent = new Intent(BluetoothDevice.ACTION_ACL_CONNECTED); debugLog("aclStateChangeCallback: State:Connected to Device:" + device); } else { intent = new Intent(BluetoothDevice.ACTION_ACL_DISCONNECTED); debugLog("aclStateChangeCallback: State:DisConnected to Device:" + device); } intent.putExtra(BluetoothDevice.EXTRA_DEVICE, device); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); } void fetchUuids(BluetoothDevice device) { if (mSdpTracker.contains(device)) return; mSdpTracker.add(device); Message message = mHandler.obtainMessage(MESSAGE_UUID_INTENT); message.obj = device; mHandler.sendMessageDelayed(message, UUID_INTENT_DELAY); //mAdapterService.getDevicePropertyNative(Utils.getBytesFromAddress(device.getAddress()), AbstractionLayer.BT_PROPERTY_UUIDS); mAdapterService.getRemoteServicesNative(Utils.getBytesFromAddress(device.getAddress())); } private final Handler mHandler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case MESSAGE_UUID_INTENT: BluetoothDevice device = 
(BluetoothDevice)msg.obj; if (device != null) { sendUuidIntent(device); } break; } } }; private void errorLog(String msg) { Log.e(TAG, msg); } private void debugLog(String msg) { if (DBG) Log.d(TAG, msg); } private void infoLog(String msg) { if (DBG) Log.i(TAG, msg); } private void warnLog(String msg) { Log.w(TAG, msg); } }
true
true
void devicePropertyChangedCallback(byte[] address, int[] types, byte[][] values) { Intent intent; byte[] val; int type; BluetoothDevice bdDevice = getDevice(address); DeviceProperties device; if (bdDevice == null) { device = addDeviceProperties(address); bdDevice = getDevice(address); } else { device = getDeviceProperties(bdDevice); } for (int j = 0; j < types.length; j++) { type = types[j]; val = values[j]; if(val.length <= 0) errorLog("devicePropertyChangedCallback: bdDevice: " + bdDevice + ", value is empty for type: " + type); else { synchronized(mObject) { switch (type) { case AbstractionLayer.BT_PROPERTY_BDNAME: device.mName = new String(val); intent = new Intent(BluetoothDevice.ACTION_NAME_CHANGED); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_NAME, device.mName); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote Device name is: " + device.mName); break; case AbstractionLayer.BT_PROPERTY_REMOTE_FRIENDLY_NAME: if (device.mAlias != null) { System.arraycopy(val, 0, device.mAlias, 0, val.length); } else { device.mAlias = new String(val); } break; case AbstractionLayer.BT_PROPERTY_BDADDR: device.mAddress = val; debugLog("Remote Address is:" + Utils.getAddressStringFromByte(val)); break; case AbstractionLayer.BT_PROPERTY_CLASS_OF_DEVICE: device.mBluetoothClass = Utils.byteArrayToInt(val); intent = new Intent(BluetoothDevice.ACTION_CLASS_CHANGED); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_CLASS, new BluetoothClass(device.mBluetoothClass)); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote class is:" + device.mBluetoothClass); break; case AbstractionLayer.BT_PROPERTY_UUIDS: int numUuids = val.length/AbstractionLayer.BT_UUID_SIZE; device.mUuids = Utils.byteArrayToUuid(val); sendUuidIntent(bdDevice); break; case AbstractionLayer.BT_PROPERTY_TYPE_OF_DEVICE: // The device type from hal layer, defined in bluetooth.h, // matches the type defined in BluetoothDevice.java device.mDeviceType = Utils.byteArrayToInt(val); break; case AbstractionLayer.BT_PROPERTY_REMOTE_RSSI: device.mRssi = Utils.byteArrayToShort(val); break; } } } } }
void devicePropertyChangedCallback(byte[] address, int[] types, byte[][] values) { Intent intent; byte[] val; int type; BluetoothDevice bdDevice = getDevice(address); DeviceProperties device; if (bdDevice == null) { device = addDeviceProperties(address); bdDevice = getDevice(address); } else { device = getDeviceProperties(bdDevice); } for (int j = 0; j < types.length; j++) { type = types[j]; val = values[j]; if(val.length <= 0) errorLog("devicePropertyChangedCallback: bdDevice: " + bdDevice + ", value is empty for type: " + type); else { synchronized(mObject) { switch (type) { case AbstractionLayer.BT_PROPERTY_BDNAME: device.mName = new String(val); intent = new Intent(BluetoothDevice.ACTION_NAME_CHANGED); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_NAME, device.mName); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote Device name is: " + device.mName); break; case AbstractionLayer.BT_PROPERTY_REMOTE_FRIENDLY_NAME: if (device.mAlias != null) { System.arraycopy(val, 0, device.mAlias, 0, val.length); } else { device.mAlias = new String(val); } break; case AbstractionLayer.BT_PROPERTY_BDADDR: device.mAddress = val; debugLog("Remote Address is:" + Utils.getAddressStringFromByte(val)); break; case AbstractionLayer.BT_PROPERTY_CLASS_OF_DEVICE: device.mBluetoothClass = Utils.byteArrayToInt(val); intent = new Intent(BluetoothDevice.ACTION_CLASS_CHANGED); intent.putExtra(BluetoothDevice.EXTRA_DEVICE, bdDevice); intent.putExtra(BluetoothDevice.EXTRA_CLASS, new BluetoothClass(device.mBluetoothClass)); intent.addFlags(Intent.FLAG_RECEIVER_REGISTERED_ONLY_BEFORE_BOOT); mAdapterService.sendBroadcast(intent, mAdapterService.BLUETOOTH_PERM); debugLog("Remote class is:" + device.mBluetoothClass); break; case AbstractionLayer.BT_PROPERTY_UUIDS: int numUuids = val.length/AbstractionLayer.BT_UUID_SIZE; device.mUuids = Utils.byteArrayToUuid(val); sendUuidIntent(bdDevice); break; case AbstractionLayer.BT_PROPERTY_TYPE_OF_DEVICE: // The device type from hal layer, defined in bluetooth.h, // matches the type defined in BluetoothDevice.java device.mDeviceType = Utils.byteArrayToInt(val); break; case AbstractionLayer.BT_PROPERTY_REMOTE_RSSI: // RSSI from hal is in one byte device.mRssi = val[0]; break; } } } } }
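The RSSI change in this record swaps Utils.byteArrayToShort(val) for a read of the single signed byte, matching the fix's own comment that the HAL delivers RSSI in one byte. A minimal sketch of why the two-byte decode corrupts the value; byteArrayToShort below is a hypothetical little-endian stand-in, since the project's real Utils implementation is not shown in this record:

```java
public class RssiDecodeSketch {
    // Hypothetical little-endian stand-in for Utils.byteArrayToShort;
    // not the project's actual implementation.
    static short byteArrayToShort(byte[] val) {
        return (short) ((val[0] & 0xFF) | (val[1] << 8));
    }

    public static void main(String[] args) {
        // The HAL reports RSSI as one signed byte, e.g. -60 dBm.
        // Masking the low byte with 0xFF discards the sign.
        byte[] fromHal = { (byte) -60, 0 };
        System.out.println(byteArrayToShort(fromHal)); // 196, not -60

        // The fix reads the single signed byte directly.
        short rssi = fromHal[0];
        System.out.println(rssi); // -60
    }
}
```

A one-byte HAL buffer would also make a two-byte decode read past the end of val, so the single-byte read is safer on both counts.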
diff --git a/nuget-server/src/jetbrains/buildServer/nuget/server/feed/server/tab/FeedServerController.java b/nuget-server/src/jetbrains/buildServer/nuget/server/feed/server/tab/FeedServerController.java index 7142a034..5dda8b2b 100644 --- a/nuget-server/src/jetbrains/buildServer/nuget/server/feed/server/tab/FeedServerController.java +++ b/nuget-server/src/jetbrains/buildServer/nuget/server/feed/server/tab/FeedServerController.java @@ -1,104 +1,106 @@ /* * Copyright 2000-2011 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.buildServer.nuget.server.feed.server.tab; import jetbrains.buildServer.RootUrlHolder; import jetbrains.buildServer.controllers.AuthorizationInterceptor; import jetbrains.buildServer.controllers.BaseController; import jetbrains.buildServer.controllers.BasePropertiesBean; import jetbrains.buildServer.controllers.RequestPermissionsChecker; import jetbrains.buildServer.nuget.server.feed.server.NuGetServerRunnerSettingsEx; import jetbrains.buildServer.nuget.server.feed.server.NuGetServerStatusHolder; import jetbrains.buildServer.nuget.server.toolRegistry.tab.PermissionChecker; import jetbrains.buildServer.nuget.server.util.SystemInfo; import jetbrains.buildServer.serverSide.auth.AccessDeniedException; import jetbrains.buildServer.serverSide.auth.AuthorityHolder; import jetbrains.buildServer.web.openapi.PluginDescriptor; import jetbrains.buildServer.web.openapi.WebControllerManager; import org.jetbrains.annotations.NotNull; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.HashMap; import java.util.Map; /** * @author Eugene Petrenko ([email protected]) * Date: 26.10.11 19:21 */ public class FeedServerController extends BaseController { @NotNull private final FeedServerSettingsSection mySection; @NotNull private final PluginDescriptor myDescriptor; @NotNull private final NuGetServerRunnerSettingsEx mySettings; @NotNull private final NuGetServerStatusHolder myStatusHolder; @NotNull private final RootUrlHolder myRootUrl; @NotNull private final SystemInfo mySystemInfo; public FeedServerController(@NotNull final AuthorizationInterceptor auth, @NotNull final PermissionChecker checker, @NotNull final FeedServerSettingsSection section, @NotNull final WebControllerManager web, @NotNull final PluginDescriptor descriptor, @NotNull final NuGetServerRunnerSettingsEx settings, @NotNull final NuGetServerStatusHolder holder, @NotNull final RootUrlHolder rootUrl, @NotNull final SystemInfo systemInfo) { mySection = section; myDescriptor = descriptor; mySettings = settings; myStatusHolder = holder; myRootUrl = rootUrl; mySystemInfo = systemInfo; final String myPath = section.getIncludePath(); auth.addPathBasedPermissionsChecker(myPath, new RequestPermissionsChecker() { public void checkPermissions(@NotNull AuthorityHolder authorityHolder, @NotNull HttpServletRequest request) throws AccessDeniedException { checker.assertAccess(authorityHolder); } }); 
web.registerController(myPath, this); } @Override protected ModelAndView doHandle(@NotNull final HttpServletRequest request, @NotNull final HttpServletResponse response) throws Exception { if (!mySystemInfo.canStartNuGetProcesses()) { - return new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsOther.jsp")); + final ModelAndView mv = new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsOther.jsp")); + mv.getModel().put("canStartNuGetProcessesMessage", mySystemInfo.getNotAvailableMessage()); + return mv; } final ModelAndView modelAndView = new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsWindows.jsp")); final Map<String, String> properties = new HashMap<String, String>(); if (mySettings.isNuGetFeedEnabled()) { properties.put(FeedServerContants.NUGET_SERVER_ENABLED_CHECKBOX, "checked"); } String url = mySettings.getCustomTeamCityBaseUrl(); if (jetbrains.buildServer.util.StringUtil.isEmptyOrSpaces(url)) url = ""; properties.put(FeedServerContants.NUGET_SERVER_URL, url); modelAndView.getModel().put("propertiesBean", new BasePropertiesBean(properties)); modelAndView.getModel().put("serverUrl", myRootUrl.getRootUrl()); modelAndView.getModel().put("nugetStatusRefreshUrl", mySection.getIncludePath()); modelAndView.getModel().put("nugetSettingsPostUrl", mySection.getSettingsPath()); modelAndView.getModel().put("serverStatus", myStatusHolder.getStatus()); modelAndView.getModel().put("imagesBase", myDescriptor.getPluginResourcesPath("server/img")); modelAndView.getModel().put("feedUrl", mySettings.getNuGetFeedControllerPath()); return modelAndView; } }
true
true
protected ModelAndView doHandle(@NotNull final HttpServletRequest request, @NotNull final HttpServletResponse response) throws Exception { if (!mySystemInfo.canStartNuGetProcesses()) { return new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsOther.jsp")); } final ModelAndView modelAndView = new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsWindows.jsp")); final Map<String, String> properties = new HashMap<String, String>(); if (mySettings.isNuGetFeedEnabled()) { properties.put(FeedServerContants.NUGET_SERVER_ENABLED_CHECKBOX, "checked"); } String url = mySettings.getCustomTeamCityBaseUrl(); if (jetbrains.buildServer.util.StringUtil.isEmptyOrSpaces(url)) url = ""; properties.put(FeedServerContants.NUGET_SERVER_URL, url); modelAndView.getModel().put("propertiesBean", new BasePropertiesBean(properties)); modelAndView.getModel().put("serverUrl", myRootUrl.getRootUrl()); modelAndView.getModel().put("nugetStatusRefreshUrl", mySection.getIncludePath()); modelAndView.getModel().put("nugetSettingsPostUrl", mySection.getSettingsPath()); modelAndView.getModel().put("serverStatus", myStatusHolder.getStatus()); modelAndView.getModel().put("imagesBase", myDescriptor.getPluginResourcesPath("server/img")); modelAndView.getModel().put("feedUrl", mySettings.getNuGetFeedControllerPath()); return modelAndView; }
protected ModelAndView doHandle(@NotNull final HttpServletRequest request, @NotNull final HttpServletResponse response) throws Exception { if (!mySystemInfo.canStartNuGetProcesses()) { final ModelAndView mv = new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsOther.jsp")); mv.getModel().put("canStartNuGetProcessesMessage", mySystemInfo.getNotAvailableMessage()); return mv; } final ModelAndView modelAndView = new ModelAndView(myDescriptor.getPluginResourcesPath("server/feedServerSettingsWindows.jsp")); final Map<String, String> properties = new HashMap<String, String>(); if (mySettings.isNuGetFeedEnabled()) { properties.put(FeedServerContants.NUGET_SERVER_ENABLED_CHECKBOX, "checked"); } String url = mySettings.getCustomTeamCityBaseUrl(); if (jetbrains.buildServer.util.StringUtil.isEmptyOrSpaces(url)) url = ""; properties.put(FeedServerContants.NUGET_SERVER_URL, url); modelAndView.getModel().put("propertiesBean", new BasePropertiesBean(properties)); modelAndView.getModel().put("serverUrl", myRootUrl.getRootUrl()); modelAndView.getModel().put("nugetStatusRefreshUrl", mySection.getIncludePath()); modelAndView.getModel().put("nugetSettingsPostUrl", mySection.getSettingsPath()); modelAndView.getModel().put("serverStatus", myStatusHolder.getStatus()); modelAndView.getModel().put("imagesBase", myDescriptor.getPluginResourcesPath("server/img")); modelAndView.getModel().put("feedUrl", mySettings.getNuGetFeedControllerPath()); return modelAndView; }
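The fix in this record stops returning a bare ModelAndView on the cannot-start path and attaches the reason under canStartNuGetProcessesMessage, so feedServerSettingsOther.jsp can tell the user why the feed is unavailable instead of rendering an empty page. A sketch of the pattern in isolation; the message text is a placeholder, and getNotAvailableMessage() is assumed from the diff to return a human-readable explanation:

```java
import org.springframework.web.servlet.ModelAndView;

public class UnavailableViewSketch {
    // Placeholder for SystemInfo.getNotAvailableMessage(); the real text
    // comes from the SystemInfo implementation, which this record omits.
    static String notAvailableMessage() {
        return "NuGet server processes cannot be started on this platform";
    }

    // Mirrors the fixed doHandle branch: build the fallback view and put
    // the explanation into the model under the key the JSP is expected to read.
    static ModelAndView unavailableView(String viewPath) {
        final ModelAndView mv = new ModelAndView(viewPath);
        mv.getModel().put("canStartNuGetProcessesMessage", notAvailableMessage());
        return mv;
    }

    public static void main(String[] args) {
        ModelAndView mv = unavailableView("server/feedServerSettingsOther.jsp");
        System.out.println(mv.getModel().get("canStartNuGetProcessesMessage"));
    }
}
```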
diff --git a/aura/src/test/java/org/auraframework/throwable/quickfix/QuickFixUITestUtil.java b/aura/src/test/java/org/auraframework/throwable/quickfix/QuickFixUITestUtil.java index 4ffa8917c2..29d6789c5e 100644 --- a/aura/src/test/java/org/auraframework/throwable/quickfix/QuickFixUITestUtil.java +++ b/aura/src/test/java/org/auraframework/throwable/quickfix/QuickFixUITestUtil.java @@ -1,124 +1,127 @@ /* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.auraframework.throwable.quickfix; import java.io.File; import org.auraframework.system.SourceListener; import junit.framework.Assert; import org.auraframework.Aura; import org.auraframework.def.DefDescriptor; import org.auraframework.system.Source; import org.auraframework.test.WebDriverTestCase; import org.openqa.selenium.By; import org.openqa.selenium.JavascriptExecutor; import org.openqa.selenium.StaleElementReferenceException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.support.ui.ExpectedCondition; import org.openqa.selenium.support.ui.WebDriverWait; /** * Utility class for browser QuickFixes. Logic here should be common to all QuickFixes. */ public class QuickFixUITestUtil { private final WebDriverTestCase testCase; QuickFixUITestUtil(WebDriverTestCase testCase) { this.testCase = testCase; } /** * Given the buttons localId (aura:id on component), execute javascript on the browser to use COQL to find it's * globalId and then invoke the ui:button's press event. Using this method instead of WebDriver's click() method for * browser compatibility reasons. * * Note that this method is similar to AuraUITestingUtil's findGlobalIdForComponentWithGivenProperties() and * getEval() methods, but these tests must be run in DEV mode where $A.test is not supported. */ public void clickButtonByLocalId(String localId) { JavascriptExecutor jsExecutor = (JavascriptExecutor) testCase.getDriver(); String query = "var cmp = $A.getQueryStatement().from('component').field('globalId').field('localId')" + ".where(\"localId === '" + localId + "'\").query();return cmp.rows[0].globalId"; String globalId = jsExecutor.executeScript(query).toString(); jsExecutor.executeScript("$A.getCmp(\"" + globalId + "\").get('e.press').fire()"); } /** * Click the 'Fix!' button and verify text displayed in browser either from newly loaded component, or any error * message that is displayed on failure. */ public void clickFix(boolean expectedSuccess, String text) { clickButtonByLocalId("fixButton"); if (expectedSuccess) { // Newly created component should be loaded with it's contents displayed to the user waitForFixToProcess("Text from newly created component never displayed", By.tagName("body"), text); } else { // Expecting error message to pop up waitForFixToProcess("Quickfix error text never displayed", By.xpath("//div[@id='auraErrorMessage']"), text); } } /** * Wait for the browser to refresh and display the given text, or timeout with error. 
*/ private void waitForFixToProcess(String msg, final By elementSelector, final String text) { WebDriverWait wait = new WebDriverWait(testCase.getDriver(), 30); // Expect StaleElementReferenceException if browser hasn't displayed new text yet, so ignore until timeout wait.withMessage(msg).ignoring(StaleElementReferenceException.class).until(new ExpectedCondition<Boolean>() { @Override public Boolean apply(WebDriver d) { String elementText = testCase.getDriver().findElement(elementSelector).getText(); return elementText.contains(text); } }); } /** * Verify text at top of QuickFix screen which describes the error. */ public void verifyToolbarText(String text) { By toolbarXpath = By.xpath("//div[@class='toolbar']"); String toolbarText = testCase.getDriver().findElement(toolbarXpath).getText(); Assert.assertTrue("Incorrect message displayed on quickfix toolbar. Expected: " + text + ". But got: " + toolbarText, toolbarText.contains(text)); } public void clickCreateButton(String text) { By buttonXpath = By.xpath("//button/span[text()='" + text + "']"); Assert.assertTrue("Create Attribute QuickFix button not present", testCase.isElementPresent(buttonXpath)); clickButtonByLocalId("createButton"); } /** * Delete all files in component bundle, and then directory file itself. */ public void deleteFiles(DefDescriptor<?> defDescriptor) { Source<?> source = Aura.getContextService().getCurrentContext().getDefRegistry().getSource(defDescriptor); if (source != null) { File f = new File(source.getSystemId()); if (f.exists()) { File dir = f.getParentFile(); for (File x : dir.listFiles()) { x.delete(); } dir.delete(); } } - Aura.getDefinitionService().onSourceChanged(defDescriptor, SourceListener.SourceMonitorEvent.deleted); + // invalidate all cache so that next tests can use fresh defs. + // due to inherit delays in file listeners, there are timing issues with + // file listener clearing the same DD while the next tests run. + Aura.getDefinitionService().onSourceChanged(null, SourceListener.SourceMonitorEvent.deleted); } }
true
true
public void deleteFiles(DefDescriptor<?> defDescriptor) { Source<?> source = Aura.getContextService().getCurrentContext().getDefRegistry().getSource(defDescriptor); if (source != null) { File f = new File(source.getSystemId()); if (f.exists()) { File dir = f.getParentFile(); for (File x : dir.listFiles()) { x.delete(); } dir.delete(); } } Aura.getDefinitionService().onSourceChanged(defDescriptor, SourceListener.SourceMonitorEvent.deleted); }
public void deleteFiles(DefDescriptor<?> defDescriptor) { Source<?> source = Aura.getContextService().getCurrentContext().getDefRegistry().getSource(defDescriptor); if (source != null) { File f = new File(source.getSystemId()); if (f.exists()) { File dir = f.getParentFile(); for (File x : dir.listFiles()) { x.delete(); } dir.delete(); } } // invalidate all cache so that next tests can use fresh defs. // due to inherit delays in file listeners, there are timing issues with // file listener clearing the same DD while the next tests run. Aura.getDefinitionService().onSourceChanged(null, SourceListener.SourceMonitorEvent.deleted); }
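Per the comment added in this fix, passing null instead of the concrete descriptor asks the definition service to invalidate all cached defs, sidestepping the file-listener timing race the comment describes. A sketch of the null-means-flush-everything convention the fix relies on; the cache below is a hypothetical stand-in, not Aura's registry:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SourceChangeSketch {
    // Hypothetical def cache keyed by descriptor name; Aura's real
    // registry and file-listener plumbing are more involved.
    static final Map<String, Object> DEF_CACHE = new ConcurrentHashMap<>();

    // The convention the fix relies on: a null descriptor is a wildcard
    // that flushes every cached definition, while a concrete descriptor
    // evicts only its own entry.
    static void onSourceChanged(String descriptor) {
        if (descriptor == null) {
            DEF_CACHE.clear();
        } else {
            DEF_CACHE.remove(descriptor);
        }
    }

    public static void main(String[] args) {
        DEF_CACHE.put("markup://test:cmpA", new Object());
        DEF_CACHE.put("markup://test:cmpB", new Object());
        onSourceChanged(null); // the test-cleanup path: everything goes
        System.out.println(DEF_CACHE.size()); // 0
    }
}
```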
diff --git a/src/ru/leks13/jabbertimer/UserCommand.java b/src/ru/leks13/jabbertimer/UserCommand.java index a240051..fd27c88 100644 --- a/src/ru/leks13/jabbertimer/UserCommand.java +++ b/src/ru/leks13/jabbertimer/UserCommand.java @@ -1,192 +1,192 @@ /* * Leks13 * GPL v3 */ package ru.leks13.jabbertimer; import java.io.IOException; import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.StringTokenizer; import org.jivesoftware.smack.XMPPException; import org.jivesoftware.smack.packet.Presence; public class UserCommand { public static String muc; public static Boolean doUserCommand(String command, String jid, String admin) throws XMPPException, IOException, NumberFormatException, ClassNotFoundException, SQLException, ParseException { Boolean ans = false; String msg = null; if (command.startsWith("!report") && !ans) { command = new StringBuffer(command).delete(0, 7).toString(); msg = command + " - " + jid; XmppNet.sendMessage(admin, msg); ans = true; } if (command.startsWith("!list") && !ans) { java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); msg = Sql.listOfTimer(jid, time); XmppNet.sendMessage(jid, msg); ans = true; } if (command.startsWith("!remind") && !ans) { command = command.replaceAll("!remind ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy HH:mm"); Date dt = null; try { dt = sdf.parse(command); } catch (ParseException e) { msg = "Wrong date!"; XmppNet.sendMessage(jid, msg); ans = true; } long dt1 = dt.getTime() / 1000; if (!ans) { if (dt1 > (time / 1000)) { Sql.add(dt1, jid, Main.id, noteU); msg = "Timer is set!"; } else { msg = "Wrong date"; } } XmppNet.sendMessage(jid, msg); ans = true; Main.id++; } if (command.startsWith("!note") && !ans) { command = command.replaceAll("!note ", ""); Long time = 0L; if (!ans) { Sql.add(time, jid, Main.id, command); } ans = true; msg = "Writed!"; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!my") && !ans) { msg = Sql.listOfNote(jid); ans = true; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!del") && !ans) { command = command.replaceAll("!del #", ""); if (!ans) { Sql.deleteNote(jid, command); } ans = true; msg = "Command complete"; XmppNet.sendMessage(jid, msg); Main.id++; } try { if (command.startsWith("!timer") && !ans) { command = command.replaceAll("!timer ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } if (Long.parseLong(command) < 1 || Long.parseLong(command) > 120) { throw new NumberFormatException(); } long timeDo = ((time + Long.parseLong(command) * 1000 * 60) / 1000L); if (!ans) { Sql.add(timeDo, jid, Main.id, noteU); } ans = true; msg = "Timer is set!"; XmppNet.sendMessage(jid, msg); Main.id++; } } catch (NumberFormatException ex1) { ans = true; XmppNet.sendMessage(jid, "Wrong timer interval \n" + "The permissible range of 1 to 120 minutes."); } - if (command.startsWith("!off") && !ans && jid.contains(admin)) { + if 
(command.startsWith("!off") && !ans && jid.startsWith(admin)) { XmppNet.disconnect(); ans = true; } - if (command.startsWith("!roster") && !ans && jid.contains(admin)) { + if (command.startsWith("!roster") && !ans && jid.startsWith(admin)) { msg = XmppNet.getXmppRoster(); XmppNet.sendMessage(jid, msg); ans = true; } - if (command.startsWith("!status") && !ans && jid.contains(admin)) { + if (command.startsWith("!status") && !ans && jid.startsWith(admin)) { command = new StringBuffer(command).delete(0, 8).toString(); String status = command; Presence presence = new Presence(Presence.Type.available); presence.setStatus(status); XmppNet.connection.sendPacket(presence); ans = true; } if (command.equals("!help")) { msg = "Commands: \n" + "!report <message> - send <message> to admin \n \n" + "!remind <dd.mm.yyyy HH:mm>@<remind> - set a reminder on this date \n" + " For example !remind 03.10.2012 18:51@Hello \n \n" + "!timer <minutes>@<remind> - set timer. \n" + " For example '!timer 2@Hello' send after 2 minutes 'Hello' \n \n" + "!list - list of installed timers \n \n" + "Notes: \n" + "!my - list of notes \n" + "!note 'text' - write note \n" + "!del #1234567890 - delete note with number #1234567890 \n"; - if (jid.contains(admin)) { + if (jid.startsWith(admin)) { msg += "---------------------\n" + "!roster - show bot`s roster \n" + "!status <new_status> - change status \n" + "!off - offline this bot \n"; } XmppNet.sendMessage(jid, msg); ans = true; } return ans; } private static void NullNoteEx(String jid) throws XMPPException { XmppNet.sendMessage(jid, "Blank or invalid string reminder!"); } }
false
true
public static Boolean doUserCommand(String command, String jid, String admin) throws XMPPException, IOException, NumberFormatException, ClassNotFoundException, SQLException, ParseException { Boolean ans = false; String msg = null; if (command.startsWith("!report") && !ans) { command = new StringBuffer(command).delete(0, 7).toString(); msg = command + " - " + jid; XmppNet.sendMessage(admin, msg); ans = true; } if (command.startsWith("!list") && !ans) { java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); msg = Sql.listOfTimer(jid, time); XmppNet.sendMessage(jid, msg); ans = true; } if (command.startsWith("!remind") && !ans) { command = command.replaceAll("!remind ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy HH:mm"); Date dt = null; try { dt = sdf.parse(command); } catch (ParseException e) { msg = "Wrong date!"; XmppNet.sendMessage(jid, msg); ans = true; } long dt1 = dt.getTime() / 1000; if (!ans) { if (dt1 > (time / 1000)) { Sql.add(dt1, jid, Main.id, noteU); msg = "Timer is set!"; } else { msg = "Wrong date"; } } XmppNet.sendMessage(jid, msg); ans = true; Main.id++; } if (command.startsWith("!note") && !ans) { command = command.replaceAll("!note ", ""); Long time = 0L; if (!ans) { Sql.add(time, jid, Main.id, command); } ans = true; msg = "Writed!"; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!my") && !ans) { msg = Sql.listOfNote(jid); ans = true; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!del") && !ans) { command = command.replaceAll("!del #", ""); if (!ans) { Sql.deleteNote(jid, command); } ans = true; msg = "Command complete"; XmppNet.sendMessage(jid, msg); Main.id++; } try { if (command.startsWith("!timer") && !ans) { command = command.replaceAll("!timer ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } if (Long.parseLong(command) < 1 || Long.parseLong(command) > 120) { throw new NumberFormatException(); } long timeDo = ((time + Long.parseLong(command) * 1000 * 60) / 1000L); if (!ans) { Sql.add(timeDo, jid, Main.id, noteU); } ans = true; msg = "Timer is set!"; XmppNet.sendMessage(jid, msg); Main.id++; } } catch (NumberFormatException ex1) { ans = true; XmppNet.sendMessage(jid, "Wrong timer interval \n" + "The permissible range of 1 to 120 minutes."); } if (command.startsWith("!off") && !ans && jid.contains(admin)) { XmppNet.disconnect(); ans = true; } if (command.startsWith("!roster") && !ans && jid.contains(admin)) { msg = XmppNet.getXmppRoster(); XmppNet.sendMessage(jid, msg); ans = true; } if (command.startsWith("!status") && !ans && jid.contains(admin)) { command = new StringBuffer(command).delete(0, 8).toString(); String status = command; Presence presence = new Presence(Presence.Type.available); presence.setStatus(status); XmppNet.connection.sendPacket(presence); ans = true; } if (command.equals("!help")) { msg = "Commands: \n" + "!report <message> - send <message> to admin \n \n" + "!remind <dd.mm.yyyy HH:mm>@<remind> - set a reminder on this date 
\n" + " For example !remind 03.10.2012 18:51@Hello \n \n" + "!timer <minutes>@<remind> - set timer. \n" + " For example '!timer 2@Hello' send after 2 minutes 'Hello' \n \n" + "!list - list of installed timers \n \n" + "Notes: \n" + "!my - list of notes \n" + "!note 'text' - write note \n" + "!del #1234567890 - delete note with number #1234567890 \n"; if (jid.contains(admin)) { msg += "---------------------\n" + "!roster - show bot`s roster \n" + "!status <new_status> - change status \n" + "!off - offline this bot \n"; } XmppNet.sendMessage(jid, msg); ans = true; } return ans; }
public static Boolean doUserCommand(String command, String jid, String admin) throws XMPPException, IOException, NumberFormatException, ClassNotFoundException, SQLException, ParseException { Boolean ans = false; String msg = null; if (command.startsWith("!report") && !ans) { command = new StringBuffer(command).delete(0, 7).toString(); msg = command + " - " + jid; XmppNet.sendMessage(admin, msg); ans = true; } if (command.startsWith("!list") && !ans) { java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); msg = Sql.listOfTimer(jid, time); XmppNet.sendMessage(jid, msg); ans = true; } if (command.startsWith("!remind") && !ans) { command = command.replaceAll("!remind ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } SimpleDateFormat sdf = new SimpleDateFormat("dd.MM.yyyy HH:mm"); Date dt = null; try { dt = sdf.parse(command); } catch (ParseException e) { msg = "Wrong date!"; XmppNet.sendMessage(jid, msg); ans = true; } long dt1 = dt.getTime() / 1000; if (!ans) { if (dt1 > (time / 1000)) { Sql.add(dt1, jid, Main.id, noteU); msg = "Timer is set!"; } else { msg = "Wrong date"; } } XmppNet.sendMessage(jid, msg); ans = true; Main.id++; } if (command.startsWith("!note") && !ans) { command = command.replaceAll("!note ", ""); Long time = 0L; if (!ans) { Sql.add(time, jid, Main.id, command); } ans = true; msg = "Writed!"; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!my") && !ans) { msg = Sql.listOfNote(jid); ans = true; XmppNet.sendMessage(jid, msg); Main.id++; } if (command.startsWith("!del") && !ans) { command = command.replaceAll("!del #", ""); if (!ans) { Sql.deleteNote(jid, command); } ans = true; msg = "Command complete"; XmppNet.sendMessage(jid, msg); Main.id++; } try { if (command.startsWith("!timer") && !ans) { command = command.replaceAll("!timer ", ""); java.util.Date today = new java.util.Date(); long time = (System.currentTimeMillis()); StringTokenizer st = new StringTokenizer(command, "@"); String noteU = ""; while (st.hasMoreTokens()) { command = st.nextToken(); if (!st.hasMoreElements()) { NullNoteEx(jid); } noteU = st.nextToken(); } if (Long.parseLong(command) < 1 || Long.parseLong(command) > 120) { throw new NumberFormatException(); } long timeDo = ((time + Long.parseLong(command) * 1000 * 60) / 1000L); if (!ans) { Sql.add(timeDo, jid, Main.id, noteU); } ans = true; msg = "Timer is set!"; XmppNet.sendMessage(jid, msg); Main.id++; } } catch (NumberFormatException ex1) { ans = true; XmppNet.sendMessage(jid, "Wrong timer interval \n" + "The permissible range of 1 to 120 minutes."); } if (command.startsWith("!off") && !ans && jid.startsWith(admin)) { XmppNet.disconnect(); ans = true; } if (command.startsWith("!roster") && !ans && jid.startsWith(admin)) { msg = XmppNet.getXmppRoster(); XmppNet.sendMessage(jid, msg); ans = true; } if (command.startsWith("!status") && !ans && jid.startsWith(admin)) { command = new StringBuffer(command).delete(0, 8).toString(); String status = command; Presence presence = new Presence(Presence.Type.available); presence.setStatus(status); XmppNet.connection.sendPacket(presence); ans = true; } if (command.equals("!help")) { msg = "Commands: \n" + "!report <message> - send <message> to admin \n \n" + "!remind <dd.mm.yyyy HH:mm>@<remind> - set a reminder on this 
date \n" + " For example !remind 03.10.2012 18:51@Hello \n \n" + "!timer <minutes>@<remind> - set timer. \n" + " For example '!timer 2@Hello' send after 2 minutes 'Hello' \n \n" + "!list - list of installed timers \n \n" + "Notes: \n" + "!my - list of notes \n" + "!note 'text' - write note \n" + "!del #1234567890 - delete note with number #1234567890 \n"; if (jid.startsWith(admin)) { msg += "---------------------\n" + "!roster - show bot`s roster \n" + "!status <new_status> - change status \n" + "!off - offline this bot \n"; } XmppNet.sendMessage(jid, msg); ans = true; } return ans; }
diff --git a/Tests/src/com/bazaarvoice/test/BaseTest.java b/Tests/src/com/bazaarvoice/test/BaseTest.java index bb86086..3f57628 100644 --- a/Tests/src/com/bazaarvoice/test/BaseTest.java +++ b/Tests/src/com/bazaarvoice/test/BaseTest.java @@ -1,31 +1,31 @@ package com.bazaarvoice.test; import com.bazaarvoice.BazaarRequest; import junit.framework.TestCase; /** * Created with IntelliJ IDEA. * User: gary * Date: 4/26/12 * Time: 10:51 PM */ public class BaseTest extends TestCase { public BazaarRequest request; public BazaarRequest submit; private final String tag = getClass().getSimpleName(); @Override protected void setUp() throws Exception { super.setUp(); request = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "kuy3zj9pr3n7i0wxajrzj04xo", - "5.1"); + "5.3"); submit = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "1wtp4lx7aww42x4154oly21ae", - "5.1"); + "5.3"); } }
false
true
protected void setUp() throws Exception { super.setUp(); request = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "kuy3zj9pr3n7i0wxajrzj04xo", "5.1"); submit = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "1wtp4lx7aww42x4154oly21ae", "5.1"); }
protected void setUp() throws Exception { super.setUp(); request = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "kuy3zj9pr3n7i0wxajrzj04xo", "5.3"); submit = new BazaarRequest( "reviews.apitestcustomer.bazaarvoice.com/bvstaging", "1wtp4lx7aww42x4154oly21ae", "5.3"); }
diff --git a/trunk/p-dl-reasoner/src/edu/iastate/pdlreasoner/tableau/graph/TableauGraph.java b/trunk/p-dl-reasoner/src/edu/iastate/pdlreasoner/tableau/graph/TableauGraph.java index e8f1c3b..a37bc81 100644 --- a/trunk/p-dl-reasoner/src/edu/iastate/pdlreasoner/tableau/graph/TableauGraph.java +++ b/trunk/p-dl-reasoner/src/edu/iastate/pdlreasoner/tableau/graph/TableauGraph.java @@ -1,263 +1,265 @@ package edu.iastate.pdlreasoner.tableau.graph; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import edu.iastate.pdlreasoner.model.DLPackage; import edu.iastate.pdlreasoner.server.graph.GlobalNodeID; import edu.iastate.pdlreasoner.tableau.Blocking; import edu.iastate.pdlreasoner.tableau.branch.Branch; import edu.iastate.pdlreasoner.tableau.branch.BranchPoint; import edu.iastate.pdlreasoner.tableau.branch.BranchPointSet; import edu.iastate.pdlreasoner.util.CollectionUtil; public class TableauGraph { private static final Logger LOGGER = Logger.getLogger(TableauGraph.class); //Constants private DLPackage m_Package; //Variables private Set<Node> m_Roots; private Map<GlobalNodeID,Node> m_GlobalMap; /////////////PRUNE ME WHEN BACKTRACKING private NodeFactory m_NodeFactory; private List<Branch> m_Branches; private Blocking m_Blocking; //Processors private ClashCauseCollector m_ClashCollector; private OpenNodesCollector m_OpenNodesCollector; private PruneNodesCollector m_PruneNodesCollector; private ConceptPruner m_ConceptPruner; public TableauGraph(DLPackage dlPackage) { m_Package = dlPackage; m_Roots = CollectionUtil.makeSet(); m_GlobalMap = CollectionUtil.makeMap(); m_NodeFactory = new NodeFactory(this); m_Branches = CollectionUtil.makeList(); m_Blocking = new Blocking(); m_ClashCollector = new ClashCauseCollector(); m_OpenNodesCollector = new OpenNodesCollector(); m_PruneNodesCollector = new PruneNodesCollector(); m_ConceptPruner = new ConceptPruner(); } public DLPackage getPackage() { return m_Package; } public void accept(NodeVisitor v) { for (Node root : m_Roots) { root.accept(v); } } public Node makeNode(BranchPointSet dependency) { return m_NodeFactory.make(dependency); } public Node makeRoot(BranchPointSet dependency) { Node n = makeNode(dependency); m_Roots.add(n); if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "added root " + n + ", all roots = " + m_Roots); } return n; } public void put(GlobalNodeID globalID, Node node) { m_GlobalMap.put(globalID, node); } public Node get(GlobalNodeID globalID) { return m_GlobalMap.get(globalID); } public void addBranch(Branch branch) { m_Branches.add(branch); if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "added branch = " + branch); } } public boolean hasBranch(BranchPoint bp) { for (int i = m_Branches.size() - 1; i >= 0; i--) { int compare = bp.compareTo(m_Branches.get(i).getBranchPoint()); if (compare == 0) { return true; } else if (compare > 0) { return false; } } return false; } public Branch getLastBranch() { return m_Branches.get(m_Branches.size() - 1); } public boolean isBlocked(Node n) { return m_Blocking.isBlocked(n); } public void pruneTo(BranchPoint restoreTarget) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "pruning starts with target = " + restoreTarget); + LOGGER.debug(m_Package.toDebugString() + "branches before pruning = " + m_Branches); } //Prune nodes m_PruneNodesCollector.reset(restoreTarget); accept(m_PruneNodesCollector); for (Node n : m_PruneNodesCollector.getNodes()) { if 
(!m_Roots.remove(n)) { n.removeFromParent(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "removed node " + n); } } //Prune and reopen concepts on remaining node m_ConceptPruner.reset(restoreTarget); accept(m_ConceptPruner); //Prune branches for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); if (iBranch.getDependency().hasSameOrAfter(restoreTarget)) { m_Branches.remove(i); } else { break; } } - //Reopen remaining branches - those that do not depend on the restoreTarget - //but still have to be pruned to make sure that restoreTarget is the latest branch. + //Reopen remaining branches added after restoreTarget - those that do not depend + //on the restoreTarget but still have to be pruned to make sure that restoreTarget + //is the latest branch. for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); - if (restoreTarget.equals(iBranch.getBranchPoint())) break; + if (iBranch.getBranchPoint().compareTo(restoreTarget) <= 0) break; m_Branches.remove(i); iBranch.reopenConceptOnNode(); } if (LOGGER.isDebugEnabled()) { - LOGGER.debug(m_Package.toDebugString() + "branches = " + m_Branches); + LOGGER.debug(m_Package.toDebugString() + "branches after pruning = " + m_Branches); } } public BranchPointSet getEarliestClashCause() { m_ClashCollector.reset(); accept(m_ClashCollector); Set<BranchPointSet> clashCauses = m_ClashCollector.getClashCauses(); return clashCauses.isEmpty() ? null : Collections.min(clashCauses, BranchPointSet.ORDER_BY_LATEST_BRANCH_POINT); } public Set<Node> getOpenNodes() { m_OpenNodesCollector.reset(); accept(m_OpenNodesCollector); return m_OpenNodesCollector.getNodes(); } private static class ClashCauseCollector implements NodeVisitor { private Set<BranchPointSet> m_ClashCauses; public ClashCauseCollector() { m_ClashCauses = CollectionUtil.makeSet(); } public void reset() { m_ClashCauses.clear(); } public Set<BranchPointSet> getClashCauses() { return m_ClashCauses; } @Override public void visit(Node n) { m_ClashCauses.addAll(n.getClashCauses()); n.clearClashCauses(); } } private class OpenNodesCollector implements NodeVisitor { private Set<Node> m_Nodes; public OpenNodesCollector() { m_Nodes = CollectionUtil.makeSet(); } public void reset() { m_Nodes.clear(); } public Set<Node> getNodes() { return m_Nodes; } @Override public void visit(Node n) { if (!n.isComplete() && !m_Blocking.isBlocked(n)) { m_Nodes.add(n); } } } private static class PruneNodesCollector implements NodeVisitor { private Set<Node> m_Nodes; private BranchPoint m_RestoreTarget; public PruneNodesCollector() { m_Nodes = CollectionUtil.makeSet(); } public void reset(BranchPoint restoreTarget) { m_Nodes.clear(); m_RestoreTarget = restoreTarget; } public Set<Node> getNodes() { return m_Nodes; } @Override public void visit(Node n) { if (n.getDependency().hasSameOrAfter(m_RestoreTarget)) { m_Nodes.add(n); } } } private static class ConceptPruner implements NodeVisitor { private BranchPoint m_RestoreTarget; public void reset(BranchPoint restoreTarget) { m_RestoreTarget = restoreTarget; } @Override public void visit(Node n) { n.pruneAndReopenLabels(m_RestoreTarget); } } }
false
true
public void pruneTo(BranchPoint restoreTarget) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "pruning starts with target = " + restoreTarget); } //Prune nodes m_PruneNodesCollector.reset(restoreTarget); accept(m_PruneNodesCollector); for (Node n : m_PruneNodesCollector.getNodes()) { if (!m_Roots.remove(n)) { n.removeFromParent(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "removed node " + n); } } //Prune and reopen concepts on remaining node m_ConceptPruner.reset(restoreTarget); accept(m_ConceptPruner); //Prune branches for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); if (iBranch.getDependency().hasSameOrAfter(restoreTarget)) { m_Branches.remove(i); } else { break; } } //Reopen remaining branches - those that do not depend on the restoreTarget //but still have to be pruned to make sure that restoreTarget is the latest branch. for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); if (restoreTarget.equals(iBranch.getBranchPoint())) break; m_Branches.remove(i); iBranch.reopenConceptOnNode(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "branches = " + m_Branches); } }
public void pruneTo(BranchPoint restoreTarget) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "pruning starts with target = " + restoreTarget); LOGGER.debug(m_Package.toDebugString() + "branches before pruning = " + m_Branches); } //Prune nodes m_PruneNodesCollector.reset(restoreTarget); accept(m_PruneNodesCollector); for (Node n : m_PruneNodesCollector.getNodes()) { if (!m_Roots.remove(n)) { n.removeFromParent(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "removed node " + n); } } //Prune and reopen concepts on remaining node m_ConceptPruner.reset(restoreTarget); accept(m_ConceptPruner); //Prune branches for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); if (iBranch.getDependency().hasSameOrAfter(restoreTarget)) { m_Branches.remove(i); } else { break; } } //Reopen remaining branches added after restoreTarget - those that do not depend //on the restoreTarget but still have to be pruned to make sure that restoreTarget //is the latest branch. for (int i = m_Branches.size() - 1; i >= 0; i--) { Branch iBranch = m_Branches.get(i); if (iBranch.getBranchPoint().compareTo(restoreTarget) <= 0) break; m_Branches.remove(i); iBranch.reopenConceptOnNode(); } if (LOGGER.isDebugEnabled()) { LOGGER.debug(m_Package.toDebugString() + "branches after pruning = " + m_Branches); } }
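The loop-termination change in this record (equals on the exact branch point replaced by compareTo(restoreTarget) <= 0) matters when the restore target's own branch is no longer in m_Branches: the equals test would never fire and the loop would reopen every remaining branch. A sketch with integers standing in for BranchPoint, assuming compareTo follows creation order, as hasBranch in the same class already does:

```java
import java.util.ArrayList;
import java.util.List;

public class BranchPruneSketch {
    public static void main(String[] args) {
        // Branch points in creation order; the exact restore target (3)
        // is absent, e.g. already removed by the dependency-based pass.
        List<Integer> branches = new ArrayList<>(List.of(1, 2, 4, 5));
        int restoreTarget = 3;

        // equals-based termination would never break here and would drain
        // the list to empty; <= 0 stops at the first branch at or before
        // the target, reopening only branches added after it.
        for (int i = branches.size() - 1; i >= 0; i--) {
            if (branches.get(i).compareTo(restoreTarget) <= 0) break;
            branches.remove(i); // stands in for iBranch.reopenConceptOnNode()
        }
        System.out.println(branches); // [1, 2]
    }
}
```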
diff --git a/src/main/java/com/hmsonline/cassandra/triggers/LogEntryStore.java b/src/main/java/com/hmsonline/cassandra/triggers/LogEntryStore.java index fa3457e..6cfda8f 100644 --- a/src/main/java/com/hmsonline/cassandra/triggers/LogEntryStore.java +++ b/src/main/java/com/hmsonline/cassandra/triggers/LogEntryStore.java @@ -1,115 +1,112 @@ package com.hmsonline.cassandra.triggers; import java.net.InetAddress; import java.net.NetworkInterface; import java.net.SocketException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.cassandra.thrift.ColumnOrSuperColumn; import org.apache.cassandra.thrift.ColumnPath; import org.apache.cassandra.thrift.ConsistencyLevel; import org.apache.cassandra.thrift.KeySlice; import org.apache.cassandra.thrift.Mutation; import org.apache.cassandra.utils.ByteBufferUtil; import org.json.simple.JSONValue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class LogEntryStore extends CassandraStore { private static Logger logger = LoggerFactory.getLogger(LogEntryStore.class); private static String hostName = null; protected LogEntryStore(String keyspace, String columnFamily) throws Exception { super(keyspace, columnFamily + "_" + getHostName()); } public void write(LogEntry logEntry) throws Throwable { write(logEntry, this.getColumnFamily()); } public void write(LogEntry logEntry, String columnFamily) throws Throwable { List<Mutation> slice = new ArrayList<Mutation>(); slice.add(getMutation(logEntry.getUuid(), JSONValue.toJSONString(logEntry.toMap()).toString())); if (ConfigurationStore.getStore().shouldWriteColumns()) { for (ColumnOperation operation : logEntry.getOperations()) { if (operation.isDelete()) { slice.add(getMutation(operation.getName(), OperationType.DELETE)); } else { slice.add(getMutation(operation.getName(), OperationType.UPDATE)); } } } Map<ByteBuffer, Map<String, List<Mutation>>> mutationMap = new HashMap<ByteBuffer, Map<String, List<Mutation>>>(); Map<String, List<Mutation>> cfMutations = new HashMap<String, List<Mutation>>(); cfMutations.put(columnFamily, slice); ByteBuffer rowKey = ByteBufferUtil.bytes(getKey()); mutationMap.put(rowKey, cfMutations); getConnection(this.getKeyspace()).batch_mutate(mutationMap, logEntry.getConsistencyLevel()); } public void remove(LogEntry logEntry) throws Throwable { long deleteTime = System.currentTimeMillis() * 1000; ColumnPath path = new ColumnPath(this.getColumnFamily()); path.setColumn(ByteBufferUtil.bytes(logEntry.getUuid())); getConnection(this.getKeyspace()).remove(ByteBufferUtil.bytes(logEntry.getCommitLogRowKey()), path, deleteTime, ConsistencyLevel.ALL); } public static String getHostName() throws SocketException { if (hostName == null) { Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces(); { while (interfaces.hasMoreElements()) { NetworkInterface nic = interfaces.nextElement(); Enumeration<InetAddress> addresses = nic.getInetAddresses(); while (hostName == null && addresses.hasMoreElements()) { InetAddress address = addresses.nextElement(); if (!address.isLoopbackAddress()) { hostName = address.getHostName(); logger.debug("Host ID: " + hostName); } } } } - if(hostName.indexOf('.') > -1) { - hostName = hostName.substring(0, hostName.indexOf('.')); - } else if(hostName.indexOf(':') > -1) { - hostName = hostName.replaceAll(":", ""); - hostName = hostName.replaceAll("%", ""); - } + hostName = hostName.replaceAll(":", "_"); + hostName = 
hostName.replaceAll("%", "_"); + hostName = hostName.replaceAll(".", "_"); } return hostName; } private static String getKey() { long hours = System.currentTimeMillis() / (1000 * 1000 * 60); return "" + hours; } public static List<LogEntry> toLogEntry(List<KeySlice> rows) throws Exception, Throwable { List<LogEntry> logEntries = new ArrayList<LogEntry>(); if (rows == null || rows.size() == 0) { return logEntries; } for (KeySlice keySlice : rows) { if (keySlice.columns.size() > 0) { for (ColumnOrSuperColumn cc : keySlice.columns) { LogEntry logEntry = LogEntry.fromJson(ByteBufferUtil.string(cc.column.value)); if (logEntry != null) { logEntry.setCommitLogRowKey(ByteBufferUtil.string(keySlice.key)); logEntry.setUuid(ByteBufferUtil.string(cc.column.name)); logEntries.add(logEntry); } } } } return logEntries; } }
true
true
public static String getHostName() throws SocketException { if (hostName == null) { Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces(); { while (interfaces.hasMoreElements()) { NetworkInterface nic = interfaces.nextElement(); Enumeration<InetAddress> addresses = nic.getInetAddresses(); while (hostName == null && addresses.hasMoreElements()) { InetAddress address = addresses.nextElement(); if (!address.isLoopbackAddress()) { hostName = address.getHostName(); logger.debug("Host ID: " + hostName); } } } } if(hostName.indexOf('.') > -1) { hostName = hostName.substring(0, hostName.indexOf('.')); } else if(hostName.indexOf(':') > -1) { hostName = hostName.replaceAll(":", ""); hostName = hostName.replaceAll("%", ""); } } return hostName; }
public static String getHostName() throws SocketException { if (hostName == null) { Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces(); { while (interfaces.hasMoreElements()) { NetworkInterface nic = interfaces.nextElement(); Enumeration<InetAddress> addresses = nic.getInetAddresses(); while (hostName == null && addresses.hasMoreElements()) { InetAddress address = addresses.nextElement(); if (!address.isLoopbackAddress()) { hostName = address.getHostName(); logger.debug("Host ID: " + hostName); } } } } hostName = hostName.replaceAll(":", "_"); hostName = hostName.replaceAll("%", "_"); hostName = hostName.replaceAll(".", "_"); } return hostName; }
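One caution when reading the new replaceAll chain in this record: String.replaceAll compiles its first argument as a regular expression. ":" and "%" are regex literals, but "." matches any character, so the third call rewrites the entire host name to underscores rather than only the dots. A sketch of the difference:

```java
public class HostNameSanitizeSketch {
    public static void main(String[] args) {
        String hostName = "node1.example.com";

        // "." is a regex wildcard: every character becomes an underscore.
        System.out.println(hostName.replaceAll(".", "_"));   // all underscores

        // Escaping the dot, or using the char overload, replaces only dots.
        System.out.println(hostName.replaceAll("\\.", "_")); // node1_example_com
        System.out.println(hostName.replace('.', '_'));      // node1_example_com
    }
}
```

Run as committed, every host-specific column family suffix collapses to a string of underscores, so the escaped form is likely what was intended.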
diff --git a/SpagoBIProject/src/it/eng/spagobi/analiticalmodel/execution/service/GetParameterValuesForExecutionAction.java b/SpagoBIProject/src/it/eng/spagobi/analiticalmodel/execution/service/GetParameterValuesForExecutionAction.java index b66187792..7a176b247 100644 --- a/SpagoBIProject/src/it/eng/spagobi/analiticalmodel/execution/service/GetParameterValuesForExecutionAction.java +++ b/SpagoBIProject/src/it/eng/spagobi/analiticalmodel/execution/service/GetParameterValuesForExecutionAction.java @@ -1,445 +1,447 @@ /* SpagoBI, the Open Source Business Intelligence suite * Copyright (C) 2012 Engineering Ingegneria Informatica S.p.A. - SpagoBI Competency Center * This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0, without the "Incompatible With Secondary Licenses" notice. * If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package it.eng.spagobi.analiticalmodel.execution.service; import it.eng.spago.base.SourceBean; import it.eng.spago.base.SourceBeanAttribute; import it.eng.spago.error.EMFUserError; import it.eng.spago.security.IEngUserProfile; import it.eng.spagobi.analiticalmodel.document.bo.BIObject; import it.eng.spagobi.analiticalmodel.document.handlers.ExecutionInstance; import it.eng.spagobi.analiticalmodel.document.handlers.LovResultCacheManager; import it.eng.spagobi.behaviouralmodel.analyticaldriver.bo.BIObjectParameter; import it.eng.spagobi.behaviouralmodel.analyticaldriver.bo.ObjParuse; import it.eng.spagobi.behaviouralmodel.lov.bo.DependenciesPostProcessingLov; import it.eng.spagobi.behaviouralmodel.lov.bo.ILovDetail; import it.eng.spagobi.behaviouralmodel.lov.bo.LovResultHandler; import it.eng.spagobi.commons.constants.SpagoBIConstants; import it.eng.spagobi.commons.dao.DAOFactory; import it.eng.spagobi.commons.serializer.JSONStoreFeedTransformer; import it.eng.spagobi.commons.services.AbstractSpagoBIAction; import it.eng.spagobi.commons.services.DelegatedBasicListService; import it.eng.spagobi.utilities.assertion.Assert; import it.eng.spagobi.utilities.cache.CacheInterface; import it.eng.spagobi.utilities.exceptions.SpagoBIServiceException; import it.eng.spagobi.utilities.service.JSONSuccess; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * @author Andrea Gioia ([email protected]) */ public class GetParameterValuesForExecutionAction extends AbstractSpagoBIAction { public static final String SERVICE_NAME = "GET_PARAMETERS_FOR_EXECUTION_SERVICE"; // request parameters public static String PARAMETER_ID = "PARAMETER_ID"; public static String SELECTED_PARAMETER_VALUES = "PARAMETERS"; public static String FILTERS = "FILTERS"; public static String NODE_ID_SEPARATOR = "___SEPA__"; public static String MODE = "MODE"; public static String NODE = "node"; public static String MODE_SIMPLE = "simple"; public static String MODE_COMPLETE = "complete"; public static String START = "start"; public static String LIMIT = "limit"; // in massive export case public static String OBJ_PARAMETER_IDS = "OBJ_PARAMETER_IDS"; public static String CONTEST = "CONTEST"; // used to check if mssive export case; cannot use MODALITY because already in use public static String MASSIVE_EXPORT = "massiveExport"; // logger component private static Logger logger = 
Logger.getLogger(GetParameterValuesForExecutionAction.class); public void doService() { String biparameterId; JSONObject selectedParameterValuesJSON; JSONObject filtersJSON = null; Map selectedParameterValues; String mode; JSONObject valuesJSON; String contest; BIObjectParameter biObjectParameter; ExecutionInstance executionInstance; String valueColumn; String descriptionColumn; List rows; List<ObjParuse> biParameterExecDependencies; ILovDetail lovProvDet; CacheInterface cache; List objParameterIds; int treeLovNodeLevel = 0; String treeLovNodeValue = null; logger.debug("IN"); try { biparameterId = getAttributeAsString( PARAMETER_ID ); selectedParameterValuesJSON = getAttributeAsJSONObject( SELECTED_PARAMETER_VALUES ); if(this.requestContainsAttribute( FILTERS ) ) { filtersJSON = getAttributeAsJSONObject( FILTERS ); } mode = getAttributeAsString( MODE ); try { treeLovNodeValue = getAttributeAsString(NODE); if(treeLovNodeValue.contains("lovroot")){ treeLovNodeValue = "lovroot"; treeLovNodeLevel = 0; }else{ String[] splittedNode = treeLovNodeValue.split(NODE_ID_SEPARATOR); treeLovNodeValue = splittedNode[0]; treeLovNodeLevel = new Integer(splittedNode[1]); } } catch (NullPointerException e) { logger.debug("there is no tree attribute for the Parameter [" + PARAMETER_ID + "]"); } objParameterIds = getAttributeAsList( OBJ_PARAMETER_IDS ); contest = getAttributeAsString( CONTEST ); logger.debug("Parameter [" + PARAMETER_ID + "] is equals to [" + biparameterId + "]"); logger.debug("Parameter [" + MODE + "] is equals to [" + mode + "]"); logger.debug("Parameter [" + CONTEST + "] is equals to [" + contest + "]"); if(mode == null) { mode = MODE_SIMPLE; } Assert.assertNotNull(getContext(), "Parameter [" + PARAMETER_ID + "] cannot be null" ); Assert.assertNotNull(getContext(), "Execution context cannot be null" ); Assert.assertNotNull(getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName() ), "Execution instance cannot be null"); boolean isAMap = getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName()); executionInstance= null; if(!isAMap){ executionInstance = getContext().getExecutionInstance( ExecutionInstance.class.getName() ); } else{ Map<Integer, ExecutionInstance> instances = getContext().getExecutionInstancesAsMap( ExecutionInstance.class.getName() ); // I want to get (at least one) of the document the parameter is referring to, // I can reach it via the ObjectParameter passed from ParametersPanel Integer biObjectId = null; Assert.assertNotNull(objParameterIds, "In map case objParameterids list cannot be null" ); if(objParameterIds.size()==0){ throw new SpagoBIServiceException("In map case objParameterids list cannot be empty", SERVICE_NAME); } Integer objParId = Integer.valueOf(objParameterIds.get(0).toString()); try { BIObjectParameter biObjPar = DAOFactory.getBIObjectParameterDAO().loadBiObjParameterById(objParId); biObjectId = biObjPar.getBiObjectID(); } catch (EMFUserError e) { throw new SpagoBIServiceException("Could not recover document", e); } executionInstance = instances.get(biObjectId); } if(selectedParameterValuesJSON!=null){ executionInstance.refreshParametersValues(selectedParameterValuesJSON, false); } BIObject obj = executionInstance.getBIObject(); // START converts JSON object with document's parameters into an hashmap selectedParameterValues = null; if(selectedParameterValuesJSON != null) { try { selectedParameterValues = new HashMap(); Iterator it = selectedParameterValuesJSON.keys(); while(it.hasNext()){ String key = (String)it.next(); Object v 
= selectedParameterValuesJSON.get(key); - if(v instanceof JSONArray) { + if (v == JSONObject.NULL) { + selectedParameterValues.put( key, null ); + } else if(v instanceof JSONArray) { JSONArray a = (JSONArray)v; String[] nv = new String[a.length()]; for(int i = 0; i < a.length(); i++) { if(a.get(i) != null){ nv[i] = a.get(i).toString(); } else{ nv[i] = null; } } selectedParameterValues.put( key, nv ); } else if(v instanceof String) { selectedParameterValues.put( key, (String)v ); } else { Assert.assertUnreachable("Attribute [" + key + "] value [" + v + "] of PARAMETERS is not of type JSONArray nor String. It is of type [" + v.getClass().getName() + "]" ); } } } catch (JSONException e) { throw new SpagoBIServiceException("parameter JSONObject is malformed", e); } } // END converts JSON object with document's parameters into an hashmap // START get the relevant biobject parameter biObjectParameter = null; List parameters = obj.getBiObjectParameters(); for(int i = 0; i < parameters.size(); i++) { BIObjectParameter p = (BIObjectParameter) parameters.get(i); if( biparameterId.equalsIgnoreCase( p.getParameterUrlName() ) ) { biObjectParameter = p; break; } } Assert.assertNotNull(biObjectParameter, "Impossible to find parameter [" + biparameterId + "]" ); // END get the relevant biobject parameter lovProvDet = executionInstance.getLovDetail(biObjectParameter); // START get the lov result String lovResult = null; try { // get the result of the lov IEngUserProfile profile = getUserProfile(); // get from cache, if available LovResultCacheManager executionCacheManager = new LovResultCacheManager(); lovResult = executionCacheManager.getLovResult(profile, lovProvDet, executionInstance.getDependencies(biObjectParameter), executionInstance, true); // get all the rows of the result LovResultHandler lovResultHandler = new LovResultHandler(lovResult); rows = lovResultHandler.getRows(); } catch (Exception e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to get parameter's values", e); } Assert.assertNotNull(lovResult, "Impossible to get parameter's values" ); // END get the lov result // START filtering the list by filtering toolbar try { if(filtersJSON != null) { String valuefilter = (String) filtersJSON.get(SpagoBIConstants.VALUE_FILTER); String columnfilter = (String) filtersJSON.get(SpagoBIConstants.COLUMN_FILTER); String typeFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_FILTER); String typeValueFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_VALUE_FILTER); rows = DelegatedBasicListService.filterList(rows, valuefilter, typeValueFilter, columnfilter, typeFilter); } } catch (JSONException e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to read filter's configuration", e); } // END filtering the list by filtering toolbar // START filtering for correlation (only for DependenciesPostProcessingLov, i.e. 
scripts, java classes and fixed lists) biParameterExecDependencies = executionInstance .getDependencies(biObjectParameter); if (lovProvDet instanceof DependenciesPostProcessingLov && selectedParameterValues != null && biParameterExecDependencies != null && biParameterExecDependencies.size() > 0 && !contest.equals(MASSIVE_EXPORT)) { rows = ((DependenciesPostProcessingLov) lovProvDet) .processDependencies(rows, selectedParameterValues, biParameterExecDependencies); } // END filtering for correlation if(lovProvDet.getLovType()!=null && lovProvDet.getLovType().equals("tree")){ JSONArray valuesJSONArray = getChildrenForTreeLov(lovProvDet, rows, mode, treeLovNodeLevel, treeLovNodeValue); try { writeBackToClient( new JSONSuccess( valuesJSONArray ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } }else{ valuesJSON = buildJSONForLOV(lovProvDet,rows, mode); try { writeBackToClient( new JSONSuccess( valuesJSON ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } } } finally { logger.debug("OUT"); } } private JSONArray getChildrenForTreeLov( ILovDetail lovProvDet, List rows, String mode, int treeLovNodeLevel, String treeLovNodeValue ){ String valueColumn; String descriptionColumn; boolean addNode; String treeLovNodeName = ""; String treeLovParentNodeName = ""; try { if(treeLovNodeValue=="lovroot"){//root node treeLovNodeName = (String) lovProvDet.getTreeLevelsColumns().get(0); treeLovParentNodeName="lovroot"; treeLovNodeLevel = -1; }else if(lovProvDet.getTreeLevelsColumns().size()>treeLovNodeLevel+1){//treeLovNodeLevel-1 because the fake root node is the level 0 treeLovNodeName = (String) lovProvDet.getTreeLevelsColumns().get(treeLovNodeLevel+1); treeLovParentNodeName = (String) lovProvDet.getTreeLevelsColumns().get(treeLovNodeLevel); } Set<JSONObject> valuesDataJSON = new HashSet<JSONObject>(); valueColumn = lovProvDet.getValueColumnName(); descriptionColumn = lovProvDet.getDescriptionColumnName(); for (int q = 0; q < rows.size(); q++) { SourceBean row = (SourceBean) rows.get(q); JSONObject valueJSON =null; addNode=false; List columns = row.getContainedAttributes(); valueJSON = new JSONObject(); boolean notNullNode = false; //if the row does not contain the value atribute we don't add the node for(int i = 0; i < columns.size(); i++) { SourceBeanAttribute attribute = (SourceBeanAttribute)columns.get(i); if((treeLovParentNodeName=="lovroot" ) || (attribute.getKey().equalsIgnoreCase(treeLovParentNodeName) && (attribute.getValue().toString()).equalsIgnoreCase(treeLovNodeValue))){ addNode = true; } //its a leaf so we take the value and description defined in the lov definition if(lovProvDet.getTreeLevelsColumns().size()==treeLovNodeLevel+2){ if(attribute.getKey().equalsIgnoreCase(descriptionColumn)){//its the column of the description valueJSON.put("description", attribute.getValue()); notNullNode = true; } if(attribute.getKey().equalsIgnoreCase(valueColumn)){//its the column of the value valueJSON.put("value", attribute.getValue()); valueJSON.put("id", attribute.getValue()+NODE_ID_SEPARATOR+(treeLovNodeLevel+1)); notNullNode = true; } valueJSON.put("leaf", true); }else if(attribute.getKey().equalsIgnoreCase(treeLovNodeName) ){ valueJSON = new JSONObject(); valueJSON.put("description", attribute.getValue()); valueJSON.put("value", attribute.getValue()); valueJSON.put("id", attribute.getValue()+NODE_ID_SEPARATOR+(treeLovNodeLevel+1)); notNullNode = true; } } 
if(addNode && notNullNode){ valuesDataJSON.add(valueJSON); } } JSONArray valuesDataJSONArray = new JSONArray(); for (Iterator iterator = valuesDataJSON.iterator(); iterator.hasNext();) { JSONObject jsonObject = (JSONObject) iterator.next(); valuesDataJSONArray.put(jsonObject); } return valuesDataJSONArray; } catch (Exception e) { throw new SpagoBIServiceException("Impossible to serialize response", e); } } private JSONObject buildJSONForLOV( ILovDetail lovProvDet, List rows, String mode){ String valueColumn; String descriptionColumn; JSONObject valuesJSON; Integer start; Integer limit; String displayColumn; // START building JSON object to be returned try { JSONArray valuesDataJSON = new JSONArray(); valueColumn = lovProvDet.getValueColumnName(); displayColumn = lovProvDet.getDescriptionColumnName(); descriptionColumn = displayColumn; start = getAttributeAsInteger( START ); limit = getAttributeAsInteger( LIMIT ); logger.debug("Parameter [" + START + "] is equals to [" + start + "]"); logger.debug("Parameter [" + LIMIT + "] is equals to [" + limit + "]"); int lb = (start != null)? start.intValue(): 0; int ub = (limit != null)? lb + limit.intValue(): rows.size() - lb; ub = (ub > rows.size())? rows.size(): ub; for (int q = lb; q < ub; q++) { SourceBean row = (SourceBean) rows.get(q); JSONObject valueJSON = new JSONObject(); if(MODE_COMPLETE.equalsIgnoreCase( mode )) { List columns = row.getContainedAttributes(); for(int i = 0; i < columns.size(); i++) { SourceBeanAttribute attribute = (SourceBeanAttribute)columns.get(i); valueJSON.put(attribute.getKey().toUpperCase(), attribute.getValue()); } } else { String value = (String) row.getAttribute(valueColumn); String description = (String) row.getAttribute(descriptionColumn); valueJSON.put("value", value); valueJSON.put("label", description); valueJSON.put("description", description); } valuesDataJSON.put(valueJSON); } String[] visiblecolumns; if(MODE_COMPLETE.equalsIgnoreCase( mode )) { visiblecolumns = (String[])lovProvDet.getVisibleColumnNames().toArray(new String[0]); for(int j = 0; j< visiblecolumns.length; j++) { visiblecolumns[j] = visiblecolumns[j].toUpperCase(); } } else { valueColumn = "value"; displayColumn = "label"; descriptionColumn = "description"; visiblecolumns = new String[]{"value", "label", "description"}; } valuesJSON = (JSONObject)JSONStoreFeedTransformer.getInstance().transform(valuesDataJSON, valueColumn.toUpperCase(), displayColumn.toUpperCase(), descriptionColumn.toUpperCase(), visiblecolumns, new Integer(rows.size())); return valuesJSON; } catch (Exception e) { throw new SpagoBIServiceException("Impossible to serialize response", e); } // END building JSON object to be returned } }
true
true
public void doService() { String biparameterId; JSONObject selectedParameterValuesJSON; JSONObject filtersJSON = null; Map selectedParameterValues; String mode; JSONObject valuesJSON; String contest; BIObjectParameter biObjectParameter; ExecutionInstance executionInstance; String valueColumn; String descriptionColumn; List rows; List<ObjParuse> biParameterExecDependencies; ILovDetail lovProvDet; CacheInterface cache; List objParameterIds; int treeLovNodeLevel = 0; String treeLovNodeValue = null; logger.debug("IN"); try { biparameterId = getAttributeAsString( PARAMETER_ID ); selectedParameterValuesJSON = getAttributeAsJSONObject( SELECTED_PARAMETER_VALUES ); if(this.requestContainsAttribute( FILTERS ) ) { filtersJSON = getAttributeAsJSONObject( FILTERS ); } mode = getAttributeAsString( MODE ); try { treeLovNodeValue = getAttributeAsString(NODE); if(treeLovNodeValue.contains("lovroot")){ treeLovNodeValue = "lovroot"; treeLovNodeLevel = 0; }else{ String[] splittedNode = treeLovNodeValue.split(NODE_ID_SEPARATOR); treeLovNodeValue = splittedNode[0]; treeLovNodeLevel = new Integer(splittedNode[1]); } } catch (NullPointerException e) { logger.debug("there is no tree attribute for the Parameter [" + PARAMETER_ID + "]"); } objParameterIds = getAttributeAsList( OBJ_PARAMETER_IDS ); contest = getAttributeAsString( CONTEST ); logger.debug("Parameter [" + PARAMETER_ID + "] is equals to [" + biparameterId + "]"); logger.debug("Parameter [" + MODE + "] is equals to [" + mode + "]"); logger.debug("Parameter [" + CONTEST + "] is equals to [" + contest + "]"); if(mode == null) { mode = MODE_SIMPLE; } Assert.assertNotNull(getContext(), "Parameter [" + PARAMETER_ID + "] cannot be null" ); Assert.assertNotNull(getContext(), "Execution context cannot be null" ); Assert.assertNotNull(getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName() ), "Execution instance cannot be null"); boolean isAMap = getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName()); executionInstance= null; if(!isAMap){ executionInstance = getContext().getExecutionInstance( ExecutionInstance.class.getName() ); } else{ Map<Integer, ExecutionInstance> instances = getContext().getExecutionInstancesAsMap( ExecutionInstance.class.getName() ); // I want to get (at least one) of the document the parameter is referring to, // I can reach it via the ObjectParameter passed from ParametersPanel Integer biObjectId = null; Assert.assertNotNull(objParameterIds, "In map case objParameterids list cannot be null" ); if(objParameterIds.size()==0){ throw new SpagoBIServiceException("In map case objParameterids list cannot be empty", SERVICE_NAME); } Integer objParId = Integer.valueOf(objParameterIds.get(0).toString()); try { BIObjectParameter biObjPar = DAOFactory.getBIObjectParameterDAO().loadBiObjParameterById(objParId); biObjectId = biObjPar.getBiObjectID(); } catch (EMFUserError e) { throw new SpagoBIServiceException("Could not recover document", e); } executionInstance = instances.get(biObjectId); } if(selectedParameterValuesJSON!=null){ executionInstance.refreshParametersValues(selectedParameterValuesJSON, false); } BIObject obj = executionInstance.getBIObject(); // START converts JSON object with document's parameters into an hashmap selectedParameterValues = null; if(selectedParameterValuesJSON != null) { try { selectedParameterValues = new HashMap(); Iterator it = selectedParameterValuesJSON.keys(); while(it.hasNext()){ String key = (String)it.next(); Object v = selectedParameterValuesJSON.get(key); if(v instanceof 
JSONArray) { JSONArray a = (JSONArray)v; String[] nv = new String[a.length()]; for(int i = 0; i < a.length(); i++) { if(a.get(i) != null){ nv[i] = a.get(i).toString(); } else{ nv[i] = null; } } selectedParameterValues.put( key, nv ); } else if(v instanceof String) { selectedParameterValues.put( key, (String)v ); } else { Assert.assertUnreachable("Attribute [" + key + "] value [" + v + "] of PARAMETERS is not of type JSONArray nor String. It is of type [" + v.getClass().getName() + "]" ); } } } catch (JSONException e) { throw new SpagoBIServiceException("parameter JSONObject is malformed", e); } } // END converts JSON object with document's parameters into an hashmap // START get the relevant biobject parameter biObjectParameter = null; List parameters = obj.getBiObjectParameters(); for(int i = 0; i < parameters.size(); i++) { BIObjectParameter p = (BIObjectParameter) parameters.get(i); if( biparameterId.equalsIgnoreCase( p.getParameterUrlName() ) ) { biObjectParameter = p; break; } } Assert.assertNotNull(biObjectParameter, "Impossible to find parameter [" + biparameterId + "]" ); // END get the relevant biobject parameter lovProvDet = executionInstance.getLovDetail(biObjectParameter); // START get the lov result String lovResult = null; try { // get the result of the lov IEngUserProfile profile = getUserProfile(); // get from cache, if available LovResultCacheManager executionCacheManager = new LovResultCacheManager(); lovResult = executionCacheManager.getLovResult(profile, lovProvDet, executionInstance.getDependencies(biObjectParameter), executionInstance, true); // get all the rows of the result LovResultHandler lovResultHandler = new LovResultHandler(lovResult); rows = lovResultHandler.getRows(); } catch (Exception e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to get parameter's values", e); } Assert.assertNotNull(lovResult, "Impossible to get parameter's values" ); // END get the lov result // START filtering the list by filtering toolbar try { if(filtersJSON != null) { String valuefilter = (String) filtersJSON.get(SpagoBIConstants.VALUE_FILTER); String columnfilter = (String) filtersJSON.get(SpagoBIConstants.COLUMN_FILTER); String typeFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_FILTER); String typeValueFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_VALUE_FILTER); rows = DelegatedBasicListService.filterList(rows, valuefilter, typeValueFilter, columnfilter, typeFilter); } } catch (JSONException e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to read filter's configuration", e); } // END filtering the list by filtering toolbar // START filtering for correlation (only for DependenciesPostProcessingLov, i.e. 
scripts, java classes and fixed lists) biParameterExecDependencies = executionInstance .getDependencies(biObjectParameter); if (lovProvDet instanceof DependenciesPostProcessingLov && selectedParameterValues != null && biParameterExecDependencies != null && biParameterExecDependencies.size() > 0 && !contest.equals(MASSIVE_EXPORT)) { rows = ((DependenciesPostProcessingLov) lovProvDet) .processDependencies(rows, selectedParameterValues, biParameterExecDependencies); } // END filtering for correlation if(lovProvDet.getLovType()!=null && lovProvDet.getLovType().equals("tree")){ JSONArray valuesJSONArray = getChildrenForTreeLov(lovProvDet, rows, mode, treeLovNodeLevel, treeLovNodeValue); try { writeBackToClient( new JSONSuccess( valuesJSONArray ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } }else{ valuesJSON = buildJSONForLOV(lovProvDet,rows, mode); try { writeBackToClient( new JSONSuccess( valuesJSON ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } } } finally { logger.debug("OUT"); } }
public void doService() { String biparameterId; JSONObject selectedParameterValuesJSON; JSONObject filtersJSON = null; Map selectedParameterValues; String mode; JSONObject valuesJSON; String contest; BIObjectParameter biObjectParameter; ExecutionInstance executionInstance; String valueColumn; String descriptionColumn; List rows; List<ObjParuse> biParameterExecDependencies; ILovDetail lovProvDet; CacheInterface cache; List objParameterIds; int treeLovNodeLevel = 0; String treeLovNodeValue = null; logger.debug("IN"); try { biparameterId = getAttributeAsString( PARAMETER_ID ); selectedParameterValuesJSON = getAttributeAsJSONObject( SELECTED_PARAMETER_VALUES ); if(this.requestContainsAttribute( FILTERS ) ) { filtersJSON = getAttributeAsJSONObject( FILTERS ); } mode = getAttributeAsString( MODE ); try { treeLovNodeValue = getAttributeAsString(NODE); if(treeLovNodeValue.contains("lovroot")){ treeLovNodeValue = "lovroot"; treeLovNodeLevel = 0; }else{ String[] splittedNode = treeLovNodeValue.split(NODE_ID_SEPARATOR); treeLovNodeValue = splittedNode[0]; treeLovNodeLevel = new Integer(splittedNode[1]); } } catch (NullPointerException e) { logger.debug("there is no tree attribute for the Parameter [" + PARAMETER_ID + "]"); } objParameterIds = getAttributeAsList( OBJ_PARAMETER_IDS ); contest = getAttributeAsString( CONTEST ); logger.debug("Parameter [" + PARAMETER_ID + "] is equals to [" + biparameterId + "]"); logger.debug("Parameter [" + MODE + "] is equals to [" + mode + "]"); logger.debug("Parameter [" + CONTEST + "] is equals to [" + contest + "]"); if(mode == null) { mode = MODE_SIMPLE; } Assert.assertNotNull(getContext(), "Parameter [" + PARAMETER_ID + "] cannot be null" ); Assert.assertNotNull(getContext(), "Execution context cannot be null" ); Assert.assertNotNull(getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName() ), "Execution instance cannot be null"); boolean isAMap = getContext().isExecutionInstanceAMap( ExecutionInstance.class.getName()); executionInstance= null; if(!isAMap){ executionInstance = getContext().getExecutionInstance( ExecutionInstance.class.getName() ); } else{ Map<Integer, ExecutionInstance> instances = getContext().getExecutionInstancesAsMap( ExecutionInstance.class.getName() ); // I want to get (at least one) of the document the parameter is referring to, // I can reach it via the ObjectParameter passed from ParametersPanel Integer biObjectId = null; Assert.assertNotNull(objParameterIds, "In map case objParameterids list cannot be null" ); if(objParameterIds.size()==0){ throw new SpagoBIServiceException("In map case objParameterids list cannot be empty", SERVICE_NAME); } Integer objParId = Integer.valueOf(objParameterIds.get(0).toString()); try { BIObjectParameter biObjPar = DAOFactory.getBIObjectParameterDAO().loadBiObjParameterById(objParId); biObjectId = biObjPar.getBiObjectID(); } catch (EMFUserError e) { throw new SpagoBIServiceException("Could not recover document", e); } executionInstance = instances.get(biObjectId); } if(selectedParameterValuesJSON!=null){ executionInstance.refreshParametersValues(selectedParameterValuesJSON, false); } BIObject obj = executionInstance.getBIObject(); // START converts JSON object with document's parameters into an hashmap selectedParameterValues = null; if(selectedParameterValuesJSON != null) { try { selectedParameterValues = new HashMap(); Iterator it = selectedParameterValuesJSON.keys(); while(it.hasNext()){ String key = (String)it.next(); Object v = selectedParameterValuesJSON.get(key); if (v == 
JSONObject.NULL) { selectedParameterValues.put( key, null ); } else if(v instanceof JSONArray) { JSONArray a = (JSONArray)v; String[] nv = new String[a.length()]; for(int i = 0; i < a.length(); i++) { if(a.get(i) != null){ nv[i] = a.get(i).toString(); } else{ nv[i] = null; } } selectedParameterValues.put( key, nv ); } else if(v instanceof String) { selectedParameterValues.put( key, (String)v ); } else { Assert.assertUnreachable("Attribute [" + key + "] value [" + v + "] of PARAMETERS is not of type JSONArray nor String. It is of type [" + v.getClass().getName() + "]" ); } } } catch (JSONException e) { throw new SpagoBIServiceException("parameter JSONObject is malformed", e); } } // END converts JSON object with document's parameters into an hashmap // START get the relevant biobject parameter biObjectParameter = null; List parameters = obj.getBiObjectParameters(); for(int i = 0; i < parameters.size(); i++) { BIObjectParameter p = (BIObjectParameter) parameters.get(i); if( biparameterId.equalsIgnoreCase( p.getParameterUrlName() ) ) { biObjectParameter = p; break; } } Assert.assertNotNull(biObjectParameter, "Impossible to find parameter [" + biparameterId + "]" ); // END get the relevant biobject parameter lovProvDet = executionInstance.getLovDetail(biObjectParameter); // START get the lov result String lovResult = null; try { // get the result of the lov IEngUserProfile profile = getUserProfile(); // get from cache, if available LovResultCacheManager executionCacheManager = new LovResultCacheManager(); lovResult = executionCacheManager.getLovResult(profile, lovProvDet, executionInstance.getDependencies(biObjectParameter), executionInstance, true); // get all the rows of the result LovResultHandler lovResultHandler = new LovResultHandler(lovResult); rows = lovResultHandler.getRows(); } catch (Exception e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to get parameter's values", e); } Assert.assertNotNull(lovResult, "Impossible to get parameter's values" ); // END get the lov result // START filtering the list by filtering toolbar try { if(filtersJSON != null) { String valuefilter = (String) filtersJSON.get(SpagoBIConstants.VALUE_FILTER); String columnfilter = (String) filtersJSON.get(SpagoBIConstants.COLUMN_FILTER); String typeFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_FILTER); String typeValueFilter = (String) filtersJSON.get(SpagoBIConstants.TYPE_VALUE_FILTER); rows = DelegatedBasicListService.filterList(rows, valuefilter, typeValueFilter, columnfilter, typeFilter); } } catch (JSONException e) { throw new SpagoBIServiceException(SERVICE_NAME, "Impossible to read filter's configuration", e); } // END filtering the list by filtering toolbar // START filtering for correlation (only for DependenciesPostProcessingLov, i.e. 
scripts, java classes and fixed lists) biParameterExecDependencies = executionInstance .getDependencies(biObjectParameter); if (lovProvDet instanceof DependenciesPostProcessingLov && selectedParameterValues != null && biParameterExecDependencies != null && biParameterExecDependencies.size() > 0 && !contest.equals(MASSIVE_EXPORT)) { rows = ((DependenciesPostProcessingLov) lovProvDet) .processDependencies(rows, selectedParameterValues, biParameterExecDependencies); } // END filtering for correlation if(lovProvDet.getLovType()!=null && lovProvDet.getLovType().equals("tree")){ JSONArray valuesJSONArray = getChildrenForTreeLov(lovProvDet, rows, mode, treeLovNodeLevel, treeLovNodeValue); try { writeBackToClient( new JSONSuccess( valuesJSONArray ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } }else{ valuesJSON = buildJSONForLOV(lovProvDet,rows, mode); try { writeBackToClient( new JSONSuccess( valuesJSON ) ); } catch (IOException e) { throw new SpagoBIServiceException("Impossible to write back the responce to the client", e); } } } finally { logger.debug("OUT"); } }
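The substantive change in this record is the new first branch of the if-chain: org.json represents a JSON null not as a Java null but as the sentinel object JSONObject.NULL, which is neither a JSONArray nor a String, so before the fix such values fell through to Assert.assertUnreachable. A minimal sketch of the sentinel behaviour, assuming only the org.json library (key names are illustrative):

import org.json.JSONException;
import org.json.JSONObject;

public class JsonNullDemo {
    public static void main(String[] args) throws JSONException {
        JSONObject params = new JSONObject("{\"country\": null, \"year\": \"2012\"}");
        Object v = params.get("country");
        // A JSON null comes back as the JSONObject.NULL sentinel, so neither
        // (v == null) nor (v instanceof String) detects it.
        if (v == JSONObject.NULL) {
            System.out.println("country is JSON null");
        } else if (v instanceof String) {
            System.out.println("country = " + v);
        }
    }
}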
diff --git a/doxia-core/src/test/java/org/apache/maven/doxia/module/AbstractIdentityTest.java b/doxia-core/src/test/java/org/apache/maven/doxia/module/AbstractIdentityTest.java index 2369ab42..b160b01f 100644 --- a/doxia-core/src/test/java/org/apache/maven/doxia/module/AbstractIdentityTest.java +++ b/doxia-core/src/test/java/org/apache/maven/doxia/module/AbstractIdentityTest.java @@ -1,166 +1,167 @@ package org.apache.maven.doxia.module; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; import java.io.Writer; import org.apache.maven.doxia.AbstractModuleTest; import org.apache.maven.doxia.logging.PlexusLoggerWrapper; import org.apache.maven.doxia.parser.ParseException; import org.apache.maven.doxia.parser.Parser; import org.apache.maven.doxia.sink.Sink; import org.apache.maven.doxia.sink.SinkTestDocument; import org.apache.maven.doxia.sink.TextSink; import org.codehaus.plexus.util.IOUtil; /** * If a module provides both Parser and Sink, this class * can be used to check that chaining them together * results in the identity transformation, ie the model is still the same * after being piped through a Parser and the corresponding Sink. * * @version $Id$ */ public abstract class AbstractIdentityTest extends AbstractModuleTest { /** Expected Identity String */ private String expected; /** * Set to true if the identity transformation should actually be asserted, * by default only the expected and actual results are written to a file, but not compared. */ private boolean assertIdentity; /** * Create a new instance of the parser to test. * * @return the parser to test. */ protected abstract Parser createParser(); /** * Return a new instance of the sink that is being tested. * * @param writer The writer for the sink. * @return A new sink. */ protected abstract Sink createSink( Writer writer ); /** * Pipes a full model generated by {@link SinkTestDocument} through * a Sink (generated by {@link #createSink(Writer)}) and a Parser * (generated by {@link #createParser()}) and checks if the result * is the same as the original model. By default, this doesn't actually * assert anything (use {@link #assertIdentity(boolean)} in the setUp() * of an implementation to switch on the test), but the two generated * output files, expected.txt and actual.txt, can be compared for differences. * * @throws IOException if there's a problem reading/writing a test file. * @throws ParseException if a model cannot be parsed. 
*/ public void testIdentity() throws IOException, ParseException { // generate the expected model StringWriter writer = new StringWriter(); Sink sink = new TextSink( writer ); SinkTestDocument.generate( sink ); sink.close(); expected = writer.toString(); // write to file for comparison Writer fileWriter = getTestWriter( "expected" ); fileWriter.write( expected ); IOUtil.close( fileWriter ); // generate the actual model writer = new StringWriter(); sink = createSink( writer ); SinkTestDocument.generate( sink ); sink.close(); StringReader reader = new StringReader( writer.toString() ); writer = new StringWriter(); sink = new TextSink( writer ); Parser parser = createParser(); parser.enableLogging( new PlexusLoggerWrapper( getContainer().getLogger() ) ); parser.parse( reader, sink ); String actual = writer.toString(); // write to file for comparison fileWriter = getTestWriter( "actual" ); fileWriter.write( actual ); IOUtil.close( fileWriter ); // Disabled by default, it's unlikely that all our modules // will pass this test any time soon, but the generated // output files can still be compared. if ( assertIdentity ) { // TODO: make this work for at least apt and xdoc modules? - assertEquals( "Identity test failed!", getExpected(), actual ); + assertEquals( "Identity test failed! See results in " + getTestWriterFile( "actual" ).getParent(), + getExpected(), actual ); } } /** {@inheritDoc} */ protected String getOutputDir() { return "identity/"; } /** * The output files generated by this class are text files, * independent of the kind of module being tested. * * @return The String "txt". */ protected String outputExtension() { return "txt"; } /** * Set to true if the identity transformation should actually be asserted, * by default only the expected and actual results are written to a file, but not compared. * This should be called during setUp(). * * @param doAssert True to actually execute the test. */ protected void assertIdentity( boolean doAssert ) { this.assertIdentity = doAssert; } /** * @return the expected identity string */ protected String getExpected() { return expected; } }
true
true
public void testIdentity() throws IOException, ParseException { // generate the expected model StringWriter writer = new StringWriter(); Sink sink = new TextSink( writer ); SinkTestDocument.generate( sink ); sink.close(); expected = writer.toString(); // write to file for comparison Writer fileWriter = getTestWriter( "expected" ); fileWriter.write( expected ); IOUtil.close( fileWriter ); // generate the actual model writer = new StringWriter(); sink = createSink( writer ); SinkTestDocument.generate( sink ); sink.close(); StringReader reader = new StringReader( writer.toString() ); writer = new StringWriter(); sink = new TextSink( writer ); Parser parser = createParser(); parser.enableLogging( new PlexusLoggerWrapper( getContainer().getLogger() ) ); parser.parse( reader, sink ); String actual = writer.toString(); // write to file for comparison fileWriter = getTestWriter( "actual" ); fileWriter.write( actual ); IOUtil.close( fileWriter ); // Disabled by default, it's unlikely that all our modules // will pass this test any time soon, but the generated // output files can still be compared. if ( assertIdentity ) { // TODO: make this work for at least apt and xdoc modules? assertEquals( "Identity test failed!", getExpected(), actual ); } }
public void testIdentity() throws IOException, ParseException { // generate the expected model StringWriter writer = new StringWriter(); Sink sink = new TextSink( writer ); SinkTestDocument.generate( sink ); sink.close(); expected = writer.toString(); // write to file for comparison Writer fileWriter = getTestWriter( "expected" ); fileWriter.write( expected ); IOUtil.close( fileWriter ); // generate the actual model writer = new StringWriter(); sink = createSink( writer ); SinkTestDocument.generate( sink ); sink.close(); StringReader reader = new StringReader( writer.toString() ); writer = new StringWriter(); sink = new TextSink( writer ); Parser parser = createParser(); parser.enableLogging( new PlexusLoggerWrapper( getContainer().getLogger() ) ); parser.parse( reader, sink ); String actual = writer.toString(); // write to file for comparison fileWriter = getTestWriter( "actual" ); fileWriter.write( actual ); IOUtil.close( fileWriter ); // Disabled by default, it's unlikely that all our modules // will pass this test any time soon, but the generated // output files can still be compared. if ( assertIdentity ) { // TODO: make this work for at least apt and xdoc modules? assertEquals( "Identity test failed! See results in " + getTestWriterFile( "actual" ).getParent(), getExpected(), actual ); } }
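For context on how this harness is used: a module opts in by subclassing AbstractIdentityTest, supplying its Parser and Sink, and calling assertIdentity(true) from setUp() so that the round trip is actually asserted; otherwise only expected.txt and actual.txt are written for manual comparison. A hypothetical subclass, where XhtmlParser and XhtmlSink stand in for any Doxia module that provides both sides (their imports are omitted since the names are placeholders):

import java.io.Writer;

import org.apache.maven.doxia.parser.Parser;
import org.apache.maven.doxia.sink.Sink;

public class XhtmlIdentityTest
    extends AbstractIdentityTest
{
    protected void setUp()
        throws Exception
    {
        super.setUp();
        assertIdentity( true ); // actually compare the expected and actual output
    }

    protected Parser createParser()
    {
        return new XhtmlParser(); // placeholder: the module's parser
    }

    protected Sink createSink( Writer writer )
    {
        return new XhtmlSink( writer ); // placeholder: the module's sink
    }
}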
diff --git a/plugins/net.sf.orcc.backends/src/net/sf/orcc/backends/cpp/SerDesAdder.java b/plugins/net.sf.orcc.backends/src/net/sf/orcc/backends/cpp/SerDesAdder.java index 87a5b9cab..e06c61d50 100644 --- a/plugins/net.sf.orcc.backends/src/net/sf/orcc/backends/cpp/SerDesAdder.java +++ b/plugins/net.sf.orcc.backends/src/net/sf/orcc/backends/cpp/SerDesAdder.java @@ -1,303 +1,303 @@ /* * Copyright (c) 2010, Ecole Polytechnique Fédérale de Lausanne * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the Ecole Polytechnique Fédérale de Lausanne * nor the names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. */ package net.sf.orcc.backends.cpp; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import net.sf.orcc.OrccException; import net.sf.orcc.ir.Port; import net.sf.orcc.ir.expr.StringExpr; import net.sf.orcc.network.Broadcast; import net.sf.orcc.network.Connection; import net.sf.orcc.network.Instance; import net.sf.orcc.network.Network; import net.sf.orcc.network.SerDes; import net.sf.orcc.network.Vertex; import net.sf.orcc.network.attributes.IAttribute; import net.sf.orcc.network.attributes.IValueAttribute; import net.sf.orcc.util.OrderedMap; import org.jgrapht.DirectedGraph; /** * This class defines a transformation that replace input and output ports of * the network with a (unique) wrapper instance and appropriate connections. * This transformation is only used in the context of the codesign. 
* * @author Ghislain Roquier * */ public class SerDesAdder { private DirectedGraph<Vertex, Connection> graph; private Set<Connection> toBeRemoved = new HashSet<Connection>(); private Map<String, Vertex> serdesMap = new HashMap<String, Vertex>(); private void createIncomingConnection(Connection connection, Vertex vertex, Vertex vertexBCast) { // creates new input port of broadcast Port bcastInput = new Port(connection.getTarget()); bcastInput.setName("input"); // creates a connection between the vertex and the broadcast Map<String, IAttribute> attributes = connection.getAttributes(); Port srcPort = connection.getSource(); Connection incoming = new Connection(srcPort, bcastInput, attributes); graph.addEdge(vertex, vertexBCast, incoming); } /** * Creates a connection between the broadcast and the target for each * outgoing connection of vertex. * * @param vertexBCast * @param outList */ private void createOutgoingConnections(Vertex vertexBCast, List<Connection> outList) { int i = 0; for (Connection connection : outList) { // new connection Vertex target = graph.getEdgeTarget(connection); Port srcPort = connection.getSource(); Port outputPort = new Port(srcPort.getLocation(), srcPort.getType(), "output_" + i); i++; Map<String, IAttribute> attributes = connection.getAttributes(); Connection connBcastTarget = new Connection(outputPort, connection.getTarget(), attributes); graph.addEdge(vertexBCast, target, connBcastTarget); // setting source to null so we don't examine it again connection.setSource(null); // add this connection to the set of connections that are to be // removed toBeRemoved.add(connection); } } /** * Examine the outgoing connections of vertex. * * @param vertex * a vertex * @param connections * the outgoing connections of vertex * @param outMap * a map from each output port P(i) of vertex to a list of * outgoing connections from P(i) * @throws OrccException */ private void examineConnections(Vertex vertex, Set<Connection> connections, Map<Port, List<Connection>> outMap) throws OrccException { Instance instance = vertex.getInstance(); for (Connection connection : connections) { Port srcPort = connection.getSource(); if (srcPort != null) { List<Connection> outList = outMap.get(srcPort); int numOutput = outList.size(); if (numOutput > 1) { // add broadcast vertex Broadcast bcast = new Broadcast(numOutput, srcPort.getType()); String name = "broadcast_" + instance.getId() + "_" + srcPort.getName(); Instance newInst = new Instance(name, bcast); Vertex vertexBCast = new Vertex(newInst); graph.addVertex(vertexBCast); // add connections createIncomingConnection(connection, vertex, vertexBCast); createOutgoingConnections(vertexBCast, outList); } } } } private void examineVertex(Vertex vertex) throws OrccException { // make a copy of the existing outgoing connections of vertex because // the set returned is modified when new edges are added Set<Connection> connections = new HashSet<Connection>( graph.outgoingEdgesOf(vertex)); Map<Port, List<Connection>> outMap = new HashMap<Port, List<Connection>>(); for (Connection connection : connections) { Port src = connection.getSource(); List<Connection> outList = outMap.get(src); if (outList == null) { outList = new ArrayList<Connection>(); outMap.put(src, outList); } outList.add(connection); } examineConnections(vertex, connections, outMap); } public void transform(Network network) throws OrccException { graph = network.getGraph(); OrderedMap<String, Port> inputs = network.getInputs(); OrderedMap<String, Port> outputs = network.getOutputs(); if 
(inputs.getLength() > 0 || outputs.getLength() > 0) { // Vertex serdes = new Vertex(new Instance("SerDes", new SerDes( // inputs.getLength(), outputs.getLength()))); for (Connection conn : graph.edgeSet()) { if (graph.getEdgeSource(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { - Vertex v = serdesMap.get(attr); + Vertex v = serdesMap.get(attrName); SerDes serdes = v.getInstance().getWrapper(); int out = serdes.getNumOutputs(); serdes.setNumOutputs(out++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(0, 1))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } if (graph.getEdgeTarget(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { Vertex v = serdesMap.get(attrName); SerDes serdes = v.getInstance().getWrapper(); int in = serdes.getNumInputs(); serdes.setNumOutputs(in++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(1, 0))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } } Set<Vertex> vertexToRemove = new HashSet<Vertex>(); for (Vertex vertex : graph.vertexSet()) { if (vertex.isPort()) { Port port = vertex.getPort(); if (outputs.contains(port.getName())) { Set<Connection> conns = graph.incomingEdgesOf(vertex); // FIXME: there should be only one connection since // fan-in is not allowed for (Connection connection : conns) { Port srcPort = connection.getSource(); srcPort.setType(port.getType()); Port tgtPort = new Port(port); Connection incoming = new Connection(srcPort, tgtPort, connection.getAttributes()); Vertex vSrc = graph.getEdgeSource(connection); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(vSrc, serdesMap.get(attrName), incoming); vertexToRemove.add(vertex); outputs.remove(port.getName()); } } else { Iterator<Connection> it = graph.outgoingEdgesOf(vertex) .iterator(); Connection connection = it.next(); Port srcPort = new Port(port); Port tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); Vertex vTgt = graph.getEdgeTarget(connection); Connection outgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, outgoing); vertexToRemove.add(vertex); inputs.remove(port.getName()); while (it.hasNext()) { connection = it.next(); tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); vTgt = graph.getEdgeTarget(connection); Connection newOutgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); attr = connection.getAttribute("busRef"); valAttr = (IValueAttribute) attr; attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, newOutgoing); } } } } for (Vertex serdes : serdesMap.values()) { examineVertex(serdes); } graph.removeAllVertices(vertexToRemove); graph.removeAllEdges(toBeRemoved); } } }
true
true
public void transform(Network network) throws OrccException { graph = network.getGraph(); OrderedMap<String, Port> inputs = network.getInputs(); OrderedMap<String, Port> outputs = network.getOutputs(); if (inputs.getLength() > 0 || outputs.getLength() > 0) { // Vertex serdes = new Vertex(new Instance("SerDes", new SerDes( // inputs.getLength(), outputs.getLength()))); for (Connection conn : graph.edgeSet()) { if (graph.getEdgeSource(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { Vertex v = serdesMap.get(attr); SerDes serdes = v.getInstance().getWrapper(); int out = serdes.getNumOutputs(); serdes.setNumOutputs(out++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(0, 1))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } if (graph.getEdgeTarget(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { Vertex v = serdesMap.get(attrName); SerDes serdes = v.getInstance().getWrapper(); int in = serdes.getNumInputs(); serdes.setNumOutputs(in++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(1, 0))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } } Set<Vertex> vertexToRemove = new HashSet<Vertex>(); for (Vertex vertex : graph.vertexSet()) { if (vertex.isPort()) { Port port = vertex.getPort(); if (outputs.contains(port.getName())) { Set<Connection> conns = graph.incomingEdgesOf(vertex); // FIXME: there should be only one connection since // fan-in is not allowed for (Connection connection : conns) { Port srcPort = connection.getSource(); srcPort.setType(port.getType()); Port tgtPort = new Port(port); Connection incoming = new Connection(srcPort, tgtPort, connection.getAttributes()); Vertex vSrc = graph.getEdgeSource(connection); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(vSrc, serdesMap.get(attrName), incoming); vertexToRemove.add(vertex); outputs.remove(port.getName()); } } else { Iterator<Connection> it = graph.outgoingEdgesOf(vertex) .iterator(); Connection connection = it.next(); Port srcPort = new Port(port); Port tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); Vertex vTgt = graph.getEdgeTarget(connection); Connection outgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, outgoing); vertexToRemove.add(vertex); inputs.remove(port.getName()); while (it.hasNext()) { connection = it.next(); tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); vTgt = graph.getEdgeTarget(connection); Connection newOutgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); attr = connection.getAttribute("busRef"); valAttr = (IValueAttribute) attr; attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, newOutgoing); } } } } for (Vertex serdes : serdesMap.values()) { examineVertex(serdes); } graph.removeAllVertices(vertexToRemove); 
graph.removeAllEdges(toBeRemoved); } }
public void transform(Network network) throws OrccException { graph = network.getGraph(); OrderedMap<String, Port> inputs = network.getInputs(); OrderedMap<String, Port> outputs = network.getOutputs(); if (inputs.getLength() > 0 || outputs.getLength() > 0) { // Vertex serdes = new Vertex(new Instance("SerDes", new SerDes( // inputs.getLength(), outputs.getLength()))); for (Connection conn : graph.edgeSet()) { if (graph.getEdgeSource(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { Vertex v = serdesMap.get(attrName); SerDes serdes = v.getInstance().getWrapper(); int out = serdes.getNumOutputs(); serdes.setNumOutputs(out++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(0, 1))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } if (graph.getEdgeTarget(conn).isPort()) { IAttribute attr = conn.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); if (serdesMap.containsKey(attrName)) { Vertex v = serdesMap.get(attrName); SerDes serdes = v.getInstance().getWrapper(); int in = serdes.getNumInputs(); serdes.setNumOutputs(in++); } else { Vertex serdes = new Vertex(new Instance("SerDes_" + attrName, new SerDes(1, 0))); serdesMap.put(attrName, serdes); graph.addVertex(serdes); } } } Set<Vertex> vertexToRemove = new HashSet<Vertex>(); for (Vertex vertex : graph.vertexSet()) { if (vertex.isPort()) { Port port = vertex.getPort(); if (outputs.contains(port.getName())) { Set<Connection> conns = graph.incomingEdgesOf(vertex); // FIXME: there should be only one connection since // fan-in is not allowed for (Connection connection : conns) { Port srcPort = connection.getSource(); srcPort.setType(port.getType()); Port tgtPort = new Port(port); Connection incoming = new Connection(srcPort, tgtPort, connection.getAttributes()); Vertex vSrc = graph.getEdgeSource(connection); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(vSrc, serdesMap.get(attrName), incoming); vertexToRemove.add(vertex); outputs.remove(port.getName()); } } else { Iterator<Connection> it = graph.outgoingEdgesOf(vertex) .iterator(); Connection connection = it.next(); Port srcPort = new Port(port); Port tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); Vertex vTgt = graph.getEdgeTarget(connection); Connection outgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); IAttribute attr = connection.getAttribute("busRef"); IValueAttribute valAttr = (IValueAttribute) attr; String attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, outgoing); vertexToRemove.add(vertex); inputs.remove(port.getName()); while (it.hasNext()) { connection = it.next(); tgtPort = connection.getTarget(); tgtPort.setType(port.getType()); vTgt = graph.getEdgeTarget(connection); Connection newOutgoing = new Connection(srcPort, tgtPort, connection.getAttributes()); attr = connection.getAttribute("busRef"); valAttr = (IValueAttribute) attr; attrName = ((StringExpr) valAttr.getValue()) .getValue(); graph.addEdge(serdesMap.get(attrName), vTgt, newOutgoing); } } } } for (Vertex serdes : serdesMap.values()) { examineVertex(serdes); } graph.removeAllVertices(vertexToRemove); 
graph.removeAllEdges(toBeRemoved); } }
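The one-line fix in this record replaces serdesMap.get(attr) with serdesMap.get(attrName): the map is keyed by String, and since Map.get() accepts any Object, looking it up with the IAttribute reference compiled cleanly but always returned null. (The neighbouring setNumOutputs(out++) and setNumOutputs(in++) calls pass the value from before the increment, so the counters never advance, and the input branch even calls the outputs setter; the commit leaves those lines untouched.) A minimal sketch of the lookup pitfall, with illustrative names:

import java.util.HashMap;
import java.util.Map;

public class MapKeyDemo {
    public static void main(String[] args) {
        Map<String, Integer> serdesByBusRef = new HashMap<String, Integer>();
        Object attr = new Object(); // stands in for the IAttribute instance
        String attrName = "bus0";   // stands in for the attribute's string value
        serdesByBusRef.put(attrName, 1);
        // Map.get(Object) compiles for any argument type, so the wrong key
        // silently misses instead of failing at compile time.
        System.out.println(serdesByBusRef.get(attr));     // null
        System.out.println(serdesByBusRef.get(attrName)); // 1
    }
}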
diff --git a/src/main/java/ch/bli/mez/view/MainView.java b/src/main/java/ch/bli/mez/view/MainView.java index 5bcc9b7..55adfb6 100644 --- a/src/main/java/ch/bli/mez/view/MainView.java +++ b/src/main/java/ch/bli/mez/view/MainView.java @@ -1,100 +1,100 @@ package ch.bli.mez.view; import java.awt.CardLayout; import java.awt.Dimension; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JTabbedPane; import javax.swing.UIManager; import javax.swing.event.ChangeListener; /** * MainFrame contains the top-level tabs, originally "Zeiten erfassen", "Mitarbeiter verwalten", "Auswertungen", "Verwaltung" * @author dave * @version 1.0 */ public class MainView extends JFrame { private static final long serialVersionUID = -8484150056391154851L; private JTabbedPane tabbedPaneMain; public MainView() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); getContentPane().setLayout(new CardLayout(0, 0)); setMinimumSize(new Dimension(1000, 600)); try { UIManager .setLookAndFeel("com.sun.java.swing.plaf.nimbus.NimbusLookAndFeel"); } catch (Exception e) { e.printStackTrace(); } tabbedPaneMain = new JTabbedPane(JTabbedPane.TOP); - getContentPane().add(tabbedPaneMain); + getContentPane().add("Main", tabbedPaneMain); // add tab placeholders tabbedPaneMain.insertTab("Zeiten erfassen", null, new JPanel(), null, 0); tabbedPaneMain.insertTab("Mitarbeiter verwalten", null, new JPanel(), null, 1); tabbedPaneMain.insertTab("Auswertungen", null, new JPanel(), null, 2); tabbedPaneMain.insertTab("Verwaltung", null, new JPanel(), null, 3); } /** * @@@Uncomment as soon as the TimeMgmtView class exists. Sets the panel * "Zeiten erfassen" * * @param timeMgmtView TimeMgmtView object to be placed in the * "Zeiten erfassen" tab */ // public void setTimeMgmtPanel(TimeMgmtView timeMgmtView) { // tabbedPaneMain.setComponentAt(0, timeMgmtView); // } /** * @@@Uncomment as soon as the TimeMgmtView class exists. Returns the panel * "Zeiten erfassen" * * @return TimeMgmtView the currently installed "Zeiten erfassen" panel */ // public TimeMgmtView getTimeMgmtPanel() { // return tabbedPaneMain.getTabComponentAt(0); // } /** * Sets the panel "Mitarbeiter verwalten" * * @param employeeView EmployeeView object to be placed in the * "Mitarbeiter verwalten" tab */ public void setEmployeePanel(EmployeeView employeeView) { tabbedPaneMain.setComponentAt(1, employeeView); } /** * Returns the panel "Mitarbeiter verwalten" * * @return EmployeeView the currently installed "Mitarbeiter verwalten" * panel */ public EmployeeView getEmployeePanel() { return (EmployeeView) tabbedPaneMain.getTabComponentAt(1); } public void setManagementPanel(ManagementView managementView){ tabbedPaneMain.setComponentAt(3, managementView); } public ManagementView getManagementPanel(){ return (ManagementView) tabbedPaneMain.getTabComponentAt(3); } //setter and getter for "Auswertung" still have to be created (internal comment) public void setTabChangeListener(ChangeListener cl){ tabbedPaneMain.addChangeListener(cl); } }
true
true
public MainView() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); getContentPane().setLayout(new CardLayout(0, 0)); setMinimumSize(new Dimension(1000, 600)); try { UIManager .setLookAndFeel("com.sun.java.swing.plaf.nimbus.NimbusLookAndFeel"); } catch (Exception e) { e.printStackTrace(); } tabbedPaneMain = new JTabbedPane(JTabbedPane.TOP); getContentPane().add(tabbedPaneMain); // add tab placeholders tabbedPaneMain.insertTab("Zeiten erfassen", null, new JPanel(), null, 0); tabbedPaneMain.insertTab("Mitarbeiter verwalten", null, new JPanel(), null, 1); tabbedPaneMain.insertTab("Auswertungen", null, new JPanel(), null, 2); tabbedPaneMain.insertTab("Verwaltung", null, new JPanel(), null, 3); }
public MainView() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); getContentPane().setLayout(new CardLayout(0, 0)); setMinimumSize(new Dimension(1000, 600)); try { UIManager .setLookAndFeel("com.sun.java.swing.plaf.nimbus.NimbusLookAndFeel"); } catch (Exception e) { e.printStackTrace(); } tabbedPaneMain = new JTabbedPane(JTabbedPane.TOP); getContentPane().add("Main", tabbedPaneMain); // add tab placeholders tabbedPaneMain.insertTab("Zeiten erfassen", null, new JPanel(), null, 0); tabbedPaneMain.insertTab("Mitarbeiter verwalten", null, new JPanel(), null, 1); tabbedPaneMain.insertTab("Auswertungen", null, new JPanel(), null, 2); tabbedPaneMain.insertTab("Verwaltung", null, new JPanel(), null, 3); }
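The fix registers the tabbed pane under the name "Main" because the content pane uses a CardLayout, which identifies its children by the String constraint they are added with: an unnamed card cannot be selected via show(), and depending on the JDK version the add itself may fail. A minimal sketch of the naming contract (card names are illustrative):

import java.awt.CardLayout;
import javax.swing.JFrame;
import javax.swing.JLabel;

public class CardLayoutDemo {
    public static void main(String[] args) {
        JFrame frame = new JFrame();
        CardLayout cards = new CardLayout(0, 0);
        frame.getContentPane().setLayout(cards);
        // Each card is registered under a String name...
        frame.getContentPane().add("first", new JLabel("first card"));
        frame.getContentPane().add("second", new JLabel("second card"));
        // ...and flipped to by that name.
        cards.show(frame.getContentPane(), "second");
        frame.setSize(300, 200);
        frame.setVisible(true);
    }
}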
diff --git a/session-impl/integration-test/src/test/java/org/cytoscape/session/BasicIntegrationTest.java b/session-impl/integration-test/src/test/java/org/cytoscape/session/BasicIntegrationTest.java index 07fd5a4bf..2fafd03b9 100644 --- a/session-impl/integration-test/src/test/java/org/cytoscape/session/BasicIntegrationTest.java +++ b/session-impl/integration-test/src/test/java/org/cytoscape/session/BasicIntegrationTest.java @@ -1,234 +1,236 @@ /* * Copyright (C) 2011 Toni Menzel * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.cytoscape.session; import static org.junit.Assert.*; import static org.junit.Assert.assertNotNull; import static org.ops4j.pax.exam.CoreOptions.felix; import static org.ops4j.pax.exam.CoreOptions.junitBundles; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.options; import static org.ops4j.pax.exam.CoreOptions.repository; import static org.ops4j.pax.exam.CoreOptions.frameworkStartLevel; import java.io.File; import javax.inject.Inject; import org.cytoscape.application.CyApplicationManager; import org.cytoscape.io.read.CySessionReader; import org.cytoscape.io.read.CySessionReaderManager; import org.cytoscape.model.CyNetworkFactory; import org.cytoscape.model.CyNetworkManager; import org.cytoscape.model.CyNetworkTableManager; import org.cytoscape.model.CyTableManager; import org.cytoscape.task.read.OpenSessionTaskFactory; import org.cytoscape.view.model.CyNetworkViewManager; import org.cytoscape.view.model.VisualLexicon; import org.cytoscape.view.presentation.RenderingEngineManager; import org.cytoscape.view.vizmap.VisualMappingManager; import org.cytoscape.work.SynchronousTaskManager; import org.cytoscape.work.TaskIterator; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.junit.Configuration; import org.ops4j.pax.exam.junit.ExamReactorStrategy; import org.ops4j.pax.exam.junit.JUnit4TestRunner; import org.ops4j.pax.exam.spi.reactors.AllConfinedStagedReactorFactory; import org.ops4j.pax.exam.util.Filter; import org.osgi.framework.BundleContext; /** * Build minimum set of Cytoscape to test session loading/saving. 
* */ @RunWith(JUnit4TestRunner.class) // Framework will be reset for each test @ExamReactorStrategy( AllConfinedStagedReactorFactory.class ) public abstract class BasicIntegrationTest { ///////// OSGi Bundle Context //////////// @Inject protected BundleContext bundleContext; ///////// Manager objects //////////////// @Inject protected CyNetworkManager networkManager; @Inject protected CyTableManager tableManager; @Inject protected CyNetworkTableManager networkTableManager; @Inject protected CyNetworkViewManager viewManager; @Inject protected CyNetworkFactory networkFactory; @Inject protected CySessionManager sessionManager; @Inject protected VisualMappingManager vmm; @Inject protected RenderingEngineManager renderingEngineManager; @Inject @Filter("(id=ding)") // Use DING protected VisualLexicon lexicon; @Inject protected OpenSessionTaskFactory openSessionTF; @Inject protected SynchronousTaskManager<?> tm; @Inject protected CyApplicationManager applicationManager; @Inject protected CySessionReaderManager sessionReaderManager; // Target file name. Assume we always have one test session file per test class. protected File sessionFile; /** * Build minimal set of bundles. */ @Configuration public Option[] config() { // These system properties are set in the surefire configuration in the pom. String apiBundleVersion = System.getProperty("cytoscape.api.version"); String implBundleVersion = System.getProperty("cytoscape.impl.version"); return options( junitBundles(), // Use Felix as runtime felix(), // So that we actually start all of our bundles! frameworkStartLevel(50), // Specify all of our repositories repository("http://code.cytoscape.org/nexus/content/repositories/snapshots/"), repository("http://code.cytoscape.org/nexus/content/repositories/releases/"), repository("http://code.cytoscape.org/nexus/content/repositories/thirdparty/"), // Misc. 
bundles required to run minimal Cytoscape mavenBundle().groupId("cytoscape-sun").artifactId("jhall").version("1.0").startLevel(3), mavenBundle().groupId("com.googlecode.guava-osgi").artifactId("guava-osgi").version("9.0.0").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("parallelcolt").version("0.9.4").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("opencsv").version("2.1").startLevel(3), mavenBundle().groupId("com.lowagie.text").artifactId("com.springsource.com.lowagie.text").version("2.0.8").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-svg").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-ps").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphics2d").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-shared").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-fontchooser").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-sheet").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("org.swinglabs.swingx").version("1.6.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-export").version("2.1.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-util").version("2.0.2").startLevel(3), + mavenBundle().groupId("cytoscape-temp").artifactId("protostuff-core-json-osgi").version("1.0.7").startLevel(3), + mavenBundle().groupId("org.codehaus.jackson").artifactId("jackson-core-lgpl").version("1.9.7").startLevel(3), // API bundles mavenBundle().groupId("org.cytoscape").artifactId("event-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("model-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("group-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("presentation-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("session-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("io-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("property-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("work-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("core-task-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("layout-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("datasource-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("work-swing-api").version(apiBundleVersion).startLevel(5), 
mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("equations-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("service-api").version(apiBundleVersion).startLevel(5), // Implementation bundles mavenBundle().groupId("org.cytoscape").artifactId("property-impl").version(implBundleVersion).startLevel(7), mavenBundle().groupId("org.cytoscape").artifactId("swing-util-api").version(apiBundleVersion).startLevel(8), mavenBundle().groupId("org.cytoscape").artifactId("datasource-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("equations-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("event-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("model-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("group-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-headless-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("presentation-impl").version(implBundleVersion).startLevel(13), mavenBundle().groupId("org.cytoscape").artifactId("layout-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("application-impl").version(implBundleVersion).startLevel(17), mavenBundle().groupId("org.cytoscape").artifactId("session-impl").version(implBundleVersion).startLevel(19), mavenBundle().groupId("org.cytoscape").artifactId("ding-presentation-impl").version(implBundleVersion).startLevel(21), mavenBundle().groupId("org.cytoscape").artifactId("io-impl").version(implBundleVersion).startLevel(23), mavenBundle().groupId("org.cytoscape").artifactId("core-task-impl").version(implBundleVersion).startLevel(25), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-impl").version(implBundleVersion).startLevel(27) ); } /** * * Make Sure all required basic objects are available. * * @throws Exception */ void checkBasicConfiguration() throws Exception { assertNotNull(sessionFile); assertNotNull(bundleContext); assertNotNull(networkManager); assertNotNull(networkTableManager); assertNotNull(tableManager); assertNotNull(networkFactory); assertNotNull(sessionManager); assertNotNull(renderingEngineManager); assertNotNull(tm); assertNotNull(openSessionTF); assertNotNull(applicationManager); assertNotNull(sessionReaderManager); } }
true
true
public Option[] config() { // These system properties are set in the surefire configuration in the pom. String apiBundleVersion = System.getProperty("cytoscape.api.version"); String implBundleVersion = System.getProperty("cytoscape.impl.version"); return options( junitBundles(), // Use Felix as runtime felix(), // So that we actually start all of our bundles! frameworkStartLevel(50), // Specify all of our repositories repository("http://code.cytoscape.org/nexus/content/repositories/snapshots/"), repository("http://code.cytoscape.org/nexus/content/repositories/releases/"), repository("http://code.cytoscape.org/nexus/content/repositories/thirdparty/"), // Misc. bundles required to run minimal Cytoscape mavenBundle().groupId("cytoscape-sun").artifactId("jhall").version("1.0").startLevel(3), mavenBundle().groupId("com.googlecode.guava-osgi").artifactId("guava-osgi").version("9.0.0").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("parallelcolt").version("0.9.4").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("opencsv").version("2.1").startLevel(3), mavenBundle().groupId("com.lowagie.text").artifactId("com.springsource.com.lowagie.text").version("2.0.8").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-svg").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-ps").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphics2d").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-shared").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-fontchooser").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-sheet").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("org.swinglabs.swingx").version("1.6.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-export").version("2.1.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-util").version("2.0.2").startLevel(3), // API bundles mavenBundle().groupId("org.cytoscape").artifactId("event-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("model-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("group-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("presentation-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("session-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("io-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("property-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("work-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("core-task-api").version(apiBundleVersion).startLevel(5), 
mavenBundle().groupId("org.cytoscape").artifactId("application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("layout-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("datasource-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("work-swing-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("equations-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("service-api").version(apiBundleVersion).startLevel(5), // Implementation bundles mavenBundle().groupId("org.cytoscape").artifactId("property-impl").version(implBundleVersion).startLevel(7), mavenBundle().groupId("org.cytoscape").artifactId("swing-util-api").version(apiBundleVersion).startLevel(8), mavenBundle().groupId("org.cytoscape").artifactId("datasource-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("equations-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("event-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("model-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("group-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-headless-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("presentation-impl").version(implBundleVersion).startLevel(13), mavenBundle().groupId("org.cytoscape").artifactId("layout-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("application-impl").version(implBundleVersion).startLevel(17), mavenBundle().groupId("org.cytoscape").artifactId("session-impl").version(implBundleVersion).startLevel(19), mavenBundle().groupId("org.cytoscape").artifactId("ding-presentation-impl").version(implBundleVersion).startLevel(21), mavenBundle().groupId("org.cytoscape").artifactId("io-impl").version(implBundleVersion).startLevel(23), mavenBundle().groupId("org.cytoscape").artifactId("core-task-impl").version(implBundleVersion).startLevel(25), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-impl").version(implBundleVersion).startLevel(27) ); }
public Option[] config() { // These system properties are set in the surefire configuration in the pom. String apiBundleVersion = System.getProperty("cytoscape.api.version"); String implBundleVersion = System.getProperty("cytoscape.impl.version"); return options( junitBundles(), // Use Felix as runtime felix(), // So that we actually start all of our bundles! frameworkStartLevel(50), // Specify all of our repositories repository("http://code.cytoscape.org/nexus/content/repositories/snapshots/"), repository("http://code.cytoscape.org/nexus/content/repositories/releases/"), repository("http://code.cytoscape.org/nexus/content/repositories/thirdparty/"), // Misc. bundles required to run minimal Cytoscape mavenBundle().groupId("cytoscape-sun").artifactId("jhall").version("1.0").startLevel(3), mavenBundle().groupId("com.googlecode.guava-osgi").artifactId("guava-osgi").version("9.0.0").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("parallelcolt").version("0.9.4").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("opencsv").version("2.1").startLevel(3), mavenBundle().groupId("com.lowagie.text").artifactId("com.springsource.com.lowagie.text").version("2.0.8").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-svg").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphicsio-ps").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-graphics2d").version("2.1.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-shared").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-fontchooser").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("l2fprod-common-sheet").version("7.3").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("org.swinglabs.swingx").version("1.6.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-export").version("2.1.1").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("freehep-util").version("2.0.2").startLevel(3), mavenBundle().groupId("cytoscape-temp").artifactId("protostuff-core-json-osgi").version("1.0.7").startLevel(3), mavenBundle().groupId("org.codehaus.jackson").artifactId("jackson-core-lgpl").version("1.9.7").startLevel(3), // API bundles mavenBundle().groupId("org.cytoscape").artifactId("event-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("model-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("group-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("presentation-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("session-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("io-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("property-api").version(apiBundleVersion).startLevel(5), 
mavenBundle().groupId("org.cytoscape").artifactId("work-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("core-task-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("layout-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("datasource-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("work-swing-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("equations-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("swing-application-api").version(apiBundleVersion).startLevel(5), mavenBundle().groupId("org.cytoscape").artifactId("service-api").version(apiBundleVersion).startLevel(5), // Implementation bundles mavenBundle().groupId("org.cytoscape").artifactId("property-impl").version(implBundleVersion).startLevel(7), mavenBundle().groupId("org.cytoscape").artifactId("swing-util-api").version(apiBundleVersion).startLevel(8), mavenBundle().groupId("org.cytoscape").artifactId("datasource-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("equations-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("event-impl").version(implBundleVersion).startLevel(9), mavenBundle().groupId("org.cytoscape").artifactId("model-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("group-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("work-headless-impl").version(implBundleVersion).startLevel(11), mavenBundle().groupId("org.cytoscape").artifactId("presentation-impl").version(implBundleVersion).startLevel(13), mavenBundle().groupId("org.cytoscape").artifactId("layout-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("viewmodel-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-impl").version(implBundleVersion).startLevel(15), mavenBundle().groupId("org.cytoscape").artifactId("application-impl").version(implBundleVersion).startLevel(17), mavenBundle().groupId("org.cytoscape").artifactId("session-impl").version(implBundleVersion).startLevel(19), mavenBundle().groupId("org.cytoscape").artifactId("ding-presentation-impl").version(implBundleVersion).startLevel(21), mavenBundle().groupId("org.cytoscape").artifactId("io-impl").version(implBundleVersion).startLevel(23), mavenBundle().groupId("org.cytoscape").artifactId("core-task-impl").version(implBundleVersion).startLevel(25), mavenBundle().groupId("org.cytoscape").artifactId("vizmap-gui-impl").version(implBundleVersion).startLevel(27) ); }
diff --git a/android/ibrdtn-api/src/de/tubs/ibr/dtn/api/Bundle.java b/android/ibrdtn-api/src/de/tubs/ibr/dtn/api/Bundle.java index e7f67d1d..fc220937 100644 --- a/android/ibrdtn-api/src/de/tubs/ibr/dtn/api/Bundle.java +++ b/android/ibrdtn-api/src/de/tubs/ibr/dtn/api/Bundle.java @@ -1,308 +1,308 @@ /* * Block.java * * Copyright (C) 2011 IBR, TU Braunschweig * * Written-by: Johannes Morgenroth <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package de.tubs.ibr.dtn.api; import java.util.Date; import android.os.Parcel; import android.os.Parcelable; public class Bundle implements Parcelable { private EID destination = null; private SingletonEndpoint source = null; private SingletonEndpoint custodian = null; private SingletonEndpoint reportto = null; private Long lifetime = null; private Date timestamp = null; private Long sequencenumber = null; private Long procflags = 0L; private Long app_data_length = null; private Long fragment_offset = null; public enum Priority { LOW, MEDIUM, HIGH }; public enum ProcFlags { FRAGMENT(1 << 0x00), APPDATA_IS_ADMRECORD(1 << 0x01), DONT_FRAGMENT(1 << 0x02), CUSTODY_REQUESTED(1 << 0x03), DESTINATION_IS_SINGLETON(1 << 0x04), ACKOFAPP_REQUESTED(1 << 0x05), RESERVED_6(1 << 0x06), PRIORITY_BIT1(1 << 0x07), PRIORITY_BIT2(1 << 0x08), CLASSOFSERVICE_9(1 << 0x09), CLASSOFSERVICE_10(1 << 0x0A), CLASSOFSERVICE_11(1 << 0x0B), CLASSOFSERVICE_12(1 << 0x0C), CLASSOFSERVICE_13(1 << 0x0D), REQUEST_REPORT_OF_BUNDLE_RECEPTION(1 << 0x0E), REQUEST_REPORT_OF_CUSTODY_ACCEPTANCE(1 << 0x0F), REQUEST_REPORT_OF_BUNDLE_FORWARDING(1 << 0x10), REQUEST_REPORT_OF_BUNDLE_DELIVERY(1 << 0x11), REQUEST_REPORT_OF_BUNDLE_DELETION(1 << 0x12), STATUS_REPORT_REQUEST_19(1 << 0x13), STATUS_REPORT_REQUEST_20(1 << 0x14), // DTNSEC FLAGS (these are customized flags and not written down in any draft) DTNSEC_REQUEST_SIGN(1 << 0x1A), DTNSEC_REQUEST_ENCRYPT(1 << 0x1B), DTNSEC_STATUS_VERIFIED(1 << 0x1C), DTNSEC_STATUS_CONFIDENTIAL(1 << 0x1D), DTNSEC_STATUS_AUTHENTICATED(1 << 0x1E), IBRDTN_REQUEST_COMPRESSION(1 << 0x1F); private int value = 0; private ProcFlags(int i) { this.value = i; } public int getValue() { return this.value; } }; public Bundle() { } public Bundle(long procflags) { this.procflags = procflags; } public Boolean get(ProcFlags flag) { return (flag.getValue() & this.procflags) > 0; } public void set(ProcFlags flag, Boolean value) { if (value) { this.procflags |= flag.getValue(); } else { this.procflags &= ~(flag.getValue()); } } public Priority getPriority() { if (get(ProcFlags.PRIORITY_BIT1)) { return Priority.MEDIUM; } if (get(ProcFlags.PRIORITY_BIT2)) { return Priority.HIGH; } return Priority.LOW; } public void setPriority(Priority p) { // set the priority to the real bundle switch (p) { case LOW: set(ProcFlags.PRIORITY_BIT1, false); set(ProcFlags.PRIORITY_BIT2, false); break; case HIGH: set(ProcFlags.PRIORITY_BIT1, false); set(ProcFlags.PRIORITY_BIT2, true); break; case MEDIUM: set(ProcFlags.PRIORITY_BIT1, true); set(ProcFlags.PRIORITY_BIT2, false); break; } } public EID 
getDestination() { return destination; } public void setDestination(EID destination) { this.destination = destination; this.set(ProcFlags.DESTINATION_IS_SINGLETON, this.destination instanceof SingletonEndpoint); } public SingletonEndpoint getSource() { return source; } public void setSource(SingletonEndpoint source) { this.source = source; } public SingletonEndpoint getCustodian() { return custodian; } public void setCustodian(SingletonEndpoint custodian) { this.custodian = custodian; } public SingletonEndpoint getReportto() { return reportto; } public void setReportto(SingletonEndpoint reportto) { this.reportto = reportto; } public Long getLifetime() { return lifetime; } public void setLifetime(Long lifetime) { this.lifetime = lifetime; } public Date getTimestamp() { return timestamp; } public void setTimestamp(Date timestamp) { this.timestamp = timestamp; } public Long getSequencenumber() { return sequencenumber; } public void setSequencenumber(Long sequencenumber) { this.sequencenumber = sequencenumber; } public Long getAppDataLength() { return app_data_length; } public void setAppDataLength(Long app_data_length) { this.app_data_length = app_data_length; } public Long getFragmentOffset() { return fragment_offset; } public void setFragmentOffset(Long fragment_offset) { this.fragment_offset = fragment_offset; } public Long getProcflags() { return procflags; } public int describeContents() { return 0; } public void writeToParcel(Parcel dest, int flags) { boolean nullMarker[] = { destination != null, source != null, custodian != null, reportto != null, lifetime != null, timestamp != null, sequencenumber != null, app_data_length != null, fragment_offset != null }; // write processing flags dest.writeLong( procflags ); // write null marker dest.writeBooleanArray(nullMarker); if (nullMarker[0]) dest.writeString(destination.toString()); if (nullMarker[1]) dest.writeString(source.toString()); if (nullMarker[2]) dest.writeString(custodian.toString()); if (nullMarker[3]) dest.writeString(reportto.toString()); if (nullMarker[4]) dest.writeLong( lifetime ); if (nullMarker[5]) dest.writeLong( timestamp.getTime() ); if (nullMarker[6]) dest.writeLong( sequencenumber ); if (nullMarker[7]) dest.writeLong( app_data_length ); if (nullMarker[8]) dest.writeLong( fragment_offset ); } public static final Creator<Bundle> CREATOR = new Creator<Bundle>() { public Bundle createFromParcel(final Parcel source) { // create bundle Bundle b = new Bundle(); // read processing flags b.procflags = source.readLong(); // read null marker array boolean nullMarker[] = { false, false, false, false, false, false, false, false, false }; source.readBooleanArray(nullMarker); // read destination if (nullMarker[0]) { if (b.get(ProcFlags.DESTINATION_IS_SINGLETON)) { b.destination = new SingletonEndpoint(source.readString()); } else { b.destination = new GroupEndpoint(source.readString()); } } else { b.destination = null; } if (nullMarker[1]) b.source = new SingletonEndpoint(source.readString()); else b.source = null; if (nullMarker[2]) b.custodian = new SingletonEndpoint(source.readString()); else b.custodian = null; if (nullMarker[3]) b.reportto = new SingletonEndpoint(source.readString()); else b.reportto = null; if (nullMarker[4]) b.lifetime = source.readLong(); else b.lifetime = null; if (nullMarker[5]) b.timestamp = new Date( source.readLong() ); else b.timestamp = null; if (nullMarker[6]) b.sequencenumber = source.readLong(); - b.sequencenumber = null; + else b.sequencenumber = null; if (nullMarker[7]) b.app_data_length = 
source.readLong(); - b.app_data_length = null; + else b.app_data_length = null; if (nullMarker[8]) b.fragment_offset = source.readLong(); - b.fragment_offset = null; + else b.fragment_offset = null; return b; } public Bundle[] newArray(final int size) { return new Bundle[size]; } }; }
false
true
public Bundle createFromParcel(final Parcel source) { // create bundle Bundle b = new Bundle(); // read processing flags b.procflags = source.readLong(); // read null marker array boolean nullMarker[] = { false, false, false, false, false, false, false, false, false }; source.readBooleanArray(nullMarker); // read destination if (nullMarker[0]) { if (b.get(ProcFlags.DESTINATION_IS_SINGLETON)) { b.destination = new SingletonEndpoint(source.readString()); } else { b.destination = new GroupEndpoint(source.readString()); } } else { b.destination = null; } if (nullMarker[1]) b.source = new SingletonEndpoint(source.readString()); else b.source = null; if (nullMarker[2]) b.custodian = new SingletonEndpoint(source.readString()); else b.custodian = null; if (nullMarker[3]) b.reportto = new SingletonEndpoint(source.readString()); else b.reportto = null; if (nullMarker[4]) b.lifetime = source.readLong(); else b.lifetime = null; if (nullMarker[5]) b.timestamp = new Date( source.readLong() ); else b.timestamp = null; if (nullMarker[6]) b.sequencenumber = source.readLong(); b.sequencenumber = null; if (nullMarker[7]) b.app_data_length = source.readLong(); b.app_data_length = null; if (nullMarker[8]) b.fragment_offset = source.readLong(); b.fragment_offset = null; return b; }
public Bundle createFromParcel(final Parcel source) { // create bundle Bundle b = new Bundle(); // read processing flags b.procflags = source.readLong(); // read null marker array boolean nullMarker[] = { false, false, false, false, false, false, false, false, false }; source.readBooleanArray(nullMarker); // read destination if (nullMarker[0]) { if (b.get(ProcFlags.DESTINATION_IS_SINGLETON)) { b.destination = new SingletonEndpoint(source.readString()); } else { b.destination = new GroupEndpoint(source.readString()); } } else { b.destination = null; } if (nullMarker[1]) b.source = new SingletonEndpoint(source.readString()); else b.source = null; if (nullMarker[2]) b.custodian = new SingletonEndpoint(source.readString()); else b.custodian = null; if (nullMarker[3]) b.reportto = new SingletonEndpoint(source.readString()); else b.reportto = null; if (nullMarker[4]) b.lifetime = source.readLong(); else b.lifetime = null; if (nullMarker[5]) b.timestamp = new Date( source.readLong() ); else b.timestamp = null; if (nullMarker[6]) b.sequencenumber = source.readLong(); else b.sequencenumber = null; if (nullMarker[7]) b.app_data_length = source.readLong(); else b.app_data_length = null; if (nullMarker[8]) b.fragment_offset = source.readLong(); else b.fragment_offset = null; return b; }
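Note on this row: the bug in createFromParcel() is three missing else keywords; after reading an optional field guarded by its null marker, the following unconditional assignment immediately reset the field to null. The sketch below reproduces the same null-marker pattern with java.io streams as a desktop stand-in for android.os.Parcel (which only exists on-device); the field names echo the bundle fields but the class is illustrative. Writing the read as a ternary makes the else branch impossible to drop:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class NullMarkerDemo {
    public static void main(String[] args) throws IOException {
        Long lifetime = 3600L;       // present
        Long sequencenumber = null;  // absent

        // Writer: first one marker per nullable field, then only the present values.
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        boolean[] present = { lifetime != null, sequencenumber != null };
        for (boolean p : present) out.writeBoolean(p);
        if (present[0]) out.writeLong(lifetime);
        if (present[1]) out.writeLong(sequencenumber);

        // Reader: every marker needs a matching if/else -- the buggy bundle code
        // read "if (marker) x = read(); x = null;", which nulled x unconditionally.
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        boolean[] marker = { in.readBoolean(), in.readBoolean() };
        Long readLifetime = marker[0] ? in.readLong() : null;
        Long readSequence = marker[1] ? in.readLong() : null;
        System.out.println(readLifetime + " / " + readSequence);  // prints: 3600 / null
    }
}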
diff --git a/Gamework/src/cz/robyer/gamework/hook/Condition.java b/Gamework/src/cz/robyer/gamework/hook/Condition.java index 45fb730..1e04b65 100644 --- a/Gamework/src/cz/robyer/gamework/hook/Condition.java +++ b/Gamework/src/cz/robyer/gamework/hook/Condition.java @@ -1,118 +1,118 @@ package cz.robyer.gamework.hook; import cz.robyer.gamework.scenario.BaseObject; import cz.robyer.gamework.scenario.variable.BooleanVariable; import cz.robyer.gamework.scenario.variable.DecimalVariable; import cz.robyer.gamework.scenario.variable.Variable; import cz.robyer.gamework.util.Log; /** * Represents condition for {@link Hook}. * @author Robert Pösel */ public class Condition extends BaseObject { public static final String TAG = Condition.class.getSimpleName(); public static final int TYPE_EQUALS = 0; public static final int TYPE_NOTEQUALS = 1; public static final int TYPE_GREATER = 3; public static final int TYPE_SMALLER = 4; public static final int TYPE_GREATEREQUALS = 5; public static final int TYPE_SMALLEREQUALS = 6; protected int type; protected String variable; protected String value; protected Hook parent; /** * Basic constructor. * @param type of condition * @param id of variable for condition, could be empty * @param value to be compared with value of variable */ public Condition(int type, String variable, String value) { super(); this.type = type; this.variable = variable; this.value = value; } public int getType() { return type; } public void setParent(Hook parent) { this.parent = parent; } public Hook getParent() { if (parent == null) { Log.e(TAG, "No parent is attached"); throw new RuntimeException(); } return parent; } /** * Checks if this condition pass. * @param variable which was changed (and which called our hook) * @return true if this condition is valid, false otherwise */ public boolean isValid(Variable variable) { // If this condition has defined own variable, we use that - if (this.variable.length() > 0) { + if (this.variable != null) { Log.d(TAG, String.format("This condition uses own variable '%s'.", this.variable)); variable = getScenario().getVariable(this.variable); } if (variable == null) { Log.e(TAG, "Variable to check is null"); return false; } boolean valid = false; if (variable instanceof BooleanVariable) { boolean varValue = ((BooleanVariable)variable).getValue(); boolean condValue = Boolean.parseBoolean(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; } } else if (variable instanceof DecimalVariable) { int varValue = ((DecimalVariable)variable).getValue(); int condValue = Integer.parseInt(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; case TYPE_GREATER: valid = (varValue > condValue); break; case TYPE_SMALLER: valid = (varValue < condValue); break; case TYPE_GREATEREQUALS: valid = (varValue >= condValue); break; case TYPE_SMALLEREQUALS: valid = (varValue <= condValue); break; } } return valid; } }
true
true
public boolean isValid(Variable variable) { // If this condition has defined own variable, we use that if (this.variable.length() > 0) { Log.d(TAG, String.format("This condition uses own variable '%s'.", this.variable)); variable = getScenario().getVariable(this.variable); } if (variable == null) { Log.e(TAG, "Variable to check is null"); return false; } boolean valid = false; if (variable instanceof BooleanVariable) { boolean varValue = ((BooleanVariable)variable).getValue(); boolean condValue = Boolean.parseBoolean(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; } } else if (variable instanceof DecimalVariable) { int varValue = ((DecimalVariable)variable).getValue(); int condValue = Integer.parseInt(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; case TYPE_GREATER: valid = (varValue > condValue); break; case TYPE_SMALLER: valid = (varValue < condValue); break; case TYPE_GREATEREQUALS: valid = (varValue >= condValue); break; case TYPE_SMALLEREQUALS: valid = (varValue <= condValue); break; } } return valid; }
public boolean isValid(Variable variable) { // If this condition has defined own variable, we use that if (this.variable != null) { Log.d(TAG, String.format("This condition uses own variable '%s'.", this.variable)); variable = getScenario().getVariable(this.variable); } if (variable == null) { Log.e(TAG, "Variable to check is null"); return false; } boolean valid = false; if (variable instanceof BooleanVariable) { boolean varValue = ((BooleanVariable)variable).getValue(); boolean condValue = Boolean.parseBoolean(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; } } else if (variable instanceof DecimalVariable) { int varValue = ((DecimalVariable)variable).getValue(); int condValue = Integer.parseInt(value); switch (type) { case TYPE_EQUALS: valid = (varValue == condValue); break; case TYPE_NOTEQUALS: valid = (varValue != condValue); break; case TYPE_GREATER: valid = (varValue > condValue); break; case TYPE_SMALLER: valid = (varValue < condValue); break; case TYPE_GREATEREQUALS: valid = (varValue >= condValue); break; case TYPE_SMALLEREQUALS: valid = (varValue <= condValue); break; } } return valid; }
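Note on this row: the buggy guard dereferenced the field before checking that it exists (this.variable.length() > 0 throws a NullPointerException when the condition has no variable of its own), and the fix replaces it with a plain null check. The two guards are not equivalent, though: the original also skipped empty strings. A small sketch of the combined, null-safe form, assuming empty names should still be skipped (class and method names are illustrative); the null check must come first so that && short-circuits before the dereference:

public class GuardDemo {
    static boolean hasOwnVariable(String variable) {
        // Safe for null and empty alike; "variable != null" alone (the
        // dataset's fix) would treat "" as a real variable name.
        return variable != null && !variable.isEmpty();
    }

    public static void main(String[] args) {
        System.out.println(hasOwnVariable(null));    // false, and no NPE
        System.out.println(hasOwnVariable(""));      // false
        System.out.println(hasOwnVariable("score")); // true
    }
}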
diff --git a/src/gov/nih/nci/rembrandt/util/ApplicationContext.java b/src/gov/nih/nci/rembrandt/util/ApplicationContext.java index d4c1e286..fd5ce756 100755 --- a/src/gov/nih/nci/rembrandt/util/ApplicationContext.java +++ b/src/gov/nih/nci/rembrandt/util/ApplicationContext.java @@ -1,224 +1,224 @@ package gov.nih.nci.rembrandt.util; import gov.nih.nci.caintegrator.application.analysis.AnalysisServerClientManager; import gov.nih.nci.rembrandt.cache.RembrandtContextListener; import gov.nih.nci.rembrandt.queryservice.queryprocessing.QueryHandler; import gov.nih.nci.rembrandt.web.factory.ApplicationFactory; import java.io.FileInputStream; import java.io.InputStream; import java.io.File; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException ; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Properties; import javax.jms.JMSException; import javax.naming.NamingException; import org.apache.log4j.Logger; import org.w3c.dom.Document; import org.xml.sax.InputSource; import com.sun.org.apache.xerces.internal.impl.xs.dom.DOMParser; import org.apache.ojb.broker.core.PersistenceBrokerFactoryFactory; import org.apache.ojb.broker.core.PersistenceBrokerFactoryIF; import org.apache.ojb.broker.metadata.ConnectionRepository; import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor; import org.apache.ojb.broker.metadata.MetadataManager; import org.apache.ojb.broker.PBKey; /** * @todo comment this! * @author BhattarR, BauerD * */ /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. 
For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
* IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ public class ApplicationContext{ private static Map mappings = new HashMap(); private static Logger logger = Logger.getLogger(ApplicationContext.class); private static Properties labelProps = null; private static Properties messagingProps = null; private static Document doc =null; /** * COMMENT THIS * @return */ public static Properties getLabelProperties() { return labelProps; } public static Map getDEtoBeanAttributeMappings() { return mappings; } public static Properties getJMSProperties(){ return messagingProps; } @SuppressWarnings("unused") public static void init() { logger.debug("Loading Application Resources"); labelProps = PropertyLoader.loadProperties(RembrandtConstants.APPLICATION_RESOURCES); messagingProps = PropertyLoader.loadProperties(RembrandtConstants.JMS_PROPERTIES); try { logger.debug("Bean to Attribute Mappings"); InputStream inStream = QueryHandler.class.getResourceAsStream(RembrandtConstants.DE_BEAN_FILE_NAME); assert true:inStream != null; DOMParser p = new DOMParser(); p.parse(new InputSource(inStream)); doc = p.getDocument(); assert(doc != null); logger.debug("Begining DomainElement to Bean Mapping"); mappings = new DEBeanMappingsHandler().populate(doc); logger.debug("DomainElement to Bean Mapping is completed"); QueryHandler.init(); } catch(Throwable t) { logger.error(new IllegalStateException("Error parsing deToBeanAttrMappings.xml file: Exception: " + t)); } //Start the JMS Lister try { @SuppressWarnings("unused") AnalysisServerClientManager analysisServerClientManager = AnalysisServerClientManager.getInstance(); //set the AnalysisServerClientManager properties //Get the jms properties for this application //analysisServerClientManager.setJMSparameters(); //Get the application properties from the properties file - String propertiesFileName = System.getProperty("gov.nih.nci.rembrandt.propertiesFile"); + String propertiesFileName = System.getProperty("gov.nih.nci.rembrandt.properties"); //Load the the application properties and set them as system properties Properties rembrandtProperties = new Properties(); logger.info("Attempting to load application system properties from file: " + propertiesFileName); FileInputStream in = new FileInputStream(propertiesFileName); rembrandtProperties.load(in); if (rembrandtProperties.isEmpty()) { logger.error("Error: no properties found when loading properties file: " + propertiesFileName); } String key = null; String val = null; for (Iterator i = rembrandtProperties.keySet().iterator(); i.hasNext(); ) { key = (String) i.next(); val = rembrandtProperties.getProperty(key); System.setProperty(key, val); } //Initialize db // PersistenceBrokerFactoryIF pbf = PersistenceBrokerFactoryFactory.instance(); String dbalias = System.getProperty("gov.nih.nci.rembrandt.dbalias"); String username = System.getProperty("gov.nih.nci.rembrandt.db.username"); String password = System.getProperty("gov.nih.nci.rembrandt.db.password"); String jcdalias = System.getProperty("gov.nih.nci.rembrandt.jcd_alias"); if 
(jcdalias != null && jcdalias.length() > 0){ MetadataManager mm = MetadataManager.getInstance(); ConnectionRepository connectionRepository = mm.connectionRepository(); PBKey pbKey = connectionRepository.getStandardPBKeyForJcdAlias(jcdalias); JdbcConnectionDescriptor jdbcConnectionDescriptor = connectionRepository .getDescriptor(pbKey); if (dbalias != null && dbalias.length() > 0) jdbcConnectionDescriptor.setDbAlias(dbalias); if (username != null && username.length() > 0) jdbcConnectionDescriptor.setUserName(username); if (password != null && password.length() > 0) jdbcConnectionDescriptor.setPassWord(password); } //end of initialize String jmsProviderURL = System.getProperty("gov.nih.nci.rembrandt.jms.jboss_url"); String jndiFactoryName = System.getProperty("gov.nih.nci.rembrandt.jms.factory_jndi"); String requestQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_request_queue"); String responseQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_response_queue"); analysisServerClientManager.setJMSparameters(jmsProviderURL, jndiFactoryName,requestQueueName, responseQueueName); // ANALYSIS SERVER SET the CACHE and GeneExpressionAnnotationService analysisServerClientManager.setCache(ApplicationFactory.getBusinessTierCache()); // analysisServerClientManager.setGeneExprAnnotationService(); analysisServerClientManager.establishQueueConnection(); } catch (NamingException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch (JMSException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch(Throwable t) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(t.getMessage()); logger.error(t); } } }
true
true
public static void init() { logger.debug("Loading Application Resources"); labelProps = PropertyLoader.loadProperties(RembrandtConstants.APPLICATION_RESOURCES); messagingProps = PropertyLoader.loadProperties(RembrandtConstants.JMS_PROPERTIES); try { logger.debug("Bean to Attribute Mappings"); InputStream inStream = QueryHandler.class.getResourceAsStream(RembrandtConstants.DE_BEAN_FILE_NAME); assert true:inStream != null; DOMParser p = new DOMParser(); p.parse(new InputSource(inStream)); doc = p.getDocument(); assert(doc != null); logger.debug("Begining DomainElement to Bean Mapping"); mappings = new DEBeanMappingsHandler().populate(doc); logger.debug("DomainElement to Bean Mapping is completed"); QueryHandler.init(); } catch(Throwable t) { logger.error(new IllegalStateException("Error parsing deToBeanAttrMappings.xml file: Exception: " + t)); } //Start the JMS Lister try { @SuppressWarnings("unused") AnalysisServerClientManager analysisServerClientManager = AnalysisServerClientManager.getInstance(); //set the AnalysisServerClientManager properties //Get the jms properties for this application //analysisServerClientManager.setJMSparameters(); //Get the application properties from the properties file String propertiesFileName = System.getProperty("gov.nih.nci.rembrandt.propertiesFile"); //Load the the application properties and set them as system properties Properties rembrandtProperties = new Properties(); logger.info("Attempting to load application system properties from file: " + propertiesFileName); FileInputStream in = new FileInputStream(propertiesFileName); rembrandtProperties.load(in); if (rembrandtProperties.isEmpty()) { logger.error("Error: no properties found when loading properties file: " + propertiesFileName); } String key = null; String val = null; for (Iterator i = rembrandtProperties.keySet().iterator(); i.hasNext(); ) { key = (String) i.next(); val = rembrandtProperties.getProperty(key); System.setProperty(key, val); } //Initialize db // PersistenceBrokerFactoryIF pbf = PersistenceBrokerFactoryFactory.instance(); String dbalias = System.getProperty("gov.nih.nci.rembrandt.dbalias"); String username = System.getProperty("gov.nih.nci.rembrandt.db.username"); String password = System.getProperty("gov.nih.nci.rembrandt.db.password"); String jcdalias = System.getProperty("gov.nih.nci.rembrandt.jcd_alias"); if (jcdalias != null && jcdalias.length() > 0){ MetadataManager mm = MetadataManager.getInstance(); ConnectionRepository connectionRepository = mm.connectionRepository(); PBKey pbKey = connectionRepository.getStandardPBKeyForJcdAlias(jcdalias); JdbcConnectionDescriptor jdbcConnectionDescriptor = connectionRepository .getDescriptor(pbKey); if (dbalias != null && dbalias.length() > 0) jdbcConnectionDescriptor.setDbAlias(dbalias); if (username != null && username.length() > 0) jdbcConnectionDescriptor.setUserName(username); if (password != null && password.length() > 0) jdbcConnectionDescriptor.setPassWord(password); } //end of initialize String jmsProviderURL = System.getProperty("gov.nih.nci.rembrandt.jms.jboss_url"); String jndiFactoryName = System.getProperty("gov.nih.nci.rembrandt.jms.factory_jndi"); String requestQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_request_queue"); String responseQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_response_queue"); analysisServerClientManager.setJMSparameters(jmsProviderURL, jndiFactoryName,requestQueueName, responseQueueName); // ANALYSIS SERVER SET the CACHE and GeneExpressionAnnotationService 
analysisServerClientManager.setCache(ApplicationFactory.getBusinessTierCache()); // analysisServerClientManager.setGeneExprAnnotationService(); analysisServerClientManager.establishQueueConnection(); } catch (NamingException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch (JMSException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch(Throwable t) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(t.getMessage()); logger.error(t); } }
public static void init() { logger.debug("Loading Application Resources"); labelProps = PropertyLoader.loadProperties(RembrandtConstants.APPLICATION_RESOURCES); messagingProps = PropertyLoader.loadProperties(RembrandtConstants.JMS_PROPERTIES); try { logger.debug("Bean to Attribute Mappings"); InputStream inStream = QueryHandler.class.getResourceAsStream(RembrandtConstants.DE_BEAN_FILE_NAME); assert true:inStream != null; DOMParser p = new DOMParser(); p.parse(new InputSource(inStream)); doc = p.getDocument(); assert(doc != null); logger.debug("Begining DomainElement to Bean Mapping"); mappings = new DEBeanMappingsHandler().populate(doc); logger.debug("DomainElement to Bean Mapping is completed"); QueryHandler.init(); } catch(Throwable t) { logger.error(new IllegalStateException("Error parsing deToBeanAttrMappings.xml file: Exception: " + t)); } //Start the JMS Lister try { @SuppressWarnings("unused") AnalysisServerClientManager analysisServerClientManager = AnalysisServerClientManager.getInstance(); //set the AnalysisServerClientManager properties //Get the jms properties for this application //analysisServerClientManager.setJMSparameters(); //Get the application properties from the properties file String propertiesFileName = System.getProperty("gov.nih.nci.rembrandt.properties"); //Load the the application properties and set them as system properties Properties rembrandtProperties = new Properties(); logger.info("Attempting to load application system properties from file: " + propertiesFileName); FileInputStream in = new FileInputStream(propertiesFileName); rembrandtProperties.load(in); if (rembrandtProperties.isEmpty()) { logger.error("Error: no properties found when loading properties file: " + propertiesFileName); } String key = null; String val = null; for (Iterator i = rembrandtProperties.keySet().iterator(); i.hasNext(); ) { key = (String) i.next(); val = rembrandtProperties.getProperty(key); System.setProperty(key, val); } //Initialize db // PersistenceBrokerFactoryIF pbf = PersistenceBrokerFactoryFactory.instance(); String dbalias = System.getProperty("gov.nih.nci.rembrandt.dbalias"); String username = System.getProperty("gov.nih.nci.rembrandt.db.username"); String password = System.getProperty("gov.nih.nci.rembrandt.db.password"); String jcdalias = System.getProperty("gov.nih.nci.rembrandt.jcd_alias"); if (jcdalias != null && jcdalias.length() > 0){ MetadataManager mm = MetadataManager.getInstance(); ConnectionRepository connectionRepository = mm.connectionRepository(); PBKey pbKey = connectionRepository.getStandardPBKeyForJcdAlias(jcdalias); JdbcConnectionDescriptor jdbcConnectionDescriptor = connectionRepository .getDescriptor(pbKey); if (dbalias != null && dbalias.length() > 0) jdbcConnectionDescriptor.setDbAlias(dbalias); if (username != null && username.length() > 0) jdbcConnectionDescriptor.setUserName(username); if (password != null && password.length() > 0) jdbcConnectionDescriptor.setPassWord(password); } //end of initialize String jmsProviderURL = System.getProperty("gov.nih.nci.rembrandt.jms.jboss_url"); String jndiFactoryName = System.getProperty("gov.nih.nci.rembrandt.jms.factory_jndi"); String requestQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_request_queue"); String responseQueueName = System.getProperty("gov.nih.nci.rembrandt.jms.analysis_response_queue"); analysisServerClientManager.setJMSparameters(jmsProviderURL, jndiFactoryName,requestQueueName, responseQueueName); // ANALYSIS SERVER SET the CACHE and GeneExpressionAnnotationService 
analysisServerClientManager.setCache(ApplicationFactory.getBusinessTierCache()); // analysisServerClientManager.setGeneExprAnnotationService(); analysisServerClientManager.establishQueueConnection(); } catch (NamingException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch (JMSException e) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(e.getMessage()); logger.error(e); } catch(Throwable t) { logger.error(new IllegalStateException("Error getting an instance of AnalysisServerClientManager" )); logger.error(t.getMessage()); logger.error(t); } }
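The central pattern in the fix above is copying a .properties file wholesale into the JVM's system properties. Below is a minimal, self-contained sketch of that pattern; the class and method names are illustrative and not part of the Rembrandt codebase. Unlike init(), which never closes its FileInputStream, the sketch releases the stream, and it uses stringPropertyNames() in place of the raw keySet() iterator.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class SystemPropertiesLoader {
        /**
         * Copies every entry of a .properties file into the JVM's system
         * properties, mirroring the pattern used by init() above.
         */
        public static void load(String fileName) throws IOException {
            Properties props = new Properties();
            try (FileInputStream in = new FileInputStream(fileName)) {
                props.load(in); // parses key=value pairs from the file
            }
            for (String key : props.stringPropertyNames()) {
                System.setProperty(key, props.getProperty(key));
            }
        }
    }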
diff --git a/src/FE_SRC_COMMON/com/ForgeEssentials/chat/Chat.java b/src/FE_SRC_COMMON/com/ForgeEssentials/chat/Chat.java index d5cc51b71..87d6decfa 100644 --- a/src/FE_SRC_COMMON/com/ForgeEssentials/chat/Chat.java +++ b/src/FE_SRC_COMMON/com/ForgeEssentials/chat/Chat.java @@ -1,144 +1,158 @@ package com.ForgeEssentials.chat; import java.util.ArrayList; import java.util.List; import net.minecraft.network.packet.NetHandler; import net.minecraft.network.packet.Packet3Chat; import net.minecraftforge.event.ForgeSubscribe; import net.minecraftforge.event.ServerChatEvent; import com.ForgeEssentials.permission.Group; import com.ForgeEssentials.permission.GroupManager; import com.ForgeEssentials.permission.PermissionsAPI; import com.ForgeEssentials.permission.PlayerManager; import com.ForgeEssentials.permission.PlayerPermData; import com.ForgeEssentials.permission.Zone; import com.ForgeEssentials.permission.ZoneManager; import com.ForgeEssentials.permission.query.PermQueryPlayer; import com.ForgeEssentials.util.FEChatFormatCodes; import com.ForgeEssentials.util.OutputHandler; import com.ForgeEssentials.util.AreaSelector.Point; import cpw.mods.fml.common.network.IChatListener; public class Chat implements IChatListener { public static List<String> bannedWords = new ArrayList<String>(); public static boolean censor; @ForgeSubscribe public void chatEvent(ServerChatEvent event) { /* * Mute? */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getBoolean("mute")) { event.setCanceled(true); event.player.sendChatToPlayer("You are muted."); return; } String message = event.message; String nickname = event.username; if(censor) { for(String word : bannedWords) message = replaceAllIgnoreCase(message, word, "###"); } /* * Nickname */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).hasKey("nickname")) { nickname = event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getString("nickname"); } /* * Colorize! */ if(event.message.contains("&")) { if(PermissionsAPI.checkPermAllowed(new PermQueryPlayer(event.player, "ForgeEssentials.chat.usecolor"))) { message = event.message.replaceAll("&", FEChatFormatCodes.CODE.toString()); } } String prefix = ""; String suffix = ""; String rank = ""; String zoneID = ""; try { Zone zone = ZoneManager.getWhichZoneIn(new Point(event.player), event.player.worldObj); PlayerPermData playerData = PlayerManager.getPlayerData(zone.getZoneID(), event.username); prefix = playerData.prefix; suffix = playerData.suffix; ArrayList<Group> groups = GroupManager.getApplicableGroups(event.player, false); if (groups.isEmpty()) { rank = GroupManager.DEFAULT.name; prefix = GroupManager.DEFAULT.prefix + prefix; suffix = suffix + GroupManager.DEFAULT.suffix; } else { rank = groups.get(groups.size() - 1).name; for (Group group : groups) { prefix = group.prefix + prefix; suffix = suffix + group.suffix; } } } catch (Exception e) { e.printStackTrace(); } OutputHandler.debug("TESTING!!!!! prefix: " + prefix + " suffix: " + suffix); String format = ModuleChat.conf.chatFormat; format = ModuleChat.conf.chatFormat == null || ModuleChat.conf.chatFormat == "" ? 
"<%username>%message" : ModuleChat.conf.chatFormat; - event.line = format.replaceAll("%health", "" + event.player.getHealth()).replaceAll("%reset", FEChatFormatCodes.RESET + "").replaceAll("%red", FEChatFormatCodes.RED + "").replaceAll("%yellow", FEChatFormatCodes.YELLOW + "").replaceAll("%black", FEChatFormatCodes.BLACK + "").replaceAll("%darkblue", FEChatFormatCodes.DARKBLUE + "").replaceAll("%darkgreen", FEChatFormatCodes.DARKGREEN + "").replaceAll("%darkaqua", FEChatFormatCodes.DARKAQUA + "").replaceAll("%darkred", FEChatFormatCodes.DARKRED + "").replaceAll("%purple", FEChatFormatCodes.PURPLE + "").replaceAll("%gold", FEChatFormatCodes.GOLD + "").replaceAll("%grey", FEChatFormatCodes.GREY + "").replaceAll("%darkgrey", FEChatFormatCodes.DARKGREY + "").replaceAll("%indigo", FEChatFormatCodes.INDIGO + "").replaceAll("%green", FEChatFormatCodes.GREEN + "").replaceAll("%aqua", FEChatFormatCodes.AQUA + "").replaceAll("%pink", FEChatFormatCodes.PINK + "").replaceAll("%white", FEChatFormatCodes.WHITE + "").replaceAll("%random", FEChatFormatCodes.RANDOM + "").replaceAll("%bold", FEChatFormatCodes.BOLD + "").replaceAll("%strike", FEChatFormatCodes.STRIKE + "").replaceAll("%underline", FEChatFormatCodes.UNDERLINE + "").replaceAll("%italics", FEChatFormatCodes.ITALICS + "").replaceAll("%message", message).replaceAll("%username", nickname).replaceAll("%rank", rank).replaceAll("%zone", zoneID).replace("%prefix", prefix).replaceAll("%suffix", suffix); + event.line = format.replaceAll("%health", "" + event.player.getHealth()) + .replaceAll("%reset", FEChatFormatCodes.RESET + "").replaceAll("%red", FEChatFormatCodes.RED + "") + .replaceAll("%yellow", FEChatFormatCodes.YELLOW + "").replaceAll("%black", FEChatFormatCodes.BLACK + "") + .replaceAll("%darkblue", FEChatFormatCodes.DARKBLUE + "").replaceAll("%darkgreen", FEChatFormatCodes.DARKGREEN + "") + .replaceAll("%darkaqua", FEChatFormatCodes.DARKAQUA + "").replaceAll("%darkred", FEChatFormatCodes.DARKRED + "") + .replaceAll("%purple", FEChatFormatCodes.PURPLE + "").replaceAll("%gold", FEChatFormatCodes.GOLD + "") + .replaceAll("%grey", FEChatFormatCodes.GREY + "").replaceAll("%darkgrey", FEChatFormatCodes.DARKGREY + "") + .replaceAll("%indigo", FEChatFormatCodes.INDIGO + "").replaceAll("%green", FEChatFormatCodes.GREEN + "") + .replaceAll("%aqua", FEChatFormatCodes.AQUA + "").replaceAll("%pink", FEChatFormatCodes.PINK + "") + .replaceAll("%white", FEChatFormatCodes.WHITE + "").replaceAll("%random", FEChatFormatCodes.RANDOM + "") + .replaceAll("%bold", FEChatFormatCodes.BOLD + "").replaceAll("%strike", FEChatFormatCodes.STRIKE + "") + .replaceAll("%underline", FEChatFormatCodes.UNDERLINE + "").replaceAll("%italics", FEChatFormatCodes.ITALICS + "") + .replace("%message", message).replaceAll("%username", nickname) + .replaceAll("%rank", rank).replaceAll("%zone", zoneID) + .replace("%prefix", prefix).replaceAll("%suffix", suffix); } @Override public Packet3Chat serverChat(NetHandler handler, Packet3Chat message) { return message; } @Override public Packet3Chat clientChat(NetHandler handler, Packet3Chat message) { return message; } private String replaceAllIgnoreCase(String text, String search, String replacement) { if(search.equals(replacement)) return text; StringBuffer buffer = new StringBuffer(text); String lowerSearch = search.toLowerCase(); int i = 0; int prev = 0; while((i = buffer.toString().toLowerCase().indexOf(lowerSearch, prev)) > -1) { buffer.replace(i, i+search.length(), replacement); prev = i+replacement.length(); } return buffer.toString(); 
} }
true
true
public void chatEvent(ServerChatEvent event) { /* * Mute? */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getBoolean("mute")) { event.setCanceled(true); event.player.sendChatToPlayer("You are muted."); return; } String message = event.message; String nickname = event.username; if(censor) { for(String word : bannedWords) message = replaceAllIgnoreCase(message, word, "###"); } /* * Nickname */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).hasKey("nickname")) { nickname = event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getString("nickname"); } /* * Colorize! */ if(event.message.contains("&")) { if(PermissionsAPI.checkPermAllowed(new PermQueryPlayer(event.player, "ForgeEssentials.chat.usecolor"))) { message = event.message.replaceAll("&", FEChatFormatCodes.CODE.toString()); } } String prefix = ""; String suffix = ""; String rank = ""; String zoneID = ""; try { Zone zone = ZoneManager.getWhichZoneIn(new Point(event.player), event.player.worldObj); PlayerPermData playerData = PlayerManager.getPlayerData(zone.getZoneID(), event.username); prefix = playerData.prefix; suffix = playerData.suffix; ArrayList<Group> groups = GroupManager.getApplicableGroups(event.player, false); if (groups.isEmpty()) { rank = GroupManager.DEFAULT.name; prefix = GroupManager.DEFAULT.prefix + prefix; suffix = suffix + GroupManager.DEFAULT.suffix; } else { rank = groups.get(groups.size() - 1).name; for (Group group : groups) { prefix = group.prefix + prefix; suffix = suffix + group.suffix; } } } catch (Exception e) { e.printStackTrace(); } OutputHandler.debug("TESTING!!!!! prefix: " + prefix + " suffix: " + suffix); String format = ModuleChat.conf.chatFormat; format = ModuleChat.conf.chatFormat == null || ModuleChat.conf.chatFormat == "" ? "<%username>%message" : ModuleChat.conf.chatFormat; event.line = format.replaceAll("%health", "" + event.player.getHealth()).replaceAll("%reset", FEChatFormatCodes.RESET + "").replaceAll("%red", FEChatFormatCodes.RED + "").replaceAll("%yellow", FEChatFormatCodes.YELLOW + "").replaceAll("%black", FEChatFormatCodes.BLACK + "").replaceAll("%darkblue", FEChatFormatCodes.DARKBLUE + "").replaceAll("%darkgreen", FEChatFormatCodes.DARKGREEN + "").replaceAll("%darkaqua", FEChatFormatCodes.DARKAQUA + "").replaceAll("%darkred", FEChatFormatCodes.DARKRED + "").replaceAll("%purple", FEChatFormatCodes.PURPLE + "").replaceAll("%gold", FEChatFormatCodes.GOLD + "").replaceAll("%grey", FEChatFormatCodes.GREY + "").replaceAll("%darkgrey", FEChatFormatCodes.DARKGREY + "").replaceAll("%indigo", FEChatFormatCodes.INDIGO + "").replaceAll("%green", FEChatFormatCodes.GREEN + "").replaceAll("%aqua", FEChatFormatCodes.AQUA + "").replaceAll("%pink", FEChatFormatCodes.PINK + "").replaceAll("%white", FEChatFormatCodes.WHITE + "").replaceAll("%random", FEChatFormatCodes.RANDOM + "").replaceAll("%bold", FEChatFormatCodes.BOLD + "").replaceAll("%strike", FEChatFormatCodes.STRIKE + "").replaceAll("%underline", FEChatFormatCodes.UNDERLINE + "").replaceAll("%italics", FEChatFormatCodes.ITALICS + "").replaceAll("%message", message).replaceAll("%username", nickname).replaceAll("%rank", rank).replaceAll("%zone", zoneID).replace("%prefix", prefix).replaceAll("%suffix", suffix); }
public void chatEvent(ServerChatEvent event) { /* * Mute? */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getBoolean("mute")) { event.setCanceled(true); event.player.sendChatToPlayer("You are muted."); return; } String message = event.message; String nickname = event.username; if(censor) { for(String word : bannedWords) message = replaceAllIgnoreCase(message, word, "###"); } /* * Nickname */ if(event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).hasKey("nickname")) { nickname = event.player.getEntityData().getCompoundTag(event.player.PERSISTED_NBT_TAG).getString("nickname"); } /* * Colorize! */ if(event.message.contains("&")) { if(PermissionsAPI.checkPermAllowed(new PermQueryPlayer(event.player, "ForgeEssentials.chat.usecolor"))) { message = event.message.replaceAll("&", FEChatFormatCodes.CODE.toString()); } } String prefix = ""; String suffix = ""; String rank = ""; String zoneID = ""; try { Zone zone = ZoneManager.getWhichZoneIn(new Point(event.player), event.player.worldObj); PlayerPermData playerData = PlayerManager.getPlayerData(zone.getZoneID(), event.username); prefix = playerData.prefix; suffix = playerData.suffix; ArrayList<Group> groups = GroupManager.getApplicableGroups(event.player, false); if (groups.isEmpty()) { rank = GroupManager.DEFAULT.name; prefix = GroupManager.DEFAULT.prefix + prefix; suffix = suffix + GroupManager.DEFAULT.suffix; } else { rank = groups.get(groups.size() - 1).name; for (Group group : groups) { prefix = group.prefix + prefix; suffix = suffix + group.suffix; } } } catch (Exception e) { e.printStackTrace(); } OutputHandler.debug("TESTING!!!!! prefix: " + prefix + " suffix: " + suffix); String format = ModuleChat.conf.chatFormat; format = ModuleChat.conf.chatFormat == null || ModuleChat.conf.chatFormat == "" ? "<%username>%message" : ModuleChat.conf.chatFormat; event.line = format.replaceAll("%health", "" + event.player.getHealth()) .replaceAll("%reset", FEChatFormatCodes.RESET + "").replaceAll("%red", FEChatFormatCodes.RED + "") .replaceAll("%yellow", FEChatFormatCodes.YELLOW + "").replaceAll("%black", FEChatFormatCodes.BLACK + "") .replaceAll("%darkblue", FEChatFormatCodes.DARKBLUE + "").replaceAll("%darkgreen", FEChatFormatCodes.DARKGREEN + "") .replaceAll("%darkaqua", FEChatFormatCodes.DARKAQUA + "").replaceAll("%darkred", FEChatFormatCodes.DARKRED + "") .replaceAll("%purple", FEChatFormatCodes.PURPLE + "").replaceAll("%gold", FEChatFormatCodes.GOLD + "") .replaceAll("%grey", FEChatFormatCodes.GREY + "").replaceAll("%darkgrey", FEChatFormatCodes.DARKGREY + "") .replaceAll("%indigo", FEChatFormatCodes.INDIGO + "").replaceAll("%green", FEChatFormatCodes.GREEN + "") .replaceAll("%aqua", FEChatFormatCodes.AQUA + "").replaceAll("%pink", FEChatFormatCodes.PINK + "") .replaceAll("%white", FEChatFormatCodes.WHITE + "").replaceAll("%random", FEChatFormatCodes.RANDOM + "") .replaceAll("%bold", FEChatFormatCodes.BOLD + "").replaceAll("%strike", FEChatFormatCodes.STRIKE + "") .replaceAll("%underline", FEChatFormatCodes.UNDERLINE + "").replaceAll("%italics", FEChatFormatCodes.ITALICS + "") .replace("%message", message).replaceAll("%username", nickname) .replaceAll("%rank", rank).replaceAll("%zone", zoneID) .replace("%prefix", prefix).replaceAll("%suffix", suffix); }
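The substantive change in this fix is easy to miss under the reformatting: the %message substitution moves from replaceAll to replace, matching %prefix, which already used replace. String.replaceAll compiles its first argument as a regular expression and treats '$' and '\' in the replacement as group references, so routing player-controlled text through it can throw at runtime or corrupt the chat line. A minimal, standalone demonstration in plain Java, with no ForgeEssentials types:

    public class ReplaceAllPitfall {
        public static void main(String[] args) {
            String format = "<%username> %message";
            String message = "price is $1"; // "$1" is a group reference in a regex replacement

            try {
                // Pattern "%message" has no capture groups, so the "$1"
                // in the replacement throws at runtime.
                System.out.println(format.replaceAll("%message", message));
            } catch (RuntimeException e) {
                System.out.println("replaceAll failed: " + e);
            }

            // replace() substitutes literally and is safe for arbitrary text.
            System.out.println(format.replace("%message", message));
        }
    }

Note that %suffix still goes through replaceAll, which leaves the same hazard open; java.util.regex.Matcher.quoteReplacement() escapes a replacement safely when replaceAll must be kept. Separately, ModuleChat.conf.chatFormat == "" compares references rather than contents; isEmpty() is the reliable emptiness test.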
diff --git a/plugin/src/main/java/hudson/plugins/swarm/PluginImpl.java b/plugin/src/main/java/hudson/plugins/swarm/PluginImpl.java index 416ad42..a68abbf 100644 --- a/plugin/src/main/java/hudson/plugins/swarm/PluginImpl.java +++ b/plugin/src/main/java/hudson/plugins/swarm/PluginImpl.java @@ -1,60 +1,60 @@ package hudson.plugins.swarm; import hudson.Plugin; import hudson.Util; import hudson.model.Descriptor.FormException; import hudson.model.Hudson; import hudson.model.Node; import hudson.security.ACL; import org.acegisecurity.context.SecurityContextHolder; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; import org.kohsuke.stapler.StaplerResponse; import static javax.servlet.http.HttpServletResponse.SC_FORBIDDEN; import java.io.IOException; /** * Exposes an entry point to add a new swarm slave. * * @author Kohsuke Kawaguchi */ public class PluginImpl extends Plugin { /** * Adds a new swarm slave. */ public void doCreateSlave(StaplerRequest req, StaplerResponse rsp, @QueryParameter String name, @QueryParameter String description, @QueryParameter int executors, @QueryParameter String remoteFsRoot, @QueryParameter String labels, @QueryParameter String secret) throws IOException, FormException { // only allow nearby nodes to connect if(!UDPFragmentImpl.all().get(UDPFragmentImpl.class).secret.toString().equals(secret)) { rsp.setStatus(SC_FORBIDDEN); return; } // this is used by swarm clients that otherwise have no access to the system, // so bypass the regular security check, and only rely on secret. SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM); try { final Hudson hudson = Hudson.getInstance(); // try to make the name unique. Swarm clients are often repliated VMs, and they may have the same name. if(hudson.getNode(name)!=null) name = name+'-'+req.getRemoteAddr(); - SwarmSlave slave = new SwarmSlave(name, "Swam slave from "+req.getRemoteHost()+" : "+description, + SwarmSlave slave = new SwarmSlave(name, "Swarm slave from "+req.getRemoteHost()+" : "+description, remoteFsRoot, String.valueOf(executors), "swarm "+Util.fixNull(labels)); // if this still results in a dupliate, so be it synchronized (hudson) { Node n = hudson.getNode(name); if(n!=null) hudson.removeNode(n); hudson.addNode(slave); } } catch (FormException e) { e.printStackTrace(); } finally { SecurityContextHolder.clearContext(); } } }
true
true
public void doCreateSlave(StaplerRequest req, StaplerResponse rsp, @QueryParameter String name, @QueryParameter String description, @QueryParameter int executors, @QueryParameter String remoteFsRoot, @QueryParameter String labels, @QueryParameter String secret) throws IOException, FormException { // only allow nearby nodes to connect if(!UDPFragmentImpl.all().get(UDPFragmentImpl.class).secret.toString().equals(secret)) { rsp.setStatus(SC_FORBIDDEN); return; } // this is used by swarm clients that otherwise have no access to the system, // so bypass the regular security check, and only rely on secret. SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM); try { final Hudson hudson = Hudson.getInstance(); // try to make the name unique. Swarm clients are often replicated VMs, and they may have the same name. if(hudson.getNode(name)!=null) name = name+'-'+req.getRemoteAddr(); SwarmSlave slave = new SwarmSlave(name, "Swam slave from "+req.getRemoteHost()+" : "+description, remoteFsRoot, String.valueOf(executors), "swarm "+Util.fixNull(labels)); // if this still results in a duplicate, so be it synchronized (hudson) { Node n = hudson.getNode(name); if(n!=null) hudson.removeNode(n); hudson.addNode(slave); } } catch (FormException e) { e.printStackTrace(); } finally { SecurityContextHolder.clearContext(); } }
public void doCreateSlave(StaplerRequest req, StaplerResponse rsp, @QueryParameter String name, @QueryParameter String description, @QueryParameter int executors, @QueryParameter String remoteFsRoot, @QueryParameter String labels, @QueryParameter String secret) throws IOException, FormException { // only allow nearby nodes to connect if(!UDPFragmentImpl.all().get(UDPFragmentImpl.class).secret.toString().equals(secret)) { rsp.setStatus(SC_FORBIDDEN); return; } // this is used by swarm clients that otherwise have no access to the system, // so bypass the regular security check, and only rely on secret. SecurityContextHolder.getContext().setAuthentication(ACL.SYSTEM); try { final Hudson hudson = Hudson.getInstance(); // try to make the name unique. Swarm clients are often replicated VMs, and they may have the same name. if(hudson.getNode(name)!=null) name = name+'-'+req.getRemoteAddr(); SwarmSlave slave = new SwarmSlave(name, "Swarm slave from "+req.getRemoteHost()+" : "+description, remoteFsRoot, String.valueOf(executors), "swarm "+Util.fixNull(labels)); // if this still results in a duplicate, so be it synchronized (hudson) { Node n = hudson.getNode(name); if(n!=null) hudson.removeNode(n); hudson.addNode(slave); } } catch (FormException e) { e.printStackTrace(); } finally { SecurityContextHolder.clearContext(); } }
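Beyond the "Swam" to "Swarm" typo that the diff corrects, the synchronized block in this method carries the real design weight: the remove-then-add must be atomic, or two clients registering the same name can interleave between the two calls. A generic sketch of that replace-under-lock pattern follows; the registry type is illustrative, not Hudson's API.

    import java.util.HashMap;
    import java.util.Map;

    public class NodeRegistrySketch {
        private final Map<String, Object> nodes = new HashMap<String, Object>();

        /**
         * Replace-or-add under a single lock so a concurrent registration
         * cannot slip in between the remove and the add.
         */
        public void replaceNode(String name, Object node) {
            synchronized (nodes) {
                nodes.remove(name); // harmless if the name is absent
                nodes.put(name, node);
            }
        }
    }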
diff --git a/neddybenchmark/src/main/java/com/fbudassi/neddy/benchmark/benchmarks/WebsocketBenchmark.java b/neddybenchmark/src/main/java/com/fbudassi/neddy/benchmark/benchmarks/WebsocketBenchmark.java index b5ce57f..c86521c 100644 --- a/neddybenchmark/src/main/java/com/fbudassi/neddy/benchmark/benchmarks/WebsocketBenchmark.java +++ b/neddybenchmark/src/main/java/com/fbudassi/neddy/benchmark/benchmarks/WebsocketBenchmark.java @@ -1,219 +1,219 @@ package com.fbudassi.neddy.benchmark.benchmarks; import com.fbudassi.neddy.benchmark.NeddyBenchmark; import com.fbudassi.neddy.benchmark.bean.ListenerActionBean; import com.fbudassi.neddy.benchmark.bean.ListenerActionBean.ListenerActionEnum; import com.fbudassi.neddy.benchmark.bean.NeddyBean; import com.fbudassi.neddy.benchmark.bean.NeddyBean.ReasonEnum; import com.fbudassi.neddy.benchmark.config.Config; import com.fbudassi.neddy.benchmark.pipeline.WebsocketPipelineFactory; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import java.net.InetSocketAddress; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import java.util.Random; import org.apache.commons.lang3.StringUtils; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.handler.codec.http.websocketx.TextWebSocketFrame; import org.jboss.netty.handler.codec.http.websocketx.WebSocketClientHandshaker; import org.jboss.netty.handler.codec.http.websocketx.WebSocketClientHandshakerFactory; import org.jboss.netty.handler.codec.http.websocketx.WebSocketVersion; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Websocket benchmark. Please go to the properties file for configurable * parameters. * * @author fbudassi */ public class WebsocketBenchmark implements Benchmark { private static final Logger logger = LoggerFactory.getLogger(WebsocketBenchmark.class); private static Gson gson = new Gson(); private static Random random = new Random(); // Configuration constants. private static final int SERVER_PORT = Config.getIntValue(Config.KEY_SERVER_PORT); private static final String SERVER_ADDRESS = Config.getValue(Config.KEY_SERVER_ADDRESS); private static final int NUMCATEGORIES = Config.getIntValue(Config.KEY_LISTENER_NUMCATEGORIES); private static final String RESOURCE_LISTENER = Config.getValue(Config.KEY_RESOURCE_LISTENER); // Client configuration variables. private static final int NUMADDRESSES = Config.getIntValue(Config.KEY_NUMADDRESSES); private static final int NUMPORTS = Config.getIntValue(Config.KEY_NUMPORTS); private static final int CLIENT_PORTSTART = Config.getIntValue(Config.KEY_CLIENT_PORTSTART); private static final String CLIENT_BASEADDRESS = Config.getValue(Config.KEY_CLIENT_BASEADDRESS); // URI where to connect the websocket. private static URI uri; // Statistic variables. private static int totalConnections = NUMADDRESSES * NUMPORTS; private static int openConnections = 0; /** * Websocket Benchmark constructor. */ public WebsocketBenchmark() throws URISyntaxException { // URL of the server, with the resource path uri = new URI("ws://" + SERVER_ADDRESS + ":" + SERVER_PORT + "/" + RESOURCE_LISTENER); } /** * Executes the benchmark. */ @Override public void execute() throws Exception { logger.info("Trying to generate {} listeners to the server", totalConnections); // Get the first three octets by one side and the last one by the other side. 
String clientIpBase = CLIENT_BASEADDRESS.substring(0, CLIENT_BASEADDRESS.lastIndexOf(".") + 1); byte clientIpLastOctet = Byte.parseByte(CLIENT_BASEADDRESS.substring( CLIENT_BASEADDRESS.lastIndexOf(".") + 1, CLIENT_BASEADDRESS.length())); //IP addresses loop int lastPort = CLIENT_PORTSTART + NUMPORTS; for (int i = 0; i < NUMADDRESSES; i++) { // Build client ip. String clientIp = clientIpBase + clientIpLastOctet; //Ports loop for (int port = CLIENT_PORTSTART; port <= lastPort; port++) { // Open a Websocket channel to the server. ChannelFuture future = NeddyBenchmark.getBootstrap().connect( new InetSocketAddress(uri.getHost(), uri.getPort()), new InetSocketAddress(clientIp, port)); future.syncUninterruptibly(); Channel ch = future.getChannel(); NeddyBenchmark.getAllChannels().add(ch); // Start with the handshake step. Connect with V13 (RFC 6455 aka HyBi-17). WebSocketClientHandshaker handshaker = new WebSocketClientHandshakerFactory().newHandshaker( getUri(), WebSocketVersion.V13, null, false, null); ch.setAttachment(handshaker); handshaker.handshake(ch).syncUninterruptibly(); // Request the list of categories. - boolean handshakeNotReady = true; - while (handshakeNotReady) { + boolean handshakeReady = false; + while (!handshakeReady) { if (handshaker.isHandshakeComplete()) { - handshakeNotReady = false; + handshakeReady = true; WebsocketBenchmark.getCategories(ch); } } // Increment open connections variable and print the number of listeners once in a while. openConnections++; if ((((double) openConnections * 100 / totalConnections) % 1) == 0) { logger.info("There are {} listeners so far.", openConnections); } } // Increment last octet. clientIpLastOctet++; } } /** * Returns the pipeline for Websocket benchmark. * * @return */ @Override public ChannelPipelineFactory getPipeline() { return new WebsocketPipelineFactory(); } /** * Configure the Netty bootstrap for the best behavior in this benchmark. * * @param bootstrap */ @Override public void configureBootstrap(ClientBootstrap bootstrap) { // Nothing is necessary to be done for the Websocket benchmark. } /** * @return the uri */ public static URI getUri() { return uri; } /** * Request the categories to the server. * * @param ch */ public static void getCategories(Channel ch) { ListenerActionBean listenerActionBean = new ListenerActionBean(); listenerActionBean.setAction(ListenerActionEnum.GET_CATEGORIES.toString()); ch.write(new TextWebSocketFrame(gson.toJson(listenerActionBean))); } /** * Subscribes the channel to a number of categories in the list, randomly * choosing among them. * * @param ch * @param categories */ private static void subscribeToCategories(Channel ch, List<String> categories) { for (int n = 0; n < NUMCATEGORIES; n++) { ListenerActionBean listenerActionBean = new ListenerActionBean(); listenerActionBean.setAction(ListenerActionEnum.SUBSCRIBE.toString()); listenerActionBean.setCategory(categories.get(random.nextInt(categories.size()))); ch.write(new TextWebSocketFrame(gson.toJson(listenerActionBean))); } } /** * It processes the frame content of a Websocket. * * @param frameContent */ public static void processFrame(Channel ch, String frameContent) { try { // Check if frame payload is empty. if (StringUtils.isBlank(frameContent)) { logger.error("Response payload is not valid: {}", frameContent); return; } // Deserialize payload NeddyBean neddyBean = gson.fromJson(frameContent, NeddyBean.class); // Get valid message reason. 
ReasonEnum reason; try { if (StringUtils.isBlank(neddyBean.getReason())) { logger.error("Request action is blank."); return; } reason = ReasonEnum.valueOf(neddyBean.getReason()); } catch (IllegalArgumentException iaex) { // Invalid action. logger.error("Invalid reason received."); return; } // Process the different reason messages from Neddy. switch (reason) { case MESSAGE_NEW: logger.debug("Message received from {}: {}", neddyBean.getCategory(), neddyBean.getMessage()); break; case MESSAGE_CATEGORY_LIST: List<String> categories = gson.fromJson(neddyBean.getMessage(), List.class); subscribeToCategories(ch, categories); break; default: logger.debug("Reason not recognized: {}", reason); } } catch (JsonSyntaxException jse) { logger.error("Neddy payload can't be deserialized properly.", jse); } } }
false
true
public void execute() throws Exception { logger.info("Trying to generate {} listeners to the server", totalConnections); // Get the first three octets by one side and the last one by the other side. String clientIpBase = CLIENT_BASEADDRESS.substring(0, CLIENT_BASEADDRESS.lastIndexOf(".") + 1); byte clientIpLastOctet = Byte.parseByte(CLIENT_BASEADDRESS.substring( CLIENT_BASEADDRESS.lastIndexOf(".") + 1, CLIENT_BASEADDRESS.length())); //IP addresses loop int lastPort = CLIENT_PORTSTART + NUMPORTS; for (int i = 0; i < NUMADDRESSES; i++) { // Build client ip. String clientIp = clientIpBase + clientIpLastOctet; //Ports loop for (int port = CLIENT_PORTSTART; port <= lastPort; port++) { // Open a Websocket channel to the server. ChannelFuture future = NeddyBenchmark.getBootstrap().connect( new InetSocketAddress(uri.getHost(), uri.getPort()), new InetSocketAddress(clientIp, port)); future.syncUninterruptibly(); Channel ch = future.getChannel(); NeddyBenchmark.getAllChannels().add(ch); // Start with the handshake step. Connect with V13 (RFC 6455 aka HyBi-17). WebSocketClientHandshaker handshaker = new WebSocketClientHandshakerFactory().newHandshaker( getUri(), WebSocketVersion.V13, null, false, null); ch.setAttachment(handshaker); handshaker.handshake(ch).syncUninterruptibly(); // Request the list of categories. boolean handshakeNotReady = true; while (handshakeNotReady) { if (handshaker.isHandshakeComplete()) { handshakeNotReady = false; WebsocketBenchmark.getCategories(ch); } } // Increment open connections variable and print the number of listeners once in a while. openConnections++; if ((((double) openConnections * 100 / totalConnections) % 1) == 0) { logger.info("There are {} listeners so far.", openConnections); } } // Increment last octet. clientIpLastOctet++; } }
public void execute() throws Exception { logger.info("Trying to generate {} listeners to the server", totalConnections); // Get the first three octets by one side and the last one by the other side. String clientIpBase = CLIENT_BASEADDRESS.substring(0, CLIENT_BASEADDRESS.lastIndexOf(".") + 1); byte clientIpLastOctet = Byte.parseByte(CLIENT_BASEADDRESS.substring( CLIENT_BASEADDRESS.lastIndexOf(".") + 1, CLIENT_BASEADDRESS.length())); //IP addresses loop int lastPort = CLIENT_PORTSTART + NUMPORTS; for (int i = 0; i < NUMADDRESSES; i++) { // Build client ip. String clientIp = clientIpBase + clientIpLastOctet; //Ports loop for (int port = CLIENT_PORTSTART; port <= lastPort; port++) { // Open a Websocket channel to the server. ChannelFuture future = NeddyBenchmark.getBootstrap().connect( new InetSocketAddress(uri.getHost(), uri.getPort()), new InetSocketAddress(clientIp, port)); future.syncUninterruptibly(); Channel ch = future.getChannel(); NeddyBenchmark.getAllChannels().add(ch); // Start with the handshake step. Connect with V13 (RFC 6455 aka HyBi-17). WebSocketClientHandshaker handshaker = new WebSocketClientHandshakerFactory().newHandshaker( getUri(), WebSocketVersion.V13, null, false, null); ch.setAttachment(handshaker); handshaker.handshake(ch).syncUninterruptibly(); // Request the list of categories. boolean handshakeReady = false; while (!handshakeReady) { if (handshaker.isHandshakeComplete()) { handshakeReady = true; WebsocketBenchmark.getCategories(ch); } } // Increment open connections variable and print the number of listeners once in a while. openConnections++; if ((((double) openConnections * 100 / totalConnections) % 1) == 0) { logger.info("There are {} listeners so far.", openConnections); } } // Increment last octet. clientIpLastOctet++; } }
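The fix above only renames the flag (handshakeNotReady becomes handshakeReady); the loop is still a hot spin that burns a CPU core until the pipeline marks the handshake complete. A bounded poll-and-sleep helper is one low-effort alternative. This is a sketch under the assumption that Java 8's BooleanSupplier is available; the helper name is hypothetical.

    import java.util.function.BooleanSupplier;

    public final class Awaits {
        /**
         * Polls a condition with a short sleep instead of hot-spinning,
         * giving up after the timeout. Returns true if the condition held.
         */
        public static boolean awaitQuietly(BooleanSupplier done, long timeoutMillis)
                throws InterruptedException {
            long deadline = System.currentTimeMillis() + timeoutMillis;
            while (System.currentTimeMillis() < deadline) {
                if (done.getAsBoolean()) {
                    return true;
                }
                Thread.sleep(10); // yield the CPU between checks
            }
            return false;
        }
    }

At the call site this would read awaitQuietly(handshaker::isHandshakeComplete, 5000) in place of the while loop.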
diff --git a/com.worldline.clic/src/main/java/com/worldline/clic/internal/commands/impl/HelpCommand.java b/com.worldline.clic/src/main/java/com/worldline/clic/internal/commands/impl/HelpCommand.java index 6f69f9d..7379d8a 100644 --- a/com.worldline.clic/src/main/java/com/worldline/clic/internal/commands/impl/HelpCommand.java +++ b/com.worldline.clic/src/main/java/com/worldline/clic/internal/commands/impl/HelpCommand.java @@ -1,98 +1,102 @@ /* * CLiC, Framework for Command Line Interpretation in Eclipse * * Copyright (C) 2013 Worldline or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package com.worldline.clic.internal.commands.impl; import java.io.IOException; import joptsimple.OptionParser; import joptsimple.OptionSpec; import com.worldline.clic.commands.AbstractCommand; import com.worldline.clic.commands.CommandContext; import com.worldline.clic.internal.Activator; import com.worldline.clic.internal.ClicMessages; import com.worldline.clic.internal.commands.CommandRegistry; /** * The {@link HelpCommand} is an internal implementation of an * {@link AbstractCommand} to be used by the user in order to get help about any * available command from the framework. It can be seen as a <i>man</i> command * in Unix. * * This command will internally rely on * {@link OptionParser#printHelpOn(java.io.Writer)} in order to build the help * message to be displayed. * * @author aneveux / mvanbesien * @version 1.0 * @since 1.0 * * @see AbstractCommand */ public class HelpCommand extends AbstractCommand { /** * An {@link OptionSpec} linked to the parameters we define allowing to * retrieve results in a type-safe way, avoiding unwanted casts... */ OptionSpec<String> command; /** * In this methid, we configure the parser to accept one parameter named * <i>command</i> allowing to retrieve the command on which we need to bring * some help * * @see super{@link #configureParser()} */ @Override public void configureParser() { command = parser.accepts("command").withRequiredArg() .ofType(String.class).describedAs("command"); } /** * For this command, we'll simply display some help about a specified * command, using the {@link OptionParser#printHelpOn(java.io.Writer)} * helper from JOpt-Simple * * @see super{@link #execute(CommandContext)} */ @Override public void execute(final CommandContext context) { final AbstractCommand createdCommand = options.has(command) ? CommandRegistry .getInstance().createCommand(options.valueOf(command)) : CommandRegistry.getInstance().createCommand("help"); try { - context.write(ClicMessages.COMMAND_HELP.value(options - .valueOf(command), CommandRegistry.getInstance() - .getCommandDescription(options.valueOf(command)))); + context.write(options.has(command) ? 
ClicMessages.COMMAND_HELP + .value(options.valueOf(command), + CommandRegistry.getInstance() + .getCommandDescription( + options.valueOf(command))) + : ClicMessages.COMMAND_HELP.value("help", CommandRegistry + .getInstance().getCommandDescription("help"))); createdCommand.getParser().printHelpOn(context.getWriter()); } catch (final IOException e) { context.write(ClicMessages.COMMAND_EXECUTION_ERROR.value(e .getMessage())); Activator.sendErrorToErrorLog( ClicMessages.COMMAND_EXECUTION_ERROR.value(e.getMessage()), e); } } }
true
true
public void execute(final CommandContext context) { final AbstractCommand createdCommand = options.has(command) ? CommandRegistry .getInstance().createCommand(options.valueOf(command)) : CommandRegistry.getInstance().createCommand("help"); try { context.write(ClicMessages.COMMAND_HELP.value(options .valueOf(command), CommandRegistry.getInstance() .getCommandDescription(options.valueOf(command)))); createdCommand.getParser().printHelpOn(context.getWriter()); } catch (final IOException e) { context.write(ClicMessages.COMMAND_EXECUTION_ERROR.value(e .getMessage())); Activator.sendErrorToErrorLog( ClicMessages.COMMAND_EXECUTION_ERROR.value(e.getMessage()), e); } }
public void execute(final CommandContext context) { final AbstractCommand createdCommand = options.has(command) ? CommandRegistry .getInstance().createCommand(options.valueOf(command)) : CommandRegistry.getInstance().createCommand("help"); try { context.write(options.has(command) ? ClicMessages.COMMAND_HELP .value(options.valueOf(command), CommandRegistry.getInstance() .getCommandDescription( options.valueOf(command))) : ClicMessages.COMMAND_HELP.value("help", CommandRegistry .getInstance().getCommandDescription("help"))); createdCommand.getParser().printHelpOn(context.getWriter()); } catch (final IOException e) { context.write(ClicMessages.COMMAND_EXECUTION_ERROR.value(e .getMessage())); Activator.sendErrorToErrorLog( ClicMessages.COMMAND_EXECUTION_ERROR.value(e.getMessage()), e); } }
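The fix adds an explicit options.has(command) branch so that a bare invocation documents the help command itself. jopt-simple can express that fallback declaratively with defaultsTo(), which removes the ternary entirely. A small standalone sketch, outside the CLiC command classes:

    import joptsimple.OptionParser;
    import joptsimple.OptionSet;
    import joptsimple.OptionSpec;

    public class HelpOptionSketch {
        public static void main(String[] args) {
            OptionParser parser = new OptionParser();
            // defaultsTo() makes valueOf() yield "help" when --command is
            // absent, so no has()/valueOf() ternary is needed at the call site.
            OptionSpec<String> command = parser.accepts("command")
                    .withRequiredArg().ofType(String.class).defaultsTo("help");

            OptionSet options = parser.parse(args);
            System.out.println("showing help for: " + options.valueOf(command));
        }
    }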
diff --git a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/Authenticate.java b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/Authenticate.java index 5cc13b2f2..23fbfaf07 100644 --- a/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/Authenticate.java +++ b/webapp/src/edu/cornell/mannlib/vitro/webapp/controller/edit/Authenticate.java @@ -1,596 +1,596 @@ /* $This file is distributed under the terms of the license in /doc/license.txt$ */ package edu.cornell.mannlib.vitro.webapp.controller.edit; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.codec.binary.Hex; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.hp.hpl.jena.ontology.OntModel; import edu.cornell.mannlib.vedit.beans.LoginFormBean; import edu.cornell.mannlib.vedit.beans.LoginStatusBean; import edu.cornell.mannlib.vitro.webapp.auth.policy.RoleBasedPolicy.AuthRole; import edu.cornell.mannlib.vitro.webapp.beans.User; import edu.cornell.mannlib.vitro.webapp.controller.Controllers; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet; import edu.cornell.mannlib.vitro.webapp.controller.login.LoginProcessBean; import edu.cornell.mannlib.vitro.webapp.controller.login.LoginProcessBean.Message; import edu.cornell.mannlib.vitro.webapp.controller.login.LoginProcessBean.State; import edu.cornell.mannlib.vitro.webapp.dao.UserDao; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory; import edu.cornell.mannlib.vitro.webapp.dao.jena.LoginEvent; import edu.cornell.mannlib.vitro.webapp.dao.jena.LoginLogoutEvent; public class Authenticate extends FreemarkerHttpServlet { /** * Maximum inactive interval for a ordinary logged in user session, in * seconds. */ public static final int LOGGED_IN_TIMEOUT_INTERVAL = 300; /** Maximum inactive interval for a editor (or better) session, in seconds. */ public static final int PRIVILEGED_TIMEOUT_INTERVAL = 32000; private static final Log log = LogFactory.getLog(Authenticate.class .getName()); /** The username field on the login form. */ private static final String PARAMETER_USERNAME = "loginName"; /** The password field on the login form. */ private static final String PARAMETER_PASSWORD = "loginPassword"; /** The new password field on the password change form. */ private static final String PARAMETER_NEW_PASSWORD = "newPassword"; /** The confirm password field on the password change form. */ private static final String PARAMETER_CONFIRM_PASSWORD = "confirmPassword"; /** If this parameter is "true" (ignoring case), cancel the login. */ private static final String PARAMETER_CANCEL = "cancel"; /** If they are logging in, show them this form. */ public static final String TEMPLATE_LOGIN = "login-form.ftl"; /** If they are changing their password on first login, show them this form. 
*/ public static final String TEMPLATE_FORCE_PASSWORD_CHANGE = "login-forcedPasswordChange.ftl"; public static final String BODY_LOGIN_NAME = "loginName"; public static final String BODY_FORM_ACTION = "formAction"; public static final String BODY_ERROR_MESSAGE = "errorMessage"; /** Where do we find the User/Session map in the servlet context? */ public static final String USER_SESSION_MAP_ATTR = "userURISessionMap"; /** * Find out where they are in the login process, and check for progress. If * they succeed in logging in, record the information. Show the next page. */ public void doPost(HttpServletRequest request, HttpServletResponse response) { VitroRequest vreq = new VitroRequest(request); User user = null; try { // Process any input from the login form. State entryState = getCurrentLoginState(vreq); log.debug("State on entry: " + entryState); switch (entryState) { case LOGGING_IN: user = checkLoginProgress(vreq); if (user != null) { whatNextForThisGuy(vreq, user); } break; case FORCED_PASSWORD_CHANGE: if (checkCancel(vreq)) { recordLoginCancelled(vreq); } else { user = checkChangeProgress(vreq); if (user != null) { recordSuccessfulPasswordChange(vreq, user); } } break; default: break; } // Figure out where they should be, and redirect. State exitState = getCurrentLoginState(vreq); log.debug("State on exit: " + exitState); switch (exitState) { case LOGGED_IN: redirectLoggedInUser(vreq, response); break; case CANCELLED: redirectCancellingUser(vreq, response); break; default: showLoginScreen(vreq, response); break; } } catch (Exception e) { showSystemError(e, response); } } /** * They are logging in. Are they successful? */ private User checkLoginProgress(HttpServletRequest request) { String username = request.getParameter(PARAMETER_USERNAME); String password = request.getParameter(PARAMETER_PASSWORD); LoginProcessBean bean = getLoginProcessBean(request); bean.clearMessage(); log.trace("username=" + username + ", password=" + password + ", bean=" + bean); if ((username == null) || username.isEmpty()) { bean.setMessage(Message.NO_USERNAME); return null; } else { bean.setUsername(username); } User user = getUserDao(request).getUserByUsername(username); log.trace("User is " + (user == null ? "null" : user.getURI())); if (user == null) { bean.setMessage(Message.UNKNOWN_USERNAME, username); return null; } if ((password == null) || password.isEmpty()) { bean.setMessage(Message.NO_PASSWORD); return null; } String md5Password = applyMd5Encoding(password); if (!md5Password.equals(user.getMd5password())) { log.trace("Encoded passwords don't match: right=" + user.getMd5password() + ", wrong=" + md5Password); bean.setMessage(Message.INCORRECT_PASSWORD); return null; } return user; } /** * Successfully applied username and password. Are we forcing a password * change, or is this guy logged in? */ private void whatNextForThisGuy(HttpServletRequest request, User user) { if (user.getLoginCount() == 0) { log.debug("Forcing first-time password change"); LoginProcessBean bean = getLoginProcessBean(request); bean.setState(State.FORCED_PASSWORD_CHANGE); } else { recordLoginInfo(request, user.getUsername()); } } /** * Are they cancelling the login (cancelling the first-time password * change)? They are if the cancel parameter is "true" (ignoring case). */ private boolean checkCancel(HttpServletRequest request) { String cancel = request.getParameter(PARAMETER_CANCEL); log.trace("cancel=" + cancel); return Boolean.valueOf(cancel); } /** * If they want to cancel the login, let them. 
*/ private void recordLoginCancelled(HttpServletRequest request) { getLoginProcessBean(request).setState(State.CANCELLED); } /** * They are changing password. Are they successful? */ private User checkChangeProgress(HttpServletRequest request) { String newPassword = request.getParameter(PARAMETER_NEW_PASSWORD); String confirm = request.getParameter(PARAMETER_CONFIRM_PASSWORD); LoginProcessBean bean = getLoginProcessBean(request); bean.clearMessage(); log.trace("newPassword=" + newPassword + ", confirm=" + confirm + ", bean=" + bean); if ((newPassword == null) || newPassword.isEmpty()) { bean.setMessage(Message.NO_NEW_PASSWORD); return null; } if (!newPassword.equals(confirm)) { bean.setMessage(Message.MISMATCH_PASSWORD); return null; } if ((newPassword.length() < User.MIN_PASSWORD_LENGTH) || (newPassword.length() > User.MAX_PASSWORD_LENGTH)) { bean.setMessage(Message.PASSWORD_LENGTH, User.MIN_PASSWORD_LENGTH, User.MAX_PASSWORD_LENGTH); return null; } User user = getUserDao(request).getUserByUsername(bean.getUsername()); log.trace("User is " + (user == null ? "null" : user.getURI())); if (user == null) { throw new IllegalStateException( "Changing password but bean has no user: '" + bean.getUsername() + "'"); } String md5NewPassword = applyMd5Encoding(newPassword); log.trace("Old password: " + user.getMd5password() + ", new password: " + md5NewPassword); if (md5NewPassword.equals(user.getMd5password())) { bean.setMessage(Message.USING_OLD_PASSWORD); return null; } return user; } /** * Store the changed password. They are logged in. */ private void recordSuccessfulPasswordChange(HttpServletRequest request, User user) { String newPassword = request.getParameter(PARAMETER_NEW_PASSWORD); String md5NewPassword = applyMd5Encoding(newPassword); user.setOldPassword(user.getMd5password()); user.setMd5password(md5NewPassword); getUserDao(request).updateUser(user); log.debug("Completed first-time password change."); recordLoginInfo(request, user.getUsername()); } /** * The user provided the correct information, and changed the password if * that was required. Record that they have logged in. */ private void recordLoginInfo(HttpServletRequest request, String username) { log.debug("Completed login."); // Get a fresh user object, so we know it's not stale. User user = getUserDao(request).getUserByUsername(username); HttpSession session = request.getSession(); // Put the login info into the session. // TODO the LoginFormBean is being phased out. LoginFormBean lfb = new LoginFormBean(); lfb.setUserURI(user.getURI()); lfb.setLoginStatus("authenticated"); lfb.setSessionId(session.getId()); lfb.setLoginRole(user.getRoleURI()); lfb.setLoginRemoteAddr(request.getRemoteAddr()); lfb.setLoginName(user.getUsername()); session.setAttribute("loginHandler", lfb); // TODO this should eventually replace the LoginFormBean. LoginStatusBean lsb = new LoginStatusBean(user.getURI(), user.getUsername(), parseUserSecurityLevel(user)); LoginStatusBean.setBean(session, lsb); log.info("Adding status bean: " + lsb); // Remove the login process info from the session. session.removeAttribute(LoginProcessBean.SESSION_ATTRIBUTE); // Record the login on the user. user.setLoginCount(user.getLoginCount() + 1); if (user.getFirstTime() == null) { // first login user.setFirstTime(new Date()); } getUserDao(request).updateUser(user); // Set the timeout limit on the session - editors, etc, get more. 
if (lsb.isLoggedInAtLeast(LoginStatusBean.EDITOR)) { session.setMaxInactiveInterval(PRIVILEGED_TIMEOUT_INTERVAL); } else { session.setMaxInactiveInterval(LOGGED_IN_TIMEOUT_INTERVAL); } // Record the user in the user/Session map. Map<String, HttpSession> userURISessionMap = getUserURISessionMapFromContext(getServletContext()); userURISessionMap.put(user.getURI(), request.getSession()); // Notify the other users of this model. sendLoginNotifyEvent(new LoginEvent(user.getURI()), getServletContext(), session); } /** * User is in the login process. Show them the login screen. */ private void showLoginScreen(VitroRequest vreq, HttpServletResponse response) throws IOException { response.sendRedirect(getLoginScreenUrl(vreq)); return; } /** * User cancelled the login. Forget that they were logging in, and send them * to the home page. */ private void redirectCancellingUser(HttpServletRequest request, HttpServletResponse response) throws IOException { // Remove the login process info from the session. request.getSession() .removeAttribute(LoginProcessBean.SESSION_ATTRIBUTE); log.debug("User cancelled the login. Redirect to site admin page."); response.sendRedirect(getHomeUrl(request)); } /** * User is logged in. They might go to: * <ul> * <li>A one-time redirect, stored in the session, if they had tried to * bookmark to a page that requires login.</li> * <li>An application-wide redirect, stored in the servlet context.</li> * <li>Their home page, if they are a self-editor.</li> * <li>The site admin page.</li> * </ul> */ private void redirectLoggedInUser(HttpServletRequest request, HttpServletResponse response) throws IOException, UnsupportedEncodingException { // Did they have a one-time redirect stored on the session? String sessionRedirect = (String) request.getSession().getAttribute( "postLoginRequest"); if (sessionRedirect != null) { request.getSession().removeAttribute("postLoginRequest"); log.debug("User is logged in. Redirect by session to " + sessionRedirect); response.sendRedirect(sessionRedirect); return; } // Is there a login-redirect stored in the application as a whole? // It could lead to another page in this app, or to any random URL. String contextRedirect = (String) getServletContext().getAttribute( "postLoginRequest"); if (contextRedirect != null) { log.debug("User is logged in. Redirect by application to " + contextRedirect); if (contextRedirect.indexOf(":") == -1) { response.sendRedirect(request.getContextPath() + contextRedirect); } else { response.sendRedirect(contextRedirect); } return; } // If the user is a self-editor, send them to their home page. User user = getLoggedInUser(request); - if (AuthRole.USER.roleUri().equals(user.getRoleURI())) { + if ( Integer.toString(AuthRole.USER.level()) == user.getRoleURI() ) { UserDao userDao = getUserDao(request); if (userDao != null) { List<String> uris = userDao.getIndividualsUserMayEditAs(user .getURI()); if (uris != null && uris.size() > 0) { log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); String userHomePage = request.getContextPath() + "/individual?uri=" + URLEncoder.encode(uris.get(0), "UTF-8"); log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); response.sendRedirect(userHomePage); return; } } } // If nothing else applies, send them to the Site Admin page. log.debug("User is logged in. Redirect to site admin page."); response.sendRedirect(getSiteAdminUrl(request)); } /** * There has been an unexpected exception. Complain mightily. 
*/ private void showSystemError(Exception e, HttpServletResponse response) { log.error("Unexpected error in login process" + e); try { response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); } catch (IOException e1) { log.error(e1, e1); } } /** * Where are we in the process? Logged in? Not? Somewhere in between? */ private State getCurrentLoginState(HttpServletRequest request) { HttpSession session = request.getSession(false); if (session == null) { return State.NOWHERE; } if (LoginStatusBean.getBean(request).isLoggedIn()) { return State.LOGGED_IN; } return getLoginProcessBean(request).getState(); } /** * What user are we logged in as? */ private User getLoggedInUser(HttpServletRequest request) { UserDao userDao = getUserDao(request); if (userDao == null) { return null; } LoginStatusBean lsb = LoginStatusBean.getBean(request); if (!lsb.isLoggedIn()) { log.debug("getLoggedInUser: not logged in"); return null; } return userDao.getUserByUsername(lsb.getUsername()); } /** * Get a reference to the {@link UserDao}, or <code>null</code>. */ private UserDao getUserDao(HttpServletRequest request) { HttpSession session = request.getSession(false); if (session == null) { return null; } ServletContext servletContext = session.getServletContext(); WebappDaoFactory wadf = (WebappDaoFactory) servletContext .getAttribute("webappDaoFactory"); if (wadf == null) { log.error("getUserDao: no WebappDaoFactory"); return null; } UserDao userDao = wadf.getUserDao(); if (userDao == null) { log.error("getUserDao: no UserDao"); } return userDao; } /** What's the URL for the login screen? */ private String getLoginScreenUrl(HttpServletRequest request) { String contextPath = request.getContextPath(); String urlParams = "?login=block"; return contextPath + Controllers.LOGIN + urlParams; } /** What's the URL for the site admin screen? */ private String getSiteAdminUrl(HttpServletRequest request) { String contextPath = request.getContextPath(); String urlParams = "?login=block"; return contextPath + Controllers.SITE_ADMIN + urlParams; } /** What's the URL for the home page? */ private String getHomeUrl(HttpServletRequest request) { return request.getContextPath(); } /** Where do we stand in the login process? */ private LoginProcessBean getLoginProcessBean(HttpServletRequest request) { return LoginProcessBean.getBeanFromSession(request); } /** * Parse the role URI from User. Don't crash if it is not valid. */ private int parseUserSecurityLevel(User user) { try { return Integer.parseInt(user.getRoleURI()); } catch (NumberFormatException e) { log.warn("Invalid RoleURI '" + user.getRoleURI() + "' for user '" + user.getURI() + "'"); return 1; } } // ---------------------------------------------------------------------- // Public utility methods. // ---------------------------------------------------------------------- /** * Encode this password for storage in the database. Apply an MD5 encoding, * and store the result as a string of hex digits. */ public static String applyMd5Encoding(String password) { try { MessageDigest md = MessageDigest.getInstance("MD5"); byte[] digest = md.digest(password.getBytes()); char[] hexChars = Hex.encodeHex(digest); return new String(hexChars).toUpperCase(); } catch (NoSuchAlgorithmException e) { // This can't happen with a normal Java runtime. throw new RuntimeException(e); } } /** * The servlet context should contain a map from User URIs to * {@link HttpSession}s. Get a reference to it, creating it if necessary. 
*/ @SuppressWarnings("unchecked") public static Map<String, HttpSession> getUserURISessionMapFromContext( ServletContext ctx) { Map<String, HttpSession> m = (Map<String, HttpSession>) ctx .getAttribute(USER_SESSION_MAP_ATTR); if (m == null) { m = new HashMap<String, HttpSession>(); ctx.setAttribute(USER_SESSION_MAP_ATTR, m); } return m; } /** * Let everyone know that somebody has logged in or logged out. */ public static void sendLoginNotifyEvent(LoginLogoutEvent event, ServletContext context, HttpSession session) { if (event == null) { log.warn("Unable to notify audit model of login " + "because a null event was passed"); return; } OntModel jenaOntModel = (OntModel) session.getAttribute("jenaOntModel"); if (jenaOntModel == null) { jenaOntModel = (OntModel) context.getAttribute("jenaOntModel"); } if (jenaOntModel == null) { log.error("Unable to notify audit model of login event " + "because no model could be found"); return; } jenaOntModel.getBaseModel().notifyEvent(event); } @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { doPost(request, response); } }
true
true
private void redirectLoggedInUser(HttpServletRequest request, HttpServletResponse response) throws IOException, UnsupportedEncodingException { // Did they have a one-time redirect stored on the session? String sessionRedirect = (String) request.getSession().getAttribute( "postLoginRequest"); if (sessionRedirect != null) { request.getSession().removeAttribute("postLoginRequest"); log.debug("User is logged in. Redirect by session to " + sessionRedirect); response.sendRedirect(sessionRedirect); return; } // Is there a login-redirect stored in the application as a whole? // It could lead to another page in this app, or to any random URL. String contextRedirect = (String) getServletContext().getAttribute( "postLoginRequest"); if (contextRedirect != null) { log.debug("User is logged in. Redirect by application to " + contextRedirect); if (contextRedirect.indexOf(":") == -1) { response.sendRedirect(request.getContextPath() + contextRedirect); } else { response.sendRedirect(contextRedirect); } return; } // If the user is a self-editor, send them to their home page. User user = getLoggedInUser(request); if (AuthRole.USER.roleUri().equals(user.getRoleURI())) { UserDao userDao = getUserDao(request); if (userDao != null) { List<String> uris = userDao.getIndividualsUserMayEditAs(user .getURI()); if (uris != null && uris.size() > 0) { log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); String userHomePage = request.getContextPath() + "/individual?uri=" + URLEncoder.encode(uris.get(0), "UTF-8"); log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); response.sendRedirect(userHomePage); return; } } } // If nothing else applies, send them to the Site Admin page. log.debug("User is logged in. Redirect to site admin page."); response.sendRedirect(getSiteAdminUrl(request)); }
private void redirectLoggedInUser(HttpServletRequest request, HttpServletResponse response) throws IOException, UnsupportedEncodingException { // Did they have a one-time redirect stored on the session? String sessionRedirect = (String) request.getSession().getAttribute( "postLoginRequest"); if (sessionRedirect != null) { request.getSession().removeAttribute("postLoginRequest"); log.debug("User is logged in. Redirect by session to " + sessionRedirect); response.sendRedirect(sessionRedirect); return; } // Is there a login-redirect stored in the application as a whole? // It could lead to another page in this app, or to any random URL. String contextRedirect = (String) getServletContext().getAttribute( "postLoginRequest"); if (contextRedirect != null) { log.debug("User is logged in. Redirect by application to " + contextRedirect); if (contextRedirect.indexOf(":") == -1) { response.sendRedirect(request.getContextPath() + contextRedirect); } else { response.sendRedirect(contextRedirect); } return; } // If the user is a self-editor, send them to their home page. User user = getLoggedInUser(request); if ( Integer.toString(AuthRole.USER.level()) == user.getRoleURI() ) { UserDao userDao = getUserDao(request); if (userDao != null) { List<String> uris = userDao.getIndividualsUserMayEditAs(user .getURI()); if (uris != null && uris.size() > 0) { log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); String userHomePage = request.getContextPath() + "/individual?uri=" + URLEncoder.encode(uris.get(0), "UTF-8"); log.debug("User is logged in. Redirect as self-editor to " + sessionRedirect); response.sendRedirect(userHomePage); return; } } } // If nothing else applies, send them to the Site Admin page. log.debug("User is logged in. Redirect to site admin page."); response.sendRedirect(getSiteAdminUrl(request)); }
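The replacement guard in this fix deserves scrutiny: Integer.toString(AuthRole.USER.level()) == user.getRoleURI() compares String references, not contents, so it is effectively always false and the self-editor redirect is silently skipped. equals() is the correct comparison, as this minimal demo shows (the value 50 is illustrative; whatever level() returns, identity comparison fails for a freshly built string):

    public class StringEqualityDemo {
        public static void main(String[] args) {
            String a = Integer.toString(50); // a fresh String instance
            String b = "50";                 // an interned literal

            System.out.println(a == b);      // false: compares references
            System.out.println(a.equals(b)); // true: compares contents
        }
    }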
diff --git a/src/com/android/launcher2/LauncherModel.java b/src/com/android/launcher2/LauncherModel.java index b261a75b..fae1d17f 100644 --- a/src/com/android/launcher2/LauncherModel.java +++ b/src/com/android/launcher2/LauncherModel.java @@ -1,1139 +1,1139 @@ /* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.launcher2; import android.content.ComponentName; import android.content.ContentResolver; import android.content.ContentValues; import android.content.Intent; import android.content.Context; import android.content.pm.ActivityInfo; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.content.res.Resources; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.Drawable; import android.net.Uri; import static android.util.Log.*; import android.util.Log; import android.os.Process; import android.os.SystemClock; import java.lang.ref.WeakReference; import java.net.URISyntaxException; import java.text.Collator; import java.util.ArrayList; import java.util.Comparator; import java.util.Collections; import java.util.HashMap; import java.util.List; /** * Maintains in-memory state of the Launcher. It is expected that there should be only one * LauncherModel object held in a static. Also provide APIs for updating the database state * for the Launcher.
*/ public class LauncherModel { static final boolean DEBUG_LOADERS = true; static final String TAG = "Launcher.Model"; private final Object mLock = new Object(); private DeferredHandler mHandler = new DeferredHandler(); private Loader mLoader = new Loader(); private WeakReference<Callbacks> mCallbacks; private AllAppsList mAllAppsList = new AllAppsList(); public interface Callbacks { public int getCurrentWorkspaceScreen(); public void startBinding(); public void bindItems(ArrayList<ItemInfo> shortcuts, int start, int end); public void finishBindingItems(); public void bindAppWidget(LauncherAppWidgetInfo info); public void bindAllApplications(ArrayList<ApplicationInfo> apps); public void bindPackageAdded(ArrayList<ApplicationInfo> apps); public void bindPackageUpdated(String packageName, ArrayList<ApplicationInfo> apps); public void bindPackageRemoved(String packageName, ArrayList<ApplicationInfo> apps); } /** * Adds an item to the DB if it was not created previously, or move it to a new * <container, screen, cellX, cellY> */ static void addOrMoveItemInDatabase(Context context, ItemInfo item, long container, int screen, int cellX, int cellY) { if (item.container == ItemInfo.NO_ID) { // From all apps addItemToDatabase(context, item, container, screen, cellX, cellY, false); } else { // From somewhere else moveItemInDatabase(context, item, container, screen, cellX, cellY); } } /** * Move an item in the DB to a new <container, screen, cellX, cellY> */ static void moveItemInDatabase(Context context, ItemInfo item, long container, int screen, int cellX, int cellY) { item.container = container; item.screen = screen; item.cellX = cellX; item.cellY = cellY; final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); values.put(LauncherSettings.Favorites.CONTAINER, item.container); values.put(LauncherSettings.Favorites.CELLX, item.cellX); values.put(LauncherSettings.Favorites.CELLY, item.cellY); values.put(LauncherSettings.Favorites.SCREEN, item.screen); cr.update(LauncherSettings.Favorites.getContentUri(item.id, false), values, null, null); } /** * Returns true if the shortcuts already exists in the database. * we identify a shortcut by its title and intent. */ static boolean shortcutExists(Context context, String title, Intent intent) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { "title", "intent" }, "title=? and intent=?", new String[] { title, intent.toUri(0) }, null); boolean result = false; try { result = c.moveToFirst(); } finally { c.close(); } return result; } /** * Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList. */ FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null, "_id=? and (itemType=? 
or itemType=?)", new String[] { String.valueOf(id), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER) }, null); try { if (c.moveToFirst()) { final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER); final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY); FolderInfo folderInfo = null; switch (c.getInt(itemTypeIndex)) { case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: folderInfo = findOrMakeUserFolder(folderList, id); break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: folderInfo = findOrMakeLiveFolder(folderList, id); break; } folderInfo.title = c.getString(titleIndex); folderInfo.id = id; folderInfo.container = c.getInt(containerIndex); folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); return folderInfo; } } finally { c.close(); } return null; } /** * Add an item to the database in a specified container. Sets the container, screen, cellX and * cellY fields of the item. Also assigns an ID to the item. */ static void addItemToDatabase(Context context, ItemInfo item, long container, int screen, int cellX, int cellY, boolean notify) { item.container = container; item.screen = screen; item.cellX = cellX; item.cellY = cellY; final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); Uri result = cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI : LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values); if (result != null) { item.id = Integer.parseInt(result.getPathSegments().get(1)); } } /** * Update an item to the database in a specified container. */ static void updateItemInDatabase(Context context, ItemInfo item) { final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); cr.update(LauncherSettings.Favorites.getContentUri(item.id, false), values, null, null); } /** * Removes the specified item from the database * @param context * @param item */ static void deleteItemFromDatabase(Context context, ItemInfo item) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(item.id, false), null, null); } /** * Remove the contents of the specified folder from the database */ static void deleteUserFolderContentsFromDatabase(Context context, UserFolderInfo info) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null); cr.delete(LauncherSettings.Favorites.CONTENT_URI, LauncherSettings.Favorites.CONTAINER + "=" + info.id, null); } /** * Set this as the current Launcher activity object for the loader. 
*/ public void initialize(Callbacks callbacks) { synchronized (mLock) { mCallbacks = new WeakReference<Callbacks>(callbacks); } } public void startLoader(Context context, boolean isLaunching) { mLoader.startLoader(context, isLaunching); } public void stopLoader() { mLoader.stopLoader(); } public void setWorkspaceDirty() { mLoader.setWorkspaceDirty(); } /** * Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and * ACTION_PACKAGE_CHANGED. */ public void onReceiveIntent(Context context, Intent intent) { final String packageName = intent.getData().getSchemeSpecificPart(); ArrayList<ApplicationInfo> added = null; ArrayList<ApplicationInfo> removed = null; ArrayList<ApplicationInfo> modified = null; boolean update = false; boolean remove = false; synchronized (mLock) { final String action = intent.getAction(); final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false); if (packageName == null || packageName.length() == 0) { // they sent us a bad intent return; } if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) { mAllAppsList.updatePackage(context, packageName); update = true; } else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) { if (!replacing) { mAllAppsList.removePackage(packageName); remove = true; } // else, we are replacing the package, so a PACKAGE_ADDED will be sent // later, we will update the package at this time } else { if (!replacing) { mAllAppsList.addPackage(context, packageName); } else { mAllAppsList.updatePackage(context, packageName); update = true; } } if (mAllAppsList.added.size() > 0) { added = mAllAppsList.added; mAllAppsList.added = new ArrayList(); } if (mAllAppsList.removed.size() > 0) { removed = mAllAppsList.removed; mAllAppsList.removed = new ArrayList(); for (ApplicationInfo info: removed) { AppInfoCache.remove(info.intent.getComponent()); } } if (mAllAppsList.modified.size() > 0) { modified = mAllAppsList.modified; mAllAppsList.modified = new ArrayList(); } - final Callbacks callbacks = mCallbacks.get(); + final Callbacks callbacks = mCallbacks != null ? mCallbacks.get() : null; if (callbacks == null) { return; } if (added != null) { final ArrayList<ApplicationInfo> addedFinal = added; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageAdded(addedFinal); } }); } if (update || modified != null) { final ArrayList<ApplicationInfo> modifiedFinal = modified; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageUpdated(packageName, modifiedFinal); } }); } if (remove || removed != null) { final ArrayList<ApplicationInfo> removedFinal = removed; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageRemoved(packageName, removedFinal); } }); } } } public class Loader { private static final int ITEMS_CHUNK = 6; private LoaderThread mLoaderThread; private int mLastWorkspaceSeq = 0; private int mWorkspaceSeq = 1; private int mLastAllAppsSeq = 0; private int mAllAppsSeq = 1; final ArrayList<ItemInfo> mItems = new ArrayList(); final ArrayList<LauncherAppWidgetInfo> mAppWidgets = new ArrayList(); final HashMap<Long, FolderInfo> folders = new HashMap(); /** * Call this from the ui thread so the handler is initialized on the correct thread. 
*/ public Loader() { } public void startLoader(Context context, boolean isLaunching) { synchronized (mLock) { Log.d(TAG, "startLoader isLaunching=" + isLaunching); // Don't bother to start the thread if we know it's not going to do anything if (mCallbacks.get() != null) { LoaderThread oldThread = mLoaderThread; if (oldThread != null) { if (oldThread.isLaunching()) { // don't downgrade isLaunching if we're already running isLaunching = true; } oldThread.stopLocked(); } mLoaderThread = new LoaderThread(context, oldThread, isLaunching); mLoaderThread.start(); } } } public void stopLoader() { synchronized (mLock) { if (mLoaderThread != null) { mLoaderThread.stopLocked(); } } } public void setWorkspaceDirty() { synchronized (mLock) { mWorkspaceSeq++; } } public void setAllAppsDirty() { synchronized (mLock) { mAllAppsSeq++; } } /** * Runnable for the thread that loads the contents of the launcher: * - workspace icons * - widgets * - all apps icons */ private class LoaderThread extends Thread { private Context mContext; private Thread mWaitThread; private boolean mIsLaunching; private boolean mStopped; private boolean mWorkspaceDoneBinding; LoaderThread(Context context, Thread waitThread, boolean isLaunching) { mContext = context; mWaitThread = waitThread; mIsLaunching = isLaunching; } boolean isLaunching() { return mIsLaunching; } /** * If another LoaderThread was supplied, we need to wait for that to finish before * we start our processing. This keeps the ordering of the setting and clearing * of the dirty flags correct by making sure we don't start processing stuff until * they've had a chance to re-set them. We do this waiting the worker thread, not * the ui thread to avoid ANRs. */ private void waitForOtherThread() { if (mWaitThread != null) { boolean done = false; while (!done) { try { mWaitThread.join(); done = true; } catch (InterruptedException ex) { } } mWaitThread = null; } } public void run() { waitForOtherThread(); // Elevate priority when Home launches for the first time to avoid // starving at boot time. Staring at a blank home is not cool. synchronized (mLock) { android.os.Process.setThreadPriority(mIsLaunching ? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND); } // Load the workspace only if it's dirty. int workspaceSeq; boolean workspaceDirty; synchronized (mLock) { workspaceSeq = mWorkspaceSeq; workspaceDirty = mWorkspaceSeq != mLastWorkspaceSeq; } if (workspaceDirty) { loadWorkspace(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mWorkspaceSeq. if (mStopped) { return; } if (workspaceSeq == mWorkspaceSeq) { mLastWorkspaceSeq = mWorkspaceSeq; } } // Bind the workspace bindWorkspace(); // Wait until the either we're stopped or the other threads are done. // This way we don't start loading all apps until the workspace has settled // down. 
synchronized (LoaderThread.this) { mHandler.post(new Runnable() { public void run() { synchronized (LoaderThread.this) { mWorkspaceDoneBinding = true; Log.d(TAG, "done with workspace"); LoaderThread.this.notify(); } } }); Log.d(TAG, "waiting to be done with workspace"); while (!mStopped && !mWorkspaceDoneBinding) { try { this.wait(); } catch (InterruptedException ex) { } } Log.d(TAG, "done waiting to be done with workspace"); } // Load all apps if they're dirty int allAppsSeq; boolean allAppsDirty; synchronized (mLock) { allAppsSeq = mAllAppsSeq; allAppsDirty = mAllAppsSeq != mLastAllAppsSeq; } if (allAppsDirty) { loadAllApps(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mAllAppsSeq. if (mStopped) { return; } if (allAppsSeq == mAllAppsSeq) { mLastAllAppsSeq = mAllAppsSeq; } } // Bind all apps bindAllApps(); // Clear out this reference, otherwise we end up holding it until all of the // callback runnables are done. mContext = null; synchronized (mLock) { // Setting the reference is atomic, but we can't do it inside the other critical // sections. mLoaderThread = null; return; } } public void stopLocked() { synchronized (LoaderThread.this) { mStopped = true; this.notify(); } } /** * Gets the callbacks object. If we've been stopped, or if the launcher object * has somehow been garbage collected, return null instead. */ Callbacks tryGetCallbacks() { synchronized (mLock) { if (mStopped) { return null; } final Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { Log.w(TAG, "no mCallbacks"); return null; } return callbacks; } } private void loadWorkspace() { long t = SystemClock.uptimeMillis(); final Context context = mContext; final ContentResolver contentResolver = context.getContentResolver(); final PackageManager manager = context.getPackageManager(); /* TODO if (mLocaleChanged) { updateShortcutLabels(contentResolver, manager); } */ final Cursor c = contentResolver.query( LauncherSettings.Favorites.CONTENT_URI, null, null, null, null); try { final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.INTENT); final int titleIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.TITLE); final int iconTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_TYPE); final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON); final int iconPackageIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_PACKAGE); final int iconResourceIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_RESOURCE); final int containerIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.CONTAINER); final int itemTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ITEM_TYPE); final int appWidgetIdIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.APPWIDGET_ID); final int screenIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLY); final int spanXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.SPANX); final int spanYIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SPANY); final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI); final int displayModeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.DISPLAY_MODE); ApplicationInfo info; String intentDescription; Widget 
widgetInfo; LauncherAppWidgetInfo appWidgetInfo; int container; long id; Intent intent; while (!mStopped && c.moveToNext()) { try { int itemType = c.getInt(itemTypeIndex); switch (itemType) { case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION: case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT: intentDescription = c.getString(intentIndex); try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { continue; } if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { info = getApplicationInfo(manager, intent, context); } else { info = getApplicationInfoShortcut(c, context, iconTypeIndex, iconPackageIndex, iconResourceIndex, iconIndex); } if (info == null) { info = new ApplicationInfo(); info.icon = manager.getDefaultActivityIcon(); } if (info != null) { info.title = c.getString(titleIndex); info.intent = intent; info.id = c.getLong(idIndex); container = c.getInt(containerIndex); info.container = container; info.screen = c.getInt(screenIndex); info.cellX = c.getInt(cellXIndex); info.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(info); break; default: // Item is in a user folder UserFolderInfo folderInfo = findOrMakeUserFolder(folders, container); folderInfo.add(info); break; } } break; case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: id = c.getLong(idIndex); UserFolderInfo folderInfo = findOrMakeUserFolder(folders, id); folderInfo.title = c.getString(titleIndex); folderInfo.id = id; container = c.getInt(containerIndex); folderInfo.container = container; folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(folderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: id = c.getLong(idIndex); LiveFolderInfo liveFolderInfo = findOrMakeLiveFolder(folders, id); intentDescription = c.getString(intentIndex); intent = null; if (intentDescription != null) { try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { // Ignore, a live folder might not have a base intent } } liveFolderInfo.title = c.getString(titleIndex); liveFolderInfo.id = id; container = c.getInt(containerIndex); liveFolderInfo.container = container; liveFolderInfo.screen = c.getInt(screenIndex); liveFolderInfo.cellX = c.getInt(cellXIndex); liveFolderInfo.cellY = c.getInt(cellYIndex); liveFolderInfo.uri = Uri.parse(c.getString(uriIndex)); liveFolderInfo.baseIntent = intent; liveFolderInfo.displayMode = c.getInt(displayModeIndex); loadLiveFolderIcon(context, c, iconTypeIndex, iconPackageIndex, iconResourceIndex, liveFolderInfo); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(liveFolderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_WIDGET_SEARCH: widgetInfo = Widget.makeSearch(); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP ignoring!"); continue; } widgetInfo.id = c.getLong(idIndex); widgetInfo.screen = c.getInt(screenIndex); widgetInfo.container = container; widgetInfo.cellX = c.getInt(cellXIndex); widgetInfo.cellY = c.getInt(cellYIndex); mItems.add(widgetInfo); break; case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET: // Read all Launcher-specific widget details int appWidgetId = c.getInt(appWidgetIdIndex); appWidgetInfo = new 
LauncherAppWidgetInfo(appWidgetId); appWidgetInfo.id = c.getLong(idIndex); appWidgetInfo.screen = c.getInt(screenIndex); appWidgetInfo.cellX = c.getInt(cellXIndex); appWidgetInfo.cellY = c.getInt(cellYIndex); appWidgetInfo.spanX = c.getInt(spanXIndex); appWidgetInfo.spanY = c.getInt(spanYIndex); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP -- ignoring!"); continue; } appWidgetInfo.container = c.getInt(containerIndex); mAppWidgets.add(appWidgetInfo); break; } } catch (Exception e) { Log.w(TAG, "Desktop items loading interrupted:", e); } } } finally { c.close(); } Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); } /** * Read everything out of our database. */ private void bindWorkspace() { final long t = SystemClock.uptimeMillis(); // Don't use these two variables in any of the callback runnables. // Otherwise we hold a reference to them. Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { // This launcher has exited and nobody bothered to tell us. Just bail. Log.w(TAG, "LoaderThread running with no launcher"); return; } int N; // Tell the workspace that we're about to start firing items at it mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.startBinding(); } } }); // Add the items to the workspace. N = mItems.size(); for (int i=0; i<N; i+=ITEMS_CHUNK) { final int start = i; final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i); mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindItems(mItems, start, start+chunkSize); } } }); } // Wait until the queue goes empty. mHandler.postIdle(new Runnable() { public void run() { Log.d(TAG, "Going to start binding widgets soon."); } }); // Bind the widgets, one at a time. // WARNING: this is calling into the workspace from the background thread, // but since getCurrentScreen() just returns the int, we should be okay. This // is just a hint for the order, and if it's wrong, we'll be okay. // TODO: instead, we should have that push the current screen into here. final int currentScreen = callbacks.getCurrentWorkspaceScreen(); N = mAppWidgets.size(); // once for the current screen for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen == currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // once for the other screens for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen != currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // TODO: Bind the folders // Tell the workspace that we're done. mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.finishBindingItems(); } } }); // If we're profiling, this is the last thing in the queue. 
mHandler.post(new Runnable() { public void run() { Log.d(TAG, "bound workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); if (Launcher.PROFILE_ROTATE) { android.os.Debug.stopMethodTracing(); } } }); } private void loadAllApps() { final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null); mainIntent.addCategory(Intent.CATEGORY_LAUNCHER); final Callbacks callbacks = tryGetCallbacks(); if (callbacks == null) { return; } final Context context = mContext; final PackageManager packageManager = context.getPackageManager(); final List<ResolveInfo> apps = packageManager.queryIntentActivities(mainIntent, 0); synchronized (mLock) { mAllAppsList.clear(); if (apps != null) { long t = SystemClock.uptimeMillis(); int N = apps.size(); Utilities.BubbleText bubble = new Utilities.BubbleText(context); for (int i=0; i<N && !mStopped; i++) { // This builds the icon bitmaps. mAllAppsList.add(AppInfoCache.cache(apps.get(i), context, bubble)); } Collections.sort(mAllAppsList.data, sComparator); Collections.sort(mAllAppsList.added, sComparator); Log.d(TAG, "cached app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } } } private void bindAllApps() { synchronized (mLock) { final ArrayList<ApplicationInfo> results = mAllAppsList.added; mAllAppsList.added = new ArrayList(); mHandler.post(new Runnable() { public void run() { long t = SystemClock.uptimeMillis(); Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAllApplications(results); } Log.d(TAG, "bound app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } }); } } } } /** * Make an ApplicationInfo object for an application. */ private static ApplicationInfo getApplicationInfo(PackageManager manager, Intent intent, Context context) { final ResolveInfo resolveInfo = manager.resolveActivity(intent, 0); if (resolveInfo == null) { return null; } final ApplicationInfo info = new ApplicationInfo(); final ActivityInfo activityInfo = resolveInfo.activityInfo; info.icon = Utilities.createIconThumbnail(activityInfo.loadIcon(manager), context); if (info.title == null || info.title.length() == 0) { info.title = activityInfo.loadLabel(manager); } if (info.title == null) { info.title = ""; } info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION; return info; } /** * Make an ApplicationInfo object for a sortcut */ private static ApplicationInfo getApplicationInfoShortcut(Cursor c, Context context, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex) { final ApplicationInfo info = new ApplicationInfo(); info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT; int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources = packageManager.getResourcesForApplication(packageName); final int id = resources.getIdentifier(resourceName, null, null); info.icon = Utilities.createIconThumbnail(resources.getDrawable(id), context); } catch (Exception e) { info.icon = packageManager.getDefaultActivityIcon(); } info.iconResource = new Intent.ShortcutIconResource(); info.iconResource.packageName = packageName; info.iconResource.resourceName = resourceName; info.customIcon = false; break; case LauncherSettings.Favorites.ICON_TYPE_BITMAP: byte[] data = c.getBlob(iconIndex); try { Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length); info.icon = new 
FastBitmapDrawable( Utilities.createBitmapThumbnail(bitmap, context)); } catch (Exception e) { packageManager = context.getPackageManager(); info.icon = packageManager.getDefaultActivityIcon(); } info.filtered = true; info.customIcon = true; break; default: info.icon = context.getPackageManager().getDefaultActivityIcon(); info.customIcon = false; break; } return info; } private static void loadLiveFolderIcon(Context context, Cursor c, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, LiveFolderInfo liveFolderInfo) { int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources = packageManager.getResourcesForApplication(packageName); final int id = resources.getIdentifier(resourceName, null, null); liveFolderInfo.icon = resources.getDrawable(id); } catch (Exception e) { liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } liveFolderInfo.iconResource = new Intent.ShortcutIconResource(); liveFolderInfo.iconResource.packageName = packageName; liveFolderInfo.iconResource.resourceName = resourceName; break; default: liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, * or make a new one. */ private static UserFolderInfo findOrMakeUserFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof UserFolderInfo)) { // No placeholder -- create a new instance folderInfo = new UserFolderInfo(); folders.put(id, folderInfo); } return (UserFolderInfo) folderInfo; } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, or make a * new one. 
*/ private static LiveFolderInfo findOrMakeLiveFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof LiveFolderInfo)) { // No placeholder -- create a new instance folderInfo = new LiveFolderInfo(); folders.put(id, folderInfo); } return (LiveFolderInfo) folderInfo; } private static void updateShortcutLabels(ContentResolver resolver, PackageManager manager) { final Cursor c = resolver.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { LauncherSettings.Favorites._ID, LauncherSettings.Favorites.TITLE, LauncherSettings.Favorites.INTENT, LauncherSettings.Favorites.ITEM_TYPE }, null, null, null); final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.INTENT); final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); // boolean changed = false; try { while (c.moveToNext()) { try { if (c.getInt(itemTypeIndex) != LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { continue; } final String intentUri = c.getString(intentIndex); if (intentUri != null) { final Intent shortcut = Intent.parseUri(intentUri, 0); if (Intent.ACTION_MAIN.equals(shortcut.getAction())) { final ComponentName name = shortcut.getComponent(); if (name != null) { final ActivityInfo activityInfo = manager.getActivityInfo(name, 0); final String title = c.getString(titleIndex); String label = getLabel(manager, activityInfo); if (title == null || !title.equals(label)) { final ContentValues values = new ContentValues(); values.put(LauncherSettings.Favorites.TITLE, label); resolver.update( LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values, "_id=?", new String[] { String.valueOf(c.getLong(idIndex)) }); // changed = true; } } } } } catch (URISyntaxException e) { // Ignore } catch (PackageManager.NameNotFoundException e) { // Ignore } } } finally { c.close(); } // if (changed) resolver.notifyChange(Settings.Favorites.CONTENT_URI, null); } private static String getLabel(PackageManager manager, ActivityInfo activityInfo) { String label = activityInfo.loadLabel(manager).toString(); if (label == null) { label = manager.getApplicationLabel(activityInfo.applicationInfo).toString(); if (label == null) { label = activityInfo.name; } } return label; } private static final Collator sCollator = Collator.getInstance(); private static final Comparator<ApplicationInfo> sComparator = new Comparator<ApplicationInfo>() { public final int compare(ApplicationInfo a, ApplicationInfo b) { return sCollator.compare(a.title.toString(), b.title.toString()); } }; }
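The single hunk in the LauncherModel diff above replaces the unguarded mCallbacks.get() in onReceiveIntent() with mCallbacks != null ? mCallbacks.get() : null. The mCallbacks field is only assigned in initialize(Callbacks), so a package broadcast arriving before any Launcher has registered would otherwise dereference a null WeakReference and crash. A stripped-down sketch of the same guard, with hypothetical names standing in for the Launcher types:

import java.lang.ref.WeakReference;

class CallbacksHolder {
    interface Callbacks {
        void onPackagesChanged();
    }

    // Null until initialize() runs; the referent can also be collected later.
    private WeakReference<Callbacks> mCallbacks;

    void initialize(Callbacks callbacks) {
        mCallbacks = new WeakReference<>(callbacks);
    }

    void onReceiveIntent() {
        // Guard the field itself before calling get(); get() may still
        // return null once the registered object has been garbage collected.
        final Callbacks callbacks = (mCallbacks != null) ? mCallbacks.get() : null;
        if (callbacks == null) {
            return; // nobody registered yet, or the registrant is gone
        }
        callbacks.onPackagesChanged();
    }
}

Either source of null simply drops the event, which matches the early return in the patched method.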
true
true
static boolean shortcutExists(Context context, String title, Intent intent) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { "title", "intent" }, "title=? and intent=?", new String[] { title, intent.toUri(0) }, null); boolean result = false; try { result = c.moveToFirst(); } finally { c.close(); } return result; } /** * Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList. */ FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null, "_id=? and (itemType=? or itemType=?)", new String[] { String.valueOf(id), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER) }, null); try { if (c.moveToFirst()) { final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER); final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY); FolderInfo folderInfo = null; switch (c.getInt(itemTypeIndex)) { case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: folderInfo = findOrMakeUserFolder(folderList, id); break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: folderInfo = findOrMakeLiveFolder(folderList, id); break; } folderInfo.title = c.getString(titleIndex); folderInfo.id = id; folderInfo.container = c.getInt(containerIndex); folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); return folderInfo; } } finally { c.close(); } return null; } /** * Add an item to the database in a specified container. Sets the container, screen, cellX and * cellY fields of the item. Also assigns an ID to the item. */ static void addItemToDatabase(Context context, ItemInfo item, long container, int screen, int cellX, int cellY, boolean notify) { item.container = container; item.screen = screen; item.cellX = cellX; item.cellY = cellY; final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); Uri result = cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI : LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values); if (result != null) { item.id = Integer.parseInt(result.getPathSegments().get(1)); } } /** * Update an item to the database in a specified container. 
*/ static void updateItemInDatabase(Context context, ItemInfo item) { final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); cr.update(LauncherSettings.Favorites.getContentUri(item.id, false), values, null, null); } /** * Removes the specified item from the database * @param context * @param item */ static void deleteItemFromDatabase(Context context, ItemInfo item) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(item.id, false), null, null); } /** * Remove the contents of the specified folder from the database */ static void deleteUserFolderContentsFromDatabase(Context context, UserFolderInfo info) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null); cr.delete(LauncherSettings.Favorites.CONTENT_URI, LauncherSettings.Favorites.CONTAINER + "=" + info.id, null); } /** * Set this as the current Launcher activity object for the loader. */ public void initialize(Callbacks callbacks) { synchronized (mLock) { mCallbacks = new WeakReference<Callbacks>(callbacks); } } public void startLoader(Context context, boolean isLaunching) { mLoader.startLoader(context, isLaunching); } public void stopLoader() { mLoader.stopLoader(); } public void setWorkspaceDirty() { mLoader.setWorkspaceDirty(); } /** * Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and * ACTION_PACKAGE_CHANGED. */ public void onReceiveIntent(Context context, Intent intent) { final String packageName = intent.getData().getSchemeSpecificPart(); ArrayList<ApplicationInfo> added = null; ArrayList<ApplicationInfo> removed = null; ArrayList<ApplicationInfo> modified = null; boolean update = false; boolean remove = false; synchronized (mLock) { final String action = intent.getAction(); final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false); if (packageName == null || packageName.length() == 0) { // they sent us a bad intent return; } if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) { mAllAppsList.updatePackage(context, packageName); update = true; } else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) { if (!replacing) { mAllAppsList.removePackage(packageName); remove = true; } // else, we are replacing the package, so a PACKAGE_ADDED will be sent // later, we will update the package at this time } else { if (!replacing) { mAllAppsList.addPackage(context, packageName); } else { mAllAppsList.updatePackage(context, packageName); update = true; } } if (mAllAppsList.added.size() > 0) { added = mAllAppsList.added; mAllAppsList.added = new ArrayList(); } if (mAllAppsList.removed.size() > 0) { removed = mAllAppsList.removed; mAllAppsList.removed = new ArrayList(); for (ApplicationInfo info: removed) { AppInfoCache.remove(info.intent.getComponent()); } } if (mAllAppsList.modified.size() > 0) { modified = mAllAppsList.modified; mAllAppsList.modified = new ArrayList(); } final Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { return; } if (added != null) { final ArrayList<ApplicationInfo> addedFinal = added; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageAdded(addedFinal); } }); } if (update || modified != null) { final ArrayList<ApplicationInfo> modifiedFinal = modified; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageUpdated(packageName, modifiedFinal); } }); } if (remove || removed != null) { final 
ArrayList<ApplicationInfo> removedFinal = removed; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageRemoved(packageName, removedFinal); } }); } } } public class Loader { private static final int ITEMS_CHUNK = 6; private LoaderThread mLoaderThread; private int mLastWorkspaceSeq = 0; private int mWorkspaceSeq = 1; private int mLastAllAppsSeq = 0; private int mAllAppsSeq = 1; final ArrayList<ItemInfo> mItems = new ArrayList(); final ArrayList<LauncherAppWidgetInfo> mAppWidgets = new ArrayList(); final HashMap<Long, FolderInfo> folders = new HashMap(); /** * Call this from the ui thread so the handler is initialized on the correct thread. */ public Loader() { } public void startLoader(Context context, boolean isLaunching) { synchronized (mLock) { Log.d(TAG, "startLoader isLaunching=" + isLaunching); // Don't bother to start the thread if we know it's not going to do anything if (mCallbacks.get() != null) { LoaderThread oldThread = mLoaderThread; if (oldThread != null) { if (oldThread.isLaunching()) { // don't downgrade isLaunching if we're already running isLaunching = true; } oldThread.stopLocked(); } mLoaderThread = new LoaderThread(context, oldThread, isLaunching); mLoaderThread.start(); } } } public void stopLoader() { synchronized (mLock) { if (mLoaderThread != null) { mLoaderThread.stopLocked(); } } } public void setWorkspaceDirty() { synchronized (mLock) { mWorkspaceSeq++; } } public void setAllAppsDirty() { synchronized (mLock) { mAllAppsSeq++; } } /** * Runnable for the thread that loads the contents of the launcher: * - workspace icons * - widgets * - all apps icons */ private class LoaderThread extends Thread { private Context mContext; private Thread mWaitThread; private boolean mIsLaunching; private boolean mStopped; private boolean mWorkspaceDoneBinding; LoaderThread(Context context, Thread waitThread, boolean isLaunching) { mContext = context; mWaitThread = waitThread; mIsLaunching = isLaunching; } boolean isLaunching() { return mIsLaunching; } /** * If another LoaderThread was supplied, we need to wait for that to finish before * we start our processing. This keeps the ordering of the setting and clearing * of the dirty flags correct by making sure we don't start processing stuff until * they've had a chance to re-set them. We do this waiting the worker thread, not * the ui thread to avoid ANRs. */ private void waitForOtherThread() { if (mWaitThread != null) { boolean done = false; while (!done) { try { mWaitThread.join(); done = true; } catch (InterruptedException ex) { } } mWaitThread = null; } } public void run() { waitForOtherThread(); // Elevate priority when Home launches for the first time to avoid // starving at boot time. Staring at a blank home is not cool. synchronized (mLock) { android.os.Process.setThreadPriority(mIsLaunching ? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND); } // Load the workspace only if it's dirty. int workspaceSeq; boolean workspaceDirty; synchronized (mLock) { workspaceSeq = mWorkspaceSeq; workspaceDirty = mWorkspaceSeq != mLastWorkspaceSeq; } if (workspaceDirty) { loadWorkspace(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mWorkspaceSeq. if (mStopped) { return; } if (workspaceSeq == mWorkspaceSeq) { mLastWorkspaceSeq = mWorkspaceSeq; } } // Bind the workspace bindWorkspace(); // Wait until the either we're stopped or the other threads are done. // This way we don't start loading all apps until the workspace has settled // down. 
synchronized (LoaderThread.this) { mHandler.post(new Runnable() { public void run() { synchronized (LoaderThread.this) { mWorkspaceDoneBinding = true; Log.d(TAG, "done with workspace"); LoaderThread.this.notify(); } } }); Log.d(TAG, "waiting to be done with workspace"); while (!mStopped && !mWorkspaceDoneBinding) { try { this.wait(); } catch (InterruptedException ex) { } } Log.d(TAG, "done waiting to be done with workspace"); } // Load all apps if they're dirty int allAppsSeq; boolean allAppsDirty; synchronized (mLock) { allAppsSeq = mAllAppsSeq; allAppsDirty = mAllAppsSeq != mLastAllAppsSeq; } if (allAppsDirty) { loadAllApps(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mAllAppsSeq. if (mStopped) { return; } if (allAppsSeq == mAllAppsSeq) { mLastAllAppsSeq = mAllAppsSeq; } } // Bind all apps bindAllApps(); // Clear out this reference, otherwise we end up holding it until all of the // callback runnables are done. mContext = null; synchronized (mLock) { // Setting the reference is atomic, but we can't do it inside the other critical // sections. mLoaderThread = null; return; } } public void stopLocked() { synchronized (LoaderThread.this) { mStopped = true; this.notify(); } } /** * Gets the callbacks object. If we've been stopped, or if the launcher object * has somehow been garbage collected, return null instead. */ Callbacks tryGetCallbacks() { synchronized (mLock) { if (mStopped) { return null; } final Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { Log.w(TAG, "no mCallbacks"); return null; } return callbacks; } } private void loadWorkspace() { long t = SystemClock.uptimeMillis(); final Context context = mContext; final ContentResolver contentResolver = context.getContentResolver(); final PackageManager manager = context.getPackageManager(); /* TODO if (mLocaleChanged) { updateShortcutLabels(contentResolver, manager); } */ final Cursor c = contentResolver.query( LauncherSettings.Favorites.CONTENT_URI, null, null, null, null); try { final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.INTENT); final int titleIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.TITLE); final int iconTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_TYPE); final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON); final int iconPackageIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_PACKAGE); final int iconResourceIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_RESOURCE); final int containerIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.CONTAINER); final int itemTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ITEM_TYPE); final int appWidgetIdIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.APPWIDGET_ID); final int screenIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLY); final int spanXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.SPANX); final int spanYIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SPANY); final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI); final int displayModeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.DISPLAY_MODE); ApplicationInfo info; String intentDescription; Widget 
widgetInfo; LauncherAppWidgetInfo appWidgetInfo; int container; long id; Intent intent; while (!mStopped && c.moveToNext()) { try { int itemType = c.getInt(itemTypeIndex); switch (itemType) { case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION: case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT: intentDescription = c.getString(intentIndex); try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { continue; } if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { info = getApplicationInfo(manager, intent, context); } else { info = getApplicationInfoShortcut(c, context, iconTypeIndex, iconPackageIndex, iconResourceIndex, iconIndex); } if (info == null) { info = new ApplicationInfo(); info.icon = manager.getDefaultActivityIcon(); } if (info != null) { info.title = c.getString(titleIndex); info.intent = intent; info.id = c.getLong(idIndex); container = c.getInt(containerIndex); info.container = container; info.screen = c.getInt(screenIndex); info.cellX = c.getInt(cellXIndex); info.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(info); break; default: // Item is in a user folder UserFolderInfo folderInfo = findOrMakeUserFolder(folders, container); folderInfo.add(info); break; } } break; case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: id = c.getLong(idIndex); UserFolderInfo folderInfo = findOrMakeUserFolder(folders, id); folderInfo.title = c.getString(titleIndex); folderInfo.id = id; container = c.getInt(containerIndex); folderInfo.container = container; folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(folderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: id = c.getLong(idIndex); LiveFolderInfo liveFolderInfo = findOrMakeLiveFolder(folders, id); intentDescription = c.getString(intentIndex); intent = null; if (intentDescription != null) { try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { // Ignore, a live folder might not have a base intent } } liveFolderInfo.title = c.getString(titleIndex); liveFolderInfo.id = id; container = c.getInt(containerIndex); liveFolderInfo.container = container; liveFolderInfo.screen = c.getInt(screenIndex); liveFolderInfo.cellX = c.getInt(cellXIndex); liveFolderInfo.cellY = c.getInt(cellYIndex); liveFolderInfo.uri = Uri.parse(c.getString(uriIndex)); liveFolderInfo.baseIntent = intent; liveFolderInfo.displayMode = c.getInt(displayModeIndex); loadLiveFolderIcon(context, c, iconTypeIndex, iconPackageIndex, iconResourceIndex, liveFolderInfo); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(liveFolderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_WIDGET_SEARCH: widgetInfo = Widget.makeSearch(); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP ignoring!"); continue; } widgetInfo.id = c.getLong(idIndex); widgetInfo.screen = c.getInt(screenIndex); widgetInfo.container = container; widgetInfo.cellX = c.getInt(cellXIndex); widgetInfo.cellY = c.getInt(cellYIndex); mItems.add(widgetInfo); break; case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET: // Read all Launcher-specific widget details int appWidgetId = c.getInt(appWidgetIdIndex); appWidgetInfo = new 
LauncherAppWidgetInfo(appWidgetId); appWidgetInfo.id = c.getLong(idIndex); appWidgetInfo.screen = c.getInt(screenIndex); appWidgetInfo.cellX = c.getInt(cellXIndex); appWidgetInfo.cellY = c.getInt(cellYIndex); appWidgetInfo.spanX = c.getInt(spanXIndex); appWidgetInfo.spanY = c.getInt(spanYIndex); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP -- ignoring!"); continue; } appWidgetInfo.container = c.getInt(containerIndex); mAppWidgets.add(appWidgetInfo); break; } } catch (Exception e) { Log.w(TAG, "Desktop items loading interrupted:", e); } } } finally { c.close(); } Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); } /** * Read everything out of our database. */ private void bindWorkspace() { final long t = SystemClock.uptimeMillis(); // Don't use these two variables in any of the callback runnables. // Otherwise we hold a reference to them. Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { // This launcher has exited and nobody bothered to tell us. Just bail. Log.w(TAG, "LoaderThread running with no launcher"); return; } int N; // Tell the workspace that we're about to start firing items at it mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.startBinding(); } } }); // Add the items to the workspace. N = mItems.size(); for (int i=0; i<N; i+=ITEMS_CHUNK) { final int start = i; final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i); mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindItems(mItems, start, start+chunkSize); } } }); } // Wait until the queue goes empty. mHandler.postIdle(new Runnable() { public void run() { Log.d(TAG, "Going to start binding widgets soon."); } }); // Bind the widgets, one at a time. // WARNING: this is calling into the workspace from the background thread, // but since getCurrentScreen() just returns the int, we should be okay. This // is just a hint for the order, and if it's wrong, we'll be okay. // TODO: instead, we should have that push the current screen into here. final int currentScreen = callbacks.getCurrentWorkspaceScreen(); N = mAppWidgets.size(); // once for the current screen for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen == currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // once for the other screens for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen != currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // TODO: Bind the folders // Tell the workspace that we're done. mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.finishBindingItems(); } } }); // If we're profiling, this is the last thing in the queue. 
mHandler.post(new Runnable() { public void run() { Log.d(TAG, "bound workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); if (Launcher.PROFILE_ROTATE) { android.os.Debug.stopMethodTracing(); } } }); } private void loadAllApps() { final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null); mainIntent.addCategory(Intent.CATEGORY_LAUNCHER); final Callbacks callbacks = tryGetCallbacks(); if (callbacks == null) { return; } final Context context = mContext; final PackageManager packageManager = context.getPackageManager(); final List<ResolveInfo> apps = packageManager.queryIntentActivities(mainIntent, 0); synchronized (mLock) { mAllAppsList.clear(); if (apps != null) { long t = SystemClock.uptimeMillis(); int N = apps.size(); Utilities.BubbleText bubble = new Utilities.BubbleText(context); for (int i=0; i<N && !mStopped; i++) { // This builds the icon bitmaps. mAllAppsList.add(AppInfoCache.cache(apps.get(i), context, bubble)); } Collections.sort(mAllAppsList.data, sComparator); Collections.sort(mAllAppsList.added, sComparator); Log.d(TAG, "cached app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } } } private void bindAllApps() { synchronized (mLock) { final ArrayList<ApplicationInfo> results = mAllAppsList.added; mAllAppsList.added = new ArrayList(); mHandler.post(new Runnable() { public void run() { long t = SystemClock.uptimeMillis(); Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAllApplications(results); } Log.d(TAG, "bound app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } }); } } } } /** * Make an ApplicationInfo object for an application. */ private static ApplicationInfo getApplicationInfo(PackageManager manager, Intent intent, Context context) { final ResolveInfo resolveInfo = manager.resolveActivity(intent, 0); if (resolveInfo == null) { return null; } final ApplicationInfo info = new ApplicationInfo(); final ActivityInfo activityInfo = resolveInfo.activityInfo; info.icon = Utilities.createIconThumbnail(activityInfo.loadIcon(manager), context); if (info.title == null || info.title.length() == 0) { info.title = activityInfo.loadLabel(manager); } if (info.title == null) { info.title = ""; } info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION; return info; } /** * Make an ApplicationInfo object for a sortcut */ private static ApplicationInfo getApplicationInfoShortcut(Cursor c, Context context, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex) { final ApplicationInfo info = new ApplicationInfo(); info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT; int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources = packageManager.getResourcesForApplication(packageName); final int id = resources.getIdentifier(resourceName, null, null); info.icon = Utilities.createIconThumbnail(resources.getDrawable(id), context); } catch (Exception e) { info.icon = packageManager.getDefaultActivityIcon(); } info.iconResource = new Intent.ShortcutIconResource(); info.iconResource.packageName = packageName; info.iconResource.resourceName = resourceName; info.customIcon = false; break; case LauncherSettings.Favorites.ICON_TYPE_BITMAP: byte[] data = c.getBlob(iconIndex); try { Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length); info.icon = new 
FastBitmapDrawable( Utilities.createBitmapThumbnail(bitmap, context)); } catch (Exception e) { packageManager = context.getPackageManager(); info.icon = packageManager.getDefaultActivityIcon(); } info.filtered = true; info.customIcon = true; break; default: info.icon = context.getPackageManager().getDefaultActivityIcon(); info.customIcon = false; break; } return info; } private static void loadLiveFolderIcon(Context context, Cursor c, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, LiveFolderInfo liveFolderInfo) { int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources = packageManager.getResourcesForApplication(packageName); final int id = resources.getIdentifier(resourceName, null, null); liveFolderInfo.icon = resources.getDrawable(id); } catch (Exception e) { liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } liveFolderInfo.iconResource = new Intent.ShortcutIconResource(); liveFolderInfo.iconResource.packageName = packageName; liveFolderInfo.iconResource.resourceName = resourceName; break; default: liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, * or make a new one. */ private static UserFolderInfo findOrMakeUserFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof UserFolderInfo)) { // No placeholder -- create a new instance folderInfo = new UserFolderInfo(); folders.put(id, folderInfo); } return (UserFolderInfo) folderInfo; } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, or make a * new one. 
*/ private static LiveFolderInfo findOrMakeLiveFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof LiveFolderInfo)) { // No placeholder -- create a new instance folderInfo = new LiveFolderInfo(); folders.put(id, folderInfo); } return (LiveFolderInfo) folderInfo; } private static void updateShortcutLabels(ContentResolver resolver, PackageManager manager) { final Cursor c = resolver.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { LauncherSettings.Favorites._ID, LauncherSettings.Favorites.TITLE, LauncherSettings.Favorites.INTENT, LauncherSettings.Favorites.ITEM_TYPE }, null, null, null); final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.INTENT); final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); // boolean changed = false; try { while (c.moveToNext()) { try { if (c.getInt(itemTypeIndex) != LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { continue; } final String intentUri = c.getString(intentIndex); if (intentUri != null) { final Intent shortcut = Intent.parseUri(intentUri, 0); if (Intent.ACTION_MAIN.equals(shortcut.getAction())) { final ComponentName name = shortcut.getComponent(); if (name != null) { final ActivityInfo activityInfo = manager.getActivityInfo(name, 0); final String title = c.getString(titleIndex); String label = getLabel(manager, activityInfo); if (title == null || !title.equals(label)) { final ContentValues values = new ContentValues(); values.put(LauncherSettings.Favorites.TITLE, label); resolver.update( LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values, "_id=?", new String[] { String.valueOf(c.getLong(idIndex)) }); // changed = true; } } } } } catch (URISyntaxException e) { // Ignore } catch (PackageManager.NameNotFoundException e) { // Ignore } } } finally { c.close(); } // if (changed) resolver.notifyChange(Settings.Favorites.CONTENT_URI, null); } private static String getLabel(PackageManager manager, ActivityInfo activityInfo) { String label = activityInfo.loadLabel(manager).toString(); if (label == null) { label = manager.getApplicationLabel(activityInfo.applicationInfo).toString(); if (label == null) { label = activityInfo.name; } } return label; } private static final Collator sCollator = Collator.getInstance(); private static final Comparator<ApplicationInfo> sComparator = new Comparator<ApplicationInfo>() { public final int compare(ApplicationInfo a, ApplicationInfo b) { return sCollator.compare(a.title.toString(), b.title.toString()); } }; }
static boolean shortcutExists(Context context, String title, Intent intent) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { "title", "intent" }, "title=? and intent=?", new String[] { title, intent.toUri(0) }, null); boolean result = false; try { result = c.moveToFirst(); } finally { c.close(); } return result; } /** * Find a folder in the db, creating the FolderInfo if necessary, and adding it to folderList. */ FolderInfo getFolderById(Context context, HashMap<Long,FolderInfo> folderList, long id) { final ContentResolver cr = context.getContentResolver(); Cursor c = cr.query(LauncherSettings.Favorites.CONTENT_URI, null, "_id=? and (itemType=? or itemType=?)", new String[] { String.valueOf(id), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER), String.valueOf(LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER) }, null); try { if (c.moveToFirst()) { final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); final int containerIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CONTAINER); final int screenIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.CELLY); FolderInfo folderInfo = null; switch (c.getInt(itemTypeIndex)) { case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: folderInfo = findOrMakeUserFolder(folderList, id); break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: folderInfo = findOrMakeLiveFolder(folderList, id); break; } folderInfo.title = c.getString(titleIndex); folderInfo.id = id; folderInfo.container = c.getInt(containerIndex); folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); return folderInfo; } } finally { c.close(); } return null; } /** * Add an item to the database in a specified container. Sets the container, screen, cellX and * cellY fields of the item. Also assigns an ID to the item. */ static void addItemToDatabase(Context context, ItemInfo item, long container, int screen, int cellX, int cellY, boolean notify) { item.container = container; item.screen = screen; item.cellX = cellX; item.cellY = cellY; final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); Uri result = cr.insert(notify ? LauncherSettings.Favorites.CONTENT_URI : LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values); if (result != null) { item.id = Integer.parseInt(result.getPathSegments().get(1)); } } /** * Update an item to the database in a specified container. 
*/ static void updateItemInDatabase(Context context, ItemInfo item) { final ContentValues values = new ContentValues(); final ContentResolver cr = context.getContentResolver(); item.onAddToDatabase(values); cr.update(LauncherSettings.Favorites.getContentUri(item.id, false), values, null, null); } /** * Removes the specified item from the database * @param context * @param item */ static void deleteItemFromDatabase(Context context, ItemInfo item) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(item.id, false), null, null); } /** * Remove the contents of the specified folder from the database */ static void deleteUserFolderContentsFromDatabase(Context context, UserFolderInfo info) { final ContentResolver cr = context.getContentResolver(); cr.delete(LauncherSettings.Favorites.getContentUri(info.id, false), null, null); cr.delete(LauncherSettings.Favorites.CONTENT_URI, LauncherSettings.Favorites.CONTAINER + "=" + info.id, null); } /** * Set this as the current Launcher activity object for the loader. */ public void initialize(Callbacks callbacks) { synchronized (mLock) { mCallbacks = new WeakReference<Callbacks>(callbacks); } } public void startLoader(Context context, boolean isLaunching) { mLoader.startLoader(context, isLaunching); } public void stopLoader() { mLoader.stopLoader(); } public void setWorkspaceDirty() { mLoader.setWorkspaceDirty(); } /** * Call from the handler for ACTION_PACKAGE_ADDED, ACTION_PACKAGE_REMOVED and * ACTION_PACKAGE_CHANGED. */ public void onReceiveIntent(Context context, Intent intent) { final String packageName = intent.getData().getSchemeSpecificPart(); ArrayList<ApplicationInfo> added = null; ArrayList<ApplicationInfo> removed = null; ArrayList<ApplicationInfo> modified = null; boolean update = false; boolean remove = false; synchronized (mLock) { final String action = intent.getAction(); final boolean replacing = intent.getBooleanExtra(Intent.EXTRA_REPLACING, false); if (packageName == null || packageName.length() == 0) { // they sent us a bad intent return; } if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) { mAllAppsList.updatePackage(context, packageName); update = true; } else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) { if (!replacing) { mAllAppsList.removePackage(packageName); remove = true; } // else, we are replacing the package, so a PACKAGE_ADDED will be sent // later, we will update the package at this time } else { if (!replacing) { mAllAppsList.addPackage(context, packageName); } else { mAllAppsList.updatePackage(context, packageName); update = true; } } if (mAllAppsList.added.size() > 0) { added = mAllAppsList.added; mAllAppsList.added = new ArrayList(); } if (mAllAppsList.removed.size() > 0) { removed = mAllAppsList.removed; mAllAppsList.removed = new ArrayList(); for (ApplicationInfo info: removed) { AppInfoCache.remove(info.intent.getComponent()); } } if (mAllAppsList.modified.size() > 0) { modified = mAllAppsList.modified; mAllAppsList.modified = new ArrayList(); } final Callbacks callbacks = mCallbacks != null ? 
mCallbacks.get() : null; if (callbacks == null) { return; } if (added != null) { final ArrayList<ApplicationInfo> addedFinal = added; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageAdded(addedFinal); } }); } if (update || modified != null) { final ArrayList<ApplicationInfo> modifiedFinal = modified; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageUpdated(packageName, modifiedFinal); } }); } if (remove || removed != null) { final ArrayList<ApplicationInfo> removedFinal = removed; mHandler.post(new Runnable() { public void run() { callbacks.bindPackageRemoved(packageName, removedFinal); } }); } } } public class Loader { private static final int ITEMS_CHUNK = 6; private LoaderThread mLoaderThread; private int mLastWorkspaceSeq = 0; private int mWorkspaceSeq = 1; private int mLastAllAppsSeq = 0; private int mAllAppsSeq = 1; final ArrayList<ItemInfo> mItems = new ArrayList(); final ArrayList<LauncherAppWidgetInfo> mAppWidgets = new ArrayList(); final HashMap<Long, FolderInfo> folders = new HashMap(); /** * Call this from the ui thread so the handler is initialized on the correct thread. */ public Loader() { } public void startLoader(Context context, boolean isLaunching) { synchronized (mLock) { Log.d(TAG, "startLoader isLaunching=" + isLaunching); // Don't bother to start the thread if we know it's not going to do anything if (mCallbacks.get() != null) { LoaderThread oldThread = mLoaderThread; if (oldThread != null) { if (oldThread.isLaunching()) { // don't downgrade isLaunching if we're already running isLaunching = true; } oldThread.stopLocked(); } mLoaderThread = new LoaderThread(context, oldThread, isLaunching); mLoaderThread.start(); } } } public void stopLoader() { synchronized (mLock) { if (mLoaderThread != null) { mLoaderThread.stopLocked(); } } } public void setWorkspaceDirty() { synchronized (mLock) { mWorkspaceSeq++; } } public void setAllAppsDirty() { synchronized (mLock) { mAllAppsSeq++; } } /** * Runnable for the thread that loads the contents of the launcher: * - workspace icons * - widgets * - all apps icons */ private class LoaderThread extends Thread { private Context mContext; private Thread mWaitThread; private boolean mIsLaunching; private boolean mStopped; private boolean mWorkspaceDoneBinding; LoaderThread(Context context, Thread waitThread, boolean isLaunching) { mContext = context; mWaitThread = waitThread; mIsLaunching = isLaunching; } boolean isLaunching() { return mIsLaunching; } /** * If another LoaderThread was supplied, we need to wait for that to finish before * we start our processing. This keeps the ordering of the setting and clearing * of the dirty flags correct by making sure we don't start processing stuff until * they've had a chance to re-set them. We do this waiting on the worker thread, not * the ui thread to avoid ANRs. */ private void waitForOtherThread() { if (mWaitThread != null) { boolean done = false; while (!done) { try { mWaitThread.join(); done = true; } catch (InterruptedException ex) { } } mWaitThread = null; } } public void run() { waitForOtherThread(); // Elevate priority when Home launches for the first time to avoid // starving at boot time. Staring at a blank home is not cool. synchronized (mLock) { android.os.Process.setThreadPriority(mIsLaunching ? Process.THREAD_PRIORITY_DEFAULT : Process.THREAD_PRIORITY_BACKGROUND); } // Load the workspace only if it's dirty.
int workspaceSeq; boolean workspaceDirty; synchronized (mLock) { workspaceSeq = mWorkspaceSeq; workspaceDirty = mWorkspaceSeq != mLastWorkspaceSeq; } if (workspaceDirty) { loadWorkspace(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mWorkspaceSeq. if (mStopped) { return; } if (workspaceSeq == mWorkspaceSeq) { mLastWorkspaceSeq = mWorkspaceSeq; } } // Bind the workspace bindWorkspace(); // Wait until either we're stopped or the other threads are done. // This way we don't start loading all apps until the workspace has settled // down. synchronized (LoaderThread.this) { mHandler.post(new Runnable() { public void run() { synchronized (LoaderThread.this) { mWorkspaceDoneBinding = true; Log.d(TAG, "done with workspace"); LoaderThread.this.notify(); } } }); Log.d(TAG, "waiting to be done with workspace"); while (!mStopped && !mWorkspaceDoneBinding) { try { this.wait(); } catch (InterruptedException ex) { } } Log.d(TAG, "done waiting to be done with workspace"); } // Load all apps if they're dirty int allAppsSeq; boolean allAppsDirty; synchronized (mLock) { allAppsSeq = mAllAppsSeq; allAppsDirty = mAllAppsSeq != mLastAllAppsSeq; } if (allAppsDirty) { loadAllApps(); } synchronized (mLock) { // If we're not stopped, and nobody has incremented mAllAppsSeq. if (mStopped) { return; } if (allAppsSeq == mAllAppsSeq) { mLastAllAppsSeq = mAllAppsSeq; } } // Bind all apps bindAllApps(); // Clear out this reference, otherwise we end up holding it until all of the // callback runnables are done. mContext = null; synchronized (mLock) { // Setting the reference is atomic, but we can't do it inside the other critical // sections. mLoaderThread = null; return; } } public void stopLocked() { synchronized (LoaderThread.this) { mStopped = true; this.notify(); } } /** * Gets the callbacks object. If we've been stopped, or if the launcher object * has somehow been garbage collected, return null instead.
*/ Callbacks tryGetCallbacks() { synchronized (mLock) { if (mStopped) { return null; } final Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { Log.w(TAG, "no mCallbacks"); return null; } return callbacks; } } private void loadWorkspace() { long t = SystemClock.uptimeMillis(); final Context context = mContext; final ContentResolver contentResolver = context.getContentResolver(); final PackageManager manager = context.getPackageManager(); /* TODO if (mLocaleChanged) { updateShortcutLabels(contentResolver, manager); } */ final Cursor c = contentResolver.query( LauncherSettings.Favorites.CONTENT_URI, null, null, null, null); try { final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.INTENT); final int titleIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.TITLE); final int iconTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_TYPE); final int iconIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ICON); final int iconPackageIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_PACKAGE); final int iconResourceIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ICON_RESOURCE); final int containerIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.CONTAINER); final int itemTypeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.ITEM_TYPE); final int appWidgetIdIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.APPWIDGET_ID); final int screenIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SCREEN); final int cellXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLX); final int cellYIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.CELLY); final int spanXIndex = c.getColumnIndexOrThrow (LauncherSettings.Favorites.SPANX); final int spanYIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.SPANY); final int uriIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.URI); final int displayModeIndex = c.getColumnIndexOrThrow( LauncherSettings.Favorites.DISPLAY_MODE); ApplicationInfo info; String intentDescription; Widget widgetInfo; LauncherAppWidgetInfo appWidgetInfo; int container; long id; Intent intent; while (!mStopped && c.moveToNext()) { try { int itemType = c.getInt(itemTypeIndex); switch (itemType) { case LauncherSettings.Favorites.ITEM_TYPE_APPLICATION: case LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT: intentDescription = c.getString(intentIndex); try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { continue; } if (itemType == LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { info = getApplicationInfo(manager, intent, context); } else { info = getApplicationInfoShortcut(c, context, iconTypeIndex, iconPackageIndex, iconResourceIndex, iconIndex); } if (info == null) { info = new ApplicationInfo(); info.icon = manager.getDefaultActivityIcon(); } if (info != null) { info.title = c.getString(titleIndex); info.intent = intent; info.id = c.getLong(idIndex); container = c.getInt(containerIndex); info.container = container; info.screen = c.getInt(screenIndex); info.cellX = c.getInt(cellXIndex); info.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(info); break; default: // Item is in a user folder UserFolderInfo folderInfo = findOrMakeUserFolder(folders, container); folderInfo.add(info); break; } } break; case LauncherSettings.Favorites.ITEM_TYPE_USER_FOLDER: id = 
c.getLong(idIndex); UserFolderInfo folderInfo = findOrMakeUserFolder(folders, id); folderInfo.title = c.getString(titleIndex); folderInfo.id = id; container = c.getInt(containerIndex); folderInfo.container = container; folderInfo.screen = c.getInt(screenIndex); folderInfo.cellX = c.getInt(cellXIndex); folderInfo.cellY = c.getInt(cellYIndex); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(folderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_LIVE_FOLDER: id = c.getLong(idIndex); LiveFolderInfo liveFolderInfo = findOrMakeLiveFolder(folders, id); intentDescription = c.getString(intentIndex); intent = null; if (intentDescription != null) { try { intent = Intent.parseUri(intentDescription, 0); } catch (URISyntaxException e) { // Ignore, a live folder might not have a base intent } } liveFolderInfo.title = c.getString(titleIndex); liveFolderInfo.id = id; container = c.getInt(containerIndex); liveFolderInfo.container = container; liveFolderInfo.screen = c.getInt(screenIndex); liveFolderInfo.cellX = c.getInt(cellXIndex); liveFolderInfo.cellY = c.getInt(cellYIndex); liveFolderInfo.uri = Uri.parse(c.getString(uriIndex)); liveFolderInfo.baseIntent = intent; liveFolderInfo.displayMode = c.getInt(displayModeIndex); loadLiveFolderIcon(context, c, iconTypeIndex, iconPackageIndex, iconResourceIndex, liveFolderInfo); switch (container) { case LauncherSettings.Favorites.CONTAINER_DESKTOP: mItems.add(liveFolderInfo); break; } break; case LauncherSettings.Favorites.ITEM_TYPE_WIDGET_SEARCH: widgetInfo = Widget.makeSearch(); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP ignoring!"); continue; } widgetInfo.id = c.getLong(idIndex); widgetInfo.screen = c.getInt(screenIndex); widgetInfo.container = container; widgetInfo.cellX = c.getInt(cellXIndex); widgetInfo.cellY = c.getInt(cellYIndex); mItems.add(widgetInfo); break; case LauncherSettings.Favorites.ITEM_TYPE_APPWIDGET: // Read all Launcher-specific widget details int appWidgetId = c.getInt(appWidgetIdIndex); appWidgetInfo = new LauncherAppWidgetInfo(appWidgetId); appWidgetInfo.id = c.getLong(idIndex); appWidgetInfo.screen = c.getInt(screenIndex); appWidgetInfo.cellX = c.getInt(cellXIndex); appWidgetInfo.cellY = c.getInt(cellYIndex); appWidgetInfo.spanX = c.getInt(spanXIndex); appWidgetInfo.spanY = c.getInt(spanYIndex); container = c.getInt(containerIndex); if (container != LauncherSettings.Favorites.CONTAINER_DESKTOP) { Log.e(TAG, "Widget found where container " + "!= CONTAINER_DESKTOP -- ignoring!"); continue; } appWidgetInfo.container = c.getInt(containerIndex); mAppWidgets.add(appWidgetInfo); break; } } catch (Exception e) { Log.w(TAG, "Desktop items loading interrupted:", e); } } } finally { c.close(); } Log.d(TAG, "loaded workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); } /** * Read everything out of our database. */ private void bindWorkspace() { final long t = SystemClock.uptimeMillis(); // Don't use these two variables in any of the callback runnables. // Otherwise we hold a reference to them. Callbacks callbacks = mCallbacks.get(); if (callbacks == null) { // This launcher has exited and nobody bothered to tell us. Just bail. 
Log.w(TAG, "LoaderThread running with no launcher"); return; } int N; // Tell the workspace that we're about to start firing items at it mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.startBinding(); } } }); // Add the items to the workspace. N = mItems.size(); for (int i=0; i<N; i+=ITEMS_CHUNK) { final int start = i; final int chunkSize = (i+ITEMS_CHUNK <= N) ? ITEMS_CHUNK : (N-i); mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindItems(mItems, start, start+chunkSize); } } }); } // Wait until the queue goes empty. mHandler.postIdle(new Runnable() { public void run() { Log.d(TAG, "Going to start binding widgets soon."); } }); // Bind the widgets, one at a time. // WARNING: this is calling into the workspace from the background thread, // but since getCurrentScreen() just returns the int, we should be okay. This // is just a hint for the order, and if it's wrong, we'll be okay. // TODO: instead, we should have that push the current screen into here. final int currentScreen = callbacks.getCurrentWorkspaceScreen(); N = mAppWidgets.size(); // once for the current screen for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen == currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // once for the other screens for (int i=0; i<N; i++) { final LauncherAppWidgetInfo widget = mAppWidgets.get(i); if (widget.screen != currentScreen) { mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAppWidget(widget); } } }); } } // TODO: Bind the folders // Tell the workspace that we're done. mHandler.post(new Runnable() { public void run() { Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.finishBindingItems(); } } }); // If we're profiling, this is the last thing in the queue. mHandler.post(new Runnable() { public void run() { Log.d(TAG, "bound workspace in " + (SystemClock.uptimeMillis()-t) + "ms"); if (Launcher.PROFILE_ROTATE) { android.os.Debug.stopMethodTracing(); } } }); } private void loadAllApps() { final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null); mainIntent.addCategory(Intent.CATEGORY_LAUNCHER); final Callbacks callbacks = tryGetCallbacks(); if (callbacks == null) { return; } final Context context = mContext; final PackageManager packageManager = context.getPackageManager(); final List<ResolveInfo> apps = packageManager.queryIntentActivities(mainIntent, 0); synchronized (mLock) { mAllAppsList.clear(); if (apps != null) { long t = SystemClock.uptimeMillis(); int N = apps.size(); Utilities.BubbleText bubble = new Utilities.BubbleText(context); for (int i=0; i<N && !mStopped; i++) { // This builds the icon bitmaps. 
mAllAppsList.add(AppInfoCache.cache(apps.get(i), context, bubble)); } Collections.sort(mAllAppsList.data, sComparator); Collections.sort(mAllAppsList.added, sComparator); Log.d(TAG, "cached app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } } } private void bindAllApps() { synchronized (mLock) { final ArrayList<ApplicationInfo> results = mAllAppsList.added; mAllAppsList.added = new ArrayList(); mHandler.post(new Runnable() { public void run() { long t = SystemClock.uptimeMillis(); Callbacks callbacks = tryGetCallbacks(); if (callbacks != null) { callbacks.bindAllApplications(results); } Log.d(TAG, "bound app icons in " + (SystemClock.uptimeMillis()-t) + "ms"); } }); } } } } /** * Make an ApplicationInfo object for an application. */ private static ApplicationInfo getApplicationInfo(PackageManager manager, Intent intent, Context context) { final ResolveInfo resolveInfo = manager.resolveActivity(intent, 0); if (resolveInfo == null) { return null; } final ApplicationInfo info = new ApplicationInfo(); final ActivityInfo activityInfo = resolveInfo.activityInfo; info.icon = Utilities.createIconThumbnail(activityInfo.loadIcon(manager), context); if (info.title == null || info.title.length() == 0) { info.title = activityInfo.loadLabel(manager); } if (info.title == null) { info.title = ""; } info.itemType = LauncherSettings.Favorites.ITEM_TYPE_APPLICATION; return info; } /** * Make an ApplicationInfo object for a shortcut. */ private static ApplicationInfo getApplicationInfoShortcut(Cursor c, Context context, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, int iconIndex) { final ApplicationInfo info = new ApplicationInfo(); info.itemType = LauncherSettings.Favorites.ITEM_TYPE_SHORTCUT; int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources = Utilities.createIconThumbnail(resources.getDrawable(id), context); } catch (Exception e) { info.icon = packageManager.getDefaultActivityIcon(); } info.iconResource = new Intent.ShortcutIconResource(); info.iconResource.packageName = packageName; info.iconResource.resourceName = resourceName; info.customIcon = false; break; case LauncherSettings.Favorites.ICON_TYPE_BITMAP: byte[] data = c.getBlob(iconIndex); try { Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length); info.icon = new FastBitmapDrawable( Utilities.createBitmapThumbnail(bitmap, context)); } catch (Exception e) { packageManager = context.getPackageManager(); info.icon = packageManager.getDefaultActivityIcon(); } info.filtered = true; info.customIcon = true; break; default: info.icon = context.getPackageManager().getDefaultActivityIcon(); info.customIcon = false; break; } return info; } private static void loadLiveFolderIcon(Context context, Cursor c, int iconTypeIndex, int iconPackageIndex, int iconResourceIndex, LiveFolderInfo liveFolderInfo) { int iconType = c.getInt(iconTypeIndex); switch (iconType) { case LauncherSettings.Favorites.ICON_TYPE_RESOURCE: String packageName = c.getString(iconPackageIndex); String resourceName = c.getString(iconResourceIndex); PackageManager packageManager = context.getPackageManager(); try { Resources resources =
packageManager.getResourcesForApplication(packageName); final int id = resources.getIdentifier(resourceName, null, null); liveFolderInfo.icon = resources.getDrawable(id); } catch (Exception e) { liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } liveFolderInfo.iconResource = new Intent.ShortcutIconResource(); liveFolderInfo.iconResource.packageName = packageName; liveFolderInfo.iconResource.resourceName = resourceName; break; default: liveFolderInfo.icon = context.getResources().getDrawable(R.drawable.ic_launcher_folder); } } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, * or make a new one. */ private static UserFolderInfo findOrMakeUserFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof UserFolderInfo)) { // No placeholder -- create a new instance folderInfo = new UserFolderInfo(); folders.put(id, folderInfo); } return (UserFolderInfo) folderInfo; } /** * Return an existing UserFolderInfo object if we have encountered this ID previously, or make a * new one. */ private static LiveFolderInfo findOrMakeLiveFolder(HashMap<Long, FolderInfo> folders, long id) { // See if a placeholder was created for us already FolderInfo folderInfo = folders.get(id); if (folderInfo == null || !(folderInfo instanceof LiveFolderInfo)) { // No placeholder -- create a new instance folderInfo = new LiveFolderInfo(); folders.put(id, folderInfo); } return (LiveFolderInfo) folderInfo; } private static void updateShortcutLabels(ContentResolver resolver, PackageManager manager) { final Cursor c = resolver.query(LauncherSettings.Favorites.CONTENT_URI, new String[] { LauncherSettings.Favorites._ID, LauncherSettings.Favorites.TITLE, LauncherSettings.Favorites.INTENT, LauncherSettings.Favorites.ITEM_TYPE }, null, null, null); final int idIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites._ID); final int intentIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.INTENT); final int itemTypeIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.ITEM_TYPE); final int titleIndex = c.getColumnIndexOrThrow(LauncherSettings.Favorites.TITLE); // boolean changed = false; try { while (c.moveToNext()) { try { if (c.getInt(itemTypeIndex) != LauncherSettings.Favorites.ITEM_TYPE_APPLICATION) { continue; } final String intentUri = c.getString(intentIndex); if (intentUri != null) { final Intent shortcut = Intent.parseUri(intentUri, 0); if (Intent.ACTION_MAIN.equals(shortcut.getAction())) { final ComponentName name = shortcut.getComponent(); if (name != null) { final ActivityInfo activityInfo = manager.getActivityInfo(name, 0); final String title = c.getString(titleIndex); String label = getLabel(manager, activityInfo); if (title == null || !title.equals(label)) { final ContentValues values = new ContentValues(); values.put(LauncherSettings.Favorites.TITLE, label); resolver.update( LauncherSettings.Favorites.CONTENT_URI_NO_NOTIFICATION, values, "_id=?", new String[] { String.valueOf(c.getLong(idIndex)) }); // changed = true; } } } } } catch (URISyntaxException e) { // Ignore } catch (PackageManager.NameNotFoundException e) { // Ignore } } } finally { c.close(); } // if (changed) resolver.notifyChange(Settings.Favorites.CONTENT_URI, null); } private static String getLabel(PackageManager manager, ActivityInfo activityInfo) { String label = activityInfo.loadLabel(manager).toString(); if (label == null) 
{ label = manager.getApplicationLabel(activityInfo.applicationInfo).toString(); if (label == null) { label = activityInfo.name; } } return label; } private static final Collator sCollator = Collator.getInstance(); private static final Comparator<ApplicationInfo> sComparator = new Comparator<ApplicationInfo>() { public final int compare(ApplicationInfo a, ApplicationInfo b) { return sCollator.compare(a.title.toString(), b.title.toString()); } }; }
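A note on the LauncherModel loader in this record: bindWorkspace() never hands the whole result set to the UI thread at once, it posts Runnables that each bind at most ITEMS_CHUNK (6) items, so no single message blocks input handling. The following minimal sketch isolates that chunked-binding pattern; it is illustrative only, the names ChunkedBinder and Sink are hypothetical, and it shows the general technique rather than this record's actual fix.

import android.os.Handler;
import android.os.Looper;
import java.util.List;

final class ChunkedBinder<T> {
    private static final int CHUNK = 6; // mirrors ITEMS_CHUNK in the code above

    // Receives one slice of the loaded items on the UI thread (hypothetical).
    interface Sink<E> { void bind(List<E> items, int start, int end); }

    private final Handler uiHandler = new Handler(Looper.getMainLooper());

    void post(final List<T> items, final Sink<T> sink) {
        final int n = items.size();
        for (int i = 0; i < n; i += CHUNK) {
            final int start = i;
            final int end = Math.min(i + CHUNK, n);
            // Each Runnable binds only a small slice, so input events can be
            // processed between slices instead of waiting for the whole batch.
            uiHandler.post(new Runnable() {
                public void run() { sink.bind(items, start, end); }
            });
        }
    }
}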
diff --git a/src/main/java/burst/reader/web/ExtendedRedirectResult.java b/src/main/java/burst/reader/web/ExtendedRedirectResult.java index 7aac35d..db88553 100644 --- a/src/main/java/burst/reader/web/ExtendedRedirectResult.java +++ b/src/main/java/burst/reader/web/ExtendedRedirectResult.java @@ -1,55 +1,60 @@ package burst.reader.web; import burst.web.util.WebUtil; import com.opensymphony.xwork2.ActionInvocation; import org.apache.struts2.ServletActionContext; import org.apache.struts2.dispatcher.StrutsResultSupport; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.PrintWriter; import static javax.servlet.http.HttpServletResponse.SC_FOUND; /** * Created with IntelliJ IDEA. * User: Burst * Date: 13-4-4 * Time: 下午9:43 * To change this template use File | Settings | File Templates. */ public class ExtendedRedirectResult extends StrutsResultSupport { private int statusCode = SC_FOUND; public void setStatusCode(int statusCode) { this.statusCode = statusCode; } @Override protected void doExecute(String finalLocation, ActionInvocation invocation) throws Exception { HttpServletRequest request = ServletActionContext.getRequest(); HttpServletResponse response = ServletActionContext.getResponse(); - if(request.getHeader(WebUtil.HEAD_USERAGENT).toLowerCase().indexOf("untrusted/1.0") != -1) { + boolean write_wml = false; + String userAgent = request.getHeader(WebUtil.HEAD_USERAGENT); + if (userAgent != null) { + write_wml = userAgent.toLowerCase().indexOf("untrusted/1.0") != -1; + } + if (write_wml) { response.setContentType("text/vnd.wap.wml"); PrintWriter writer = response.getWriter(); writer.print("<?xml version=\"1.0\"?><!DOCTYPE wml PUBLIC \"-//WAPFORUM//DTD WML 1.1//EN\" \"http://www.wapforum.org/DTD/wml_1.1.xml\">"); writer.print("<wml><head>"); writer.print("<meta http-equiv=\"Content-Type\" content=\"text/vnd.wap.wml;charset=UTF-8\"/>"); writer.print("</head><card id=\"main\" title=\"redirecting...\" onenterforward=\"" + finalLocation + "\"><p>redirecting...</p></card></wml>"); writer.close(); } else { if (SC_FOUND == statusCode) { response.sendRedirect(finalLocation); } else { response.setStatus(statusCode); response.setHeader("Location", finalLocation); response.getWriter().write(finalLocation); response.getWriter().close(); } } } }
true
true
protected void doExecute(String finalLocation, ActionInvocation invocation) throws Exception { HttpServletRequest request = ServletActionContext.getRequest(); HttpServletResponse response = ServletActionContext.getResponse(); if(request.getHeader(WebUtil.HEAD_USERAGENT).toLowerCase().indexOf("untrusted/1.0") != -1) { response.setContentType("text/vnd.wap.wml"); PrintWriter writer = response.getWriter(); writer.print("<?xml version=\"1.0\"?><!DOCTYPE wml PUBLIC \"-//WAPFORUM//DTD WML 1.1//EN\" \"http://www.wapforum.org/DTD/wml_1.1.xml\">"); writer.print("<wml><head>"); writer.print("<meta http-equiv=\"Content-Type\" content=\"text/vnd.wap.wml;charset=UTF-8\"/>"); writer.print("</head><card id=\"main\" title=\"redirecting...\" onenterforward=\"" + finalLocation + "\"><p>redirecting...</p></card></wml>"); writer.close(); } else { if (SC_FOUND == statusCode) { response.sendRedirect(finalLocation); } else { response.setStatus(statusCode); response.setHeader("Location", finalLocation); response.getWriter().write(finalLocation); response.getWriter().close(); } } }
protected void doExecute(String finalLocation, ActionInvocation invocation) throws Exception { HttpServletRequest request = ServletActionContext.getRequest(); HttpServletResponse response = ServletActionContext.getResponse(); boolean write_wml = false; String userAgent = request.getHeader(WebUtil.HEAD_USERAGENT); if (userAgent != null) { write_wml = userAgent.toLowerCase().indexOf("untrusted/1.0") != -1; } if (write_wml) { response.setContentType("text/vnd.wap.wml"); PrintWriter writer = response.getWriter(); writer.print("<?xml version=\"1.0\"?><!DOCTYPE wml PUBLIC \"-//WAPFORUM//DTD WML 1.1//EN\" \"http://www.wapforum.org/DTD/wml_1.1.xml\">"); writer.print("<wml><head>"); writer.print("<meta http-equiv=\"Content-Type\" content=\"text/vnd.wap.wml;charset=UTF-8\"/>"); writer.print("</head><card id=\"main\" title=\"redirecting...\" onenterforward=\"" + finalLocation + "\"><p>redirecting...</p></card></wml>"); writer.close(); } else { if (SC_FOUND == statusCode) { response.sendRedirect(finalLocation); } else { response.setStatus(statusCode); response.setHeader("Location", finalLocation); response.getWriter().write(finalLocation); response.getWriter().close(); } } }
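The fix in this record is a plain null guard: HttpServletRequest.getHeader() returns null whenever the client omits the header, so calling toLowerCase() on the raw return value throws a NullPointerException for any request without a User-Agent. A minimal sketch of the safe form, with a hypothetical helper name:

import javax.servlet.http.HttpServletRequest;

final class HeaderUtil {
    // True only when the header is present and contains the token (case-insensitive).
    static boolean headerContains(HttpServletRequest request, String name, String token) {
        String value = request.getHeader(name); // null when the header is absent
        return value != null && value.toLowerCase().contains(token.toLowerCase());
    }
}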
diff --git a/src/main/java/com/solidstategroup/radar/dao/impl/GenericDiagnosisDaoImpl.java b/src/main/java/com/solidstategroup/radar/dao/impl/GenericDiagnosisDaoImpl.java index 8356bc51..9af03056 100644 --- a/src/main/java/com/solidstategroup/radar/dao/impl/GenericDiagnosisDaoImpl.java +++ b/src/main/java/com/solidstategroup/radar/dao/impl/GenericDiagnosisDaoImpl.java @@ -1,58 +1,58 @@ package com.solidstategroup.radar.dao.impl; import com.solidstategroup.radar.dao.generic.GenericDiagnosisDao; import com.solidstategroup.radar.model.generic.DiseaseGroup; import com.solidstategroup.radar.model.generic.GenericDiagnosis; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.RowMapper; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Collections; import java.util.List; public class GenericDiagnosisDaoImpl extends BaseDaoImpl implements GenericDiagnosisDao { private static final Logger LOGGER = LoggerFactory.getLogger(GenericDiagnosisDaoImpl.class); public List<GenericDiagnosis> getAll() { return jdbcTemplate.query("SELECT * FROM rdr_prd_code, rdr_diagnosis_mapping" + " WHERE rdr_prd_code.ERA_EDTA_PRD_code = rdr_diagnosis_mapping.PRDCode", new GenericDiagnosisRowMapper()); } public List<GenericDiagnosis> getByDiseaseGroup(DiseaseGroup diseaseGroup) { List<GenericDiagnosis> genericDiagnosises = jdbcTemplate.query("SELECT * FROM rdr_prd_code, " + "rdr_diagnosis_mapping" + " WHERE rdr_prd_code.ERA_EDTA_PRD_code = rdr_diagnosis_mapping.PRDCode" + " AND rdr_diagnosis_mapping.workingGroup = ?", new Object[]{diseaseGroup.getId()}, new GenericDiagnosisRowMapper()); Collections.sort(genericDiagnosises); return genericDiagnosises; } public GenericDiagnosis getById(String id) { try { - return jdbcTemplate.queryForObject("SELECT DISTINCT *, FROM rdr_prd_code, rdr_diagnosis_mapping" + + return jdbcTemplate.queryForObject("SELECT DISTINCT * FROM rdr_prd_code, rdr_diagnosis_mapping" + " WHERE rdr_prd_code.ERA_EDTA_PRD_code = rdr_diagnosis_mapping.PRDCode" + " AND ERA_EDTA_PRD_code = ?", new Object[]{id}, new GenericDiagnosisRowMapper()); } catch (EmptyResultDataAccessException e) { LOGGER.error("generic diagnosis with id " + id + "not found" + e); return null; } } private class GenericDiagnosisRowMapper implements RowMapper<GenericDiagnosis> { public GenericDiagnosis mapRow(ResultSet resultSet, int i) throws SQLException { GenericDiagnosis genericDiagnosis = new GenericDiagnosis(); genericDiagnosis.setId(resultSet.getString("rdr_prd_code.ERA_EDTA_PRD_code")); genericDiagnosis.setTerm(resultSet.getString("rdr_prd_code.ERA_EDTA_primaryRenalDiagnosisTerm")); Integer order = getIntegerWithNullCheck("ordering", resultSet); genericDiagnosis.setOrder(order != null ? order : 0); return genericDiagnosis; } } }
true
true
public GenericDiagnosis getById(String id) { try { return jdbcTemplate.queryForObject("SELECT DISTINCT *, FROM rdr_prd_code, rdr_diagnosis_mapping" + " WHERE rdr_prd_code.ERA_EDTA_PRD_code = rdr_diagnosis_mapping.PRDCode" + " AND ERA_EDTA_PRD_code = ?", new Object[]{id}, new GenericDiagnosisRowMapper()); } catch (EmptyResultDataAccessException e) { LOGGER.error("generic diagnosis with id " + id + "not found" + e); return null; } }
public GenericDiagnosis getById(String id) { try { return jdbcTemplate.queryForObject("SELECT DISTINCT * FROM rdr_prd_code, rdr_diagnosis_mapping" + " WHERE rdr_prd_code.ERA_EDTA_PRD_code = rdr_diagnosis_mapping.PRDCode" + " AND ERA_EDTA_PRD_code = ?", new Object[]{id}, new GenericDiagnosisRowMapper()); } catch (EmptyResultDataAccessException e) { LOGGER.error("generic diagnosis with id " + id + "not found" + e); return null; } }
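The bug in this record is a stray comma ("SELECT DISTINCT *, FROM ...") that makes the statement invalid SQL; string-built queries like this fail only when first executed, never at compile time. The surrounding try/catch is the usual Spring idiom, since JdbcTemplate.queryForObject throws EmptyResultDataAccessException on an empty result instead of returning null. A minimal sketch of that idiom, against a hypothetical table and column:

import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;

final class CodeLookup {
    private final JdbcTemplate jdbcTemplate;

    CodeLookup(JdbcTemplate jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; }

    // Returns the term for a code, or null when no row matches.
    String findTerm(String code) {
        try {
            return jdbcTemplate.queryForObject(
                    "SELECT term FROM codes WHERE code = ?",
                    new Object[]{code}, String.class);
        } catch (EmptyResultDataAccessException e) {
            return null; // queryForObject treats an empty result as an error
        }
    }
}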
diff --git a/carrot2/components/carrot2-launcher/src/org/carrot2/launcher/LaunchOptions.java b/carrot2/components/carrot2-launcher/src/org/carrot2/launcher/LaunchOptions.java index 557431710..b023f019b 100644 --- a/carrot2/components/carrot2-launcher/src/org/carrot2/launcher/LaunchOptions.java +++ b/carrot2/components/carrot2-launcher/src/org/carrot2/launcher/LaunchOptions.java @@ -1,118 +1,118 @@ /* * Carrot2 project. * * Copyright (C) 2002-2007, Dawid Weiss, Stanisław Osiński. * Portions (C) Contributors listed in "carrot2.CONTRIBUTORS" file. * All rights reserved. * * Refer to the full license file "carrot2.LICENSE" * in the root folder of the repository checkout or at: * http://www.carrot2.org/carrot2.LICENSE */ package org.carrot2.launcher; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; /** * Launch options for {@link Launcher}. * * @author Dawid Weiss */ final class LaunchOptions { /** * A list of {@link URL}s to classpath locations. */ private final ArrayList urls = new ArrayList(); /** * Fully qualified name of the class to launch. */ private String className; /** * Arguments to be passed to the launched class. */ private String [] classArgs; /** * */ public void setClassName(String className) { this.className = className; } /** * */ public String getClassName() { return className; } /** * */ public void setClassArgs(String [] classArgs) { this.classArgs = classArgs; } /** * */ public String [] getClassArgs() { return classArgs; } /** * Adds a single JAR location to classpath. */ public void addJarLocation(File jarLocation) { try { this.urls.add(jarLocation.toURL()); } catch (MalformedURLException e) { throw new LaunchException("Could not create an URL to: " + jarLocation); } } /** * Adds a classpath directory (top folder of packages structure). */ public void addDirLocation(File dirLocation) { try { - final URL url = dirLocation.toURL(); + final URL url = dirLocation.toURI().toURL(); final String external = url.toExternalForm(); if (!external.endsWith("/")) { throw new LaunchException("A folder URL should end with a '/'."); } this.urls.add(url); } catch (MalformedURLException e) { throw new LaunchException("Could not create an URL to: " + dirLocation); } } /** * @return Returns a list of {@link URL}s to classpath locations. */ public URL [] getClasspathURLs() { final URL [] urls = (URL []) this.urls.toArray(new URL [this.urls.size()]); return urls; } }
true
true
public void addDirLocation(File dirLocation) { try { final URL url = dirLocation.toURL(); final String external = url.toExternalForm(); if (!external.endsWith("/")) { throw new LaunchException("A folder URL should end with a '/'."); } this.urls.add(url); } catch (MalformedURLException e) { throw new LaunchException("Could not create an URL to: " + dirLocation); } }
public void addDirLocation(File dirLocation) { try { final URL url = dirLocation.toURI().toURL(); final String external = url.toExternalForm(); if (!external.endsWith("/")) { throw new LaunchException("A folder URL should end with a '/'."); } this.urls.add(url); } catch (MalformedURLException e) { throw new LaunchException("Could not create an URL to: " + dirLocation); } }
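The fix in this record replaces the deprecated File.toURL() with File.toURI().toURL(). The one-step form does not percent-escape characters that are illegal in URLs (a space in a classpath entry is the classic case), while going through a URI escapes them correctly. A small illustration, with a hypothetical path:

import java.io.File;
import java.net.URL;

public class ToUrlDemo {
    public static void main(String[] args) throws Exception {
        File dir = new File("/opt/class path"); // note the space (hypothetical path)
        URL escaped = dir.toURI().toURL();
        // Prints file:/opt/class%20path -- the space is escaped, which the
        // deprecated File.toURL() would not have done. (toURI() appends a
        // trailing slash only when the directory actually exists on disk.)
        System.out.println(escaped);
    }
}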
diff --git a/pp-rebel/src/main/java/com/polopoly/javarebel/PolopolyJRebelPlugin.java b/pp-rebel/src/main/java/com/polopoly/javarebel/PolopolyJRebelPlugin.java index 31a2520..ce1fea6 100644 --- a/pp-rebel/src/main/java/com/polopoly/javarebel/PolopolyJRebelPlugin.java +++ b/pp-rebel/src/main/java/com/polopoly/javarebel/PolopolyJRebelPlugin.java @@ -1,110 +1,115 @@ /** * Copyright (C) 2010 ZeroTurnaround OU * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License v2 as published by * the Free Software Foundation, with the additional requirement that * ZeroTurnaround OU must be prominently attributed in the program. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You can find a copy of GNU General Public License v2 from * http://www.gnu.org/licenses/gpl-2.0.txt */ package com.polopoly.javarebel; import org.zeroturnaround.javarebel.ClassResourceSource; import org.zeroturnaround.javarebel.Integration; import org.zeroturnaround.javarebel.IntegrationFactory; import org.zeroturnaround.javarebel.LoggerFactory; import org.zeroturnaround.javarebel.Plugin; import com.polopoly.javarebel.cfg.ConfigurationProvider; import com.polopoly.javarebel.cfg.ConfigurationProvider.Cfg; import com.polopoly.javarebel.contentfiles.ContentBaseProcessor; import com.polopoly.javarebel.staticfiles.StaticFileFilterProcessor; public class PolopolyJRebelPlugin implements Plugin { public void preinit() { // Register the CBP Integration i = IntegrationFactory.getInstance(); ClassLoader cl = PolopolyJRebelPlugin.class.getClassLoader(); i.addIntegrationProcessor(cl, "com.polopoly.cm.client.impl.service2client.ContentBase", new ContentBaseProcessor()); Cfg cfg = ConfigurationProvider.instance().getConfiguration(); if (cfg == null) { LoggerFactory.getInstance().echo("pp-rebel.ERROR: No configuration present, turning off pp-rebel"); throw new RuntimeException("pp-rebel could not find pp-rebel.xml, please specify a valid PP_HOME property"); + } else if (cfg.configuration == null) { + LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters," + + " static file processing will be disabled until restart" + + " (no configuration, invalid configuration file?)"); } else if (cfg.configuration.enableFilterProcessing() || cfg.configuration.hasFilterFiles()) { i.addIntegrationProcessor(cl, new StaticFileFilterProcessor()); } else { - LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters, static file processing will be disabled until restart"); + LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters," + + " static file processing will be disabled until restart"); } // // // Set up the reload listener // ReloaderFactory.getInstance().addClassReloadListener( // new ClassEventListener() { // public void onClassEvent(int eventType, Class klass) { // // try { // Class abstractCanvasClass = Class.forName("org.zeroturnaround.javarebel.sdkDemo.AbstractCanvas"); // // // Check if it is child of AbstractCanvas // if (abstractCanvasClass.isAssignableFrom(klass)) { // System.out.println("An AbstractCanvas implementation class was reloaded .. 
re-painting the canvas"); // DemoAppConfigReloader.repaint(); // LoggerFactory.getInstance().echo("Repainted the canvas"); // } // // } catch (Exception e) { // LoggerFactory.getInstance().error(e); // System.out.println(e); // } // } // // public int priority() { // return 0; // } // } // ); } public String getId() { return "pp-rebel"; } public String getName() { return "Polopoly JRebel Plugin"; } public String getDescription() { return "Loads Polopoly content files directly from the file system"; } public String getAuthor() { return null; } public String getWebsite() { return null; } public boolean checkDependencies(ClassLoader classLoader, ClassResourceSource classResourceSource) { return classResourceSource.getClassResource("com.polopoly.cm.client.impl.service2client.ContentBase") != null; } public String getSupportedVersions() { return null; } public String getTestedVersions() { return null; } }
false
true
public void preinit() { // Register the CBP Integration i = IntegrationFactory.getInstance(); ClassLoader cl = PolopolyJRebelPlugin.class.getClassLoader(); i.addIntegrationProcessor(cl, "com.polopoly.cm.client.impl.service2client.ContentBase", new ContentBaseProcessor()); Cfg cfg = ConfigurationProvider.instance().getConfiguration(); if (cfg == null) { LoggerFactory.getInstance().echo("pp-rebel.ERROR: No configuration present, turning off pp-rebel"); throw new RuntimeException("pp-rebel could not find pp-rebel.xml, please specify a valid PP_HOME property"); } else if (cfg.configuration.enableFilterProcessing() || cfg.configuration.hasFilterFiles()) { i.addIntegrationProcessor(cl, new StaticFileFilterProcessor()); } else { LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters, static file processing will be disabled until restart"); } // // // Set up the reload listener // ReloaderFactory.getInstance().addClassReloadListener( // new ClassEventListener() { // public void onClassEvent(int eventType, Class klass) { // // try { // Class abstractCanvasClass = Class.forName("org.zeroturnaround.javarebel.sdkDemo.AbstractCanvas"); // // // Check if it is child of AbstractCanvas // if (abstractCanvasClass.isAssignableFrom(klass)) { // System.out.println("An AbstractCanvas implementation class was reloaded .. re-painting the canvas"); // DemoAppConfigReloader.repaint(); // LoggerFactory.getInstance().echo("Repainted the canvas"); // } // // } catch (Exception e) { // LoggerFactory.getInstance().error(e); // System.out.println(e); // } // } // // public int priority() { // return 0; // } // } // ); }
public void preinit() { // Register the CBP Integration i = IntegrationFactory.getInstance(); ClassLoader cl = PolopolyJRebelPlugin.class.getClassLoader(); i.addIntegrationProcessor(cl, "com.polopoly.cm.client.impl.service2client.ContentBase", new ContentBaseProcessor()); Cfg cfg = ConfigurationProvider.instance().getConfiguration(); if (cfg == null) { LoggerFactory.getInstance().echo("pp-rebel.ERROR: No configuration present, turning off pp-rebel"); throw new RuntimeException("pp-rebel could not find pp-rebel.xml, please specify a valid PP_HOME property"); } else if (cfg.configuration == null) { LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters," + " static file processing will be disabled until restart" + " (no configuration, invalid configuration file?)"); } else if (cfg.configuration.enableFilterProcessing() || cfg.configuration.hasFilterFiles()) { i.addIntegrationProcessor(cl, new StaticFileFilterProcessor()); } else { LoggerFactory.getInstance().echo("pp-rebel.INFO: Not patching servlet filters," + " static file processing will be disabled until restart"); } // // // Set up the reload listener // ReloaderFactory.getInstance().addClassReloadListener( // new ClassEventListener() { // public void onClassEvent(int eventType, Class klass) { // // try { // Class abstractCanvasClass = Class.forName("org.zeroturnaround.javarebel.sdkDemo.AbstractCanvas"); // // // Check if it is child of AbstractCanvas // if (abstractCanvasClass.isAssignableFrom(klass)) { // System.out.println("An AbstractCanvas implementation class was reloaded .. re-painting the canvas"); // DemoAppConfigReloader.repaint(); // LoggerFactory.getInstance().echo("Repainted the canvas"); // } // // } catch (Exception e) { // LoggerFactory.getInstance().error(e); // System.out.println(e); // } // } // // public int priority() { // return 0; // } // } // ); }
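The fix in this record separates two failure modes before touching the nested field: a missing pp-rebel.xml (cfg == null) remains fatal, while a configuration file that was found but did not parse (cfg.configuration == null) now degrades gracefully instead of throwing a NullPointerException at cfg.configuration.enableFilterProcessing(). A minimal sketch of that layered guard, using hypothetical stand-in types:

final class GuardDemo {
    static final class Cfg { Configuration configuration; }

    static final class Configuration {
        boolean enableFilterProcessing() { return false; }
        boolean hasFilterFiles() { return false; }
    }

    // Decide whether to patch filters; each nesting level is checked before
    // the next one is dereferenced.
    static boolean shouldPatchFilters(Cfg cfg) {
        if (cfg == null) {
            // No configuration at all: hard error, mirroring the plugin above.
            throw new RuntimeException("no configuration present");
        }
        if (cfg.configuration == null) {
            // File present but unparsable: disable the feature, keep running.
            return false;
        }
        return cfg.configuration.enableFilterProcessing()
                || cfg.configuration.hasFilterFiles();
    }
}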