Columns:
repo_name: string, lengths 4 to 116
path: string, lengths 3 to 942
size: string, lengths 1 to 7
content: string, lengths 3 to 1.05M
license: string, 15 classes
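The columns above describe a flat per-file code corpus: each row carries a repository name, a file path, a size, the raw file content, and a license tag. As a rough sketch only, written in Java since several of the rows below are Java files, one row can be modeled and filtered by license as follows; the CorpusRow record, the LicenseFilter class, and the truncated sample content are illustrative assumptions, not part of the dataset.

import java.util.List;

// Illustrative model of one corpus row, mirroring the columns listed above.
record CorpusRow(String repoName, String path, String size, String content, String license) {}

public class LicenseFilter {
    public static void main(String[] args) {
        // Sample values taken from the first row shown below (content truncated here).
        List<CorpusRow> rows = List.of(new CorpusRow(
                "SingingTree/rustfmt",
                "tests/target/configs-spaces_within_parens-false.rs",
                "122",
                "// rustfmt-spaces_within_parens: false ...",
                "apache-2.0"));

        // Keep only rows under the license of interest and print their locations.
        rows.stream()
            .filter(row -> "apache-2.0".equals(row.license()))
            .forEach(row -> System.out.println(row.repoName() + "/" + row.path()));
    }
}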
SingingTree/rustfmt
tests/target/configs-spaces_within_parens-false.rs
122
// rustfmt-spaces_within_parens: false // Spaces within parens fn lorem<T: Eq>(t: T) { let lorem = (ipsum, dolor); }
apache-2.0
kikinteractive/maven-plugins
maven-assembly-plugin/src/main/java/org/apache/maven/plugin/assembly/mojos/UnpackMojo.java
4397
package org.apache.maven.plugin.assembly.mojos; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.artifact.Artifact; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.assembly.archive.ArchiveExpansionException; import org.apache.maven.plugin.assembly.utils.AssemblyFileUtils; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.archiver.manager.ArchiverManager; import org.codehaus.plexus.archiver.manager.NoSuchArchiverException; import java.io.File; import java.util.LinkedHashSet; import java.util.Set; /** * Unpack project dependencies. Currently supports dependencies of type jar and zip. * * @version $Id$ * @deprecated Use org.apache.maven.plugins:maven-dependency-plugin goal: unpack or unpack-dependencies instead. */ @Mojo( name = "unpack", requiresDependencyResolution = ResolutionScope.TEST, inheritByDefault = false ) @Deprecated public class UnpackMojo extends AbstractMojo { /** */ @Parameter( defaultValue = "${project}", readonly = true, required = true ) private MavenProject project; /** */ @Component private ArchiverManager archiverManager; /** * Directory to unpack JARs into if needed */ @Parameter( defaultValue = "${project.build.directory}/assembly/work", required = true ) protected File workDirectory; /** * Unpacks the archive file. * * @throws MojoExecutionException */ @SuppressWarnings( "ResultOfMethodCallIgnored" ) public void execute() throws MojoExecutionException, MojoFailureException { final Set<Artifact> dependencies = new LinkedHashSet<Artifact>(); if ( project.getArtifact() != null && project.getArtifact().getFile() != null ) { dependencies.add( project.getArtifact() ); } @SuppressWarnings( "unchecked" ) final Set<Artifact> projectArtifacts = project.getArtifacts(); if ( projectArtifacts != null ) { dependencies.addAll( projectArtifacts ); } for ( final Artifact artifact : dependencies ) { final String name = artifact.getFile().getName(); final File tempLocation = new File( workDirectory, name.substring( 0, name.lastIndexOf( '.' 
) ) ); boolean process = false; if ( !tempLocation.exists() ) { tempLocation.mkdirs(); process = true; } else if ( artifact.getFile().lastModified() > tempLocation.lastModified() ) { process = true; } if ( process ) { final File file = artifact.getFile(); try { AssemblyFileUtils.unpack( file, tempLocation, archiverManager ); } catch ( final NoSuchArchiverException e ) { getLog().info( "Skip unpacking dependency file with unknown extension: " + file.getPath() ); } catch ( final ArchiveExpansionException e ) { throw new MojoExecutionException( "Error unpacking dependency file: " + file, e ); } } } } }
apache-2.0
danilomendonca/A3Droid_Test_MCS
src/a3/a3droid/Timer.java
1579
package a3.a3droid; /**This class is used in A3Channel and in Service, which implement the interface "TimerInterface". * After a 2-second timeout, it calls TimerInterface.timerFired(int) to notify that the timeout fired. * @author Francesco * */ public class Timer extends Thread{ /**The TimerInterface to notify at timeout firing time.*/ private TimerInterface channel; /**It indicates why the timeout is needed. * It is passed in timerFired(int), in order for the TimerInterface to know which timeout fired. */ private int reason; /**The time to wait before timer firing.*/ private int timeToWait; /** * @param channel The TimerInterface to notify at timeout firing time. * @param reason It indicates why the timeout is needed on "channel". */ public Timer(TimerInterface channel, int reason){ super(); this.channel = channel; this.reason = reason; timeToWait = 2000; } /** * @param timerInterface The TimerInterface to notify at timeout firing time. * @param reason It indicates why the timeout is needed on "channel". * @param timeout The time to wait before timer firing. */ public Timer(TimerInterface timerInterface, int reason, int timeout) { // TODO Auto-generated constructor stub this(timerInterface, reason); timeToWait = timeout; } /** * It notifies the TimerInterface after the timeout fires. */ @Override public void run(){ try { sleep(timeToWait); channel.timerFired(reason); } catch (Exception e) { // TODO Auto-generated catch block } } }
apache-2.0
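The Timer class in the row above is a one-shot thread: it sleeps for timeToWait milliseconds and then calls timerFired(int) on its TimerInterface. A minimal, self-contained sketch of the same pattern follows; TimerDemo and its reason/timeout values are made up for illustration and stand in for the real a3.a3droid classes.

// Stand-in for a3.a3droid.TimerInterface: a single callback fired when the timeout elapses.
interface TimerInterface {
    void timerFired(int reason);
}

public class TimerDemo implements TimerInterface {
    @Override
    public void timerFired(int reason) {
        System.out.println("timeout fired, reason=" + reason);
    }

    public static void main(String[] args) throws InterruptedException {
        TimerInterface channel = new TimerDemo();
        int reason = 1;      // tells the callback which timeout fired
        int timeoutMs = 500; // overrides the 2000 ms default of Timer(channel, reason)

        // Same behaviour as Timer.run() above: sleep, then notify the channel.
        Thread timer = new Thread(() -> {
            try {
                Thread.sleep(timeoutMs);
                channel.timerFired(reason);
            } catch (InterruptedException ignored) {
                // an interrupted timer simply never fires
            }
        });
        timer.start();
        timer.join();
    }
}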
msebire/intellij-community
platform/diff-impl/src/com/intellij/openapi/diff/impl/dir/actions/DirDiffToolbarActions.java
2587
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.diff.impl.dir.actions; import com.intellij.ide.diff.DirDiffModelHolder; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diff.impl.dir.DirDiffTableModel; import com.intellij.openapi.project.DumbAware; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * @author Konstantin Bulenkov */ public class DirDiffToolbarActions extends ActionGroup implements DumbAware { private final AnAction[] myActions; public DirDiffToolbarActions(DirDiffTableModel model, JComponent panel) { super("Directory Diff Actions", false); final List<AnAction> actions = new ArrayList<>(Arrays.asList( new RefreshDirDiffAction(model), Separator.getInstance(), new EnableLeft(model), new EnableNotEqual(model), new EnableEqual(model), new EnableRight(model), Separator.getInstance(), ActionManager.getInstance().getAction("DirDiffMenu.CompareNewFilesWithEachOtherAction"), Separator.getInstance(), new ChangeCompareModeGroup(model), Separator.getInstance())); if (model.isOperationsEnabled()) { actions.add(new SynchronizeDiff(model, true)); actions.add(new SynchronizeDiff(model, false)); } actions.addAll(model.getSettings().getExtraActions()); for (AnAction action : actions) { if (action instanceof ShortcutProvider) { final ShortcutSet shortcut = ((ShortcutProvider)action).getShortcut(); if (shortcut != null) { action.registerCustomShortcutSet(shortcut, panel); } } if (action instanceof DirDiffModelHolder) { ((DirDiffModelHolder)action).setModel(model); } } myActions = actions.toArray(AnAction.EMPTY_ARRAY); } @NotNull @Override public AnAction[] getChildren(@Nullable AnActionEvent e) { return myActions; } }
apache-2.0
ThoughtWorksStudios/excel-2010-mingle-addin
tools/ironruby/lib/ironruby/gems/1.8/doc/bundler-1.0.15/rdoc/classes/Bundler/GemHelper.src/M000283.html
759
<?xml version="1.0" encoding="iso-8859-1"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html> <head> <title>version_tag (Bundler::GemHelper)</title> <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" /> <link rel="stylesheet" href="../../.././rdoc-style.css" type="text/css" media="screen" /> </head> <body class="standalone-code"> <pre><span class="ruby-comment cmt"># File lib/bundler/gem_helper.rb, line 122</span> <span class="ruby-keyword kw">def</span> <span class="ruby-identifier">version_tag</span> <span class="ruby-node">&quot;v#{version}&quot;</span> <span class="ruby-keyword kw">end</span></pre> </body> </html>
apache-2.0
SebsLittleHelpers/scala-js
javalanglib/src/main/scala/java/lang/reflect/Array.scala
7857
/* * Scala.js (https://www.scala-js.org/) * * Copyright EPFL. * * Licensed under Apache License 2.0 * (https://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package java.lang.reflect import scala.scalajs.js import js.JSConverters._ import java.lang.Class object Array { def newInstance(componentType: Class[_], length: Int): AnyRef = componentType.newArrayOfThisClass(js.Array(length)) def newInstance(componentType: Class[_], dimensions: scala.Array[Int]): AnyRef = componentType.newArrayOfThisClass(dimensions.toJSArray) def getLength(array: AnyRef): Int = array match { // yes, this is kind of stupid, but that's how it is case array: Array[Object] => array.length case array: Array[Boolean] => array.length case array: Array[Char] => array.length case array: Array[Byte] => array.length case array: Array[Short] => array.length case array: Array[Int] => array.length case array: Array[Long] => array.length case array: Array[Float] => array.length case array: Array[Double] => array.length case _ => throw new IllegalArgumentException("argument type mismatch") } def get(array: AnyRef, index: Int): AnyRef = array match { case array: Array[Object] => array(index) case array: Array[Boolean] => new java.lang.Boolean(array(index)) case array: Array[Char] => new java.lang.Character(array(index)) case array: Array[Byte] => new java.lang.Byte(array(index)) case array: Array[Short] => new java.lang.Short(array(index)) case array: Array[Int] => new java.lang.Integer(array(index)) case array: Array[Long] => new java.lang.Long(array(index)) case array: Array[Float] => new java.lang.Float(array(index)) case array: Array[Double] => new java.lang.Double(array(index)) case _ => throw new IllegalArgumentException("argument type mismatch") } def getBoolean(array: AnyRef, index: Int): Boolean = array match { case array: Array[Boolean] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getChar(array: AnyRef, index: Int): Char = array match { case array: Array[Char] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getByte(array: AnyRef, index: Int): Byte = array match { case array: Array[Byte] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getShort(array: AnyRef, index: Int): Short = array match { case array: Array[Short] => array(index) case array: Array[Byte] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getInt(array: AnyRef, index: Int): Int = array match { case array: Array[Int] => array(index) case array: Array[Char] => array(index) case array: Array[Byte] => array(index) case array: Array[Short] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getLong(array: AnyRef, index: Int): Long = array match { case array: Array[Long] => array(index) case array: Array[Char] => array(index) case array: Array[Byte] => array(index) case array: Array[Short] => array(index) case array: Array[Int] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def getFloat(array: AnyRef, index: Int): Float = array match { case array: Array[Float] => array(index) case array: Array[Char] => array(index) case array: Array[Byte] => array(index) case array: Array[Short] => array(index) case array: Array[Int] => array(index) case array: Array[Long] => array(index) case _ => throw new 
IllegalArgumentException("argument type mismatch") } def getDouble(array: AnyRef, index: Int): Double = array match { case array: Array[Double] => array(index) case array: Array[Char] => array(index) case array: Array[Byte] => array(index) case array: Array[Short] => array(index) case array: Array[Int] => array(index) case array: Array[Long] => array(index) case array: Array[Float] => array(index) case _ => throw new IllegalArgumentException("argument type mismatch") } def set(array: AnyRef, index: Int, value: AnyRef): Unit = array match { case array: Array[Object] => array(index) = value case _ => (value: Any) match { case value: Boolean => setBoolean(array, index, value) case value: Char => setChar(array, index, value) case value: Byte => setByte(array, index, value) case value: Short => setShort(array, index, value) case value: Int => setInt(array, index, value) case value: Long => setLong(array, index, value) case value: Float => setFloat(array, index, value) case value: Double => setDouble(array, index, value) case _ => throw new IllegalArgumentException("argument type mismatch") } } def setBoolean(array: AnyRef, index: Int, value: Boolean): Unit = array match { case array: Array[Boolean] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setChar(array: AnyRef, index: Int, value: Char): Unit = array match { case array: Array[Char] => array(index) = value case array: Array[Int] => array(index) = value case array: Array[Long] => array(index) = value case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setByte(array: AnyRef, index: Int, value: Byte): Unit = array match { case array: Array[Byte] => array(index) = value case array: Array[Short] => array(index) = value case array: Array[Int] => array(index) = value case array: Array[Long] => array(index) = value case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setShort(array: AnyRef, index: Int, value: Short): Unit = array match { case array: Array[Short] => array(index) = value case array: Array[Int] => array(index) = value case array: Array[Long] => array(index) = value case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setInt(array: AnyRef, index: Int, value: Int): Unit = array match { case array: Array[Int] => array(index) = value case array: Array[Long] => array(index) = value case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setLong(array: AnyRef, index: Int, value: Long): Unit = array match { case array: Array[Long] => array(index) = value case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setFloat(array: AnyRef, index: Int, value: Float): Unit = array match { case array: Array[Float] => array(index) = value case array: Array[Double] => array(index) = value case _ => throw new IllegalArgumentException("argument type mismatch") } def setDouble(array: AnyRef, index: Int, value: Double): Unit = array match { case array: Array[Double] => array(index) = value case _ => throw 
new IllegalArgumentException("argument type mismatch") } }
apache-2.0
apache/incubator-taverna-site
content/javadoc/taverna-language/org/apache/taverna/scufl2/xml/rdf/package-frame.html
1498
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_72-internal) on Mon Mar 14 13:22:15 GMT 2016 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>org.apache.taverna.scufl2.xml.rdf (Apache Taverna Language APIs (Scufl2, Databundle) 0.15.1-incubating API)</title> <meta name="date" content="2016-03-14"> <link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../script.js"></script> </head> <body> <h1 class="bar"><a href="../../../../../../org/apache/taverna/scufl2/xml/rdf/package-summary.html" target="classFrame">org.apache.taverna.scufl2.xml.rdf</a></h1> <div class="indexContainer"> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="Description.html" title="class in org.apache.taverna.scufl2.xml.rdf" target="classFrame">Description</a></li> <li><a href="ObjectFactory.html" title="class in org.apache.taverna.scufl2.xml.rdf" target="classFrame">ObjectFactory</a></li> <li><a href="RDF.html" title="class in org.apache.taverna.scufl2.xml.rdf" target="classFrame">RDF</a></li> <li><a href="Resource.html" title="class in org.apache.taverna.scufl2.xml.rdf" target="classFrame">Resource</a></li> <li><a href="Type.html" title="class in org.apache.taverna.scufl2.xml.rdf" target="classFrame">Type</a></li> </ul> </div> </body> </html>
apache-2.0
brenthand/Panda
poi-3.9/docs/apidocs/org/apache/poi/xslf/model/geom/Path.html
14826
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_29) on Mon Nov 26 17:21:23 MSK 2012 --> <TITLE> Path (POI API Documentation) </TITLE> <META NAME="date" CONTENT="2012-11-26"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Path (POI API Documentation)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/Path.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../org/apache/poi/xslf/model/geom/Outline.html" title="class in org.apache.poi.xslf.model.geom"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../../org/apache/poi/xslf/model/geom/PathCommand.html" title="interface in org.apache.poi.xslf.model.geom"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/poi/xslf/model/geom/Path.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="Path.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A 
HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <!-- ======== START OF CLASS DATA ======== --> <H2> <FONT SIZE="-1"> org.apache.poi.xslf.model.geom</FONT> <BR> Class Path</H2> <PRE> java.lang.Object <IMG SRC="../../../../../../resources/inherit.gif" ALT="extended by "><B>org.apache.poi.xslf.model.geom.Path</B> </PRE> <HR> <DL> <DT><PRE>public class <B>Path</B><DT>extends java.lang.Object</DL> </PRE> <P> Specifies a creation path consisting of a series of moves, lines and curves that when combined forms a geometric shape <P> <P> <DL> <DT><B>Author:</B></DT> <DD>Yegor Kozlov</DD> </DL> <HR> <P> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <A NAME="constructor_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Constructor Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#Path()">Path</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#Path(boolean, boolean)">Path</A></B>(boolean&nbsp;fill, boolean&nbsp;stroke)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#Path(org.openxmlformats.schemas.drawingml.x2006.main.CTPath2D)">Path</A></B>(org.openxmlformats.schemas.drawingml.x2006.main.CTPath2D&nbsp;spPath)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp; <!-- ========== METHOD SUMMARY =========== --> <A NAME="method_summary"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2"> <B>Method Summary</B></FONT></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;void</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#addCommand(org.apache.poi.xslf.model.geom.PathCommand)">addCommand</A></B>(<A HREF="../../../../../../org/apache/poi/xslf/model/geom/PathCommand.html" title="interface in org.apache.poi.xslf.model.geom">PathCommand</A>&nbsp;cmd)</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;long</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#getH()">getH</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;java.awt.geom.GeneralPath</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#getPath(org.apache.poi.xslf.model.geom.Context)">getPath</A></B>(<A HREF="../../../../../../org/apache/poi/xslf/model/geom/Context.html" title="class in org.apache.poi.xslf.model.geom">Context</A>&nbsp;ctx)</CODE> <BR> 
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;Convert the internal represenation to java.awt.GeneralPath</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;long</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#getW()">getW</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;boolean</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#isFilled()">isFilled</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1"> <CODE>&nbsp;boolean</CODE></FONT></TD> <TD><CODE><B><A HREF="../../../../../../org/apache/poi/xslf/model/geom/Path.html#isStroked()">isStroked</A></B>()</CODE> <BR> &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;</TD> </TR> </TABLE> &nbsp;<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor"> <TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH> </TR> <TR BGCOLOR="white" CLASS="TableRowColor"> <TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</CODE></TD> </TR> </TABLE> &nbsp; <P> <!-- ========= CONSTRUCTOR DETAIL ======== --> <A NAME="constructor_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Constructor Detail</B></FONT></TH> </TR> </TABLE> <A NAME="Path()"><!-- --></A><H3> Path</H3> <PRE> public <B>Path</B>()</PRE> <DL> </DL> <HR> <A NAME="Path(boolean, boolean)"><!-- --></A><H3> Path</H3> <PRE> public <B>Path</B>(boolean&nbsp;fill, boolean&nbsp;stroke)</PRE> <DL> </DL> <HR> <A NAME="Path(org.openxmlformats.schemas.drawingml.x2006.main.CTPath2D)"><!-- --></A><H3> Path</H3> <PRE> public <B>Path</B>(org.openxmlformats.schemas.drawingml.x2006.main.CTPath2D&nbsp;spPath)</PRE> <DL> </DL> <!-- ============ METHOD DETAIL ========== --> <A NAME="method_detail"><!-- --></A> <TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY=""> <TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor"> <TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2"> <B>Method Detail</B></FONT></TH> </TR> </TABLE> <A NAME="addCommand(org.apache.poi.xslf.model.geom.PathCommand)"><!-- --></A><H3> addCommand</H3> <PRE> public void <B>addCommand</B>(<A HREF="../../../../../../org/apache/poi/xslf/model/geom/PathCommand.html" title="interface in org.apache.poi.xslf.model.geom">PathCommand</A>&nbsp;cmd)</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getPath(org.apache.poi.xslf.model.geom.Context)"><!-- --></A><H3> getPath</H3> <PRE> public java.awt.geom.GeneralPath <B>getPath</B>(<A HREF="../../../../../../org/apache/poi/xslf/model/geom/Context.html" title="class in org.apache.poi.xslf.model.geom">Context</A>&nbsp;ctx)</PRE> <DL> <DD>Convert the internal represenation to java.awt.GeneralPath <P> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="isStroked()"><!-- --></A><H3> isStroked</H3> <PRE> public boolean <B>isStroked</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A 
NAME="isFilled()"><!-- --></A><H3> isFilled</H3> <PRE> public boolean <B>isFilled</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getW()"><!-- --></A><H3> getW</H3> <PRE> public long <B>getW</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <HR> <A NAME="getH()"><!-- --></A><H3> getH</H3> <PRE> public long <B>getH</B>()</PRE> <DL> <DD><DL> </DL> </DD> </DL> <!-- ========= END OF CLASS DATA ========= --> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/Path.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../org/apache/poi/xslf/model/geom/Outline.html" title="class in org.apache.poi.xslf.model.geom"><B>PREV CLASS</B></A>&nbsp; &nbsp;<A HREF="../../../../../../org/apache/poi/xslf/model/geom/PathCommand.html" title="interface in org.apache.poi.xslf.model.geom"><B>NEXT CLASS</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../index.html?org/apache/poi/xslf/model/geom/Path.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="Path.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> <TR> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> SUMMARY:&nbsp;NESTED&nbsp;|&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_summary">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_summary">METHOD</A></FONT></TD> <TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2"> DETAIL:&nbsp;FIELD&nbsp;|&nbsp;<A HREF="#constructor_detail">CONSTR</A>&nbsp;|&nbsp;<A HREF="#method_detail">METHOD</A></FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> <i>Copyright 2012 The Apache Software Foundation or its licensors, as applicable.</i> 
</BODY> </HTML>
apache-2.0
kirou/books_app_list
sinkancheaker/Resources/ui/Detail.js
3996
(function() { var Detail; Detail = function(detailData) { // Show the detail window (semi-transparent) var t = windowAnimation(0); var win = Ti.UI.createWindow({ backgroundColor:'#333333', borderWidth:1, borderColor:'#666', width:"100%", height:"100%", borderRadius:5, opacity:0.92, transform:t }); // Animation for when the window has finished opening var a = Titanium.UI.createAnimation(); a.duration = 200; a.addEventListener('complete', function() { var t1 = windowAnimation(1.0); win.animate({transform:t1, duration:200}); }); // Title titleLabel = Ti.UI.createLabel({ top : 10, font: { fontsize:20, fontWeight: "bold" }, color:"#ffffff", layout:"vertical" }); if (typeof(detailData.title) == "undefined") { titleLabel.text = ''; } else { titleLabel.text = detailData.title; } win.add(titleLabel); // Image to display if (detailData.is_image === true) { imageUrl = "http://images.amazon.com/images/P/" + detailData.asin + ".09._SL200_SCLZZZZZZZ_.jpg"; Ti.API.info(imageUrl); } else { imageUrl = "./images/noimage.jpeg"; } detailImage = Ti.UI.createImageView({ image:imageUrl, width : Ti.UI.SIZE, height : 200, top:50, backgroundColor:'#ffffff', layout:"vertical" }); win.add(detailImage); var data = []; data[0] = Ti.UI.createTableViewRow({title:'購入/予約をする', hasChild:true, className:'detail'}); data[1] = Ti.UI.createTableViewRow({title:'チェックリストに入れる', className:'detail'}); data[2] = Ti.UI.createTableViewRow({title:'カレンダーに登録する', className:'detail'}); data[3] = Ti.UI.createTableViewRow({title:'このページを閉じる', className:'detail'}); var menuTableView = Titanium.UI.createTableView({ data:data, bottom:5, left:30, right:30, height:180, borderWidth:1, borderRadius:7, borderColor:'#999', layout:"vertical" }); win.add(menuTableView); menuTableView.addEventListener('click', function(e){ Ti.API.info(e); // Close this window var eventT = windowAnimation(0); win.close({transform:eventT,duration:300}); // Open the next window var index = e.index; if (index === 0) { var AmazonDetail = require('ui/AmazonDetail'); var AmazonDetailWin = new AmazonDetail(detailData.asin); ActiveWinTab.tabs.activeTab.open(AmazonDetailWin); } else if (index === 1) { var AmazonDetail = require('ui/AmazonDetail'); var AmazonDetailWin = new AmazonDetail(detailData.asin); ActiveWinTab.tabs.activeTab.open(AmazonDetailWin); } else if (index === 2) { var AmazonDetail = require('ui/AmazonDetail'); var AmazonDetailWin = new AmazonDetail(detailData.asin); ActiveWinTab.tabs.activeTab.open(AmazonDetailWin); } else { } }); // Close when the window itself is clicked win.addEventListener('click', function(){ var eventT = windowAnimation(0); win.close({transform:eventT,duration:300}); }); win.open(a); }; var windowAnimation = function(scaleValue) { var t = Titanium.UI.create2DMatrix(); t = t.scale(scaleValue); return t; } return module.exports = Detail; })();
apache-2.0
sergecodd/FireFox-OS
B2G/gecko/netwerk/test/unit/test_bug427957.js
3088
/** * Test for Bidi restrictions on IDNs from RFC 3454 */ var Cc = Components.classes; var Ci = Components.interfaces; var idnService; function expected_pass(inputIDN) { var isASCII = {}; var displayIDN = idnService.convertToDisplayIDN(inputIDN, isASCII); do_check_eq(displayIDN, inputIDN); } function expected_fail(inputIDN) { var isASCII = {}; var displayIDN = ""; try { displayIDN = idnService.convertToDisplayIDN(inputIDN, isASCII); } catch(e) {} do_check_neq(displayIDN, inputIDN); } function run_test() { // add an IDN whitelist pref var pbi = Cc["@mozilla.org/preferences-service;1"] .getService(Ci.nsIPrefBranch); pbi.setBoolPref("network.IDN.whitelist.com", true); idnService = Cc["@mozilla.org/network/idn-service;1"] .getService(Ci.nsIIDNService); /* * In any profile that specifies bidirectional character handling, all * three of the following requirements MUST be met: * * 1) The characters in section 5.8 MUST be prohibited. */ // 0340; COMBINING GRAVE TONE MARK expected_fail("foo\u0340bar.com"); // 0341; COMBINING ACUTE TONE MARK expected_fail("foo\u0341bar.com"); // 200E; LEFT-TO-RIGHT MARK expected_fail("foo\200ebar.com"); // 200F; RIGHT-TO-LEFT MARK // Note: this is an RTL IDN so that it doesn't fail test 2) below expected_fail("\u200f\u0645\u062B\u0627\u0644.\u0622\u0632\u0645\u0627\u06CC\u0634\u06CC"); // 202A; LEFT-TO-RIGHT EMBEDDING expected_fail("foo\u202abar.com"); // 202B; RIGHT-TO-LEFT EMBEDDING expected_fail("foo\u202bbar.com"); // 202C; POP DIRECTIONAL FORMATTING expected_fail("foo\u202cbar.com"); // 202D; LEFT-TO-RIGHT OVERRIDE expected_fail("foo\u202dbar.com"); // 202E; RIGHT-TO-LEFT OVERRIDE expected_fail("foo\u202ebar.com"); // 206A; INHIBIT SYMMETRIC SWAPPING expected_fail("foo\u206abar.com"); // 206B; ACTIVATE SYMMETRIC SWAPPING expected_fail("foo\u206bbar.com"); // 206C; INHIBIT ARABIC FORM SHAPING expected_fail("foo\u206cbar.com"); // 206D; ACTIVATE ARABIC FORM SHAPING expected_fail("foo\u206dbar.com"); // 206E; NATIONAL DIGIT SHAPES expected_fail("foo\u206ebar.com"); // 206F; NOMINAL DIGIT SHAPES expected_fail("foo\u206fbar.com"); /* * 2) If a string contains any RandALCat character, the string MUST NOT * contain any LCat character. */ // www.מיץpetel.com is invalid expected_fail("www.\u05DE\u05D9\u05E5petel.com"); // But www.מיץפטל.com is fine because the ltr and rtl characters are in // different labels expected_pass("www.\u05DE\u05D9\u05E5\u05E4\u05D8\u05DC.com"); /* * 3) If a string contains any RandALCat character, a RandALCat * character MUST be the first character of the string, and a * RandALCat character MUST be the last character of the string. */ // www.1מיץ.com is invalid expected_fail("www.1\u05DE\u05D9\u05E5.com"); // www.מיץ1.com is invalid expected_fail("www.\u05DE\u05D9\u05E51.com"); // But www.מיץ1פטל.com is fine expected_pass("www.\u05DE\u05D9\u05E51\u05E4\u05D8\u05DC.com"); }
apache-2.0
akirakw/asakusafw
info/model/src/main/java/com/asakusafw/info/ParameterInfo.java
3504
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.info; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; /** * Represents a batch parameter definition. * @since 0.9.1 */ public class ParameterInfo { private final String name; private final String comment; private final boolean mandatory; private final String pattern; /** * Creates a new instance. * @param name the parameter name * @param comment the comment (nullable) * @param mandatory {@code true} if this parameter is mandatory, otherwise {@code false} * @param pattern the parameter value pattern in regular expression (nullable) */ @JsonCreator(mode = JsonCreator.Mode.PROPERTIES) public ParameterInfo( @JsonProperty("name") String name, @JsonProperty("comment") String comment, @JsonProperty("mandatory") boolean mandatory, @JsonProperty("pattern") String pattern) { this.name = name; this.comment = comment; this.mandatory = mandatory; this.pattern = pattern; } /** * Returns the name. * @return the name */ public String getName() { return name; } /** * Returns the comment. * @return the comment, or {@code null} if it is not defined */ public String getComment() { return comment; } /** * Returns whether or not this parameter is mandatory. * @return {@code true} if this parameter is mandatory, otherwise {@code false} */ public boolean isMandatory() { return mandatory; } /** * Returns the parameter value pattern in regular expression. * @return the pattern, or {@code null} if it is not defined */ public String getPattern() { return pattern; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Objects.hashCode(name); result = prime * result + Objects.hashCode(comment); result = prime * result + Boolean.hashCode(mandatory); result = prime * result + Objects.hashCode(pattern); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } ParameterInfo other = (ParameterInfo) obj; return Objects.equals(name, other.name) && Objects.equals(comment, other.comment) && mandatory == other.mandatory && Objects.equals(pattern, other.pattern); } @Override public String toString() { return String.format("parameter(name=%s)", name); //$NON-NLS-1$ } }
apache-2.0
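ParameterInfo in the row above is a Jackson-annotated value class, so it round-trips through JSON with a plain ObjectMapper: serialization goes through the bean getters and deserialization through the @JsonCreator constructor. A small sketch follows, assuming the class above is on the classpath; the sample values ("date", the comment, and the pattern) are invented, not taken from the Asakusa documentation.

import com.asakusafw.info.ParameterInfo;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ParameterInfoRoundTrip {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // Invented sample parameter definition.
        ParameterInfo original = new ParameterInfo("date", "processing date", true, "\\d{8}");

        // Serialize via the getters, then rebuild through the @JsonCreator constructor.
        String json = mapper.writeValueAsString(original);
        ParameterInfo restored = mapper.readValue(json, ParameterInfo.class);

        System.out.println(json);
        System.out.println(original.equals(restored)); // true: equals() compares all four fields
    }
}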
calebd/swift
lib/SILGen/SILGenPoly.cpp
141662
//===--- SILGenPoly.cpp - Function Type Thunks ----------------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// // // Swift function types can be equivalent or have a subtyping relationship even // if the SIL-level lowering of the calling convention is different. The // routines in this file implement thunking between lowered function types. // // // Re-abstraction thunks // ===================== // After SIL type lowering, generic substitutions become explicit, for example // the AST type Int -> Int passes the Ints directly, whereas T -> T with Int // substituted for T will pass the Ints like a T, as an address-only value with // opaque type metadata. Such a thunk is called a "re-abstraction thunk" -- the // AST-level type of the function value does not change, only the manner in // which parameters and results are passed. // // Function conversion thunks // ========================== // In Swift's AST-level type system, certain types have a subtype relation // involving a representation change. For example, a concrete type is always // a subtype of any protocol it conforms to. The upcast from the concrete // type to an existential type for the protocol requires packaging the // payload together with type metadata and witness tables. // // Between function types, the type A -> B is defined to be a subtype of // A' -> B' iff A' is a subtype of A, and B is a subtype of B' -- parameters // are contravariant, and results are covariant. // // A subtype conversion of a function value A -> B is performed by wrapping // the function value in a thunk of type A' -> B'. The thunk takes an A' and // converts it into an A, calls the inner function value, and converts the // result from B to B'. // // VTable thunks // ============= // // If a base class is generic and a derived class substitutes some generic // parameter of the base with a concrete type, the derived class can override // methods in the base that involved generic types. In the derived class, a // method override that involves substituted types will have a different // SIL lowering than the base method. In this case, the overridden vtable entry // will point to a thunk which transforms parameters and results and invokes // the derived method. // // Some limited forms of subtyping are also supported for method overrides; // namely, a derived method's parameter can be a superclass of, or more // optional than, a parameter of the base, and result can be a subclass of, // or less optional than, the result of the base. // // Witness thunks // ============== // // Currently protocol witness methods are called with an additional generic // parameter bound to the Self type, and thus always require a thunk. // // Thunks for class method witnesses dispatch through the vtable allowing // inherited witnesses to be overridden in subclasses. Hence a witness thunk // might require two levels of abstraction difference -- the method might // override a base class method with more generic types, and the protocol // requirement may involve associated types which are always concrete in the // conforming class. 
// // Other thunks // ============ // // Foreign-to-native, native-to-foreign thunks for declarations and function // values are implemented in SILGenBridging.cpp. // //===----------------------------------------------------------------------===// #include "SILGen.h" #include "Scope.h" #include "swift/AST/GenericSignatureBuilder.h" #include "swift/AST/Decl.h" #include "swift/AST/DiagnosticsCommon.h" #include "swift/AST/ExistentialLayout.h" #include "swift/AST/GenericEnvironment.h" #include "swift/AST/ProtocolConformance.h" #include "swift/AST/Types.h" #include "swift/SIL/PrettyStackTrace.h" #include "swift/SIL/SILArgument.h" #include "swift/SIL/TypeLowering.h" #include "Initialization.h" #include "LValue.h" #include "RValue.h" #include "llvm/Support/Compiler.h" using namespace swift; using namespace Lowering; /// A helper function that pulls an element off the front of an array. template <class T> static const T &claimNext(ArrayRef<T> &array) { assert(!array.empty() && "claiming next from empty array!"); const T &result = array.front(); array = array.slice(1); return result; } namespace { /// An abstract class for transforming first-class SIL values. class Transform { private: SILGenFunction &SGF; SILLocation Loc; public: Transform(SILGenFunction &SGF, SILLocation loc) : SGF(SGF), Loc(loc) {} virtual ~Transform() = default; /// Transform an arbitrary value. RValue transform(RValue &&input, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt); /// Transform an arbitrary value. ManagedValue transform(ManagedValue input, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt); /// Transform a metatype value. ManagedValue transformMetatype(ManagedValue fn, AbstractionPattern inputOrigType, CanMetatypeType inputSubstType, AbstractionPattern outputOrigType, CanMetatypeType outputSubstType); /// Transform a tuple value. ManagedValue transformTuple(ManagedValue input, AbstractionPattern inputOrigType, CanTupleType inputSubstType, AbstractionPattern outputOrigType, CanTupleType outputSubstType, SGFContext ctxt); /// Transform a function value. 
ManagedValue transformFunction(ManagedValue fn, AbstractionPattern inputOrigType, CanAnyFunctionType inputSubstType, AbstractionPattern outputOrigType, CanAnyFunctionType outputSubstType, const TypeLowering &expectedTL); }; } // end anonymous namespace ; static ArrayRef<ProtocolConformanceRef> collectExistentialConformances(ModuleDecl *M, CanType fromType, CanType toType) { assert(!fromType.isAnyExistentialType()); auto layout = toType.getExistentialLayout(); auto protocols = layout.getProtocols(); SmallVector<ProtocolConformanceRef, 4> conformances; for (auto proto : protocols) { auto conformance = M->lookupConformance(fromType, proto->getDecl(), nullptr); conformances.push_back(*conformance); } return M->getASTContext().AllocateCopy(conformances); } static ArchetypeType *getOpenedArchetype(CanType openedType) { while (auto metatypeTy = dyn_cast<MetatypeType>(openedType)) openedType = metatypeTy.getInstanceType(); return cast<ArchetypeType>(openedType); } static ManagedValue emitTransformExistential(SILGenFunction &SGF, SILLocation loc, ManagedValue input, CanType inputType, CanType outputType, SGFContext ctxt) { assert(inputType != outputType); SILGenFunction::OpaqueValueState state; ArchetypeType *openedArchetype = nullptr; if (inputType->isAnyExistentialType()) { CanType openedType = ArchetypeType::getAnyOpened(inputType); SILType loweredOpenedType = SGF.getLoweredType(openedType); // Unwrap zero or more metatype levels openedArchetype = getOpenedArchetype(openedType); state = SGF.emitOpenExistential(loc, input, openedArchetype, loweredOpenedType, AccessKind::Read); inputType = openedType; } // Build conformance table CanType fromInstanceType = inputType; CanType toInstanceType = outputType; // Look through metatypes while (isa<MetatypeType>(fromInstanceType) && isa<ExistentialMetatypeType>(toInstanceType)) { fromInstanceType = cast<MetatypeType>(fromInstanceType) .getInstanceType(); toInstanceType = cast<ExistentialMetatypeType>(toInstanceType) .getInstanceType(); } ArrayRef<ProtocolConformanceRef> conformances = collectExistentialConformances(SGF.SGM.M.getSwiftModule(), fromInstanceType, toInstanceType); // Build result existential AbstractionPattern opaque = AbstractionPattern::getOpaque(); const TypeLowering &concreteTL = SGF.getTypeLowering(opaque, inputType); const TypeLowering &expectedTL = SGF.getTypeLowering(outputType); input = SGF.emitExistentialErasure( loc, inputType, concreteTL, expectedTL, conformances, ctxt, [&](SGFContext C) -> ManagedValue { if (openedArchetype) return SGF.manageOpaqueValue(state, loc, C); return input; }); return input; } /// Apply this transformation to an arbitrary value. RValue Transform::transform(RValue &&input, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt) { // Fast path: we don't have a tuple. auto inputTupleType = dyn_cast<TupleType>(inputSubstType); if (!inputTupleType) { assert(!isa<TupleType>(outputSubstType) && "transformation introduced a tuple?"); auto result = transform(std::move(input).getScalarValue(), inputOrigType, inputSubstType, outputOrigType, outputSubstType, ctxt); return RValue(SGF, Loc, outputSubstType, result); } // Okay, we have a tuple. The output type will also be a tuple unless // there's a subtyping conversion that erases tuples, but that's currently // not allowed by the typechecker, which considers existential erasure to // be a conversion relation, not a subtyping one. 
Anyway, it would be // possible to support that here, but since it's not currently required... assert(isa<TupleType>(outputSubstType) && "subtype constraint erasing tuple is not currently implemented"); auto outputTupleType = cast<TupleType>(outputSubstType); assert(inputTupleType->getNumElements() == outputTupleType->getNumElements()); // Pull the r-value apart. SmallVector<RValue, 8> inputElts; std::move(input).extractElements(inputElts); // Emit into the context initialization if it's present and possible // to split. SmallVector<InitializationPtr, 4> eltInitsBuffer; MutableArrayRef<InitializationPtr> eltInits; auto tupleInit = ctxt.getEmitInto(); if (!ctxt.getEmitInto() || !ctxt.getEmitInto()->canSplitIntoTupleElements()) { tupleInit = nullptr; } else { eltInits = tupleInit->splitIntoTupleElements(SGF, Loc, outputTupleType, eltInitsBuffer); } // At this point, if tupleInit is non-null, we must emit all of the // elements into their corresponding contexts. assert(tupleInit == nullptr || eltInits.size() == inputTupleType->getNumElements()); SmallVector<ManagedValue, 8> outputExpansion; for (auto eltIndex : indices(inputTupleType->getElementTypes())) { // Determine the appropriate context for the element. SGFContext eltCtxt; if (tupleInit) eltCtxt = SGFContext(eltInits[eltIndex].get()); // Recurse. RValue outputElt = transform(std::move(inputElts[eltIndex]), inputOrigType.getTupleElementType(eltIndex), inputTupleType.getElementType(eltIndex), outputOrigType.getTupleElementType(eltIndex), outputTupleType.getElementType(eltIndex), eltCtxt); // Force the r-value into its context if necessary. assert(!outputElt.isInContext() || tupleInit != nullptr); if (tupleInit && !outputElt.isInContext()) { std::move(outputElt).forwardInto(SGF, Loc, eltInits[eltIndex].get()); } else { std::move(outputElt).getAll(outputExpansion); } } // If we emitted into context, be sure to finish the overall initialization. if (tupleInit) { tupleInit->finishInitialization(SGF); return RValue::forInContext(); } return RValue::withPreExplodedElements(outputExpansion, outputTupleType); } // Single @objc protocol value metatypes can be converted to the ObjC // Protocol class type. static bool isProtocolClass(Type t) { auto classDecl = t->getClassOrBoundGenericClass(); if (!classDecl) return false; ASTContext &ctx = classDecl->getASTContext(); return (classDecl->getName() == ctx.Id_Protocol && classDecl->getModuleContext()->getName() == ctx.Id_ObjectiveC); }; static ManagedValue emitManagedLoad(SILGenFunction &gen, SILLocation loc, ManagedValue addr, const TypeLowering &addrTL) { // SEMANTIC ARC TODO: When the verifier is finished, revisit this. auto loadedValue = addrTL.emitLoad(gen.B, loc, addr.forward(gen), LoadOwnershipQualifier::Take); return gen.emitManagedRValueWithCleanup(loadedValue, addrTL); } /// Apply this transformation to an arbitrary value. ManagedValue Transform::transform(ManagedValue v, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt) { // Look through inout types. if (isa<InOutType>(inputSubstType)) inputSubstType = CanType(inputSubstType->getInOutObjectType()); // Load if the result isn't address-only. All the translation routines // expect this. 
if (v.getType().isAddress()) { auto &inputTL = SGF.getTypeLowering(v.getType()); if (!inputTL.isAddressOnly()) { v = emitManagedLoad(SGF, Loc, v, inputTL); } } const TypeLowering &expectedTL = SGF.getTypeLowering(outputOrigType, outputSubstType); auto loweredResultTy = expectedTL.getLoweredType(); // Nothing to convert if (v.getType() == loweredResultTy) return v; OptionalTypeKind outputOTK, inputOTK; CanType inputObjectType = inputSubstType.getAnyOptionalObjectType(inputOTK); CanType outputObjectType = outputSubstType.getAnyOptionalObjectType(outputOTK); // If the value is less optional than the desired formal type, wrap in // an optional. if (outputOTK != OTK_None && inputOTK == OTK_None) { return SGF.emitInjectOptional(Loc, expectedTL, ctxt, [&](SGFContext objectCtxt) { return transform(v, inputOrigType, inputSubstType, outputOrigType.getAnyOptionalObjectType(), outputObjectType, objectCtxt); }); } // If the value is IUO, but the desired formal type isn't optional, force it. if (inputOTK == OTK_ImplicitlyUnwrappedOptional && outputOTK == OTK_None) { v = SGF.emitCheckedGetOptionalValueFrom(Loc, v, SGF.getTypeLowering(v.getType()), SGFContext()); // Check if we have any more conversions remaining. if (v.getType() == loweredResultTy) return v; inputOTK = OTK_None; } // Optional-to-optional conversion. if (inputOTK != OTK_None && outputOTK != OTK_None) { // If the conversion is trivial, just cast. if (SGF.SGM.Types.checkForABIDifferences(v.getType(), loweredResultTy) == TypeConverter::ABIDifference::Trivial) { SILValue result = v.getValue(); if (v.getType().isAddress()) result = SGF.B.createUncheckedAddrCast(Loc, result, loweredResultTy); else result = SGF.B.createUncheckedBitCast(Loc, result, loweredResultTy); return ManagedValue(result, v.getCleanup()); } auto transformOptionalPayload = [&](SILGenFunction &gen, SILLocation loc, ManagedValue input, SILType loweredResultTy, SGFContext context) -> ManagedValue { return transform(input, inputOrigType.getAnyOptionalObjectType(), inputObjectType, outputOrigType.getAnyOptionalObjectType(), outputObjectType, context); }; return SGF.emitOptionalToOptional(Loc, v, loweredResultTy, transformOptionalPayload); } // Abstraction changes: // - functions if (auto outputFnType = dyn_cast<AnyFunctionType>(outputSubstType)) { auto inputFnType = cast<AnyFunctionType>(inputSubstType); return transformFunction(v, inputOrigType, inputFnType, outputOrigType, outputFnType, expectedTL); } // - tuples of transformable values if (auto outputTupleType = dyn_cast<TupleType>(outputSubstType)) { auto inputTupleType = cast<TupleType>(inputSubstType); return transformTuple(v, inputOrigType, inputTupleType, outputOrigType, outputTupleType, ctxt); } // - metatypes if (auto outputMetaType = dyn_cast<MetatypeType>(outputSubstType)) { if (auto inputMetaType = dyn_cast<MetatypeType>(inputSubstType)) { return transformMetatype(v, inputOrigType, inputMetaType, outputOrigType, outputMetaType); } } // Subtype conversions: // - upcasts if (outputSubstType->getClassOrBoundGenericClass() && inputSubstType->getClassOrBoundGenericClass()) { auto class1 = inputSubstType->getClassOrBoundGenericClass(); auto class2 = outputSubstType->getClassOrBoundGenericClass(); // CF <-> Objective-C via toll-free bridging. 
if ((class1->getForeignClassKind() == ClassDecl::ForeignKind::CFType) ^ (class2->getForeignClassKind() == ClassDecl::ForeignKind::CFType)) { return ManagedValue(SGF.B.createUncheckedRefCast(Loc, v.getValue(), loweredResultTy), v.getCleanup()); } if (outputSubstType->isExactSuperclassOf(inputSubstType)) { // Upcast to a superclass. return ManagedValue(SGF.B.createUpcast(Loc, v.getValue(), loweredResultTy), v.getCleanup()); } else { // Unchecked-downcast to a covariant return type. assert(inputSubstType->isExactSuperclassOf(outputSubstType) && "should be inheritance relationship between input and output"); return SGF.emitManagedRValueWithCleanup( SGF.B.createUncheckedRefCast(Loc, v.forward(SGF), loweredResultTy)); } } // - upcasts from an archetype if (outputSubstType->getClassOrBoundGenericClass()) { if (auto archetypeType = dyn_cast<ArchetypeType>(inputSubstType)) { if (archetypeType->getSuperclass()) { // Replace the cleanup with a new one on the superclass value so we // always use concrete retain/release operations. return ManagedValue(SGF.B.createUpcast(Loc, v.getValue(), loweredResultTy), v.getCleanup()); } } } // - metatype to Protocol conversion if (isProtocolClass(outputSubstType)) { if (auto metatypeTy = dyn_cast<MetatypeType>(inputSubstType)) { return SGF.emitProtocolMetatypeToObject(Loc, metatypeTy, SGF.getLoweredLoadableType(outputSubstType)); } } // - metatype to AnyObject conversion if (outputSubstType->isAnyObject() && isa<MetatypeType>(inputSubstType)) { return SGF.emitClassMetatypeToObject(Loc, v, SGF.getLoweredLoadableType(outputSubstType)); } // - existential metatype to AnyObject conversion if (outputSubstType->isAnyObject() && isa<ExistentialMetatypeType>(inputSubstType)) { return SGF.emitExistentialMetatypeToObject(Loc, v, SGF.getLoweredLoadableType(outputSubstType)); } // - existentials if (outputSubstType->isAnyExistentialType()) { // We have to re-abstract payload if its a metatype or a function v = SGF.emitSubstToOrigValue(Loc, v, AbstractionPattern::getOpaque(), inputSubstType); return emitTransformExistential(SGF, Loc, v, inputSubstType, outputSubstType, ctxt); } // - upcasting class-constrained existentials or metatypes thereof if (inputSubstType->isAnyExistentialType()) { auto instanceType = inputSubstType; while (auto metatypeType = dyn_cast<ExistentialMetatypeType>(instanceType)) instanceType = metatypeType.getInstanceType(); auto layout = instanceType.getExistentialLayout(); if (layout.superclass) { CanType openedType = ArchetypeType::getAnyOpened(inputSubstType); SILType loweredOpenedType = SGF.getLoweredType(openedType); // Unwrap zero or more metatype levels auto openedArchetype = getOpenedArchetype(openedType); auto state = SGF.emitOpenExistential(Loc, v, openedArchetype, loweredOpenedType, AccessKind::Read); auto payload = SGF.manageOpaqueValue(state, Loc, SGFContext()); return transform(payload, AbstractionPattern::getOpaque(), openedType, outputOrigType, outputSubstType, ctxt); } } // - T : Hashable to AnyHashable if (isa<StructType>(outputSubstType) && outputSubstType->getAnyNominal() == SGF.getASTContext().getAnyHashableDecl()) { auto *protocol = SGF.getASTContext().getProtocol( KnownProtocolKind::Hashable); auto conformance = SGF.SGM.M.getSwiftModule()->lookupConformance( inputSubstType, protocol, nullptr); auto result = SGF.emitAnyHashableErasure(Loc, v, inputSubstType, *conformance, ctxt); if (result.isInContext()) return ManagedValue::forInContext(); return std::move(result).getAsSingleValue(SGF, Loc); } // Should have handled the conversion in 
one of the cases above. llvm_unreachable("Unhandled transform?"); } ManagedValue Transform::transformMetatype(ManagedValue meta, AbstractionPattern inputOrigType, CanMetatypeType inputSubstType, AbstractionPattern outputOrigType, CanMetatypeType outputSubstType) { assert(!meta.hasCleanup() && "metatype with cleanup?!"); auto expectedType = SGF.getTypeLowering(outputOrigType, outputSubstType).getLoweredType(); auto wasRepr = meta.getType().castTo<MetatypeType>()->getRepresentation(); auto willBeRepr = expectedType.castTo<MetatypeType>()->getRepresentation(); SILValue result; if ((wasRepr == MetatypeRepresentation::Thick && willBeRepr == MetatypeRepresentation::Thin) || (wasRepr == MetatypeRepresentation::Thin && willBeRepr == MetatypeRepresentation::Thick)) { // If we have a thin-to-thick abstraction change, cook up new a metatype // value out of nothing -- thin metatypes carry no runtime state. result = SGF.B.createMetatype(Loc, expectedType); } else { // Otherwise, we have a metatype subtype conversion of thick metatypes. assert(wasRepr == willBeRepr && "Unhandled metatype conversion"); result = SGF.B.createUpcast(Loc, meta.getUnmanagedValue(), expectedType); } return ManagedValue::forUnmanaged(result); } /// Explode a managed tuple into a bunch of managed elements. /// /// If the tuple is in memory, the result elements will also be in /// memory. typedef std::pair<ManagedValue, const TypeLowering *> ManagedValueAndType; static void explodeTuple(SILGenFunction &gen, SILLocation loc, ManagedValue managedTuple, SmallVectorImpl<ManagedValueAndType> &out) { // None of the operations we do here can fail, so we can atomically // disable the tuple's cleanup and then create cleanups for all the // elements. SILValue tuple = managedTuple.forward(gen); auto tupleSILType = tuple->getType(); auto tupleType = tupleSILType.castTo<TupleType>(); out.reserve(tupleType->getNumElements()); for (auto index : indices(tupleType.getElementTypes())) { // We're starting with a SIL-lowered tuple type, so the elements // must also all be SIL-lowered. SILType eltType = tupleSILType.getTupleElementType(index); auto &eltTL = gen.getTypeLowering(eltType); ManagedValue elt; if (tupleSILType.isAddress()) { auto addr = gen.B.createTupleElementAddr(loc, tuple, index, eltType); elt = gen.emitManagedBufferWithCleanup(addr, eltTL); } else { auto value = gen.B.createTupleExtract(loc, tuple, index, eltType); elt = gen.emitManagedRValueWithCleanup(value, eltTL); } out.push_back(ManagedValueAndType(elt, &eltTL)); } } /// Apply this transformation to all the elements of a tuple value, /// which just entails mapping over each of its component elements. ManagedValue Transform::transformTuple(ManagedValue inputTuple, AbstractionPattern inputOrigType, CanTupleType inputSubstType, AbstractionPattern outputOrigType, CanTupleType outputSubstType, SGFContext ctxt) { const TypeLowering &outputTL = SGF.getTypeLowering(outputOrigType, outputSubstType); assert((outputTL.isAddressOnly() == inputTuple.getType().isAddress() || !SGF.silConv.useLoweredAddresses()) && "expected loadable inputs to have been loaded"); // If there's no representation difference, we're done. 
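  // (A difference typically arises when elements' lowering depends on the
  //  abstraction pattern -- function elements, for instance -- in which case
  //  the tuple is rebuilt element by element below.)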
if (outputTL.getLoweredType() == inputTuple.getType()) return inputTuple; assert(inputOrigType.matchesTuple(outputSubstType)); assert(outputOrigType.matchesTuple(outputSubstType)); auto inputType = inputTuple.getType().castTo<TupleType>(); assert(outputSubstType->getNumElements() == inputType->getNumElements()); // If the tuple is address only, we need to do the operation in memory. SILValue outputAddr; if (outputTL.isAddressOnly() && SGF.silConv.useLoweredAddresses()) outputAddr = SGF.getBufferForExprResult(Loc, outputTL.getLoweredType(), ctxt); // Explode the tuple into individual managed values. SmallVector<ManagedValueAndType, 4> inputElts; explodeTuple(SGF, Loc, inputTuple, inputElts); // Track all the managed elements whether or not we're actually // emitting to an address, just so that we can disable them after. SmallVector<ManagedValue, 4> outputElts; for (auto index : indices(inputType->getElementTypes())) { auto &inputEltTL = *inputElts[index].second; ManagedValue inputElt = inputElts[index].first; if (inputElt.getType().isAddress() && !inputEltTL.isAddressOnly()) { inputElt = emitManagedLoad(SGF, Loc, inputElt, inputEltTL); } auto inputEltOrigType = inputOrigType.getTupleElementType(index); auto inputEltSubstType = inputSubstType.getElementType(index); auto outputEltOrigType = outputOrigType.getTupleElementType(index); auto outputEltSubstType = outputSubstType.getElementType(index); // If we're emitting to memory, project out this element in the // destination buffer, then wrap that in an Initialization to // track the cleanup. Optional<TemporaryInitialization> outputEltTemp; if (outputAddr) { SILValue outputEltAddr = SGF.B.createTupleElementAddr(Loc, outputAddr, index); auto &outputEltTL = SGF.getTypeLowering(outputEltAddr->getType()); assert(outputEltTL.isAddressOnly() == inputEltTL.isAddressOnly()); auto cleanup = SGF.enterDormantTemporaryCleanup(outputEltAddr, outputEltTL); outputEltTemp.emplace(outputEltAddr, cleanup); } SGFContext eltCtxt = (outputEltTemp ? SGFContext(&outputEltTemp.getValue()) : SGFContext()); auto outputElt = transform(inputElt, inputEltOrigType, inputEltSubstType, outputEltOrigType, outputEltSubstType, eltCtxt); // If we're not emitting to memory, remember this element for // later assembly into a tuple. if (!outputEltTemp) { assert(outputElt); assert(!inputEltTL.isAddressOnly() || !SGF.silConv.useLoweredAddresses()); outputElts.push_back(outputElt); continue; } // Otherwise, make sure we emit into the slot. auto &temp = outputEltTemp.getValue(); auto outputEltAddr = temp.getManagedAddress(); // That might involve storing directly. if (!outputElt.isInContext()) { outputElt.forwardInto(SGF, Loc, outputEltAddr.getValue()); temp.finishInitialization(SGF); } outputElts.push_back(outputEltAddr); } // Okay, disable all the individual element cleanups and collect // the values for a potential tuple aggregate. SmallVector<SILValue, 4> outputEltValues; for (auto outputElt : outputElts) { SILValue value = outputElt.forward(SGF); if (!outputAddr) outputEltValues.push_back(value); } // If we're emitting to an address, just manage that. if (outputAddr) return SGF.manageBufferForExprResult(outputAddr, outputTL, ctxt); // Otherwise, assemble the tuple value and manage that. 
auto outputTuple = SGF.B.createTuple(Loc, outputTL.getLoweredType(), outputEltValues); return SGF.emitManagedRValueWithCleanup(outputTuple, outputTL); } static ManagedValue manageParam(SILGenFunction &gen, SILLocation loc, SILValue paramValue, SILParameterInfo info, bool allowPlusZero) { switch (info.getConvention()) { case ParameterConvention::Indirect_In_Guaranteed: if (gen.silConv.useLoweredAddresses()) { // FIXME: Avoid a behavior change while guaranteed self is disabled by // default. if (allowPlusZero) { return ManagedValue::forUnmanaged(paramValue); } else { auto copy = gen.emitTemporaryAllocation(loc, paramValue->getType()); gen.B.createCopyAddr(loc, paramValue, copy, IsNotTake, IsInitialization); return gen.emitManagedBufferWithCleanup(copy); } } LLVM_FALLTHROUGH; case ParameterConvention::Direct_Guaranteed: if (allowPlusZero) return gen.emitManagedBeginBorrow(loc, paramValue); LLVM_FALLTHROUGH; // Unowned parameters are only guaranteed at the instant of the call, so we // must retain them even if we're in a context that can accept a +0 value. case ParameterConvention::Direct_Unowned: paramValue = gen.getTypeLowering(paramValue->getType()) .emitCopyValue(gen.B, loc, paramValue); LLVM_FALLTHROUGH; case ParameterConvention::Direct_Owned: return gen.emitManagedRValueWithCleanup(paramValue); case ParameterConvention::Indirect_In: case ParameterConvention::Indirect_In_Constant: if (gen.silConv.useLoweredAddresses()) return gen.emitManagedBufferWithCleanup(paramValue); return gen.emitManagedRValueWithCleanup(paramValue); case ParameterConvention::Indirect_Inout: case ParameterConvention::Indirect_InoutAliasable: return ManagedValue::forLValue(paramValue); } llvm_unreachable("bad parameter convention"); } void SILGenFunction::collectThunkParams(SILLocation loc, SmallVectorImpl<ManagedValue> &params, bool allowPlusZero) { // Add the indirect results. for (auto resultTy : F.getConventions().getIndirectSILResultTypes()) { auto paramTy = F.mapTypeIntoContext(resultTy); SILArgument *arg = F.begin()->createFunctionArgument(paramTy); (void)arg; } // Add the parameters. auto paramTypes = F.getLoweredFunctionType()->getParameters(); for (auto param : paramTypes) { auto paramTy = F.mapTypeIntoContext(F.getConventions().getSILType(param)); auto paramValue = F.begin()->createFunctionArgument(paramTy); auto paramMV = manageParam(*this, loc, paramValue, param, allowPlusZero); params.push_back(paramMV); } } /// Force a ManagedValue to be stored into a temporary initialization /// if it wasn't emitted that way directly. static void emitForceInto(SILGenFunction &SGF, SILLocation loc, ManagedValue result, TemporaryInitialization &temp) { if (result.isInContext()) return; result.forwardInto(SGF, loc, temp.getAddress()); temp.finishInitialization(SGF); } /// If the type is a single-element tuple, return the element type. 
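/// For example, a one-element tuple yields its element type, while any other
/// type is returned unchanged.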
static CanType getSingleTupleElement(CanType type) { if (auto tupleType = dyn_cast<TupleType>(type)) { if (tupleType->getNumElements() == 1) return tupleType.getElementType(0); } return type; } namespace { class TranslateArguments { SILGenFunction &SGF; SILLocation Loc; ArrayRef<ManagedValue> Inputs; SmallVectorImpl<ManagedValue> &Outputs; ArrayRef<SILParameterInfo> OutputTypes; public: TranslateArguments(SILGenFunction &SGF, SILLocation loc, ArrayRef<ManagedValue> inputs, SmallVectorImpl<ManagedValue> &outputs, ArrayRef<SILParameterInfo> outputTypes) : SGF(SGF), Loc(loc), Inputs(inputs), Outputs(outputs), OutputTypes(outputTypes) {} void translate(AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType) { // Most of this function is about tuples: tuples can be represented // as one or many values, with varying levels of indirection. auto inputTupleType = dyn_cast<TupleType>(inputSubstType); auto outputTupleType = dyn_cast<TupleType>(outputSubstType); // Look inside one-element exploded tuples, but not if both input // and output types are *both* one-element tuples. if (!(inputTupleType && outputTupleType && inputTupleType.getElementTypes().size() == 1 && outputTupleType.getElementTypes().size() == 1)) { if (inputOrigType.isTuple() && inputOrigType.getNumTupleElements() == 1) { inputOrigType = inputOrigType.getTupleElementType(0); inputSubstType = getSingleTupleElement(inputSubstType); return translate(inputOrigType, inputSubstType, outputOrigType, outputSubstType); } if (outputOrigType.isTuple() && outputOrigType.getNumTupleElements() == 1) { outputOrigType = outputOrigType.getTupleElementType(0); outputSubstType = getSingleTupleElement(outputSubstType); return translate(inputOrigType, inputSubstType, outputOrigType, outputSubstType); } } // Special-case: tuples containing inouts. if (inputTupleType && inputTupleType->hasInOutElement()) { // Non-materializable tuple types cannot be bound as generic // arguments, so none of the remaining transformations apply. // Instead, the outermost tuple layer is exploded, even when // they are being passed opaquely. See the comment in // AbstractionPattern.h for a discussion. return translateParallelExploded(inputOrigType, inputTupleType, outputOrigType, outputTupleType); } // Case where the input type is an exploded tuple. if (inputOrigType.isTuple()) { if (outputOrigType.isTuple()) { // Both input and output are exploded tuples, easy case. return translateParallelExploded(inputOrigType, inputTupleType, outputOrigType, outputTupleType); } // Tuple types are subtypes of their optionals if (auto outputObjectType = outputSubstType.getAnyOptionalObjectType()) { auto outputOrigObjectType = outputOrigType.getAnyOptionalObjectType(); if (auto outputTupleType = dyn_cast<TupleType>(outputObjectType)) { // The input is exploded and the output is an optional tuple. // Translate values and collect them into a single optional // payload. auto result = translateAndImplodeIntoOptional(inputOrigType, inputTupleType, outputOrigObjectType, outputTupleType); Outputs.push_back(result); return; } // Tuple types are subtypes of optionals of Any, too. assert(outputObjectType->isAny()); // First, construct the existential. auto result = translateAndImplodeIntoAny(inputOrigType, inputTupleType, outputOrigObjectType, outputObjectType); // Now, convert it to an optional. 
translateSingle(outputOrigObjectType, outputObjectType, outputOrigType, outputSubstType, result, claimNextOutputType()); return; } if (outputSubstType->isAny()) { claimNextOutputType(); auto result = translateAndImplodeIntoAny(inputOrigType, inputTupleType, outputOrigType, outputSubstType); Outputs.push_back(result); return; } if (outputTupleType) { // The input is exploded and the output is not. Translate values // and store them to a result tuple in memory. assert(outputOrigType.isTypeParameter() && "Output is not a tuple and is not opaque?"); auto outputTy = SGF.getSILType(claimNextOutputType()); auto &outputTL = SGF.getTypeLowering(outputTy); if (SGF.silConv.useLoweredAddresses()) { auto temp = SGF.emitTemporary(Loc, outputTL); translateAndImplodeInto(inputOrigType, inputTupleType, outputOrigType, outputTupleType, *temp); Outputs.push_back(temp->getManagedAddress()); } else { auto result = translateAndImplodeIntoValue( inputOrigType, inputTupleType, outputOrigType, outputTupleType, outputTL.getLoweredType()); Outputs.push_back(result); } return; } llvm_unreachable("Unhandled conversion from exploded tuple"); } // Handle output being an exploded tuple when the input is opaque. if (outputOrigType.isTuple()) { if (inputTupleType) { // The input is exploded and the output is not. Translate values // and store them to a result tuple in memory. assert(inputOrigType.isTypeParameter() && "Input is not a tuple and is not opaque?"); return translateAndExplodeOutOf(inputOrigType, inputTupleType, outputOrigType, outputTupleType, claimNextInput()); } // FIXME: IUO<Tuple> to Tuple llvm_unreachable("Unhandled conversion to exploded tuple"); } // Okay, we are now working with a single value turning into a // single value. auto inputElt = claimNextInput(); auto outputEltType = claimNextOutputType(); translateSingle(inputOrigType, inputSubstType, outputOrigType, outputSubstType, inputElt, outputEltType); } private: /// Take a tuple that has been exploded in the input and turn it into /// a tuple value in the output. ManagedValue translateAndImplodeIntoValue(AbstractionPattern inputOrigType, CanTupleType inputType, AbstractionPattern outputOrigType, CanTupleType outputType, SILType loweredOutputTy) { assert(loweredOutputTy.is<TupleType>()); SmallVector<ManagedValue, 4> elements; assert(outputType->getNumElements() == inputType->getNumElements()); for (unsigned i : indices(outputType->getElementTypes())) { auto inputOrigEltType = inputOrigType.getTupleElementType(i); auto inputEltType = inputType.getElementType(i); auto outputOrigEltType = outputOrigType.getTupleElementType(i); auto outputEltType = outputType.getElementType(i); SILType loweredOutputEltTy = loweredOutputTy.getTupleElementType(i); ManagedValue elt; if (auto inputEltTupleType = dyn_cast<TupleType>(inputEltType)) { elt = translateAndImplodeIntoValue(inputOrigEltType, inputEltTupleType, outputOrigEltType, cast<TupleType>(outputEltType), loweredOutputEltTy); } else { elt = claimNextInput(); // Load if necessary. 
if (elt.getType().isAddress()) { elt = SGF.emitLoad(Loc, elt.forward(SGF), SGF.getTypeLowering(elt.getType()), SGFContext(), IsTake); } } if (elt.getType() != loweredOutputEltTy) elt = translatePrimitive(inputOrigEltType, inputEltType, outputOrigEltType, outputEltType, elt); elements.push_back(elt); } SmallVector<SILValue, 4> forwarded; for (auto &elt : elements) forwarded.push_back(elt.forward(SGF)); auto tuple = SGF.B.createTuple(Loc, loweredOutputTy, forwarded); return SGF.emitManagedRValueWithCleanup(tuple); } /// Handle a tuple that has been exploded in the input but wrapped in /// an optional in the output. ManagedValue translateAndImplodeIntoOptional(AbstractionPattern inputOrigType, CanTupleType inputTupleType, AbstractionPattern outputOrigType, CanTupleType outputTupleType) { assert(!inputTupleType->hasInOutElement() && !outputTupleType->hasInOutElement()); assert(inputTupleType->getNumElements() == outputTupleType->getNumElements()); // Collect the tuple elements. auto &loweredTL = SGF.getTypeLowering(outputOrigType, outputTupleType); auto loweredTy = loweredTL.getLoweredType(); auto optionalTy = SGF.getSILType(claimNextOutputType()); auto someDecl = SGF.getASTContext().getOptionalSomeDecl(); if (loweredTL.isLoadable() || !SGF.silConv.useLoweredAddresses()) { auto payload = translateAndImplodeIntoValue(inputOrigType, inputTupleType, outputOrigType, outputTupleType, loweredTy); auto optional = SGF.B.createEnum(Loc, payload.getValue(), someDecl, optionalTy); return ManagedValue(optional, payload.getCleanup()); } else { auto optionalBuf = SGF.emitTemporaryAllocation(Loc, optionalTy); auto tupleBuf = SGF.B.createInitEnumDataAddr(Loc, optionalBuf, someDecl, loweredTy); auto tupleTemp = SGF.useBufferAsTemporary(tupleBuf, loweredTL); translateAndImplodeInto(inputOrigType, inputTupleType, outputOrigType, outputTupleType, *tupleTemp); SGF.B.createInjectEnumAddr(Loc, optionalBuf, someDecl); auto payload = tupleTemp->getManagedAddress(); return ManagedValue(optionalBuf, payload.getCleanup()); } } /// Handle a tuple that has been exploded in the input but wrapped /// in an existential in the output. ManagedValue translateAndImplodeIntoAny(AbstractionPattern inputOrigType, CanTupleType inputTupleType, AbstractionPattern outputOrigType, CanType outputSubstType) { auto existentialTy = SGF.getLoweredType(outputOrigType, outputSubstType); auto existentialBuf = SGF.emitTemporaryAllocation(Loc, existentialTy); auto opaque = AbstractionPattern::getOpaque(); auto &concreteTL = SGF.getTypeLowering(opaque, inputTupleType); auto tupleBuf = SGF.B.createInitExistentialAddr(Loc, existentialBuf, inputTupleType, concreteTL.getLoweredType(), /*conformances=*/{}); auto tupleTemp = SGF.useBufferAsTemporary(tupleBuf, concreteTL); translateAndImplodeInto(inputOrigType, inputTupleType, opaque, inputTupleType, *tupleTemp); auto payload = tupleTemp->getManagedAddress(); if (SGF.silConv.useLoweredAddresses()) { return ManagedValue(existentialBuf, payload.getCleanup()); } // We are under opaque value(s) mode - load the any and init an opaque auto loadedPayload = SGF.emitManagedLoadCopy(Loc, payload.getValue()); auto &anyTL = SGF.getTypeLowering(opaque, outputSubstType); SILValue loadedOpaque = SGF.B.createInitExistentialOpaque( Loc, anyTL.getLoweredType(), inputTupleType, loadedPayload.getValue(), /*Conformances=*/{}); return ManagedValue(loadedOpaque, loadedPayload.getCleanup()); } /// Handle a tuple that has been exploded in both the input and /// the output. 
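  /// Each element is handled by recursing into translate(), so nested tuples
  /// and per-element abstraction differences are dealt with there.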
void translateParallelExploded(AbstractionPattern inputOrigType, CanTupleType inputSubstType, AbstractionPattern outputOrigType, CanTupleType outputSubstType) { assert(inputOrigType.matchesTuple(inputSubstType)); assert(outputOrigType.matchesTuple(outputSubstType)); // Non-materializable input and materializable output occurs // when witness method thunks re-abstract a non-mutating // witness for a mutating requirement. The inout self is just // loaded to produce a value in this case. assert(inputSubstType->hasInOutElement() || !outputSubstType->hasInOutElement()); assert(inputSubstType->getNumElements() == outputSubstType->getNumElements()); for (auto index : indices(outputSubstType.getElementTypes())) { translate(inputOrigType.getTupleElementType(index), inputSubstType.getElementType(index), outputOrigType.getTupleElementType(index), outputSubstType.getElementType(index)); } } /// Given that a tuple value is being passed indirectly in the /// input, explode it and translate the elements. void translateAndExplodeOutOf(AbstractionPattern inputOrigType, CanTupleType inputSubstType, AbstractionPattern outputOrigType, CanTupleType outputSubstType, ManagedValue inputTupleAddr) { assert(inputOrigType.isTypeParameter()); assert(outputOrigType.matchesTuple(outputSubstType)); assert(!inputSubstType->hasInOutElement() && !outputSubstType->hasInOutElement()); assert(inputSubstType->getNumElements() == outputSubstType->getNumElements()); SmallVector<ManagedValueAndType, 4> inputEltAddrs; explodeTuple(SGF, Loc, inputTupleAddr, inputEltAddrs); assert(inputEltAddrs.size() == outputSubstType->getNumElements()); for (auto index : indices(outputSubstType.getElementTypes())) { auto inputEltOrigType = inputOrigType.getTupleElementType(index); auto inputEltSubstType = inputSubstType.getElementType(index); auto outputEltOrigType = outputOrigType.getTupleElementType(index); auto outputEltSubstType = outputSubstType.getElementType(index); auto inputEltAddr = inputEltAddrs[index].first; assert(inputEltAddr.getType().isAddress() || !SGF.silConv.useLoweredAddresses()); if (auto outputEltTupleType = dyn_cast<TupleType>(outputEltSubstType)) { assert(outputEltOrigType.isTuple()); auto inputEltTupleType = cast<TupleType>(inputEltSubstType); translateAndExplodeOutOf(inputEltOrigType, inputEltTupleType, outputEltOrigType, outputEltTupleType, inputEltAddr); } else { auto outputType = claimNextOutputType(); translateSingle(inputEltOrigType, inputEltSubstType, outputEltOrigType, outputEltSubstType, inputEltAddr, outputType); } } } /// Given that a tuple value is being passed indirectly in the /// output, translate the elements and implode it. 
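  /// Elements are emitted directly into projected slots of the destination
  /// temporary; the per-element cleanups are forwarded once the whole tuple
  /// initialization is finished.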
void translateAndImplodeInto(AbstractionPattern inputOrigType, CanTupleType inputSubstType, AbstractionPattern outputOrigType, CanTupleType outputSubstType, TemporaryInitialization &tupleInit) { assert(inputOrigType.matchesTuple(inputSubstType)); assert(outputOrigType.matchesTuple(outputSubstType)); assert(!inputSubstType->hasInOutElement() && !outputSubstType->hasInOutElement()); assert(inputSubstType->getNumElements() == outputSubstType->getNumElements()); SmallVector<CleanupHandle, 4> cleanups; for (auto index : indices(outputSubstType.getElementTypes())) { auto inputEltOrigType = inputOrigType.getTupleElementType(index); auto inputEltSubstType = inputSubstType.getElementType(index); auto outputEltOrigType = outputOrigType.getTupleElementType(index); auto outputEltSubstType = outputSubstType.getElementType(index); auto eltAddr = SGF.B.createTupleElementAddr(Loc, tupleInit.getAddress(), index); auto &outputEltTL = SGF.getTypeLowering(eltAddr->getType()); CleanupHandle eltCleanup = SGF.enterDormantTemporaryCleanup(eltAddr, outputEltTL); if (eltCleanup.isValid()) cleanups.push_back(eltCleanup); TemporaryInitialization eltInit(eltAddr, eltCleanup); if (auto outputEltTupleType = dyn_cast<TupleType>(outputEltSubstType)) { auto inputEltTupleType = cast<TupleType>(inputEltSubstType); translateAndImplodeInto(inputEltOrigType, inputEltTupleType, outputEltOrigType, outputEltTupleType, eltInit); } else { // Otherwise, we come from a single value. auto input = claimNextInput(); translateSingleInto(inputEltOrigType, inputEltSubstType, outputEltOrigType, outputEltSubstType, input, eltInit); } } // Deactivate all the element cleanups and activate the tuple cleanup. for (auto cleanup : cleanups) SGF.Cleanups.forwardCleanup(cleanup); tupleInit.finishInitialization(SGF); } /// Translate a single value and add it as an output. void translateSingle(AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, ManagedValue input, SILParameterInfo result) { // Easy case: we want to pass exactly this value. if (input.getType() == SGF.getSILType(result)) { Outputs.push_back(input); return; } switch (result.getConvention()) { // Direct translation is relatively easy. case ParameterConvention::Direct_Owned: case ParameterConvention::Direct_Unowned: { auto output = translatePrimitive(inputOrigType, inputSubstType, outputOrigType, outputSubstType, input); assert(output.getType() == SGF.getSILType(result)); // If our output is guaranteed, we need to create a copy here. if (output.getOwnershipKind() == ValueOwnershipKind::Guaranteed) output = output.copyUnmanaged(SGF, Loc); Outputs.push_back(output); return; } case ParameterConvention::Direct_Guaranteed: { auto output = translatePrimitive(inputOrigType, inputSubstType, outputOrigType, outputSubstType, input); assert(output.getType() == SGF.getSILType(result)); // If our output value is not guaranteed, we need to: // // 1. Unowned - Copy + Borrow. // 2. Owned - Borrow. // 3. Trivial - do nothing. // // This means we can first transition unowned => owned and then handle // the new owned value using the same code path as values that are // initially owned. 
if (output.getOwnershipKind() == ValueOwnershipKind::Unowned) { assert(!output.hasCleanup()); output = SGF.emitManagedRetain(Loc, output.getValue()); } if (output.getOwnershipKind() == ValueOwnershipKind::Owned) { output = SGF.emitManagedBeginBorrow(Loc, output.getValue()); } Outputs.push_back(output); return; } case ParameterConvention::Indirect_Inout: { // If it's inout, we need writeback. llvm::errs() << "inout writeback in abstraction difference thunk " "not yet implemented\n"; llvm::errs() << "input value "; input.getValue()->dump(); llvm::errs() << "output type " << SGF.getSILType(result) << "\n"; abort(); } case ParameterConvention::Indirect_In: case ParameterConvention::Indirect_In_Constant: case ParameterConvention::Indirect_In_Guaranteed: { if (SGF.silConv.useLoweredAddresses()) { // We need to translate into a temporary. auto &outputTL = SGF.getTypeLowering(SGF.getSILType(result)); auto temp = SGF.emitTemporary(Loc, outputTL); translateSingleInto(inputOrigType, inputSubstType, outputOrigType, outputSubstType, input, *temp); Outputs.push_back(temp->getManagedAddress()); } else { auto output = translatePrimitive(inputOrigType, inputSubstType, outputOrigType, outputSubstType, input); assert(output.getType() == SGF.getSILType(result)); if (output.getOwnershipKind() == ValueOwnershipKind::Unowned) { assert(!output.hasCleanup()); output = SGF.emitManagedRetain(Loc, output.getValue()); } if (output.getOwnershipKind() == ValueOwnershipKind::Owned) { output = SGF.emitManagedBeginBorrow(Loc, output.getValue()); } Outputs.push_back(output); } return; } case ParameterConvention::Indirect_InoutAliasable: { llvm_unreachable("abstraction difference in aliasable argument not " "allowed"); } } llvm_unreachable("Covered switch isn't covered?!"); } /// Translate a single value and initialize the given temporary with it. void translateSingleInto(AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, ManagedValue input, TemporaryInitialization &temp) { auto output = translatePrimitive(inputOrigType, inputSubstType, outputOrigType, outputSubstType, input, SGFContext(&temp)); forceInto(output, temp); } /// Apply primitive translation to the given value. ManagedValue translatePrimitive(AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, ManagedValue input, SGFContext context = SGFContext()) { return SGF.emitTransformedValue(Loc, input, inputOrigType, inputSubstType, outputOrigType, outputSubstType, context); } /// Force the given result into the given initialization. void forceInto(ManagedValue result, TemporaryInitialization &temp) { emitForceInto(SGF, Loc, result, temp); } ManagedValue claimNextInput() { return claimNext(Inputs); } SILParameterInfo claimNextOutputType() { return claimNext(OutputTypes); } }; } // end anonymous namespace /// Forward arguments according to a function type's ownership conventions. 
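/// Consumed parameters transfer their cleanup to the callee, guaranteed
/// parameters are passed as fresh borrows, and everything else is passed
/// at +0.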
static void forwardFunctionArguments(SILGenFunction &gen, SILLocation loc, CanSILFunctionType fTy, ArrayRef<ManagedValue> managedArgs, SmallVectorImpl<SILValue> &forwardedArgs) { auto argTypes = fTy->getParameters(); for (auto index : indices(managedArgs)) { auto &arg = managedArgs[index]; auto argTy = argTypes[index]; if (argTy.isConsumed()) { forwardedArgs.push_back(arg.forward(gen)); continue; } if (argTy.getConvention() == ParameterConvention::Direct_Guaranteed) { forwardedArgs.push_back( gen.emitManagedBeginBorrow(loc, arg.getValue()).getValue()); continue; } forwardedArgs.push_back(arg.getValue()); } } namespace { /// A helper class to translate the inner results to the outer results. /// /// Creating a result-translation plan involves three basic things: /// - building SILArguments for each of the outer indirect results /// - building a list of SILValues for each of the inner indirect results /// - building a list of Operations to perform which will reabstract /// the inner results to match the outer. class ResultPlanner { SILGenFunction &Gen; SILLocation Loc; /// A single result-translation operation. struct Operation { enum Kind { /// Take the last N direct outer results, tuple them, and make that a /// new direct outer result. /// /// Valid: NumElements, OuterResult TupleDirect, /// Take the last direct outer result, inject it into an optional /// type, and make that a new direct outer result. /// /// Valid: SomeDecl, OuterResult InjectOptionalDirect, /// Finish building an optional Some in the given address. /// /// Valid: SomeDecl, OuterResultAddr InjectOptionalIndirect, /// Take the next direct inner result and just make it a direct /// outer result. /// /// Valid: InnerResult, OuterResult. DirectToDirect, /// Take the next direct inner result and store it into an /// outer result address. /// /// Valid: InnerDirect, OuterResultAddr. DirectToIndirect, /// Take from an indirect inner result and make it the next outer /// direct result. /// /// Valid: InnerResultAddr, OuterResult. IndirectToDirect, /// Take from an indirect inner result into an outer indirect result. /// /// Valid: InnerResultAddr, OuterResultAddr. IndirectToIndirect, /// Take a value out of the source inner result address, reabstract /// it, and initialize the destination outer result address. /// /// Valid: reabstraction info, InnerAddress, OuterAddress. ReabstractIndirectToIndirect, /// Take a value out of the source inner result address, reabstract /// it, and add it as the next direct outer result. /// /// Valid: reabstraction info, InnerAddress, OuterResult. ReabstractIndirectToDirect, /// Take the next direct inner result, reabstract it, and initialize /// the destination outer result address. /// /// Valid: reabstraction info, InnerResult, OuterAddress. ReabstractDirectToIndirect, /// Take the next direct inner result, reabstract it, and add it as /// the next direct outer result. /// /// Valid: reabstraction info, InnerResult, OuterResult. ReabstractDirectToDirect, }; Operation(Kind kind) : TheKind(kind) {} Kind TheKind; // Reabstraction information. Only valid for reabstraction kinds. 
AbstractionPattern InnerOrigType = AbstractionPattern::getInvalid(); AbstractionPattern OuterOrigType = AbstractionPattern::getInvalid(); CanType InnerSubstType, OuterSubstType; union { SILValue InnerResultAddr; SILResultInfo InnerResult; unsigned NumElements; EnumElementDecl *SomeDecl; }; union { SILValue OuterResultAddr; SILResultInfo OuterResult; }; }; struct PlanData { ArrayRef<SILResultInfo> OuterResults; ArrayRef<SILResultInfo> InnerResults; SmallVectorImpl<SILValue> &InnerIndirectResultAddrs; size_t NextOuterIndirectResultIndex; }; SmallVector<Operation, 8> Operations; public: ResultPlanner(SILGenFunction &gen, SILLocation loc) : Gen(gen), Loc(loc) {} void plan(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, CanSILFunctionType innerFnType, CanSILFunctionType outerFnType, SmallVectorImpl<SILValue> &innerIndirectResultAddrs) { // Assert that the indirect results are set up like we expect. assert(innerIndirectResultAddrs.empty()); assert(Gen.F.begin()->args_size() >= SILFunctionConventions(outerFnType, Gen.SGM.M) .getNumIndirectSILResults()); innerIndirectResultAddrs.reserve( SILFunctionConventions(innerFnType, Gen.SGM.M) .getNumIndirectSILResults()); PlanData data = {outerFnType->getResults(), innerFnType->getResults(), innerIndirectResultAddrs, 0}; // Recursively walk the result types. plan(innerOrigType, innerSubstType, outerOrigType, outerSubstType, data); // Assert that we consumed and produced all the indirect result // information we needed. assert(data.OuterResults.empty()); assert(data.InnerResults.empty()); assert(data.InnerIndirectResultAddrs.size() == SILFunctionConventions(innerFnType, Gen.SGM.M) .getNumIndirectSILResults()); assert(data.NextOuterIndirectResultIndex == SILFunctionConventions(outerFnType, Gen.SGM.M) .getNumIndirectSILResults()); } SILValue execute(SILValue innerResult); private: void execute(ArrayRef<SILValue> innerDirectResults, SmallVectorImpl<SILValue> &outerDirectResults); void executeInnerTuple(SILValue innerElement, SmallVector<SILValue, 4> &innerDirectResults); void plan(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData); void planIntoIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue outerResultAddr); void planTupleIntoIndirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue outerResultAddr); void planScalarIntoIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo innerResult, SILValue outerResultAddr); void planIntoDirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo outerResult); void planScalarIntoDirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo innerResult, SILResultInfo outerResult); void planTupleIntoDirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo outerResult); void planFromIndirectResult(AbstractionPattern innerOrigType, CanType 
innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue innerResultAddr); void planTupleFromIndirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanTupleType outerSubstType, PlanData &planData, SILValue innerResultAddr); void planTupleFromDirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanTupleType outerSubstType, PlanData &planData, SILResultInfo innerResult); void planScalarFromIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILValue innerResultAddr, SILResultInfo outerResult, SILValue optOuterResultAddr); /// Claim the next inner result from the plan data. SILResultInfo claimNextInnerResult(PlanData &data) { return claimNext(data.InnerResults); } /// Claim the next outer result from the plan data. If it's indirect, /// grab its SILArgument. std::pair<SILResultInfo, SILValue> claimNextOuterResult(PlanData &data) { SILResultInfo result = claimNext(data.OuterResults); SILValue resultAddr; if (Gen.silConv.isSILIndirect(result)) { resultAddr = Gen.F.begin()->getArgument(data.NextOuterIndirectResultIndex++); } return { result, resultAddr }; } /// Create a temporary address suitable for passing to the given inner /// indirect result and add it as an inner indirect result. SILValue addInnerIndirectResultTemporary(PlanData &data, SILResultInfo innerResult) { assert(Gen.silConv.isSILIndirect(innerResult) || !Gen.silConv.useLoweredAddresses()); auto temporary = Gen.emitTemporaryAllocation(Loc, Gen.getSILType(innerResult)); data.InnerIndirectResultAddrs.push_back(temporary); return temporary; } /// Cause the next inner indirect result to be emitted directly into /// the given outer result address. void addInPlace(PlanData &data, SILValue outerResultAddr) { data.InnerIndirectResultAddrs.push_back(outerResultAddr); // Does not require an Operation. 
} Operation &addOperation(Operation::Kind kind) { Operations.emplace_back(kind); return Operations.back(); } void addDirectToDirect(SILResultInfo innerResult, SILResultInfo outerResult) { auto &op = addOperation(Operation::DirectToDirect); op.InnerResult = innerResult; op.OuterResult = outerResult; } void addDirectToIndirect(SILResultInfo innerResult, SILValue outerResultAddr) { auto &op = addOperation(Operation::DirectToIndirect); op.InnerResult = innerResult; op.OuterResultAddr = outerResultAddr; } void addIndirectToDirect(SILValue innerResultAddr, SILResultInfo outerResult) { auto &op = addOperation(Operation::IndirectToDirect); op.InnerResultAddr = innerResultAddr; op.OuterResult = outerResult; } void addIndirectToIndirect(SILValue innerResultAddr, SILValue outerResultAddr) { auto &op = addOperation(Operation::IndirectToIndirect); op.InnerResultAddr = innerResultAddr; op.OuterResultAddr = outerResultAddr; } void addTupleDirect(unsigned numElements, SILResultInfo outerResult) { auto &op = addOperation(Operation::TupleDirect); op.NumElements = numElements; op.OuterResult = outerResult; } void addInjectOptionalDirect(EnumElementDecl *someDecl, SILResultInfo outerResult) { auto &op = addOperation(Operation::InjectOptionalDirect); op.SomeDecl = someDecl; op.OuterResult = outerResult; } void addInjectOptionalIndirect(EnumElementDecl *someDecl, SILValue outerResultAddr) { auto &op = addOperation(Operation::InjectOptionalIndirect); op.SomeDecl = someDecl; op.OuterResultAddr = outerResultAddr; } void addReabstractDirectToDirect(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILResultInfo innerResult, SILResultInfo outerResult) { auto &op = addOperation(Operation::ReabstractDirectToDirect); op.InnerResult = innerResult; op.OuterResult = outerResult; op.InnerOrigType = innerOrigType; op.InnerSubstType = innerSubstType; op.OuterOrigType = outerOrigType; op.OuterSubstType = outerSubstType; } void addReabstractDirectToIndirect(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILResultInfo innerResult, SILValue outerResultAddr) { auto &op = addOperation(Operation::ReabstractDirectToIndirect); op.InnerResult = innerResult; op.OuterResultAddr = outerResultAddr; op.InnerOrigType = innerOrigType; op.InnerSubstType = innerSubstType; op.OuterOrigType = outerOrigType; op.OuterSubstType = outerSubstType; } void addReabstractIndirectToDirect(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILValue innerResultAddr, SILResultInfo outerResult) { auto &op = addOperation(Operation::ReabstractIndirectToDirect); op.InnerResultAddr = innerResultAddr; op.OuterResult = outerResult; op.InnerOrigType = innerOrigType; op.InnerSubstType = innerSubstType; op.OuterOrigType = outerOrigType; op.OuterSubstType = outerSubstType; } void addReabstractIndirectToIndirect(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILValue innerResultAddr, SILValue outerResultAddr) { auto &op = addOperation(Operation::ReabstractIndirectToIndirect); op.InnerResultAddr = innerResultAddr; op.OuterResultAddr = outerResultAddr; op.InnerOrigType = innerOrigType; op.InnerSubstType = innerSubstType; op.OuterOrigType = outerOrigType; op.OuterSubstType = outerSubstType; } }; } // end anonymous namespace /// Plan the reabstraction of a call result. 
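///
/// Planning only records Operations describing where each inner result goes
/// and which reabstractions are needed (allocating any temporaries required
/// for indirect inner results along the way); execute() then performs those
/// Operations once the inner call has produced its results.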
void ResultPlanner::plan(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData) { // The substituted types must match up in tuple-ness and arity. assert(isa<TupleType>(innerSubstType) == isa<TupleType>(outerSubstType) || (isa<TupleType>(innerSubstType) && (outerSubstType->isAny() || outerSubstType->getAnyOptionalObjectType()))); assert(!isa<TupleType>(outerSubstType) || cast<TupleType>(innerSubstType)->getNumElements() == cast<TupleType>(outerSubstType)->getNumElements()); // If the inner abstraction pattern is a tuple, that result will be expanded. if (innerOrigType.isTuple()) { auto innerSubstTupleType = cast<TupleType>(innerSubstType); // If the outer abstraction pattern is also a tuple, that result will also // be expanded, in parallel with the inner pattern. if (outerOrigType.isTuple()) { auto outerSubstTupleType = cast<TupleType>(outerSubstType); assert(innerSubstTupleType->getNumElements() == outerSubstTupleType->getNumElements()); // Otherwise, recursively descend into the tuples. for (auto eltIndex : indices(innerSubstTupleType.getElementTypes())) { plan(innerOrigType.getTupleElementType(eltIndex), innerSubstTupleType.getElementType(eltIndex), outerOrigType.getTupleElementType(eltIndex), outerSubstTupleType.getElementType(eltIndex), planData); } return; } // Otherwise, the next outer result must be either opaque or optional. // In either case, it corresponds to a single result. auto outerResult = claimNextOuterResult(planData); // Base the plan on whether the single result is direct or indirect. if (Gen.silConv.isSILIndirect(outerResult.first)) { assert(outerResult.second); planTupleIntoIndirectResult(innerOrigType, innerSubstTupleType, outerOrigType, outerSubstType, planData, outerResult.second); } else { planTupleIntoDirectResult(innerOrigType, innerSubstTupleType, outerOrigType, outerSubstType, planData, outerResult.first); } return; } // Otherwise, the inner pattern is a scalar; claim the next inner result. SILResultInfo innerResult = claimNextInnerResult(planData); assert((!outerOrigType.isTuple() || innerResult.isFormalIndirect()) && "outer pattern is a tuple, inner pattern is not, but inner result is " "not indirect?"); // If the inner result is a tuple, we need to expand from a temporary. if (innerResult.isFormalIndirect() && outerOrigType.isTuple()) { if (Gen.silConv.isSILIndirect(innerResult)) { SILValue innerResultAddr = addInnerIndirectResultTemporary(planData, innerResult); planTupleFromIndirectResult( innerOrigType, cast<TupleType>(innerSubstType), outerOrigType, cast<TupleType>(outerSubstType), planData, innerResultAddr); } else { assert(!Gen.silConv.useLoweredAddresses() && "Formal Indirect Results that are not SIL Indirect are only " "allowed in opaque values mode"); planTupleFromDirectResult(innerOrigType, cast<TupleType>(innerSubstType), outerOrigType, cast<TupleType>(outerSubstType), planData, innerResult); } return; } // Otherwise, the outer pattern is a scalar; claim the next outer result. auto outerResult = claimNextOuterResult(planData); // If the outer result is indirect, plan to emit into that. 
if (Gen.silConv.isSILIndirect(outerResult.first)) { assert(outerResult.second); planScalarIntoIndirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, planData, innerResult, outerResult.second); } else { planScalarIntoDirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, planData, innerResult, outerResult.first); } } /// Plan the emission of a call result into an outer result address. void ResultPlanner::planIntoIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue outerResultAddr) { // outerOrigType can be a tuple if we're also injecting into an optional. // If the inner pattern is a tuple, expand it. if (innerOrigType.isTuple()) { planTupleIntoIndirectResult(innerOrigType, cast<TupleType>(innerSubstType), outerOrigType, outerSubstType, planData, outerResultAddr); // Otherwise, it's scalar. } else { // Claim the next inner result. SILResultInfo innerResult = claimNextInnerResult(planData); planScalarIntoIndirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, planData, innerResult, outerResultAddr); } } /// Plan the emission of a call result into an outer result address, /// given that the inner abstraction pattern is a tuple. void ResultPlanner::planTupleIntoIndirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue outerResultAddr) { assert(innerOrigType.isTuple()); // outerOrigType can be a tuple if we're doing something like // injecting into an optional tuple. auto outerSubstTupleType = dyn_cast<TupleType>(outerSubstType); // If the outer type is not a tuple, it must be optional. if (!outerSubstTupleType) { // Figure out what kind of optional it is. CanType outerSubstObjectType = outerSubstType.getAnyOptionalObjectType(); if (outerSubstObjectType) { auto someDecl = Gen.getASTContext().getOptionalSomeDecl(); // Prepare the value slot in the optional value. SILType outerObjectType = outerResultAddr->getType().getAnyOptionalObjectType(); SILValue outerObjectResultAddr = Gen.B.createInitEnumDataAddr(Loc, outerResultAddr, someDecl, outerObjectType); // Emit into that address. planTupleIntoIndirectResult(innerOrigType, innerSubstType, outerOrigType.getAnyOptionalObjectType(), outerSubstObjectType, planData, outerObjectResultAddr); // Add an operation to finish the enum initialization. addInjectOptionalIndirect(someDecl, outerResultAddr); return; } assert(outerSubstType->isAny()); // Prepare the value slot in the existential. auto opaque = AbstractionPattern::getOpaque(); SILValue outerConcreteResultAddr = Gen.B.createInitExistentialAddr(Loc, outerResultAddr, innerSubstType, Gen.getLoweredType(opaque, innerSubstType), /*conformances=*/{}); // Emit into that address. planTupleIntoIndirectResult(innerOrigType, innerSubstType, innerOrigType, innerSubstType, planData, outerConcreteResultAddr); return; } assert(innerSubstType->getNumElements() == outerSubstTupleType->getNumElements()); for (auto eltIndex : indices(innerSubstType.getElementTypes())) { // Project the address of the element. SILValue outerEltResultAddr = Gen.B.createTupleElementAddr(Loc, outerResultAddr, eltIndex); // Plan to emit into that location. 
planIntoIndirectResult(innerOrigType.getTupleElementType(eltIndex), innerSubstType.getElementType(eltIndex), outerOrigType.getTupleElementType(eltIndex), outerSubstTupleType.getElementType(eltIndex), planData, outerEltResultAddr); } } /// Plan the emission of a call result as a single outer direct result. void ResultPlanner::planIntoDirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo outerResult) { assert(!outerOrigType.isTuple() || !Gen.silConv.useLoweredAddresses()); // If the inner pattern is a tuple, expand it. if (innerOrigType.isTuple()) { planTupleIntoDirectResult(innerOrigType, cast<TupleType>(innerSubstType), outerOrigType, outerSubstType, planData, outerResult); // Otherwise, it's scalar. } else { // Claim the next inner result. SILResultInfo innerResult = claimNextInnerResult(planData); planScalarIntoDirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, planData, innerResult, outerResult); } } /// Plan the emission of a call result as a single outer direct result, /// given that the inner abstraction pattern is a tuple. void ResultPlanner::planTupleIntoDirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo outerResult) { assert(innerOrigType.isTuple()); auto outerSubstTupleType = dyn_cast<TupleType>(outerSubstType); // If the outer type is not a tuple, it must be optional or we are under // opaque value mode if (!outerSubstTupleType) { CanType outerSubstObjectType = outerSubstType.getAnyOptionalObjectType(); if (outerSubstObjectType) { auto someDecl = Gen.getASTContext().getOptionalSomeDecl(); SILType outerObjectType = Gen.getSILType(outerResult).getAnyOptionalObjectType(); SILResultInfo outerObjectResult(outerObjectType.getSwiftRValueType(), outerResult.getConvention()); // Plan to leave the tuple elements as a single direct outer result. planTupleIntoDirectResult(innerOrigType, innerSubstType, outerOrigType.getAnyOptionalObjectType(), outerSubstObjectType, planData, outerObjectResult); // Take that result and inject it into an optional. addInjectOptionalDirect(someDecl, outerResult); return; } else { assert(!Gen.silConv.useLoweredAddresses() && "inner type was a tuple but outer type was neither a tuple nor " "optional nor are we under opaque value mode"); assert(outerSubstType->isAny()); auto opaque = AbstractionPattern::getOpaque(); auto anyType = Gen.getLoweredType(opaque, outerSubstType); auto outerResultAddr = Gen.emitTemporaryAllocation(Loc, anyType); SILValue outerConcreteResultAddr = Gen.B.createInitExistentialAddr( Loc, outerResultAddr, innerSubstType, Gen.getLoweredType(opaque, innerSubstType), /*conformances=*/{}); planTupleIntoIndirectResult(innerOrigType, innerSubstType, innerOrigType, innerSubstType, planData, outerConcreteResultAddr); addReabstractIndirectToDirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, outerConcreteResultAddr, outerResult); return; } } // Otherwise, the outer type is a tuple. assert(innerSubstType->getNumElements() == outerSubstTupleType->getNumElements()); // Create direct outer results for each of the elements. 
for (auto eltIndex : indices(innerSubstType.getElementTypes())) { auto outerEltType = Gen.getSILType(outerResult).getTupleElementType(eltIndex); SILResultInfo outerEltResult(outerEltType.getSwiftRValueType(), outerResult.getConvention()); planIntoDirectResult(innerOrigType.getTupleElementType(eltIndex), innerSubstType.getElementType(eltIndex), outerOrigType.getTupleElementType(eltIndex), outerSubstTupleType.getElementType(eltIndex), planData, outerEltResult); } // Bind them together into a single tuple. addTupleDirect(innerSubstType->getNumElements(), outerResult); } /// Plan the emission of a call result as a single outer direct result, /// given that the inner abstraction pattern is not a tuple. void ResultPlanner::planScalarIntoDirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo innerResult, SILResultInfo outerResult) { assert(!innerOrigType.isTuple()); assert(!outerOrigType.isTuple()); // If the inner result is indirect, plan to emit from that. if (Gen.silConv.isSILIndirect(innerResult)) { SILValue innerResultAddr = addInnerIndirectResultTemporary(planData, innerResult); planScalarFromIndirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResultAddr, outerResult, SILValue()); return; } // Otherwise, we have two direct results. // If there's no abstraction difference, it's just returned directly. if (Gen.getSILType(innerResult) == Gen.getSILType(outerResult)) { addDirectToDirect(innerResult, outerResult); // Otherwise, we need to reabstract. } else { addReabstractDirectToDirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResult, outerResult); } } /// Plan the emission of a call result into an outer result address, /// given that the inner abstraction pattern is not a tuple. void ResultPlanner::planScalarIntoIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILResultInfo innerResult, SILValue outerResultAddr) { assert(!innerOrigType.isTuple()); assert(!outerOrigType.isTuple()); bool hasAbstractionDifference = (innerResult.getType() != outerResultAddr->getType().getSwiftRValueType()); // If the inner result is indirect, we need some memory to emit it into. if (Gen.silConv.isSILIndirect(innerResult)) { // If there's no abstraction difference, that can just be // in-place into the outer result address. if (!hasAbstractionDifference) { addInPlace(planData, outerResultAddr); // Otherwise, we'll need a temporary. } else { SILValue innerResultAddr = addInnerIndirectResultTemporary(planData, innerResult); addReabstractIndirectToIndirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResultAddr, outerResultAddr); } // Otherwise, the inner result is direct. } else { // If there's no abstraction difference, we just need to store. if (!hasAbstractionDifference) { addDirectToIndirect(innerResult, outerResultAddr); // Otherwise, we need to reabstract and store. } else { addReabstractDirectToIndirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResult, outerResultAddr); } } } /// Plan the emission of a call result from an inner result address. 
void ResultPlanner::planFromIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, PlanData &planData, SILValue innerResultAddr) { assert(!innerOrigType.isTuple()); if (outerOrigType.isTuple()) { planTupleFromIndirectResult(innerOrigType, cast<TupleType>(innerSubstType), outerOrigType, cast<TupleType>(outerSubstType), planData, innerResultAddr); } else { auto outerResult = claimNextOuterResult(planData); planScalarFromIndirectResult(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResultAddr, outerResult.first, outerResult.second); } } /// Plan the emission of a call result from an inner result address, given /// that the outer abstraction pattern is a tuple. void ResultPlanner::planTupleFromIndirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanTupleType outerSubstType, PlanData &planData, SILValue innerResultAddr) { assert(!innerOrigType.isTuple()); assert(innerSubstType->getNumElements() == outerSubstType->getNumElements()); assert(outerOrigType.isTuple()); for (auto eltIndex : indices(innerSubstType.getElementTypes())) { // Project the address of the element. SILValue innerEltResultAddr = Gen.B.createTupleElementAddr(Loc, innerResultAddr, eltIndex); // Plan to expand from that location. planFromIndirectResult(innerOrigType.getTupleElementType(eltIndex), innerSubstType.getElementType(eltIndex), outerOrigType.getTupleElementType(eltIndex), outerSubstType.getElementType(eltIndex), planData, innerEltResultAddr); } } void ResultPlanner::planTupleFromDirectResult(AbstractionPattern innerOrigType, CanTupleType innerSubstType, AbstractionPattern outerOrigType, CanTupleType outerSubstType, PlanData &planData, SILResultInfo innerResult) { assert(!innerOrigType.isTuple()); auto outerSubstTupleType = dyn_cast<TupleType>(outerSubstType); assert(outerSubstTupleType && "Outer type must be a tuple"); assert(innerSubstType->getNumElements() == outerSubstTupleType->getNumElements()); // Create direct outer results for each of the elements. for (auto eltIndex : indices(innerSubstType.getElementTypes())) { AbstractionPattern newOuterOrigType = outerOrigType.getTupleElementType(eltIndex); AbstractionPattern newInnerOrigType = innerOrigType.getTupleElementType(eltIndex); if (newOuterOrigType.isTuple()) { planTupleFromDirectResult( newInnerOrigType, cast<TupleType>(innerSubstType.getElementType(eltIndex)), newOuterOrigType, cast<TupleType>(outerSubstTupleType.getElementType(eltIndex)), planData, innerResult); continue; } auto outerResult = claimNextOuterResult(planData); auto elemType = outerSubstTupleType.getElementType(eltIndex); SILResultInfo eltResult(elemType, outerResult.first.getConvention()); planScalarIntoDirectResult( newInnerOrigType, innerSubstType.getElementType(eltIndex), newOuterOrigType, outerSubstTupleType.getElementType(eltIndex), planData, eltResult, outerResult.first); } } /// Plan the emission of a call result from an inner result address, /// given that the outer abstraction pattern is not a tuple. 
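///
/// optOuterResultAddr is non-null exactly when the outer result is indirect;
/// depending on whether the lowered types match, the value is either
/// forwarded as-is or reabstracted out of the inner result address.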
void ResultPlanner::planScalarFromIndirectResult(AbstractionPattern innerOrigType, CanType innerSubstType, AbstractionPattern outerOrigType, CanType outerSubstType, SILValue innerResultAddr, SILResultInfo outerResult, SILValue optOuterResultAddr) { assert(!innerOrigType.isTuple()); assert(!outerOrigType.isTuple()); assert(Gen.silConv.isSILIndirect(outerResult) == bool(optOuterResultAddr)); bool hasAbstractionDifference = (innerResultAddr->getType().getSwiftRValueType() != outerResult.getType()); // The outer result can be indirect, and it doesn't necessarily have an // abstraction difference. Note that we should only end up in this path // in cases where simply forwarding the outer result address wasn't possible. if (Gen.silConv.isSILIndirect(outerResult)) { assert(optOuterResultAddr); if (!hasAbstractionDifference) { addIndirectToIndirect(innerResultAddr, optOuterResultAddr); } else { addReabstractIndirectToIndirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResultAddr, optOuterResultAddr); } } else { if (!hasAbstractionDifference) { addIndirectToDirect(innerResultAddr, outerResult); } else { addReabstractIndirectToDirect(innerOrigType, innerSubstType, outerOrigType, outerSubstType, innerResultAddr, outerResult); } } } void ResultPlanner::executeInnerTuple( SILValue innerElement, SmallVector<SILValue, 4> &innerDirectResults) { auto innerTupleType = innerElement->getType().getAs<TupleType>(); assert(innerTupleType && "Only supports tuple inner types"); ManagedValue ownedInnerResult = Gen.emitManagedRValueWithCleanup(innerElement); // Then borrow the managed direct result. ManagedValue borrowedInnerResult = ownedInnerResult.borrow(Gen, Loc); for (unsigned i : indices(innerTupleType.getElementTypes())) { ManagedValue elt = Gen.B.createTupleExtract(Loc, borrowedInnerResult, i); auto eltType = elt.getType(); if (eltType.is<TupleType>()) { executeInnerTuple(elt.getValue(), innerDirectResults); continue; } innerDirectResults.push_back(elt.copyUnmanaged(Gen, Loc).forward(Gen)); } } SILValue ResultPlanner::execute(SILValue innerResult) { // The code emission here assumes that we don't need to have // active cleanups for all the result values we're not actively // transforming. In other words, it's not "exception-safe". // Explode the inner direct results. SmallVector<SILValue, 4> innerDirectResults; auto innerResultTupleType = innerResult->getType().getAs<TupleType>(); if (!innerResultTupleType) { innerDirectResults.push_back(innerResult); } else { { Scope S(Gen.Cleanups, CleanupLocation::get(Loc)); // First create an rvalue cleanup for our direct result. assert(innerResult.getOwnershipKind() == ValueOwnershipKind::Owned || innerResult.getOwnershipKind() == ValueOwnershipKind::Trivial); executeInnerTuple(innerResult, innerDirectResults); // Then allow the cleanups to be emitted in the proper reverse order. } } // Translate the result values. SmallVector<SILValue, 4> outerDirectResults; execute(innerDirectResults, outerDirectResults); // Implode the outer direct results. SILValue outerResult; if (outerDirectResults.size() == 1) { outerResult = outerDirectResults[0]; } else { outerResult = Gen.B.createTuple(Loc, outerDirectResults); } return outerResult; } void ResultPlanner::execute(ArrayRef<SILValue> innerDirectResults, SmallVectorImpl<SILValue> &outerDirectResults) { // A helper function to claim an inner direct result. 
auto claimNextInnerDirectResult = [&](SILResultInfo result) -> ManagedValue { auto resultValue = claimNext(innerDirectResults); assert(resultValue->getType() == Gen.getSILType(result)); auto &resultTL = Gen.getTypeLowering(result.getType()); switch (result.getConvention()) { case ResultConvention::Indirect: assert(!Gen.silConv.isSILIndirect(result) && "claiming indirect result as direct!"); LLVM_FALLTHROUGH; case ResultConvention::Owned: case ResultConvention::Autoreleased: return Gen.emitManagedRValueWithCleanup(resultValue, resultTL); case ResultConvention::UnownedInnerPointer: // FIXME: We can't reasonably lifetime-extend an inner-pointer result // through a thunk. We don't know which parameter to the thunk was // originally 'self'. Gen.SGM.diagnose(Loc.getSourceLoc(), diag::not_implemented, "reabstraction of returns_inner_pointer function"); LLVM_FALLTHROUGH; case ResultConvention::Unowned: return Gen.emitManagedRetain(Loc, resultValue, resultTL); } llvm_unreachable("bad result convention!"); }; // A helper function to add an outer direct result. auto addOuterDirectResult = [&](ManagedValue resultValue, SILResultInfo result) { assert(resultValue.getType() == Gen.F.mapTypeIntoContext(Gen.getSILType(result))); outerDirectResults.push_back(resultValue.forward(Gen)); }; auto emitReabstract = [&](Operation &op, bool innerIsIndirect, bool outerIsIndirect) { // Set up the inner result. ManagedValue innerResult; if (innerIsIndirect) { innerResult = Gen.emitManagedBufferWithCleanup(op.InnerResultAddr); } else { innerResult = claimNextInnerDirectResult(op.InnerResult); } // Set up the context into which to emit the outer result. SGFContext outerResultCtxt; Optional<TemporaryInitialization> outerResultInit; if (outerIsIndirect) { outerResultInit.emplace(op.OuterResultAddr, CleanupHandle::invalid()); outerResultCtxt = SGFContext(&*outerResultInit); } // Perform the translation. auto translated = Gen.emitTransformedValue(Loc, innerResult, op.InnerOrigType, op.InnerSubstType, op.OuterOrigType, op.OuterSubstType, outerResultCtxt); // If the outer is indirect, force it into the context. if (outerIsIndirect) { if (!translated.isInContext()) { translated.forwardInto(Gen, Loc, op.OuterResultAddr); } // Otherwise, it's a direct result. } else { addOuterDirectResult(translated, op.OuterResult); } }; // Execute each operation. for (auto &op : Operations) { switch (op.TheKind) { case Operation::DirectToDirect: { auto result = claimNextInnerDirectResult(op.InnerResult); addOuterDirectResult(result, op.OuterResult); continue; } case Operation::DirectToIndirect: { auto result = claimNextInnerDirectResult(op.InnerResult); Gen.B.emitStoreValueOperation(Loc, result.forward(Gen), op.OuterResultAddr, StoreOwnershipQualifier::Init); continue; } case Operation::IndirectToDirect: { auto resultAddr = op.InnerResultAddr; auto &resultTL = Gen.getTypeLowering(resultAddr->getType()); auto result = Gen.emitManagedRValueWithCleanup( resultTL.emitLoad(Gen.B, Loc, resultAddr, LoadOwnershipQualifier::Take), resultTL); addOuterDirectResult(result, op.OuterResult); continue; } case Operation::IndirectToIndirect: { // The type could be address-only; just take. 
Gen.B.createCopyAddr(Loc, op.InnerResultAddr, op.OuterResultAddr, IsTake, IsInitialization); continue; } case Operation::ReabstractIndirectToIndirect: emitReabstract(op, /*indirect source*/ true, /*indirect dest*/ true); continue; case Operation::ReabstractIndirectToDirect: emitReabstract(op, /*indirect source*/ true, /*indirect dest*/ false); continue; case Operation::ReabstractDirectToIndirect: emitReabstract(op, /*indirect source*/ false, /*indirect dest*/ true); continue; case Operation::ReabstractDirectToDirect: emitReabstract(op, /*indirect source*/ false, /*indirect dest*/ false); continue; case Operation::TupleDirect: { auto firstEltIndex = outerDirectResults.size() - op.NumElements; auto elts = makeArrayRef(outerDirectResults).slice(firstEltIndex); auto tupleType = Gen.F.mapTypeIntoContext(Gen.getSILType(op.OuterResult)); auto tuple = Gen.B.createTuple(Loc, tupleType, elts); outerDirectResults.resize(firstEltIndex); outerDirectResults.push_back(tuple); continue; } case Operation::InjectOptionalDirect: { SILValue value = outerDirectResults.pop_back_val(); auto tupleType = Gen.F.mapTypeIntoContext(Gen.getSILType(op.OuterResult)); SILValue optValue = Gen.B.createEnum(Loc, value, op.SomeDecl, tupleType); outerDirectResults.push_back(optValue); continue; } case Operation::InjectOptionalIndirect: Gen.B.createInjectEnumAddr(Loc, op.OuterResultAddr, op.SomeDecl); continue; } llvm_unreachable("bad operation kind"); } assert(innerDirectResults.empty() && "didn't consume all inner results?"); } /// Build the body of a transformation thunk. /// /// \param inputOrigType Abstraction pattern of function value being thunked /// \param inputSubstType Formal AST type of function value being thunked /// \param outputOrigType Abstraction pattern of the thunk /// \param outputSubstType Formal AST type of the thunk static void buildThunkBody(SILGenFunction &gen, SILLocation loc, AbstractionPattern inputOrigType, CanAnyFunctionType inputSubstType, AbstractionPattern outputOrigType, CanAnyFunctionType outputSubstType) { PrettyStackTraceSILFunction stackTrace("emitting reabstraction thunk in", &gen.F); auto thunkType = gen.F.getLoweredFunctionType(); FullExpr scope(gen.Cleanups, CleanupLocation::get(loc)); SmallVector<ManagedValue, 8> params; // TODO: Could accept +0 arguments here when forwardFunctionArguments/ // emitApply can. gen.collectThunkParams(loc, params, /*allowPlusZero*/ false); ManagedValue fnValue = params.pop_back_val(); auto fnType = fnValue.getType().castTo<SILFunctionType>(); assert(!fnType->isPolymorphic()); auto argTypes = fnType->getParameters(); // Translate the argument values. Function parameters are // contravariant: we want to switch the direction of transformation // on them by flipping inputOrigType and outputOrigType. // // For example, a transformation of (Int,Int)->Int to (T,T)->T is // one that should take an (Int,Int)->Int value and make it be // abstracted like a (T,T)->T value. This must be done with a thunk. // Within the thunk body, the result of calling the inner function // needs to be translated from Int to T (we receive a normal Int // and return it like a T), but the parameters are translated in the // other direction (the thunk receives an Int like a T, and passes it // like a normal Int when calling the inner function). 
SmallVector<ManagedValue, 8> args; TranslateArguments(gen, loc, params, args, argTypes) .translate(outputOrigType.getFunctionInputType(), outputSubstType.getInput(), inputOrigType.getFunctionInputType(), inputSubstType.getInput()); SmallVector<SILValue, 8> argValues; // Plan the results. This builds argument values for all the // inner indirect results. ResultPlanner resultPlanner(gen, loc); resultPlanner.plan(inputOrigType.getFunctionResultType(), inputSubstType.getResult(), outputOrigType.getFunctionResultType(), outputSubstType.getResult(), fnType, thunkType, argValues); // Add the rest of the arguments. forwardFunctionArguments(gen, loc, fnType, args, argValues); SILValue innerResult = gen.emitApplyWithRethrow(loc, fnValue.forward(gen), /*substFnType*/ fnValue.getType(), /*substitutions*/ {}, argValues); // Reabstract the result. SILValue outerResult = resultPlanner.execute(innerResult); scope.pop(); gen.B.createReturn(loc, outerResult); } /// Build a generic signature and environment for a re-abstraction thunk. /// /// Most thunks share the generic environment with their original function. /// The one exception is if the thunk type involves an open existential, /// in which case we "promote" the opened existential to a new generic parameter. /// /// \param gen - the parent function /// \param openedExistential - the opened existential to promote to a generic // parameter, if any /// \param inheritGenericSig - whether to inherit the generic signature from the /// parent function. /// \param genericEnv - the new generic environment /// \param contextSubs - map old archetypes to new archetypes /// \param interfaceSubs - map interface types to old archetypes static CanGenericSignature buildThunkSignature(SILGenFunction &gen, bool inheritGenericSig, ArchetypeType *openedExistential, GenericEnvironment *&genericEnv, SubstitutionMap &contextSubs, SubstitutionMap &interfaceSubs, ArchetypeType *&newArchetype) { auto *mod = gen.F.getModule().getSwiftModule(); auto &ctx = mod->getASTContext(); // If there's no opened existential, we just inherit the generic environment // from the parent function. if (openedExistential == nullptr) { auto genericSig = gen.F.getLoweredFunctionType()->getGenericSignature(); genericEnv = gen.F.getGenericEnvironment(); auto subsArray = gen.F.getForwardingSubstitutions(); interfaceSubs = genericSig->getSubstitutionMap(subsArray); contextSubs = interfaceSubs; return genericSig; } GenericSignatureBuilder builder(ctx, LookUpConformanceInModule(mod)); // Add the existing generic signature. int depth = 0; if (inheritGenericSig) { if (auto genericSig = gen.F.getLoweredFunctionType()->getGenericSignature()) { builder.addGenericSignature(genericSig); depth = genericSig->getGenericParams().back()->getDepth() + 1; } } // Add a new generic parameter to replace the opened existential. 
auto *newGenericParam = GenericTypeParamType::get(depth, 0, ctx); builder.addGenericParameter(newGenericParam); Requirement newRequirement(RequirementKind::Conformance, newGenericParam, openedExistential->getOpenedExistentialType()); auto source = GenericSignatureBuilder::FloatingRequirementSource::forAbstract(); builder.addRequirement(newRequirement, source, nullptr); GenericSignature *genericSig = builder.computeGenericSignature(SourceLoc(), /*allowConcreteGenericParams=*/true); genericEnv = genericSig->createGenericEnvironment(*mod); newArchetype = genericEnv->mapTypeIntoContext(newGenericParam) ->castTo<ArchetypeType>(); // Calculate substitutions to map the caller's archetypes to the thunk's // archetypes. if (auto calleeGenericSig = gen.F.getLoweredFunctionType() ->getGenericSignature()) { contextSubs = calleeGenericSig->getSubstitutionMap( [&](SubstitutableType *type) -> Type { return genericEnv->mapTypeIntoContext(type); }, MakeAbstractConformanceForGenericType()); } // Calculate substitutions to map interface types to the caller's archetypes. interfaceSubs = genericSig->getSubstitutionMap( [&](SubstitutableType *type) -> Type { if (type->isEqual(newGenericParam)) return openedExistential; return gen.F.mapTypeIntoContext(type); }, MakeAbstractConformanceForGenericType()); return genericSig->getCanonicalSignature(); } /// Build the type of a function transformation thunk. CanSILFunctionType SILGenFunction::buildThunkType( CanSILFunctionType &sourceType, CanSILFunctionType &expectedType, CanType &inputSubstType, CanType &outputSubstType, GenericEnvironment *&genericEnv, SubstitutionMap &interfaceSubs) { assert(!expectedType->isPolymorphic()); assert(!sourceType->isPolymorphic()); // Can't build a thunk without context, so we require ownership semantics // on the result type. assert(expectedType->getExtInfo().hasContext()); auto extInfo = expectedType->getExtInfo() .withRepresentation(SILFunctionType::Representation::Thin); // Does the thunk type involve archetypes other than opened existentials? bool hasArchetypes = false; // Does the thunk type involve an open existential type? CanArchetypeType openedExistential; auto archetypeVisitor = [&](CanType t) { if (auto archetypeTy = dyn_cast<ArchetypeType>(t)) { if (archetypeTy->getOpenedExistentialType()) { assert((openedExistential == CanArchetypeType() || openedExistential == archetypeTy) && "one too many open existentials"); openedExistential = archetypeTy; } else hasArchetypes = true; } }; // Use the generic signature from the context if the thunk involves // generic parameters. CanGenericSignature genericSig; SubstitutionMap contextSubs; ArchetypeType *newArchetype = nullptr; if (expectedType->hasArchetype() || sourceType->hasArchetype()) { expectedType.visit(archetypeVisitor); sourceType.visit(archetypeVisitor); genericSig = buildThunkSignature(*this, hasArchetypes, openedExistential, genericEnv, contextSubs, interfaceSubs, newArchetype); } // Utility function to apply contextSubs, and also replace the // opened existential with the new archetype. 
auto substIntoThunkContext = [&](CanType t) -> CanType { return t.subst( [&](SubstitutableType *type) -> Type { if (CanType(type) == openedExistential) return newArchetype; return Type(type).subst(contextSubs); }, LookUpConformanceInSubstitutionMap(contextSubs), SubstFlags::AllowLoweredTypes) ->getCanonicalType(); }; sourceType = cast<SILFunctionType>( substIntoThunkContext(sourceType)); expectedType = cast<SILFunctionType>( substIntoThunkContext(expectedType)); if (inputSubstType) { inputSubstType = cast<AnyFunctionType>( substIntoThunkContext(inputSubstType)); } if (outputSubstType) { outputSubstType = cast<AnyFunctionType>( substIntoThunkContext(outputSubstType)); } // If our parent function was pseudogeneric, this thunk must also be // pseudogeneric, since we have no way to pass generic parameters. if (genericSig) if (F.getLoweredFunctionType()->isPseudogeneric()) extInfo = extInfo.withIsPseudogeneric(); // Add the function type as the parameter. SmallVector<SILParameterInfo, 4> params; params.append(expectedType->getParameters().begin(), expectedType->getParameters().end()); params.push_back({sourceType, sourceType->getExtInfo().hasContext() ? DefaultThickCalleeConvention : ParameterConvention::Direct_Unowned}); auto &mod = *F.getModule().getSwiftModule(); auto getCanonicalType = [&](Type t) -> CanType { return t->getCanonicalType(genericSig, mod); }; // Map the parameter and expected types out of context to get the interface // type of the thunk. SmallVector<SILParameterInfo, 4> interfaceParams; interfaceParams.reserve(params.size()); for (auto &param : params) { auto paramIfaceTy = GenericEnvironment::mapTypeOutOfContext( genericEnv, param.getType()); interfaceParams.push_back( SILParameterInfo(getCanonicalType(paramIfaceTy), param.getConvention())); } SmallVector<SILResultInfo, 4> interfaceResults; for (auto &result : expectedType->getResults()) { auto resultIfaceTy = GenericEnvironment::mapTypeOutOfContext( genericEnv, result.getType()); auto interfaceResult = result.getWithType(getCanonicalType(resultIfaceTy)); interfaceResults.push_back(interfaceResult); } Optional<SILResultInfo> interfaceErrorResult; if (expectedType->hasErrorResult()) { auto errorResult = expectedType->getErrorResult(); auto errorIfaceTy = GenericEnvironment::mapTypeOutOfContext( genericEnv, errorResult.getType()); interfaceErrorResult = SILResultInfo( getCanonicalType(errorIfaceTy), expectedType->getErrorResult().getConvention()); } // The type of the thunk function. return SILFunctionType::get(genericSig, extInfo, ParameterConvention::Direct_Unowned, interfaceParams, interfaceResults, interfaceErrorResult, getASTContext()); } /// Create a reabstraction thunk. static ManagedValue createThunk(SILGenFunction &gen, SILLocation loc, ManagedValue fn, AbstractionPattern inputOrigType, CanAnyFunctionType inputSubstType, AbstractionPattern outputOrigType, CanAnyFunctionType outputSubstType, const TypeLowering &expectedTL) { auto sourceType = fn.getType().castTo<SILFunctionType>(); auto expectedType = expectedTL.getLoweredType().castTo<SILFunctionType>(); // We can't do bridging here. assert(expectedType->getLanguage() == fn.getType().castTo<SILFunctionType>()->getLanguage() && "bridging in re-abstraction thunk?"); // Declare the thunk. 
SubstitutionMap interfaceSubs; GenericEnvironment *genericEnv = nullptr; auto toType = expectedType; auto thunkType = gen.buildThunkType(sourceType, toType, inputSubstType, outputSubstType, genericEnv, interfaceSubs); auto thunk = gen.SGM.getOrCreateReabstractionThunk( genericEnv, thunkType, sourceType, toType, gen.F.isSerialized()); // Build it if necessary. if (thunk->empty()) { thunk->setGenericEnvironment(genericEnv); SILGenFunction thunkSGF(gen.SGM, *thunk); auto loc = RegularLocation::getAutoGeneratedLocation(); buildThunkBody(thunkSGF, loc, inputOrigType, inputSubstType, outputOrigType, outputSubstType); } CanSILFunctionType substFnType = thunkType; SmallVector<Substitution, 4> subs; if (auto genericSig = thunkType->getGenericSignature()) { genericSig->getSubstitutions(interfaceSubs, subs); substFnType = thunkType->substGenericArgs(gen.F.getModule(), interfaceSubs); } // Create it in our current function. auto thunkValue = gen.B.createFunctionRef(loc, thunk); auto thunkedFn = gen.B.createPartialApply(loc, thunkValue, SILType::getPrimitiveObjectType(substFnType), subs, fn.forward(gen), SILType::getPrimitiveObjectType(expectedType)); return gen.emitManagedRValueWithCleanup(thunkedFn, expectedTL); } ManagedValue Transform::transformFunction(ManagedValue fn, AbstractionPattern inputOrigType, CanAnyFunctionType inputSubstType, AbstractionPattern outputOrigType, CanAnyFunctionType outputSubstType, const TypeLowering &expectedTL) { assert(fn.getType().isObject() && "expected input to emitTransformedFunctionValue to be loaded"); auto expectedFnType = expectedTL.getLoweredType().castTo<SILFunctionType>(); auto fnType = fn.getType().castTo<SILFunctionType>(); assert(expectedFnType->getExtInfo().hasContext() || !fnType->getExtInfo().hasContext()); // If there's no abstraction difference, we're done. if (fnType == expectedFnType) { return fn; } // Check if we require a re-abstraction thunk. if (SGF.SGM.Types.checkForABIDifferences( SILType::getPrimitiveObjectType(fnType), SILType::getPrimitiveObjectType(expectedFnType)) == TypeConverter::ABIDifference::NeedsThunk) { assert(expectedFnType->getExtInfo().hasContext() && "conversion thunk will not be thin!"); return createThunk(SGF, Loc, fn, inputOrigType, inputSubstType, outputOrigType, outputSubstType, expectedTL); } // We do not, conversion is trivial. auto expectedEI = expectedFnType->getExtInfo(); auto newEI = expectedEI.withRepresentation(fnType->getRepresentation()); auto newFnType = adjustFunctionType(expectedFnType, newEI, fnType->getCalleeConvention()); // Apply any ABI-compatible conversions before doing thin-to-thick. if (fnType != newFnType) { SILType resTy = SILType::getPrimitiveObjectType(newFnType); fn = ManagedValue( SGF.B.createConvertFunction(Loc, fn.getValue(), resTy), fn.getCleanup()); } // Now do thin-to-thick if necessary. if (newFnType != expectedFnType) { assert(expectedEI.getRepresentation() == SILFunctionTypeRepresentation::Thick && "all other conversions should have been handled by " "FunctionConversionExpr"); SILType resTy = SILType::getPrimitiveObjectType(expectedFnType); fn = SGF.emitManagedRValueWithCleanup( SGF.B.createThinToThickFunction(Loc, fn.forward(SGF), resTy)); } return fn; } /// Given a value with the abstraction patterns of the original formal /// type, give it the abstraction patterns of the substituted formal type. 
ManagedValue SILGenFunction::emitOrigToSubstValue(SILLocation loc, ManagedValue v, AbstractionPattern origType, CanType substType, SGFContext ctxt) { return emitTransformedValue(loc, v, origType, substType, AbstractionPattern(substType), substType, ctxt); } /// Given a value with the abstraction patterns of the original formal /// type, give it the abstraction patterns of the substituted formal type. RValue SILGenFunction::emitOrigToSubstValue(SILLocation loc, RValue &&v, AbstractionPattern origType, CanType substType, SGFContext ctxt) { return emitTransformedValue(loc, std::move(v), origType, substType, AbstractionPattern(substType), substType, ctxt); } /// Given a value with the abstraction patterns of the substituted /// formal type, give it the abstraction patterns of the original /// formal type. ManagedValue SILGenFunction::emitSubstToOrigValue(SILLocation loc, ManagedValue v, AbstractionPattern origType, CanType substType, SGFContext ctxt) { return emitTransformedValue(loc, v, AbstractionPattern(substType), substType, origType, substType, ctxt); } /// Given a value with the abstraction patterns of the substituted /// formal type, give it the abstraction patterns of the original /// formal type. RValue SILGenFunction::emitSubstToOrigValue(SILLocation loc, RValue &&v, AbstractionPattern origType, CanType substType, SGFContext ctxt) { return emitTransformedValue(loc, std::move(v), AbstractionPattern(substType), substType, origType, substType, ctxt); } ManagedValue SILGenFunction::emitMaterializedRValueAsOrig(Expr *expr, AbstractionPattern origType) { // Create a temporary. auto &origTL = getTypeLowering(origType, expr->getType()); auto temporary = emitTemporary(expr, origTL); // Emit the reabstracted r-value. auto result = emitRValueAsOrig(expr, origType, origTL, SGFContext(temporary.get())); // Force the result into the temporary. 
if (!result.isInContext()) { temporary->copyOrInitValueInto(*this, expr, result, /*init*/ true); temporary->finishInitialization(*this); } return temporary->getManagedAddress(); } ManagedValue SILGenFunction::emitRValueAsOrig(Expr *expr, AbstractionPattern origPattern, const TypeLowering &origTL, SGFContext ctxt) { auto outputSubstType = expr->getType()->getCanonicalType(); auto &substTL = getTypeLowering(outputSubstType); if (substTL.getLoweredType() == origTL.getLoweredType()) return emitRValueAsSingleValue(expr, ctxt); ManagedValue temp = emitRValueAsSingleValue(expr); return emitSubstToOrigValue(expr, temp, origPattern, outputSubstType, ctxt); } ManagedValue SILGenFunction::emitTransformedValue(SILLocation loc, ManagedValue v, CanType inputType, CanType outputType, SGFContext ctxt) { return emitTransformedValue(loc, v, AbstractionPattern(inputType), inputType, AbstractionPattern(outputType), outputType); } ManagedValue SILGenFunction::emitTransformedValue(SILLocation loc, ManagedValue v, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt) { return Transform(*this, loc).transform(v, inputOrigType, inputSubstType, outputOrigType, outputSubstType, ctxt); } RValue SILGenFunction::emitTransformedValue(SILLocation loc, RValue &&v, AbstractionPattern inputOrigType, CanType inputSubstType, AbstractionPattern outputOrigType, CanType outputSubstType, SGFContext ctxt) { return Transform(*this, loc).transform(std::move(v), inputOrigType, inputSubstType, outputOrigType, outputSubstType, ctxt); } //===----------------------------------------------------------------------===// // vtable thunks //===----------------------------------------------------------------------===// void SILGenFunction::emitVTableThunk(SILDeclRef derived, SILFunction *implFn, AbstractionPattern inputOrigType, CanAnyFunctionType inputSubstType, CanAnyFunctionType outputSubstType) { auto fd = cast<AbstractFunctionDecl>(derived.getDecl()); SILLocation loc(fd); loc.markAutoGenerated(); CleanupLocation cleanupLoc(fd); cleanupLoc.markAutoGenerated(); Scope scope(Cleanups, cleanupLoc); auto fTy = implFn->getLoweredFunctionType(); SubstitutionList subs; if (auto *genericEnv = fd->getGenericEnvironment()) { F.setGenericEnvironment(genericEnv); subs = getForwardingSubstitutions(); fTy = fTy->substGenericArgs(SGM.M, subs); inputSubstType = cast<FunctionType>( cast<GenericFunctionType>(inputSubstType) ->substGenericArgs(subs)->getCanonicalType()); outputSubstType = cast<FunctionType>( cast<GenericFunctionType>(outputSubstType) ->substGenericArgs(subs)->getCanonicalType()); } // Emit the indirect return and arguments. auto thunkTy = F.getLoweredFunctionType(); SmallVector<ManagedValue, 8> thunkArgs; collectThunkParams(loc, thunkArgs, /*allowPlusZero*/ true); SmallVector<ManagedValue, 8> substArgs; AbstractionPattern outputOrigType(outputSubstType); // Reabstract the arguments. TranslateArguments(*this, loc, thunkArgs, substArgs, fTy->getParameters()) .translate(inputOrigType.getFunctionInputType(), inputSubstType.getInput(), outputOrigType.getFunctionInputType(), outputSubstType.getInput()); // Collect the arguments to the implementation. SmallVector<SILValue, 8> args; // First, indirect results. ResultPlanner resultPlanner(*this, loc); resultPlanner.plan(outputOrigType.getFunctionResultType(), outputSubstType.getResult(), inputOrigType.getFunctionResultType(), inputSubstType.getResult(), fTy, thunkTy, args); // Then, the arguments. 
forwardFunctionArguments(*this, loc, fTy, substArgs, args); // Create the call. auto implRef = B.createFunctionRef(loc, implFn); SILValue implResult = emitApplyWithRethrow(loc, implRef, SILType::getPrimitiveObjectType(fTy), subs, args); // Reabstract the return. SILValue result = resultPlanner.execute(implResult); scope.pop(); B.createReturn(loc, result); } //===----------------------------------------------------------------------===// // Protocol witnesses //===----------------------------------------------------------------------===// enum class WitnessDispatchKind { Static, Dynamic, Class }; static WitnessDispatchKind getWitnessDispatchKind(Type selfType, SILDeclRef witness, bool isFree) { // Free functions are always statically dispatched... if (isFree) return WitnessDispatchKind::Static; // If we have a non-class, non-objc method or a class, objc method that is // final, we do not dynamic dispatch. ClassDecl *C = selfType->getClassOrBoundGenericClass(); if (!C) return WitnessDispatchKind::Static; auto *decl = witness.getDecl(); // If the witness is dynamic, go through dynamic dispatch. if (decl->isDynamic()) return WitnessDispatchKind::Dynamic; bool isFinal = (decl->isFinal() || C->isFinal()); if (auto fnDecl = dyn_cast<AbstractFunctionDecl>(witness.getDecl())) isFinal |= fnDecl->hasForcedStaticDispatch(); bool isExtension = isa<ExtensionDecl>(decl->getDeclContext()); // If we have a final method or a method from an extension that is not // Objective-C, emit a static reference. // A natively ObjC method witness referenced this way will end up going // through its native thunk, which will redispatch the method after doing // bridging just like we want. if (isFinal || isExtension || witness.isForeignToNativeThunk() // Hack--We emit a static thunk for ObjC allocating constructors. || (decl->hasClangNode() && witness.kind == SILDeclRef::Kind::Allocator)) return WitnessDispatchKind::Static; // Otherwise emit a class method. return WitnessDispatchKind::Class; } static CanSILFunctionType getWitnessFunctionType(SILGenModule &SGM, SILDeclRef witness, WitnessDispatchKind witnessKind) { switch (witnessKind) { case WitnessDispatchKind::Static: case WitnessDispatchKind::Dynamic: return SGM.Types.getConstantInfo(witness).SILFnType; case WitnessDispatchKind::Class: return SGM.Types.getConstantOverrideType(witness); } llvm_unreachable("Unhandled WitnessDispatchKind in switch."); } static SILValue getWitnessFunctionRef(SILGenFunction &gen, SILDeclRef witness, WitnessDispatchKind witnessKind, SmallVectorImpl<ManagedValue> &witnessParams, SILLocation loc) { SILGenModule &SGM = gen.SGM; switch (witnessKind) { case WitnessDispatchKind::Static: return gen.emitGlobalFunctionRef(loc, witness); case WitnessDispatchKind::Dynamic: return gen.emitDynamicMethodRef(loc, witness, SGM.Types.getConstantInfo(witness)); case WitnessDispatchKind::Class: SILValue selfPtr = witnessParams.back().getValue(); return gen.B.createClassMethod(loc, selfPtr, witness); } llvm_unreachable("Unhandled WitnessDispatchKind in switch."); } static CanType dropLastElement(CanType type) { auto elts = cast<TupleType>(type)->getElements().drop_back(); return TupleType::get(elts, type->getASTContext())->getCanonicalType(); } void SILGenFunction::emitProtocolWitness(Type selfType, AbstractionPattern reqtOrigTy, CanAnyFunctionType reqtSubstTy, SILDeclRef requirement, SILDeclRef witness, SubstitutionList witnessSubs, IsFreeFunctionWitness_t isFree) { // FIXME: Disable checks that the protocol witness carries debug info. 
// Should we carry debug info for witnesses? F.setBare(IsBare); SILLocation loc(witness.getDecl()); FullExpr scope(Cleanups, CleanupLocation::get(loc)); FormalEvaluationScope formalEvalScope(*this); auto witnessKind = getWitnessDispatchKind(selfType, witness, isFree); auto thunkTy = F.getLoweredFunctionType(); SmallVector<ManagedValue, 8> origParams; // TODO: Should be able to accept +0 values here, once // forwardFunctionArguments/emitApply are able to. collectThunkParams(loc, origParams, /*allowPlusZero*/ false); // Handle special abstraction differences in "self". // If the witness is a free function, drop it completely. // WAY SPECULATIVE TODO: What if 'self' comprised multiple SIL-level params? if (isFree) origParams.pop_back(); // Get the type of the witness. auto witnessInfo = getConstantInfo(witness); CanAnyFunctionType witnessSubstTy = witnessInfo.LoweredInterfaceType; if (!witnessSubs.empty()) { witnessSubstTy = cast<FunctionType>( cast<GenericFunctionType>(witnessSubstTy) ->substGenericArgs(witnessSubs) ->getCanonicalType()); } CanType reqtSubstInputTy = F.mapTypeIntoContext(reqtSubstTy.getInput()) ->getCanonicalType(); CanType reqtSubstResultTy = F.mapTypeIntoContext(reqtSubstTy.getResult()) ->getCanonicalType(); AbstractionPattern reqtOrigInputTy = reqtOrigTy.getFunctionInputType(); // For a free function witness, discard the 'self' parameter of the // requirement. if (isFree) { reqtOrigInputTy = reqtOrigInputTy.dropLastTupleElement(); reqtSubstInputTy = dropLastElement(reqtSubstInputTy); } // Translate the argument values from the requirement abstraction level to // the substituted signature of the witness. auto witnessFTy = getWitnessFunctionType(SGM, witness, witnessKind); if (!witnessSubs.empty()) witnessFTy = witnessFTy->substGenericArgs(SGM.M, witnessSubs); SmallVector<ManagedValue, 8> witnessParams; if (!isFree) { // If the requirement has a self parameter passed as an indirect +0 value, // and the witness takes it as a non-inout value, we must load and retain // the self pointer coming in. This happens when class witnesses implement // non-mutating protocol requirements. auto reqConvention = thunkTy->getSelfParameter().getConvention(); auto witnessConvention = witnessFTy->getSelfParameter().getConvention(); bool inoutDifference; inoutDifference = reqConvention == ParameterConvention::Indirect_Inout && witnessConvention != ParameterConvention::Indirect_Inout; if (inoutDifference) { // If there is an inout difference in self, load the inout self parameter. ManagedValue &selfParam = origParams.back(); SILValue selfAddr = selfParam.getUnmanagedValue(); selfParam = emitLoad(loc, selfAddr, getTypeLowering(selfType), SGFContext(), IsNotTake); } } AbstractionPattern witnessOrigTy(witnessInfo.LoweredInterfaceType); TranslateArguments(*this, loc, origParams, witnessParams, witnessFTy->getParameters()) .translate(reqtOrigInputTy, reqtSubstInputTy, witnessOrigTy.getFunctionInputType(), witnessSubstTy.getInput()); SILValue witnessFnRef = getWitnessFunctionRef(*this, witness, witnessKind, witnessParams, loc); // Collect the arguments. SmallVector<SILValue, 8> args; // - indirect results ResultPlanner resultPlanner(*this, loc); resultPlanner.plan(witnessOrigTy.getFunctionResultType(), witnessSubstTy.getResult(), reqtOrigTy.getFunctionResultType(), reqtSubstResultTy, witnessFTy, thunkTy, args); // - the rest of the arguments forwardFunctionArguments(*this, loc, witnessFTy, witnessParams, args); // Perform the call. 
SILType witnessSILTy = SILType::getPrimitiveObjectType(witnessFTy); SILValue witnessResultValue = emitApplyWithRethrow(loc, witnessFnRef, witnessSILTy, witnessSubs, args); // Reabstract the result value. SILValue reqtResultValue = resultPlanner.execute(witnessResultValue); scope.pop(); B.createReturn(loc, reqtResultValue); }
apache-2.0
lintzc/gpdb
src/test/tinc/tincrepo/dml/functional/sql_partition/mpp21090_pttab_dropfirstcol_addpt_index_timestamptz.sql
2453
-- @author prabhd -- @created 2014-04-01 12:00:00 -- @modified 2012-04-01 12:00:00 -- @tags dml MPP-21090 ORCA -- @optimizer_mode on -- @description Tests for MPP-21090 \echo --start_ignore set gp_enable_column_oriented_table=on; \echo --end_ignore DROP TABLE IF EXISTS mpp21090_pttab_dropfirstcol_addpt_index_timestamptz; CREATE TABLE mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ( col1 timestamptz, col2 timestamptz, col3 char, col4 timestamptz, col5 int ) DISTRIBUTED by (col1) PARTITION BY RANGE(col2)(partition partone start('2013-12-01 12:00:00 PST') end('2013-12-31 12:00:00 PST') WITH(APPENDONLY=true, COMPRESSLEVEL=5, ORIENTATION=column),partition parttwo start('2013-12-31 12:00:00 PST') end('2014-01-01 12:00:00 PST') WITH (APPENDONLY=true, COMPRESSLEVEL=5, ORIENTATION=row),partition partthree start('2014-01-01 12:00:00 PST') end('2014-02-01 12:00:00 PST')); INSERT INTO mpp21090_pttab_dropfirstcol_addpt_index_timestamptz VALUES('2013-12-30 12:00:00 PST','2013-12-30 12:00:00 PST','a','2013-12-30 12:00:00 PST',0); DROP INDEX IF EXISTS mpp21090_pttab_dropfirstcol_addpt_index_idx_timestamptz; CREATE INDEX mpp21090_pttab_dropfirstcol_addpt_index_idx_timestamptz on mpp21090_pttab_dropfirstcol_addpt_index_timestamptz(col2); ALTER TABLE mpp21090_pttab_dropfirstcol_addpt_index_timestamptz DROP COLUMN col1; ALTER TABLE mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ADD PARTITION partfour start('2014-02-01 12:00:00 PST') end('2014-03-01 12:00:00 PST') inclusive; INSERT INTO mpp21090_pttab_dropfirstcol_addpt_index_timestamptz SELECT '2014-02-10 12:00:00 PST','b','2014-02-10 12:00:00 PST', 1; SELECT * FROM mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ORDER BY 1,2,3; UPDATE mpp21090_pttab_dropfirstcol_addpt_index_timestamptz SET col4 = '2014-01-01 12:00:00 PST' WHERE col2 = '2014-02-10 12:00:00 PST' AND col4 = '2014-02-10 12:00:00 PST'; SELECT * FROM mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ORDER BY 1,2,3; -- Update partition key UPDATE mpp21090_pttab_dropfirstcol_addpt_index_timestamptz SET col2 = '2014-01-01 12:00:00 PST' WHERE col2 = '2014-02-10 12:00:00 PST' AND col4 = '2014-01-01 12:00:00 PST'; SELECT * FROM mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ORDER BY 1,2,3; DELETE FROM mpp21090_pttab_dropfirstcol_addpt_index_timestamptz WHERE col2 = '2014-01-01 12:00:00 PST'; SELECT * FROM mpp21090_pttab_dropfirstcol_addpt_index_timestamptz ORDER BY 1,2,3;
apache-2.0
daviddoria/itkHoughTransform
Wrapping/WrapITK/Languages/Python/Tests/SmoothingRecursiveGaussianImageFilter.py
1366
#========================================================================== # # Copyright Insight Software Consortium # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0.txt # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # #==========================================================================*/ # # Example on the use of the SmoothingRecursiveGaussianImageFilter # import itk from sys import argv itk.auto_progress(2) dim = 2 IType = itk.Image[itk.F, dim] OIType = itk.Image[itk.UC, dim] reader = itk.ImageFileReader[IType].New( FileName=argv[1] ) filter = itk.SmoothingRecursiveGaussianImageFilter[IType, IType].New( reader, Sigma=eval( argv[3] ) ) cast = itk.RescaleIntensityImageFilter[IType, OIType].New(filter, OutputMinimum=0, OutputMaximum=255) writer = itk.ImageFileWriter[OIType].New( cast, FileName=argv[2] ) writer.Update()
apache-2.0
aead/minio
pkg/s3select/internal/parquet-go/endian.go
940
/* * Minio Cloud Storage, (C) 2019 Minio, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package parquet import ( "encoding/binary" ) func uint32ToBytes(v uint32) []byte { buf := make([]byte, 4) binary.LittleEndian.PutUint32(buf, v) return buf } func bytesToUint32(buf []byte) uint32 { return binary.LittleEndian.Uint32(buf) } func bytesToUint64(buf []byte) uint64 { return binary.LittleEndian.Uint64(buf) }
apache-2.0
shotishu/spark-mongodb-connector
src/scripts/run_mongod3.bat
170
set CONFIG="C:\Projects\spark-mongodb-connector\src\config" set MONGOBIN="c:\Program Files\MongoDB 2.6 Standard\bin" %MONGOBIN%\mongod.exe --config %CONFIG%\mongod3.cfg
apache-2.0
SangKa/RxJS-Docs-CN
doc/asset/devtools-welcome.js
826
var welcomeText = ( ' ____ _ ____ \n'+ '| _ \\ __ __ | / ___| \n'+ '| |_) |\\ \\/ / | \\___ \\ \n'+ '| _ < > < |_| |___) | \n'+ '|_| \\_\\/_/\\_\\___/|____/ \n'+ '\n试试下面这段代码来开启 RxJS 之旅:\n'+ '\n var subscription = Rx.Observable.interval(500)'+ '.take(4).subscribe(function (x) { console.log(x) });\n'+ '\n还引入了 rxjs-spy 来帮助你调试 RxJS 代码,试试下面这段代码:\n'+ '\n rxSpy.spy();'+ '\n var subscription = Rx.Observable.interval(500)'+ '.tag("interval").subscribe();'+ '\n rxSpy.show();'+ '\n rxSpy.log("interval");\n'+ '\n 想了解更多 rxjs-spy 的用法,请查阅 https://zhuanlan.zhihu.com/p/30870431' ); if (console.info) { console.info(welcomeText); } else { console.log(welcomeText); }
apache-2.0
Mpdreamz/lucene.net
doc/core/Lucene.Net.Index.IndexModifierMembers.html
13075
<html dir="LTR"> <head> <meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" /> <meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" /> <title>IndexModifier Members</title> <xml> </xml> <link rel="stylesheet" type="text/css" href="MSDN.css" /> </head> <body id="bodyID" class="dtBODY"> <div id="nsbanner"> <div id="bannerrow1"> <table class="bannerparthead" cellspacing="0"> <tr id="hdr"> <td class="runninghead">Apache Lucene.Net 2.4.0 Class Library API</td> <td class="product"> </td> </tr> </table> </div> <div id="TitleRow"> <h1 class="dtH1">IndexModifier Members </h1> </div> </div> <div id="nstext"> <p> <a href="Lucene.Net.Index.IndexModifier.html">IndexModifier overview</a> </p> <h4 class="dtH4">Public Instance Constructors</h4> <div class="tablediv"> <table class="dtTABLE" cellspacing="0"> <tr VALIGN="top"> <td width="50%"> <img src="pubmethod.gif" /> <a href="Lucene.Net.Index.IndexModifierConstructor.html">IndexModifier</a> </td> <td width="50%">Overloaded. Initializes a new instance of the IndexModifier class.</td> </tr> </table> </div> <h4 class="dtH4">Public Instance Methods</h4> <div class="tablediv"> <table class="dtTABLE" cellspacing="0"> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.AddDocument_overloads.html">AddDocument</a></td><td width="50%">Overloaded. Adds a document to this index, using the provided analyzer instead of the one specific in the constructor. If the document contains more than {@link #SetMaxFieldLength(int)} terms for a given field, the remainder are discarded. </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.Close.html">Close</a></td><td width="50%"> Close this index, writing all pending changes to disk. </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.DeleteDocument.html">DeleteDocument</a></td><td width="50%"> Deletes the document numbered <pre class="code">docNum</pre>.</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.DeleteDocuments.html">DeleteDocuments</a></td><td width="50%"> Deletes all documents containing <pre class="code">term</pre>. This is useful if one uses a document field to hold a unique ID string for the document. Then to delete such a document, one merely constructs a term with the appropriate field and the unique ID string as its text and passes it to this method. Returns the number of documents deleted. </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.DocCount.html">DocCount</a></td><td width="50%"> Returns the number of documents currently in this index. If the writer is currently open, this returns IndexWriter.DocCount(), else IndexReader.NumDocs(). But, note that IndexWriter.DocCount() does not take deltions into account, unlike IndexReader.NumDocs(). 
</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassEqualsTopic.htm">Equals</a> (inherited from <b>Object</b>)</td><td width="50%">Determines whether the specified <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassTopic.htm">Object</a> is equal to the current <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassTopic.htm">Object</a>.</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.Flush.html">Flush</a></td><td width="50%"> Make sure all changes are written to disk.</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassGetHashCodeTopic.htm">GetHashCode</a> (inherited from <b>Object</b>)</td><td width="50%">Serves as a hash function for a particular type. <b>GetHashCode</b> is suitable for use in hashing algorithms and data structures like a hash table.</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.GetInfoStream.html">GetInfoStream</a></td><td width="50%"> has this index open (<pre class="code">write.lock</pre> could not be obtained) </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.GetMaxBufferedDocs.html">GetMaxBufferedDocs</a></td><td width="50%"> has this index open (<pre class="code">write.lock</pre> could not be obtained) </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.GetMaxFieldLength.html">GetMaxFieldLength</a></td><td width="50%"> has this index open (<pre class="code">write.lock</pre> could not be obtained) </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.GetMergeFactor.html">GetMergeFactor</a></td><td width="50%"> has this index open (<pre class="code">write.lock</pre> could not be obtained) </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassGetTypeTopic.htm">GetType</a> (inherited from <b>Object</b>)</td><td width="50%">Gets the <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemTypeClassTopic.htm">Type</a> of the current instance.</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.GetUseCompoundFile.html">GetUseCompoundFile</a></td><td width="50%"> has this index open (<pre class="code">write.lock</pre> could not be obtained) </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.Optimize.html">Optimize</a></td><td width="50%"> Merges all segments together into a single segment, optimizing an index for search. 
</td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.SetInfoStream.html">SetInfoStream</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.SetMaxBufferedDocs.html">SetMaxBufferedDocs</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.SetMaxFieldLength.html">SetMaxFieldLength</a></td><td width="50%"> The maximum number of terms that will be indexed for a single field in a document. This limits the amount of memory required for indexing, so that collections with very large files will not crash the indexing process by running out of memory. Note that this effectively truncates large documents, excluding from the index terms that occur further in the document. If you know your source documents are large, be sure to set this value high enough to accomodate the expected size. If you set it to Integer.MAX_VALUE, then the only limit is your memory, but you should anticipate an OutOfMemoryError. By default, no more than 10,000 terms will be indexed for a field. </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.SetMergeFactor.html">SetMergeFactor</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.SetUseCompoundFile.html">SetUseCompoundFile</a></td><td width="50%"> Setting to turn on usage of a compound file. When on, multiple files for each segment are merged into a single file once the segment creation is finished. This is done regardless of what directory is in use. </td></tr> <tr VALIGN="top"><td width="50%"><img src="pubmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.ToString.html">ToString</a></td><td width="50%"> </td></tr></table> </div> <h4 class="dtH4">Protected Instance Methods</h4> <div class="tablediv"> <table class="dtTABLE" cellspacing="0"> <tr VALIGN="top"><td width="50%"><img src="protmethod.gif"></img><a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassFinalizeTopic.htm">Finalize</a> (inherited from <b>Object</b>)</td><td width="50%">Allows an <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassTopic.htm">Object</a> to attempt to free resources and perform other cleanup operations before the <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassTopic.htm">Object</a> is reclaimed by garbage collection.</td></tr> <tr VALIGN="top"><td width="50%"><img src="protmethod.gif"></img><a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassMemberwiseCloneTopic.htm">MemberwiseClone</a> (inherited from <b>Object</b>)</td><td width="50%">Creates a shallow copy of the current <a href="ms-help://MS.NETFrameworkSDKv1.1/cpref/html/frlrfSystemObjectClassTopic.htm">Object</a>.</td></tr></table> </div> <h4 class="dtH4">Protected Internal Instance Fields</h4> <div class="tablediv"> <table class="dtTABLE" cellspacing="0"> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.analyzer.html">analyzer</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.directory.html">directory</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a 
href="Lucene.Net.Index.IndexModifier.indexReader.html">indexReader</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.indexWriter.html">indexWriter</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.infoStream.html">infoStream</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.maxBufferedDocs.html">maxBufferedDocs</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.maxFieldLength.html">maxFieldLength</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.mergeFactor.html">mergeFactor</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.open.html">open</a></td><td width="50%"> </td></tr> <tr VALIGN="top"><td width="50%"><img src="intfield.gif"></img><a href="Lucene.Net.Index.IndexModifier.useCompoundFile.html">useCompoundFile</a></td><td width="50%"> </td></tr></table> </div> <h4 class="dtH4">Protected Internal Instance Methods</h4> <div class="tablediv"> <table class="dtTABLE" cellspacing="0"> <tr VALIGN="top"><td width="50%"><img src="intmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.AssureOpen.html">AssureOpen</a></td><td width="50%"> Throw an IllegalStateException if the index is closed.</td></tr> <tr VALIGN="top"><td width="50%"><img src="intmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.CreateIndexReader.html">CreateIndexReader</a></td><td width="50%"> Close the IndexWriter and open an IndexReader.</td></tr> <tr VALIGN="top"><td width="50%"><img src="intmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.CreateIndexWriter.html">CreateIndexWriter</a></td><td width="50%"> Close the IndexReader and open an IndexWriter.</td></tr> <tr VALIGN="top"><td width="50%"><img src="intmethod.gif"></img><a href="Lucene.Net.Index.IndexModifier.Init.html">Init</a></td><td width="50%"> Initialize an IndexWriter.</td></tr></table> </div> <h4 class="dtH4">See Also</h4> <p> <a href="Lucene.Net.Index.IndexModifier.html">IndexModifier Class</a> | <a href="Lucene.Net.Index.html">Lucene.Net.Index Namespace</a></p> <object type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true" style="display: none;"> <param name="Keyword" value="IndexModifier class"> </param> <param name="Keyword" value="Lucene.Net.Index.IndexModifier class"> </param> <param name="Keyword" value="IndexModifier class, all members"> </param> </object> <hr /> <div id="footer"> <p> </p> <p>Generated from assembly Lucene.Net [2.4.0.2]</p> </div> </div> </body> </html>
apache-2.0
SinnerSchraderMobileMirrors/ZXingObjC
ZXingObjC/common/reedsolomon/ZXGenericGF.h
1735
/* * Copyright 2012 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This class contains utility methods for performing mathematical operations over * the Galois Fields. Operations use a given primitive polynomial in calculations. * * Throughout this package, elements of the GF are represented as an int * for convenience and speed (but at the cost of memory). */ @class ZXGenericGFPoly; @interface ZXGenericGF : NSObject @property (nonatomic, retain, readonly) ZXGenericGFPoly *zero; @property (nonatomic, retain, readonly) ZXGenericGFPoly *one; @property (nonatomic, assign, readonly) int size; @property (nonatomic, assign, readonly) int generatorBase; + (ZXGenericGF *)AztecData12; + (ZXGenericGF *)AztecData10; + (ZXGenericGF *)AztecData6; + (ZXGenericGF *)AztecParam; + (ZXGenericGF *)QrCodeField256; + (ZXGenericGF *)DataMatrixField256; + (ZXGenericGF *)AztecData8; + (ZXGenericGF *)MaxiCodeField64; - (id)initWithPrimitive:(int)primitive size:(int)size b:(int)b; - (ZXGenericGFPoly *)buildMonomial:(int)degree coefficient:(int)coefficient; + (int)addOrSubtract:(int)a b:(int)b; - (int)exp:(int)a; - (int)log:(int)a; - (int)inverse:(int)a; - (int)multiply:(int)a b:(int)b; @end
apache-2.0
johnidelight/bible-assistant
platform/android/tool/BibleGenerator/src/org/heavenus/bible/generator/Book.java
2467
/* * Copyright (C) 2011 The Bible Assistant Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.heavenus.bible.generator; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import android.text.TextUtils; /* * Book content format: * <section_name><TAB><section_content> * <section_name><TAB><section_content> * ... */ class Book { public class Section { public String name; public String content; } private String mName; private ArrayList<Section> mSections = new ArrayList<Section>(); public Book(File f) throws IOException { mName = Path.getFileNameWithoutExtension(f.getAbsolutePath()); init(f); } public String getName() { return mName; } public int getSectionCount() { return mSections.size(); } public Section getSection(int index) { if(index >= 0 && index < mSections.size()) { return mSections.get(index); } return null; } private void init(File book) throws IOException { InputStream in = null; try { // Read all book content. in = new BufferedInputStream(new FileInputStream(book)); byte[] data = new byte[in.available()]; in.read(data); // Skip beginning encoding data. String content = new String(data).substring(1); // Parse every line. String[] rows = content.split("\r\n"); for(String r : rows) { // Parse every field. if(!TextUtils.isEmpty(r)) { String[] fields = r.split("\t"); int count = fields.length; if(count > 0) { // Add new section. Section s = new Section(); s.name = fields[0].trim(); s.content = (count > 1 ? fields[1] : ""); mSections.add(s); } } } } finally { if (in != null) { in.close(); } } } }
apache-2.0
mbrukman/knife-google
README.md
17301
# knife-google

A [knife](http://wiki.opscode.com/display/chef/Knife) plugin to create, delete and enlist
[Google Compute Engine](https://cloud.google.com/products/compute-engine) resources.

## Overview

This plugin adds functionality to Chef through a knife plugin to create, delete, and manage
[Google Compute Engine](https://cloud.google.com/products/compute-engine) servers and disks.

### Compatibility

This plugin utilizes Google Compute Engine API v1. Please review the API v1
[release notes](https://developers.google.com/compute/docs/release-notes#december032013)
for additional information. As of knife-google 1.4.0, Chef 12 is used. As of knife-google 1.3.0,
the options have changed: several GCE-specific short options have been deprecated, and
GCE-specific long options now start with `--gce-`.

### Nomenclature

This plugin conforms to the nomenclature used by similar plugins and uses the term "server"
when referencing nodes managed by the plugin. In Google Compute Engine parlance, this is
equivalent to an "instance" or "virtual machine instance".

### Create a Google Cloud Platform project

Before getting started with this plugin, you must first create a
[Google Cloud Platform](https://cloud.google.com/) "project" and add the Google Compute Engine
service to your project. Once you have created your project, you will have access to other
Google Cloud Platform services such as [App Engine](https://developers.google.com/appengine/),
[Cloud Storage](https://developers.google.com/storage/),
[Cloud SQL](https://developers.google.com/cloud-sql/) and others, but this plugin only requires
that you enable Google Compute Engine in your project. Note that you will need to be logged in
with your Google Account before creating the project and adding services.

### Authorizing Setup

In order for the knife plugin to programmatically manage your servers, you will first need to
authorize its use of the Google Compute Engine API. Authorization to use any of Google's Cloud
service APIs utilizes the [OAuth 2.0](https://developers.google.com/accounts/docs/OAuth2)
standard. Once your project has been created, log in to your Google Account, visit the
[API Console](http://code.google.com/apis/console), and follow the "APIs & auth" menu.
Select "Credentials". Under the "OAuth" section, select "Create New Client ID". Specify the
[Installed Application](https://developers.google.com/accounts/docs/OAuth2#installed)
Application type with sub-type "Other", then "Create Client ID". These actions will generate a
new "Client ID", "Client secret", and "Redirect URIs".

This knife plugin includes a `setup` sub-command that requires you to supply the client ID and
secret in order to obtain an "authorization token". You will only need to run this command one
time, and the plugin will record your credential information and tokens for future API calls.

## Installation

Be sure you are running Chef version 0.10.0 or higher in order to use knife plugins.

```sh
gem install knife-google
```

or, for Gemfile:

```ruby
gem 'knife-google'
```

There is a long-standing issue in Ruby where the `net/http` library by default does not check
the validity of an SSL certificate during a TLS handshake. To configure a Windows system to
validate SSL certificates, download the [cacert.pem](http://curl.haxx.se/ca/cacert.pem) file and
save it to the `C:` drive. Now make Ruby aware of your certificate authority by setting
`SSL_CERT_FILE`.
To set this in your current command prompt session, type:

```sh
set SSL_CERT_FILE=C:\cacert.pem
```

On a Linux system, the configuration for SSL certificate validation is present by default.
Depending on your system's configuration, you may need to run this command with
root/Administrator privileges.

## Configuration

### Setting up the plugin

For initial setup, you must first have created your Google Cloud Platform project, enabled
Google Compute Engine, and set up the Client ID described above. Run the 'setup' sub-command
and supply the Project ID, the Client ID, Client secret, and authorization tokens when
prompted. It will also prompt you to open a URL in a browser. Make sure you are logged in with
the Google account associated with the project and client ID/secret in order to authorize the
plugin.

```sh
knife google setup
```

By default, the credential and token information will be stored in `~/.google-compute.json`.
You can override this location with the `-f <credential_file>` flag with all plugin commands.

### Bootstrap Preparation and SSH

In order to bootstrap nodes, you will first need to ensure your SSH keys are set up correctly.
In Google Compute Engine, you can store SSH keys in project metadata that will get copied over
to new servers and placed in the appropriate user's `~/.ssh/authorized_keys` file. If you don't
already have SSH keys set up, you can create them with the `ssh-keygen` program. Open up the
Metadata page from the GCE section of the cloud console. If it doesn't already exist, create a
new `sshKeys` key and paste in your user's `~/.ssh/id_rsa.pub` file; make sure to prefix the
entry with the username that corresponds to the username specified with the `-x` (aka
`--ssh-user`) argument of the knife command or its default value of `root`.
An example entry should look something like this -- notice the prepended username of `myuser`: ``` myuser:ssh-rsa AYAAB3Nwejwejjfjawlwl990sefjsfC5lPulcP4eZB+z1zcMF 76gTV4vojT/SWXymTfGpBL2KHTmF4jnGfEKPwjHIiLrZNHM2ISMi/atlKjOoUCVT AvUyjqqp3z2KVXSP9P50Kgf8JYWjjXKApiZHkJOHJZ8GGf7aTnRU9NEGLbQK6Q1k 4UHbVG4ps4kSLWsJ7eVcu981GvlwP3ooiJ6YWcOX9PS58d4SNtq41/XaoLibKt/Y Wzd/4tjYwMRVcxJdAy1T2474vkU/Qr7ibFinKeJymgouoQpEGhF64cF2pncCcmR7 zRk7CzL3mhcma8Zvwj234-2f3/+234/AR#@R#y1EEFsbzGbxOJfEVSTgJfvY7KYp 329df/2348sd3ARTx99 mymail@myhost ``` ## Usage Some usage examples follow: ```sh # See a list of all zones, their statuses and maintenance windows $ knife google zone list # List all servers (including those that may not be managed by Chef) $ knife google server list -Z us-central1-a # Create a server $ knife google server create www1 -m n1-standard-1 -I centos-7-v20150127 -Z us-central1-a -x jdoe # Create a server with service account scopes $ knife google server create www1 -m n1-standard-1 -I centos-7-v20150127 -Z us-central1-a -x jdoe --gce-service-account-scopes https://www.googleapis.com/auth/userinfo.email,https://www.googleapis.com/auth/compute,https://www.googleapis.com/auth/devstorage.full_control # Delete a server (along with Chef node and API client via --purge) $ knife google server delete www1 --purge -Z us-central1-a ``` For a full list of commands, run `knife google` without additional arguments: ```sh $ knife google ** GOOGLE COMMANDS ** knife google disk create NAME --gce-disk-size N -Z ZONE (options) knife google disk delete NAME -Z ZONE (options) knife google disk list -Z ZONE (options) knife google project list (options) knife google region list (options) knife google server create NAME -m MACHINE_TYPE -I IMAGE -Z ZONE (options) knife google server delete SERVER [SERVER] -Z ZONE (options) knife google server list -Z ZONE (options) knife google setup knife google zone list (options) ``` More detailed help can be obtained by specifying sub-commands. For instance, ```sh $ knife google server list -Z us-central1-a --help knife google server list -Z ZONE (options) -s, --server-url URL Chef Server URL --chef-zero-port PORT Port to start chef-zero on -k, --key KEY API Client Key --[no-]color Use colored output, defaults to false on Windows, true otherwise -f CREDENTIAL_FILE, Google Compute credential file (google setup can create this) --gce-credential-file -c, --config CONFIG The configuration file to use --defaults Accept default values for all questions -d, --disable-editing Do not open EDITOR, just accept the data as is -e, --editor EDITOR Set the editor to use for interactive commands -E, --environment ENVIRONMENT Set the Chef environment -F, --format FORMAT Which format to use for output -z, --local-mode Point knife commands at local repository instead of server -u, --user USER API Client Username --print-after Show the data after a destructive operation -V, --verbose More verbose output. Use twice for max verbosity -v, --version Show chef version -y, --yes Say yes to all prompts for confirmation -Z, --gce-zone ZONE The Zone for this server (required) -h, --help Show this message ``` ## Sub-commands ### `knife google setup` Use this command to initially set up authorization (see above for more details). Note that if you override the default credential file with the `-f` switch, you'll need to use the `-f` switch for *all* sub-commands. 
When prompted, make sure to specify the "Project ID" (and not the name or number) or you will see 404/not found errors even if the setup command completes successfully. ### `knife google zone list` A zone is an isolated location within a region that is independent of other zones in the same region. Zones are designed to support instances or applications that have high availability requirements. Zones are designed to be fault-tolerant, so that you can distribute instances and resources across multiple zones to protect against the system failure of a single zone. This keeps your application available even in the face of expected and unexpected failures. The fully-qualified name is made up of `<region>/<zone>`. For example, the fully-qualified name for zone `a` in region `us-central1` is `us-central1-a`. Depending on how widely you want to distribute your resources, you may choose to create instances across multiple zones within one region or across multiple regions and multiple zones. Use this command to list out the available Google Compute Engine zones. You can find a zone's current status and upcoming maintenance windows. The output for `knife google zone list` should look similar to: ``` name status deprecation maintainance window europe-west1-a up - 2014-01-18 12:00:00 -0800 to 2014-02-02 12:00:00 -0800 europe-west1-b up - 2014-03-15 12:00:00 -0700 to 2014-03-30 12:00:00 -0700 us-central1-a up - - us-central1-b up - - ``` ### `knife google region list` Each region in Google Compute Engine contains any number of zones. The region describes the geographic location where your resources are stored. For example, a zone named `us-east1-a` is in region `us-east1`. A region contains one or more zones. Choose a region that makes sense for your scenario. For example, if you only have customers on the east coast of the US, or if you have specific needs that require your data to live in the US, it makes sense to store your resources in a zone with a us-east region. Use this command to list out the available Google Compute Engine regions. You can find the region's current status, cpus, disks-total-gb, in-use-addresses and static-addresses. Use the `-L` switch to also list the quota limit for each resource. The output for `knife google region list -L` should look similar to: ``` Name status deprecation cpus disks-total-gb in-use-addresses static-addresses europe-west1 up - 1/10 100/100000 1/10 1/7 us-central1 up - 0/10 0/100000 0/10 0/7 us-central2 up - 1/10 50/100000 1/10 1/7 ``` ### `knife google project list` A project resource is the root collection and settings resource for all Google Compute Engine resources. Use this command to list out your project's current usage of snapshots, networks, firewalls, images, routes, forwarding-rules, target-pools and health-checks. Use the `-L` switch to also list the quota limit for each resource. The output for `knife google project list -L` should look similar to: ``` name snapshots networks firewalls images routes forwarding-rules target-pools health-checks chef-test1 0/1000 1/5 3/100 0/100 2/100 0/50 0/50 0/50 chef-test2 1/1000 2/5 3/100 1/100 2/100 0/50 0/50 0/50 ``` ### `knife google server create` Use this command to create a new Google Compute Engine server (a.k.a. instance) with a persistent boot disk. You must specify a name, the machine type, the zone, and the the image name. 
Images provided by Google follow this naming convention: ``` debian-7-wheezy-vYYYYMMDD centos-7-vYYYYMMDD ``` By default, the plugin will look for the specified image in the instance's primary project first and then consult GCE's officially supported image locations. The `--gce-image-project-id IMAGE_PROJECT_ID` option can be specified to force the plugin to look for the image in an alternate project location. Note that if you are bootstrapping the node, make sure to follow the preparation instructions earlier and use the `-x` and `-i` commands to specify the username and the identity file for that user. Make sure to use the private key file (e.g. `~/.ssh/id_rsa`) for the identity file and *not* the public key file. If you would like to set up your server with a service account, provide the `--gce-service-account-scopes` argument during server creation. The service account associated with your project will be used by default unless otherwise specified with the optional `--gce-service-account-name` argument. See the extended options that also allow bootstrapping the node with `knife google server create --help`. ### `knife google server delete` This command terminates and deletes a server. Use the `--purge` option to also remove it from Chef. Note that persistent disks associated with the server, including the boot disk, are not deleted with this operation. To delete persistent disks use `knife google disk delete`. Use `knife google server delete --help` for other options. ### `knife google server list` Get a list of servers in the specified zone. Note that this may include servers that are *not* managed by Chef. Your output should look something like: ``` name type public ip private ip disks zone status chef-server n1-standard-1 103.59.80.113 10.240.45.78 chef-server us-central1-a running chef-workstation n1-standard-1 103.59.85.188 10.240.9.140 chef-workstation us-central1-a running fuse-dev n1-standard-1 103.59.80.147 10.240.166.18 fuse-dev us-central1-a running magfs-c1 n1-standard-2 103.59.87.217 10.240.61.92 magfs-c1 us-central1-a running magfs-c2 n1-standard-2 103.59.80.161 10.240.175.240 magfs-c2 us-central1-a running magfs-c3 n1-standard-2 178.255.120.69 10.240.34.197 magfs-c3 us-central1-a running magfs-svr n1-standard-4 103.59.80.178 10.240.81.25 magfs-svr us-central1-a running ``` ### `knife google disk create` Create a new persistent disk. You must provide a name, size in gigabytes, and the desired zone. ### `knife google disk delete` Delete an existing disk in the specified zone. Note that the disk will *not* be deleted if it is currently attached to a running server. ### `knife google disk list` See a listing of disks defined for a specific zone. Your output should look something like: ``` name zone source snapshot size (in GB) status dev-1 us-central1-a 10 ready dev-2 us-central1-a 10 ready test-1 us-central1-a 20 ready ``` ## Troubleshooting * Seeing 404 errors or zone not found? This can result if you mistakenly specified an invalid "Project ID" while going through the `knife google setup` command. Make sure you specified the "Project ID" (not the project name or number). ## Build and Development Standard rake commands for building, installing, testing, and uninstalling the module. ```sh # Run spec tests $ rake # Build and install the module $ rake install # Uninstall $ rake uninstall ``` ## Versioning and Release Protocol Knife-google is released by the maintainer of this source repository to the gem repository at [RubyGems](https://rubygems.org). 
Releases are versioned according to [SemVer](http://semver.org) as much as possible, with a specific provision for GCE API changes: * When the implementation of knife-google switches to a new GCE API revision, the minor version **MUST** be incremented. The version number of the release is simply the gem version. All releases to RubyGems **MUST** be tagged in git with the version number of the release. ## Contributing * See [CONTRIB.md](https://github.com/opscode/knife-google/blob/master/CONTRIB.md) ## Licensing * See [LICENSE](https://raw.github.com/opscode/knife-google/master/LICENSE)
apache-2.0
wilebeast/FireFox-OS
B2G/gecko/netwerk/test/TestServ.cpp
3636
/* vim:set ts=4 sw=4 et cindent: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "TestCommon.h" #include <stdlib.h> #include "nsIServiceManager.h" #include "nsIServerSocket.h" #include "nsISocketTransport.h" #include "nsNetUtil.h" #include "nsStringAPI.h" #include "nsCOMPtr.h" #include "prlog.h" #if defined(PR_LOGGING) // // set NSPR_LOG_MODULES=Test:5 // static PRLogModuleInfo *gTestLog = nullptr; #endif #define LOG(args) PR_LOG(gTestLog, PR_LOG_DEBUG, args) class MySocketListener : public nsIServerSocketListener { public: NS_DECL_ISUPPORTS NS_DECL_NSISERVERSOCKETLISTENER MySocketListener() {} virtual ~MySocketListener() {} }; NS_IMPL_THREADSAFE_ISUPPORTS1(MySocketListener, nsIServerSocketListener) NS_IMETHODIMP MySocketListener::OnSocketAccepted(nsIServerSocket *serv, nsISocketTransport *trans) { LOG(("MySocketListener::OnSocketAccepted [serv=%p trans=%p]\n", serv, trans)); nsAutoCString host; int32_t port; trans->GetHost(host); trans->GetPort(&port); LOG((" -> %s:%d\n", host.get(), port)); nsCOMPtr<nsIInputStream> input; nsCOMPtr<nsIOutputStream> output; nsresult rv; rv = trans->OpenInputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(input)); if (NS_FAILED(rv)) return rv; rv = trans->OpenOutputStream(nsITransport::OPEN_BLOCKING, 0, 0, getter_AddRefs(output)); if (NS_FAILED(rv)) return rv; char buf[256]; uint32_t n; rv = input->Read(buf, sizeof(buf), &n); if (NS_FAILED(rv)) return rv; const char response[] = "HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\nFooooopy!!\r\n"; rv = output->Write(response, sizeof(response) - 1, &n); if (NS_FAILED(rv)) return rv; input->Close(); output->Close(); return NS_OK; } NS_IMETHODIMP MySocketListener::OnStopListening(nsIServerSocket *serv, nsresult status) { LOG(("MySocketListener::OnStopListening [serv=%p status=%x]\n", serv, status)); QuitPumpingEvents(); return NS_OK; } static nsresult MakeServer(int32_t port) { nsresult rv; nsCOMPtr<nsIServerSocket> serv = do_CreateInstance(NS_SERVERSOCKET_CONTRACTID, &rv); if (NS_FAILED(rv)) return rv; rv = serv->Init(port, true, 5); if (NS_FAILED(rv)) return rv; rv = serv->GetPort(&port); if (NS_FAILED(rv)) return rv; LOG((" listening on port %d\n", port)); rv = serv->AsyncListen(new MySocketListener()); return rv; } int main(int argc, char* argv[]) { if (test_common_init(&argc, &argv) != 0) return -1; nsresult rv= (nsresult)-1; if (argc < 2) { printf("usage: %s <port>\n", argv[0]); return -1; } #if defined(PR_LOGGING) gTestLog = PR_NewLogModule("Test"); #endif /* * The following code only deals with XPCOM registration stuff. and setting * up the event queues. Copied from TestSocketIO.cpp */ rv = NS_InitXPCOM2(nullptr, nullptr, nullptr); if (NS_FAILED(rv)) return -1; { rv = MakeServer(atoi(argv[1])); if (NS_FAILED(rv)) { LOG(("MakeServer failed [rv=%x]\n", rv)); return -1; } // Enter the message pump to allow the URL load to proceed. PumpEvents(); } // this scopes the nsCOMPtrs // no nsCOMPtrs are allowed to be alive when you call NS_ShutdownXPCOM NS_ShutdownXPCOM(nullptr); return 0; }
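The test above wires a blocking nsIServerSocket listener that accepts one connection at a time, reads up to 256 bytes of request, and writes a canned HTTP response. To make the behaviour under test easy to see, here is a hedged Python sketch of the equivalent minimal blocking TCP server; it does not use Mozilla's nsIServerSocket API, and the port in the usage line is an arbitrary example.

```python
# Conceptual Python equivalent of the blocking accept/read/respond loop above
# (illustration only; it does not use Mozilla's nsIServerSocket API).
import socket

RESPONSE = (b"HTTP/1.0 200 OK\r\n"
            b"Content-Type: text/plain\r\n"
            b"\r\n"
            b"Fooooopy!!\r\n")

def serve(port):
    srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind(("127.0.0.1", port))
    srv.listen(5)                      # same backlog as serv->Init(port, true, 5)
    while True:
        conn, addr = srv.accept()
        print("accepted connection from", addr)
        conn.recv(256)                 # read (and ignore) up to 256 bytes of request
        conn.sendall(RESPONSE)
        conn.close()

if __name__ == "__main__":
    serve(8080)                        # port is an arbitrary example
```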
apache-2.0
jasimmk/atlas
atlas-chart/src/test/scala/com/netflix/atlas/chart/graphics/TicksSuite.scala
6638
/* * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.atlas.chart.graphics import org.scalatest.FunSuite import scala.util.Random class TicksSuite extends FunSuite { private def checkForDuplicates(ticks: List[ValueTick]): Unit = { val duplicates = ticks.filter(_.major).map(_.label).groupBy(v => v).filter(_._2.size > 1) assert(duplicates === Map.empty, "duplicate tick labels") } private def sanityCheck(ticks: List[ValueTick]): Unit = { checkForDuplicates(ticks) } test("values [0.0, 100.0]") { val ticks = Ticks.value(0.0, 100.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "0.0") assert(ticks.last.label === "100.0") } test("values [1.0, 2.0], 7 ticks") { val ticks = Ticks.value(1.0, 2.0, 7) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) } test("values [0.0, 10.0]") { val ticks = Ticks.value(0.0, 10.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "0.0") assert(ticks.last.label === "10.0") } test("values [0.0, 8.0]") { val ticks = Ticks.value(0.0, 8.0, 5) sanityCheck(ticks) assert(ticks.size === 17) assert(ticks.filter(_.major).size === 5) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "0.0") assert(ticks.last.label === "8.0") } test("values [0.0, 7.0]") { val ticks = Ticks.value(0.0, 7.0, 5) sanityCheck(ticks) assert(ticks.size === 15) assert(ticks.filter(_.major).size === 4) assert(ticks.head.offset === 0.0) assert(ticks.filter(_.major).map(_.label).mkString(",") === "0.0,2.0,4.0,6.0") } test("values [0.96, 1.0]") { val ticks = Ticks.value(0.96, 1.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 5) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "0.96") assert(ticks.last.label === "1.00") } test("values [835, 1068]") { val ticks = Ticks.value(835.0, 1068, 5) sanityCheck(ticks) assert(ticks.size === 23) assert(ticks.filter(_.major).size === 5) assert(ticks.head.offset === 0.0) assert(ticks.filter(_.major).head.label === "0.85k") assert(ticks.filter(_.major).last.label === "1.05k") } test("values [2026, 2027]") { val ticks = Ticks.value(2026.0, 2027.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) assert(ticks.head.offset === 2026.0) assert(ticks.head.label === "0.0") assert(ticks.last.label === "1.0") } test("values [200026, 200027]") { val ticks = Ticks.value(200026.0, 200027.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) assert(ticks.head.offset === 200026.0) assert(ticks.head.label === "0.0") assert(ticks.last.label === "1.0") } test("values [200026.23, 200026.2371654]") { val ticks = Ticks.value(200026.23, 200026.2371654, 5) sanityCheck(ticks) assert(ticks.size === 15) assert(ticks.filter(_.major).size === 4) assert(ticks.head.offset === 200026.23) 
assert(ticks.head.label === "0.0") assert(ticks.last.label === "7.0m") } test("values [2026, 2047]") { val ticks = Ticks.value(2026.0, 2047.0, 5) sanityCheck(ticks) assert(ticks.size === 22) assert(ticks.filter(_.major).size === 4) assert(ticks.head.offset === 0.0) assert(ticks.filter(_.major).head.label === "2.030k") assert(ticks.filter(_.major).last.label === "2.045k") } test("values [20, 21.8]") { val ticks = Ticks.value(20.0, 21.8, 5) sanityCheck(ticks) assert(ticks.size === 19) assert(ticks.filter(_.major).size === 5) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "20.0") assert(ticks.last.label === "21.8") } test("values [-21.8, -20]") { val ticks = Ticks.value(-21.8, -20.0, 5) sanityCheck(ticks) assert(ticks.size === 19) assert(ticks.filter(_.major).size === 5) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "-21.8") assert(ticks.last.label === "-20.0") } test("values [-2027, -2046]") { val ticks = Ticks.value(-2027.0, -2026.0, 5) sanityCheck(ticks) assert(ticks.size === 21) assert(ticks.filter(_.major).size === 6) assert(ticks.head.offset === -2027.0) assert(ticks.filter(_.major).head.label === "0.0") assert(ticks.filter(_.major).last.label === "1.0") } test("values [42.0, 8.123456e12]") { val ticks = Ticks.value(42.0, 8.123456e12, 5) sanityCheck(ticks) assert(ticks.size === 16) assert(ticks.filter(_.major).size === 4) assert(ticks.head.offset === 0.0) assert(ticks.head.label === "0.5T") assert(ticks.last.label === "8.0T") } test("values [2126.4044472658984, 2128.626188548245], 9 ticks") { val ticks = Ticks.value(2126.4044472658984, 2128.626188548245, 9) sanityCheck(ticks) assert(ticks.size === 22) assert(ticks.filter(_.major).size === 7) assert(ticks.head.offset === 2126.4) assert(ticks.head.label === "100.0m") assert(ticks.last.label === "2.2") } test("sanity check, 0 to y") { for (i <- 0 until 100; j <- 2 until 10) { val v = Random.nextDouble() * 1e12 try { val ticks = Ticks.value(0.0, v, j) sanityCheck(ticks) } catch { case t: Throwable => throw new AssertionError(s"Ticks.value(0.0, $v, $j)", t) } } } test("sanity check, y1 to y2") { for (i <- 0 until 100; j <- 2 until 10) { val v1 = Random.nextDouble() * 1e4 val v2 = v1 + Random.nextDouble() * 1e3 try { val ticks = Ticks.value(v1, v2, j) sanityCheck(ticks) } catch { case t: Throwable => throw new AssertionError(s"Ticks.value($v1, $v2, $j)", t) } } } }
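The suite above pins down how `Ticks.value` chooses major/minor ticks, offsets, and unit-suffixed labels for a value range. As a point of comparison only, here is a hedged Python sketch of the common 1-2-5 "nice step" heuristic that tick generators of this kind are usually built on; it is explicitly not the Atlas implementation, whose offset and label-formatting rules are exactly what the tests document.

```python
# Generic "nice ticks" sketch using the common 1-2-5 step heuristic.
# This is NOT the Atlas Ticks.value implementation; it only illustrates the
# kind of step selection such code is built on.
import math

def nice_ticks(v1, v2, max_major):
    span = v2 - v1
    raw_step = span / max_major
    magnitude = 10.0 ** math.floor(math.log10(raw_step))
    for m in (1, 2, 5, 10):
        step = m * magnitude
        if span / step <= max_major:
            break
    start = math.ceil(v1 / step) * step
    ticks = []
    t = start
    while t <= v2 + 1e-12:
        ticks.append(round(t, 12))
        t += step
    return ticks

print(nice_ticks(0.0, 100.0, 5))   # [0.0, 20.0, 40.0, 60.0, 80.0, 100.0]
```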
apache-2.0
ramrunner/gobgp
gobgp/lib/path.go
3283
// Copyright (C) 2015 Nippon Telegraph and Telephone Corporation. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. // See the License for the specific language governing permissions and // limitations under the License. package main // typedef struct { // char *value; // int len; // } buf; // // typedef struct path_t { // buf nlri; // buf** path_attributes; // int path_attributes_len; // int path_attributes_cap; // } path; // extern path* new_path(); // extern void free_path(path*); // extern int append_path_attribute(path*, int, char*); // extern buf* get_path_attribute(path*, int); import "C" import ( "encoding/json" "strings" "github.com/osrg/gobgp/gobgp/cmd" "github.com/osrg/gobgp/packet/bgp" ) //export get_route_family func get_route_family(input *C.char) C.int { rf, err := bgp.GetRouteFamily(C.GoString(input)) if err != nil { return C.int(-1) } return C.int(rf) } //export serialize_path func serialize_path(rf C.int, input *C.char) *C.path { args := strings.Split(C.GoString(input), " ") p, err := cmd.ParsePath(bgp.RouteFamily(rf), args) if err != nil { return nil } path := C.new_path() if len(p.Nlri) > 0 { path.nlri.len = C.int(len(p.Nlri)) path.nlri.value = C.CString(string(p.Nlri)) } for _, attr := range p.Pattrs { C.append_path_attribute(path, C.int(len(attr)), C.CString(string(attr))) } return path } //export decode_path func decode_path(p *C.path) *C.char { var buf []byte var nlri bgp.AddrPrefixInterface if p.nlri.len > 0 { buf = []byte(C.GoStringN(p.nlri.value, p.nlri.len)) nlri = &bgp.IPAddrPrefix{} err := nlri.DecodeFromBytes(buf) if err != nil { return nil } } pattrs := make([]bgp.PathAttributeInterface, 0, int(p.path_attributes_len)) for i := 0; i < int(p.path_attributes_len); i++ { b := C.get_path_attribute(p, C.int(i)) buf = []byte(C.GoStringN(b.value, b.len)) pattr, err := bgp.GetPathAttribute(buf) if err != nil { return nil } err = pattr.DecodeFromBytes(buf) if err != nil { return nil } switch pattr.GetType() { case bgp.BGP_ATTR_TYPE_MP_REACH_NLRI: mpreach := pattr.(*bgp.PathAttributeMpReachNLRI) if len(mpreach.Value) != 1 { return nil } nlri = mpreach.Value[0] } pattrs = append(pattrs, pattr) } j, _ := json.Marshal(struct { Nlri bgp.AddrPrefixInterface `json:"nlri"` PathAttrs []bgp.PathAttributeInterface `json:"attrs"` }{ Nlri: nlri, PathAttrs: pattrs, }) return C.CString(string(j)) } //export decode_capabilities func decode_capabilities(p *C.buf) *C.char { buf := []byte(C.GoStringN(p.value, p.len)) c, err := bgp.DecodeCapability(buf) if err != nil { return nil } j, _ := json.Marshal(c) return C.CString(string(j)) } func main() { // We need the main function to make possible // CGO compiler to compile the package as C shared library }
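The exported cgo functions above (`get_route_family`, `serialize_path`, `decode_path`) are meant to be consumed through the C ABI once the package is built as a shared library. Below is a hedged ctypes sketch of what a caller might look like; the struct layouts come from the typedefs in the cgo comment, but the shared-library file name and build step (`go build -buildmode=c-shared`) are assumptions, not something the source file specifies.

```python
# Hedged ctypes sketch of calling the exported cgo functions above once the
# package is built as a C shared library. The library file name is an
# assumption; the struct layouts mirror the typedefs in the cgo comment.
import ctypes

class Buf(ctypes.Structure):
    _fields_ = [("value", ctypes.c_char_p), ("len", ctypes.c_int)]

class Path(ctypes.Structure):
    _fields_ = [
        ("nlri", Buf),
        ("path_attributes", ctypes.POINTER(ctypes.POINTER(Buf))),
        ("path_attributes_len", ctypes.c_int),
        ("path_attributes_cap", ctypes.c_int),
    ]

lib = ctypes.CDLL("./libgobgp.so")  # assumed artifact name
lib.get_route_family.argtypes = [ctypes.c_char_p]
lib.get_route_family.restype = ctypes.c_int
lib.serialize_path.argtypes = [ctypes.c_int, ctypes.c_char_p]
lib.serialize_path.restype = ctypes.POINTER(Path)
lib.decode_path.argtypes = [ctypes.POINTER(Path)]
lib.decode_path.restype = ctypes.c_char_p

rf = lib.get_route_family(b"ipv4-unicast")
path = lib.serialize_path(rf, b"10.0.0.0/24")
if path:
    print(lib.decode_path(path))  # JSON with "nlri" and "attrs"
```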
apache-2.0
aws/aws-sdk-cpp
aws-cpp-sdk-iotsitewise/include/aws/iotsitewise/model/ErrorDetails.h
4647
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #pragma once #include <aws/iotsitewise/IoTSiteWise_EXPORTS.h> #include <aws/iotsitewise/model/ErrorCode.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <aws/core/utils/memory/stl/AWSVector.h> #include <aws/iotsitewise/model/DetailedError.h> #include <utility> namespace Aws { namespace Utils { namespace Json { class JsonValue; class JsonView; } // namespace Json } // namespace Utils namespace IoTSiteWise { namespace Model { /** * <p>Contains the details of an IoT SiteWise error.</p><p><h3>See Also:</h3> <a * href="http://docs.aws.amazon.com/goto/WebAPI/iotsitewise-2019-12-02/ErrorDetails">AWS * API Reference</a></p> */ class AWS_IOTSITEWISE_API ErrorDetails { public: ErrorDetails(); ErrorDetails(Aws::Utils::Json::JsonView jsonValue); ErrorDetails& operator=(Aws::Utils::Json::JsonView jsonValue); Aws::Utils::Json::JsonValue Jsonize() const; /** * <p>The error code.</p> */ inline const ErrorCode& GetCode() const{ return m_code; } /** * <p>The error code.</p> */ inline bool CodeHasBeenSet() const { return m_codeHasBeenSet; } /** * <p>The error code.</p> */ inline void SetCode(const ErrorCode& value) { m_codeHasBeenSet = true; m_code = value; } /** * <p>The error code.</p> */ inline void SetCode(ErrorCode&& value) { m_codeHasBeenSet = true; m_code = std::move(value); } /** * <p>The error code.</p> */ inline ErrorDetails& WithCode(const ErrorCode& value) { SetCode(value); return *this;} /** * <p>The error code.</p> */ inline ErrorDetails& WithCode(ErrorCode&& value) { SetCode(std::move(value)); return *this;} /** * <p>The error message.</p> */ inline const Aws::String& GetMessage() const{ return m_message; } /** * <p>The error message.</p> */ inline bool MessageHasBeenSet() const { return m_messageHasBeenSet; } /** * <p>The error message.</p> */ inline void SetMessage(const Aws::String& value) { m_messageHasBeenSet = true; m_message = value; } /** * <p>The error message.</p> */ inline void SetMessage(Aws::String&& value) { m_messageHasBeenSet = true; m_message = std::move(value); } /** * <p>The error message.</p> */ inline void SetMessage(const char* value) { m_messageHasBeenSet = true; m_message.assign(value); } /** * <p>The error message.</p> */ inline ErrorDetails& WithMessage(const Aws::String& value) { SetMessage(value); return *this;} /** * <p>The error message.</p> */ inline ErrorDetails& WithMessage(Aws::String&& value) { SetMessage(std::move(value)); return *this;} /** * <p>The error message.</p> */ inline ErrorDetails& WithMessage(const char* value) { SetMessage(value); return *this;} /** * <p> A list of detailed errors. </p> */ inline const Aws::Vector<DetailedError>& GetDetails() const{ return m_details; } /** * <p> A list of detailed errors. </p> */ inline bool DetailsHasBeenSet() const { return m_detailsHasBeenSet; } /** * <p> A list of detailed errors. </p> */ inline void SetDetails(const Aws::Vector<DetailedError>& value) { m_detailsHasBeenSet = true; m_details = value; } /** * <p> A list of detailed errors. </p> */ inline void SetDetails(Aws::Vector<DetailedError>&& value) { m_detailsHasBeenSet = true; m_details = std::move(value); } /** * <p> A list of detailed errors. </p> */ inline ErrorDetails& WithDetails(const Aws::Vector<DetailedError>& value) { SetDetails(value); return *this;} /** * <p> A list of detailed errors. 
</p> */ inline ErrorDetails& WithDetails(Aws::Vector<DetailedError>&& value) { SetDetails(std::move(value)); return *this;} /** * <p> A list of detailed errors. </p> */ inline ErrorDetails& AddDetails(const DetailedError& value) { m_detailsHasBeenSet = true; m_details.push_back(value); return *this; } /** * <p> A list of detailed errors. </p> */ inline ErrorDetails& AddDetails(DetailedError&& value) { m_detailsHasBeenSet = true; m_details.push_back(std::move(value)); return *this; } private: ErrorCode m_code; bool m_codeHasBeenSet; Aws::String m_message; bool m_messageHasBeenSet; Aws::Vector<DetailedError> m_details; bool m_detailsHasBeenSet; }; } // namespace Model } // namespace IoTSiteWise } // namespace Aws
apache-2.0
brunobottazzini/soletta
src/modules/linux-micro/console/console.c
12795
/* * This file is part of the Soletta™ Project * * Copyright (C) 2015 Intel Corporation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <ctype.h> #include <errno.h> #include <fcntl.h> #include <inttypes.h> #include <limits.h> #include <signal.h> #include <stdio.h> #include <stdlib.h> #include <sys/ioctl.h> #include <sys/stat.h> #include <termios.h> #include <unistd.h> #define SOL_LOG_DOMAIN &_log_domain #include "sol-log-internal.h" SOL_LOG_INTERNAL_DECLARE_STATIC(_log_domain, "linux-micro-console"); #include "sol-mainloop.h" #include "sol-platform-linux-micro.h" #include "sol-util-file.h" #include "sol-util-internal.h" #include "sol-vector.h" struct instance { struct sol_platform_linux_fork_run *fork_run; struct sol_timeout *respawn_timeout; char tty[]; }; #define RESPAWN_TIMEOUT_MS 1000 static struct sol_ptr_vector instances = SOL_PTR_VECTOR_INIT; static char *getty_cmd = NULL; static char *term = NULL; static char *baudrate = NULL; #define BAUDRATE_DEFAULT "115200,38400,9600" static const char shell[] = "/bin/sh"; static void console_spawn(struct instance *inst); static const char * find_getty_cmd(void) { static const char *cmds[] = { "/usr/bin/agetty", "/usr/sbin/agetty", "/bin/agetty", "/sbin/agetty", "/usr/bin/getty", "/usr/sbin/getty", "/bin/getty", "/sbin/getty", }; const char **itr, **itr_end; itr = cmds; itr_end = itr + sol_util_array_size(cmds); for (; itr < itr_end; itr++) { if (access(*itr, R_OK | X_OK) == 0) return *itr; } SOL_ERR("no getty command found"); return NULL; } static bool on_respawn_timeout(void *data) { struct instance *inst = data; inst->respawn_timeout = NULL; console_spawn(inst); return false; } static const char * get_term_for_tty(const char *tty) { if (streqn(tty, "tty", sizeof("tty") - 1)) { const char *p = tty + sizeof("tty") - 1; if (*p >= '0' && *p <= '9') return "linux"; } return "vt102"; } /* * do things getty would do to spawn a shell, basically become the * session leader of the given tty, then make stdio/stdout/stderr use * it. */ static void do_shell(const char *tty) { char term_buf[128]; const char *envp[] = { term_buf, "HOME=/", NULL, }; char tty_path[PATH_MAX]; pid_t pid, tsid; int r; SOL_INF("no getty, exec shell: %s", shell); r = snprintf(term_buf, sizeof(term_buf), "TERM=%s", term ? term : get_term_for_tty(tty)); if (r < 0 || r >= (int)sizeof(term_buf)) envp[0] = "TERM=vt102"; else envp[0] = term_buf; pid = setsid(); if (pid < 0) { int fd; SOL_WRN("could not setsid(): %s", sol_util_strerrora(errno)); pid = getpid(); fd = open("/dev/tty", O_RDWR | O_NONBLOCK); if (fd >= 0) { sighandler_t oldsig; /* man:tty(4) * TIOCNOTTY: * Detach the calling process from its controlling terminal. * * If the process is the session leader, then SIGHUP and * SIGCONT signals are sent to the foreground process * group and all processes in the current session lose * their controlling tty. 
*/ oldsig = signal(SIGHUP, SIG_IGN); r = ioctl(fd, TIOCNOTTY); close(fd); signal(SIGHUP, oldsig); SOL_INT_CHECK_GOTO(r, < 0, end); } } r = snprintf(tty_path, sizeof(tty_path), "/dev/%s", tty); SOL_INT_CHECK_GOTO(r, < 0, end); SOL_INT_CHECK_GOTO(r, >= (int)sizeof(tty_path), end); close(STDIN_FILENO); r = open(tty_path, O_RDWR | O_NONBLOCK); SOL_INT_CHECK_GOTO(r, < 0, end); if (r != 0) { r = dup2(r, 0); SOL_INT_CHECK_GOTO(r, < 0, end); } r = dup2(STDIN_FILENO, 1); SOL_INT_CHECK_GOTO(r, < 0, end); r = dup2(STDIN_FILENO, 2); SOL_INT_CHECK_GOTO(r, < 0, end); r = fchown(STDIN_FILENO, 0, 0); SOL_INT_CHECK_GOTO(r, < 0, end); fchmod(STDIN_FILENO, 0620); tsid = tcgetsid(STDIN_FILENO); if (tsid < 0) { r = ioctl(STDIN_FILENO, TIOCSCTTY, 1L); SOL_INT_CHECK_GOTO(r, < 0, end); } r = tcsetpgrp(STDIN_FILENO, pid); SOL_INT_CHECK_GOTO(r, < 0, end); end: ioctl(STDIN_FILENO, TIOCSCTTY, 0); r = chdir("/"); if (r < 0) SOL_WRN("Failed to change the current directory to '/'"); execle(shell, shell, NULL, envp); } static void on_fork(void *data) { struct instance *inst = data; if (!getty_cmd || streq(getty_cmd, shell)) do_shell(inst->tty); else { const char *use_term = term; const char *use_baudrate = baudrate; if (!use_term) use_term = get_term_for_tty(inst->tty); if (!use_baudrate) use_baudrate = BAUDRATE_DEFAULT; SOL_DBG("exec %s -L %s %s %s", getty_cmd, use_baudrate, inst->tty, use_term); execl(getty_cmd, getty_cmd, "-L", use_baudrate, inst->tty, use_term, NULL); } sol_platform_linux_fork_run_exit(EXIT_FAILURE); } static void on_fork_exit(void *data, uint64_t pid, int status) { struct instance *inst = data; SOL_DBG("tty=%s pid=%" PRIu64 " exited with status=%d. Respawn on timeout...", inst->tty, pid, status); if (inst->respawn_timeout) sol_timeout_del(inst->respawn_timeout); inst->respawn_timeout = sol_timeout_add(RESPAWN_TIMEOUT_MS, on_respawn_timeout, inst); inst->fork_run = NULL; } static void parse_var(const char *start, size_t len) { static const struct spec { const char *prefix; size_t prefixlen; char **storage; } specs[] = { #define SPEC(str, storage) \ { str, sizeof(str) - 1, &storage \ } SPEC("getty=", getty_cmd), SPEC("baudrate=", baudrate), SPEC("term=", term), #undef SPEC }; const struct spec *itr, *itr_end; itr = specs; itr_end = itr + sol_util_array_size(specs); for (; itr < itr_end; itr++) { if (itr->prefixlen < len && memcmp(itr->prefix, start, itr->prefixlen) == 0) { free(*(itr->storage)); *(itr->storage) = strndup(start + itr->prefixlen, len - itr->prefixlen); break; } } } static void parse_kcmdline_entry(const char *start, size_t len) { const char prefix[] = "sol-console."; const size_t prefixlen = strlen(prefix); if (len < prefixlen) return; if (memcmp(start, prefix, prefixlen) != 0) return; start += prefixlen; len -= prefixlen; parse_var(start, len); } static int load_kcmdline(void) { char buf[4096] = {}; const char *p, *end, *start; int err; err = sol_util_read_file("/proc/cmdline", "%4095[^\n]", buf); if (err < 1) return err; start = buf; end = start + strlen(buf); for (p = start; p < end; p++) { if (isblank(*p) && start < p) { parse_kcmdline_entry(start, p - start); start = p + 1; } } if (start < end) parse_kcmdline_entry(start, end - start); return 0; } static void console_spawn(struct instance *inst) { inst->fork_run = sol_platform_linux_fork_run(on_fork, on_fork_exit, inst); } static void add_active_console(const char *start, size_t len) { struct instance *inst; int r; uint16_t i; SOL_PTR_VECTOR_FOREACH_IDX (&instances, inst, i) { size_t cur_len = strlen(inst->tty); if (cur_len == len 
&& memcmp(inst->tty, start, len) == 0) return; } inst = malloc(sizeof(struct instance) + len + 1); SOL_NULL_CHECK(inst); memcpy(inst->tty, start, len); inst->tty[len] = '\0'; r = sol_ptr_vector_append(&instances, inst); SOL_INT_CHECK_GOTO(r, < 0, err_append); console_spawn(inst); return; err_append: free(inst); } static int load_active_consoles(void) { char buf[4096] = {}; const char *p, *end, *start; int err; err = sol_util_read_file("/sys/class/tty/console/active", "%4095[^\n]", buf); if (err < 1) return err; start = buf; end = start + strlen(buf); SOL_DBG("active consoles: '%s'", buf); for (p = start; p < end; p++) { if (isblank(*p) && start < p) { add_active_console(start, p - start); start = p + 1; } } if (start < end) add_active_console(start, end - start); return 0; } static int console_start(const struct sol_platform_linux_micro_module *mod, const char *service) { int err = 0; if (sol_ptr_vector_get_len(&instances) > 0) goto end; err = load_kcmdline(); if (err < 0) goto error; if (!getty_cmd) { const char *cmd = find_getty_cmd(); if (cmd) getty_cmd = strdup(cmd); } if (!baudrate) baudrate = strdup(BAUDRATE_DEFAULT); err = load_active_consoles(); if (err < 0) goto error; end: sol_platform_linux_micro_inform_service_state(service, SOL_PLATFORM_SERVICE_STATE_ACTIVE); return 0; error: sol_platform_linux_micro_inform_service_state(service, SOL_PLATFORM_SERVICE_STATE_FAILED); return err; } static int console_stop(const struct sol_platform_linux_micro_module *module, const char *service, bool force_immediate) { struct instance *inst; uint16_t i; if (sol_ptr_vector_get_len(&instances) == 0) goto end; SOL_PTR_VECTOR_FOREACH_IDX (&instances, inst, i) { if (inst->fork_run) { sol_platform_linux_fork_run_stop(inst->fork_run); inst->fork_run = NULL; } if (inst->respawn_timeout) { sol_timeout_del(inst->respawn_timeout); inst->respawn_timeout = NULL; } free(inst); } sol_ptr_vector_clear(&instances); end: if (getty_cmd) { free(getty_cmd); getty_cmd = NULL; } if (term) { free(term); term = NULL; } if (baudrate) { free(baudrate); baudrate = NULL; } sol_platform_linux_micro_inform_service_state(service, SOL_PLATFORM_SERVICE_STATE_INACTIVE); return 0; } static int console_restart(const struct sol_platform_linux_micro_module *module, const char *service) { sol_platform_linux_micro_inform_service_state(service, SOL_PLATFORM_SERVICE_STATE_ACTIVE); return 0; } static int console_init(const struct sol_platform_linux_micro_module *module, const char *service) { SOL_LOG_INTERNAL_INIT_ONCE; return 0; } /* * spawn getty/agetty or /bin/sh on active consoles. * * active consoles are defined in the kernel command line with the syntax: * * console=tty0 * starts a console on /dev/tty0 * * console=ttyS0 * console=ttyS0,9600n8 * starts a console on /dev/ttyS0 (serial line), the second * version specifies the baudrate, parity and number of bits. * * console=tty0 console=ttyS0 * multiple entries are allowed, the first is used as the * /dev/console while the others replicate kernel messages. * * See https://www.kernel.org/doc/Documentation/serial-console.txt * * The following kernel command line extensions are supported: * * sol-console.getty=/usr/bin/getty * sol-console.getty=/bin/sh * specify getty command to be used, if not given then * various well-known paths are searched. The special entry * /bin/sh is used to start a shell without getty, this is * useful for constrained systems where getty and login would * add too much overhead. * * sol-console.term=vt100 * specify the $TERM to use for getty or shell. 
Defaults to * linux if tty<N> or vt102 otherwise. * * sol-console.baudrate=115200,9600 * specify the baudrate to give to getty. * */ SOL_PLATFORM_LINUX_MICRO_MODULE(CONSOLE, .name = "console", .init = console_init, .start = console_start, .restart = console_restart, .stop = console_stop, );
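The closing comment documents the `sol-console.*` kernel command line extensions that `parse_kcmdline_entry()` and `parse_var()` consume. To make that format concrete, here is a small hedged Python sketch that extracts the same keys from a cmdline string; the sample cmdline is made up, and this is an illustration of the parsing rule, not the module's code.

```python
# Illustration of the sol-console.* kernel command line parsing described above,
# mirroring parse_kcmdline_entry()/parse_var(). The sample cmdline is made up.
PREFIX = "sol-console."
KEYS = ("getty", "term", "baudrate")

def parse_sol_console(cmdline):
    opts = {}
    for entry in cmdline.split():
        if not entry.startswith(PREFIX):
            continue
        body = entry[len(PREFIX):]
        key, sep, value = body.partition("=")
        if sep and key in KEYS:
            opts[key] = value
    return opts

sample = "console=tty0 console=ttyS0,115200n8 sol-console.getty=/bin/sh sol-console.term=vt100"
print(parse_sol_console(sample))
# {'getty': '/bin/sh', 'term': 'vt100'}
```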
apache-2.0
sergecodd/FireFox-OS
B2G/gecko/gfx/thebes/gfxImageSurface.cpp
10870
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 4 -*- * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "prmem.h" #include "gfxAlphaRecovery.h" #include "gfxImageSurface.h" #include "cairo.h" #include "mozilla/gfx/2D.h" #include "gfx2DGlue.h" using namespace mozilla::gfx; gfxImageSurface::gfxImageSurface() : mSize(0, 0), mOwnsData(false), mFormat(ImageFormatUnknown), mStride(0) { } void gfxImageSurface::InitFromSurface(cairo_surface_t *csurf) { mSize.width = cairo_image_surface_get_width(csurf); mSize.height = cairo_image_surface_get_height(csurf); mData = cairo_image_surface_get_data(csurf); mFormat = (gfxImageFormat) cairo_image_surface_get_format(csurf); mOwnsData = false; mStride = cairo_image_surface_get_stride(csurf); Init(csurf, true); } gfxImageSurface::gfxImageSurface(unsigned char *aData, const gfxIntSize& aSize, long aStride, gfxImageFormat aFormat) { InitWithData(aData, aSize, aStride, aFormat); } void gfxImageSurface::MakeInvalid() { mSize = gfxIntSize(-1, -1); mData = NULL; mStride = 0; } void gfxImageSurface::InitWithData(unsigned char *aData, const gfxIntSize& aSize, long aStride, gfxImageFormat aFormat) { mSize = aSize; mOwnsData = false; mData = aData; mFormat = aFormat; mStride = aStride; if (!CheckSurfaceSize(aSize)) MakeInvalid(); cairo_surface_t *surface = cairo_image_surface_create_for_data((unsigned char*)mData, (cairo_format_t)mFormat, mSize.width, mSize.height, mStride); // cairo_image_surface_create_for_data can return a 'null' surface // in out of memory conditions. The gfxASurface::Init call checks // the surface it receives to see if there is an error with the // surface and handles it appropriately. That is why there is // no check here. Init(surface); } static void* TryAllocAlignedBytes(size_t aSize) { // Use fallible allocators here #if defined(HAVE_POSIX_MEMALIGN) void* ptr; // Try to align for fast alpha recovery. This should only help // cairo too, can't hurt. return moz_posix_memalign(&ptr, 1 << gfxAlphaRecovery::GoodAlignmentLog2(), aSize) ? nullptr : ptr; #else // Oh well, hope that luck is with us in the allocator return moz_malloc(aSize); #endif } gfxImageSurface::gfxImageSurface(const gfxIntSize& size, gfxImageFormat format, bool aClear) : mSize(size), mOwnsData(false), mData(nullptr), mFormat(format) { mStride = ComputeStride(); if (!CheckSurfaceSize(size)) MakeInvalid(); // if we have a zero-sized surface, just leave mData nullptr if (mSize.height * mStride > 0) { // This can fail to allocate memory aligned as we requested, // or it can fail to allocate any memory at all. 
mData = (unsigned char *) TryAllocAlignedBytes(mSize.height * mStride); if (!mData) return; if (aClear) memset(mData, 0, mSize.height * mStride); } mOwnsData = true; cairo_surface_t *surface = cairo_image_surface_create_for_data((unsigned char*)mData, (cairo_format_t)format, mSize.width, mSize.height, mStride); Init(surface); if (mSurfaceValid) { RecordMemoryUsed(mSize.height * ComputeStride() + sizeof(gfxImageSurface)); } } gfxImageSurface::gfxImageSurface(cairo_surface_t *csurf) { mSize.width = cairo_image_surface_get_width(csurf); mSize.height = cairo_image_surface_get_height(csurf); mData = cairo_image_surface_get_data(csurf); mFormat = (gfxImageFormat) cairo_image_surface_get_format(csurf); mOwnsData = false; mStride = cairo_image_surface_get_stride(csurf); Init(csurf, true); } gfxImageSurface::~gfxImageSurface() { if (mOwnsData) free(mData); } /*static*/ long gfxImageSurface::ComputeStride(const gfxIntSize& aSize, gfxImageFormat aFormat) { long stride; if (aFormat == ImageFormatARGB32) stride = aSize.width * 4; else if (aFormat == ImageFormatRGB24) stride = aSize.width * 4; else if (aFormat == ImageFormatRGB16_565) stride = aSize.width * 2; else if (aFormat == ImageFormatA8) stride = aSize.width; else if (aFormat == ImageFormatA1) { stride = (aSize.width + 7) / 8; } else { NS_WARNING("Unknown format specified to gfxImageSurface!"); stride = aSize.width * 4; } stride = ((stride + 3) / 4) * 4; return stride; } // helper function for the CopyFrom methods static void CopyForStride(unsigned char* aDest, unsigned char* aSrc, const gfxIntSize& aSize, long aDestStride, long aSrcStride) { if (aDestStride == aSrcStride) { memcpy (aDest, aSrc, aSrcStride * aSize.height); } else { int lineSize = NS_MIN(aDestStride, aSrcStride); for (int i = 0; i < aSize.height; i++) { unsigned char* src = aSrc + aSrcStride * i; unsigned char* dst = aDest + aDestStride * i; memcpy (dst, src, lineSize); } } } // helper function for the CopyFrom methods static bool FormatsAreCompatible(gfxASurface::gfxImageFormat a1, gfxASurface::gfxImageFormat a2) { if (a1 != a2 && !(a1 == gfxASurface::ImageFormatARGB32 && a2 == gfxASurface::ImageFormatRGB24) && !(a1 == gfxASurface::ImageFormatRGB24 && a2 == gfxASurface::ImageFormatARGB32)) { return false; } return true; } bool gfxImageSurface::CopyFrom (SourceSurface *aSurface) { mozilla::RefPtr<DataSourceSurface> data = aSurface->GetDataSurface(); if (!data) { return false; } gfxIntSize size(data->GetSize().width, data->GetSize().height); if (size != mSize) { return false; } if (!FormatsAreCompatible(SurfaceFormatToImageFormat(aSurface->GetFormat()), mFormat)) { return false; } CopyForStride(mData, data->GetData(), size, mStride, data->Stride()); return true; } bool gfxImageSurface::CopyFrom(gfxImageSurface *other) { if (other->mSize != mSize) { return false; } if (!FormatsAreCompatible(other->mFormat, mFormat)) { return false; } CopyForStride(mData, other->mData, mSize, mStride, other->mStride); return true; } already_AddRefed<gfxSubimageSurface> gfxImageSurface::GetSubimage(const gfxRect& aRect) { gfxRect r(aRect); r.Round(); unsigned char* subData = Data() + (Stride() * (int)r.Y()) + (int)r.X() * gfxASurface::BytePerPixelFromFormat(Format()); nsRefPtr<gfxSubimageSurface> image = new gfxSubimageSurface(this, subData, gfxIntSize((int)r.Width(), (int)r.Height())); return image.forget().get(); } gfxSubimageSurface::gfxSubimageSurface(gfxImageSurface* aParent, unsigned char* aData, const gfxIntSize& aSize) : gfxImageSurface(aData, aSize, aParent->Stride(), aParent->Format()) , 
mParent(aParent) { } already_AddRefed<gfxImageSurface> gfxImageSurface::GetAsImageSurface() { nsRefPtr<gfxImageSurface> surface = this; return surface.forget(); } void gfxImageSurface::MovePixels(const nsIntRect& aSourceRect, const nsIntPoint& aDestTopLeft) { const nsIntRect bounds(0, 0, mSize.width, mSize.height); nsIntPoint offset = aDestTopLeft - aSourceRect.TopLeft(); nsIntRect clippedSource = aSourceRect; clippedSource.IntersectRect(clippedSource, bounds); nsIntRect clippedDest = clippedSource + offset; clippedDest.IntersectRect(clippedDest, bounds); const nsIntRect dest = clippedDest; const nsIntRect source = dest - offset; // NB: this relies on IntersectRect() and operator+/- preserving // x/y for empty rectangles NS_ABORT_IF_FALSE(bounds.Contains(dest) && bounds.Contains(source) && aSourceRect.Contains(source) && nsIntRect(aDestTopLeft, aSourceRect.Size()).Contains(dest) && source.Size() == dest.Size() && offset == (dest.TopLeft() - source.TopLeft()), "Messed up clipping, crash or corruption will follow"); if (source.IsEmpty() || source.IsEqualInterior(dest)) { return; } long naturalStride = ComputeStride(mSize, mFormat); if (mStride == naturalStride && dest.width == bounds.width) { // Fast path: this is a vertical shift of some rows in a // "normal" image surface. We can directly memmove and // hopefully stay in SIMD land. unsigned char* dst = mData + dest.y * mStride; const unsigned char* src = mData + source.y * mStride; size_t nBytes = dest.height * mStride; memmove(dst, src, nBytes); return; } // Slow(er) path: have to move row-by-row. const int32_t bpp = BytePerPixelFromFormat(mFormat); const size_t nRowBytes = dest.width * bpp; // dstRow points at the first pixel within the current destination // row, and similarly for srcRow. endSrcRow is one row beyond the // last row we need to copy. stride is either +mStride or // -mStride, depending on which direction we're copying. unsigned char* dstRow; unsigned char* srcRow; unsigned char* endSrcRow; // NB: this may point outside the image long stride; if (dest.y > source.y) { // We're copying down from source to dest, so walk backwards // starting from the last rows to avoid stomping pixels we // need. stride = -mStride; dstRow = mData + dest.x * bpp + (dest.YMost() - 1) * mStride; srcRow = mData + source.x * bpp + (source.YMost() - 1) * mStride; endSrcRow = mData + source.x * bpp + (source.y - 1) * mStride; } else { stride = mStride; dstRow = mData + dest.x * bpp + dest.y * mStride; srcRow = mData + source.x * bpp + source.y * mStride; endSrcRow = mData + source.x * bpp + source.YMost() * mStride; } for (; srcRow != endSrcRow; dstRow += stride, srcRow += stride) { memmove(dstRow, srcRow, nRowBytes); } }
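`gfxImageSurface::ComputeStride` above picks a bytes-per-pixel value by image format and then rounds each row up to a 4-byte boundary. The following hedged Python sketch restates that rule for sanity-checking buffer sizes; it mirrors the C++ shown here, not cairo's own stride logic.

```python
# Stride rule mirrored from gfxImageSurface::ComputeStride above:
# bytes-per-pixel by format, then round each row up to a 4-byte boundary.
BYTES_PER_PIXEL = {
    "ARGB32": 4,
    "RGB24": 4,      # stored as 4 bytes per pixel with the high byte unused
    "RGB16_565": 2,
    "A8": 1,
}

def compute_stride(width, fmt):
    if fmt == "A1":
        row = (width + 7) // 8       # 1 bit per pixel, packed
    else:
        row = width * BYTES_PER_PIXEL.get(fmt, 4)  # unknown formats fall back to 4
    return ((row + 3) // 4) * 4      # align to 4 bytes

print(compute_stride(10, "RGB16_565"))  # 10 * 2 = 20, already 4-byte aligned
print(compute_stride(10, "A8"))         # 10 -> padded to 12
```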
apache-2.0
frreiss/tensorflow-fred
tensorflow/compiler/mlir/tensorflow/transforms/test_resource_alias_analysis.cc
4295
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include <cstddef> #include <cstdint> #include <string> #include <utility> #include "llvm/ADT/DenseMap.h" #include "llvm/ADT/STLExtras.h" #include "llvm/ADT/SmallVector.h" #include "llvm/Support/Debug.h" #include "mlir/Pass/Pass.h" // from @llvm-project #include "mlir/Pass/PassManager.h" // from @llvm-project #include "mlir/Support/LLVM.h" // from @llvm-project #include "mlir/Transforms/Passes.h" // from @llvm-project #include "tensorflow/compiler/mlir/tensorflow/analysis/resource_alias_analysis.h" #include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h" namespace mlir { namespace TF { namespace { // A pass that annotates each operation with a resource type result with the // aliasing values for each such result. Each value is assigned a unique ID, and // that ID is used to annotate the operations. struct TestResourceAliasAnalysis : public TF::PerFunctionAggregateAnalysisConsumerPass< TestResourceAliasAnalysis, TF::ResourceAliasAnalysis> { StringRef getArgument() const final { return "tf-test-resource-alias-analysis"; } StringRef getDescription() const final { return "Add remarks based on resource alias analysis result, for testing " "purpose."; } void runOnFunction(FuncOp func, const TF::ResourceAliasAnalysis::Info& analysis) { int64_t next_id = 0; llvm::SmallDenseMap<Value, int64_t, 8> ids; auto assign_id = [&](Value value) { if (ids.find(value) == ids.end()) ids.insert({value, next_id++}); }; auto get_id = [&](Value value) -> int64_t { auto it = ids.find(value); assert(it != ids.end()); return it->second; }; auto print_aliases = [&](InFlightDiagnostic& diag, Value value) { diag << ", ID " << get_id(value) << " : "; if (analysis.IsUnknownResource(value)) { diag << "Unknown"; } else { auto aliases = llvm::to_vector<4>(analysis.GetResourceAliases(value)); llvm::sort(aliases, [&](Value v1, Value v2) { return get_id(v1) < get_id(v2); }); llvm::interleaveComma(aliases, diag, [&](Value v) { diag << get_id(v); }); } }; // Assign a unique ID to each value seen in this function. func.walk([&](Operation* op) { // For all attached regions, assign ID to the region arguments. for (Region& region : op->getRegions()) { for (auto region_arg : filter_resources(region.getArguments())) assign_id(region_arg); } // Assign ID for all results. for (auto result : filter_resources(op->getResults())) assign_id(result); }); // Now walk each operation, and annotate it wil remarks for aliases for // each resource type result func.walk([&](Operation* op) { // For all attached regions, assign ID to the region arguments. 
for (Region& region : op->getRegions()) { for (auto region_arg : filter_resources(region.getArguments())) { InFlightDiagnostic diag = op->emitRemark("Region #") << region.getRegionNumber() << ", Arg #" << region_arg.getArgNumber(); print_aliases(diag, region_arg); } } for (auto result : filter_resources(op->getResults())) { InFlightDiagnostic diag = op->emitRemark("Result #") << result.getResultNumber(); print_aliases(diag, result); } }); } }; static mlir::PassRegistration<TestResourceAliasAnalysis> pass; } // anonymous namespace } // namespace TF } // namespace mlir
apache-2.0
daviddoria/itkHoughTransform
Modules/Nonunit/Review/include/itkFastSymmetricForcesDemonsRegistrationFilter.h
7832
/*========================================================================= * * Copyright Insight Software Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef __itkFastSymmetricForcesDemonsRegistrationFilter_h #define __itkFastSymmetricForcesDemonsRegistrationFilter_h #include "itkPDEDeformableRegistrationFilter.h" #include "itkESMDemonsRegistrationFunction.h" #include "itkMultiplyByConstantImageFilter.h" #include "itkExponentialDeformationFieldImageFilter.h" namespace itk { /** \class FastSymmetricForcesDemonsRegistrationFilter * \brief Deformably register two images using a symmetric forces demons algorithm. * * This class was contributed by Tom Vercauteren, INRIA & Mauna Kea Technologies * based on a variation of the DemonsRegistrationFilter. * * FastSymmetricForcesDemonsRegistrationFilter implements the demons deformable algorithm that * register two images by computing the deformation field which will map a * moving image onto a fixed image. * * A deformation field is represented as a image whose pixel type is some * vector type with at least N elements, where N is the dimension of * the fixed image. The vector type must support element access via operator * []. It is assumed that the vector elements behave like floating point * scalars. * * This class is templated over the fixed image type, moving image type * and the deformation field type. * * The input fixed and moving images are set via methods SetFixedImage * and SetMovingImage respectively. An initial deformation field maybe set via * SetInitialDeformationField or SetInput. If no initial field is set, * a zero field is used as the initial condition. * * The output deformation field can be obtained via methods GetOutput * or GetDeformationField. * * This class make use of the finite difference solver hierarchy. Update * for each iteration is computed in DemonsRegistrationFunction. * * \author Tom Vercauteren, INRIA & Mauna Kea Technologies * * This implementation was taken from the Insight Journal paper: * http://hdl.handle.net/1926/510 * * \warning This filter assumes that the fixed image type, moving image type * and deformation field type all have the same number of dimensions. * * \sa DemonsRegistrationFilter * \sa DemonsRegistrationFunction * \ingroup DeformableImageRegistration MultiThreaded * \ingroup ITK-Review */ template< class TFixedImage, class TMovingImage, class TDeformationField > class ITK_EXPORT FastSymmetricForcesDemonsRegistrationFilter: public PDEDeformableRegistrationFilter< TFixedImage, TMovingImage, TDeformationField > { public: /** Standard class typedefs. */ typedef FastSymmetricForcesDemonsRegistrationFilter Self; typedef PDEDeformableRegistrationFilter< TFixedImage, TMovingImage, TDeformationField > Superclass; typedef SmartPointer< Self > Pointer; typedef SmartPointer< const Self > ConstPointer; /** Method for creation through the object factory. */ itkNewMacro(Self); /** Run-time type information (and related methods). 
*/ itkTypeMacro(FastSymmetricForcesDemonsRegistrationFilter, PDEDeformableRegistrationFilter); /** FixedImage image type. */ typedef typename Superclass::FixedImageType FixedImageType; typedef typename Superclass::FixedImagePointer FixedImagePointer; /** MovingImage image type. */ typedef typename Superclass::MovingImageType MovingImageType; typedef typename Superclass::MovingImagePointer MovingImagePointer; /** Deformation field type. */ typedef typename Superclass::DeformationFieldType DeformationFieldType; typedef typename Superclass::DeformationFieldPointer DeformationFieldPointer; /** Get the metric value. The metric value is the mean square difference * in intensity between the fixed image and transforming moving image * computed over the the overlapping region between the two images. * This value is calculated for the current iteration */ virtual double GetMetric() const; virtual const double & GetRMSChange() const; /** DemonsRegistrationFilterFunction type. * * FIXME: Why is this the only permissible function ? * */ typedef ESMDemonsRegistrationFunction< FixedImageType, MovingImageType, DeformationFieldType > DemonsRegistrationFunctionType; typedef typename DemonsRegistrationFunctionType::GradientType GradientType; virtual void SetUseGradientType(GradientType gtype); virtual GradientType GetUseGradientType() const; /** Set/Get the threshold below which the absolute difference of * intensity yields a match. When the intensities match between a * moving and fixed image pixel, the update vector (for that * iteration) will be the zero vector. Default is 0.001. */ virtual void SetIntensityDifferenceThreshold(double); virtual double GetIntensityDifferenceThreshold() const; virtual void SetMaximumUpdateStepLength(double); virtual double GetMaximumUpdateStepLength() const; protected: FastSymmetricForcesDemonsRegistrationFilter(); ~FastSymmetricForcesDemonsRegistrationFilter() {} void PrintSelf(std::ostream & os, Indent indent) const; /** Initialize the state of filter and equation before each iteration. */ virtual void InitializeIteration(); /** This method allocates storage in m_UpdateBuffer. It is called from * FiniteDifferenceFilter::GenerateData(). */ virtual void AllocateUpdateBuffer(); /** FiniteDifferenceFunction type. */ typedef typename Superclass::FiniteDifferenceFunctionType FiniteDifferenceFunctionType; /** Take timestep type from the FiniteDifferenceFunction. */ typedef typename FiniteDifferenceFunctionType::TimeStepType TimeStepType; /** Apply update. */ virtual void ApplyUpdate(TimeStepType dt); /** other typedefs */ typedef MultiplyByConstantImageFilter< DeformationFieldType, TimeStepType, DeformationFieldType > MultiplyByConstantType; typedef AddImageFilter< DeformationFieldType, DeformationFieldType, DeformationFieldType > AdderType; typedef typename MultiplyByConstantType::Pointer MultiplyByConstantPointer; typedef typename AdderType::Pointer AdderPointer; private: FastSymmetricForcesDemonsRegistrationFilter(const Self &); //purposely not // implemented void operator=(const Self &); //purposely not // implemented /** Downcast the DifferenceFunction using a dynamic_cast to ensure that it is of the correct type. * this method will throw an exception if the function is not of the expected type. 
*/ DemonsRegistrationFunctionType * DownCastDifferenceFunctionType(); const DemonsRegistrationFunctionType * DownCastDifferenceFunctionType() const; MultiplyByConstantPointer m_Multiplier; AdderPointer m_Adder; }; } // end namespace itk #ifndef ITK_MANUAL_INSTANTIATION #include "itkFastSymmetricForcesDemonsRegistrationFilter.txx" #endif #endif
apache-2.0
googleapis/google-cloud-dotnet
apis/Google.Cloud.CertificateManager.V1/Google.Cloud.CertificateManager.V1.GeneratedSnippets/CertificateManagerClient.GetCertificateMapResourceNamesAsyncSnippet.g.cs
1884
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.CertificateManager.V1.Snippets { // [START certificatemanager_v1_generated_CertificateManager_GetCertificateMap_async_flattened_resourceNames] using Google.Cloud.CertificateManager.V1; using System.Threading.Tasks; public sealed partial class GeneratedCertificateManagerClientSnippets { /// <summary>Snippet for GetCertificateMapAsync</summary> /// <remarks> /// This snippet has been automatically generated for illustrative purposes only. /// It may require modifications to work in your environment. /// </remarks> public async Task GetCertificateMapResourceNamesAsync() { // Create client CertificateManagerClient certificateManagerClient = await CertificateManagerClient.CreateAsync(); // Initialize request argument(s) CertificateMapName name = CertificateMapName.FromProjectLocationCertificateMap("[PROJECT]", "[LOCATION]", "[CERTIFICATE_MAP]"); // Make the request CertificateMap response = await certificateManagerClient.GetCertificateMapAsync(name); } } // [END certificatemanager_v1_generated_CertificateManager_GetCertificateMap_async_flattened_resourceNames] }
apache-2.0
srdo/storm
storm-server/src/main/java/org/apache/storm/scheduler/multitenant/DefaultPool.java
8903
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.scheduler.multitenant; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.Set; import org.apache.storm.scheduler.SchedulerAssignment; import org.apache.storm.scheduler.TopologyDetails; import org.apache.storm.scheduler.WorkerSlot; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A pool of machines that anyone can use, but topologies are not isolated */ public class DefaultPool extends NodePool { private static final Logger LOG = LoggerFactory.getLogger(DefaultPool.class); private Set<Node> _nodes = new HashSet<>(); private HashMap<String, TopologyDetails> _tds = new HashMap<>(); @Override public void addTopology(TopologyDetails td) { String topId = td.getId(); LOG.debug("Adding in Topology {}", topId); _tds.put(topId, td); SchedulerAssignment assignment = _cluster.getAssignmentById(topId); if (assignment != null) { for (WorkerSlot ws : assignment.getSlots()) { Node n = _nodeIdToNode.get(ws.getNodeId()); _nodes.add(n); } } } @Override public boolean canAdd(TopologyDetails td) { return true; } @Override public Collection<Node> takeNodes(int nodesNeeded) { HashSet<Node> ret = new HashSet<>(); LinkedList<Node> sortedNodes = new LinkedList<>(_nodes); Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC); for (Node n : sortedNodes) { if (nodesNeeded <= ret.size()) { break; } if (n.isAlive()) { n.freeAllSlots(_cluster); _nodes.remove(n); ret.add(n); } } return ret; } @Override public int nodesAvailable() { int total = 0; for (Node n : _nodes) { if (n.isAlive()) total++; } return total; } @Override public int slotsAvailable() { return Node.countTotalSlotsAlive(_nodes); } @Override public NodeAndSlotCounts getNodeAndSlotCountIfSlotsWereTaken(int slotsNeeded) { int nodesFound = 0; int slotsFound = 0; LinkedList<Node> sortedNodes = new LinkedList<>(_nodes); Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC); for (Node n : sortedNodes) { if (slotsNeeded <= 0) { break; } if (n.isAlive()) { nodesFound++; int totalSlotsFree = n.totalSlots(); slotsFound += totalSlotsFree; slotsNeeded -= totalSlotsFree; } } return new NodeAndSlotCounts(nodesFound, slotsFound); } @Override public Collection<Node> takeNodesBySlots(int slotsNeeded) { HashSet<Node> ret = new HashSet<>(); LinkedList<Node> sortedNodes = new LinkedList<>(_nodes); Collections.sort(sortedNodes, Node.FREE_NODE_COMPARATOR_DEC); for (Node n : sortedNodes) { if (slotsNeeded <= 0) { break; } if (n.isAlive()) { n.freeAllSlots(_cluster); _nodes.remove(n); ret.add(n); slotsNeeded -= n.totalSlotsFree(); } } return ret; } @Override public void scheduleAsNeeded(NodePool... 
lesserPools) { for (TopologyDetails td : _tds.values()) { String topId = td.getId(); if (_cluster.needsScheduling(td)) { LOG.debug("Scheduling topology {}", topId); int totalTasks = td.getExecutors().size(); int origRequest = td.getNumWorkers(); int slotsRequested = Math.min(totalTasks, origRequest); int slotsUsed = Node.countSlotsUsed(topId, _nodes); int slotsFree = Node.countFreeSlotsAlive(_nodes); //Check to see if we have enough slots before trying to get them int slotsAvailable = 0; if (slotsRequested > slotsFree) { slotsAvailable = NodePool.slotsAvailable(lesserPools); } int slotsToUse = Math.min(slotsRequested - slotsUsed, slotsFree + slotsAvailable); int executorsNotRunning = _cluster.getUnassignedExecutors(td).size(); LOG.debug("Slots... requested {} used {} free {} available {} to be used {}, executors not running {}", slotsRequested, slotsUsed, slotsFree, slotsAvailable, slotsToUse, executorsNotRunning); if (slotsToUse <= 0) { if (executorsNotRunning > 0) { _cluster.setStatus(topId, "Not fully scheduled (No free slots in default pool) " + executorsNotRunning + " executors not scheduled"); } else { if (slotsUsed < slotsRequested) { _cluster.setStatus(topId, "Running with fewer slots than requested (" + slotsUsed + "/" + origRequest + ")"); } else { //slotsUsed < origRequest _cluster.setStatus(topId, "Fully Scheduled (requested " + origRequest + " slots, but could only use " + slotsUsed + ")"); } } continue; } int slotsNeeded = slotsToUse - slotsFree; if (slotsNeeded > 0) { _nodes.addAll(NodePool.takeNodesBySlot(slotsNeeded, lesserPools)); } if (executorsNotRunning <= 0) { //There are free slots that we can take advantage of now. for (Node n : _nodes) { n.freeTopology(topId, _cluster); } slotsFree = Node.countFreeSlotsAlive(_nodes); slotsToUse = Math.min(slotsRequested, slotsFree); } RoundRobinSlotScheduler slotSched = new RoundRobinSlotScheduler(td, slotsToUse, _cluster); LinkedList<Node> nodes = new LinkedList<>(_nodes); while (true) { Node n; do { if (nodes.isEmpty()) { throw new IllegalStateException("This should not happen, we" + " messed up and did not get enough slots"); } n = nodes.peekFirst(); if (n.totalSlotsFree() == 0) { nodes.remove(); n = null; } } while (n == null); if (!slotSched.assignSlotTo(n)) { break; } } int afterSchedSlotsUsed = Node.countSlotsUsed(topId, _nodes); if (afterSchedSlotsUsed < slotsRequested) { _cluster.setStatus(topId, "Running with fewer slots than requested (" + afterSchedSlotsUsed + "/" + origRequest + ")"); } else if (afterSchedSlotsUsed < origRequest) { _cluster.setStatus(topId, "Fully Scheduled (requested " + origRequest + " slots, but could only use " + afterSchedSlotsUsed + ")"); } else { _cluster.setStatus(topId, "Fully Scheduled"); } } else { _cluster.setStatus(topId, "Fully Scheduled"); } } } @Override public String toString() { return "DefaultPool " + _nodes.size() + " nodes " + _tds.size() + " topologies"; } }
apache-2.0
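The `DefaultPool.scheduleAsNeeded` method above decides how many slots a topology gets by clamping the request against what is already used and what can still be obtained. The sketch below isolates just that arithmetic in a self-contained Java class; the class and method names are illustrative only and are not part of the Storm scheduler API.

```java
// Illustrative sketch only: isolates the slot-count arithmetic used by
// DefaultPool.scheduleAsNeeded above. The class and method names here are
// hypothetical and are not part of the Storm scheduler API.
public final class SlotMath {

    /**
     * @param totalTasks       number of executors in the topology
     * @param requestedWorkers workers requested by the topology config
     * @param slotsUsed        slots the topology already occupies
     * @param slotsFree        free slots alive in this pool
     * @param slotsAvailable   slots that could be taken from lesser pools
     * @return how many additional slots to try to use (may be <= 0)
     */
    static int slotsToUse(int totalTasks, int requestedWorkers,
                          int slotsUsed, int slotsFree, int slotsAvailable) {
        // Never ask for more workers than there are executors to run in them.
        int slotsRequested = Math.min(totalTasks, requestedWorkers);
        // Cap by what is still needed and by what can actually be obtained.
        return Math.min(slotsRequested - slotsUsed, slotsFree + slotsAvailable);
    }

    public static void main(String[] args) {
        // 10 executors, 4 workers requested, 1 slot already used,
        // 2 free locally and 5 borrowable from lesser pools -> 3 more slots.
        System.out.println(slotsToUse(10, 4, 1, 2, 5));
    }
}
```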
carnegiespeech/translations
zh_cn/quizaccess_safebrowser.php
1084
<?php // This file is part of Moodle - http://moodle.org/ // // Moodle is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // Moodle is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Moodle. If not, see <http://www.gnu.org/licenses/>. /** * Strings for component 'quizaccess_safebrowser', language 'zh_cn', branch 'MOODLE_22_STABLE' * * @package quizaccess_safebrowser * @copyright 1999 onwards Martin Dougiamas {@link http://moodle.com} * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later */ defined('MOODLE_INTERNAL') || die(); $string['requiresafeexambrowser'] = '必须使用Safe Exam Browser';
apache-2.0
vthangathurai/SOA-Runtime
integration-tests/ProtoBufFindItemService/src/main/java/com/ebay/marketplace/search/v1/services/FieldValuesPair.java
3008
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-792 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2011.04.07 at 12:06:52 PM GMT+05:30 // package com.ebay.marketplace.search.v1.services; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * * This type defines a mapping between a Field and * its value(s) in the search results. * * * <p>Java class for FieldValuesPair complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="FieldValuesPair"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="field" type="{http://www.ebay.com/marketplace/search/v1/services}Field"/> * &lt;element name="fieldValue" type="{http://www.ebay.com/marketplace/search/v1/services}FieldValue" maxOccurs="unbounded"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "FieldValuesPair", propOrder = { "field", "fieldValue" }) public class FieldValuesPair { @XmlElement(required = true) protected Field field; @XmlElement(required = true) protected List<FieldValue> fieldValue; /** * Gets the value of the field property. * * @return * possible object is * {@link Field } * */ public Field getField() { return field; } /** * Sets the value of the field property. * * @param value * allowed object is * {@link Field } * */ public void setField(Field value) { this.field = value; } /** * Gets the value of the fieldValue property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the fieldValue property. * * <p> * For example, to add a new item, do as follows: * <pre> * getFieldValue().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link FieldValue } * * */ public List<FieldValue> getFieldValue() { if (fieldValue == null) { fieldValue = new ArrayList<FieldValue>(); } return this.fieldValue; } }
apache-2.0
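The javadoc on `getFieldValue()` above describes the usual JAXB live-list pattern: there is no setter for `fieldValue`, so callers mutate the list returned by the getter. A minimal usage sketch follows; it assumes `Field` and `FieldValue` are plain generated beans with public no-arg constructors (those classes are not shown in this file, so treat that as an assumption).

```java
// Minimal usage sketch for the generated FieldValuesPair bean above.
// It assumes Field and FieldValue are plain generated beans with public
// no-arg constructors; those classes are not shown in this file, so treat
// their construction here as a placeholder.
import com.ebay.marketplace.search.v1.services.Field;
import com.ebay.marketplace.search.v1.services.FieldValue;
import com.ebay.marketplace.search.v1.services.FieldValuesPair;

public class FieldValuesPairExample {
    public static void main(String[] args) {
        FieldValuesPair pair = new FieldValuesPair();

        // The field property has a conventional setter.
        Field field = new Field();
        pair.setField(field);

        // fieldValue deliberately has no setter: the getter returns the live
        // backing list, so values are added directly to it.
        FieldValue value = new FieldValue();
        pair.getFieldValue().add(value);

        System.out.println("values attached: " + pair.getFieldValue().size());
    }
}
```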
ferventcoder/puppetlabs-dsc
lib/puppet/type/dsc_xexcheventloglevel.rb
4027
require 'pathname' Puppet::Type.newtype(:dsc_xexcheventloglevel) do require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc' require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers' @doc = %q{ The DSC xExchEventLogLevel resource type. Automatically generated from 'xExchange/DSCResources/MSFT_xExchEventLogLevel/MSFT_xExchEventLogLevel.schema.mof' To learn more about PowerShell Desired State Configuration, please visit https://technet.microsoft.com/en-us/library/dn249912.aspx. For more information about built-in DSC Resources, please visit https://technet.microsoft.com/en-us/library/dn249921.aspx. For more information about xDsc Resources, please visit https://github.com/PowerShell/DscResources. } validate do fail('dsc_identity is a required attribute') if self[:dsc_identity].nil? end def dscmeta_resource_friendly_name; 'xExchEventLogLevel' end def dscmeta_resource_name; 'MSFT_xExchEventLogLevel' end def dscmeta_module_name; 'xExchange' end def dscmeta_module_version; '1.11.0.0' end newparam(:name, :namevar => true ) do end ensurable do newvalue(:exists?) { provider.exists? } newvalue(:present) { provider.create } defaultto { :present } end # Name: PsDscRunAsCredential # Type: MSFT_Credential # IsMandatory: False # Values: None newparam(:dsc_psdscrunascredential) do def mof_type; 'MSFT_Credential' end def mof_is_embedded?; true end desc "PsDscRunAsCredential" validate do |value| unless value.kind_of?(Hash) fail("Invalid value '#{value}'. Should be a hash") end PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value) end end # Name: Identity # Type: string # IsMandatory: True # Values: None newparam(:dsc_identity) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Identity - The Identity parameter specifies the name of the event logging category for which you want to set the event logging level." isrequired validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end end end # Name: Credential # Type: MSFT_Credential # IsMandatory: False # Values: None newparam(:dsc_credential) do def mof_type; 'MSFT_Credential' end def mof_is_embedded?; true end desc "Credential - Credentials used to establish a remote Powershell session to Exchange" validate do |value| unless value.kind_of?(Hash) fail("Invalid value '#{value}'. Should be a hash") end PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value) end end # Name: Level # Type: string # IsMandatory: False # Values: ["Lowest", "Low", "Medium", "High", "Expert"] newparam(:dsc_level) do def mof_type; 'string' end def mof_is_embedded?; false end desc "Level - The Level parameter specifies the log level for the specific event logging category. Valid values are Lowest, Low, Medium, High, Expert." validate do |value| unless value.kind_of?(String) fail("Invalid value '#{value}'. Should be a string") end unless ['Lowest', 'lowest', 'Low', 'low', 'Medium', 'medium', 'High', 'high', 'Expert', 'expert'].include?(value) fail("Invalid value '#{value}'. 
Valid values are Lowest, Low, Medium, High, Expert") end end end def builddepends pending_relations = super() PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations) end end Puppet::Type.type(:dsc_xexcheventloglevel).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10240.16384')) defaultfor :operatingsystem => :windows mk_resource_methods end
apache-2.0
cloudflare/docker
docs/reference/commandline/volume_ls.md
4776
<!--[metadata]> +++ title = "volume ls" description = "The volume ls command description and usage" keywords = ["volume, list"] [menu.main] parent = "smn_cli" +++ <![end-metadata]--> # volume ls ```markdown Usage: docker volume ls [OPTIONS] List volumes Aliases: ls, list Options: -f, --filter value Provide filter values (e.g. 'dangling=true') (default []) - dangling=<boolean> a volume if referenced or not - driver=<string> a volume's driver name - label=<key> or label=<key>=<value> - name=<string> a volume's name --format string Pretty-print volumes using a Go template --help Print usage -q, --quiet Only display volume names ``` Lists all the volumes Docker knows about. You can filter using the `-f` or `--filter` flag. Refer to the [filtering](#filtering) section for more information about available filter options. Example output: ```bash $ docker volume create --name rosemary rosemary $ docker volume create --name tyler tyler $ docker volume ls DRIVER VOLUME NAME local rosemary local tyler ``` ## Filtering The filtering flag (`-f` or `--filter`) format is "key=value". If there is more than one filter, then pass multiple flags (e.g., `--filter "foo=bar" --filter "bif=baz"`). The currently supported filters are: * dangling (boolean - true or false, 0 or 1) * driver (a volume driver's name) * label (`label=<key>` or `label=<key>=<value>`) * name (a volume's name) ### dangling The `dangling` filter matches on all volumes not referenced by any containers. ```bash $ docker run -d -v tyler:/tmpwork busybox f86a7dd02898067079c99ceacd810149060a70528eff3754d0b0f1a93bd0af18 $ docker volume ls -f dangling=true DRIVER VOLUME NAME local rosemary ``` ### driver The `driver` filter matches on all or part of a volume's driver name. The following filter matches all volumes with a driver name containing the `local` string. ```bash $ docker volume ls -f driver=local DRIVER VOLUME NAME local rosemary local tyler ``` ### label The `label` filter matches volumes based on the presence of a `label` alone or a `label` and a value. First, let's create some volumes to illustrate this: ```bash $ docker volume create --name the-doctor --label is-timelord=yes the-doctor $ docker volume create --name daleks --label is-timelord=no daleks ``` The following example filter matches volumes with the `is-timelord` label regardless of its value. ```bash $ docker volume ls --filter label=is-timelord DRIVER NAME local daleks local the-doctor ``` As can be seen in the above example, both volumes with `is-timelord=yes` and `is-timelord=no` are returned. Filtering on both `key` *and* `value` of the label produces the expected result: ```bash $ docker volume ls --filter label=is-timelord=yes DRIVER NAME local the-doctor ``` Specifying multiple label filters produces an "and" search; all conditions should be met: ```bash $ docker volume ls --filter label=is-timelord=yes --filter label=is-timelord=no DRIVER NAME ``` ### name The `name` filter matches on all or part of a volume's name. The following filter matches all volumes with a name containing the `rose` string. ```bash $ docker volume ls -f name=rose DRIVER VOLUME NAME local rosemary ``` ## Formatting The formatting option (`--format`) pretty-prints volumes output using a Go template.
Valid placeholders for the Go template are listed below: Placeholder | Description --------------|------------------------------------------------------------------------------------------ `.Name` | Volume name `.Driver` | Volume driver `.Scope` | Volume scope (local, global) `.Mountpoint` | Path on the host where the volume is mounted. `.Labels` | All labels assigned to the volume. `.Label` | Value of a specific label for this volume. For example `{{.Label "project.version"}}` When using the `--format` option, the `volume ls` command will either output the data exactly as the template declares or, when using the `table` directive, include column headers as well. The following example uses a template without headers and outputs the `Name` and `Driver` entries separated by a colon for all volumes: ```bash $ docker volume ls --format "{{.Name}}: {{.Driver}}" vol1: local vol2: local vol3: local ``` ## Related information * [volume create](volume_create.md) * [volume inspect](volume_inspect.md) * [volume rm](volume_rm.md) * [Understand Data Volumes](../../tutorials/dockervolumes.md)
apache-2.0
artefactop/ecomponent
doc/mod_monitor.md
924
# Module mod_monitor # * [Function Index](#index) * [Function Details](#functions) <a name="index"></a> ## Function Index ## <table width="100%" border="1" cellspacing="0" cellpadding="2" summary="function index"><tr><td valign="top"><a href="#accept-3">accept/3</a></td><td>Check if the packet can be accepted.</td></tr><tr><td valign="top"><a href="#init-1">init/1</a></td><td>Init the monitor.</td></tr></table> <a name="functions"></a> ## Function Details ## <a name="accept-3"></a> ### accept/3 ### <pre><code> accept(Id::string(), Max::integer(), Period::integer()) -&gt; boolean() </code></pre> <br></br> Check if the packet can be accepted. It depends if ID is whitelisted, and the Max packets can be accepted in the Period seconds. <a name="init-1"></a> ### init/1 ### <pre><code> init(Whitelist::[binary()]) -&gt; ok </code></pre> <br></br> Init the monitor. Adds the JIDs to the whitelist.
apache-2.0
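`mod_monitor:accept/3` above admits a packet when the sender's ID is whitelisted, or when it has sent fewer than `Max` packets within the last `Period` seconds. The module itself is Erlang; the snippet below is only a rough Java analogue of that sliding-window check, written to illustrate the documented behaviour rather than to mirror the real implementation.

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

// Rough Java analogue of the accept/3 check documented above: a sender is
// accepted when it is whitelisted, or when it has sent fewer than `max`
// packets within the last `periodSeconds`. Illustration only, not a port of
// the Erlang module.
public class PacketMonitor {
    private final Set<String> whitelist;
    private final Map<String, Deque<Long>> recent = new HashMap<>();

    public PacketMonitor(Set<String> whitelist) {
        this.whitelist = whitelist;
    }

    public synchronized boolean accept(String id, int max, int periodSeconds) {
        if (whitelist.contains(id)) {
            return true;
        }
        long now = System.currentTimeMillis();
        long cutoff = now - periodSeconds * 1000L;
        Deque<Long> times = recent.computeIfAbsent(id, k -> new ArrayDeque<>());
        // Drop timestamps that have fallen out of the window.
        while (!times.isEmpty() && times.peekFirst() < cutoff) {
            times.pollFirst();
        }
        if (times.size() >= max) {
            return false;
        }
        times.addLast(now);
        return true;
    }

    public static void main(String[] args) {
        PacketMonitor monitor = new PacketMonitor(Set.of("admin@example.com"));
        System.out.println(monitor.accept("user@example.com", 2, 60));  // true
        System.out.println(monitor.accept("user@example.com", 2, 60));  // true
        System.out.println(monitor.accept("user@example.com", 2, 60));  // false
        System.out.println(monitor.accept("admin@example.com", 2, 60)); // true (whitelisted)
    }
}
```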
gzwfdy/zone
src/main/webapp/static/app/templates/dialog-dial-out.html
2204
<div class="overlay-container brand-overlay" id="dialog-dial-out" toggle> <form class="overlay-content brand-secondary" name="addParticipantForm" ng-submit="addParticipantForm.$valid && connection.participantAdd(uri, protocol, role, presentationUri) || hide()"> <div class="dialog-title" translate>IDS_PARTICIPANT_ADD</div> <div class="dialog-content"> <table> <tr> <td> <select style="width: auto" ng-model="protocol" ng-options="value as name for (name, value) in applicationSettings.dialOutProtocols"></select> </td> <td> <input required type="text" maxlength="512" size="30" style="width: auto" focus-input ng-attr-placeholder="{{'IDS_PARTICIPANT_ADD_TEXT' | translate}}" ng-model="uri" ng-change="checkProtocolChange(uri)" /> </td> <td> <select style="width: auto" ng-model="role" ng-init="role = applicationSettings.defaultDialOutRole" ng-options="value as (name | translate) for (value, name) in {guest: 'IDS_ROLE_GUEST', host: 'IDS_ROLE_HOST'}"></select> </td> </tr> <tr ng-show="protocol == 'rtmp'"> <td> <label><span translate>IDS_PARTICIPANT_ADD_SEPARATE_PRESENTATION</span><input ng-model="separatePresentation" type="checkbox" /></label> </td> <td> <input ng-show="separatePresentation" type="text" maxlength="512" size="30" style="width:auto" ng-attr-placeholder="{{'IDS_PARTICIPANT_ADD_PRESENTATION_TEXT' | translate}}" ng-model="presentationUri" /> </td> <td></td> </tr> </table> <p class="placeholder-text" style="max-width: 500px" translate="IDS_PARTICIPANT_ADD_{{protocol}}"></p> </div> <div class="alert-actions"> <button type="button" class="button square" ng-click="hide()"> <span translate>IDS_BUTTON_CANCEL</span> </button> <button type="submit" class="button square"> <span translate>IDS_BUTTON_OK</span> </button> </div> </form> </div>
apache-2.0
mafulafunk/wicket
wicket-core/src/main/java/org/apache/wicket/validation/validator/AbstractRangeValidator.java
5358
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.validation.validator; import java.io.Serializable; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.validation.IValidatable; import org.apache.wicket.validation.IValidator; import org.apache.wicket.validation.ValidationError; /** * Base class for validators that check if a given value falls within [min,max] range. * * If either min or max are {@code null} they are not checked. * * <p> * Resource keys: * <ul> * <li>{@code <class.simpleName>.exact} if min==max</li> * <li>{@code <class.simpleName>.range} if both min and max are not {@code null}</li> * <li>{@code <class.simpleName>.minimum} if max is {@code null}</li> * <li>{@code <class.simpleName>.maximum} if min is {@code null}</li> * </ul> * </p> * * <p> * Error Message Variables: * <ul> * <li>{@code name}: the id of {@code Component} that failed</li> * <li>{@code label}: the label of the {@code Component} (either comes from * {@code FormComponent.labelModel} or resource key {@code <form-id>.<form-component-id>}</li> * <li>{@code input}: the input value</li> * <li>{@code minimum}: the minimum allowed value</li> * <li>{@code maximum}: the maximum allowed value</li> * </ul> * </p> * * @param <R> * type of range value * @param <V> * type of validatable * * @author igor */ public abstract class AbstractRangeValidator<R extends Comparable<R> & Serializable, V extends Serializable> extends Behavior implements IValidator<V> { private static final long serialVersionUID = 1L; private R minimum; private R maximum; /** * Constructor that sets the minimum and maximum values. 
* * @param minimum * the minimum value * @param maximum * the maximum value */ public AbstractRangeValidator(R minimum, R maximum) { setRange(minimum, maximum); } /** * Constructor used for subclasses who want to set the range using * {@link #setRange(Comparable, Comparable)} */ protected AbstractRangeValidator() { } /** * Sets validator range * * @param minimum * @param maximum */ protected final void setRange(R minimum, R maximum) { if (minimum == null && maximum == null) { throw new IllegalArgumentException("Both minimum and maximum values cannot be null"); } this.minimum = minimum; this.maximum = maximum; } @Override public void validate(IValidatable<V> validatable) { R value = getValue(validatable); final R min = getMinimum(); final R max = getMaximum(); if ((min != null && value.compareTo(min) < 0) || (max != null && value.compareTo(max) > 0)) { Mode mode = getMode(); ValidationError error = new ValidationError(this, mode.getVariation()); if (min != null) { error.setVariable("minimum", min); } if (max != null) { error.setVariable("maximum", max); } if (mode == Mode.EXACT) { error.setVariable("exact", max); } validatable.error(decorate(error, validatable)); } } /** * Gets the value that should be validated against the range * * @param validatable * @return value to validate */ protected abstract R getValue(IValidatable<V> validatable); /** * Gets the minimum value. * * @return minimum value */ public R getMinimum() { return minimum; } /** * Gets the maximum value. * * @return maximum value */ public R getMaximum() { return maximum; } /** * Allows subclasses to decorate reported errors * * @param error * @param validatable * @return decorated error */ protected ValidationError decorate(ValidationError error, IValidatable<V> validatable) { return error; } /** * Gets validation mode which is determined by whether min, max, or both values are provided * * @return validation mode */ public final Mode getMode() { final R min = getMinimum(); final R max = getMaximum(); if (min == null && max != null) { return Mode.MAXIMUM; } else if (max == null && min != null) { return Mode.MINIMUM; } else if ((min == null && max == null) || max.equals(min)) { return Mode.EXACT; } else { return Mode.RANGE; } } /** * Validator mode * * @author igor */ public static enum Mode { MINIMUM, MAXIMUM, RANGE, EXACT; public String getVariation() { return name().toLowerCase(); } } }
apache-2.0
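`AbstractRangeValidator` above leaves a single abstract method, `getValue()`, and subclasses fix the range through the two-argument constructor or `setRange()`. Wicket ships concrete subclasses for the common cases; the class below is a purely hypothetical minimal one, shown only to illustrate the extension point.

```java
import org.apache.wicket.validation.IValidatable;
import org.apache.wicket.validation.validator.AbstractRangeValidator;

// Hypothetical subclass illustrating the extension point described above:
// getValue() hands the validated value to the range check, and the range is
// fixed through the two-argument constructor. Wicket already ships ready-made
// range validators; this class exists only for illustration.
public class IntegerRangeValidator extends AbstractRangeValidator<Integer, Integer> {
    private static final long serialVersionUID = 1L;

    public IntegerRangeValidator(Integer minimum, Integer maximum) {
        super(minimum, maximum);
    }

    @Override
    protected Integer getValue(IValidatable<Integer> validatable) {
        // The validatable already carries the converted Integer input.
        return validatable.getValue();
    }
}
```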
flofreud/aws-sdk-java
aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearchv2/model/BuildSuggestersRequest.java
3006
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.cloudsearchv2.model; import java.io.Serializable; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Container for the parameters to the <code><a>BuildSuggester</a></code> * operation. Specifies the name of the domain you want to update. * </p> */ public class BuildSuggestersRequest extends AmazonWebServiceRequest implements Serializable, Cloneable { private String domainName; /** * @param domainName */ public void setDomainName(String domainName) { this.domainName = domainName; } /** * @return */ public String getDomainName() { return this.domainName; } /** * @param domainName * @return Returns a reference to this object so that method calls can be * chained together. */ public BuildSuggestersRequest withDomainName(String domainName) { setDomainName(domainName); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getDomainName() != null) sb.append("DomainName: " + getDomainName()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof BuildSuggestersRequest == false) return false; BuildSuggestersRequest other = (BuildSuggestersRequest) obj; if (other.getDomainName() == null ^ this.getDomainName() == null) return false; if (other.getDomainName() != null && other.getDomainName().equals(this.getDomainName()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getDomainName() == null) ? 0 : getDomainName().hashCode()); return hashCode; } @Override public BuildSuggestersRequest clone() { return (BuildSuggestersRequest) super.clone(); } }
apache-2.0
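`BuildSuggestersRequest` above is a plain fluent bean whose only state is the domain name. A short construction sketch follows; only the request class appears in this file, so the client call that would normally consume it is deliberately left out.

```java
import com.amazonaws.services.cloudsearchv2.model.BuildSuggestersRequest;

// Building the request shown above. Only the request class appears in this
// file, so the surrounding client call is omitted; in practice the request
// would be handed to the CloudSearch client's build-suggesters operation.
public class BuildSuggestersExample {
    public static void main(String[] args) {
        BuildSuggestersRequest request = new BuildSuggestersRequest()
                .withDomainName("movies"); // fluent setter returns the request

        // toString/equals/hashCode are generated from the domain name alone.
        System.out.println(request);                 // {DomainName: movies}
        System.out.println(request.getDomainName()); // movies
    }
}
```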
execunix/vinos
xsrc/external/mit/MesaLib/dist/src/gallium/auxiliary/util/u_keymap.c
7700
/************************************************************************** * * Copyright 2008 VMware, Inc. * All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sub license, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice (including the * next paragraph) shall be included in all copies or substantial portions * of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * **************************************************************************/ /** * Key lookup/associative container. * * Like Jose's util_hash_table, based on CSO cache code for now. * * Author: Brian Paul */ #include "pipe/p_compiler.h" #include "util/u_debug.h" #include "cso_cache/cso_hash.h" #include "util/u_memory.h" #include "util/u_keymap.h" struct keymap { struct cso_hash *cso; unsigned key_size; unsigned max_entries; /* XXX not obeyed net */ unsigned num_entries; keymap_delete_func delete_func; }; struct keymap_item { void *key, *value; }; /** * This the default key-delete function used when the client doesn't * provide one. */ static void default_delete_func(const struct keymap *map, const void *key, void *data, void *user) { FREE((void*) data); } static INLINE struct keymap_item * hash_table_item(struct cso_hash_iter iter) { return (struct keymap_item *) cso_hash_iter_data(iter); } /** * Return 4-byte hash key for a block of bytes. */ static unsigned hash(const void *key, unsigned keySize) { unsigned i, hash; keySize /= 4; /* convert from bytes to uints */ hash = 0; for (i = 0; i < keySize; i++) { hash ^= (i + 1) * ((const unsigned *) key)[i]; } /*hash = hash ^ (hash >> 11) ^ (hash >> 22);*/ return hash; } /** * Create a new map. * \param keySize size of the keys in bytes * \param maxEntries max number of entries to allow (~0 = infinity) * \param deleteFunc optional callback to call when entries * are deleted/replaced */ struct keymap * util_new_keymap(unsigned keySize, unsigned maxEntries, keymap_delete_func deleteFunc) { struct keymap *map = MALLOC_STRUCT(keymap); if (!map) return NULL; map->cso = cso_hash_create(); if (!map->cso) { FREE(map); return NULL; } map->max_entries = maxEntries; map->num_entries = 0; map->key_size = keySize; map->delete_func = deleteFunc ? deleteFunc : default_delete_func; return map; } /** * Delete/free a keymap and all entries. The deleteFunc that was given at * create time will be called for each entry. 
* \param user user-provided pointer passed through to the delete callback */ void util_delete_keymap(struct keymap *map, void *user) { util_keymap_remove_all(map, user); cso_hash_delete(map->cso); FREE(map); } static INLINE struct cso_hash_iter hash_table_find_iter(const struct keymap *map, const void *key, unsigned key_hash) { struct cso_hash_iter iter; struct keymap_item *item; iter = cso_hash_find(map->cso, key_hash); while (!cso_hash_iter_is_null(iter)) { item = (struct keymap_item *) cso_hash_iter_data(iter); if (!memcmp(item->key, key, map->key_size)) break; iter = cso_hash_iter_next(iter); } return iter; } static INLINE struct keymap_item * hash_table_find_item(const struct keymap *map, const void *key, unsigned key_hash) { struct cso_hash_iter iter = hash_table_find_iter(map, key, key_hash); if (cso_hash_iter_is_null(iter)) { return NULL; } else { return hash_table_item(iter); } } /** * Insert a new key + data pointer into the table. * Note: we create a copy of the key, but not the data! * If the key is already present in the table, replace the existing * entry (calling the delete callback on the previous entry). * If the maximum capacity of the map is reached an old entry * will be deleted (the delete callback will be called). */ boolean util_keymap_insert(struct keymap *map, const void *key, const void *data, void *user) { unsigned key_hash; struct keymap_item *item; struct cso_hash_iter iter; assert(map); if (!map) return FALSE; key_hash = hash(key, map->key_size); item = hash_table_find_item(map, key, key_hash); if (item) { /* call delete callback for old entry/item */ map->delete_func(map, item->key, item->value, user); item->value = (void *) data; return TRUE; } item = MALLOC_STRUCT(keymap_item); if (!item) return FALSE; item->key = mem_dup(key, map->key_size); item->value = (void *) data; iter = cso_hash_insert(map->cso, key_hash, item); if (cso_hash_iter_is_null(iter)) { FREE(item); return FALSE; } map->num_entries++; return TRUE; } /** * Look up a key in the map and return the associated data pointer. */ const void * util_keymap_lookup(const struct keymap *map, const void *key) { unsigned key_hash; struct keymap_item *item; assert(map); if (!map) return NULL; key_hash = hash(key, map->key_size); item = hash_table_find_item(map, key, key_hash); if (!item) return NULL; return item->value; } /** * Remove an entry from the map. * The delete callback will be called if the given key/entry is found. * \param user passed to the delete callback as the last param. */ void util_keymap_remove(struct keymap *map, const void *key, void *user) { unsigned key_hash; struct cso_hash_iter iter; struct keymap_item *item; assert(map); if (!map) return; key_hash = hash(key, map->key_size); iter = hash_table_find_iter(map, key, key_hash); if (cso_hash_iter_is_null(iter)) return; item = hash_table_item(iter); assert(item); if (!item) return; map->delete_func(map, item->key, item->value, user); FREE(item->key); FREE(item); map->num_entries--; cso_hash_erase(map->cso, iter); } /** * Remove all entries from the map, calling the delete callback for each. * \param user passed to the delete callback as the last param. 
*/ void util_keymap_remove_all(struct keymap *map, void *user) { struct cso_hash_iter iter; struct keymap_item *item; assert(map); if (!map) return; iter = cso_hash_first_node(map->cso); while (!cso_hash_iter_is_null(iter)) { item = (struct keymap_item *) cso_hash_take(map->cso, cso_hash_iter_key(iter)); map->delete_func(map, item->key, item->value, user); FREE(item->key); FREE(item); iter = cso_hash_first_node(map->cso); } } extern void util_keymap_info(const struct keymap *map) { debug_printf("Keymap %p: %u of max %u entries\n", (void *) map, map->num_entries, map->max_entries); }
apache-2.0
FLVC/fcrepo-src-3.4.2
fcrepo-security/fcrepo-security-pdp/src/main/java/org/fcrepo/server/security/xacml/pdp/data/PolicyStoreFactory.java
1295
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.security.xacml.pdp.data; /** * A factory for a PolicyStore. Used to get a PolicyStore * instance based on a configuration file. * * @author Stephen Bayliss * @version $Id$ */ public class PolicyStoreFactory { /** * Generate a PolicyStore instance based on the config file. * @return a new instance of the configured PolicyStore implementation * @throws PolicyStoreException if the configuration cannot be read or the store cannot be instantiated */ public PolicyStore newPolicyStore() throws PolicyStoreException { // TODO: should we be supplying a classloader? PolicyStore policyStore; String policyStoreClassName; try { policyStoreClassName = Config.policyStoreClassName(); } catch (PolicyConfigException e) { throw new PolicyStoreException("Error reading config for PolicyStore", e); } try { policyStore = (PolicyStore) Class.forName(policyStoreClassName).newInstance(); } catch (Exception e) { throw new PolicyStoreException("Error instantiating PolicyStore " + policyStoreClassName, e); } return policyStore; } }
apache-2.0
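`PolicyStoreFactory` above resolves the `PolicyStore` implementation class from configuration and instantiates it reflectively, wrapping both configuration and instantiation failures in `PolicyStoreException`. A caller-side sketch, with an assumed wrapper class name:

```java
import org.fcrepo.server.security.xacml.pdp.data.PolicyStore;
import org.fcrepo.server.security.xacml.pdp.data.PolicyStoreException;
import org.fcrepo.server.security.xacml.pdp.data.PolicyStoreFactory;

// Caller-side sketch for the factory above. Which PolicyStore implementation
// comes back depends entirely on the configuration read by
// Config.policyStoreClassName(), so nothing beyond obtaining the instance is
// assumed here. The wrapper class name is hypothetical.
public class PolicyStoreBootstrap {
    public static PolicyStore loadPolicyStore() {
        try {
            return new PolicyStoreFactory().newPolicyStore();
        } catch (PolicyStoreException e) {
            // Both a bad configuration and a failed reflective instantiation
            // surface as PolicyStoreException, so one handler covers both.
            throw new IllegalStateException("Could not initialise the policy store", e);
        }
    }
}
```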
snicoll/spring-boot
spring-boot-tools/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/ConfigurationMetadataAnnotationProcessorTests.java
15218
/* * Copyright 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.configurationprocessor; import java.io.IOException; import javax.annotation.processing.SupportedAnnotationTypes; import javax.annotation.processing.SupportedSourceVersion; import javax.lang.model.SourceVersion; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.springframework.boot.configurationprocessor.metadata.ConfigurationMetadata; import org.springframework.boot.configurationsample.lombok.LombokExplicitProperties; import org.springframework.boot.configurationsample.lombok.LombokSimpleDataProperties; import org.springframework.boot.configurationsample.lombok.LombokSimpleProperties; import org.springframework.boot.configurationsample.method.EmptyTypeMethodConfig; import org.springframework.boot.configurationsample.method.InvalidMethodConfig; import org.springframework.boot.configurationsample.method.MethodAndClassConfig; import org.springframework.boot.configurationsample.method.SimpleMethodConfig; import org.springframework.boot.configurationsample.simple.HierarchicalProperties; import org.springframework.boot.configurationsample.simple.NotAnnotated; import org.springframework.boot.configurationsample.simple.SimpleCollectionProperties; import org.springframework.boot.configurationsample.simple.SimplePrefixValueProperties; import org.springframework.boot.configurationsample.simple.SimpleProperties; import org.springframework.boot.configurationsample.simple.SimpleTypeProperties; import org.springframework.boot.configurationsample.specific.BuilderPojo; import org.springframework.boot.configurationsample.specific.ExcludedTypesPojo; import org.springframework.boot.configurationsample.specific.InnerClassAnnotatedGetterConfig; import org.springframework.boot.configurationsample.specific.InnerClassProperties; import org.springframework.boot.configurationsample.specific.InnerClassRootConfig; import org.springframework.boot.configurationsample.specific.SimplePojo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertThat; import static org.springframework.boot.configurationprocessor.ConfigurationMetadataMatchers.containsGroup; import static org.springframework.boot.configurationprocessor.ConfigurationMetadataMatchers.containsProperty; /** * Tests for {@link ConfigurationMetadataAnnotationProcessor}. 
* * @author Stephane Nicoll * @author Phillip Webb */ public class ConfigurationMetadataAnnotationProcessorTests { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Test public void notAnnotated() throws Exception { ConfigurationMetadata metadata = compile(NotAnnotated.class); assertThat("No config metadata file should have been generated when " + "no metadata is discovered", metadata.getItems(), empty()); } @Test public void simpleProperties() throws Exception { ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata, containsGroup("simple").fromSource(SimpleProperties.class)); assertThat( metadata, containsProperty("simple.the-name", String.class) .fromSource(SimpleProperties.class) .withDescription("The name of this simple properties.") .withDefaultValue(is("boot")).withDeprecated()); assertThat( metadata, containsProperty("simple.flag", Boolean.class) .fromSource(SimpleProperties.class) .withDescription("A simple flag.").withDeprecated()); assertThat(metadata, containsProperty("simple.comparator")); assertThat(metadata, not(containsProperty("simple.counter"))); assertThat(metadata, not(containsProperty("simple.size"))); } @Test public void simplePrefixValueProperties() throws Exception { ConfigurationMetadata metadata = compile(SimplePrefixValueProperties.class); assertThat(metadata, containsGroup("simple").fromSource(SimplePrefixValueProperties.class)); assertThat( metadata, containsProperty("simple.name", String.class).fromSource( SimplePrefixValueProperties.class)); } @Test public void simpleTypeProperties() throws Exception { ConfigurationMetadata metadata = compile(SimpleTypeProperties.class); assertThat(metadata, containsGroup("simple.type").fromSource(SimpleTypeProperties.class)); assertThat(metadata, containsProperty("simple.type.my-string", String.class)); assertThat(metadata, containsProperty("simple.type.my-byte", Byte.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-byte", Byte.class)); assertThat(metadata, containsProperty("simple.type.my-char", Character.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-char", Character.class)); assertThat(metadata, containsProperty("simple.type.my-boolean", Boolean.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-boolean", Boolean.class)); assertThat(metadata, containsProperty("simple.type.my-short", Short.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-short", Short.class)); assertThat(metadata, containsProperty("simple.type.my-integer", Integer.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-integer", Integer.class)); assertThat(metadata, containsProperty("simple.type.my-long", Long.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-long", Long.class)); assertThat(metadata, containsProperty("simple.type.my-double", Double.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-double", Double.class)); assertThat(metadata, containsProperty("simple.type.my-float", Float.class)); assertThat(metadata, containsProperty("simple.type.my-primitive-float", Float.class)); assertThat(metadata.getItems().size(), equalTo(18)); } @Test public void hierarchicalProperties() throws Exception { ConfigurationMetadata metadata = compile(HierarchicalProperties.class); assertThat(metadata, containsGroup("hierarchical").fromSource(HierarchicalProperties.class)); assertThat(metadata, containsProperty("hierarchical.first", String.class) 
.fromSource(HierarchicalProperties.class)); assertThat(metadata, containsProperty("hierarchical.second", String.class) .fromSource(HierarchicalProperties.class)); assertThat(metadata, containsProperty("hierarchical.third", String.class) .fromSource(HierarchicalProperties.class)); } @Test @SuppressWarnings("deprecation") public void deprecatedProperties() throws Exception { Class<?> type = org.springframework.boot.configurationsample.simple.DeprecatedProperties.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata, containsGroup("deprecated").fromSource(type)); assertThat(metadata, containsProperty("deprecated.name", String.class) .fromSource(type).withDeprecated()); assertThat(metadata, containsProperty("deprecated.description", String.class) .fromSource(type).withDeprecated()); } @Test public void parseCollectionConfig() throws Exception { ConfigurationMetadata metadata = compile(SimpleCollectionProperties.class); // getter and setter assertThat( metadata, containsProperty("collection.integers-to-names", "java.util.Map<java.lang.Integer,java.lang.String>")); assertThat( metadata, containsProperty("collection.longs", "java.util.Collection<java.lang.Long>")); assertThat(metadata, containsProperty("collection.floats", "java.util.List<java.lang.Float>")); // getter only assertThat( metadata, containsProperty("collection.names-to-integers", "java.util.Map<java.lang.String,java.lang.Integer>")); assertThat( metadata, containsProperty("collection.bytes", "java.util.Collection<java.lang.Byte>")); assertThat( metadata, containsProperty("collection.doubles", "java.util.List<java.lang.Double>")); } @Test public void simpleMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(SimpleMethodConfig.class); assertThat(metadata, containsGroup("foo").fromSource(SimpleMethodConfig.class)); assertThat( metadata, containsProperty("foo.name", String.class).fromSource( SimpleMethodConfig.Foo.class)); assertThat( metadata, containsProperty("foo.flag", Boolean.class).fromSource( SimpleMethodConfig.Foo.class)); } @Test public void invalidMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(InvalidMethodConfig.class); assertThat( metadata, containsProperty("something.name", String.class).fromSource( InvalidMethodConfig.class)); assertThat(metadata, not(containsProperty("invalid.name"))); } @Test public void methodAndClassConfig() throws Exception { ConfigurationMetadata metadata = compile(MethodAndClassConfig.class); assertThat( metadata, containsProperty("conflict.name", String.class).fromSource( MethodAndClassConfig.Foo.class)); assertThat( metadata, containsProperty("conflict.flag", Boolean.class).fromSource( MethodAndClassConfig.Foo.class)); assertThat( metadata, containsProperty("conflict.value", String.class).fromSource( MethodAndClassConfig.class)); } @Test public void emptyTypeMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(EmptyTypeMethodConfig.class); assertThat(metadata, not(containsProperty("something.foo"))); } @Test public void innerClassRootConfig() throws Exception { ConfigurationMetadata metadata = compile(InnerClassRootConfig.class); assertThat(metadata, containsProperty("config.name")); } @Test public void innerClassProperties() throws Exception { ConfigurationMetadata metadata = compile(InnerClassProperties.class); assertThat(metadata, containsGroup("config").fromSource(InnerClassProperties.class)); assertThat(metadata, containsGroup("config.first").ofType(InnerClassProperties.Foo.class) 
.fromSource(InnerClassProperties.class)); assertThat(metadata, containsProperty("config.first.name")); assertThat(metadata, containsProperty("config.first.bar.name")); assertThat(metadata, containsGroup("config.the-second", InnerClassProperties.Foo.class) .fromSource(InnerClassProperties.class)); assertThat(metadata, containsProperty("config.the-second.name")); assertThat(metadata, containsProperty("config.the-second.bar.name")); assertThat(metadata, containsGroup("config.third").ofType(SimplePojo.class) .fromSource(InnerClassProperties.class)); assertThat(metadata, containsProperty("config.third.value")); assertThat(metadata, containsProperty("config.fourth")); assertThat(metadata, not(containsGroup("config.fourth"))); } @Test public void innerClassAnnotatedGetterConfig() throws Exception { ConfigurationMetadata metadata = compile(InnerClassAnnotatedGetterConfig.class); assertThat(metadata, containsProperty("specific.value")); assertThat(metadata, containsProperty("foo.name")); assertThat(metadata, not(containsProperty("specific.foo"))); } @Test public void builderPojo() throws IOException { ConfigurationMetadata metadata = compile(BuilderPojo.class); assertThat(metadata, containsProperty("builder.name")); } @Test public void excludedTypesPojo() throws IOException { ConfigurationMetadata metadata = compile(ExcludedTypesPojo.class); assertThat(metadata, containsProperty("excluded.name")); assertThat(metadata, not(containsProperty("excluded.class-loader"))); assertThat(metadata, not(containsProperty("excluded.data-source"))); assertThat(metadata, not(containsProperty("excluded.print-writer"))); assertThat(metadata, not(containsProperty("excluded.writer"))); assertThat(metadata, not(containsProperty("excluded.writer-array"))); } @Test public void lombokDataProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokSimpleDataProperties.class); assertSimpleLombokProperties(metadata, LombokSimpleDataProperties.class, "data"); } @Test public void lombokSimpleProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokSimpleProperties.class); assertSimpleLombokProperties(metadata, LombokSimpleProperties.class, "simple"); } @Test public void lombokExplicitProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokExplicitProperties.class); assertSimpleLombokProperties(metadata, LombokExplicitProperties.class, "explicit"); } private void assertSimpleLombokProperties(ConfigurationMetadata metadata, Class<?> source, String prefix) { assertThat(metadata, containsGroup(prefix).fromSource(source)); assertThat(metadata, not(containsProperty(prefix + ".id"))); assertThat(metadata, containsProperty(prefix + ".name", String.class).fromSource(source) .withDescription("Name description.")); assertThat(metadata, containsProperty(prefix + ".description")); assertThat(metadata, containsProperty(prefix + ".counter")); assertThat(metadata, containsProperty(prefix + ".number").fromSource(source) .withDefaultValue(is(0)).withDeprecated()); assertThat(metadata, containsProperty(prefix + ".items")); assertThat(metadata, not(containsProperty(prefix + ".ignored"))); } private ConfigurationMetadata compile(Class<?>... 
types) throws IOException { TestConfigurationMetadataAnnotationProcessor processor = new TestConfigurationMetadataAnnotationProcessor(); new TestCompiler(this.temporaryFolder).getTask(types).call(processor); return processor.getMetadata(); } @SupportedAnnotationTypes({ "*" }) @SupportedSourceVersion(SourceVersion.RELEASE_6) private static class TestConfigurationMetadataAnnotationProcessor extends ConfigurationMetadataAnnotationProcessor { static final String CONFIGURATION_PROPERTIES_ANNOTATION = "org.springframework.boot.configurationsample.ConfigurationProperties"; static final String NESTED_CONFIGURATION_PROPERTY_ANNOTATION = "org.springframework.boot.configurationsample.NestedConfigurationProperty"; private ConfigurationMetadata metadata; @Override protected String configurationPropertiesAnnotation() { return CONFIGURATION_PROPERTIES_ANNOTATION; } @Override protected String nestedConfigurationPropertyAnnotation() { return NESTED_CONFIGURATION_PROPERTY_ANNOTATION; } @Override protected void writeMetaData(ConfigurationMetadata metadata) { this.metadata = metadata; } public ConfigurationMetadata getMetadata() { return this.metadata; } } }
apache-2.0
ahakanbaba/kubernetes
pkg/registry/core/node/storage/storage.go
4861
/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package storage import ( "fmt" "net/http" "net/url" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" genericapirequest "k8s.io/apiserver/pkg/endpoints/request" "k8s.io/apiserver/pkg/registry/generic" genericregistry "k8s.io/apiserver/pkg/registry/generic/registry" "k8s.io/apiserver/pkg/registry/rest" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/api/v1" "k8s.io/kubernetes/pkg/kubelet/client" "k8s.io/kubernetes/pkg/registry/cachesize" "k8s.io/kubernetes/pkg/registry/core/node" noderest "k8s.io/kubernetes/pkg/registry/core/node/rest" ) // NodeStorage includes storage for nodes and all sub resources type NodeStorage struct { Node *REST Status *StatusREST Proxy *noderest.ProxyREST KubeletConnectionInfo client.ConnectionInfoGetter } type REST struct { *genericregistry.Store connection client.ConnectionInfoGetter proxyTransport http.RoundTripper } // StatusREST implements the REST endpoint for changing the status of a pod. type StatusREST struct { store *genericregistry.Store } func (r *StatusREST) New() runtime.Object { return &api.Node{} } // Get retrieves the object from the storage. It is required to support Patch. func (r *StatusREST) Get(ctx genericapirequest.Context, name string, options *metav1.GetOptions) (runtime.Object, error) { return r.store.Get(ctx, name, options) } // Update alters the status subset of an object. func (r *StatusREST) Update(ctx genericapirequest.Context, name string, objInfo rest.UpdatedObjectInfo) (runtime.Object, bool, error) { return r.store.Update(ctx, name, objInfo) } // NewStorage returns a NodeStorage object that will work against nodes. 
func NewStorage(optsGetter generic.RESTOptionsGetter, kubeletClientConfig client.KubeletClientConfig, proxyTransport http.RoundTripper) (*NodeStorage, error) { store := &genericregistry.Store{ Copier: api.Scheme, NewFunc: func() runtime.Object { return &api.Node{} }, NewListFunc: func() runtime.Object { return &api.NodeList{} }, ObjectNameFunc: func(obj runtime.Object) (string, error) { return obj.(*api.Node).Name, nil }, PredicateFunc: node.MatchNode, QualifiedResource: api.Resource("nodes"), WatchCacheSize: cachesize.GetWatchCacheSizeByResource("nodes"), CreateStrategy: node.Strategy, UpdateStrategy: node.Strategy, DeleteStrategy: node.Strategy, ExportStrategy: node.Strategy, } options := &generic.StoreOptions{RESTOptions: optsGetter, AttrFunc: node.GetAttrs, TriggerFunc: node.NodeNameTriggerFunc} if err := store.CompleteWithOptions(options); err != nil { return nil, err } statusStore := *store statusStore.UpdateStrategy = node.StatusStrategy // Set up REST handlers nodeREST := &REST{Store: store, proxyTransport: proxyTransport} statusREST := &StatusREST{store: &statusStore} proxyREST := &noderest.ProxyREST{Store: store, ProxyTransport: proxyTransport} // Build a NodeGetter that looks up nodes using the REST handler nodeGetter := client.NodeGetterFunc(func(nodeName string, options metav1.GetOptions) (*v1.Node, error) { obj, err := nodeREST.Get(genericapirequest.NewContext(), nodeName, &options) if err != nil { return nil, err } node, ok := obj.(*api.Node) if !ok { return nil, fmt.Errorf("unexpected type %T", obj) } // TODO: Remove the conversion. Consider only return the NodeAddresses externalNode := &v1.Node{} err = v1.Convert_api_Node_To_v1_Node(node, externalNode, nil) if err != nil { return nil, fmt.Errorf("failed to convert to v1.Node: %v", err) } return externalNode, nil }) connectionInfoGetter, err := client.NewNodeConnectionInfoGetter(nodeGetter, kubeletClientConfig) if err != nil { return nil, err } nodeREST.connection = connectionInfoGetter proxyREST.Connection = connectionInfoGetter return &NodeStorage{ Node: nodeREST, Status: statusREST, Proxy: proxyREST, KubeletConnectionInfo: connectionInfoGetter, }, nil } // Implement Redirector. var _ = rest.Redirector(&REST{}) // ResourceLocation returns a URL to which one can send traffic for the specified node. func (r *REST) ResourceLocation(ctx genericapirequest.Context, id string) (*url.URL, http.RoundTripper, error) { return node.ResourceLocation(r, r.connection, r.proxyTransport, ctx, id) }
apache-2.0
google/flutter_flux
example/README.md
77
# flutter_flux example Simple mock chat app example ```bash flutter run ```
apache-2.0
gocd/gocd
server/src/main/java/com/thoughtworks/go/server/cache/LazyCache.java
2065
/* * Copyright 2022 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.cache; import com.thoughtworks.go.server.transaction.TransactionSynchronizationManager; import net.sf.ehcache.Ehcache; import net.sf.ehcache.Element; import org.springframework.transaction.support.TransactionSynchronizationAdapter; import java.util.function.Supplier; public class LazyCache { private final Ehcache ehcache; private final TransactionSynchronizationManager transactionSynchronizationManager; public LazyCache(Ehcache ehcache, TransactionSynchronizationManager transactionSynchronizationManager) { this.ehcache = ehcache; this.transactionSynchronizationManager = transactionSynchronizationManager; } public <T> T get(String key, Supplier<T> compute) { Element element = ehcache.get(key); if (element != null) { return (T) element.getObjectValue(); } synchronized (key.intern()) { element = ehcache.get(key); if (element != null) { return (T) element.getObjectValue(); } T object = compute.get(); ehcache.put(new Element(key, object)); return object; } } public void flushOnCommit() { transactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() { @Override public void afterCommit() { ehcache.flush(); } }); } }
apache-2.0
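A minimal usage sketch of the LazyCache class above, assuming an Ehcache named "pipelineNameCache" is already declared in ehcache.xml and a TransactionSynchronizationManager is injected by the surrounding Spring context; the cache name, the key, and the loader method are illustrative assumptions, not part of the GoCD sources.

```java
import java.util.Arrays;
import java.util.List;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Ehcache;
import com.thoughtworks.go.server.cache.LazyCache;
import com.thoughtworks.go.server.transaction.TransactionSynchronizationManager;

class LazyCacheUsageSketch {
    private final TransactionSynchronizationManager txnSyncManager;

    LazyCacheUsageSketch(TransactionSynchronizationManager txnSyncManager) {
        this.txnSyncManager = txnSyncManager;
    }

    List<String> pipelineNames() {
        // Look up a cache that is assumed to be configured elsewhere.
        Ehcache ehcache = CacheManager.getInstance().getEhcache("pipelineNameCache");
        LazyCache lazyCache = new LazyCache(ehcache, txnSyncManager);

        // First call computes and caches the list; later calls with the same key return the cached value.
        List<String> names = lazyCache.get("all-pipeline-names", this::loadPipelineNamesFromDb);

        // If this runs inside a transaction, schedule the cache to be flushed to disk after a successful commit.
        lazyCache.flushOnCommit();
        return names;
    }

    private List<String> loadPipelineNamesFromDb() {
        // Stand-in for a real database query.
        return Arrays.asList("build", "deploy");
    }
}
```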
dlnufox/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtAffinityAssignmentResponse.java
6291
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht; import java.nio.ByteBuffer; import java.util.List; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.GridDirectTransient; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.GridCacheMessage; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.plugin.extensions.communication.MessageReader; import org.apache.ignite.plugin.extensions.communication.MessageWriter; import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode; import org.jetbrains.annotations.NotNull; /** * Affinity assignment response. */ public class GridDhtAffinityAssignmentResponse extends GridCacheMessage { /** */ private static final long serialVersionUID = 0L; /** Topology version. */ private AffinityTopologyVersion topVer; /** Affinity assignment. */ @GridDirectTransient @GridToStringInclude private List<List<ClusterNode>> affAssignment; /** Affinity assignment bytes. */ private byte[] affAssignmentBytes; /** * Empty constructor. */ public GridDhtAffinityAssignmentResponse() { // No-op. } /** * @param cacheId Cache ID. * @param topVer Topology version. * @param affAssignment Affinity assignment. */ public GridDhtAffinityAssignmentResponse(int cacheId, @NotNull AffinityTopologyVersion topVer, List<List<ClusterNode>> affAssignment) { this.cacheId = cacheId; this.topVer = topVer; this.affAssignment = affAssignment; } /** {@inheritDoc} */ @Override public boolean partitionExchangeMessage() { return true; } /** * @return Topology version. */ @Override public AffinityTopologyVersion topologyVersion() { return topVer; } /** * @return Affinity assignment. */ public List<List<ClusterNode>> affinityAssignment() { return affAssignment; } /** {@inheritDoc} */ @Override public byte directType() { return 29; } /** {@inheritDoc} */ @Override public byte fieldsCount() { return 5; } /** * @param ctx Context. 
*/ @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException { super.prepareMarshal(ctx); if (affAssignment != null) affAssignmentBytes = ctx.marshaller().marshal(affAssignment); } /** {@inheritDoc} */ @SuppressWarnings("ForLoopReplaceableByForEach") @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException { super.finishUnmarshal(ctx, ldr); if (affAssignmentBytes != null) { affAssignment = ctx.marshaller().unmarshal(affAssignmentBytes, ldr); // TODO IGNITE-10: setting 'local' for nodes not needed when IGNITE-10 is implemented. int assignments = affAssignment.size(); for (int n = 0; n < assignments; n++) { List<ClusterNode> nodes = affAssignment.get(n); int size = nodes.size(); for (int i = 0; i < size; i++) { ClusterNode node = nodes.get(i); if (node instanceof TcpDiscoveryNode) ((TcpDiscoveryNode)node).local(node.id().equals(ctx.localNodeId())); } } } } /** {@inheritDoc} */ @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) { writer.setBuffer(buf); if (!super.writeTo(buf, writer)) return false; if (!writer.isHeaderWritten()) { if (!writer.writeHeader(directType(), fieldsCount())) return false; writer.onHeaderWritten(); } switch (writer.state()) { case 3: if (!writer.writeByteArray("affAssignmentBytes", affAssignmentBytes)) return false; writer.incrementState(); case 4: if (!writer.writeMessage("topVer", topVer)) return false; writer.incrementState(); } return true; } /** {@inheritDoc} */ @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) { reader.setBuffer(buf); if (!reader.beforeMessageRead()) return false; if (!super.readFrom(buf, reader)) return false; switch (reader.state()) { case 3: affAssignmentBytes = reader.readByteArray("affAssignmentBytes"); if (!reader.isLastRead()) return false; reader.incrementState(); case 4: topVer = reader.readMessage("topVer"); if (!reader.isLastRead()) return false; reader.incrementState(); } return reader.afterMessageRead(GridDhtAffinityAssignmentResponse.class); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(GridDhtAffinityAssignmentResponse.class, this); } }
apache-2.0
jpeach/mesos
src/webui/app/agents/agent.html
12924
<ol class="breadcrumb"> <li> <a class="badge badge-type" href="#">Master</a> </li> <li class="active"> <span class="badge badge-type">Agent</span> {{agent_id}} </li> </ol> <div class="alert alert-error hidden" id="alert"> <button class="close" data-dismiss="alert">×</button> <strong>{{alert_message}}</strong> </div> <div class="row" id="agent"> <div class="col-md-3"> <div class="well"> <dl class="inline clearfix"> <dt>Cluster:</dt> <dd> <span ng-show="clusterNamed">{{cluster}}</span> <span ng-show="!clusterNamed"> (Unnamed) <i class="icon-info-sign" tooltip="To name this cluster, set the --cluster flag when starting the master." tooltip-placement="right"></i> </span> </dd> <dt>Agent:</dt> <dd>{{state.hostname}}</dd> <dt>Version:</dt> <dd>{{state.version}}</dd> <dt>Built:</dt> <dd> <m-timestamp value="{{state.build_time * 1000}}"></m-timestamp> </dd> <dt>Started:</dt> <dd> <m-timestamp value="{{state.start_time * 1000}}"></m-timestamp> </dd> <dt>Master:</dt> <dd>{{state.master_hostname}}</dd> </dl> <p ng-if="agent.log_file_attached"> <b>Agent Log:</b> <span class="btn-group"> <!-- Links can look like buttons using Bootstrap classes. --> <a class="btn btn-xs btn-default" href="{{agent.url_prefix}}/files/download?path=/slave/log"> Download </a> <button class="btn btn-xs btn-default" ng-click="streamLogs($event)"> View </button> </span> </p> <h4>Tasks</h4> <table class="table table-condensed"> <tbody> <tr> <td>Staging</td> <td class="text-right">{{staging_tasks | number}}</td> </tr> <tr> <td>Starting</td> <td class="text-right">{{starting_tasks | number}}</td> </tr> <tr> <td>Running</td> <td class="text-right">{{running_tasks | number}}</td> </tr> <tr> <td>Killing</td> <td class="text-right">{{killing_tasks | number}}</td> </tr> <tr> <td>Finished</td> <td class="text-right">{{finished_tasks | number}}</td> </tr> <tr> <td>Killed</td> <td class="text-right">{{killed_tasks | number}}</td> </tr> <tr> <td>Failed</td> <td class="text-right">{{failed_tasks | number}}</td> </tr> <tr> <td>Lost</td> <td class="text-right">{{lost_tasks | number}}</td> </tr> </tbody> </table> <h4>Resources</h4> <table class="table table-condensed"> <thead> <tr> <td></td> <td class="text-right">Used</td> <td class="text-right">Allocated</td> <td class="text-right">Available</td> <td class="text-right">Total</td> </tr> </thead> <tbody> <tr> <td>CPUs</td> <td class="text-right"> {{monitor.statistics.cpus_total_usage | number}} </td> <td class="text-right"> {{state.allocated_resources.cpus | number}} </td> <td class="text-right"> {{state.resources.cpus - state.allocated_resources.cpus | number}} </td> <td class="text-right"> {{state.resources.cpus | number}} </td> </tr> <tr> <td>GPUs</td> <td class="text-right"> N/A </td> <td class="text-right"> {{state.allocated_resources.gpus | number}} </td> <td class="text-right"> {{state.resources.gpus - state.allocated_resources.gpus | number}} </td> <td class="text-right"> {{state.resources.gpus | number}} </td> </tr> <tr> <td>Memory</td> <td class="text-right"> {{monitor.statistics.mem_rss_bytes | dataSize}} </td> <td class="text-right"> {{state.allocated_resources.mem * (1024 * 1024) | dataSize}} </td> <td class="text-right"> {{(state.resources.mem - state.allocated_resources.mem) * (1024 * 1024) | dataSize}} </td> <td class="text-right"> {{state.resources.mem * (1024 * 1024) | dataSize}} </td> </tr> <tr> <td>Disk</td> <td class="text-right"> {{monitor.statistics.disk_used_bytes | dataSize}} </td> <td class="text-right"> {{state.allocated_resources.disk * (1024 * 1024) | dataSize}} 
</td> <td class="text-right"> {{(state.resources.disk - state.allocated_resources.disk) * (1024 * 1024) | dataSize}} </td> <td class="text-right"> {{state.resources.disk * (1024 * 1024) | dataSize}} </td> </tr> </tbody> </table> </div> </div> <!-- Only display this table if resource providers are present. --> <div class="col-md-9" ng-if="!_.isEmpty(agent.resource_providers)"> <table m-table table-content="agent.resource_providers" title="Resource Providers" class="table table-striped table-bordered table-condensed"> <thead> <tr> <th data-key="id">ID</th> <th data-key="name">Name</th> <th data-key="type">Type</th> <!-- TODO(bbannier): Show all resources in stringified representation. --> <th data-key="disk">Disk</th> </tr> </thead> <tbody> <tr ng-repeat="provider in $data"> <td> {{provider.resource_provider_info.id.value | truncateMesosID}} <button class="btn btn-xs btn-default btn-toggle" clipboard data-clipboard-text="{{provider.resource_provider_info.id.value}}" tooltip="Copy ID" tooltip-placement="right" tooltip-trigger="clipboardhover"> <i class="icon-file"></i> </button> </td> <td>{{provider.resource_provider_info.name}}</td> <td>{{provider.resource_provider_info.type}}</td> <td>{{provider.total_resources.disk * (1024 * 1024) | dataSize}}</td> </tr> </tbody> </table> </div> <div class="col-md-9"> <table m-table table-content="agent.reserved_resources_as_array" title="Resource Reservations" class="table table-striped table-bordered table-condensed"> <thead> <tr> <th data-key="role">Reservation Role</th> <th data-key="cpus">CPUs (Allocated / Total)</th> <th data-key="gpus">GPUs (Allocated / Total)</th> <th data-key="mem">Mem (Allocated / Total)</th> <th data-key="disk">Disk (Allocated / Total)</th> </tr> </thead> <tbody> <tr> <td><em>Unreserved</em></td> <td>{{state.unreserved_resources_allocated.cpus | number}} / {{state.unreserved_resources.cpus | number}}</td> <td>{{state.unreserved_resources_allocated.gpus | number}} / {{state.unreserved_resources.gpus | number}}</td> <td>{{state.unreserved_resources_allocated.mem * (1024 * 1024) | dataSize}} / {{state.unreserved_resources.mem * (1024 * 1024) | dataSize}}</td> <td>{{state.unreserved_resources_allocated.disk * (1024 * 1024) | dataSize}} / {{state.unreserved_resources.disk * (1024 * 1024) | dataSize}}</td> </tr> <tr ng-repeat="reservation in $data"> <td>{{reservation.role}}</td> <td>{{(state.reserved_resources_allocated[reservation.role].cpus || 0) | number}} / {{reservation.cpus | number}}</td> <td>{{(state.reserved_resources_allocated[reservation.role].gpus || 0) | number}} / {{reservation.gpus | number}}</td> <td>{{(state.reserved_resources_allocated[reservation.role].mem * (1024 * 1024) || 0) | dataSize}} / {{reservation.mem * (1024 * 1024) | dataSize}}</td> <td>{{(state.reserved_resources_allocated[reservation.role].disk * (1024 * 1024) || 0) | dataSize}} / {{reservation.disk * (1024 * 1024) | dataSize}}</td> </tr> </tbody> </table> <table m-table table-content="agent.frameworks" title="Frameworks" class="table table-striped table-bordered table-condensed"> <thead> <tr> <th data-key="id">ID</th> <th data-key="user">User</th> <th data-key="name">Name</th> <th data-key="roles">Roles</th> <th data-key="num_tasks">Active Tasks</th> <th data-key="cpus">CPUs (Used / Allocated)</th> <th data-key="gpus">GPUs (Used / Allocated)</th> <th data-key="mem">Mem (Used / Allocated)</th> <th data-key="disk">Disk (Used / Allocated)</th> </tr> </thead> <tbody> <tr ng-repeat="framework in $data"> <td> <a href="{{'#/agents/' + agent_id + 
'/frameworks/' + framework.id}}"> {{(framework.id | truncateMesosID) || framework.name}}</a> <button class="btn btn-xs btn-toggle btn-default" clipboard data-clipboard-text="{{framework.id}}" tooltip="Copy ID" tooltip-placement="right" tooltip-trigger="clipboardhover"> </button> </td> <td>{{framework.user}}</td> <td>{{framework.name}}</td> <!-- TODO(bmahler): This doesn't display well when there are a lot of roles (e.g. a large organization with a lot of teams & services, using roles like /engineering/frontend/webserver, etc). Figure out a way to display this without bloating the table. --> <td>{{framework.roles.toString()}}</td> <td>{{framework.num_tasks | number}}</td> <td>{{monitor.frameworks[framework.id].statistics.cpus_total_usage | number}} / {{framework.cpus | number}}</td> <!-- TODO(haosdent): We need to show statistics for gpu once it is provided in monitor endpoint. --> <td>N/A</td> <td>{{monitor.frameworks[framework.id].statistics.mem_rss_bytes | dataSize}} / {{framework.mem * (1024 * 1024) | dataSize}}</td> <td>{{monitor.frameworks[framework.id].statistics.disk_used_bytes | dataSize}} / {{framework.disk * (1024 * 1024) | dataSize}}</td> </tr> </tbody> </table> <table m-table table-content="agent.completed_frameworks" title="Completed Frameworks" class="table table-striped table-bordered table-condensed"> <thead> <tr> <th data-key="id">ID</th> <th data-key="user">User</th> <th data-key="name">Name</th> <th data-key="roles">Roles</th> <th data-key="tasks.length">Active Tasks</th> <th data-key="resources.cpus">CPUs</th> <th data-key="resources.gpus">GPUs</th> <th data-key="resources.mem">Mem</th> <th data-key="resources.disk">Disk</th> </tr> </thead> <tbody> <tr ng-repeat="completed_framework in $data"> <td> <a href="{{'#/agents/' + agent_id + '/frameworks/' + completed_framework.id}}"> {{completed_framework.id | truncateMesosID}}</a> <button class="btn btn-xs btn-toggle btn-default" clipboard data-clipboard-text="{{framework.id}}" tooltip="Copy ID" tooltip-placement="right" tooltip-trigger="clipboardhover"> </button> </td> <td>{{completed_framework.user}}</td> <td>{{completed_framework.name}}</td> <!-- TODO(bmahler): This doesn't display well when there are a lot of roles (e.g. a large organization with a lot of teams & services, using roles like /engineering/frontend/webserver, etc). Figure out a way to display this without bloating the table. --> <td>{{completed_framework.roles.toString()}}</td> <td>{{completed_framework.num_tasks | number}}</td> <td>{{completed_framework.cpus | number}}</td> <td>{{completed_framework.gpus | number}}</td> <td>{{completed_framework.mem * (1024 * 1024) | dataSize}}</td> <td>{{completed_framework.disk * (1024 * 1024) | dataSize}}</td> </tr> </tbody> </table> </div> </div>
apache-2.0
mikewesner-wf/glasshouse
appengine/application/templates/myhome.html
1801
{% extends "base.html" %} {% block style_block %} <style type="text/css"> table.table { width: 60%; } </style> {% endblock %} {% block content %} <!-- {% if user %} You seem to be logged in.<br/><br/> {{ user.id }} {% if credentials %} You also have OAUTH2 credintials stored. {% else %} You don't yet have OAUTH2 credintials.<a href="/signup">Get Them</a> {% endif %} {% else %} You are not logged in. <a href="/signup" class="btn btn-primary" data-toggle="modal" > <i class="icon-plus-sign icon-white"></i> Login </a> {% endif %} --> <div class="row clearfix"> <div class="col-md-12 column"> <div class="jumbotron"> <h1> GlassHouse </h1> <p> Glasshouse sits in between your smarthome and your google glass. It lets you configure what you want to see and do with your smart home on glass. </p> <p> Grab the <a href="#">GlassHouse Indigo Plugin</a> or checkout the <a href="#">api documentation</a> and roll your own smart home backend. </p> <p> <a class="btn btn-primary btn-small" href="#">Learn more</a> or {% if user and credentials %} <a class="btn btn-primary btn-small" href="/myhome">Manage Your Settings</a> {% else %} <a class="btn btn-primary btn-small" href="/signup">Sign In and Get Started</a> {% endif %} </p> </div> </div> </div> {% endblock content %}
apache-2.0
erwelch/camel
tests/camel-itest-karaf/src/test/java/org/apache/camel/itest/karaf/CamelHipchatTest.java
1257
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.itest.karaf; import org.apache.camel.test.karaf.AbstractFeatureTest; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.junit.PaxExam; @RunWith(PaxExam.class) public class CamelHipchatTest extends AbstractFeatureTest { public static final String COMPONENT = extractName(CamelHipchatTest.class); @Test public void test() throws Exception { testComponent(COMPONENT); } }
apache-2.0
kazuki43zoo/spring-security
config/src/main/java/org/springframework/security/config/websocket/WebSocketMessageBrokerSecurityBeanDefinitionParser.java
15241
/* * Copyright 2002-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.config.websocket; import static org.springframework.security.config.Elements.*; import java.util.Comparator; import java.util.List; import java.util.Map; import org.springframework.beans.BeansException; import org.springframework.beans.PropertyValue; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanReference; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.beans.factory.config.RuntimeBeanReference; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor; import org.springframework.beans.factory.support.ManagedList; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.springframework.beans.factory.xml.XmlReaderContext; import org.springframework.messaging.simp.SimpMessageType; import org.springframework.messaging.simp.annotation.support.SimpAnnotationMethodMessageHandler; import org.springframework.security.access.vote.ConsensusBased; import org.springframework.security.config.Elements; import org.springframework.security.messaging.access.expression.ExpressionBasedMessageSecurityMetadataSourceFactory; import org.springframework.security.messaging.access.expression.MessageExpressionVoter; import org.springframework.security.messaging.access.intercept.ChannelSecurityInterceptor; import org.springframework.security.messaging.context.AuthenticationPrincipalArgumentResolver; import org.springframework.security.messaging.context.SecurityContextChannelInterceptor; import org.springframework.security.messaging.util.matcher.SimpDestinationMessageMatcher; import org.springframework.security.messaging.util.matcher.SimpMessageTypeMatcher; import org.springframework.security.messaging.web.csrf.CsrfChannelInterceptor; import org.springframework.security.messaging.web.socket.server.CsrfTokenHandshakeInterceptor; import org.springframework.util.AntPathMatcher; import org.springframework.util.PathMatcher; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; /** * Parses Spring Security's websocket namespace support. 
A simple example is: * * <code> * &lt;websocket-message-broker&gt; * &lt;intercept-message pattern='/permitAll' access='permitAll' /&gt; * &lt;intercept-message pattern='/denyAll' access='denyAll' /&gt; * &lt;/websocket-message-broker&gt; * </code> * * <p> * The above configuration will ensure that any SimpAnnotationMethodMessageHandler has the * AuthenticationPrincipalArgumentResolver registered as a custom argument resolver. It * also ensures that the SecurityContextChannelInterceptor is automatically registered for * the clientInboundChannel. Last, it ensures that a ChannelSecurityInterceptor is * registered with the clientInboundChannel. * </p> * * <p> * If finer control is necessary, the id attribute can be used as shown below: * </p> * * <code> * &lt;websocket-message-broker id="channelSecurityInterceptor"&gt; * &lt;intercept-message pattern='/permitAll' access='permitAll' /&gt; * &lt;intercept-message pattern='/denyAll' access='denyAll' /&gt; * &lt;/websocket-message-broker&gt; * </code> * * <p> * Now the configuration will only create a bean named ChannelSecurityInterceptor and * assign it to the id of channelSecurityInterceptor. Users can explicitly wire Spring * Security using the standard Spring Messaging XML namespace support. * </p> * * @author Rob Winch * @since 4.0 */ public final class WebSocketMessageBrokerSecurityBeanDefinitionParser implements BeanDefinitionParser { private static final String ID_ATTR = "id"; private static final String DISABLED_ATTR = "same-origin-disabled"; private static final String PATTERN_ATTR = "pattern"; private static final String ACCESS_ATTR = "access"; private static final String TYPE_ATTR = "type"; private static final String PATH_MATCHER_BEAN_NAME = "springSecurityMessagePathMatcher"; /** * @param element * @param parserContext * @return */ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanDefinitionRegistry registry = parserContext.getRegistry(); XmlReaderContext context = parserContext.getReaderContext(); ManagedMap<BeanDefinition, String> matcherToExpression = new ManagedMap<>(); String id = element.getAttribute(ID_ATTR); Element expressionHandlerElt = DomUtils.getChildElementByTagName(element, EXPRESSION_HANDLER); String expressionHandlerRef = expressionHandlerElt == null ? 
null : expressionHandlerElt.getAttribute("ref"); boolean expressionHandlerDefined = StringUtils.hasText(expressionHandlerRef); boolean sameOriginDisabled = Boolean.parseBoolean(element .getAttribute(DISABLED_ATTR)); List<Element> interceptMessages = DomUtils.getChildElementsByTagName(element, Elements.INTERCEPT_MESSAGE); for (Element interceptMessage : interceptMessages) { String matcherPattern = interceptMessage.getAttribute(PATTERN_ATTR); String accessExpression = interceptMessage.getAttribute(ACCESS_ATTR); String messageType = interceptMessage.getAttribute(TYPE_ATTR); BeanDefinition matcher = createMatcher(matcherPattern, messageType, parserContext, interceptMessage); matcherToExpression.put(matcher, accessExpression); } BeanDefinitionBuilder mds = BeanDefinitionBuilder .rootBeanDefinition(ExpressionBasedMessageSecurityMetadataSourceFactory.class); mds.setFactoryMethod("createExpressionMessageMetadataSource"); mds.addConstructorArgValue(matcherToExpression); if(expressionHandlerDefined) { mds.addConstructorArgReference(expressionHandlerRef); } String mdsId = context.registerWithGeneratedName(mds.getBeanDefinition()); ManagedList<BeanDefinition> voters = new ManagedList<>(); BeanDefinitionBuilder messageExpressionVoterBldr = BeanDefinitionBuilder.rootBeanDefinition(MessageExpressionVoter.class); if(expressionHandlerDefined) { messageExpressionVoterBldr.addPropertyReference("expressionHandler", expressionHandlerRef); } voters.add(messageExpressionVoterBldr.getBeanDefinition()); BeanDefinitionBuilder adm = BeanDefinitionBuilder .rootBeanDefinition(ConsensusBased.class); adm.addConstructorArgValue(voters); BeanDefinitionBuilder inboundChannelSecurityInterceptor = BeanDefinitionBuilder .rootBeanDefinition(ChannelSecurityInterceptor.class); inboundChannelSecurityInterceptor.addConstructorArgValue(registry .getBeanDefinition(mdsId)); inboundChannelSecurityInterceptor.addPropertyValue("accessDecisionManager", adm.getBeanDefinition()); String inSecurityInterceptorName = context .registerWithGeneratedName(inboundChannelSecurityInterceptor .getBeanDefinition()); if (StringUtils.hasText(id)) { registry.registerAlias(inSecurityInterceptorName, id); if(!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) { registry.registerBeanDefinition(PATH_MATCHER_BEAN_NAME, new RootBeanDefinition(AntPathMatcher.class)); } } else { BeanDefinitionBuilder mspp = BeanDefinitionBuilder .rootBeanDefinition(MessageSecurityPostProcessor.class); mspp.addConstructorArgValue(inSecurityInterceptorName); mspp.addConstructorArgValue(sameOriginDisabled); context.registerWithGeneratedName(mspp.getBeanDefinition()); } return null; } private BeanDefinition createMatcher(String matcherPattern, String messageType, ParserContext parserContext, Element interceptMessage) { boolean hasPattern = StringUtils.hasText(matcherPattern); boolean hasMessageType = StringUtils.hasText(messageType); if (!hasPattern) { BeanDefinitionBuilder matcher = BeanDefinitionBuilder .rootBeanDefinition(SimpMessageTypeMatcher.class); matcher.addConstructorArgValue(messageType); return matcher.getBeanDefinition(); } String factoryName = null; if (hasPattern && hasMessageType) { SimpMessageType type = SimpMessageType.valueOf(messageType); if (SimpMessageType.MESSAGE == type) { factoryName = "createMessageMatcher"; } else if (SimpMessageType.SUBSCRIBE == type) { factoryName = "createSubscribeMatcher"; } else { parserContext .getReaderContext() .error("Cannot use intercept-websocket@message-type=" + messageType + " with a pattern because the type does not 
have a destination.", interceptMessage); } } BeanDefinitionBuilder matcher = BeanDefinitionBuilder .rootBeanDefinition(SimpDestinationMessageMatcher.class); matcher.setFactoryMethod(factoryName); matcher.addConstructorArgValue(matcherPattern); matcher.addConstructorArgValue(new RuntimeBeanReference("springSecurityMessagePathMatcher")); return matcher.getBeanDefinition(); } static class MessageSecurityPostProcessor implements BeanDefinitionRegistryPostProcessor { /** * This is not available prior to Spring 4.2 */ private static final String WEB_SOCKET_AMMH_CLASS_NAME = "org.springframework.web.socket.messaging.WebSocketAnnotationMethodMessageHandler"; private static final String CLIENT_INBOUND_CHANNEL_BEAN_ID = "clientInboundChannel"; private static final String INTERCEPTORS_PROP = "interceptors"; private static final String CUSTOM_ARG_RESOLVERS_PROP = "customArgumentResolvers"; private final String inboundSecurityInterceptorId; private final boolean sameOriginDisabled; public MessageSecurityPostProcessor(String inboundSecurityInterceptorId, boolean sameOriginDisabled) { this.inboundSecurityInterceptorId = inboundSecurityInterceptorId; this.sameOriginDisabled = sameOriginDisabled; } public void postProcessBeanDefinitionRegistry(BeanDefinitionRegistry registry) throws BeansException { String[] beanNames = registry.getBeanDefinitionNames(); for (String beanName : beanNames) { BeanDefinition bd = registry.getBeanDefinition(beanName); String beanClassName = bd.getBeanClassName(); if (SimpAnnotationMethodMessageHandler.class.getName().equals(beanClassName) || WEB_SOCKET_AMMH_CLASS_NAME.equals(beanClassName)) { PropertyValue current = bd.getPropertyValues().getPropertyValue( CUSTOM_ARG_RESOLVERS_PROP); ManagedList<Object> argResolvers = new ManagedList<>(); if (current != null) { argResolvers.addAll((ManagedList<?>) current.getValue()); } argResolvers.add(new RootBeanDefinition( AuthenticationPrincipalArgumentResolver.class)); bd.getPropertyValues().add(CUSTOM_ARG_RESOLVERS_PROP, argResolvers); if(!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) { PropertyValue pathMatcherProp = bd.getPropertyValues().getPropertyValue("pathMatcher"); Object pathMatcher = pathMatcherProp == null ? 
null : pathMatcherProp.getValue(); if(pathMatcher instanceof BeanReference) { registry.registerAlias(((BeanReference) pathMatcher).getBeanName(), PATH_MATCHER_BEAN_NAME); } } } else if ("org.springframework.web.socket.server.support.WebSocketHttpRequestHandler" .equals(beanClassName)) { addCsrfTokenHandshakeInterceptor(bd); } else if ("org.springframework.web.socket.sockjs.transport.TransportHandlingSockJsService" .equals(beanClassName)) { addCsrfTokenHandshakeInterceptor(bd); } else if ("org.springframework.web.socket.sockjs.transport.handler.DefaultSockJsService" .equals(beanClassName)) { addCsrfTokenHandshakeInterceptor(bd); } } if (!registry.containsBeanDefinition(CLIENT_INBOUND_CHANNEL_BEAN_ID)) { return; } ManagedList<Object> interceptors = new ManagedList(); interceptors.add(new RootBeanDefinition( SecurityContextChannelInterceptor.class)); if (!sameOriginDisabled) { interceptors.add(new RootBeanDefinition(CsrfChannelInterceptor.class)); } interceptors.add(registry.getBeanDefinition(inboundSecurityInterceptorId)); BeanDefinition inboundChannel = registry .getBeanDefinition(CLIENT_INBOUND_CHANNEL_BEAN_ID); PropertyValue currentInterceptorsPv = inboundChannel.getPropertyValues() .getPropertyValue(INTERCEPTORS_PROP); if (currentInterceptorsPv != null) { ManagedList<?> currentInterceptors = (ManagedList<?>) currentInterceptorsPv .getValue(); interceptors.addAll(currentInterceptors); } inboundChannel.getPropertyValues().add(INTERCEPTORS_PROP, interceptors); if(!registry.containsBeanDefinition(PATH_MATCHER_BEAN_NAME)) { registry.registerBeanDefinition(PATH_MATCHER_BEAN_NAME, new RootBeanDefinition(AntPathMatcher.class)); } } private void addCsrfTokenHandshakeInterceptor(BeanDefinition bd) { if (sameOriginDisabled) { return; } String interceptorPropertyName = "handshakeInterceptors"; ManagedList<? super Object> interceptors = new ManagedList<>(); interceptors.add(new RootBeanDefinition(CsrfTokenHandshakeInterceptor.class)); interceptors.addAll((ManagedList<Object>) bd.getPropertyValues().get( interceptorPropertyName)); bd.getPropertyValues().add(interceptorPropertyName, interceptors); } public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException { } } static class DelegatingPathMatcher implements PathMatcher { private PathMatcher delegate = new AntPathMatcher(); public boolean isPattern(String path) { return delegate.isPattern(path); } public boolean match(String pattern, String path) { return delegate.match(pattern, path); } public boolean matchStart(String pattern, String path) { return delegate.matchStart(pattern, path); } public String extractPathWithinPattern(String pattern, String path) { return delegate.extractPathWithinPattern(pattern, path); } public Map<String, String> extractUriTemplateVariables(String pattern, String path) { return delegate.extractUriTemplateVariables(pattern, path); } public Comparator<String> getPatternComparator(String path) { return delegate.getPatternComparator(path); } public String combine(String pattern1, String pattern2) { return delegate.combine(pattern1, pattern2); } void setPathMatcher(PathMatcher pathMatcher) { this.delegate = pathMatcher; } } }
apache-2.0
mgsx-dev/gdx-kit
core/src/net/mgsx/game/core/binding/BindingManager.java
1782
package net.mgsx.game.core.binding; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.Group; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.ObjectMap; import com.badlogic.gdx.utils.ObjectMap.Entry; public class BindingManager { final private static ObjectMap<String, Binding> bindings = new ObjectMap<String, Binding>(); static final Array<Learner> learners = new Array<Learner>(new Learner[]{new KeyboardLearner()}); public static void setBindings(Binding b) { bindings.put(b.target, b); } public static Binding getBinding(String target) { return bindings.get(target); } public static void clear() { for(Entry<String, Binding> entry : BindingManager.bindings) { for(Learner learner : BindingManager.learners){ learner.unbind(entry.value); } } BindingManager.bindings.clear(); } public static void applyBindings(Binding b, Stage stage) { bindings.put(b.target, b); bindActor(stage.getRoot()); for(Learner learner : BindingManager.learners){ learner.bind(b); } } public static void applyBindings(String key) { Binding b = bindings.get(key); if(b != null){ for(Learner learner : BindingManager.learners){ learner.bind(b); } } } private static void bindActor(Actor actor) { if(actor instanceof Learnable) { final Learnable learnable = (Learnable)actor; Binding bind = BindingManager.getBinding(learnable.bindKey()); if(bind != null){ bind.accessor = learnable.accessorToBind(); } } if(actor instanceof Group){ for(Actor child : ((Group) actor).getChildren()){ bindActor(child); } } } public static ObjectMap<String, Binding> bindings() { return bindings; } }
apache-2.0
peterdocter/zxing
android/src/com/google/zxing/client/android/share/ShareActivity.java
11036
/* * Copyright (C) 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.zxing.client.android.share; import android.provider.ContactsContract; import com.google.zxing.BarcodeFormat; import com.google.zxing.client.android.Contents; import com.google.zxing.client.android.Intents; import com.google.zxing.client.android.R; import android.app.Activity; import android.content.ContentResolver; import android.content.Intent; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.BaseColumns; import android.provider.Browser; import android.text.ClipboardManager; import android.util.Log; import android.view.KeyEvent; import android.view.View; import android.widget.Button; import android.widget.TextView; /** * Barcode Scanner can share data like contacts and bookmarks by displaying a QR Code on screen, * such that another user can scan the barcode with their phone. * * @author [email protected] (Daniel Switkin) */ public final class ShareActivity extends Activity { private static final String TAG = ShareActivity.class.getSimpleName(); private static final int PICK_BOOKMARK = 0; private static final int PICK_CONTACT = 1; private static final int PICK_APP = 2; private Button clipboardButton; private final Button.OnClickListener contactListener = new Button.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(Intent.ACTION_PICK, ContactsContract.Contacts.CONTENT_URI); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); startActivityForResult(intent, PICK_CONTACT); } }; private final Button.OnClickListener bookmarkListener = new Button.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(Intent.ACTION_PICK); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.setClassName(ShareActivity.this, BookmarkPickerActivity.class.getName()); startActivityForResult(intent, PICK_BOOKMARK); } }; private final Button.OnClickListener appListener = new Button.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(Intent.ACTION_PICK); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.setClassName(ShareActivity.this, AppPickerActivity.class.getName()); startActivityForResult(intent, PICK_APP); } }; private final Button.OnClickListener clipboardListener = new Button.OnClickListener() { @Override public void onClick(View v) { ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); // Should always be true, because we grey out the clipboard button in onResume() if it's empty if (clipboard.hasText()) { launchSearch(clipboard.getText().toString()); } } }; private final View.OnKeyListener textListener = new View.OnKeyListener() { @Override public boolean onKey(View view, int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_ENTER && event.getAction() == KeyEvent.ACTION_DOWN) { String text = ((TextView) view).getText().toString(); if (text != null && !text.isEmpty()) { 
launchSearch(text); } return true; } return false; } }; private void launchSearch(String text) { Intent intent = new Intent(Intents.Encode.ACTION); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.putExtra(Intents.Encode.TYPE, Contents.Type.TEXT); intent.putExtra(Intents.Encode.DATA, text); intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString()); startActivity(intent); } @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); setContentView(R.layout.share); findViewById(R.id.share_contact_button).setOnClickListener(contactListener); findViewById(R.id.share_bookmark_button).setOnClickListener(bookmarkListener); findViewById(R.id.share_app_button).setOnClickListener(appListener); clipboardButton = (Button) findViewById(R.id.share_clipboard_button); clipboardButton.setOnClickListener(clipboardListener); findViewById(R.id.share_text_view).setOnKeyListener(textListener); } @Override protected void onResume() { super.onResume(); ClipboardManager clipboard = (ClipboardManager) getSystemService(CLIPBOARD_SERVICE); clipboardButton.setEnabled(clipboard.hasText()); } @Override public void onActivityResult(int requestCode, int resultCode, Intent intent) { if (resultCode == RESULT_OK) { switch (requestCode) { case PICK_BOOKMARK: case PICK_APP: showTextAsBarcode(intent.getStringExtra(Browser.BookmarkColumns.URL)); break; case PICK_CONTACT: // Data field is content://contacts/people/984 showContactAsBarcode(intent.getData()); break; } } } private void showTextAsBarcode(String text) { Log.i(TAG, "Showing text as barcode: " + text); if (text == null) { return; // Show error? } Intent intent = new Intent(Intents.Encode.ACTION); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.putExtra(Intents.Encode.TYPE, Contents.Type.TEXT); intent.putExtra(Intents.Encode.DATA, text); intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString()); startActivity(intent); } /** * Takes a contact Uri and does the necessary database lookups to retrieve that person's info, * then sends an Encode intent to render it as a QR Code. * * @param contactUri A Uri of the form content://contacts/people/17 */ private void showContactAsBarcode(Uri contactUri) { Log.i(TAG, "Showing contact URI as barcode: " + contactUri); if (contactUri == null) { return; // Show error? } ContentResolver resolver = getContentResolver(); Cursor cursor; try { // We're seeing about six reports a week of this exception although I don't understand why. cursor = resolver.query(contactUri, null, null, null, null); } catch (IllegalArgumentException ignored) { return; } if (cursor == null) { return; } String id; String name; boolean hasPhone; try { if (!cursor.moveToFirst()) { return; } id = cursor.getString(cursor.getColumnIndex(BaseColumns._ID)); name = cursor.getString(cursor.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME)); hasPhone = cursor.getInt(cursor.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER)) > 0; } finally { cursor.close(); } // Don't require a name to be present, this contact might be just a phone number. 
Bundle bundle = new Bundle(); if (name != null && !name.isEmpty()) { bundle.putString(ContactsContract.Intents.Insert.NAME, massageContactData(name)); } if (hasPhone) { Cursor phonesCursor = resolver.query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI, null, ContactsContract.CommonDataKinds.Phone.CONTACT_ID + '=' + id, null, null); if (phonesCursor != null) { try { int foundPhone = 0; int phonesNumberColumn = phonesCursor.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER); while (phonesCursor.moveToNext() && foundPhone < Contents.PHONE_KEYS.length) { String number = phonesCursor.getString(phonesNumberColumn); if (number != null && !number.isEmpty()) { bundle.putString(Contents.PHONE_KEYS[foundPhone], massageContactData(number)); } foundPhone++; } } finally { phonesCursor.close(); } } } Cursor methodsCursor = resolver.query(ContactsContract.CommonDataKinds.StructuredPostal.CONTENT_URI, null, ContactsContract.CommonDataKinds.StructuredPostal.CONTACT_ID + '=' + id, null, null); if (methodsCursor != null) { try { if (methodsCursor.moveToNext()) { String data = methodsCursor.getString( methodsCursor.getColumnIndex(ContactsContract.CommonDataKinds.StructuredPostal.FORMATTED_ADDRESS)); if (data != null && !data.isEmpty()) { bundle.putString(ContactsContract.Intents.Insert.POSTAL, massageContactData(data)); } } } finally { methodsCursor.close(); } } Cursor emailCursor = resolver.query(ContactsContract.CommonDataKinds.Email.CONTENT_URI, null, ContactsContract.CommonDataKinds.Email.CONTACT_ID + '=' + id, null, null); if (emailCursor != null) { try { int foundEmail = 0; int emailColumn = emailCursor.getColumnIndex(ContactsContract.CommonDataKinds.Email.DATA); while (emailCursor.moveToNext() && foundEmail < Contents.EMAIL_KEYS.length) { String email = emailCursor.getString(emailColumn); if (email != null && !email.isEmpty()) { bundle.putString(Contents.EMAIL_KEYS[foundEmail], massageContactData(email)); } foundEmail++; } } finally { emailCursor.close(); } } Intent intent = new Intent(Intents.Encode.ACTION); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.putExtra(Intents.Encode.TYPE, Contents.Type.CONTACT); intent.putExtra(Intents.Encode.DATA, bundle); intent.putExtra(Intents.Encode.FORMAT, BarcodeFormat.QR_CODE.toString()); Log.i(TAG, "Sending bundle for encoding: " + bundle); startActivity(intent); } private static String massageContactData(String data) { // For now -- make sure we don't put newlines in shared contact data. It messes up // any known encoding of contact data. Replace with space. if (data.indexOf('\n') >= 0) { data = data.replace("\n", " "); } if (data.indexOf('\r') >= 0) { data = data.replace("\r", " "); } return data; } }
apache-2.0
JMaltat/fragaria-ektorp
org.ektorp/src/main/java/org/ektorp/impl/DocIdResponseHandler.java
1471
package org.ektorp.impl; import java.util.ArrayList; import java.util.List; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.ObjectMapper; import org.ektorp.DbAccessException; import org.ektorp.http.HttpResponse; import org.ektorp.http.StdResponseHandler; /** * * @author henrik lundgren * */ public class DocIdResponseHandler extends StdResponseHandler<List<String>> { private final JsonFactory jsonFactory; public DocIdResponseHandler(ObjectMapper om) { jsonFactory = om.getJsonFactory(); } @Override public List<String> success(HttpResponse hr) throws Exception { JsonParser jp = jsonFactory.createJsonParser(hr.getContent()); if (jp.nextToken() != JsonToken.START_OBJECT) { throw new DbAccessException("Expected data to start with an Object"); } boolean inRow = false; List<String> result = null; while (jp.nextToken() != null) { switch (jp.getCurrentToken()) { case START_ARRAY: inRow = true; break; case END_ARRAY: inRow = false; break; case FIELD_NAME: String n = jp.getCurrentName(); if (inRow) { if ("id".equals(n)) { jp.nextToken(); result.add(jp.getText()); } } else if ("total_rows".equals(n)) { jp.nextToken(); result = new ArrayList<String>(jp.getIntValue()); } break; } } return result; } }
apache-2.0
Swrrt/Samza
samza-core/src/main/java/org/apache/samza/system/SystemAdmins.java
2240
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.system; import java.util.Map; import java.util.Set; import org.apache.samza.SamzaException; import org.apache.samza.config.Config; import org.apache.samza.config.SystemConfig; import org.apache.samza.config.MapConfig; /** * Provides a mapping from system name to a {@link SystemAdmin}. Needs to be started before use and stopped after use. */ public class SystemAdmins { private final Map<String, SystemAdmin> systemAdminMap; public SystemAdmins(Config config) { SystemConfig systemConfig = new SystemConfig(config); this.systemAdminMap = systemConfig.getSystemAdmins(); } /** * Creates a new instance of {@link SystemAdmins} with an empty admin mapping. * @return New empty instance of {@link SystemAdmins} */ public static SystemAdmins empty() { return new SystemAdmins(new MapConfig()); } public void start() { for (SystemAdmin systemAdmin : systemAdminMap.values()) { systemAdmin.start(); } } public void stop() { for (SystemAdmin systemAdmin : systemAdminMap.values()) { systemAdmin.stop(); } } public SystemAdmin getSystemAdmin(String systemName) { if (!systemAdminMap.containsKey(systemName)) { throw new SamzaException("Cannot get systemAdmin for system " + systemName); } return systemAdminMap.get(systemName); } public Set<String> getSystemNames() { return systemAdminMap.keySet(); } }
apache-2.0
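A rough illustration of the lifecycle described in the SystemAdmins Javadoc above (start before use, stop after use). This is a hedged sketch: the "systems.kafka.samza.factory" key and the Kafka factory class name are assumptions about the surrounding configuration and classpath, not taken from this file.

```java
import java.util.Collections;
import org.apache.samza.config.MapConfig;
import org.apache.samza.system.SystemAdmin;
import org.apache.samza.system.SystemAdmins;

public class SystemAdminsUsageSketch {
    public static void main(String[] args) {
        // Hypothetical configuration: one system named "kafka"; the factory class is assumed to be on the classpath.
        MapConfig config = new MapConfig(Collections.singletonMap(
                "systems.kafka.samza.factory", "org.apache.samza.system.kafka.KafkaSystemFactory"));

        SystemAdmins systemAdmins = new SystemAdmins(config);
        systemAdmins.start();
        try {
            // Look up the admin for a system by name; unknown names throw a SamzaException.
            SystemAdmin kafkaAdmin = systemAdmins.getSystemAdmin("kafka");
            // ... use kafkaAdmin, e.g. to create or validate streams ...
        } finally {
            systemAdmins.stop();
        }
    }
}
```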
rolandomar/stheno
stheno/src/org/cracs/stheno/core/SthenoCoreInterface.h
3058
/* * Copyright 2012 Rolando Martins, CRACS & INESC-TEC, DCC/FCUP * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * File: SthenoCoreInterface.h * Author: rmartins * * Created on October 21, 2010, 4:33 PM */ #ifndef STHENOCOREINTERFACE_H #define STHENOCOREINTERFACE_H #include <euryale/common/uuid/UUID.h> #include <stheno/service/UserService.h> #include <stheno/service/ServiceParams.h> #include <stheno/service/ServiceClient.h> #include <stheno/exception/RuntimeException.h> #include <stheno/core/p2p/common/ServiceInfo.h> #include <stheno/core/p2p/common/ServiceInstanceInfo.h> #include <stheno/core/DefaultServiceFactory.h> #include <stheno/core/LocalService.h> #include <euryale/qos/RTParams.h> //#include <stheno/core/SthenoCore.h> class SthenoCore; #include <stheno/core/QoSManagerInterface.h> class SthenoCoreInterface { public: SthenoCoreInterface(SthenoCore* core); virtual ~SthenoCoreInterface(); void createLocalService(UUIDPtr& sid, ServiceParamsPtr& params,UUIDPtr& iid) throw (ServiceException&); void createReplicationGroup( OverlayPeerInfoPtr& primary, list<OverlayPeerInfoPtr>& peers, UUIDPtr& rgid, UUIDPtr& sid, ServiceParamsPtr& params ) throw (ServiceException&); void createLocalServiceReplica(UUIDPtr& sid, ServiceParamsPtr& params,ServiceAbstractPtr& sPtr) throw (ServiceException&); void changeIIDOfService(UUIDPtr& sid,UUIDPtr& iid,UUIDPtr& newIid) throw (ServiceException&); void stopLocalService(UUIDPtr& sid, UUIDPtr& iid) throw (ServiceException&); bool isServiceRunning(UUIDPtr& sid, UUIDPtr& idd); void getInstancesOfService(UUIDPtr& sid,UUIDPtr& iid,ServiceInstanceInfoPtr& info) throw (ServiceException&); list<UUIDPtr>* getInstancesOfServiceUUIDs(UUIDPtr& sid) throw (ServiceException&); list<ServiceInstanceInfoPtr>* getInstancesOfService(UUIDPtr& sid) throw (ServiceException&); //void allocateQoS(QoSResources* qos) throw (ServiceException&); ServiceFactory& getDefaultServiceFactory(); void getUUID(UUIDPtr& uuid); bool isValid(); bool isSuspended(); bool isClose(); Byte getStatus(); UInt getRuntimeQoS(); UInt getRuntimeQoSPeriod(); //QoSManagerInterface* getQoSManager(String& path, ULong runtime,ULong period); private: SthenoCore* m_core; QoSManagerInterface* m_qosManager; }; #endif /* STHENOCOREINTERFACE_H */
apache-2.0
chr-fritz/seu-as-code.packages
com.oracle/jdk8/1.8.0_121/README.md
336
# Java Development Kit SE 8u121

The package contains the Java Platform, Standard Edition (JDK) 8u121 release. Install this package using SEU as Code with the following dependency:

```groovy
win {
    dependencies {
        software 'com.oracle:jdk8:1.8.0_121:win'
    }
}

mac {
    dependencies {
        software 'com.oracle:jdk8:1.8.0_121:mac'
    }
}
```
apache-2.0
locationtech/geowave
core/geotime/src/main/java/org/locationtech/geowave/core/geotime/adapter/SpatialFieldDescriptorBuilder.java
2649
/**
 * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
 *
 * <p> See the NOTICE file distributed with this work for additional information regarding copyright
 * ownership. All rights reserved. This program and the accompanying materials are made available
 * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
 * available at http://www.apache.org/licenses/LICENSE-2.0.txt
 */
package org.locationtech.geowave.core.geotime.adapter;

import org.locationtech.geowave.core.geotime.store.dimension.SpatialField;
import org.locationtech.geowave.core.geotime.util.GeometryUtils;
import org.locationtech.geowave.core.store.adapter.FieldDescriptorBuilder;
import org.opengis.referencing.crs.CoordinateReferenceSystem;

/**
 * A field descriptor builder that includes helper functions for spatial indexing hints and
 * `CoordinateReferenceSystem`.
 *
 * @param <T> the adapter field type
 */
public class SpatialFieldDescriptorBuilder<T> extends
    FieldDescriptorBuilder<T, SpatialFieldDescriptor<T>, SpatialFieldDescriptorBuilder<T>> {

  protected CoordinateReferenceSystem crs = GeometryUtils.getDefaultCRS();

  public SpatialFieldDescriptorBuilder(final Class<T> bindingClass) {
    super(bindingClass);
  }

  /**
   * Hint that the field contains both latitude and longitude information and should be used in
   * spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> spatialIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT).indexHint(
        SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains latitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> latitudeIndexHint() {
    return this.indexHint(SpatialField.LATITUDE_DIMENSION_HINT);
  }

  /**
   * Hint that the field contains longitude information and should be used in spatial indexing.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> longitudeIndexHint() {
    return this.indexHint(SpatialField.LONGITUDE_DIMENSION_HINT);
  }

  /**
   * Specify the coordinate reference system of the spatial field.
   *
   * @return the spatial field descriptor builder
   */
  public SpatialFieldDescriptorBuilder<T> crs(final CoordinateReferenceSystem crs) {
    this.crs = crs;
    return this;
  }

  @Override
  public SpatialFieldDescriptor<T> build() {
    return new SpatialFieldDescriptor<>(bindingClass, fieldName, indexHints, crs);
  }
}
apache-2.0
xasx/assertj-core
src/main/java/org/assertj/core/api/ProxyableMapAssert.java
1243
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2019 the original author or authors.
 */
package org.assertj.core.api;

import java.util.List;
import java.util.Map;

/**
 * Concrete assertions for {@link Map}s without any final methods to allow proxying.
 */
public class ProxyableMapAssert<KEY, VALUE> extends AbstractMapAssert<ProxyableMapAssert<KEY, VALUE>, Map<KEY, VALUE>, KEY, VALUE> {

  public ProxyableMapAssert(Map<KEY, VALUE> actual) {
    super(actual, ProxyableMapAssert.class);
  }

  @Override
  protected <ELEMENT> AbstractListAssert<?, List<? extends ELEMENT>, ELEMENT, ObjectAssert<ELEMENT>> newListAssertInstance(List<? extends ELEMENT> newActual) {
    return new ProxyableListAssert<>(newActual);
  }
}
apache-2.0
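An illustrative sketch of driving the `ProxyableMapAssert` class from the record above; direct construction is shown only for clarity (in AssertJ this type is normally created through soft-assertion proxies), and the chained assertion methods are assumed to be the ones inherited from `AbstractMapAssert`:

```java
import java.util.HashMap;
import java.util.Map;
import org.assertj.core.api.ProxyableMapAssert;

// Sketch only: the map contents are made-up sample data.
public class ProxyableMapAssertSketch {
  public static void main(String[] args) {
    Map<String, Integer> scores = new HashMap<>();
    scores.put("alice", 3);
    scores.put("bob", 5);

    // All assertion methods below come from the AbstractMapAssert superclass.
    new ProxyableMapAssert<>(scores)
        .containsKeys("alice", "bob")
        .containsEntry("bob", 5)
        .hasSize(2);
  }
}
```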
machaval/mule-intellij-plugins
data-weave-plugin/src/main/gen/org/mule/tooling/lang/dw/parser/psi/WeaveOutputDirective.java
356
// This is a generated file. Not intended for manual editing.
package org.mule.tooling.lang.dw.parser.psi;

import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.psi.PsiElement;

public interface WeaveOutputDirective extends WeaveDirective {

  @Nullable
  WeaveDataType getDataType();

  @Nullable
  WeaveOptions getOptions();

}
apache-2.0
Azure/durabletask
test/DurableTask.AzureServiceFabric.Tests/PersistentSessionTests.cs
2352
// ----------------------------------------------------------------------------------
//  Copyright Microsoft Corporation
//  Licensed under the Apache License, Version 2.0 (the "License");
//  you may not use this file except in compliance with the License.
//  You may obtain a copy of the License at
//  http://www.apache.org/licenses/LICENSE-2.0
//  Unless required by applicable law or agreed to in writing, software
//  distributed under the License is distributed on an "AS IS" BASIS,
//  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//  See the License for the specific language governing permissions and
//  limitations under the License.
// ----------------------------------------------------------------------------------

namespace DurableTask.AzureServiceFabric.Tests
{
    using System;
    using System.Collections.Generic;
    using System.Collections.Immutable;

    using DurableTask.Core;
    using DurableTask.Core.History;

    using Microsoft.VisualStudio.TestTools.UnitTesting;

    [TestClass]
    public class PersistentSessionTests
    {
        [TestMethod]
        public void PersistentSession_SerializationTest()
        {
            int numberOfHistoryEvents = 256;

            var events = new List<HistoryEvent>();
            events.Add(new ExecutionStartedEvent(-1, "TestInput"));
            for (int i = 0; i < numberOfHistoryEvents; i++)
            {
                events.Add(new TaskScheduledEvent(-1));
                events.Add(new TaskCompletedEvent(-1, -1, $"Task {i} Result"));
            }
            events.Add(new ExecutionCompletedEvent(-1, "FinalResult", OrchestrationStatus.Completed));

            var instance = new OrchestrationInstance()
            {
                InstanceId = "testSession",
                ExecutionId = Guid.NewGuid().ToString("N")
            };

            PersistentSession testSession = PersistentSession.Create(instance, events.ToImmutableList());

            var actual = Measure.DataContractSerialization(testSession);

            Assert.IsNotNull(actual);
            Assert.AreEqual(instance.InstanceId, actual.SessionId.InstanceId);
            Assert.AreEqual(instance.ExecutionId, actual.SessionId.ExecutionId);
            Assert.AreEqual(numberOfHistoryEvents * 2 + 2, actual.SessionState.Count);
        }
    }
}
apache-2.0
saturday06/gradle-android-scala-plugin
sample/hello/src/main/scala/jp/leafytree/android/hello/HelloActivity.scala
772
package jp.leafytree.android.hello

import android.app.Activity
import android.os.Bundle
import android.util.Log
import android.widget.TextView
import com.google.common.collect.ImmutableSet
import org.apache.commons.math3.analysis.function.Abs
import scalaz.Scalaz._

class HelloActivity extends Activity {
  override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_hello)
    val scalaTextView = findViewById(R.id.scala_text_view).asInstanceOf[TextView]
    scalaTextView.setText(new HelloJava().say())

    val values = for {
      str <- List("1", "2", "3", "string", "5")
      int <- str.parseInt.toOption
    } yield (new Abs()).value(int)
    Log.d("debug", "" + ImmutableSet.of(values))
  }
}
apache-2.0
huluwa/superjokes
src/Joke.Data/Properties/AssemblyInfo.cs
1394
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Joke.Data")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Joke.Data")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("e791180b-b34a-482d-9589-0314f4fa77e6")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
apache-2.0
movmov/cc
nova/objectstore/image.py
6265
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright [2010] [Anso Labs, LLC] # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Take uploaded bucket contents and register them as disk images (AMIs). Requires decryption using keys in the manifest. """ # TODO(jesse): Got these from Euca2ools, will need to revisit them import binascii import glob import json import os import shutil import tarfile import tempfile from xml.etree import ElementTree from nova import exception from nova import flags from nova import utils from nova.objectstore import bucket FLAGS = flags.FLAGS flags.DEFINE_string('images_path', utils.abspath('../images'), 'path to decrypted images') class Image(object): def __init__(self, image_id): self.image_id = image_id self.path = os.path.abspath(os.path.join(FLAGS.images_path, image_id)) if not self.path.startswith(os.path.abspath(FLAGS.images_path)) or \ not os.path.isdir(self.path): raise exception.NotFound def delete(self): for fn in ['info.json', 'image']: try: os.unlink(os.path.join(self.path, fn)) except: pass try: os.rmdir(self.path) except: pass def is_authorized(self, context): try: return self.metadata['isPublic'] or context.user.is_admin() or self.metadata['imageOwnerId'] == context.project.id except: return False def set_public(self, state): md = self.metadata md['isPublic'] = state with open(os.path.join(self.path, 'info.json'), 'w') as f: json.dump(md, f) @staticmethod def all(): images = [] for fn in glob.glob("%s/*/info.json" % FLAGS.images_path): try: image_id = fn.split('/')[-2] images.append(Image(image_id)) except: pass return images @property def owner_id(self): return self.metadata['imageOwnerId'] @property def metadata(self): with open(os.path.join(self.path, 'info.json')) as f: return json.load(f) @staticmethod def create(image_id, image_location, context): image_path = os.path.join(FLAGS.images_path, image_id) os.makedirs(image_path) bucket_name = image_location.split("/")[0] manifest_path = image_location[len(bucket_name)+1:] bucket_object = bucket.Bucket(bucket_name) manifest = ElementTree.fromstring(bucket_object[manifest_path].read()) image_type = 'machine' try: kernel_id = manifest.find("machine_configuration/kernel_id").text if kernel_id == 'true': image_type = 'kernel' except: pass try: ramdisk_id = manifest.find("machine_configuration/ramdisk_id").text if ramdisk_id == 'true': image_type = 'ramdisk' except: pass info = { 'imageId': image_id, 'imageLocation': image_location, 'imageOwnerId': context.project.id, 'isPublic': False, # FIXME: grab public from manifest 'architecture': 'x86_64', # FIXME: grab architecture from manifest 'type' : image_type } def write_state(state): info['imageState'] = state with open(os.path.join(image_path, 'info.json'), "w") as f: json.dump(info, f) write_state('pending') encrypted_filename = os.path.join(image_path, 'image.encrypted') with open(encrypted_filename, 'w') as f: for filename in manifest.find("image").getiterator("filename"): shutil.copyfileobj(bucket_object[filename.text].file, f) write_state('decrypting') # 
FIXME: grab kernelId and ramdiskId from bundle manifest encrypted_key = binascii.a2b_hex(manifest.find("image/ec2_encrypted_key").text) encrypted_iv = binascii.a2b_hex(manifest.find("image/ec2_encrypted_iv").text) cloud_private_key = os.path.join(FLAGS.ca_path, "private/cakey.pem") decrypted_filename = os.path.join(image_path, 'image.tar.gz') Image.decrypt_image(encrypted_filename, encrypted_key, encrypted_iv, cloud_private_key, decrypted_filename) write_state('untarring') image_file = Image.untarzip_image(image_path, decrypted_filename) shutil.move(os.path.join(image_path, image_file), os.path.join(image_path, 'image')) write_state('available') os.unlink(decrypted_filename) os.unlink(encrypted_filename) @staticmethod def decrypt_image(encrypted_filename, encrypted_key, encrypted_iv, cloud_private_key, decrypted_filename): key, err = utils.execute('openssl rsautl -decrypt -inkey %s' % cloud_private_key, encrypted_key) if err: raise exception.Error("Failed to decrypt private key: %s" % err) iv, err = utils.execute('openssl rsautl -decrypt -inkey %s' % cloud_private_key, encrypted_iv) if err: raise exception.Error("Failed to decrypt initialization vector: %s" % err) out, err = utils.execute('openssl enc -d -aes-128-cbc -in %s -K %s -iv %s -out %s' % (encrypted_filename, key, iv, decrypted_filename)) if err: raise exception.Error("Failed to decrypt image file %s : %s" % (encrypted_filename, err)) @staticmethod def untarzip_image(path, filename): tar_file = tarfile.open(filename, "r|gz") tar_file.extractall(path) image_file = tar_file.getnames()[0] tar_file.close() return image_file
apache-2.0
GGist/redox-rs
bip_handshake/test/mod.rs
507
extern crate bip_handshake;
extern crate bip_util;
extern crate futures;
extern crate tokio_io;
extern crate tokio_core;

mod test_connect;
mod test_byte_after_handshake;
mod test_bytes_after_handshake;
mod test_filter_allow_all;
mod test_filter_block_all;
mod test_filter_whitelist_same_data;
mod test_filter_whitelist_diff_data;

//----------------------------------------------------------------------------------//

#[derive(PartialEq, Eq, Debug)]
pub enum TimeoutResult {
    TimedOut,
    GotResult
}
apache-2.0
softlion/Cheesebaron.MvxPlugins
Samples/AzureAccessControl.Sample.Droid/Bootstrap/VisibilityPluginBootstrap.cs
232
using Cirrious.CrossCore.Plugins;

namespace AzureAccessControl.Sample.Droid.Bootstrap
{
    public class VisibilityPluginBootstrap
        : MvxPluginBootstrapAction<Cirrious.MvvmCross.Plugins.Visibility.PluginLoader>
    {
    }
}
apache-2.0
jeffgbutler/mybatis-qbe
src/test/java/examples/springbatch/paging/PagingReaderBatchConfiguration.java
5674
/* * Copyright 2016-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package examples.springbatch.paging; import static examples.springbatch.mapper.PersonDynamicSqlSupport.*; import static org.mybatis.dynamic.sql.SqlBuilder.*; import javax.sql.DataSource; import org.apache.ibatis.session.SqlSessionFactory; import org.mybatis.dynamic.sql.select.render.SelectStatementProvider; import org.mybatis.dynamic.sql.update.render.UpdateStatementProvider; import org.mybatis.dynamic.sql.util.springbatch.SpringBatchUtility; import org.mybatis.spring.SqlSessionFactoryBean; import org.mybatis.spring.annotation.MapperScan; import org.mybatis.spring.batch.MyBatisBatchItemWriter; import org.mybatis.spring.batch.MyBatisPagingItemReader; import org.springframework.batch.core.Job; import org.springframework.batch.core.Step; import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.batch.core.configuration.annotation.JobBuilderFactory; import org.springframework.batch.core.configuration.annotation.StepBuilderFactory; import org.springframework.batch.core.launch.support.RunIdIncrementer; import org.springframework.batch.item.ItemProcessor; import org.springframework.batch.item.ItemReader; import org.springframework.batch.item.ItemWriter; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.core.convert.converter.Converter; import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseBuilder; import org.springframework.jdbc.datasource.embedded.EmbeddedDatabaseType; import org.springframework.transaction.PlatformTransactionManager; import examples.springbatch.common.PersonRecord; import examples.springbatch.mapper.PersonMapper; @EnableBatchProcessing @Configuration @ComponentScan("examples.springbatch.common") @MapperScan("examples.springbatch.mapper") public class PagingReaderBatchConfiguration { @Autowired private JobBuilderFactory jobBuilderFactory; @Autowired private StepBuilderFactory stepBuilderFactory; @Bean public DataSource dataSource() { return new EmbeddedDatabaseBuilder() .setType(EmbeddedDatabaseType.HSQL) .addScript("classpath:/org/springframework/batch/core/schema-drop-hsqldb.sql") .addScript("classpath:/org/springframework/batch/core/schema-hsqldb.sql") .addScript("classpath:/examples/springbatch/schema.sql") .addScript("classpath:/examples/springbatch/data.sql") .build(); } @Bean public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception { SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean(); sessionFactory.setDataSource(dataSource); return sessionFactory.getObject(); } @Bean public PlatformTransactionManager transactionManager(DataSource dataSource) { return new 
DataSourceTransactionManager(dataSource); } @Bean public MyBatisPagingItemReader<PersonRecord> reader(SqlSessionFactory sqlSessionFactory) { SelectStatementProvider selectStatement = SpringBatchUtility.selectForPaging(person.allColumns()) .from(person) .where(forPagingTest, isEqualTo(true)) .orderBy(id) .build() .render(); MyBatisPagingItemReader<PersonRecord> reader = new MyBatisPagingItemReader<>(); reader.setQueryId(PersonMapper.class.getName() + ".selectMany"); reader.setSqlSessionFactory(sqlSessionFactory); reader.setParameterValues(SpringBatchUtility.toParameterValues(selectStatement)); reader.setPageSize(7); return reader; } @Bean public MyBatisBatchItemWriter<PersonRecord> writer(SqlSessionFactory sqlSessionFactory, Converter<PersonRecord, UpdateStatementProvider> convertor) { MyBatisBatchItemWriter<PersonRecord> writer = new MyBatisBatchItemWriter<>(); writer.setSqlSessionFactory(sqlSessionFactory); writer.setItemToParameterConverter(convertor); writer.setStatementId(PersonMapper.class.getName() + ".update"); return writer; } @Bean public Step step1(ItemReader<PersonRecord> reader, ItemProcessor<PersonRecord, PersonRecord> processor, ItemWriter<PersonRecord> writer) { return stepBuilderFactory.get("step1") .<PersonRecord, PersonRecord>chunk(7) .reader(reader) .processor(processor) .writer(writer) .build(); } @Bean public Job upperCaseLastName(Step step1) { return jobBuilderFactory.get("upperCaseLastName") .incrementer(new RunIdIncrementer()) .flow(step1) .end() .build(); } }
apache-2.0
nterry/aws-sdk-java
aws-java-sdk-datapipeline/src/main/java/com/amazonaws/services/datapipeline/model/transform/SetStatusRequestMarshaller.java
3921
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.datapipeline.model.transform; import java.io.ByteArrayInputStream; import java.util.Collections; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.datapipeline.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.IdempotentUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.protocol.json.*; /** * SetStatusRequest Marshaller */ public class SetStatusRequestMarshaller implements Marshaller<Request<SetStatusRequest>, SetStatusRequest> { private final SdkJsonProtocolFactory protocolFactory; public SetStatusRequestMarshaller(SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<SetStatusRequest> marshall(SetStatusRequest setStatusRequest) { if (setStatusRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<SetStatusRequest> request = new DefaultRequest<SetStatusRequest>( setStatusRequest, "DataPipeline"); request.addHeader("X-Amz-Target", "DataPipeline.SetStatus"); request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { final StructuredJsonGenerator jsonGenerator = protocolFactory .createGenerator(); jsonGenerator.writeStartObject(); if (setStatusRequest.getPipelineId() != null) { jsonGenerator.writeFieldName("pipelineId").writeValue( setStatusRequest.getPipelineId()); } com.amazonaws.internal.SdkInternalList<String> objectIdsList = (com.amazonaws.internal.SdkInternalList<String>) setStatusRequest .getObjectIds(); if (!objectIdsList.isEmpty() || !objectIdsList.isAutoConstruct()) { jsonGenerator.writeFieldName("objectIds"); jsonGenerator.writeStartArray(); for (String objectIdsListValue : objectIdsList) { if (objectIdsListValue != null) { jsonGenerator.writeValue(objectIdsListValue); } } jsonGenerator.writeEndArray(); } if (setStatusRequest.getStatus() != null) { jsonGenerator.writeFieldName("status").writeValue( setStatusRequest.getStatus()); } jsonGenerator.writeEndObject(); byte[] content = jsonGenerator.getBytes(); request.setContent(new ByteArrayInputStream(content)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", jsonGenerator.getContentType()); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
apache-2.0
alien4cloud/samples
org/alien4cloud/mock/jobs/scripts/operation.sh
86
#!/bin/bash -e

echo "${NODE}.${duration} sleep: ${duration} sec"
/bin/sleep $duration
apache-2.0
Fabryprog/camel
core/camel-base/src/main/java/org/apache/camel/processor/Pipeline.java
6742
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; import org.apache.camel.AsyncCallback; import org.apache.camel.AsyncProcessor; import org.apache.camel.CamelContext; import org.apache.camel.Exchange; import org.apache.camel.Navigate; import org.apache.camel.Processor; import org.apache.camel.Traceable; import org.apache.camel.spi.IdAware; import org.apache.camel.support.AsyncProcessorConverterHelper; import org.apache.camel.support.AsyncProcessorSupport; import org.apache.camel.support.ExchangeHelper; import org.apache.camel.support.service.ServiceHelper; import static org.apache.camel.processor.PipelineHelper.continueProcessing; /** * Creates a Pipeline pattern where the output of the previous step is sent as * input to the next step, reusing the same message exchanges */ public class Pipeline extends AsyncProcessorSupport implements Navigate<Processor>, Traceable, IdAware { private final CamelContext camelContext; private List<AsyncProcessor> processors; private String id; public Pipeline(CamelContext camelContext, Collection<Processor> processors) { this.camelContext = camelContext; this.processors = processors.stream().map(AsyncProcessorConverterHelper::convert).collect(Collectors.toList()); } public static Processor newInstance(CamelContext camelContext, List<Processor> processors) { if (processors.isEmpty()) { return null; } else if (processors.size() == 1) { return processors.get(0); } return new Pipeline(camelContext, processors); } public static Processor newInstance(final CamelContext camelContext, final Processor... 
processors) { if (processors == null || processors.length == 0) { return null; } else if (processors.length == 1) { return processors[0]; } final List<Processor> toBeProcessed = new ArrayList<>(processors.length); for (Processor processor : processors) { if (processor != null) { toBeProcessed.add(processor); } } return new Pipeline(camelContext, toBeProcessed); } @Override public boolean process(Exchange exchange, AsyncCallback callback) { if (exchange.isTransacted()) { camelContext.getReactiveExecutor().scheduleSync(() -> Pipeline.this.doProcess(exchange, callback, processors.iterator(), true), "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]"); } else { camelContext.getReactiveExecutor().scheduleMain(() -> Pipeline.this.doProcess(exchange, callback, processors.iterator(), true), "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]"); } return false; } protected void doProcess(Exchange exchange, AsyncCallback callback, Iterator<AsyncProcessor> processors, boolean first) { if (continueRouting(processors, exchange) && (first || continueProcessing(exchange, "so breaking out of pipeline", log))) { // prepare for next run if (exchange.hasOut()) { exchange.setIn(exchange.getOut()); exchange.setOut(null); } // get the next processor AsyncProcessor processor = processors.next(); processor.process(exchange, doneSync -> camelContext.getReactiveExecutor().schedule(() -> doProcess(exchange, callback, processors, false), "Step[" + exchange.getExchangeId() + "," + Pipeline.this + "]")); } else { ExchangeHelper.copyResults(exchange, exchange); // logging nextExchange as it contains the exchange that might have altered the payload and since // we are logging the completion if will be confusing if we log the original instead // we could also consider logging the original and the nextExchange then we have *before* and *after* snapshots log.trace("Processing complete for exchangeId: {} >>> {}", exchange.getExchangeId(), exchange); camelContext.getReactiveExecutor().callback(callback); } } protected boolean continueRouting(Iterator<AsyncProcessor> it, Exchange exchange) { Object stop = exchange.getProperty(Exchange.ROUTE_STOP); if (stop != null) { boolean doStop = exchange.getContext().getTypeConverter().convertTo(Boolean.class, stop); if (doStop) { log.debug("ExchangeId: {} is marked to stop routing: {}", exchange.getExchangeId(), exchange); return false; } } // continue if there are more processors to route boolean answer = it.hasNext(); log.trace("ExchangeId: {} should continue routing: {}", exchange.getExchangeId(), answer); return answer; } @Override protected void doStart() throws Exception { ServiceHelper.startService(processors); } @Override protected void doStop() throws Exception { ServiceHelper.stopService(processors); } @Override public String toString() { return "Pipeline[" + getProcessors() + "]"; } public List<Processor> getProcessors() { return (List) processors; } @Override public String getTraceLabel() { return "pipeline"; } @Override public String getId() { return id; } @Override public void setId(String id) { this.id = id; } public List<Processor> next() { if (!hasNext()) { return null; } return new ArrayList<>(processors); } public boolean hasNext() { return processors != null && !processors.isEmpty(); } }
apache-2.0
gitpan/GOOGLE-ADWORDS-PERL-CLIENT
lib/Google/Ads/AdWords/v201409/UserListType.pm
1082
package Google::Ads::AdWords::v201409::UserListType;
use strict;
use warnings;

sub get_xmlns { 'https://adwords.google.com/api/adwords/rm/v201409'};

# derivation by restriction
use base qw(
    SOAP::WSDL::XSD::Typelib::Builtin::string);

1;

__END__

=pod

=head1 NAME

=head1 DESCRIPTION

Perl data type class for the XML Schema defined simpleType
UserListType from the namespace https://adwords.google.com/api/adwords/rm/v201409.

The user list types.

This class is derived from
   SOAP::WSDL::XSD::Typelib::Builtin::string
. SOAP::WSDL's schema implementation does not validate data, so you can use it exactly like its base type.

# Description of restrictions not implemented yet.

=head1 METHODS

=head2 new

Constructor.

=head2 get_value / set_value

Getter and setter for the simpleType's value.

=head1 OVERLOADING

Depending on the simple type's base type, the following operations are overloaded

 Stringification
 Numerification
 Boolification

Check L<SOAP::WSDL::XSD::Typelib::Builtin> for more information.

=head1 AUTHOR

Generated by SOAP::WSDL

=cut
apache-2.0
StyleTang/incubator-rocketmq-externals
rocketmq-flink/src/main/java/org/apache/rocketmq/flink/source/reader/deserializer/BytesMessage.java
1571
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.rocketmq.flink.source.reader.deserializer;

import java.util.HashMap;
import java.util.Map;

/** Message contains byte array. */
public class BytesMessage {

    private byte[] data;
    private Map<String, String> properties = new HashMap<>();

    public byte[] getData() {
        return data;
    }

    public void setData(byte[] data) {
        this.data = data;
    }

    public Map<String, String> getProperties() {
        return properties;
    }

    public void setProperties(Map<String, String> props) {
        this.properties = props;
    }

    public Object getProperty(String key) {
        return properties.get(key);
    }

    public void setProperty(String key, String value) {
        properties.put(key, value);
    }
}
apache-2.0
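A minimal sketch of how the `BytesMessage` holder from the record above can be populated and read back; the payload string and the `"topic"` property are made-up illustrative values:

```java
import java.nio.charset.StandardCharsets;
import org.apache.rocketmq.flink.source.reader.deserializer.BytesMessage;

// Sketch only: shows the getter/setter surface defined by the class above.
public class BytesMessageSketch {
    public static void main(String[] args) {
        BytesMessage message = new BytesMessage();
        message.setData("hello rocketmq".getBytes(StandardCharsets.UTF_8)); // raw payload bytes
        message.setProperty("topic", "demo-topic");                         // arbitrary string metadata

        System.out.println(new String(message.getData(), StandardCharsets.UTF_8));
        System.out.println(message.getProperty("topic"));
    }
}
```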
OscarSwanros/swift
lib/AST/LookupVisibleDecls.cpp
36523
//===--- LookupVisibleDecls - Swift Name Lookup Routines ------------------===// // // This source file is part of the Swift.org open source project // // Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors // Licensed under Apache License v2.0 with Runtime Library Exception // // See https://swift.org/LICENSE.txt for license information // See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors // //===----------------------------------------------------------------------===// // // This file implements the lookupVisibleDecls interface for visiting named // declarations. // //===----------------------------------------------------------------------===// #include "NameLookupImpl.h" #include "swift/AST/ASTContext.h" #include "swift/AST/GenericSignatureBuilder.h" #include "swift/AST/Initializer.h" #include "swift/AST/LazyResolver.h" #include "swift/AST/NameLookup.h" #include "swift/AST/ProtocolConformance.h" #include "swift/AST/SubstitutionMap.h" #include "swift/Basic/SourceManager.h" #include "swift/Basic/STLExtras.h" #include "swift/Sema/IDETypeChecking.h" #include "llvm/ADT/SetVector.h" #include <set> using namespace swift; void VisibleDeclConsumer::anchor() {} void VectorDeclConsumer::anchor() {} void NamedDeclConsumer::anchor() {} namespace { struct LookupState { private: /// If \c false, an unqualified lookup of all visible decls in a /// DeclContext. /// /// If \c true, lookup of all visible members of a given object (possibly of /// metatype type). unsigned IsQualified : 1; /// Is this a qualified lookup on a metatype? unsigned IsOnMetatype : 1; /// Did we recurse into a superclass? unsigned IsOnSuperclass : 1; unsigned InheritsSuperclassInitializers : 1; /// Should instance members be included even if lookup is performed on a type? unsigned IncludeInstanceMembers : 1; LookupState() : IsQualified(0), IsOnMetatype(0), IsOnSuperclass(0), InheritsSuperclassInitializers(0), IncludeInstanceMembers(0) {} public: LookupState(const LookupState &) = default; static LookupState makeQualified() { LookupState Result; Result.IsQualified = 1; return Result; } static LookupState makeUnqualified() { LookupState Result; Result.IsQualified = 0; return Result; } bool isQualified() const { return IsQualified; } bool isOnMetatype() const { return IsOnMetatype; } bool isOnSuperclass() const { return IsOnSuperclass; } bool isInheritsSuperclassInitializers() const { return InheritsSuperclassInitializers; } bool isIncludingInstanceMembers() const { return IncludeInstanceMembers; } LookupState withOnMetatype() const { auto Result = *this; Result.IsOnMetatype = 1; return Result; } LookupState withOnSuperclass() const { auto Result = *this; Result.IsOnSuperclass = 1; return Result; } LookupState withInheritsSuperclassInitializers() const { auto Result = *this; Result.InheritsSuperclassInitializers = 1; return Result; } LookupState withoutInheritsSuperclassInitializers() const { auto Result = *this; Result.InheritsSuperclassInitializers = 0; return Result; } LookupState withIncludedInstanceMembers() const { auto Result = *this; Result.IncludeInstanceMembers = 1; return Result; } }; } // unnamed namespace static bool areTypeDeclsVisibleInLookupMode(LookupState LS) { // Nested type declarations can be accessed only with unqualified lookup or // on metatypes. 
return !LS.isQualified() || LS.isOnMetatype(); } static bool isDeclVisibleInLookupMode(ValueDecl *Member, LookupState LS, const DeclContext *FromContext, LazyResolver *TypeResolver) { if (TypeResolver) { TypeResolver->resolveDeclSignature(Member); TypeResolver->resolveAccessControl(Member); } // Check access when relevant. if (!Member->getDeclContext()->isLocalContext() && !isa<GenericTypeParamDecl>(Member) && !isa<ParamDecl>(Member) && FromContext->getASTContext().LangOpts.EnableAccessControl) { if (Member->isInvalid() && !Member->hasAccess()) return false; if (!Member->isAccessibleFrom(FromContext)) return false; } if (auto *FD = dyn_cast<FuncDecl>(Member)) { // Cannot call static functions on non-metatypes. if (!LS.isOnMetatype() && FD->isStatic()) return false; // Otherwise, either call a function or curry it. return true; } if (auto *VD = dyn_cast<VarDecl>(Member)) { // Cannot use static properties on non-metatypes. if (!(LS.isQualified() && LS.isOnMetatype()) && VD->isStatic()) return false; // Cannot use instance properties on metatypes. if (LS.isOnMetatype() && !VD->isStatic() && !LS.isIncludingInstanceMembers()) return false; return true; } if (isa<EnumElementDecl>(Member)) { // Cannot reference enum elements on non-metatypes. if (!(LS.isQualified() && LS.isOnMetatype())) return false; } if (auto CD = dyn_cast<ConstructorDecl>(Member)) { // Constructors with stub implementations cannot be called in Swift. if (CD->hasStubImplementation()) return false; if (LS.isQualified() && LS.isOnSuperclass()) { // Cannot call initializers from a superclass, except for inherited // convenience initializers. return LS.isInheritsSuperclassInitializers() && CD->isInheritable(); } } if (isa<TypeDecl>(Member)) return areTypeDeclsVisibleInLookupMode(LS); return true; } /// Lookup members in extensions of \p LookupType, using \p BaseType as the /// underlying type when checking any constraints on the extensions. static void doGlobalExtensionLookup(Type BaseType, Type LookupType, SmallVectorImpl<ValueDecl *> &FoundDecls, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver) { auto nominal = LookupType->getAnyNominal(); // Look in each extension of this type. for (auto extension : nominal->getExtensions()) { if (!isExtensionApplied(*const_cast<DeclContext*>(CurrDC), BaseType, extension)) continue; bool validatedExtension = false; if (TypeResolver && extension->getAsProtocolExtensionContext()) { if (!TypeResolver->isProtocolExtensionUsable( const_cast<DeclContext *>(CurrDC), BaseType, extension)) { continue; } validatedExtension = true; } for (auto Member : extension->getMembers()) { if (auto VD = dyn_cast<ValueDecl>(Member)) if (isDeclVisibleInLookupMode(VD, LS, CurrDC, TypeResolver)) { // Resolve the extension, if we haven't done so already. if (!validatedExtension && TypeResolver) { TypeResolver->resolveExtension(extension); validatedExtension = true; } FoundDecls.push_back(VD); } } } // Handle shadowing. removeShadowedDecls(FoundDecls, CurrDC->getParentModule(), TypeResolver); } /// \brief Enumerate immediate members of the type \c LookupType and its /// extensions, as seen from the context \c CurrDC. /// /// Don't do lookup into superclasses or implemented protocols. Uses /// \p BaseType as the underlying type when checking any constraints on the /// extensions. 
static void lookupTypeMembers(Type BaseType, Type LookupType, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver) { NominalTypeDecl *D = LookupType->getAnyNominal(); assert(D && "should have a nominal type"); bool LookupFromChildDeclContext = false; const DeclContext *TempDC = CurrDC; while (!TempDC->isModuleContext()) { if (TempDC == D) { LookupFromChildDeclContext = true; break; } TempDC = TempDC->getParent(); } SmallVector<ValueDecl*, 2> FoundDecls; if (LookupFromChildDeclContext) { // Current decl context is contained inside 'D', so generic parameters // are visible. if (D->getGenericParams()) for (auto Param : *D->getGenericParams()) if (isDeclVisibleInLookupMode(Param, LS, CurrDC, TypeResolver)) FoundDecls.push_back(Param); } for (Decl *Member : D->getMembers()) { if (auto *VD = dyn_cast<ValueDecl>(Member)) if (isDeclVisibleInLookupMode(VD, LS, CurrDC, TypeResolver)) FoundDecls.push_back(VD); } doGlobalExtensionLookup(BaseType, LookupType, FoundDecls, CurrDC, LS, Reason, TypeResolver); // Report the declarations we found to the consumer. for (auto *VD : FoundDecls) Consumer.foundDecl(VD, Reason); } /// Enumerate AnyObject declarations as seen from context \c CurrDC. static void doDynamicLookup(VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, LazyResolver *TypeResolver) { class DynamicLookupConsumer : public VisibleDeclConsumer { VisibleDeclConsumer &ChainedConsumer; LookupState LS; const DeclContext *CurrDC; LazyResolver *TypeResolver; llvm::DenseSet<std::pair<DeclBaseName, CanType>> FunctionsReported; llvm::DenseSet<CanType> SubscriptsReported; llvm::DenseSet<std::pair<Identifier, CanType>> PropertiesReported; public: explicit DynamicLookupConsumer(VisibleDeclConsumer &ChainedConsumer, LookupState LS, const DeclContext *CurrDC, LazyResolver *TypeResolver) : ChainedConsumer(ChainedConsumer), LS(LS), CurrDC(CurrDC), TypeResolver(TypeResolver) {} void foundDecl(ValueDecl *D, DeclVisibilityKind Reason) override { // If the declaration has an override, name lookup will also have found // the overridden method. Skip this declaration, because we prefer the // overridden method. if (D->getOverriddenDecl()) return; // Ensure that the declaration has a type. if (!D->hasInterfaceType()) { if (!TypeResolver) return; TypeResolver->resolveDeclSignature(D); if (!D->hasInterfaceType()) return; } switch (D->getKind()) { #define DECL(ID, SUPER) \ case DeclKind::ID: #define VALUE_DECL(ID, SUPER) #include "swift/AST/DeclNodes.def" llvm_unreachable("not a ValueDecl!"); // Types cannot be found by dynamic lookup. case DeclKind::GenericTypeParam: case DeclKind::AssociatedType: case DeclKind::TypeAlias: case DeclKind::Enum: case DeclKind::Class: case DeclKind::Struct: case DeclKind::Protocol: return; // Initializers cannot be found by dynamic lookup. case DeclKind::Constructor: case DeclKind::Destructor: return; // These cases are probably impossible here but can also just // be safely ignored. case DeclKind::EnumElement: case DeclKind::Param: case DeclKind::Module: return; // For other kinds of values, check if we already reported a decl // with the same signature. case DeclKind::Func: { auto FD = cast<FuncDecl>(D); assert(FD->getImplicitSelfDecl() && "should not find free functions"); (void)FD; if (FD->isInvalid()) break; // Get the type without the first uncurry level with 'self'. 
CanType T = D->getInterfaceType() ->castTo<AnyFunctionType>() ->getResult() ->getCanonicalType(); auto Signature = std::make_pair(D->getBaseName(), T); if (!FunctionsReported.insert(Signature).second) return; break; } case DeclKind::Subscript: { auto Signature = D->getInterfaceType()->getCanonicalType(); if (!SubscriptsReported.insert(Signature).second) return; break; } case DeclKind::Var: { auto *VD = cast<VarDecl>(D); auto Signature = std::make_pair(VD->getName(), VD->getInterfaceType()->getCanonicalType()); if (!PropertiesReported.insert(Signature).second) return; break; } } if (isDeclVisibleInLookupMode(D, LS, CurrDC, TypeResolver)) ChainedConsumer.foundDecl(D, DeclVisibilityKind::DynamicLookup); } }; DynamicLookupConsumer ConsumerWrapper(Consumer, LS, CurrDC, TypeResolver); CurrDC->getParentSourceFile()->forAllVisibleModules( [&](ModuleDecl::ImportedModule Import) { Import.second->lookupClassMembers(Import.first, ConsumerWrapper); }); } namespace { typedef llvm::SmallPtrSet<TypeDecl *, 8> VisitedSet; } // end anonymous namespace static DeclVisibilityKind getReasonForSuper(DeclVisibilityKind Reason) { switch (Reason) { case DeclVisibilityKind::MemberOfCurrentNominal: case DeclVisibilityKind::MemberOfProtocolImplementedByCurrentNominal: case DeclVisibilityKind::MemberOfSuper: return DeclVisibilityKind::MemberOfSuper; case DeclVisibilityKind::MemberOfOutsideNominal: return DeclVisibilityKind::MemberOfOutsideNominal; default: llvm_unreachable("should not see this kind"); } } static void lookupDeclsFromProtocolsBeingConformedTo( Type BaseTy, VisibleDeclConsumer &Consumer, LookupState LS, const DeclContext *FromContext, DeclVisibilityKind Reason, LazyResolver *TypeResolver, VisitedSet &Visited) { NominalTypeDecl *CurrNominal = BaseTy->getAnyNominal(); if (!CurrNominal) return; for (auto Conformance : CurrNominal->getAllConformances()) { auto Proto = Conformance->getProtocol(); if (!Proto->isAccessibleFrom(FromContext)) continue; DeclVisibilityKind ReasonForThisProtocol; if (Reason == DeclVisibilityKind::MemberOfCurrentNominal) ReasonForThisProtocol = DeclVisibilityKind::MemberOfProtocolImplementedByCurrentNominal; else ReasonForThisProtocol = getReasonForSuper(Reason); auto NormalConformance = Conformance->getRootNormalConformance(); for (auto Member : Proto->getMembers()) { if (auto *ATD = dyn_cast<AssociatedTypeDecl>(Member)) { // Skip type decls if they aren't visible, or any type that has a // witness. This cuts down on duplicates. if (areTypeDeclsVisibleInLookupMode(LS) && !NormalConformance->hasTypeWitness(ATD)) { Consumer.foundDecl(ATD, ReasonForThisProtocol); } continue; } if (auto *VD = dyn_cast<ValueDecl>(Member)) { if (TypeResolver) TypeResolver->resolveDeclSignature(VD); // Skip value requirements that have corresponding witnesses. This cuts // down on duplicates. if (!NormalConformance->hasWitness(VD) || !NormalConformance->getWitness(VD, nullptr) || NormalConformance->getWitness(VD, nullptr).getDecl()->getFullName() != VD->getFullName()) { Consumer.foundDecl(VD, ReasonForThisProtocol); } } } // Add members from any extensions. 
SmallVector<ValueDecl *, 2> FoundDecls; doGlobalExtensionLookup(BaseTy, Proto->getDeclaredType(), FoundDecls, FromContext, LS, ReasonForThisProtocol, TypeResolver); for (auto *VD : FoundDecls) Consumer.foundDecl(VD, ReasonForThisProtocol); } } static void lookupVisibleMemberDeclsImpl(Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited); static void lookupVisibleProtocolMemberDecls( Type BaseTy, ProtocolType *PT, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited) { if (!Visited.insert(PT->getDecl()).second) return; for (auto Proto : PT->getDecl()->getInheritedProtocols()) lookupVisibleProtocolMemberDecls(BaseTy, Proto->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); lookupTypeMembers(BaseTy, PT, Consumer, CurrDC, LS, Reason, TypeResolver); } static void lookupVisibleMemberDeclsImpl( Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB, VisitedSet &Visited) { // Just look through l-valueness. It doesn't affect name lookup. assert(BaseTy && "lookup into null type"); assert(!BaseTy->hasLValueType()); // Handle metatype references, as in "some_type.some_member". These are // special and can't have extensions. if (auto MTT = BaseTy->getAs<AnyMetatypeType>()) { // The metatype represents an arbitrary named type: dig through to the // declared type to see what we're dealing with. Type Ty = MTT->getInstanceType(); LookupState subLS = LookupState::makeQualified().withOnMetatype(); if (LS.isIncludingInstanceMembers()) { subLS = subLS.withIncludedInstanceMembers(); } // Just perform normal dot lookup on the type see if we find extensions or // anything else. For example, type SomeTy.SomeMember can look up static // functions, and can even look up non-static functions as well (thus // getting the address of the member). lookupVisibleMemberDeclsImpl(Ty, Consumer, CurrDC, subLS, Reason, TypeResolver, GSB, Visited); return; } // Lookup module references, as on some_module.some_member. These are // special and can't have extensions. if (ModuleType *MT = BaseTy->getAs<ModuleType>()) { AccessFilteringDeclConsumer FilteringConsumer(CurrDC, Consumer, TypeResolver); MT->getModule()->lookupVisibleDecls(ModuleDecl::AccessPathTy(), FilteringConsumer, NLKind::QualifiedLookup); return; } // If the base is AnyObject, we are doing dynamic lookup. if (BaseTy->isAnyObject()) { doDynamicLookup(Consumer, CurrDC, LS, TypeResolver); return; } // If the base is a protocol, enumerate its members. if (ProtocolType *PT = BaseTy->getAs<ProtocolType>()) { lookupVisibleProtocolMemberDecls(BaseTy, PT, Consumer, CurrDC, LS, Reason, TypeResolver, GSB, Visited); return; } // If the base is a protocol composition, enumerate members of the protocols. if (auto PC = BaseTy->getAs<ProtocolCompositionType>()) { for (auto Member : PC->getMembers()) lookupVisibleMemberDeclsImpl(Member, Consumer, CurrDC, LS, Reason, TypeResolver, GSB, Visited); return; } // Enumerate members of archetype's requirements. 
if (ArchetypeType *Archetype = BaseTy->getAs<ArchetypeType>()) { for (auto Proto : Archetype->getConformsTo()) lookupVisibleProtocolMemberDecls( BaseTy, Proto->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); if (auto superclass = Archetype->getSuperclass()) lookupVisibleMemberDeclsImpl(superclass, Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); return; } // If we're looking into a type parameter and we have a generic signature // builder, use the GSB to resolve where we should look. if (BaseTy->isTypeParameter() && GSB) { auto EquivClass = GSB->resolveEquivalenceClass(BaseTy, ArchetypeResolutionKind::CompleteWellFormed); if (!EquivClass) return; if (EquivClass->concreteType) { BaseTy = EquivClass->concreteType; } else { // Conformances for (const auto &Conforms : EquivClass->conformsTo) { lookupVisibleProtocolMemberDecls( BaseTy, Conforms.first->getDeclaredType(), Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); } // Superclass. if (EquivClass->superclass) { lookupVisibleMemberDeclsImpl(EquivClass->superclass, Consumer, CurrDC, LS, getReasonForSuper(Reason), TypeResolver, GSB, Visited); } return; } } llvm::SmallPtrSet<ClassDecl *, 8> Ancestors; do { NominalTypeDecl *CurNominal = BaseTy->getAnyNominal(); if (!CurNominal) break; // Look in for members of a nominal type. lookupTypeMembers(BaseTy, BaseTy, Consumer, CurrDC, LS, Reason, TypeResolver); lookupDeclsFromProtocolsBeingConformedTo(BaseTy, Consumer, LS, CurrDC, Reason, TypeResolver, Visited); // If we have a class type, look into its superclass. auto *CurClass = dyn_cast<ClassDecl>(CurNominal); if (CurClass && CurClass->hasSuperclass()) { // FIXME: This path is no substitute for an actual circularity check. // The real fix is to check that the superclass doesn't introduce a // circular reference before it's written into the AST. if (Ancestors.count(CurClass)) { break; } BaseTy = CurClass->getSuperclass(); Reason = getReasonForSuper(Reason); bool InheritsSuperclassInitializers = CurClass->inheritsSuperclassInitializers(TypeResolver); if (LS.isOnSuperclass() && !InheritsSuperclassInitializers) LS = LS.withoutInheritsSuperclassInitializers(); else if (!LS.isOnSuperclass()) { LS = LS.withOnSuperclass(); if (InheritsSuperclassInitializers) LS = LS.withInheritsSuperclassInitializers(); } } else { break; } Ancestors.insert(CurClass); } while (1); } namespace { struct FoundDeclTy { ValueDecl *D; DeclVisibilityKind Reason; FoundDeclTy(ValueDecl *D, DeclVisibilityKind Reason) : D(D), Reason(Reason) {} friend bool operator==(const FoundDeclTy &LHS, const FoundDeclTy &RHS) { // If this ever changes - e.g. to include Reason - be sure to also update // DenseMapInfo<FoundDeclTy>::getHashValue(). return LHS.D == RHS.D; } }; } // end anonymous namespace namespace llvm { template <> struct DenseMapInfo<FoundDeclTy> { static inline FoundDeclTy getEmptyKey() { return FoundDeclTy{nullptr, DeclVisibilityKind::LocalVariable}; } static inline FoundDeclTy getTombstoneKey() { return FoundDeclTy{reinterpret_cast<ValueDecl *>(0x1), DeclVisibilityKind::LocalVariable}; } static unsigned getHashValue(const FoundDeclTy &Val) { // Note: FoundDeclTy::operator== only considers D, so don't hash Reason here. 
return llvm::hash_value(Val.D); } static bool isEqual(const FoundDeclTy &LHS, const FoundDeclTy &RHS) { return LHS == RHS; } }; } // namespace llvm namespace { /// Similar to swift::conflicting, but lenient about protocol extensions which /// don't affect code completion's concept of overloading. static bool relaxedConflicting(const OverloadSignature &sig1, const OverloadSignature &sig2) { // If the base names are different, they can't conflict. if (sig1.Name.getBaseName() != sig2.Name.getBaseName()) return false; // If one is a compound name and the other is not, they do not conflict // if one is a property and the other is a non-nullary function. if (sig1.Name.isCompoundName() != sig2.Name.isCompoundName()) { return !((sig1.IsProperty && sig2.Name.getArgumentNames().size() > 0) || (sig2.IsProperty && sig1.Name.getArgumentNames().size() > 0)); } // Allow null property types to match non-null ones, which only happens when // one property is from a generic extension and the other is not. if (sig1.InterfaceType != sig2.InterfaceType) { if (!sig1.IsProperty || !sig2.IsProperty) return false; if (sig1.InterfaceType && sig2.InterfaceType) return false; } return sig1.Name == sig2.Name && sig1.UnaryOperator == sig2.UnaryOperator && sig1.IsInstanceMember == sig2.IsInstanceMember; } /// Hack to guess at whether substituting into the type of a declaration will /// be okay. /// FIXME: This is awful. We should either have Type::subst() work for /// GenericFunctionType, or we should kill it outright. static bool shouldSubstIntoDeclType(Type type) { auto genericFnType = type->getAs<GenericFunctionType>(); if (!genericFnType) return true; return false; } class OverrideFilteringConsumer : public VisibleDeclConsumer { public: std::set<ValueDecl *> AllFoundDecls; std::map<DeclBaseName, std::set<ValueDecl *>> FoundDecls; llvm::SetVector<FoundDeclTy> DeclsToReport; Type BaseTy; const DeclContext *DC; LazyResolver *TypeResolver; bool IsTypeLookup = false; OverrideFilteringConsumer(Type BaseTy, const DeclContext *DC, LazyResolver *resolver) : BaseTy(BaseTy), DC(DC), TypeResolver(resolver) { assert(!BaseTy->hasLValueType()); if (auto *MetaTy = BaseTy->getAs<AnyMetatypeType>()) { BaseTy = MetaTy->getInstanceType(); IsTypeLookup = true; } assert(DC && BaseTy); } void foundDecl(ValueDecl *VD, DeclVisibilityKind Reason) override { if (!AllFoundDecls.insert(VD).second) return; // If this kind of declaration doesn't participate in overriding, there's // no filtering to do here. if (!isa<AbstractFunctionDecl>(VD) && !isa<AbstractStorageDecl>(VD)) { DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } if (TypeResolver) { TypeResolver->resolveDeclSignature(VD); TypeResolver->resolveAccessControl(VD); } if (VD->isInvalid()) { FoundDecls[VD->getBaseName()].insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } auto &PossiblyConflicting = FoundDecls[VD->getBaseName()]; // Check all overridden decls. { auto *CurrentVD = VD->getOverriddenDecl(); while (CurrentVD) { if (!AllFoundDecls.insert(CurrentVD).second) break; if (PossiblyConflicting.count(CurrentVD)) { PossiblyConflicting.erase(CurrentVD); PossiblyConflicting.insert(VD); bool Erased = DeclsToReport.remove( FoundDeclTy(CurrentVD, DeclVisibilityKind::LocalVariable)); assert(Erased); (void)Erased; DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } CurrentVD = CurrentVD->getOverriddenDecl(); } } // Does it make sense to substitute types? // Don't pass UnboundGenericType here. If you see this assertion // being hit, fix the caller, don't remove it. 
assert(IsTypeLookup || !BaseTy->hasUnboundGenericType()); // If the base type is AnyObject, we might be doing a dynamic // lookup, so the base type won't match the type of the member's // context type. // // If the base type is not a nominal type, we can't substitute // the member type. // // If the member is a free function and not a member of a type, // don't substitute either. bool shouldSubst = (!BaseTy->isAnyObject() && !BaseTy->hasTypeVariable() && BaseTy->getNominalOrBoundGenericNominal() && VD->getDeclContext()->isTypeContext()); ModuleDecl *M = DC->getParentModule(); // Hack; we shouldn't be filtering at this level anyway. if (!VD->hasInterfaceType()) { FoundDecls[VD->getBaseName()].insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); return; } auto FoundSignature = VD->getOverloadSignature(); if (FoundSignature.InterfaceType && shouldSubst && shouldSubstIntoDeclType(FoundSignature.InterfaceType)) { auto subs = BaseTy->getMemberSubstitutionMap(M, VD); if (auto CT = FoundSignature.InterfaceType.subst(subs)) FoundSignature.InterfaceType = CT->getCanonicalType(); } for (auto I = PossiblyConflicting.begin(), E = PossiblyConflicting.end(); I != E; ++I) { auto *OtherVD = *I; if (OtherVD->isInvalid() || !OtherVD->hasInterfaceType()) { // For some invalid decls it might be impossible to compute the // signature, for example, if the types could not be resolved. continue; } auto OtherSignature = OtherVD->getOverloadSignature(); if (OtherSignature.InterfaceType && shouldSubst && shouldSubstIntoDeclType(OtherSignature.InterfaceType)) { auto subs = BaseTy->getMemberSubstitutionMap(M, OtherVD); if (auto CT = OtherSignature.InterfaceType.subst(subs)) OtherSignature.InterfaceType = CT->getCanonicalType(); } if (relaxedConflicting(FoundSignature, OtherSignature)) { if (VD->getFormalAccess() > OtherVD->getFormalAccess()) { PossiblyConflicting.erase(I); PossiblyConflicting.insert(VD); bool Erased = DeclsToReport.remove( FoundDeclTy(OtherVD, DeclVisibilityKind::LocalVariable)); assert(Erased); (void)Erased; DeclsToReport.insert(FoundDeclTy(VD, Reason)); } return; } } PossiblyConflicting.insert(VD); DeclsToReport.insert(FoundDeclTy(VD, Reason)); } }; } // unnamed namespace /// \brief Enumerate all members in \c BaseTy (including members of extensions, /// superclasses and implemented protocols), as seen from the context \c CurrDC. /// /// This operation corresponds to a standard "dot" lookup operation like "a.b" /// where 'self' is the type of 'a'. This operation is only valid after name /// binding. static void lookupVisibleMemberDecls( Type BaseTy, VisibleDeclConsumer &Consumer, const DeclContext *CurrDC, LookupState LS, DeclVisibilityKind Reason, LazyResolver *TypeResolver, GenericSignatureBuilder *GSB) { OverrideFilteringConsumer ConsumerWrapper(BaseTy, CurrDC, TypeResolver); VisitedSet Visited; lookupVisibleMemberDeclsImpl(BaseTy, ConsumerWrapper, CurrDC, LS, Reason, TypeResolver, GSB, Visited); // Report the declarations we found to the real consumer. 
for (const auto &DeclAndReason : ConsumerWrapper.DeclsToReport) Consumer.foundDecl(DeclAndReason.D, DeclAndReason.Reason); } void swift::lookupVisibleDecls(VisibleDeclConsumer &Consumer, const DeclContext *DC, LazyResolver *TypeResolver, bool IncludeTopLevel, SourceLoc Loc) { const ModuleDecl &M = *DC->getParentModule(); const SourceManager &SM = DC->getASTContext().SourceMgr; auto Reason = DeclVisibilityKind::MemberOfCurrentNominal; // If we are inside of a method, check to see if there are any ivars in scope, // and if so, whether this is a reference to one of them. while (!DC->isModuleScopeContext()) { const ValueDecl *BaseDecl = nullptr; Type ExtendedType; auto LS = LookupState::makeUnqualified(); // Skip initializer contexts, we will not find any declarations there. if (isa<Initializer>(DC)) { DC = DC->getParent(); LS = LS.withOnMetatype(); } GenericParamList *GenericParams = DC->getGenericParamsOfContext(); if (auto *AFD = dyn_cast<AbstractFunctionDecl>(DC)) { // Look for local variables; normally, the parser resolves these // for us, but it can't do the right thing inside local types. // FIXME: when we can parse and typecheck the function body partially for // code completion, AFD->getBody() check can be removed. if (Loc.isValid() && AFD->getBody()) { namelookup::FindLocalVal(SM, Loc, Consumer).visit(AFD->getBody()); } for (auto *P : AFD->getParameterLists()) namelookup::FindLocalVal(SM, Loc, Consumer).checkParameterList(P); // Constructors and destructors don't have 'self' in parameter patterns. if (isa<ConstructorDecl>(AFD) || isa<DestructorDecl>(AFD)) if (auto *selfParam = AFD->getImplicitSelfDecl()) Consumer.foundDecl(const_cast<ParamDecl*>(selfParam), DeclVisibilityKind::FunctionParameter); if (AFD->getDeclContext()->isTypeContext()) { ExtendedType = AFD->getDeclContext()->getSelfTypeInContext(); BaseDecl = AFD->getImplicitSelfDecl(); DC = DC->getParent(); if (auto *FD = dyn_cast<FuncDecl>(AFD)) if (FD->isStatic()) ExtendedType = MetatypeType::get(ExtendedType); } } else if (auto CE = dyn_cast<ClosureExpr>(DC)) { if (Loc.isValid()) { namelookup::FindLocalVal(SM, Loc, Consumer).visit(CE->getBody()); if (auto P = CE->getParameters()) { namelookup::FindLocalVal(SM, Loc, Consumer).checkParameterList(P); } } } else if (auto ED = dyn_cast<ExtensionDecl>(DC)) { ExtendedType = ED->getExtendedType(); if (ExtendedType) BaseDecl = ExtendedType->getNominalOrBoundGenericNominal(); } else if (auto ND = dyn_cast<NominalTypeDecl>(DC)) { ExtendedType = ND->getDeclaredTypeInContext(); BaseDecl = ND; } if (BaseDecl && ExtendedType) { ::lookupVisibleMemberDecls(ExtendedType, Consumer, DC, LS, Reason, TypeResolver, nullptr); } // Check any generic parameters for something with the given name. namelookup::FindLocalVal(SM, Loc, Consumer) .checkGenericParams(GenericParams); DC = DC->getParent(); Reason = DeclVisibilityKind::MemberOfOutsideNominal; } SmallVector<ModuleDecl::ImportedModule, 8> extraImports; if (auto SF = dyn_cast<SourceFile>(DC)) { if (Loc.isValid()) { // Look for local variables in top-level code; normally, the parser // resolves these for us, but it can't do the right thing for // local types. 
namelookup::FindLocalVal(SM, Loc, Consumer).checkSourceFile(*SF); } if (IncludeTopLevel) { auto &cached = SF->getCachedVisibleDecls(); if (!cached.empty()) { for (auto result : cached) Consumer.foundDecl(result, DeclVisibilityKind::VisibleAtTopLevel); return; } SF->getImportedModules(extraImports, ModuleDecl::ImportFilter::Private); } } if (IncludeTopLevel) { using namespace namelookup; SmallVector<ValueDecl *, 0> moduleResults; auto &mutableM = const_cast<ModuleDecl&>(M); lookupVisibleDeclsInModule(&mutableM, {}, moduleResults, NLKind::UnqualifiedLookup, ResolutionKind::Overloadable, TypeResolver, DC, extraImports); for (auto result : moduleResults) Consumer.foundDecl(result, DeclVisibilityKind::VisibleAtTopLevel); if (auto SF = dyn_cast<SourceFile>(DC)) SF->cacheVisibleDecls(std::move(moduleResults)); } } void swift::lookupVisibleMemberDecls(VisibleDeclConsumer &Consumer, Type BaseTy, const DeclContext *CurrDC, LazyResolver *TypeResolver, bool includeInstanceMembers, GenericSignatureBuilder *GSB) { assert(CurrDC); LookupState ls = LookupState::makeQualified(); if (includeInstanceMembers) { ls = ls.withIncludedInstanceMembers(); } ::lookupVisibleMemberDecls(BaseTy, Consumer, CurrDC, ls, DeclVisibilityKind::MemberOfCurrentNominal, TypeResolver, GSB); }
apache-2.0
krahman/emedia
ninja-servlet-jpa-blog-archetype/src/main/resources/archetype-resources/src/test/java/controllers/LoginLogoutControllerTest.java
3238
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) /** * Copyright (C) 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package controllers; import static org.junit.Assert.assertTrue; import java.util.Map; import ninja.NinjaTest; import org.junit.Before; import org.junit.Test; import com.google.common.collect.Maps; public class LoginLogoutControllerTest extends NinjaTest { @Before public void setup() { ninjaTestBrowser.makeRequest(getServerAddress() + "setup"); } @Test public void testLogingLogout() { Map<String, String> headers = Maps.newHashMap(); // ///////////////////////////////////////////////////////////////////// // Test posting of article does not work without login // ///////////////////////////////////////////////////////////////////// String response = ninjaTestBrowser.makeRequest(getServerAddress() + "article/new", headers); System.out.println(response); assertTrue(response.contains("Error. Forbidden.")); // ///////////////////////////////////////////////////////////////////// // Login // ///////////////////////////////////////////////////////////////////// Map<String, String> formParameters = Maps.newHashMap(); formParameters.put("username", "[email protected]"); formParameters.put("password", "secret"); ninjaTestBrowser.makePostRequestWithFormParameters(getServerAddress() + "login", headers, formParameters); // ///////////////////////////////////////////////////////////////////// // Test posting of article works when are logged in // ///////////////////////////////////////////////////////////////////// response = ninjaTestBrowser.makeRequest(getServerAddress() + "article/new", headers); assertTrue(response.contains("New article")); // ///////////////////////////////////////////////////////////////////// // Logout // ///////////////////////////////////////////////////////////////////// ninjaTestBrowser.makeRequest(getServerAddress() + "logout", headers); // ///////////////////////////////////////////////////////////////////// // Assert that posting of article does not work any more... // ///////////////////////////////////////////////////////////////////// response = ninjaTestBrowser.makeRequest(getServerAddress() + "article/new", headers); System.out.println(response); assertTrue(response.contains("Error. Forbidden.")); } }
apache-2.0
Acuant/AcuantiOSMobileSDK
Sample-Connect-Objective-C-App/ConnectObjective-CSampleApp/ViewController.h
311
// // ViewController.h // ConnectObjective-CSampleApp // // Created by Tapas Behera on 8/8/17. // Copyright © 2017 Acuant. All rights reserved. // #import <UIKit/UIKit.h> @protocol ResultCancelDelegate @required -(void) didFinishShowingResult; @end @interface ViewController : UIViewController @end
apache-2.0
VladimirTS/chef-php
recipes/module_common.rb
1074
# # Author:: Panagiotis Papadomitsos (<[email protected]>) # # Cookbook Name:: php # Recipe:: module_common # # Copyright 2009-2011, Opscode, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pkg = value_for_platform_family( [ 'rhel', 'fedora' ] => %w{ php-common php-cli php-mbstring php-gd php-intl php-pspell php-mcrypt php-soap php-sqlite php-xml php-xmlrpc }, 'debian' => %w{ php5-curl php5-json php5-cli php5-gd php5-intl php5-pspell php5-mcrypt php5-mhash php5-sqlite php5-xsl php5-xmlrpc } ) pkg.each do |ppkg| package ppkg do action :install end end
apache-2.0
hkjung/REST-web-server-WIZwiki-W7500eco-uVision5
Libraries/W7500x_stdPeriph_Driver/src/W7500x_i2c.c
8510
/** ****************************************************************************** * @file W7500x_stdPeriph_Driver/src/W7500x_i2c.c * @author IOP Team * @version v1.0.0 * @date 01-May-2015 * @brief This file contains all the functions prototypes for the i2c * firmware library. ****************************************************************************** * ****************************************************************************** */ /*include -------------------------------------*/ #include <stdio.h> #include "W7500x_i2c.h" #include "W7500x_gpio.h" GPIO_InitTypeDef GPIO_InitDef; uint32_t I2C_Init(I2C_ConfigStruct* conf) { uint32_t scl_port_num; uint32_t scl_pin_index; uint32_t sda_port_num; uint32_t sda_pin_index; scl_port_num = I2C_PORT(conf->scl); scl_pin_index = I2C_PIN_INDEX(conf->scl); sda_port_num = I2C_PORT(conf->sda); sda_pin_index = I2C_PIN_INDEX(conf->sda); //SCL setting GPIO_InitDef.GPIO_Pin = scl_pin_index; GPIO_InitDef.GPIO_Mode = GPIO_Mode_OUT; if(scl_port_num == 0) { GPIO_Init(GPIOA, &GPIO_InitDef); GPIO_SetBits(GPIOA, scl_pin_index); } else if(scl_port_num == 1) { GPIO_Init(GPIOB, &GPIO_InitDef); GPIO_SetBits(GPIOB, scl_pin_index); } else if(scl_port_num == 2) { GPIO_Init(GPIOC, &GPIO_InitDef); GPIO_SetBits(GPIOC, scl_pin_index); } else if(scl_port_num == 3) { GPIO_Init(GPIOD, &GPIO_InitDef); GPIO_SetBits(GPIOD, scl_pin_index); } else { printf("SCL pin Port number error\r\n"); return 1; } //SDA setting GPIO_InitDef.GPIO_Pin = sda_pin_index; GPIO_InitDef.GPIO_Mode = GPIO_Mode_IN; if(sda_port_num == 0) { GPIO_Init(GPIOA, &GPIO_InitDef); GPIO_ResetBits(GPIOA, sda_pin_index); } else if(sda_port_num == 1) { GPIO_Init(GPIOB, &GPIO_InitDef); GPIO_ResetBits(GPIOB, sda_pin_index); } else if(sda_port_num == 2) { GPIO_Init(GPIOC, &GPIO_InitDef); GPIO_ResetBits(GPIOC, sda_pin_index); } else if(sda_port_num == 3) { GPIO_Init(GPIOD, &GPIO_InitDef); GPIO_ResetBits(GPIOD, sda_pin_index); } else { printf("SDA pin Port number error\r\n"); return 1; } PAD_AFConfig((PAD_Type) scl_port_num, scl_pin_index, (PAD_AF_TypeDef) PAD_AF1); PAD_AFConfig((PAD_Type) sda_port_num, sda_pin_index, (PAD_AF_TypeDef) PAD_AF1); return 0; } void I2C_WriteBitSCL(I2C_ConfigStruct* conf, uint8_t data) { uint32_t scl_port_num = I2C_PORT(conf->scl); uint32_t scl_pin_index = I2C_PIN_INDEX(conf->scl); if(scl_port_num == 0) { if(data == 1) GPIO_SetBits(GPIOA, scl_pin_index); else GPIO_ResetBits(GPIOA, scl_pin_index); } else if(scl_port_num == 1) { if(data == 1) GPIO_SetBits(GPIOB, scl_pin_index); else GPIO_ResetBits(GPIOB, scl_pin_index); } else if(scl_port_num == 2) { if(data == 1) GPIO_SetBits(GPIOC, scl_pin_index); else GPIO_ResetBits(GPIOC, scl_pin_index); } else if(scl_port_num == 3) { if(data == 1) GPIO_SetBits(GPIOD, scl_pin_index); else GPIO_ResetBits(GPIOD, scl_pin_index); } } void I2C_WriteBitSDA(I2C_ConfigStruct* conf, uint8_t data) { uint32_t sda_port_num = I2C_PORT(conf->sda); uint32_t sda_pin_index = I2C_PIN_INDEX(conf->sda); if(sda_port_num == 0) { if(data == 1) GPIOA->OUTENCLR = sda_pin_index; else GPIOA->OUTENSET = sda_pin_index; } else if(sda_port_num == 1) { if(data == 1) GPIOB->OUTENCLR = sda_pin_index; else GPIOB->OUTENSET = sda_pin_index; } else if(sda_port_num == 2) { if(data == 1) GPIOC->OUTENCLR = sda_pin_index; else GPIOC->OUTENSET = sda_pin_index; } else if(sda_port_num == 3) { if(data == 1) GPIOD->OUTENCLR = sda_pin_index; else GPIOD->OUTENSET = sda_pin_index; } } uint8_t I2C_ReadBitSDA(I2C_ConfigStruct* conf) { uint32_t sda_port_num = I2C_PORT(conf->sda); uint32_t sda_pin_index = 
I2C_PIN_INDEX(conf->sda); if(sda_port_num == 0) { if(GPIOA->DATA & sda_pin_index) return 1; else return 0; } else if(sda_port_num == 1) { if(GPIOB->DATA & sda_pin_index) return 1; else return 0; } else if(sda_port_num == 2) { if(GPIOC->DATA & sda_pin_index) return 1; else return 0; } else if(sda_port_num == 3) { if(GPIOD->DATA & sda_pin_index) return 1; else return 0; } return 0; } void I2C_Start(I2C_ConfigStruct* conf) { I2C_WriteBitSCL(conf, 1); I2C_WriteBitSDA(conf, 1); I2C_WriteBitSDA(conf, 0); I2C_WriteBitSCL(conf, 0); } void I2C_Stop(I2C_ConfigStruct* conf) { I2C_WriteBitSCL(conf, 0); I2C_WriteBitSDA(conf, 0); I2C_WriteBitSCL(conf, 1); I2C_WriteBitSDA(conf, 1); } uint8_t I2C_WriteByte(I2C_ConfigStruct* conf, uint8_t data) { int i; uint8_t ret; //Write byte for(i=0; i<8; i++) { if((data << i) & 0x80) I2C_WriteBitSDA(conf, 1); else I2C_WriteBitSDA(conf, 0); I2C_WriteBitSCL(conf, 1); I2C_WriteBitSCL(conf, 0); } //Make clk for receiving ack I2C_WriteBitSDA(conf, 1); I2C_WriteBitSCL(conf, 1); //Read Ack/Nack ret = I2C_ReadBitSDA(conf); I2C_WriteBitSCL(conf, 0); return ret; } void I2C_SendACK(I2C_ConfigStruct* conf) { I2C_WriteBitSDA(conf, 0); I2C_WriteBitSCL(conf, 1); I2C_WriteBitSCL(conf, 0); } void I2C_SendNACK(I2C_ConfigStruct* conf) { I2C_WriteBitSDA(conf, 1); I2C_WriteBitSCL(conf, 1); I2C_WriteBitSCL(conf, 0); } uint8_t I2C_ReadByte(I2C_ConfigStruct* conf) { int i; uint8_t ret = 0; I2C_WriteBitSDA(conf, 1); //out enable clear(GPIO is input) //Read byte for(i=0; i<8; i++) { I2C_WriteBitSCL(conf, 1); ret = (ret << 1) | (I2C_ReadBitSDA(conf)); I2C_WriteBitSCL(conf, 0); } return ret; } int I2C_Write(I2C_ConfigStruct* conf, uint8_t addr, uint8_t* data, uint32_t len) { int i; I2C_Start(conf); //Write addr if(I2C_WriteByte(conf, addr) != 0) { printf("Received NACK at address phase!!\r\n"); return -1; } //Write data for(i=0; i<len; i++) { if(I2C_WriteByte(conf, data[i])) return -1; } I2C_Stop(conf); return 0;//success } int I2C_WriteRepeated(I2C_ConfigStruct* conf, uint8_t addr, uint8_t* data, uint32_t len) { int i; I2C_Start(conf); //Write addr if(I2C_WriteByte(conf, addr) != 0) { printf("Received NACK at address phase!!\r\n"); return -1; } //Write data for(i=0; i<len; i++) { if(I2C_WriteByte(conf, data[i])) return -1; } return 0;//success } int I2C_Read(I2C_ConfigStruct* conf, uint8_t addr, uint8_t* data, uint32_t len) { int i; I2C_Start(conf); //Write addr | read command if(I2C_WriteByte(conf, (addr | 1)) != 0) { printf("Received NACK at address phase!!\r\n"); return -1; } //Read data for(i=0; i<len; i++) { data[i] = I2C_ReadByte(conf); if( i == (len - 1) ) I2C_SendNACK(conf); else I2C_SendACK(conf); } I2C_Stop(conf); return 0;//success } int I2C_ReadRepeated(I2C_ConfigStruct* conf, uint8_t addr, uint8_t* data, uint32_t len) { int i; I2C_Start(conf); //Write addr | read command if(I2C_WriteByte(conf, (addr | 1)) != 0) { printf("Received NACK at address phase!!\r\n"); return -1; } //Read data for(i=0; i<len; i++) { data[i] = I2C_ReadByte(conf); if( i == (len - 1) ) I2C_SendNACK(conf); else I2C_SendACK(conf); } return 0;//success }
apache-2.0
ServiceComb/java-chassis
demo/demo-jaxrs/jaxrs-server/src/main/java/org/apache/servicecomb/demo/jaxrs/server/beanParam/TestBeanParameterWithUpload.java
1712
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.servicecomb.demo.jaxrs.server.beanParam; import javax.servlet.http.Part; import javax.ws.rs.FormParam; import javax.ws.rs.QueryParam; public class TestBeanParameterWithUpload { @QueryParam("query") private String queryStr; @FormParam("up1") private Part up1; private Part up2; public String getQueryStr() { return queryStr; } public void setQueryStr(String queryStr) { this.queryStr = queryStr; } public Part getUp1() { return up1; } public void setUp1(Part up1) { this.up1 = up1; } public Part getUp2() { return up2; } @FormParam("up2") public void setUp2(Part up2) { this.up2 = up2; } @Override public String toString() { final StringBuilder sb = new StringBuilder("TestBeanParameterWithUpload{"); sb.append("queryStr='").append(queryStr).append('\''); sb.append('}'); return sb.toString(); } }
apache-2.0
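Illustrative note (not part of the dataset record above): a hedged sketch, in Java, of how a JAX-RS resource method might bind the aggregated parameters of TestBeanParameterWithUpload via @BeanParam, so that the query string populates queryStr and the multipart parts populate up1/up2 (up2 through its annotated setter). The resource class name and path are hypothetical, and the exact multipart Part binding depends on the ServiceComb runtime, which is not shown in this record.
import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import org.apache.servicecomb.demo.jaxrs.server.beanParam.TestBeanParameterWithUpload;

@Path("/beanParamWithPartDemo") // hypothetical path, for illustration only
public class BeanParamWithPartDemoResource {
    @POST
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public String upload(@BeanParam TestBeanParameterWithUpload bean) {
        // queryStr is taken from ?query=..., up1/up2 from the multipart form parts
        return bean.toString()
            + ", up1Present=" + (bean.getUp1() != null)
            + ", up2Present=" + (bean.getUp2() != null);
    }
}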
GIP-RECIA/esup-publisher-ui
src/main/webapp/scripts/components/auth/services/register.service.js
163
'use strict'; angular.module('publisherApp') .factory('Register', function ($resource) { return $resource('api/register', {}, { }); });
apache-2.0
sobkowiak/aspectj-in-action-code
ch02/workspace/Section2.6AspectJSpringIntegration/src/main/java/ajia/main/Main.java
1245
/* Copyright 2009 Ramnivas Laddad Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ //Listing 2.10 Using the Spring container application context package ajia.main; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import ajia.messaging.MessageCommunicator; public class Main { public static void main(String[] args) { ApplicationContext context = new ClassPathXmlApplicationContext( "applicationContext.xml"); MessageCommunicator messageCommunicator = (MessageCommunicator) context .getBean("messageCommunicator"); messageCommunicator.deliver("Wanna learn AspectJ?"); messageCommunicator.deliver("Harry", "having fun?"); } }
apache-2.0
apache/geronimo-yoko
yoko-core/src/main/java/org/apache/yoko/orb/OCI/ConnectorSeqHolder.java
1507
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.yoko.orb.OCI; // // IDL:orb.yoko.apache.org/OCI/ConnectorSeq:1.0 // final public class ConnectorSeqHolder implements org.omg.CORBA.portable.Streamable { public Connector[] value; public ConnectorSeqHolder() { } public ConnectorSeqHolder(Connector[] initial) { value = initial; } public void _read(org.omg.CORBA.portable.InputStream in) { value = ConnectorSeqHelper.read(in); } public void _write(org.omg.CORBA.portable.OutputStream out) { ConnectorSeqHelper.write(out, value); } public org.omg.CORBA.TypeCode _type() { return ConnectorSeqHelper.type(); } }
apache-2.0
rvhub/onos
core/net/src/main/java/org/onosproject/app/impl/ApplicationManager.java
8584
/* * Copyright 2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.app.impl; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.apache.karaf.features.Feature; import org.apache.karaf.features.FeaturesService; import org.onosproject.app.ApplicationAdminService; import org.onosproject.app.ApplicationEvent; import org.onosproject.app.ApplicationListener; import org.onosproject.app.ApplicationService; import org.onosproject.app.ApplicationState; import org.onosproject.app.ApplicationStore; import org.onosproject.app.ApplicationStoreDelegate; import org.onosproject.event.AbstractListenerManager; import org.onosproject.core.Application; import org.onosproject.core.ApplicationId; import org.onosproject.core.Permission; import org.slf4j.Logger; import java.io.InputStream; import java.util.Set; import static com.google.common.base.Preconditions.checkNotNull; import static org.onosproject.app.ApplicationEvent.Type.*; import static org.onosproject.security.AppGuard.checkPermission; import static org.slf4j.LoggerFactory.getLogger; /** * Implementation of the application management service. 
*/ @Component(immediate = true) @Service public class ApplicationManager extends AbstractListenerManager<ApplicationEvent, ApplicationListener> implements ApplicationService, ApplicationAdminService { private final Logger log = getLogger(getClass()); private static final String APP_ID_NULL = "Application ID cannot be null"; private final ApplicationStoreDelegate delegate = new InternalStoreDelegate(); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected ApplicationStore store; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected FeaturesService featuresService; private boolean initializing; @Activate public void activate() { eventDispatcher.addSink(ApplicationEvent.class, listenerRegistry); initializing = true; store.setDelegate(delegate); initializing = false; log.info("Started"); } @Deactivate public void deactivate() { eventDispatcher.removeSink(ApplicationEvent.class); store.unsetDelegate(delegate); log.info("Stopped"); } @Override public Set<Application> getApplications() { checkPermission(Permission.APP_READ); return store.getApplications(); } @Override public ApplicationId getId(String name) { checkPermission(Permission.APP_READ); checkNotNull(name, "Name cannot be null"); return store.getId(name); } @Override public Application getApplication(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getApplication(appId); } @Override public ApplicationState getState(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getState(appId); } @Override public Set<Permission> getPermissions(ApplicationId appId) { checkPermission(Permission.APP_READ); checkNotNull(appId, APP_ID_NULL); return store.getPermissions(appId); } @Override public Application install(InputStream appDescStream) { checkNotNull(appDescStream, "Application archive stream cannot be null"); return store.create(appDescStream); } @Override public void uninstall(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); try { store.remove(appId); } catch (Exception e) { log.warn("Unable to purge application directory for {}", appId.name()); } } @Override public void activate(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); store.activate(appId); } @Override public void deactivate(ApplicationId appId) { checkNotNull(appId, APP_ID_NULL); store.deactivate(appId); } @Override public void setPermissions(ApplicationId appId, Set<Permission> permissions) { checkNotNull(appId, APP_ID_NULL); checkNotNull(permissions, "Permissions cannot be null"); store.setPermissions(appId, permissions); } private class InternalStoreDelegate implements ApplicationStoreDelegate { @Override public void notify(ApplicationEvent event) { ApplicationEvent.Type type = event.type(); Application app = event.subject(); try { if (type == APP_ACTIVATED) { if (installAppFeatures(app)) { log.info("Application {} has been activated", app.id().name()); } } else if (type == APP_DEACTIVATED) { if (uninstallAppFeatures(app)) { log.info("Application {} has been deactivated", app.id().name()); } } else if (type == APP_INSTALLED) { if (installAppArtifacts(app)) { log.info("Application {} has been installed", app.id().name()); } } else if (type == APP_UNINSTALLED) { if (uninstallAppFeatures(app) || uninstallAppArtifacts(app)) { log.info("Application {} has been uninstalled", app.id().name()); } } post(event); } catch (Exception e) { log.warn("Unable to perform operation on application " + app.id().name(), e); } } } // The 
following methods are fully synchronized to guard against remote vs. // locally induced feature service interactions. private synchronized boolean installAppArtifacts(Application app) throws Exception { if (app.featuresRepo().isPresent() && featuresService.getRepository(app.featuresRepo().get()) == null) { featuresService.addRepository(app.featuresRepo().get()); return true; } return false; } private synchronized boolean uninstallAppArtifacts(Application app) throws Exception { if (app.featuresRepo().isPresent() && featuresService.getRepository(app.featuresRepo().get()) != null) { featuresService.removeRepository(app.featuresRepo().get()); return true; } return false; } private synchronized boolean installAppFeatures(Application app) throws Exception { boolean changed = false; for (String name : app.features()) { Feature feature = featuresService.getFeature(name); if (feature != null && !featuresService.isInstalled(feature)) { featuresService.installFeature(name); changed = true; } else if (feature == null && !initializing) { // Suppress feature-not-found reporting during startup since these // can arise naturally from the staggered cluster install. log.warn("Feature {} not found", name); } } return changed; } private synchronized boolean uninstallAppFeatures(Application app) throws Exception { boolean changed = false; for (String name : app.features()) { Feature feature = featuresService.getFeature(name); if (feature != null && featuresService.isInstalled(feature)) { featuresService.uninstallFeature(name); changed = true; } else if (feature == null) { log.warn("Feature {} not found", name); } } return changed; } }
apache-2.0
xingwu1/azure-sdk-for-node
lib/services/datafactoryManagement/lib/models/responsysObjectDataset.js
3288
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; const models = require('./index'); /** * Responsys dataset. * * @extends models['Dataset'] */ class ResponsysObjectDataset extends models['Dataset'] { /** * Create a ResponsysObjectDataset. * @member {object} [tableName] The table name. Type: string (or Expression * with resultType string). */ constructor() { super(); } /** * Defines the metadata of ResponsysObjectDataset * * @returns {object} metadata of ResponsysObjectDataset * */ mapper() { return { required: false, serializedName: 'ResponsysObject', type: { name: 'Composite', polymorphicDiscriminator: { serializedName: 'type', clientName: 'type' }, uberParent: 'Dataset', className: 'ResponsysObjectDataset', modelProperties: { description: { required: false, serializedName: 'description', type: { name: 'String' } }, structure: { required: false, serializedName: 'structure', type: { name: 'Object' } }, linkedServiceName: { required: true, serializedName: 'linkedServiceName', defaultValue: {}, type: { name: 'Composite', className: 'LinkedServiceReference' } }, parameters: { required: false, serializedName: 'parameters', type: { name: 'Dictionary', value: { required: false, serializedName: 'ParameterSpecificationElementType', type: { name: 'Composite', className: 'ParameterSpecification' } } } }, annotations: { required: false, serializedName: 'annotations', type: { name: 'Sequence', element: { required: false, serializedName: 'ObjectElementType', type: { name: 'Object' } } } }, folder: { required: false, serializedName: 'folder', type: { name: 'Composite', className: 'DatasetFolder' } }, type: { required: true, serializedName: 'type', isPolymorphicDiscriminator: true, type: { name: 'String' } }, tableName: { required: false, serializedName: 'typeProperties.tableName', type: { name: 'Object' } } } } }; } } module.exports = ResponsysObjectDataset;
apache-2.0
apache/santuario-java
src/main/java/org/apache/xml/security/stax/impl/processor/output/AbstractSignatureOutputProcessor.java
14441
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xml.security.stax.impl.processor.output; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import org.apache.xml.security.exceptions.XMLSecurityException; import org.apache.xml.security.stax.config.JCEAlgorithmMapper; import org.apache.xml.security.stax.config.ResourceResolverMapper; import org.apache.xml.security.stax.ext.AbstractOutputProcessor; import org.apache.xml.security.stax.ext.OutputProcessorChain; import org.apache.xml.security.stax.ext.ResourceResolver; import org.apache.xml.security.stax.ext.SecurePart; import org.apache.xml.security.stax.ext.Transformer; import org.apache.xml.security.stax.ext.XMLSecurityConstants; import org.apache.xml.security.stax.ext.XMLSecurityUtils; import org.apache.xml.security.stax.ext.stax.XMLSecEvent; import org.apache.xml.security.stax.ext.stax.XMLSecStartElement; import org.apache.xml.security.stax.impl.SignaturePartDef; import org.apache.xml.security.stax.impl.transformer.TransformIdentity; import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_Excl; import org.apache.xml.security.stax.impl.util.DigestOutputStream; import org.apache.xml.security.utils.UnsyncBufferedOutputStream; import org.apache.xml.security.utils.XMLUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** */ public abstract class AbstractSignatureOutputProcessor extends AbstractOutputProcessor { private static final transient Logger LOG = LoggerFactory.getLogger(AbstractSignatureOutputProcessor.class); private final List<SignaturePartDef> signaturePartDefList = new ArrayList<>(); private InternalSignatureOutputProcessor activeInternalSignatureOutputProcessor; public AbstractSignatureOutputProcessor() throws XMLSecurityException { super(); } public List<SignaturePartDef> getSignaturePartDefList() { return signaturePartDefList; } @Override public abstract void processEvent(XMLSecEvent xmlSecEvent, OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException; @Override public void doFinal(OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException { doFinalInternal(outputProcessorChain); super.doFinal(outputProcessorChain); } protected void doFinalInternal(OutputProcessorChain outputProcessorChain) throws XMLSecurityException, XMLStreamException { Map<Object, SecurePart> dynamicSecureParts = 
outputProcessorChain.getSecurityContext().getAsMap(XMLSecurityConstants.SIGNATURE_PARTS); if (dynamicSecureParts != null) { Iterator<Map.Entry<Object, SecurePart>> securePartsMapIterator = dynamicSecureParts.entrySet().iterator(); while (securePartsMapIterator.hasNext()) { Map.Entry<Object, SecurePart> securePartEntry = securePartsMapIterator.next(); final SecurePart securePart = securePartEntry.getValue(); if (securePart.getExternalReference() != null) { digestExternalReference(outputProcessorChain, securePart); } } } verifySignatureParts(outputProcessorChain); } protected void digestExternalReference( OutputProcessorChain outputProcessorChain, SecurePart securePart) throws XMLSecurityException, XMLStreamException { final String externalReference = securePart.getExternalReference(); ResourceResolver resourceResolver = ResourceResolverMapper.getResourceResolver( externalReference, outputProcessorChain.getDocumentContext().getBaseURI()); String digestAlgo = securePart.getDigestMethod(); if (digestAlgo == null) { digestAlgo = getSecurityProperties().getSignatureDigestAlgorithm(); } DigestOutputStream digestOutputStream = createMessageDigestOutputStream(digestAlgo); InputStream inputStream = resourceResolver.getInputStreamFromExternalReference(); SignaturePartDef signaturePartDef = new SignaturePartDef(); signaturePartDef.setSecurePart(securePart); signaturePartDef.setSigRefId(externalReference); signaturePartDef.setExternalResource(true); signaturePartDef.setTransforms(securePart.getTransforms()); signaturePartDef.setDigestAlgo(digestAlgo); try { if (securePart.getTransforms() != null) { signaturePartDef.setExcludeVisibleC14Nprefixes(true); Transformer transformer = buildTransformerChain(digestOutputStream, signaturePartDef, null); transformer.transform(inputStream); transformer.doFinal(); } else { XMLSecurityUtils.copy(inputStream, digestOutputStream); } digestOutputStream.close(); } catch (IOException e) { throw new XMLSecurityException(e); } String calculatedDigest = XMLUtils.encodeToString(digestOutputStream.getDigestValue()); LOG.debug("Calculated Digest: {}", calculatedDigest); signaturePartDef.setDigestValue(calculatedDigest); getSignaturePartDefList().add(signaturePartDef); } protected void verifySignatureParts(OutputProcessorChain outputProcessorChain) throws XMLSecurityException { List<SignaturePartDef> signaturePartDefs = getSignaturePartDefList(); Map<Object, SecurePart> dynamicSecureParts = outputProcessorChain.getSecurityContext().getAsMap(XMLSecurityConstants.SIGNATURE_PARTS); if (dynamicSecureParts != null) { Iterator<Map.Entry<Object, SecurePart>> securePartsMapIterator = dynamicSecureParts.entrySet().iterator(); loop: while (securePartsMapIterator.hasNext()) { Map.Entry<Object, SecurePart> securePartEntry = securePartsMapIterator.next(); final SecurePart securePart = securePartEntry.getValue(); if (securePart.isRequired()) { for (int i = 0; i < signaturePartDefs.size(); i++) { SignaturePartDef signaturePartDef = signaturePartDefs.get(i); if (signaturePartDef.getSecurePart() == securePart) { continue loop; } } throw new XMLSecurityException("stax.signature.securePartNotFound", new Object[] {securePart.getName()}); } } } } protected InternalSignatureOutputProcessor getActiveInternalSignatureOutputProcessor() { return activeInternalSignatureOutputProcessor; } protected void setActiveInternalSignatureOutputProcessor( InternalSignatureOutputProcessor activeInternalSignatureOutputProcessor) { this.activeInternalSignatureOutputProcessor = activeInternalSignatureOutputProcessor; } 
protected DigestOutputStream createMessageDigestOutputStream(String digestAlgorithm) throws XMLSecurityException { String jceName = JCEAlgorithmMapper.translateURItoJCEID(digestAlgorithm); String jceProvider = JCEAlgorithmMapper.getJCEProviderFromURI(digestAlgorithm); if (jceName == null) { throw new XMLSecurityException("algorithms.NoSuchMap", new Object[] {digestAlgorithm}); } MessageDigest messageDigest; try { if (jceProvider != null) { messageDigest = MessageDigest.getInstance(jceName, jceProvider); } else { messageDigest = MessageDigest.getInstance(jceName); } } catch (NoSuchAlgorithmException e) { throw new XMLSecurityException(e); } catch (NoSuchProviderException e) { throw new XMLSecurityException(e); } return new DigestOutputStream(messageDigest); } protected Transformer buildTransformerChain(OutputStream outputStream, SignaturePartDef signaturePartDef, XMLSecStartElement xmlSecStartElement) throws XMLSecurityException { String[] transforms = signaturePartDef.getTransforms(); if (transforms == null || transforms.length == 0) { Transformer transformer = new TransformIdentity(); transformer.setOutputStream(outputStream); return transformer; } Transformer parentTransformer = null; for (int i = transforms.length - 1; i >= 0; i--) { String transform = transforms[i]; Map<String, Object> transformerProperties = null; if (getSecurityProperties().isAddExcC14NInclusivePrefixes() && XMLSecurityConstants.NS_C14N_EXCL_OMIT_COMMENTS.equals(transform)) { Set<String> prefixSet = XMLSecurityUtils.getExcC14NInclusiveNamespacePrefixes( xmlSecStartElement, signaturePartDef.isExcludeVisibleC14Nprefixes() ); StringBuilder prefixes = new StringBuilder(); for (Iterator<String> iterator = prefixSet.iterator(); iterator.hasNext(); ) { String prefix = iterator.next(); if (prefixes.length() != 0) { prefixes.append(' '); } prefixes.append(prefix); } signaturePartDef.setInclusiveNamespacesPrefixes(prefixes.toString()); List<String> inclusiveNamespacePrefixes = new ArrayList<>(prefixSet); transformerProperties = new HashMap<>(); transformerProperties.put( Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespacePrefixes); } if (parentTransformer != null) { parentTransformer = XMLSecurityUtils.getTransformer( parentTransformer, null, transformerProperties, transform, XMLSecurityConstants.DIRECTION.OUT); } else { parentTransformer = XMLSecurityUtils.getTransformer( null, outputStream, transformerProperties, transform, XMLSecurityConstants.DIRECTION.OUT); } } return parentTransformer; } public class InternalSignatureOutputProcessor extends AbstractOutputProcessor { private SignaturePartDef signaturePartDef; private XMLSecStartElement xmlSecStartElement; private int elementCounter; private OutputStream bufferedDigestOutputStream; private DigestOutputStream digestOutputStream; private Transformer transformer; public InternalSignatureOutputProcessor(SignaturePartDef signaturePartDef, XMLSecStartElement xmlSecStartElement) throws XMLSecurityException { super(); this.addBeforeProcessor(InternalSignatureOutputProcessor.class.getName()); this.signaturePartDef = signaturePartDef; this.xmlSecStartElement = xmlSecStartElement; } @Override public void init(OutputProcessorChain outputProcessorChain) throws XMLSecurityException { this.digestOutputStream = createMessageDigestOutputStream(signaturePartDef.getDigestAlgo()); this.bufferedDigestOutputStream = new UnsyncBufferedOutputStream(digestOutputStream); this.transformer = buildTransformerChain(this.bufferedDigestOutputStream, signaturePartDef, 
xmlSecStartElement); super.init(outputProcessorChain); } @Override public void processEvent(XMLSecEvent xmlSecEvent, OutputProcessorChain outputProcessorChain) throws XMLStreamException, XMLSecurityException { transformer.transform(xmlSecEvent); if (XMLStreamConstants.START_ELEMENT == xmlSecEvent.getEventType()) { elementCounter++; } else if (XMLStreamConstants.END_ELEMENT == xmlSecEvent.getEventType()) { elementCounter--; if (elementCounter == 0 && xmlSecEvent.asEndElement().getName().equals(this.xmlSecStartElement.getName())) { transformer.doFinal(); try { bufferedDigestOutputStream.close(); } catch (IOException e) { throw new XMLSecurityException(e); } String calculatedDigest = XMLUtils.encodeToString(this.digestOutputStream.getDigestValue()); LOG.debug("Calculated Digest: {}", calculatedDigest); signaturePartDef.setDigestValue(calculatedDigest); outputProcessorChain.removeProcessor(this); //from now on signature is possible again setActiveInternalSignatureOutputProcessor(null); } } outputProcessorChain.processEvent(xmlSecEvent); } } }
apache-2.0
apache/incubator-corinthia
consumers/dfutil/src/main.c
5917
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include "DFPlatform.h" #include "Commands.h" #include "BDTTests.h" #include "WordPlain.h" #include "HTMLPlain.h" #include "FunctionTests.h" #include "StringTests.h" #include "DFZipFile.h" #include "DFCommon.h" #include "DFFilesystem.h" #include <DocFormats/DocFormats.h> #include <stdio.h> #include <string.h> #include <stdlib.h> static int runCommand(int argc, const char **argv, DFError **dferr) { if ((argc == 3) && !strcmp(argv[1],"-pp")) { return prettyPrintFile(argv[2],dferr); } else if ((argc == 4) && !strcmp(argv[1],"-fp")) { return fromPlain(argv[2],argv[3],dferr); } else if ((argc == 3) && !strcmp(argv[1],"-normalize")) { return normalizeFile(argv[2],dferr); } else if ((argc >= 2) && !strcmp(argv[1],"-bdt")) { BDT_Test(argc-2,&argv[2]); return 1; } else if ((argc == 3) && !strcmp(argv[1],"-css")) { return testCSS(argv[2],dferr); } else if ((argc == 3) && !strcmp(argv[1],"-parsehtml")) { return parseHTMLFile(argv[2],dferr); } else if ((argc == 3) && !strcmp(argv[1],"-tplist")) { return textPackageList(argv[2],dferr); } else if ((argc == 4) && !strcmp(argv[1],"-tpget")) { return textPackageGet(argv[2],argv[3],dferr); } else if ((argc == 4) && !strcmp(argv[1],"-diff")) { return diffFiles(argv[2],argv[3],dferr); } else if ((argc == 3) && !strcmp(argv[1],"-parsecontent")) { parseContent(argv[2]); return 1; } else if ((argc >= 2) && !strcmp(argv[1],"-btos")) { const char *inFilename = (argc >= 3) ? argv[2] : NULL; const char *outFilename = (argc >= 4) ? argv[3] : NULL; return btosFile(inFilename,outFilename,dferr); } else if ((argc >= 2) && !strcmp(argv[1],"-stob")) { const char *inFilename = (argc >= 3) ? argv[2] : NULL; const char *outFilename = (argc >= 4) ? 
argv[3] : NULL; return stobFile(inFilename,outFilename,dferr); } else if ((argc == 3) && (!strcmp(argv[1],"-css-escape"))) { return escapeCSSIdent(argv[2],dferr); } else if ((argc == 3) && (!strcmp(argv[1],"-css-unescape"))) { return unescapeCSSIdent(argv[2],dferr); } #ifdef __APPLE__ else if ((argc == 2) && (!strcmp(argv[1],"-test-unicode"))) { return testUnicode(); } else if ((argc == 2) && (!strcmp(argv[1],"-test-strings"))) { return testStrings(); } else if ((argc == 2) && (!strcmp(argv[1],"-test-path"))) { testPathFunctions(); return 1; } #endif else if ((argc == 4) && !strcmp(argv[1],"-zip")) { DFStorage *storage = DFStorageNewFilesystem(argv[3],DFFileFormatUnknown); int r = DFZip(argv[2],storage,dferr); DFStorageRelease(storage); return r; } else if ((argc == 4) && !strcmp(argv[1],"-unzip")) { DFStorage *storage = DFStorageNewFilesystem(argv[3],DFFileFormatUnknown); int r = DFUnzip(argv[2],storage,dferr); DFStorageRelease(storage); return r; } else { //////////////////////////////////////////////////////////////////////////////// printf("Usage:\n" "\n" "dfutil -pp filename\n" " Print a plain text version of a .docx or .odt file to standard output\n" "\n" "dfutil -fp infilename outfilename\n" " Create a .docx or .odt file based on a plain text representation. If\n" " infilename is -, read from standard input.\n" "\n" "dfutil -normalize filename\n" " Normalize a HTML file\n" "\n" "dfutil -parsecontent string\n" " Parse a value as if it were given as a CSS 'content' property, and print parts\n" "\n" "dfutil -btos [infilename] [outfilename]\n" " Convert binary data to string\n" "\n" "dfutil -stob [infilename] [outfilename]\n" " Convert string to binary data\n" "\n" "dfutil -css-escape [infilename]\n" " Escape CSS class name\n" "\n" "dfutil -css-unescape [infilename]\n" " Unescape CSS class name\n" "\n" "dfutil -zip zipFilename sourceDir\n" " Create a zip file\n" "\n" "dfutil -unzip zipFilename destDir\n" " Extract a zip file\n" "\n" "dfutil input.html output.docx\n" "dfutil input.html output.odt\n" "dfutil input.docx output.html\n" "dfutil input.docx output.html\n" " Convert to/from .docx or .odt and .html\n"); return 1; } } int main(int argc, const char * argv[]) { int r = 0; DFError *dferr = NULL; if (!runCommand(argc,argv,&dferr)) { fprintf(stderr,"%s\n",DFErrorMessage(&dferr)); DFErrorRelease(dferr); r = 1; } return r; }
apache-2.0
mdittmer/foam
js/foam/ui/md/SectionView.js
3734
/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ package: 'foam.ui.md', name: 'SectionView', extends: 'foam.flow.Element', requires: [ 'foam.ui.Icon', 'foam.ui.md.ExpandableView' ], constants: { ELEMENT_NAME: 'section' }, properties: [ { model_: 'BooleanProperty', name: 'expandable', defaultValue: true, postSet: function(old, nu) { if ( ! this.$ || old === nu ) return; // Need full re-render to correctly wire (or not wire) this.on('click'). this.updateHTML(); } }, { model_: 'BooleanProperty', name: 'expanded', defaultValue: true }, { model_: 'StringProperty', name: 'title', defaultValue: 'Heading' }, { model_: 'StringProperty', name: 'titleClass', defaultValue: 'md-subhead' }, { model_: 'ViewFactoryProperty', name: 'icon', defaultValue: null }, { model_: 'ViewFactoryProperty', name: 'delegate' }, { name: 'delegateView', postSet: function(old, nu) { if ( old && old.expanded$ ) Events.unfollow(this.expanded$, old.expanded$); if ( nu && nu.expanded$ ) Events.follow(this.expanded$, nu.expanded$); } }, { model_: 'StringProperty', name: 'expandedIconId', lazyFactory: function() { return this.id + '-expanded-icon'; } }, { model_: 'ViewFactoryProperty', name: 'expandedIcon', defaultValue: function() { return this.Icon.create({ id: this.expandedIconId, url: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAQAAABKfvVzAAAARUlEQVR4AWMY1GAUNAAhScr/A2EDKcr/ACFcC2HlvxnCGMIhWohVDgQwLYSVh8K4hLU0AJWHQNkILXX47NDCIjIIwSgAAGEBHc5iOzTwAAAAAElFTkSuQmCC', ligature: 'expand_less', extraClassName: 'expanded-icon' }, this.Y); } }, ], methods: [ { name: 'initHTML', code: function() { this.SUPER.apply(this, arguments); if ( this.expandable ) { this.delegateView.expandedIcon = this.X.$(this.expandedIconId); } } } ], listeners: [ { name: 'onToggleExpanded', code: function() { this.delegateView && this.delegateView.toggleExpanded && this.delegateView.toggleExpanded(); } } ], templates: [ function toInnerHTML() {/* <% this.delegateView = this.delegate(); this.addDataChild(this.delegateView); %> <heading id="{{this.id}}-heading" class="{{this.titleClass}}"> <% if ( this.icon ) { %>%%icon()<% } %> <span>{{this.title}}</span> <% if ( this.expandable ) { this.on('click', this.onToggleExpanded, this.id + '-heading'); %> <div class="flex-flush-right"> %%expandedIcon() </div> <% } %> </heading> %%delegateView */}, function CSS() {/* section heading { display: flex; align-items: center; cursor: pointer; margin: 8px 0; } section heading > * { flex-grow: 0; } section heading div.flex-flush-right { flex-grow: 1; display: flex; justify-content: flex-end; } section heading icon { margin-right: 12px; } section heading icon.expanded-icon { margin-right: initial; } */} ] });
apache-2.0
wsldl123292/testeveryting
concurrency/src/main/java/threadmanagement/join/DataSourcesLoader.java
532
package threadmanagement.join; import java.util.Date; import java.util.concurrent.TimeUnit; /** * Function: * Author: ldl * Date: 2016-07-29 17:51 */ public class DataSourcesLoader implements Runnable { @Override public void run() { System.out.printf("Started loading data sources: %s\n", new Date()); try { TimeUnit.SECONDS.sleep(4); } catch (InterruptedException e) { e.printStackTrace(); } System.out.printf("Finished loading data sources: %s\n", new Date()); } }
apache-2.0
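Illustrative note (not part of the dataset record above): a minimal launcher sketch, in Java, showing how the DataSourcesLoader Runnable above could be driven with Thread.join(), which is what the threadmanagement.join package name points at. The DataSourcesLoaderDemo class name is hypothetical and not taken from the repository.
import threadmanagement.join.DataSourcesLoader;

public class DataSourcesLoaderDemo {
    public static void main(String[] args) throws InterruptedException {
        // run the loader on a worker thread
        Thread loader = new Thread(new DataSourcesLoader());
        loader.start();
        // block the main thread until the loader has finished (about 4 seconds here)
        loader.join();
        System.out.println("Main thread resumes after join()");
    }
}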
StefanoRaggi/Lean
ToolBox/Polygon/PolygonDownloaderProgram.cs
4321
/* * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals. * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Linq; using QuantConnect.Configuration; using QuantConnect.Data; using QuantConnect.Logging; using QuantConnect.Securities; using QuantConnect.Util; namespace QuantConnect.ToolBox.Polygon { public class PolygonDownloaderProgram { /// <summary> /// Primary entry point to the program. This program only supports SecurityType.Equity /// </summary> public static void PolygonDownloader(IList<string> tickers, string securityTypeString, string market, string resolutionString, DateTime fromDate, DateTime toDate) { if (tickers.IsNullOrEmpty() || securityTypeString.IsNullOrEmpty() || market.IsNullOrEmpty() || resolutionString.IsNullOrEmpty()) { Console.WriteLine("PolygonDownloader ERROR: '--tickers=' or '--security-type=' or '--market=' or '--resolution=' parameter is missing"); Console.WriteLine("--tickers=eg SPY,AAPL"); Console.WriteLine("--security-type=Equity"); Console.WriteLine("--market=usa"); Console.WriteLine("--resolution=Minute/Hour/Daily"); Environment.Exit(1); } try { // Load settings from command line var resolution = (Resolution)Enum.Parse(typeof(Resolution), resolutionString); var securityType = (SecurityType)Enum.Parse(typeof(SecurityType), securityTypeString); // Polygon.io does not support Crypto historical quotes var tickTypes = securityType == SecurityType.Crypto ? new List<TickType> { TickType.Trade } : SubscriptionManager.DefaultDataTypes()[securityType]; // Load settings from config.json var dataDirectory = Config.Get("data-directory", "../../../Data"); var startDate = fromDate.ConvertToUtc(TimeZones.NewYork); var endDate = toDate.ConvertToUtc(TimeZones.NewYork); var marketHoursDatabase = MarketHoursDatabase.FromDataFolder(); // Create an instance of the downloader using (var downloader = new PolygonDataDownloader()) { foreach (var ticker in tickers) { var symbol = Symbol.Create(ticker, securityType, market); var exchangeTimeZone = marketHoursDatabase.GetExchangeHours(market, symbol, securityType).TimeZone; var dataTimeZone = marketHoursDatabase.GetDataTimeZone(market, symbol, securityType); foreach (var tickType in tickTypes) { // Download the data var data = downloader.Get(symbol, resolution, startDate, endDate, tickType) .Select(x => { x.Time = x.Time.ConvertTo(exchangeTimeZone, dataTimeZone); return x; } ); // Save the data var writer = new LeanDataWriter(resolution, symbol, dataDirectory, tickType); writer.Write(data); } } } } catch (Exception err) { Log.Error(err); } } } }
apache-2.0
pwojnaro/robotium-sandwich
robotium-sandwich/src/main/java/com/appthwack/sandwich/views/interfaces/IACompoundButton.java
321
package com.appthwack.sandwich.views.interfaces; // TODO: Auto-generated Javadoc /** * The Interface IACompoundButton. */ public interface IACompoundButton extends IAButton { /** * Checks if a CompoundButton associated with this element is checked * * @return true, if is checked */ boolean isChecked(); }
apache-2.0
goldmansachs/obevo
obevo-db-impls/obevo-db-sybase-ase/src/test/resources/reveng/ddlgen/expected/dbdeploy01/table/TestTable.sql
1123
//// CHANGE name=change0 create table TestTable ( idField int not null , stringField varchar(100) not null , stringDateField date null , dateTimeField datetime null , myBooleanCol int null , tinyIntCol tinyint not null , timeUpdated datetime not null , textField text null , myNewCol int null , CONSTRAINT PK PRIMARY KEY CLUSTERED ( idField ) on 'default' ) lock datarows on 'default' GO //// CHANGE name=change1 sp_bindefault 'DateDefault', 'TestTable.stringDateField' GO //// CHANGE name=change2 sp_bindrule 'booleanRule', 'TestTable.myBooleanCol' GO //// CHANGE INDEX name=IND1 create nonclustered index IND1 on TestTable(stringField) GO
apache-2.0
OLR-xray/OLR-3.0
src/3rd party/MagicSoftware/FreeMagic/Include/MgcTUnorderedSet.h
1726
// Magic Software, Inc. // http://www.magic-software.com // Copyright (c) 2000-2002. All Rights Reserved // // Source code from Magic Software is supplied under the terms of a license // agreement and may not be copied or disclosed except in accordance with the // terms of that agreement. The various license agreements may be found at // the Magic Software web site. This file is subject to the license // // FREE SOURCE CODE // http://www.magic-software.com/License/free.pdf #ifndef MGCTUNORDEREDSET_H #define MGCTUNORDEREDSET_H // An unordered set of objects stored in contiguous memory. The type T must // have the following member functions: // T::T(); // T::~T(); // T& operator= (const T&); // bool operator== (const T&) const; #include <cassert> #include <cstddef> namespace Mgc { template <class T> class TUnorderedSet { public: TUnorderedSet (int iMaxQuantity = 0, int iGrow = 0); TUnorderedSet (const TUnorderedSet& rkSet); ~TUnorderedSet (); void Reset (int iMaxQuantity = 0, int iGrow = 0); void Clear (); TUnorderedSet& operator= (const TUnorderedSet& rkSet); int GetMaxQuantity () const; int GetGrow () const; int GetQuantity () const; const T& Get (int i) const; T& operator[] (int i); bool Exists (const T& rtElement) const; bool Insert (const T& rtElement); int Append (const T& rtElement); bool Remove (const T& rtElement, int* piOld = NULL, int* piNew = NULL); bool RemoveAt (int i, int* piOld = NULL, int* piNew = NULL); void Compactify (); enum { DEFAULT_GROW = 4 }; protected: int m_iQuantity, m_iMaxQuantity, m_iGrow; T* m_atElement; }; #include "MgcTUnorderedSet.inl" } // namespace Mgc #endif
apache-2.0
tempbottle/JgFramework
src/main/java/com/zhaidaosi/game/jgframework/common/queue/BaseQueue.java
4050
package com.zhaidaosi.game.jgframework.common.queue; /** * A FIFO queue whose elements can be located quickly */ public class BaseQueue<E> { private BaseQueueElement<E> start = null; private BaseQueueElement<E> end = null; private Object lock = new Object(); private long putCount = 0; private long takeCount = 0; private long size = 0; /** * Gets the head of the queue * * @return */ public BaseQueueElement<E> getStart() { return start; } /** * Inserts an element at the tail of the queue * * @param value * @return */ public BaseQueueElement<E> put(E value) { if (value == null) { return null; } BaseQueueElement<E> element = new BaseQueueElement<E>(value); synchronized (lock) { if (start == null) { start = element; end = element; } else { element.setBefore(end); end.setNext(element); end = element; } putCount++; size++; element.setNo(putCount); } return element; } /** * Pops an element from the head of the queue * * @return */ public BaseQueueElement<E> take() { if (start == null) { return null; } BaseQueueElement<E> element; synchronized (lock) { element = start; if (start == end) { start = null; end = null; putCount = 0; takeCount = 0; } else { start = start.getNext(); start.setBefore(null); takeCount++; } size--; element.reset(); } return element; } /** * Removes an element * * @param element */ public boolean remove(BaseQueueElement<E> element) { BaseQueueElement<E> after = null; synchronized (lock) { if (element == start && element == end) { start = null; end = null; putCount = 0; takeCount = 0; } else if (element == start) { start = start.getNext(); start.setBefore(null); takeCount++; } else if (element == end) { end = element.getBefore(); end.setNext(null); } else { // check whether the element is actually in the queue if (element.getBefore().getNext() != element || element.getNext().getBefore() != element) { return false; } after = element.getNext(); after.setBefore(element.getBefore()); element.getBefore().setNext(after); } size--; element.reset(); if (after != null) { do { after.setNo(after.getNo() - 1); after = after.getNext(); } while (after != null); } } return true; } /** * Finds the position of an element in the queue * * @param element * @return */ public long findIndex(BaseQueueElement<E> element) { if (element == null) { return -1; } synchronized (lock) { return element.getNo() - takeCount; } } /** * Returns the length of the queue * * @return */ public long size() { synchronized (lock) { return size; } } /** * Removes all elements from the queue */ public void clear() { synchronized (lock) { while (start != null) { BaseQueueElement<E> element = start; start = start.getNext(); element.reset(); element = null; } start = null; end = null; putCount = 0; takeCount = 0; size = 0; } } }
apache-2.0
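A hypothetical usage sketch for the BaseQueue class above; it is not part of the JgFramework sources. It assumes the accompanying BaseQueueElement class from the same package is available, since put() returns that handle type, and it relies only on the methods visible in the record. Note that take() calls reset() on the element before returning it.

import com.zhaidaosi.game.jgframework.common.queue.BaseQueue;
import com.zhaidaosi.game.jgframework.common.queue.BaseQueueElement;

public class BaseQueueDemo {
    public static void main(String[] args) {
        BaseQueue<String> queue = new BaseQueue<String>();

        // put() returns an element handle that can later be used for direct removal or lookup.
        BaseQueueElement<String> first = queue.put("first");
        BaseQueueElement<String> second = queue.put("second");
        queue.put("third");

        // findIndex() locates an element via its stored sequence number, without walking the list.
        System.out.println(queue.findIndex(first));   // 1
        System.out.println(queue.findIndex(second));  // 2

        // take() pops from the head in FIFO order; the returned element has been reset().
        queue.take();
        System.out.println(queue.size());             // 2

        // remove() unlinks an arbitrary element that is still linked into the queue.
        System.out.println(queue.remove(second));     // true
        System.out.println(queue.size());             // 1

        queue.clear();
    }
}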
jonathanmarvens/typescript
samples/node/node.d.ts
45576
/************************************************ * * * Node.js v0.8.8 API * * * ************************************************/ /************************************************ * * * GLOBAL * * * ************************************************/ declare var process: NodeProcess; declare var global: any; declare var console: { log(...data: any[]): void; info(...data: any[]): void; error(...data: any[]): void; warn(...data: any[]): void; dir(obj: any): void; timeEnd(label: string): void; trace(label: string): void; assert(expression: any, ...message: string[]): void; } declare var __filename: string; declare var __dirname: string; declare function setTimeout(callback: () => void , ms: number): any; declare function clearTimeout(timeoutId: any); declare function setInterval(callback: () => void , ms: number): any; declare function clearInterval(intervalId: any); declare var require: { (id: string): any; resolve(): string; cache: any; extensions: any; } declare var module: { exports: any; require(id: string): any; id: string; filename: string; loaded: bool; parent: any; children: any[]; } // Same as module.exports declare var exports: any; declare var SlowBuffer: { new (str: string, encoding?: string): NodeBuffer; new (size: number): NodeBuffer; new (array: any[]): NodeBuffer; prototype: NodeBuffer; isBuffer(obj: any): bool; byteLength(string: string, encoding?: string): number; concat(list: NodeBuffer[], totalLength?: number): NodeBuffer; }; declare var Buffer: { new (str: string, encoding?: string): NodeBuffer; new (size: number): NodeBuffer; new (array: any[]): NodeBuffer; prototype: NodeBuffer; isBuffer(obj: any): bool; byteLength(string: string, encoding?: string): number; concat(list: NodeBuffer[], totalLength?: number): NodeBuffer; } /************************************************ * * * INTERFACES * * * ************************************************/ interface EventEmitter { addListener(event: string, listener: Function); on(event: string, listener: Function); once(event: string, listener: Function): void; removeListener(event: string, listener: Function): void; removeAllListener(event: string): void; setMaxListeners(n: number): void; listeners(event: string): { Function; }[]; emit(event: string, arg1?: any, arg2?: any): void; } interface WritableStream extends EventEmitter { writable: bool; write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; end(): void; end(str: string, enconding: string): void; end(buffer: NodeBuffer): void; destroy(): void; destroySoon(): void; } interface ReadableStream extends EventEmitter { readable: bool; setEncoding(encoding: string): void; pause(): void; resume(): void; destroy(): void; pipe(destination: WritableStream, options?: { end?: bool; }): void; } interface NodeProcess extends EventEmitter { stdout: WritableStream; stderr: WritableStream; stdin: ReadableStream; argv: string[]; execPath: string; abort(): void; chdir(directory: string): void; cwd(): void; env: any; exit(code?: number): void; getgid(): number; setgid(id: number): void; getuid(): number; setuid(id: number): void; version: string; versions: { http_parser: string; node: string; v8: string; ares: string; uv: string; zlib: string; openssl: string; }; config: { target_defaults: { cflags: any[]; default_configuration: string; defines: string[]; include_dirs: string[]; libraries: string[]; }; variables: { clang: number; host_arch: string; node_install_npm: bool; node_install_waf: bool; node_prefix: string; node_shared_openssl: bool; node_shared_v8: 
bool; node_shared_zlib: bool; node_use_dtrace: bool; node_use_etw: bool; node_use_openssl: bool; target_arch: string; v8_no_strict_aliasing: number; v8_use_snapshot: bool; visibility: string; }; }; kill(pid: number, signal?: string): void; pid: number; title: string; arch: string; platform: string; memoryUsage(): { rss: number; heapTotal; number; heapUsed: number; }; nextTick(callback: Function): void; umask(mask?: number): number; uptime(): number; hrtime(): number[]; } // Buffer class interface NodeBuffer { [index: number]: number; write(string: string, offset?: number, length?: number, encoding?: string): number; toString(encoding: string, start: number, end: number): string; length: number; copy(targetBuffer: NodeBuffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): void; slice(start?: number, end?: number): NodeBuffer; readUInt8(offset: number, noAsset?: bool): number; readUInt16LE(offset: number, noAssert?: bool): number; readUInt16BE(offset: number, noAssert?: bool): number; readUInt32LE(offset: number, noAssert?: bool): number; readUInt32BE(offset: number, noAssert?: bool): number; readInt8(offset: number, noAssert?: bool): number; readInt16LE(offset: number, noAssert?: bool): number; readInt16BE(offset: number, noAssert?: bool): number; readInt32LE(offset: number, noAssert?: bool): number; readInt32BE(offset: number, noAssert?: bool): number; readFloatLE(offset: number, noAssert?: bool): number; readFloatBE(offset: number, noAssert?: bool): number; readDoubleLE(offset: number, noAssert?: bool): number; readDoubleBE(offset: number, noAssert?: bool): number; writeUInt8(value: number, offset: number, noAssert?: bool): void; writeUInt16LE(value: number, offset: number, noAssert?: bool): void; writeUInt16BE(value: number, offset: number, noAssert?: bool): void; writeUInt32LE(value: number, offset: number, noAssert?: bool): void; writeUInt32BE(value: number, offset: number, noAssert?: bool): void; writeInt8(value: number, offset: number, noAssert?: bool): void; writeInt16LE(value: number, offset: number, noAssert?: bool): void; writeInt16BE(value: number, offset: number, noAssert?: bool): void; writeInt32LE(value: number, offset: number, noAssert?: bool): void; writeInt32BE(value: number, offset: number, noAssert?: bool): void; writeFloatLE(value: number, offset: number, noAssert?: bool): void; writeFloatBE(value: number, offset: number, noAssert?: bool): void; writeDoubleLE(value: number, offset: number, noAssert?: bool): void; writeDoubleBE(value: number, offset: number, noAssert?: bool): void; fill(value: any, offset?: number, end?: number): void; INSPECT_MAX_BYTES: number; } /************************************************ * * * MODULES * * * ************************************************/ declare module "querystring" { export function stringify(obj: any, sep?: string, eq?: string): string; export function parse(str: string, sep?: string, eq?: string, options?: { maxKeys?: number; }): any; export function escape(): any; export function unescape(): any; } declare module "events" { export interface NodeEventEmitter { addListener(event: string, listener: Function); on(event: string, listener: Function): any; once(event: string, listener: Function): void; removeListener(event: string, listener: Function): void; removeAllListener(event: string): void; setMaxListeners(n: number): void; listeners(event: string): { Function; }[]; emit(event: string, arg1?: any, arg2?: any): void; } export var EventEmitter: NodeEventEmitter; } declare module "http" { import events = 
module("events"); import net = module("net"); import stream = module("stream"); export interface Server extends events.NodeEventEmitter { listen(port: number, hostname?: string, backlog?: number, callback?: Function): void; listen(path: string, callback?: Function): void; listen(handle: any, listeningListener?: Function): void; close(cb?: any): void; maxHeadersCount: number; } export interface ServerRequest extends events.NodeEventEmitter, stream.ReadableStream { method: string; url: string; headers: string; trailers: string; httpVersion: string; setEncoding(encoding?: string): void; pause(): void; resume(): void; connection: net.NodeSocket; } export interface ServerResponse extends events.NodeEventEmitter, stream.WritableStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; writeContinue(): void; writeHead(statusCode: number, reasonPhrase?: string, headers?: any): void; writeHead(statusCode: number, headers?: any): void; statusCode: number; setHeader(name: string, value: string): void; sendDate: bool; getHeader(name: string): string; removeHeader(name: string): void; write(chunk: any, encoding?: string): any; addTrailers(headers: any): void; end(data?: any, encoding?: string): void; } export interface ClientRequest extends events.NodeEventEmitter, stream.WritableStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; write(chunk: any, encoding?: string): void; end(data?: any, encoding?: string): void; abort(): void; setTimeout(timeout: number, callback?: Function): void; setNoDelay(noDelay?: Function): void; setSocketKeepAlive(enable?: bool, initialDelay?: number): void; } export interface ClientResponse extends events.NodeEventEmitter, stream.ReadableStream { statusCode: number; httpVersion: string; headers: any; trailers: any; setEncoding(encoding?: string): void; pause(): void; resume(): void; } export interface Agent { maxSockets: number; sockets: any; requests: any; } export var STATUS_CODES; export function createServer(requestListener?: (request: ServerRequest, response: ServerResponse) =>void ): Server; export function createClient(port?: number, host?: string): any; export function request(options: any, callback?: Function): ClientRequest; export function get(options: any, callback?: Function): ClientRequest; export var globalAgent: Agent; } declare module "cluster" { import child_process = module("child_process"); export interface ClusterSettings { exec: string; args: string[]; silent: bool; } export interface Worker { id: string; process: child_process; suicide: bool; send(message: any, sendHandle?: any): void; destroy(): void; disconnect(): void; } export var settings: ClusterSettings; export var isMaster: bool; export var isWorker: bool; export function setupMaster(settings?: ClusterSettings): void; export function fork(env?: any): Worker; export function disconnect(callback?: Function): void; export var workers: any; // Event emitter export function addListener(event: string, listener: Function): void; export function on(event: string, listener: Function): any; export function once(event: string, listener: Function): void; export function removeListener(event: string, listener: Function): void; export function removeAllListener(event: string): void; export function setMaxListeners(n: number): void; export function listeners(event: string): { Function; }[]; export function emit(event: string, arg1?: any, arg2?: any): void; } declare 
module "zlib" { import stream = module("stream"); export interface ZlibOptions { chunkSize?: number; windowBits?: number; level?: number; memLevel?: number; strategy?: number; dictionary?: any; } export interface Gzip extends stream.ReadWriteStream { } export interface Gunzip extends stream.ReadWriteStream { } export interface Deflate extends stream.ReadWriteStream { } export interface Inflate extends stream.ReadWriteStream { } export interface DeflateRaw extends stream.ReadWriteStream { } export interface InflateRaw extends stream.ReadWriteStream { } export interface Unzip extends stream.ReadWriteStream { } export function createGzip(options: ZlibOptions): Gzip; export function createGunzip(options: ZlibOptions): Gunzip; export function createDeflate(options: ZlibOptions): Deflate; export function createInflate(options: ZlibOptions): Inflate; export function createDeflateRaw(options: ZlibOptions): DeflateRaw; export function createInflateRaw(options: ZlibOptions): InflateRaw; export function createUnzip(options: ZlibOptions): Unzip; export function deflate(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function deflateRaw(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function gzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function gunzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function inflate(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function inflateRaw(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; export function unzip(buf: NodeBuffer, callback: (error: Error, result) =>void ): void; // Constants export var Z_NO_FLUSH: number; export var Z_PARTIAL_FLUSH: number; export var Z_SYNC_FLUSH: number; export var Z_FULL_FLUSH: number; export var Z_FINISH: number; export var Z_BLOCK: number; export var Z_TREES: number; export var Z_OK: number; export var Z_STREAM_END: number; export var Z_NEED_DICT: number; export var Z_ERRNO: number; export var Z_STREAM_ERROR: number; export var Z_DATA_ERROR: number; export var Z_MEM_ERROR: number; export var Z_BUF_ERROR: number; export var Z_VERSION_ERROR: number; export var Z_NO_COMPRESSION: number; export var Z_BEST_SPEED: number; export var Z_BEST_COMPRESSION: number; export var Z_DEFAULT_COMPRESSION: number; export var Z_FILTERED: number; export var Z_HUFFMAN_ONLY: number; export var Z_RLE: number; export var Z_FIXED: number; export var Z_DEFAULT_STRATEGY: number; export var Z_BINARY: number; export var Z_TEXT: number; export var Z_ASCII: number; export var Z_UNKNOWN: number; export var Z_DEFLATED: number; export var Z_NULL: number; } declare module "os" { export function tmpDir(): string; export function hostname(): string; export function type(): string; export function platform(): string; export function arch(): string; export function release(): string; export function uptime(): number; export function loadavg(): number[]; export function totalmem(): number; export function freemem(): number; export function cpus(): { model: string; speed: number; times: { user: number; nice: number; sys: number; idle: number; irq: number; }; }[]; export function networkInterfaces(): any; export var EOL: string; } declare module "https" { import tls = module("tls"); import events = module("events"); import http = module("http"); export interface ServerOptions { pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; crl?: any; ciphers?: string; honorCipherOrder?: bool; requestCert?: bool; 
rejectUnauthorized?: bool; NPNProtocols?: any; SNICallback?: (servername: string) => any; } export interface RequestOptions { host?: string; hostname?: string; port?: number; path?: string; method?: string; headers?: any; auth?: string; agent?: any; pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; ciphers?: string; rejectUnauthorized?: bool; } export interface NodeAgent { maxSockets: number; sockets: any; requests: any; } export var Agent: { new (options?: RequestOptions): NodeAgent; }; export interface Server extends tls.Server { } export function createServer(options: ServerOptions, requestListener?: Function): Server; export function request(options: RequestOptions, callback?: (res: events.NodeEventEmitter) =>void ): http.ClientRequest; export function get(options: RequestOptions, callback?: (res: events.NodeEventEmitter) =>void ): http.ClientRequest; export var globalAgent: NodeAgent; } declare module "punycode" { export function decode(string: string): string; export function encode(string: string): string; export function toUnicode(domain: string): string; export function toASCII(domain: string): string; export var ucs2: ucs2; interface ucs2 { decode(string: string): string; encode(codePoints: number[]): string; } export var version; } declare module "repl" { import stream = module("stream"); import events = module("events"); export interface ReplOptions { prompt?: string; input?: stream.ReadableStream; output?: stream.WritableStream; terminal?: bool; eval?: Function; useColors?: bool; useGlobal?: bool; ignoreUndefined?: bool; writer?: Function; } export function start(options: ReplOptions): events.NodeEventEmitter; } declare module "readline" { import events = module("events"); import stream = module("stream"); export interface ReadLine extends events.NodeEventEmitter { setPrompt(prompt: string, length: number): void; prompt(preserveCursor?: bool): void; question(query: string, callback: Function): void; pause(): void; resume(): void; close(): void; write(data: any, key?: any): void; } export interface ReadLineOptions { input: stream.ReadableStream; output: stream.WritableStream; completer?: Function; terminal?: bool; } export function createInterface(options: ReadLineOptions): ReadLine; } declare module "vm" { export interface Context { } export interface Script { runInThisContext(): void; runInNewContext(sandbox?: Context): void; } export function runInThisContext(code: string, filename?: string): void; export function runInNewContext(code: string, sandbox?: Context, filename?: string): void; export function runInContext(code: string, context: Context, filename?: string): void; export function createContext(initSandbox?: Context): Context; export function createScript(code: string, filename?: string): Script; } declare module "child_process" { import events = module("events"); import stream = module("stream"); export interface ChildProcess extends events.NodeEventEmitter { stdin: stream.WritableStream; stdout: stream.ReadableStream; stderr: stream.ReadableStream; pid: number; kill(signal?: string): void; send(message: any, sendHandle: any): void; disconnect(): void; } export function spawn(command: string, args?: string[], options?: { cwd?: string; stdio?: any; custom?: any; env?: any; detached?: bool; }): ChildProcess; export function exec(command: string, options: { cwd?: string; stdio?: any; customFds?: any; env?: any; encoding?: string; timeout?: number; maxBuffer?: number; killSignal?: string; }, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) 
=>void ): ChildProcess; export function exec(command: string, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) =>void ): ChildProcess; export function execFile(file: string, args: string[], options: { cwd?: string; stdio?: any; customFds?: any; env?: any; encoding?: string; timeout?: number; maxBuffer?: string; killSignal?: string; }, callback: (error: Error, stdout: NodeBuffer, stderr: NodeBuffer) =>void ): ChildProcess; export function fork(modulePath: string, args?: string[], options?: { cwd?: string; env?: any; encoding?: string; }): ChildProcess; } declare module "url" { export interface Url { href?: string; protocol?: string; auth?: string; hostname?: string; port?: string; host?: string; pathname?: string; search?: string; query?: string; slashes?: bool; hash?: string; } export function parse(urlStr: string, parseQueryString? , slashesDenoteHost? ): Url; export function format(url: Url): string; export function resolve(from: string, to: string): string; } declare module "dns" { export function lookup(domain: string, family: number, callback: (err: Error, address: string, family: number) =>void ): string; export function lookup(domain: string, callback: (err: Error, address: string, family: number) =>void ): string; export function resolve(domain: string, rrtype: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve4(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolve6(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveMx(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveTxt(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveSrv(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveNs(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function resolveCname(domain: string, callback: (err: Error, addresses: string[]) =>void ): string[]; export function reverse(ip: string, callback: (err: Error, domains: string[]) =>void ): string[]; } declare module "net" { import stream = module("stream"); export interface NodeSocket extends stream.ReadWriteStream { // Extended base methods write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; connect(port: number, host?: string, connectionListener?: Function): void; connect(path: string, connectionListener?: Function): void; bufferSize: number; setEncoding(encoding?: string): void; write(data: any, encoding?: string, callback?: Function): void; end(data?: any, encoding?: string): void; destroy(): void; pause(): void; resume(): void; setTimeout(timeout: number, callback?: Function); void; setNoDelay(noDelay?: bool): void; setKeepAlive(enable?: bool, initialDelay?: number): void; address(): { port: number; family: string; address: string; }; remoteAddress: string; remotePort: number; bytesRead: number; bytesWritten: number; } export var Socket: { new (options?: { fd?: string; type?: string; allowHalfOpen?: bool; }): NodeSocket; }; export interface Server extends NodeSocket { listen(port: number, host?: string, backlog?: number, listeningListener?: Function): void; listen(path: string, listeningListener?: Function): void; listen(handle: any, listeningListener?: 
Function): void; close(callback?: Function): void; address(): { port: number; family: string; address: string; }; maxConnections: number; connections: number; } export function createServer(connectionListener?: (socket: NodeSocket) =>void ): Server; export function createServer(options?: { allowHalfOpen?: bool; }, connectionListener?: (socket: NodeSocket) =>void ): Server; export function connect(options: { allowHalfOpen?: bool; }, connectionListener?: Function): void; export function connect(port: number, host?: string, connectionListener?: Function): void; export function connect(path: string, connectionListener?: Function): void; export function createConnection(options: { allowHalfOpen?: bool; }, connectionListener?: Function): void; export function createConnection(port: number, host?: string, connectionListener?: Function): void; export function createConnection(path: string, connectionListener?: Function): void; export function isIP(input: string): number; export function isIPv4(input: string): bool; export function isIPv6(input: string): bool; } declare module "dgram" { import events = module("events"); export function createSocket(type: string, callback?: Function): Socket; interface Socket extends events.NodeEventEmitter { send(buf: NodeBuffer, offset: number, length: number, port: number, address: string, callback?: Function): void; bind(port: number, address?: string): void; close(): void; address: { address: string; family: string; port: number; }; setBroadcast(flag: bool): void; setMulticastTTL(ttl: number): void; setMulticastLoopback(flag: bool): void; addMembership(multicastAddress: string, multicastInterface?: string): void; dropMembership(multicastAddress: string, multicastInterface?: string): void; } } declare module "fs" { import stream = module("stream"); interface Stats { isFile(): bool; isDirectory(): bool; isBlockDevice(): bool; isCharacterDevice(): bool; isSymbolicLink(): bool; isFIFO(): bool; isSocket(): bool; dev: number; ino: number; mode: number; nlink: number; uid: number; gid: number; rdev: number; size: number; blksize: number; blocks: number; atime: Date; mtime: Date; ctime: Date; } interface FSWatcher { close(): void; } export interface ReadStream extends stream.ReadableStream { } export interface WriteStream extends stream.WritableStream { } export function rename(oldPath: string, newPath: string, callback?: Function): void; export function renameSync(oldPath: string, newPath: string): void; export function truncate(fd: number, len: number, callback?: Function): void; export function truncateSync(fd: number, len: number): void; export function chown(path: string, uid: number, gid: number, callback?: Function): void; export function chownSync(path: string, uid: number, gid: number): void; export function fchown(fd: number, uid: number, gid: number, callback?: Function): void; export function fchownSync(fd: number, uid: number, gid: number): void; export function lchown(path: string, uid: number, gid: number, callback?: Function): void; export function lchownSync(path: string, uid: number, gid: number): void; export function chmod(path: string, mode: number, callback?: Function): void; export function chmod(path: string, mode: string, callback?: Function): void; export function chmodSync(path: string, mode: number): void; export function chmodSync(path: string, mode: string): void; export function fchmod(fd: number, mode: number, callback?: Function): void; export function fchmod(fd: number, mode: string, callback?: Function): void; export function 
fchmodSync(fd: number, mode: number): void; export function fchmodSync(fd: number, mode: string): void; export function lchmod(path: string, mode: string, callback?: Function): void; export function lchmod(path: string, mode: number, callback?: Function): void; export function lchmodSync(path: string, mode: number): void; export function lchmodSync(path: string, mode: string): void; export function stat(path: string, callback?: (err: Error, stats: Stats) =>any): Stats; export function lstat(path: string, callback?: (err: Error, stats: Stats) =>any): Stats; export function fstat(fd: number, callback?: (err: Error, stats: Stats) =>any): Stats; export function statSync(path: string): Stats; export function lstatSync(path: string): Stats; export function fstatSync(fd: number): Stats; export function link(srcpath: string, dstpath: string, callback?: Function): void; export function linkSync(srcpath: string, dstpath: string): void; export function symlink(srcpath: string, dstpath: string, type?: string, callback?: Function): void; export function symlinkSync(srcpath: string, dstpath: string, type?: string): void; export function readlink(path: string, callback?: (err: Error, linkString: string) =>any): void; export function realpath(path: string, callback?: (err: Error, resolvedPath: string) =>any): void; export function realpath(path: string, cache: string, callback: (err: Error, resolvedPath: string) =>any): void; export function realpathSync(path: string, cache?: string): string; export function unlink(path: string, callback?: Function): void; export function unlinkSync(path: string): void; export function rmdir(path: string, callback?: Function): void; export function rmdirSync(path: string): void; export function mkdir(path: string, mode?: number, callback?: Function): void; export function mkdir(path: string, mode?: string, callback?: Function): void; export function mkdirSync(path: string, mode?: number): void; export function mkdirSync(path: string, mode?: string): void; export function readdir(path: string, callback?: (err: Error, files: string[]) => void): void; export function readdirSync(path: string): string[]; export function close(fd: number, callback?: Function): void; export function closeSync(fd: number): void; export function open(path: string, flags: string, mode?: string, callback?: (err: Error, fd: number) =>any): void; export function openSync(path: string, flags: string, mode?: string): number; export function utimes(path: string, atime: number, mtime: number, callback?: Function): void; export function utimesSync(path: string, atime: number, mtime: number): void; export function futimes(fd: number, atime: number, mtime: number, callback?: Function): void; export function futimesSync(fd: number, atime: number, mtime: number): void; export function fsync(fd: number, callback?: Function): void; export function fsyncSync(fd: number): void; export function write(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number, callback?: (err: Error, written: number, buffer: NodeBuffer) =>any): void; export function writeSync(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number): number; export function read(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number, callback?: (err: Error, bytesRead: number, buffer: NodeBuffer) => void): void; export function readSync(fd: number, buffer: NodeBuffer, offset: number, length: number, position: number): number; export function readFile(filename: string, encoding: 
string, callback: (err: Error, data: string) => void ): void; export function readFile(filename: string, callback: (err: Error, data: NodeBuffer) => void ): void; export function readFileSync(filename: string): NodeBuffer; export function readFileSync(filename: string, encoding: string): string; export function writeFile(filename: string, data: any, encoding?: string, callback?: Function): void; export function writeFileSync(filename: string, data: any, encoding?: string): void; export function appendFile(filename: string, data: any, encoding?: string, callback?: Function): void; export function appendFileSync(filename: string, data: any, encoding?: string): void; export function watchFile(filename: string, listener: { curr: Stats; prev: Stats; }): void; export function watchFile(filename: string, options: { persistent?: bool; interval?: number; }, listener: { curr: Stats; prev: Stats; }): void; export function unwatchFile(filename: string, listener?: Stats): void; export function watch(filename: string, options?: { persistent?: bool; }, listener?: (event: string, filename: string) =>any): FSWatcher; export function exists(path: string, callback?: (exists: bool) =>void ): void; export function existsSync(path: string): bool; export function createReadStream(path: string, options?: { flags?: string; encoding?: string; fd?: string; mode?: number; bufferSize?: number; }): ReadStream; export function createWriteStream(path: string, options?: { flags?: string; encoding?: string; string?: string; }): WriteStream; } declare module "path" { export function normalize(p: string): string; export function join(...paths: any[]): string; export function resolve(from: string, to: string): string; export function resolve(from: string, from2: string, to: string): string; export function resolve(from: string, from2: string, from3: string, to: string): string; export function resolve(from: string, from2: string, from3: string, from4: string, to: string): string; export function resolve(from: string, from2: string, from3: string, from4: string, from5: string, to: string): string; export function relative(from: string, to: string): string; export function dirname(p: string): string; export function basename(p: string, ext?: string): string; export function extname(p: string): string; export var sep: string; } declare module "string_decoder" { export interface NodeStringDecoder { write(buffer: NodeBuffer): string; detectIncompleteChar(buffer: NodeBuffer): number; } export var StringDecoder: { new (encoding: string): NodeStringDecoder; }; } declare module "tls" { import crypto = module("crypto"); import net = module("net"); import stream = module("stream"); var CLIENT_RENEG_LIMIT: number; var CLIENT_RENEG_WINDOW: number; export interface TlsOptions { pfx?: any; //string or buffer key?: any; //string or buffer passphrase?: string; cert?: any; ca?: any; //string or buffer crl?: any; //string or string array ciphers?: string; honorCipherOrder?: any; requestCert?: bool; rejectUnauthorized?: bool; NPNProtocols?: any; //array or Buffer; SNICallback?: (servername: string) => any; } export interface ConnectionOptions { host?: string; port?: number; socket?: net.NodeSocket; pfx?: any; //string | Buffer key?: any; //string | Buffer passphrase?: string; cert?: any; //string | Buffer ca?: any; //Array of string | Buffer rejectUnauthorized?: bool; NPNProtocols?: any; //Array of string | Buffer servername?: string; } export interface Server extends net.Server { // Extended base methods listen(port: number, host?: string, 
backlog?: number, listeningListener?: Function): void; listen(path: string, listeningListener?: Function): void; listen(handle: any, listeningListener?: Function): void; listen(port: number, host?: string, callback?: Function): void; close(): void; address(): { port: number; family: string; address: string; }; addContext(hostName: string, credentials: { key: string; cert: string; ca: string; }): void; maxConnections: number; connections: number; } export interface ClearTextStream extends stream.ReadWriteStream { authorized: bool; authorizationError: Error; getPeerCertificate(): any; getCipher: { name: string; version: string; }; address: { port: number; family: string; address: string; }; remoteAddress: string; remotePort: number; } export interface SecurePair { encrypted: any; cleartext: any; } export function createServer(options: TlsOptions, secureConnectionListener?: (cleartextStream: ClearTextStream) =>void ): Server; export function connect(options: TlsOptions, secureConnectionListener?: () =>void ): ClearTextStream; export function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () =>void ): ClearTextStream; export function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () =>void ): ClearTextStream; export function createSecurePair(credentials?: crypto.Credentials, isServer?: bool, requestCert?: bool, rejectUnauthorized?: bool): SecurePair; } declare module "crypto" { export interface CredentialDetails { pfx: string; key: string; passphrase: string; cert: string; ca: any; //string | string array crl: any; //string | string array ciphers: string; } export interface Credentials { context?: any; } export function createCredentials(details: CredentialDetails): Credentials; export function createHash(algorithm: string): Hash; export function createHmac(algorithm: string, key: string): Hmac; interface Hash { update(data: any, input_encoding?: string): void; digest(encoding?: string): string; } interface Hmac { update(data: any): void; digest(encoding?: string): void; } export function createCipher(algorithm: string, password: any): Cipher; export function createCipheriv(algorithm: string, key: any, iv: any): Cipher; interface Cipher { update(data: any, input_encoding?: string, output_encoding?: string): string; final(output_encoding?: string): string; setAutoPadding(auto_padding: bool): void; createDecipher(algorithm: string, password: any): Decipher; createDecipheriv(algorithm: string, key: any, iv: any): Decipher; } interface Decipher { update(data: any, input_encoding?: string, output_encoding?: string): void; final(output_encoding?: string): string; setAutoPadding(auto_padding: bool): void; } export function createSign(algorithm: string): Signer; interface Signer { update(data: any): void; sign(private_key: string, output_format: string): string; } export function createVerify(algorith: string): Verify; interface Verify { update(data: any): void; verify(object: string, signature: string, signature_format?: string): bool; } export function createDiffieHellman(prime_length: number): DiffieHellman; export function createDiffieHellman(prime: number, encoding?: string): DiffieHellman; interface DiffieHellman { generateKeys(encoding?: string): string; computeSecret(other_public_key: string, input_encoding?: string, output_encoding?: string): string; getPrime(encoding?: string): string; getGenerator(encoding: string): string; getPublicKey(encoding?: string): string; getPrivateKey(encoding?: string): string; 
setPublicKey(public_key: string, encoding?: string): void; setPrivateKey(public_key: string, encoding?: string): void; } export function getDiffieHellman(group_name: string): DiffieHellman; export function pbkdf2(password: string, salt: string, iterations: number, keylen: number, callback: (err: Error, derivedKey: string) => any): void; export function randomBytes(size: number, callback?: (err: Error, buf: NodeBuffer) =>void ); } declare module "stream" { import events = module("events"); export interface WritableStream extends events.NodeEventEmitter { writable: bool; write(str: string, encoding?: string, fd?: string): bool; write(buffer: NodeBuffer): bool; end(): void; end(str: string, enconding: string): void; end(buffer: NodeBuffer): void; destroy(): void; destroySoon(): void; } export interface ReadableStream extends events.NodeEventEmitter { readable: bool; setEncoding(encoding: string): void; pause(): void; resume(): void; destroy(): void; pipe(destination: WritableStream, options?: { end?: bool; }): void; } export interface ReadWriteStream extends ReadableStream, WritableStream { } } declare module "util" { export function format(format: any, ...param: any[]): string; export function debug(string: string): void; export function error(...param: any[]): void; export function puts(...param: any[]): void; export function print(...param: any[]): void; export function log(string: string): void; export function inspect(object: any, showHidden?: bool, depth?: number, color?: bool): void; export function isArray(object: any): bool; export function isRegExp(object: any): bool; export function isDate(object: any): bool; export function isError(object: any): bool; export function inherits(constructor: any, superConstructor: any): void; } declare module "assert" { export function fail(actual: any, expected: any, message: string, operator: string): void; export function assert(value: any, message: string): void; export function ok(value: any, message?: string): void; export function equal(actual: any, expected: any, message?: string): void; export function notEqual(actual: any, expected: any, message?: string): void; export function deepEqual(actual: any, expected: any, message?: string): void; export function notDeepEqual(acutal: any, expected: any, message?: string): void; export function strictEqual(actual: any, expected: any, message?: string): void; export function notStrictEqual(actual: any, expected: any, message?: string): void; export function throws(block: any, error?: any, messsage?: string): void; export function doesNotThrow(block: any, error?: any, messsage?: string): void; export function ifError(value: any): void; } declare module "tty" { import net = module("net"); export function isatty(fd: string): bool; export interface ReadStream extends net.NodeSocket { isRaw: bool; setRawMode(mode: bool): void; } export interface WriteStream extends net.NodeSocket { columns: number; rows: number; } } declare module "domain" { import events = module("events"); export interface Domain extends events.NodeEventEmitter { } export function create(): Domain; export function run(fn: Function): void; export function add(emitter: events.NodeEventEmitter): void; export function remove(emitter: events.NodeEventEmitter): void; export function bind(cb: (er: Error, data: any) =>any): any; export function intercept(cb: (data: any) => any): any; export function dispose(): void; }
apache-2.0
racker/omnibus
source/curl-7.21.2/docs/examples/cacertinmem.c
6090
/***************************************************************************** * _ _ ____ _ * Project ___| | | | _ \| | * / __| | | | |_) | | * | (__| |_| | _ <| |___ * \___|\___/|_| \_\_____| * * * Example using a "in core" PEM certificate to retrieve a https page. * Written by Theo Borm */ /* on a netBSD system with OPENSSL& LIBCURL installed from * pkgsrc (using default paths) this program can be compiled using: * gcc -I/usr/pkg/include -L/usr/pkg/lib -lcurl -Wl,-R/usr/pkg/lib -lssl * -lcrypto -lz -o curlcacerttest curlcacerttest.c * on other operating systems you may want to change paths to headers * and libraries */ #include <openssl/ssl.h> #include <curl/curl.h> #include <stdio.h> size_t writefunction( void *ptr, size_t size, size_t nmemb, void *stream) { fwrite(ptr,size,nmemb,stream); return(nmemb*size); } static CURLcode sslctx_function(CURL * curl, void * sslctx, void * parm) { X509_STORE * store; X509 * cert=NULL; BIO * bio; char * mypem = /* www.cacert.org */ "-----BEGIN CERTIFICATE-----\n"\ "MIIHPTCCBSWgAwIBAgIBADANBgkqhkiG9w0BAQQFADB5MRAwDgYDVQQKEwdSb290\n"\ "IENBMR4wHAYDVQQLExVodHRwOi8vd3d3LmNhY2VydC5vcmcxIjAgBgNVBAMTGUNB\n"\ "IENlcnQgU2lnbmluZyBBdXRob3JpdHkxITAfBgkqhkiG9w0BCQEWEnN1cHBvcnRA\n"\ "Y2FjZXJ0Lm9yZzAeFw0wMzAzMzAxMjI5NDlaFw0zMzAzMjkxMjI5NDlaMHkxEDAO\n"\ "BgNVBAoTB1Jvb3QgQ0ExHjAcBgNVBAsTFWh0dHA6Ly93d3cuY2FjZXJ0Lm9yZzEi\n"\ "MCAGA1UEAxMZQ0EgQ2VydCBTaWduaW5nIEF1dGhvcml0eTEhMB8GCSqGSIb3DQEJ\n"\ "ARYSc3VwcG9ydEBjYWNlcnQub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC\n"\ "CgKCAgEAziLA4kZ97DYoB1CW8qAzQIxL8TtmPzHlawI229Z89vGIj053NgVBlfkJ\n"\ "8BLPRoZzYLdufujAWGSuzbCtRRcMY/pnCujW0r8+55jE8Ez64AO7NV1sId6eINm6\n"\ "zWYyN3L69wj1x81YyY7nDl7qPv4coRQKFWyGhFtkZip6qUtTefWIonvuLwphK42y\n"\ "fk1WpRPs6tqSnqxEQR5YYGUFZvjARL3LlPdCfgv3ZWiYUQXw8wWRBB0bF4LsyFe7\n"\ "w2t6iPGwcswlWyCR7BYCEo8y6RcYSNDHBS4CMEK4JZwFaz+qOqfrU0j36NK2B5jc\n"\ "G8Y0f3/JHIJ6BVgrCFvzOKKrF11myZjXnhCLotLddJr3cQxyYN/Nb5gznZY0dj4k\n"\ "epKwDpUeb+agRThHqtdB7Uq3EvbXG4OKDy7YCbZZ16oE/9KTfWgu3YtLq1i6L43q\n"\ "laegw1SJpfvbi1EinbLDvhG+LJGGi5Z4rSDTii8aP8bQUWWHIbEZAWV/RRyH9XzQ\n"\ "QUxPKZgh/TMfdQwEUfoZd9vUFBzugcMd9Zi3aQaRIt0AUMyBMawSB3s42mhb5ivU\n"\ "fslfrejrckzzAeVLIL+aplfKkQABi6F1ITe1Yw1nPkZPcCBnzsXWWdsC4PDSy826\n"\ "YreQQejdIOQpvGQpQsgi3Hia/0PsmBsJUUtaWsJx8cTLc6nloQsCAwEAAaOCAc4w\n"\ "ggHKMB0GA1UdDgQWBBQWtTIb1Mfz4OaO873SsDrusjkY0TCBowYDVR0jBIGbMIGY\n"\ "gBQWtTIb1Mfz4OaO873SsDrusjkY0aF9pHsweTEQMA4GA1UEChMHUm9vdCBDQTEe\n"\ "MBwGA1UECxMVaHR0cDovL3d3dy5jYWNlcnQub3JnMSIwIAYDVQQDExlDQSBDZXJ0\n"\ "IFNpZ25pbmcgQXV0aG9yaXR5MSEwHwYJKoZIhvcNAQkBFhJzdXBwb3J0QGNhY2Vy\n"\ "dC5vcmeCAQAwDwYDVR0TAQH/BAUwAwEB/zAyBgNVHR8EKzApMCegJaAjhiFodHRw\n"\ "czovL3d3dy5jYWNlcnQub3JnL3Jldm9rZS5jcmwwMAYJYIZIAYb4QgEEBCMWIWh0\n"\ "dHBzOi8vd3d3LmNhY2VydC5vcmcvcmV2b2tlLmNybDA0BglghkgBhvhCAQgEJxYl\n"\ "aHR0cDovL3d3dy5jYWNlcnQub3JnL2luZGV4LnBocD9pZD0xMDBWBglghkgBhvhC\n"\ "AQ0ESRZHVG8gZ2V0IHlvdXIgb3duIGNlcnRpZmljYXRlIGZvciBGUkVFIGhlYWQg\n"\ "b3ZlciB0byBodHRwOi8vd3d3LmNhY2VydC5vcmcwDQYJKoZIhvcNAQEEBQADggIB\n"\ "ACjH7pyCArpcgBLKNQodgW+JapnM8mgPf6fhjViVPr3yBsOQWqy1YPaZQwGjiHCc\n"\ "nWKdpIevZ1gNMDY75q1I08t0AoZxPuIrA2jxNGJARjtT6ij0rPtmlVOKTV39O9lg\n"\ "18p5aTuxZZKmxoGCXJzN600BiqXfEVWqFcofN8CCmHBh22p8lqOOLlQ+TyGpkO/c\n"\ "gr/c6EWtTZBzCDyUZbAEmXZ/4rzCahWqlwQ3JNgelE5tDlG+1sSPypZt90Pf6DBl\n"\ "Jzt7u0NDY8RD97LsaMzhGY4i+5jhe1o+ATc7iwiwovOVThrLm82asduycPAtStvY\n"\ "sONvRUgzEv/+PDIqVPfE94rwiCPCR/5kenHA0R6mY7AHfqQv0wGP3J8rtsYIqQ+T\n"\ "SCX8Ev2fQtzzxD72V7DX3WnRBnc0CkvSyqD/HMaMyRa+xMwyN2hzXwj7UfdJUzYF\n"\ "CpUCTPJ5GhD22Dp1nPMd8aINcGeGG7MW9S/lpOt5hvk9C8JzC6WZrG/8Z7jlLwum\n"\ 
"GCSNe9FINSkYQKyTYOGWhlC0elnYjyELn8+CkcY7v2vcB5G5l1YjqrZslMZIBjzk\n"\ "zk6q5PYvCdxTby78dOs6Y5nCpqyJvKeyRKANihDjbPIky/qbn3BHLt4Ui9SyIAmW\n"\ "omTxJBzcoTWcFbLUvFUufQb1nA5V9FrWk9p2rSVzTMVD\n"\ "-----END CERTIFICATE-----\n"; /* get a BIO */ bio=BIO_new_mem_buf(mypem, -1); /* use it to read the PEM formatted certificate from memory into an X509 * structure that SSL can use */ PEM_read_bio_X509(bio, &cert, 0, NULL); if (cert == NULL) printf("PEM_read_bio_X509 failed...\n"); /* get a pointer to the X509 certificate store (which may be empty!) */ store=SSL_CTX_get_cert_store((SSL_CTX *)sslctx); /* add our certificate to this store */ if (X509_STORE_add_cert(store, cert)==0) printf("error adding certificate\n"); /* all set to go */ return CURLE_OK ; } int main(void) { CURL * ch; CURLcode rv; rv=curl_global_init(CURL_GLOBAL_ALL); ch=curl_easy_init(); rv=curl_easy_setopt(ch,CURLOPT_VERBOSE, 0L); rv=curl_easy_setopt(ch,CURLOPT_HEADER, 0L); rv=curl_easy_setopt(ch,CURLOPT_NOPROGRESS, 1L); rv=curl_easy_setopt(ch,CURLOPT_NOSIGNAL, 1L); rv=curl_easy_setopt(ch,CURLOPT_WRITEFUNCTION, *writefunction); rv=curl_easy_setopt(ch,CURLOPT_WRITEDATA, stdout); rv=curl_easy_setopt(ch,CURLOPT_HEADERFUNCTION, *writefunction); rv=curl_easy_setopt(ch,CURLOPT_WRITEHEADER, stderr); rv=curl_easy_setopt(ch,CURLOPT_SSLCERTTYPE,"PEM"); rv=curl_easy_setopt(ch,CURLOPT_SSL_VERIFYPEER,1L); rv=curl_easy_setopt(ch, CURLOPT_URL, "https://www.example.com/"); /* first try: retrieve page without cacerts' certificate -> will fail */ rv=curl_easy_perform(ch); if (rv==CURLE_OK) printf("*** transfer succeeded ***\n"); else printf("*** transfer failed ***\n"); /* second try: retrieve page using cacerts' certificate -> will succeed * load the certificate by installing a function doing the nescessary * "modifications" to the SSL CONTEXT just before link init */ rv=curl_easy_setopt(ch,CURLOPT_SSL_CTX_FUNCTION, *sslctx_function); rv=curl_easy_perform(ch); if (rv==CURLE_OK) printf("*** transfer succeeded ***\n"); else printf("*** transfer failed ***\n"); curl_easy_cleanup(ch); curl_global_cleanup(); return rv; }
apache-2.0
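The curl example above injects a PEM certificate held in memory into the TLS trust store instead of reading it from a file. The sketch below shows a comparable approach on the JVM using only standard JDK classes; it is an illustrative analogue, not a translation of the libcurl code, and the CA_PEM constant is a placeholder for the actual certificate text.

import java.io.ByteArrayInputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.security.KeyStore;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;

public class InMemoryCaCert {
    // Placeholder: paste the PEM text of the CA certificate here.
    private static final String CA_PEM =
        "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n";

    public static void main(String[] args) throws Exception {
        // Parse the in-memory PEM into an X509Certificate.
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        X509Certificate ca = (X509Certificate) cf.generateCertificate(
            new ByteArrayInputStream(CA_PEM.getBytes(StandardCharsets.US_ASCII)));

        // Put it into an empty, in-memory KeyStore used purely as a trust store.
        KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
        trustStore.load(null, null);
        trustStore.setCertificateEntry("ca", ca);

        // Build an SSLContext that trusts only that certificate.
        TrustManagerFactory tmf =
            TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(trustStore);
        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(null, tmf.getTrustManagers(), null);

        // Use it for an HTTPS request (same target URL as the curl example).
        HttpsURLConnection conn =
            (HttpsURLConnection) new URL("https://www.example.com/").openConnection();
        conn.setSSLSocketFactory(ctx.getSocketFactory());
        System.out.println("HTTP status: " + conn.getResponseCode());
        conn.disconnect();
    }
}

Unlike the curl snippet, which enables CURLOPT_SSL_VERIFYPEER explicitly, HttpsURLConnection verifies the peer certificate and hostname by default once the custom SSLContext is installed.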
apache/manifoldcf
connectors/jcifs/connector/src/main/java/org/apache/manifoldcf/crawler/connectors/sharedrive/SharedDriveConnector.java
211260
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.crawler.connectors.sharedrive; import jcifs.ACE; import jcifs.smb.NtlmPasswordAuthenticator; import jcifs.smb.SmbException; import jcifs.smb.SmbFile; import jcifs.smb.SmbFileFilter; import jcifs.context.SingletonContext; import org.apache.manifoldcf.agents.interfaces.RepositoryDocument; import org.apache.manifoldcf.agents.interfaces.ServiceInterruption; import org.apache.manifoldcf.connectorcommon.extmimemap.ExtensionMimeMap; import org.apache.manifoldcf.connectorcommon.interfaces.IKeystoreManager; import org.apache.manifoldcf.connectorcommon.interfaces.KeystoreManagerFactory; import org.apache.manifoldcf.core.common.DateParser; import org.apache.manifoldcf.core.interfaces.ConfigParams; import org.apache.manifoldcf.core.interfaces.Configuration; import org.apache.manifoldcf.core.interfaces.ConfigurationNode; import org.apache.manifoldcf.core.interfaces.IHTTPOutput; import org.apache.manifoldcf.core.interfaces.IPostParameters; import org.apache.manifoldcf.core.interfaces.IThreadContext; import org.apache.manifoldcf.core.interfaces.LockManagerFactory; import org.apache.manifoldcf.core.interfaces.ManifoldCFException; import org.apache.manifoldcf.core.interfaces.Specification; import org.apache.manifoldcf.core.interfaces.SpecificationNode; import org.apache.manifoldcf.core.util.URLEncoder; import org.apache.manifoldcf.crawler.interfaces.IExistingVersions; import org.apache.manifoldcf.crawler.interfaces.IFingerprintActivity; import org.apache.manifoldcf.crawler.interfaces.IProcessActivity; import org.apache.manifoldcf.crawler.interfaces.ISeedingActivity; import org.apache.manifoldcf.crawler.system.Logging; import org.apache.manifoldcf.crawler.system.ManifoldCF; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.net.MalformedURLException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; /** This is the "repository connector" for a smb/cifs shared drive file system. It's a relative of the share crawler, and should have * comparable basic functionality. 
*/ public class SharedDriveConnector extends org.apache.manifoldcf.crawler.connectors.BaseRepositoryConnector { public static final String _rcsid = "@(#)$Id: SharedDriveConnector.java 996524 2010-09-13 13:38:01Z kwright $"; // Activities we log public final static String ACTIVITY_ACCESS = "access"; // These are the share connector nodes and attributes in the document specification public static final String NODE_STARTPOINT = "startpoint"; public static final String NODE_INCLUDE = "include"; public static final String NODE_EXCLUDE = "exclude"; public static final String NODE_PATHNAMEATTRIBUTE = "pathnameattribute"; public static final String NODE_PATHMAP = "pathmap"; public static final String NODE_FILEMAP = "filemap"; public static final String NODE_URIMAP = "urimap"; public static final String NODE_SHAREACCESS = "shareaccess"; public static final String NODE_SHARESECURITY = "sharesecurity"; public static final String NODE_PARENTFOLDERACCESS = "parentfolderaccess"; public static final String NODE_PARENTFOLDERSECURITY = "parentfoldersecurity"; public static final String NODE_MAXLENGTH = "maxlength"; public static final String NODE_ACCESS = "access"; public static final String NODE_SECURITY = "security"; public static final String ATTRIBUTE_PATH = "path"; public static final String ATTRIBUTE_TYPE = "type"; public static final String ATTRIBUTE_INDEXABLE = "indexable"; public static final String ATTRIBUTE_FILESPEC = "filespec"; public static final String ATTRIBUTE_VALUE = "value"; public static final String ATTRIBUTE_TOKEN = "token"; public static final String ATTRIBUTE_MATCH = "match"; public static final String ATTRIBUTE_REPLACE = "replace"; public static final String VALUE_DIRECTORY = "directory"; public static final String VALUE_FILE = "file"; // Properties this connector needs (that can only be configured once) public final static String PROPERTY_JCIFS_USE_NTLM_V1 = "org.apache.manifoldcf.crawler.connectors.jcifs.usentlmv1"; // Static initialization of various system properties. This hopefully takes place // before jcifs is loaded. static { if (System.getProperty("jcifs.resolveOrder") == null) { System.setProperty("jcifs.resolveOrder","LMHOSTS,DNS,WINS"); } if (System.getProperty("jcifs.smb.client.soTimeout") == null) { System.setProperty("jcifs.smb.client.soTimeout","150000"); } if (System.getProperty("jcifs.smb.client.responseTimeout") == null) { System.setProperty("jcifs.smb.client.responseTimeout","120000"); } if (System.getProperty("jcifs.smb.client.minVersion") == null) { System.setProperty("jcifs.smb.client.minVersion","SMB1"); } if (System.getProperty("jcifs.smb.client.maxVersion") == null) { System.setProperty("jcifs.smb.client.maxVersion","SMB210"); } if (System.getProperty("jcifs.traceResources") == null) { System.setProperty("jcifs.traceResources","true"); } if (System.getProperty("jcifs.smb.client.ipcSigningEnforced") == null) { System.setProperty("jcifs.smb.client.ipcSigningEnforced","true"); } // Don't change these!! System.setProperty("jcifs.smb.client.listCount","20"); System.setProperty("jcifs.smb.client.dfs.strictView","true"); } private String smbconnectionPath = null; private String server = null; private String domain = null; private String username = null; private String password = null; private boolean useSIDs = true; private String binName = null; private NtlmPasswordAuthenticator pa; /** Deny access token for default authority */ private final static String defaultAuthorityDenyToken = GLOBAL_DENY_TOKEN; /** Constructor. 
*/ public SharedDriveConnector() { } /** Set thread context. * Use the opportunity to set the system properties we'll need. */ @Override public void setThreadContext(IThreadContext threadContext) throws ManifoldCFException { super.setThreadContext(threadContext); // We need to know whether to operate in NTLMv2 mode, or in NTLM mode. We do this before jcifs called the first time. boolean useV1 = LockManagerFactory.getBooleanProperty(threadContext, PROPERTY_JCIFS_USE_NTLM_V1, false); if (!useV1) { System.setProperty("jcifs.smb.lmCompatibility","3"); System.setProperty("jcifs.smb.client.useExtendedSecurity","true"); } else { System.setProperty("jcifs.smb.lmCompatibility","0"); System.setProperty("jcifs.smb.client.useExtendedSecurity","false"); } } /** Establish a "session". In the case of the jcifs connector, this just builds the appropriate smbconnectionPath string, and does the necessary checks. */ protected void getSession() throws ManifoldCFException { if (smbconnectionPath == null) { // Get the server if (server == null || server.length() == 0) throw new ManifoldCFException("Missing parameter '"+SharedDriveParameters.server+"'"); // make the smb connection to the server String authenticationString; if (domain == null || domain.length() == 0) domain = null; if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("Connecting to: " + "smb://" + ((domain==null)?"":domain)+";"+username+":<password>@" + server + "/"); try { // use NtlmPasswordAuthentication so that we can reuse credential for DFS support pa = new NtlmPasswordAuthenticator(domain,username,password); SmbFile smbconnection = new SmbFile("smb://" + server + "/",SingletonContext.getInstance().withCredentials(pa)); smbconnectionPath = getFileCanonicalPath(smbconnection); } catch (MalformedURLException e) { Logging.connectors.error("Unable to access SMB/CIFS share: "+"smb://" + ((domain==null)?"":domain)+";"+username+":<password>@"+ server + "/\n" + e); throw new ManifoldCFException("Unable to access SMB/CIFS share: "+server, e, ManifoldCFException.REPOSITORY_CONNECTION_ERROR); } } } /** Return the list of activities that this connector supports (i.e. writes into the log). *@return the list. */ @Override public String[] getActivitiesList() { return new String[]{ACTIVITY_ACCESS}; } /** Close the connection. Call this before discarding the repository connector. */ @Override public void disconnect() throws ManifoldCFException { server = null; domain = null; username = null; password = null; pa = null; smbconnectionPath = null; binName = null; super.disconnect(); } /** Connect. *@param configParameters is the set of configuration parameters, which * in this case describe the root directory. */ @Override public void connect(ConfigParams configParameters) { super.connect(configParameters); // Get the server server = configParameters.getParameter(SharedDriveParameters.server); domain = configParameters.getParameter(SharedDriveParameters.domain); username = configParameters.getParameter(SharedDriveParameters.username); if (username == null) username = ""; password = configParameters.getObfuscatedParameter(SharedDriveParameters.password); if (password == null) password = ""; String useSIDsString = configParameters.getParameter(SharedDriveParameters.useSIDs); if (useSIDsString == null) useSIDsString = "true"; useSIDs = "true".equals(useSIDsString); String configBinName = configParameters.getParameter(SharedDriveParameters.binName); binName = (configBinName == null || configBinName.length() == 0) ? 
server : configBinName; if (binName.length() > 255) // trim the bin name to fit in the database binName = binName.substring(0, 255); // Rejigger the username/domain to be sure we PASS in a domain and we do not include the domain attached to the user! // (This became essential at jcifs 1.3.0) int index = username.indexOf("@"); if (index != -1) { // Strip off the domain from the user String userDomain = username.substring(index+1); if (domain == null || domain.length() == 0) domain = userDomain; username = username.substring(0,index); } index = username.indexOf("\\"); if (index != -1) { String userDomain = username.substring(0,index); if (domain == null || domain.length() == 0) domain = userDomain; username = username.substring(index+1); } } /** Get the bin name string for a document identifier. The bin name describes the queue to which the * document will be assigned for throttling purposes. Throttling controls the rate at which items in a * given queue are fetched; it does not say anything about the overall fetch rate, which may operate on * multiple queues or bins. * For example, if you implement a web crawler, a good choice of bin name would be the server name, since * that is likely to correspond to a real resource that will need real throttle protection. *@param documentIdentifier is the document identifier. *@return the bin name. */ @Override public String[] getBinNames(String documentIdentifier) { return new String[]{binName}; } /** * Convert a document identifier to a URI. The URI is the URI that will be * the unique key from the search index, and will be presented to the user * as part of the search results. * * @param documentIdentifier * is the document identifier. * @return the document uri. */ protected static String convertToURI(String documentIdentifier, MatchMap fileMap, MatchMap uriMap) throws ManifoldCFException { // // Note well: This MUST be a legal URI!! // e.g. // smb://10.33.65.1/Test Folder/PPT Docs/Dearman_University of Texas 20030220.ppt // file:////10.33.65.1/Test Folder/PPT Docs/Dearman_University of Texas 20030220.ppt String serverPath = documentIdentifier.substring("smb://".length()); // The first mapping converts one server path to another. // If not present, we leave the original path alone. serverPath = fileMap.translate(serverPath); // The second mapping, if present, creates a URI, using certain rules. If not present, the old standard IRI conversion is done. if (uriMap.getMatchCount() != 0) { // URI translation. // First step is to perform utf-8 translation and %-encoding. byte[] byteArray = serverPath.getBytes(StandardCharsets.UTF_8); StringBuilder output = new StringBuilder(); int i = 0; while (i < byteArray.length) { int x = ((int)byteArray[i++]) & 0xff; if (x >= 0x80 || (x >= 0 && x <= ' ') || x == ':' || x == '?' || x == '^' || x == '{' || x == '}' || x == '%' || x == '#' || x == '`' || x == ';' || x == '@' || x == '&' || x == '=' || x == '+' || x == '$' || x == ',') { output.append('%'); String hexValue = Integer.toHexString((int)x).toUpperCase(Locale.ROOT); if (hexValue.length() == 1) output.append('0'); output.append(hexValue); } else output.append((char)x); } // Second step is to perform the mapping. This strips off the server name and glues on the protocol and web server name, most likely. return uriMap.translate(output.toString()); } else { // Convert to a URI that begins with file://///. 
This used to be done according to the following IE7 specification: // http://blogs.msdn.com/ie/archive/2006/12/06/file-uris-in-windows.aspx // However, two factors required change. First, IE8 decided to no longer adhere to the same specification as IE7. // Second, the ingestion API does not (and will never) accept anything other than a well-formed URI. Thus, file // specifications are ingested in a canonical form (which happens to be pretty much what this connector used prior to // 3.9.0), and the various clients are responsible for converting that form into something the browser will accept. StringBuilder output = new StringBuilder(); int i = 0; while (i < serverPath.length()) { int pos = serverPath.indexOf("/",i); if (pos == -1) pos = serverPath.length(); String piece = serverPath.substring(i,pos); // Note well. This does *not* %-encode some characters such as '#', which are legal in URI's but have special meanings! String replacePiece = URLEncoder.encode(piece); // Convert the +'s back to %20's int j = 0; while (j < replacePiece.length()) { int plusPos = replacePiece.indexOf("+",j); if (plusPos == -1) plusPos = replacePiece.length(); output.append(replacePiece.substring(j,plusPos)); if (plusPos < replacePiece.length()) { output.append("%20"); plusPos++; } j = plusPos; } if (pos < serverPath.length()) { output.append("/"); pos++; } i = pos; } return "file://///"+output.toString(); } } /** Request arbitrary connector information. * This method is called directly from the API in order to allow API users to perform any one of several connector-specific * queries. *@param output is the response object, to be filled in by this method. *@param command is the command, which is taken directly from the API request. *@return true if the resource is found, false if not. In either case, output may be filled in. */ @Override public boolean requestInfo(Configuration output, String command) throws ManifoldCFException { if (command.startsWith("folders/")) { String parentFolder = command.substring("folders/".length()); try { String[] folders = getChildFolderNames(parentFolder); int i = 0; while (i < folders.length) { String folder = folders[i++]; ConfigurationNode node = new ConfigurationNode("folder"); node.setValue(folder); output.addChild(output.getChildCount(),node); } } catch (ManifoldCFException e) { ManifoldCF.createErrorNode(output,e); } } else if (command.startsWith("folder/")) { String folder = command.substring("folder/".length()); try { String canonicalFolder = validateFolderName(folder); if (canonicalFolder != null) { ConfigurationNode node = new ConfigurationNode("folder"); node.setValue(canonicalFolder); output.addChild(output.getChildCount(),node); } } catch (ManifoldCFException e) { ManifoldCF.createErrorNode(output,e); } } else return super.requestInfo(output,command); return true; } /** Queue "seed" documents. Seed documents are the starting places for crawling activity. Documents * are seeded when this method calls appropriate methods in the passed in ISeedingActivity object. * * This method can choose to find repository changes that happen only during the specified time interval. * The seeds recorded by this method will be viewed by the framework based on what the * getConnectorModel() method returns. * * It is not a big problem if the connector chooses to create more seeds than are * strictly necessary; it is merely a question of overall work required. * * The end time and seeding version string passed to this method may be interpreted for greatest efficiency. 
* For continuous crawling jobs, this method will * be called once, when the job starts, and at various periodic intervals as the job executes. * * When a job's specification is changed, the framework automatically resets the seeding version string to null. The * seeding version string may also be set to null on each job run, depending on the connector model returned by * getConnectorModel(). * * Note that it is always ok to send MORE documents rather than less to this method. * The connector will be connected before this method can be called. *@param activities is the interface this method should use to perform whatever framework actions are desired. *@param spec is a document specification (that comes from the job). *@param seedTime is the end of the time range of documents to consider, exclusive. *@param lastSeedVersion is the last seeding version string for this job, or null if the job has no previous seeding version string. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@return an updated seeding version string, to be stored with the job. */ @Override public String addSeedDocuments(ISeedingActivity activities, Specification spec, String lastSeedVersion, long seedTime, int jobMode) throws ManifoldCFException, ServiceInterruption { getSession(); try { for (int i = 0; i < spec.getChildCount(); i++) { SpecificationNode n = spec.getChild(i); if (n.getType().equals(NODE_STARTPOINT)) { // The id returned MUST be in canonical form!!! String seed = mapToIdentifier(n.getAttributeValue(ATTRIBUTE_PATH)); if (Logging.connectors.isDebugEnabled()) { Logging.connectors.debug("Seed = '"+seed+"'"); } activities.addSeedDocument(seed); } } } catch (MalformedURLException e) { throw new ManifoldCFException("Could not get a canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Could not get a canonical path: "+e.getMessage(),e); } return ""; } /** Process a set of documents. * This is the method that should cause each document to be fetched, processed, and the results either added * to the queue of documents for the current job, and/or entered into the incremental ingestion manager. * The document specification allows this class to filter what is done based on the job. * The connector will be connected before this method can be called. *@param documentIdentifiers is the set of document identifiers to process. *@param statuses are the currently-stored document versions for each document in the set of document identifiers * passed in above. *@param activities is the interface this method should use to queue up new document references * and ingest documents. *@param jobMode is an integer describing how the job is being run, whether continuous or once-only. *@param usesDefaultAuthority will be true only if the authority in use for these documents is the default one. */ @Override public void processDocuments(String[] documentIdentifiers, IExistingVersions statuses, Specification spec, IProcessActivity activities, int jobMode, boolean usesDefaultAuthority) throws ManifoldCFException, ServiceInterruption { // Read the forced acls. A null return indicates that security is disabled!!! // A zero-length return indicates that the native acls should be used. // All of this is germane to how we ingest the document, so we need to note it in // the version string completely. 
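// Three separate layers of security are gathered below and later attached to the repository document as
// distinct security types: share-level ACLs, parent-folder ACLs, and file-level ACLs.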
String[] acls = getForcedAcls(spec); String[] shareAcls = getForcedShareAcls(spec); String[] parentFolderAcls = getForcedParentFolderAcls(spec); String pathAttributeName = null; MatchMap matchMap = new MatchMap(); MatchMap fileMap = new MatchMap(); MatchMap uriMap = new MatchMap(); int i = 0; while (i < spec.getChildCount()) { SpecificationNode n = spec.getChild(i++); if (n.getType().equals(NODE_PATHNAMEATTRIBUTE)) pathAttributeName = n.getAttributeValue(ATTRIBUTE_VALUE); else if (n.getType().equals(NODE_PATHMAP)) { // Path mapping info also needs to be looked at, because it affects what is // ingested. String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } else if (n.getType().equals(NODE_FILEMAP)) { String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } else if (n.getType().equals(NODE_URIMAP)) { String pathMatch = n.getAttributeValue(ATTRIBUTE_MATCH); String pathReplace = n.getAttributeValue(ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } for (String documentIdentifier : documentIdentifiers) { getSession(); if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Processing '"+documentIdentifier+"'"); String versionString; SmbFile file; String ingestionURI = null; String pathAttributeValue = null; String[] shareAllow = null; String[] shareDeny = null; boolean shareSecurityOn = false; String[] parentAllow = null; String[] parentDeny = null; boolean parentSecurityOn = false; String[] documentAllow = null; String[] documentDeny = null; boolean documentSecurityOn = false; // Common info we really need to fetch only once long fileLength = 0L; long lastModified = 0L; boolean fileExists = false; boolean fileIsDirectory = false; try { file = new SmbFile(documentIdentifier,SingletonContext.getInstance().withCredentials(pa)); fileExists = fileExists(file); // File has to exist AND have a non-null canonical path to be readable. If the canonical path is // null, it means that the windows permissions are not right and directory/file is not readable!!! String newPath = getFileCanonicalPath(file); // We MUST check the specification here, otherwise a recrawl may not delete what it's supposed to! if (fileExists && newPath != null) { fileIsDirectory = fileIsDirectory(file); if (checkInclude(fileIsDirectory,newPath,spec)) { if (fileIsDirectory) { // Hmm, this is not correct; version string should be empty for windows directories, since // they are not hierarchical in modified date propagation. // It's a directory. The version ID will be the // last modified date. //long lastModified = fileLastModified(file); //versionString = new Long(lastModified).toString(); versionString = ""; } else { fileLength = fileLength(file); if (checkIncludeFile(fileLength,newPath,spec,activities)) { // It's a file of acceptable length. // The ability to get ACLs, list files, and an inputstream under DFS all work now. // The SmbFile for parentFolder acls. 
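// The parent folder is opened with the same authenticator as the file itself; as noted in getSession(),
// the credential is reused so that DFS paths can be resolved.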
SmbFile parentFolder = new SmbFile(file.getParent(),SingletonContext.getInstance().withCredentials(pa)); // Compute the security information String[] modelArray = new String[0]; List<String> allowList = new ArrayList<String>(); List<String> denyList = new ArrayList<String>(); shareSecurityOn = getFileShareSecuritySet(allowList, denyList, file, shareAcls); shareAllow = allowList.toArray(modelArray); shareDeny = denyList.toArray(modelArray); allowList.clear(); denyList.clear(); parentSecurityOn = getFileSecuritySet(allowList, denyList, parentFolder, parentFolderAcls); parentAllow = allowList.toArray(modelArray); parentDeny = denyList.toArray(modelArray); allowList.clear(); denyList.clear(); documentSecurityOn = getFileSecuritySet(allowList, denyList, file, acls); documentAllow = allowList.toArray(modelArray); documentDeny = denyList.toArray(modelArray); // This is stuff we need for computing the version string AND for indexing lastModified = fileLastModified(file); // The format of this string changed on 11/8/2006 to be comformant with the standard way // acls and metadata descriptions are being stuffed into the version string across connectors. // The format of this string changed again on 7/3/2009 to permit the ingestion uri/iri to be included. // This was to support filename/uri mapping functionality. StringBuilder sb = new StringBuilder(); addSecuritySet(sb,shareSecurityOn,shareAllow,shareDeny); addSecuritySet(sb,parentSecurityOn,parentAllow,parentDeny); addSecuritySet(sb,documentSecurityOn,documentAllow,documentDeny); // Include the path attribute name and value in the parseable area. if (pathAttributeName != null) { sb.append('+'); pack(sb,pathAttributeName,'+'); // Calculate path string; we'll include that wholesale in the version pathAttributeValue = documentIdentifier; // 3/13/2008 // In looking at what comes into the path metadata attribute by default, and cogitating a bit, I've concluded that // the smb:// and the server/domain name at the start of the path are just plain old noise, and should be stripped. // This changes a behavior that has been around for a while, so there is a risk, but a quick back-and-forth with the // SE's leads me to believe that this is safe. if (pathAttributeValue.startsWith("smb://")) { int index = pathAttributeValue.indexOf("/","smb://".length()); if (index == -1) index = pathAttributeValue.length(); pathAttributeValue = pathAttributeValue.substring(index); } // Now, translate pathAttributeValue = matchMap.translate(pathAttributeValue); pack(sb,pathAttributeValue,'+'); } else sb.append('-'); // Calculate the ingestion IRI/URI, and include that in the parseable area. ingestionURI = convertToURI(documentIdentifier,fileMap,uriMap); pack(sb,ingestionURI,'+'); // The stuff from here on down is non-parseable. sb.append(new Long(lastModified).toString()).append(":") .append(new Long(fileLength).toString()); // Also include the specification-based answer for the question of whether fingerprinting is // going to be done. Although we may not consider this to truly be "version" information, the // specification does affect whether anything is ingested or not, so it really is. The alternative // is to fingerprint right here, in the version part of the world, but that's got a performance // downside, because it means that we'd have to suck over pretty much everything just to determine // what we wanted to ingest. 
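// The trailing character records whether fingerprinting matters for this file: 'I' when the include/exclude
// outcome is the same either way, otherwise 'Y' or 'N' giving the outcome under the assumption that the
// file turns out to be indexable.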
boolean ifIndexable = wouldFileBeIncluded(newPath,spec,true); boolean ifNotIndexable = wouldFileBeIncluded(newPath,spec,false); if (ifIndexable == ifNotIndexable) sb.append("I"); else sb.append(ifIndexable?"Y":"N"); versionString = sb.toString(); } else { activities.deleteDocument(documentIdentifier); continue; } } } else { activities.deleteDocument(documentIdentifier); continue; } } else { activities.deleteDocument(documentIdentifier); continue; } } catch (jcifs.smb.SmbAuthException e) { Logging.connectors.warn("JCIFS: Authorization exception reading version information for "+documentIdentifier+" - skipping"); if(e.getMessage().equals("Logon failure: unknown user name or bad password.")) throw new ManifoldCFException( "SmbAuthException thrown: " + e.getMessage(), e ); else { activities.deleteDocument(documentIdentifier ); continue; } } catch (MalformedURLException mue) { Logging.connectors.error("JCIFS: MalformedURLException thrown: "+mue.getMessage(),mue); throw new ManifoldCFException("MalformedURLException thrown: "+mue.getMessage(),mue); } catch (SmbException se) { processSMBException(se,documentIdentifier,"getting document version","fetching share security"); activities.deleteDocument(documentIdentifier); continue; } catch (java.net.SocketTimeoutException e) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: Socket timeout reading version information for document "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: I/O error reading version information for document "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } if (versionString.length() == 0 || activities.checkDocumentNeedsReindexing(documentIdentifier,versionString)) { String errorCode = null; String errorDesc = null; Long fileLengthLong = null; long startFetchTime = System.currentTimeMillis(); try { byte[] transferBuffer = null; try { if (fileExists) { if (fileIsDirectory) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: '"+documentIdentifier+"' is a directory"); // Queue up stuff for directory // DFS special support no longer needed, because JCifs now does the right thing. // This is the string we replace in the child canonical paths. // String matchPrefix = ""; // This is what we replace it with, to get back to a DFS path. // String matchReplace = ""; // DFS resolved. // Use a filter to actually do the work here. This prevents large arrays from being // created when there are big directories. ProcessDocumentsFilter filter = new ProcessDocumentsFilter(activities,spec); fileListFiles(file,filter); filter.checkAndThrow(); } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: '"+documentIdentifier+"' is a file"); // We've already avoided queuing documents that we // don't want, based on file specifications. // We still need to check based on file data. // DFS support is now implicit in JCifs. 
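// Gate order for a file: require a canonical path and non-hidden status, then let the output connector
// veto by URL, mime type, and modification date before any content is fetched.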
String fileName = getFileCanonicalPath(file); if (fileName != null && !file.isHidden()) { String uri = ingestionURI; String fileNameString = file.getName(); Date lastModifiedDate = new Date(lastModified); Date creationDate = new Date(file.createTime()); Long originalLength = new Long(fileLength); String contentType = mapExtensionToMimeType(fileNameString); if (!activities.checkURLIndexable(uri)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept URL ('"+uri+"')"); errorCode = activities.EXCLUDED_URL; errorDesc = "Rejected due to URL ('"+uri+"')"; activities.noDocument(documentIdentifier,versionString); continue; } if (!activities.checkMimeTypeIndexable(contentType)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept content type ('"+contentType+"')"); errorCode = activities.EXCLUDED_MIMETYPE; errorDesc = "Rejected due to mime type ("+contentType+")"; activities.noDocument(documentIdentifier,versionString); continue; } if (!activities.checkDateIndexable(lastModifiedDate)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept date ("+lastModifiedDate+")"); errorCode = activities.EXCLUDED_DATE; errorDesc = "Rejected due to date ("+lastModifiedDate+")"; activities.noDocument(documentIdentifier,versionString); continue; } // Initialize repository document with common stuff, and find the URI RepositoryDocument rd = new RepositoryDocument(); //If using the lastAccess patched/Google version of jcifs then this can be uncommented //Date lastAccessDate = new Date(file.lastAccess()); Integer attributes = file.getAttributes(); String shareName = file.getShare(); rd.setFileName(fileNameString); rd.setOriginalSize(originalLength); if (contentType != null) rd.setMimeType(contentType); rd.addField("lastModified", lastModifiedDate.toString()); rd.addField("fileLastModified",DateParser.formatISO8601Date(lastModifiedDate)); rd.setModifiedDate(lastModifiedDate); // Add extra obtainable fields to the field map rd.addField("createdOn", creationDate.toString()); rd.addField("fileCreatedOn",DateParser.formatISO8601Date(creationDate)); rd.setCreatedDate(creationDate); //rd.addField("lastAccess", lastModifiedDate.toString()); rd.addField("attributes", Integer.toString(attributes)); rd.addField("shareName", shareName); setDocumentSecurity(rd,shareAllow,shareDeny,parentAllow,parentDeny,documentAllow,documentDeny); setPathMetadata(rd,pathAttributeName,pathAttributeValue); // manipulate path to include the DFS alias, not the literal path // String newPath = matchPrefix + fileName.substring(matchReplace.length()); String newPath = fileName; if (checkNeedFileData(newPath, spec)) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Local file data needed for '"+documentIdentifier+"'"); // Create a temporary file, and use that for the check and then the ingest File tempFile = File.createTempFile("_sdc_",null); try { FileOutputStream os = new FileOutputStream(tempFile); try { // Now, make a local copy so we can fingerprint InputStream inputStream = getFileInputStream(file); try { // Copy! 
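// The remote stream is copied into the temporary file in 64KB chunks; checkIngest() then fingerprints
// the local copy, and the same temporary file is re-read if the document is actually ingested.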
if (transferBuffer == null) transferBuffer = new byte[65536]; while (true) { int amt = inputStream.read(transferBuffer,0,transferBuffer.length); if (amt == -1) break; os.write(transferBuffer,0,amt); } } finally { inputStream.close(); } } finally { os.close(); } if (checkIngest(tempFile, newPath, spec, activities)) { // Not needed; fetched earlier: long fileLength = tempFile.length(); if (!activities.checkLengthIndexable(fileLength)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept length ("+fileLength+")"); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Rejected due to length ("+fileLength+")"; activities.noDocument(documentIdentifier,versionString); continue; } if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Decided to ingest '"+documentIdentifier+"'"); // OK, do ingestion itself! InputStream inputStream = new FileInputStream(tempFile); try { rd.setBinary(inputStream, fileLength); activities.ingestDocumentWithException(documentIdentifier, versionString, uri, rd); errorCode = "OK"; fileLengthLong = new Long(fileLength); } finally { inputStream.close(); } } else { // We must actively remove the document here, because the getDocumentVersions() // method has no way of signalling this, since it does not do the fingerprinting. if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Decided to remove '"+documentIdentifier+"'"); activities.noDocument(documentIdentifier, versionString); errorCode = "NOWORKNEEDED"; errorDesc = "No indexing needed for document at this time"; } } finally { tempFile.delete(); } } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Local file data not needed for '"+documentIdentifier+"'"); // Not needed; fetched earlier: long fileLength = fileLength(file); if (!activities.checkLengthIndexable(fileLength)) { Logging.connectors.debug("JCIFS: Skipping file because output connector cannot accept length ("+fileLength+")"); errorCode = activities.EXCLUDED_LENGTH; errorDesc = "Rejected because of length ("+fileLength+")"; activities.noDocument(documentIdentifier,versionString); continue; } // Presume that since the file was queued that it fulfilled the needed criteria. // Go off and ingest the fast way. // Ingest the document. 
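// Fast path: no fingerprinting is required, so the content is streamed straight from the share
// without first staging a local temporary copy.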
InputStream inputStream = getFileInputStream(file); try { rd.setBinary(inputStream, fileLength); activities.ingestDocumentWithException(documentIdentifier, versionString, uri, rd); errorCode = "OK"; fileLengthLong = new Long(fileLength); } finally { inputStream.close(); } } } else { Logging.connectors.debug("JCIFS: Skipping file because canonical path is null, or because file is hidden"); errorCode = "NULLORHIDDEN"; errorDesc = "Null canonical path or hidden file"; activities.noDocument(documentIdentifier,versionString); continue; } } } } catch (MalformedURLException mue) { Logging.connectors.error("MalformedURLException tossed: "+mue.getMessage(),mue); errorCode = mue.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Malformed URL: "+mue.getMessage(); throw new ManifoldCFException("MalformedURLException tossed: "+mue.getMessage(),mue); } catch (jcifs.smb.SmbAuthException e) { Logging.connectors.warn("JCIFS: Authorization exception reading document/directory "+documentIdentifier+" - skipping"); errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Authorization: "+e.getMessage(); if(e.getMessage().equals("Logon failure: unknown user name or bad password.")) throw new ManifoldCFException( "SmbAuthException thrown: " + e.getMessage(), e ); else { activities.noDocument(documentIdentifier, versionString); continue; } } catch (SmbException se) { // At least some of these are transport errors, and should be treated as service // interruptions. long currentTime = System.currentTimeMillis(); Throwable cause = se.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw new ManifoldCFException(te.getRootCause().getMessage(),te.getRootCause(),ManifoldCFException.INTERRUPTED); Logging.connectors.warn("JCIFS: Timeout processing document/directory "+documentIdentifier+": retrying...",se); errorCode = cause.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Transport: "+cause.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+cause.getMessage(),cause,currentTime + 300000L, currentTime + 12 * 60 * 60000L,-1,false); } if (se.getMessage().toLowerCase(Locale.ROOT).indexOf("reset by peer") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("busy") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("file in use") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("is being used") != -1 || se.getMessage().indexOf("0xC0000054") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Busy: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("handle is invalid") != -1) { Logging.connectors.warn("JCIFS: 'Handle is invalid' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 
300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("parameter is incorrect") != -1) { Logging.connectors.warn("JCIFS: 'Parameter is incorrect' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("no longer available") != -1) { Logging.connectors.warn("JCIFS: 'No longer available' response when processing document/directory for "+documentIdentifier+": retrying...",se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Expiration: "+se.getMessage(); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("cannot find") != -1 || se.getMessage().indexOf("cannot be found") != -1) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Skipping document/directory "+documentIdentifier+" because it cannot be found"); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Not found: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else if (se.getMessage().indexOf("0xC0000205") != -1) { Logging.connectors.warn("JCIFS: Out of resources exception reading document/directory "+documentIdentifier+" - skipping"); // We call the delete even if it's a directory; this is harmless and it cleans up the jobqueue row. errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Resources: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else if (se.getMessage().indexOf("is denied") != -1) { Logging.connectors.warn("JCIFS: Access exception reading document/directory "+documentIdentifier+" - skipping"); // We call the delete even if it's a directory; this is harmless and it cleans up the jobqueue row. 
errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Authorization: "+se.getMessage(); activities.noDocument(documentIdentifier, versionString); } else { Logging.connectors.error("JCIFS: SmbException tossed processing "+documentIdentifier,se); errorCode = se.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = "Unknown: "+se.getMessage(); throw new ServiceInterruption("Unknown SMBException thrown: "+se.getMessage(),se,currentTime + 3 * 60 * 60000L, -1L,1,true); } } catch (IOException e) { errorCode = e.getClass().getSimpleName().toUpperCase(Locale.ROOT); errorDesc = e.getMessage(); handleIOException(documentIdentifier,e); } } catch (ManifoldCFException e) { if (e.getErrorCode() == ManifoldCFException.INTERRUPTED) errorCode = null; throw e; } finally { if (errorCode != null) activities.recordActivity(new Long(startFetchTime),ACTIVITY_ACCESS, fileLengthLong,documentIdentifier,errorCode,errorDesc,null); } } } } protected static void handleIOException(String documentIdentifier, IOException e) throws ManifoldCFException, ServiceInterruption { if (e instanceof java.net.SocketTimeoutException) { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: Socket timeout processing "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (e instanceof InterruptedIOException) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } else { long currentTime = System.currentTimeMillis(); Logging.connectors.warn("JCIFS: IO error processing "+documentIdentifier+": "+e.getMessage(),e); throw new ServiceInterruption("Timeout or other service interruption: "+e.getMessage(),e,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } } /** Map an extension to a mime type */ protected static String mapExtensionToMimeType(String fileName) { int slashIndex = fileName.lastIndexOf("/"); if (slashIndex != -1) fileName = fileName.substring(slashIndex+1); int dotIndex = fileName.lastIndexOf("."); if (dotIndex == -1) return null; return ExtensionMimeMap.mapToMimeType(fileName.substring(dotIndex+1).toLowerCase(java.util.Locale.ROOT)); } protected static void addSecuritySet(StringBuilder description, boolean enabled, String[] allowTokens, String[] denyTokens) { if (enabled) { description.append("+"); java.util.Arrays.sort(allowTokens); java.util.Arrays.sort(denyTokens); // Stuff the acls into the description string. 
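// Tokens are sorted so the packed version string stays stable when the ACL set has not changed;
// a leading '+' marks security-on, while the '-' branch below marks security-off.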
packList(description,allowTokens,'+'); packList(description,denyTokens,'+'); } else description.append("-"); } protected boolean getFileSecuritySet(List<String> allowList, List<String> denyList, SmbFile file, String[] forced) throws ManifoldCFException, IOException { if (forced != null) { if (forced.length == 0) { convertACEs(allowList,denyList,getFileSecurity(file, useSIDs)); } else { for (String forcedToken : forced) { allowList.add(forcedToken); } denyList.add(defaultAuthorityDenyToken); } return true; } else return false; } protected boolean getFileShareSecuritySet(List<String> allowList, List<String> denyList, SmbFile file, String[] forced) throws ManifoldCFException, IOException { if (forced != null) { if (forced.length == 0) { convertACEs(allowList,denyList,getFileShareSecurity(file, useSIDs)); } else { for (String forcedToken : forced) { allowList.add(forcedToken); } denyList.add(defaultAuthorityDenyToken); } return true; } else return false; } protected void convertACEs(List<String> allowList, List<String> denyList, ACE[] aces) { if (aces == null) { // "Public" share: S-1-1-0 allowList.add("S-1-1-0"); denyList.add(defaultAuthorityDenyToken); } else { denyList.add(defaultAuthorityDenyToken); for (ACE ace : aces) { if ((ace.getAccessMask() & ACE.FILE_READ_DATA) != 0) { if (ace.isAllow()) allowList.add(useSIDs ? ace.getSID().toString() : ace.getSID().getAccountName()); else denyList.add(useSIDs ? ace.getSID().toString() : ace.getSID().getAccountName()); } } } } protected static void processSMBException(SmbException se, String documentIdentifier, String activity, String operation) throws ManifoldCFException, ServiceInterruption { // At least some of these are transport errors, and should be treated as service // interruptions. long currentTime = System.currentTimeMillis(); Throwable cause = se.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw new ManifoldCFException(te.getRootCause().getMessage(),te.getRootCause(),ManifoldCFException.INTERRUPTED); Logging.connectors.warn("JCIFS: Timeout "+activity+" for "+documentIdentifier+": retrying...",se); // Transport exceptions no longer abort when they give up, so we can't get notified that there is a problem. 
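// (Retry in about five minutes; the interruption stays retryable for up to twelve hours, with an
// unlimited retry count and no hard abort.)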
throw new ServiceInterruption("Timeout or other service interruption: "+cause.getMessage(),cause,currentTime + 300000L, currentTime + 12 * 60 * 60000L,-1,false); } if (se.getMessage().indexOf("busy") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Busy exceptions just skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("0xC0000054") != -1) { Logging.connectors.warn("JCIFS: 'Busy' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Busy exceptions just skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("handle is invalid") != -1) { Logging.connectors.warn("JCIFS: 'Handle is invalid' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Invalid handle errors treated like "busy" throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("parameter is incorrect") != -1) { Logging.connectors.warn("JCIFS: 'Parameter is incorrect' response when "+activity+" for "+documentIdentifier+": retrying...",se); // Invalid handle errors treated like "busy" throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("no longer available") != -1) { Logging.connectors.warn("JCIFS: 'No longer available' response when "+activity+" for "+documentIdentifier+": retrying...",se); // No longer available == busy throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if(se.getMessage().indexOf("No process is on the other end of the pipe") != -1) { Logging.connectors.warn("JCIFS: 'No process is on the other end of the pipe' response when "+activity+" for "+documentIdentifier+": retrying...",se); // 'No process is on the other end of the pipe' skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().toLowerCase(Locale.ROOT).indexOf("busy") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("file in use") != -1 || se.getMessage().toLowerCase(Locale.ROOT).indexOf("is being used") != -1) { Logging.connectors.warn("JCIFS: 'File in Use' response when "+activity+" for "+documentIdentifier+": retrying...",se); // 'File in Use' skip the document and keep going throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 300000L, currentTime + 3 * 60 * 60000L,-1,false); } else if (se.getMessage().indexOf("cannot find") != -1 || se.getMessage().indexOf("cannot be found") != -1) { return; } else if (se.getMessage().indexOf("is denied") != -1) { Logging.connectors.warn("JCIFS: Access exception when "+activity+" for "+documentIdentifier+" - skipping"); return; } else if (se.getMessage().indexOf("Incorrect function") != -1) { Logging.connectors.error("JCIFS: Server does not support a required operation 
("+operation+"?) for "+documentIdentifier); throw new ManifoldCFException("Server does not support a required operation ("+operation+", possibly?) accessing document "+documentIdentifier,se); } else { Logging.connectors.error("Unrecognized SmbException thrown "+activity+" for "+documentIdentifier,se); throw new ServiceInterruption("Timeout or other service interruption: "+se.getMessage(),se,currentTime + 3 * 60 * 60000L, -1,1,true); } } protected static void setDocumentSecurity(RepositoryDocument rd, String[] shareAllow, String[] shareDeny, String[] parentAllow, String[] parentDeny, String[] allow, String[] deny) { // set share acls if (shareAllow.length > 0 || shareDeny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_SHARE,shareAllow,shareDeny); // set parent folder acls if (parentAllow.length > 0 || parentDeny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_PARENT,parentAllow,parentDeny); // set native file acls if (allow.length > 0 || deny.length > 0) rd.setSecurity(RepositoryDocument.SECURITY_TYPE_DOCUMENT,allow,deny); } protected static void setPathMetadata(RepositoryDocument rd, String pathAttributeName, String pathAttributeValue) throws ManifoldCFException { if (pathAttributeName != null && pathAttributeValue != null) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Path attribute name is '"+pathAttributeName+"'"); if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Path attribute value is '"+pathAttributeValue+"'"); rd.addField(pathAttributeName,pathAttributeValue); } else Logging.connectors.debug("JCIFS: Path attribute name is null"); } /** Check status of connection. */ @Override public String check() throws ManifoldCFException { getSession(); String serverURI = smbconnectionPath; SmbFile server = null; try { server = new SmbFile(serverURI,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { return "Malformed URL: '"+serverURI+"': "+e1.getMessage(); } try { // check to make sure it's a server or a folder int type = getFileType(server); if (type==SmbFile.TYPE_SERVER || type==SmbFile.TYPE_SHARE || type==SmbFile.TYPE_FILESYSTEM) { try { server.connect(); if (!server.exists()) return "Server or path does not exist"; } catch (java.net.SocketTimeoutException e) { return "Timeout connecting to server: "+e.getMessage(); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { return "Couldn't connect to server: "+e.getMessage(); } return super.check(); } else return "URI is not a server URI: '"+serverURI+"'"; } catch (SmbException e) { return "Could not connect: "+e.getMessage(); } } // Protected methods /** Check if a file's stats are OK for inclusion. 
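* Returns false when the output connector will not accept the file's length or mapped mime type, or when
* the length exceeds the maximum length configured in the document specification.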
*/ protected static boolean checkIncludeFile(long fileLength, String fileName, Specification documentSpecification, IFingerprintActivity activities) throws ManifoldCFException, ServiceInterruption { // If it's a file, make sure the maximum length is not exceeded if (!activities.checkLengthIndexable(fileLength) || !activities.checkMimeTypeIndexable(mapExtensionToMimeType(fileName))) return false; long maxFileLength = Long.MAX_VALUE; for (int i = 0; i < documentSpecification.getChildCount(); i++) { SpecificationNode sn = documentSpecification.getChild(i); if (sn.getType().equals(NODE_MAXLENGTH)) { try { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value != null && value.length() > 0) maxFileLength = new Long(value).longValue(); } catch (NumberFormatException e) { throw new ManifoldCFException("Bad number: "+e.getMessage(),e); } } } if (fileLength > maxFileLength) return false; return true; } /** Check if a file or directory should be included, given a document specification. *@param isDirectory is true if the file is a directory. *@param fileName is the canonical file name. *@param documentSpecification is the specification. *@return true if it should be included. */ protected boolean checkInclude(boolean isDirectory, String fileName, Specification documentSpecification) throws ManifoldCFException { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In checkInclude for '"+fileName+"'"); // This method does not attempt to do any fingerprinting. Instead, it will opt to include any // file that may depend on fingerprinting, and exclude everything else. The actual setup for // the fingerprinting test is in checkNeedFileData(), while the actual code that determines in vs. // out using the file data is in checkIngest(). try { String pathPart; String filePart; if (isDirectory) { pathPart = fileName; filePart = null; } else { int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { // Pathpart has to include the slash pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } } int i; // Scan until we match a startpoint i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Matching startpoint '"+path+"' against actual '"+pathPart+"'"); int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { Logging.connectors.debug("JCIFS: No match"); continue; } Logging.connectors.debug("JCIFS: Startpoint found!"); // If this is the root, it's always included. if (matchEnd == fileName.length()) { Logging.connectors.debug("JCIFS: Startpoint: always included"); return true; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. 
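// Rules are applied in order; the first include or exclude rule that matches determines the result.
// When the result would hinge on the 'indexable' flag (which requires file data we don't have yet),
// include rules win and exclude rules are skipped, so we err on the side of inclusion.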
int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Checking '"+match+"' against '"+fileName.substring(matchEnd-1)+"'"); boolean isMatch = checkMatch(fileName,matchEnd-1,match); boolean isKnown = true; // Check the directory/file criteria if (isMatch) { Logging.connectors.debug("JCIFS: Match found."); isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } else Logging.connectors.debug("JCIFS: No match!"); // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) { isIndexable = false; isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } else isKnown = false; } } if (isKnown) { if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } else { // Not known // What we do depends on whether this is an include rule or an exclude one. // We want to err on the side of inclusion, which means for include rules // we return true, and for exclude rules we simply continue. if (flavor.equals(NODE_INCLUDE)) return true; // Continue } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving checkInclude for '"+fileName+"'"); } } /** Pretend that a file is either indexable or not, and return whether or not it would be ingested. * This is only ever called for files. *@param fileName is the canonical file name. *@param documentSpecification is the specification. *@param pretendIndexable should be set to true if the document's contents would be fingerprinted as "indexable", * or false otherwise. *@return true if the file would be ingested given the parameters. */ protected boolean wouldFileBeIncluded(String fileName, Specification documentSpecification, boolean pretendIndexable) throws ManifoldCFException { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In wouldFileBeIncluded for '"+fileName+"', pretendIndexable="+(pretendIndexable?"true":"false")); // This file was flagged as needing file data. However, that doesn't tell us *for what* we need it. // So we need to redo the decision tree, but this time do everything completely. 
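// Same decision tree as checkInclude()/checkIngest(), but the indexability question is answered by the
// pretendIndexable flag rather than by fingerprinting a local copy; checkNeedFileData() compares both answers.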
try { String pathPart; String filePart; boolean isDirectory = false; int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } // Scan until we match a startpoint int i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { continue; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec boolean isMatch = checkMatch(fileName,matchEnd-1,match); // Check the directory/file criteria if (isMatch) { isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) isIndexable = false; else { // Evaluate the parts of being indexable that are based on the filename, mime type, and url isIndexable = pretendIndexable; } isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } } if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving wouldFileBeIncluded for '"+fileName+"'"); } } /** Check to see whether we need the contents of the file for anything. We do this by assuming that * the file is indexable, and assuming that it's not, and seeing if the same thing would happen. *@param fileName is the name of the file. *@param documentSpecification is the document specification. *@return true if the file needs to be fingerprinted. */ protected boolean checkNeedFileData(String fileName, Specification documentSpecification) throws ManifoldCFException { return wouldFileBeIncluded(fileName,documentSpecification,true) != wouldFileBeIncluded(fileName,documentSpecification,false); } /** Check if a file should be ingested, given a document specification and a local copy of the * file. It is presumed that only files that passed checkInclude() and were also flagged as needing * file data by checkNeedFileData() will be checked by this method. *@param localFile is the file. *@param fileName is the JCIFS file name. *@param documentSpecification is the specification. 
*@param activities are the activities available to determine indexability. *@return true if the file should be ingested. */ protected boolean checkIngest(File localFile, String fileName, Specification documentSpecification, IFingerprintActivity activities) throws ManifoldCFException, ServiceInterruption { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: In checkIngest for '"+fileName+"'"); // This file was flagged as needing file data. However, that doesn't tell us *for what* we need it. // So we need to redo the decision tree, but this time do everything completely. try { String pathPart; String filePart; boolean isDirectory = false; int lastSlash = fileName.lastIndexOf("/"); if (lastSlash == -1) { pathPart = ""; filePart = fileName; } else { pathPart = fileName.substring(0,lastSlash+1); filePart = fileName.substring(lastSlash+1); } // Scan until we match a startpoint int i = 0; while (i < documentSpecification.getChildCount()) { SpecificationNode sn = documentSpecification.getChild(i++); if (sn.getType().equals(NODE_STARTPOINT)) { // Prepend the server URL to the path, since that's what pathpart will have. String path = mapToIdentifier(sn.getAttributeValue(ATTRIBUTE_PATH)); // Compare with filename int matchEnd = matchSubPath(path,pathPart); if (matchEnd == -1) { continue; } // matchEnd is the start of the rest of the path (after the match) in fileName. // We need to walk through the rules and see whether it's in or out. int j = 0; while (j < sn.getChildCount()) { SpecificationNode node = sn.getChild(j++); String flavor = node.getType(); if (flavor.equals(NODE_INCLUDE) || flavor.equals(NODE_EXCLUDE)) { String type = node.getAttributeValue(ATTRIBUTE_TYPE); if (type == null) type = ""; String indexable = node.getAttributeValue(ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; String match = node.getAttributeValue(ATTRIBUTE_FILESPEC); // Check if there's a match against the filespec boolean isMatch = checkMatch(fileName,matchEnd-1,match); // Check the directory/file criteria if (isMatch) { isMatch = type.length() == 0 || (type.equals(VALUE_DIRECTORY) && isDirectory) || (type.equals(VALUE_FILE) && !isDirectory); } // Check the indexable criteria if (isMatch) { if (indexable.length() != 0) { // Directories are never considered indexable. // But if this is not a directory, things become ambiguous. boolean isIndexable; if (isDirectory) isIndexable = false; else { isIndexable = activities.checkDocumentIndexable(localFile); } isMatch = (indexable.equals("yes") && isIndexable) || (indexable.equals("no") && !isIndexable); } } if (isMatch) { if (flavor.equals(NODE_INCLUDE)) return true; else return false; } } } } } return false; } catch (MalformedURLException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } catch (UnknownHostException e) { throw new ManifoldCFException("Couldn't map to canonical path: "+e.getMessage(),e); } finally { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Leaving checkIngest for '"+fileName+"'"); } } /** Match a sub-path. The sub-path must match the complete starting part of the full path, in a path * sense. The returned value should point into the file name beyond the end of the matched path, or * be -1 if there is no match. *@param subPath is the sub path. *@param fullPath is the full path. *@return the index of the start of the remaining part of the full path, or -1. 
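* e.g. matching sub-path "smb://server/share/" against full path "smb://server/share/folder/file" returns
* the index at which "folder/file" begins.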
*/ protected static int matchSubPath(String subPath, String fullPath) { if (subPath.length() > fullPath.length()) return -1; if (fullPath.startsWith(subPath) == false) return -1; int rval = subPath.length(); if (fullPath.length() == rval) return rval; char x = fullPath.charAt(rval); if (x == File.separatorChar) rval++; return rval; } /** Check a match between two strings with wildcards. *@param sourceMatch is the expanded string (no wildcards) *@param sourceIndex is the starting point in the expanded string. *@param match is the wildcard-based string. *@return true if there is a match. */ protected static boolean checkMatch(String sourceMatch, int sourceIndex, String match) { // Note: The java regex stuff looks pretty heavyweight for this purpose. // I've opted to try and do a simple recursive version myself, which is not compiled. // Basically, the match proceeds by recursive descent through the string, so that all *'s cause // recursion. boolean caseSensitive = false; return processCheck(caseSensitive, sourceMatch, sourceIndex, match, 0); } /** Recursive worker method for checkMatch. Returns 'true' if there is a path that consumes both * strings in their entirety in a matched way. *@param caseSensitive is true if file names are case sensitive. *@param sourceMatch is the source string (w/o wildcards) *@param sourceIndex is the current point in the source string. *@param match is the match string (w/wildcards) *@param matchIndex is the current point in the match string. *@return true if there is a match. */ protected static boolean processCheck(boolean caseSensitive, String sourceMatch, int sourceIndex, String match, int matchIndex) { // Logging.connectors.debug("Matching '"+sourceMatch+"' position "+Integer.toString(sourceIndex)+ // " against '"+match+"' position "+Integer.toString(matchIndex)); // Match up through the next * we encounter while (true) { // If we've reached the end, it's a match. if (sourceMatch.length() == sourceIndex && match.length() == matchIndex) return true; // If one has reached the end but the other hasn't, no match if (match.length() == matchIndex) return false; if (sourceMatch.length() == sourceIndex) { if (match.charAt(matchIndex) != '*') return false; matchIndex++; continue; } char x = sourceMatch.charAt(sourceIndex); char y = match.charAt(matchIndex); if (!caseSensitive) { if (x >= 'A' && x <= 'Z') x -= 'A'-'a'; if (y >= 'A' && y <= 'Z') y -= 'A'-'a'; } if (y == '*') { // Wildcard! // We will recurse at this point. // Basically, we want to combine the results for leaving the "*" in the match string // at this point and advancing the source index, with skipping the "*" and leaving the source // string alone. return processCheck(caseSensitive,sourceMatch,sourceIndex+1,match,matchIndex) || processCheck(caseSensitive,sourceMatch,sourceIndex,match,matchIndex+1); } if (y == '?' || x == y) { sourceIndex++; matchIndex++; } else return false; } } /** Grab forced acl out of document specification. *@param spec is the document specification. *@return the acls. 
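* (null when security is switched off in the specification; a zero-length array means that the native
* file ACLs should be used instead of forced tokens)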
*/ protected static String[] getForcedAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = true; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_ACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_SECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Grab forced share acls out of document specification. *@param spec is the document specification. *@return the acls. */ protected static String[] getForcedShareAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = true; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_SHAREACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_SHARESECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Grab forced parent folder acls out of document specification. *@param spec is the document specification. *@return the acls. */ protected static String[] getForcedParentFolderAcls(Specification spec) { HashMap map = new HashMap(); int i = 0; boolean securityOn = false; while (i < spec.getChildCount()) { SpecificationNode sn = spec.getChild(i++); if (sn.getType().equals(NODE_PARENTFOLDERACCESS)) { String token = sn.getAttributeValue(ATTRIBUTE_TOKEN); map.put(token,token); } else if (sn.getType().equals(NODE_PARENTFOLDERSECURITY)) { String value = sn.getAttributeValue(ATTRIBUTE_VALUE); if (value.equals("on")) securityOn = true; else if (value.equals("off")) securityOn = false; } } if (!securityOn) return null; String[] rval = new String[map.size()]; Iterator iter = map.keySet().iterator(); i = 0; while (iter.hasNext()) { rval[i++] = (String)iter.next(); } return rval; } /** Map a "path" specification to a full identifier. */ protected String mapToIdentifier(String path) throws MalformedURLException, UnknownHostException { String smburi = smbconnectionPath; String uri = smburi + path + "/"; return getFileCanonicalPath(new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa))); } // These methods allow me to experiment with cluster-mandated error handling on an entirely local level. They correspond to individual SMBFile methods. /** Get canonical path */ protected static String getFileCanonicalPath(SmbFile file) { return file.getCanonicalPath(); } /** Check for file/directory existence */ protected static boolean fileExists(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.exists(); } catch (SmbException e) { // If it's an interruption, throw it right away. 
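// Retry policy for these SMB wrappers: up to five attempts overall, but no more than three consecutive
// failures with an equivalent exception; a different exception resets the consecutive-failure budget.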
Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while checking if file exists: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Check if file is a directory */ protected static boolean fileIsDirectory(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.isDirectory(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while seeing if file is a directory: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get last modified date for file */ protected static long fileLastModified(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.lastModified(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file last-modified date: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file length */ protected static long fileLength(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.length(); } catch (SmbException e) { // If it's an interruption, throw it right away. 
Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file length: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** List files */ protected static SmbFile[] fileListFiles(SmbFile file, SmbFileFilter filter) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.listFiles(filter); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } if (e.getMessage().equals("0x8000002D")) { // Symlink Logging.connectors.warn("JCIFS: Symlink detected: "+file); return new SmbFile[0]; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while listing files: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get input stream for file */ protected static InputStream getFileInputStream(SmbFile file) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getInputStream(); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file input stream: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file security */ protected static ACE[] getFileSecurity(SmbFile file, boolean useSIDs) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getSecurity(!useSIDs); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file security: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. 
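        // A different message is taken to mean a new failure mode, so it gets a fresh retry
        // budget; the overall cap of 5 attempts still applies.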
if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get share security */ protected static ACE[] getFileShareSecurity(SmbFile file, boolean useSIDs) throws IOException { int totalTries = 0; int retriesRemaining = 3; IOException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getShareSecurity(!useSIDs); } catch (java.net.SocketTimeoutException e) { throw e; } catch (InterruptedIOException e) { throw e; } catch (IOException e) { Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting share security: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentIOExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Get file type */ protected static int getFileType(SmbFile file) throws SmbException { int totalTries = 0; int retriesRemaining = 3; SmbException currentException = null; while (retriesRemaining > 0 && totalTries < 5) { retriesRemaining--; totalTries++; try { return file.getType(); } catch (SmbException e) { // If it's an interruption, throw it right away. Throwable cause = e.getRootCause(); if (cause != null && (cause instanceof jcifs.util.transport.TransportException)) { // See if it's an interruption jcifs.util.transport.TransportException te = (jcifs.util.transport.TransportException)cause; if (te.getRootCause() != null && te.getRootCause() instanceof java.lang.InterruptedException) throw e; } Logging.connectors.warn("JCIFS: Possibly transient exception detected on attempt "+Integer.toString(totalTries)+" while getting file type: "+e.getMessage(),e); if (currentException != null) { // Compare exceptions. If they differ, reset the retry count. if (!equivalentSmbExceptions(currentException,e)) retriesRemaining = 3; } currentException = e; } } throw currentException; } /** Check if two SmbExceptions are equivalent */ protected static boolean equivalentSmbExceptions(SmbException e1, SmbException e2) { // The thing we want to compare is the message. This is a little risky in that if there are (for example) object addresses in the message, the comparison will always fail. // However, I don't think we expect any such thing in this case. String e1m = e1.getMessage(); String e2m = e2.getMessage(); if (e1m == null) e1m = ""; if (e2m == null) e2m = ""; return e1m.equals(e2m); } /** Check if two IOExceptions are equivalent */ protected static boolean equivalentIOExceptions(IOException e1, IOException e2) { // The thing we want to compare is the message. This is a little risky in that if there are (for example) object addresses in the message, the comparison will always fail. // However, I don't think we expect any such thing in this case. String e1m = e1.getMessage(); String e2m = e2.getMessage(); if (e1m == null) e1m = ""; if (e2m == null) e2m = ""; return e1m.equals(e2m); } // UI support methods. // // These support methods come in two varieties. The first bunch is involved in setting up connection configuration information. The second bunch // is involved in presenting and editing document specification information for a job. The two kinds of methods are accordingly treated differently, // in that the first bunch cannot assume that the current connector object is connected, while the second bunch can. 
That is why the first bunch // receives a thread context argument for all UI methods, while the second bunch does not need one (since it has already been applied via the connect() // method, above). /** Output the configuration header section. * This method is called in the head section of the connector's configuration page. Its purpose is to add the required tabs to the list, and to output any * javascript methods that might be needed by the configuration editing HTML. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. */ @Override public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Server")); out.print( "<script type=\"text/javascript\">\n"+ "<!--\n"+ "function checkConfigForSave()\n"+ "{\n"+ " if (editconnection.server.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAServerName") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.server.focus();\n"+ " return false;\n"+ " }\n"+ "\n"+ " if (editconnection.server.value.indexOf(\"/\") != -1)\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.ServerNameCannotIncludePathInformation") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.server.focus();\n"+ " return false;\n"+ " }\n"+ " \n"+ " if (editconnection.username.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAUserName") + "\");\n"+ " SelectTab(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.Server2") + "\");\n"+ " editconnection.username.focus();\n"+ " return false;\n"+ " }\n"+ "\n"+ " return true;\n"+ "}\n"+ "\n"+ "//-->\n"+ "</script>\n" ); } /** Output the configuration body section. * This method is called in the body section of the connector's configuration page. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the * form is "editconnection". *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabName is the current tab name. 
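  *<p>Note: the form field names written here (server, domain, username, password,
  * resolvesidspresent, resolvesids, binname) are the ones read back by processConfigurationPost,
  * so the two methods need to stay in sync.</p>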
*/ @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { String server = parameters.getParameter(SharedDriveParameters.server); if (server==null) server = ""; String domain = parameters.getParameter(SharedDriveParameters.domain); if (domain==null) domain = ""; String username = parameters.getParameter(SharedDriveParameters.username); if (username==null) username = ""; String password = parameters.getObfuscatedParameter(SharedDriveParameters.password); if (password==null) password = ""; else password = out.mapPasswordToKey(password); String resolvesids = parameters.getParameter(SharedDriveParameters.useSIDs); if (resolvesids==null) resolvesids = "true"; String binName = parameters.getParameter(SharedDriveParameters.binName); if (binName == null) binName = ""; // "Server" tab if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Server"))) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Server3") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"server\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(server)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.AuthenticationDomain") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"domain\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(domain)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.UserName") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"username\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(username)+"\"/></td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Password") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"password\" size=\"32\" name=\"password\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(password)+"\"/></td>\n"+ " </tr>\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.UseSIDSForSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\"resolvesidspresent\" value=\"true\"/><input type=\"checkbox\" value=\"true\" name=\"resolvesids\" "+("true".equals(resolvesids)?"checked=\"true\"":"")+"/></td>\n"+ " </tr>\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.BinName") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" size=\"32\" name=\"binname\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(binName)+"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\"server\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(server)+"\"/>\n"+ "<input type=\"hidden\" name=\"domain\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(domain)+"\"/>\n"+ "<input type=\"hidden\" name=\"username\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(username)+"\"/>\n"+ 
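      // Note that the value echoed for "password" below is the opaque key produced by
      // out.mapPasswordToKey() above, not the actual password.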
"<input type=\"hidden\" name=\"password\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(password)+"\"/>\n"+ "<input type=\"hidden\" name=\"resolvesidspresent\" value=\"true\"/>\n"+ "<input type=\"hidden\" name=\"resolvesids\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(resolvesids)+"\"/>\n"+ "<input type=\"hidden\" name=\"binname\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(binName)+"\"/>\n" ); } } /** Process a configuration post. * This method is called at the start of the connector's configuration page, whenever there is a possibility that form data for a connection has been * posted. Its purpose is to gather form information and modify the configuration parameters accordingly. * The name of the posted form is "editconnection". *@param threadContext is the local thread context. *@param variableContext is the set of variables available from the post, including binary file post information. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@return null if all is well, or a string error message if there is an error that should prevent saving of the connection (and cause a redirection to an error page). */ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { String server = variableContext.getParameter("server"); if (server != null) parameters.setParameter(SharedDriveParameters.server,server); String domain = variableContext.getParameter("domain"); if (domain != null) parameters.setParameter(SharedDriveParameters.domain,domain); String username = variableContext.getParameter("username"); if (username != null) parameters.setParameter(SharedDriveParameters.username,username); String password = variableContext.getParameter("password"); if (password != null) parameters.setObfuscatedParameter(SharedDriveParameters.password,variableContext.mapKeyToPassword(password)); String resolvesidspresent = variableContext.getParameter("resolvesidspresent"); if (resolvesidspresent != null) { parameters.setParameter(SharedDriveParameters.useSIDs,"false"); String resolvesids = variableContext.getParameter("resolvesids"); if (resolvesids != null) parameters.setParameter(SharedDriveParameters.useSIDs, resolvesids); } String binName = variableContext.getParameter("binname"); if (binName != null) parameters.setParameter(SharedDriveParameters.binName, binName); return null; } /** View configuration. * This method is called in the body section of the connector's view configuration page. Its purpose is to present the connection information to the user. * The coder can presume that the HTML that is output from this configuration will be within appropriate &lt;html&gt; and &lt;body&gt;tags. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. 
*/ @Override public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { out.print( "<table class=\"displaytable\">\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.Parameters") + "</nobr></td>\n"+ " <td class=\"value\" colspan=\"3\">\n" ); Iterator iter = parameters.listParameters(); while (iter.hasNext()) { String param = (String)iter.next(); String value = parameters.getParameter(param); if (param.length() >= "password".length() && param.substring(param.length()-"password".length()).equalsIgnoreCase("password")) { out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"=********</nobr><br/>\n" ); } else if (param.length() >="keystore".length() && param.substring(param.length()-"keystore".length()).equalsIgnoreCase("keystore")) { IKeystoreManager kmanager = KeystoreManagerFactory.make("",value); out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"=&lt;"+Integer.toString(kmanager.getContents().length)+ Messages.getBodyString(locale,"SharedDriveConnector.certificate") + "&gt;</nobr><br/>\n" ); } else { out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(param)+"="+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(value)+"</nobr><br/>\n" ); } } out.print( " </td>\n"+ " </tr>\n"+ "</table>\n" ); } /** Output the specification header section. * This method is called in the head section of a job page which has selected a repository connection of the * current type. Its purpose is to add the required tabs to the list, and to output any javascript methods * that might be needed by the job editing HTML. * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. 
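  *<p>The javascript helpers emitted here are prefixed with "s" + connectionSequenceNumber + "_",
  * matching the form field names, so that several connections can be edited on the same job page
  * without name collisions.</p>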
*/ @Override public void outputSpecificationHeader(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber, List<String> tabsArray) throws ManifoldCFException, IOException { tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Paths")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Security")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.Metadata")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.ContentLength")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.FileMapping")); tabsArray.add(Messages.getString(locale,"SharedDriveConnector.URLMapping")); String seqPrefix = "s"+connectionSequenceNumber+"_"; out.print( "<script type=\"text/javascript\">\n"+ "//<!--\n"+ "\n"+ "function "+seqPrefix+"checkSpecification()\n"+ "{\n"+ " if (editjob."+seqPrefix+"specmaxlength.value != \"\" && !isInteger(editjob."+seqPrefix+"specmaxlength.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NeedAValidNumberForMaximumDocumentLength") + "\");\n"+ " editjob."+seqPrefix+"specmaxlength.focus();\n"+ " return false;\n"+ " }\n"+ " return true;\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecOp(n, opValue, anchorvalue)\n"+ "{\n"+ " eval(\"editjob.\"+n+\".value = \\\"\"+opValue+\"\\\"\");\n"+ " postFormSetAnchor(anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddToPath(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"pathaddon.value == \"\" && editjob."+seqPrefix+"pathtypein.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.SelectAFolderOrTypeInAPathFirst") + "\");\n"+ " editjob."+seqPrefix+"pathaddon.focus();\n"+ " return;\n"+ " }\n"+ " if (editjob."+seqPrefix+"pathaddon.value != \"\" && editjob."+seqPrefix+"pathtypein.value != \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EitherSelectAFolderORTypeInAPath") + "\");\n"+ " editjob."+seqPrefix+"pathaddon.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"AddToPath\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddSpec(suffix,anchorvalue)\n"+ "{\n"+ " if (eval(\"editjob."+seqPrefix+"specfile\"+suffix+\".value\") == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EnterAFileSpecificationFirst") + "\");\n"+ " eval(\"editjob."+seqPrefix+"specfile\"+suffix+\".focus()\");\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\"+suffix,\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecInsertSpec(postfix,anchorvalue)\n"+ "{\n"+ " if (eval(\"editjob."+seqPrefix+"specfile_i\"+postfix+\".value\") == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.EnterAFileSpecificationFirst") + "\");\n"+ " eval(\"editjob."+seqPrefix+"specfile_i\"+postfix+\".focus()\");\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specop\"+postfix,\"Insert Here\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddToken(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"spectoken.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.NullAccessTokensNotAllowed") + "\");\n"+ " editjob."+seqPrefix+"spectoken.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"accessop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddMapping(anchorvalue)\n"+ "{\n"+ " if 
(editjob."+seqPrefix+"specmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specmappingop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddFMap(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"specfmapmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specfmapmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specfmapmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specfmapmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specfmapop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "function "+seqPrefix+"SpecAddUMap(anchorvalue)\n"+ "{\n"+ " if (editjob."+seqPrefix+"specumapmatch.value == \"\")\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringCannotBeEmpty") + "\");\n"+ " editjob."+seqPrefix+"specumapmatch.focus();\n"+ " return;\n"+ " }\n"+ " if (!isRegularExpression(editjob."+seqPrefix+"specumapmatch.value))\n"+ " {\n"+ " alert(\"" + Messages.getBodyJavascriptString(locale,"SharedDriveConnector.MatchStringMustBeValidRegularExpression") + "\");\n"+ " editjob."+seqPrefix+"specumapmatch.focus();\n"+ " return;\n"+ " }\n"+ " "+seqPrefix+"SpecOp(\""+seqPrefix+"specumapop\",\"Add\",anchorvalue);\n"+ "}\n"+ "\n"+ "//-->\n"+ "</script>\n" ); } /** Output the specification body section. * This method is called in the body section of a job page which has selected a repository connection of the * current type. Its purpose is to present the required form elements for editing. * The coder can presume that the HTML that is output from this configuration will be within appropriate * &lt;html&gt;, &lt;body&gt;, and &lt;form&gt; tags. The name of the form is always "editjob". * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@param actualSequenceNumber is the connection within the job that has currently been selected. *@param tabName is the current tab name. (actualSequenceNumber, tabName) form a unique tuple within * the job. 
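  *<p>Only the tab named by tabName (and belonging to actualSequenceNumber) renders editable
  * controls; every other tab's current state is round-tripped through hidden inputs so nothing
  * is lost when the form is posted.</p>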
*/ @Override public void outputSpecificationBody(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber, int actualSequenceNumber, String tabName) throws ManifoldCFException, IOException { String seqPrefix = "s"+connectionSequenceNumber+"_"; int i; int k; // "Content Length" tab i = 0; String maxLength = null; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) maxLength = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } if (maxLength == null) maxLength = ""; if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.ContentLength")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MaximumDocumentLength") + "</nobr></td>\n"+ " <td class=\"value\"><input type=\"text\" name=\""+seqPrefix+"specmaxlength\" size=\"10\" value=\""+maxLength+"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmaxlength\" value=\""+maxLength+"\"/>\n" ); } // Check for Paths tab if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Paths")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Now, loop through paths. There will be a row in the current table for each one. // The row will contain a delete button on the left. On the right will be the startpoint itself at the top, // and underneath it the table where the filter criteria are edited. i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { String pathDescription = "_"+Integer.toString(k); String pathOpName = seqPrefix+"pathop"+pathDescription; String startPath = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"path_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Delete\" alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeletePath")+Integer.toString(k)+"\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+pathOpName+"\",\"Delete\",\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>\n"+ " </a>&nbsp;\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specpath"+pathDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH))+"\"/>\n"+ " <input type=\"hidden\" name=\""+pathOpName+"\" value=\"\"/>\n"+ " <nobr>"+((startPath.length() == 0)?"(root)":org.apache.manifoldcf.ui.util.Encoder.bodyEscape(startPath))+"</nobr>\n"+ " </td>\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"boxcell\">\n"+ " <table class=\"displaytable\">\n" ); // Now go through the include/exclude children of this node, and display one line per node, followed // an "add" line. 
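        // j counts the include/exclude rule rows rendered for this starting point; the count is
        // written out below as the hidden "specchildcount" field so that processSpecificationPost
        // can walk exactly the same rows on the way back in.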
int j = 0; while (j < sn.getChildCount()) { SpecificationNode excludeNode = sn.getChild(j); String instanceDescription = "_"+Integer.toString(k)+"_"+Integer.toString(j); String instanceOpName = seqPrefix + "specop" + instanceDescription; String nodeFlavor = excludeNode.getType(); String nodeType = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; String filespec = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); String indexable = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"button\" value=\"Insert\" onClick='Javascript:"+seqPrefix+"SpecInsertSpec(\""+instanceDescription+"\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j+1)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.InsertNewMatchForPath")+Integer.toString(k)+" before position #"+Integer.toString(j)+"\"/>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <select name=\""+seqPrefix+"specfl_i"+instanceDescription+"\">\n"+ " <option value=\"include\">" + Messages.getBodyString(locale,"SharedDriveConnector.Include") + "</option>\n"+ " <option value=\"exclude\">" + Messages.getBodyString(locale,"SharedDriveConnector.Exclude") + "</option>\n"+ " </select>&nbsp;\n"+ " <select name=\""+seqPrefix+"spectin_i"+instanceDescription+"\">\n"+ " <option value=\"\" selected=\"selected\">" + Messages.getBodyString(locale,"SharedDriveConnector.AnyFileOrDirectory") + "</option>\n"+ " <option value=\"file\">" + Messages.getBodyString(locale,"SharedDriveConnector.files") + "</option>\n"+ " <option value=\"indexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.indexableFiles") + "</option>\n"+ " <option value=\"unindexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.unindexableFiles") + "</option>\n"+ " <option value=\"directory\">" + Messages.getBodyString(locale,"SharedDriveConnector.directorys") + "</option>\n"+ " </select>&nbsp;" + Messages.getBodyString(locale,"SharedDriveConnector.matching") + "&nbsp;\n"+ " <input type=\"text\" size=\"20\" name=\""+seqPrefix+"specfile_i"+instanceDescription+"\" value=\"\"/>\n"+ " </nobr>\n"+ " </td>\n"+ "\n"+ " </tr>\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\">\n"+ " <input type=\"button\" value=\"Delete\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+instanceOpName+"\",\"Delete\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeletePath")+Integer.toString(k)+Messages.getAttributeString(locale,"SharedDriveConnector.matchSpec")+Integer.toString(j)+"\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"hidden\" name=\""+instanceOpName+"\" value=\"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfl"+instanceDescription+"\" value=\""+nodeFlavor+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specty"+instanceDescription+"\" value=\""+nodeType+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specin"+instanceDescription+"\" value=\""+indexable+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfile"+instanceDescription+"\" 
value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(filespec)+"\"/>\n"+ " "+Integer.toString(j+1)+".&nbsp;"+(nodeFlavor.equals("include")?"Include":"")+""+(nodeFlavor.equals("exclude")?"Exclude":"")+""+(indexable.equals("yes")?"&nbsp;indexable":"")+""+(indexable.equals("no")?"&nbsp;un-indexable":"")+""+(nodeType.equals("file")?"&nbsp;file(s)":"")+""+(nodeType.equals("directory")?"&nbsp;directory(s)":"")+""+(nodeType.equals("")?"&nbsp;file(s)&nbsp;or&nbsp;directory(s)":"")+"&nbsp;matching&nbsp;"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(filespec)+"\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n" ); j++; } if (j == 0) { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoRulesDefined") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specchildcount"+pathDescription+"\" value=\""+Integer.toString(j)+"\"/>\n"+ " <a name=\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j)+"\">\n"+ " <input type=\"button\" value=\"Add\" onClick='Javascript:"+seqPrefix+"SpecAddSpec(\""+pathDescription+"\",\""+seqPrefix+"filespec_"+Integer.toString(k)+"_"+Integer.toString(j+1)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.AddNewMatchForPath")+Integer.toString(k)+"\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n"+ " <select name=\""+seqPrefix+"specfl"+pathDescription+"\">\n"+ " <option value=\"include\">" + Messages.getBodyString(locale,"SharedDriveConnector.Include") + "</option>\n"+ " <option value=\"exclude\">" + Messages.getBodyString(locale,"SharedDriveConnector.Exclude") + "</option>\n"+ " </select>&nbsp;\n"+ " <select name=\""+seqPrefix+"spectin"+pathDescription+"\">\n"+ " <option value=\"\">" + Messages.getBodyString(locale,"SharedDriveConnector.AnyFileOrDirectory") + "</option>\n"+ " <option value=\"file\">" + Messages.getBodyString(locale,"SharedDriveConnector.files") + "</option>\n"+ " <option value=\"indexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.indexableFiles") + "</option>\n"+ " <option value=\"unindexable-file\">" + Messages.getBodyString(locale,"SharedDriveConnector.unindexableFiles") + "</option>\n"+ " <option value=\"directory\">" + Messages.getBodyString(locale,"SharedDriveConnector.directorys") + "</option>\n"+ " </select>&nbsp;" + Messages.getBodyString(locale,"SharedDriveConnector.matching") + "&nbsp;\n"+ " <input type=\"text\" size=\"20\" name=\""+seqPrefix+"specfile"+pathDescription+"\" value=\"\"/>\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ " </table>\n"+ " </td>\n"+ " </tr>\n"+ " </table>\n"+ " </td>\n"+ " </tr>\n" ); k++; } } if (k == 0) { out.print( " <tr>\n"+ " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoStartingPointsDefined") + "</td>\n"+ " </tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"2\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"value\" colspan=\"2\">\n"+ " <nobr>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"pathcount\" value=\""+Integer.toString(k)+"\"/>\n"+ " <a name=\""+seqPrefix+"path_"+Integer.toString(k)+"\">\n" ); String pathSoFar = (String)currentContext.get(seqPrefix+"specpath"); if (pathSoFar == null) pathSoFar = ""; // Grab next folder/project list try { String[] childList; childList = getChildFolderNames(pathSoFar); if (childList == null) { // Illegal path - set it back pathSoFar = ""; childList = 
getChildFolderNames(""); if (childList == null) throw new ManifoldCFException("Can't find any children for root folder"); } out.print( " <input type=\"hidden\" name=\""+seqPrefix+"specpath\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathSoFar)+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"pathop\" value=\"\"/>\n"+ " <input type=\"button\" value=\"Add\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddPath") + "\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"Add\",\""+seqPrefix+"path_"+Integer.toString(k+1)+"\")'/>\n"+ " &nbsp;"+((pathSoFar.length()==0)?"(root)":org.apache.manifoldcf.ui.util.Encoder.bodyEscape(pathSoFar))+"\n" ); if (pathSoFar.length() > 0) { out.print( " <input type=\"button\" value=\"-\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.RemoveFromPath") + "\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"pathop\",\"Up\",\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>\n" ); } if (childList.length > 0) { out.print( " <nobr>\n"+ " <input type=\"button\" value=\"+\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddPath") + "\" onClick='Javascript:"+seqPrefix+"SpecAddToPath(\""+seqPrefix+"path_"+Integer.toString(k)+"\")'/>&nbsp;\n"+ " <select name=\""+seqPrefix+"pathaddon\">\n"+ " <option value=\"\" selected=\"selected\">" + Messages.getBodyString(locale,"SharedDriveConnector.PickAFolder") + "</option>\n" ); int j = 0; while (j < childList.length) { String folder = org.apache.manifoldcf.ui.util.Encoder.attributeEscape(childList[j]); out.print( " <option value=\""+folder+"\">"+folder+"</option>\n" ); j++; } out.print( " </select>" + Messages.getBodyString(locale,"SharedDriveConnector.orTypeAPath") + " <input type=\"text\" name=\""+seqPrefix+"pathtypein\" size=\"16\" value=\"\"/>\n"+ " </nobr>\n" ); } } catch (ManifoldCFException e) { e.printStackTrace(); out.println(org.apache.manifoldcf.ui.util.Encoder.bodyEscape(e.getMessage())); } out.print( " </a>\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "</table>\n" ); } else { // Generate hiddens for the pathspec tab i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { String pathDescription = "_"+Integer.toString(k); String startPath = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specpath"+pathDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(startPath)+"\"/>\n" ); // Now go through the include/exclude children of this node. 
int j = 0; while (j < sn.getChildCount()) { SpecificationNode excludeNode = sn.getChild(j); String instanceDescription = "_"+Integer.toString(k)+"_"+Integer.toString(j); String nodeFlavor = excludeNode.getType(); String nodeType = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; String filespec = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); String indexable = excludeNode.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (indexable == null) indexable = ""; out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfl"+instanceDescription+"\" value=\""+nodeFlavor+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specty"+instanceDescription+"\" value=\""+nodeType+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specin"+instanceDescription+"\" value=\""+indexable+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfile"+instanceDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(filespec)+"\"/>\n" ); j++; } k++; out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specchildcount"+pathDescription+"\" value=\""+Integer.toString(j)+"\"/>\n" ); } } out.print( "<input type=\"hidden\" name=\""+seqPrefix+"pathcount\" value=\""+Integer.toString(k)+"\"/>\n" ); } // Security tab // Find whether security is on or off i = 0; boolean securityOn = true; boolean shareSecurityOn = true; boolean parentFolderSecurityOn = false; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) securityOn = false; else if (securityValue.equals("on")) securityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) shareSecurityOn = false; else if (securityValue.equals("on")) shareSecurityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) parentFolderSecurityOn = false; else if (securityValue.equals("on")) parentFolderSecurityOn = true; } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Security")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsecurity\" value=\"on\" "+(securityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsecurity\" value=\"off\" 
"+((securityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n" ); // Finally, go through forced ACL i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { String accessDescription = "_"+Integer.toString(k); String accessOpName = seqPrefix+"accessop"+accessDescription; String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( " <tr>\n"+ " <td class=\"description\" colspan=\"1\">\n"+ " <input type=\"hidden\" name=\""+accessOpName+"\" value=\"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"spectoken"+accessDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(token)+"\"/>\n"+ " <a name=\""+seqPrefix+"token_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Delete\" alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteToken")+Integer.toString(k)+"\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+accessOpName+"\",\"Delete\",\""+seqPrefix+"token_"+Integer.toString(k)+"\")'/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(token)+"</nobr>\n"+ " </td>\n"+ " </tr>\n" ); k++; } } if (k == 0) { out.print( " <tr>\n"+ " <td class=\"message\" colspan=\"4\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileAccessTokensPresent") + "</td>\n"+ " </tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"tokencount\" value=\""+Integer.toString(k)+"\"/>\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"accessop\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"token_"+Integer.toString(k)+"\">\n"+ " <input type=\"button\" value=\"Add\" alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToken") + "\" onClick='Javascript:"+seqPrefix+"SpecAddToken(\""+seqPrefix+"token_"+Integer.toString(k+1)+"\")'/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <nobr><input type=\"text\" size=\"30\" name=\""+seqPrefix+"spectoken\" value=\"\"/></nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ShareSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsharesecurity\" value=\"on\" "+(shareSecurityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specsharesecurity\" value=\"off\" "+((shareSecurityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ParentFolderSecurity") + "</nobr></td>\n"+ " <td colspan=\"3\" class=\"value\">\n"+ " <nobr>\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specparentfoldersecurity\" 
value=\"on\" "+(parentFolderSecurityOn?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Enabled") + "&nbsp;\n"+ " <input type=\"radio\" name=\""+seqPrefix+"specparentfoldersecurity\" value=\"off\" "+((parentFolderSecurityOn==false)?"checked=\"true\"":"")+" />" + Messages.getBodyString(locale,"SharedDriveConnector.Disabled") + "\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specsecurity\" value=\""+(securityOn?"on":"off")+"\"/>\n" ); // Finally, go through forced ACL i = 0; k = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { String accessDescription = "_"+Integer.toString(k); String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"spectoken"+accessDescription+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(token)+"\"/>\n" ); k++; } } out.print( "<input type=\"hidden\" name=\""+seqPrefix+"tokencount\" value=\""+Integer.toString(k)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specsharesecurity\" value=\""+(shareSecurityOn?"on":"off")+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specparentfoldersecurity\" value=\""+(parentFolderSecurityOn?"on":"off")+"\"/>\n" ); } // Metadata tab // Find the path-value metadata attribute name // Find the path-value mapping data i = 0; String pathNameAttribute = ""; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap matchMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) { pathNameAttribute = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } else if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.Metadata")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmappingcount\" value=\""+Integer.toString(matchMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specmappingop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\" colspan=\"1\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.PathAttributeName") + "</nobr></td>\n"+ " <td class=\"value\" colspan=\"3\">\n"+ " <input type=\"text\" name=\""+seqPrefix+"specpathnameattribute\" size=\"20\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathNameAttribute)+"\"/>\n"+ " </td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = 
matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specmappingop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"mapping_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specmappingop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"mapping_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoMappingsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"mapping_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddMapping(\""+seqPrefix+"mapping_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specmatch\" size=\"32\" value=\"\"/></nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" name=\""+seqPrefix+"specreplace\" size=\"32\" value=\"\"/></nobr></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specpathnameattribute\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(pathNameAttribute)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specmappingcount\" value=\""+Integer.toString(matchMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } // File and URL Mapping tabs // Find the filename mapping data // Find the URL mapping data org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap fileMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap uriMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } else if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.FileMapping")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapcount\" value=\""+Integer.toString(fileMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfmapop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specfmapop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"fmap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specfmapop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"fmap_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteFileMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specfmapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><input type=\"hidden\" name=\""+seqPrefix+"specfmapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileMappingsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"fmap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddFMap(\""+seqPrefix+"fmap_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToFileMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specfmapmatch\" size=\"32\" value=\"\"/></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" 
name=\""+seqPrefix+"specfmapreplace\" size=\"32\" value=\"\"/></td>\n"+ " </tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapcount\" value=\""+Integer.toString(fileMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specfmapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specfmapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } if (tabName.equals(Messages.getString(locale,"SharedDriveConnector.URLMapping")) && connectionSequenceNumber == actualSequenceNumber) { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapcount\" value=\""+Integer.toString(uriMap.getMatchCount())+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specumapop\" value=\"\"/>\n"+ "<table class=\"displaytable\">\n"+ " <tr><td class=\"separator\" colspan=\"4\"><hr/></td></tr>\n" ); i = 0; while (i < uriMap.getMatchCount()) { String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapop_"+Integer.toString(i)+"\" value=\"\"/>\n"+ " <a name=\""+seqPrefix+"umap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecOp(\""+seqPrefix+"specumapop_"+Integer.toString(i)+"\",\"Delete\",\""+seqPrefix+"umap_"+Integer.toString(i)+"\")' alt=\""+Messages.getAttributeString(locale,"SharedDriveConnector.DeleteUrlMapping")+Integer.toString(i)+"\" value=\"Delete\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"\n"+ " </td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">\n"+ " <input type=\"hidden\" name=\""+seqPrefix+"specumapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"\n"+ " </td>\n"+ " </tr>\n" ); i++; } if (i == 0) { out.print( " <tr><td colspan=\"4\" class=\"message\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoURLMappingsSpecifiedWillProduceAFileIRI") + "</td></tr>\n" ); } out.print( " <tr><td class=\"lightseparator\" colspan=\"4\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"value\">\n"+ " <a name=\""+seqPrefix+"umap_"+Integer.toString(i)+"\">\n"+ " <input type=\"button\" onClick='Javascript:"+seqPrefix+"SpecAddUMap(\""+seqPrefix+"umap_"+Integer.toString(i+1)+"\")' alt=\"" + Messages.getAttributeString(locale,"SharedDriveConnector.AddToURLMappings") + "\" value=\"Add\"/>\n"+ " </a>\n"+ " </td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.MatchRegexp") + "<input type=\"text\" name=\""+seqPrefix+"specumapmatch\" size=\"32\" value=\"\"/></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\">" + Messages.getBodyString(locale,"SharedDriveConnector.ReplaceString") + "<input type=\"text\" name=\""+seqPrefix+"specumapreplace\" size=\"32\" value=\"\"/></td>\n"+ " 
</tr>\n"+ "</table>\n" ); } else { out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapcount\" value=\""+Integer.toString(uriMap.getMatchCount())+"\"/>\n" ); i = 0; while (i < uriMap.getMatchCount()) { String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( "<input type=\"hidden\" name=\""+seqPrefix+"specumapmatch_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(matchString)+"\"/>\n"+ "<input type=\"hidden\" name=\""+seqPrefix+"specumapreplace_"+Integer.toString(i)+"\" value=\""+org.apache.manifoldcf.ui.util.Encoder.attributeEscape(replaceString)+"\"/>\n" ); i++; } } } /** Process a specification post. * This method is called at the start of job's edit or view page, whenever there is a possibility that form * data for a connection has been posted. Its purpose is to gather form information and modify the * document specification accordingly. The name of the posted form is always "editjob". * The connector will be connected before this method can be called. *@param variableContext contains the post data, including binary file-upload information. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. *@return null if all is well, or a string error message if there is an error that should prevent saving of * the job (and cause a redirection to an error page). */ @Override public String processSpecificationPost(IPostParameters variableContext, Locale locale, Specification ds, int connectionSequenceNumber) throws ManifoldCFException { String seqPrefix = "s"+connectionSequenceNumber+"_"; String x = variableContext.getParameter(seqPrefix+"pathcount"); if (x != null) { // Delete all path specs first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) ds.removeChild(i); else i++; } // Find out how many children were sent int pathCount = Integer.parseInt(x); // Gather up these i = 0; while (i < pathCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"pathop"+pathDescription; x = variableContext.getParameter(pathOpName); if (x != null && x.equals("Delete")) { // Skip to the next i++; continue; } // Path inserts won't happen until the very end String path = variableContext.getParameter(seqPrefix+"specpath"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH,path); // Now, get the number of children String y = variableContext.getParameter(seqPrefix+"specchildcount"+pathDescription); int childCount = Integer.parseInt(y); int j = 0; int w = 0; while (j < childCount) { String instanceDescription = "_"+Integer.toString(i)+"_"+Integer.toString(j); // Look for an insert or a delete at this point String instanceOp = seqPrefix+"specop"+instanceDescription; String z = variableContext.getParameter(instanceOp); String flavor; String type; String indexable; String match; SpecificationNode sn; if (z != null && z.equals("Delete")) { // Process the deletion as we gather j++; continue; } if (z != null && z.equals("Insert Here")) { // Process the insertion as we gather. 
flavor = variableContext.getParameter(seqPrefix+"specfl_i"+instanceDescription); indexable = ""; type = ""; String xxx = variableContext.getParameter(seqPrefix+"spectin_i"+instanceDescription); if (xxx.equals("file") || xxx.equals("directory")) type = xxx; else if (xxx.equals("indexable-file")) { indexable = "yes"; type = "file"; } else if (xxx.equals("unindexable-file")) { indexable = "no"; type = "file"; } match = variableContext.getParameter(seqPrefix+"specfile_i"+instanceDescription); sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w++,sn); } flavor = variableContext.getParameter(seqPrefix+"specfl"+instanceDescription); type = variableContext.getParameter(seqPrefix+"specty"+instanceDescription); match = variableContext.getParameter(seqPrefix+"specfile"+instanceDescription); indexable = variableContext.getParameter(seqPrefix+"specin"+instanceDescription); sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w++,sn); j++; } if (x != null && x.equals("Add")) { // Process adds to the end of the rules in-line String match = variableContext.getParameter(seqPrefix+"specfile"+pathDescription); String indexable = ""; String type = ""; String xxx = variableContext.getParameter(seqPrefix+"spectin"+pathDescription); if (xxx.equals("file") || xxx.equals("directory")) type = xxx; else if (xxx.equals("indexable-file")) { indexable = "yes"; type = "file"; } else if (xxx.equals("unindexable-file")) { indexable = "no"; type = "file"; } String flavor = variableContext.getParameter(seqPrefix+"specfl"+pathDescription); SpecificationNode sn = new SpecificationNode(flavor); if (type != null && type.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,type); if (indexable != null && indexable.length() > 0) sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,indexable); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,match); node.addChild(w,sn); } ds.addChild(ds.getChildCount(),node); i++; } // See if there's a global add operation String op = variableContext.getParameter(seqPrefix+"pathop"); if (op != null && op.equals("Add")) { String path = variableContext.getParameter(seqPrefix+"specpath"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH,path); ds.addChild(ds.getChildCount(),node); // Now add in the defaults; these will be "include all directories" and "include all indexable 
files". SpecificationNode sn = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,"file"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE,"yes"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,"*"); node.addChild(node.getChildCount(),sn); sn = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE,"directory"); sn.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC,"*"); node.addChild(node.getChildCount(),sn); } else if (op != null && op.equals("Up")) { // Strip off end String path = variableContext.getParameter(seqPrefix+"specpath"); int k = path.lastIndexOf("/"); if (k == -1) path = ""; else path = path.substring(0,k); currentContext.save(seqPrefix+"specpath",path); } else if (op != null && op.equals("AddToPath")) { String path = variableContext.getParameter(seqPrefix+"specpath"); String addon = variableContext.getParameter(seqPrefix+"pathaddon"); String typein = variableContext.getParameter(seqPrefix+"pathtypein"); if (addon != null && addon.length() > 0) { if (path.length() == 0) path = addon; else path += "/" + addon; } else if (typein != null && typein.length() > 0) { String trialPath = path; if (trialPath.length() == 0) trialPath = typein; else trialPath += "/" + typein; // Validate trial path try { trialPath = validateFolderName(trialPath); if (trialPath != null) path = trialPath; } catch (ManifoldCFException e) { // Effectively, this just means we can't add a typein to the path right now. 
} } currentContext.save(seqPrefix+"specpath",path); } } x = variableContext.getParameter(seqPrefix+"specmaxlength"); if (x != null) { // Delete max length entry int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) ds.removeChild(i); else i++; } if (x.length() > 0) { SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } } x = variableContext.getParameter(seqPrefix+"specsecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } x = variableContext.getParameter(seqPrefix+"tokencount"); if (x != null) { // Delete all file specs first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) ds.removeChild(i); else i++; } int accessCount = Integer.parseInt(x); i = 0; while (i < accessCount) { String accessDescription = "_"+Integer.toString(i); String accessOpName = seqPrefix+"accessop"+accessDescription; x = variableContext.getParameter(accessOpName); if (x != null && x.equals("Delete")) { // Next row i++; continue; } // Get the stuff we need String accessSpec = variableContext.getParameter(seqPrefix+"spectoken"+accessDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN,accessSpec); ds.addChild(ds.getChildCount(),node); i++; } String op = variableContext.getParameter(seqPrefix+"accessop"); if (op != null && op.equals("Add")) { String accessspec = variableContext.getParameter(seqPrefix+"spectoken"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN,accessspec); ds.addChild(ds.getChildCount(),node); } } x = variableContext.getParameter(seqPrefix+"specsharesecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } x = 
variableContext.getParameter(seqPrefix+"specparentfoldersecurity"); if (x != null) { // Delete all security entries first int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) ds.removeChild(i); else i++; } SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,x); ds.addChild(ds.getChildCount(),node); } String xc = variableContext.getParameter(seqPrefix+"specpathnameattribute"); if (xc != null) { // Delete old one int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) ds.removeChild(i); else i++; } if (xc.length() > 0) { SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE,xc); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specmappingcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specmappingop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specmappingop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specmatch"); String replace = variableContext.getParameter(seqPrefix+"specreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specfmapcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specfmapop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specfmapmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specfmapreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specfmapop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specfmapmatch"); String replace = variableContext.getParameter(seqPrefix+"specfmapreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } xc = variableContext.getParameter(seqPrefix+"specumapcount"); if (xc != null) { // Delete old spec int i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) ds.removeChild(i); else i++; } // Now, go through the data and assemble a new list. 
int mappingCount = Integer.parseInt(xc); // Gather up these i = 0; while (i < mappingCount) { String pathDescription = "_"+Integer.toString(i); String pathOpName = seqPrefix+"specumapop"+pathDescription; xc = variableContext.getParameter(pathOpName); if (xc != null && xc.equals("Delete")) { // Skip to the next i++; continue; } // Inserts won't happen until the very end String match = variableContext.getParameter(seqPrefix+"specumapmatch"+pathDescription); String replace = variableContext.getParameter(seqPrefix+"specumapreplace"+pathDescription); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); i++; } // Check for add xc = variableContext.getParameter(seqPrefix+"specumapop"); if (xc != null && xc.equals("Add")) { String match = variableContext.getParameter(seqPrefix+"specumapmatch"); String replace = variableContext.getParameter(seqPrefix+"specumapreplace"); SpecificationNode node = new SpecificationNode(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH,match); node.setAttribute(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE,replace); ds.addChild(ds.getChildCount(),node); } } return null; } /** View specification. * This method is called in the body section of a job's view page. Its purpose is to present the document * specification information to the user. The coder can presume that the HTML that is output from * this configuration will be within appropriate &lt;html&gt; and &lt;body&gt;tags. * The connector will be connected before this method can be called. *@param out is the output to which any HTML should be sent. *@param locale is the locale the output is preferred to be in. *@param ds is the current document specification for this job. *@param connectionSequenceNumber is the unique number of this connection within the job. */ @Override public void viewSpecification(IHTTPOutput out, Locale locale, Specification ds, int connectionSequenceNumber) throws ManifoldCFException, IOException { out.print( "<table class=\"displaytable\">\n" ); int i = 0; boolean seenAny = false; while (i < ds.getChildCount()) { SpecificationNode spn = ds.getChild(i++); if (spn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_STARTPOINT)) { if (seenAny == false) { seenAny = true; } out.print( " <tr>\n"+ " <td class=\"description\">\n"+ " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(spn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_PATH))+":"+"</nobr>\n"+ " </td>\n"+ " <td class=\"value\">\n" ); int j = 0; while (j < spn.getChildCount()) { SpecificationNode sn = spn.getChild(j++); // This is "include" or "exclude" String nodeFlavor = sn.getType(); // This is the file/directory name match String filespec = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_FILESPEC); // This has a value of null, "", "file", or "directory". 
String nodeType = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TYPE); if (nodeType == null) nodeType = ""; // This has a value of null, "", "yes", or "no". String ingestableFlag = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_INDEXABLE); if (ingestableFlag == null) ingestableFlag = ""; out.print( " <nobr>\n"+ " "+Integer.toString(j)+".\n"+ " "+(nodeFlavor.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_INCLUDE)?"Include":"")+"\n"+ " "+(nodeFlavor.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_EXCLUDE)?"Exclude":"")+"\n"+ " "+(ingestableFlag.equals("yes")?"&nbsp;indexable":"")+"\n"+ " "+(ingestableFlag.equals("no")?"&nbsp;un-indexable":"")+"\n"+ " "+(nodeType.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.VALUE_FILE)?"&nbsp;file(s)":"")+"\n"+ " "+(nodeType.equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.VALUE_DIRECTORY)?"&nbsp;directory(s)":"")+"\n"+ " "+(nodeType.equals("")?"&nbsp;file(s)&nbsp;or&nbsp;directory(s)":"")+"&nbsp;matching&nbsp;\n"+ " "+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(filespec)+"\n"+ " </nobr>\n"+ " <br/>\n" ); } out.print( " </td>\n"+ " </tr>\n" ); } } if (seenAny == false) { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoDocumentsSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n" ); // Find whether security is on or off i = 0; boolean securityOn = true; boolean shareSecurityOn = true; boolean parentFolderSecurityOn = false; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) securityOn = false; else if (securityValue.equals("on")) securityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_SHARESECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) shareSecurityOn = false; else if (securityValue.equals("on")) shareSecurityOn = true; } if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PARENTFOLDERSECURITY)) { String securityValue = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); if (securityValue.equals("off")) parentFolderSecurityOn = false; else if (securityValue.equals("on")) parentFolderSecurityOn = true; } } out.print( "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(securityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Go through looking for access tokens seenAny = false; i = 0; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if 
(sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_ACCESS)) { if (seenAny == false) { out.print( " <tr><td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileAccessTokens") + "</nobr></td>\n"+ " <td class=\"value\">\n" ); seenAny = true; } String token = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_TOKEN); out.print( " <nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(token)+"</nobr><br/>\n" ); } } if (seenAny) { out.print( " </td>\n"+ " </tr>\n" ); } else { out.print( " <tr><td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileAccessTokensSpecified") + "</td></tr>\n" ); } out.print( " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ShareSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(shareSecurityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ " \n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.ParentFolderSecurity") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+(parentFolderSecurityOn?Messages.getBodyString(locale,"SharedDriveConnector.Enabled"):Messages.getBodyString(locale,"SharedDriveConnector.Disabled"))+"</nobr></td>\n"+ " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n" ); // Find the path-name metadata attribute name i = 0; String pathNameAttribute = ""; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHNAMEATTRIBUTE)) { pathNameAttribute = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } } out.print( " <tr>\n" ); if (pathNameAttribute.length() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.PathNameMetadataAttribute") + "</nobr></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(pathNameAttribute)+"</nobr></td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoPathNameMetadataAttributeSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ "\n" ); // Find the path-value mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap matchMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_PATHMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); matchMap.appendMatchPair(pathMatch,pathReplace); } } if (matchMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + 
Messages.getBodyString(locale,"SharedDriveConnector.PathValueMapping") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < matchMap.getMatchCount()) { String matchString = matchMap.getMatchString(i); String replaceString = matchMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoMappingsSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n" ); // Find the file name mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap fileMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_FILEMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); fileMap.appendMatchPair(pathMatch,pathReplace); } } if (fileMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.FileNameMapping") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < fileMap.getMatchCount()) { String matchString = fileMap.getMatchString(i); String replaceString = fileMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoFileNameMappingsSpecified") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n" ); // Find the url mapping data i = 0; org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap uriMap = new org.apache.manifoldcf.crawler.connectors.sharedrive.MatchMap(); while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_URIMAP)) { String pathMatch = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_MATCH); String pathReplace = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_REPLACE); uriMap.appendMatchPair(pathMatch,pathReplace); } } if (uriMap.getMatchCount() > 0) { out.print( " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.URLMappingColon") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <table class=\"displaytable\">\n" ); i = 0; while (i < uriMap.getMatchCount()) 
{ String matchString = uriMap.getMatchString(i); String replaceString = uriMap.getReplaceString(i); out.print( " <tr>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(matchString)+"</nobr></td>\n"+ " <td class=\"value\">==></td>\n"+ " <td class=\"value\"><nobr>"+org.apache.manifoldcf.ui.util.Encoder.bodyEscape(replaceString)+"</nobr></td>\n"+ " </tr>\n" ); i++; } out.print( " </table>\n"+ " </td>\n" ); } else { out.print( " <td class=\"message\" colspan=\"2\">" + Messages.getBodyString(locale,"SharedDriveConnector.NoURLMappingsSpecifiedWillProduceAFileIRI") + "</td>\n" ); } out.print( " </tr>\n"+ "\n"+ " <tr><td class=\"separator\" colspan=\"2\"><hr/></td></tr>\n"+ "\n"+ " <tr>\n"+ " <td class=\"description\"><nobr>" + Messages.getBodyString(locale,"SharedDriveConnector.MaximumDocumentLength") + "</nobr></td>\n"+ " <td class=\"value\">\n"+ " <nobr>\n" ); // Find the path-value mapping data i = 0; String maxLength = null; while (i < ds.getChildCount()) { SpecificationNode sn = ds.getChild(i++); if (sn.getType().equals(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.NODE_MAXLENGTH)) { maxLength = sn.getAttributeValue(org.apache.manifoldcf.crawler.connectors.sharedrive.SharedDriveConnector.ATTRIBUTE_VALUE); } } if (maxLength == null || maxLength.length() == 0) maxLength = "Unlimited"; out.print( " "+maxLength+"\n"+ " </nobr>\n"+ " </td>\n"+ " </tr>\n"+ "</table>\n" ); } /* The following are additional methods used by the UI */ /** * given a server uri, return all shares * * @param serverURI - * @return an array of SmbFile */ public SmbFile[] getShareNames(String serverURI) throws ManifoldCFException { getSession(); SmbFile server = null; try { server = new SmbFile(serverURI,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("MalformedURLException tossed",e1); } SmbFile[] shares = null; try { // check to make sure it's a server if (getFileType(server)==SmbFile.TYPE_SERVER) { shares = fileListFiles(server,new ShareFilter()); } } catch (SmbException e) { throw new ManifoldCFException("SmbException tossed: "+e.getMessage(),e); } return shares; } /** * Given a folder path, determine if the folder is in fact legal and accessible (and is a folder). * @param folder is the relative folder from the network root * @return the canonical folder name if valid, or null if not. 
* @throws ManifoldCFException */ public String validateFolderName(String folder) throws ManifoldCFException { getSession(); //create new connection by appending to the old connection String smburi = smbconnectionPath; String uri = smburi; if (folder.length() > 0) { uri = smburi + folder + "/"; } SmbFile currentDirectory = null; try { currentDirectory = new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("validateFolderName: Can't get parent file: " + uri,e1); } try { currentDirectory.connect(); if (fileIsDirectory(currentDirectory) == false) return null; String newCanonicalPath = currentDirectory.getCanonicalPath(); String rval = newCanonicalPath.substring(smburi.length()); if (rval.endsWith("/")) rval = rval.substring(0,rval.length()-1); return rval; } catch (SmbException se) { try { processSMBException(se, folder, "checking folder", "getting canonical path"); return null; } catch (ServiceInterruption si) { throw new ManifoldCFException("Service interruption: "+si.getMessage(),si); } } catch (MalformedURLException e) { throw new ManifoldCFException("MalformedURLException tossed: "+e.getMessage(),e); } catch (java.net.SocketTimeoutException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } } /** * given a smb uri, return all children directories * * @param folder is the relative folder from the network root * @return array of child folder names * @throws ManifoldCFException */ public String[] getChildFolderNames(String folder) throws ManifoldCFException { getSession(); //create new connection by appending to the old connection String smburi = smbconnectionPath; String uri = smburi; if (folder.length() > 0) { uri = smburi + folder + "/"; } SmbFile currentDirectory = null; try { currentDirectory = new SmbFile(uri,SingletonContext.getInstance().withCredentials(pa)); } catch (MalformedURLException e1) { throw new ManifoldCFException("getChildFolderNames: Can't get parent file: " + uri,e1); } // add DFS support SmbFile[] children = null; try { currentDirectory.connect(); children = currentDirectory.listFiles(new DirectoryFilter()); } catch (SmbException se) { try { processSMBException(se, folder, "getting child folder names", "listing files"); children = new SmbFile[0]; } catch (ServiceInterruption si) { throw new ManifoldCFException("Service interruption: "+si.getMessage(),si); } } catch (MalformedURLException e) { throw new ManifoldCFException("MalformedURLException tossed: "+e.getMessage(),e); } catch (java.net.SocketTimeoutException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } catch (InterruptedIOException e) { throw new ManifoldCFException("Interrupted: "+e.getMessage(),e,ManifoldCFException.INTERRUPTED); } catch (IOException e) { throw new ManifoldCFException("IOException tossed: "+e.getMessage(),e); } // populate a String array String[] directories = new String[children.length]; for (int i=0;i < children.length;i++){ String directoryName = children[i].getName(); // strip the trailing slash directoryName = directoryName.replaceAll("/",""); directories[i] = directoryName; } java.util.Arrays.sort(directories); return directories; } /** * inner class which returns only shares. 
used by listfiles(SmbFileFilter) * * @author James Maupin */ class ShareFilter implements SmbFileFilter { /* (non-Javadoc) * @see jcifs.smb.SmbFileFilter#accept(jcifs.smb.SmbFile) */ public boolean accept(SmbFile arg0) throws SmbException { if (getFileType(arg0)==SmbFile.TYPE_SHARE){ return true; } else { return false; } } } /** * inner class which returns only directories. used by listfiles(SmbFileFilter) * * @author James Maupin */ class DirectoryFilter implements SmbFileFilter { /* (non-Javadoc) * @see jcifs.smb.SmbFileFilter#accept(jcifs.smb.SmbFile) */ public boolean accept(SmbFile arg0) throws SmbException { int type = getFileType(arg0); if (type==SmbFile.TYPE_SHARE || (type==SmbFile.TYPE_FILESYSTEM && fileIsDirectory(arg0))){ return true; } else { return false; } } } /** This is the filter class that actually receives the files in batches. We do it this way * so that the client won't run out of memory loading a huge directory. */ protected class ProcessDocumentsFilter implements SmbFileFilter { /** This is the activities object, where matching references will be logged */ protected final IProcessActivity activities; /** Document specification */ protected final Specification spec; /** Exceptions that we saw. These are saved here so that they can be rethrown when done */ protected ManifoldCFException lcfException = null; protected ServiceInterruption serviceInterruption = null; /** Constructor */ public ProcessDocumentsFilter(IProcessActivity activities, Specification spec) { this.activities = activities; this.spec = spec; } /** Decide if we accept the file. This is where we will actually do the work. */ public boolean accept(SmbFile f) throws SmbException { if (lcfException != null || serviceInterruption != null) return false; try { int type = f.getType(); if (type != SmbFile.TYPE_SERVER && type != SmbFile.TYPE_FILESYSTEM && type != SmbFile.TYPE_SHARE) return false; String canonicalPath = getFileCanonicalPath(f); if (canonicalPath != null) { // manipulate path to include the DFS alias, not the literal path // String newPath = matchPrefix + canonicalPath.substring(matchReplace.length()); String newPath = canonicalPath; // Check against the current specification. This is a nicety to avoid queuing // documents that we will immediately turn around and remove. However, if this // check was not here, everything should still function, provided the getDocumentVersions() // method does the right thing. 
boolean fileIsDirectory = fileIsDirectory(f); if (checkInclude(fileIsDirectory, newPath, spec)) { if (fileIsDirectory) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Recorded path is '" + newPath + "' and is included."); activities.addDocumentReference(newPath); } else { long fileLength = fileLength(f); if (checkIncludeFile(fileLength, newPath, spec, activities)) { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Recorded path is '" + newPath + "' and is included."); activities.addDocumentReference(newPath); } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Recorded path '"+newPath+"' is excluded!"); } } } else { if (Logging.connectors.isDebugEnabled()) Logging.connectors.debug("JCIFS: Recorded path '"+newPath+"' is excluded!"); } } else Logging.connectors.debug("JCIFS: Excluding a child file because canonical path is null"); return false; } catch (ManifoldCFException e) { if (lcfException == null) lcfException = e; return false; } catch (ServiceInterruption e) { if (serviceInterruption == null) serviceInterruption = e; return false; } } /** Check for exception, and throw if there is one */ public void checkAndThrow() throws ServiceInterruption, ManifoldCFException { if (lcfException != null) throw lcfException; if (serviceInterruption != null) throw serviceInterruption; } } }
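The path-mapping, File Mapping, and URL Mapping tabs above all gather ordered regex match/replace pairs that the connector later applies through its MatchMap class. As an illustration of the general idea only, a first-match-wins translation over such pairs can be written with plain java.util.regex; the class name PathMappingSketch, its translate() method, and the sample expressions below are hypothetical and are not the connector's actual MatchMap implementation.

// Illustrative sketch only; not part of SharedDriveConnector or MatchMap.
// Shows how an ordered list of regex match/replace pairs (like the ones the
// specmatch/specreplace, specfmap*, and specumap* form fields collect) could
// be used to rewrite a path, with the first matching expression winning.
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PathMappingSketch
{
  private final List<Pattern> patterns = new ArrayList<Pattern>();
  private final List<String> replacements = new ArrayList<String>();

  /** Record one match/replace pair, in evaluation order. */
  public void appendMatchPair(String match, String replace)
  {
    patterns.add(Pattern.compile(match));
    replacements.add(replace);
  }

  /** Translate a path using the first pair whose regex matches;
  * return the input unchanged if no pair matches. */
  public String translate(String path)
  {
    for (int i = 0; i < patterns.size(); i++)
    {
      Matcher m = patterns.get(i).matcher(path);
      if (m.find())
        return m.replaceAll(replacements.get(i));
    }
    return path;
  }

  public static void main(String[] args)
  {
    PathMappingSketch map = new PathMappingSketch();
    // Hypothetical mapping: rewrite an SMB prefix into a web URL prefix.
    map.appendMatchPair("^smb://fileserver/share/", "http://docs.example.com/");
    System.out.println(map.translate("smb://fileserver/share/folder/file.doc"));
  }
}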
apache-2.0
gatapia/nclosure
lib/third_party/node/docs/symbols/node.global.html
29311
<!DOCTYPE html> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta charset="utf8"></meta> <meta name="generator" content="JsDoc Toolkit"></meta> <title>node.global | .</title> <meta name="viewport" content="width=device-width; initial-scale=1.0; maximum-scale=1.0;"></meta> <meta name="mobileoptimized" content="0"></meta> <link rel="stylesheet" href="../css/prettify_wynn.css" media="all"></link> <link rel="stylesheet" href="../css/all.css" media="all"></link> <link rel="stylesheet" href="../css/handheld.css" media="handheld"></link> <link rel="stylesheet" href="../css/handheld.css" media="only screen and (max-width: 660px)"></link> <link rel="stylesheet" href="../css/screen.css" media="screen and (min-width: 661px)"></link> <script src="../javascript/all.js"></script> <script src="../javascript/prettify.js"></script> <!--[if lt IE 9]> <script src="../javascript/html5.js"></script> <![endif]--> </head> <body> <div class="index"> <div class="menu"> <div class="indexLinks"> <a href="../index.html">Classes</a> <a href="../files.html">Files</a> </div> <h2 class="heading1">Classes</h2> <input type="text" name="classFilter" class="classFilter" id="ClassFilter" placeholder="Filter"></input> <nav> <ul class="classList" id="ClassList"> <li><a href="../symbols/node.html">node</a></li> <li><a href="../symbols/node.assert.html">node<span class='break'> </span>.<span class='break'> </span>assert</a></li> <li><a href="../symbols/node.assert.AssertionError.html">node<span class='break'> </span>.<span class='break'> </span>assert<span class='break'> </span>.<span class='break'> </span>AssertionError</a></li> <li><a href="../symbols/node.buffer.html">node<span class='break'> </span>.<span class='break'> </span>buffer</a></li> <li><a href="../symbols/node.buffer.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>buffer<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.buffer.SlowBuffer.html">node<span class='break'> </span>.<span class='break'> </span>buffer<span class='break'> </span>.<span class='break'> </span>SlowBuffer</a></li> <li><a href="../symbols/node.child_process.html">node<span class='break'> </span>.<span class='break'> </span>child_process</a></li> <li><a href="../symbols/node.console.html">node<span class='break'> </span>.<span class='break'> </span>console</a></li> <li><a href="../symbols/node.constants.html">node<span class='break'> </span>.<span class='break'> </span>constants</a></li> <li><a href="../symbols/node.crypto.html">node<span class='break'> </span>.<span class='break'> </span>crypto</a></li> <li><a href="../symbols/node.crypto.Cipher.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Cipher</a></li> <li><a href="../symbols/node.crypto.Cipher.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Cipher<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.crypto.Credentials.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Credentials</a></li> <li><a href="../symbols/node.crypto.Decipher.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Decipher</a></li> <li><a 
href="../symbols/node.crypto.Decipher.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Decipher<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.crypto.Hash.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Hash</a></li> <li><a href="../symbols/node.crypto.Hmac.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Hmac</a></li> <li><a href="../symbols/node.crypto.Hmac.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Hmac<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.crypto.Sign.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Sign</a></li> <li><a href="../symbols/node.crypto.Sign.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Sign<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.crypto.Verify.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Verify</a></li> <li><a href="../symbols/node.crypto.Verify.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>crypto<span class='break'> </span>.<span class='break'> </span>Verify<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.dgram.html">node<span class='break'> </span>.<span class='break'> </span>dgram</a></li> <li><a href="../symbols/node.dgram.Socket.html">node<span class='break'> </span>.<span class='break'> </span>dgram<span class='break'> </span>.<span class='break'> </span>Socket</a></li> <li><a href="../symbols/node.dns.html">node<span class='break'> </span>.<span class='break'> </span>dns</a></li> <li><a href="../symbols/node.events.html">node<span class='break'> </span>.<span class='break'> </span>events</a></li> <li><a href="../symbols/node.events.EventEmitter.html">node<span class='break'> </span>.<span class='break'> </span>events<span class='break'> </span>.<span class='break'> </span>EventEmitter</a></li> <li><a href="../symbols/node.freelist.html">node<span class='break'> </span>.<span class='break'> </span>freelist</a></li> <li><a href="../symbols/node.freelist.FreeList.html">node<span class='break'> </span>.<span class='break'> </span>freelist<span class='break'> </span>.<span class='break'> </span>FreeList</a></li> <li><a href="../symbols/node.fs.html">node<span class='break'> </span>.<span class='break'> </span>fs</a></li> <li><a href="../symbols/node.fs.FileReadStream.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>FileReadStream</a></li> <li><a href="../symbols/node.fs.FileWriteStream.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>FileWriteStream</a></li> <li><a href="../symbols/node.fs.ReadStream.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>ReadStream</a></li> <li><a 
href="../symbols/node.fs.Stats.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>Stats</a></li> <li><a href="../symbols/node.fs.Stats.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>Stats<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.fs.WriteStream.html">node<span class='break'> </span>.<span class='break'> </span>fs<span class='break'> </span>.<span class='break'> </span>WriteStream</a></li> <li><a href="../symbols/node.global.html">node<span class='break'> </span>.<span class='break'> </span>global</a></li> <li><a href="../symbols/node.global.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>global<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.http.html">node<span class='break'> </span>.<span class='break'> </span>http</a></li> <li><a href="../symbols/node.http.Agent.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>Agent</a></li> <li><a href="../symbols/node.http.Client.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>Client</a></li> <li><a href="../symbols/node.http.ClientRequest.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>ClientRequest</a></li> <li><a href="../symbols/node.http.IncomingMessage.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>IncomingMessage</a></li> <li><a href="../symbols/node.http.OutgoingMessage.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>OutgoingMessage</a></li> <li><a href="../symbols/node.http.Server.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>Server</a></li> <li><a href="../symbols/node.http.ServerResponse.html">node<span class='break'> </span>.<span class='break'> </span>http<span class='break'> </span>.<span class='break'> </span>ServerResponse</a></li> <li><a href="../symbols/node.https.html">node<span class='break'> </span>.<span class='break'> </span>https</a></li> <li><a href="../symbols/node.https.Server.html">node<span class='break'> </span>.<span class='break'> </span>https<span class='break'> </span>.<span class='break'> </span>Server</a></li> <li><a href="../symbols/node.module.html">node<span class='break'> </span>.<span class='break'> </span>module</a></li> <li><a href="../symbols/node.net.html">node<span class='break'> </span>.<span class='break'> </span>net</a></li> <li><a href="../symbols/node.net.Server.html">node<span class='break'> </span>.<span class='break'> </span>net<span class='break'> </span>.<span class='break'> </span>Server</a></li> <li><a href="../symbols/node.net.Socket.html">node<span class='break'> </span>.<span class='break'> </span>net<span class='break'> </span>.<span class='break'> </span>Socket</a></li> <li><a href="../symbols/node.net.Stream.html">node<span class='break'> </span>.<span class='break'> </span>net<span class='break'> </span>.<span class='break'> </span>Stream</a></li> <li><a href="../symbols/node.os.html">node<span class='break'> 
</span>.<span class='break'> </span>os</a></li> <li><a href="../symbols/node.path.html">node<span class='break'> </span>.<span class='break'> </span>path</a></li> <li><a href="../symbols/node.process.html">node<span class='break'> </span>.<span class='break'> </span>process</a></li> <li><a href="../symbols/node.process.EventEmitter.html">node<span class='break'> </span>.<span class='break'> </span>process<span class='break'> </span>.<span class='break'> </span>EventEmitter</a></li> <li><a href="../symbols/node.process.EventEmitter.Buffer.html">node<span class='break'> </span>.<span class='break'> </span>process<span class='break'> </span>.<span class='break'> </span>EventEmitter<span class='break'> </span>.<span class='break'> </span>Buffer</a></li> <li><a href="../symbols/node.querystring.html">node<span class='break'> </span>.<span class='break'> </span>querystring</a></li> <li><a href="../symbols/node.readline.html">node<span class='break'> </span>.<span class='break'> </span>readline</a></li> <li><a href="../symbols/node.readline.Interface.html">node<span class='break'> </span>.<span class='break'> </span>readline<span class='break'> </span>.<span class='break'> </span>Interface</a></li> <li><a href="../symbols/node.repl.html">node<span class='break'> </span>.<span class='break'> </span>repl</a></li> <li><a href="../symbols/node.repl.REPLServer.html">node<span class='break'> </span>.<span class='break'> </span>repl<span class='break'> </span>.<span class='break'> </span>REPLServer</a></li> <li><a href="../symbols/node.stream.html">node<span class='break'> </span>.<span class='break'> </span>stream</a></li> <li><a href="../symbols/node.stream.Stream.html">node<span class='break'> </span>.<span class='break'> </span>stream<span class='break'> </span>.<span class='break'> </span>Stream</a></li> <li><a href="../symbols/node.string_decoder.html">node<span class='break'> </span>.<span class='break'> </span>string_decoder</a></li> <li><a href="../symbols/node.string_decoder.StringDecoder.html">node<span class='break'> </span>.<span class='break'> </span>string_decoder<span class='break'> </span>.<span class='break'> </span>StringDecoder</a></li> <li><a href="../symbols/node.sys.html">node<span class='break'> </span>.<span class='break'> </span>sys</a></li> <li><a href="../symbols/node.timers.html">node<span class='break'> </span>.<span class='break'> </span>timers</a></li> <li><a href="../symbols/node.tls.html">node<span class='break'> </span>.<span class='break'> </span>tls</a></li> <li><a href="../symbols/node.tls.Server.html">node<span class='break'> </span>.<span class='break'> </span>tls<span class='break'> </span>.<span class='break'> </span>Server</a></li> <li><a href="../symbols/node.tty.html">node<span class='break'> </span>.<span class='break'> </span>tty</a></li> <li><a href="../symbols/node.tty_posix.html">node<span class='break'> </span>.<span class='break'> </span>tty_posix</a></li> <li><a href="../symbols/node.tty_posix.ReadStream.html">node<span class='break'> </span>.<span class='break'> </span>tty_posix<span class='break'> </span>.<span class='break'> </span>ReadStream</a></li> <li><a href="../symbols/node.tty_posix.WriteStream.html">node<span class='break'> </span>.<span class='break'> </span>tty_posix<span class='break'> </span>.<span class='break'> </span>WriteStream</a></li> <li><a href="../symbols/node.tty_win32.html">node<span class='break'> </span>.<span class='break'> </span>tty_win32</a></li> <li><a href="../symbols/node.tty_win32.ReadStream.html">node<span 
class='break'> </span>.<span class='break'> </span>tty_win32<span class='break'> </span>.<span class='break'> </span>ReadStream</a></li> <li><a href="../symbols/node.tty_win32.WriteStream.html">node<span class='break'> </span>.<span class='break'> </span>tty_win32<span class='break'> </span>.<span class='break'> </span>WriteStream</a></li> <li><a href="../symbols/node.url.html">node<span class='break'> </span>.<span class='break'> </span>url</a></li> <li><a href="../symbols/node.util.html">node<span class='break'> </span>.<span class='break'> </span>util</a></li> <li><a href="../symbols/node.vm.html">node<span class='break'> </span>.<span class='break'> </span>vm</a></li> <li><a href="../symbols/node.vm.Script.html">node<span class='break'> </span>.<span class='break'> </span>vm<span class='break'> </span>.<span class='break'> </span>Script</a></li> </ul> </nav> </div> <div class="fineprint" style="clear:both"> </div> </div> <div class="index indexStatic"></div> <div class="content"> <div class="innerContent"> <h1 class="classTitle"> Namespace <span>node.global</span> </h1> <div class="intro"> <p class="description summary"> These object are available in the global scope and can be accessed from anywhere. </p> <ul class="summary"> <li>Defined in: <a href="../symbols/src/node.global.js.html">node.global.js</a></li> </ul> </div> <div class="props"> <table class="summaryTable" cellspacing="0" summary="A summary of the fields documented in the class node.global."> <caption class="sectionTitle">Field Summary</caption> <thead> <tr> <th scope="col">Field Attributes</th> <th scope="col">Field Name and Description</th> </tr> </thead> <tbody> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.COMPILED">COMPILED</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.console">console</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.global">global</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.GLOBAL">GLOBAL</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.goog">goog</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.module">module</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.nclosure">nclosure</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.process">process</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td 
class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.root">root</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.top">top</a></b> </div> <div class="description"></div> </td> </tr> <tr> <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont"> node.global.<b><a href="../symbols/node.global.html#.window">window</a></b> </div> <div class="description"></div> </td> </tr> </tbody> </table> </div> <!-- #### METHODS SUMMARY --> <div class="props"> <h2 class="sectionTitle">Method Summary</h2> <table class="summaryTable" id="MethodsList" cellspacing="0" summary="A summary of the methods documented in the class node.global."> <thead> <tr> <th scope="col">Method Attributes</th> <th scope="col">Method Name and Description</th> </tr> </thead> <tbody> <tr class='item0' > <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont">node.global.<b><a href="../symbols/node.global.html#.clearInterval">clearInterval</a></b>() </div> <div class="description"></div> </td> </tr> <tr class='item1' > <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont">node.global.<b><a href="../symbols/node.global.html#.clearTimeout">clearTimeout</a></b>() </div> <div class="description"></div> </td> </tr> <tr class='item2' > <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont">node.global.<b><a href="../symbols/node.global.html#.require">require</a></b>(path) </div> <div class="description"></div> </td> </tr> <tr class='item3' > <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont">node.global.<b><a href="../symbols/node.global.html#.setInterval">setInterval</a></b>() </div> <div class="description"></div> </td> </tr> <tr class='item4' > <td class="attributes">static &nbsp;</td> <td class="nameDescription"> <div class="fixedFont">node.global.<b><a href="../symbols/node.global.html#.setTimeout">setTimeout</a></b>() </div> <div class="description"></div> </td> </tr> </tbody> </table> </div> <!-- #### EVENTS SUMMARY --> <!-- #### CONSTRUCTOR DETAILS --> <!-- #### FIELD DETAILS --> <div class="details props"> <div class="innerProps"> <h2 class="sectionTitle"> Field Detail </h2> <div class="fixedFont heading" id=".COMPILED"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>COMPILED</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".console"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>console</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".global"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>global</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".GLOBAL"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>GLOBAL</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".goog"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>goog</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".module"> <span class='lighter'> static 
</span> <span class="light">node.global.</span><b>module</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".nclosure"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>nclosure</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".process"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>process</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".root"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>root</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".top"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>top</b> </div> <div class="description"> </div> <div class="hr"></div> <div class="fixedFont heading" id=".window"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>window</b> </div> <div class="description"> </div> </div> </div> <!-- #### METHOD DETAILS --> <div class="details props"> <div class="innerProps"> <h2 class="sectionTitle"> Method Detail </h2> <ul class="methodDetail" id="MethodDetail"> <li class='item0' > <div class="fixedFont heading" id=".clearInterval"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>clearInterval</b>() </div> <div class="description"> </div> <div class="hr"></div> </li> <li class='item1' > <div class="fixedFont heading" id=".clearTimeout"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>clearTimeout</b>() </div> <div class="description"> </div> <div class="hr"></div> </li> <li class='item2' > <div class="fixedFont heading" id=".require"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>require</b>(path) </div> <div class="description"> </div> <dl class="detailList params"> <dt class="heading">Parameters:</dt> <dt> <span class="light fixedFont">{string}</span> <b>path</b> </dt> <dd></dd> </dl> <div class="hr"></div> </li> <li class='item3' > <div class="fixedFont heading" id=".setInterval"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>setInterval</b>() </div> <div class="description"> </div> <div class="hr"></div> </li> <li class='item4' > <div class="fixedFont heading" id=".setTimeout"> <span class='lighter'> static </span> <span class="light">node.global.</span><b>setTimeout</b>() </div> <div class="description"> </div> </li> </ul> </div> </div> <!-- #### EVENT DETAILS --> </div> </div> <script type="text/javascript"> wbos.CssTools.MediaQueryFallBack.LoadCss('../css/screen.css', '../css/handheld.css', 660) codeview.classFilter.Init() addEventListener('load', prePrettyPrint, false); </script> </body> </html>
apache-2.0
alexpilotti/python-keystoneclient
keystoneclient/v3/contrib/oauth1/request_tokens.py
2659
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import unicode_literals

from six.moves.urllib import parse as urlparse

from keystoneclient import base
from keystoneclient.v3.contrib.oauth1 import utils

try:
    from oauthlib import oauth1
except ImportError:
    oauth1 = None


class RequestToken(base.Resource):
    def authorize(self, roles):
        try:
            retval = self.manager.authorize(self.id, roles)
            self = retval
        except Exception:
            retval = None

        return retval


class RequestTokenManager(base.CrudManager):
    """Manager class for manipulating identity OAuth request tokens."""
    resource_class = RequestToken

    def authorize(self, request_token, roles):
        """Authorize a request token with specific roles.

        Utilize Identity API operation:
        PUT /OS-OAUTH1/authorize/$request_token_id

        :param request_token: a request token that will be authorized, and
            can be exchanged for an access token.
        :param roles: a list of roles, that will be delegated to the user.
        """
        request_id = urlparse.quote(base.getid(request_token))
        endpoint = utils.OAUTH_PATH + '/authorize/%s' % (request_id)
        body = {'roles': [{'id': base.getid(r_id)} for r_id in roles]}
        return self._put(endpoint, body, "token")

    def create(self, consumer_key, consumer_secret, project):
        endpoint = utils.OAUTH_PATH + '/request_token'
        headers = {'requested_project_id': base.getid(project)}
        oauth_client = oauth1.Client(consumer_key,
                                     client_secret=consumer_secret,
                                     signature_method=oauth1.SIGNATURE_HMAC,
                                     callback_uri="oob")
        url = self.client.auth_url.rstrip("/") + endpoint
        url, headers, body = oauth_client.sign(url, http_method='POST',
                                               headers=headers)
        resp, body = self.client.post(endpoint, headers=headers)
        token = utils.get_oauth_token_from_body(resp.content)
        return self.resource_class(self, token)
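# A minimal usage sketch for RequestTokenManager, not part of the original
# module. It assumes an already-authenticated v3 client whose OAuth1
# extension is reachable as `keystone.oauth1.request_tokens`; the token,
# endpoint, consumer key/secret, project id and role id below are
# illustrative placeholders, not values defined by this file.
from keystoneclient.v3 import client

keystone = client.Client(token='ADMIN_TOKEN',
                         endpoint='http://keystone:5000/v3')  # placeholder auth

# POST /OS-OAUTH1/request_token, signed with the consumer's key and secret
# and scoped to the requested project.
request_token = keystone.oauth1.request_tokens.create(
    consumer_key='CONSUMER_KEY',
    consumer_secret='CONSUMER_SECRET',
    project='PROJECT_ID')

# PUT /OS-OAUTH1/authorize/<request_token_id>: delegate roles to the token
# holder so the request token can later be exchanged for an access token.
authorized = keystone.oauth1.request_tokens.authorize(
    request_token, roles=['ROLE_ID'])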
apache-2.0
gitpan/GOOGLE-ADWORDS-PERL-CLIENT
lib/Google/Ads/AdWords/v201406/ConstantDataService/getUserInterestCriterionResponse.pm
1893
package Google::Ads::AdWords::v201406::ConstantDataService::getUserInterestCriterionResponse;
use strict;
use warnings;

{ # BLOCK to scope variables

sub get_xmlns { 'https://adwords.google.com/api/adwords/cm/v201406' }

__PACKAGE__->__set_name('getUserInterestCriterionResponse');
__PACKAGE__->__set_nillable();
__PACKAGE__->__set_minOccurs();
__PACKAGE__->__set_maxOccurs();
__PACKAGE__->__set_ref();

use base qw(
    SOAP::WSDL::XSD::Typelib::Element
    Google::Ads::SOAP::Typelib::ComplexType
);

our $XML_ATTRIBUTE_CLASS;
undef $XML_ATTRIBUTE_CLASS;

sub __get_attr_class {
    return $XML_ATTRIBUTE_CLASS;
}

use Class::Std::Fast::Storable constructor => 'none';
use base qw(Google::Ads::SOAP::Typelib::ComplexType);

{ # BLOCK to scope variables

my %rval_of :ATTR(:get<rval>);

__PACKAGE__->_factory(
    [ qw(
        rval
    ) ],
    {
        'rval' => \%rval_of,
    },
    {
        'rval' => 'Google::Ads::AdWords::v201406::CriterionUserInterest',
    },
    {
        'rval' => 'rval',
    }
);

} # end BLOCK

} # end of BLOCK

1;

=pod

=head1 NAME

Google::Ads::AdWords::v201406::ConstantDataService::getUserInterestCriterionResponse

=head1 DESCRIPTION

Perl data type class for the XML Schema defined element
getUserInterestCriterionResponse from the namespace
https://adwords.google.com/api/adwords/cm/v201406.

=head1 PROPERTIES

The following properties may be accessed using get_PROPERTY / set_PROPERTY
methods:

=over

=item * rval

 $element->set_rval($data);
 $element->get_rval();

=back

=head1 METHODS

=head2 new

 my $element = Google::Ads::AdWords::v201406::ConstantDataService::getUserInterestCriterionResponse->new($data);

Constructor. The following data structure may be passed to new():

 { rval => $a_reference_to, # see Google::Ads::AdWords::v201406::CriterionUserInterest
 },

=head1 AUTHOR

Generated by SOAP::WSDL

=cut
apache-2.0
Kinza59/educate-for-oop
java/book-dietel/kinza/10_9_h_Tetrahedron.java
453
public class Tetrahedron extends ThreeDimensionalShapes {

    public Tetrahedron(double s) {
        super(s);
    }

    @Override
    public double areaOf3DShapes() {
        return Math.sqrt(3) * getSide() * getSide();
    }

    @Override
    public double volumeOf3DShapes() {
        return (Math.pow(getSide(), 3)) / (6 * Math.sqrt(2));
    }

    @Override
    public String toString() {
        return super.toString() + "=> Tetrahedron";
    }
}
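// A minimal usage sketch for the class above. TetrahedronDemo is a
// hypothetical driver, and it assumes the abstract base class
// ThreeDimensionalShapes (not shown in this file) stores the side length
// passed to super(s) and exposes it via getSide().
public class TetrahedronDemo {
    public static void main(String[] args) {
        Tetrahedron t = new Tetrahedron(2.0);
        // Surface area of a regular tetrahedron: sqrt(3) * s^2
        System.out.println("area   = " + t.areaOf3DShapes());
        // Volume of a regular tetrahedron: s^3 / (6 * sqrt(2))
        System.out.println("volume = " + t.volumeOf3DShapes());
        // Uses the overridden toString()
        System.out.println(t);
    }
}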
apache-2.0
apache/pdfbox
examples/src/main/java/org/apache/pdfbox/examples/interactive/form/CreateRadioButtons.java
8207
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.examples.interactive.form;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.PDAppearanceContentStream;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.pdmodel.font.Standard14Fonts.FontName;
import org.apache.pdfbox.pdmodel.graphics.color.PDColor;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceCharacteristicsDictionary;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceDictionary;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceEntry;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAppearanceStream;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDBorderStyleDictionary;
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDRadioButton;

/**
 * Example to create radio buttons.
 *
 * @author Tilman Hausherr
 */
public class CreateRadioButtons
{
    private CreateRadioButtons()
    {
    }

    public static void main(String[] args) throws IOException
    {
        try (PDDocument document = new PDDocument())
        {
            PDPage page = new PDPage(PDRectangle.A4);
            document.addPage(page);

            PDAcroForm acroForm = new PDAcroForm(document);

            // if you want to see what Adobe does, activate this, open with Adobe
            // save the file, and then open it with PDFDebugger
            //acroForm.setNeedAppearances(true)

            document.getDocumentCatalog().setAcroForm(acroForm);

            List<String> options = Arrays.asList("a", "b", "c");
            PDRadioButton radioButton = new PDRadioButton(acroForm);
            radioButton.setPartialName("MyRadioButton");
            radioButton.setExportValues(options);

            PDAppearanceCharacteristicsDictionary appearanceCharacteristics =
                    new PDAppearanceCharacteristicsDictionary(new COSDictionary());
            appearanceCharacteristics.setBorderColour(new PDColor(new float[] { 1, 0, 0 }, PDDeviceRGB.INSTANCE));
            appearanceCharacteristics.setBackground(new PDColor(new float[] { 0, 1, 0.3f }, PDDeviceRGB.INSTANCE));
            // no caption => round
            // with caption => see checkbox example

            List<PDAnnotationWidget> widgets = new ArrayList<>();
            for (int i = 0; i < options.size(); i++)
            {
                PDAnnotationWidget widget = new PDAnnotationWidget();
                widget.setRectangle(new PDRectangle(30, PDRectangle.A4.getHeight() - 40 - i * 35, 30, 30));
                widget.setAppearanceCharacteristics(appearanceCharacteristics);
                PDBorderStyleDictionary borderStyleDictionary = new PDBorderStyleDictionary();
                borderStyleDictionary.setWidth(2);
                borderStyleDictionary.setStyle(PDBorderStyleDictionary.STYLE_SOLID);
                widget.setBorderStyle(borderStyleDictionary);
                widget.setPage(page);

                COSDictionary apNDict = new COSDictionary();
                apNDict.setItem(COSName.Off, createAppearanceStream(document, widget, false));
                apNDict.setItem(options.get(i), createAppearanceStream(document, widget, true));

                PDAppearanceDictionary appearance = new PDAppearanceDictionary();
                PDAppearanceEntry appearanceNEntry = new PDAppearanceEntry(apNDict);
                appearance.setNormalAppearance(appearanceNEntry);
                widget.setAppearance(appearance);
                widget.setAppearanceState("Off"); // don't forget this, or button will be invisible
                widgets.add(widget);
                page.getAnnotations().add(widget);
            }
            radioButton.setWidgets(widgets);

            acroForm.getFields().add(radioButton);

            // Set the texts
            PDType1Font helvetica = new PDType1Font(FontName.HELVETICA);
            try (PDPageContentStream contents = new PDPageContentStream(document, page))
            {
                for (int i = 0; i < options.size(); i++)
                {
                    contents.beginText();
                    contents.setFont(helvetica, 15);
                    contents.newLineAtOffset(70, PDRectangle.A4.getHeight() - 30 - i * 35);
                    contents.showText(options.get(i));
                    contents.endText();
                }
            }

            radioButton.setValue("c");

            document.save("target/RadioButtonsSample.pdf");
        }
    }

    private static PDAppearanceStream createAppearanceStream(
            final PDDocument document, PDAnnotationWidget widget, boolean on) throws IOException
    {
        PDRectangle rect = widget.getRectangle();
        PDAppearanceStream onAP = new PDAppearanceStream(document);
        onAP.setBBox(new PDRectangle(rect.getWidth(), rect.getHeight()));
        try (PDAppearanceContentStream onAPCS = new PDAppearanceContentStream(onAP))
        {
            PDAppearanceCharacteristicsDictionary appearanceCharacteristics = widget.getAppearanceCharacteristics();
            PDColor backgroundColor = appearanceCharacteristics.getBackground();
            PDColor borderColor = appearanceCharacteristics.getBorderColour();
            float lineWidth = getLineWidth(widget);
            onAPCS.setBorderLine(lineWidth, widget.getBorderStyle(), widget.getBorder());
            onAPCS.setNonStrokingColor(backgroundColor);
            float radius = Math.min(rect.getWidth() / 2, rect.getHeight() / 2);
            drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, radius);
            onAPCS.fill();

            onAPCS.setStrokingColor(borderColor);
            drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, radius - lineWidth / 2);
            onAPCS.stroke();

            if (on)
            {
                onAPCS.setNonStrokingColor(0f);
                drawCircle(onAPCS, rect.getWidth() / 2, rect.getHeight() / 2, (radius - lineWidth) / 2);
                onAPCS.fill();
            }
        }
        return onAP;
    }

    static float getLineWidth(PDAnnotationWidget widget)
    {
        PDBorderStyleDictionary bs = widget.getBorderStyle();
        if (bs != null)
        {
            return bs.getWidth();
        }
        return 1;
    }

    static void drawCircle(PDAppearanceContentStream cs, float x, float y, float r) throws IOException
    {
        // http://stackoverflow.com/a/2007782/535646
        float magic = r * 0.551784f;
        cs.moveTo(x, y + r);
        cs.curveTo(x + magic, y + r, x + r, y + magic, x + r, y);
        cs.curveTo(x + r, y - magic, x + magic, y - r, x, y - r);
        cs.curveTo(x - magic, y - r, x - r, y - magic, x - r, y);
        cs.curveTo(x - r, y + magic, x - magic, y + r, x, y + r);
        cs.closePath();
    }
}
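// A minimal companion sketch, not part of the example above: it reopens the
// generated file and reads the selected option back through the AcroForm.
// ReadRadioButtonValue is a hypothetical class name; the Loader entry point
// assumes PDFBox 3.x, and the path and field name simply reuse the
// "target/RadioButtonsSample.pdf" and "MyRadioButton" values from the example.
import java.io.File;
import java.io.IOException;

import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDRadioButton;

public class ReadRadioButtonValue
{
    public static void main(String[] args) throws IOException
    {
        try (PDDocument document = Loader.loadPDF(new File("target/RadioButtonsSample.pdf")))
        {
            PDAcroForm acroForm = document.getDocumentCatalog().getAcroForm();
            PDRadioButton radioButton = (PDRadioButton) acroForm.getField("MyRadioButton");
            // Expected to print "c", the value set by CreateRadioButtons.
            System.out.println("selected: " + radioButton.getValue());
        }
    }
}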
apache-2.0
swift-lang/swift-t
c-utils/code/src/list.c
19734
/* * Copyright 2013 University of Chicago and Argonne National Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License */ /* * list.c * * Created on: May 4, 2011 * Author: wozniak */ #include <assert.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include "src/list.h" void list_init(struct list* target) { assert(target); target->head = NULL; target->tail = NULL; target->size = 0; } struct list* list_create() { struct list* new_list = malloc(sizeof(struct list)); if (! new_list) return NULL; list_init(new_list); return new_list; } /** @return The new list_item. */ struct list_item* list_add(struct list* target, void* data) { struct list_item* new_item = malloc(sizeof(struct list_item)); if (! new_item) return NULL; new_item->data = data; new_item->next = NULL; if (target->size == 0) { target->head = new_item; target->tail = new_item; } else { target->tail->next = new_item; } target->tail = new_item; target->size++; return new_item; } /** Add this data if list_inspect does not find it. */ struct list_item* list_add_one(struct list* target, void* data, size_t n) { if (! list_inspect(target, data, n)) return list_add(target, data); return NULL; } /** Add this pre-formed list_item to target. Convenience: sets item->next to NULL. @return The added item. */ struct list_item* list_append(struct list* target, struct list_item* item) { if (target->size == 0) target->head = item; else target->tail->next = item; target->tail = item; item->next = NULL; target->size++; return item; } struct list* list_split_words(char* s) { struct list* result = list_create(); char* p = s; char* q; while (*p) { // Set p to start of word, q to end of word... while (*p == ' ' || *p == '\t') p++; if (!*p) break; q = p+1; while (! (*q == ' ' || *q == '\t' || *q == '\0')) q++; // Insert word into list... char* data = malloc((size_t)(q-p+2)); strncpy(data, p, (size_t)(q-p)); data[q-p] = '\0'; list_add(result, data); // Step forward: p = q; } return result; } struct list* list_split_lines(const char* s) { struct list* result = list_create(); const char* p = s; const char* q; while (*p) { // Set p to start of word, q to end of word... while (*p == '\n') p++; if (!*p) break; q = p+1; while (! (*q == '\n' || *q == '\0')) q++; // Insert line into list... char* data = malloc((size_t)(q-p+2)); strncpy(data, p, (size_t)(q-p)); data[q-p] = '\0'; list_add(result, data); // Step forward: p = q; } return result; } /** Remove and return the tail data item. This is expensive: singly linked list. 
*/ void* list_pop(struct list* target) { void* data; if (target->size == 0) return NULL; if (target->size == 1) { data = target->head->data; free(target->head); target->head = NULL; target->tail = NULL; target->size = 0; return data; } struct list_item* item; for (item = target->head; item->next->next; item = item->next); data = item->next->data; free(item->next); item->next = NULL; target->tail = item; target->size--; return data; } void* list_head(struct list* target) { if (target->size == 0) return NULL; return target->head->data; } /** */ void* list_poll(struct list* target) { // NOTE_FI(target->size); void* data; if (target->size == 0) return NULL; if (target->size == 1) { data = target->head->data; free(target->head); target->head = NULL; target->tail = NULL; target->size = 0; return data; } struct list_item* delendum = target->head; data = target->head->data; target->head = target->head->next; free(delendum); target->size--; return data; } void* list_random(struct list* target) { if (target->size == 0) return NULL; int p = rand() % target->size; struct list_item* item = target->head; for (int i = 0; i < p; i++) item = item->next; return item->data; } struct list_item* list_ordered_insert(struct list* target, int (*cmp)(void*,void*), void* data) { // NOTE_F; struct list_item* new_item = malloc(sizeof(struct list_item)); if (! new_item) return NULL; new_item->data = data; new_item->next = NULL; if (target->size == 0) { target->head = new_item; target->tail = new_item; } else { struct list_item* item = target->head; // Are we the new head? if (cmp(data, item->data) == -1) { new_item->next = target->head; target->head = new_item; } else { do { // Are we inserting after this item? if (item->next == NULL) { item->next = new_item; target->tail = new_item; break; } else { if (cmp(data, item->next->data) == -1) { new_item->next = item->next; item->next = new_item; break; } } } while ((item = item->next)); } } target->size++; return new_item; } /** Untested. */ struct list_item* list_ordered_insert_unique(struct list* target, int (*cmp)(void*,void*), void* data) { struct list_item* new_item = malloc(sizeof(struct list_item)); if (! new_item) return NULL; new_item->data = data; new_item->next = NULL; if (target->size == 0) { target->head = new_item; target->tail = new_item; } else { struct list_item* item = target->head; // Are we the new head? if (cmp(data, item->data) == -1) { new_item->next = target->head; target->head = new_item; } else { do { // Are we inserting after this item? if (item->next == NULL) { item->next = new_item; target->tail = new_item; break; } else { int c = cmp(data, item->next->data); if (c == 0) { free(new_item); return NULL; } if (c == -1) { new_item->next = item->next; item->next = new_item; break; } } } while ((item = item->next)); } } target->size++; return new_item; } struct list_item* list_add_unique(struct list* target, int (*cmp)(void*,void*), void* data) { if (! list_contains(target, cmp, data)) return list_add(target, data); return NULL; } /** */ bool list_contains(struct list* target, int (*cmp)(void*,void*), void* data) { struct list_item* item; for (item = target->head; item; item = item->next) if (cmp(item->data, data) == 0) return true; return false; } /** Compare data pointer addresses for match. @return An equal data pointer or NULL if not found. 
*/ void* list_search(struct list* target, void* data) { struct list_item* item; for (item = target->head; item; item = item->next) if (item->data == data) return data; return NULL; } /** Compare data contents with memcmp for match. @return A pointer to an equivalent object or NULL if not found. */ void* list_inspect(struct list* target, void* data, size_t n) { struct list_item* item; for (item = target->head; item; item = item->next) if (memcmp(item->data, data, n) == 0) return item->data; return NULL; } bool list_matches(struct list* target, int (*cmp)(void*,void*), void* arg) { assert(target != NULL); for (struct list_item* item = target->head; item; item = item->next) if (cmp(item->data, arg) == 0) return true; return false; } /** Empty the list and free the data. */ void list_clear(struct list* target) { list_clear_callback(target, free); } void list_clear_callback(struct list* target, void (*callback)(void*)) { struct list_item* item = target->head; while (item) { struct list_item* next = item->next; if (callback != NULL) callback(item->data); free(item); item = next; } // Reset everything list_init(target); } /** Removes only one item that points to given data. Does not free the item data. @return True iff the data pointer was matched and the item was freed. */ bool list_remove(struct list* target, void* data) { if (target->size == 0) return false; struct list_item* item = target->head; if (data == item->data) { struct list_item* next = item->next; free(item); target->head = next; target->size--; if (target->size == 0) target->tail = NULL; return true; } while (item->next) { // Are we removing the item after this item? if (data == item->next->data) { struct list_item* nextnext = item->next->next; if (target->tail == item->next) target->tail = nextnext; free(item->next); item->next = nextnext; target->size--; return true; } item = item->next; } return false; } /** Return all elements from the list where cmp(data,arg) == 0. */ struct list* list_select(struct list* target, int (*cmp)(void*,void*), void* arg) { struct list* result = list_create(); struct list_item* item; assert(target != NULL); for (item = target->head; item; item = item->next) { if (cmp(item->data, arg) == 0) list_add(result, item->data); } return result; } /** Return the first data element from the list where f(data,arg). */ void* list_select_one(struct list* target, int (*cmp)(void*,void*), void* arg) { assert(target != NULL); for (struct list_item* item = target->head; item; item = item->next) if (cmp(item->data, arg) == 0) return item->data; return NULL; } /** Remove the elements from the list where cmp(data,arg) == 0. @return true if one or more items were deleted. */ bool list_remove_where(struct list* target, int (*cmp)(void*,void*), void* arg) { bool result = false; struct list_item* item; if (target->size == 0) return false; int old_size = target->size; // Establish next good item in list... struct list_item* good = NULL; for (item = target->head; item; item = item->next) { if (cmp(item->data, arg) != 0) { good = item; break; } } if (! good) // List should be empty { if (target->size > 0) result = true; list_clear(target); return result; } // Establish correct head... struct list_item* head = target->head; while (head && head != good) { struct list_item* next = head->next; free(head); target->size--; head = next; } target->head = good; // Now current points to the first valid item in the list. struct list_item* current = target->head; while (good != NULL) { // Move to a good item or NULL... 
struct list_item* item = good->next; good = NULL; while (item) { if (cmp(item->data, arg) != 0) { good = item; break; } item = item->next; } if (good == NULL) // No more good items were found { target->tail = current; } // Free items between current and good: struct list_item* link = current; current = current->next; while (current != good) { struct list_item* next = current->next; free(current); target->size--; current = next; } link->next = good; } if (target->size != old_size) return true; return false; } /** Remove and return all elements from the list where cmp(data,arg) == 0. */ struct list* list_pop_where(struct list* target, int (*cmp)(void*,void*), void* arg) { struct list* result = list_create(); struct list_item* item; if (target->size == 0) return result; // Establish next good item in list... struct list_item* good = NULL; for (item = target->head; item; item = item->next) { if (cmp(item->data, arg) != 0) { good = item; break; } } if (! good) // All elements should be moved { list_transplant(result, target); return result; } // Establish correct head... struct list_item* head = target->head; while (head && head != good) { struct list_item* next = head->next; list_append(result, head); target->size--; head = next; } target->head = good; // Now current points to the first valid item in the list. struct list_item* current = target->head; while (good != NULL) { // Move to a good item or NULL... struct list_item* item = good->next; good = NULL; while (item) { if (cmp(item->data, arg) != 0) { good = item; break; } item = item->next; } if (good == NULL) // No more good items were found target->tail = current; // if (good != NULL) // printf("good: %i \n", *(int*) good->data); // Free items between current and good: struct list_item* link = current; current = current->next; while (current != good) { struct list_item* next = current->next; list_append(result, current); target->size--; current = next; } link->next = good; } return result; } /** Moves all items from segment into target structure. */ void list_transplant(struct list* target, struct list* segment) { if (target->size == 0) { target->head = segment->head; target->tail = segment->tail; } else { target->tail->next = segment->head; target->tail = segment->tail; } target->size += segment->size; segment->head = NULL; segment->tail = NULL; segment->size = 0; } /** Does not free the item data. @return True iff the data content was matched by memcmp and the item was freed. */ bool list_erase(struct list* target, void* data, size_t n) { struct list_item* item = target->head; // Are we removing the head? if (memcmp(data, item->data, n) == 0) { struct list_item* next = item->next; free(item); target->head = next; if (target->tail == next) target->tail = NULL; target->size--; return true; } do { // Are we removing the item after this item? if (memcmp(data, item->next->data, n) == 0) { struct list_item* nextnext = item->next->next; if (target->tail == item->next) target->tail = nextnext; free(item->next); item->next = nextnext; target->size--; return true; } } while ((item = item->next)); return false; } /** Function specifies the output format for the data items Does not free return of f. 
*/ void list_output(char* (*f)(void*), struct list* target) { struct list_item* item; printf("["); for (item = target->head; item; item = item->next) { printf("%s", f(item->data)); if (item->next) printf(","); } printf("]\n"); } /** format specifies the output format for the data items */ void list_printf(char* format, struct list* target) { printf("["); for (struct list_item* item = target->head; item; item = item->next) { if (strcmp(format, "%s") == 0) printf(format, item->data); else if (strcmp(format, "%i") == 0) printf(format, *((int*) (item->data))); else if (strcmp(format, "%li") == 0) printf(format, *((long*) (item->data))); if (item->next) printf(","); } printf("]\n"); } /** Free this list but not its data. */ void list_free(struct list* target) { list_free_callback(target, NULL); } void list_free_callback(struct list* target, void (*callback)(void*)) { struct list_item* item = target->head; while (item) { struct list_item* next = item->next; if (callback != NULL) callback(item->data); free(item); item = next; } free(target); } /** Free this list and its data. */ void list_destroy(struct list* target) { struct list_item* item = target->head; while (item) { struct list_item* next = item->next; free(item->data); free(item); item = next; } free(target); } int int_cmp(void* i1, void* i2) { int j1 = *(int*) i1; int j2 = *(int*) i2; if (j1 > j2) return 1; else if (j1 < j2) return -1; else return 0; } /** Returns 0 iff i1 is divisible by i2. */ int divides_cmp(void* i1, void* i2) { int j1 = *(int*) i1; int j2 = *(int*) i2; return (j1 % j2); } #ifdef DEBUG_LIST int main() { struct list* L = list_create(); int zero = 0; int one = 1; int two = 2; int three = 3; int four = 4; int four2 = 4; int five = 5; int six = 6; int seven = 7; int eight = 8; list_ordered_insert(L, &two, int_cmp); list_ordered_insert(L, &four, int_cmp); list_ordered_insert(L, &three, int_cmp); list_ordered_insert(L, &three, int_cmp); list_ordered_insert(L, &three, int_cmp); list_ordered_insert(L, &three, int_cmp); list_ordered_insert(L, &zero, int_cmp); list_ordered_insert(L, &four2, int_cmp); list_ordered_insert(L, &four2, int_cmp); list_ordered_insert(L, &five, int_cmp); list_ordered_insert(L, &one, int_cmp); list_push(L, &eight); list_dump("%i", L); printf("size: %i \n", L->size); // struct list* matches = list_select(L, int_cmp, &four); // list_remove_where(L, divides_cmp, &two); struct list* K = list_pop_where(L, divides_cmp, &two); list_dump("%i", L); printf("size: %i \n", L->size); list_dump("%i", K); printf("size: %i \n", L->size); /* list_dump("%i", L); list_poll(L); list_dump("%i", L); list_pop(L); list_dump("%i", L); list_add(L, &seven); list_add(L, &six); list_dump("%i", L); */ // list_clobber(L); list_clear(L); printf("size(L): %i \n", L->size); list_clear(K); printf("size(K): %i \n", K->size); list_dump("%i", L); } #endif /* char* append_pair(char* ptr, struct list_item* item, char* s) { ptr += sprintf(ptr, "(%s,", item->data); ptr += sprintf(ptr, "%s)", s); if (item->next) ptr += sprintf(ptr, ","); return ptr; } ** Dump list to string a la snprintf() size must be greater than 2. 
format specifies the output format for the data items returns int greater than size if size limits are exceeded indicating result is garbage int list_tostring(char* str, size_t size, char* format, struct list* target) { int error = size+1; char* ptr = str; struct list_item* item; if (size <= 2) return error; ptr += sprintf(ptr, "["); char* s = (char*) malloc(sizeof(char)*LIST_MAX_DATUM); for (item = target->head; item; item = item->next, item && ptr-str < size) { int r = snprintf(s, LIST_MAX_DATUM, format, item->data); if (r > LIST_MAX_DATUM) return size+1; if ((ptr-str) + strlen(item->data) + r + 4 < size) ptr = append_pair(ptr, item, s); else return error; } sprintf(ptr, "]"); // free(s); return (ptr-str); } */
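/* A minimal usage sketch for the list API above (list_create, list_add,
 * list_poll, list_free). It assumes the c-utils sources are compiled from
 * the project root so that "src/list.h" resolves, matching the #include
 * used by list.c itself. */
#include <stdio.h>

#include "src/list.h"

int main(void)
{
    struct list* L = list_create();

    int a = 1, b = 2, c = 3;
    list_add(L, &a);   /* list_add() appends at the tail */
    list_add(L, &b);
    list_add(L, &c);

    /* list_poll() removes from the head, so items come out in FIFO order. */
    void* data;
    while ((data = list_poll(L)) != NULL)
        printf("%i\n", *(int*) data);

    /* The data items live on the stack here, so free only the list itself. */
    list_free(L);
    return 0;
}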
apache-2.0