Columns: text (string, lengths 2 to 1.04M), meta (dict)
set -e

# Source libraries. Uncomment if needed/defined
#. "${RT_LIB}"
. "${RT_PROJECT_ROOT}/_lib/lib.sh"

NAME=test-ctr

clean_up() {
    rm -rf ${NAME}-*
}

trap clean_up EXIT

linuxkit build -docker -format kernel+initrd -name "${NAME}" test.yml

[ -f "${NAME}-kernel" ] || exit 1
[ -f "${NAME}-initrd.img" ] || exit 1
[ -f "${NAME}-cmdline" ] || exit 1

./test.exp

exit 0
{ "content_hash": "c8d3a398dc2dff9df084e6cee346cc77", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 69, "avg_line_length": 19.42105263157895, "alnum_prop": 0.6124661246612466, "repo_name": "justincormack/linuxkit", "id": "219745694e8d3145df3a167ff5438cf1f9f919ba", "size": "476", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/cases/040_packages/007_getty-containerd/test.sh", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "55532" }, { "name": "Cap'n Proto", "bytes": "2365" }, { "name": "Dockerfile", "bytes": "77509" }, { "name": "Go", "bytes": "498337" }, { "name": "HTML", "bytes": "188" }, { "name": "Makefile", "bytes": "47564" }, { "name": "OCaml", "bytes": "69432" }, { "name": "Python", "bytes": "6363" }, { "name": "Ruby", "bytes": "937" }, { "name": "Shell", "bytes": "168624" }, { "name": "Standard ML", "bytes": "37" } ], "symlink_target": "" }
/** * Function to create query string permalinks of the form: * * [protocol][server][resource]/?[queryVariable]=[queryString] * * The [queryString] portion should be the result of calling JSON.stringify() * on an Object variable. * * @param {string} queryVariable the query string's key name * @param {string} defaultQueryString the default query string (optional) */ function QueryPermalink(queryVariable, defaultQueryString) { /** * Removes leading and trailing whitespace and trims remaining whitespace. * * example: * - in: ' Hello. My name is Inigo Montoya. You killed my father. Prepare to die. ' * - out: 'Hello. My name is Inigo Montoya. You killed my father. Prepare to die.' * * @param {string} string the string to 'squish' */ this.squish = function(string) { return string.replace(/(?:(?:^|\n)\s+|\s+(?:$|\n))/g, '').replace(/\s+/g, ' '); }; /** * Serialize an object for use in a URL query string or Ajax request (similar to jQuery.param()). * * @param {Object} source the object to serialize * @return {string} the serialized object */ this.param = function(source) { var array = []; for (var key in source) { array.push(encodeURIComponent(key) + '=' + encodeURIComponent(source[key])); } return array.join('&'); } /** * Returns the result of appending a given string to the current path. * * @param {string} fullQueryString the full query string * (e.g. '?key01=val01' instead of 'val01') * @return {string} the resulting URL */ this.buildUrl = function(fullQueryString) { return window.location.href.split('?')[0] + fullQueryString; }; /** * Appends the given query string to the current path and navigates to the new URL. * * @param {string} queryString the query string */ this.loadPage = function(queryString) { if (queryString.length > 0) { var obj = {}; obj[queryVariable] = this.squish(queryString); location.assign(this.buildUrl('?' + this.param(obj))); } }; /** * Returns the query string from the current URL. * If no query string is present, use the default value and reload the page. * * @return {string} the query string */ this.getQueryString = function() { var qs = ''; var vars = (new QueryData())[queryVariable]; if (vars !== undefined) { qs = this.squish(vars); } else if (defaultQueryString !== undefined && this.squish(defaultQueryString) !== '') { qs = defaultQueryString; this.loadPage(qs); } return qs; }; } /* QueryData.js A function to parse data from a query string Created by Stephen Morley - http://code.stephenmorley.org/ - and released under the terms of the CC0 1.0 Universal legal code: http://creativecommons.org/publicdomain/zero/1.0/legalcode */ function QueryData(_1,_2){if(_1==undefined){_1=location.search?location.search:"";}if(_1.charAt(0)=="?"){_1=_1.substring(1);}if(_1.length>0){_1=_1.replace(/\+/g," ");var _3=_1.split(/[&;]/g);for(var _4=0;_4<_3.length;_4++){var _5=_3[_4].split("=");var _6=decodeURIComponent(_5[0]);var _7=_5.length>1?decodeURIComponent(_5[1]):"";if(_2){if(!(_6 in this)){this[_6]=[];}this[_6].push(_7);}else{this[_6]=_7;}}}};
{ "content_hash": "622a587567854d7a373b075b6eaaa787", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 408, "avg_line_length": 34.103092783505154, "alnum_prop": 0.6266626360338573, "repo_name": "lsissoko/query-permalink", "id": "93f3d0e0f5f8248ccc86050bc65ff3619a852eec", "size": "3308", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qp.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "452" }, { "name": "HTML", "bytes": "1096" }, { "name": "JavaScript", "bytes": "4824" } ], "symlink_target": "" }
package com.google.security.zynamics.binnavi.yfileswrap.zygraph.Implementations; import java.util.HashMap; import y.view.Graph2DView; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.CUtilityFunctions; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException; import com.google.security.zynamics.binnavi.ZyGraph.CViewSettings; import com.google.security.zynamics.binnavi.ZyGraph.ZyGraphViewSettings; import com.google.security.zynamics.binnavi.disassembly.views.INaviView; /** * Contains functions related to view settings. */ public final class CSettingsFunctions { /** * You are not supposed to instantiate this class. */ private CSettingsFunctions() { } /** * Saves the settings of a view to the database. * * @param view The view whose settings are saved. * @param view2d The yFiles view that shows the given view. * @param settings The settings to be stored. */ public static void saveSettings( final INaviView view, final Graph2DView view2d, final ZyGraphViewSettings settings) { Preconditions.checkNotNull(view, "IE02132: View argument can not be null"); Preconditions.checkNotNull(view2d, "IE02133: View2D argument can not be null"); Preconditions.checkNotNull(settings, "IE02134: Settings argument can not be null"); final HashMap<String, String> settingsMap = new HashMap<String, String>(); settingsMap.put(CViewSettings.ANIMATION_SPEED, String.valueOf(settings.getDisplaySettings().getAnimationSpeed())); settingsMap.put(CViewSettings.AUTOLAYOUT_THRESHOLD, String.valueOf(settings.getLayoutSettings().getAutolayoutDeactivationThreshold())); settingsMap.put(CViewSettings.AUTOMATIC_LAYOUTING, String.valueOf(settings.getLayoutSettings().getAutomaticLayouting())); settingsMap.put(CViewSettings.CIRCULAR_LAYOUT_STYLE, String.valueOf(settings.getLayoutSettings().getCircularSettings().getStyle().ordinal())); settingsMap.put(CViewSettings.DISPLAY_MULTIPLE_EDGES_AS_ONE, String.valueOf(settings.getEdgeSettings().getDisplayMultipleEdgesAsOne())); settingsMap.put(CViewSettings.FUNCTION_NODE_INFORMATION, String.valueOf(settings.getDisplaySettings().getFunctionNodeInformation())); settingsMap.put(CViewSettings.GRADIENT_BACKGROUND, String.valueOf(settings.getDisplaySettings().getGradientBackground())); settingsMap.put(CViewSettings.HIERARCHIC_LAYOUT_STYLE, String.valueOf( settings.getLayoutSettings().getHierarchicalSettings().getStyle().ordinal())); settingsMap.put(CViewSettings.LAYOUT_ANIMATION, String.valueOf(settings.getLayoutSettings().getAnimateLayout())); settingsMap.put(CViewSettings.LAYOUT_CALCULATION_TRESHOLD, String.valueOf(settings.getLayoutSettings().getLayoutCalculationTimeWarningThreshold())); settingsMap.put(CViewSettings.MINIMUM_CIRCULAR_NODE_DISTANCE, String.valueOf( settings.getLayoutSettings().getCircularSettings().getMinimumNodeDistance())); settingsMap.put(CViewSettings.MINIMUM_HIERARCHIC_EDGE_DISTANCE, String.valueOf( settings.getLayoutSettings().getHierarchicalSettings().getMinimumEdgeDistance())); settingsMap.put(CViewSettings.MINIMUM_HIERARCHIC_LAYER_DISTANCE, String.valueOf( settings.getLayoutSettings().getHierarchicalSettings().getMinimumLayerDistance())); settingsMap.put(CViewSettings.MINIMUM_HIERARCHIC_NODE_DISTANCE, String.valueOf( settings.getLayoutSettings().getHierarchicalSettings().getMinimumNodeDistance())); settingsMap.put(CViewSettings.MINIMUM_ORTHOGONAL_NODE_DISTANCE, String.valueOf( settings.getLayoutSettings().getOrthogonalSettings().getMinimumNodeDistance())); settingsMap.put(CViewSettings.MOUSEWHEEL_ACTION, 
String.valueOf(settings.getMouseSettings().getMouseWheelAction().ordinal())); settingsMap.put(CViewSettings.ORTHOGONAL_LAYOUT_STYLE, String.valueOf(settings.getLayoutSettings().getOrthogonalSettings().getStyle().ordinal())); settingsMap.put(CViewSettings.ORTHOGONAL_ORIENTATION, String.valueOf( settings.getLayoutSettings().getOrthogonalSettings().getOrientation().ordinal())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING, String.valueOf(settings.getProximitySettings().getProximityBrowsing())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING_THRESHOLD, String.valueOf(settings.getProximitySettings().getProximityBrowsingActivationThreshold())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING_CHILDREN, String.valueOf(settings.getProximitySettings().getProximityBrowsingChildren())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING_FROZEN, String.valueOf(settings.getProximitySettings().getProximityBrowsingFrozen())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING_PARENTS, String.valueOf(settings.getProximitySettings().getProximityBrowsingParents())); settingsMap.put(CViewSettings.PROXIMITY_BROWSING_PREVIEW, String.valueOf(settings.getProximitySettings().getProximityBrowsingPreview())); settingsMap.put(CViewSettings.SCROLL_SENSIBILITY, String.valueOf(settings.getMouseSettings().getScrollSensitivity())); settingsMap.put(CViewSettings.SEARCH_CASE_SENSITIVE, String.valueOf(settings.getSearchSettings().getSearchCaseSensitive())); settingsMap.put( CViewSettings.SEARCH_REGEX, String.valueOf(settings.getSearchSettings().getSearchRegEx())); settingsMap.put(CViewSettings.SEARCH_SELECTED_ONLY, String.valueOf(settings.getSearchSettings().getSearchSelectedNodesOnly())); settingsMap.put(CViewSettings.SEARCH_VISIBLE_ONLY, String.valueOf(settings.getSearchSettings().getSearchVisibleNodesOnly())); settingsMap.put(CViewSettings.ZOOM_SENSIBILITY, String.valueOf(settings.getMouseSettings().getZoomSensitivity())); settingsMap.put(CViewSettings.SIMPLIFIED_VARIABLE_ACCESS, String.valueOf(settings.getDisplaySettings().getSimplifiedVariableAccess())); // Not really graph settings => No CViewSettings constants settingsMap.put("view_center_x", String.valueOf(view2d.getCenter().getX())); settingsMap.put("view_center_y", String.valueOf(view2d.getCenter().getY())); settingsMap.put("world_rect_x", String.valueOf(view2d.getWorldRect().x)); settingsMap.put("world_rect_y", String.valueOf(view2d.getWorldRect().y)); settingsMap.put("world_rect_width", String.valueOf(view2d.getWorldRect().width)); settingsMap.put("world_rect_height", String.valueOf(view2d.getWorldRect().height)); settingsMap.put("zoom", String.valueOf(view2d.getZoom())); try { // TODO: Pass this exception to the outside? view.saveSettings(settingsMap); } catch (final CouldntSaveDataException e) { CUtilityFunctions.logException(e); } } }
{ "content_hash": "be526cacce8a772f08bdc4fed69e2e7a", "timestamp": "", "source": "github", "line_count": 121, "max_line_length": 99, "avg_line_length": 56.84297520661157, "alnum_prop": 0.7694097121256179, "repo_name": "chubbymaggie/binnavi", "id": "7f5b1f564fec5842c81f93b7e70373d31d15b343", "size": "7461", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/main/java/com/google/security/zynamics/binnavi/yfileswrap/zygraph/Implementations/CSettingsFunctions.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1489" }, { "name": "C", "bytes": "8997" }, { "name": "C++", "bytes": "982064" }, { "name": "CMake", "bytes": "1953" }, { "name": "CSS", "bytes": "12843" }, { "name": "GAP", "bytes": "3637" }, { "name": "HTML", "bytes": "437459" }, { "name": "Java", "bytes": "21693692" }, { "name": "Makefile", "bytes": "3498" }, { "name": "PLSQL", "bytes": "1866504" }, { "name": "PLpgSQL", "bytes": "638893" }, { "name": "Protocol Buffer", "bytes": "10300" }, { "name": "Python", "bytes": "23981" }, { "name": "SQLPL", "bytes": "330046" }, { "name": "Shell", "bytes": "713" } ], "symlink_target": "" }
package com.coolweather.app.util;

/**
 * Callback interface
 * @author zhangkcode
 * @createDate 20170428
 */
public interface HttpCallbackListener {

    public void onFinish(String response);

    public void onError(Exception e);
}
{ "content_hash": "62e3cc0eaf2880d3e14e49b65bec1443", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 39, "avg_line_length": 20, "alnum_prop": 0.7318181818181818, "repo_name": "zhangkcode/CoolWeather", "id": "ff353d8ae9eef02286d38506a715407909b81513", "size": "220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/coolweather/app/util/HttpCallbackListener.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "31029" } ], "symlink_target": "" }
#ifndef FSL_DDR_MAIN_H #define FSL_DDR_MAIN_H #include <fsl_ddrc_version.h> #include <fsl_ddr_sdram.h> #include <fsl_ddr_dimm_params.h> #include <common_timing_params.h> #ifndef CONFIG_SYS_FSL_DDR_MAIN_NUM_CTRLS /* All controllers are for main memory */ #define CONFIG_SYS_FSL_DDR_MAIN_NUM_CTRLS CONFIG_SYS_NUM_DDR_CTLRS #endif #ifdef CONFIG_SYS_FSL_DDR_LE #define ddr_in32(a) in_le32(a) #define ddr_out32(a, v) out_le32(a, v) #define ddr_setbits32(a, v) setbits_le32(a, v) #define ddr_clrbits32(a, v) clrbits_le32(a, v) #define ddr_clrsetbits32(a, clear, set) clrsetbits_le32(a, clear, set) #else #define ddr_in32(a) in_be32(a) #define ddr_out32(a, v) out_be32(a, v) #define ddr_setbits32(a, v) setbits_be32(a, v) #define ddr_clrbits32(a, v) clrbits_be32(a, v) #define ddr_clrsetbits32(a, clear, set) clrsetbits_be32(a, clear, set) #endif u32 fsl_ddr_get_version(unsigned int ctrl_num); #if defined(CONFIG_DDR_SPD) || defined(CONFIG_SPD_EEPROM) /* * Bind the main DDR setup driver's generic names * to this specific DDR technology. */ static __inline__ int compute_dimm_parameters(const unsigned int ctrl_num, const generic_spd_eeprom_t *spd, dimm_params_t *pdimm, unsigned int dimm_number) { return ddr_compute_dimm_parameters(ctrl_num, spd, pdimm, dimm_number); } #endif /* * Data Structures * * All data structures have to be on the stack */ #define CONFIG_SYS_DIMM_SLOTS_PER_CTLR CONFIG_DIMM_SLOTS_PER_CTLR typedef struct { generic_spd_eeprom_t spd_installed_dimms[CONFIG_SYS_NUM_DDR_CTLRS][CONFIG_SYS_DIMM_SLOTS_PER_CTLR]; struct dimm_params_s dimm_params[CONFIG_SYS_NUM_DDR_CTLRS][CONFIG_SYS_DIMM_SLOTS_PER_CTLR]; memctl_options_t memctl_opts[CONFIG_SYS_NUM_DDR_CTLRS]; common_timing_params_t common_timing_params[CONFIG_SYS_NUM_DDR_CTLRS]; fsl_ddr_cfg_regs_t fsl_ddr_config_reg[CONFIG_SYS_NUM_DDR_CTLRS]; unsigned int first_ctrl; unsigned int num_ctrls; unsigned long long mem_base; unsigned int dimm_slots_per_ctrl; int (*board_need_mem_reset)(void); void (*board_mem_reset)(void); void (*board_mem_de_reset)(void); } fsl_ddr_info_t; /* Compute steps */ #define STEP_GET_SPD (1 << 0) #define STEP_COMPUTE_DIMM_PARMS (1 << 1) #define STEP_COMPUTE_COMMON_PARMS (1 << 2) #define STEP_GATHER_OPTS (1 << 3) #define STEP_ASSIGN_ADDRESSES (1 << 4) #define STEP_COMPUTE_REGS (1 << 5) #define STEP_PROGRAM_REGS (1 << 6) #define STEP_ALL 0xFFF unsigned long long fsl_ddr_compute(fsl_ddr_info_t *pinfo, unsigned int start_step, unsigned int size_only); const char *step_to_string(unsigned int step); unsigned int compute_fsl_memctl_config_regs(const unsigned int ctrl_num, const memctl_options_t *popts, fsl_ddr_cfg_regs_t *ddr, const common_timing_params_t *common_dimm, const dimm_params_t *dimm_parameters, unsigned int dbw_capacity_adjust, unsigned int size_only); unsigned int compute_lowest_common_dimm_parameters( const unsigned int ctrl_num, const dimm_params_t *dimm_params, common_timing_params_t *outpdimm, unsigned int number_of_dimms); unsigned int populate_memctl_options(const common_timing_params_t *common_dimm, memctl_options_t *popts, dimm_params_t *pdimm, unsigned int ctrl_num); void check_interleaving_options(fsl_ddr_info_t *pinfo); unsigned int mclk_to_picos(const unsigned int ctrl_num, unsigned int mclk); unsigned int get_memory_clk_period_ps(const unsigned int ctrl_num); unsigned int picos_to_mclk(const unsigned int ctrl_num, unsigned int picos); void fsl_ddr_set_lawbar( const common_timing_params_t *memctl_common_params, unsigned int memctl_interleaved, unsigned int ctrl_num); void fsl_ddr_sync_memctl_refresh(unsigned int 
first_ctrl, unsigned int last_ctrl); int fsl_ddr_interactive_env_var_exists(void); unsigned long long fsl_ddr_interactive(fsl_ddr_info_t *pinfo, int var_is_set); void fsl_ddr_get_spd(generic_spd_eeprom_t *ctrl_dimms_spd, unsigned int ctrl_num, unsigned int dimm_slots_per_ctrl); int do_reset(cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[]); unsigned int check_fsl_memctl_config_regs(const fsl_ddr_cfg_regs_t *ddr); void board_add_ram_info(int use_default); /* processor specific function */ void fsl_ddr_set_memctl_regs(const fsl_ddr_cfg_regs_t *regs, unsigned int ctrl_num, int step); void remove_unused_controllers(fsl_ddr_info_t *info); /* board specific function */ int fsl_ddr_get_dimm_params(dimm_params_t *pdimm, unsigned int controller_number, unsigned int dimm_number); void update_spd_address(unsigned int ctrl_num, unsigned int slot, unsigned int *addr); void erratum_a009942_check_cpo(void); #endif
{ "content_hash": "5059bff36607c2aab584b9e96abb96fa", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 82, "avg_line_length": 34.44927536231884, "alnum_prop": 0.7103491796381994, "repo_name": "guileschool/beagleboard", "id": "261b94e9845eb926dacb6175addde42b15049844", "size": "4852", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "u-boot/include/fsl_ddr.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "960094" }, { "name": "Awk", "bytes": "269" }, { "name": "Batchfile", "bytes": "3451" }, { "name": "C", "bytes": "62720528" }, { "name": "C++", "bytes": "5261365" }, { "name": "CSS", "bytes": "8362" }, { "name": "GDB", "bytes": "3642" }, { "name": "HTML", "bytes": "237884" }, { "name": "Lex", "bytes": "13917" }, { "name": "Makefile", "bytes": "429363" }, { "name": "Objective-C", "bytes": "370078" }, { "name": "Perl", "bytes": "358570" }, { "name": "Python", "bytes": "884691" }, { "name": "Roff", "bytes": "9384" }, { "name": "Shell", "bytes": "96042" }, { "name": "Tcl", "bytes": "967" }, { "name": "XSLT", "bytes": "445" }, { "name": "Yacc", "bytes": "26163" } ], "symlink_target": "" }
function bob(){} function bill(){i=2 function mary(){var currentConstructor=this.constructor,methodName=null}}
{ "content_hash": "a085b372081be38184011ef8bddcdb74", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 73, "avg_line_length": 37, "alnum_prop": 0.7837837837837838, "repo_name": "fgaray/hjsmin", "id": "58c958d1fce7e4e2e192842a8711d568e9133309", "size": "111", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/pminified/5_comments.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "3773" }, { "name": "Haskell", "bytes": "45822" }, { "name": "JavaScript", "bytes": "1118416" }, { "name": "PHP", "bytes": "50335" }, { "name": "Shell", "bytes": "453" } ], "symlink_target": "" }
<#
.SYNOPSIS
Get-PrefetchListingLastWriteTime.ps1
Requires logparser.exe in path

Pulls PrefetchListing data sorted by LastWriteTimeUtc descending from collected
Get-PrefetchListing data.

This script expects files matching the pattern *PrefetchListing.tsv to be in
the current working directory.
.NOTES
DATADIR PrefetchListing
#>

if (Get-Command logparser.exe) {
    $lpquery = @"
    SELECT
        FullName,
        LastWriteTimeUtc,
        PSComputerName
    FROM
        *PrefetchListing.tsv
    ORDER BY
        LastWriteTimeUtc Desc
"@

    & logparser -stats:off -i:tsv -fixedsep:on -dtlines:0 -rtp:-1 $lpquery

} else {
    $ScriptName = [System.IO.Path]::GetFileName($MyInvocation.ScriptName)
    "${ScriptName} requires logparser.exe in the path."
}
{ "content_hash": "0f67f56ab5c9bff503cf18ed7d4c7467", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 74, "avg_line_length": 23.060606060606062, "alnum_prop": 0.7201051248357424, "repo_name": "vimokumar/Kansa", "id": "f6b473d9e1382a801a6be39abacf2cc025e5151e", "size": "763", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Analysis/process/Get-PrefetchListingLastWriteTime.ps1", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "PowerShell", "bytes": "305283" } ], "symlink_target": "" }
export const createStore = (state, storeChange) => {
  const listeners = [];
  let store = state || {};

  // Register a listener that runs after every dispatched action.
  const subscribe = (listen) => listeners.push(listen);

  // Compute the next state, notify listeners with (newStore, oldStore), then swap.
  const dispatch = (action) => {
    const newStore = storeChange(store, action);
    listeners.forEach(item => {
      item(newStore, store);
    });
    store = newStore;
  };

  return { store, dispatch, subscribe };
};
{ "content_hash": "ef599d2e141017eafc2f493c9bb3fc01", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 55, "avg_line_length": 29.153846153846153, "alnum_prop": 0.6226912928759895, "repo_name": "wumouren/react-demo", "id": "b3b3a27eaa134d8623e5e67cfdb9bb1713f6ee2a", "size": "379", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "my-redux-notes/my-redux/src/redux/createStore.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "230" }, { "name": "HTML", "bytes": "2732" }, { "name": "JavaScript", "bytes": "6968105" } ], "symlink_target": "" }
<?php

namespace Kunstmaan\LeadGenerationBundle\Form\Rule;

use Symfony\Component\Form\FormBuilderInterface;

class MaxXTimeAdminType extends AbstractRuleAdminType
{
    /**
     * Builds the form.
     *
     * This method is called for each type in the hierarchy starting from the
     * top most type. Type extensions can further modify the form.
     *
     * @see FormTypeExtensionInterface::buildForm()
     *
     * @param FormBuilderInterface $builder The form builder
     * @param array                $options The options
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder->add('times', 'integer');
    }

    /**
     * Returns the name of this type.
     *
     * @return string The name of this type
     */
    public function getName()
    {
        return 'max_x_times_form';
    }
}
{ "content_hash": "d2d0e4403bd765ad7a2e70e8fe3bb15b", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 77, "avg_line_length": 25.235294117647058, "alnum_prop": 0.6398601398601399, "repo_name": "mennowame/KunstmaanBundlesCMS", "id": "a86b151d97115e0becec6b88b9ce81aa510643dd", "size": "858", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "src/Kunstmaan/LeadGenerationBundle/Form/Rule/MaxXTimeAdminType.php", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "130" }, { "name": "CSS", "bytes": "561843" }, { "name": "Cucumber", "bytes": "22143" }, { "name": "HTML", "bytes": "657329" }, { "name": "JavaScript", "bytes": "6395203" }, { "name": "PHP", "bytes": "2492897" }, { "name": "PowerShell", "bytes": "161" }, { "name": "Ruby", "bytes": "2363" }, { "name": "Shell", "bytes": "4014" } ], "symlink_target": "" }
module Fencer # Fences out HTML tags and character entities. See {Fencer}. module Html extend self def fence(string) fence_tags(string) end def fence_tags(string) scanner = UnicodeScanner.new(string) tokens = Hash.new { |hsh, k| hsh[k] = [] } until scanner.eos? match = scanner.scan_until(/</) scanner.pos = scanner.pos - 1 # rewind back to tag opening break unless match start = scanner.pos token = scanner.scan(/<\s*(\/\s*)?\w+(\s+[a-zA-Z0-9\-]+(="?.+?"?)?)*\s*(\/\s*)?>/) unless token # advance past the < again, so as not to catch it the next time around scanner.pos = scanner.pos + 1 next end stop = scanner.pos - 1 tokens[token] << (start..stop) end return tokens end # Currently, this method is unused. We do not want to enforce # #fence_entities because many languages that will be translated may not # used the fenced entities. # # TODO: Add optional vs required fences. def fence_entities(string) scanner = UnicodeScanner.new(string) tokens = Hash.new until scanner.eos? match = scanner.scan_until(/&/) scanner.pos = scanner.pos - 1 # rewind back to ampersand break unless match start = scanner.pos token = scanner.scan(/&(#[0-9]{1,3}|#x[0-9a-fA-F]{1,4}|[0-9a-zA-Z]+);/) unless token # advance past the & again, so as not to catch it the next time around scanner.pos = scanner.pos + 1 next end stop = scanner.pos - 1 tokens[token] ||= Array.new tokens[token] << (start..stop) end return tokens end # Verifies that the HTML is valid. def valid?(string) wrapped_str = HTML_TEMPLATE.sub('%{string}', string) @validator ||= PageValidations::HTMLValidation.new(nil, %w(-utf8)) validation = @validator.validation(wrapped_str, '_') return validation.valid? end # @private HTML_TEMPLATE = <<-HTML <!DOCTYPE html> <html> <head> <title>test</title> </head> <body> <div>%{string}</div> </body> </html> HTML end end
{ "content_hash": "3a9c41962ff3c156f22324a473d5314c", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 90, "avg_line_length": 25.443181818181817, "alnum_prop": 0.5690040196516302, "repo_name": "square/shuttle", "id": "e47c0cd0dfccbe4f491d9fa219bba52f126f5774", "size": "2842", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/fencer/html.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1321" }, { "name": "CoffeeScript", "bytes": "109642" }, { "name": "Dockerfile", "bytes": "699" }, { "name": "HTML", "bytes": "69426" }, { "name": "JavaScript", "bytes": "3652" }, { "name": "Mustache", "bytes": "7387" }, { "name": "Procfile", "bytes": "115" }, { "name": "Ruby", "bytes": "2005291" }, { "name": "SCSS", "bytes": "99725" }, { "name": "Shell", "bytes": "3784" }, { "name": "Slim", "bytes": "185134" } ], "symlink_target": "" }
<?php

class Google_Service_Dataflow_WorkerLifecycleEvent extends Google_Model
{
  public $containerStartTime;
  public $event;
  public $metadata;

  public function setContainerStartTime($containerStartTime)
  {
    $this->containerStartTime = $containerStartTime;
  }
  public function getContainerStartTime()
  {
    return $this->containerStartTime;
  }
  public function setEvent($event)
  {
    $this->event = $event;
  }
  public function getEvent()
  {
    return $this->event;
  }
  public function setMetadata($metadata)
  {
    $this->metadata = $metadata;
  }
  public function getMetadata()
  {
    return $this->metadata;
  }
}
{ "content_hash": "9509f4013b84928673462345cc450229", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 71, "avg_line_length": 18.941176470588236, "alnum_prop": 0.6925465838509317, "repo_name": "phil-davis/core", "id": "c862c6b8b382bfbce1ad8a52495d460bed83cd55", "size": "1234", "binary": false, "copies": "21", "ref": "refs/heads/master", "path": "apps/files_external/3rdparty/google/apiclient-services/src/Google/Service/Dataflow/WorkerLifecycleEvent.php", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "262" }, { "name": "Makefile", "bytes": "473" }, { "name": "Shell", "bytes": "8644" } ], "symlink_target": "" }
/* Commentary: * */ /* Change log: * */ #include "dtkPlotCurve.h" #include "dtkPlotView.h" #include "dtkPlotViewPanner.h" #include "dtkPlotViewPicker.h" #include "dtkPlotViewZoomer.h" #include "dtkPlotViewGrid.h" #include "dtkPlotViewToolBar.h" #include <qwt_plot.h> #include <qwt_plot_canvas.h> #include <qwt_plot_curve.h> #include <qwt_plot_grid.h> #include <qwt_scale_engine.h> #include <qwt_scale_widget.h> #include <qwt_plot_zoomer.h> #include <float.h> class dtkPlotViewPrivate : public QwtPlot { public: dtkPlotViewPanner *panner; dtkPlotViewPicker *picker; dtkPlotViewZoomer *zoomer; dtkPlotViewGrid *grid; dtkPlotViewLegend *legend; public: dtkPlotViewToolBar *toolbar; public: QFrame *frame_view; public: int alphaCurveArea; public: QColor grid_color; QColor picking_color; QColor zoom_color; QColor background_color; QColor foreground_color; public: dtkPlotView::Scale scaleEngineX; dtkPlotView::Scale scaleEngineY; public: QList<dtkPlotCurve *> curves; }; dtkPlotView::dtkPlotView(void) : dtkAbstractView(), d(new dtkPlotViewPrivate()) { d->panner = NULL; d->picker = NULL; d->zoomer = NULL; d->grid = NULL; d->legend = NULL; // Colors d->grid_color = Qt::black; d->picking_color = Qt::black; d->zoom_color = Qt::black; d->background_color = Qt::white; d->foreground_color = Qt::black; d->alphaCurveArea = 0; // Scale d->setAxisAutoScale(0, true); d->setAxisAutoScale(1, true); d->scaleEngineX = dtkPlotView::Linear; d->scaleEngineY = dtkPlotView::Linear; reinterpret_cast<QwtPlotCanvas *>(d->canvas())->setFrameStyle(QFrame::NoFrame); d->toolbar = new dtkPlotViewToolBar(this); d->frame_view = new QFrame; QVBoxLayout *layout = new QVBoxLayout(d->frame_view); layout->setContentsMargins(0, 0, 0, 0); layout->setSpacing(0); layout->addWidget(d); layout->addWidget(d->toolbar); this->setStyleSheet(dtkReadFile(":dtkPlot/dtkPlotView.qss")); this->readSettings(); } dtkPlotView::~dtkPlotView(void) { this->clear(); delete d; d = NULL; } void dtkPlotView::clear(void) { d->curves.clear(); foreach (QwtPlotItem *item, d->itemList()) item->detach(); this->update(); } QList<dtkPlotCurve *> dtkPlotView::curves(void) { return d->curves; } void dtkPlotView::activatePanning(void) { if (!d->panner) d->panner = new dtkPlotViewPanner(this); d->panner->activate(); } void dtkPlotView::updateAxes() { QRectF rect; foreach (dtkPlotCurve *curve, d->curves) { if (((QwtPlotCurve *)(curve->d))->isVisible()) { rect = rect.united(curve->boundingRect()); } } qreal xmin = rect.left(); qreal xmax = rect.right(); qreal ymin = rect.top(); qreal ymax = rect.bottom(); if (xmin < xmax) { this->setAxisScaleX(xmin, xmax); this->setAxisScaleY(ymin, ymax); } d->updateAxes(); if (d->zoomer) d->zoomer->updateBase(rect); this->update(); } void dtkPlotView::deactivatePanning(void) { if (!d->panner) d->panner = new dtkPlotViewPanner(this); d->panner->deactivate(); } void dtkPlotView::activatePicking(void) { if (!d->picker) { d->picker = new dtkPlotViewPicker(this); d->picker->setColor(d->picking_color); } d->picker->activate(); } void dtkPlotView::deactivatePicking(void) { if (!d->picker) d->picker = new dtkPlotViewPicker(this); d->picker->deactivate(); } void dtkPlotView::setPickingColor(const QColor& color) { if (d->picker) d->picker->setColor(color); d->picking_color = color; this->writeSettings(); } QColor dtkPlotView::pickingColor(void) const { return d->picking_color; } void dtkPlotView::activateZooming(void) { if (!d->zoomer) { d->zoomer = new dtkPlotViewZoomer(this); this->setZoomColor(d->zoom_color); } d->zoomer->activate(); } void 
dtkPlotView::deactivateZooming(void) { if (!d->zoomer) d->zoomer = new dtkPlotViewZoomer(this); d->zoomer->deactivate(); } bool dtkPlotView::isZoomActivated(void) const { bool value = false; if (d->zoomer) value = d->zoomer->isActivated(); return value; } void dtkPlotView::setZoomColor(const QColor& color) { if (d->zoomer) d->zoomer->setColor(color); d->zoom_color = color; this->writeSettings(); } QColor dtkPlotView::zoomColor(void) const { return d->zoom_color; } void dtkPlotView::activateGrid(void) { if (!d->grid) { d->grid = new dtkPlotViewGrid(this); d->grid->setColor(d->grid_color); } this->update(); } void dtkPlotView::deactivateGrid(void) { if (d->grid) { delete d->grid; d->grid = NULL; } this->update(); } void dtkPlotView::setGridColor(const QColor& color) { if (d->grid) d->grid->setColor(color); d->grid_color = color; this->writeSettings(); } QColor dtkPlotView::gridColor(void) const { return d->grid_color; } void dtkPlotView::activateLegend(void) { if (!d->legend) d->legend = new dtkPlotViewLegend(this); this->update(); } void dtkPlotView::deactivateLegend(void) { if (d->legend) { delete d->legend; d->legend = NULL; } this->update(); } void dtkPlotView::setTitle(const QString& title) { d->setTitle(title); this->setName(title); } void dtkPlotView::setTitleSize(const int& size) { QFont font; font.setFamily("verdana"); font.setBold(false); font.setPointSize(size); QwtText title = d->title(); title.setFont(font); d->setTitle(title); } QString dtkPlotView::title(void) const { return d->title().text(); } int dtkPlotView::titleSize(void) const { return d->title().font().pointSize(); } void dtkPlotView::setAxisTitleX(const QString& title) { d->setAxisTitle(QwtPlot::xBottom, title); } QString dtkPlotView::axisTitleX(void) const { return d->axisWidget(QwtPlot::xBottom)->title().text(); } void dtkPlotView::setAxisTitleY(const QString& title) { d->setAxisTitle(QwtPlot::yLeft, title); } QString dtkPlotView::axisTitleY(void) const { return d->axisWidget(QwtPlot::yLeft)->title().text(); } void dtkPlotView::setAxisTitleSizeX(const int& size) { int axisId = QwtPlot::xBottom; QFont font = d->title().font(); font.setPointSize(size); QwtText title = d->axisTitle(axisId); title.setFont(font); d->setAxisTitle(axisId, title); } void dtkPlotView::setAxisTitleSizeY(const int& size) { int axisId = QwtPlot::yLeft; QFont font = d->title().font(); font.setPointSize(size); QwtText title = d->axisTitle(axisId); title.setFont(font); d->setAxisTitle(axisId, title); } int dtkPlotView::axisTitleSizeX(void) const { return d->axisWidget(QwtPlot::xBottom)->title().font().pointSize(); } int dtkPlotView::axisTitleSizeY(void) const { return d->axisWidget(QwtPlot::yLeft)->title().font().pointSize(); } void dtkPlotView::setAxisScaleX(double min, double max) { d->setAxisScale(QwtPlot::xBottom, min, max); } void dtkPlotView::setAxisScaleY(double min, double max) { d->setAxisScale(QwtPlot::yLeft, min, max); } void dtkPlotView::setAxisScaleX(dtkPlotView::Scale scale) { if (scale == dtkPlotView::Linear) d->setAxisScaleEngine(QwtPlot::xBottom, new QwtLinearScaleEngine); if (scale == dtkPlotView::Logarithmic) { #if QWT_VERSION >= 0x060100 d->setAxisScaleEngine(QwtPlot::xBottom, new QwtLogScaleEngine); #else d->setAxisScaleEngine(QwtPlot::xBottom, new QwtLog10ScaleEngine); #endif } d->scaleEngineX = scale; this->update(); this->writeSettings(); } void dtkPlotView::setAxisScaleY(dtkPlotView::Scale scale) { if (scale == dtkPlotView::Linear) d->setAxisScaleEngine(QwtPlot::yLeft, new QwtLinearScaleEngine); if (scale == 
dtkPlotView::Logarithmic) { #if QWT_VERSION >= 0x060100 d->setAxisScaleEngine(QwtPlot::yLeft, new QwtLogScaleEngine); #else d->setAxisScaleEngine(QwtPlot::yLeft, new QwtLog10ScaleEngine); #endif } d->scaleEngineY = scale; this->update(); this->writeSettings(); } dtkPlotView::Scale dtkPlotView::axisScaleX(void) const { return d->scaleEngineX; } dtkPlotView::Scale dtkPlotView::axisScaleY(void) const { return d->scaleEngineY; } void dtkPlotView::setLegendPosition(LegendPosition position) { if (d->legend) { d->legend->setPosition(position); } } void dtkPlotView::setBackgroundColor(int red, int green, int blue) { this->setBackgroundColor(QColor(red, green, blue)); } void dtkPlotView::setBackgroundColor(double red, double green, double blue) { this->setBackgroundColor(QColor(red, green, blue)); } void dtkPlotView::setBackgroundColor(const QColor& color) { d->background_color = color; d->setCanvasBackground(color); this->updateColors(); } QColor dtkPlotView::backgroundColor() const { return d->canvasBackground().color(); } void dtkPlotView::setForegroundColor(const QColor& color) { d->foreground_color = color; this->updateColors(); } QColor dtkPlotView::foregroundColor() const { return d->foreground_color; } void dtkPlotView::updateColors(void) { QString sheet = "background: " + d->background_color.name() + "; color: " + d->foreground_color.name() + ";"; d->setStyleSheet(sheet); this->update(); this->writeSettings(); } void dtkPlotView::setDark(bool dark) { d->toolbar->setDark(dark); } void dtkPlotView::fillCurveArea(int alpha) { if (alpha < 0) { alpha = 0; } else if (alpha > 255) { alpha = 255; } d->alphaCurveArea = alpha; foreach (dtkPlotCurve *curve, d->curves) { QColor color = curve->color(); color.setAlphaF(alpha / 255.); curve->setColorArea(color); } this->update(); } int dtkPlotView::alphaCurveArea(void) const { return d->alphaCurveArea; } void dtkPlotView::setStyleSheet(const QString& sheet) { d->setStyleSheet(sheet); d->toolbar->setStyleSheet(sheet); } dtkPlotView& dtkPlotView::operator<<(dtkPlotCurve *curve) { QwtPlotCurve *c = ((QwtPlotCurve *)(curve->d)); if (c->plot() != d) d->curves << curve; if (c->plot() != d) c->attach((QwtPlot *)d); this->updateCurveColor(curve); return *(this); } void dtkPlotView::updateCurveColor(dtkPlotCurve *curve) { QColor c_color = curve->color(); QColor v_color = this->backgroundColor(); int c_color_value = qGray(c_color.rgb()); int v_color_value = qGray(v_color.rgb()); int delta = 0; while ( qAbs(c_color_value - v_color_value) < 50 && delta < 120) { int r = (c_color.red() + delta) % 255; int g = (c_color.green() + delta) % 255; int b = (c_color.blue() + delta) % 255; curve->setColor(QColor(255 - r, 255 - g, 255 - b)); c_color_value = qGray(255 - r, 255 - g, 255 - b); delta += 10; } } void dtkPlotView::setRandomCurvesColor(int seed) { qsrand(seed); int index = 0; foreach (dtkPlotCurve *curve, d->curves) { QColor color = QColor::fromHsv((qrand() * index) % 255, 255, 190); curve->setColor(color); this->updateCurveColor(curve); index++; } this->update(); } void dtkPlotView::update(void) { d->updateLayout(); d->replot(); emit updated(); } QWidget *dtkPlotView::widget(void) { return d->frame_view; } QWidget *dtkPlotView::plotWidget(void) { return d; } void dtkPlotView::readSettings(void) { QSettings settings("inria", "dtk"); settings.beginGroup("plot"); d->grid_color = settings.value("grid_color", QColor(Qt::black)).value<QColor>(); d->picking_color = settings.value("picking_color", QColor(Qt::black)).value<QColor>(); d->zoom_color = 
settings.value("zoom_color", QColor(Qt::black)).value<QColor>(); d->background_color = settings.value("background_color", QColor(Qt::white)).value<QColor>(); d->foreground_color = settings.value("forergound_color", QColor(Qt::black)).value<QColor>(); d->scaleEngineX = (dtkPlotView::Scale) settings.value("axis_x_scale_engine", dtkPlotView::Linear).toUInt(); d->scaleEngineY = (dtkPlotView::Scale) settings.value("axis_y_scale_engine", dtkPlotView::Linear).toUInt(); settings.endGroup(); this->updateColors(); this->setAxisScaleX(d->scaleEngineX); this->setAxisScaleY(d->scaleEngineY); } void dtkPlotView::writeSettings(void) { QSettings settings("inria", "dtk"); settings.beginGroup("plot"); settings.setValue("grid_color", d->grid_color); settings.setValue("picking_color", d->picking_color); settings.setValue("zoom_color", d->zoom_color); settings.setValue("background_color", d->background_color); settings.setValue("forergound_color", d->foreground_color); settings.setValue("axis_x_scale_engine", d->scaleEngineX); settings.setValue("axis_y_scale_engine", d->scaleEngineY); settings.endGroup(); } // ///////////////////////////////////////////////////////////////// // // ///////////////////////////////////////////////////////////////// dtkAbstractView *createPlotView(void) { return new dtkPlotView; }
{ "content_hash": "f9d6b7698d577a633979704a55c58b7a", "timestamp": "", "source": "github", "line_count": 645, "max_line_length": 113, "avg_line_length": 20.708527131782947, "alnum_prop": 0.6404881335629258, "repo_name": "d-tk/dtk", "id": "05fac437106bbd4e75caae724dfc835036a04177", "size": "13630", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/dtkPlotSupport/dtkPlotView.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "3061" }, { "name": "C++", "bytes": "4888561" }, { "name": "CMake", "bytes": "189979" }, { "name": "HTML", "bytes": "4722" }, { "name": "JavaScript", "bytes": "60221" }, { "name": "Objective-C++", "bytes": "4190" }, { "name": "Python", "bytes": "2637" }, { "name": "QML", "bytes": "25085" }, { "name": "Shell", "bytes": "1944" } ], "symlink_target": "" }
<?php

namespace Tecnocreaciones\Bundle\ToolsBundle\Model\Intro;

/**
 * @author Carlos Mendoza <[email protected]>
 */
interface IntroStepInterface
{
    const POSITION_LEFT = 'left';
    const POSITION_RIGHT = 'right';
    const POSITION_TOP = 'top';
    const POSITION_BOTTOM = 'bottom';

    public function setIntro(IntroInterface $intro = null);

    public function getIntro();

    public function isEnabled();

    public static function getPositions();
}
{ "content_hash": "1084cf60f59e554811a951e19153fc53", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 59, "avg_line_length": 19.36, "alnum_prop": 0.6735537190082644, "repo_name": "Tecnocreaciones/ToolsBundle", "id": "f42af00d269a6aa104b7bf9540417ee3ff163228", "size": "712", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Model/Intro/IntroStepInterface.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "44670" }, { "name": "HTML", "bytes": "220843" }, { "name": "JavaScript", "bytes": "751768" }, { "name": "PHP", "bytes": "561217" } ], "symlink_target": "" }
rangy.createModule("TextCommands", function(api, module) { api.requireModules( ["WrappedSelection", "WrappedRange"] ); var dom = api.dom; var log = log4javascript.getLogger("rangy.textcommands"); var tagName = "span", BOOLEAN = "boolean", UNDEF = "undefined"; function trim(str) { return str.replace(/^\s\s*/, "").replace(/\s\s*$/, ""); } function hasClass(el, cssClass) { return el.className && new RegExp("(?:^|\\s)" + cssClass + "(?:\\s|$)").test(el.className); } function addClass(el, cssClass) { if (el.className) { if (!hasClass(el, cssClass)) { el.className += " " + cssClass; } } else { el.className = cssClass; } } var removeClass = (function() { function replacer(matched, whitespaceBefore, whitespaceAfter) { return (whitespaceBefore && whitespaceAfter) ? " " : ""; } return function(el, cssClass) { if (el.className) { el.className = el.className.replace(new RegExp("(?:^|\\s)" + cssClass + "(?:\\s|$)"), replacer); } }; })(); function getSortedClassName(el) { return el.className.split(/\s+/).sort().join(" "); } function hasSameClasses(el1, el2) { return getSortedClassName(el1) == getSortedClassName(el2); } function replaceWithOwnChildren(el) { var parent = el.parentNode; while (el.hasChildNodes()) { parent.insertBefore(el.firstChild, el); } parent.removeChild(el); } /* function normalize(node) { var child = node.firstChild, nextChild; while (child) { if (child.nodeType == 3) { while ((nextChild = child.nextSibling) && nextChild.nodeType == 3) { child.appendData(nextChild.data); node.removeChild(nextChild); } } else { normalize(child); } child = child.nextSibling; } } */ function elementsHaveSameNonClassAttributes(el1, el2) { if (el1.attributes.length != el2.attributes.length) return false; for (var i = 0, len = el1.attributes.length, attr1, attr2, name; i < len; ++i) { attr1 = el1.attributes[i]; name = attr1.name; if (name != "class") { attr2 = el2.attributes.getNamedItem(name); if (attr1.specified != attr2.specified) return false; if (attr1.specified && attr1.nodeValue !== attr2.nodeValue) return false; } } return true; } function elementHasNonClassAttributes(el) { for (var i = 0, len = el.attributes.length; i < len; ++i) { if (el.attributes[i].specified && el.attributes[i].name != "class") { return true; } } return false; } function isSplitPoint(node, offset) { if (dom.isCharacterDataNode(node)) { if (offset == 0) { return !!node.previousSibling; } else if (offset == node.length) { return !!node.nextSibling; } else { return true; } } return offset > 0 && offset < node.childNodes.length; } function splitNodeAt(node, descendantNode, descendantOffset) { log.debug("splitNodeAt", dom.inspectNode(node), dom.inspectNode(descendantNode), descendantOffset); var newNode; if (dom.isCharacterDataNode(descendantNode)) { if (descendantOffset == 0) { descendantOffset = dom.getNodeIndex(descendantNode); descendantNode = descendantNode.parentNode; } else if (descendantOffset == descendantNode.length) { descendantOffset = dom.getNodeIndex(descendantNode) + 1; descendantNode = descendantNode.parentNode; } else { newNode = dom.splitDataNode(descendantNode, descendantOffset); } } if (!newNode) { newNode = descendantNode.cloneNode(false); if (newNode.id) { newNode.removeAttribute("id"); } var child; while ((child = descendantNode.childNodes[descendantOffset])) { log.debug("Moving node " + dom.inspectNode(child) + " into " + dom.inspectNode(newNode)); newNode.appendChild(child); } dom.insertAfter(newNode, descendantNode); } return (descendantNode == node) ? 
newNode : splitNodeAt(node, newNode.parentNode, dom.getNodeIndex(newNode)); } function areElementsMergeable(el1, el2) { return el1.tagName == el2.tagName && hasSameClasses(el1, el2) && elementsHaveSameNonClassAttributes(el1, el2); } function getAdjacentMergeableTextNode(node, forward) { var isTextNode = (node.nodeType == 3); var el = isTextNode ? node.parentNode : node; var adjacentNode; var propName = forward ? "nextSibling" : "previousSibling"; if (isTextNode) { // Can merge if the node's previous/next sibling is a text node adjacentNode = node[propName]; if (adjacentNode && adjacentNode.nodeType == 3) { return adjacentNode; } } else { // Compare element with its sibling adjacentNode = el[propName]; if (adjacentNode && areElementsMergeable(node, adjacentNode)) { return adjacentNode[forward ? "firstChild" : "lastChild"]; } } return null; } function Merge(firstNode) { this.isElementMerge = (firstNode.nodeType == 1); this.firstTextNode = this.isElementMerge ? firstNode.lastChild : firstNode; if (this.isElementMerge) { this.sortedCssClasses = getSortedClassName(firstNode); } this.textNodes = [this.firstTextNode]; } Merge.prototype = { doMerge: function() { var textBits = [], textNode, parent, text; for (var i = 0, len = this.textNodes.length; i < len; ++i) { textNode = this.textNodes[i]; parent = textNode.parentNode; textBits[i] = textNode.data; if (i) { parent.removeChild(textNode); if (!parent.hasChildNodes()) { parent.parentNode.removeChild(parent); } } } this.firstTextNode.data = text = textBits.join(""); return text; }, getLength: function() { var i = this.textNodes.length, len = 0; while (i--) { len += this.textNodes[i].length; } return len; }, toString: function() { var textBits = []; for (var i = 0, len = this.textNodes.length; i < len; ++i) { textBits[i] = "'" + this.textNodes[i].data + "'"; } return "[Merge(" + textBits.join(",") + ")]"; } }; function TextCommand(name, options) { this.name = name; if (typeof options == "object") { for (var i in options) { if (options.hasOwnProperty(i)) { this[i] = options[i]; } } } } TextCommand.prototype = { type: BOOLEAN, normalize: true, applyToAnyTagName: true, tagNames: ["span"], // Normalizes nodes after applying a CSS class to a Range. postApply: function(textNodes, range) { log.group("postApply"); var firstNode = textNodes[0], lastNode = textNodes[textNodes.length - 1]; var merges = [], currentMerge; var rangeStartNode = firstNode, rangeEndNode = lastNode; var rangeStartOffset = 0, rangeEndOffset = lastNode.length; var textNode, precedingTextNode; for (var i = 0, len = textNodes.length; i < len; ++i) { textNode = textNodes[i]; precedingTextNode = getAdjacentMergeableTextNode(textNode, false); log.debug("Checking for merge. text node: " + textNode.data + ", preceding: " + (precedingTextNode ? 
precedingTextNode.data : null)); if (precedingTextNode) { if (!currentMerge) { currentMerge = new Merge(precedingTextNode); merges.push(currentMerge); } currentMerge.textNodes.push(textNode); if (textNode === firstNode) { rangeStartNode = currentMerge.firstTextNode; rangeStartOffset = rangeStartNode.length; } if (textNode === lastNode) { rangeEndNode = currentMerge.firstTextNode; rangeEndOffset = currentMerge.getLength(); } } else { currentMerge = null; } } // Test whether the first node after the range needs merging var nextTextNode = getAdjacentMergeableTextNode(lastNode, true); if (nextTextNode) { if (!currentMerge) { currentMerge = new Merge(lastNode); merges.push(currentMerge); } currentMerge.textNodes.push(nextTextNode); } // Do the merges if (merges.length) { log.info("Merging. Merges:", merges); for (i = 0, len = merges.length; i < len; ++i) { merges[i].doMerge(); } log.info(rangeStartNode.nodeValue, rangeStartOffset, rangeEndNode.nodeValue, rangeEndOffset); // Set the range boundaries range.setStart(rangeStartNode, rangeStartOffset); range.setEnd(rangeEndNode, rangeEndOffset); } log.groupEnd(); }, getAppliedAncestor: function(textNode) { var node = textNode.parentNode; while (node) { if (node.nodeType == 1 && dom.arrayContains(this.tagNames, node.tagName.toLowerCase()) && this.isAppliedToElement(node)) { return node; } node = node.parentNode; } return false; }, applyToElement: function(el) { }, unapplyToElement: function(el) { }, createContainer: function(doc) { var el = doc.createElement(tagName); this.applyToElement(el); return el; }, applyToTextNode: function(textNode) { var parent = textNode.parentNode; if (parent.childNodes.length == 1 && dom.arrayContains(this.tagNames, parent.tagName.toLowerCase())) { this.applyToElement(parent); } else { var el = this.createContainer(dom.getDocument(textNode)); if (el) { textNode.parentNode.insertBefore(el, textNode); el.appendChild(textNode); } } }, isRemovable: function(el) { return el.tagName.toLowerCase() == tagName && trim(el.className) == this.cssClass && !elementHasNonClassAttributes(el); }, undoToTextNode: function(textNode, range, appliedAncestor) { log.info("undoToTextNode", dom.inspectNode(textNode), range.inspect(), dom.inspectNode(appliedAncestor), range.containsNode(appliedAncestor)); if (!range.containsNode(appliedAncestor)) { // Split out the portion of the ancestor from which we can remove the CSS class var ancestorRange = range.cloneRange(); ancestorRange.selectNode(appliedAncestor); log.info("range end in ancestor " + ancestorRange.isPointInRange(range.endContainer, range.endOffset) + ", isSplitPoint " + isSplitPoint(range.endContainer, range.endOffset)); if (ancestorRange.isPointInRange(range.endContainer, range.endOffset) && isSplitPoint(range.endContainer, range.endOffset)) { splitNodeAt(appliedAncestor, range.endContainer, range.endOffset); range.setEndAfter(appliedAncestor); } if (ancestorRange.isPointInRange(range.startContainer, range.startOffset) && isSplitPoint(range.startContainer, range.startOffset)) { appliedAncestor = splitNodeAt(appliedAncestor, range.startContainer, range.startOffset); } } log.info("isRemovable", this.isRemovable(appliedAncestor), dom.inspectNode(appliedAncestor), appliedAncestor.innerHTML, appliedAncestor.parentNode.innerHTML); if (this.isRemovable(appliedAncestor)) { replaceWithOwnChildren(appliedAncestor); } else { this.unapplyToElement(appliedAncestor); } }, applyToRange: function(range) { range.splitBoundaries(); log.info("applyToRange split boundaries "); var textNodes = 
range.getNodes([3]); log.info("applyToRange got text nodes " + textNodes); if (textNodes.length) { var textNode; for (var i = 0, len = textNodes.length; i < len; ++i) { textNode = textNodes[i]; if (!this.getAppliedAncestor(textNode)) { this.applyToTextNode(textNode); } } range.setStart(textNodes[0], 0); textNode = textNodes[textNodes.length - 1]; range.setEnd(textNode, textNode.length); log.info("Apply set range to '" + textNodes[0].data + "', '" + textNode.data + "'"); if (this.normalize) { this.postApply(textNodes, range); } } }, applyToSelection: function(win) { log.group("applyToSelection"); win = win || window; var sel = api.getSelection(win); log.info("applyToSelection " + sel.inspect()); var range, ranges = sel.getAllRanges(); sel.removeAllRanges(); var i = ranges.length; while (i--) { range = ranges[i]; this.applyToRange(range); sel.addRange(range); } log.groupEnd(); }, undoToRange: function(range) { log.info("undoToRange " + range.inspect()); range.splitBoundaries(); var textNodes = range.getNodes( [3] ), textNode, appliedAncestor; if (textNodes.length) { for (var i = 0, len = textNodes.length; i < len; ++i) { textNode = textNodes[i]; appliedAncestor = this.getAppliedAncestor(textNode); if (appliedAncestor) { this.undoToTextNode(textNode, range, appliedAncestor); } } range.setStart(textNodes[0], 0); textNode = textNodes[textNodes.length - 1]; range.setEnd(textNode, textNode.length); log.info("Undo set range to '" + textNodes[0].data + "', '" + textNode.data + "'"); if (this.normalize) { this.postApply(textNodes, range); } } }, undoToSelection: function(win) { win = win || window; var sel = api.getSelection(win); var ranges = sel.getAllRanges(), range; sel.removeAllRanges(); for (var i = 0, len = ranges.length; i < len; ++i) { range = ranges[i]; this.undoToRange(range); sel.addRange(range); } }, getTextSelectedByRange: function(textNode, range) { var textRange = range.cloneRange(); textRange.selectNodeContents(textNode); var intersectionRange = textRange.intersection(range); var text = intersectionRange ? 
intersectionRange.toString() : ""; textRange.detach(); return text; }, isAppliedToElement: function(el) { return false; }, isAppliedToRange: function(range) { var textNodes = range.getNodes( [3] ); for (var i = 0, len = textNodes.length, selectedText; i < len; ++i) { selectedText = this.getTextSelectedByRange(textNodes[i], range); log.debug("text node: '" + textNodes[i].data + "', selectedText: '" + selectedText + "'", this.isAppliedToElement(textNodes[i].parentNode)); if (selectedText != "" && !this.isAppliedToElement(textNodes[i].parentNode)) { return false; } } return true; }, isAppliedToSelection: function(win) { win = win || window; var sel = api.getSelection(win); var ranges = sel.getAllRanges(); var i = ranges.length; while (i--) { if (!this.isAppliedToRange(ranges[i])) { return false; } } log.groupEnd(); return true; }, toggleRange: function(range) { if (this.isAppliedToRange(range)) { this.undoToRange(range); } else { this.applyToRange(range); } }, toggleSelection: function(win) { if (this.isAppliedToSelection(win)) { this.undoToSelection(win); } else { this.applyToSelection(win); } }, execSelection: function(win, value, options) { if (this.type == BOOLEAN) { this.toggleSelection(win); } }, querySelectionValue: function(win) { if (this.type == BOOLEAN) { return this.isAppliedToSelection(win); } } }; var textCommands = {}; api.registerTextCommand = function(name, options) { var cmd = new TextCommand(name, options); textCommands[name.toLowerCase()] = cmd; return cmd; }; api.execSelectionCommand = function(name, win, value, options) { var cmd = textCommands[name.toLowerCase()]; if (cmd && cmd instanceof TextCommand) { cmd.execSelection(win, value, options); } }; api.querySelectionCommandValue = function(name, win) { var cmd = textCommands[name.toLowerCase()]; if (cmd && cmd instanceof TextCommand) { return cmd.querySelectionValue(win); } }; /*----------------------------------------------------------------------------------------------------------------*/ // Register core commands var getComputedStyleProperty; if (typeof window.getComputedStyle != UNDEF) { getComputedStyleProperty = function(el, propName) { return dom.getWindow(el).getComputedStyle(el, null)[propName]; }; } else if (typeof dom.getBody(document).currentStyle != UNDEF) { getComputedStyleProperty = function(el, propName) { return el.currentStyle[propName]; }; } else { module.fail("No means of obtaining computed style properties found"); } api.registerTextCommand("bold", { type: BOOLEAN, tagNames: ["b", "span", "strong"], isAppliedToElement: function(el) { var fontWeight = getComputedStyleProperty(el, "fontWeight"); var isBold = false; if (fontWeight == "bold" || fontWeight == "bolder") { isBold = true; } else if (fontWeight == "normal" || fontWeight == "lighter") { isBold = false; } else { var weightNum = parseInt("" + fontWeight); if (!isNaN(weightNum)) { isBold = weightNum > 400; } } return isBold; }, applyToElement: function(el) { el.style.fontWeight = "bold"; }, unapplyToElement: function(el) { el.style.fontWeight = "normal"; } }); });
{ "content_hash": "7b4c246219cd38b6bc5915dab6e6fffd", "timestamp": "", "source": "github", "line_count": 566, "max_line_length": 191, "avg_line_length": 37.65017667844523, "alnum_prop": 0.5076959174096668, "repo_name": "jackcviers/Rangy", "id": "d20e425ca6df852633b4c9e99f173d3b9cf462f8", "size": "21726", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/js/modules/rangy-textcommands.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2678754" }, { "name": "PHP", "bytes": "153" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace StingCore
{
    public class SqlPlaceModel
    {
        public int Id { get; set; }
        public string Name { get; set; }
        public string Description { get; set; }
        public int OwnerId { get; set; }
        public float Longtitude { get; set; }
        public float Latitude { get; set; }

        public SqlPlaceModel(Place place)
        {
            Id = place.PlaceId;
            Name = place.Name;
            Description = place.Description;
            OwnerId = place.Owner.UserId;
            Longtitude = (float)place.Position.Longtitude;
            Latitude = (float)place.Position.Langtitude;
        }
    }
}
{ "content_hash": "5f9d4753a5474517a874b923bcd8beb1", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 58, "avg_line_length": 26.586206896551722, "alnum_prop": 0.5953307392996109, "repo_name": "theAppleist/Sting", "id": "af779aa32644a04b1d1e34aa2f6900eddb89ea48", "size": "773", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Sting/StingCore/SqlPlaceModel.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "99" }, { "name": "C#", "bytes": "70753" }, { "name": "HTML", "bytes": "5069" } ], "symlink_target": "" }
''' Test Cases for DocumentConverter Class for WordCloud Project Daniel Klein Computer-Based Honors Program The University of Alabama 9.27.2013 ''' import unittest import os, os.path from src.core.python.SupremeCourtOpinionFileConverter import SupremeCourtOpinionFileConverter ##### Here are all the global variables used in these tests. VALID_OPINION_FILE_LINES = ([ """\ TITLE: UNITED STATES v. JOHNSON ET AL., DOING BUSINESS AS UNITED STATES\ DENTAL CO., ET AL.\ """, """CASE NUMBER: No. 43""", """US CITATION: 323 U.S. 273""", """SUPREME COURT CITATION: 65 S. Ct. 249""", """LAWYERS ED CITATION: 89 L. Ed. 236""", """LEXIS CITATION: 1944 U.S. LEXIS 1230""", """\ FULL CITATION: 323 U.S. 273; 65 S. Ct. 249; 89 L. Ed. 236; 1944 U.S. LEXIS 1230\ """, """DATES: November 8, 1944, Argued;December 18, 1944, Decided;""", """DISPOSITION: 53 F.Supp. 596, affirmed.""", """OPINION TYPE: concur""", """* * * * * * * *""", """MR. JUSTICE MURPHY, concurring.""", """I join in the opinion of the Court and believe that the judgment should be \ affirmed.""", """Congress has the constitutional power to fix venue at any place where a \ crime occurs. Our problem here is to determine, in the absence of a specific \ venue provision, where the crime outlawed by the Federal Denture Act occurred \ for purposes of venue.""", """The Act prohibits the use of the mails for the purpose of sending or \ bringing into any state certain prohibited articles. It is undisputed that \ when a defendant places a prohibited article in the mails in Illinois for \ the purpose of sending it into Delaware he has completed a statutory offense. \ Hence he is triable in Illinois. But to hold that the statutory crime also \ encompasses the receipt of the prohibited article in Delaware, justifying a \ trial at that point, requires an implication that I am unwilling to make in \ the absence of more explicit Congressional language.""", """Very often the difference between liberty and imprisonment in cases where \ the direct evidence offered by the government and the defendant is evenly \ balanced depends upon the presence of character witnesses. The defendant is \ more likely to obtain their presence in the district of his residence, which \ in this instance is usually the place where the prohibited article is mailed. \ The inconvenience, expense and loss of time involved in transplanting these \ witnesses to testify in trials far removed from their homes are often too \ great to warrant their use. Moreover, they are likely to lose much of their \ effectiveness before a distant jury that knows nothing of their reputations. \ Such factors make it difficult for me to conclude, where Congress has not \ said so specifically, that we should construe the Federal Denture Act as \ covering more than the first sufficient and punishable use of the mails \ insofar as the sender of a prohibited article is concerned. The principle of \ narrow construction of criminal statutes does not warrant interpreting the \ "use" of the mails to cover all possible uses in light of the foregoing \ considerations."""]) CASE_TITLE = """\ UNITED STATES v. JOHNSON ET AL., DOING BUSINESS AS UNITED STATES\ DENTAL CO., ET AL.\ """ CASE_NUM = "No. 43" CASE_US_CITE = "323 U.S. 273" CASE_SUPREME_COURT_CITE = "65 S. Ct. 249" CASE_LAWYERS_ED_CITE = "89 L. Ed. 236" CASE_LEXIS_CITE = "1944 U.S. LEXIS 1230" CASE_FULL_CITE = "323 U.S. 273; 65 S. Ct. 249; 89 L. Ed. 236; 1944 U.S. LEXIS 1230" CASE_DATES = "November 8, 1944 (Argued) December 18, 1944 (Decided) " # THIS MIGHT CHANGE!! 
CASE_DISPOSITION = "53 F.Supp. 596, affirmed." OPINION_AUTHOR = "MURPHY" OPINION_TYPE = "concur" OPINION_TEXT = "\n".join(VALID_OPINION_FILE_LINES[11:]) TEST_FILE_PATH = os.path.join(os.path.abspath(os.curdir), "MURPHY_1944 U.S. LEXIS 1230.txt") TEST_PICKLE_PATH = os.path.join(os.path.abspath(os.curdir), "pickled_test_doc") ##### def create_test_file(file_lines): with open(TEST_FILE_PATH, 'w') as test_file: for line in file_lines: test_file.write(line + "\n") class DocumentConverterTest(unittest.TestCase): def setUp(self): ''' what do i need to run tests? - a test file. ''' self.test_path = TEST_FILE_PATH self.test_converter = SupremeCourtOpinionFileConverter(self.test_path, TEST_PICKLE_PATH) def tearDown(self): if os.path.exists(self.test_path): os.remove(self.test_path) if os.path.exists(TEST_PICKLE_PATH): os.chmod(TEST_PICKLE_PATH, 0777) os.remove(TEST_PICKLE_PATH) del self.test_converter def testNormalCase(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() normal case...") # create a normal test file create_test_file(VALID_OPINION_FILE_LINES) converted_doc = self.test_converter.convert_file() print("Word count: {0}".format(converted_doc.word_count)) # here assert a bunch of things about the resulting converted_doc self.assertTrue(hasattr(converted_doc, 'output_filename')) self.assertEqual(converted_doc.output_filename, TEST_PICKLE_PATH) self.assertTrue(hasattr(converted_doc, 'doc_text')) self.assertEqual(converted_doc.doc_text, OPINION_TEXT) self.assertTrue(hasattr(converted_doc, 'doc_metadata')) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_title')) self.assertEqual(converted_doc.doc_metadata.case_title, CASE_TITLE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_author')) self.assertEqual(converted_doc.doc_metadata.opinion_author, OPINION_AUTHOR) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_type')) self.assertEqual(converted_doc.doc_metadata.opinion_type, OPINION_TYPE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_num')) self.assertEqual(converted_doc.doc_metadata.case_num, CASE_NUM) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_us_cite')) self.assertEqual(converted_doc.doc_metadata.case_us_cite, CASE_US_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_supreme_court_cite')) self.assertEqual(converted_doc.doc_metadata.case_supreme_court_cite, CASE_SUPREME_COURT_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lawyers_ed_cite')) self.assertEqual(converted_doc.doc_metadata.case_lawyers_ed_cite, CASE_LAWYERS_ED_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lexis_cite')) self.assertEqual(converted_doc.doc_metadata.case_lexis_cite, CASE_LEXIS_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_full_cite')) self.assertEqual(converted_doc.doc_metadata.case_full_cite, CASE_FULL_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_dates')) self.assertEqual(converted_doc.doc_metadata.case_dates, CASE_DATES) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_disposition')) self.assertEqual(converted_doc.doc_metadata.case_disposition, CASE_DISPOSITION) def testNoMetadataInFile(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() " "with no Metadata in the input file...") # create a test file without any metadata fields in it create_test_file(VALID_OPINION_FILE_LINES[10:]) converted_doc = self.test_converter.convert_file() # here assert a bunch of things about the resulting converted_doc 
self.assertTrue(hasattr(converted_doc, 'output_filename')) self.assertEqual(converted_doc.output_filename, TEST_PICKLE_PATH) self.assertTrue(hasattr(converted_doc, 'doc_text')) self.assertEqual(converted_doc.doc_text, OPINION_TEXT) self.assertTrue(hasattr(converted_doc, 'doc_metadata')) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_title')) self.assertEqual(converted_doc.doc_metadata.case_title, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_author')) self.assertEqual(converted_doc.doc_metadata.opinion_author, OPINION_AUTHOR) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_type')) self.assertEqual(converted_doc.doc_metadata.opinion_type, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_num')) self.assertEqual(converted_doc.doc_metadata.case_num, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_us_cite')) self.assertEqual(converted_doc.doc_metadata.case_us_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_supreme_court_cite')) self.assertEqual(converted_doc.doc_metadata.case_supreme_court_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lawyers_ed_cite')) self.assertEqual(converted_doc.doc_metadata.case_lawyers_ed_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lexis_cite')) self.assertEqual(converted_doc.doc_metadata.case_lexis_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_full_cite')) self.assertEqual(converted_doc.doc_metadata.case_full_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_dates')) self.assertEqual(converted_doc.doc_metadata.case_dates, '') self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_disposition')) self.assertEqual(converted_doc.doc_metadata.case_disposition, "") #self.fail("DocumentConverterTest: I haven't written testNoMetadataInFile yet.") def testNoBodyTextInFile(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() " "with no body text in the input file...") # create a test file with valid metadata but without any body text in it create_test_file(VALID_OPINION_FILE_LINES[:11]) converted_doc = self.test_converter.convert_file() # here assert a bunch of things about the resulting converted_doc self.assertTrue(hasattr(converted_doc, 'output_filename')) self.assertEqual(converted_doc.output_filename, TEST_PICKLE_PATH) self.assertTrue(hasattr(converted_doc, 'doc_text')) self.assertEqual(converted_doc.doc_text, "") self.assertTrue(hasattr(converted_doc, 'doc_metadata')) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_title')) self.assertEqual(converted_doc.doc_metadata.case_title, CASE_TITLE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_author')) self.assertEqual(converted_doc.doc_metadata.opinion_author, OPINION_AUTHOR) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_type')) self.assertEqual(converted_doc.doc_metadata.opinion_type, OPINION_TYPE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_num')) self.assertEqual(converted_doc.doc_metadata.case_num, CASE_NUM) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_us_cite')) self.assertEqual(converted_doc.doc_metadata.case_us_cite, CASE_US_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_supreme_court_cite')) self.assertEqual(converted_doc.doc_metadata.case_supreme_court_cite, CASE_SUPREME_COURT_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lawyers_ed_cite')) self.assertEqual(converted_doc.doc_metadata.case_lawyers_ed_cite, 
CASE_LAWYERS_ED_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lexis_cite')) self.assertEqual(converted_doc.doc_metadata.case_lexis_cite, CASE_LEXIS_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_full_cite')) self.assertEqual(converted_doc.doc_metadata.case_full_cite, CASE_FULL_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_dates')) self.assertEqual(converted_doc.doc_metadata.case_dates, CASE_DATES) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_disposition')) self.assertEqual(converted_doc.doc_metadata.case_disposition, CASE_DISPOSITION) #self.fail("DocumentConverterTest: I haven't written testNoBodyTextInFile yet.") def testOutputFileNotWritable(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() " "and save_converted_doc() with an unwritable output file...") create_test_file(VALID_OPINION_FILE_LINES) converted_doc = self.test_converter.convert_file() # assert stuff about the created converted_doc self.assertTrue(hasattr(converted_doc, 'output_filename')) self.assertEqual(converted_doc.output_filename, TEST_PICKLE_PATH) self.assertTrue(hasattr(converted_doc, 'doc_text')) self.assertEqual(converted_doc.doc_text, OPINION_TEXT) self.assertTrue(hasattr(converted_doc, 'doc_metadata')) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_title')) self.assertEqual(converted_doc.doc_metadata.case_title, CASE_TITLE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_author')) self.assertEqual(converted_doc.doc_metadata.opinion_author, OPINION_AUTHOR) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_type')) self.assertEqual(converted_doc.doc_metadata.opinion_type, OPINION_TYPE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_num')) self.assertEqual(converted_doc.doc_metadata.case_num, CASE_NUM) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_us_cite')) self.assertEqual(converted_doc.doc_metadata.case_us_cite, CASE_US_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_supreme_court_cite')) self.assertEqual(converted_doc.doc_metadata.case_supreme_court_cite, CASE_SUPREME_COURT_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lawyers_ed_cite')) self.assertEqual(converted_doc.doc_metadata.case_lawyers_ed_cite, CASE_LAWYERS_ED_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lexis_cite')) self.assertEqual(converted_doc.doc_metadata.case_lexis_cite, CASE_LEXIS_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_full_cite')) self.assertEqual(converted_doc.doc_metadata.case_full_cite, CASE_FULL_CITE) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_dates')) self.assertEqual(converted_doc.doc_metadata.case_dates, CASE_DATES) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_disposition')) self.assertEqual(converted_doc.doc_metadata.case_disposition, CASE_DISPOSITION) # I need to change the permisssions of the pickle_path (chmod 0444) with open(converted_doc.output_filename, 'w') as dummy: pass os.chmod(converted_doc.output_filename, 0444) self.assertRaises(IOError, self.test_converter.save_converted_doc) #self.fail("DocumentConverterTest: I haven't written testOutputFileNotWritable yet.") def testInputFileNonexistent(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() " "with nonexistent input file...") # skip the create_test_file call and just try to convert. 
self.assertRaises(IOError, self.test_converter.convert_file) #self.fail("DocumentConverterTest: I haven't written testInputFileNonexistent yet.") def testEmptyInputFile(self): print("DocumentConverterTest: testing DocumentConverter.convert_file() " "with completely empty input file...") # create a test file with nothing in it create_test_file([]) converted_doc = self.test_converter.convert_file() # here assert a bunch of things about the resulting converted_doc self.assertTrue(hasattr(converted_doc, 'output_filename')) self.assertEqual(converted_doc.output_filename, TEST_PICKLE_PATH) self.assertTrue(hasattr(converted_doc, 'doc_text')) self.assertEqual(converted_doc.doc_text, "") self.assertTrue(hasattr(converted_doc, 'doc_metadata')) self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_title')) self.assertEqual(converted_doc.doc_metadata.case_title, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_author')) self.assertEqual(converted_doc.doc_metadata.opinion_author, OPINION_AUTHOR) self.assertTrue(hasattr(converted_doc.doc_metadata, 'opinion_type')) self.assertEqual(converted_doc.doc_metadata.opinion_type, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_num')) self.assertEqual(converted_doc.doc_metadata.case_num, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_us_cite')) self.assertEqual(converted_doc.doc_metadata.case_us_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_supreme_court_cite')) self.assertEqual(converted_doc.doc_metadata.case_supreme_court_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lawyers_ed_cite')) self.assertEqual(converted_doc.doc_metadata.case_lawyers_ed_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_lexis_cite')) self.assertEqual(converted_doc.doc_metadata.case_lexis_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_full_cite')) self.assertEqual(converted_doc.doc_metadata.case_full_cite, "") self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_dates')) self.assertEqual(converted_doc.doc_metadata.case_dates, '') self.assertTrue(hasattr(converted_doc.doc_metadata, 'case_disposition')) self.assertEqual(converted_doc.doc_metadata.case_disposition, "") #self.fail("DocumentConverterTest: I haven't written testEmptyInputFile yet.") if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] unittest.main()
{ "content_hash": "4372e8f01ac83998fff5895ca2a1c2ff", "timestamp": "", "source": "github", "line_count": 371, "max_line_length": 101, "avg_line_length": 50.35309973045822, "alnum_prop": 0.690594721909962, "repo_name": "dmarklein/WordCloud", "id": "87123ba6391e24dd49f979cc6f67422fac8c5782", "size": "18681", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/unit/python/DocumentConverterTest.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5968" }, { "name": "CSS", "bytes": "17383" }, { "name": "Groovy", "bytes": "82610" }, { "name": "HTML", "bytes": "1258" }, { "name": "Java", "bytes": "118357" }, { "name": "JavaScript", "bytes": "13989" }, { "name": "PowerShell", "bytes": "507" }, { "name": "Python", "bytes": "172532" } ], "symlink_target": "" }
package mongo import ( "time" "upload-demo/models" "labix.org/v2/mgo" "labix.org/v2/mgo/bson" ) type file struct{ gridFile *mgo.GridFile } func (f *file) Close() error { return f.gridFile.Close() } func (f *file) Read(p []byte) (int, error) { return f.gridFile.Read(p) } func (f *file) Write(p []byte) (int, error) { return f.gridFile.Write(p) } func (f *file) Id() models.FileId { oid := f.gridFile.Id().(bson.ObjectId) return models.FileId(oid.Hex()) } func (f *file) ContentType() string { return f.gridFile.ContentType() } func (f *file) SetContentType(s string) { f.gridFile.SetContentType(s) } func (f *file) Name() string { return f.gridFile.Name() } func (f *file) SetName(s string) { f.gridFile.SetName(s) } func (f *file) UploadDate() time.Time { return f.gridFile.UploadDate() } func (f *file) Size() int64 { return f.gridFile.Size() }
{ "content_hash": "2d701aab097c0eb10e1b29abd4534c65", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 75, "avg_line_length": 29.533333333333335, "alnum_prop": 0.654627539503386, "repo_name": "levicook/upload-demo", "id": "1088c5eae4bc95fa11990ade8f1022cd1556e975", "size": "886", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/upload-demo/mongo/file.go", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
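A short sketch of how the unexported file wrapper above might be constructed inside the same package, assuming an open mgo session; the database name, GridFS prefix and helper name are placeholders, not part of the shown code.

package mongo

import "labix.org/v2/mgo"

// newFile is a hypothetical helper: it creates a GridFS file for writing
// and wraps it in the file type defined above.
func newFile(session *mgo.Session, name string) (*file, error) {
	gfs := session.DB("upload-demo").GridFS("fs") // placeholder db name and prefix
	gridFile, err := gfs.Create(name)
	if err != nil {
		return nil, err
	}
	return &file{gridFile: gridFile}, nil
}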
/**
 * Created by jnevins on 7/24/16.
 */
import { Component, OnInit } from '@angular/core';
import { AuthService } from '../../app.auth.service';
import { PolymerElement } from '@vaadin/angular2-polymer';
import { IUserModel, IVenue, IOwnership, ISuggestCriteria, IVenueSuggestion } from "../../common/interfaces";
import { Venue } from "../../common/venue";
import { Genre, Criteria } from "../../common/constants";
import { SuggestedBookingService } from "./suggested_bookings.service";

@Component({
    selector: 'account',
    templateUrl: 'app/dashboard/suggested_bookings/templates/suggested_bookings.component.html',
    styleUrls: ['app/dashboard/suggested_bookings/styles/suggested_bookings.component.css'],
    directives: [
        PolymerElement('paper-material')
    ],
    viewProviders: [
        SuggestedBookingService
    ]
})
export class SuggestedBookings implements OnInit {

    public venues : IVenueSuggestion[];

    constructor(
        private authService : AuthService,
        private suggestedBookingService : SuggestedBookingService
    ) {}

    public ngOnInit() : void {
        // The arrow callback captures `this` lexically, so no explicit bind is needed.
        this.suggestedBookingService.getSuggestions(this.authService.activeUser._id).subscribe(
            (venues : IVenueSuggestion[]) => {
                this.venues = venues;
            }
        );
    }
}
{ "content_hash": "16b8e6f993bbb78942d9d588d36b7cf1", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 96, "avg_line_length": 34.075, "alnum_prop": 0.6786500366837858, "repo_name": "PennStateSoftwareTest/FinalProjectPennState", "id": "d7afe8425529a476b92be4ea9225d12b563ae8be", "size": "1363", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/dashboard/suggested_bookings/suggested_bookings.component.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "23227" }, { "name": "HTML", "bytes": "350535" }, { "name": "JavaScript", "bytes": "49771" }, { "name": "TypeScript", "bytes": "68447" } ], "symlink_target": "" }
/* * HomeConstants * Each action has a corresponding type, which the reducer knows and picks up on. * To avoid weird typos between the reducer and the actions, we save them as * constants here. We prefix them with 'yourproject/YourComponent' so we avoid * reducers accidentally picking up actions they shouldn't. * * Follow this format: * export const YOUR_ACTION_CONSTANT = 'yourproject/YourContainer/YOUR_ACTION_CONSTANT'; */ export const CHANGE_USERNAME = 'product/Home/CHANGE_USERNAME'; export const FETCH_PRODUCTS = 'product/Home/FETCH_PRODUCTS'; export const FETCH_PRODUCTS_SUCCESS = 'product/Home/FETCH_PRODUCTS_SUCCESS'; export const FETCH_PRODUCTS_FAILED = 'product/Home/FETCH_PRODUCTS_FAILED'; export const REMOVE_PRODUCT = 'product/Home/REMOVE_PRODUCT'; export const REMOVE_PRODUCT_SUCCESS = 'product/Home/REMOVE_PRODUCT_SUCCESS'; export const REMOVE_PRODUCT_FAILED = 'product/Home/REMOVE_PRODUCT_FAILED';
{ "content_hash": "bd3762c5aebb1bfa218abd89a4c7560e", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 88, "avg_line_length": 48.8421052631579, "alnum_prop": 0.7780172413793104, "repo_name": "fascinating2000/productFrontend", "id": "b0a3fa8fafeb2f695fcfea816bcccb42e46d76cc", "size": "928", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/containers/HomePage/constants.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "10806" }, { "name": "JavaScript", "bytes": "234565" } ], "symlink_target": "" }
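A sketch of action creators that would consume these constants; the './constants' import path follows the container layout above, while the payload field names are assumptions.

import {
  FETCH_PRODUCTS,
  FETCH_PRODUCTS_SUCCESS,
  FETCH_PRODUCTS_FAILED,
} from './constants';

// Dispatched to start loading products.
export function fetchProducts() {
  return { type: FETCH_PRODUCTS };
}

// Dispatched by the async effect when the request succeeds.
export function productsFetched(products) {
  return { type: FETCH_PRODUCTS_SUCCESS, products };
}

// Dispatched when the request fails.
export function productsFetchError(error) {
  return { type: FETCH_PRODUCTS_FAILED, error };
}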
 #pragma once #include <aws/apigateway/APIGateway_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> #include <aws/core/utils/memory/stl/AWSVector.h> #include <aws/apigateway/model/UsagePlanKey.h> #include <utility> namespace Aws { template<typename RESULT_TYPE> class AmazonWebServiceResult; namespace Utils { namespace Json { class JsonValue; } // namespace Json } // namespace Utils namespace APIGateway { namespace Model { /** * <p>Represents the collection of usage plan keys added to usage plans for the * associated API keys and, possibly, other types of keys.</p> <div * class="seeAlso"> <a * href="https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-api-usage-plans.html">Create * and Use Usage Plans</a> </div><p><h3>See Also:</h3> <a * href="http://docs.aws.amazon.com/goto/WebAPI/apigateway-2015-07-09/UsagePlanKeys">AWS * API Reference</a></p> */ class AWS_APIGATEWAY_API GetUsagePlanKeysResult { public: GetUsagePlanKeysResult(); GetUsagePlanKeysResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); GetUsagePlanKeysResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result); inline const Aws::String& GetPosition() const{ return m_position; } inline void SetPosition(const Aws::String& value) { m_position = value; } inline void SetPosition(Aws::String&& value) { m_position = std::move(value); } inline void SetPosition(const char* value) { m_position.assign(value); } inline GetUsagePlanKeysResult& WithPosition(const Aws::String& value) { SetPosition(value); return *this;} inline GetUsagePlanKeysResult& WithPosition(Aws::String&& value) { SetPosition(std::move(value)); return *this;} inline GetUsagePlanKeysResult& WithPosition(const char* value) { SetPosition(value); return *this;} /** * <p>The current page of elements from this collection.</p> */ inline const Aws::Vector<UsagePlanKey>& GetItems() const{ return m_items; } /** * <p>The current page of elements from this collection.</p> */ inline void SetItems(const Aws::Vector<UsagePlanKey>& value) { m_items = value; } /** * <p>The current page of elements from this collection.</p> */ inline void SetItems(Aws::Vector<UsagePlanKey>&& value) { m_items = std::move(value); } /** * <p>The current page of elements from this collection.</p> */ inline GetUsagePlanKeysResult& WithItems(const Aws::Vector<UsagePlanKey>& value) { SetItems(value); return *this;} /** * <p>The current page of elements from this collection.</p> */ inline GetUsagePlanKeysResult& WithItems(Aws::Vector<UsagePlanKey>&& value) { SetItems(std::move(value)); return *this;} /** * <p>The current page of elements from this collection.</p> */ inline GetUsagePlanKeysResult& AddItems(const UsagePlanKey& value) { m_items.push_back(value); return *this; } /** * <p>The current page of elements from this collection.</p> */ inline GetUsagePlanKeysResult& AddItems(UsagePlanKey&& value) { m_items.push_back(std::move(value)); return *this; } private: Aws::String m_position; Aws::Vector<UsagePlanKey> m_items; }; } // namespace Model } // namespace APIGateway } // namespace Aws
{ "content_hash": "319540be19da25a30a1b800ab416744a", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 124, "avg_line_length": 31.01834862385321, "alnum_prop": 0.6853002070393375, "repo_name": "JoyIfBam5/aws-sdk-cpp", "id": "7f9bdfeb73d14c253c43604e72e9be7f6263e3af", "size": "3954", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-cpp-sdk-apigateway/include/aws/apigateway/model/GetUsagePlanKeysResult.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "11868" }, { "name": "C++", "bytes": "167818064" }, { "name": "CMake", "bytes": "591577" }, { "name": "HTML", "bytes": "4471" }, { "name": "Java", "bytes": "271801" }, { "name": "Python", "bytes": "85650" }, { "name": "Shell", "bytes": "5277" } ], "symlink_target": "" }
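A hedged sketch of retrieving this result through the SDK client; the client and request class names and the include paths follow the SDK's usual layout but do not appear in this header, and the usage plan id is a placeholder.

#include <aws/core/Aws.h>
#include <aws/apigateway/APIGatewayClient.h>
#include <aws/apigateway/model/GetUsagePlanKeysRequest.h>
#include <iostream>

int main()
{
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::APIGateway::APIGatewayClient client;

        Aws::APIGateway::Model::GetUsagePlanKeysRequest request;
        request.SetUsagePlanId("PLACEHOLDER_USAGE_PLAN_ID");

        auto outcome = client.GetUsagePlanKeys(request);
        if (outcome.IsSuccess())
        {
            // Iterate the current page of usage plan keys.
            for (const auto& key : outcome.GetResult().GetItems())
            {
                std::cout << key.GetId() << std::endl;
            }
        }
        else
        {
            std::cerr << outcome.GetError().GetMessage() << std::endl;
        }
    }
    Aws::ShutdownAPI(options);
    return 0;
}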
import { registerBidder } from '../src/adapters/bidderFactory.js'; import { config } from '../src/config.js'; import { deepAccess, uniques, isArray, getWindowTop, isGptPubadsDefined, isSlotMatchingAdUnitCode, logInfo, logWarn, getWindowSelf, mergeDeep, } from '../src/utils.js'; import { BANNER, VIDEO } from '../src/mediaTypes.js'; // **************************** UTILS *************************** // const BIDDER_CODE = '33across'; const END_POINT = 'https://ssc.33across.com/api/v1/hb'; const SYNC_ENDPOINT = 'https://ssc-cms.33across.com/ps/?m=xch&rt=html&ru=deb'; const CURRENCY = 'USD'; const GUID_PATTERN = /^[a-zA-Z0-9_-]{22}$/; const PRODUCT = { SIAB: 'siab', INVIEW: 'inview', INSTREAM: 'instream' }; const VIDEO_ORTB_PARAMS = [ 'mimes', 'minduration', 'maxduration', 'placement', 'protocols', 'startdelay', 'skip', 'skipafter', 'minbitrate', 'maxbitrate', 'delivery', 'playbackmethod', 'api', 'linearity' ]; const adapterState = { uniqueSiteIds: [] }; const NON_MEASURABLE = 'nm'; function getTTXConfig() { const ttxSettings = Object.assign({}, config.getConfig('ttxSettings') ); return ttxSettings; } // **************************** VALIDATION *************************** // function isBidRequestValid(bid) { return ( _validateBasic(bid) && _validateBanner(bid) && _validateVideo(bid) ); } function _validateBasic(bid) { if (bid.bidder !== BIDDER_CODE || typeof bid.params === 'undefined') { return false; } if (!_validateGUID(bid)) { return false; } return true; } function _validateGUID(bid) { const siteID = deepAccess(bid, 'params.siteId', '') || ''; if (siteID.trim().match(GUID_PATTERN) === null) { return false; } return true; } function _validateBanner(bid) { const banner = deepAccess(bid, 'mediaTypes.banner'); // If there's no banner no need to validate against banner rules if (banner === undefined) { return true; } if (!Array.isArray(banner.sizes)) { return false; } return true; } function _validateVideo(bid) { const videoAdUnit = deepAccess(bid, 'mediaTypes.video'); const videoBidderParams = deepAccess(bid, 'params.video', {}); // If there's no video no need to validate against video rules if (videoAdUnit === undefined) { return true; } if (!Array.isArray(videoAdUnit.playerSize)) { return false; } if (!videoAdUnit.context) { return false; } const videoParams = { ...videoAdUnit, ...videoBidderParams }; if (!Array.isArray(videoParams.mimes) || videoParams.mimes.length === 0) { return false; } if (!Array.isArray(videoParams.protocols) || videoParams.protocols.length === 0) { return false; } // If placement if defined, it must be a number if ( typeof videoParams.placement !== 'undefined' && typeof videoParams.placement !== 'number' ) { return false; } // If startdelay is defined it must be a number if ( videoAdUnit.context === 'instream' && typeof videoParams.startdelay !== 'undefined' && typeof videoParams.startdelay !== 'number' ) { return false; } return true; } // **************************** BUILD REQUESTS *************************** // // NOTE: With regards to gdrp consent data, the server will independently // infer the gdpr applicability therefore, setting the default value to false function buildRequests(bidRequests, bidderRequest) { const { ttxSettings, gdprConsent, uspConsent, pageUrl } = _buildRequestParams(bidRequests, bidderRequest); const groupedRequests = _buildRequestGroups(ttxSettings, bidRequests); const serverRequests = []; for (const key in groupedRequests) { serverRequests.push( _createServerRequest({ bidRequests: groupedRequests[key], gdprConsent, uspConsent, pageUrl, 
ttxSettings }) ) } return serverRequests; } function _buildRequestParams(bidRequests, bidderRequest) { const ttxSettings = getTTXConfig(); const gdprConsent = Object.assign({ consentString: undefined, gdprApplies: false }, bidderRequest && bidderRequest.gdprConsent); const uspConsent = bidderRequest && bidderRequest.uspConsent; const pageUrl = (bidderRequest && bidderRequest.refererInfo) ? (bidderRequest.refererInfo.referer) : (undefined); adapterState.uniqueSiteIds = bidRequests.map(req => req.params.siteId).filter(uniques); return { ttxSettings, gdprConsent, uspConsent, pageUrl } } function _buildRequestGroups(ttxSettings, bidRequests) { const bidRequestsComplete = bidRequests.map(_inferProduct); const enableSRAMode = ttxSettings && ttxSettings.enableSRAMode; const keyFunc = (enableSRAMode === true) ? _getSRAKey : _getMRAKey; return _groupBidRequests(bidRequestsComplete, keyFunc); } function _groupBidRequests(bidRequests, keyFunc) { const groupedRequests = {}; bidRequests.forEach((req) => { const key = keyFunc(req); groupedRequests[key] = groupedRequests[key] || []; groupedRequests[key].push(req); }); return groupedRequests; } function _getSRAKey(bidRequest) { return `${bidRequest.params.siteId}:${bidRequest.params.productId}`; } function _getMRAKey(bidRequest) { return `${bidRequest.bidId}`; } // Infer the necessary data from valid bid for a minimal ttxRequest and create HTTP request function _createServerRequest({ bidRequests, gdprConsent = {}, uspConsent, pageUrl, ttxSettings }) { const ttxRequest = {}; const { siteId, test } = bidRequests[0].params; /* * Infer data for the request payload */ ttxRequest.imp = []; bidRequests.forEach((req) => { ttxRequest.imp.push(_buildImpORTB(req)); }); ttxRequest.site = { id: siteId }; if (pageUrl) { ttxRequest.site.page = pageUrl; } ttxRequest.id = bidRequests[0].auctionId; if (gdprConsent.consentString) { ttxRequest.user = setExtensions(ttxRequest.user, { 'consent': gdprConsent.consentString }); } if (Array.isArray(bidRequests[0].userIdAsEids) && bidRequests[0].userIdAsEids.length > 0) { ttxRequest.user = setExtensions(ttxRequest.user, { 'eids': bidRequests[0].userIdAsEids }); } ttxRequest.regs = setExtensions(ttxRequest.regs, { 'gdpr': Number(gdprConsent.gdprApplies) }); if (uspConsent) { ttxRequest.regs = setExtensions(ttxRequest.regs, { 'us_privacy': uspConsent }); } ttxRequest.ext = { ttx: { prebidStartedAt: Date.now(), caller: [ { 'name': 'prebidjs', 'version': '$prebid.version$' } ] } }; if (bidRequests[0].schain) { ttxRequest.source = setExtensions(ttxRequest.source, { 'schain': bidRequests[0].schain }); } // Finally, set the openRTB 'test' param if this is to be a test bid if (test === 1) { ttxRequest.test = 1; } /* * Now construct the full server request */ const options = { contentType: 'text/plain', withCredentials: true }; // Allow the ability to configure the HB endpoint for testing purposes. 
const url = (ttxSettings && ttxSettings.url) || `${END_POINT}?guid=${siteId}`; // Return the server request return { 'method': 'POST', 'url': url, 'data': JSON.stringify(ttxRequest), 'options': options } } // BUILD REQUESTS: SET EXTENSIONS function setExtensions(obj = {}, extFields) { return mergeDeep({}, obj, { 'ext': extFields }); } // BUILD REQUESTS: IMP function _buildImpORTB(bidRequest) { const imp = { id: bidRequest.bidId, ext: { ttx: { prod: deepAccess(bidRequest, 'params.productId') } } }; if (deepAccess(bidRequest, 'mediaTypes.banner')) { imp.banner = { ..._buildBannerORTB(bidRequest) } } if (deepAccess(bidRequest, 'mediaTypes.video')) { imp.video = _buildVideoORTB(bidRequest); } return imp; } // BUILD REQUESTS: SIZE INFERENCE function _transformSizes(sizes) { if (isArray(sizes) && sizes.length === 2 && !isArray(sizes[0])) { return [ _getSize(sizes) ]; } return sizes.map(_getSize); } function _getSize(size) { return { w: parseInt(size[0], 10), h: parseInt(size[1], 10) } } // BUILD REQUESTS: PRODUCT INFERENCE function _inferProduct(bidRequest) { return mergeDeep({}, bidRequest, { params: { productId: _getProduct(bidRequest) } }); } function _getProduct(bidRequest) { const { params, mediaTypes } = bidRequest; const { banner, video } = mediaTypes; if ((video && !banner) && video.context === 'instream') { return PRODUCT.INSTREAM; } return (params.productId === PRODUCT.INVIEW) ? (params.productId) : PRODUCT.SIAB; } // BUILD REQUESTS: BANNER function _buildBannerORTB(bidRequest) { const bannerAdUnit = deepAccess(bidRequest, 'mediaTypes.banner', {}); const element = _getAdSlotHTMLElement(bidRequest.adUnitCode); const sizes = _transformSizes(bannerAdUnit.sizes); let format; // We support size based bidfloors so obtain one if there's a rule associated if (typeof bidRequest.getFloor === 'function') { format = sizes.map((size) => { const bidfloors = _getBidFloors(bidRequest, size, BANNER); let formatExt; if (bidfloors) { formatExt = { ext: { ttx: { bidfloors: [ bidfloors ] } } } } return Object.assign({}, size, formatExt); }); } else { format = sizes; } const minSize = _getMinSize(sizes); const viewabilityAmount = _isViewabilityMeasurable(element) ? 
_getViewability(element, getWindowTop(), minSize) : NON_MEASURABLE; const ext = contributeViewability(viewabilityAmount); return { format, ext } } // BUILD REQUESTS: VIDEO // eslint-disable-next-line no-unused-vars function _buildVideoORTB(bidRequest) { const videoAdUnit = deepAccess(bidRequest, 'mediaTypes.video', {}); const videoBidderParams = deepAccess(bidRequest, 'params.video', {}); const videoParams = { ...videoAdUnit, ...videoBidderParams // Bidder Specific overrides }; const video = {} const { w, h } = _getSize(videoParams.playerSize[0]); video.w = w; video.h = h; // Obtain all ORTB params related video from Ad Unit VIDEO_ORTB_PARAMS.forEach((param) => { if (videoParams.hasOwnProperty(param)) { video[param] = videoParams[param]; } }); const product = _getProduct(bidRequest); // Placement Inference Rules: // - If no placement is defined then default to 2 (In Banner) // - If product is instream (for instream context) then override placement to 1 video.placement = video.placement || 2; if (product === PRODUCT.INSTREAM) { video.startdelay = video.startdelay || 0; video.placement = 1; } // bidfloors if (typeof bidRequest.getFloor === 'function') { const bidfloors = _getBidFloors(bidRequest, { w: video.w, h: video.h }, VIDEO); if (bidfloors) { Object.assign(video, { ext: { ttx: { bidfloors: [ bidfloors ] } } }); } } return video; } // BUILD REQUESTS: BIDFLOORS function _getBidFloors(bidRequest, size, mediaType) { const bidFloors = bidRequest.getFloor({ currency: CURRENCY, mediaType, size: [ size.w, size.h ] }); if (!isNaN(bidFloors.floor) && (bidFloors.currency === CURRENCY)) { return bidFloors.floor; } } // BUILD REQUESTS: VIEWABILITY function _isViewabilityMeasurable(element) { return !_isIframe() && element !== null; } function _getViewability(element, topWin, { w, h } = {}) { return topWin.document.visibilityState === 'visible' ? _getPercentInView(element, topWin, { w, h }) : 0; } function _mapAdUnitPathToElementId(adUnitCode) { if (isGptPubadsDefined()) { // eslint-disable-next-line no-undef const adSlots = googletag.pubads().getSlots(); const isMatchingAdSlot = isSlotMatchingAdUnitCode(adUnitCode); for (let i = 0; i < adSlots.length; i++) { if (isMatchingAdSlot(adSlots[i])) { const id = adSlots[i].getSlotElementId(); logInfo(`[33Across Adapter] Map ad unit path to HTML element id: '${adUnitCode}' -> ${id}`); return id; } } } logWarn(`[33Across Adapter] Unable to locate element for ad unit code: '${adUnitCode}'`); return null; } function _getAdSlotHTMLElement(adUnitCode) { return document.getElementById(adUnitCode) || document.getElementById(_mapAdUnitPathToElementId(adUnitCode)); } function _getMinSize(sizes) { return sizes.reduce((min, size) => size.h * size.w < min.h * min.w ? 
size : min); } function _getBoundingBox(element, { w, h } = {}) { let { width, height, left, top, right, bottom } = element.getBoundingClientRect(); if ((width === 0 || height === 0) && w && h) { width = w; height = h; right = left + w; bottom = top + h; } return { width, height, left, top, right, bottom }; } function _getIntersectionOfRects(rects) { const bbox = { left: rects[0].left, right: rects[0].right, top: rects[0].top, bottom: rects[0].bottom }; for (let i = 1; i < rects.length; ++i) { bbox.left = Math.max(bbox.left, rects[i].left); bbox.right = Math.min(bbox.right, rects[i].right); if (bbox.left >= bbox.right) { return null; } bbox.top = Math.max(bbox.top, rects[i].top); bbox.bottom = Math.min(bbox.bottom, rects[i].bottom); if (bbox.top >= bbox.bottom) { return null; } } bbox.width = bbox.right - bbox.left; bbox.height = bbox.bottom - bbox.top; return bbox; } function _getPercentInView(element, topWin, { w, h } = {}) { const elementBoundingBox = _getBoundingBox(element, { w, h }); // Obtain the intersection of the element and the viewport const elementInViewBoundingBox = _getIntersectionOfRects([ { left: 0, top: 0, right: topWin.innerWidth, bottom: topWin.innerHeight }, elementBoundingBox ]); let elementInViewArea, elementTotalArea; if (elementInViewBoundingBox !== null) { // Some or all of the element is in view elementInViewArea = elementInViewBoundingBox.width * elementInViewBoundingBox.height; elementTotalArea = elementBoundingBox.width * elementBoundingBox.height; return ((elementInViewArea / elementTotalArea) * 100); } // No overlap between element and the viewport; therefore, the element // lies completely out of view return 0; } /** * Viewability contribution to request.. */ function contributeViewability(viewabilityAmount) { const amount = isNaN(viewabilityAmount) ? 
viewabilityAmount : Math.round(viewabilityAmount); return { ttx: { viewability: { amount } } }; } function _isIframe() { try { return getWindowSelf() !== getWindowTop(); } catch (e) { return true; } } // **************************** INTERPRET RESPONSE ******************************** // function interpretResponse(serverResponse, bidRequest) { const { seatbid, cur = 'USD' } = serverResponse.body; if (!isArray(seatbid)) { return []; } // Pick seats with valid bids and convert them into an Array of responses // in format expected by Prebid Core return seatbid .filter((seat) => ( isArray(seat.bid) && seat.bid.length > 0 )) .reduce((acc, seat) => { return acc.concat( seat.bid.map((bid) => _createBidResponse(bid, cur)) ); }, []); } function _createBidResponse(bid, cur) { const isADomainPresent = bid.adomain && bid.adomain.length; const bidResponse = { requestId: bid.impid, bidderCode: BIDDER_CODE, cpm: bid.price, width: bid.w, height: bid.h, ad: bid.adm, ttl: bid.ttl || 60, creativeId: bid.crid, mediaType: deepAccess(bid, 'ext.ttx.mediaType', BANNER), currency: cur, netRevenue: true } if (isADomainPresent) { bidResponse.meta = { advertiserDomains: bid.adomain }; } if (bidResponse.mediaType === VIDEO) { const vastType = deepAccess(bid, 'ext.ttx.vastType', 'xml'); if (vastType === 'xml') { bidResponse.vastXml = bidResponse.ad; } else { bidResponse.vastUrl = bidResponse.ad; } } return bidResponse; } // **************************** USER SYNC *************************** // // Register one sync per unique guid so long as iframe is enable // Else no syncs // For logic on how we handle gdpr data see _createSyncs and module's unit tests // '33acrossBidAdapter#getUserSyncs' function getUserSyncs(syncOptions, responses, gdprConsent, uspConsent) { const syncUrls = ( (syncOptions.iframeEnabled) ? adapterState.uniqueSiteIds.map((siteId) => _createSync({ gdprConsent, uspConsent, siteId })) : ([]) ); // Clear adapter state of siteID's since we don't need this info anymore. adapterState.uniqueSiteIds = []; return syncUrls; } // Sync object will always be of type iframe for TTX function _createSync({ siteId = 'zzz000000000003zzz', gdprConsent = {}, uspConsent }) { const ttxSettings = config.getConfig('ttxSettings'); const syncUrl = (ttxSettings && ttxSettings.syncUrl) || SYNC_ENDPOINT; const { consentString, gdprApplies } = gdprConsent; const sync = { type: 'iframe', url: `${syncUrl}&id=${siteId}&gdpr_consent=${encodeURIComponent(consentString)}&us_privacy=${encodeURIComponent(uspConsent)}` }; if (typeof gdprApplies === 'boolean') { sync.url += `&gdpr=${Number(gdprApplies)}`; } return sync; } export const spec = { NON_MEASURABLE, code: BIDDER_CODE, supportedMediaTypes: [ BANNER, VIDEO ], isBidRequestValid, buildRequests, interpretResponse, getUserSyncs, }; registerBidder(spec);
{ "content_hash": "fc1b4fcf176427efebc449e40e791802", "timestamp": "", "source": "github", "line_count": 744, "max_line_length": 129, "avg_line_length": 23.83467741935484, "alnum_prop": 0.6406135453673941, "repo_name": "PWyrembak/Prebid.js", "id": "af67bb2bf48727432c3b0518a2c97b7dce80b319", "size": "17733", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/33acrossBidAdapter.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "435" }, { "name": "Dockerfile", "bytes": "558" }, { "name": "HTML", "bytes": "250523" }, { "name": "JavaScript", "bytes": "10924127" }, { "name": "Shell", "bytes": "80" } ], "symlink_target": "" }
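A sketch of a Prebid.js ad unit configuration that exercises the banner path of the adapter above; the slot code is a placeholder and the siteId is a dummy 22-character value shaped to pass GUID_PATTERN, not a real 33Across site id.

var adUnits = [{
  code: 'div-gpt-ad-banner-1',            // placeholder ad slot element id
  mediaTypes: {
    banner: {
      sizes: [[300, 250], [728, 90]]
    }
  },
  bids: [{
    bidder: '33across',
    params: {
      siteId: 'EXAMPLE_SITE_ID_0000_0'    // placeholder; must match GUID_PATTERN
    }
  }]
}];

pbjs.que.push(function() {
  pbjs.addAdUnits(adUnits);
  // productId is omitted on purpose: the adapter infers it (siab/inview/instream).
});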
 using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Xml; namespace OfficeOpenXml.ConditionalFormatting { /// <summary> /// Enum for Conditional Format Type ST_CfType §18.18.12. With some changes. /// </summary> public enum eExcelConditionalFormattingRuleType { #region Average /// <summary> /// This conditional formatting rule highlights cells that are above the average /// for all values in the range. /// </summary> /// <remarks>AboveAverage Excel CF Rule Type</remarks> AboveAverage, /// <summary> /// This conditional formatting rule highlights cells that are above or equal /// the average for all values in the range. /// </summary> /// <remarks>AboveAverage Excel CF Rule Type</remarks> AboveOrEqualAverage, /// <summary> /// This conditional formatting rule highlights cells that are below the average /// for all values in the range. /// </summary> /// <remarks>AboveAverage Excel CF Rule Type</remarks> BelowAverage, /// <summary> /// This conditional formatting rule highlights cells that are below or equal /// the average for all values in the range. /// </summary> /// <remarks>AboveAverage Excel CF Rule Type</remarks> BelowOrEqualAverage, #endregion #region StdDev /// <summary> /// This conditional formatting rule highlights cells that are above the standard /// deviationa for all values in the range. /// <remarks>AboveAverage Excel CF Rule Type</remarks> /// </summary> AboveStdDev, /// <summary> /// This conditional formatting rule highlights cells that are below the standard /// deviationa for all values in the range. /// </summary> /// <remarks>AboveAverage Excel CF Rule Type</remarks> BelowStdDev, #endregion #region TopBottom /// <summary> /// This conditional formatting rule highlights cells whose values fall in the /// bottom N bracket as specified. /// </summary> /// <remarks>Top10 Excel CF Rule Type</remarks> Bottom, /// <summary> /// This conditional formatting rule highlights cells whose values fall in the /// bottom N percent as specified. /// </summary> /// <remarks>Top10 Excel CF Rule Type</remarks> BottomPercent, /// <summary> /// This conditional formatting rule highlights cells whose values fall in the /// top N bracket as specified. /// </summary> /// <remarks>Top10 Excel CF Rule Type</remarks> Top, /// <summary> /// This conditional formatting rule highlights cells whose values fall in the /// top N percent as specified. /// </summary> /// <remarks>Top10 Excel CF Rule Type</remarks> TopPercent, #endregion #region TimePeriod /// <summary> /// This conditional formatting rule highlights cells containing dates in the /// last 7 days. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> Last7Days, /// <summary> /// This conditional formatting rule highlights cells containing dates in the /// last month. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> LastMonth, /// <summary> /// This conditional formatting rule highlights cells containing dates in the /// last week. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> LastWeek, /// <summary> /// This conditional formatting rule highlights cells containing dates in the /// next month. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> NextMonth, /// <summary> /// This conditional formatting rule highlights cells containing dates in the /// next week. 
/// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> NextWeek, /// <summary> /// This conditional formatting rule highlights cells containing dates in this /// month. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> ThisMonth, /// <summary> /// This conditional formatting rule highlights cells containing dates in this /// week. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> ThisWeek, /// <summary> /// This conditional formatting rule highlights cells containing today dates. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> Today, /// <summary> /// This conditional formatting rule highlights cells containing tomorrow dates. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> Tomorrow, /// <summary> /// This conditional formatting rule highlights cells containing yesterday dates. /// </summary> /// <remarks>TimePeriod Excel CF Rule Type</remarks> Yesterday, #endregion #region CellIs /// <summary> /// This conditional formatting rule highlights cells in the range that begin with /// the given text. /// </summary> /// <remarks> /// Equivalent to using the LEFT() sheet function and comparing values. /// </remarks> /// <remarks>BeginsWith Excel CF Rule Type</remarks> BeginsWith, /// <summary> /// This conditional formatting rule highlights cells in the range between the /// given two formulas. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> Between, /// <summary> /// This conditional formatting rule highlights cells that are completely blank. /// </summary> /// <remarks> /// Equivalent of using LEN(TRIM()). This means that if the cell contains only /// characters that TRIM() would remove, then it is considered blank. An empty cell /// is also considered blank. /// </remarks> /// <remarks>ContainsBlanks Excel CF Rule Type</remarks> ContainsBlanks, /// <summary> /// This conditional formatting rule highlights cells with formula errors. /// </summary> /// <remarks> /// Equivalent to using ISERROR() sheet function to determine if there is /// a formula error. /// </remarks> /// <remarks>ContainsErrors Excel CF Rule Type</remarks> ContainsErrors, /// <summary> /// This conditional formatting rule highlights cells in the range that begin with /// the given text. /// </summary> /// <remarks> /// Equivalent to using the LEFT() sheet function and comparing values. /// </remarks> /// <remarks>ContainsText Excel CF Rule Type</remarks> ContainsText, /// <summary> /// This conditional formatting rule highlights duplicated values. /// </summary> /// <remarks>DuplicateValues Excel CF Rule Type</remarks> DuplicateValues, /// <summary> /// This conditional formatting rule highlights cells ending with given text. /// </summary> /// <remarks> /// Equivalent to using the RIGHT() sheet function and comparing values. /// </remarks> /// <remarks>EndsWith Excel CF Rule Type</remarks> EndsWith, /// <summary> /// This conditional formatting rule highlights cells equals to with given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> Equal, /// <summary> /// This conditional formatting rule contains a formula to evaluate. When the /// formula result is true, the cell is highlighted. /// </summary> /// <remarks>Expression Excel CF Rule Type</remarks> Expression, /// <summary> /// This conditional formatting rule highlights cells greater than the given formula. 
/// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> GreaterThan, /// <summary> /// This conditional formatting rule highlights cells greater than or equal the /// given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> GreaterThanOrEqual, /// <summary> /// This conditional formatting rule highlights cells less than the given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> LessThan, /// <summary> /// This conditional formatting rule highlights cells less than or equal the /// given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> LessThanOrEqual, /// <summary> /// This conditional formatting rule highlights cells outside the range in /// given two formulas. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> NotBetween, /// <summary> /// This conditional formatting rule highlights cells that does not contains the /// given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> NotContains, /// <summary> /// This conditional formatting rule highlights cells that are not blank. /// </summary> /// <remarks> /// Equivalent of using LEN(TRIM()). This means that if the cell contains only /// characters that TRIM() would remove, then it is considered blank. An empty cell /// is also considered blank. /// </remarks> /// <remarks>NotContainsBlanks Excel CF Rule Type</remarks> NotContainsBlanks, /// <summary> /// This conditional formatting rule highlights cells without formula errors. /// </summary> /// <remarks> /// Equivalent to using ISERROR() sheet function to determine if there is a /// formula error. /// </remarks> /// <remarks>NotContainsErrors Excel CF Rule Type</remarks> NotContainsErrors, /// <summary> /// This conditional formatting rule highlights cells that do not contain /// the given text. /// </summary> /// <remarks> /// Equivalent to using the SEARCH() sheet function. /// </remarks> /// <remarks>NotContainsText Excel CF Rule Type</remarks> NotContainsText, /// <summary> /// This conditional formatting rule highlights cells not equals to with /// given formula. /// </summary> /// <remarks>CellIs Excel CF Rule Type</remarks> NotEqual, /// <summary> /// This conditional formatting rule highlights unique values in the range. /// </summary> /// <remarks>UniqueValues Excel CF Rule Type</remarks> UniqueValues, #endregion #region ColorScale /// <summary> /// Three Color Scale (Low, Middle and High Color Scale) /// </summary> /// <remarks>ColorScale Excel CF Rule Type</remarks> ThreeColorScale, /// <summary> /// Two Color Scale (Low and High Color Scale) /// </summary> /// <remarks>ColorScale Excel CF Rule Type</remarks> TwoColorScale, #endregion #region IconSet /// <summary> /// This conditional formatting rule applies a 3 set icons to cells according /// to their values. /// </summary> /// <remarks>IconSet Excel CF Rule Type</remarks> ThreeIconSet, /// <summary> /// This conditional formatting rule applies a 4 set icons to cells according /// to their values. /// </summary> /// <remarks>IconSet Excel CF Rule Type</remarks> FourIconSet, /// <summary> /// This conditional formatting rule applies a 5 set icons to cells according /// to their values. /// </summary> /// <remarks>IconSet Excel CF Rule Type</remarks> FiveIconSet, #endregion #region DataBar /// <summary> /// This conditional formatting rule displays a gradated data bar in the range of cells. 
/// </summary> /// <remarks>DataBar Excel CF Rule Type</remarks> DataBar #endregion } /// <summary> /// Enum for Conditional Format Value Object Type ST_CfvoType §18.18.13 /// </summary> public enum eExcelConditionalFormattingValueObjectType { /// <summary> /// Formula /// </summary> Formula, /// <summary> /// Maximum Value /// </summary> Max, /// <summary> /// Minimum Value /// </summary> Min, /// <summary> /// Number Value /// </summary> Num, /// <summary> /// Percent /// </summary> Percent, /// <summary> /// Percentile /// </summary> Percentile } /// <summary> /// Enum for Conditional Formatting Value Object Position /// </summary> public enum eExcelConditionalFormattingValueObjectPosition { /// <summary> /// The lower position for both TwoColorScale and ThreeColorScale /// </summary> Low, /// <summary> /// The middle position only for ThreeColorScale /// </summary> Middle, /// <summary> /// The highest position for both TwoColorScale and ThreeColorScale /// </summary> High } /// <summary> /// Enum for Conditional Formatting Value Object Node Type /// </summary> public enum eExcelConditionalFormattingValueObjectNodeType { /// <summary> /// 'cfvo' node /// </summary> Cfvo, /// <summary> /// 'color' node /// </summary> Color } /// <summary> /// Enum for Conditional Formatting Operartor Type ST_ConditionalFormattingOperator §18.18.15 /// </summary> public enum eExcelConditionalFormattingOperatorType { /// <summary> /// Begins With. 'Begins with' operator /// </summary> BeginsWith, /// <summary> /// Between. 'Between' operator /// </summary> Between, /// <summary> /// Contains. 'Contains' operator /// </summary> ContainsText, /// <summary> /// Ends With. 'Ends with' operator /// </summary> EndsWith, /// <summary> /// Equal. 'Equal to' operator /// </summary> Equal, /// <summary> /// Greater Than. 'Greater than' operator /// </summary> GreaterThan, /// <summary> /// Greater Than Or Equal. 'Greater than or equal to' operator /// </summary> GreaterThanOrEqual, /// <summary> /// Less Than. 'Less than' operator /// </summary> LessThan, /// <summary> /// Less Than Or Equal. 'Less than or equal to' operator /// </summary> LessThanOrEqual, /// <summary> /// Not Between. 'Not between' operator /// </summary> NotBetween, /// <summary> /// Does Not Contain. 'Does not contain' operator /// </summary> NotContains, /// <summary> /// Not Equal. 'Not equal to' operator /// </summary> NotEqual } /// <summary> /// Enum for Conditional Formatting Time Period Type ST_TimePeriod §18.18.82 /// </summary> public enum eExcelConditionalFormattingTimePeriodType { /// <summary> /// Last 7 Days. A date in the last seven days. /// </summary> Last7Days, /// <summary> /// Last Month. A date occuring in the last calendar month. /// </summary> LastMonth, /// <summary> /// Last Week. A date occuring last week. /// </summary> LastWeek, /// <summary> /// Next Month. A date occuring in the next calendar month. /// </summary> NextMonth, /// <summary> /// Next Week. A date occuring next week. /// </summary> NextWeek, /// <summary> /// This Month. A date occuring in this calendar month. /// </summary> ThisMonth, /// <summary> /// This Week. A date occuring this week. /// </summary> ThisWeek, /// <summary> /// Today. Today's date. /// </summary> Today, /// <summary> /// Tomorrow. Tomorrow's date. /// </summary> Tomorrow, /// <summary> /// Yesterday. Yesterday's date. 
/// </summary> Yesterday } /// <summary> /// 18.18.42 ST_IconSetType (Icon Set Type) - Only 3 icons /// </summary> public enum eExcelconditionalFormatting3IconsSetType { /// <summary> /// (3 Arrows) 3 arrows icon set. /// </summary> Arrows, /// <summary> /// (3 Arrows (Gray)) 3 gray arrows icon set. /// </summary> ArrowsGray, /// <summary> /// (3 Flags) 3 flags icon set. /// </summary> Flags, /// <summary> /// (3 Signs) 3 signs icon set. /// </summary> Signs, /// <summary> /// (3 Symbols Circled) 3 symbols icon set. /// </summary> Symbols, /// <summary> /// (3 Symbols) 3 Symbols icon set. /// </summary> Symbols2, /// <summary> /// (3 Traffic Lights) 3 traffic lights icon set (#1). /// </summary> TrafficLights1, /// <summary> /// (3 Traffic Lights Black) 3 traffic lights icon set with thick black border. /// </summary> TrafficLights2 } /// <summary> /// 18.18.42 ST_IconSetType (Icon Set Type) - Only 4 icons /// </summary> public enum eExcelconditionalFormatting4IconsSetType { /// <summary> /// (4 Arrows) 4 arrows icon set. /// </summary> Arrows, /// <summary> /// (4 Arrows (Gray)) 4 gray arrows icon set. /// </summary> ArrowsGray, /// <summary> /// (4 Ratings) 4 ratings icon set. /// </summary> Rating, /// <summary> /// (4 Red To Black) 4 'red to black' icon set. /// </summary> RedToBlack, /// <summary> /// (4 Traffic Lights) 4 traffic lights icon set. /// </summary> TrafficLights } /// <summary> /// 18.18.42 ST_IconSetType (Icon Set Type) - Only 5 icons /// </summary> public enum eExcelconditionalFormatting5IconsSetType { /// <summary> /// (5 Arrows) 5 arrows icon set. /// </summary> Arrows, /// <summary> /// (5 Arrows (Gray)) 5 gray arrows icon set. /// </summary> ArrowsGray, /// <summary> /// (5 Quarters) 5 quarters icon set. /// </summary> Quarters, /// <summary> /// (5 Ratings Icon Set) 5 rating icon set. /// </summary> Rating } }
{ "content_hash": "c1c0c104cd2f4123dd319a0699d7fcfa", "timestamp": "", "source": "github", "line_count": 665, "max_line_length": 95, "avg_line_length": 26.634586466165413, "alnum_prop": 0.6146680216802168, "repo_name": "RH-Code/GAPP", "id": "c7572e0615a9bd48f819cd73ec74b38fac4edacc", "size": "19300", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "EPPlus/ConditionalFormatting/ExcelConditionalFormattingEnums.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "356" }, { "name": "C#", "bytes": "27428180" }, { "name": "HTML", "bytes": "1064161" }, { "name": "JavaScript", "bytes": "28729" } ], "symlink_target": "" }
package com.tle.core.institution.migration.v64; import com.tle.core.guice.Bind; import com.tle.core.hibernate.impl.HibernateMigrationHelper; import com.tle.core.migration.AbstractHibernateSchemaMigration; import com.tle.core.migration.MigrationInfo; import com.tle.core.migration.MigrationResult; import java.util.List; import javax.inject.Singleton; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import org.hibernate.annotations.AccessType; import org.hibernate.classic.Session; @Bind @Singleton public class IncreaseThumbnailLength extends AbstractHibernateSchemaMigration { @Override public MigrationInfo createMigrationInfo() { // Legacy string key return new MigrationInfo( "com.tle.core.entity.services.migration.v64.thumbnail.columnsize.title"); } @Override protected void executeDataMigration( HibernateMigrationHelper helper, MigrationResult result, Session session) throws Exception { result.incrementStatus(); } @Override protected int countDataMigrations(HibernateMigrationHelper helper, Session session) { return 1; } @Override protected List<String> getDropModifySql(HibernateMigrationHelper helper) { return helper.getModifyColumnSQL("attachment", "thumbnail", false, true); } @Override protected List<String> getAddSql(HibernateMigrationHelper helper) { return null; } @Override protected Class<?>[] getDomainClasses() { return new Class<?>[] {FakeAttachment.class}; } @Entity(name = "Attachment") @AccessType("field") public static class FakeAttachment { @Id @GeneratedValue(strategy = GenerationType.AUTO) long id; @Column(length = 512) protected String thumbnail; } }
{ "content_hash": "f365ad0def17dfb08e1945b8b0ac58b9", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 98, "avg_line_length": 27.803030303030305, "alnum_prop": 0.7694822888283379, "repo_name": "equella/Equella", "id": "56f0e4f8b98fbfa24e707c7f6cda2c2725105791", "size": "2638", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "Source/Plugins/Core/com.equella.core/src/com/tle/core/institution/migration/v64/IncreaseThumbnailLength.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "402" }, { "name": "Batchfile", "bytes": "38432" }, { "name": "CSS", "bytes": "648823" }, { "name": "Dockerfile", "bytes": "2055" }, { "name": "FreeMarker", "bytes": "370046" }, { "name": "HTML", "bytes": "865667" }, { "name": "Java", "bytes": "27081020" }, { "name": "JavaScript", "bytes": "1673995" }, { "name": "PHP", "bytes": "821" }, { "name": "PLpgSQL", "bytes": "1363" }, { "name": "PureScript", "bytes": "307610" }, { "name": "Python", "bytes": "79871" }, { "name": "Scala", "bytes": "765981" }, { "name": "Shell", "bytes": "64170" }, { "name": "TypeScript", "bytes": "146564" }, { "name": "XSLT", "bytes": "510113" } ], "symlink_target": "" }
LATEX = pdflatex MAINFILE = tut01r-riondato UPLOADHOST = ssh.cs.brown.edu UPLOADURI = public_html/RiondatoUpfal-VCTutorial.pdf .PHONY: all archive check clean osx pdf upload view all : archive $(MAINFILE).aux: *.tex $(wildcard *.eps) $(LATEX) $(MAINFILE).tex $(MAINFILE).bbl: *.tex *.bib -bibtex --min-crossrefs=20 $(MAINFILE) $(MAINFILE).pdf: $(MAINFILE).aux $(MAINFILE).bbl $(LATEX) $(MAINFILE).tex $(LATEX) $(MAINFILE).tex $(MAINFILE).tar.bz2: $(MAINFILE).pdf env COPYFILE_DISABLE=1 tar cjvfh $(MAINFILE).tar.bz2 *.tex *.bib *.pdf $(wildcard figures/*pdf) $(wildcard *.bst) $(wildcard *.cls) $(wildcard *.clo) $(wildcard *.eps) $(wildcard *.svg) Makefile archive: $(MAINFILE).tar.bz2 check: *.tex ($(LATEX) $(MAINFILE).tex | grep -s -e "multiply" -e "undefined") || echo "all OK" clean: -/bin/rm -f $(MAINFILE).pdf $(MAINFILE).tar.bz2 *.dvi *.aux *.ps *~ -/bin/rm -f *.log *.lot *.lof *.toc *.blg *.bbl *.idx *.out pdf: $(MAINFILE).pdf osx: pdf open $(MAINFILE).pdf upload: pdf rsync -e ssh --perms --chmod=F644 $(MAINFILE).pdf $(UPLOADHOST):$(UPLOADURI) view: pdf acroread -geometry 1000x1000 $(MAINFILE).pdf
{ "content_hash": "9f7602d8dd4b137ee0dcb2c71feabd53", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 195, "avg_line_length": 28.142857142857142, "alnum_prop": 0.6370558375634517, "repo_name": "rionda/vctutorial", "id": "4f5cd4405b39a867a3a083212195b0a8588c651b", "size": "1182", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "KDD/proceedings/Makefile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Makefile", "bytes": "2481" }, { "name": "TeX", "bytes": "212937" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project name="mitre-watchdog" default="wd.show" basedir="."> <!-- ## Targets --> <target name="wd.show" description="Show the latest watchdog errors"> <!--<watchdog save="false" count="100" type="" /> --> <echo msg=" " /> <echo msg="Show the latest ${drush.ws.count} watchdog errors" /> <echo msg=" " /> <exec command="${drush.bin} ${drush.target} ws ${drush.strict} ${drush.ws.count}" passthru="true" checkreturn="true"> </exec> </target> <target name="wd.clear" description="Clear the watchdog table of all entries"> <echo msg=" " /> <echo msg="Clear the watchdog table of all entries" /> <echo msg=" " /> <exec command="${drush.bin} ${drush.target} wd-del all -y" passthru="true" checkreturn="true" /> </target> </project>
{ "content_hash": "afdc4b97b17e327502110a02d186100d", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 85, "avg_line_length": 33.55555555555556, "alnum_prop": 0.5562913907284768, "repo_name": "timani/mitre-logger", "id": "0524aeb461c2db3256e752db98f4deaf67cc81bc", "size": "906", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build.watchdog.xml", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "2557" } ], "symlink_target": "" }
package org.apache.phoenix.queryserver.server; import com.google.common.annotations.VisibleForTesting; import org.apache.calcite.avatica.Meta; import org.apache.calcite.avatica.remote.Driver; import org.apache.calcite.avatica.remote.LocalService; import org.apache.calcite.avatica.remote.Service; import org.apache.calcite.avatica.server.HandlerFactory; import org.apache.calcite.avatica.server.HttpServer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.util.Strings; import org.apache.hadoop.net.DNS; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.phoenix.query.QueryServices; import org.apache.phoenix.query.QueryServicesOptions; import org.eclipse.jetty.server.Handler; import java.lang.management.ManagementFactory; import java.lang.management.RuntimeMXBean; import java.util.Arrays; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; /** * A query server for Phoenix over Calcite's Avatica. */ public final class Main extends Configured implements Tool, Runnable { protected static final Log LOG = LogFactory.getLog(Main.class); private final String[] argv; private final CountDownLatch runningLatch = new CountDownLatch(1); private HttpServer server = null; private int retCode = 0; private Throwable t = null; /** * Log information about the currently running JVM. */ public static void logJVMInfo() { // Print out vm stats before starting up. RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean(); if (runtime != null) { LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" + runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion()); LOG.info("vmInputArguments=" + runtime.getInputArguments()); } } /** * Logs information about the currently running JVM process including * the environment variables. Logging of env vars can be disabled by * setting {@code "phoenix.envvars.logging.disabled"} to {@code "true"}. * <p>If enabled, you can also exclude environment variables containing * certain substrings by setting {@code "phoenix.envvars.logging.skipwords"} * to comma separated list of such substrings. */ public static void logProcessInfo(Configuration conf) { // log environment variables unless asked not to if (conf == null || !conf.getBoolean(QueryServices.QUERY_SERVER_ENV_LOGGING_ATTRIB, false)) { Set<String> skipWords = new HashSet<String>(QueryServicesOptions.DEFAULT_QUERY_SERVER_SKIP_WORDS); if (conf != null) { String[] confSkipWords = conf.getStrings(QueryServices.QUERY_SERVER_ENV_LOGGING_SKIPWORDS_ATTRIB); if (confSkipWords != null) { skipWords.addAll(Arrays.asList(confSkipWords)); } } nextEnv: for (Map.Entry<String, String> entry : System.getenv().entrySet()) { String key = entry.getKey().toLowerCase(); String value = entry.getValue().toLowerCase(); // exclude variables which may contain skip words for(String skipWord : skipWords) { if (key.contains(skipWord) || value.contains(skipWord)) continue nextEnv; } LOG.info("env:"+entry); } } // and JVM info logJVMInfo(); } /** Constructor for use from {@link org.apache.hadoop.util.ToolRunner}. */ public Main() { this(null, null); } /** Constructor for use as {@link java.lang.Runnable}. 
*/ public Main(String[] argv, Configuration conf) { this.argv = argv; setConf(conf); } /** * @return the port number this instance is bound to, or {@code -1} if the server is not running. */ @VisibleForTesting public int getPort() { if (server == null) return -1; return server.getPort(); } /** * @return the return code from running as a {@link Tool}. */ @VisibleForTesting public int getRetCode() { return retCode; } /** * @return the throwable from an unsuccessful run, or null otherwise. */ @VisibleForTesting public Throwable getThrowable() { return t; } /** Calling thread waits until the server is running. */ public void awaitRunning() throws InterruptedException { runningLatch.await(); } /** Calling thread waits until the server is running. */ public void awaitRunning(long timeout, TimeUnit unit) throws InterruptedException { runningLatch.await(timeout, unit); } @Override public int run(String[] args) throws Exception { logProcessInfo(getConf()); try { // handle secure cluster credentials if ("kerberos".equalsIgnoreCase(getConf().get(QueryServices.QUERY_SERVER_HBASE_SECURITY_CONF_ATTRIB))) { String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost( getConf().get(QueryServices.QUERY_SERVER_DNS_INTERFACE_ATTRIB, "default"), getConf().get(QueryServices.QUERY_SERVER_DNS_NAMESERVER_ATTRIB, "default"))); if (LOG.isDebugEnabled()) { LOG.debug("Login to " + hostname + " using " + getConf().get(QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB) + " and principal " + getConf().get(QueryServices.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB) + "."); } SecurityUtil.login(getConf(), QueryServices.QUERY_SERVER_KEYTAB_FILENAME_ATTRIB, QueryServices.QUERY_SERVER_KERBEROS_PRINCIPAL_ATTRIB, hostname); LOG.info("Login successful."); } Class<? extends PhoenixMetaFactory> factoryClass = getConf().getClass( QueryServices.QUERY_SERVER_META_FACTORY_ATTRIB, PhoenixMetaFactoryImpl.class, PhoenixMetaFactory.class); int port = getConf().getInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB, QueryServicesOptions.DEFAULT_QUERY_SERVER_HTTP_PORT); LOG.debug("Listening on port " + port); PhoenixMetaFactory factory = factoryClass.getDeclaredConstructor(Configuration.class).newInstance(getConf()); Meta meta = factory.create(Arrays.asList(args)); final HandlerFactory handlerFactory = new HandlerFactory(); Service service = new LocalService(meta); server = new HttpServer(port, getHandler(getConf(), service, handlerFactory)); server.start(); runningLatch.countDown(); server.join(); return 0; } catch (Throwable t) { LOG.fatal("Unrecoverable service error. Shutting down.", t); this.t = t; return -1; } } /** * Instantiates the Handler for use by the Avatica (Jetty) server. * * @param conf The configuration * @param service The Avatica Service implementation * @param handlerFactory Factory used for creating a Handler * @return The Handler to use based on the configuration. 
*/ Handler getHandler(Configuration conf, Service service, HandlerFactory handlerFactory) { String serializationName = conf.get(QueryServices.QUERY_SERVER_SERIALIZATION_ATTRIB, QueryServicesOptions.DEFAULT_QUERY_SERVER_SERIALIZATION); Driver.Serialization serialization; // Otherwise, use what was provided in the configuration try { serialization = Driver.Serialization.valueOf(serializationName); } catch (Exception e) { LOG.error("Unknown message serialization type for " + serializationName); throw e; } Handler handler = handlerFactory.getHandler(service, serialization); LOG.info("Instantiated " + handler.getClass() + " for QueryServer"); return handler; } @Override public void run() { try { retCode = run(argv); } catch (Exception e) { // already logged } } public static void main(String[] argv) throws Exception { int ret = ToolRunner.run(HBaseConfiguration.create(), new Main(), argv); System.exit(ret); } }
{ "content_hash": "5820f1b2a4e407a01d79963406ae9fac", "timestamp": "", "source": "github", "line_count": 220, "max_line_length": 121, "avg_line_length": 36.7, "alnum_prop": 0.7007678969531831, "repo_name": "rvaleti/phoenix", "id": "106d4224157499259a5a88de876d5e2f5f980194", "size": "8875", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "phoenix-server/src/main/java/org/apache/phoenix/queryserver/server/Main.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "GAP", "bytes": "45024" }, { "name": "HTML", "bytes": "18969" }, { "name": "Java", "bytes": "12006360" }, { "name": "JavaScript", "bytes": "203766" }, { "name": "Protocol Buffer", "bytes": "13795" }, { "name": "Python", "bytes": "83417" }, { "name": "Scala", "bytes": "52875" }, { "name": "Shell", "bytes": "62142" } ], "symlink_target": "" }
package errors // Config is a configuration struct meant to be used with // github.com/spacemonkeygo/flagfile/utils.Setup // but can be set independently. var Config = struct { Stacklogsize int `default:"4096" usage:"the max stack trace byte length to log"` }{ Stacklogsize: 4096, }
{ "content_hash": "60430392bc693a1d0902e768bb431dd6", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 81, "avg_line_length": 28.8, "alnum_prop": 0.7465277777777778, "repo_name": "mavenraven/errors", "id": "3a2f2ee03f062047a26f7ffefe0ff56fa4a07580", "size": "886", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "config.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "39680" } ], "symlink_target": "" }
<?php namespace Ekyna\Bundle\AdminBundle\Dashboard; /** * Class Factory * @package Ekyna\Bundle\AdminBundle\Dashboard * @author Étienne Dauvergne <[email protected]> */ class Factory { /** * @var Widget\Factory */ protected $widgetFactory; /** * Constructor. * @param Widget\Factory $widgetFactory */ public function __construct(Widget\Factory $widgetFactory) { $this->widgetFactory = $widgetFactory; } /** * Creates the dashboard. * * @param array $config * @return Dashboard */ public function create(array $config) { $dashboard = new Dashboard(); foreach ($config as $name => $cfg) { $widget = $this->widgetFactory->create($name, $cfg['type'], $cfg['options']); $dashboard->addWidget($widget); } return $dashboard; } }
{ "content_hash": "c7ca3d129c64786f2b552b4ee5a8c20e", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 89, "avg_line_length": 20.15909090909091, "alnum_prop": 0.5794813979706878, "repo_name": "ekyna/AdminBundle", "id": "f760d4c2a9f59c195a01c3fe9c8adf8521c92984", "size": "888", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dashboard/Factory.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "53740" }, { "name": "HTML", "bytes": "33544" }, { "name": "JavaScript", "bytes": "6494" }, { "name": "PHP", "bytes": "274371" } ], "symlink_target": "" }
import * as React from 'react'; import {SvgIconProps} from '../../SvgIcon'; export default function FormatColorReset(props: SvgIconProps): React.ReactElement<SvgIconProps>;
{ "content_hash": "f9c1092599f387659b8394d44859318d", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 96, "avg_line_length": 43.5, "alnum_prop": 0.7701149425287356, "repo_name": "mattiamanzati/typings-material-ui", "id": "13f9291196d44b5a7006e9d9096b1a188abcf42d", "size": "174", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "svg-icons/editor/format-color-reset.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "171" }, { "name": "JavaScript", "bytes": "2513" } ], "symlink_target": "" }
"""wikisource. Usage: dasem.wikisource get <title> dasem.wikisource list Example: $ python -m dasem.wikisource get Mogens """ from __future__ import print_function import re from bs4 import BeautifulSoup import requests from six import u from .wikidata import query_to_dataframe SPARQL_QUERY = """ SELECT distinct ?item ?itemLabel ?article WHERE { ?article schema:about ?item. ?article schema:isPartOf <https://da.wikisource.org/>. values ?kind { wd:Q7725634 wd:Q1372064 wd:Q7366 wd:Q49848} ?item (wdt:P31/wdt:P279*) ?kind . SERVICE wikibase:label { bd:serviceParam wikibase:language "da,en". } } """ def extract_text(text): """Extract relevant part of text from page. Attempts with various regular expressions to extract the relevant text from the downloaded parsed wikipage. Poems might have the '<poem>...</poem>' construct. Text between these two tags are extracted and returned. Public domain license information is ignored. Parameters ---------- text : str Downloaded text. Returns ------- extracted_text : str Extracted text. """ # Match <poem> and just extract that. in_poem = re.findall(r'<poem>(.*?)</poem>', text, flags=re.UNICODE | re.DOTALL) if in_poem: return u"\n\n".join(in_poem) # Ignore license information. This might be above or below the text. text = re.sub((r'Public domainPublic domain(.*?), ' 'da det blev udgivet.{15,25}\.$'), '\n', text, flags=re.UNICODE | re.DOTALL | re.MULTILINE) regex = r'Teksten\[redig' + u('\xe9') + r'r\](.*)' after_teksten = re.findall(regex, text, flags=re.UNICODE | re.DOTALL) if after_teksten: return u"\n\n".join(after_teksten) # Match bottom of infobox on some of the songs rest = re.findall(r'.*Wikipedia-link\s*(.*)', text, flags=re.UNICODE | re.DOTALL) if rest: return u"\n\n".join(rest) return text def get_list_from_wikidata(): """Get list of works from Wikidata. Returns ------- df : pandas.DataFrame DataFrame with information from Wikidata. """ df = query_to_dataframe(SPARQL_QUERY) return df def get_text_by_title(title): """Get text from Wikisource based on title. If the text is split over several wikipages (which is the case with novels) then the full text will not be returned, - only the index page. Parameters ---------- title : str Title of wikipage on Danish Wikisource. Returns ------- text : str or None The text. Returns none if the page does not exist. """ url = 'https://da.wikisource.org/w/api.php' params = {'page': title, 'action': 'parse', 'format': 'json'} data = requests.get(url, params=params).json() if 'parse' in data: text = BeautifulSoup(data['parse']['text']['*'], "lxml").get_text() else: text = None return text def main(): """Handle command-line interface.""" from docopt import docopt arguments = docopt(__doc__) if arguments['get']: text = get_text_by_title(arguments['<title>']) if text: extracted_text = extract_text(text) print(extracted_text.encode('utf-8')) elif arguments['list']: df = get_list_from_wikidata() print(df.to_csv(encoding='utf-8')) if __name__ == '__main__': main()
{ "content_hash": "a6b449a5fd9b5a9dec362b2fa6ab980a", "timestamp": "", "source": "github", "line_count": 142, "max_line_length": 79, "avg_line_length": 24.47887323943662, "alnum_prop": 0.6101841196777905, "repo_name": "fnielsen/dasem", "id": "e33432e08fe82ad7aa2af92e5d86c25d4b605d9a", "size": "3476", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dasem/wikisource.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "2069" }, { "name": "Jupyter Notebook", "bytes": "279950" }, { "name": "Python", "bytes": "283237" } ], "symlink_target": "" }
// Copyright (c) 2018-2019 Intel Corporation // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. #include "umc_defs.h" #include "mfx_umc_alloc_wrapper.h" #include "mfx_common.h" #include "libmfx_core.h" #include "mfx_common_int.h" #if defined (MFX_ENABLE_MJPEG_VIDEO_DECODE) #include "mfx_vpp_jpeg_d3d9.h" UMC::Status mfx_UMC_FrameAllocator_D3D_Converter::InitMfx(UMC::FrameAllocatorParams *, VideoCORE* mfxCore, const mfxVideoParam *params, const mfxFrameAllocRequest *request, mfxFrameAllocResponse *response, bool isUseExternalFrames, bool isSWplatform) { UMC::AutomaticUMCMutex guard(m_guard); m_isSWDecode = isSWplatform; if (!mfxCore || !params) return UMC::UMC_ERR_NULL_PTR; if (!isUseExternalFrames && (!request || !response)) return UMC::UMC_ERR_NULL_PTR; m_pCore = mfxCore; m_IsUseExternalFrames = isUseExternalFrames; UMC::ColorFormat color_format; switch (params->mfx.FrameInfo.FourCC) { case MFX_FOURCC_NV12: color_format = UMC::NV12; break; case MFX_FOURCC_RGB4: color_format = UMC::RGB32; break; //case MFX_FOURCC_YV12: // color_format = UMC::YUV420; // break; case MFX_FOURCC_YUV400: color_format = UMC::GRAY; break; case MFX_FOURCC_IMC3: color_format = UMC::IMC3; break; case MFX_FOURCC_YUV422H: case MFX_FOURCC_YUV422V: color_format = UMC::YUV422; break; case MFX_FOURCC_YUV444: color_format = UMC::YUV444; break; case MFX_FOURCC_YUV411: color_format = UMC::YUV411; break; case MFX_FOURCC_RGBP: color_format = UMC::YUV444; break; case MFX_FOURCC_YUY2: color_format = UMC::YUY2; break; default: return UMC::UMC_ERR_UNSUPPORTED; } UMC::Status umcSts = m_info.Init(params->mfx.FrameInfo.Width, params->mfx.FrameInfo.Height, color_format, 8); m_surface_info = params->mfx.FrameInfo; if (umcSts != UMC::UMC_OK) return umcSts; if (!m_IsUseExternalFrames || !m_isSWDecode) { m_frameDataInternal.Resize(response->NumFrameActual); m_extSurfaces.resize(response->NumFrameActual); for (mfxU32 i = 0; i < response->NumFrameActual; i++) { mfxFrameSurface1 & surface = m_frameDataInternal.GetSurface(i); surface.Data.MemId = response->mids[i]; MFX_INTERNAL_CPY(&surface.Info, &request->Info, sizeof(mfxFrameInfo)); // fill UMC frameData UMC::FrameData& frameData = m_frameDataInternal.GetFrameData(i); // set correct width & height to planes frameData.Init(&m_info, (UMC::FrameMemID)i, this); } } mfxCore->SetWrapper(this); return UMC::UMC_OK; } UMC::Status mfx_UMC_FrameAllocator_D3D_Converter::Reset() { m_pCc.reset(); return mfx_UMC_FrameAllocator_D3D::Reset(); } mfxStatus 
mfx_UMC_FrameAllocator_D3D_Converter::InitVideoVppJpegD3D9(const mfxVideoParam *params) { bool isD3DToSys = false; if(params->IOPattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY) { isD3DToSys = true; } else if (params->IOPattern & MFX_IOPATTERN_OUT_OPAQUE_MEMORY) { mfxExtOpaqueSurfaceAlloc *pOpaqAlloc = (mfxExtOpaqueSurfaceAlloc *)GetExtendedBuffer(params->ExtParam, params->NumExtParam, MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION); MFX_CHECK(pOpaqAlloc, MFX_ERR_INVALID_VIDEO_PARAM); isD3DToSys = (pOpaqAlloc->Out.Type & MFX_MEMTYPE_SYSTEM_MEMORY) != 0; } m_pCc.reset(new VideoVppJpegD3D9(m_pCore, isD3DToSys, params->IOPattern & MFX_IOPATTERN_OUT_OPAQUE_MEMORY)); mfxStatus mfxSts; if (params->mfx.Rotation == MFX_ROTATION_90 || params->mfx.Rotation == MFX_ROTATION_270) { mfxVideoParam localParams = *params; // Frame allocation is possible inside VideoVppJpegD3D9::Init(). // Those frames must have width/height of target image, so the swapping. std::swap(localParams.mfx.FrameInfo.Width, localParams.mfx.FrameInfo.Height); std::swap(localParams.mfx.FrameInfo.CropW, localParams.mfx.FrameInfo.CropH); mfxSts = m_pCc->Init(&localParams); } else { mfxSts = m_pCc->Init(params); } MFX_CHECK_STS( mfxSts ); return mfxSts; } mfxStatus mfx_UMC_FrameAllocator_D3D_Converter::FindSurfaceByMemId(const UMC::FrameData* in, bool isOpaq, const mfxHDLPair &hdlPair, // output param mfxFrameSurface1 &surface) { MFX_CHECK_NULL_PTR1(in); UMC::FrameMemID index = in->GetFrameMID(); mfxMemId memInter = m_frameDataInternal.GetSurface(index).Data.MemId; mfxMemId memId = isOpaq?(memInter):(m_pCore->MapIdx(memInter)); // if memid of in is same as memid of surface_work, StartPreparingToOutput() must not be called MFX_CHECK_WITH_ASSERT(!hdlPair.first || hdlPair.first != memId, MFX_ERR_UNSUPPORTED); surface = m_frameDataInternal.GetSurface(index); return MFX_ERR_NONE; } mfxStatus mfx_UMC_FrameAllocator_D3D_Converter::StartPreparingToOutput(mfxFrameSurface1 *surface_work, UMC::FrameData* in, const mfxVideoParam *par, mfxU16 *taskId, bool isOpaq) { UMC::AutomaticUMCMutex guard(m_guard); mfxStatus sts = MFX_ERR_NONE; if (!m_pCc) { MFX_SAFE_CALL( InitVideoVppJpegD3D9(par) ); } mfxHDLPair hdlPair; if(isOpaq) sts = m_pCore->GetFrameHDL(surface_work->Data.MemId, (mfxHDL*)&hdlPair); else sts = m_pCore->GetExternalFrameHDL(surface_work->Data.MemId, (mfxHDL*)&hdlPair); if (sts == MFX_ERR_UNDEFINED_BEHAVIOR // nothing found by Get*FrameHDL() || sts == MFX_ERR_UNSUPPORTED) // Get*FrameHDL() does not support obtaining OS-specific handle { hdlPair.first = nullptr; } else { MFX_CHECK_STS(sts); } // for interlaced case, [0] is top and [1] is bottom; for progressive only [0] is used mfxFrameSurface1 srcSurface[2]; for (int i = 0; i < 1 + (par->mfx.FrameInfo.PicStruct != MFX_PICSTRUCT_PROGRESSIVE); ++i) { MFX_SAFE_CALL( FindSurfaceByMemId(&in[i], isOpaq, hdlPair, srcSurface[i]) ); /* JPEG standard does not support crops as it is done in AVC, so: - CropX and CropY are always 0, - CropW and CropH represents picture size for current frame (in case of rotation, surface_work has rotated CropW and CropH), - Width and Height represents surface allocation size (they are initialized in decoder Init and are correct). 
*/ if (par->mfx.Rotation == MFX_ROTATION_0 || par->mfx.Rotation == MFX_ROTATION_180) { srcSurface[i].Info.CropW = surface_work->Info.CropW; srcSurface[i].Info.CropH = surface_work->Info.CropH; } else { srcSurface[i].Info.CropW = surface_work->Info.CropH; srcSurface[i].Info.CropH = surface_work->Info.CropW; } if (par->mfx.FrameInfo.PicStruct != MFX_PICSTRUCT_PROGRESSIVE) { srcSurface[i].Info.CropH /= 2; } } return par->mfx.FrameInfo.PicStruct == MFX_PICSTRUCT_PROGRESSIVE? m_pCc->BeginHwJpegProcessing(&srcSurface[0], surface_work, taskId) : m_pCc->BeginHwJpegProcessing(&srcSurface[0], &srcSurface[1], surface_work, taskId); } mfxStatus mfx_UMC_FrameAllocator_D3D_Converter::CheckPreparingToOutput(mfxFrameSurface1 *surface_work, UMC::FrameData* in, const mfxVideoParam * par, mfxU16 taskId) { UMC::AutomaticUMCMutex guard(m_guard); MFX_CHECK_NULL_PTR1(m_pCc); mfxStatus sts = m_pCc->QueryTaskRoutine(taskId); if (sts == MFX_TASK_BUSY) { return sts; } if (sts != MFX_TASK_DONE) return sts; if(par->mfx.FrameInfo.PicStruct == MFX_PICSTRUCT_PROGRESSIVE) { UMC::FrameMemID index = in->GetFrameMID(); mfxFrameSurface1 src = m_frameDataInternal.GetSurface(index); //Performance issue. We need to unlock mutex to let decoding thread run async. guard.Unlock(); sts = m_pCc->EndHwJpegProcessing(&src, surface_work); guard.Lock(); if (sts < MFX_ERR_NONE) return sts; if (!m_IsUseExternalFrames) { m_pCore->DecreaseReference(&surface_work->Data); m_extSurfaces[index].FrameSurface = 0; } } else { UMC::FrameMemID indexTop = in[0].GetFrameMID(); UMC::FrameMemID indexBottom = in[1].GetFrameMID(); mfxFrameSurface1 srcTop, srcBottom; srcTop = m_frameDataInternal.GetSurface(indexTop); srcBottom = m_frameDataInternal.GetSurface(indexBottom); //Performance issue. We need to unlock mutex to let decoding thread run async. guard.Unlock(); sts = m_pCc->EndHwJpegProcessing(&srcTop, &srcBottom, surface_work); guard.Lock(); if (sts < MFX_ERR_NONE) return sts; if (!m_IsUseExternalFrames) { m_pCore->DecreaseReference(&surface_work->Data); m_extSurfaces[indexTop].FrameSurface = 0; } } return MFX_ERR_NONE; } void mfx_UMC_FrameAllocator_D3D_Converter::SetJPEGInfo(mfx_UMC_FrameAllocator_D3D_Converter::JPEG_Info * jpegInfo) { m_jpegInfo = *jpegInfo; } #endif //defined (MFX_ENABLE_MJPEG_VIDEO_DECODE)
{ "content_hash": "60847d881ed85334ac788c99548d9291", "timestamp": "", "source": "github", "line_count": 315, "max_line_length": 171, "avg_line_length": 36.22857142857143, "alnum_prop": 0.5956887486855941, "repo_name": "Intel-Media-SDK/MediaSDK", "id": "4f7f4414467a559cc9d1d88180cc8ec37d8618f5", "size": "11412", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_studio/shared/src/mfx_umc_mjpeg_vpp.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "575675" }, { "name": "Batchfile", "bytes": "890" }, { "name": "C", "bytes": "1400627" }, { "name": "C#", "bytes": "41124" }, { "name": "C++", "bytes": "69118186" }, { "name": "CMake", "bytes": "196487" }, { "name": "M4", "bytes": "25814" }, { "name": "Makefile", "bytes": "51696" }, { "name": "Python", "bytes": "524446" }, { "name": "Shell", "bytes": "42934" }, { "name": "Starlark", "bytes": "22028" } ], "symlink_target": "" }
class Browser; namespace favicon_base { struct FaviconImageResult; } namespace content { class NavigationEntry; class WebContents; } /////////////////////////////////////////////////////////////////////////////// // // BackForwardMenuModel // // Interface for the showing of the dropdown menu for the Back/Forward buttons. // Actual implementations are platform-specific. /////////////////////////////////////////////////////////////////////////////// class BackForwardMenuModel : public ui::MenuModel { public: // These are IDs used to identify individual UI elements within the // browser window using View::GetViewByID. enum class ModelType { kForward = 1, kBackward = 2 }; BackForwardMenuModel(Browser* browser, ModelType model_type); BackForwardMenuModel(const BackForwardMenuModel&) = delete; BackForwardMenuModel& operator=(const BackForwardMenuModel&) = delete; ~BackForwardMenuModel() override; // MenuModel implementation. bool HasIcons() const override; // Returns how many items the menu should show, including history items, // chapter-stops, separators and the Show Full History link. This function // uses GetHistoryItemCount() and GetChapterStopCount() internally to figure // out the total number of items to show. int GetItemCount() const override; ItemType GetTypeAt(int index) const override; ui::MenuSeparatorType GetSeparatorTypeAt(int index) const override; int GetCommandIdAt(int index) const override; std::u16string GetLabelAt(int index) const override; bool IsItemDynamicAt(int index) const override; bool GetAcceleratorAt(int index, ui::Accelerator* accelerator) const override; bool IsItemCheckedAt(int index) const override; int GetGroupIdAt(int index) const override; ui::ImageModel GetIconAt(int index) const override; ui::ButtonMenuItemModel* GetButtonMenuItemAt(int index) const override; bool IsEnabledAt(int index) const override; MenuModel* GetSubmenuModelAt(int index) const override; void ActivatedAt(int index) override; void ActivatedAt(int index, int event_flags) override; void MenuWillShow() override; // Is the item at |index| a separator? bool IsSeparator(int index) const; private: friend class BackFwdMenuModelTest; FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelTest, BasicCase); FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelTest, MaxItemsTest); FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelTest, ChapterStops); FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelTest, EscapeLabel); FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelTest, FaviconLoadTest); FRIEND_TEST_ALL_PREFIXES(BackFwdMenuModelIncognitoTest, IncognitoCaseTest); FRIEND_TEST_ALL_PREFIXES(ChromeNavigationBrowserTest, NoUserActivationSetSkipOnBackForward); // Requests a favicon from the FaviconService. Called by GetIconAt if the // NavigationEntry has an invalid favicon. void FetchFavicon(content::NavigationEntry* entry); // Callback from the favicon service. void OnFavIconDataAvailable( int navigation_entry_unique_id, const favicon_base::FaviconImageResult& image_result); // Allows the unit test to use its own dummy tab contents. void set_test_web_contents(content::WebContents* test_web_contents) { test_web_contents_ = test_web_contents; } // Returns how many history items the menu should show. For example, if the // navigation controller of the current tab has a current entry index of 5 and // forward_direction_ is false (we are the back button delegate) then this // function will return 5 (representing 0-4). If forward_direction_ is // true (we are the forward button delegate), then this function will return // the number of entries after 5. 
Note, though, that in either case it will // not report more than kMaxHistoryItems. The number returned also does not // include the separator line after the history items (nor the separator for // the "Show Full History" link). int GetHistoryItemCount() const; // Returns how many chapter-stop items the menu should show. For the // definition of a chapter-stop, see GetIndexOfNextChapterStop(). The number // returned does not include the separator lines before and after the // chapter-stops. int GetChapterStopCount(int history_items) const; // Finds the next chapter-stop in the NavigationEntryList starting from // the index specified in |start_from| and continuing in the direction // specified (|forward|) until either a chapter-stop is found or we reach the // end, in which case -1 is returned. If |start_from| is out of bounds, -1 // will also be returned. A chapter-stop is defined as the last page the user // browsed to within the same domain. For example, if the user's homepage is // Google and they navigate to Google pages G1, G2 and G3 before heading over // to WikiPedia for pages W1 and W2 and then back to Google for pages G4 and // G5 then G3, W2 and G5 are considered chapter-stops. The return value from // this function is an index into the NavigationEntryList vector. int GetIndexOfNextChapterStop(int start_from, bool forward) const; // Finds a given chapter-stop starting at the currently active entry in the // NavigationEntryList vector advancing first forward or backward by |offset| // (depending on the direction specified in parameter |forward|). It also // allows you to skip chapter-stops by specifying a positive value for |skip|. // Example: FindChapterStop(5, false, 3) starts with the currently active // index, subtracts 5 from it and then finds the fourth chapter-stop before // that index (skipping the first 3 it finds). // Example: FindChapterStop(0, true, 0) is functionally equivalent to // calling GetIndexOfNextChapterStop(GetCurrentEntryIndex(), true). // // NOTE: Both |offset| and |skip| must be non-negative. The return value from // this function is an index into the NavigationEntryList vector. If |offset| // is out of bounds or if we skip too far (run out of chapter-stops) this // function returns -1. int FindChapterStop(int offset, bool forward, int skip) const; // How many items (max) to show in the back/forward history menu dropdown. static const int kMaxHistoryItems; // How many chapter-stops (max) to show in the back/forward dropdown list. static const int kMaxChapterStops; // Takes a menu item index as passed in through one of the menu delegate // functions and converts it into an index into the NavigationEntryList // vector. |index| can point to a separator, or the // "Show Full History" link in which case this function returns -1. int MenuIndexToNavEntryIndex(int index) const; // Does the item have a command associated with it? bool ItemHasCommand(int index) const; // Returns true if there is an icon for this menu item. bool ItemHasIcon(int index) const; // Allow the unit test to use the "Show Full History" label. std::u16string GetShowFullHistoryLabel() const; // Looks up a NavigationEntry by menu id. content::NavigationEntry* GetNavigationEntry(int index) const; // Retrieves the WebContents pointer to use, which is either the one that // the unit test sets (using set_test_web_contents) or the one from // the browser window. 
content::WebContents* GetWebContents() const; // Build a string version of a user action on this menu, used as an // identifier for logging user behavior. // E.g. BuildActionName("Click", 2) returns "BackMenu_Click2". // An index of -1 means no index. std::string BuildActionName(const std::string& name, int index) const; // Returns true if "Show Full History" item should be visible. It is visible // only in outside incognito mode. bool ShouldShowFullHistoryBeVisible() const; const raw_ptr<Browser> browser_; // The unit tests will provide their own WebContents to use. raw_ptr<content::WebContents> test_web_contents_ = nullptr; // Represents whether this is the delegate for the forward button or the // back button. const ModelType model_type_; // Keeps track of which favicons have already been requested from the history // to prevent duplicate requests, identified by // NavigationEntry->GetUniqueID(). base::flat_set<int> requested_favicons_; // Used for loading favicons. base::CancelableTaskTracker cancelable_task_tracker_; }; #endif // CHROME_BROWSER_UI_TOOLBAR_BACK_FORWARD_MENU_MODEL_H_
{ "content_hash": "1f9c787b357fdd5247945cadbacb59d3", "timestamp": "", "source": "github", "line_count": 185, "max_line_length": 80, "avg_line_length": 45.42702702702703, "alnum_prop": 0.7386958591147073, "repo_name": "ric2b/Vivaldi-browser", "id": "bf27516896f553613967d9c9b7655631a6e79b90", "size": "9006", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "chromium/chrome/browser/ui/toolbar/back_forward_menu_model.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
module.exports = { "index": 30, "lineNumber": 1, "column": 31, "description": "Parameter name eval or arguments is not allowed in strict mode" };
{ "content_hash": "fb23e1a1ee61108c922ec358b1af7716", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 83, "avg_line_length": 26.833333333333332, "alnum_prop": 0.6335403726708074, "repo_name": "pze/espree", "id": "de5fb84e6f110ef150ca23a1ca2d19ce5f557819", "size": "161", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "tests/fixtures/ecma-features/arrowFunctions/error-strict-eval.result.js", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C++", "bytes": "23886" }, { "name": "HTML", "bytes": "373488" }, { "name": "JavaScript", "bytes": "335918" } ], "symlink_target": "" }
import torch import torch.nn as nn import torch.nn.functional as F from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence, PackedSequence from sklearn.metrics import confusion_matrix from utils import create_emb_layer def accuracy(pred, label): correct = [] for i in range(len(pred)): correct.append(pred[i]==label[i]) correct = torch.stack(correct) return torch.sum(correct) def create_sorted_batch(batch): """ To create packed sequence later in forward pass to utilise Mini-batch for LSTM input """ sent_lengths, perm_idx = batch['sent_len'].sort(0, descending=True) batch['asp_len'] = batch['asp_len'][perm_idx] asp_lengths = batch['asp_len'] batch['sentence'] = batch['sentence'][perm_idx][:, :sent_lengths.max()] batch['aspect'] = batch['aspect'][perm_idx][:, :asp_lengths.max()] batch['sentiment']= batch['sentiment'][perm_idx] batch['sent_len'] = sent_lengths return batch class ATAE_LSTM(nn.Module): def __init__(self, weights_matrix, hidden_dim, output_dim, dropout, words, word2idx): super().__init__() self.embedding, embedding_dim= create_emb_layer(weights_matrix) self.lstm = nn.LSTM(embedding_dim*2, hidden_dim, num_layers=2, bidirectional=True, dropout=0.5, batch_first=True) self.attn = nn.Linear(hidden_dim*2+embedding_dim, hidden_dim, bias=False) self.fc = nn.Linear(hidden_dim*2, output_dim) self.dropout = nn.Dropout(dropout) ################### Projection Parameters ################### self.v = nn.Parameter(torch.rand(hidden_dim)) self.Wp = nn.Parameter(torch.rand(hidden_dim*2, hidden_dim*2)) self.Wx = nn.Parameter(torch.rand(hidden_dim*2, hidden_dim*2)) def forward(self, vocab): with torch.no_grad(): s_shape = vocab['sentence'].shape a_shape = vocab['aspect'].shape s_embedding = self.embedding(vocab['sentence'].cuda()) ########################################################## ############### Average the aspect embedding ############# ########################################################## """ Note:- simply averaging the word embeddings of a target phrase is not sufficient to represent the semantics of the target phrase. 
Reference - https://aclweb.org/anthology/D16-1058 Future work - Learning aspect embedding """ ########################################################## a_embedding = self.embedding(vocab['aspect'].cuda()) a_embedding = torch.unsqueeze(torch.mean(a_embedding, 1),1).repeat(1,s_shape[1],1) # Concatenate each word in sentence with aspect vector concated_input = self.dropout(torch.cat((s_embedding,a_embedding),-1)) packed_input = pack_padded_sequence(concated_input, vocab['sent_len'], batch_first=True) out, (h, c) = self.lstm(packed_input) ########################################################################## """ Concatenate the aspect vector into the sentence hidden representations for computing attention weights """ ########################################################################## with torch.no_grad(): unpacked_out, _ = pad_packed_sequence(out, batch_first=True) concated_out = torch.cat((unpacked_out, a_embedding),-1) attn_in = concated_out ########################################################## ################# Attention ############################# ########################################################## score = F.tanh(self.attn(attn_in.cuda())).transpose(2,1) v = self.v.repeat(unpacked_out.shape[0],1).unsqueeze(1) attn_score = torch.bmm(v,score).squeeze(1) attn_weights = F.softmax(attn_score, dim=1).unsqueeze(1) r = torch.bmm(attn_weights, unpacked_out).squeeze(1) ########################################################## ################# h' = tanh(Wp.r + Wx.Hn) ################ ########################################################## final_rep = F.tanh(r.matmul(self.Wp) + unpacked_out[:,-1,:].matmul(self.Wx)) pred = self.fc(final_rep) return pred ############################################################## ######################## TRAINING ########################### ############################################################## def train_(model, batches, optimizer, criterion): model.train() total_loss = 0 total_acc = 0 count = 0 cm = torch.zeros(3,3) for batch in batches: batch = create_sorted_batch(batch) label = batch['sentiment'] optimizer.zero_grad() pred = model(batch) loss = criterion(pred, label.cuda()) acc = accuracy(torch.argmax(F.softmax(pred,dim=1),1).float(), label.float().cuda()) cm += torch.from_numpy(confusion_matrix(label, torch.argmax(pred,1), \ labels=[torch.tensor(0), torch.tensor(1), torch.tensor(2)])).float() loss.backward() optimizer.step() total_loss += loss.item() total_acc += acc.item() count += len(label) return total_loss/len(batches), total_acc/count, cm ############################################################## ######################## Validation ########################### ############################################################## def eval_(model, batches, criterion): model.eval() total_loss = 0 total_acc = 0 count = 0 cm = torch.zeros(3,3) for batch in batches: batch = create_sorted_batch(batch) label = batch['sentiment'] pred = model(batch) loss = criterion(pred, label.cuda()) acc = accuracy(torch.argmax(F.softmax(pred,dim=1),1).float(), label.float().cuda()) cm += torch.from_numpy(confusion_matrix(label, torch.argmax(pred,1), \ labels=[torch.tensor(0), torch.tensor(1), torch.tensor(2)])).float() total_loss += loss.item() total_acc += acc.item() count += len(label) return total_loss/len(batches), total_acc/count, cm ############################################################## #################### Test/ Prediction ####################### ############################################################## def test(model, 
batches, weights_matrix): model.embedding, _ = create_emb_layer(weights_matrix) model.embedding = model.embedding.cuda() model.eval() total_acc = 0 count = 0 cm = torch.zeros(3,3) for batch in batches: batch = create_sorted_batch(batch) label = batch['sentiment'] pred = model(batch) acc = accuracy(torch.argmax(F.softmax(pred,dim=1),1).float(), label.float().cuda()) cm += torch.from_numpy(confusion_matrix(label, torch.argmax(pred,1), \ labels=[torch.tensor(0), torch.tensor(1), torch.tensor(2)])).float() total_acc += acc.item() count += len(label) return total_acc/count, cm def predict(model, batches, weights_matrix): model.embedding, _ = create_emb_layer(weights_matrix) model.embedding = model.embedding.cuda() model.eval() # Only 1 batch and 1 item in that batch for batch in batches: pred = model(batch) return torch.argmax(F.softmax(pred,dim=1),1)
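A small illustrative sketch (all values invented) of how create_sorted_batch() above reorders a padded mini-batch before ATAE_LSTM.forward() packs it; it assumes only PyTorch and the functions defined in this file.

# Hypothetical toy batch: 3 padded sentences (max length 6) and aspect phrases (max length 2).
import torch

toy_batch = {
    'sentence':  torch.randint(1, 100, (3, 6)),
    'aspect':    torch.randint(1, 100, (3, 2)),
    'sentiment': torch.tensor([0, 2, 1]),
    'sent_len':  torch.tensor([4, 6, 3]),
    'asp_len':   torch.tensor([1, 2, 1]),
}

sorted_batch = create_sorted_batch(toy_batch)
# Sentences are now sorted longest-first and truncated to the batch maximum,
# which is the layout pack_padded_sequence() expects inside ATAE_LSTM.forward().
print(sorted_batch['sent_len'])   # tensor([6, 4, 3])
print(sorted_batch['sentiment'])  # tensor([2, 0, 1]) -- labels follow the same permutation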
{ "content_hash": "a2eed6f9cfcf94586bea18ce35cd87c7", "timestamp": "", "source": "github", "line_count": 190, "max_line_length": 121, "avg_line_length": 41.84736842105263, "alnum_prop": 0.48132310401207395, "repo_name": "prakhar2b/Weekend-Projects", "id": "134355699b05aacc960f2c1c81e861b80dc292d6", "size": "8373", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Machine-Learning/model.py", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "6818" }, { "name": "Jupyter Notebook", "bytes": "217785" }, { "name": "Python", "bytes": "21895" } ], "symlink_target": "" }
.class public interface abstract Landroid/widget/ExpandableListViewMz$scrollActionOnGroupExpand; .super Ljava/lang/Object; .source "ExpandableListViewMz.java" # annotations .annotation system Ldalvik/annotation/EnclosingClass; value = Landroid/widget/ExpandableListViewMz; .end annotation .annotation system Ldalvik/annotation/InnerClass; accessFlags = 0x609 name = "scrollActionOnGroupExpand" .end annotation # virtual methods .method public abstract scrollAfterGroupExpand(I)V .end method
{ "content_hash": "9a1941244010d771083fbf523901d92f", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 96, "avg_line_length": 26.736842105263158, "alnum_prop": 0.8169291338582677, "repo_name": "hexiaoshuai/Flyme_device_ZTE_A1", "id": "7e049f467d5fd89add7dd7c2d45d783707cd434d", "size": "508", "binary": false, "copies": "4", "ref": "refs/heads/C880AV1.0.0B06", "path": "framework.jar.out/smali/android/widget/ExpandableListViewMz$scrollActionOnGroupExpand.smali", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "GLSL", "bytes": "1500" }, { "name": "HTML", "bytes": "10195" }, { "name": "Makefile", "bytes": "11258" }, { "name": "Python", "bytes": "924" }, { "name": "Shell", "bytes": "2734" }, { "name": "Smali", "bytes": "234274633" } ], "symlink_target": "" }
class ProfileManagerAndroid : public ProfileManagerObserver { public: ProfileManagerAndroid(); ~ProfileManagerAndroid() override; void OnProfileAdded(Profile* profile) override; void OnProfileMarkedForPermanentDeletion(Profile* profile) override; }; #endif // CHROME_BROWSER_PROFILES_PROFILE_MANAGER_ANDROID_H_
{ "content_hash": "2c7d4608966d480c97e8781e4c9326be", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 70, "avg_line_length": 32.3, "alnum_prop": 0.8018575851393189, "repo_name": "nwjs/chromium.src", "id": "3e2881c0269da0fc7f5ef3971eb4e062360c16e1", "size": "667", "binary": false, "copies": "6", "ref": "refs/heads/nw70", "path": "chrome/browser/profiles/profile_manager_android.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.Contracts; using System.Globalization; using System.Runtime.Serialization; using System.Security; using System.Text; using System.Threading; using RuntimeTypeCache = System.RuntimeType.RuntimeTypeCache; namespace System.Reflection { [Serializable] internal sealed class RuntimeMethodInfo : MethodInfo, ISerializable, IRuntimeMethodInfo { #region Private Data Members private IntPtr m_handle; private RuntimeTypeCache m_reflectedTypeCache; private string m_name; private string m_toString; private ParameterInfo[] m_parameters; private ParameterInfo m_returnParameter; private BindingFlags m_bindingFlags; private MethodAttributes m_methodAttributes; private Signature m_signature; private RuntimeType m_declaringType; private object m_keepalive; private INVOCATION_FLAGS m_invocationFlags; internal INVOCATION_FLAGS InvocationFlags { get { if ((m_invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED) == 0) { INVOCATION_FLAGS invocationFlags = INVOCATION_FLAGS.INVOCATION_FLAGS_UNKNOWN; Type declaringType = DeclaringType; // // first take care of all the NO_INVOKE cases. if (ContainsGenericParameters || ReturnType.IsByRef || (declaringType != null && declaringType.ContainsGenericParameters) || ((CallingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs) || ((Attributes & MethodAttributes.RequireSecObject) == MethodAttributes.RequireSecObject)) { // We don't need other flags if this method cannot be invoked invocationFlags = INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE; } else { // this should be an invocable method, determine the other flags that participate in invocation invocationFlags = RuntimeMethodHandle.GetSecurityFlags(this); if ((invocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY) == 0) { if ((Attributes & MethodAttributes.MemberAccessMask) != MethodAttributes.Public || (declaringType != null && declaringType.NeedsReflectionSecurityCheck)) { // If method is non-public, or declaring type is not visible invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY; } else if (IsGenericMethod) { Type[] genericArguments = GetGenericArguments(); for (int i = 0; i < genericArguments.Length; i++) { if (genericArguments[i].NeedsReflectionSecurityCheck) { invocationFlags |= INVOCATION_FLAGS.INVOCATION_FLAGS_NEED_SECURITY; break; } } } } } m_invocationFlags = invocationFlags | INVOCATION_FLAGS.INVOCATION_FLAGS_INITIALIZED; } return m_invocationFlags; } } #endregion #region Constructor internal RuntimeMethodInfo( RuntimeMethodHandleInternal handle, RuntimeType declaringType, RuntimeTypeCache reflectedTypeCache, MethodAttributes methodAttributes, BindingFlags bindingFlags, object keepalive) { Contract.Ensures(!m_handle.IsNull()); Debug.Assert(!handle.IsNullHandle()); Debug.Assert(methodAttributes == RuntimeMethodHandle.GetAttributes(handle)); m_bindingFlags = bindingFlags; m_declaringType = declaringType; m_keepalive = keepalive; m_handle = handle.Value; m_reflectedTypeCache = reflectedTypeCache; m_methodAttributes = methodAttributes; } #endregion #region Private Methods RuntimeMethodHandleInternal IRuntimeMethodInfo.Value { get { return new RuntimeMethodHandleInternal(m_handle); } } private RuntimeType ReflectedTypeInternal { get { return m_reflectedTypeCache.GetRuntimeType(); } } private ParameterInfo[] FetchNonReturnParameters() { if (m_parameters == null) m_parameters = RuntimeParameterInfo.GetParameters(this, this, Signature); return m_parameters; } private ParameterInfo 
FetchReturnParameter() { if (m_returnParameter == null) m_returnParameter = RuntimeParameterInfo.GetReturnParameter(this, this, Signature); return m_returnParameter; } #endregion #region Internal Members internal override string FormatNameAndSig(bool serialization) { // Serialization uses ToString to resolve MethodInfo overloads. StringBuilder sbName = new StringBuilder(Name); // serialization == true: use unambiguous (except for assembly name) type names to distinguish between overloads. // serialization == false: use basic format to maintain backward compatibility of MethodInfo.ToString(). TypeNameFormatFlags format = serialization ? TypeNameFormatFlags.FormatSerialization : TypeNameFormatFlags.FormatBasic; if (IsGenericMethod) sbName.Append(RuntimeMethodHandle.ConstructInstantiation(this, format)); sbName.Append("("); sbName.Append(ConstructParameters(GetParameterTypes(), CallingConvention, serialization)); sbName.Append(")"); return sbName.ToString(); } internal override bool CacheEquals(object o) { RuntimeMethodInfo m = o as RuntimeMethodInfo; if ((object)m == null) return false; return m.m_handle == m_handle; } internal Signature Signature { get { if (m_signature == null) m_signature = new Signature(this, m_declaringType); return m_signature; } } internal BindingFlags BindingFlags { get { return m_bindingFlags; } } internal RuntimeMethodInfo GetParentDefinition() { if (!IsVirtual || m_declaringType.IsInterface) return null; RuntimeType parent = (RuntimeType)m_declaringType.BaseType; if (parent == null) return null; int slot = RuntimeMethodHandle.GetSlot(this); if (RuntimeTypeHandle.GetNumVirtuals(parent) <= slot) return null; return (RuntimeMethodInfo)RuntimeType.GetMethodBase(parent, RuntimeTypeHandle.GetMethodAt(parent, slot)); } // Unlike DeclaringType, this will return a valid type even for global methods internal RuntimeType GetDeclaringTypeInternal() { return m_declaringType; } #endregion #region Object Overrides public override String ToString() { if (m_toString == null) m_toString = ReturnType.FormatTypeName() + " " + FormatNameAndSig(); return m_toString; } public override int GetHashCode() { // See RuntimeMethodInfo.Equals() below. if (IsGenericMethod) return ValueType.GetHashCodeOfPtr(m_handle); else return base.GetHashCode(); } public override bool Equals(object obj) { if (!IsGenericMethod) return obj == (object)this; // We cannot do simple object identity comparisons for generic methods. // Equals will be called in CerHashTable when RuntimeType+RuntimeTypeCache.GetGenericMethodInfo() // retrieve items from and insert items into s_methodInstantiations which is a CerHashtable. 
RuntimeMethodInfo mi = obj as RuntimeMethodInfo; if (mi == null || !mi.IsGenericMethod) return false; // now we know that both operands are generic methods IRuntimeMethodInfo handle1 = RuntimeMethodHandle.StripMethodInstantiation(this); IRuntimeMethodInfo handle2 = RuntimeMethodHandle.StripMethodInstantiation(mi); if (handle1.Value.Value != handle2.Value.Value) return false; Type[] lhs = GetGenericArguments(); Type[] rhs = mi.GetGenericArguments(); if (lhs.Length != rhs.Length) return false; for (int i = 0; i < lhs.Length; i++) { if (lhs[i] != rhs[i]) return false; } if (DeclaringType != mi.DeclaringType) return false; if (ReflectedType != mi.ReflectedType) return false; return true; } #endregion #region ICustomAttributeProvider public override Object[] GetCustomAttributes(bool inherit) { return CustomAttribute.GetCustomAttributes(this, typeof(object) as RuntimeType as RuntimeType, inherit); } public override Object[] GetCustomAttributes(Type attributeType, bool inherit) { if (attributeType == null) throw new ArgumentNullException(nameof(attributeType)); Contract.EndContractBlock(); RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(SR.Arg_MustBeType, nameof(attributeType)); return CustomAttribute.GetCustomAttributes(this, attributeRuntimeType, inherit); } public override bool IsDefined(Type attributeType, bool inherit) { if (attributeType == null) throw new ArgumentNullException(nameof(attributeType)); Contract.EndContractBlock(); RuntimeType attributeRuntimeType = attributeType.UnderlyingSystemType as RuntimeType; if (attributeRuntimeType == null) throw new ArgumentException(SR.Arg_MustBeType, nameof(attributeType)); return CustomAttribute.IsDefined(this, attributeRuntimeType, inherit); } public override IList<CustomAttributeData> GetCustomAttributesData() { return CustomAttributeData.GetCustomAttributesInternal(this); } #endregion #region MemberInfo Overrides public override String Name { get { if (m_name == null) m_name = RuntimeMethodHandle.GetName(this); return m_name; } } public override Type DeclaringType { get { if (m_reflectedTypeCache.IsGlobal) return null; return m_declaringType; } } public override Type ReflectedType { get { if (m_reflectedTypeCache.IsGlobal) return null; return m_reflectedTypeCache.GetRuntimeType(); } } public override MemberTypes MemberType { get { return MemberTypes.Method; } } public override int MetadataToken { get { return RuntimeMethodHandle.GetMethodDef(this); } } public override Module Module { get { return GetRuntimeModule(); } } internal RuntimeType GetRuntimeType() { return m_declaringType; } internal RuntimeModule GetRuntimeModule() { return m_declaringType.GetRuntimeModule(); } internal RuntimeAssembly GetRuntimeAssembly() { return GetRuntimeModule().GetRuntimeAssembly(); } public override bool IsSecurityCritical { get { return true; } } public override bool IsSecuritySafeCritical { get { return false; } } public override bool IsSecurityTransparent { get { return false; } } #endregion #region MethodBase Overrides internal override ParameterInfo[] GetParametersNoCopy() { FetchNonReturnParameters(); return m_parameters; } [System.Diagnostics.Contracts.Pure] public override ParameterInfo[] GetParameters() { FetchNonReturnParameters(); if (m_parameters.Length == 0) return m_parameters; ParameterInfo[] ret = new ParameterInfo[m_parameters.Length]; Array.Copy(m_parameters, ret, m_parameters.Length); return ret; } public override MethodImplAttributes 
GetMethodImplementationFlags() { return RuntimeMethodHandle.GetImplAttributes(this); } public override RuntimeMethodHandle MethodHandle { get { Type declaringType = DeclaringType; if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType) throw new InvalidOperationException(SR.InvalidOperation_NotAllowedInReflectionOnly); return new RuntimeMethodHandle(this); } } public override MethodAttributes Attributes { get { return m_methodAttributes; } } public override CallingConventions CallingConvention { get { return Signature.CallingConvention; } } public override MethodBody GetMethodBody() { MethodBody mb = RuntimeMethodHandle.GetMethodBody(this, ReflectedTypeInternal); if (mb != null) mb.m_methodBase = this; return mb; } #endregion #region Invocation Logic(On MemberBase) private void CheckConsistency(Object target) { // only test instance methods if ((m_methodAttributes & MethodAttributes.Static) != MethodAttributes.Static) { if (!m_declaringType.IsInstanceOfType(target)) { if (target == null) throw new TargetException(SR.RFLCT_Targ_StatMethReqTarg); else throw new TargetException(SR.RFLCT_Targ_ITargMismatch); } } } private void ThrowNoInvokeException() { // method is ReflectionOnly Type declaringType = DeclaringType; if ((declaringType == null && Module.Assembly.ReflectionOnly) || declaringType is ReflectionOnlyType) { throw new InvalidOperationException(SR.Arg_ReflectionOnlyInvoke); } // method is on a class that contains stack pointers else if ((InvocationFlags & INVOCATION_FLAGS.INVOCATION_FLAGS_CONTAINS_STACK_POINTERS) != 0) { throw new NotSupportedException(); } // method is vararg else if ((CallingConvention & CallingConventions.VarArgs) == CallingConventions.VarArgs) { throw new NotSupportedException(); } // method is generic or on a generic class else if (DeclaringType.ContainsGenericParameters || ContainsGenericParameters) { throw new InvalidOperationException(SR.Arg_UnboundGenParam); } // method is abstract class else if (IsAbstract) { throw new MemberAccessException(); } // ByRef return are not allowed in reflection else if (ReturnType.IsByRef) { throw new NotSupportedException(SR.NotSupported_ByRefReturn); } throw new TargetException(); } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] [System.Security.DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod public override Object Invoke(Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture) { object[] arguments = InvokeArgumentsCheck(obj, invokeAttr, binder, parameters, culture); return UnsafeInvokeInternal(obj, parameters, arguments); } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] internal object UnsafeInvoke(Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture) { object[] arguments = InvokeArgumentsCheck(obj, invokeAttr, binder, parameters, culture); return UnsafeInvokeInternal(obj, parameters, arguments); } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] private object UnsafeInvokeInternal(Object obj, Object[] parameters, Object[] arguments) { if (arguments == null || arguments.Length == 0) return RuntimeMethodHandle.InvokeMethod(obj, null, Signature, false); else { Object retValue = RuntimeMethodHandle.InvokeMethod(obj, arguments, Signature, false); // copy out. This should be made only if ByRef are present. 
for (int index = 0; index < arguments.Length; index++) parameters[index] = arguments[index]; return retValue; } } [DebuggerStepThroughAttribute] [Diagnostics.DebuggerHidden] private object[] InvokeArgumentsCheck(Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture) { Signature sig = Signature; // get the signature int formalCount = sig.Arguments.Length; int actualCount = (parameters != null) ? parameters.Length : 0; INVOCATION_FLAGS invocationFlags = InvocationFlags; // INVOCATION_FLAGS_CONTAINS_STACK_POINTERS means that the struct (either the declaring type or the return type) // contains pointers that point to the stack. This is either a ByRef or a TypedReference. These structs cannot // be boxed and thus cannot be invoked through reflection which only deals with boxed value type objects. if ((invocationFlags & (INVOCATION_FLAGS.INVOCATION_FLAGS_NO_INVOKE | INVOCATION_FLAGS.INVOCATION_FLAGS_CONTAINS_STACK_POINTERS)) != 0) ThrowNoInvokeException(); // check basic method consistency. This call will throw if there are problems in the target/method relationship CheckConsistency(obj); if (formalCount != actualCount) throw new TargetParameterCountException(SR.Arg_ParmCnt); if (actualCount != 0) return CheckArguments(parameters, binder, invokeAttr, culture, sig); else return null; } #endregion #region MethodInfo Overrides public override Type ReturnType { get { return Signature.ReturnType; } } public override ICustomAttributeProvider ReturnTypeCustomAttributes { get { return ReturnParameter; } } public override ParameterInfo ReturnParameter { get { Contract.Ensures(m_returnParameter != null); FetchReturnParameter(); return m_returnParameter as ParameterInfo; } } public override MethodInfo GetBaseDefinition() { if (!IsVirtual || IsStatic || m_declaringType == null || m_declaringType.IsInterface) return this; int slot = RuntimeMethodHandle.GetSlot(this); RuntimeType declaringType = (RuntimeType)DeclaringType; RuntimeType baseDeclaringType = declaringType; RuntimeMethodHandleInternal baseMethodHandle = new RuntimeMethodHandleInternal(); do { int cVtblSlots = RuntimeTypeHandle.GetNumVirtuals(declaringType); if (cVtblSlots <= slot) break; baseMethodHandle = RuntimeTypeHandle.GetMethodAt(declaringType, slot); baseDeclaringType = declaringType; declaringType = (RuntimeType)declaringType.BaseType; } while (declaringType != null); return (MethodInfo)RuntimeType.GetMethodBase(baseDeclaringType, baseMethodHandle); } public override Delegate CreateDelegate(Type delegateType) { StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller; // This API existed in v1/v1.1 and only expected to create closed // instance delegates. Constrain the call to BindToMethodInfo to // open delegates only for backwards compatibility. But we'll allow // relaxed signature checking and open static delegates because // there's no ambiguity there (the caller would have to explicitly // pass us a static method or a method with a non-exact signature // and the only change in behavior from v1.1 there is that we won't // fail the call). return CreateDelegateInternal( delegateType, null, DelegateBindingFlags.OpenDelegateOnly | DelegateBindingFlags.RelaxedSignature, ref stackMark); } public override Delegate CreateDelegate(Type delegateType, Object target) { StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller; // This API is new in Whidbey and allows the full range of delegate // flexability (open or closed delegates binding to static or // instance methods with relaxed signature checking). 
The delegate // can also be closed over null. There's no ambiguity with all these // options since the caller is providing us a specific MethodInfo. return CreateDelegateInternal( delegateType, target, DelegateBindingFlags.RelaxedSignature, ref stackMark); } private Delegate CreateDelegateInternal(Type delegateType, Object firstArgument, DelegateBindingFlags bindingFlags, ref StackCrawlMark stackMark) { // Validate the parameters. if (delegateType == null) throw new ArgumentNullException(nameof(delegateType)); Contract.EndContractBlock(); RuntimeType rtType = delegateType as RuntimeType; if (rtType == null) throw new ArgumentException(SR.Argument_MustBeRuntimeType, nameof(delegateType)); if (!rtType.IsDelegate()) throw new ArgumentException(SR.Arg_MustBeDelegate, nameof(delegateType)); Delegate d = Delegate.CreateDelegateInternal(rtType, this, firstArgument, bindingFlags, ref stackMark); if (d == null) { throw new ArgumentException(SR.Arg_DlgtTargMeth); } return d; } #endregion #region Generics public override MethodInfo MakeGenericMethod(params Type[] methodInstantiation) { if (methodInstantiation == null) throw new ArgumentNullException(nameof(methodInstantiation)); Contract.EndContractBlock(); RuntimeType[] methodInstantionRuntimeType = new RuntimeType[methodInstantiation.Length]; if (!IsGenericMethodDefinition) throw new InvalidOperationException( SR.Format(SR.Arg_NotGenericMethodDefinition, this)); for (int i = 0; i < methodInstantiation.Length; i++) { Type methodInstantiationElem = methodInstantiation[i]; if (methodInstantiationElem == null) throw new ArgumentNullException(); RuntimeType rtMethodInstantiationElem = methodInstantiationElem as RuntimeType; if (rtMethodInstantiationElem == null) { Type[] methodInstantiationCopy = new Type[methodInstantiation.Length]; for (int iCopy = 0; iCopy < methodInstantiation.Length; iCopy++) methodInstantiationCopy[iCopy] = methodInstantiation[iCopy]; methodInstantiation = methodInstantiationCopy; return System.Reflection.Emit.MethodBuilderInstantiation.MakeGenericMethod(this, methodInstantiation); } methodInstantionRuntimeType[i] = rtMethodInstantiationElem; } RuntimeType[] genericParameters = GetGenericArgumentsInternal(); RuntimeType.SanityCheckGenericArguments(methodInstantionRuntimeType, genericParameters); MethodInfo ret = null; try { ret = RuntimeType.GetMethodBase(ReflectedTypeInternal, RuntimeMethodHandle.GetStubIfNeeded(new RuntimeMethodHandleInternal(m_handle), m_declaringType, methodInstantionRuntimeType)) as MethodInfo; } catch (VerificationException e) { RuntimeType.ValidateGenericArguments(this, methodInstantionRuntimeType, e); throw; } return ret; } internal RuntimeType[] GetGenericArgumentsInternal() { return RuntimeMethodHandle.GetMethodInstantiationInternal(this); } public override Type[] GetGenericArguments() { Type[] types = RuntimeMethodHandle.GetMethodInstantiationPublic(this); if (types == null) { types = Array.Empty<Type>(); } return types; } public override MethodInfo GetGenericMethodDefinition() { if (!IsGenericMethod) throw new InvalidOperationException(); Contract.EndContractBlock(); return RuntimeType.GetMethodBase(m_declaringType, RuntimeMethodHandle.StripMethodInstantiation(this)) as MethodInfo; } public override bool IsGenericMethod { get { return RuntimeMethodHandle.HasMethodInstantiation(this); } } public override bool IsGenericMethodDefinition { get { return RuntimeMethodHandle.IsGenericMethodDefinition(this); } } public override bool ContainsGenericParameters { get { if (DeclaringType != null && 
DeclaringType.ContainsGenericParameters) return true; if (!IsGenericMethod) return false; Type[] pis = GetGenericArguments(); for (int i = 0; i < pis.Length; i++) { if (pis[i].ContainsGenericParameters) return true; } return false; } } #endregion #region ISerializable Implementation public void GetObjectData(SerializationInfo info, StreamingContext context) { if (info == null) throw new ArgumentNullException(nameof(info)); Contract.EndContractBlock(); if (m_reflectedTypeCache.IsGlobal) throw new NotSupportedException(SR.NotSupported_GlobalMethodSerialization); MemberInfoSerializationHolder.GetSerializationInfo(info, this); } internal string SerializationToString() { return ReturnType.FormatTypeName(true) + " " + FormatNameAndSig(true); } #endregion #region Legacy Internal internal static MethodBase InternalGetCurrentMethod(ref StackCrawlMark stackMark) { IRuntimeMethodInfo method = RuntimeMethodHandle.GetCurrentMethod(ref stackMark); if (method == null) return null; return RuntimeType.GetMethodBase(method); } #endregion } }
{ "content_hash": "ab355a9c884f7007ef0814155795e81a", "timestamp": "", "source": "github", "line_count": 799, "max_line_length": 160, "avg_line_length": 36.81476846057572, "alnum_prop": 0.585721570627231, "repo_name": "James-Ko/coreclr", "id": "b8a2341e4e85f6bb128c60b900fdda5d3567edac", "size": "29619", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/mscorlib/src/System/Reflection/RuntimeMethodInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "940283" }, { "name": "Awk", "bytes": "5652" }, { "name": "Batchfile", "bytes": "34621" }, { "name": "C", "bytes": "6424424" }, { "name": "C#", "bytes": "118947799" }, { "name": "C++", "bytes": "66297728" }, { "name": "CMake", "bytes": "522350" }, { "name": "Groff", "bytes": "529523" }, { "name": "Groovy", "bytes": "19066" }, { "name": "Makefile", "bytes": "2314" }, { "name": "Objective-C", "bytes": "224503" }, { "name": "Perl", "bytes": "63850" }, { "name": "PowerShell", "bytes": "4332" }, { "name": "Python", "bytes": "8165" }, { "name": "Shell", "bytes": "55499" }, { "name": "Smalltalk", "bytes": "1481952" } ], "symlink_target": "" }
//======================================================================== //Copyright 2007-2010 David Yu [email protected] //------------------------------------------------------------------------ //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at //http://www.apache.org/licenses/LICENSE-2.0 //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. //======================================================================== package com.dyuproject.protostuff; import java.io.IOException; import java.util.Map; /** * Tests for the {@link StringMapSchema} via protobuf. * * @author David Yu * @created Oct 7, 2010 */ public class StringMapSchemaProtobufTest extends StringMapSchemaTest { public <T extends Map<String, String>> void mergeFrom(byte[] data, int offset, int length, T message, Schema<T> schema) throws IOException { ProtobufIOUtil.mergeFrom(data, offset, length, message, schema); } public <T extends Map<String, String>> byte[] toByteArray(T message, Schema<T> schema) throws IOException { return ProtobufIOUtil.toByteArray(message, schema, buf()); } }
{ "content_hash": "b69fb769bd42acb0bafbab42ff49de87", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 94, "avg_line_length": 38.292682926829265, "alnum_prop": 0.6038216560509554, "repo_name": "waahoo/protostuff", "id": "f86190e5e73a7f82b8736aff87d714970bcecce7", "size": "1570", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "protostuff-collectionschema/src/test/java/com/dyuproject/protostuff/StringMapSchemaProtobufTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1212" }, { "name": "GAP", "bytes": "32392" }, { "name": "HTML", "bytes": "4060" }, { "name": "Java", "bytes": "3810948" }, { "name": "JavaScript", "bytes": "34694" }, { "name": "Protocol Buffer", "bytes": "64930" } ], "symlink_target": "" }
# socket.io [![Build Status](https://secure.travis-ci.org/socketio/socket.io.svg?branch=master)](https://travis-ci.org/socketio/socket.io) [![Dependency Status](https://david-dm.org/socketio/socket.io.svg)](https://david-dm.org/socketio/socket.io) [![devDependency Status](https://david-dm.org/socketio/socket.io/dev-status.svg)](https://david-dm.org/socketio/socket.io#info=devDependencies) [![NPM version](https://badge.fury.io/js/socket.io.svg)](https://www.npmjs.com/package/socket.io) ![Downloads](https://img.shields.io/npm/dm/socket.io.svg?style=flat) [![](http://slack.socket.io/badge.svg?)](http://slack.socket.io) ## How to use The following example attaches socket.io to a plain Node.JS HTTP server listening on port `3000`. ```js var server = require('http').createServer(); var io = require('socket.io')(server); io.on('connection', function(client){ client.on('event', function(data){}); client.on('disconnect', function(){}); }); server.listen(3000); ``` ### Standalone ```js var io = require('socket.io')(); io.on('connection', function(client){}); io.listen(3000); ``` ### In conjunction with Express Starting with **3.0**, express applications have become request handler functions that you pass to `http` or `http` `Server` instances. You need to pass the `Server` to `socket.io`, and not the express application function. ```js var app = require('express')(); var server = require('http').createServer(app); var io = require('socket.io')(server); io.on('connection', function(){ /* … */ }); server.listen(3000); ``` ### In conjunction with Koa Like Express.JS, Koa works by exposing an application as a request handler function, but only by calling the `callback` method. ```js var app = require('koa')(); var server = require('http').createServer(app.callback()); var io = require('socket.io')(server); io.on('connection', function(){ /* … */ }); server.listen(3000); ``` ## API ### Server Exposed by `require('socket.io')`. ### Server() Creates a new `Server`. Works with and without `new`: ```js var io = require('socket.io')(); // or var Server = require('socket.io'); var io = new Server(); ``` ### Server(opts:Object) Optionally, the first or second argument (see below) of the `Server` constructor can be an options object. The following options are supported: - `serveClient` sets the value for Server#serveClient() - `path` sets the value for Server#path() The same options passed to socket.io are always passed to the `engine.io` `Server` that gets created. See engine.io [options](https://github.com/socketio/engine.io#methods-1) as reference. ### Server(srv:http#Server, opts:Object) Creates a new `Server` and attaches it to the given `srv`. Optionally `opts` can be passed. ### Server(port:Number, opts:Object) Binds socket.io to a new `http.Server` that listens on `port`. ### Server#serveClient(v:Boolean):Server If `v` is `true` the attached server (see `Server#attach`) will serve the client files. Defaults to `true`. This method has no effect after `attach` is called. ```js // pass a server and the `serveClient` option var io = require('socket.io')(http, { serveClient: false }); // or pass no server and then you can call the method var io = require('socket.io')(); io.serveClient(false); io.attach(http); ``` If no arguments are supplied this method returns the current value. ### Server#path(v:String):Server Sets the path `v` under which `engine.io` and the static files will be served. Defaults to `/socket.io`. If no arguments are supplied this method returns the current value. 
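As an illustrative sketch (the `/myownpath` value and the port are arbitrary), the path can be changed on the server and must then match the `path` option the client uses when connecting:

```js
var io = require('socket.io')();
io.path('/myownpath');
io.attach(3000);
// the client would connect with: io('http://localhost:3000', { path: '/myownpath' })
```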
### Server#adapter(v:Adapter):Server

Sets the adapter `v`. Defaults to an instance of the `Adapter` that ships with socket.io, which is memory based. See [socket.io-adapter](https://github.com/socketio/socket.io-adapter).

If no arguments are supplied this method returns the current value.

### Server#origins(v:String):Server

Sets the allowed origins `v`. Defaults to any origins being allowed.

If no arguments are supplied this method returns the current value.

### Server#origins(v:Function):Server

Sets the allowed origins as a dynamic function. The function takes two arguments, `origin:String` and `callback(error, success)`, where `success` is a boolean value indicating whether the origin is allowed or not.

__Potential drawbacks__:

* in some situations, when it is not possible to determine `origin`, it may have a value of `*`
* as this function will be executed for every request, it is advised to make it work as fast as possible
* if `socket.io` is used together with `Express`, the CORS headers will be affected only for `socket.io` requests. For Express you can use [cors](https://github.com/expressjs/cors).

### Server#sockets:Namespace

The default (`/`) namespace.

### Server#attach(srv:http#Server, opts:Object):Server

Attaches the `Server` to an engine.io instance on `srv` with the supplied `opts` (optionally).

### Server#attach(port:Number, opts:Object):Server

Attaches the `Server` to an engine.io instance that is bound to `port` with the given `opts` (optionally).

### Server#listen

Synonym of `Server#attach`.

### Server#bind(srv:engine#Server):Server

Advanced use only. Binds the server to a specific engine.io `Server` (or compatible API) instance.

### Server#onconnection(socket:engine#Socket):Server

Advanced use only. Creates a new `socket.io` client from the incoming engine.io (or compatible API) `socket`.

### Server#of(nsp:String):Namespace

Initializes and retrieves the given `Namespace` by its pathname identifier `nsp`. If the namespace was already initialized it is returned immediately.

### Server#emit

Emits an event to all connected clients. The following two are equivalent:

```js
var io = require('socket.io')();
io.sockets.emit('an event sent to all connected clients');
io.emit('an event sent to all connected clients');
```

For other available methods, see `Namespace` below.

### Server#close([fn:Function])

Closes the socket.io server. The optional `fn` is passed to the `server.close([callback])` method of the core `net` module and is called on error or when all connections are closed. The callback is expected to implement the common single-argument `err` signature (if any).

```js
var Server = require('socket.io');
var PORT = 3030;
var server = require('http').Server();
var io = Server(PORT);

io.close(); // Close current server

server.listen(PORT); // PORT is free to use

io = Server(server);
```

### Server#use

See `Namespace#use` below.

### Namespace

Represents a pool of sockets connected under a given scope identified by a pathname (eg: `/chat`).

By default the client always connects to `/`.

#### Events

- `connection` / `connect`. Fired upon a connection.

  Parameters:
  - `Socket` the incoming socket.

### Namespace#name:String

The namespace identifier property.

### Namespace#connected:Object<Socket>

Hash of `Socket` objects that are connected to this namespace, indexed by `id`.

### Namespace#clients(fn:Function)

Gets a list of client IDs connected to this namespace (across all nodes if applicable).
An example to get all clients in a namespace:

```js
var io = require('socket.io')();
io.of('/chat').clients(function(error, clients){
  if (error) throw error;
  console.log(clients); // => [PZDoMHjiu8PYfRiKAAAF, Anw2LatarvGVVXEIAAAD]
});
```

An example to get all clients in a namespace's room:

```js
var io = require('socket.io')();
io.of('/chat').in('general').clients(function(error, clients){
  if (error) throw error;
  console.log(clients); // => [Anw2LatarvGVVXEIAAAD]
});
```

As with broadcasting, the default is all clients from the default namespace ('/'):

```js
var io = require('socket.io')();
io.clients(function(error, clients){
  if (error) throw error;
  console.log(clients); // => [6em3d4TJP8Et9EMNAAAA, G5p55dHhGgUnLUctAAAB]
});
```

### Namespace#use(fn:Function):Namespace

Registers a middleware, which is a function that gets executed for every incoming `Socket`, and receives as parameters the socket and a function to optionally defer execution to the next registered middleware.

```js
var io = require('socket.io')();
io.use(function(socket, next){
  if (socket.request.headers.cookie) return next();
  next(new Error('Authentication error'));
});
```

Errors passed to middleware callbacks are sent as special `error` packets to clients.

### Socket

A `Socket` is the fundamental class for interacting with browser clients. A `Socket` belongs to a certain `Namespace` (by default `/`) and uses an underlying `Client` to communicate.

It should be noted that the `Socket` doesn't relate directly to the actual underlying TCP/IP `socket`; it is only the name of the class.

### Socket#use(fn:Function):Socket

Registers a middleware, which is a function that gets executed for every incoming `Packet` and receives as parameters the packet and a function to optionally defer execution to the next registered middleware.

```js
var io = require('socket.io')();
io.on('connection', function(socket){
  socket.use(function(packet, next){
    if (packet.doge === true) return next();
    next(new Error('Not a doge error'));
  });
});
```

Errors passed to middleware callbacks are sent as special `error` packets to clients.

### Socket#rooms:Object

A hash of strings identifying the rooms this client is in, indexed by room name.

### Socket#client:Client

A reference to the underlying `Client` object.

### Socket#conn:Socket

A reference to the underlying `Client` transport connection (engine.io `Socket` object). This allows access to the IO transport layer, which still (mostly) abstracts the actual TCP/IP socket.

### Socket#request:Request

A getter proxy that returns the reference to the `request` that originated the underlying engine.io `Client`. Useful for accessing request headers such as `Cookie` or `User-Agent`.

### Socket#id:String

A unique identifier for the session, which comes from the underlying `Client`.

### Socket#emit(name:String[, …]):Socket

Emits an event identified by the string `name` to the client. Any other parameters can be included. All data structures are supported, including `Buffer`. JavaScript functions can't be serialized/deserialized.

```js
var io = require('socket.io')();
io.on('connection', function(client){
  client.emit('an event', { some: 'data' });
});
```

### Socket#join(name:String[, fn:Function]):Socket

Adds the client to the room `name`, and optionally fires a callback `fn` with `err` signature (if any). The client is automatically a member of a room identified with its session id (see `Socket#id`).
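A minimal sketch (the room name `'room 237'` is arbitrary) of joining a room and then broadcasting to it via the default namespace:

```js
var io = require('socket.io')();
io.on('connection', function(client){
  client.join('room 237', function(err){
    if (err) return;
    // emit to every client that has joined the room, including this one
    io.to('room 237').emit('someone joined the room');
  });
});
```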
The mechanics of joining rooms are handled by the `Adapter` that has been configured (see `Server#adapter` above), defaulting to [socket.io-adapter](https://github.com/socketio/socket.io-adapter).

### Socket#leave(name:String[, fn:Function]):Socket

Removes the client from the room `name`, and optionally fires a callback `fn` with `err` signature (if any).

**Rooms are left automatically upon disconnection**.

The mechanics of leaving rooms are handled by the `Adapter` that has been configured (see `Server#adapter` above), defaulting to [socket.io-adapter](https://github.com/socketio/socket.io-adapter).

### Socket#to(room:String):Socket

Sets a modifier for a subsequent event emission so that the event will only be _broadcast_ to clients that have joined the given `room`. To emit to multiple rooms, you can call `to` several times.

```js
var io = require('socket.io')();
io.on('connection', function(client){
  client.to('others').emit('an event', { some: 'data' });
});
```

### Socket#in(room:String):Socket

Same as `Socket#to`.

### Socket#compress(v:Boolean):Socket

Sets a modifier for a subsequent event emission so that the event data will only be _compressed_ if the value is `true`. Defaults to `true` when you don't call the method.

```js
var io = require('socket.io')();
io.on('connection', function(client){
  client.compress(false).emit('an event', { some: 'data' });
});
```

### Socket#disconnect(close:Boolean):Socket

Disconnects this client. If the value of `close` is `true`, closes the underlying connection. Otherwise, it just disconnects the namespace.

#### Events

- `disconnect` - Fired upon disconnection.
  - **Arguments**
    - `String`: the reason of the disconnection (either client or server-side)
- `error` - Fired when an error occurs.
  - **Arguments**
    - `Object`: error data
- `disconnecting` - Fired when the client is going to be disconnected (but hasn't left its `rooms` yet).
  - **Arguments**
    - `String`: the reason of the disconnection (either client or server-side)

These are reserved events (along with `connect`, `newListener` and `removeListener`) which cannot be used as event names.

### Client

The `Client` class represents an incoming transport (engine.io) connection. A `Client` can be associated with many multiplexed `Socket`s that belong to different `Namespace`s.

### Client#conn

A reference to the underlying `engine.io` `Socket` connection.

### Client#request

A getter proxy that returns the reference to the `request` that originated the engine.io connection. Useful for accessing request headers such as `Cookie` or `User-Agent`.

## Debug / logging

Socket.IO is powered by [debug](https://github.com/visionmedia/debug). In order to see all the debug output, run your app with the environment variable `DEBUG` including the desired scope.

To see the output from all of Socket.IO's debugging scopes you can use:

```
DEBUG=socket.io* node myapp
```

## Testing

```
npm test
```

This runs the `gulp` task `test`. By default the tests will be run with the source code in the `lib` directory.

Set the environment variable `TEST_VERSION` to `compat` to test the transpiled es5-compat version of the code.

The `gulp` task `test` will always transpile the source code into es5 and export it to `dist` first before running the tests.

## License

[MIT](LICENSE)
{ "content_hash": "b4a0fa76add806e0ebbc65c5750787f6", "timestamp": "", "source": "github", "line_count": 489, "max_line_length": 204, "avg_line_length": 30.33128834355828, "alnum_prop": 0.6806229773462783, "repo_name": "byxchen/MC2", "id": "2059adf90341393065bfa4d777a964f7105bc458", "size": "14839", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "node_modules/socket.io/Readme.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "472333" }, { "name": "CoffeeScript", "bytes": "3111" }, { "name": "HTML", "bytes": "1862401" }, { "name": "JavaScript", "bytes": "3925316" }, { "name": "PHP", "bytes": "3859" } ], "symlink_target": "" }
#ifdef __UNIX__ #include <assert.h> #endif #include "stderr.h" #include "blobbox.h" #include "ccstruct.h" #include "detlinefit.h" #include "statistc.h" #include "drawtord.h" #include "blkocc.h" #include "sortflts.h" #include "oldbasel.h" #include "textord.h" #include "tordmain.h" #include "underlin.h" #include "makerow.h" #include "tprintf.h" #include "tovars.h" // Include automatically generated configuration file if running autoconf. #ifdef HAVE_CONFIG_H #include "config_auto.h" #endif BOOL_VAR(textord_heavy_nr, FALSE, "Vigorously remove noise"); BOOL_VAR(textord_show_initial_rows, FALSE, "Display row accumulation"); BOOL_VAR(textord_show_parallel_rows, FALSE, "Display page correlated rows"); BOOL_VAR(textord_show_expanded_rows, FALSE, "Display rows after expanding"); BOOL_VAR(textord_show_final_rows, FALSE, "Display rows after final fitting"); BOOL_VAR(textord_show_final_blobs, FALSE, "Display blob bounds after pre-ass"); BOOL_VAR(textord_test_landscape, FALSE, "Tests refer to land/port"); BOOL_VAR(textord_parallel_baselines, TRUE, "Force parallel baselines"); BOOL_VAR(textord_straight_baselines, FALSE, "Force straight baselines"); BOOL_VAR(textord_old_baselines, TRUE, "Use old baseline algorithm"); BOOL_VAR(textord_old_xheight, FALSE, "Use old xheight algorithm"); BOOL_VAR(textord_fix_xheight_bug, TRUE, "Use spline baseline"); BOOL_VAR(textord_fix_makerow_bug, TRUE, "Prevent multiple baselines"); BOOL_VAR(textord_debug_xheights, FALSE, "Test xheight algorithms"); BOOL_VAR(textord_biased_skewcalc, TRUE, "Bias skew estimates with line length"); BOOL_VAR(textord_interpolating_skew, TRUE, "Interpolate across gaps"); INT_VAR(textord_skewsmooth_offset, 4, "For smooth factor"); INT_VAR(textord_skewsmooth_offset2, 1, "For smooth factor"); INT_VAR(textord_test_x, -MAX_INT32, "coord of test pt"); INT_VAR(textord_test_y, -MAX_INT32, "coord of test pt"); INT_VAR(textord_min_blobs_in_row, 4, "Min blobs before gradient counted"); INT_VAR(textord_spline_minblobs, 8, "Min blobs in each spline segment"); INT_VAR(textord_spline_medianwin, 6, "Size of window for spline segmentation"); INT_VAR(textord_max_blob_overlaps, 4, "Max number of blobs a big blob can overlap"); INT_VAR(textord_min_xheight, 10, "Min credible pixel xheight"); double_VAR(textord_spline_shift_fraction, 0.02, "Fraction of line spacing for quad"); double_VAR(textord_spline_outlier_fraction, 0.1, "Fraction of line spacing for outlier"); double_VAR(textord_skew_ile, 0.5, "Ile of gradients for page skew"); double_VAR(textord_skew_lag, 0.02, "Lag for skew on row accumulation"); double_VAR(textord_linespace_iqrlimit, 0.2, "Max iqr/median for linespace"); double_VAR(textord_width_limit, 8, "Max width of blobs to make rows"); double_VAR(textord_chop_width, 1.5, "Max width before chopping"); double_VAR(textord_expansion_factor, 1.0, "Factor to expand rows by in expand_rows"); double_VAR(textord_overlap_x, 0.375, "Fraction of linespace for good overlap"); double_VAR(textord_minxh, 0.25, "fraction of linesize for min xheight"); double_VAR(textord_min_linesize, 1.25, "* blob height for initial linesize"); double_VAR(textord_excess_blobsize, 1.3, "New row made if blob makes row this big"); double_VAR(textord_occupancy_threshold, 0.4, "Fraction of neighbourhood"); double_VAR(textord_underline_width, 2.0, "Multiple of line_size for underline"); double_VAR(textord_min_blob_height_fraction, 0.75, "Min blob height/top to include blob top into xheight stats"); double_VAR(textord_xheight_mode_fraction, 0.4, "Min pile height to make xheight"); 
double_VAR(textord_ascheight_mode_fraction, 0.08, "Min pile height to make ascheight"); double_VAR(textord_descheight_mode_fraction, 0.08, "Min pile height to make descheight"); double_VAR(textord_ascx_ratio_min, 1.25, "Min cap/xheight"); double_VAR(textord_ascx_ratio_max, 1.8, "Max cap/xheight"); double_VAR(textord_descx_ratio_min, 0.25, "Min desc/xheight"); double_VAR(textord_descx_ratio_max, 0.6, "Max desc/xheight"); double_VAR(textord_xheight_error_margin, 0.1, "Accepted variation"); INT_VAR(textord_lms_line_trials, 12, "Number of linew fits to do"); BOOL_VAR(textord_new_initial_xheight, TRUE, "Use test xheight mechanism"); BOOL_VAR(textord_debug_blob, FALSE, "Print test blob information"); #define MAX_HEIGHT_MODES 12 const int kMinLeaderCount = 5; // Factored-out helper to build a single row from a list of blobs. // Returns the mean blob size. static float MakeRowFromBlobs(float line_size, BLOBNBOX_IT* blob_it, TO_ROW_IT* row_it) { blob_it->sort(blob_x_order); blob_it->move_to_first(); TO_ROW* row = NULL; float total_size = 0.0f; int blob_count = 0; // Add all the blobs to a single TO_ROW. for (; !blob_it->empty(); blob_it->forward()) { BLOBNBOX* blob = blob_it->extract(); int top = blob->bounding_box().top(); int bottom = blob->bounding_box().bottom(); if (row == NULL) { row = new TO_ROW(blob, top, bottom, line_size); row_it->add_before_then_move(row); } else { row->add_blob(blob, top, bottom, line_size); } total_size += top - bottom; ++blob_count; } return blob_count > 0 ? total_size / blob_count : total_size; } // Helper to make a row using the children of a single blob. // Returns the mean size of the blobs created. float MakeRowFromSubBlobs(TO_BLOCK* block, C_BLOB* blob, TO_ROW_IT* row_it) { // The blobs made from the children will go in the small_blobs list. BLOBNBOX_IT bb_it(&block->small_blobs); C_OUTLINE_IT ol_it(blob->out_list()); // Get the children. ol_it.set_to_list(ol_it.data()->child()); if (ol_it.empty()) return 0.0f; for (ol_it.mark_cycle_pt(); !ol_it.cycled_list(); ol_it.forward()) { // Deep copy the child outline and use that to make a blob. C_BLOB* blob = new C_BLOB(C_OUTLINE::deep_copy(ol_it.data())); // Correct direction as needed. blob->CheckInverseFlagAndDirection(); BLOBNBOX* bbox = new BLOBNBOX(blob); bb_it.add_after_then_move(bbox); } // Now we can make a row from the blobs. return MakeRowFromBlobs(block->line_size, &bb_it, row_it); } /** * @name make_single_row * * Arrange the blobs into a single row... well actually, if there is * only a single blob, it makes 2 rows, in case the top-level blob * is a container of the real blobs to recognize. */ float make_single_row(ICOORD page_tr, TO_BLOCK* block, TO_BLOCK_LIST* blocks) { BLOBNBOX_IT blob_it = &block->blobs; TO_ROW_IT row_it = block->get_rows(); // Include all the small blobs and large blobs. blob_it.add_list_after(&block->small_blobs); blob_it.add_list_after(&block->noise_blobs); blob_it.add_list_after(&block->large_blobs); if (block->blobs.singleton()) { blob_it.move_to_first(); float size = MakeRowFromSubBlobs(block, blob_it.data()->cblob(), &row_it); if (size > block->line_size) block->line_size = size; } MakeRowFromBlobs(block->line_size, &blob_it, &row_it); // Fit an LMS line to the rows. for (row_it.mark_cycle_pt(); !row_it.cycled_list(); row_it.forward()) fit_lms_line(row_it.data()); float gradient; float fit_error; // Compute the skew based on the fitted line. compute_page_skew(blocks, gradient, fit_error); return gradient; } /** * @name make_rows * * Arrange the blobs into rows. 
*/ float make_rows(ICOORD page_tr, TO_BLOCK_LIST *port_blocks) { float port_m; // global skew float port_err; // global noise TO_BLOCK_IT block_it; // iterator block_it.set_to_list(port_blocks); for (block_it.mark_cycle_pt(); !block_it.cycled_list(); block_it.forward()) make_initial_textrows(page_tr, block_it.data(), FCOORD(1.0f, 0.0f), !(BOOL8) textord_test_landscape); // compute globally compute_page_skew(port_blocks, port_m, port_err); block_it.set_to_list(port_blocks); for (block_it.mark_cycle_pt(); !block_it.cycled_list(); block_it.forward()) { cleanup_rows_making(page_tr, block_it.data(), port_m, FCOORD(1.0f, 0.0f), block_it.data()->block->bounding_box().left(), !(BOOL8)textord_test_landscape); } return port_m; // global skew } /** * @name make_initial_textrows * * Arrange the good blobs into rows of text. */ void make_initial_textrows( //find lines ICOORD page_tr, TO_BLOCK *block, //block to do FCOORD rotation, //for drawing BOOL8 testing_on //correct orientation ) { TO_ROW_IT row_it = block->get_rows (); #ifndef GRAPHICS_DISABLED ScrollView::Color colour; //of row if (textord_show_initial_rows && testing_on) { if (to_win == NULL) create_to_win(page_tr); } #endif //guess skew assign_blobs_to_rows (block, NULL, 0, TRUE, TRUE, textord_show_initial_rows && testing_on); row_it.move_to_first (); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) fit_lms_line (row_it.data ()); #ifndef GRAPHICS_DISABLED if (textord_show_initial_rows && testing_on) { colour = ScrollView::RED; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { plot_to_row (row_it.data (), colour, rotation); colour = (ScrollView::Color) (colour + 1); if (colour > ScrollView::MAGENTA) colour = ScrollView::RED; } } #endif } /** * @name fit_lms_line * * Fit an LMS line to a row. */ void fit_lms_line(TO_ROW *row) { float m, c; // fitted line tesseract::DetLineFit lms; BLOBNBOX_IT blob_it = row->blob_list(); for (blob_it.mark_cycle_pt(); !blob_it.cycled_list(); blob_it.forward()) { const TBOX& box = blob_it.data()->bounding_box(); lms.Add(ICOORD((box.left() + box.right()) / 2, box.bottom())); } double error = lms.Fit(&m, &c); row->set_line(m, c, error); } /** * @name compute_page_skew * * Compute the skew over a full page by averaging the gradients over * all the lines. Get the error of the same row. */ void compute_page_skew( //get average gradient TO_BLOCK_LIST *blocks, //list of blocks float &page_m, //average gradient float &page_err //average error ) { inT32 row_count; //total rows inT32 blob_count; //total_blobs inT32 row_err; //integer error float *gradients; //of rows float *errors; //of rows inT32 row_index; //of total TO_ROW *row; //current row TO_BLOCK_IT block_it = blocks; //iterator TO_ROW_IT row_it; row_count = 0; blob_count = 0; for (block_it.mark_cycle_pt (); !block_it.cycled_list (); block_it.forward ()) { POLY_BLOCK* pb = block_it.data()->block->poly_block(); if (pb != NULL && !pb->IsText()) continue; // Pretend non-text blocks don't exist. 
row_count += block_it.data ()->get_rows ()->length (); //count up rows row_it.set_to_list (block_it.data ()->get_rows ()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) blob_count += row_it.data ()->blob_list ()->length (); } if (row_count == 0) { page_m = 0.0f; page_err = 0.0f; return; } gradients = (float *) alloc_mem (blob_count * sizeof (float)); //get mem errors = (float *) alloc_mem (blob_count * sizeof (float)); if (gradients == NULL || errors == NULL) MEMORY_OUT.error ("compute_page_skew", ABORT, NULL); row_index = 0; for (block_it.mark_cycle_pt (); !block_it.cycled_list (); block_it.forward ()) { POLY_BLOCK* pb = block_it.data()->block->poly_block(); if (pb != NULL && !pb->IsText()) continue; // Pretend non-text blocks don't exist. row_it.set_to_list (block_it.data ()->get_rows ()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row = row_it.data (); blob_count = row->blob_list ()->length (); row_err = (inT32) ceil (row->line_error ()); if (row_err <= 0) row_err = 1; if (textord_biased_skewcalc) { blob_count /= row_err; for (blob_count /= row_err; blob_count > 0; blob_count--) { gradients[row_index] = row->line_m (); errors[row_index] = row->line_error (); row_index++; } } else if (blob_count >= textord_min_blobs_in_row) { //get gradient gradients[row_index] = row->line_m (); errors[row_index] = row->line_error (); row_index++; } } } if (row_index == 0) { //desperate for (block_it.mark_cycle_pt (); !block_it.cycled_list (); block_it.forward ()) { POLY_BLOCK* pb = block_it.data()->block->poly_block(); if (pb != NULL && !pb->IsText()) continue; // Pretend non-text blocks don't exist. row_it.set_to_list (block_it.data ()->get_rows ()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row = row_it.data (); gradients[row_index] = row->line_m (); errors[row_index] = row->line_error (); row_index++; } } } row_count = row_index; row_index = choose_nth_item ((inT32) (row_count * textord_skew_ile), gradients, row_count); page_m = gradients[row_index]; row_index = choose_nth_item ((inT32) (row_count * textord_skew_ile), errors, row_count); page_err = errors[row_index]; free_mem(gradients); free_mem(errors); } const double kNoiseSize = 0.5; // Fraction of xheight. const int kMinSize = 8; // Min pixels to be xheight. /** * Return true if the dot looks like it is part of the i. * Doesn't work for any other diacritical. */ static bool dot_of_i(BLOBNBOX* dot, BLOBNBOX* i, TO_ROW* row) { const TBOX& ibox = i->bounding_box(); const TBOX& dotbox = dot->bounding_box(); // Must overlap horizontally by enough and be high enough. int overlap = MIN(dotbox.right(), ibox.right()) - MAX(dotbox.left(), ibox.left()); if (ibox.height() <= 2 * dotbox.height() || (overlap * 2 < ibox.width() && overlap < dotbox.width())) return false; // If the i is tall and thin then it is good. if (ibox.height() > ibox.width() * 2) return true; // The i or ! must be tall and thin. // It might still be tall and thin, but it might be joined to something. // So search the outline for a piece of large height close to the edges // of the dot. 
const double kHeightFraction = 0.6; double target_height = MIN(dotbox.bottom(), ibox.top()); target_height -= row->line_m()*dotbox.left() + row->line_c(); target_height *= kHeightFraction; int left_min = dotbox.left() - dotbox.width(); int middle = (dotbox.left() + dotbox.right())/2; int right_max = dotbox.right() + dotbox.width(); int left_miny = 0; int left_maxy = 0; int right_miny = 0; int right_maxy = 0; bool found_left = false; bool found_right = false; bool in_left = false; bool in_right = false; C_BLOB* blob = i->cblob(); C_OUTLINE_IT o_it = blob->out_list(); for (o_it.mark_cycle_pt(); !o_it.cycled_list(); o_it.forward()) { C_OUTLINE* outline = o_it.data(); int length = outline->pathlength(); ICOORD pos = outline->start_pos(); for (int step = 0; step < length; pos += outline->step(step++)) { int x = pos.x(); int y = pos.y(); if (x >= left_min && x < middle && !found_left) { // We are in the left part so find min and max y. if (in_left) { if (y > left_maxy) left_maxy = y; if (y < left_miny) left_miny = y; } else { left_maxy = left_miny = y; in_left = true; } } else if (in_left) { // We just left the left so look for size. if (left_maxy - left_miny > target_height) { if (found_right) return true; found_left = true; } in_left = false; } if (x <= right_max && x > middle && !found_right) { // We are in the right part so find min and max y. if (in_right) { if (y > right_maxy) right_maxy = y; if (y < right_miny) right_miny = y; } else { right_maxy = right_miny = y; in_right = true; } } else if (in_right) { // We just left the right so look for size. if (right_maxy - right_miny > target_height) { if (found_left) return true; found_right = true; } in_right = false; } } } return false; } void vigorous_noise_removal(TO_BLOCK* block) { TO_ROW_IT row_it = block->get_rows (); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { TO_ROW* row = row_it.data(); BLOBNBOX_IT b_it = row->blob_list(); // Estimate the xheight on the row. int max_height = 0; for (b_it.mark_cycle_pt(); !b_it.cycled_list(); b_it.forward()) { BLOBNBOX* blob = b_it.data(); if (blob->bounding_box().height() > max_height) max_height = blob->bounding_box().height(); } STATS hstats(0, max_height + 1); for (b_it.mark_cycle_pt(); !b_it.cycled_list(); b_it.forward()) { BLOBNBOX* blob = b_it.data(); int height = blob->bounding_box().height(); if (height >= kMinSize) hstats.add(blob->bounding_box().height(), 1); } float xheight = hstats.median(); // Delete small objects. BLOBNBOX* prev = NULL; for (b_it.mark_cycle_pt(); !b_it.cycled_list(); b_it.forward()) { BLOBNBOX* blob = b_it.data(); const TBOX& box = blob->bounding_box(); if (box.height() < kNoiseSize * xheight) { // Small so delete unless it looks like an i dot. if (prev != NULL) { if (dot_of_i(blob, prev, row)) continue; // Looks OK. } if (!b_it.at_last()) { BLOBNBOX* next = b_it.data_relative(1); if (dot_of_i(blob, next, row)) continue; // Looks OK. } // It might be noise so get rid of it. if (blob->cblob() != NULL) delete blob->cblob(); delete b_it.extract(); } else { prev = blob; } } } } /** * cleanup_rows_making * * Remove overlapping rows and fit all the blobs to what's left. 
*/ void cleanup_rows_making( //find lines ICOORD page_tr, //top right TO_BLOCK *block, //block to do float gradient, //gradient to fit FCOORD rotation, //for drawing inT32 block_edge, //edge of block BOOL8 testing_on //correct orientation ) { //iterators BLOBNBOX_IT blob_it = &block->blobs; TO_ROW_IT row_it = block->get_rows (); #ifndef GRAPHICS_DISABLED if (textord_show_parallel_rows && testing_on) { if (to_win == NULL) create_to_win(page_tr); } #endif //get row coords fit_parallel_rows(block, gradient, rotation, block_edge, textord_show_parallel_rows &&testing_on); delete_non_dropout_rows(block, gradient, rotation, block_edge, textord_show_parallel_rows &&testing_on); expand_rows(page_tr, block, gradient, rotation, block_edge, testing_on); blob_it.set_to_list (&block->blobs); row_it.set_to_list (block->get_rows ()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) blob_it.add_list_after (row_it.data ()->blob_list ()); //give blobs back assign_blobs_to_rows (block, &gradient, 1, FALSE, FALSE, FALSE); //now new rows must be genuine blob_it.set_to_list (&block->blobs); blob_it.add_list_after (&block->large_blobs); assign_blobs_to_rows (block, &gradient, 2, TRUE, TRUE, FALSE); //safe to use big ones now blob_it.set_to_list (&block->blobs); //throw all blobs in blob_it.add_list_after (&block->noise_blobs); blob_it.add_list_after (&block->small_blobs); assign_blobs_to_rows (block, &gradient, 3, FALSE, FALSE, FALSE); } /** * delete_non_dropout_rows * * Compute the linespacing and offset. */ void delete_non_dropout_rows( //find lines TO_BLOCK *block, //block to do float gradient, //global skew FCOORD rotation, //deskew vector inT32 block_edge, //left edge BOOL8 testing_on //correct orientation ) { TBOX block_box; //deskewed block inT32 *deltas; //change in occupation inT32 *occupation; //of pixel coords inT32 max_y; //in block inT32 min_y; inT32 line_index; //of scan line inT32 line_count; //no of scan lines inT32 distance; //to drop-out inT32 xleft; //of block inT32 ybottom; //of block TO_ROW *row; //current row TO_ROW_IT row_it = block->get_rows (); BLOBNBOX_IT blob_it = &block->blobs; if (row_it.length () == 0) return; //empty block block_box = deskew_block_coords (block, gradient); xleft = block->block->bounding_box ().left (); ybottom = block->block->bounding_box ().bottom (); min_y = block_box.bottom () - 1; max_y = block_box.top () + 1; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { line_index = (inT32) floor (row_it.data ()->intercept ()); if (line_index <= min_y) min_y = line_index - 1; if (line_index >= max_y) max_y = line_index + 1; } line_count = max_y - min_y + 1; if (line_count <= 0) return; //empty block deltas = (inT32 *) alloc_mem (line_count * sizeof (inT32)); occupation = (inT32 *) alloc_mem (line_count * sizeof (inT32)); if (deltas == NULL || occupation == NULL) MEMORY_OUT.error ("compute_line_spacing", ABORT, NULL); compute_line_occupation(block, gradient, min_y, max_y, occupation, deltas); compute_occupation_threshold ((inT32) ceil (block->line_spacing * (tesseract::CCStruct::kDescenderFraction + tesseract::CCStruct::kAscenderFraction)), (inT32) ceil (block->line_spacing * (tesseract::CCStruct::kXHeightFraction + tesseract::CCStruct::kAscenderFraction)), max_y - min_y + 1, occupation, deltas); #ifndef GRAPHICS_DISABLED if (testing_on) { draw_occupation(xleft, ybottom, min_y, max_y, occupation, deltas); } #endif compute_dropout_distances(occupation, deltas, line_count); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); 
row_it.forward ()) { row = row_it.data (); line_index = (inT32) floor (row->intercept ()); distance = deltas[line_index - min_y]; if (find_best_dropout_row (row, distance, block->line_spacing / 2, line_index, &row_it, testing_on)) { #ifndef GRAPHICS_DISABLED if (testing_on) plot_parallel_row(row, gradient, block_edge, ScrollView::WHITE, rotation); #endif blob_it.add_list_after (row_it.data ()->blob_list ()); delete row_it.extract (); //too far away } } for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { blob_it.add_list_after (row_it.data ()->blob_list ()); } free_mem(deltas); free_mem(occupation); } /** * @name find_best_dropout_row * * Delete this row if it has a neighbour with better dropout characteristics. * TRUE is returned if the row should be deleted. */ BOOL8 find_best_dropout_row( //find neighbours TO_ROW *row, //row to test inT32 distance, //dropout dist float dist_limit, //threshold distance inT32 line_index, //index of row TO_ROW_IT *row_it, //current position BOOL8 testing_on //correct orientation ) { inT32 next_index; //of neigbouring row inT32 row_offset; //from current row inT32 abs_dist; //absolute distance inT8 row_inc; //increment to row_index TO_ROW *next_row; //nextious row if (testing_on) tprintf ("Row at %g(%g), dropout dist=%d,", row->intercept (), row->parallel_c (), distance); if (distance < 0) { row_inc = 1; abs_dist = -distance; } else { row_inc = -1; abs_dist = distance; } if (abs_dist > dist_limit) { if (testing_on) { tprintf (" too far - deleting\n"); } return TRUE; } if ((distance < 0 && !row_it->at_last ()) || (distance >= 0 && !row_it->at_first ())) { row_offset = row_inc; do { next_row = row_it->data_relative (row_offset); next_index = (inT32) floor (next_row->intercept ()); if ((distance < 0 && next_index < line_index && next_index > line_index + distance + distance) || (distance >= 0 && next_index > line_index && next_index < line_index + distance + distance)) { if (testing_on) { tprintf (" nearer neighbour (%d) at %g\n", line_index + distance - next_index, next_row->intercept ()); } return TRUE; //other is nearer } else if (next_index == line_index || next_index == line_index + distance + distance) { if (row->believability () <= next_row->believability ()) { if (testing_on) { tprintf (" equal but more believable at %g (%g/%g)\n", next_row->intercept (), row->believability (), next_row->believability ()); } return TRUE; //other is more believable } } row_offset += row_inc; } while ((next_index == line_index || next_index == line_index + distance + distance) && row_offset < row_it->length ()); if (testing_on) tprintf (" keeping\n"); } return FALSE; } /** * @name deskew_block_coords * * Compute the bounding box of all the blobs in the block * if they were deskewed without actually doing it. 
*/ TBOX deskew_block_coords( //block box TO_BLOCK *block, //block to do float gradient //global skew ) { TBOX result; //block bounds TBOX blob_box; //of block FCOORD rotation; //deskew vector float length; //of gradient vector TO_ROW_IT row_it = block->get_rows (); TO_ROW *row; //current row BLOBNBOX *blob; //current blob BLOBNBOX_IT blob_it; //iterator length = sqrt (gradient * gradient + 1); rotation = FCOORD (1 / length, -gradient / length); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row = row_it.data (); blob_it.set_to_list (row->blob_list ()); for (blob_it.mark_cycle_pt (); !blob_it.cycled_list (); blob_it.forward ()) { blob = blob_it.data (); blob_box = blob->bounding_box (); blob_box.rotate (rotation);//de-skew it result += blob_box; } } return result; } /** * @name compute_line_occupation * * Compute the pixel projection back on the y axis given the global * skew. Also compute the 1st derivative. */ void compute_line_occupation( //project blobs TO_BLOCK *block, //block to do float gradient, //global skew inT32 min_y, //min coord in block inT32 max_y, //in block inT32 *occupation, //output projection inT32 *deltas //derivative ) { inT32 line_count; //maxy-miny+1 inT32 line_index; //of scan line int index; //array index for daft compilers float top, bottom; //coords of blob inT32 width; //of blob TO_ROW *row; //current row TO_ROW_IT row_it = block->get_rows (); BLOBNBOX *blob; //current blob BLOBNBOX_IT blob_it; //iterator float length; //of skew vector TBOX blob_box; //bounding box FCOORD rotation; //inverse of skew line_count = max_y - min_y + 1; length = sqrt (gradient * gradient + 1); rotation = FCOORD (1 / length, -gradient / length); for (line_index = 0; line_index < line_count; line_index++) deltas[line_index] = 0; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row = row_it.data (); blob_it.set_to_list (row->blob_list ()); for (blob_it.mark_cycle_pt (); !blob_it.cycled_list (); blob_it.forward ()) { blob = blob_it.data (); blob_box = blob->bounding_box (); blob_box.rotate (rotation);//de-skew it top = blob_box.top (); bottom = blob_box.bottom (); width = (inT32) floor ((FLOAT32) (blob_box.right () - blob_box.left ())); if ((inT32) floor (bottom) < min_y || (inT32) floor (bottom) - min_y >= line_count) fprintf (stderr, "Bad y coord of bottom, " INT32FORMAT "(" INT32FORMAT "," INT32FORMAT ")\n", (inT32) floor (bottom), min_y, max_y); //count transitions index = (inT32) floor (bottom) - min_y; deltas[index] += width; if ((inT32) floor (top) < min_y || (inT32) floor (top) - min_y >= line_count) fprintf (stderr, "Bad y coord of top, " INT32FORMAT "(" INT32FORMAT "," INT32FORMAT ")\n", (inT32) floor (top), min_y, max_y); index = (inT32) floor (top) - min_y; deltas[index] -= width; } } occupation[0] = deltas[0]; for (line_index = 1; line_index < line_count; line_index++) occupation[line_index] = occupation[line_index - 1] + deltas[line_index]; } /** * compute_occupation_threshold * * Compute thresholds for textline or not for the occupation array. 
*/ void compute_occupation_threshold( //project blobs inT32 low_window, //below result point inT32 high_window, //above result point inT32 line_count, //array sizes inT32 *occupation, //input projection inT32 *thresholds //output thresholds ) { inT32 line_index; //of thresholds line inT32 low_index; //in occupation inT32 high_index; //in occupation inT32 sum; //current average inT32 divisor; //to get thresholds inT32 min_index; //of min occ inT32 min_occ; //min in locality inT32 test_index; //for finding min divisor = (inT32) ceil ((low_window + high_window) / textord_occupancy_threshold); if (low_window + high_window < line_count) { for (sum = 0, high_index = 0; high_index < low_window; high_index++) sum += occupation[high_index]; for (low_index = 0; low_index < high_window; low_index++, high_index++) sum += occupation[high_index]; min_occ = occupation[0]; min_index = 0; for (test_index = 1; test_index < high_index; test_index++) { if (occupation[test_index] <= min_occ) { min_occ = occupation[test_index]; min_index = test_index; //find min in region } } for (line_index = 0; line_index < low_window; line_index++) thresholds[line_index] = (sum - min_occ) / divisor + min_occ; //same out to end for (low_index = 0; high_index < line_count; low_index++, high_index++) { sum -= occupation[low_index]; sum += occupation[high_index]; if (occupation[high_index] <= min_occ) { //find min in region min_occ = occupation[high_index]; min_index = high_index; } //lost min from region if (min_index <= low_index) { min_occ = occupation[low_index + 1]; min_index = low_index + 1; for (test_index = low_index + 2; test_index <= high_index; test_index++) { if (occupation[test_index] <= min_occ) { min_occ = occupation[test_index]; //find min in region min_index = test_index; } } } thresholds[line_index++] = (sum - min_occ) / divisor + min_occ; } } else { min_occ = occupation[0]; min_index = 0; for (sum = 0, low_index = 0; low_index < line_count; low_index++) { if (occupation[low_index] < min_occ) { min_occ = occupation[low_index]; min_index = low_index; } sum += occupation[low_index]; } line_index = 0; } for (; line_index < line_count; line_index++) thresholds[line_index] = (sum - min_occ) / divisor + min_occ; //same out to end } /** * @name compute_dropout_distances * * Compute the distance from each coordinate to the nearest dropout. */ void compute_dropout_distances( //project blobs inT32 *occupation, //input projection inT32 *thresholds, //output thresholds inT32 line_count //array sizes ) { inT32 line_index; //of thresholds line inT32 distance; //from prev dropout inT32 next_dist; //to next dropout inT32 back_index; //for back filling inT32 prev_threshold; //before overwrite distance = -line_count; line_index = 0; do { do { distance--; prev_threshold = thresholds[line_index]; //distance from prev thresholds[line_index] = distance; line_index++; } while (line_index < line_count && (occupation[line_index] < thresholds[line_index] || occupation[line_index - 1] >= prev_threshold)); if (line_index < line_count) { back_index = line_index - 1; next_dist = 1; while (next_dist < -distance && back_index >= 0) { thresholds[back_index] = next_dist; back_index--; next_dist++; distance++; } distance = 1; } } while (line_index < line_count); } /** * @name expand_rows * * Expand each row to the least of its allowed size and touching its * neighbours. If the expansion would entirely swallow a neighbouring row * then do so. 
*/ void expand_rows( //find lines ICOORD page_tr, //top right TO_BLOCK *block, //block to do float gradient, //gradient to fit FCOORD rotation, //for drawing inT32 block_edge, //edge of block BOOL8 testing_on //correct orientation ) { BOOL8 swallowed_row; //eaten a neighbour float y_max, y_min; //new row limits float y_bottom, y_top; //allowed limits TO_ROW *test_row; //next row TO_ROW *row; //current row //iterators BLOBNBOX_IT blob_it = &block->blobs; TO_ROW_IT row_it = block->get_rows (); #ifndef GRAPHICS_DISABLED if (textord_show_expanded_rows && testing_on) { if (to_win == NULL) create_to_win(page_tr); } #endif adjust_row_limits(block); //shift min,max. if (textord_new_initial_xheight) { if (block->get_rows ()->length () == 0) return; compute_row_stats(block, textord_show_expanded_rows &&testing_on); } assign_blobs_to_rows (block, &gradient, 4, TRUE, FALSE, FALSE); //get real membership if (block->get_rows ()->length () == 0) return; fit_parallel_rows(block, gradient, rotation, block_edge, textord_show_expanded_rows &&testing_on); if (!textord_new_initial_xheight) compute_row_stats(block, textord_show_expanded_rows &&testing_on); row_it.move_to_last (); do { row = row_it.data (); y_max = row->max_y (); //get current limits y_min = row->min_y (); y_bottom = row->intercept () - block->line_size * textord_expansion_factor * tesseract::CCStruct::kDescenderFraction; y_top = row->intercept () + block->line_size * textord_expansion_factor * (tesseract::CCStruct::kXHeightFraction + tesseract::CCStruct::kAscenderFraction); if (y_min > y_bottom) { //expansion allowed if (textord_show_expanded_rows && testing_on) tprintf("Expanding bottom of row at %f from %f to %f\n", row->intercept(), y_min, y_bottom); //expandable swallowed_row = TRUE; while (swallowed_row && !row_it.at_last ()) { swallowed_row = FALSE; //get next one test_row = row_it.data_relative (1); //overlaps space if (test_row->max_y () > y_bottom) { if (test_row->min_y () > y_bottom) { if (textord_show_expanded_rows && testing_on) tprintf("Eating row below at %f\n", test_row->intercept()); row_it.forward (); #ifndef GRAPHICS_DISABLED if (textord_show_expanded_rows && testing_on) plot_parallel_row(test_row, gradient, block_edge, ScrollView::WHITE, rotation); #endif blob_it.set_to_list (row->blob_list ()); blob_it.add_list_after (test_row->blob_list ()); //swallow complete row delete row_it.extract (); row_it.backward (); swallowed_row = TRUE; } else if (test_row->max_y () < y_min) { //shorter limit y_bottom = test_row->max_y (); if (textord_show_expanded_rows && testing_on) tprintf("Truncating limit to %f due to touching row at %f\n", y_bottom, test_row->intercept()); } else { y_bottom = y_min; //can't expand it if (textord_show_expanded_rows && testing_on) tprintf("Not expanding limit beyond %f due to touching row at %f\n", y_bottom, test_row->intercept()); } } } y_min = y_bottom; //expand it } if (y_max < y_top) { //expansion allowed if (textord_show_expanded_rows && testing_on) tprintf("Expanding top of row at %f from %f to %f\n", row->intercept(), y_max, y_top); swallowed_row = TRUE; while (swallowed_row && !row_it.at_first ()) { swallowed_row = FALSE; //get one above test_row = row_it.data_relative (-1); if (test_row->min_y () < y_top) { if (test_row->max_y () < y_top) { if (textord_show_expanded_rows && testing_on) tprintf("Eating row above at %f\n", test_row->intercept()); row_it.backward (); blob_it.set_to_list (row->blob_list ()); #ifndef GRAPHICS_DISABLED if (textord_show_expanded_rows && testing_on) 
plot_parallel_row(test_row, gradient, block_edge, ScrollView::WHITE, rotation); #endif blob_it.add_list_after (test_row->blob_list ()); //swallow complete row delete row_it.extract (); row_it.forward (); swallowed_row = TRUE; } else if (test_row->min_y () < y_max) { //shorter limit y_top = test_row->min_y (); if (textord_show_expanded_rows && testing_on) tprintf("Truncating limit to %f due to touching row at %f\n", y_top, test_row->intercept()); } else { y_top = y_max; //can't expand it if (textord_show_expanded_rows && testing_on) tprintf("Not expanding limit beyond %f due to touching row at %f\n", y_top, test_row->intercept()); } } } y_max = y_top; } //new limits row->set_limits (y_min, y_max); row_it.backward (); } while (!row_it.at_last ()); } /** * adjust_row_limits * * Change the limits of rows to suit the default fractions. */ void adjust_row_limits( //tidy limits TO_BLOCK *block //block to do ) { TO_ROW *row; //current row float size; //size of row float ymax; //top of row float ymin; //bottom of row TO_ROW_IT row_it = block->get_rows (); if (textord_show_expanded_rows) tprintf("Adjusting row limits for block(%d,%d)\n", block->block->bounding_box().left(), block->block->bounding_box().top()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row = row_it.data (); size = row->max_y () - row->min_y (); if (textord_show_expanded_rows) tprintf("Row at %f has min %f, max %f, size %f\n", row->intercept(), row->min_y(), row->max_y(), size); size /= tesseract::CCStruct::kXHeightFraction + tesseract::CCStruct::kAscenderFraction + tesseract::CCStruct::kDescenderFraction; ymax = size * (tesseract::CCStruct::kXHeightFraction + tesseract::CCStruct::kAscenderFraction); ymin = -size * tesseract::CCStruct::kDescenderFraction; row->set_limits (row->intercept () + ymin, row->intercept () + ymax); row->merged = FALSE; } } /** * @name compute_row_stats * * Compute the linespacing and offset. 
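 * The median inter-row spacing is adopted as the block line spacing only when the inter-quartile range of the spacings is small relative to it.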
*/ void compute_row_stats( //find lines TO_BLOCK *block, //block to do BOOL8 testing_on //correct orientation ) { inT32 row_index; //of median TO_ROW *row; //current row TO_ROW *prev_row; //previous row float iqr; //inter quartile range TO_ROW_IT row_it = block->get_rows (); //number of rows inT16 rowcount = row_it.length (); TO_ROW **rows; //for choose nth rows = (TO_ROW **) alloc_mem (rowcount * sizeof (TO_ROW *)); if (rows == NULL) MEMORY_OUT.error ("compute_row_stats", ABORT, NULL); rowcount = 0; prev_row = NULL; row_it.move_to_last (); //start at bottom do { row = row_it.data (); if (prev_row != NULL) { rows[rowcount++] = prev_row; prev_row->spacing = row->intercept () - prev_row->intercept (); if (testing_on) tprintf ("Row at %g yields spacing of %g\n", row->intercept (), prev_row->spacing); } prev_row = row; row_it.backward (); } while (!row_it.at_last ()); block->key_row = prev_row; block->baseline_offset = fmod (prev_row->parallel_c (), block->line_spacing); if (testing_on) tprintf ("Blob based spacing=(%g,%g), offset=%g", block->line_size, block->line_spacing, block->baseline_offset); if (rowcount > 0) { row_index = choose_nth_item (rowcount * 3 / 4, rows, rowcount, sizeof (TO_ROW *), row_spacing_order); iqr = rows[row_index]->spacing; row_index = choose_nth_item (rowcount / 4, rows, rowcount, sizeof (TO_ROW *), row_spacing_order); iqr -= rows[row_index]->spacing; row_index = choose_nth_item (rowcount / 2, rows, rowcount, sizeof (TO_ROW *), row_spacing_order); block->key_row = rows[row_index]; if (testing_on) tprintf (" row based=%g(%g)", rows[row_index]->spacing, iqr); if (rowcount > 2 && iqr < rows[row_index]->spacing * textord_linespace_iqrlimit) { if (!textord_new_initial_xheight) { if (rows[row_index]->spacing < block->line_spacing && rows[row_index]->spacing > block->line_size) //within range block->line_size = rows[row_index]->spacing; //spacing=size else if (rows[row_index]->spacing > block->line_spacing) block->line_size = block->line_spacing; //too big so use max } else { if (rows[row_index]->spacing < block->line_spacing) block->line_size = rows[row_index]->spacing; else block->line_size = block->line_spacing; //too big so use max } if (block->line_size < textord_min_xheight) block->line_size = (float) textord_min_xheight; block->line_spacing = rows[row_index]->spacing; block->max_blob_size = block->line_spacing * textord_excess_blobsize; } block->baseline_offset = fmod (rows[row_index]->intercept (), block->line_spacing); } if (testing_on) tprintf ("\nEstimate line size=%g, spacing=%g, offset=%g\n", block->line_size, block->line_spacing, block->baseline_offset); free_mem(rows); } /** * @name compute_block_xheight * * Compute the xheight of the individual rows, then correlate them * and interpret ascenderless lines, correcting xheights. * * First we compute our best guess of the x-height of each row independently * with compute_row_xheight(), which looks for a pair of commonly occurring * heights that could be x-height and ascender height. This function also * attempts to find descenders of lowercase letters (i.e. not the small * descenders that could appear in upper case letters as Q,J). 
* * After this computation each row falls into one of the following categories: * ROW_ASCENDERS_FOUND: we found xheight and ascender modes, so this must be * a regular row; we'll use its xheight to compute * xheight and ascrise estimates for the block * ROW_DESCENDERS_FOUND: no ascenders, so we do not have a high confidence in * the xheight of this row (don't use it for estimating * block xheight), but this row can't contain all caps * ROW_UNKNOWN: a row with no ascenders/descenders, could be all lowercase * (or mostly lowercase for fonts with very few ascenders), * all upper case or small caps * ROW_INVALID: no meaningful xheight could be found for this row * * We then run correct_row_xheight() and use the computed xheight and ascrise * averages to correct xheight values of the rows in ROW_DESCENDERS_FOUND, * ROW_UNKNOWN and ROW_INVALID categories. * */ namespace tesseract { void Textord::compute_block_xheight(TO_BLOCK *block, float gradient) { TO_ROW *row; // current row float asc_frac_xheight = CCStruct::kAscenderFraction / CCStruct::kXHeightFraction; float desc_frac_xheight = CCStruct::kDescenderFraction / CCStruct::kXHeightFraction; inT32 min_height, max_height; // limits on xheight TO_ROW_IT row_it = block->get_rows(); if (row_it.empty()) return; // no rows // Compute the best guess of xheight of each row individually. // Use xheight and ascrise values of the rows where ascenders were found. get_min_max_xheight(block->line_size, &min_height, &max_height); STATS row_asc_xheights(min_height, max_height + 1); STATS row_asc_ascrise(static_cast<int>(min_height * asc_frac_xheight), static_cast<int>(max_height * asc_frac_xheight) + 1); int min_desc_height = static_cast<int>(min_height * desc_frac_xheight); int max_desc_height = static_cast<int>(max_height * desc_frac_xheight); STATS row_asc_descdrop(min_desc_height, max_desc_height + 1); STATS row_desc_xheights(min_height, max_height + 1); STATS row_desc_descdrop(min_desc_height, max_desc_height + 1); STATS row_cap_xheights(min_height, max_height + 1); STATS row_cap_floating_xheights(min_height, max_height + 1); for (row_it.mark_cycle_pt(); !row_it.cycled_list(); row_it.forward()) { row = row_it.data(); // Compute the xheight of this row if it has not been computed before. if (row->xheight <= 0.0) { compute_row_xheight(row, block->block->classify_rotation(), gradient, block->line_size); } ROW_CATEGORY row_category = get_row_category(row); if (row_category == ROW_ASCENDERS_FOUND) { row_asc_xheights.add(static_cast<inT32>(row->xheight), row->xheight_evidence); row_asc_ascrise.add(static_cast<inT32>(row->ascrise), row->xheight_evidence); row_asc_descdrop.add(static_cast<inT32>(-row->descdrop), row->xheight_evidence); } else if (row_category == ROW_DESCENDERS_FOUND) { row_desc_xheights.add(static_cast<inT32>(row->xheight), row->xheight_evidence); row_desc_descdrop.add(static_cast<inT32>(-row->descdrop), row->xheight_evidence); } else if (row_category == ROW_UNKNOWN) { fill_heights(row, gradient, min_height, max_height, &row_cap_xheights, &row_cap_floating_xheights); } } float xheight = 0.0; float ascrise = 0.0; float descdrop = 0.0; // Compute our best guess of xheight of this block. if (row_asc_xheights.get_total() > 0) { // Determine xheight from rows where ascenders were found. xheight = row_asc_xheights.median(); ascrise = row_asc_ascrise.median(); descdrop = -row_asc_descdrop.median(); } else if (row_desc_xheights.get_total() > 0) { // Determine xheight from rows where descenders were found. 
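      // No ascenders were found for these rows, so ascrise stays 0 here and is filled in below from xheight via asc_frac_xheight.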
xheight = row_desc_xheights.median(); descdrop = -row_desc_descdrop.median(); } else if (row_cap_xheights.get_total() > 0) { // All the rows in the block were (a/de)scenderless. // Try to search for two modes in row_cap_heights that could // be the xheight and the capheight (e.g. some of the rows // were lowercase, but did not have enough (a/de)scenders. // If such two modes can not be found, this block is most // likely all caps (or all small caps, in which case the code // still works as intended). compute_xheight_from_modes(&row_cap_xheights, &row_cap_floating_xheights, textord_single_height_mode && block->block->classify_rotation().y() == 0.0, min_height, max_height, &(xheight), &(ascrise)); if (ascrise == 0) { // assume only caps in the whole block xheight = row_cap_xheights.median() * CCStruct::kXHeightCapRatio; } } else { // default block sizes xheight = block->line_size * CCStruct::kXHeightFraction; } // Correct xheight, ascrise and descdrop if necessary. bool corrected_xheight = false; if (xheight < textord_min_xheight) { xheight = static_cast<float>(textord_min_xheight); corrected_xheight = true; } if (corrected_xheight || ascrise <= 0.0) { ascrise = xheight * asc_frac_xheight; } if (corrected_xheight || descdrop >= 0.0) { descdrop = -(xheight * desc_frac_xheight); } block->xheight = xheight; if (textord_debug_xheights) { tprintf("Block average xheight=%.4f, ascrise=%.4f, descdrop=%.4f\n", xheight, ascrise, descdrop); } // Correct xheight, ascrise, descdrop of rows based on block averages. for (row_it.mark_cycle_pt(); !row_it.cycled_list(); row_it.forward()) { correct_row_xheight(row_it.data(), xheight, ascrise, descdrop); } } /** * @name compute_row_xheight * * Estimate the xheight of this row. * Compute the ascender rise and descender drop at the same time. * Set xheigh_evidence to the number of blobs with the chosen xheight * that appear in this row. */ void Textord::compute_row_xheight(TO_ROW *row, // row to do const FCOORD& rotation, float gradient, // global skew int block_line_size) { // Find blobs representing repeated characters in rows and mark them. // This information is used for computing row xheight and at a later // stage when words are formed by make_words. if (!row->rep_chars_marked()) { mark_repeated_chars(row); } int min_height, max_height; get_min_max_xheight(block_line_size, &min_height, &max_height); STATS heights(min_height, max_height + 1); STATS floating_heights(min_height, max_height + 1); fill_heights(row, gradient, min_height, max_height, &heights, &floating_heights); row->ascrise = 0.0f; row->xheight = 0.0f; row->xheight_evidence = compute_xheight_from_modes(&heights, &floating_heights, textord_single_height_mode && rotation.y() == 0.0, min_height, max_height, &(row->xheight), &(row->ascrise)); row->descdrop = 0.0f; if (row->xheight > 0.0) { row->descdrop = static_cast<float>( compute_row_descdrop(row, gradient, row->xheight_evidence, &heights)); } } } // namespace tesseract. /** * @name fill_heights * * Fill the given heights with heights of the blobs that are legal * candidates for estimating xheight. 
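 * Heights are measured from the row baseline to the top of each blob; blobs whose own height is only a small fraction of that distance are also recorded as 'floating' so they can be discounted when picking the x-height mode.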
*/ void fill_heights(TO_ROW *row, float gradient, int min_height, int max_height, STATS *heights, STATS *floating_heights) { float xcentre; // centre of blob float top; // top y coord of blob float height; // height of blob BLOBNBOX *blob; // current blob int repeated_set; BLOBNBOX_IT blob_it = row->blob_list(); if (blob_it.empty()) return; // no blobs in this row bool has_rep_chars = row->rep_chars_marked() && row->num_repeated_sets() > 0; do { blob = blob_it.data(); if (!blob->joined_to_prev()) { xcentre = (blob->bounding_box().left() + blob->bounding_box().right()) / 2.0f; top = blob->bounding_box().top(); height = blob->bounding_box().height(); if (textord_fix_xheight_bug) top -= row->baseline.y(xcentre); else top -= gradient * xcentre + row->parallel_c(); if (top >= min_height && top <= max_height) { heights->add(static_cast<inT32>(floor(top + 0.5)), 1); if (height / top < textord_min_blob_height_fraction) { floating_heights->add(static_cast<inT32>(floor(top + 0.5)), 1); } } } // Skip repeated chars, since they are likely to skew the height stats. if (has_rep_chars && blob->repeated_set() != 0) { repeated_set = blob->repeated_set(); blob_it.forward(); while (!blob_it.at_first() && blob_it.data()->repeated_set() == repeated_set) { blob_it.forward(); if (textord_debug_xheights) tprintf("Skipping repeated char when computing xheight\n"); } } else { blob_it.forward(); } } while (!blob_it.at_first()); } /** * @name compute_xheight_from_modes * * Given a STATS object heights, looks for two most frequently occurring * heights that look like xheight and xheight + ascrise. If found, sets * the values of *xheight and *ascrise accordingly, otherwise sets xheight * to any most frequently occurring height and sets *ascrise to 0. * Returns the number of times xheight occurred in heights. * For each mode that is considered for being an xheight the count of * floating blobs (stored in floating_heights) is subtracted from the * total count of the blobs of this height. This is done because blobs * that sit far above the baseline could represent valid ascenders, but * it is highly unlikely that such a character's height will be an xheight * (e.g. -, ', =, ^, `, ", ', etc) * If cap_only, then force finding of only the top mode. 
*/ int compute_xheight_from_modes( STATS *heights, STATS *floating_heights, bool cap_only, int min_height, int max_height, float *xheight, float *ascrise) { int blob_index = heights->mode(); // find mode int blob_count = heights->pile_count(blob_index); // get count of mode if (textord_debug_xheights) { tprintf("min_height=%d, max_height=%d, mode=%d, count=%d, total=%d\n", min_height, max_height, blob_index, blob_count, heights->get_total()); heights->print(); floating_heights->print(); } if (blob_count == 0) return 0; int modes[MAX_HEIGHT_MODES]; // biggest piles bool in_best_pile = FALSE; int prev_size = -MAX_INT32; int best_count = 0; int mode_count = compute_height_modes(heights, min_height, max_height, modes, MAX_HEIGHT_MODES); if (cap_only && mode_count > 1) mode_count = 1; int x; if (textord_debug_xheights) { tprintf("found %d modes: ", mode_count); for (x = 0; x < mode_count; x++) tprintf("%d ", modes[x]); tprintf("\n"); } for (x = 0; x < mode_count - 1; x++) { if (modes[x] != prev_size + 1) in_best_pile = FALSE; // had empty height int modes_x_count = heights->pile_count(modes[x]) - floating_heights->pile_count(modes[x]); if ((modes_x_count >= blob_count * textord_xheight_mode_fraction) && (in_best_pile || modes_x_count > best_count)) { for (int asc = x + 1; asc < mode_count; asc++) { float ratio = static_cast<float>(modes[asc]) / static_cast<float>(modes[x]); if (textord_ascx_ratio_min < ratio && ratio < textord_ascx_ratio_max && (heights->pile_count(modes[asc]) >= blob_count * textord_ascheight_mode_fraction)) { if (modes_x_count > best_count) { in_best_pile = true; best_count = modes_x_count; } if (textord_debug_xheights) { tprintf("X=%d, asc=%d, count=%d, ratio=%g\n", modes[x], modes[asc]-modes[x], modes_x_count, ratio); } prev_size = modes[x]; *xheight = static_cast<float>(modes[x]); *ascrise = static_cast<float>(modes[asc] - modes[x]); } } } } if (*xheight == 0) { // single mode // Remove counts of the "floating" blobs (the one whose height is too // small in relation to it's top end of the bounding box) from heights // before computing the single-mode xheight. // Restore the counts in heights after the mode is found, since // floating blobs might be useful for determining potential ascenders // in compute_row_descdrop(). if (floating_heights->get_total() > 0) { for (x = min_height; x < max_height; ++x) { heights->add(x, -(floating_heights->pile_count(x))); } blob_index = heights->mode(); // find the modified mode for (x = min_height; x < max_height; ++x) { heights->add(x, floating_heights->pile_count(x)); } } *xheight = static_cast<float>(blob_index); *ascrise = 0.0f; best_count = heights->pile_count(blob_index); if (textord_debug_xheights) tprintf("Single mode xheight set to %g\n", *xheight); } else if (textord_debug_xheights) { tprintf("Multi-mode xheight set to %g, asc=%g\n", *xheight, *ascrise); } return best_count; } /** * @name compute_row_descdrop * * Estimates the descdrop of this row. This function looks for * "significant" descenders of lowercase letters (those that could * not just be the small descenders of upper case letters like Q,J). * The function also takes into account how many potential ascenders * this row might contain. If the number of potential ascenders along * with descenders is close to the expected fraction of the total * number of blobs in the row, the function returns the descender * height, returns 0 otherwise. 
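 * (The value returned is the negated modal descender depth, i.e. zero or negative.)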
*/ inT32 compute_row_descdrop(TO_ROW *row, float gradient, int xheight_blob_count, STATS *asc_heights) { // Count how many potential ascenders are in this row. int i_min = asc_heights->min_bucket(); if ((i_min / row->xheight) < textord_ascx_ratio_min) { i_min = static_cast<int>( floor(row->xheight * textord_ascx_ratio_min + 0.5)); } int i_max = asc_heights->max_bucket(); if ((i_max / row->xheight) > textord_ascx_ratio_max) { i_max = static_cast<int>(floor(row->xheight * textord_ascx_ratio_max)); } int num_potential_asc = 0; for (int i = i_min; i <= i_max; ++i) { num_potential_asc += asc_heights->pile_count(i); } inT32 min_height = static_cast<inT32>(floor(row->xheight * textord_descx_ratio_min + 0.5)); inT32 max_height = static_cast<inT32>(floor(row->xheight * textord_descx_ratio_max)); float xcentre; // centre of blob float height; // height of blob BLOBNBOX_IT blob_it = row->blob_list(); BLOBNBOX *blob; // current blob STATS heights (min_height, max_height + 1); for (blob_it.mark_cycle_pt(); !blob_it.cycled_list(); blob_it.forward()) { blob = blob_it.data(); if (!blob->joined_to_prev()) { xcentre = (blob->bounding_box().left() + blob->bounding_box().right()) / 2.0f; height = (gradient * xcentre + row->parallel_c() - blob->bounding_box().bottom()); if (height >= min_height && height <= max_height) heights.add(static_cast<int>(floor(height + 0.5)), 1); } } int blob_index = heights.mode(); // find mode int blob_count = heights.pile_count(blob_index); // get count of mode float total_fraction = (textord_descheight_mode_fraction + textord_ascheight_mode_fraction); if (static_cast<float>(blob_count + num_potential_asc) < xheight_blob_count * total_fraction) { blob_count = 0; } int descdrop = blob_count > 0 ? -blob_index : 0; if (textord_debug_xheights) { tprintf("Descdrop: %d (potential ascenders %d, descenders %d)\n", descdrop, num_potential_asc, blob_count); heights.print(); } return descdrop; } /** * @name compute_height_modes * * Find the top maxmodes values in the input array and put their * indices in the output in the order in which they occurred. 
*/ inT32 compute_height_modes(STATS *heights, // stats to search inT32 min_height, // bottom of range inT32 max_height, // top of range inT32 *modes, // output array inT32 maxmodes) { // size of modes inT32 pile_count; // no in source pile inT32 src_count; // no of source entries inT32 src_index; // current entry inT32 least_count; // height of smalllest inT32 least_index; // index of least inT32 dest_count; // index in modes src_count = max_height + 1 - min_height; dest_count = 0; least_count = MAX_INT32; least_index = -1; for (src_index = 0; src_index < src_count; src_index++) { pile_count = heights->pile_count(min_height + src_index); if (pile_count > 0) { if (dest_count < maxmodes) { if (pile_count < least_count) { // find smallest in array least_count = pile_count; least_index = dest_count; } modes[dest_count++] = min_height + src_index; } else if (pile_count >= least_count) { while (least_index < maxmodes - 1) { modes[least_index] = modes[least_index + 1]; // shuffle up least_index++; } // new one on end modes[maxmodes - 1] = min_height + src_index; if (pile_count == least_count) { // new smallest least_index = maxmodes - 1; } else { least_count = heights->pile_count(modes[0]); least_index = 0; for (dest_count = 1; dest_count < maxmodes; dest_count++) { pile_count = heights->pile_count(modes[dest_count]); if (pile_count < least_count) { // find smallest least_count = pile_count; least_index = dest_count; } } } } } } return dest_count; } /** * @name correct_row_xheight * * Adjust the xheight etc of this row if not within reasonable limits * of the average for the block. */ void correct_row_xheight(TO_ROW *row, float xheight, float ascrise, float descdrop) { ROW_CATEGORY row_category = get_row_category(row); if (textord_debug_xheights) { tprintf("correcting row xheight: row->xheight %.4f" ", row->acrise %.4f row->descdrop %.4f\n", row->xheight, row->ascrise, row->descdrop); } bool normal_xheight = within_error_margin(row->xheight, xheight, textord_xheight_error_margin); bool cap_xheight = within_error_margin(row->xheight, xheight + ascrise, textord_xheight_error_margin); // Use the average xheight/ascrise for the following cases: // -- the xheight of the row could not be determined at all // -- the row has descenders (e.g. "many groups", "ISBN 12345 p.3") // and its xheight is close to either cap height or average xheight // -- the row does not have ascenders or descenders, but its xheight // is close to the average block xheight (e.g. row with "www.mmm.com") if (row_category == ROW_ASCENDERS_FOUND) { if (row->descdrop >= 0.0) { row->descdrop = row->xheight * (descdrop / xheight); } } else if (row_category == ROW_INVALID || (row_category == ROW_DESCENDERS_FOUND && (normal_xheight || cap_xheight)) || (row_category == ROW_UNKNOWN && normal_xheight)) { if (textord_debug_xheights) tprintf("using average xheight\n"); row->xheight = xheight; row->ascrise = ascrise; row->descdrop = descdrop; } else if (row_category == ROW_DESCENDERS_FOUND) { // Assume this is a row with mostly lowercase letters and it's xheight // is computed correctly (unfortunately there is no way to distinguish // this from the case when descenders are found, but the most common // height is capheight). if (textord_debug_xheights) tprintf("lowercase, corrected ascrise\n"); row->ascrise = row->xheight * (ascrise / xheight); } else if (row_category == ROW_UNKNOWN) { // Otherwise assume this row is an all-caps or small-caps row // and adjust xheight and ascrise of the row. 
row->all_caps = true; if (cap_xheight) { // regular all caps if (textord_debug_xheights) tprintf("all caps\n"); row->xheight = xheight; row->ascrise = ascrise; row->descdrop = descdrop; } else { // small caps or caps with an odd xheight if (textord_debug_xheights) { if (row->xheight < xheight + ascrise && row->xheight > xheight) { tprintf("small caps\n"); } else { tprintf("all caps with irregular xheight\n"); } } row->ascrise = row->xheight * (ascrise / (xheight + ascrise)); row->xheight -= row->ascrise; row->descdrop = row->xheight * (descdrop / xheight); } } if (textord_debug_xheights) { tprintf("corrected row->xheight = %.4f, row->acrise = %.4f, row->descdrop" " = %.4f\n", row->xheight, row->ascrise, row->descdrop); } } static int CountOverlaps(const TBOX& box, int min_height, BLOBNBOX_LIST* blobs) { int overlaps = 0; BLOBNBOX_IT blob_it(blobs); for (blob_it.mark_cycle_pt(); !blob_it.cycled_list(); blob_it.forward()) { BLOBNBOX* blob = blob_it.data(); TBOX blob_box = blob->bounding_box(); if (blob_box.height() >= min_height && box.major_overlap(blob_box)) { ++overlaps; } } return overlaps; } /** * @name separate_underlines * * Test wide objects for being potential underlines. If they are then * put them in a separate list in the block. */ void separate_underlines(TO_BLOCK *block, // block to do float gradient, // skew angle FCOORD rotation, // inverse landscape BOOL8 testing_on) { // correct orientation BLOBNBOX *blob; // current blob C_BLOB *rotated_blob; // rotated blob TO_ROW *row; // current row float length; // of g_vec TBOX blob_box; FCOORD blob_rotation; // inverse of rotation FCOORD g_vec; // skew rotation BLOBNBOX_IT blob_it; // iterator // iterator BLOBNBOX_IT under_it = &block->underlines; BLOBNBOX_IT large_it = &block->large_blobs; TO_ROW_IT row_it = block->get_rows(); int min_blob_height = static_cast<int>(textord_min_blob_height_fraction * block->line_size + 0.5); // length of vector length = sqrt(1 + gradient * gradient); g_vec = FCOORD(1 / length, -gradient / length); blob_rotation = FCOORD(rotation.x(), -rotation.y()); blob_rotation.rotate(g_vec); // undoing everything for (row_it.mark_cycle_pt(); !row_it.cycled_list(); row_it.forward()) { row = row_it.data(); // get blobs blob_it.set_to_list(row->blob_list()); for (blob_it.mark_cycle_pt(); !blob_it.cycled_list(); blob_it.forward()) { blob = blob_it.data(); blob_box = blob->bounding_box(); if (blob_box.width() > block->line_size * textord_underline_width) { ASSERT_HOST(blob->cblob() != NULL); rotated_blob = crotate_cblob (blob->cblob(), blob_rotation); if (test_underline( testing_on && textord_show_final_rows, rotated_blob, static_cast<inT16>(row->intercept()), static_cast<inT16>( block->line_size * (tesseract::CCStruct::kXHeightFraction + tesseract::CCStruct::kAscenderFraction / 2.0f)))) { under_it.add_after_then_move(blob_it.extract()); if (testing_on && textord_show_final_rows) { tprintf("Underlined blob at:"); rotated_blob->bounding_box().print(); tprintf("Was:"); blob_box.print(); } } else if (CountOverlaps(blob->bounding_box(), min_blob_height, row->blob_list()) > textord_max_blob_overlaps) { large_it.add_after_then_move(blob_it.extract()); if (testing_on && textord_show_final_rows) { tprintf("Large blob overlaps %d blobs at:", CountOverlaps(blob_box, min_blob_height, row->blob_list())); blob_box.print(); } } delete rotated_blob; } } } } /** * @name pre_associate_blobs * * Associate overlapping blobs and fake chop wide blobs. 
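 * Blobs that overlap in x are merged into one, and any blob still wider than the chop limit is then split into fake pieces of limited width.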
*/ void pre_associate_blobs( //make rough chars ICOORD page_tr, //top right TO_BLOCK *block, //block to do FCOORD rotation, //inverse landscape BOOL8 testing_on //correct orientation ) { #ifndef GRAPHICS_DISABLED ScrollView::Color colour; //of boxes #endif BLOBNBOX *blob; //current blob BLOBNBOX *nextblob; //next in list TBOX blob_box; FCOORD blob_rotation; //inverse of rotation BLOBNBOX_IT blob_it; //iterator BLOBNBOX_IT start_it; //iterator TO_ROW_IT row_it = block->get_rows (); #ifndef GRAPHICS_DISABLED colour = ScrollView::RED; #endif blob_rotation = FCOORD (rotation.x (), -rotation.y ()); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { //get blobs blob_it.set_to_list (row_it.data ()->blob_list ()); for (blob_it.mark_cycle_pt (); !blob_it.cycled_list (); blob_it.forward ()) { blob = blob_it.data (); blob_box = blob->bounding_box (); start_it = blob_it; //save start point // if (testing_on && textord_show_final_blobs) // { // tprintf("Blob at (%d,%d)->(%d,%d), addr=%x, count=%d\n", // blob_box.left(),blob_box.bottom(), // blob_box.right(),blob_box.top(), // (void*)blob,blob_it.length()); // } bool overlap; do { overlap = false; if (!blob_it.at_last ()) { nextblob = blob_it.data_relative(1); overlap = blob_box.major_x_overlap(nextblob->bounding_box()); if (overlap) { blob->merge(nextblob); // merge new blob blob_box = blob->bounding_box(); // get bigger box blob_it.forward(); } } } while (overlap); blob->chop (&start_it, &blob_it, blob_rotation, block->line_size * tesseract::CCStruct::kXHeightFraction * textord_chop_width); //attempt chop } #ifndef GRAPHICS_DISABLED if (testing_on && textord_show_final_blobs) { if (to_win == NULL) create_to_win(page_tr); to_win->Pen(colour); for (blob_it.mark_cycle_pt (); !blob_it.cycled_list (); blob_it.forward ()) { blob = blob_it.data (); blob_box = blob->bounding_box (); blob_box.rotate (rotation); if (!blob->joined_to_prev ()) { to_win->Rectangle (blob_box.left (), blob_box.bottom (), blob_box.right (), blob_box.top ()); } } colour = (ScrollView::Color) (colour + 1); if (colour > ScrollView::MAGENTA) colour = ScrollView::RED; } #endif } } /** * @name fit_parallel_rows * * Re-fit the rows in the block to the given gradient. */ void fit_parallel_rows( //find lines TO_BLOCK *block, //block to do float gradient, //gradient to fit FCOORD rotation, //for drawing inT32 block_edge, //edge of block BOOL8 testing_on //correct orientation ) { #ifndef GRAPHICS_DISABLED ScrollView::Color colour; //of row #endif TO_ROW_IT row_it = block->get_rows (); row_it.move_to_first (); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { if (row_it.data ()->blob_list ()->empty ()) delete row_it.extract (); //nothing in it else fit_parallel_lms (gradient, row_it.data ()); } #ifndef GRAPHICS_DISABLED if (testing_on) { colour = ScrollView::RED; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { plot_parallel_row (row_it.data (), gradient, block_edge, colour, rotation); colour = (ScrollView::Color) (colour + 1); if (colour > ScrollView::MAGENTA) colour = ScrollView::RED; } } #endif row_it.sort (row_y_order); //may have gone out of order } /** * @name fit_parallel_lms * * Fit an LMS line to a row. * Make the fit parallel to the given gradient and set the * row accordingly. 
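 * The fit is first constrained to the given gradient; when textord_straight_baselines is set and the row has enough blobs, an unconstrained refit supplies the slope for the row's own line.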
*/ void fit_parallel_lms(float gradient, TO_ROW *row) { float c; // fitted line int blobcount; // no of blobs tesseract::DetLineFit lms; BLOBNBOX_IT blob_it = row->blob_list(); blobcount = 0; for (blob_it.mark_cycle_pt(); !blob_it.cycled_list(); blob_it.forward()) { if (!blob_it.data()->joined_to_prev()) { const TBOX& box = blob_it.data()->bounding_box(); lms.Add(ICOORD((box.left() + box.right()) / 2, box.bottom())); blobcount++; } } double error = lms.ConstrainedFit(gradient, &c); row->set_parallel_line(gradient, c, error); if (textord_straight_baselines && blobcount > textord_lms_line_trials) { error = lms.Fit(&gradient, &c); } //set the other too row->set_line(gradient, c, error); } /** * @name make_spline_rows * * Re-fit the rows in the block to the given gradient. */ namespace tesseract { void Textord::make_spline_rows(TO_BLOCK *block, // block to do float gradient, // gradient to fit BOOL8 testing_on) { #ifndef GRAPHICS_DISABLED ScrollView::Color colour; //of row #endif TO_ROW_IT row_it = block->get_rows (); row_it.move_to_first (); for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { if (row_it.data ()->blob_list ()->empty ()) delete row_it.extract (); //nothing in it else make_baseline_spline (row_it.data (), block); } if (textord_old_baselines) { #ifndef GRAPHICS_DISABLED if (testing_on) { colour = ScrollView::RED; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row_it.data ()->baseline.plot (to_win, colour); colour = (ScrollView::Color) (colour + 1); if (colour > ScrollView::MAGENTA) colour = ScrollView::RED; } } #endif make_old_baselines(block, testing_on, gradient); } #ifndef GRAPHICS_DISABLED if (testing_on) { colour = ScrollView::RED; for (row_it.mark_cycle_pt (); !row_it.cycled_list (); row_it.forward ()) { row_it.data ()->baseline.plot (to_win, colour); colour = (ScrollView::Color) (colour + 1); if (colour > ScrollView::MAGENTA) colour = ScrollView::RED; } } #endif } } // namespace tesseract. /** * @name make_baseline_spline * * Fit an LMS line to a row. * Make the fit parallel to the given gradient and set the * row accordingly. */ void make_baseline_spline(TO_ROW *row, //row to fit TO_BLOCK *block) { inT32 *xstarts; // spline boundaries double *coeffs; // quadratic coeffs inT32 segments; // no of segments xstarts = (inT32 *) alloc_mem((row->blob_list()->length() + 1) * sizeof(inT32)); if (segment_baseline(row, block, segments, xstarts) && !textord_straight_baselines && !textord_parallel_baselines) { coeffs = linear_spline_baseline(row, block, segments, xstarts); } else { xstarts[1] = xstarts[segments]; segments = 1; coeffs = (double *) alloc_mem (3 * sizeof (double)); coeffs[0] = 0; coeffs[1] = row->line_m (); coeffs[2] = row->line_c (); } row->baseline = QSPLINE (segments, xstarts, coeffs); free_mem(coeffs); free_mem(xstarts); } /** * @name segment_baseline * * Divide the baseline up into segments which require a different * quadratic fitted to them. * Return TRUE if enough blobs were far enough away to need a quadratic. 
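 * A moving median of the blobs' vertical shifts from the straight baseline is classified as above, on, or below the line; each state change after enough blobs starts a new segment.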
*/ BOOL8 segment_baseline ( //split baseline TO_ROW * row, //row to fit TO_BLOCK * block, //block it came from inT32 & segments, //no fo segments inT32 xstarts[] //coords of segments ) { BOOL8 needs_curve; //needs curved line int blobcount; //no of blobs int blobindex; //current blob int last_state; //above, on , below int state; //of current blob float yshift; //from baseline TBOX box; //blob box TBOX new_box; //new_it box float middle; //xcentre of blob //blobs BLOBNBOX_IT blob_it = row->blob_list (); BLOBNBOX_IT new_it = blob_it; //front end SORTED_FLOATS yshifts; //shifts from baseline needs_curve = FALSE; box = box_next_pre_chopped (&blob_it); xstarts[0] = box.left (); segments = 1; blobcount = row->blob_list ()->length (); if (textord_oldbl_debug) tprintf ("Segmenting baseline of %d blobs at (%d,%d)\n", blobcount, box.left (), box.bottom ()); if (blobcount <= textord_spline_medianwin || blobcount < textord_spline_minblobs) { blob_it.move_to_last (); box = blob_it.data ()->bounding_box (); xstarts[1] = box.right (); return FALSE; } last_state = 0; new_it.mark_cycle_pt (); for (blobindex = 0; blobindex < textord_spline_medianwin; blobindex++) { new_box = box_next_pre_chopped (&new_it); middle = (new_box.left () + new_box.right ()) / 2.0; yshift = new_box.bottom () - row->line_m () * middle - row->line_c (); //record shift yshifts.add (yshift, blobindex); if (new_it.cycled_list ()) { xstarts[1] = new_box.right (); return FALSE; } } for (blobcount = 0; blobcount < textord_spline_medianwin / 2; blobcount++) box = box_next_pre_chopped (&blob_it); do { new_box = box_next_pre_chopped (&new_it); //get middle one yshift = yshifts[textord_spline_medianwin / 2]; if (yshift > textord_spline_shift_fraction * block->line_size) state = 1; else if (-yshift > textord_spline_shift_fraction * block->line_size) state = -1; else state = 0; if (state != 0) needs_curve = TRUE; // tprintf("State=%d, prev=%d, shift=%g\n", // state,last_state,yshift); if (state != last_state && blobcount > textord_spline_minblobs) { xstarts[segments++] = box.left (); blobcount = 0; } last_state = state; yshifts.remove (blobindex - textord_spline_medianwin); box = box_next_pre_chopped (&blob_it); middle = (new_box.left () + new_box.right ()) / 2.0; yshift = new_box.bottom () - row->line_m () * middle - row->line_c (); yshifts.add (yshift, blobindex); blobindex++; blobcount++; } while (!new_it.cycled_list ()); if (blobcount > textord_spline_minblobs || segments == 1) { xstarts[segments] = new_box.right (); } else { xstarts[--segments] = new_box.right (); } if (textord_oldbl_debug) tprintf ("Made %d segments on row at (%d,%d)\n", segments, box.right (), box.bottom ()); return needs_curve; } /** * @name linear_spline_baseline * * Divide the baseline up into segments which require a different * quadratic fitted to them. * @return TRUE if enough blobs were far enough away to need a quadratic. 
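 * (Unlike segment_baseline, the value actually returned here is the array of per-segment quadratic coefficients, three per segment with the squared term set to zero.)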
*/ double * linear_spline_baseline ( //split baseline TO_ROW * row, //row to fit TO_BLOCK * block, //block it came from inT32 & segments, //no fo segments inT32 xstarts[] //coords of segments ) { int blobcount; //no of blobs int blobindex; //current blob int index1, index2; //blob numbers int blobs_per_segment; //blobs in each TBOX box; //blob box TBOX new_box; //new_it box //blobs BLOBNBOX_IT blob_it = row->blob_list (); BLOBNBOX_IT new_it = blob_it; //front end float b, c; //fitted curve tesseract::DetLineFit lms; double *coeffs; //quadratic coeffs inT32 segment; //current segment box = box_next_pre_chopped (&blob_it); xstarts[0] = box.left (); blobcount = 1; while (!blob_it.at_first ()) { blobcount++; box = box_next_pre_chopped (&blob_it); } segments = blobcount / textord_spline_medianwin; if (segments < 1) segments = 1; blobs_per_segment = blobcount / segments; coeffs = (double *) alloc_mem (segments * 3 * sizeof (double)); if (textord_oldbl_debug) tprintf ("Linear splining baseline of %d blobs at (%d,%d), into %d segments of %d blobs\n", blobcount, box.left (), box.bottom (), segments, blobs_per_segment); segment = 1; for (index2 = 0; index2 < blobs_per_segment / 2; index2++) box_next_pre_chopped(&new_it); index1 = 0; blobindex = index2; do { blobindex += blobs_per_segment; lms.Clear(); while (index1 < blobindex || (segment == segments && index1 < blobcount)) { box = box_next_pre_chopped (&blob_it); int middle = (box.left() + box.right()) / 2; lms.Add(ICOORD(middle, box.bottom())); index1++; if (index1 == blobindex - blobs_per_segment / 2 || index1 == blobcount - 1) { xstarts[segment] = box.left (); } } lms.Fit(&b, &c); coeffs[segment * 3 - 3] = 0; coeffs[segment * 3 - 2] = b; coeffs[segment * 3 - 1] = c; segment++; if (segment > segments) break; blobindex += blobs_per_segment; lms.Clear(); while (index2 < blobindex || (segment == segments && index2 < blobcount)) { new_box = box_next_pre_chopped (&new_it); int middle = (new_box.left() + new_box.right()) / 2; lms.Add(ICOORD (middle, new_box.bottom())); index2++; if (index2 == blobindex - blobs_per_segment / 2 || index2 == blobcount - 1) { xstarts[segment] = new_box.left (); } } lms.Fit(&b, &c); coeffs[segment * 3 - 3] = 0; coeffs[segment * 3 - 2] = b; coeffs[segment * 3 - 1] = c; segment++; } while (segment <= segments); return coeffs; } /** * @name assign_blobs_to_rows * * Make enough rows to allocate all the given blobs to one. * If a block skew is given, use that, else attempt to track it. 
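 * When no gradient is supplied, the skew is tracked as blobs are assigned, by exponentially smoothing the vertical offset of each newly assigned blob from its row.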
*/ void assign_blobs_to_rows( //find lines TO_BLOCK *block, //block to do float *gradient, //block skew int pass, //identification BOOL8 reject_misses, //chuck big ones out BOOL8 make_new_rows, //add rows for unmatched BOOL8 drawing_skew //draw smoothed skew ) { OVERLAP_STATE overlap_result; //what to do with it float ycoord; //current y float top, bottom; //of blob float g_length = 1.0f; //from gradient inT16 row_count; //no of rows inT16 left_x; //left edge inT16 last_x; //previous edge float block_skew; //y delta float smooth_factor; //for new coords float near_dist; //dist to nearest row ICOORD testpt; //testing only BLOBNBOX *blob; //current blob TO_ROW *row; //current row TO_ROW *dest_row = NULL; //row to put blob in //iterators BLOBNBOX_IT blob_it = &block->blobs; TO_ROW_IT row_it = block->get_rows (); ycoord = (block->block->bounding_box ().bottom () + block->block->bounding_box ().top ()) / 2.0f; if (gradient != NULL) g_length = sqrt (1 + *gradient * *gradient); #ifndef GRAPHICS_DISABLED if (drawing_skew) to_win->SetCursor(block->block->bounding_box ().left (), ycoord); #endif testpt = ICOORD (textord_test_x, textord_test_y); blob_it.sort (blob_x_order); smooth_factor = 1.0; block_skew = 0.0f; row_count = row_it.length (); //might have rows if (!blob_it.empty ()) { left_x = blob_it.data ()->bounding_box ().left (); } else { left_x = block->block->bounding_box ().left (); } last_x = left_x; for (blob_it.mark_cycle_pt (); !blob_it.cycled_list (); blob_it.forward ()) { blob = blob_it.data (); if (gradient != NULL) { block_skew = (1 - 1 / g_length) * blob->bounding_box ().bottom () + *gradient / g_length * blob->bounding_box ().left (); } else if (blob->bounding_box ().left () - last_x > block->line_size / 2 && last_x - left_x > block->line_size * 2 && textord_interpolating_skew) { // tprintf("Interpolating skew from %g",block_skew); block_skew *= (float) (blob->bounding_box ().left () - left_x) / (last_x - left_x); // tprintf("to %g\n",block_skew); } last_x = blob->bounding_box ().left (); top = blob->bounding_box ().top () - block_skew; bottom = blob->bounding_box ().bottom () - block_skew; #ifndef GRAPHICS_DISABLED if (drawing_skew) to_win->DrawTo(blob->bounding_box ().left (), ycoord + block_skew); #endif if (!row_it.empty ()) { for (row_it.move_to_first (); !row_it.at_last () && row_it.data ()->min_y () > top; row_it.forward ()); row = row_it.data (); if (row->min_y () <= top && row->max_y () >= bottom) { //any overlap dest_row = row; overlap_result = most_overlapping_row (&row_it, dest_row, top, bottom, block->line_size, blob->bounding_box (). 
contains (testpt)); if (overlap_result == NEW_ROW && !reject_misses) overlap_result = ASSIGN; } else { overlap_result = NEW_ROW; if (!make_new_rows) { near_dist = row_it.data_relative (-1)->min_y () - top; //below bottom if (bottom < row->min_y ()) { if (row->min_y () - bottom <= (block->line_spacing - block->line_size) * tesseract::CCStruct::kDescenderFraction) { //done it overlap_result = ASSIGN; dest_row = row; } } else if (near_dist > 0 && near_dist < bottom - row->max_y ()) { row_it.backward (); dest_row = row_it.data (); if (dest_row->min_y () - bottom <= (block->line_spacing - block->line_size) * tesseract::CCStruct::kDescenderFraction) { //done it overlap_result = ASSIGN; } } else { if (top - row->max_y () <= (block->line_spacing - block->line_size) * (textord_overlap_x + tesseract::CCStruct::kAscenderFraction)) { //done it overlap_result = ASSIGN; dest_row = row; } } } } if (overlap_result == ASSIGN) dest_row->add_blob (blob_it.extract (), top, bottom, block->line_size); if (overlap_result == NEW_ROW) { if (make_new_rows && top - bottom < block->max_blob_size) { dest_row = new TO_ROW (blob_it.extract (), top, bottom, block->line_size); row_count++; if (bottom > row_it.data ()->min_y ()) row_it.add_before_then_move (dest_row); //insert in right place else row_it.add_after_then_move (dest_row); smooth_factor = 1.0 / (row_count * textord_skew_lag + textord_skewsmooth_offset); } else overlap_result = REJECT; } } else if (make_new_rows && top - bottom < block->max_blob_size) { overlap_result = NEW_ROW; dest_row = new TO_ROW(blob_it.extract(), top, bottom, block->line_size); row_count++; row_it.add_after_then_move(dest_row); smooth_factor = 1.0 / (row_count * textord_skew_lag + textord_skewsmooth_offset2); } else overlap_result = REJECT; if (blob->bounding_box ().contains(testpt) && textord_debug_blob) { if (overlap_result != REJECT) { tprintf("Test blob assigned to row at (%g,%g) on pass %d\n", dest_row->min_y(), dest_row->max_y(), pass); } else { tprintf("Test blob assigned to no row on pass %d\n", pass); } } if (overlap_result != REJECT) { while (!row_it.at_first() && row_it.data()->min_y() > row_it.data_relative(-1)->min_y()) { row = row_it.extract(); row_it.backward(); row_it.add_before_then_move(row); } while (!row_it.at_last() && row_it.data ()->min_y() < row_it.data_relative (1)->min_y()) { row = row_it.extract(); row_it.forward(); // Keep rows in order. row_it.add_after_then_move(row); } BLOBNBOX_IT added_blob_it(dest_row->blob_list()); added_blob_it.move_to_last(); TBOX prev_box = added_blob_it.data_relative(-1)->bounding_box(); if (dest_row->blob_list()->singleton() || !prev_box.major_x_overlap(blob->bounding_box())) { block_skew = (1 - smooth_factor) * block_skew + smooth_factor * (blob->bounding_box().bottom() - dest_row->initial_min_y()); } } } for (row_it.mark_cycle_pt(); !row_it.cycled_list(); row_it.forward()) { if (row_it.data()->blob_list()->empty()) delete row_it.extract(); // Discard empty rows. } } /** * @name most_overlapping_row * * Return the row which most overlaps the blob. 
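 * Overlapping rows that would still fit within the maximum row size are merged as a side effect; a blob that heavily overlaps two separate rows is rejected.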
*/ OVERLAP_STATE most_overlapping_row( //find best row TO_ROW_IT *row_it, //iterator TO_ROW *&best_row, //output row float top, //top of blob float bottom, //bottom of blob float rowsize, //max row size BOOL8 testing_blob //test stuff ) { OVERLAP_STATE result; //result of tests float overlap; //of blob & row float bestover; //nearest row float merge_top, merge_bottom; //size of merged row ICOORD testpt; //testing only TO_ROW *row; //current row TO_ROW *test_row; //for multiple overlaps BLOBNBOX_IT blob_it; //for merging rows result = ASSIGN; row = row_it->data (); bestover = top - bottom; if (top > row->max_y ()) bestover -= top - row->max_y (); if (bottom < row->min_y ()) //compute overlap bestover -= row->min_y () - bottom; if (testing_blob && textord_debug_blob) { tprintf("Test blob y=(%g,%g), row=(%f,%f), size=%g, overlap=%f\n", bottom, top, row->min_y(), row->max_y(), rowsize, bestover); } test_row = row; do { if (!row_it->at_last ()) { row_it->forward (); test_row = row_it->data (); if (test_row->min_y () <= top && test_row->max_y () >= bottom) { merge_top = test_row->max_y () > row->max_y ()? test_row->max_y () : row->max_y (); merge_bottom = test_row->min_y () < row->min_y ()? test_row->min_y () : row->min_y (); if (merge_top - merge_bottom <= rowsize) { if (testing_blob) { tprintf ("Merging rows at (%g,%g), (%g,%g)\n", row->min_y (), row->max_y (), test_row->min_y (), test_row->max_y ()); } test_row->set_limits (merge_bottom, merge_top); blob_it.set_to_list (test_row->blob_list ()); blob_it.add_list_after (row->blob_list ()); blob_it.sort (blob_x_order); row_it->backward (); delete row_it->extract (); row_it->forward (); bestover = -1.0f; //force replacement } overlap = top - bottom; if (top > test_row->max_y ()) overlap -= top - test_row->max_y (); if (bottom < test_row->min_y ()) overlap -= test_row->min_y () - bottom; if (bestover >= rowsize - 1 && overlap >= rowsize - 1) { result = REJECT; } if (overlap > bestover) { bestover = overlap; //find biggest overlap row = test_row; } if (testing_blob && textord_debug_blob) { tprintf("Test blob y=(%g,%g), row=(%f,%f), size=%g, overlap=%f->%f\n", bottom, top, test_row->min_y(), test_row->max_y(), rowsize, overlap, bestover); } } } } while (!row_it->at_last () && test_row->min_y () <= top && test_row->max_y () >= bottom); while (row_it->data () != row) row_it->backward (); //make it point to row //doesn't overlap much if (top - bottom - bestover > rowsize * textord_overlap_x && (!textord_fix_makerow_bug || bestover < rowsize * textord_overlap_x) && result == ASSIGN) result = NEW_ROW; //doesn't overlap enough best_row = row; return result; } /** * @name blob_x_order * * Sort function to sort blobs in x from page left. */ int blob_x_order( //sort function const void *item1, //items to compare const void *item2) { //converted ptr BLOBNBOX *blob1 = *(BLOBNBOX **) item1; //converted ptr BLOBNBOX *blob2 = *(BLOBNBOX **) item2; if (blob1->bounding_box ().left () < blob2->bounding_box ().left ()) return -1; else if (blob1->bounding_box ().left () > blob2->bounding_box ().left ()) return 1; else return 0; } /** * @name row_y_order * * Sort function to sort rows in y from page top. 
*/ int row_y_order( //sort function const void *item1, //items to compare const void *item2) { //converted ptr TO_ROW *row1 = *(TO_ROW **) item1; //converted ptr TO_ROW *row2 = *(TO_ROW **) item2; if (row1->parallel_c () > row2->parallel_c ()) return -1; else if (row1->parallel_c () < row2->parallel_c ()) return 1; else return 0; } /** * @name row_spacing_order * * Qsort style function to compare 2 TO_ROWS based on their spacing value. */ int row_spacing_order( //sort function const void *item1, //items to compare const void *item2) { //converted ptr TO_ROW *row1 = *(TO_ROW **) item1; //converted ptr TO_ROW *row2 = *(TO_ROW **) item2; if (row1->spacing < row2->spacing) return -1; else if (row1->spacing > row2->spacing) return 1; else return 0; } /** * @name mark_repeated_chars * * Mark blobs marked with BTFT_LEADER in repeated sets using the * repeated_set member of BLOBNBOX. */ void mark_repeated_chars(TO_ROW *row) { BLOBNBOX_IT box_it(row->blob_list()); // Iterator. int num_repeated_sets = 0; if (!box_it.empty()) { do { BLOBNBOX* bblob = box_it.data(); int repeat_length = 0; if (bblob->flow() == BTFT_LEADER && !bblob->joined_to_prev() && bblob->cblob() != NULL) { BLOBNBOX_IT test_it(box_it); for (test_it.forward(); !test_it.at_first(); test_it.forward()) { bblob = test_it.data(); if (bblob->flow() != BTFT_LEADER) break; if (bblob->joined_to_prev() || bblob->cblob() == NULL) { repeat_length = 0; break; } ++repeat_length; } } if (repeat_length >= kMinLeaderCount) { num_repeated_sets++; for (; repeat_length > 0; box_it.forward(), --repeat_length) { bblob = box_it.data(); bblob->set_repeated_set(num_repeated_sets); } if (!box_it.at_first()) bblob->set_repeated_set(0); } else { box_it.forward(); bblob->set_repeated_set(0); } } while (!box_it.at_first()); // until all done } row->set_num_repeated_sets(num_repeated_sets); }
{ "content_hash": "742b3ebd127a41d68d927edb74d5b4a0", "timestamp": "", "source": "github", "line_count": 2683, "max_line_length": 94, "avg_line_length": 37.67275437942602, "alnum_prop": 0.5552653448889944, "repo_name": "BruceWoR/tess-two-master", "id": "6d72b33d09a665b5451349847a235263d583aeac", "size": "101996", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tess-two/jni/com_googlecode_tesseract_android/src/textord/makerow.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "10391930" }, { "name": "C++", "bytes": "8215892" }, { "name": "Java", "bytes": "484148" }, { "name": "Objective-C", "bytes": "2946" }, { "name": "Python", "bytes": "2184" }, { "name": "Shell", "bytes": "298099" }, { "name": "TeX", "bytes": "2741" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright (c) 1997, 2019 Oracle and/or its affiliates. All rights reserved. This program and the accompanying materials are made available under the terms of the Eclipse Distribution License v. 1.0, which is available at http://www.eclipse.org/org/documents/edl-v10.php. SPDX-License-Identifier: BSD-3-Clause --> <endpoints xmlns="http://java.sun.com/xml/ns/jax-ws/ri/runtime" version="2.0"> <endpoint name="catalog" interface="catalog.server.AddNumbersPortType" implementation="catalog.server.AddNumbersImpl" wsdl="WEB-INF/wsdl/AddNumbers.wsdl" service="{http://example.com}AddNumbersService" port="{http://example.com}AddNumbersPort" url-pattern="/addnumbers" /> </endpoints>
{ "content_hash": "cb8973fb49a38743059324779de23f80", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 79, "avg_line_length": 30.25925925925926, "alnum_prop": 0.6780905752753978, "repo_name": "eclipse-ee4j/metro-jax-ws", "id": "125d9e00ef1e3da02bf025763c688a756f5ae895", "size": "817", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jaxws-ri/docs/samples/src/main/samples/catalog/etc/sun-jaxws.xml", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "2375" }, { "name": "CSS", "bytes": "6017" }, { "name": "Groovy", "bytes": "2488" }, { "name": "HTML", "bytes": "89895" }, { "name": "Java", "bytes": "12221309" }, { "name": "Shell", "bytes": "32701" }, { "name": "XSLT", "bytes": "8914" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Windows.Forms; namespace MathConversions { public partial class Celsius_To_Fahrenheit : Form { public Celsius_To_Fahrenheit() { InitializeComponent(); } //.................... float Celsius; // Input temperature in degrees Celsius float Fahrenheit; // Converted temperature in degrees Fahrenheit //.................... private void button1_Click(object sender, EventArgs e) { Fahrenheit = Celsius * 9 / 5 + 32; // Celsius-to-Fahrenheit conversion formula. textBox2.Text = Celsius + " Degree Celsius = " + Fahrenheit.ToString("N6") + " Degree Fahrenheit"; // Output the answer in the desired format in textBox2. } //......................................................................................................... private void textBox1_TextChanged(object sender, EventArgs e) { float.TryParse(textBox1.Text, out Celsius); // Parse the text in textBox1 into the Celsius value (0 if the text is not a valid number). } //........................................................................................................... private void textBox2_TextChanged(object sender, EventArgs e) { // Output from button1_Click is written to this textbox. } //................................................................................................................ private void textBox1_KeyPress(object sender, KeyPressEventArgs e) // method that uses the KeyChar property { char ch = e.KeyChar; // The character that was typed if (!Char.IsDigit(ch) && ch != 8 && ch != '-' && ch != '.') // Allow only digits, backspace (8), '-' and '.' in textBox1. { e.Handled = true; // Mark the key press as handled so the character is not entered. } } } }
{ "content_hash": "3aa1e81a37ad43407f66cc393d6d98ee", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 219, "avg_line_length": 34.57575757575758, "alnum_prop": 0.485977212971078, "repo_name": "megatarre/Twitch-Stream-C-Project-Math-Conversions-", "id": "fd4274a5d15f8e27fe2bd5eeb7e86a101a9cab04", "size": "2284", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MathConversions/TemperatureConversion/Old Code/Celsius_To_Fahrenheit.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "67732" }, { "name": "XML", "bytes": "23240" } ], "symlink_target": "" }
ActiveRecord::Schema.define(version: 20160125013209) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" create_table "cancellations", force: :cascade do |t| t.string "email", null: false t.string "stripe_customer_id", null: false t.text "reason" t.datetime "created_at", null: false t.datetime "updated_at", null: false end create_table "entries", force: :cascade do |t| t.integer "user_id", null: false t.text "body", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.integer "import_id" t.date "date", null: false t.string "photo" end create_table "imports", force: :cascade do |t| t.integer "user_id", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "ohlife_export", null: false end create_table "subscriptions", force: :cascade do |t| t.integer "user_id", null: false t.string "stripe_customer_id", null: false t.datetime "created_at", null: false t.datetime "updated_at", null: false end add_index "subscriptions", ["stripe_customer_id"], name: "index_subscriptions_on_stripe_customer_id", unique: true, using: :btree add_index "subscriptions", ["user_id"], name: "index_subscriptions_on_user_id", unique: true, using: :btree create_table "users", force: :cascade do |t| t.string "email", default: "", null: false t.string "encrypted_password", default: "", null: false t.string "reset_password_token" t.datetime "reset_password_sent_at" t.datetime "remember_created_at" t.integer "sign_in_count", default: 0, null: false t.datetime "current_sign_in_at" t.datetime "last_sign_in_at" t.inet "current_sign_in_ip" t.inet "last_sign_in_ip" t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "time_zone", default: "Central Time (US & Canada)", null: false t.integer "prompt_delivery_hour", default: 2, null: false t.string "reply_token", null: false end add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree add_index "users", ["reply_token"], name: "index_users_on_reply_token", unique: true, using: :btree add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree end
{ "content_hash": "9799e3455c93476022001bb9d5cb9855", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 131, "avg_line_length": 45.285714285714285, "alnum_prop": 0.5688748685594112, "repo_name": "codecation/trailmix", "id": "5c4e9781086e348ea3cf0854054c8c70794c0696", "size": "3594", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/schema.rb", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "37719" }, { "name": "JavaScript", "bytes": "762" }, { "name": "Ruby", "bytes": "112012" }, { "name": "SCSS", "bytes": "2562" }, { "name": "Sass", "bytes": "171" }, { "name": "Shell", "bytes": "1764" } ], "symlink_target": "" }
/** * @license Highcharts JS v6.1.0 (2018-04-13) * * (c) 2009-2017 Torstein Honsi * * License: www.highcharts.com/license */ 'use strict'; (function (factory) { if (typeof module === 'object' && module.exports) { module.exports = factory; } else { factory(Highcharts); } }(function (Highcharts) { (function (H) { /** * (c) 2010-2017 Torstein Honsi * * License: www.highcharts.com/license */ var addEvent = H.addEvent, Axis = H.Axis, Chart = H.Chart, color = H.color, ColorAxis, each = H.each, extend = H.extend, isNumber = H.isNumber, Legend = H.Legend, LegendSymbolMixin = H.LegendSymbolMixin, noop = H.noop, merge = H.merge, pick = H.pick; // If ColorAxis already exists, we may be loading the heatmap module on top of // Highmaps. if (!H.ColorAxis) { /** * The ColorAxis object for inclusion in gradient legends */ ColorAxis = H.ColorAxis = function () { this.init.apply(this, arguments); }; extend(ColorAxis.prototype, Axis.prototype); extend(ColorAxis.prototype, { /** * A color axis for choropleth maps and heat maps. Visually, the color * axis will appear as a gradient or as separate items inside the * legend, depending on whether the axis is scalar or based on data * classes. * * For supported color formats, see the * [docs article about colors](http://www.highcharts.com/docs/chart-design-and-style/colors). * * A scalar color axis is represented by a gradient. The colors either * range between the [minColor](#colorAxis.minColor) and the * [maxColor](#colorAxis.maxColor), or for more fine grained control the * colors can be defined in [stops](#colorAxis.stops). Often times, the * color axis needs to be adjusted to get the right color spread for the * data. In addition to stops, consider using a logarithmic * [axis type](#colorAxis.type), or setting [min](#colorAxis.min) and * [max](#colorAxis.max) to avoid the colors being determined by * outliers. * * When [dataClasses](#colorAxis.dataClasses) are used, the ranges are * subdivided into separate classes like categories based on their * values. This can be used for ranges between two values, but also for * a true category. However, when your data is categorized, it may be as * convenient to add each category to a separate series. * * See [the Axis object](#Axis) for programmatic access to the axis. * @extends {xAxis} * @excluding allowDecimals,alternateGridColor,breaks,categories, * crosshair,dateTimeLabelFormats,lineWidth,linkedTo,maxZoom, * minRange,minTickInterval,offset,opposite,plotBands, * plotLines,showEmpty,title * @product highcharts highmaps * @optionparent colorAxis */ defaultColorAxisOptions: { /** * Whether to allow decimals on the color axis. * @type {Boolean} * @default true * @product highcharts highmaps * @apioption colorAxis.allowDecimals */ /** * Determines how to set each data class' color if no individual * color is set. The default value, `tween`, computes intermediate * colors between `minColor` and `maxColor`. The other possible * value, `category`, pulls colors from the global or chart specific * [colors](#colors) array. * * @validvalue ["tween", "category"] * @type {String} * @sample {highmaps} maps/coloraxis/dataclasscolor/ Category colors * @default tween * @product highcharts highmaps * @apioption colorAxis.dataClassColor */ /** * An array of data classes or ranges for the choropleth map. If * none given, the color axis is scalar and values are distributed * as a gradient between the minimum and maximum colors. 
* * @type {Array<Object>} * @sample {highmaps} maps/demo/data-class-ranges/ Multiple ranges * @sample {highmaps} maps/demo/data-class-two-ranges/ Two ranges * @product highcharts highmaps * @apioption colorAxis.dataClasses */ /** * The color of each data class. If not set, the color is pulled * from the global or chart-specific [colors](#colors) array. In * styled mode, this option is ignored. Instead, use colors defined * in CSS. * * @type {Color} * @sample {highmaps} maps/demo/data-class-two-ranges/ * Explicit colors * @product highcharts highmaps * @apioption colorAxis.dataClasses.color */ /** * The start of the value range that the data class represents, * relating to the point value. * * The range of each `dataClass` is closed in both ends, but can be * overridden by the next `dataClass`. * * @type {Number} * @product highcharts highmaps * @apioption colorAxis.dataClasses.from */ /** * The name of the data class as it appears in the legend. * If no name is given, it is automatically created based on the * `from` and `to` values. For full programmatic control, * [legend.labelFormatter](#legend.labelFormatter) can be used. * In the formatter, `this.from` and `this.to` can be accessed. * * @type {String} * @sample {highmaps} maps/coloraxis/dataclasses-name/ * Named data classes * @sample {highmaps} maps/coloraxis/dataclasses-labelformatter/ * Formatted data classes * @product highcharts highmaps * @apioption colorAxis.dataClasses.name */ /** * The end of the value range that the data class represents, * relating to the point value. * * The range of each `dataClass` is closed in both ends, but can be * overridden by the next `dataClass`. * * @type {Number} * @product highcharts highmaps * @apioption colorAxis.dataClasses.to */ /** * @ignore-option */ lineWidth: 0, /** * Padding of the min value relative to the length of the axis. A * padding of 0.05 will make a 100px axis 5px longer. * * @type {Number} * @product highcharts highmaps */ minPadding: 0, /** * The maximum value of the axis in terms of map point values. If * `null`, the max value is automatically calculated. If the * `endOnTick` option is true, the max value might be rounded up. * * @type {Number} * @sample {highmaps} maps/coloraxis/gridlines/ * Explicit min and max to reduce the effect of outliers * @product highcharts highmaps * @apioption colorAxis.max */ /** * The minimum value of the axis in terms of map point values. If * `null`, the min value is automatically calculated. If the * `startOnTick` option is true, the min value might be rounded * down. * * @type {Number} * @sample {highmaps} maps/coloraxis/gridlines/ * Explicit min and max to reduce the effect of outliers * @product highcharts highmaps * @apioption colorAxis.min */ /** * Padding of the max value relative to the length of the axis. A * padding of 0.05 will make a 100px axis 5px longer. * * @type {Number} * @product highcharts highmaps */ maxPadding: 0, /** * Color of the grid lines extending from the axis across the * gradient. * * @type {Color} * @sample {highmaps} maps/coloraxis/gridlines/ * Grid lines demonstrated * @default #e6e6e6 * @product highcharts highmaps * @apioption colorAxis.gridLineColor */ /** * The width of the grid lines extending from the axis across the * gradient of a scalar color axis. * * @type {Number} * @sample {highmaps} maps/coloraxis/gridlines/ * Grid lines demonstrated * @default 1 * @product highcharts highmaps */ gridLineWidth: 1, /** * The interval of the tick marks in axis units. 
When `null`, the * tick interval is computed to approximately follow the * `tickPixelInterval`. * * @type {Number} * @product highcharts highmaps * @apioption colorAxis.tickInterval */ /** * If [tickInterval](#colorAxis.tickInterval) is `null` this option * sets the approximate pixel interval of the tick marks. * * @type {Number} * @default 72 * @product highcharts highmaps */ tickPixelInterval: 72, /** * Whether to force the axis to start on a tick. Use this option * with the `maxPadding` option to control the axis start. * * @type {Boolean} * @default true * @product highcharts highmaps */ startOnTick: true, /** * Whether to force the axis to end on a tick. Use this option with * the [maxPadding](#colorAxis.maxPadding) option to control the * axis end. * * @type {Boolean} * @default true * @product highcharts highmaps */ endOnTick: true, /** @ignore */ offset: 0, /** * The triangular marker on a scalar color axis that points to the * value of the hovered area. To disable the marker, set * `marker: null`. * * @type {Object} * @sample {highmaps} maps/coloraxis/marker/ Black marker * @product highcharts highmaps */ marker: { /** * Animation for the marker as it moves between values. Set to * `false` to disable animation. Defaults to `{ duration: 50 }`. * * @type {Object|Boolean} * @product highcharts highmaps */ animation: { duration: 50 }, /** * @ignore */ width: 0.01, /** * The color of the marker. * * @type {Color} * @default #999999 * @product highcharts highmaps */ color: '#999999' }, /** * The axis labels show the number for each tick. * * For more live examples on label options, see [xAxis.labels in the * Highcharts API.](/highcharts#xAxis.labels) * * @type {Object} * @extends xAxis.labels * @product highcharts highmaps */ labels: { /** * How to handle overflowing labels on horizontal color axis. * Can be undefined or "justify". If "justify", labels will not * render outside the legend area. If there is room to move it, * it will be aligned to the edge, else it will be removed. * * @validvalue [null, "justify"] * @type {String} * @default justify * @product highcharts highmaps */ overflow: 'justify', rotation: 0 }, /** * The color to represent the minimum of the color axis. Unless * [dataClasses](#colorAxis.dataClasses) or * [stops](#colorAxis.stops) are set, the gradient starts at this * value. * * If dataClasses are set, the color is based on minColor and * maxColor unless a color is set for each data class, or the * [dataClassColor](#colorAxis.dataClassColor) is set. * * @type {Color} * @sample {highmaps} maps/coloraxis/mincolor-maxcolor/ * Min and max colors on scalar (gradient) axis * @sample {highmaps} maps/coloraxis/mincolor-maxcolor-dataclasses/ * On data classes * @default #e6ebf5 * @product highcharts highmaps */ minColor: '#e6ebf5', /** * The color to represent the maximum of the color axis. Unless * [dataClasses](#colorAxis.dataClasses) or * [stops](#colorAxis.stops) are set, the gradient ends at this * value. * * If dataClasses are set, the color is based on minColor and * maxColor unless a color is set for each data class, or the * [dataClassColor](#colorAxis.dataClassColor) is set. * * @type {Color} * @sample {highmaps} maps/coloraxis/mincolor-maxcolor/ * Min and max colors on scalar (gradient) axis * @sample {highmaps} maps/coloraxis/mincolor-maxcolor-dataclasses/ * On data classes * @default #003399 * @product highcharts highmaps */ maxColor: '#003399', /** * Color stops for the gradient of a scalar color axis. 
Use this in * cases where a linear gradient between a `minColor` and `maxColor` * is not sufficient. The stops is an array of tuples, where the * first item is a float between 0 and 1 assigning the relative * position in the gradient, and the second item is the color. * * @type {Array<Array>} * @sample {highmaps} maps/demo/heatmap/ * Heatmap with three color stops * @product highcharts highmaps * @apioption colorAxis.stops */ /** * The pixel length of the main tick marks on the color axis. */ tickLength: 5, /** * The type of interpolation to use for the color axis. Can be * `linear` or `logarithmic`. * * @validvalue ["linear", "logarithmic"] * @type {String} * @default linear * @product highcharts highmaps * @apioption colorAxis.type */ /** * Whether to reverse the axis so that the highest number is closest * to the origin. Defaults to `false` in a horizontal legend and * `true` in a vertical legend, where the smallest value starts on * top. * * @type {Boolean} * @product highcharts highmaps * @apioption colorAxis.reversed */ /** * Fires when the legend item belonging to the colorAxis is clicked. * One parameter, `event`, is passed to the function. * * @type {Function} * @product highcharts highmaps * @apioption colorAxis.events.legendItemClick */ /** * Whether to display the colorAxis in the legend. * * @type {Boolean} * @see [heatmap.showInLegend](#series.heatmap.showInLegend) * @default true * @since 4.2.7 * @product highcharts highmaps */ showInLegend: true }, // Properties to preserve after destroy, for Axis.update (#5881, #6025) keepProps: [ 'legendGroup', 'legendItemHeight', 'legendItemWidth', 'legendItem', 'legendSymbol' ].concat(Axis.prototype.keepProps), /** * Initialize the color axis */ init: function (chart, userOptions) { var horiz = chart.options.legend.layout !== 'vertical', options; this.coll = 'colorAxis'; // Build the options options = merge(this.defaultColorAxisOptions, { side: horiz ? 2 : 1, reversed: !horiz }, userOptions, { opposite: !horiz, showEmpty: false, title: null, visible: chart.options.legend.enabled }); Axis.prototype.init.call(this, chart, options); // Base init() pushes it to the xAxis array, now pop it again // chart[this.isXAxis ? 'xAxis' : 'yAxis'].pop(); // Prepare data classes if (userOptions.dataClasses) { this.initDataClasses(userOptions); } this.initStops(); // Override original axis properties this.horiz = horiz; this.zoomEnabled = false; // Add default values this.defaultLegendLength = 200; }, initDataClasses: function (userOptions) { var chart = this.chart, dataClasses, colorCounter = 0, colorCount = chart.options.chart.colorCount, options = this.options, len = userOptions.dataClasses.length; this.dataClasses = dataClasses = []; this.legendItems = []; each(userOptions.dataClasses, function (dataClass, i) { var colors; dataClass = merge(dataClass); dataClasses.push(dataClass); if (dataClass.color) { return; } if (options.dataClassColor === 'category') { colors = chart.options.colors; colorCount = colors.length; dataClass.color = colors[colorCounter]; dataClass.colorIndex = colorCounter; // increase and loop back to zero colorCounter++; if (colorCounter === colorCount) { colorCounter = 0; } } else { dataClass.color = color(options.minColor).tweenTo( color(options.maxColor), len < 2 ? 
0.5 : i / (len - 1) // #3219 ); } }); }, /** * Override so that ticks are not added in data class axes (#6914) */ setTickPositions: function () { if (!this.dataClasses) { return Axis.prototype.setTickPositions.call(this); } }, initStops: function () { this.stops = this.options.stops || [ [0, this.options.minColor], [1, this.options.maxColor] ]; each(this.stops, function (stop) { stop.color = color(stop[1]); }); }, /** * Extend the setOptions method to process extreme colors and color * stops. */ setOptions: function (userOptions) { Axis.prototype.setOptions.call(this, userOptions); this.options.crosshair = this.options.marker; }, setAxisSize: function () { var symbol = this.legendSymbol, chart = this.chart, legendOptions = chart.options.legend || {}, x, y, width, height; if (symbol) { this.left = x = symbol.attr('x'); this.top = y = symbol.attr('y'); this.width = width = symbol.attr('width'); this.height = height = symbol.attr('height'); this.right = chart.chartWidth - x - width; this.bottom = chart.chartHeight - y - height; this.len = this.horiz ? width : height; this.pos = this.horiz ? x : y; } else { // Fake length for disabled legend to avoid tick issues // and such (#5205) this.len = ( this.horiz ? legendOptions.symbolWidth : legendOptions.symbolHeight ) || this.defaultLegendLength; } }, normalizedValue: function (value) { if (this.isLog) { value = this.val2lin(value); } return 1 - ((this.max - value) / ((this.max - this.min) || 1)); }, /** * Translate from a value to a color */ toColor: function (value, point) { var pos, stops = this.stops, from, to, color, dataClasses = this.dataClasses, dataClass, i; if (dataClasses) { i = dataClasses.length; while (i--) { dataClass = dataClasses[i]; from = dataClass.from; to = dataClass.to; if ( (from === undefined || value >= from) && (to === undefined || value <= to) ) { color = dataClass.color; if (point) { point.dataClass = i; point.colorIndex = dataClass.colorIndex; } break; } } } else { pos = this.normalizedValue(value); i = stops.length; while (i--) { if (pos > stops[i][0]) { break; } } from = stops[i] || stops[i + 1]; to = stops[i + 1] || from; // The position within the gradient pos = 1 - (to[0] - pos) / ((to[0] - from[0]) || 1); color = from.color.tweenTo( to.color, pos ); } return color; }, /** * Override the getOffset method to add the whole axis groups inside * the legend. */ getOffset: function () { var group = this.legendGroup, sideOffset = this.chart.axisOffset[this.side]; if (group) { // Hook for the getOffset method to add groups to this parent // group this.axisParent = group; // Call the base Axis.prototype.getOffset.call(this); // First time only if (!this.added) { this.added = true; this.labelLeft = 0; this.labelRight = this.width; } // Reset it to avoid color axis reserving space this.chart.axisOffset[this.side] = sideOffset; } }, /** * Create the color gradient */ setLegendColor: function () { var grad, horiz = this.horiz, reversed = this.reversed, one = reversed ? 1 : 0, zero = reversed ? 0 : 1; grad = horiz ? [one, 0, zero, 0] : [0, zero, 0, one]; // #3190 this.legendColor = { linearGradient: { x1: grad[0], y1: grad[1], x2: grad[2], y2: grad[3] }, stops: this.stops }; }, /** * The color axis appears inside the legend and has its own legend * symbol */ drawLegendSymbol: function (legend, item) { var padding = legend.padding, legendOptions = legend.options, horiz = this.horiz, width = pick( legendOptions.symbolWidth, horiz ? this.defaultLegendLength : 12 ), height = pick( legendOptions.symbolHeight, horiz ? 
12 : this.defaultLegendLength ), labelPadding = pick( legendOptions.labelPadding, horiz ? 16 : 30 ), itemDistance = pick(legendOptions.itemDistance, 10); this.setLegendColor(); // Create the gradient item.legendSymbol = this.chart.renderer.rect( 0, legend.baseline - 11, width, height ).attr({ zIndex: 1 }).add(item.legendGroup); // Set how much space this legend item takes up this.legendItemWidth = width + padding + (horiz ? itemDistance : labelPadding); this.legendItemHeight = height + padding + (horiz ? labelPadding : 0); }, /** * Fool the legend */ setState: function (state) { each(this.series, function (series) { series.setState(state); }); }, visible: true, setVisible: noop, getSeriesExtremes: function () { var series = this.series, i = series.length; this.dataMin = Infinity; this.dataMax = -Infinity; while (i--) { if (series[i].valueMin !== undefined) { this.dataMin = Math.min(this.dataMin, series[i].valueMin); this.dataMax = Math.max(this.dataMax, series[i].valueMax); } } }, drawCrosshair: function (e, point) { var plotX = point && point.plotX, plotY = point && point.plotY, crossPos, axisPos = this.pos, axisLen = this.len; if (point) { crossPos = this.toPixels(point[point.series.colorKey]); if (crossPos < axisPos) { crossPos = axisPos - 2; } else if (crossPos > axisPos + axisLen) { crossPos = axisPos + axisLen + 2; } point.plotX = crossPos; point.plotY = this.len - crossPos; Axis.prototype.drawCrosshair.call(this, e, point); point.plotX = plotX; point.plotY = plotY; if ( this.cross && !this.cross.addedToColorAxis && this.legendGroup ) { this.cross .addClass('highcharts-coloraxis-marker') .add(this.legendGroup); this.cross.addedToColorAxis = true; this.cross.attr({ fill: this.crosshair.color }); } } }, getPlotLinePath: function (a, b, c, d, pos) { // crosshairs only return isNumber(pos) ? // pos can be 0 (#3969) ( this.horiz ? [ 'M', pos - 4, this.top - 6, 'L', pos + 4, this.top - 6, pos, this.top, 'Z' ] : [ 'M', this.left, pos, 'L', this.left - 6, pos + 6, this.left - 6, pos - 6, 'Z' ] ) : Axis.prototype.getPlotLinePath.call(this, a, b, c, d); }, update: function (newOptions, redraw) { var chart = this.chart, legend = chart.legend; each(this.series, function (series) { // Needed for Axis.update when choropleth colors change series.isDirtyData = true; }); // When updating data classes, destroy old items and make sure new // ones are created (#3207) if (newOptions.dataClasses && legend.allItems) { each(legend.allItems, function (item) { if (item.isDataClass && item.legendGroup) { item.legendGroup.destroy(); } }); chart.isDirtyLegend = true; } // Keep the options structure updated for export. Unlike xAxis and // yAxis, the colorAxis is not an array. (#3207) chart.options[this.coll] = merge(this.userOptions, newOptions); Axis.prototype.update.call(this, newOptions, redraw); if (this.legendItem) { this.setLegendColor(); legend.colorizeItem(this, true); } }, /** * Extend basic axis remove by also removing the legend item. 
*/ remove: function () { if (this.legendItem) { this.chart.legend.destroyItem(this); } Axis.prototype.remove.call(this); }, /** * Get the legend item symbols for data classes */ getDataClassLegendSymbols: function () { var axis = this, chart = this.chart, legendItems = this.legendItems, legendOptions = chart.options.legend, valueDecimals = legendOptions.valueDecimals, valueSuffix = legendOptions.valueSuffix || '', name; if (!legendItems.length) { each(this.dataClasses, function (dataClass, i) { var vis = true, from = dataClass.from, to = dataClass.to; // Assemble the default name. This can be overridden // by legend.options.labelFormatter name = ''; if (from === undefined) { name = '< '; } else if (to === undefined) { name = '> '; } if (from !== undefined) { name += H.numberFormat(from, valueDecimals) + valueSuffix; } if (from !== undefined && to !== undefined) { name += ' - '; } if (to !== undefined) { name += H.numberFormat(to, valueDecimals) + valueSuffix; } // Add a mock object to the legend items legendItems.push(extend({ chart: chart, name: name, options: {}, drawLegendSymbol: LegendSymbolMixin.drawRectangle, visible: true, setState: noop, isDataClass: true, setVisible: function () { vis = this.visible = !vis; each(axis.series, function (series) { each(series.points, function (point) { if (point.dataClass === i) { point.setVisible(vis); } }); }); chart.legend.colorizeItem(this, vis); } }, dataClass)); }); } return legendItems; }, name: '' // Prevents 'undefined' in legend in IE8 }); /** * Handle animation of the color attributes directly */ each(['fill', 'stroke'], function (prop) { H.Fx.prototype[prop + 'Setter'] = function () { this.elem.attr( prop, color(this.start).tweenTo( color(this.end), this.pos ), null, true ); }; }); /** * Extend the chart getAxes method to also get the color axis */ addEvent(Chart, 'afterGetAxes', function () { var options = this.options, colorAxisOptions = options.colorAxis; this.colorAxis = []; if (colorAxisOptions) { new ColorAxis(this, colorAxisOptions); // eslint-disable-line no-new } }); /** * Add the color axis. This also removes the axis' own series to prevent * them from showing up individually. 
*/ addEvent(Legend, 'afterGetAllItems', function (e) { var colorAxisItems = [], colorAxis = this.chart.colorAxis[0]; if (colorAxis && colorAxis.options) { if (colorAxis.options.showInLegend) { // Data classes if (colorAxis.options.dataClasses) { colorAxisItems = colorAxis.getDataClassLegendSymbols(); // Gradient legend } else { // Add this axis on top colorAxisItems.push(colorAxis); } } // Don't add the color axis' series each(colorAxis.series, function (series) { H.erase(e.allItems, series); }); } while (colorAxisItems.length) { e.allItems.unshift(colorAxisItems.pop()); } }); addEvent(Legend, 'afterColorizeItem', function (e) { if (e.visible && e.item.legendColor) { e.item.legendSymbol.attr({ fill: e.item.legendColor }); } }); // Updates in the legend need to be reflected in the color axis (6888) addEvent(Legend, 'afterUpdate', function () { if (this.chart.colorAxis[0]) { this.chart.colorAxis[0].update({}, arguments[2]); } }); } }(Highcharts)); (function (H) { /** * (c) 2010-2017 Torstein Honsi * * License: www.highcharts.com/license */ var defined = H.defined, each = H.each, noop = H.noop, seriesTypes = H.seriesTypes; /** * Mixin for maps and heatmaps */ H.colorPointMixin = { /** * Color points have a value option that determines whether or not it is * a null point */ isValid: function () { // undefined is allowed return ( this.value !== null && this.value !== Infinity && this.value !== -Infinity ); }, /** * Set the visibility of a single point */ setVisible: function (vis) { var point = this, method = vis ? 'show' : 'hide'; // Show and hide associated elements each(['graphic', 'dataLabel'], function (key) { if (point[key]) { point[key][method](); } }); }, setState: function (state) { H.Point.prototype.setState.call(this, state); if (this.graphic) { this.graphic.attr({ zIndex: state === 'hover' ? 1 : 0 }); } } }; H.colorSeriesMixin = { pointArrayMap: ['value'], axisTypes: ['xAxis', 'yAxis', 'colorAxis'], optionalAxis: 'colorAxis', trackerGroups: ['group', 'markerGroup', 'dataLabelsGroup'], getSymbol: noop, parallelArrays: ['x', 'y', 'value'], colorKey: 'value', pointAttribs: seriesTypes.column.prototype.pointAttribs, /** * In choropleth maps, the color is a result of the value, so this needs * translation too */ translateColors: function () { var series = this, nullColor = this.options.nullColor, colorAxis = this.colorAxis, colorKey = this.colorKey; each(this.data, function (point) { var value = point[colorKey], color; color = point.options.color || ( point.isNull ? nullColor : (colorAxis && value !== undefined) ? colorAxis.toColor(value, point) : point.color || series.color ); if (color) { point.color = color; } }); }, /** * Get the color attibutes to apply on the graphic */ colorAttribs: function (point) { var ret = {}; if (defined(point.color)) { ret[this.colorProp || 'fill'] = point.color; } return ret; } }; }(Highcharts)); (function (H) { /** * (c) 2010-2017 Torstein Honsi * * License: www.highcharts.com/license */ var colorPointMixin = H.colorPointMixin, colorSeriesMixin = H.colorSeriesMixin, each = H.each, LegendSymbolMixin = H.LegendSymbolMixin, merge = H.merge, noop = H.noop, pick = H.pick, Series = H.Series, seriesType = H.seriesType, seriesTypes = H.seriesTypes; /** * A heatmap is a graphical representation of data where the individual values * contained in a matrix are represented as colors. 
* * @sample highcharts/demo/heatmap/ * Simple heatmap * @sample highcharts/demo/heatmap-canvas/ * Heavy heatmap * @extends {plotOptions.scatter} * @excluding animationLimit,connectEnds,connectNulls,dashStyle, * findNearestPointBy,getExtremesFromAll,linecap,lineWidth,marker, * pointInterval,pointIntervalUnit,pointRange,pointStart,shadow, * softThreshold,stacking,step,threshold * @product highcharts highmaps * @optionparent plotOptions.heatmap */ seriesType('heatmap', 'scatter', { /** * Animation is disabled by default on the heatmap series. * * @type {Boolean|Object} */ animation: false, /** * The border width for each heat map item. */ borderWidth: 0, /** * Padding between the points in the heatmap. * * @type {Number} * @default 0 * @since 6.0 * @apioption plotOptions.heatmap.pointPadding */ /** * The main color of the series. In heat maps this color is rarely used, * as we mostly use the color to denote the value of each point. Unless * options are set in the [colorAxis](#colorAxis), the default value * is pulled from the [options.colors](#colors) array. * * @type {Color} * @default null * @since 4.0 * @product highcharts * @apioption plotOptions.heatmap.color */ /** * The column size - how many X axis units each column in the heatmap * should span. * * @type {Number} * @sample {highcharts} maps/demo/heatmap/ One day * @sample {highmaps} maps/demo/heatmap/ One day * @default 1 * @since 4.0 * @product highcharts highmaps * @apioption plotOptions.heatmap.colsize */ /** * The row size - how many Y axis units each heatmap row should span. * * @type {Number} * @sample {highcharts} maps/demo/heatmap/ 1 by default * @sample {highmaps} maps/demo/heatmap/ 1 by default * @default 1 * @since 4.0 * @product highcharts highmaps * @apioption plotOptions.heatmap.rowsize */ /** * The color applied to null points. In styled mode, a general CSS class is * applied instead. * * @type {Color} */ nullColor: '#f7f7f7', dataLabels: { formatter: function () { // #2945 return this.point.value; }, inside: true, verticalAlign: 'middle', crop: false, overflow: false, padding: 0 // #3837 }, /** * @ignore */ marker: null, /** @ignore */ pointRange: null, // dynamically set to colsize by default tooltip: { pointFormat: '{point.x}, {point.y}: {point.value}<br/>' }, states: { hover: { /** * @ignore */ halo: false, // #3406, halo is disabled on heatmaps by default /** * How much to brighten the point on interaction. Requires the main * color to be defined in hex or rgb(a) format. * * In styled mode, the hover brightening is by default replaced * with a fill-opacity set in the `.highcharts-point:hover` rule. * * @type {Number} * @product highcharts highmaps */ brightness: 0.2 } } }, merge(colorSeriesMixin, { pointArrayMap: ['y', 'value'], hasPointSpecificOptions: true, getExtremesFromAll: true, directTouch: true, /** * Override the init method to add point ranges on both axes. 
*/ init: function () { var options; seriesTypes.scatter.prototype.init.apply(this, arguments); options = this.options; // #3758, prevent resetting in setData options.pointRange = pick(options.pointRange, options.colsize || 1); this.yAxis.axisPointRange = options.rowsize || 1; // general point range }, translate: function () { var series = this, options = series.options, xAxis = series.xAxis, yAxis = series.yAxis, seriesPointPadding = options.pointPadding || 0, between = function (x, a, b) { return Math.min(Math.max(a, x), b); }; series.generatePoints(); each(series.points, function (point) { var xPad = (options.colsize || 1) / 2, yPad = (options.rowsize || 1) / 2, x1 = between( Math.round( xAxis.len - xAxis.translate(point.x - xPad, 0, 1, 0, 1) ), -xAxis.len, 2 * xAxis.len ), x2 = between( Math.round( xAxis.len - xAxis.translate(point.x + xPad, 0, 1, 0, 1) ), -xAxis.len, 2 * xAxis.len ), y1 = between( Math.round(yAxis.translate(point.y - yPad, 0, 1, 0, 1)), -yAxis.len, 2 * yAxis.len ), y2 = between( Math.round(yAxis.translate(point.y + yPad, 0, 1, 0, 1)), -yAxis.len, 2 * yAxis.len ), pointPadding = pick(point.pointPadding, seriesPointPadding); // Set plotX and plotY for use in K-D-Tree and more point.plotX = point.clientX = (x1 + x2) / 2; point.plotY = (y1 + y2) / 2; point.shapeType = 'rect'; point.shapeArgs = { x: Math.min(x1, x2) + pointPadding, y: Math.min(y1, y2) + pointPadding, width: Math.abs(x2 - x1) - pointPadding * 2, height: Math.abs(y2 - y1) - pointPadding * 2 }; }); series.translateColors(); }, drawPoints: function () { seriesTypes.column.prototype.drawPoints.call(this); each(this.points, function (point) { point.graphic.attr(this.colorAttribs(point)); }, this); }, animate: noop, getBox: noop, drawLegendSymbol: LegendSymbolMixin.drawRectangle, alignDataLabel: seriesTypes.column.prototype.alignDataLabel, getExtremes: function () { // Get the extremes from the value data Series.prototype.getExtremes.call(this, this.valueData); this.valueMin = this.dataMin; this.valueMax = this.dataMax; // Get the extremes from the y data Series.prototype.getExtremes.call(this); } }), H.extend({ haloPath: function (size) { if (!size) { return []; } var rect = this.shapeArgs; return [ 'M', rect.x - size, rect.y - size, 'L', rect.x - size, rect.y + rect.height + size, rect.x + rect.width + size, rect.y + rect.height + size, rect.x + rect.width + size, rect.y - size, 'Z' ]; } }, colorPointMixin)); /** * A `heatmap` series. If the [type](#series.heatmap.type) option is * not specified, it is inherited from [chart.type](#chart.type). * * @type {Object} * @extends series,plotOptions.heatmap * @excluding dataParser,dataURL,marker,pointRange,stack * @product highcharts highmaps * @apioption series.heatmap */ /** * An array of data points for the series. For the `heatmap` series * type, points can be given in the following ways: * * 1. An array of arrays with 3 or 2 values. In this case, the values * correspond to `x,y,value`. If the first value is a string, it is * applied as the name of the point, and the `x` value is inferred. * The `x` value can also be omitted, in which case the inner arrays * should be of length 2\. Then the `x` value is automatically calculated, * either starting at 0 and incremented by 1, or from `pointStart` * and `pointInterval` given in the series options. * * ```js * data: [ * [0, 9, 7], * [1, 10, 4], * [2, 6, 3] * ] * ``` * * 2. An array of objects with named values. The objects are point * configuration objects as seen below. 
If the total number of data * points exceeds the series' [turboThreshold](#series.heatmap.turboThreshold), * this option is not available. * * ```js * data: [{ * x: 1, * y: 3, * value: 10, * name: "Point2", * color: "#00FF00" * }, { * x: 1, * y: 7, * value: 10, * name: "Point1", * color: "#FF00FF" * }] * ``` * * @type {Array<Object|Array>} * @extends series.line.data * @excluding marker * @sample {highcharts} highcharts/chart/reflow-true/ * Numerical values * @sample {highcharts} highcharts/series/data-array-of-arrays/ * Arrays of numeric x and y * @sample {highcharts} highcharts/series/data-array-of-arrays-datetime/ * Arrays of datetime x and y * @sample {highcharts} highcharts/series/data-array-of-name-value/ * Arrays of point.name and y * @sample {highcharts} highcharts/series/data-array-of-objects/ * Config objects * @product highcharts highmaps * @apioption series.heatmap.data */ /** * The color of the point. In heat maps the point color is rarely set * explicitly, as we use the color to denote the `value`. Options for * this are set in the [colorAxis](#colorAxis) configuration. * * @type {Color} * @product highcharts highmaps * @apioption series.heatmap.data.color */ /** * The value of the point, resulting in a color controled by options * as set in the [colorAxis](#colorAxis) configuration. * * @type {Number} * @product highcharts highmaps * @apioption series.heatmap.data.value */ /** * The x value of the point. For datetime axes, * the X value is the timestamp in milliseconds since 1970. * * @type {Number} * @product highcharts highmaps * @apioption series.heatmap.data.x */ /** * The y value of the point. * * @type {Number} * @product highcharts highmaps * @apioption series.heatmap.data.y */ /** * Point padding for a single point. * * @type {Number} * @sample maps/plotoptions/tilemap-pointpadding Point padding on tiles * @apioption series.heatmap.data.pointPadding */ }(Highcharts)); }));
{ "content_hash": "3a5b3c53a2fde7ece6457a7a59fdbaee", "timestamp": "", "source": "github", "line_count": 1518, "max_line_length": 103, "avg_line_length": 34.55270092226614, "alnum_prop": 0.4633276772606814, "repo_name": "cdnjs/cdnjs", "id": "48ac61046da7f065ef535d909f0c297031f97656", "size": "52451", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "ajax/libs/highcharts/6.1.0/modules/heatmap.src.js", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
/* tslint:disable:no-unused-variable */ import { TestBed } from '@angular/core/testing'; import { GeoInputComponent } from './geo-input.component'; import { CUSTOM_ELEMENTS_SCHEMA } from '@angular/core'; import { TranslatePipe } from '@ngx-translate/core'; import { TranslateService } from '@ngx-translate/core'; import { FormsModule } from '@angular/forms'; describe('Component: GeoInput', () => { beforeEach(() => { TestBed.configureTestingModule({ imports: [FormsModule], declarations: [GeoInputComponent, TranslatePipe], schemas: [CUSTOM_ELEMENTS_SCHEMA], providers: [ {provide: TranslateService, useValue: {}}, ] }); }); it('should create an instance', () => { const fixture = TestBed.createComponent(GeoInputComponent); const app = fixture.debugElement.componentInstance; expect(app) .toBeTruthy(); }); });
{ "content_hash": "4d1833a247909ccb91b8bd4e658babe9", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 63, "avg_line_length": 33.48148148148148, "alnum_prop": 0.6548672566371682, "repo_name": "aitarget/aitarget-components", "id": "ad06b796b8a02e5d73e122f80aec057fb6fb3b65", "size": "904", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/lib/components/targeting/targeting-form/geo/geo-input/geo-input.component.spec.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "38874" }, { "name": "HTML", "bytes": "39663" }, { "name": "JavaScript", "bytes": "6581" }, { "name": "TypeScript", "bytes": "407434" } ], "symlink_target": "" }
package hudson.tasks; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import hudson.Launcher; import hudson.Util; import hudson.XmlFile; import hudson.matrix.Axis; import hudson.matrix.AxisList; import hudson.matrix.MatrixProject; import hudson.model.*; import hudson.util.RunList; import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.*; import hudson.util.StreamTaskListener; import jenkins.model.Jenkins; import org.hamcrest.Matchers; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.Bug; import org.jvnet.hudson.test.JenkinsRule; import org.jvnet.hudson.test.RandomlyFails; import org.jvnet.hudson.test.recipes.LocalData; /** * * @author dty */ @SuppressWarnings("rawtypes") public class FingerprinterTest { private static final String[] singleContents = { "abcdef" }; private static final String[] singleFiles = { "test.txt" }; private static final String[] singleContents2 = { "ghijkl" }; private static final String[] singleFiles2 = { "test2.txt" }; private static final String[] doubleContents = { "abcdef", "ghijkl" }; private static final String[] doubleFiles = { "test.txt", "test2.txt" }; private static final String renamedProject1 = "renamed project 1"; private static final String renamedProject2 = "renamed project 2"; @Rule public JenkinsRule j = new JenkinsRule(); @BeforeClass public static void setUp() throws Exception { Fingerprinter.enableFingerprintsInDependencyGraph = true; } @Test public void fingerprintDependencies() throws Exception { FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get()); j.jenkins.rebuildDependencyGraph(); List<AbstractProject> downstreamProjects = upstream.getDownstreamProjects(); List<AbstractProject> upstreamProjects = downstream.getUpstreamProjects(); assertEquals(1, downstreamProjects.size()); assertEquals(1, upstreamProjects.size()); assertTrue(upstreamProjects.contains(upstream)); assertTrue(downstreamProjects.contains(downstream)); } private static class FingerprintAddingBuilder extends Builder { @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { build.addAction(new Fingerprinter.FingerprintAction(build, ImmutableMap.of(singleFiles2[0], "fakefingerprint"))); return true; } } @Test public void presentFingerprintActionIsReused() throws Exception { FreeStyleProject project = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); project.getBuildersList().add(new FingerprintAddingBuilder()); FreeStyleBuild build = j.buildAndAssertSuccess(project); assertThat(build.getActions(Fingerprinter.FingerprintAction.class), hasSize(1)); Fingerprinter.FingerprintAction action = build.getAction(Fingerprinter.FingerprintAction.class); assertEquals(action.getRecords().keySet(), ImmutableSet.of(singleFiles2[0], singleFiles[0])); } @Test public void multipleUpstreamDependencies() throws Exception { FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject upstream2 = 
createFreeStyleProjectWithFingerprints(singleContents2, singleFiles2); FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(doubleContents, doubleFiles); j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(upstream2.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get()); j.jenkins.rebuildDependencyGraph(); List<AbstractProject> downstreamProjects = upstream.getDownstreamProjects(); List<AbstractProject> downstreamProjects2 = upstream2.getDownstreamProjects(); List<AbstractProject> upstreamProjects = downstream.getUpstreamProjects(); assertEquals(1, downstreamProjects.size()); assertEquals(1, downstreamProjects2.size()); assertEquals(2, upstreamProjects.size()); assertTrue(upstreamProjects.contains(upstream)); assertTrue(upstreamProjects.contains(upstream2)); assertTrue(downstreamProjects.contains(downstream)); } @Test public void multipleDownstreamDependencies() throws Exception { FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(doubleContents, doubleFiles); FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject downstream2 = createFreeStyleProjectWithFingerprints(singleContents2, singleFiles2); j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(downstream2.scheduleBuild2(0).get()); j.jenkins.rebuildDependencyGraph(); List<AbstractProject> downstreamProjects = upstream.getDownstreamProjects(); List<AbstractProject> upstreamProjects = downstream.getUpstreamProjects(); List<AbstractProject> upstreamProjects2 = downstream2.getUpstreamProjects(); assertEquals(2, downstreamProjects.size()); assertEquals(1, upstreamProjects.size()); assertEquals(1, upstreamProjects2.size()); assertTrue(upstreamProjects.contains(upstream)); assertTrue(upstreamProjects2.contains(upstream)); assertTrue(downstreamProjects.contains(downstream)); assertTrue(downstreamProjects.contains(downstream2)); } @Test public void dependencyExclusion() throws Exception { FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleBuild upstreamBuild = j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get()); upstreamBuild.delete(); Jenkins.getInstance().rebuildDependencyGraph(); List<AbstractProject> upstreamProjects = downstream.getUpstreamProjects(); List<AbstractProject> downstreamProjects = upstream.getDownstreamProjects(); assertEquals(0, upstreamProjects.size()); assertEquals(0, downstreamProjects.size()); } @Test public void circularDependency() throws Exception { FreeStyleProject p = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); j.assertBuildStatusSuccess(p.scheduleBuild2(0).get()); j.assertBuildStatusSuccess(p.scheduleBuild2(0).get()); Jenkins.getInstance().rebuildDependencyGraph(); List<AbstractProject> upstreamProjects = p.getUpstreamProjects(); List<AbstractProject> downstreamProjects = p.getDownstreamProjects(); assertEquals(0, upstreamProjects.size()); assertEquals(0, downstreamProjects.size()); } @Test public void matrixDependency() throws Exception { MatrixProject matrixProject = j.createMatrixProject(); matrixProject.setAxes(new AxisList(new Axis("foo", "a", "b"))); FreeStyleProject freestyleProject = 
createFreeStyleProjectWithFingerprints(singleContents, singleFiles); addFingerprinterToProject(matrixProject, singleContents, singleFiles); j.jenkins.rebuildDependencyGraph(); j.buildAndAssertSuccess(matrixProject); j.buildAndAssertSuccess(freestyleProject); j.waitUntilNoActivity(); j.jenkins.rebuildDependencyGraph(); RunList<FreeStyleBuild> builds = freestyleProject.getBuilds(); assertEquals("There should only be one FreestyleBuild", 1, builds.size()); FreeStyleBuild build = builds.iterator().next(); assertEquals(Result.SUCCESS, build.getResult()); List<AbstractProject> downstream = j.jenkins.getDependencyGraph().getDownstream(matrixProject); assertTrue(downstream.contains(freestyleProject)); List<AbstractProject> upstream = j.jenkins.getDependencyGraph().getUpstream(freestyleProject); assertTrue(upstream.contains(matrixProject)); } @Test public void projectRename() throws Exception { FreeStyleProject upstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject downstream = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleBuild upstreamBuild = j.assertBuildStatusSuccess(upstream.scheduleBuild2(0).get()); FreeStyleBuild downstreamBuild = j.assertBuildStatusSuccess(downstream.scheduleBuild2(0).get()); String oldUpstreamName = upstream.getName(); String oldDownstreamName = downstream.getName(); // Verify that owner entry in fingerprint record is changed // after source project is renamed upstream.renameTo(renamedProject1); Fingerprinter.FingerprintAction action = upstreamBuild.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); Collection<Fingerprint> fingerprints = action.getFingerprints().values(); for (Fingerprint f: fingerprints) { assertTrue(f.getOriginal().is(upstream)); assertTrue(f.getOriginal().getName().equals(renamedProject1)); assertFalse(f.getOriginal().getName().equals(oldUpstreamName)); } action = downstreamBuild.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); fingerprints = action.getFingerprints().values(); for (Fingerprint f: fingerprints) { assertTrue(f.getOriginal().is(upstream)); assertTrue(f.getOriginal().getName().equals(renamedProject1)); assertFalse(f.getOriginal().getName().equals(oldUpstreamName)); } // Verify that usage entry in fingerprint record is changed after // sink project is renamed downstream.renameTo(renamedProject2); upstream.renameTo(renamedProject1); action = upstreamBuild.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); fingerprints = action.getFingerprints().values(); for (Fingerprint f: fingerprints) { List<String> jobs = f.getJobs(); assertTrue(jobs.contains(renamedProject2)); assertFalse(jobs.contains(oldDownstreamName)); } action = downstreamBuild.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); fingerprints = action.getFingerprints().values(); for (Fingerprint f: fingerprints) { List<String> jobs = f.getJobs(); assertTrue(jobs.contains(renamedProject2)); assertFalse(jobs.contains(oldDownstreamName)); } } @Bug(17125) @LocalData @Test public void actionSerialization() throws Exception { FreeStyleProject job = j.jenkins.getItemByFullName("j", FreeStyleProject.class); assertNotNull(job); FreeStyleBuild build = job.getBuildByNumber(2); assertNotNull(build); Fingerprinter.FingerprintAction action = build.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); assertEquals(build, action.getBuild()); assertEquals("{a=2d5fac981a2e865baf0e15db655c7d63}", 
action.getRecords().toString()); j.assertBuildStatusSuccess(job.scheduleBuild2(0)); job._getRuns().purgeCache(); // force build records to be reloaded build = job.getBuildByNumber(3); assertNotNull(build); System.out.println(new XmlFile(new File(build.getRootDir(), "build.xml")).asString()); action = build.getAction(Fingerprinter.FingerprintAction.class); assertNotNull(action); assertEquals(build, action.getBuild()); assertEquals("{a=f31efcf9afe30617d6c46b919e702822}", action.getRecords().toString()); } @SuppressWarnings("unchecked") @RandomlyFails("for p3.upstreamProjects expected:<[hudson.model.FreeStyleProject@590e5b8[test0]]> but was:<[]>") @Bug(18417) @Test public void fingerprintCleanup() throws Exception { // file names shouldn't matter FreeStyleProject p1 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); FreeStyleProject p2 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles2); FreeStyleProject p3 = createFreeStyleProjectWithFingerprints(singleContents, singleFiles); j.assertBuildStatusSuccess(p1.scheduleBuild2(0)); j.assertBuildStatusSuccess(p2.scheduleBuild2(0)); j.assertBuildStatusSuccess(p3.scheduleBuild2(0)); Fingerprint f = j.jenkins._getFingerprint(Util.getDigestOf(singleContents[0]+"\n")); assertEquals(3,f.getUsages().size()); j.jenkins.rebuildDependencyGraph(); assertEquals(Arrays.asList(p1), p2.getUpstreamProjects()); assertEquals(Arrays.asList(p1), p3.getUpstreamProjects()); assertEquals(new HashSet(Arrays.asList(p2,p3)), new HashSet(p1.getDownstreamProjects())); // discard the p3 records p3.delete(); new FingerprintCleanupThread().execute(StreamTaskListener.fromStdout()); j.jenkins.rebuildDependencyGraph(); // records for p3 should have been deleted now assertEquals(2,f.getUsages().size()); assertEquals(Arrays.asList(p1), p2.getUpstreamProjects()); assertEquals(Arrays.asList(p2), p1.getDownstreamProjects()); // do a new build in p2 #2 that points to a separate fingerprints p2.getBuildersList().clear(); p2.getPublishersList().clear(); addFingerprinterToProject(p2,singleContents2,singleFiles2); j.assertBuildStatusSuccess(p2.scheduleBuild2(0)); // another garbage collection that gets rid of p2 records from the fingerprint p2.getBuildByNumber(1).delete(); new FingerprintCleanupThread().execute(StreamTaskListener.fromStdout()); assertEquals(1,f.getUsages().size()); } private FreeStyleProject createFreeStyleProjectWithFingerprints(String[] contents, String[] files) throws IOException, Exception { FreeStyleProject project = j.createFreeStyleProject(); addFingerprinterToProject(project, contents, files); return project; } private void addFingerprinterToProject(AbstractProject<?, ?> project, String[] contents, String[] files) throws Exception { StringBuilder targets = new StringBuilder(); for (int i = 0; i < contents.length; i++) { if (project instanceof MatrixProject) { ((MatrixProject)project).getBuildersList().add(new Shell("echo " + contents[i] + " > " + files[i])); } else { ((FreeStyleProject)project).getBuildersList().add(new Shell("echo " + contents[i] + " > " + files[i])); } targets.append(files[i]).append(','); } project.getPublishersList().add(new Fingerprinter(targets.toString(), false)); } }
{ "content_hash": "3d92422e746b65ae7d1b5dac176bae4b", "timestamp": "", "source": "github", "line_count": 366, "max_line_length": 143, "avg_line_length": 43.57377049180328, "alnum_prop": 0.708051166290444, "repo_name": "deadmoose/jenkins", "id": "b270c1ca4e4d78047303e3100921179dd9dce432", "size": "17103", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/src/test/java/hudson/tasks/FingerprinterTest.java", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "2091" }, { "name": "CSS", "bytes": "220816" }, { "name": "GAP", "bytes": "6283" }, { "name": "Groovy", "bytes": "158931" }, { "name": "Java", "bytes": "7088302" }, { "name": "JavaScript", "bytes": "164910" }, { "name": "Perl", "bytes": "13788" }, { "name": "Python", "bytes": "2196" }, { "name": "Ruby", "bytes": "21112" }, { "name": "Shell", "bytes": "20415" } ], "symlink_target": "" }
<?php if (!CM_Db_Db::existsTable('denkmal_model_userinvite')) { CM_Db_Db::exec(" CREATE TABLE IF NOT EXISTS `denkmal_model_userinvite` ( `id` int(11) unsigned NOT NULL AUTO_INCREMENT, `inviter` int(11) unsigned NOT NULL, `key` varchar(100) NOT NULL, `email` varchar(100) DEFAULT NULL, `expires` int(11) unsigned DEFAULT NULL, PRIMARY KEY (`id`), KEY `expires` (`expires`), KEY `key` (`key`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; "); }
{ "content_hash": "33557bf8831f74643df44b61f307c04c", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 63, "avg_line_length": 34.8125, "alnum_prop": 0.5727109515260324, "repo_name": "fvovan/denkmal.org", "id": "ea2b2deb6ace5acbdd3ce29e8b0cbb748b19a803", "size": "557", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/Denkmal/resources/db/update/16.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "44099" }, { "name": "HTML", "bytes": "269845" }, { "name": "JavaScript", "bytes": "50961" }, { "name": "PHP", "bytes": "364292" }, { "name": "Puppet", "bytes": "356" }, { "name": "Ruby", "bytes": "227" }, { "name": "Shell", "bytes": "422" }, { "name": "Smarty", "bytes": "43866" } ], "symlink_target": "" }
Color3D GetAchievementColor(eAchievement achievement) { static Color3D medalColors[kAchievement_Gold-kAchievement_Bronze+1]; medalColors[0] = Color3DMake(0.8f, 0.45f, 0.3f, 1.0f); medalColors[1] = Color3DMake(0.9f, 0.9f, 1.0f, 1.0f); medalColors[2] = Color3DMake(1.0f, 0.7f, 0.05f, 1.0f); return medalColors[achievement-kAchievement_Bronze]; } NSString* GetWebPath(NSString* filename) { NSString* path = @"http://www.blitshake.com/fb_images/"; return [path stringByAppendingString:filename]; } NSString* GetGameTimeFormat(int timeValue, bool showEntireClock) { int minutes = (timeValue / 60); int seconds = (timeValue % 60); NSString* minutesString = @""; if (minutes > 0 || showEntireClock) { minutesString = [NSString stringWithFormat:@"%d:", minutes]; } NSString* secondsString = nil; if (seconds < 10 && (minutes > 0 || showEntireClock)) { secondsString = [NSString stringWithFormat:@"0%d", seconds]; } else { secondsString = [NSString stringWithFormat:@"%d", seconds]; } return [NSString stringWithFormat:@"%@%@", minutesString, secondsString]; } extern Boolean IsGameShadowsEnabled() { return false; // return !IsDeviceIPad(); } extern Boolean IsGameMotionBlurEnabled() { return false; // return !IsDeviceIPad(); }
{ "content_hash": "cd4ffd5e7dfe8436249239ff13e8c85d", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 77, "avg_line_length": 30.886363636363637, "alnum_prop": 0.6622516556291391, "repo_name": "srr313/PinataSmash-Open", "id": "f83baaa7412570e61434a80e699564a67952e9ac", "size": "1548", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GameCommon.m", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "23440" }, { "name": "C#", "bytes": "140953" }, { "name": "C++", "bytes": "706" }, { "name": "CSS", "bytes": "48777" }, { "name": "Objective-C", "bytes": "1327901" }, { "name": "Perl", "bytes": "6644" }, { "name": "Racket", "bytes": "10388" }, { "name": "Shell", "bytes": "181" } ], "symlink_target": "" }
<body> <div id="container"> <div class="fixed-alert"> <div class="row"> <div id="alert" class="col-lg-4 col-lg-offset-4"> </div> </div> </div> <div class="row space"></div> <div class="row space"></div> <div class="row no-margin"> <div class="col-lg-4 col-lg-offset-4"> <div class="panel panel-default"> <div class="panel-heading"> <h3 class="panel-title">Login<span class="ui-icon ui-icon-key pull-right"></span></h3> </div> <div class="panel-body"> <form method="POST" action="<?php echo site_url('login/login_result'); ?>"> <div class="form-group"> <label>Nome utente</label> <input name="username" type="text" class="form-control" placeholder="Nome utente"> </div> <div class="form-group"> <label>Password</label> <input name="password" type="password" class="form-control" placeholder="Password"> </div> <div class="row"> <div class="col-lg-12"> <button type="submit" class="btn btn-default pull-right">Entra</button> </div> </div> </form> </div> </div> <div class="row space"></div> <div class="row space"></div> <?php if($retry == 1){ ?> <script type="text/javascript"> my_alert('Nome utente o password non corretti', 1); </script> <?php } ?>
{ "content_hash": "bb539a9db2b072815afd5c646efa01f2", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 92, "avg_line_length": 32.166666666666664, "alnum_prop": 0.5684678016284234, "repo_name": "marc0l92/ArciconfraternitaCarmine_webapp", "id": "e860c5f62334ca96ee7aa728dd48176a5803aa4f", "size": "1351", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "arciconfraternita_carmine/application/views/pages/view_login.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "150" }, { "name": "CSS", "bytes": "14858" }, { "name": "HTML", "bytes": "5016" }, { "name": "JavaScript", "bytes": "3660" }, { "name": "PHP", "bytes": "1550484" } ], "symlink_target": "" }
package tests;

import javax.annotation.processing.Generated;
import javax.inject.Inject;
import javax.inject.Provider;

@Generated(
  value = "com.google.auto.factory.processor.AutoFactoryProcessor",
  comments = "https://github.com/google/auto/tree/master/factory"
)
class ConstructorAnnotatedNonFinalFactory {
  private final Provider<Object> objProvider;

  @Inject
  ConstructorAnnotatedNonFinalFactory(Provider<Object> objProvider) {
    this.objProvider = checkNotNull(objProvider, 1);
  }

  ConstructorAnnotatedNonFinal create() {
    return new ConstructorAnnotatedNonFinal();
  }

  ConstructorAnnotatedNonFinal create(String s) {
    return new ConstructorAnnotatedNonFinal(checkNotNull(s, 1));
  }

  ConstructorAnnotatedNonFinal create(int i) {
    return new ConstructorAnnotatedNonFinal(checkNotNull(objProvider.get(), 1), i);
  }

  ConstructorAnnotatedNonFinal create(char c) {
    return new ConstructorAnnotatedNonFinal(checkNotNull(objProvider.get(), 1), c);
  }

  private static <T> T checkNotNull(T reference, int argumentIndex) {
    if (reference == null) {
      throw new NullPointerException(
          "@AutoFactory method argument is null but is not marked @Nullable. Argument index: "
              + argumentIndex);
    }
    return reference;
  }
}
{ "content_hash": "e6b796f47154372a9aa80c02603ff986", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 94, "avg_line_length": 29.363636363636363, "alnum_prop": 0.7383900928792569, "repo_name": "eamonnmcmanus/auto", "id": "25ec894fbe215d32b4d82db3baaf722943ad6ce3", "size": "1881", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "factory/src/test/resources/expected/ConstructorAnnotatedNonFinalFactory.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "2059273" }, { "name": "Kotlin", "bytes": "1439" }, { "name": "Shell", "bytes": "1387" } ], "symlink_target": "" }
<?php namespace enshrined\svgSanitize\data; /** * Class AllowedAttributes * * @package enshrined\svgSanitize\data */ class AllowedAttributes implements AttributeInterface { /** * Returns an array of attributes * * @return array */ public static function getAttributes() { return array( // HTML 'accept','action','align','alt','autocomplete','background','bgcolor', 'border','cellpadding','cellspacing','checked','cite','class','clear','color', 'cols','colspan','coords','datetime','default','dir','disabled', 'download','enctype','face','for','headers','height','hidden','high','href', 'hreflang','id','ismap','label','lang','list','loop', 'low','max', 'maxlength','media','method','min','multiple','name','noshade','novalidate', 'nowrap','open','optimum','pattern','placeholder','poster','preload','pubdate', 'radiogroup','readonly','rel','required','rev','reversed','rows', 'rowspan','spellcheck','scope','selected','shape','size','span', 'srclang','start','src','step','style','summary','tabindex','title', 'type','usemap','valign','value','width','xmlns', // SVG 'accent-height','accumulate','additivive','alignment-baseline', 'ascent','azimuth','baseline-shift','bias','clip','clip-path', 'clip-rule','color','color-interpolation','color-interpolation-filters', 'color-profile','color-rendering','cx','cy','d','dy','dy','direction', 'display','divisor','dur','elevation','end','fill','fill-opacity', 'fill-rule','filter','flood-color','flood-opacity','font-family', 'font-size','font-size-adjust','font-stretch','font-style','font-variant', 'font-weight','image-rendering','in','in2','k1','k2','k3','k4','kerning', 'letter-spacing','lighting-color','local','marker-end','marker-mid', 'marker-start','max','mask','mode','min','offset','operator','opacity', 'order','overflow','paint-order','path','points','r','rx','ry','radius', 'restart','scale','seed','shape-rendering','stop-color','stop-opacity', 'stroke-dasharray','stroke-dashoffset','stroke-linecap','stroke-linejoin', 'stroke-miterlimit','stroke-opacity','stroke','stroke-width','transform', 'text-anchor','text-decoration','text-rendering','u1','u2','viewbox', 'visibility','word-spacing','wrap','writing-mode','x','x1','x2','y', 'y1','y2','z', // MathML 'accent','accentunder','bevelled','close','columnsalign','columnlines', 'columnspan','denomalign','depth','display','displaystyle','fence', 'frame','largeop','length','linethickness','lspace','lquote', 'mathbackground','mathcolor','mathsize','mathvariant','maxsize', 'minsize','movablelimits','notation','numalign','open','rowalign', 'rowlines','rowspacing','rowspan','rspace','rquote','scriptlevel', 'scriptminsize','scriptsizemultiplier','selection','separator', 'separators','stretchy','subscriptshift','supscriptshift','symmetric', 'voffset', // XML 'xlink:href','xml:id','xlink:title','xml:space', // Camel Case "allowReorder", "attributeName", "attributeType", "autoReverse", "baseFrequency", "baseProfile", "calcMode", "clipPathUnits", "contentScriptType", "contentStyleType", "diffuseConstant", "edgeMode", "externalResourcesRequired", "filterRes", "filterUnits", "glyphRef", "gradientTransform", "gradientUnits", "kernelMatrix", "kernelUnitLength", "keyPoints", "keySplines", "keyTimes", "lengthAdjust", "limitingConeAngle", "markerHeight", "markerUnits", "markerWidth", "maskContentUnits", "maskUnits", "numOctaves", "pathLength", "patternContentUnits", "patternTransform", "patternUnits", "pointsAtX", "pointsAtY", "pointsAtZ", "preserveAlpha", "preserveAspectRatio", "primitiveUnits", "refX", "refY", "repeatCount", 
"repeatDur", "requiredExtensions", "requiredFeatures", "specularConstant", "specularExponent", "spreadMethod", "startOffset", "stdDeviation", "stitchTiles", "surfaceScale", "systemLanguage", "tableValues", "targetX", "targetY", "textLength", "viewBox", "viewTarget", "xChannelSelector", "yChannelSelector", "zoomAndPan", ); } }
{ "content_hash": "667b69e034b0ad1c04465f5f6d071f41", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 98, "avg_line_length": 53.627906976744185, "alnum_prop": 0.5925845620121423, "repo_name": "Kilbourne/biosphaera", "id": "9ae95becee565b908698ac3370452cb0fb643069", "size": "4612", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/app/plugins/safe-svg/lib/vendor/enshrined/svg-sanitize/src/data/AllowedAttributes.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "223361" }, { "name": "HTML", "bytes": "40850" }, { "name": "JavaScript", "bytes": "44809" }, { "name": "PHP", "bytes": "173651" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <ripple xmlns:android="http://schemas.android.com/apk/res/android" android:color="@color/white_semitransparent_10" />
{ "content_hash": "738666083cdc9a532d22e444d628acb4", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 66, "avg_line_length": 53.333333333333336, "alnum_prop": 0.71875, "repo_name": "pbednarz/TutorialViewPager", "id": "a2f9918be1452eb4bf7550f958923f09394d52e9", "size": "160", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/drawable-v21/btn_semi_transparent_selector.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "12979" } ], "symlink_target": "" }
package org.gradle.internal.reflect;

/**
 * Thrown when a requested method cannot be found.
 */
public class NoSuchMethodException extends RuntimeException {
    public NoSuchMethodException(String message) {
        super(message);
    }
}
{ "content_hash": "2a016ef5bb00bfe80fb6fa08209f3abe", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 61, "avg_line_length": 20.333333333333332, "alnum_prop": 0.7254098360655737, "repo_name": "FinishX/coolweather", "id": "e61a1bbb60210beeb0e67462fc3528537b612889", "size": "859", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "gradle/gradle-2.8/src/base-services/org/gradle/internal/reflect/NoSuchMethodException.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "277" }, { "name": "C", "bytes": "97569" }, { "name": "C++", "bytes": "912105" }, { "name": "CSS", "bytes": "105486" }, { "name": "CoffeeScript", "bytes": "201" }, { "name": "GAP", "bytes": "212" }, { "name": "Groovy", "bytes": "1162135" }, { "name": "HTML", "bytes": "35827007" }, { "name": "Java", "bytes": "12908568" }, { "name": "JavaScript", "bytes": "195155" }, { "name": "Objective-C", "bytes": "2977" }, { "name": "Objective-C++", "bytes": "442" }, { "name": "Scala", "bytes": "12789" }, { "name": "Shell", "bytes": "5398" } ], "symlink_target": "" }
package mcjty.rftools.blocks.dimletconstruction; import mcjty.entity.GenericTileEntity; import mcjty.rftools.blocks.BlockTools; import mcjty.rftools.items.dimlets.DimletKey; import mcjty.rftools.items.dimlets.DimletObjectMapping; import net.minecraft.nbt.NBTTagCompound; import java.util.Map; import java.util.Random; public class TimeAbsorberTileEntity extends GenericTileEntity { private int absorbing = 0; private float angle = -1.0f; // For pulse detection. private boolean prevIn = false; private int registerTimeout = 0; @Override protected void checkStateClient() { if (absorbing > 0) { Random rand = worldObj.rand; double u = rand.nextFloat() * 2.0f - 1.0f; double v = (float) (rand.nextFloat() * 2.0f * Math.PI); double x = Math.sqrt(1 - u * u) * Math.cos(v); double y = Math.sqrt(1 - u * u) * Math.sin(v); double z = u; double r = 1.0f; worldObj.spawnParticle("portal", xCoord + 0.5f + x * r, yCoord + 0.5f + y * r, zCoord + 0.5f + z * r, -x, -y, -z); } } public int getAbsorbing() { return absorbing; } public float getAngle() { return angle; } public int getRegisterTimeout() { return registerTimeout; } @Override protected void checkStateServer() { super.checkStateServer(); int meta = worldObj.getBlockMetadata(xCoord, yCoord, zCoord); boolean newvalue = BlockTools.getRedstoneSignalIn(meta); boolean pulse = newvalue && !prevIn; prevIn = newvalue; markDirty(); if (registerTimeout > 0) { registerTimeout--; return; } if (pulse) { registerTime(); } } private void registerTime() { if (worldObj.canBlockSeeTheSky(xCoord, yCoord, zCoord)) { float a = worldObj.getCelestialAngle(1.0f); DimletKey bestDimlet = findBestTimeDimlet(a); float besta = DimletObjectMapping.idToCelestialAngle.get(bestDimlet); if (angle < -0.001f) { angle = besta; absorbing = DimletConstructionConfiguration.maxTimeAbsorbtion-1; } else if (Math.abs(besta-angle) < 0.1f) { absorbing--; if (absorbing < 0) { absorbing = 0; } registerTimeout = 3000; } else { absorbing++; if (absorbing >= DimletConstructionConfiguration.maxTimeAbsorbtion) { absorbing = DimletConstructionConfiguration.maxTimeAbsorbtion-1; } } } } public static DimletKey findBestTimeDimlet(float a) { float bestDiff = 10000.0f; DimletKey bestDimlet = null; for (Map.Entry<DimletKey, Float> entry : DimletObjectMapping.idToCelestialAngle.entrySet()) { Float celangle = entry.getValue(); if (celangle != null) { float diff = Math.abs(a - celangle); if (diff < bestDiff) { bestDiff = diff; bestDimlet = entry.getKey(); } diff = Math.abs((a-1.0f) - celangle); if (diff < bestDiff) { bestDiff = diff; bestDimlet = entry.getKey(); } diff = Math.abs((a+1.0f) - celangle); if (diff < bestDiff) { bestDiff = diff; bestDimlet = entry.getKey(); } } } return bestDimlet; } @Override public void writeToNBT(NBTTagCompound tagCompound) { super.writeToNBT(tagCompound); tagCompound.setBoolean("prevIn", prevIn); } @Override public void writeRestorableToNBT(NBTTagCompound tagCompound) { super.writeRestorableToNBT(tagCompound); tagCompound.setInteger("absorbing", absorbing); tagCompound.setFloat("angle", angle); tagCompound.setInteger("registerTimeout", registerTimeout); } @Override public void readFromNBT(NBTTagCompound tagCompound) { super.readFromNBT(tagCompound); prevIn = tagCompound.getBoolean("prevIn"); } @Override public void readRestorableFromNBT(NBTTagCompound tagCompound) { super.readRestorableFromNBT(tagCompound); absorbing = tagCompound.getInteger("absorbing"); if (tagCompound.hasKey("angle")) { angle = tagCompound.getFloat("angle"); } else { angle = -1.0f; } registerTimeout = 
tagCompound.getInteger("registerTimeout"); } }
{ "content_hash": "aee48ca584361df6cb916c6dacebb54e", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 126, "avg_line_length": 31.666666666666668, "alnum_prop": 0.5669473684210526, "repo_name": "Adaptivity/RFTools", "id": "efebebc48186fd3447517d814607f0650e9b6886", "size": "4750", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/mcjty/rftools/blocks/dimletconstruction/TimeAbsorberTileEntity.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "2394327" } ], "symlink_target": "" }
package in.twizmwaz.cardinal.command; import com.sk89q.minecraft.util.commands.Command; import com.sk89q.minecraft.util.commands.CommandContext; import com.sk89q.minecraft.util.commands.CommandException; import in.twizmwaz.cardinal.Cardinal; import in.twizmwaz.cardinal.GameHandler; import in.twizmwaz.cardinal.chat.ChatConstant; import in.twizmwaz.cardinal.chat.LocalizedChatMessage; import in.twizmwaz.cardinal.match.Match; import in.twizmwaz.cardinal.match.MatchState; import in.twizmwaz.cardinal.module.GameObjective; import in.twizmwaz.cardinal.module.modules.hill.HillObjective; import in.twizmwaz.cardinal.module.modules.matchTimer.MatchTimer; import in.twizmwaz.cardinal.module.modules.score.ScoreModule; import in.twizmwaz.cardinal.module.modules.team.TeamModule; import in.twizmwaz.cardinal.module.modules.timeLimit.TimeLimit; import in.twizmwaz.cardinal.util.ChatUtils; import in.twizmwaz.cardinal.util.ScoreboardUtils; import in.twizmwaz.cardinal.util.StringUtils; import in.twizmwaz.cardinal.util.TeamUtils; import org.apache.commons.lang.WordUtils; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.command.CommandSender; import org.bukkit.entity.Player; public class MatchCommand { @Command(aliases = {"matchinfo", "match"}, desc = "Shows information about the currently playing match.", usage = "") public static void match(final CommandContext args, CommandSender sender) throws CommandException { sender.sendMessage(ChatColor.RED + "" + ChatColor.STRIKETHROUGH + "------" + ChatColor.DARK_AQUA + " " + new LocalizedChatMessage(ChatConstant.UI_MATCH_INFO).getMessage(ChatUtils.getLocale(sender)) + " " + ChatColor.GRAY + "(" + GameHandler.getGameHandler().getMatch().getNumber() + ")" + ChatColor.RED + " " + ChatColor.STRIKETHROUGH + "------"); sender.sendMessage(ChatColor.DARK_PURPLE + new LocalizedChatMessage(ChatConstant.UI_TIME).getMessage(ChatUtils.getLocale(sender)) + ": " + ChatColor.GOLD + (Cardinal.getInstance().getConfig().getBoolean("matchTimeMillis") ? StringUtils.formatTimeWithMillis(MatchTimer.getTimeInSeconds()) : StringUtils.formatTime(MatchTimer.getTimeInSeconds()))); String teams = ""; boolean hasObjectives = false; for (TeamModule team : TeamUtils.getTeams()) { int players = 0; for (Player player : Bukkit.getOnlinePlayers()) { if (TeamUtils.getTeamByPlayer(player) != null) { if (TeamUtils.getTeamByPlayer(player) == team) { players++; } } } teams += team.getCompleteName() + ChatColor.GRAY + ": " + ChatColor.RESET + players + (team.isObserver() ? "" : ChatColor.GRAY + "/" + team.getMax() + ChatColor.AQUA + " | "); if (TeamUtils.getShownObjectives(team).size() > 0) hasObjectives = true; } if (ScoreboardUtils.getHills().size() > 0) hasObjectives = true; sender.sendMessage(teams); Match match = GameHandler.getGameHandler().getMatch(); if (match.isRunning() || match.getState().equals(MatchState.ENDED) || match.getState().equals(MatchState.CYCLING)) { if (hasObjectives) { sender.sendMessage(ChatColor.RED + "---- " + new LocalizedChatMessage(ChatConstant.UI_GOALS).getMessage(ChatUtils.getLocale(sender)) + " ----"); for (TeamModule team : TeamUtils.getTeams()) { if (!team.isObserver()) { if (TeamUtils.getShownObjectives(team).size() > 0 || ScoreboardUtils.getHills().size() > 0) { String objectives = ""; for (GameObjective objective : TeamUtils.getShownObjectives(team)) { objectives += (objective.isComplete() ? 
ChatColor.GREEN : ChatColor.DARK_RED) + WordUtils.capitalizeFully(objective.getName().replaceAll("_", " ")) + " "; } for (HillObjective hill : ScoreboardUtils.getHills()) { if (hill.getTeam() != null) { if (hill.getTeam() == team) { objectives += ChatColor.GREEN + WordUtils.capitalizeFully(hill.getName().replaceAll("_", " ") + " "); break; } } objectives += ChatColor.DARK_RED + WordUtils.capitalizeFully(hill.getName().replaceAll("_", " ") + " "); } objectives = objectives.trim(); sender.sendMessage(team.getCompleteName() + ChatColor.GRAY + ": " + objectives); } } } } if (ScoreModule.matchHasScoring()) { String score = ""; for (ScoreModule scoreModule : GameHandler.getGameHandler().getMatch().getModules().getModules(ScoreModule.class)) { score += scoreModule.getTeam().getColor() + "" + scoreModule.getScore() + " "; } score = score.trim(); double timeRemaining; if (TimeLimit.getMatchTimeLimit() != 0) { timeRemaining = TimeLimit.getMatchTimeLimit() - MatchTimer.getTimeInSeconds(); sender.sendMessage(ChatColor.DARK_AQUA + "Score: " + score + (TimeLimit.getMatchTimeLimit() != 0 ? ChatColor.RED + " " + StringUtils.formatTime(timeRemaining) : "") + (ScoreModule.matchHasMax() ? ChatColor.GRAY + " [" + ScoreModule.max() + "]" : "")); } } } } }
{ "content_hash": "3089de882ac5a65065fab4695c46d1f8", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 355, "avg_line_length": 63.13186813186813, "alnum_prop": 0.6052219321148825, "repo_name": "Electroid/ExperimentalPGM", "id": "e8558e610a3c7138cfc57ba12b4f36f39b40b77d", "size": "5745", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/in/twizmwaz/cardinal/command/MatchCommand.java", "mode": "33261", "license": "mit", "language": [ { "name": "HTML", "bytes": "2089" }, { "name": "Java", "bytes": "940171" } ], "symlink_target": "" }
<?php namespace yiiunit\framework\i18n; use yii\i18n\MessageFormatter; use yiiunit\TestCase; /** * @author Carsten Brandt <[email protected]> * @since 2.0 * @group i18n */ class FallbackMessageFormatterTest extends TestCase { const N = 'n'; const N_VALUE = 42; const SUBJECT = 'сабж'; const SUBJECT_VALUE = 'Answer to the Ultimate Question of Life, the Universe, and Everything'; public function patterns() { return [ [ '{'.self::SUBJECT.'} is {'.self::N.'}', // pattern self::SUBJECT_VALUE.' is '.self::N_VALUE, // expected [ // params self::N => self::N_VALUE, self::SUBJECT => self::SUBJECT_VALUE, ] ], [ '{'.self::SUBJECT.'} is {'.self::N.', number}', // pattern self::SUBJECT_VALUE.' is '.self::N_VALUE, // expected [ // params self::N => self::N_VALUE, self::SUBJECT => self::SUBJECT_VALUE, ] ], [ '{'.self::SUBJECT.'} is {'.self::N.', number, integer}', // pattern self::SUBJECT_VALUE.' is '.self::N_VALUE, // expected [ // params self::N => self::N_VALUE, self::SUBJECT => self::SUBJECT_VALUE, ] ], // This one was provided by Aura.Intl. Thanks! [<<<_MSG_ {gender_of_host, select, female {{num_guests, plural, offset:1 =0 {{host} does not give a party.} =1 {{host} invites {guest} to her party.} =2 {{host} invites {guest} and one other person to her party.} other {{host} invites {guest} and # other people to her party.}}} male {{num_guests, plural, offset:1 =0 {{host} does not give a party.} =1 {{host} invites {guest} to his party.} =2 {{host} invites {guest} and one other person to his party.} other {{host} invites {guest} and # other people to his party.}}} other {{num_guests, plural, offset:1 =0 {{host} does not give a party.} =1 {{host} invites {guest} to their party.} =2 {{host} invites {guest} and one other person to their party.} other {{host} invites {guest} and # other people to their party.}}}} _MSG_ , 'ralph invites beep and 3 other people to his party.', [ 'gender_of_host' => 'male', 'num_guests' => 4, 'host' => 'ralph', 'guest' => 'beep' ] ], [ '{name} is {gender} and {gender, select, female{she} male{he} other{it}} loves Yii!', 'Alexander is male and he loves Yii!', [ 'name' => 'Alexander', 'gender' => 'male', ], ], // verify pattern in select does not get replaced [ '{name} is {gender} and {gender, select, female{she} male{he} other{it}} loves Yii!', 'Alexander is male and he loves Yii!', [ 'name' => 'Alexander', 'gender' => 'male', // following should not be replaced 'he' => 'wtf', 'she' => 'wtf', 'it' => 'wtf', ] ], // verify pattern in select message gets replaced [ '{name} is {gender} and {gender, select, female{she} male{{he}} other{it}} loves Yii!', 'Alexander is male and wtf loves Yii!', [ 'name' => 'Alexander', 'gender' => 'male', 'he' => 'wtf', 'she' => 'wtf', ], ], // some parser specific verifications [ '{gender} and {gender, select, female{she} male{{he}} other{it}} loves {nr} is {gender}!', 'male and wtf loves 42 is male!', [ 'nr' => 42, 'gender' => 'male', 'he' => 'wtf', 'she' => 'wtf', ], ], ]; } /** * @dataProvider patterns */ public function testNamedArguments($pattern, $expected, $args) { $formatter = new FallbackMessageFormatter(); $result = $formatter->fallbackFormat($pattern, $args, 'en-US'); $this->assertEquals($expected, $result, $formatter->getErrorMessage()); } public function testInsufficientArguments() { $expected = '{'.self::SUBJECT.'} is '.self::N_VALUE; $formatter = new FallbackMessageFormatter(); $result = $formatter->fallbackFormat('{'.self::SUBJECT.'} is {'.self::N.'}', [ self::N => self::N_VALUE, ], 'en-US'); $this->assertEquals($expected, $result); } 
public function testNoParams() { $pattern = '{'.self::SUBJECT.'} is '.self::N; $formatter = new FallbackMessageFormatter(); $result = $formatter->fallbackFormat($pattern, [], 'en-US'); $this->assertEquals($pattern, $result, $formatter->getErrorMessage()); } } class FallbackMessageFormatter extends MessageFormatter { public function fallbackFormat($pattern, $args, $locale) { return parent::fallbackFormat($pattern, $args, $locale); } }
{ "content_hash": "06db59fd7abe018d34e351179c8adbbc", "timestamp": "", "source": "github", "line_count": 167, "max_line_length": 95, "avg_line_length": 26.49700598802395, "alnum_prop": 0.592090395480226, "repo_name": "ostashevdv/yii2", "id": "a067f70359f862101b710f6ab0e9edb17a86a614", "size": "4573", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/unit/framework/i18n/FallbackMessageFormatterTest.php", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE interactive_environment SYSTEM "../../interactive_environments.dtd"> <interactive_environment name="Paraview"> <data_sources> <data_source> <model_class>HistoryDatasetAssociation</model_class> <test type="isinstance" test_attr="datatype" result_type="datatype">images.Image</test> <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.STL</test> <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.PlyAscii</test> <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.PlyBinary</test> <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.VtkAscii</test> <test type="isinstance" test_attr="datatype" result_type="datatype">constructive_solid_geometry.VtkBinary</test> <to_param param_attr="id">dataset_id</to_param> </data_source> </data_sources> <params> <param type="dataset" var_name_in_template="hda" required="true">dataset_id</param> </params> <entry_point entry_point_type="mako">paraview.mako</entry_point> </interactive_environment>
{ "content_hash": "8621415ac847560a4c034b324813406a", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 124, "avg_line_length": 64.6, "alnum_prop": 0.6927244582043344, "repo_name": "BMCV/galaxy-image-analysis", "id": "c756ba9b5486377c65463a3e6e1a4b38a3a6745e", "size": "1292", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "interactive_environments/paraview/config/paraview.xml", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "554" }, { "name": "Mako", "bytes": "2018" }, { "name": "Python", "bytes": "102811" } ], "symlink_target": "" }
#include "simba.h" #include "romeo.h" /* Analog pin constants. */ #define ANALOG_VOLTAGE_MAX 5.0f #define ANALOG_SAMPLES_MAX 1024 /* Measure over one of three resistors. */ #define VOLTAGE_DIVIDER_GAIN 3.0f /* Battery constants. */ #define BATTERY_VOLTAGE_MAX (VOLTAGE_DIVIDER_GAIN * ANALOG_VOLTAGE_MAX) #define BATTERY_VOLTAGE_PER_SAMPLE (BATTERY_VOLTAGE_MAX / ANALOG_SAMPLES_MAX) #define BATTERY_VOLTAGE_EMPTY (11.5f) static int parameter_battery_param_battery_voltage_full_value = 13; static struct fs_parameter_t parameter_battery_param_battery_voltage_full; int battery_module_init() { fs_parameter_init(&parameter_battery_param_battery_voltage_full, FSTR("/robot/parameters/set_battery_voltage_full"), fs_cmd_parameter_int, &parameter_battery_param_battery_voltage_full_value); fs_parameter_register(&parameter_battery_param_battery_voltage_full); return (0); } int battery_init(struct battery_t *battery_p, struct adc_device_t *dev_p, struct pin_device_t *pin_dev_p) { battery_p->battery_voltage_full = parameter_battery_param_battery_voltage_full_value; adc_init(&battery_p->adc, dev_p, pin_dev_p, ADC_REFERENCE_VCC, 1000); return (0); } int battery_async_convert(struct battery_t *battery_p) { /* Start asynchronous convertion. */ return (adc_async_convert(&battery_p->adc, battery_p->ongoing.samples, membersof(battery_p->ongoing.samples))); } int battery_async_wait(struct battery_t *battery_p) { /* Wait for ongoing asynchronous convertion to finish. */ if (!adc_async_wait(&battery_p->adc)) { return (1); } return (0); } int battery_update(struct battery_t *battery_p) { int sample, stored_energy_level; float battery_voltage; /* Save latest sample. */ memcpy(battery_p->updated.samples, battery_p->ongoing.samples, sizeof(battery_p->updated.samples)); sample = battery_p->updated.samples[0]; /* Remove when measured after charging. */ battery_p->battery_voltage_full = parameter_battery_param_battery_voltage_full_value; if (battery_p->battery_voltage_full < BATTERY_VOLTAGE_EMPTY) { battery_p->battery_voltage_full = BATTERY_VOLTAGE_EMPTY; } /* Calculate the battery voltage. */ battery_voltage = (sample * BATTERY_VOLTAGE_PER_SAMPLE); /* Low pass filtering of the battery voltage. */ battery_voltage = (19.0f * battery_p->updated.battery_voltage + 1.0f * battery_voltage) / 20.0f; /* Use the battery voltage to calculate the stored energy level. */ if (battery_voltage < BATTERY_VOLTAGE_EMPTY) { stored_energy_level = POWER_STORED_ENERGY_LEVEL_MIN; } else if (battery_voltage > battery_p->battery_voltage_full) { stored_energy_level = POWER_STORED_ENERGY_LEVEL_MAX; } else { stored_energy_level = ((100.0f * (battery_voltage - BATTERY_VOLTAGE_EMPTY)) / (battery_p->battery_voltage_full - BATTERY_VOLTAGE_EMPTY)); } battery_p->updated.battery_voltage = battery_voltage; battery_p->updated.stored_energy_level = stored_energy_level; return (0); } int battery_get_stored_energy_level(struct battery_t *battery_p) { return (battery_p->updated.stored_energy_level); } float battery_get_battery_voltage(struct battery_t *battery_p) { return (battery_p->updated.battery_voltage); }
{ "content_hash": "e09b219f87001b9b2a972c45cf87839d", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 100, "avg_line_length": 31.210526315789473, "alnum_prop": 0.6531759415401911, "repo_name": "eerimoq/robomower", "id": "f2a24a0561aaf3f9254352b7ef5e6a3dbdc2f03f", "size": "4225", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "romeo/romeo/src/battery.c", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "124076" }, { "name": "C++", "bytes": "751" }, { "name": "Java", "bytes": "42494" }, { "name": "Makefile", "bytes": "17410" }, { "name": "Python", "bytes": "16525" } ], "symlink_target": "" }
from django.conf.urls import patterns, url


urlpatterns = patterns(
    'wagtail.wagtaildocs.views',
    url(r'^(\d+)/(.*)$', 'serve.serve', name='wagtaildocs_serve'),
)
{ "content_hash": "9283af09bc468161d3b1a67a58894ea9", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 66, "avg_line_length": 28.333333333333332, "alnum_prop": 0.6588235294117647, "repo_name": "h2oloopan/easymerge", "id": "5f8ddaf0398ec4dea20853470e3e8cfea80bc3fb", "size": "170", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "EasyMerge/tests/wagtail/wagtail/wagtaildocs/urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "13487" }, { "name": "CSS", "bytes": "416664" }, { "name": "D", "bytes": "2012" }, { "name": "Java", "bytes": "583078" }, { "name": "JavaScript", "bytes": "285692" }, { "name": "Python", "bytes": "4212549" }, { "name": "Ruby", "bytes": "920" }, { "name": "Shell", "bytes": "40508" }, { "name": "TeX", "bytes": "114952" } ], "symlink_target": "" }
// Copyright 2013-2022, University of Colorado Boulder

/**
 * C2H5Cl Molecule
 * Structure is similar to C2H6, but with Cl replacing one of the H's.
 *
 * @author Chris Malley (PixelZoom, Inc.)
 */

import { combineOptions, EmptySelfOptions } from '../../../phet-core/js/optionize.js';
import Element from '../Element.js';
import nitroglycerin from '../nitroglycerin.js';
import AtomNode, { AtomNodeOptions } from './AtomNode.js';
import MoleculeNode, { MoleculeNodeOptions } from './MoleculeNode.js';

type SelfOptions = EmptySelfOptions;

export type C2H5ClNodeOptions = SelfOptions & MoleculeNodeOptions;

export default class C2H5ClNode extends MoleculeNode {

  public constructor( providedOptions?: C2H5ClNodeOptions ) {

    const atomNodeOptions = providedOptions?.atomNodeOptions;

    // atoms
    const leftNode = new AtomNode( Element.C, atomNodeOptions );
    const centerNode = new AtomNode( Element.C, combineOptions<AtomNodeOptions>( {
      centerX: leftNode.right + ( 0.25 * leftNode.width ),
      centerY: leftNode.centerY
    }, atomNodeOptions ) );
    const smallTopLeftNode = new AtomNode( Element.H, combineOptions<AtomNodeOptions>( {
      centerX: leftNode.centerX,
      centerY: leftNode.top
    }, atomNodeOptions ) );
    const smallBottomLeftNode = new AtomNode( Element.H, combineOptions<AtomNodeOptions>( {
      centerX: smallTopLeftNode.centerX,
      centerY: leftNode.bottom
    }, atomNodeOptions ) );
    const smallLeftNode = new AtomNode( Element.H, combineOptions<AtomNodeOptions>( {
      centerX: leftNode.left,
      centerY: leftNode.centerY
    }, atomNodeOptions ) );
    const smallTopRightNode = new AtomNode( Element.H, combineOptions<AtomNodeOptions>( {
      centerX: centerNode.centerX,
      centerY: centerNode.top
    }, atomNodeOptions ) );
    const smallBottomRightNode = new AtomNode( Element.H, combineOptions<AtomNodeOptions>( {
      centerX: centerNode.centerX,
      centerY: centerNode.bottom
    }, atomNodeOptions ) );
    const rightNode = new AtomNode( Element.Cl, combineOptions<AtomNodeOptions>( {
      left: centerNode.centerX + ( 0.11 * leftNode.width ),
      centerY: centerNode.centerY
    }, atomNodeOptions ) );

    const atomNodes = [ smallBottomRightNode, smallTopRightNode, centerNode, rightNode,
      smallLeftNode, leftNode, smallBottomLeftNode, smallTopLeftNode ];

    super( atomNodes, providedOptions );
  }
}

nitroglycerin.register( 'C2H5ClNode', C2H5ClNode );
{ "content_hash": "6fde865035f7d22ba56fed23e95c54d3", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 92, "avg_line_length": 37.92307692307692, "alnum_prop": 0.7168356997971602, "repo_name": "phetsims/nitroglycerin", "id": "4f1bc2f0bc9640cbafe5d56557c5b68c17f89aeb", "size": "2465", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/nodes/C2H5ClNode.ts", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "4296" }, { "name": "JavaScript", "bytes": "165" }, { "name": "TypeScript", "bytes": "67646" } ], "symlink_target": "" }
<resources> <string name="app_name">blescanner</string> <string name="action_settings">設定</string> <string name="action_scan_device">掃描裝置</string> <string name="ask_for_disconnect">確定要將此裝置斷線嗎?</string> <string name="available">允許連線裝置</string> <string name="back">返回</string> <string name="bonded">已綁定裝置</string> <string name="close">關閉</string> <string name="confirm">確認</string> <string name="device">裝置</string> <string name="disconnect">中斷斷線</string> <string name="has_been_disconnected">已斷線</string> <string name="no_device_found">未搜尋到裝置</string> <string name="no_device_bonded">尚未綁定裝置</string> <string name="ok">確認</string> <string name="scan_completed">搜尋結束</string> <string name="scanning">搜尋中...</string> <string name="stop_scanning">搜尋已中斷</string> <string name="unknown">未知裝置</string> <string name="dismiss">關閉</string> <string name="ble_not_supported">該設備不支援BLE!</string> <string name="bt_not_enabled">該設備未開啟藍牙</string> <string name="connected">已連線</string> <string name="disconnected">已斷線</string> <string name="auto_connect">自動連線</string> <string name="pair_device">綁定裝置</string> </resources>
{ "content_hash": "925e6b25dfec240ca77fbb62a02468ec", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 58, "avg_line_length": 43.17857142857143, "alnum_prop": 0.6724565756823822, "repo_name": "ppcrong/blehelper", "id": "32ba67a7e12411f901d1c8d37b3c4d00211f3d86", "size": "1413", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/res/values-zh-rTW/strings.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "68846" } ], "symlink_target": "" }
Pulls down Guckenheimer's menu for Union Pacific Center and tells me when there's chili.

If you're interested in knowing when there's chili in the Union Pacific cafeteria, you can simply add the URL to your calendar. In Google Calendar, just go to Other Calendars and then **Add by URL**.

![Google Calendar - Add Calendar by URL](https://github.com/bdetweiler/chili-time/blob/master/googlecalendar.png "Google Calendar - Add Calendar by URL")

Enter the following URL: [http://bdetweiler.github.io/projects/chilitime/chilitime.ics](http://bdetweiler.github.io/projects/chilitime/chilitime.ics)

# Frequently Asked Questions

## Q. Why did you write this? Why don't you just look at their website?

What, pull up the website every day like some commoner? Psht. I didn't go through 8 years of computer science school to look at websites all day. \*scrolls listlessly through Facebook\*

## Q. Would you consider expanding this to other cafeterias?

If I ever work somewhere else, I will address the situation at that time.

## Q. What about other foods?

Sorry, I am only passionate about chili.
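## Q. Can I check the feed without a calendar app?

Probably — the `.ics` file is just text. The sketch below is only an illustration (it is not part of this repo) and assumes the event summaries actually contain the word "chili"; adjust the match if the real calendar names its events differently.

```python
# Minimal sketch: fetch the published chilitime.ics feed and print events
# whose SUMMARY mentions "chili". Uses only the standard library; it does not
# handle ICS line folding, which is fine for a quick check.
import urllib.request

ICS_URL = "http://bdetweiler.github.io/projects/chilitime/chilitime.ics"


def chili_days(ics_text):
    """Yield (date, summary) pairs for events whose summary mentions chili."""
    current = {}
    for line in ics_text.splitlines():
        if line.startswith("BEGIN:VEVENT"):
            current = {}
        elif line.startswith("DTSTART"):
            current["date"] = line.split(":", 1)[1].strip()
        elif line.startswith("SUMMARY"):
            current["summary"] = line.split(":", 1)[1].strip()
        elif line.startswith("END:VEVENT"):
            if "chili" in current.get("summary", "").lower():
                yield current.get("date", "?"), current["summary"]


if __name__ == "__main__":
    with urllib.request.urlopen(ICS_URL) as resp:
        text = resp.read().decode("utf-8", errors="replace")
    for date, summary in chili_days(text):
        print(date, summary)
```

Run it and it prints the date stamp and summary of each matching event in the feed.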
{ "content_hash": "fbc20c87c3f1ac06bfc762128b7435b8", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 185, "avg_line_length": 49.81818181818182, "alnum_prop": 0.7728102189781022, "repo_name": "bdetweiler/chili-time", "id": "ba46aadd4b0e6d1f95f0b23172c7fb82b146d624", "size": "1109", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "3068" }, { "name": "Shell", "bytes": "214" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <android.support.v7.widget.CardView xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" xmlns:app="http://schemas.android.com/apk/res-auto" android:layout_width="match_parent" android:layout_height="wrap_content" style="@style/MyCardView"> <android.support.percent.PercentRelativeLayout android:layout_width="match_parent" android:layout_height="wrap_content"> <!--suppress AndroidDomInspection --> <ImageView android:id="@+id/post_image" app:layout_widthPercent="100%" app:layout_aspectRatio="178%" android:layout_alignParentLeft="true" android:layout_alignParentStart="true" android:layout_alignParentTop="true" android:scaleType="centerCrop" android:adjustViewBounds="true" android:contentDescription="@string/cdesc_post_image" tools:src="@drawable/ic_launcher" /> <TextView android:id="@+id/post_title" android:layout_width="match_parent" android:layout_height="wrap_content" android:layout_below="@id/post_image" android:paddingBottom="@dimen/padding_inline" android:paddingEnd="@dimen/padding_large" android:paddingLeft="@dimen/padding_large" android:paddingRight="@dimen/padding_large" android:paddingStart="@dimen/padding_large" android:paddingTop="@dimen/padding_large" android:textAppearance="@style/TextAppearance.Light" tools:text="This is a very, very, very, very loooooong post title" /> <ImageView android:id="@+id/post_status_icon" android:layout_width="14dp" android:layout_height="14dp" android:layout_below="@id/post_title" android:layout_alignLeft="@id/post_title" android:layout_alignStart="@id/post_title" android:layout_marginLeft="@dimen/padding_large" android:layout_marginStart="@dimen/padding_large" android:layout_marginTop="3dp" android:scaleType="fitStart" tools:src="@drawable/status_draft" tools:ignore="ContentDescription" /> <TextView android:id="@+id/post_status_text" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_below="@id/post_title" android:layout_toRightOf="@id/post_status_icon" android:layout_toEndOf="@id/post_status_icon" android:layout_toLeftOf="@+id/post_tags" android:layout_toStartOf="@+id/post_tags" android:paddingBottom="@dimen/padding_large" android:paddingEnd="@dimen/padding_large" android:paddingLeft="@dimen/padding_inline" android:paddingRight="@dimen/padding_large" android:paddingStart="@dimen/padding_inline" android:gravity="start" android:maxLines="2" android:textAppearance="@style/TextAppearance.Small.Dim" android:textColor="@color/status_published" app:font="narrow-bold" tools:text="Published 5 days ago" /> <TextView android:id="@+id/post_tags" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_below="@id/post_title" android:layout_alignRight="@id/post_title" android:layout_alignEnd="@id/post_title" android:paddingBottom="@dimen/padding_large" android:paddingEnd="@dimen/padding_large" android:paddingRight="@dimen/padding_large" android:maxLines="1" android:ellipsize="none" android:textAppearance="@style/TextAppearance.Small.Dim" android:textColor="@color/text_tertiary" app:font="narrow-bold" tools:text="#android +2" tools:ignore="RtlSymmetry" /> </android.support.percent.PercentRelativeLayout> </android.support.v7.widget.CardView>
{ "content_hash": "8922c1ab1c13faf15cac66606e046f58", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 78, "avg_line_length": 42.336633663366335, "alnum_prop": 0.6073433115060805, "repo_name": "vickychijwani/quill", "id": "173837085413288c9e00351ae8e9b556cdb8907e", "size": "4276", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/layout/post_list_item.xml", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "31193" }, { "name": "HTML", "bytes": "2338" }, { "name": "Java", "bytes": "478641" }, { "name": "JavaScript", "bytes": "13334" }, { "name": "Shell", "bytes": "1410" } ], "symlink_target": "" }
declare namespace javax {
    namespace tools {
        interface JavaCompiler extends javax.tools.Tool, javax.tools.OptionChecker {
            getTask(arg0: java.io.Writer, arg1: javax.tools.JavaFileManager, arg2: javax.tools.DiagnosticListener<unknown> | javax.tools.DiagnosticListener$$lambda<unknown>, arg3: java.lang.Iterable<java.lang.String>, arg4: java.lang.Iterable<java.lang.String>, arg5: java.lang.Iterable<javax.tools.JavaFileObject>): javax.tools.JavaCompiler$CompilationTask
            getStandardFileManager(arg0: javax.tools.DiagnosticListener<unknown> | javax.tools.DiagnosticListener$$lambda<unknown>, arg1: java.util.Locale, arg2: java.nio.charset.Charset): javax.tools.StandardJavaFileManager
        }
    }
}
{ "content_hash": "d3214ec4646a74a199bcbe956e994789", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 351, "avg_line_length": 71.2, "alnum_prop": 0.7851123595505618, "repo_name": "wizawu/1c", "id": "a6e9c60262f5b97bc578012b755baa7f433fbb39", "size": "712", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "@types/jdk/javax.tools.JavaCompiler.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "4841" }, { "name": "JavaScript", "bytes": "419" }, { "name": "Makefile", "bytes": "846" }, { "name": "Shell", "bytes": "841" }, { "name": "TypeScript", "bytes": "67719" } ], "symlink_target": "" }
NSString *const QLWaterfallFlowLayoutIdentifier = @"QLWaterfallFlowLayoutIdentifier"; @interface ViewController ()<QLWaterfallFlowLayoutDelegate> @property (nonatomic, strong) NSMutableArray *dataSource; @end @implementation ViewController - (NSMutableArray *)dataSource { if (!_dataSource) { _dataSource = [[NSMutableArray alloc]initWithCapacity:10]; } return _dataSource; } - (void)viewDidLoad { [super viewDidLoad]; // http://www.pocketdigi.com/20141221/1406.html // [self.collectionView registerClass:[SEFirstTabbarViewCell class] forCellWithReuseIdentifier:SEFirstTabbarViewControllerIdentifier]; for (int i = 0; i < 12; i ++) { CGFloat randomHeight = 100 + arc4random() % 60; [self.dataSource addObject:@(randomHeight)]; } QLWaterfallFlowLayout *layout = (QLWaterfallFlowLayout *)self.collectionView.collectionViewLayout; layout.delegate = self; layout.headerReferenceSize = CGSizeMake(300, 50); layout.sectionInset = UIEdgeInsetsMake(10, 0, 10, 0); UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(handleTapGestureAction:)]; [self.collectionView addGestureRecognizer:tap]; } - (void)handleTapGestureAction:(UITapGestureRecognizer *)sender { if (sender.state == UIGestureRecognizerStateEnded) { CGPoint tapPoint = [sender locationInView:self.collectionView]; NSIndexPath *idx = [self.collectionView indexPathForItemAtPoint:tapPoint]; if (idx) { [self.dataSource removeObjectAtIndex:idx.item]; [self.collectionView performBatchUpdates:^{ [self.collectionView deleteItemsAtIndexPaths:@[idx]]; } completion:^(BOOL finished) { [self.collectionView reloadData]; }]; }else{ CGFloat randomHeight = 100 + arc4random() % 140; [self.dataSource addObject:@(randomHeight)]; NSUInteger count = self.dataSource.count - 1; [self.collectionView performBatchUpdates:^{ NSIndexPath *newIdx = [NSIndexPath indexPathForItem:count inSection:0]; [self.collectionView insertItemsAtIndexPaths:@[newIdx]]; } completion:^(BOOL finished) { }]; } } } - (NSUInteger)numberOfColumnsForLayout:(QLWaterfallFlowLayout *)layout { return 3; } - (CGFloat)layout:(QLWaterfallFlowLayout *)layout heightForItemAtIndexPath:(NSIndexPath *)idx { NSNumber *randomHeight = self.dataSource[idx.item]; return [randomHeight floatValue]; } - (CGFloat)interItemSpaceForLayout:(QLWaterfallFlowLayout *)layout { return 10; } - (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section { return self.dataSource.count; } - (NSInteger)numberOfSectionsInCollectionView:(UICollectionView *)collectionView { return 1; } - (UICollectionReusableView *)collectionView:(UICollectionView *)collectionView viewForSupplementaryElementOfKind:(NSString *)kind atIndexPath:(NSIndexPath *)indexPath { UICollectionReusableView *view = [UICollectionReusableView new]; view.backgroundColor = [UIColor blueColor]; return view; } - (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath { WaterCollectionViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:QLWaterfallFlowLayoutIdentifier forIndexPath:indexPath]; cell.titleLb.text = [NSString stringWithFormat:@"%zi",indexPath.item]; return cell; } - (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration { UIInterfaceOrientation interfaceOrientation = [[UIApplication sharedApplication] statusBarOrientation]; if(UIInterfaceOrientationIsPortrait(interfaceOrientation) != 
UIInterfaceOrientationIsPortrait(toInterfaceOrientation)){ [self.collectionView reloadData]; } } @end
{ "content_hash": "1bb692ef8eda43d0d668aa452f44a456", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 167, "avg_line_length": 34.547008547008545, "alnum_prop": 0.7236516575952499, "repo_name": "debugly/QLWaterfallCollectionView", "id": "1ce2180eee7cf07ce71d185c83198b8806813aea", "size": "4297", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "QLWaterfallCollectionView/QLWaterfallCollectionView/ViewController.m", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Objective-C", "bytes": "13509" } ], "symlink_target": "" }
RELN_ROOT?= ${.CURDIR}/../../..

DOC?= article
FORMATS?= html
INSTALL_COMPRESSED?= gz
INSTALL_ONLY_COMPRESSED?=

JADEFLAGS+= -V %generate-article-toc%

# SGML content
SRCS+= article.xml
SRCS+= proc-alpha.xml
SRCS+= ../common/hw.ent
SRCS+= ../common/artheader.xml
SRCS+= ../common/dev.xml

.include "${RELN_ROOT}/share/mk/doc.relnotes.mk"
.include "${DOC_PREFIX}/share/mk/doc.project.mk"
{ "content_hash": "367de194ff56e4eec063e0557eb183eb", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 48, "avg_line_length": 21.666666666666668, "alnum_prop": 0.6846153846153846, "repo_name": "dplbsd/zcaplib", "id": "f80fca3e8e9b6cea947afb7cf369716dcc94c4b1", "size": "687", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "head/release/doc/de_DE.ISO8859-1/hardware/alpha/Makefile", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "AGS Script", "bytes": "62471" }, { "name": "Assembly", "bytes": "4478661" }, { "name": "Awk", "bytes": "278525" }, { "name": "Batchfile", "bytes": "20417" }, { "name": "C", "bytes": "383420305" }, { "name": "C++", "bytes": "72796771" }, { "name": "CSS", "bytes": "109748" }, { "name": "ChucK", "bytes": "39" }, { "name": "D", "bytes": "3784" }, { "name": "DIGITAL Command Language", "bytes": "10640" }, { "name": "DTrace", "bytes": "2311027" }, { "name": "Emacs Lisp", "bytes": "65902" }, { "name": "EmberScript", "bytes": "286" }, { "name": "Forth", "bytes": "184405" }, { "name": "GAP", "bytes": "72156" }, { "name": "Groff", "bytes": "32248806" }, { "name": "HTML", "bytes": "6749816" }, { "name": "IGOR Pro", "bytes": "6301" }, { "name": "Java", "bytes": "112547" }, { "name": "KRL", "bytes": "4950" }, { "name": "Lex", "bytes": "398817" }, { "name": "Limbo", "bytes": "3583" }, { "name": "Logos", "bytes": "187900" }, { "name": "Makefile", "bytes": "3551839" }, { "name": "Mathematica", "bytes": "9556" }, { "name": "Max", "bytes": "4178" }, { "name": "Module Management System", "bytes": "817" }, { "name": "NSIS", "bytes": "3383" }, { "name": "Objective-C", "bytes": "836351" }, { "name": "PHP", "bytes": "6649" }, { "name": "Perl", "bytes": "5530761" }, { "name": "Perl6", "bytes": "41802" }, { "name": "PostScript", "bytes": "140088" }, { "name": "Prolog", "bytes": "29514" }, { "name": "Protocol Buffer", "bytes": "61933" }, { "name": "Python", "bytes": "299247" }, { "name": "R", "bytes": "764" }, { "name": "Rebol", "bytes": "738" }, { "name": "Ruby", "bytes": "45958" }, { "name": "Scilab", "bytes": "197" }, { "name": "Shell", "bytes": "10501540" }, { "name": "SourcePawn", "bytes": "463194" }, { "name": "SuperCollider", "bytes": "80208" }, { "name": "Tcl", "bytes": "80913" }, { "name": "TeX", "bytes": "719821" }, { "name": "VimL", "bytes": "22201" }, { "name": "XS", "bytes": "25451" }, { "name": "XSLT", "bytes": "31488" }, { "name": "Yacc", "bytes": "1857830" } ], "symlink_target": "" }
TREFLECT_BEGIN(IObject)
	TREFLECT_FIELD(m_Flag, "Flag")
TREFLECT_END(IObject)

//------------------------------------------------------------------------------
#endif // FOUNDATION_REFLECTION_DECL_H
{ "content_hash": "2d988d567de8512c5506e46a7e69c9ab", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 80, "avg_line_length": 33, "alnum_prop": 0.45454545454545453, "repo_name": "hilbertdu/TurboEngine", "id": "c4b98dc8e13b5397d311f21b2ef78f2445bd4084", "size": "664", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Code/Library/Foundation/Reflected/ReflectionDecl.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1430387" }, { "name": "C++", "bytes": "6753994" }, { "name": "Lua", "bytes": "82575" }, { "name": "Makefile", "bytes": "11307" }, { "name": "Objective-C", "bytes": "25783" }, { "name": "Objective-C++", "bytes": "145103" }, { "name": "Scala", "bytes": "3786" }, { "name": "Shell", "bytes": "34980" }, { "name": "SuperCollider", "bytes": "4733" } ], "symlink_target": "" }
package org.hisp.dhis.dxf2.webmessage; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement; import com.google.common.base.MoreObjects; import org.hisp.dhis.common.DxfNamespaces; import org.hisp.dhis.feedback.Status; import org.springframework.http.HttpStatus; /** * @author Morten Olav Hansen <[email protected]> */ @JacksonXmlRootElement( localName = "webMessage", namespace = DxfNamespaces.DXF_2_0 ) @JsonPropertyOrder( { "httpStatus", "httpStatusCode", "status", "code", "message", "devMessage", "response" } ) public class WebMessage { /** * Message status, currently two statuses are available: OK, ERROR. Default * value is OK. * * @see Status */ protected Status status = Status.OK; /** * Internal code for this message. Should be used to help with third party clients which * should not have to resort to string parsing of message to know what is happening. */ protected Integer code; /** * HTTP status. */ protected HttpStatus httpStatus = HttpStatus.OK; /** * Non-technical message, should be simple and could possibly be used to display message * to an end-user. */ protected String message; /** * Technical message that should explain as much details as possible, mainly to be used * for debugging. */ protected String devMessage; /** * When a simple text feedback is not enough, you can use this interface to implement your * own message responses. * * @see WebMessageResponse */ protected WebMessageResponse response; // ------------------------------------------------------------------------- // Constructors // ------------------------------------------------------------------------- public WebMessage() { } public WebMessage( Status status ) { this.status = status; } public WebMessage( Status status, HttpStatus httpStatus ) { this.status = status; this.httpStatus = httpStatus; } // ------------------------------------------------------------------------- // Logic // ------------------------------------------------------------------------- public boolean isOk() { return Status.OK == status; } public boolean isWarning() { return Status.WARNING == status; } public boolean isError() { return Status.ERROR == status; } // ------------------------------------------------------------------------- // Get and set methods // ------------------------------------------------------------------------- @JsonProperty @JacksonXmlProperty( isAttribute = true ) public Status getStatus() { return status; } public void setStatus( Status status ) { this.status = status; } @JsonProperty @JacksonXmlProperty( isAttribute = true ) public Integer getCode() { return code; } public void setCode( Integer code ) { this.code = code; } @JsonProperty @JacksonXmlProperty( isAttribute = true ) public String getHttpStatus() { return httpStatus.getReasonPhrase(); } public void setHttpStatus( HttpStatus httpStatus ) { this.httpStatus = httpStatus; } @JsonProperty @JacksonXmlProperty( isAttribute = true ) public Integer getHttpStatusCode() { return httpStatus.value(); } @JsonProperty @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 ) public String getMessage() { return message; } public void setMessage( String message ) { this.message = message; } @JsonProperty @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 ) public String getDevMessage() { return devMessage; } public void setDevMessage( String devMessage ) { 
this.devMessage = devMessage; } @JsonProperty @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 ) public WebMessageResponse getResponse() { return response; } public void setResponse( WebMessageResponse response ) { this.response = response; } @Override public String toString() { return MoreObjects.toStringHelper( this ) .add( "status", status ) .add( "code", code ) .add( "httpStatus", httpStatus ) .add( "message", message ) .add( "devMessage", devMessage ) .add( "response", response ) .toString(); } }
{ "content_hash": "41a80ecd97b4e7c578d96eb20fc07f38", "timestamp": "", "source": "github", "line_count": 195, "max_line_length": 94, "avg_line_length": 24.861538461538462, "alnum_prop": 0.5666254125412541, "repo_name": "msf-oca-his/dhis-core", "id": "59e286c8eb6f7e7197e2814e471d18e12067f984", "size": "6404", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/webmessage/WebMessage.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "195574" }, { "name": "HTML", "bytes": "68728" }, { "name": "Java", "bytes": "18310173" }, { "name": "JavaScript", "bytes": "1002108" }, { "name": "PLpgSQL", "bytes": "690824" }, { "name": "Ruby", "bytes": "1011" }, { "name": "Shell", "bytes": "394" }, { "name": "XSLT", "bytes": "8281" } ], "symlink_target": "" }
// <copyright file="IAgencyDataAccessor.cs" company="Yojowa, LLC"> // Copyright (c) 2016-2020 All Rights Reserved // </copyright> // <author>Alex Bulankou</author> // <date>01/17/2017</date> namespace Yojowa.WebJobAgency { using System; using System.Collections.Generic; /// <summary> /// Agency data accessor interface /// </summary> public interface IAgencyDataAccessor { /// <summary> /// Gets the jobs. /// </summary> /// <param name="jobState">State of the job.</param> /// <returns>Jobs corresponding to the specified state</returns> IEnumerable<AgencyJobInfo> GetJobs(AgencyJobState jobState = AgencyJobState.Scheduled | AgencyJobState.Running); /// <summary> /// Gets the clients. /// </summary> /// <param name="clientState">State of the client.</param> /// <returns>Clients matching specified state</returns> IEnumerable<AgencyClientInfo> GetClients(AgencyClientState clientState = AgencyClientState.Idle | AgencyClientState.RunningJob); /// <summary> /// Schedules the job. /// </summary> /// <param name="jobId">The job identifier.</param> /// <param name="configuration">The configuration.</param> void ScheduleJob(string jobId, string configuration); /// <summary> /// Adds the client or updates the last updated date for existing client /// </summary> /// <param name="clientId">The client identifier.</param> void AddOrUpdateClient(string clientId); /// <summary> /// Removes the client. /// </summary> /// <param name="clientId">The client identifier.</param> void RemoveClient(string clientId); /// <summary> /// Cancels the job. /// </summary> /// <param name="jobSerialId">The job serial identifier.</param> void CancelJob(int jobSerialId); /// <summary> /// Starts the running job. /// </summary> /// <param name="clientId">The client identifier.</param> /// <param name="jobId">The job identifier.</param> void StartRunningJob( string clientId, string jobId); /// <summary> /// Updates the job progress. /// </summary> /// <param name="jobId">The job identifier.</param> /// <param name="clientId">The client identifier.</param> /// <param name="percentComplete">The percent complete.</param> /// <param name="timeRemaining">Time remaining to completion.</param> void UpdateJobProgress( string jobId, string clientId, int percentComplete, TimeSpan timeRemaining); /// <summary> /// Completes the job. /// </summary> /// <param name="jobId">The job identifier.</param> /// <param name="clientId">The client identifier.</param> /// <param name="completionState">State of the completion.</param> void CompleteJob( string jobId, string clientId, CompletionState completionState); } }
{ "content_hash": "96be807c218f03a752df4b419971939c", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 136, "avg_line_length": 35.78651685393258, "alnum_prop": 0.5905808477237049, "repo_name": "stopbystop/sbs-app", "id": "6f843a0e8d651b66533a9a7939a00826d9492b76", "size": "3187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/agency/IAgencyDataAccessor.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "19197" }, { "name": "C#", "bytes": "238621" }, { "name": "C++", "bytes": "14898" }, { "name": "CSS", "bytes": "1239661" }, { "name": "Dockerfile", "bytes": "3138" }, { "name": "HTML", "bytes": "195630" }, { "name": "Handlebars", "bytes": "52960" }, { "name": "Java", "bytes": "1058057" }, { "name": "JavaScript", "bytes": "1496151" }, { "name": "Objective-C", "bytes": "364263" }, { "name": "QML", "bytes": "2765" }, { "name": "Shell", "bytes": "1927" }, { "name": "Starlark", "bytes": "1720" }, { "name": "TypeScript", "bytes": "173570" } ], "symlink_target": "" }
typedef XID GLXWindow; typedef XID GLXDrawable; typedef struct __GLXFBConfig* GLXFBConfig; typedef struct __GLXcontext* GLXContext; typedef void (*__GLXextproc)(void); typedef int (*PFNGLXGETFBCONFIGATTRIBPROC)(Display*,GLXFBConfig,int,int*); typedef const char* (*PFNGLXGETCLIENTSTRINGPROC)(Display*,int); typedef Bool (*PFNGLXQUERYEXTENSIONPROC)(Display*,int*,int*); typedef Bool (*PFNGLXQUERYVERSIONPROC)(Display*,int*,int*); typedef void (*PFNGLXDESTROYCONTEXTPROC)(Display*,GLXContext); typedef Bool (*PFNGLXMAKECURRENTPROC)(Display*,GLXDrawable,GLXContext); typedef void (*PFNGLXSWAPBUFFERSPROC)(Display*,GLXDrawable); typedef const char* (*PFNGLXQUERYEXTENSIONSSTRINGPROC)(Display*,int); typedef GLXFBConfig* (*PFNGLXGETFBCONFIGSPROC)(Display*,int,int*); typedef GLXContext (*PFNGLXCREATENEWCONTEXTPROC)(Display*,GLXFBConfig,int,GLXContext,Bool); typedef __GLXextproc (* PFNGLXGETPROCADDRESSPROC)(const GLubyte *procName); typedef void (*PFNGLXSWAPINTERVALEXTPROC)(Display*,GLXDrawable,int); typedef XVisualInfo* (*PFNGLXGETVISUALFROMFBCONFIGPROC)(Display*,GLXFBConfig); typedef GLXWindow (*PFNGLXCREATEWINDOWPROC)(Display*,GLXFBConfig,Window,const int*); typedef void (*PFNGLXDESTROYWINDOWPROC)(Display*,GLXWindow); typedef int (*PFNGLXSWAPINTERVALMESAPROC)(int); typedef int (*PFNGLXSWAPINTERVALSGIPROC)(int); typedef GLXContext (*PFNGLXCREATECONTEXTATTRIBSARBPROC)(Display*,GLXFBConfig,GLXContext,Bool,const int*); // libGL.so function pointer typedefs #define glXGetFBConfigs _glfw.glx.GetFBConfigs #define glXGetFBConfigAttrib _glfw.glx.GetFBConfigAttrib #define glXGetClientString _glfw.glx.GetClientString #define glXQueryExtension _glfw.glx.QueryExtension #define glXQueryVersion _glfw.glx.QueryVersion #define glXDestroyContext _glfw.glx.DestroyContext #define glXMakeCurrent _glfw.glx.MakeCurrent #define glXSwapBuffers _glfw.glx.SwapBuffers #define glXQueryExtensionsString _glfw.glx.QueryExtensionsString #define glXCreateNewContext _glfw.glx.CreateNewContext #define glXGetVisualFromFBConfig _glfw.glx.GetVisualFromFBConfig #define glXCreateWindow _glfw.glx.CreateWindow #define glXDestroyWindow _glfw.glx.DestroyWindow #define _GLFW_PLATFORM_CONTEXT_STATE _GLFWcontextGLX glx #define _GLFW_PLATFORM_LIBRARY_CONTEXT_STATE _GLFWlibraryGLX glx // GLX-specific per-context data // typedef struct _GLFWcontextGLX { GLXContext handle; GLXWindow window; } _GLFWcontextGLX; // GLX-specific global data // typedef struct _GLFWlibraryGLX { int major, minor; int eventBase; int errorBase; // dlopen handle for libGL.so.1 void* handle; // GLX 1.3 functions PFNGLXGETFBCONFIGSPROC GetFBConfigs; PFNGLXGETFBCONFIGATTRIBPROC GetFBConfigAttrib; PFNGLXGETCLIENTSTRINGPROC GetClientString; PFNGLXQUERYEXTENSIONPROC QueryExtension; PFNGLXQUERYVERSIONPROC QueryVersion; PFNGLXDESTROYCONTEXTPROC DestroyContext; PFNGLXMAKECURRENTPROC MakeCurrent; PFNGLXSWAPBUFFERSPROC SwapBuffers; PFNGLXQUERYEXTENSIONSSTRINGPROC QueryExtensionsString; PFNGLXCREATENEWCONTEXTPROC CreateNewContext; PFNGLXGETVISUALFROMFBCONFIGPROC GetVisualFromFBConfig; PFNGLXCREATEWINDOWPROC CreateWindow; PFNGLXDESTROYWINDOWPROC DestroyWindow; // GLX 1.4 and extension functions PFNGLXGETPROCADDRESSPROC GetProcAddress; PFNGLXGETPROCADDRESSPROC GetProcAddressARB; PFNGLXSWAPINTERVALSGIPROC SwapIntervalSGI; PFNGLXSWAPINTERVALEXTPROC SwapIntervalEXT; PFNGLXSWAPINTERVALMESAPROC SwapIntervalMESA; PFNGLXCREATECONTEXTATTRIBSARBPROC CreateContextAttribsARB; GLFWbool SGI_swap_control; GLFWbool EXT_swap_control; GLFWbool MESA_swap_control; GLFWbool ARB_multisample; GLFWbool 
ARB_framebuffer_sRGB; GLFWbool EXT_framebuffer_sRGB; GLFWbool ARB_create_context; GLFWbool ARB_create_context_profile; GLFWbool ARB_create_context_robustness; GLFWbool EXT_create_context_es2_profile; GLFWbool ARB_create_context_no_error; GLFWbool ARB_context_flush_control; } _GLFWlibraryGLX; GLFWbool _glfwInitGLX(void); void _glfwTerminateGLX(void); GLFWbool _glfwCreateContextGLX(_GLFWwindow* window, const _GLFWctxconfig* ctxconfig, const _GLFWfbconfig* fbconfig); void _glfwDestroyContextGLX(_GLFWwindow* window); GLFWbool _glfwChooseVisualGLX(const _GLFWwndconfig* wndconfig, const _GLFWctxconfig* ctxconfig, const _GLFWfbconfig* fbconfig, Visual** visual, int* depth);
{ "content_hash": "f7be30617ac67b53d65963a01bd6fd83", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 105, "avg_line_length": 43.23893805309734, "alnum_prop": 0.7259516987310684, "repo_name": "IntelRealSense/librealsense", "id": "f767cb1417419141fa90006bdf9b37eff7d96f39", "size": "7604", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "third-party/glfw/src/glx_context.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "736" }, { "name": "C", "bytes": "6226973" }, { "name": "C#", "bytes": "541746" }, { "name": "C++", "bytes": "9343375" }, { "name": "CMake", "bytes": "181677" }, { "name": "CSS", "bytes": "9575" }, { "name": "Cuda", "bytes": "38173" }, { "name": "Dockerfile", "bytes": "2393" }, { "name": "HTML", "bytes": "3550" }, { "name": "Java", "bytes": "309233" }, { "name": "JavaScript", "bytes": "480021" }, { "name": "MATLAB", "bytes": "106616" }, { "name": "PowerShell", "bytes": "7989" }, { "name": "Python", "bytes": "485240" }, { "name": "ShaderLab", "bytes": "15538" }, { "name": "Shell", "bytes": "108709" } ], "symlink_target": "" }
using System.ComponentModel.DataAnnotations.Schema; using System.Data.Entity.ModelConfiguration; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Metadata.Builders; namespace NOF2.Demo.Model { public static partial class Mapper { public static void Map(this EntityTypeBuilder<SalesPersonQuotaHistory> builder) { builder.HasKey(t => new { t.BusinessEntityID, t.mappedQuotaDate }); // Properties builder.Property(t => t.BusinessEntityID) .ValueGeneratedNever(); // Table & Column Mappings builder.ToTable("SalesPersonQuotaHistory", "Sales"); builder.Property(t => t.BusinessEntityID).HasColumnName("BusinessEntityID"); builder.Property(t => t.mappedQuotaDate).HasColumnName("QuotaDate"); builder.Property(t => t.mappedSalesQuota).HasColumnName("SalesQuota"); builder.Property(t => t.RowGuid).HasColumnName("rowguid"); builder.Property(t => t.mappedModifiedDate).HasColumnName("ModifiedDate").IsConcurrencyToken(false); // Relationships builder.HasOne(t => t.SalesPerson) .WithMany(t => t.mappedQuotaHistory) .HasForeignKey(d => d.BusinessEntityID); } } }
{ "content_hash": "dd5fb161097e31d8010af0d345771a6a", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 113, "avg_line_length": 41.21875, "alnum_prop": 0.6512509476876421, "repo_name": "NakedObjectsGroup/NakedObjectsFramework", "id": "e8d8ba81c568263c54373059d98e43541ba1f443", "size": "1319", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Test/NOF2.Demo.Database/Mapping/SalesPersonQuotaHistoryMap.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3010" }, { "name": "C#", "bytes": "17252161" }, { "name": "CSS", "bytes": "87105" }, { "name": "F#", "bytes": "2080309" }, { "name": "HTML", "bytes": "77491" }, { "name": "JavaScript", "bytes": "7967" }, { "name": "TSQL", "bytes": "5089" }, { "name": "TypeScript", "bytes": "683807" }, { "name": "Vim Snippet", "bytes": "41860" }, { "name": "Visual Basic .NET", "bytes": "288499" } ], "symlink_target": "" }
// Package status defines a few useful functions for our binaries, // mainly to link the status page with a vtctld instance. package status import ( "fmt" "html/template" "net/url" "strings" "github.com/spf13/pflag" "vitess.io/vitess/go/vt/servenv" ) var vtctldAddr string func registerFlags(fs *pflag.FlagSet) { fs.StringVar(&vtctldAddr, "vtctld_addr", vtctldAddr, "address of a vtctld instance") } func init() { servenv.OnParseFor("vtcombo", registerFlags) servenv.OnParseFor("vtgate", registerFlags) servenv.OnParseFor("vttablet", registerFlags) } // MakeVtctldRedirect returns an absolute vtctld url that will // redirect to the page for the topology object specified in q. func MakeVtctldRedirect(text string, q map[string]string) template.HTML { query := url.Values{} for k, v := range q { query.Set(k, v) } url := "explorers/redirect" + "?" + query.Encode() return VtctldLink(text, url) } // VtctldLink returns the HTML to display a link to the fully // qualified vtctld url whose path is given as parameter. // If no vtctld_addr flag was passed in, we just return the text with no link. func VtctldLink(text, urlPath string) template.HTML { if vtctldAddr == "" { return template.HTML(text) } var fullURL string if strings.HasSuffix(vtctldAddr, "/") { fullURL = vtctldAddr + urlPath } else { fullURL = vtctldAddr + "/" + urlPath } return template.HTML(fmt.Sprintf(`<a href="%v">%v</a>`, fullURL, text)) } // VtctldKeyspace returns the keyspace name, possibly linked to the // keyspace page in vtctld. func VtctldKeyspace(keyspace string) template.HTML { return MakeVtctldRedirect(keyspace, map[string]string{ "type": "keyspace", "keyspace": keyspace, }) } // VtctldShard returns the shard name, possibly linked to the shard // page in vtctld. func VtctldShard(keyspace, shard string) template.HTML { return MakeVtctldRedirect(shard, map[string]string{ "type": "shard", "keyspace": keyspace, "shard": shard, }) } // VtctldSrvCell returns the cell name, possibly linked to the // serving graph page in vtctld for that page. func VtctldSrvCell(cell string) template.HTML { return VtctldLink(cell, "serving_graph/"+cell) } // VtctldSrvKeyspace returns the keyspace name, possibly linked to the // SrvKeyspace page in vtctld. func VtctldSrvKeyspace(cell, keyspace string) template.HTML { return MakeVtctldRedirect(keyspace, map[string]string{ "type": "srv_keyspace", "cell": cell, "keyspace": keyspace, }) } // VtctldReplication returns 'cell/keyspace/shard', possibly linked to the // ShardReplication page in vtctld. func VtctldReplication(cell, keyspace, shard string) template.HTML { return MakeVtctldRedirect(fmt.Sprintf("%v/%v/%v", cell, keyspace, shard), map[string]string{ "type": "replication", "keyspace": keyspace, "shard": shard, "cell": cell, }) } // VtctldTablet returns the tablet alias, possibly linked to the // Tablet page in vtctld. func VtctldTablet(aliasName string) template.HTML { return MakeVtctldRedirect(aliasName, map[string]string{ "type": "tablet", "alias": aliasName, }) } // StatusFuncs returns a FuncMap that contains all of our methods here. // It is exported so tests can use them. 
var StatusFuncs = template.FuncMap{ "github_com_vitessio_vitess_vtctld_keyspace": VtctldKeyspace, "github_com_vitessio_vitess_vtctld_shard": VtctldShard, "github_com_vitessio_vitess_vtctld_srv_cell": VtctldSrvCell, "github_com_vitessio_vitess_vtctld_srv_keyspace": VtctldSrvKeyspace, "github_com_vitessio_vitess_vtctld_replication": VtctldReplication, "github_com_vitessio_vitess_vtctld_tablet": VtctldTablet, } func init() { servenv.AddStatusFuncs(StatusFuncs) }
{ "content_hash": "0813b7b9e1f63bd9786332c5af6a1b23", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 85, "avg_line_length": 29.234375, "alnum_prop": 0.7252805986103688, "repo_name": "mahak/vitess", "id": "ad11af5050b7a5b6e52ceaff286a340fe00a2bc5", "size": "4307", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "go/vt/status/status.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "198" }, { "name": "CSS", "bytes": "18852" }, { "name": "Dockerfile", "bytes": "28594" }, { "name": "Go", "bytes": "23060389" }, { "name": "HCL", "bytes": "959" }, { "name": "HTML", "bytes": "25753" }, { "name": "Java", "bytes": "990163" }, { "name": "JavaScript", "bytes": "33028" }, { "name": "Jsonnet", "bytes": "121075" }, { "name": "Makefile", "bytes": "23322" }, { "name": "Perl", "bytes": "3161" }, { "name": "Python", "bytes": "1955" }, { "name": "SCSS", "bytes": "41351" }, { "name": "Shell", "bytes": "184185" }, { "name": "Smarty", "bytes": "30493" }, { "name": "TypeScript", "bytes": "711819" }, { "name": "Yacc", "bytes": "162805" } ], "symlink_target": "" }
import statsmodels.api as sm from scipy.stats import norm import numpy as np import pytest import scipy import sys import os from respy.python.solve.solve_auxiliary import get_predictions from codes.auxiliary import write_interpolation_grid from codes.random_init import generate_init from respy.python.shared.shared_auxiliary import read_draws from codes.auxiliary import write_draws from respy.python.estimate.estimate_auxiliary import get_optim_paras from respy.python.shared.shared_auxiliary import replace_missing_values from respy.python.solve.solve_auxiliary import get_endogenous_variable from respy.python.solve.solve_auxiliary import get_future_value from respy.python.shared.shared_auxiliary import get_cholesky from respy.python.shared.shared_constants import IS_FORTRAN from respy.fortran.interface import resfort_interface from respy.python.solve.solve_python import pyth_solve from respy.python.simulate.simulate_python import pyth_simulate from respy.python.evaluate.evaluate_python import pyth_evaluate from respy.python.estimate.estimate_python import pyth_criterion from respy.python.shared.shared_auxiliary import dist_class_attributes from respy.python.shared.shared_auxiliary import dist_model_paras from respy.python.shared.shared_auxiliary import create_draws from respy.python.shared.shared_constants import TEST_RESOURCES_DIR from respy.python.solve.solve_auxiliary import pyth_create_state_space from respy.python.solve.solve_auxiliary import pyth_calculate_payoffs_systematic from respy.python.solve.solve_auxiliary import pyth_backward_induction from respy.python.solve.solve_auxiliary import get_simulated_indicator from respy.python.solve.solve_auxiliary import get_exogenous_variables from respy import RespyCls from respy import simulate # Edit of PYTHONPATH required for PYTHON 2 as no __init__.py in tests # subdirectory. If __init__.py is added, the path resolution for PYTEST # breaks down. if IS_FORTRAN: sys.path.insert(0, TEST_RESOURCES_DIR) import f2py_interface as fort_debug @pytest.mark.skipif(not IS_FORTRAN, reason='No FORTRAN available') @pytest.mark.usefixtures('fresh_directory', 'set_seed') class TestClass(object): """ This class groups together some tests. """ def test_1(self): """ Compare the evaluation of the criterion function for the ambiguity optimization and the simulated expected future value between the FORTRAN and PYTHON implementations. These tests are set up a separate test case due to the large setup cost to construct the ingredients for the interface. 
""" # Generate constraint periods constraints = dict() constraints['version'] = 'PYTHON' # Generate random initialization file generate_init(constraints) # Perform toolbox actions respy_obj = RespyCls('test.respy.ini') respy_obj = simulate(respy_obj) # Extract class attributes periods_payoffs_systematic, states_number_period, mapping_state_idx, \ periods_emax, num_periods, states_all, num_draws_emax, edu_start, \ edu_max, delta = \ dist_class_attributes(respy_obj, 'periods_payoffs_systematic', 'states_number_period', 'mapping_state_idx', 'periods_emax', 'num_periods', 'states_all', 'num_draws_emax', 'edu_start', 'edu_max', 'delta') # Sample draws draws_standard = np.random.multivariate_normal(np.zeros(4), np.identity(4), (num_draws_emax,)) # Sampling of random period and admissible state index period = np.random.choice(range(num_periods)) k = np.random.choice(range(states_number_period[period])) # Select systematic payoffs payoffs_systematic = periods_payoffs_systematic[period, k, :] # Evaluation of simulated expected future values args = (num_periods, num_draws_emax, period, k, draws_standard, payoffs_systematic, edu_max, edu_start, periods_emax, states_all, mapping_state_idx, delta) py = get_future_value(*args) f90 = fort_debug.wrapper_get_future_value(*args) np.testing.assert_allclose(py, f90, rtol=1e-05, atol=1e-06) def test_2(self): """ Compare results between FORTRAN and PYTHON of selected hand-crafted functions. In test_97() we test FORTRAN implementations against PYTHON intrinsic routines. """ for _ in range(25): # Create grid of admissible state space values. num_periods = np.random.randint(1, 15) edu_start = np.random.randint(1, 5) edu_max = edu_start + np.random.randint(1, 5) # Prepare interface min_idx = min(num_periods, (edu_max - edu_start + 1)) # FORTRAN args = (num_periods, edu_start, edu_max, min_idx) fort_a, fort_b, fort_c, fort_d = \ fort_debug.f2py_create_state_space(*args) py_a, py_b, py_c, py_d = pyth_create_state_space(*args) # Ensure equivalence for obj in [[fort_a, py_a], [fort_b, py_b], [fort_c, py_c], [fort_d, py_d]]: np.testing.assert_allclose(obj[0], obj[1]) for _ in range(100): # Draw random request for testing purposes num_covars = np.random.randint(2, 10) num_agents = np.random.randint(100, 1000) tiny = np.random.normal(size=num_agents) beta = np.random.normal(size=num_covars) # Generate sample exog = np.random.sample((num_agents, num_covars)) exog[:, 0] = 1 endog = np.dot(exog, beta) + tiny # Run statsmodels results = sm.OLS(endog, exog).fit() # Check parameters py = results.params f90 = fort_debug.wrapper_get_coefficients(endog, exog, num_covars, num_agents) np.testing.assert_almost_equal(py, f90) # Check prediction py = results.predict(exog) f90 = fort_debug.wrapper_point_predictions(exog, f90, num_agents) np.testing.assert_almost_equal(py, f90) # Check coefficient of determination and the standard errors. py = [results.rsquared, results.bse] f90 = fort_debug.wrapper_get_pred_info(endog, f90, exog, num_agents, num_covars) for i in range(2): np.testing.assert_almost_equal(py[i], f90[i]) def test_3(self): """ Compare results between FORTRAN and PYTHON of selected functions. """ for _ in range(10): # Draw random requests for testing purposes. 
num_draws_emax = np.random.randint(2, 1000) dim = np.random.randint(1, 6) matrix = (np.random.multivariate_normal(np.zeros(dim), np.identity(dim), dim)) cov = np.dot(matrix, matrix.T) # PDF of normal distribution args = np.random.normal(size=3) args[-1] **= 2 f90 = fort_debug.wrapper_normal_pdf(*args) py = norm.pdf(*args) np.testing.assert_almost_equal(py, f90) # Singular Value Decomposition py = scipy.linalg.svd(matrix) f90 = fort_debug.wrapper_svd(matrix, dim) for i in range(3): np.testing.assert_allclose(py[i], f90[i], rtol=1e-05, atol=1e-06) # Pseudo-Inverse py = np.linalg.pinv(matrix) f90 = fort_debug.wrapper_pinv(matrix, dim) np.testing.assert_allclose(py, f90, rtol=1e-05, atol=1e-06) # Inverse py = np.linalg.inv(cov) f90 = fort_debug.wrapper_inverse(cov, dim) np.testing.assert_allclose(py, f90, rtol=1e-05, atol=1e-06) # Determinant py = np.linalg.det(cov) f90 = fort_debug.wrapper_determinant(cov) np.testing.assert_allclose(py, f90, rtol=1e-05, atol=1e-06) # Trace py = np.trace(cov) f90 = fort_debug.wrapper_trace(cov) np.testing.assert_allclose(py, f90, rtol=1e-05, atol=1e-06) # Random normal deviates. This only tests the interface, requires # visual inspection in IPYTHON notebook as well. fort_debug.wrapper_standard_normal(num_draws_emax) # Clipping values below and above bounds. num_values = np.random.randint(1, 10000) lower_bound = np.random.randn() upper_bound = lower_bound + np.random.ranf() values = np.random.normal(size=num_values) f90 = fort_debug.wrapper_clip_value(values, lower_bound, upper_bound, num_values) py = np.clip(values, lower_bound, upper_bound) np.testing.assert_almost_equal(py, f90) def test_4(self): """ Testing the core functions of the solution step for the equality of results between the PYTHON and FORTRAN implementations. """ # Generate random initialization file generate_init() # Perform toolbox actions respy_obj = RespyCls('test.respy.ini') # Ensure that backward induction routines use the same grid for the # interpolation. write_interpolation_grid('test.respy.ini') # Extract class attributes num_periods, edu_start, edu_max, min_idx, model_paras, num_draws_emax, \ seed_emax, is_debug, delta, is_interpolated, num_points_interp, = \ dist_class_attributes(respy_obj, 'num_periods', 'edu_start', 'edu_max', 'min_idx', 'model_paras', 'num_draws_emax', 'seed_emax', 'is_debug', 'delta', 'is_interpolated', 'num_points_interp') # Auxiliary objects coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky = \ dist_model_paras(model_paras, is_debug) # Check the state space creation. args = (num_periods, edu_start, edu_max, min_idx) pyth = pyth_create_state_space(*args) f2py = fort_debug.f2py_create_state_space(*args) for i in range(4): np.testing.assert_allclose(pyth[i], f2py[i]) # Carry some results from the state space creation for future use. states_all, states_number_period = pyth[:2] mapping_state_idx, max_states_period = pyth[2:] # Cutting to size states_all = states_all[:, :max(states_number_period), :] # Check calculation of systematic components of payoffs. args = (num_periods, states_number_period, states_all, edu_start, coeffs_a, coeffs_b, coeffs_edu, coeffs_home, max_states_period) pyth = pyth_calculate_payoffs_systematic(*args) f2py = fort_debug.f2py_calculate_payoffs_systematic(*args) np.testing.assert_allclose(pyth, f2py) # Carry some results from the systematic payoff calculation for # future use and create the required set of disturbances. 
periods_draws_emax = create_draws(num_periods, num_draws_emax, seed_emax, is_debug) periods_payoffs_systematic = pyth # Check backward induction procedure. args = (num_periods, max_states_period, periods_draws_emax, num_draws_emax, states_number_period, periods_payoffs_systematic, edu_max, edu_start, mapping_state_idx, states_all, delta, is_debug, is_interpolated, num_points_interp, shocks_cholesky) pyth = pyth_backward_induction(*args) f2py = fort_debug.f2py_backward_induction(*args) np.testing.assert_allclose(pyth, f2py) def test_5(self): """ This methods ensures that the core functions yield the same results across implementations. """ # Generate random initialization file generate_init() # Perform toolbox actions respy_obj = RespyCls('test.respy.ini') # Ensure that backward induction routines use the same grid for the # interpolation. max_states_period = write_interpolation_grid('test.respy.ini') # Extract class attributes num_periods, edu_start, edu_max, min_idx, model_paras, num_draws_emax, \ is_debug, delta, is_interpolated, num_points_interp, is_myopic, num_agents_sim, \ num_draws_prob, tau, paras_fixed, seed_sim = \ dist_class_attributes( respy_obj, 'num_periods', 'edu_start', 'edu_max', 'min_idx', 'model_paras', 'num_draws_emax', 'is_debug', 'delta', 'is_interpolated', 'num_points_interp', 'is_myopic', 'num_agents_sim', 'num_draws_prob', 'tau', 'paras_fixed', 'seed_sim') # Write out random components and interpolation grid to align the # three implementations. max_draws = max(num_agents_sim, num_draws_emax, num_draws_prob) write_draws(num_periods, max_draws) periods_draws_emax = read_draws(num_periods, num_draws_emax) periods_draws_prob = read_draws(num_periods, num_draws_prob) periods_draws_sims = read_draws(num_periods, num_agents_sim) # Extract coefficients coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky = dist_model_paras( model_paras, True) # Check the full solution procedure base_args = (coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky, is_interpolated, num_draws_emax, num_periods, num_points_interp, is_myopic, edu_start, is_debug, edu_max, min_idx, delta) fort, _ = resfort_interface(respy_obj, 'simulate') pyth = pyth_solve(*base_args + (periods_draws_emax,)) f2py = fort_debug.f2py_solve(*base_args + (periods_draws_emax, max_states_period)) for alt in [f2py, fort]: for i in range(5): np.testing.assert_allclose(pyth[i], alt[i]) # Distribute solution arguments for further use in simulation test. 
periods_payoffs_systematic, _, mapping_state_idx, periods_emax, states_all = pyth args = (periods_payoffs_systematic, mapping_state_idx, \ periods_emax, states_all, shocks_cholesky, num_periods, edu_start, edu_max, delta, num_agents_sim, periods_draws_sims, seed_sim) pyth = pyth_simulate(*args) f2py = fort_debug.f2py_simulate(*args) np.testing.assert_allclose(pyth, f2py) data_array = pyth base_args = (coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky, is_interpolated, num_draws_emax, num_periods, num_points_interp, is_myopic, edu_start, is_debug, edu_max, min_idx, delta, data_array, num_agents_sim, num_draws_prob, tau) args = base_args + (periods_draws_emax, periods_draws_prob) pyth = pyth_evaluate(*args) args = base_args + (periods_draws_emax, periods_draws_prob) f2py = fort_debug.f2py_evaluate(*args) np.testing.assert_allclose(pyth, f2py) # Evaluation of criterion function x0 = get_optim_paras(coeffs_a, coeffs_b, coeffs_edu, coeffs_home, shocks_cholesky, 'all', paras_fixed, is_debug) args = ( is_interpolated, num_draws_emax, num_periods, num_points_interp, is_myopic, edu_start, is_debug, edu_max, min_idx, delta, data_array, num_agents_sim, num_draws_prob, tau, periods_draws_emax, periods_draws_prob) pyth = pyth_criterion(x0, *args) f2py = fort_debug.f2py_criterion(x0, *args) np.testing.assert_allclose(pyth, f2py) def test_6(self): """ Further tests for the interpolation routines. """ # Generate random initialization file generate_init() # Perform toolbox actions respy_obj = RespyCls('test.respy.ini') respy_obj = simulate(respy_obj) # Extract class attributes periods_payoffs_systematic, states_number_period, mapping_state_idx, seed_prob, periods_emax, num_periods, states_all, num_points_interp, edu_start, num_draws_emax, is_debug, edu_max, delta = dist_class_attributes( respy_obj, 'periods_payoffs_systematic', 'states_number_period', 'mapping_state_idx', 'seed_prob', 'periods_emax', 'num_periods', 'states_all', 'num_points_interp', 'edu_start', 'num_draws_emax', 'is_debug', 'edu_max', 'delta') # Add some additional objects required for the interfaces to the # functions. period = np.random.choice(range(num_periods)) periods_draws_emax = create_draws(num_periods, num_draws_emax, seed_prob, is_debug) draws_emax = periods_draws_emax[period, :, :] num_states = states_number_period[period] shifts = np.random.randn(4) # Slight modification of request which assures that the # interpolation code is working. num_points_interp = min(num_points_interp, num_states) # Get the IS_SIMULATED indicator for the subset of points which are # used for the predication model. args = (num_points_interp, num_states, period, is_debug) is_simulated = get_simulated_indicator(*args) # Construct the exogenous variables for all points of the state # space. args = ( period, num_periods, num_states, delta, periods_payoffs_systematic, shifts, edu_max, edu_start, mapping_state_idx, periods_emax, states_all) py = get_exogenous_variables(*args) f90 = fort_debug.wrapper_get_exogenous_variables(*args) np.testing.assert_equal(py, f90) # Distribute validated results for further functions. exogenous, maxe = py # Construct endogenous variable so that the prediction model can be # fitted. 
args = (period, num_periods, num_states, delta, periods_payoffs_systematic, edu_max, edu_start, mapping_state_idx, periods_emax, states_all, is_simulated, num_draws_emax, maxe, draws_emax) py = get_endogenous_variable(*args) f90 = fort_debug.wrapper_get_endogenous_variable(*args) np.testing.assert_equal(py, replace_missing_values(f90)) # Distribute validated results for further functions. endogenous = py args = (endogenous, exogenous, maxe, is_simulated, num_points_interp, num_states, is_debug) py = get_predictions(*args) f90 = fort_debug.wrapper_get_predictions(*args[:-1]) np.testing.assert_array_almost_equal(py, f90) def test_7(self): """ This is a special test for auxiliary functions related to the interpolation setup. """ # Impose constraints constr = dict() constr['periods'] = np.random.randint(2, 5) # Construct a random initialization file generate_init(constr) # Extract required information respy_obj = RespyCls('test.respy.ini') # Extract class attributes is_debug, num_periods = dist_class_attributes(respy_obj, 'is_debug', 'num_periods') # Write out a grid for the interpolation max_states_period = write_interpolation_grid('test.respy.ini') # Draw random request for testing num_states = np.random.randint(1, max_states_period) candidates = list(range(num_states)) period = np.random.randint(1, num_periods) num_points_interp = np.random.randint(1, num_states + 1) # Check function for random choice and make sure that there are no # duplicates. f90 = fort_debug.wrapper_random_choice(candidates, num_states, num_points_interp) np.testing.assert_equal(len(set(f90)), len(f90)) np.testing.assert_equal(len(f90), num_points_interp) # Check the standard cases of the function. args = (num_points_interp, num_states, period, is_debug, num_periods) f90 = fort_debug.wrapper_get_simulated_indicator(*args) np.testing.assert_equal(len(f90), num_states) np.testing.assert_equal(np.all(f90) in [0, 1], True) # Test the standardization across PYTHON, F2PY, and FORTRAN # implementations. This is possible as we write out an interpolation # grid to disk which is used for both functions. base_args = (num_points_interp, num_states, period, is_debug) args = base_args py = get_simulated_indicator(*args) args = base_args + (num_periods, ) f90 = fort_debug.wrapper_get_simulated_indicator(*args) np.testing.assert_array_equal(f90, 1*py) os.unlink('interpolation.txt') # Special case where number of interpolation points are same as the # number of candidates. In that case the returned indicator # should be all TRUE. args = (num_states, num_states, period, True, num_periods) f90 = fort_debug.wrapper_get_simulated_indicator(*args) np.testing.assert_equal(sum(f90), num_states) def test_8(self): """ We test the construction of the Cholesky decomposition against each other. """ # Draw a random vector of parameters x = np.random.uniform(size=26) # Construct the Cholesky decompositions py = get_cholesky(x, info=0) fort = fort_debug.wrapper_get_cholesky(x) # Compare the results based on the two methods np.testing.assert_equal(fort, py)
{ "content_hash": "b8d4a9dde1cd3de6590b9105732b15cb", "timestamp": "", "source": "github", "line_count": 539, "max_line_length": 222, "avg_line_length": 40.0556586270872, "alnum_prop": 0.6320055581287634, "repo_name": "restudToolbox/package", "id": "d50fce3896f401d2fcd35f2a8ed61e500e0ab82e", "size": "21590", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "respy/tests/test_f2py.py", "mode": "33188", "license": "mit", "language": [ { "name": "Fortran", "bytes": "571229" }, { "name": "HCL", "bytes": "342" }, { "name": "Python", "bytes": "417314" }, { "name": "Shell", "bytes": "623" } ], "symlink_target": "" }
@import Foundation; @interface EXProvisioningProfile : NSObject + (instancetype)mainProvisioningProfile; @property (nonatomic, readonly, getter=isDevelopment) BOOL development; @end
{ "content_hash": "01d38447e2f9c93cb7e8c98bb0bc2002", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 71, "avg_line_length": 20.666666666666668, "alnum_prop": 0.8118279569892473, "repo_name": "NewSpring/Apollos", "id": "febdf8d304b9f67a0f6a02a8e23b10a2582be1f6", "size": "250", "binary": false, "copies": "3", "ref": "refs/heads/alpha", "path": "ios/Pods/ExpoKit/ios/Exponent/Kernel/Environment/EXProvisioningProfile.h", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "130" }, { "name": "CSS", "bytes": "4867" }, { "name": "HTML", "bytes": "3112" }, { "name": "Java", "bytes": "4564" }, { "name": "JavaScript", "bytes": "1018775" }, { "name": "Objective-C", "bytes": "2381" }, { "name": "Python", "bytes": "3416" }, { "name": "Ruby", "bytes": "2498" }, { "name": "Shell", "bytes": "1801" } ], "symlink_target": "" }
package main // Tests of call-graph queries, -format=json. // See go.tools/oracle/oracle_test.go for explanation. // See callgraph-json.golden for expected query results. func A() {} func B() {} // call is not (yet) treated context-sensitively. func call(f func()) { f() } // nop *is* treated context-sensitively. func nop() {} func call2(f func()) { f() f() } func main() { call(A) call(B) nop() nop() call2(func() { // called twice from main.call2, // but call2 is not context sensitive (yet). }) print("builtin") _ = string("type conversion") call(nil) if false { main() } var nilFunc func() nilFunc() var i interface { f() } i.f() } func deadcode() { main() } // @callgraph callgraph "^"
{ "content_hash": "d803bd42ce593cd57ae5fd18c8f6e488", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 56, "avg_line_length": 14.555555555555555, "alnum_prop": 0.5788804071246819, "repo_name": "dolotech/bullfight", "id": "829a4948daf7db9db846734a25898b2396715d9c", "size": "786", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/code.google.com/p/go.tools/oracle/testdata/src/main/callgraph-json.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "40257" }, { "name": "Batchfile", "bytes": "52" }, { "name": "C", "bytes": "12976" }, { "name": "CSS", "bytes": "10400" }, { "name": "Emacs Lisp", "bytes": "8657" }, { "name": "Go", "bytes": "6231053" }, { "name": "HTML", "bytes": "69906" }, { "name": "JavaScript", "bytes": "30531" }, { "name": "Makefile", "bytes": "15133" }, { "name": "Perl", "bytes": "4395" }, { "name": "Protocol Buffer", "bytes": "70463" }, { "name": "Shell", "bytes": "15691" }, { "name": "Vim script", "bytes": "56" } ], "symlink_target": "" }
class CreateStarships < ActiveRecord::Migration[5.0] def change create_table :starships do |t| t.string :name t.string :model t.string :starship_class t.string :manufacturer t.float :cost_in_credits t.float :length t.string :crew t.string :passengers t.integer :max_atmosphering_speed t.float :hyperdrive_rating t.integer :mglt t.float :cargo_capacity t.string :consumables t.timestamps end end end
{ "content_hash": "2791de8b0c6ff50c6038d7911f16c86f", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 52, "avg_line_length": 23.61904761904762, "alnum_prop": 0.6350806451612904, "repo_name": "GraphQLAcademy/swapi-graphql-ruby", "id": "c73fea925f52b00895c752ba99f86f04ab4770a4", "size": "496", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/migrate/20170208172710_create_starships.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "736" }, { "name": "HTML", "bytes": "4837" }, { "name": "JavaScript", "bytes": "120" }, { "name": "Ruby", "bytes": "68778" } ], "symlink_target": "" }
package app.coolweather.com.coolweather.activity;

import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.view.View;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import java.util.ArrayList;
import java.util.List;

import app.coolweather.com.coolweather.R;
import app.coolweather.com.coolweather.db.CoolWeatherDB;
import app.coolweather.com.coolweather.model.City;
import app.coolweather.com.coolweather.model.County;
import app.coolweather.com.coolweather.model.Province;
import app.coolweather.com.coolweather.util.HttpCallbackListener;
import app.coolweather.com.coolweather.util.HttpUtil;
import app.coolweather.com.coolweather.util.Utility;

/**
 * Created by Administrator on 2016/1/13.
 */
public class ChooseAreaActivity extends Activity {

    public static final int LEVEL_PROVINCE = 0;
    public static final int LEVEL_CITY = 1;
    public static final int LEVEL_COUNTY = 2;

    private ProgressDialog progressDialog;
    private TextView titleText;
    private ListView listView;
    private ArrayAdapter<String> adapter;
    private CoolWeatherDB coolWeatherDB;
    private List<String> dataList = new ArrayList<String>();

    private List<Province> provinceList;
    private List<City> cityList;
    private List<County> countyList;

    private Province selectedProvince;
    private City selectedCity;
    private County selectedCounty;

    private int currentLevel;

    private Boolean isFromWeatherActivity;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        isFromWeatherActivity = getIntent().getBooleanExtra("from_weather_activity", false);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        if(prefs.getBoolean("city_selected", false) && !isFromWeatherActivity) {
            Intent intent = new Intent(this, WeatherActivity.class);
            startActivity(intent);
            finish();
            return;
        }
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.choose_area);
        listView = (ListView) findViewById(R.id.list_view);
        titleText = (TextView) findViewById(R.id.title_text);
        adapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, dataList);
        listView.setAdapter(adapter);
        coolWeatherDB = CoolWeatherDB.getInstance(this);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                if(currentLevel == LEVEL_PROVINCE) {
                    selectedProvince = provinceList.get(position);
                    queryCities();
                } else if(currentLevel == LEVEL_CITY) {
                    selectedCity = cityList.get(position);
                    queryCounties();
                } else if(currentLevel == LEVEL_COUNTY) {
                    String countyCode = countyList.get(position).getCountyCode();
                    Intent intent = new Intent(ChooseAreaActivity.this, WeatherActivity.class);
                    intent.putExtra("county_code", countyCode);
                    startActivity(intent);
                    finish();
                }
            }
        });
        queryProvinces();
    }

    // Load the province data into dataList
    private void queryProvinces() {
        provinceList = coolWeatherDB.loadProvinces();
        if(provinceList.size() > 0) {
            dataList.clear();
            for(Province p : provinceList) {
                dataList.add(p.getProvinceName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            titleText.setText("China");
            currentLevel = LEVEL_PROVINCE;
        } else {
            queryFromServer(null, "province");
        }
    }

    private void queryCities() {
        cityList = coolWeatherDB.loadCities(selectedProvince.getId());
        if(cityList.size() > 0) {
            dataList.clear();
            for(City c : cityList) {
                dataList.add(c.getCityName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            titleText.setText(selectedProvince.getProvinceName());
            currentLevel = LEVEL_CITY;
        } else {
            queryFromServer(selectedProvince.getProvinceCode(), "city");
        }
    }

    private void queryCounties() {
        countyList = coolWeatherDB.loadCounties(selectedCity.getId());
        if(countyList.size() > 0) {
            dataList.clear();
            for(County c : countyList) {
                dataList.add(c.getCountyName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            titleText.setText(selectedCity.getCityName());
            currentLevel = LEVEL_COUNTY;
        } else {
            queryFromServer(selectedCity.getCityCode(), "county");
        }
    }

    /**
     * Loads data from the server.
     * @param type the data loaded from the server is stored in the database for this type,
     *             and the corresponding query method is then called.
     */
    private void queryFromServer(final String code, final String type) {
        String address = "http://www.weather.com.cn/data/list3/city";
        if(!TextUtils.isEmpty(code)) {
            address += code;
        }
        address += ".xml";
        showProgressDialog();
        HttpUtil.sendHttpRequest(address, new HttpCallbackListener() {
            @Override
            public void onFinish(String request) {
                boolean result = false;
                if("province".equals(type)) {
                    result = Utility.handleProvincesRequest(coolWeatherDB, request);
                } else if("city".equals(type)) {
                    result = Utility.handleCitiesRequest(coolWeatherDB, request, selectedProvince.getId());
                } else if("county".equals(type)) {
                    result = Utility.handleCountiesRequest(coolWeatherDB, request, selectedCity.getId());
                }
                if(result) {
                    runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            closeProgressDialog();
                            if("province".equals(type)) {
                                queryProvinces();
                            } else if("city".equals(type)) {
                                queryCities();
                            } else if("county".equals(type)) {
                                queryCounties();
                            }
                        }
                    });
                }
            }

            @Override
            public void onError(Exception e) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        closeProgressDialog();
                        Toast.makeText(ChooseAreaActivity.this, "Failed to load", Toast.LENGTH_SHORT).show();
                    }
                });
            }
        });
    }

    private void showProgressDialog() {
        if(progressDialog == null) {
            progressDialog = new ProgressDialog(this);
            progressDialog.setMessage("Loading...");
            progressDialog.setCanceledOnTouchOutside(false);
        }
        progressDialog.show();
    }

    private void closeProgressDialog() {
        if(progressDialog != null) {
            progressDialog.dismiss();
        }
    }

    /**
     * Handles presses of the back key.
     */
    @Override
    public void onBackPressed() {
        if(currentLevel == LEVEL_COUNTY) {
            queryCities();
        } else if(currentLevel == LEVEL_CITY) {
            queryProvinces();
        } else if(currentLevel == LEVEL_PROVINCE) {
            if(isFromWeatherActivity) {
                Intent intent = new Intent(this, WeatherActivity.class);
                startActivity(intent);
            }
            finish();
        }
    }
}
{ "content_hash": "4ac511af776f5491bbf82d43777b2535", "timestamp": "", "source": "github", "line_count": 231, "max_line_length": 107, "avg_line_length": 36.064935064935064, "alnum_prop": 0.583723442563918, "repo_name": "chenjie13/coolweather", "id": "585abec0f0597c806112b517aa79fcc6a7badf43", "size": "8443", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/app/coolweather/com/coolweather/activity/ChooseAreaActivity.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "35895" } ], "symlink_target": "" }
PACKAGE_URL=https://github.com/vert-x3/vertx-web.git
PACKAGE_VERSION=${1:-3.9.1}

# Install required packages
yum install -y git maven python3
ln -s /usr/bin/python3 /usr/bin/python

# Clone the repository and check out the requested version
git clone "$PACKAGE_URL"
cd vertx-web/
git checkout "$PACKAGE_VERSION"

# Build and test the package
mvn install

echo "Complete!"
{ "content_hash": "1e73896afa390ecf9bda3bbcf1e60002", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 52, "avg_line_length": 19.375, "alnum_prop": 0.7580645161290323, "repo_name": "ppc64le/build-scripts", "id": "92d973b846867fae19022df69a80f1aa8617df04", "size": "1068", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "v/vertx-web/vertx-web_v3.9.1_ubi8_3.sh", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1464" }, { "name": "C", "bytes": "14922" }, { "name": "Dockerfile", "bytes": "731231" }, { "name": "Groovy", "bytes": "984" }, { "name": "Makefile", "bytes": "7280" }, { "name": "OpenEdge ABL", "bytes": "37872" }, { "name": "Python", "bytes": "37596" }, { "name": "Roff", "bytes": "7458" }, { "name": "Shell", "bytes": "13688799" } ], "symlink_target": "" }
local ADDON_NAME, ADDON = ... local bankFrame = {} bankFrame.__index = bankFrame function DJBagsRegisterBankFrame(self, bags) for k, v in pairs(bankFrame) do self[k] = v end ADDON.eventManager:Add('BANKFRAME_OPENED', self) ADDON.eventManager:Add('BANKFRAME_CLOSED', self) table.insert(UISpecialFrames, self:GetName()) self:RegisterForDrag("LeftButton") self:SetScript("OnDragStart", function(self, ...) self:StartMoving() end) self:SetScript("OnDragStop", function(self, ...) self:StopMovingOrSizing(...) end) self:SetUserPlaced(true) end function DJBagsBankTab_OnClick(tab) PanelTemplates_SetTab(DJBagsBankBar, tab.tab) if tab.tab == 1 then DJBagsBank:Show() DJBagsReagents:Hide() BankFrame.selectedTab = 1 BankFrame.activeTabIndex = 1 else DJBagsBank:Hide() DJBagsReagents:Show() BankFrame.selectedTab = 2 BankFrame.activeTabIndex = 2 end end function bankFrame:BANKFRAME_OPENED() self:Show() DJBagsBag:Show() end function bankFrame:BANKFRAME_CLOSED() self:Hide() end
{ "content_hash": "d81a44051648dfa5fe980bb7b2cdf561", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 53, "avg_line_length": 24.74468085106383, "alnum_prop": 0.649183147033534, "repo_name": "DarkJaguar91/DJBags", "id": "9ff8f9eb74f863c253c4c01d313429d6f1ff593f", "size": "1163", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/bank/BankFrame.lua", "mode": "33188", "license": "mit", "language": [ { "name": "Lua", "bytes": "40633" } ], "symlink_target": "" }
"use strict"; var AutoFocusMixin = require("./AutoFocusMixin"); var DOMPropertyOperations = require("./DOMPropertyOperations"); var LinkedValueUtils = require("./LinkedValueUtils"); var ReactBrowserComponentMixin = require("./ReactBrowserComponentMixin"); var ReactClass = require("./ReactClass"); var ReactElement = require("./ReactElement"); var ReactUpdates = require("./ReactUpdates"); var assign = require("./Object.assign"); var invariant = require("./invariant"); var warning = require("./warning"); var textarea = ReactElement.createFactory("textarea"); function forceUpdateIfMounted() { /*jshint validthis:true */ if (this.isMounted()) { this.forceUpdate(); } } /** * Implements a <textarea> native component that allows setting `value`, and * `defaultValue`. This differs from the traditional DOM API because value is * usually set as PCDATA children. * * If `value` is not supplied (or null/undefined), user actions that affect the * value will trigger updates to the element. * * If `value` is supplied (and not null/undefined), the rendered element will * not trigger updates to the element. Instead, the `value` prop must change in * order for the rendered element to be updated. * * The rendered element will be initialized with an empty value, the prop * `defaultValue` if specified, or the children content (deprecated). */ var ReactDOMTextarea = ReactClass.createClass({ displayName: "ReactDOMTextarea", tagName: "TEXTAREA", mixins: [AutoFocusMixin, LinkedValueUtils.Mixin, ReactBrowserComponentMixin], getInitialState: function getInitialState() { var defaultValue = this.props.defaultValue; // TODO (yungsters): Remove support for children content in <textarea>. var children = this.props.children; if (children != null) { if ("production" !== process.env.NODE_ENV) { "production" !== process.env.NODE_ENV ? warning(false, "Use the `defaultValue` or `value` props instead of setting " + "children on <textarea>.") : null; } "production" !== process.env.NODE_ENV ? invariant(defaultValue == null, "If you supply `defaultValue` on a <textarea>, do not pass children.") : invariant(defaultValue == null); if (Array.isArray(children)) { "production" !== process.env.NODE_ENV ? invariant(children.length <= 1, "<textarea> can only have at most one child.") : invariant(children.length <= 1); children = children[0]; } defaultValue = "" + children; } if (defaultValue == null) { defaultValue = ""; } var value = LinkedValueUtils.getValue(this); return { // We save the initial value so that `ReactDOMComponent` doesn't update // `textContent` (unnecessary since we update value). // The initial value can be a boolean or object so that's why it's // forced to be a string. initialValue: "" + (value != null ? value : defaultValue) }; }, render: function render() { // Clone `this.props` so we don't mutate the input. var props = assign({}, this.props); "production" !== process.env.NODE_ENV ? invariant(props.dangerouslySetInnerHTML == null, "`dangerouslySetInnerHTML` does not make sense on <textarea>.") : invariant(props.dangerouslySetInnerHTML == null); props.defaultValue = null; props.value = null; props.onChange = this._handleChange; // Always set children to the same thing. In IE9, the selection range will // get reset if `textContent` is mutated. 
return textarea(props, this.state.initialValue); }, componentDidUpdate: function componentDidUpdate(prevProps, prevState, prevContext) { var value = LinkedValueUtils.getValue(this); if (value != null) { var rootNode = this.getDOMNode(); // Cast `value` to a string to ensure the value is set correctly. While // browsers typically do this as necessary, jsdom doesn't. DOMPropertyOperations.setValueForProperty(rootNode, "value", "" + value); } }, _handleChange: function _handleChange(event) { var returnValue; var onChange = LinkedValueUtils.getOnChange(this); if (onChange) { returnValue = onChange.call(this, event); } ReactUpdates.asap(forceUpdateIfMounted, this); return returnValue; } }); module.exports = ReactDOMTextarea;
{ "content_hash": "fc7553d74ad9d576b395a0b6705f8ad8", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 208, "avg_line_length": 37.76315789473684, "alnum_prop": 0.6922183507549361, "repo_name": "yomolify/cs-webserver", "id": "c323921c7e4987050e9be4c7341a5c061e80edda", "size": "4645", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "node_modules/belle/lib/vendor/react/lib/ReactDOMTextarea.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "580003" }, { "name": "JavaScript", "bytes": "932904" } ], "symlink_target": "" }
<?php namespace Zend\Feed\PubSubHubbub; use DateInterval; use DateTime; use Traversable; use Zend\Feed\Uri; use Zend\Http\Request as HttpRequest; use Zend\Stdlib\ArrayUtils; class Subscriber { /** * An array of URLs for all Hub Servers to subscribe/unsubscribe. * * @var array */ protected $hubUrls = array(); /** * An array of optional parameters to be included in any * (un)subscribe requests. * * @var array */ protected $parameters = array(); /** * The URL of the topic (Rss or Atom feed) which is the subject of * our current intent to subscribe to/unsubscribe from updates from * the currently configured Hub Servers. * * @var string */ protected $topicUrl = ''; /** * The URL Hub Servers must use when communicating with this Subscriber * * @var string */ protected $callbackUrl = ''; /** * The number of seconds for which the subscriber would like to have the * subscription active. Defaults to null, i.e. not sent, to setup a * permanent subscription if possible. * * @var int */ protected $leaseSeconds = null; /** * The preferred verification mode (sync or async). By default, this * Subscriber prefers synchronous verification, but is considered * desirable to support asynchronous verification if possible. * * Zend\Feed\Pubsubhubbub\Subscriber will always send both modes, whose * order of occurrence in the parameter list determines this preference. * * @var string */ protected $preferredVerificationMode = PubSubHubbub::VERIFICATION_MODE_SYNC; /** * An array of any errors including keys for 'response', 'hubUrl'. * The response is the actual Zend\Http\Response object. * * @var array */ protected $errors = array(); /** * An array of Hub Server URLs for Hubs operating at this time in * asynchronous verification mode. * * @var array */ protected $asyncHubs = array(); /** * An instance of Zend\Feed\Pubsubhubbub\Model\SubscriptionPersistence used to background * save any verification tokens associated with a subscription or other. * * @var \Zend\Feed\PubSubHubbub\Model\SubscriptionPersistenceInterface */ protected $storage = null; /** * An array of authentication credentials for HTTP Basic Authentication * if required by specific Hubs. The array is indexed by Hub Endpoint URI * and the value is a simple array of the username and password to apply. * * @var array */ protected $authentications = array(); /** * Tells the Subscriber to append any subscription identifier to the path * of the base Callback URL. E.g. an identifier "subkey1" would be added * to the callback URL "http://www.example.com/callback" to create a subscription * specific Callback URL of "http://www.example.com/callback/subkey1". * * This is required for all Hubs using the Pubsubhubbub 0.1 Specification. * It should be manually intercepted and passed to the Callback class using * Zend\Feed\Pubsubhubbub\Subscriber\Callback::setSubscriptionKey(). Will * require a route in the form "callback/:subkey" to allow the parameter be * retrieved from an action using the Zend\Controller\Action::\getParam() * method. * * @var string */ protected $usePathParameter = false; /** * Constructor; accepts an array or Traversable instance to preset * options for the Subscriber without calling all supported setter * methods in turn. 
* * @param array|Traversable $options */ public function __construct($options = null) { if ($options !== null) { $this->setOptions($options); } } /** * Process any injected configuration options * * @param array|Traversable $options * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setOptions($options) { if ($options instanceof Traversable) { $options = ArrayUtils::iteratorToArray($options); } if (!is_array($options)) { throw new Exception\InvalidArgumentException('Array or Traversable object' . 'expected, got ' . gettype($options)); } if (array_key_exists('hubUrls', $options)) { $this->addHubUrls($options['hubUrls']); } if (array_key_exists('callbackUrl', $options)) { $this->setCallbackUrl($options['callbackUrl']); } if (array_key_exists('topicUrl', $options)) { $this->setTopicUrl($options['topicUrl']); } if (array_key_exists('storage', $options)) { $this->setStorage($options['storage']); } if (array_key_exists('leaseSeconds', $options)) { $this->setLeaseSeconds($options['leaseSeconds']); } if (array_key_exists('parameters', $options)) { $this->setParameters($options['parameters']); } if (array_key_exists('authentications', $options)) { $this->addAuthentications($options['authentications']); } if (array_key_exists('usePathParameter', $options)) { $this->usePathParameter($options['usePathParameter']); } if (array_key_exists('preferredVerificationMode', $options)) { $this->setPreferredVerificationMode( $options['preferredVerificationMode'] ); } return $this; } /** * Set the topic URL (RSS or Atom feed) to which the intended (un)subscribe * event will relate * * @param string $url * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setTopicUrl($url) { if (empty($url) || !is_string($url) || !Uri::factory($url)->isValid()) { throw new Exception\InvalidArgumentException('Invalid parameter "url"' .' of "' . $url . '" must be a non-empty string and a valid' .' URL'); } $this->topicUrl = $url; return $this; } /** * Set the topic URL (RSS or Atom feed) to which the intended (un)subscribe * event will relate * * @return string * @throws Exception\RuntimeException */ public function getTopicUrl() { if (empty($this->topicUrl)) { throw new Exception\RuntimeException('A valid Topic (RSS or Atom' . ' feed) URL MUST be set before attempting any operation'); } return $this->topicUrl; } /** * Set the number of seconds for which any subscription will remain valid * * @param int $seconds * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setLeaseSeconds($seconds) { $seconds = intval($seconds); if ($seconds <= 0) { throw new Exception\InvalidArgumentException('Expected lease seconds' . ' must be an integer greater than zero'); } $this->leaseSeconds = $seconds; return $this; } /** * Get the number of lease seconds on subscriptions * * @return int */ public function getLeaseSeconds() { return $this->leaseSeconds; } /** * Set the callback URL to be used by Hub Servers when communicating with * this Subscriber * * @param string $url * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setCallbackUrl($url) { if (empty($url) || !is_string($url) || !Uri::factory($url)->isValid()) { throw new Exception\InvalidArgumentException('Invalid parameter "url"' . ' of "' . $url . '" must be a non-empty string and a valid' . 
' URL'); } $this->callbackUrl = $url; return $this; } /** * Get the callback URL to be used by Hub Servers when communicating with * this Subscriber * * @return string * @throws Exception\RuntimeException */ public function getCallbackUrl() { if (empty($this->callbackUrl)) { throw new Exception\RuntimeException('A valid Callback URL MUST be' . ' set before attempting any operation'); } return $this->callbackUrl; } /** * Set preferred verification mode (sync or async). By default, this * Subscriber prefers synchronous verification, but does support * asynchronous if that's the Hub Server's utilised mode. * * Zend\Feed\Pubsubhubbub\Subscriber will always send both modes, whose * order of occurrence in the parameter list determines this preference. * * @param string $mode Should be 'sync' or 'async' * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setPreferredVerificationMode($mode) { if ($mode !== PubSubHubbub::VERIFICATION_MODE_SYNC && $mode !== PubSubHubbub::VERIFICATION_MODE_ASYNC ) { throw new Exception\InvalidArgumentException('Invalid preferred' . ' mode specified: "' . $mode . '" but should be one of' . ' Zend\Feed\Pubsubhubbub::VERIFICATION_MODE_SYNC or' . ' Zend\Feed\Pubsubhubbub::VERIFICATION_MODE_ASYNC'); } $this->preferredVerificationMode = $mode; return $this; } /** * Get preferred verification mode (sync or async). * * @return string */ public function getPreferredVerificationMode() { return $this->preferredVerificationMode; } /** * Add a Hub Server URL supported by Publisher * * @param string $url * @return Subscriber * @throws Exception\InvalidArgumentException */ public function addHubUrl($url) { if (empty($url) || !is_string($url) || !Uri::factory($url)->isValid()) { throw new Exception\InvalidArgumentException('Invalid parameter "url"' . ' of "' . $url . '" must be a non-empty string and a valid' . ' URL'); } $this->hubUrls[] = $url; return $this; } /** * Add an array of Hub Server URLs supported by Publisher * * @param array $urls * @return Subscriber */ public function addHubUrls(array $urls) { foreach ($urls as $url) { $this->addHubUrl($url); } return $this; } /** * Remove a Hub Server URL * * @param string $url * @return Subscriber */ public function removeHubUrl($url) { if (!in_array($url, $this->getHubUrls())) { return $this; } $key = array_search($url, $this->hubUrls); unset($this->hubUrls[$key]); return $this; } /** * Return an array of unique Hub Server URLs currently available * * @return array */ public function getHubUrls() { $this->hubUrls = array_unique($this->hubUrls); return $this->hubUrls; } /** * Add authentication credentials for a given URL * * @param string $url * @param array $authentication * @return Subscriber * @throws Exception\InvalidArgumentException */ public function addAuthentication($url, array $authentication) { if (empty($url) || !is_string($url) || !Uri::factory($url)->isValid()) { throw new Exception\InvalidArgumentException('Invalid parameter "url"' . ' of "' . $url . '" must be a non-empty string and a valid' . 
' URL'); } $this->authentications[$url] = $authentication; return $this; } /** * Add authentication credentials for hub URLs * * @param array $authentications * @return Subscriber */ public function addAuthentications(array $authentications) { foreach ($authentications as $url => $authentication) { $this->addAuthentication($url, $authentication); } return $this; } /** * Get all hub URL authentication credentials * * @return array */ public function getAuthentications() { return $this->authentications; } /** * Set flag indicating whether or not to use a path parameter * * @param bool $bool * @return Subscriber */ public function usePathParameter($bool = true) { $this->usePathParameter = $bool; return $this; } /** * Add an optional parameter to the (un)subscribe requests * * @param string $name * @param string|null $value * @return Subscriber * @throws Exception\InvalidArgumentException */ public function setParameter($name, $value = null) { if (is_array($name)) { $this->setParameters($name); return $this; } if (empty($name) || !is_string($name)) { throw new Exception\InvalidArgumentException('Invalid parameter "name"' . ' of "' . $name . '" must be a non-empty string'); } if ($value === null) { $this->removeParameter($name); return $this; } if (empty($value) || (!is_string($value) && $value !== null)) { throw new Exception\InvalidArgumentException('Invalid parameter "value"' . ' of "' . $value . '" must be a non-empty string'); } $this->parameters[$name] = $value; return $this; } /** * Add an optional parameter to the (un)subscribe requests * * @param array $parameters * @return Subscriber */ public function setParameters(array $parameters) { foreach ($parameters as $name => $value) { $this->setParameter($name, $value); } return $this; } /** * Remove an optional parameter for the (un)subscribe requests * * @param string $name * @return Subscriber * @throws Exception\InvalidArgumentException */ public function removeParameter($name) { if (empty($name) || !is_string($name)) { throw new Exception\InvalidArgumentException('Invalid parameter "name"' . ' of "' . $name . '" must be a non-empty string'); } if (array_key_exists($name, $this->parameters)) { unset($this->parameters[$name]); } return $this; } /** * Return an array of optional parameters for (un)subscribe requests * * @return array */ public function getParameters() { return $this->parameters; } /** * Sets an instance of Zend\Feed\Pubsubhubbub\Model\SubscriptionPersistence used to background * save any verification tokens associated with a subscription or other. * * @param Model\SubscriptionPersistenceInterface $storage * @return Subscriber */ public function setStorage(Model\SubscriptionPersistenceInterface $storage) { $this->storage = $storage; return $this; } /** * Gets an instance of Zend\Feed\Pubsubhubbub\Storage\StoragePersistence used * to background save any verification tokens associated with a subscription * or other. * * @return Model\SubscriptionPersistenceInterface * @throws Exception\RuntimeException */ public function getStorage() { if ($this->storage === null) { throw new Exception\RuntimeException('No storage vehicle ' . 
'has been set.'); } return $this->storage; } /** * Subscribe to one or more Hub Servers using the stored Hub URLs * for the given Topic URL (RSS or Atom feed) * * @return void */ public function subscribeAll() { $this->_doRequest('subscribe'); } /** * Unsubscribe from one or more Hub Servers using the stored Hub URLs * for the given Topic URL (RSS or Atom feed) * * @return void */ public function unsubscribeAll() { $this->_doRequest('unsubscribe'); } /** * Returns a boolean indicator of whether the notifications to Hub * Servers were ALL successful. If even one failed, FALSE is returned. * * @return bool */ public function isSuccess() { if (count($this->errors) > 0) { return false; } return true; } /** * Return an array of errors met from any failures, including keys: * 'response' => the Zend\Http\Response object from the failure * 'hubUrl' => the URL of the Hub Server whose notification failed * * @return array */ public function getErrors() { return $this->errors; } /** * Return an array of Hub Server URLs who returned a response indicating * operation in Asynchronous Verification Mode, i.e. they will not confirm * any (un)subscription immediately but at a later time (Hubs may be * doing this as a batch process when load balancing) * * @return array */ public function getAsyncHubs() { return $this->asyncHubs; } /** * Executes an (un)subscribe request * * @param string $mode * @return void * @throws Exception\RuntimeException */ protected function _doRequest($mode) { $client = $this->_getHttpClient(); $hubs = $this->getHubUrls(); if (empty($hubs)) { throw new Exception\RuntimeException('No Hub Server URLs' . ' have been set so no subscriptions can be attempted'); } $this->errors = array(); $this->asyncHubs = array(); foreach ($hubs as $url) { if (array_key_exists($url, $this->authentications)) { $auth = $this->authentications[$url]; $client->setAuth($auth[0], $auth[1]); } $client->setUri($url); $client->setRawBody($this->_getRequestParameters($url, $mode)); $response = $client->send(); if ($response->getStatusCode() !== 204 && $response->getStatusCode() !== 202 ) { $this->errors[] = array( 'response' => $response, 'hubUrl' => $url, ); /** * At first I thought it was needed, but the backend storage will * allow tracking async without any user interference. It's left * here in case the user is interested in knowing what Hubs * are using async verification modes so they may update Models and * move these to asynchronous processes. */ } elseif ($response->getStatusCode() == 202) { $this->asyncHubs[] = array( 'response' => $response, 'hubUrl' => $url, ); } } } /** * Get a basic prepared HTTP client for use * * @return \Zend\Http\Client */ protected function _getHttpClient() { $client = PubSubHubbub::getHttpClient(); $client->setMethod(HttpRequest::METHOD_POST); $client->setOptions(array('useragent' => 'Zend_Feed_Pubsubhubbub_Subscriber/' . Version::VERSION)); return $client; } /** * Return a list of standard protocol/optional parameters for addition to * client's POST body that are specific to the current Hub Server URL * * @param string $hubUrl * @param string $mode * @return string * @throws Exception\InvalidArgumentException */ protected function _getRequestParameters($hubUrl, $mode) { if (!in_array($mode, array('subscribe', 'unsubscribe'))) { throw new Exception\InvalidArgumentException('Invalid mode specified: "' . $mode . 
'" which should have been "subscribe" or "unsubscribe"'); } $params = array( 'hub.mode' => $mode, 'hub.topic' => $this->getTopicUrl(), ); if ($this->getPreferredVerificationMode() == PubSubHubbub::VERIFICATION_MODE_SYNC ) { $vmodes = array( PubSubHubbub::VERIFICATION_MODE_SYNC, PubSubHubbub::VERIFICATION_MODE_ASYNC, ); } else { $vmodes = array( PubSubHubbub::VERIFICATION_MODE_ASYNC, PubSubHubbub::VERIFICATION_MODE_SYNC, ); } $params['hub.verify'] = array(); foreach ($vmodes as $vmode) { $params['hub.verify'][] = $vmode; } /** * Establish a persistent verify_token and attach key to callback * URL's path/query_string */ $key = $this->_generateSubscriptionKey($params, $hubUrl); $token = $this->_generateVerifyToken(); $params['hub.verify_token'] = $token; // Note: query string only usable with PuSH 0.2 Hubs if (!$this->usePathParameter) { $params['hub.callback'] = $this->getCallbackUrl() . '?xhub.subscription=' . PubSubHubbub::urlencode($key); } else { $params['hub.callback'] = rtrim($this->getCallbackUrl(), '/') . '/' . PubSubHubbub::urlencode($key); } if ($mode == 'subscribe' && $this->getLeaseSeconds() !== null) { $params['hub.lease_seconds'] = $this->getLeaseSeconds(); } // hub.secret not currently supported $optParams = $this->getParameters(); foreach ($optParams as $name => $value) { $params[$name] = $value; } // store subscription to storage $now = new DateTime(); $expires = null; if (isset($params['hub.lease_seconds'])) { $expires = $now->add(new DateInterval('PT' . $params['hub.lease_seconds'] . 'S')) ->format('Y-m-d H:i:s'); } $data = array( 'id' => $key, 'topic_url' => $params['hub.topic'], 'hub_url' => $hubUrl, 'created_time' => $now->format('Y-m-d H:i:s'), 'lease_seconds' => $expires, 'verify_token' => hash('sha256', $params['hub.verify_token']), 'secret' => null, 'expiration_time' => $expires, 'subscription_state' => ($mode == 'unsubscribe')? PubSubHubbub::SUBSCRIPTION_TODELETE : PubSubHubbub::SUBSCRIPTION_NOTVERIFIED, ); $this->getStorage()->setSubscription($data); return $this->_toByteValueOrderedString( $this->_urlEncode($params) ); } /** * Simple helper to generate a verification token used in (un)subscribe * requests to a Hub Server. Follows no particular method, which means * it might be improved/changed in future. * * @return string */ protected function _generateVerifyToken() { if (!empty($this->testStaticToken)) { return $this->testStaticToken; } return uniqid(rand(), true) . time(); } /** * Simple helper to generate a verification token used in (un)subscribe * requests to a Hub Server. * * @param array $params * @param string $hubUrl The Hub Server URL for which this token will apply * @return string */ protected function _generateSubscriptionKey(array $params, $hubUrl) { $keyBase = $params['hub.topic'] . 
$hubUrl; $key = md5($keyBase); return $key; } /** * URL Encode an array of parameters * * @param array $params * @return array */ protected function _urlEncode(array $params) { $encoded = array(); foreach ($params as $key => $value) { if (is_array($value)) { $ekey = PubSubHubbub::urlencode($key); $encoded[$ekey] = array(); foreach ($value as $duplicateKey) { $encoded[$ekey][] = PubSubHubbub::urlencode($duplicateKey); } } else { $encoded[PubSubHubbub::urlencode($key)] = PubSubHubbub::urlencode($value); } } return $encoded; } /** * Order outgoing parameters * * @param array $params * @return array */ protected function _toByteValueOrderedString(array $params) { $return = array(); uksort($params, 'strnatcmp'); foreach ($params as $key => $value) { if (is_array($value)) { foreach ($value as $keyduplicate) { $return[] = $key . '=' . $keyduplicate; } } else { $return[] = $key . '=' . $value; } } return implode('&', $return); } /** * This is STRICTLY for testing purposes only... */ protected $testStaticToken = null; final public function setTestStaticToken($token) { $this->testStaticToken = (string) $token; } }
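The Subscriber class above is driven entirely by the options and setters shown. The following PHP sketch is illustrative only and is not part of the original file: it shows one plausible (un)subscribe flow. The hub, topic and callback URLs are placeholders, and Zend\Feed\PubSubHubbub\Model\Subscription is assumed to be an available SubscriptionPersistenceInterface implementation.

<?php
// Illustrative sketch only, not part of Subscriber.php. All URLs are placeholders.
use Zend\Feed\PubSubHubbub\Model\Subscription; // assumed storage implementation
use Zend\Feed\PubSubHubbub\Subscriber;

$subscriber = new Subscriber(array(
    'storage'      => new Subscription(),                 // persists keys/verify tokens
    'hubUrls'      => array('https://hub.example.com/'),  // placeholder hub endpoint
    'topicUrl'     => 'https://feeds.example.com/atom',   // placeholder feed URL
    'callbackUrl'  => 'https://app.example.com/callback', // placeholder callback URL
    'leaseSeconds' => 86400,
));

$subscriber->subscribeAll();

if (!$subscriber->isSuccess()) {
    foreach ($subscriber->getErrors() as $error) {
        // $error['hubUrl'] and $error['response'] describe each failed hub
    }
}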
{ "content_hash": "e8c6addf181c8cb2c995fe7e400432f8", "timestamp": "", "source": "github", "line_count": 831, "max_line_length": 139, "avg_line_length": 30.842358604091455, "alnum_prop": 0.5629730784237222, "repo_name": "winz-git/zend-2-basic-application-with-doctrine", "id": "14f3cdd128e5261deb3cf8c902fd007d7076c93d", "size": "25932", "binary": false, "copies": "22", "ref": "refs/heads/master", "path": "vendor/ZF2/library/Zend/Feed/PubSubHubbub/Subscriber.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1046" }, { "name": "JavaScript", "bytes": "1139" }, { "name": "PHP", "bytes": "97344" }, { "name": "Perl", "bytes": "726" }, { "name": "Shell", "bytes": "9" } ], "symlink_target": "" }
<?xml version='1.0' encoding='UTF-8'?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.google.cloud</groupId> <artifactId>google-cloud-eventarc-publishing</artifactId> <version>0.3.8-SNAPSHOT</version><!-- {x-version-update:google-cloud-eventarc-publishing:current} --> <packaging>jar</packaging> <name>Google Eventarc Publishing</name> <url>https://github.com/googleapis/java-eventarc-publishing</url> <description>Eventarc Publishing lets you asynchronously deliver events from Google services, SaaS, and your own apps using loosely coupled services that react to state changes.</description> <parent> <groupId>com.google.cloud</groupId> <artifactId>google-cloud-eventarc-publishing-parent</artifactId> <version>0.3.8-SNAPSHOT</version><!-- {x-version-update:google-cloud-eventarc-publishing:current} --> </parent> <properties> <site.installationModule>google-cloud-eventarc-publishing</site.installationModule> </properties> <dependencies> <dependency> <groupId>io.grpc</groupId> <artifactId>grpc-api</artifactId> </dependency> <dependency> <groupId>io.grpc</groupId> <artifactId>grpc-stub</artifactId> </dependency> <dependency> <groupId>io.grpc</groupId> <artifactId>grpc-protobuf</artifactId> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>api-common</artifactId> </dependency> <dependency> <groupId>com.google.protobuf</groupId> <artifactId>protobuf-java</artifactId> </dependency> <dependency> <groupId>com.google.api.grpc</groupId> <artifactId>proto-google-common-protos</artifactId> </dependency> <dependency> <groupId>com.google.api.grpc</groupId> <artifactId>proto-google-cloud-eventarc-publishing-v1</artifactId> </dependency> <dependency> <groupId>com.google.guava</groupId> <artifactId>guava</artifactId> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>gax</artifactId> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>gax-grpc</artifactId> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>gax-httpjson</artifactId> </dependency> <dependency> <groupId>org.threeten</groupId> <artifactId>threetenbp</artifactId> </dependency> <!-- Test dependencies --> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <scope>test</scope> <version>4.13.2</version> </dependency> <dependency> <groupId>com.google.api.grpc</groupId> <artifactId>grpc-google-cloud-eventarc-publishing-v1</artifactId> <scope>test</scope> </dependency> <!-- Need testing utility classes for generated gRPC clients tests --> <dependency> <groupId>com.google.api</groupId> <artifactId>gax</artifactId> <classifier>testlib</classifier> <scope>test</scope> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>gax-grpc</artifactId> <classifier>testlib</classifier> <scope>test</scope> </dependency> <dependency> <groupId>com.google.api</groupId> <artifactId>gax-httpjson</artifactId> <classifier>testlib</classifier> <scope>test</scope> </dependency> </dependencies> <profiles> <profile> <id>java9</id> <activation> <jdk>[9,)</jdk> </activation> <dependencies> <dependency> <groupId>javax.annotation</groupId> <artifactId>javax.annotation-api</artifactId> </dependency> </dependencies> </profile> </profiles> <build> <plugins> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>flatten-maven-plugin</artifactId> </plugin> </plugins> 
</build> </project>
{ "content_hash": "b628d4ad173907fa76bec50e8d00d945", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 204, "avg_line_length": 32.74015748031496, "alnum_prop": 0.6611351611351611, "repo_name": "googleapis/java-eventarc-publishing", "id": "45a6f3ab42fb6d1860a94df861bab3fa8659ce8c", "size": "4158", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "google-cloud-eventarc-publishing/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "801" }, { "name": "Java", "bytes": "267298" }, { "name": "Python", "bytes": "787" }, { "name": "Shell", "bytes": "20480" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_26) on Sun Jan 22 14:18:59 CET 2012 --> <TITLE> Uses of Class soot.toolkits.astmetrics.DataHandlingApplication.ProcessData (Soot API) </TITLE> <META NAME="date" CONTENT="2012-01-22"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class soot.toolkits.astmetrics.DataHandlingApplication.ProcessData (Soot API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../soot/toolkits/astmetrics/DataHandlingApplication/ProcessData.html" title="class in soot.toolkits.astmetrics.DataHandlingApplication"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?soot/toolkits/astmetrics/DataHandlingApplication//class-useProcessData.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ProcessData.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> <B>Uses of Class<br>soot.toolkits.astmetrics.DataHandlingApplication.ProcessData</B></H2> </CENTER> No usage of soot.toolkits.astmetrics.DataHandlingApplication.ProcessData <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A 
HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../soot/toolkits/astmetrics/DataHandlingApplication/ProcessData.html" title="class in soot.toolkits.astmetrics.DataHandlingApplication"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../index.html?soot/toolkits/astmetrics/DataHandlingApplication//class-useProcessData.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="ProcessData.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> </BODY> </HTML>
{ "content_hash": "5751cc269207ec22758b095d205d6612", "timestamp": "", "source": "github", "line_count": 144, "max_line_length": 265, "avg_line_length": 42.68055555555556, "alnum_prop": 0.6282134721770257, "repo_name": "Phortran/SicurezzaInformatica", "id": "3c2630a8a97c0254a10d14760e4843c9491c82e3", "size": "6146", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ApkSSL_Tester/libs/soot/doc/soot/toolkits/astmetrics/DataHandlingApplication/class-use/ProcessData.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1391" }, { "name": "HTML", "bytes": "95194748" }, { "name": "Java", "bytes": "4138" } ], "symlink_target": "" }
<?php namespace Magnetikonline\Automobile; // This is a Racecar class class Racecar { public function sayWhatIAm() { echo("I'm a Racecar - I drive fast!\n"); } }
{ "content_hash": "e3d2c11fc734f54d76b8f30b04ede84a", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 42, "avg_line_length": 14.166666666666666, "alnum_prop": 0.6823529411764706, "repo_name": "magnetikonline/phpnsautoloader", "id": "bce7603dab387908c6c7652af3f303062b0535bc", "size": "170", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "lib/magnetikonline/automobile/racecar.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "1320" } ], "symlink_target": "" }
module Server.Import (module Server.Types ,module Server.Log ,module Control.Concurrent ,module Control.Monad ,module Control.Exception ,module Control.Monad.Trans ,module Network ,io) where import Server.Types import Server.Log import Control.Concurrent import Control.Monad import Control.Exception import Network import Control.Monad.Trans hiding (liftIO) import qualified Control.Monad.Trans as Trans -- | Gotta have. io :: MonadIO m => IO a -> m a io = Trans.liftIO
{ "content_hash": "415eea6616ac53ca6e16f5c8711dfa17", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 45, "avg_line_length": 21.347826086956523, "alnum_prop": 0.7637474541751528, "repo_name": "kini/ghc-server", "id": "9bda8ba60a01ac2af67af6b9c1b44b8e958883b6", "size": "491", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Server/Import.hs", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
const { merge } = require('webpack-merge') const common = require('./webpack.common') module.exports = merge(common, { mode: 'development', watch: true, devtool: 'inline-source-map', })
{ "content_hash": "b323ef7b50d4b408084470015ae5b94b", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 42, "avg_line_length": 24.125, "alnum_prop": 0.6735751295336787, "repo_name": "githayu/nicofinder-extension", "id": "d14d84d325cde22b0f742c735613621364482391", "size": "193", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/webpack.dev.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2559" }, { "name": "TypeScript", "bytes": "17472" } ], "symlink_target": "" }
import os class Config(object): DEBUG = False TESTING = False CSRF_ENABLED = True SECRET_KEY = 'this-really-needs-to-be-changed' SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL'] class DevelopmentConfig(Config): DEVELOPMENT = True DEBUG = True
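This config module follows a common Flask-style pattern (class attributes such as DEBUG, TESTING and SQLALCHEMY_DATABASE_URI read by the app at startup). The snippet below is an illustrative sketch, not part of the original file: it assumes Flask is installed, that a DATABASE_URL environment variable is set, and that the module is importable as config.

# Illustrative sketch only; assumes Flask and a DATABASE_URL environment variable.
from flask import Flask

app = Flask(__name__)
# Loads DEBUG, SECRET_KEY, SQLALCHEMY_DATABASE_URI, etc. from DevelopmentConfig.
app.config.from_object('config.DevelopmentConfig')

@app.route('/')
def index():
    return 'debug=%s' % app.config['DEBUG']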
{ "content_hash": "0079d777bb6d7870bd2763fb33cd7872", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 56, "avg_line_length": 23.083333333333332, "alnum_prop": 0.6859205776173285, "repo_name": "jwestgard/elk", "id": "239459cc146bea6f1127f5bd85bfb9a398897be9", "size": "277", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1051" }, { "name": "Python", "bytes": "1802" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>expm1</title> <link rel="stylesheet" href="../../math.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.79.1"> <link rel="home" href="../../index.html" title="Math Toolkit 3.1.0"> <link rel="up" href="../powers.html" title="Basic Functions"> <link rel="prev" href="log1p.html" title="log1p"> <link rel="next" href="cbrt.html" title="cbrt"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../../boost.png"></td> <td align="center"><a href="../../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="log1p.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../powers.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="cbrt.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h3 class="title"> <a name="math_toolkit.powers.expm1"></a><a class="link" href="expm1.html" title="expm1">expm1</a> </h3></div></div></div> <pre class="programlisting"><span class="preprocessor">#include</span> <span class="special">&lt;</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">math</span><span class="special">/</span><span class="identifier">special_functions</span><span class="special">/</span><span class="identifier">expm1</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">&gt;</span> </pre> <pre class="programlisting"><span class="keyword">namespace</span> <span class="identifier">boost</span><span class="special">{</span> <span class="keyword">namespace</span> <span class="identifier">math</span><span class="special">{</span> <span class="keyword">template</span> <span class="special">&lt;</span><span class="keyword">class</span> <span class="identifier">T</span><span class="special">&gt;</span> <a class="link" href="../result_type.html" title="Calculation of the Type of the Result"><span class="emphasis"><em>calculated-result-type</em></span></a> <span class="identifier">expm1</span><span class="special">(</span><span class="identifier">T</span> <span class="identifier">x</span><span class="special">);</span> <span class="keyword">template</span> <span class="special">&lt;</span><span class="keyword">class</span> <span class="identifier">T</span><span class="special">,</span> <span class="keyword">class</span> <a class="link" href="../../policy.html" title="Chapter 21. 
Policies: Controlling Precision, Error Handling etc">Policy</a><span class="special">&gt;</span> <a class="link" href="../result_type.html" title="Calculation of the Type of the Result"><span class="emphasis"><em>calculated-result-type</em></span></a> <span class="identifier">expm1</span><span class="special">(</span><span class="identifier">T</span> <span class="identifier">x</span><span class="special">,</span> <span class="keyword">const</span> <a class="link" href="../../policy.html" title="Chapter 21. Policies: Controlling Precision, Error Handling etc">Policy</a><span class="special">&amp;);</span> <span class="special">}}</span> <span class="comment">// namespaces</span> </pre> <p> Returns e<sup>x</sup> - 1. </p> <p> The return type of this function is computed using the <a class="link" href="../result_type.html" title="Calculation of the Type of the Result"><span class="emphasis"><em>result type calculation rules</em></span></a>: the return is <code class="computeroutput"><span class="keyword">double</span></code> when <span class="emphasis"><em>x</em></span> is an integer type and T otherwise. </p> <p> The final <a class="link" href="../../policy.html" title="Chapter 21. Policies: Controlling Precision, Error Handling etc">Policy</a> argument is optional and can be used to control the behaviour of the function: how it handles errors, what level of precision to use etc. Refer to the <a class="link" href="../../policy.html" title="Chapter 21. Policies: Controlling Precision, Error Handling etc">policy documentation for more details</a>. </p> <p> For small <span class="emphasis"><em>x</em></span>, then <code class="computeroutput">e<sup>x</sup></code> is very close to 1, as a result calculating <code class="computeroutput">e<sup>x</sup> - 1</code> results in catastrophic cancellation errors when <span class="emphasis"><em>x</em></span> is small. <code class="computeroutput"><span class="identifier">expm1</span></code> calculates <code class="computeroutput">e<sup>x</sup> - 1</code> using rational approximations (for up to 128-bit long doubles), otherwise via a series expansion when x is small (giving an accuracy of less than 2ɛ). </p> <p> Finally when BOOST_HAS_EXPM1 is defined then the <code class="computeroutput"><span class="keyword">float</span><span class="special">/</span><span class="keyword">double</span><span class="special">/</span><span class="keyword">long</span> <span class="keyword">double</span></code> specializations of this template simply forward to the platform's native (POSIX) implementation of this function. </p> <p> The following graph illustrates the behaviour of expm1: </p> <div class="blockquote"><blockquote class="blockquote"><p> <span class="inlinemediaobject"><img src="../../../graphs/expm1.svg" align="middle"></span> </p></blockquote></div> <h5> <a name="math_toolkit.powers.expm1.h0"></a> <span class="phrase"><a name="math_toolkit.powers.expm1.accuracy"></a></span><a class="link" href="expm1.html#math_toolkit.powers.expm1.accuracy">Accuracy</a> </h5> <p> For built in floating point types <code class="computeroutput"><span class="identifier">expm1</span></code> should have approximately 1 epsilon accuracy. </p> <div class="table"> <a name="math_toolkit.powers.expm1.table_expm1"></a><p class="title"><b>Table 8.82. 
Error rates for expm1</b></p> <div class="table-contents"><table class="table" summary="Error rates for expm1"> <colgroup> <col> <col> <col> <col> <col> </colgroup> <thead><tr> <th> </th> <th> <p> GNU C++ version 7.1.0<br> linux<br> long double </p> </th> <th> <p> GNU C++ version 7.1.0<br> linux<br> double </p> </th> <th> <p> Sun compiler version 0x5150<br> Sun Solaris<br> long double </p> </th> <th> <p> Microsoft Visual C++ version 14.1<br> Win32<br> double </p> </th> </tr></thead> <tbody><tr> <td> <p> Random test data </p> </td> <td> <p> <span class="blue">Max = 0.992ε (Mean = 0.402ε)</span><br> <br> (<span class="emphasis"><em>&lt;cmath&gt;:</em></span> Max = 0.992ε (Mean = 0.402ε))<br> (<span class="emphasis"><em>&lt;math.h&gt;:</em></span> Max = 0.992ε (Mean = 0.402ε)) </p> </td> <td> <p> <span class="blue">Max = 0.793ε (Mean = 0.126ε)</span><br> <br> (<span class="emphasis"><em>Rmath 3.2.3:</em></span> Max = 0.793ε (Mean = 0.126ε)) </p> </td> <td> <p> <span class="blue">Max = 1.31ε (Mean = 0.428ε)</span><br> <br> (<span class="emphasis"><em>&lt;math.h&gt;:</em></span> Max = 0.996ε (Mean = 0.426ε)) </p> </td> <td> <p> <span class="blue">Max = 1.31ε (Mean = 0.496ε)</span><br> <br> (<span class="emphasis"><em>&lt;math.h&gt;:</em></span> Max = 1.31ε (Mean = 0.496ε)) </p> </td> </tr></tbody> </table></div> </div> <br class="table-break"><h5> <a name="math_toolkit.powers.expm1.h1"></a> <span class="phrase"><a name="math_toolkit.powers.expm1.testing"></a></span><a class="link" href="expm1.html#math_toolkit.powers.expm1.testing">Testing</a> </h5> <p> A mixture of spot test sanity checks, and random high precision test values calculated using NTL::RR at 1000-bit precision. </p> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright © 2006-2021 Nikhar Agrawal, Anton Bikineev, Matthew Borland, Paul A. Bristow, Marco Guazzone, Christopher Kormanyos, Hubert Holin, Bruno Lalande, John Maddock, Evan Miller, Jeremy Murphy, Matthew Pulver, Johan Råde, Gautam Sewani, Benjamin Sobotta, Nicholas Thompson, Thijs van den Berg, Daryle Walker and Xiaogang Zhang<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="log1p.html"><img src="../../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../powers.html"><img src="../../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="cbrt.html"><img src="../../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
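As a concrete illustration of the cancellation issue described in the page above, here is a minimal C++ sketch (not part of the original documentation). It uses only the header and function signature shown in the synopsis; the chosen value of x is an arbitrary small argument.

// Minimal sketch: expm1 versus the naive formulation for a small argument.
#include <boost/math/special_functions/expm1.hpp>
#include <cmath>
#include <cstdio>

int main()
{
    double x = 1e-12;
    double naive  = std::exp(x) - 1.0;      // suffers catastrophic cancellation
    double stable = boost::math::expm1(x);  // accurate to roughly 1 epsilon
    std::printf("naive  = %.17g\nstable = %.17g\n", naive, stable);
    return 0;
}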
{ "content_hash": "4607928d62f2a30f5dd5fdd746b805ff", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 514, "avg_line_length": 59.34090909090909, "alnum_prop": 0.607047108387591, "repo_name": "davehorton/drachtio-server", "id": "0bee6a3de6decfb6b9d9ca5dde00e61390865f2d", "size": "10475", "binary": false, "copies": "4", "ref": "refs/heads/main", "path": "deps/boost_1_77_0/libs/math/doc/html/math_toolkit/powers/expm1.html", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "662596" }, { "name": "Dockerfile", "bytes": "1330" }, { "name": "JavaScript", "bytes": "60639" }, { "name": "M4", "bytes": "35273" }, { "name": "Makefile", "bytes": "5960" }, { "name": "Shell", "bytes": "47298" } ], "symlink_target": "" }
"use strict"; it.suite("it tdd", function (it) { it.test("should not be null", function () { assert.isNotNull(it); }); it.test("should describe", function () { assert.equal(it.description, "it tdd"); }); it.test("should have methods", function () { assert.isFunction(it.beforeAll); assert.isFunction(it.beforeEach); assert.isFunction(it.afterEach); assert.isFunction(it.afterAll); assert.isFunction(it.test); assert.isFunction(it.suite); assert.isFunction(it.context); }); it.suite("assert extensions", function (it) { it.test('should add methods', function () { assert.isFunction(assert.isFunction); assert.isFunction(assert.isArray); assert.isFunction(assert.isDate); assert.isFunction(assert.isBoolean); assert.isFunction(assert.isString); assert.isFunction(assert.isUndefined); assert.isFunction(assert.isUndefinedOrNull); assert.isFunction(assert.isPromiseLike); assert.isFunction(assert.isRegExp); assert.isFunction(assert.isTrue); assert.isFunction(assert.isFalse); assert.isFunction(assert.truthy); assert.isFunction(assert.falsy); assert.isFunction(assert.isNull); assert.isFunction(assert.isNotNull); assert.isFunction(assert.instanceOf); assert.isFunction(assert.lengthOf); }); it.test('should test properly', function () { assert.doesNotThrow(function () { assert.isFunction(function () { }); }); assert.doesNotThrow(function () { assert.isArray([]); }); assert.doesNotThrow(function () { assert.isDate(new Date()); }); assert.doesNotThrow(function () { assert.isBoolean(true); assert.isBoolean(false); }); assert.doesNotThrow(function () { assert.isString(""); }); assert.doesNotThrow(function () { assert.isUndefined(undefined); }); assert.doesNotThrow(function () { assert.isUndefinedOrNull(null); assert.isUndefinedOrNull(undefined); }); assert.doesNotThrow(function () { assert.isPromiseLike({then: function () { }}); }); assert.doesNotThrow(function () { assert.isRegExp(/hello/i); }); assert.doesNotThrow(function () { assert.isTrue(true); }); assert.doesNotThrow(function () { assert.isFalse(false); }); assert.doesNotThrow(function () { assert.truthy('hello'); }); assert.doesNotThrow(function () { assert.falsy(''); }); assert.doesNotThrow(function () { assert.isNull(null); }); assert.doesNotThrow(function () { assert.isNotNull(true); }); assert.doesNotThrow(function () { assert.instanceOf(new Date(), Date); }); assert.doesNotThrow(function () { assert.lengthOf([1, 2, 3], 3); }); assert.throws(function () { assert.isFunction(true); }); assert.throws(function () { assert.isArray(true); }); assert.throws(function () { assert.isDate("hi"); }); assert.throws(function () { assert.isBoolean(""); }); assert.throws(function () { assert.isString(new Date()); }); assert.throws(function () { assert.isUndefined(null); }); assert.throws(function () { assert.isUndefinedOrNull("hi"); }); assert.throws(function () { assert.isPromiseLike(""); }); assert.throws(function () { assert.isRegExp("/hello/"); }); assert.throws(function () { assert.isTrue(false); }); assert.throws(function () { assert.isFalse(true); }); assert.throws(function () { assert.truthy(''); }); assert.throws(function () { assert.falsy('hi'); }); assert.throws(function () { assert.isNull(undefined); }); assert.throws(function () { assert.isNotNull(null); }); assert.throws(function () { assert.instanceOf(new Date(), Boolean); }); assert.throws(function () { assert.lengthOf([1, 2, 3], 1); }); }); }); it.suite("#beforeAll", function (it) { var called = 0; it.beforeAll(function () { called++; }); it.test("should call beforeAll", function () { 
assert.equal(called, 1); }); it.test("should call not call beforeAll more than once", function () { assert.equal(called, 1); }); }); it.suite("#beforeAll multi", function (it) { var called = 0, called2 = 0; it.beforeAll(function () { called++; }); it.beforeAll(function () { called2++; }); it.test("should call beforeAll", function () { assert.equal(called, 1); assert.equal(called2, 1); }); it.test("should not call beforeAll more than once", function () { assert.equal(called, 1); assert.equal(called2, 1); }); }); it.suite("#beforeEach", function (it) { var called = 0; it.beforeEach(function () { called++; }); it.test("should call beforeEach", function () { assert.equal(called, 1); }); it.test("should call beforeEach again", function () { assert.equal(called, 2); called = 0; }); }); it.suite("#beforeEach multi", function (it) { var called = 0, called2 = 0; it.beforeEach(function () { called++; }); it.beforeEach(function () { called2++; }); it.test("should call beforeEach", function () { assert.equal(called, 1); assert.equal(called2, 1); }); it.test("should call beforeEach again", function () { assert.equal(called, 2); assert.equal(called2, 2); called = called2 = 0; }); }); it.suite("#afterEach", function (it) { var called = 0; it.afterEach(function () { called++; }); it.test("should call not have called afterEach", function () { assert.equal(called, 0); }); it.test("should have called afterEach", function () { assert.equal(called, 1); }); it.test("should call afterEach again", function () { assert.equal(called, 2); }); }); it.suite("#afterEach multi", function (it) { var called = 0, called2 = 0; it.afterEach(function () { called++; }); it.afterEach(function () { called2++; }); it.test("should call not have called afterEach", function () { assert.equal(called, 0); assert.equal(called2, 0); }); it.test("should have called afterEach", function () { assert.equal(called, 1); assert.equal(called2, 1); }); it.test("should call afterEach again", function () { assert.equal(called, 2); assert.equal(called2, 2); }); }); it.suite("#afterAll", function (it) { var called = 0; it.afterAll(function () { called++; }); it.afterAll(function () { assert.equal(called, 1); }); it.test("should not call afterAll", function () { assert.equal(called, 0); }); it.test("should still not call afterAll", function () { assert.equal(called, 0); }); }); it.suite("#should", function (it) { it.suite("provided a callback with an arity 0 of zero", function (it) { it.test("should callback immediatly", function () { }); it.test("should be called", function () { //just to ensure it was called assert.isTrue(true); }); }); it.suite("provided a callback that returns a promise", function (it) { it.test("should callback when the promise is resolved", function () { return { then: function (cb) { setTimeout(cb, 100); } }; }); it.test("should increment call", function () { assert.equal(it.getAction("should callback when the promise is resolved").get("summary").status, "passed"); }); it.test("should callback when the promise is errored", function () { return { then: function (cb, eb) { setTimeout(function () { eb("error"); }, 100); } }; }); var errbackAction = it.getAction("should callback when the promise is errored"), errbackCalled = false; errbackAction.failed = function (start, end, err) { var summary = this.get("summary"); summary.start = start; summary.end = end; summary.duration = end - start; summary.status = "passed"; summary.error = err || new Error(); this.emit("success", this); errbackCalled = true; return this.get("summary"); }; 
it.test("should increment call printError", function () { assert.isTrue(errbackCalled); }); }); }); it.test("should have a summary", function () { var summary = it.get("summary"); assert.isObject(summary); var str = []; var expected = [ [ "should not be null", { "status": "passed" } ], [ "should describe", { "status": "passed" } ], [ "should have methods", { "status": "passed" } ], [ "assert extensions", [ "should add methods", { "status": "passed" } ], [ "should test properly", { "status": "passed" } ] ], [ "#beforeAll", [ "should call beforeAll", { "status": "passed" } ], [ "should call not call beforeAll more than once", { "status": "passed" } ] ], [ "#beforeAll multi", [ "should call beforeAll", { "status": "passed" } ], [ "should not call beforeAll more than once", { "status": "passed" } ] ], [ "#beforeEach", [ "should call beforeEach", { "status": "passed" } ], [ "should call beforeEach again", { "status": "passed" } ] ], [ "#beforeEach multi", [ "should call beforeEach", { "status": "passed" } ], [ "should call beforeEach again", { "status": "passed" } ] ], [ "#afterEach", [ "should call not have called afterEach", { "status": "passed" } ], [ "should have called afterEach", { "status": "passed" } ], [ "should call afterEach again", { "status": "passed" } ] ], [ "#afterEach multi", [ "should call not have called afterEach", { "status": "passed" } ], [ "should have called afterEach", { "status": "passed" } ], [ "should call afterEach again", { "status": "passed" } ] ], [ "#afterAll", [ "should not call afterAll", { "status": "passed" } ], [ "should still not call afterAll", { "status": "passed" } ] ], [ "#should", [ "provided a callback with an arity 0 of zero", [ "should callback immediatly", { "status": "passed" } ], [ "should be called", { "status": "passed" } ] ], [ "provided a callback that returns a promise", [ "should callback when the promise is resolved", { "status": "passed" } ], [ "should increment call", { "status": "passed" } ], [ "should callback when the promise is errored", { "status": "passed" } ], [ "should increment call printError", { "status": "passed" } ] ] ], [ "should have a summary", { "status": "pending" } ] ]; (function gather(str, a) { var summaries = a.summaries; for (var k in summaries) { if (summaries[k].summaries) { var newStrs = [k]; gather(newStrs, summaries[k]); str.push(newStrs); } else { var sum = summaries[k]; str.push([k, {status: sum.status}]); } } }(str, summary)); assert.deepEqual(str, expected); assert.isNumber(summary.duration); }); });
{ "content_hash": "dffeb6618b683847e4dd299f41a7ea8e", "timestamp": "", "source": "github", "line_count": 608, "max_line_length": 123, "avg_line_length": 28.192434210526315, "alnum_prop": 0.37955778542675456, "repo_name": "doug-martin/it", "id": "d2aca9ae6175fbdd54d6e1d25e05db8aed999f59", "size": "17141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/browser/it-tdd.test.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2130" }, { "name": "HTML", "bytes": "7904" }, { "name": "JavaScript", "bytes": "527603" } ], "symlink_target": "" }
<?php require __DIR__ . '/../vendor/autoload.php'; require __DIR__ . '/TestCase.php';
{ "content_hash": "b0326d0490a6bc0bd23caae51b9d475f", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 44, "avg_line_length": 28.666666666666668, "alnum_prop": 0.5930232558139535, "repo_name": "basecrm/basecrm-php", "id": "56a9764507d8662d78f1fed22440344bffe6f5ea", "size": "86", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/bootstrap.php", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "83" }, { "name": "PHP", "bytes": "152329" } ], "symlink_target": "" }
<?php namespace Zend\Http\Header; /** * @throws Exception\InvalidArgumentException * @see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.31 */ class MaxForwards implements HeaderDescription { public static function fromString($headerLine) { $header = new static(); list($name, $value) = preg_split('#: #', $headerLine, 2); // check to ensure proper header type for this factory if (strtolower($name) !== 'max-forwards') { throw new Exception\InvalidArgumentException('Invalid header line for Max-Forwards string: "' . $name . '"'); } // @todo implementation details $header->value = $value; return $header; } public function getFieldName() { return 'Max-Forwards'; } public function getFieldValue() { return $this->value; } public function toString() { return 'Max-Forwards: ' . $this->getFieldValue(); } }
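For reference, a small usage sketch (not part of the original file) based only on the methods defined above; the header value 10 is an arbitrary example.

<?php
// Illustrative only: round-trip a Max-Forwards header through the class above.
use Zend\Http\Header\MaxForwards;

$header = MaxForwards::fromString('Max-Forwards: 10'); // example value
echo $header->getFieldName();  // "Max-Forwards"
echo $header->getFieldValue(); // "10"
echo $header->toString();      // "Max-Forwards: 10"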
{ "content_hash": "2cd6953bd7d76c58d69040770614b66d", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 121, "avg_line_length": 22.34090909090909, "alnum_prop": 0.6063072227873856, "repo_name": "Techlightenment/zf2", "id": "8f294b4bfdc4a5bd93b033c8335e962e9d407b57", "size": "983", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "library/Zend/Http/Header/MaxForwards.php", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1153" }, { "name": "JavaScript", "bytes": "30072" }, { "name": "PHP", "bytes": "29274702" }, { "name": "Puppet", "bytes": "2625" }, { "name": "Ruby", "bytes": "10" }, { "name": "Shell", "bytes": "3809" }, { "name": "TypeScript", "bytes": "3445" } ], "symlink_target": "" }
<div class="app-brandname-logo"> <div class="app-brandname-image"> <img src="{{ page.base_url }}assets/images/logo.png" /> </div> <div class="app-brand-claim"> Verein für Umweltmanagement und<br /> Nachhaltigkeit in Finanzinstituten e.V. </div> </div>
{ "content_hash": "e4f991143af8096023506a2c131e87f3", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 58, "avg_line_length": 30.444444444444443, "alnum_prop": 0.6532846715328468, "repo_name": "a25kk/vfu", "id": "83b5d3348547d65f0128575bf13290003080ce69", "size": "275", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "theme/templates/_includes/components/logo.html", "mode": "33261", "license": "mit", "language": [ { "name": "Brainfuck", "bytes": "5661" }, { "name": "CSS", "bytes": "624320" }, { "name": "Dockerfile", "bytes": "110" }, { "name": "HTML", "bytes": "223242" }, { "name": "JavaScript", "bytes": "366575" }, { "name": "Makefile", "bytes": "2499" }, { "name": "Python", "bytes": "85859" }, { "name": "Shell", "bytes": "3078" } ], "symlink_target": "" }
// Iterator over persisted metrics #include "persistent_iterator-win32.h" namespace stats_report { void PersistentMetricsIteratorWin32::Next() { current_value_.reset(); // Try to open the top-level key if we didn't already. if (NULL == key_.m_hKey) { HKEY parent_key = is_machine_ ? HKEY_LOCAL_MACHINE : HKEY_CURRENT_USER; LONG err = key_.Open(parent_key, key_name_, KEY_READ); if (err != ERROR_SUCCESS) return; } // Loop until we find a value while (state_ != kFinished) { if (NULL == sub_key_.m_hKey) { const wchar_t *subkey_name = NULL; switch (state_) { case kUninitialized: state_ = kCounts; subkey_name = kCountsKeyName; break; case kCounts: state_ = kTimings; subkey_name = kTimingsKeyName; break; case kTimings: state_ = kIntegers; subkey_name = kIntegersKeyName; break; case kIntegers: state_ = kBooleans; subkey_name = kBooleansKeyName; break; case kBooleans: state_ = kFinished; break; case kFinished: break; } if (NULL != subkey_name) { LONG err = sub_key_.Open(key_, subkey_name, KEY_READ); // go around the loop on error to try the next key type if (ERROR_SUCCESS != err) continue; } // reset value enumeration value_index_ = 0; } if (state_ != kFinished) { DCHECK(NULL != sub_key_.m_hKey); CString wide_value_name; DWORD value_name_len = 255; DWORD value_type = 0; BYTE buf[sizeof(TimingMetric::TimingData)]; DWORD value_len = sizeof(buf); // Get the next key and value LONG err = ::RegEnumValue(sub_key_, value_index_, CStrBuf(wide_value_name, value_name_len), &value_name_len, 0, &value_type, buf, &value_len); ++value_index_; if (ERROR_NO_MORE_ITEMS == err) { // done with this subkey, go around again sub_key_.Close(); continue; } else if (ERROR_SUCCESS != err) { // some other error, broken into a separate case for ease of debugging DCHECK(false && "Unexpected error during reg value enumeration"); } else { DCHECK(ERROR_SUCCESS == err); // convert value to ASCII current_value_name_ = wide_value_name; switch (state_) { case kCounts: if (value_len != sizeof(uint64)) continue; current_value_.reset( new CountMetric(current_value_name_ .GetString(), *reinterpret_cast<uint64*>(&buf[0]))); break; case kTimings: if (value_len != sizeof(TimingMetric::TimingData)) continue; current_value_.reset( new TimingMetric( current_value_name_.GetString(), *reinterpret_cast<TimingMetric::TimingData*>(&buf[0]))); break; case kIntegers: if (value_len != sizeof(uint64)) continue; current_value_.reset(new IntegerMetric( current_value_name_.GetString(), *reinterpret_cast<uint64*>(&buf[0]))); break; case kBooleans: if (value_len != sizeof(uint32)) continue; current_value_.reset( new BoolMetric(current_value_name_.GetString(), *reinterpret_cast<uint32*>(&buf[0]))); break; default: DCHECK(false && "Impossible state during reg value enumeration"); break; } if (current_value_.get()) return; } } } } } // namespace stats_report
{ "content_hash": "d813d62ab7bd8b52fe8a269e36022b10", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 78, "avg_line_length": 30.417910447761194, "alnum_prop": 0.49975466143277725, "repo_name": "rwatson/chromium-capsicum", "id": "edf687bbfd85bae5d75cc9f8d9f9c17a7c24fefa", "size": "5638", "binary": false, "copies": "3", "ref": "refs/heads/chromium-capsicum", "path": "o3d/statsreport/persistent_iterator-win32.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package de.taimos.daemon.log4j; import de.taimos.daemon.DaemonProperties; import de.taimos.daemon.DaemonStarter; import de.taimos.daemon.ILoggingConfigurer; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configuration; import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder; import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory; import org.apache.logging.log4j.core.config.builder.api.LayoutComponentBuilder; import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration; import java.util.Map; public class Log4jLoggingConfigurer implements ILoggingConfigurer { private static final String FALSE_STRING = "false"; @Override public void initializeLogging() { ConfigurationBuilder<BuiltConfiguration> builder = ConfigurationBuilderFactory.newConfigurationBuilder(); String filePath = null; String filePattern = null; String host = null; String facility = null; Level syslogLevel = null; LayoutComponentBuilder syslogLayout = null; if (!DaemonStarter.isDevelopmentMode()) { String daemonName = DaemonStarter.getDaemonName(); filePath = DvalinLog4jConfigurationFactory.getLogFilePath(daemonName); filePattern = DvalinLog4jConfigurationFactory.getLogFilePattern(filePath); host = DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.SYSLOG_HOST, "localhost"); facility = DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.SYSLOG_FACILITY, "LOCAL0"); syslogLevel = getLevel(Log4jDaemonProperties.SYSLOG_LEVEL, Log4jDaemonProperties.DEFAULT_LEVEL); syslogLayout = DvalinLog4jConfigurationFactory.createSyslogLayout(builder, daemonName); } LayoutComponentBuilder layout = this.createConfiguredLayout(builder); Configuration config = DvalinLog4jConfigurationFactory.configure(builder, layout, true, filePath, filePattern, host, facility, syslogLayout, syslogLevel); Configurator.reconfigure(config); Configurator.setRootLevel(this.getLevel(Log4jDaemonProperties.LOGGER_LEVEL, Log4jDaemonProperties.DEFAULT_LEVEL)); } @Override public void reconfigureLogging() { final Logger rlog = LogManager.getRootLogger(); rlog.info("Reconfigure Logging"); String daemonName = DaemonStarter.getDaemonName(); boolean console = !DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.LOGGER_STDOUT, "true").equals(FALSE_STRING); String filePath = null; String filePattern = null; String host = null; String facility = null; Level syslogLevel = null; if (!DaemonStarter.isDevelopmentMode()) { if (!DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.LOGGER_FILE, FALSE_STRING).equals(FALSE_STRING)) { filePath = DvalinLog4jConfigurationFactory.getLogFilePath(daemonName); filePattern = DvalinLog4jConfigurationFactory.getLogFilePattern(filePath); } if (!DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.LOGGER_SYSLOG, FALSE_STRING).equals(FALSE_STRING)) { host = DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.SYSLOG_HOST, "localhost"); facility = DaemonStarter.getDaemonProperties().getProperty(Log4jDaemonProperties.SYSLOG_FACILITY, "LOCAL0"); syslogLevel = getLevel(Log4jDaemonProperties.SYSLOG_LEVEL, Log4jDaemonProperties.DEFAULT_LEVEL); } } ConfigurationBuilder<BuiltConfiguration> builder = ConfigurationBuilderFactory.newConfigurationBuilder(); LayoutComponentBuilder layout = 
this.createConfiguredLayout(builder); LayoutComponentBuilder syslogLayout = DvalinLog4jConfigurationFactory.createSyslogLayout(builder, daemonName); Configuration config = DvalinLog4jConfigurationFactory.configure(builder, layout, console, filePath, filePattern, host, facility, syslogLayout, syslogLevel); Configurator.reconfigure(config); Level level = this.getLevel(Log4jDaemonProperties.LOGGER_LEVEL, Log4jDaemonProperties.DEFAULT_LEVEL); Configurator.setRootLevel(level); rlog.info("Set root log level to {}", level); Map<String, Level> customLevelMap = Log4jDaemonProperties.getCustomLevelMap(); if (customLevelMap.isEmpty()) { return; } rlog.info("Set custom log levels"); Configurator.setLevel(customLevelMap); } protected Level getLevel(String property, Level defaultLevel) { String propertyValue = DaemonStarter.getDaemonProperties().getProperty(property); return Level.toLevel(propertyValue, defaultLevel); } /** * @param builder config builder * @return layout builder */ protected LayoutComponentBuilder createConfiguredLayout(ConfigurationBuilder<?> builder) { switch (System.getProperty(Log4jDaemonProperties.LOGGER_LAYOUT, Log4jDaemonProperties.LOGGER_LAYOUT_PATTERN)) { case Log4jDaemonProperties.LOGGER_LAYOUT_JSON: return builder.newLayout("JsonTemplateLayout").addAttribute("eventTemplateUri", "classpath:log4j/JsonLogTemplate.json"); case Log4jDaemonProperties.LOGGER_LAYOUT_PATTERN: default: return builder.newLayout("PatternLayout").addAttribute("pattern", Log4jDaemonProperties.DEFAULT_PATTERN); } } @Override public void simpleLogging() { ConfigurationBuilder<BuiltConfiguration> builder = ConfigurationBuilderFactory.newConfigurationBuilder(); Configurator.reconfigure(DvalinLog4jConfigurationFactory.configure(builder)); Level level = getLevel(Log4jDaemonProperties.LOGGER_LEVEL, Log4jDaemonProperties.DEFAULT_LEVEL); Configurator.setRootLevel(level); } public static void setup() { System.setProperty(DaemonProperties.LOGGER_CONFIGURER, Log4jLoggingConfigurer.class.getCanonicalName()); } }
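A minimal bootstrap sketch (not part of the original file) showing how this configurer is typically wired in: setup() registers the class through the LOGGER_CONFIGURER daemon property before the daemon framework initializes logging. The MyDaemon class name is hypothetical, and the daemon start call is only indicated, since the DaemonStarter start API is not shown above.

// Illustrative bootstrap sketch; MyDaemon is a hypothetical entry point.
public final class MyDaemon {

    public static void main(String[] args) {
        // Registers Log4jLoggingConfigurer as the daemon's logging configurer
        // by setting the DaemonProperties.LOGGER_CONFIGURER system property.
        Log4jLoggingConfigurer.setup();

        // ... hand control to the daemon framework here (e.g. DaemonStarter);
        // the framework then instantiates the configurer and invokes
        // initializeLogging()/reconfigureLogging() as appropriate.
    }
}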
{ "content_hash": "ab5ee2123f189d63d340951e6d1f2ebb", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 165, "avg_line_length": 48.87596899224806, "alnum_prop": 0.7359238699444886, "repo_name": "taimos/dvalin", "id": "25996384431ef43525700d9267fd542d6a05bbec", "size": "6968", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "daemon/src/main/java/de/taimos/daemon/log4j/Log4jLoggingConfigurer.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "533" }, { "name": "HTML", "bytes": "3736" }, { "name": "Java", "bytes": "1249209" }, { "name": "Shell", "bytes": "1497" } ], "symlink_target": "" }
define(function(require) { 'use strict'; var Songs = require('background/collection/songs'); var YouTubeAPIKey = require('background/key/youTubeAPIKey'); var SongType = require('background/enum/songType'); var YouTubeServiceType = require('background/enum/youTubeServiceType'); var Utility = require('common/utility'); var YouTubeV3API = Backbone.Model.extend({ // Performs a search and then grabs the first item and returns its title // Expects options: { title: string, success: function, error: function } getSongByTitle: function(options) { return this.search({ text: options.title, // Expect to find a playable song within the first 10 -- don't need the default 50 items maxResults: 10, success: function(searchResponse) { if (searchResponse.songs.length === 0) { if (options.error) { options.error(chrome.i18n.getMessage('failedToFindSong')); } } else { options.success(searchResponse.songs.first()); } }, error: options.error, complete: options.complete }); }, // Performs a search of YouTube with the provided text and returns a list of playable songs (<= max-results) // Expects options: { maxResults: integer, text: string, fields: string, success: function, error: function } search: function(options) { var activeJqXHR = this._doRequest(YouTubeServiceType.Search, { success: function(response) { var songIds = _.map(response.items, function(item) { return item.id.videoId; }); activeJqXHR = this.getSongs({ songIds: songIds, success: function(songs) { activeJqXHR = null; options.success({ songs: songs, nextPageToken: response.nextPageToken, }); }, error: options.error, complete: options.complete }); }.bind(this), error: function(error) { if (options.error) { options.error(error); } if (options.complete) { options.complete(); } } }, { part: 'id', type: 'video', maxResults: options.maxResults || 50, pageToken: options.pageToken || '', q: options.text.trim(), fields: 'nextPageToken, items/id/videoId', // I don't think it's a good idea to filter out results based on safeSearch for music. safeSearch: 'none', videoEmbeddable: 'true' }); return { promise: activeJqXHR, abort: function() { if (!_.isNull(activeJqXHR)) { activeJqXHR.abort(); } } }; }, getChannelUploadsPlaylistId: function(options) { var listOptions = _.extend({ part: 'contentDetails', fields: 'items/contentDetails/relatedPlaylists/uploads' }, _.pick(options, ['id', 'forUsername'])); return this._doRequest('channels', { success: function(response) { if (_.isUndefined(response.items[0])) { options.error(); throw new Error('No response.items found for options:' + JSON.stringify(options)); } options.success({ uploadsPlaylistId: response.items[0].contentDetails.relatedPlaylists.uploads }); }, error: options.error, complete: options.complete }, listOptions); }, getSong: function(options) { return this.getSongs({ songIds: [options.songId], success: function(songs) { if (songs.length === 0) { options.error(chrome.i18n.getMessage('failedToFindSong') + ' ' + options.songId); } else { options.success(songs.first()); } }, error: options.error, complete: options.complete }); }, // Returns the results of a request for a segment of a channel, playlist, or other dataSource. 
getPlaylistSongs: function(options) { var activeJqXHR = this._doRequest(YouTubeServiceType.PlaylistItems, { success: function(response) { var songIds = _.map(response.items, function(item) { return item.contentDetails.videoId; }); activeJqXHR = this.getSongs({ songIds: songIds, success: function(songs) { activeJqXHR = null; options.success({ songs: songs, nextPageToken: response.nextPageToken, }); }, error: options.error, complete: options.complete }); }.bind(this), error: function(error) { if (options.error) { options.error(error); } if (options.complete) { options.complete(); } } }, { part: 'contentDetails', maxResults: 50, playlistId: options.playlistId, pageToken: options.pageToken || '', fields: 'nextPageToken, items/contentDetails/videoId' }); return { promise: activeJqXHR, abort: function() { if (!_.isNull(activeJqXHR)) { activeJqXHR.abort(); } } }; }, getRelatedSongs: function(options) { var activeJqXHR = this._doRequest(YouTubeServiceType.Search, { success: function(response) { // It is possible to receive no response if a song was removed from YouTube but is still known to StreamusBG. if (!response) { throw new Error('No response for: ' + JSON.stringify(options)); } var songIds = _.map(response.items, function(item) { return item.id.videoId; }); activeJqXHR = this.getSongs({ songIds: songIds, success: function(songs) { activeJqXHR = null; options.success(songs); }, error: options.error, complete: options.complete }); }.bind(this), error: function(error) { if (options.error) { options.error(error); } if (options.complete) { options.complete(); } } }, { part: 'id', relatedToVideoId: options.songId, maxResults: options.maxResults || 5, // If the relatedToVideoId parameter has been supplied, type must be video. type: 'video', fields: 'items/id/videoId', videoEmbeddable: 'true' }); return { promise: activeJqXHR, abort: function() { if (!_.isNull(activeJqXHR)) { activeJqXHR.abort(); } } }; }, // Converts a list of YouTube song ids into actual video information by querying YouTube with the list of ids. getSongs: function(options) { return this._doRequest(YouTubeServiceType.Videos, { success: function(response) { if (_.isUndefined(response)) { if (options.error) { options.error(); } throw new Error('No response found for options: ' + JSON.stringify(options)); } if (_.isUndefined(response.items)) { if (options.error) { var isSingleSong = options.songIds.length === 1; var errorMessage = chrome.i18n.getMessage(isSingleSong ? 'failedToFindSong' : 'failedToFindSongs'); options.error(errorMessage); } } else { var playableItems = _.filter(response.items, function(item) { // Filter out songs are not able to be embedded since they are unable to be played in StreamusBG. var isEmbeddable = item.status.embeddable; // Songs with 0s duration are unable to be played and YouTube's API // sometimes responds (incorrectly) with PT0S. 
// https://code.google.com/p/gdata-issues/issues/detail?id=7172 var hasValidDuration = item.contentDetails.duration !== 'PT0S'; return isEmbeddable && hasValidDuration; }); var songs = this._itemListToSongs(playableItems); options.success(songs); } }.bind(this), error: options.error, complete: options.complete }, { part: 'contentDetails, snippet, status', id: options.songIds.join(','), fields: 'items/id, items/contentDetails/duration, items/snippet/title, items/snippet/channelTitle, items/status/embeddable' }); }, getTitle: function(options) { var ajaxDataOptions = _.extend({ part: 'snippet', fields: 'items/snippet/title' }, _.pick(options, ['id', 'forUsername'])); return this._doRequest(options.serviceType, { success: function(response) { if (response.items.length === 0) { options.error(chrome.i18n.getMessage('errorLoadingTitle')); } else { options.success(response.items[0].snippet.title); } }, error: options.error, complete: options.complete }, ajaxDataOptions); }, insertPlaylist: function(options) { return this._doInsertRequest(YouTubeServiceType.Playlists, options.authToken, { success: options.success, error: options.error, complete: options.complete }, { snippet: { title: options.playlistTitle } }); }, insertPlaylistItems: function(options) { if (options.songIds.length > 0) { var songId = options.songIds.shift(); return this._doInsertRequest(YouTubeServiceType.PlaylistItems, options.authToken, { // TODO: Tricky to report songs which failed to insert. complete: this.insertPlaylistItems.bind(this, options) }, { snippet: { playlistId: options.playlistId, resourceId: { kind: 'youtube#video', videoId: songId } } }); } else { console.log('complete'); if (options.success) { options.success(); } if (options.complete) { options.complete(); } } }, _doInsertRequest: function(serviceType, authToken, ajaxOptions, ajaxDataOptions) { return $.ajax(_.extend({ type: 'POST', url: 'https://www.googleapis.com/youtube/v3/' + serviceType + '?part=snippet', beforeSend: function(request) { request.setRequestHeader('Authorization', 'Bearer ' + authToken); }, contentType: 'application/json; charset=utf-8', data: JSON.stringify(_.extend({ key: YouTubeAPIKey }, ajaxDataOptions)), }, ajaxOptions)); }, _doRequest: function(serviceType, ajaxOptions, ajaxDataOptions) { return $.ajax(_.extend({ url: 'https://www.googleapis.com/youtube/v3/' + serviceType, data: _.extend({ key: YouTubeAPIKey }, ajaxDataOptions) }, ajaxOptions)); }, _itemListToSongs: function(itemList) { return new Songs(_.map(itemList, function(item) { return { id: item.id, duration: Utility.iso8061DurationToSeconds(item.contentDetails.duration), title: item.snippet.title, author: item.snippet.channelTitle, type: SongType.YouTube }; })); } }); return new YouTubeV3API(); });
{ "content_hash": "bf6a4f051233f756cf5e47a851c426ab", "timestamp": "", "source": "github", "line_count": 359, "max_line_length": 131, "avg_line_length": 32.43732590529248, "alnum_prop": 0.5575783598110777, "repo_name": "nglinh/StreamusChromeExtension", "id": "e3c281937a68e2a729f9325492b6fa8e66d340fd", "size": "11647", "binary": false, "copies": "3", "ref": "refs/heads/Development", "path": "src/js/background/model/youTubeV3API.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "44004" }, { "name": "HTML", "bytes": "27041" }, { "name": "JavaScript", "bytes": "1944486" } ], "symlink_target": "" }
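For context, the getSongs() call in the youTubeV3API module above boils down to a single GET request against the YouTube Data API v3 videos endpoint, with the same part/fields filters and the same playability check applied to the response. A minimal Python sketch of that request shape, assuming the requests package is available and using a placeholder API key and an example video id (neither is taken from the module, which loads its real key from background/key/youTubeAPIKey):

import requests

API_KEY = 'YOUR_API_KEY'  # placeholder, not a real key


def get_playable_songs(song_ids):
    # Same endpoint, part and fields filter as getSongs() in the module above.
    response = requests.get(
        'https://www.googleapis.com/youtube/v3/videos',
        params={
            'key': API_KEY,
            'id': ','.join(song_ids),
            'part': 'contentDetails, snippet, status',
            'fields': ('items/id, items/contentDetails/duration, '
                       'items/snippet/title, items/snippet/channelTitle, '
                       'items/status/embeddable'),
        })
    response.raise_for_status()
    items = response.json().get('items', [])
    # Mirror the module's playability filter: embeddable and non-zero duration.
    return [item for item in items
            if item['status']['embeddable']
            and item['contentDetails']['duration'] != 'PT0S']


if __name__ == '__main__':
    print(get_playable_songs(['M7lc1UVf-VE']))  # example video id only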
"""Sort current config.json alphabetically. """ import json import os import re import sys ORIG_CWD = os.getcwd() # Checkout changes cwd def test_infra(*paths): """Return path relative to root of test-infra repo.""" return os.path.join(ORIG_CWD, os.path.dirname(__file__), '..', *paths) def sort(): """Sort config.json alphabetically.""" with open(test_infra('jobs/config.json'), 'r+') as fp: configs = json.loads(fp.read()) regexp = re.compile(r'KUBEKINS_TIMEOUT=(\d+m)') problems = [] for job, values in configs.items(): if values.get('scenario') != 'kubernetes_e2e': continue migrated = any('--timeout=' in a for a in values.get('args', [])) with open(test_infra('jobs/%s.env' % job)) as fp: env = fp.read() if migrated: if 'KUBEKINS_TIMEOUT=' in env: problems.append(job) continue timeout = None lines = [] for line in env.split('\n'): mat = regexp.search(line) if not mat: lines.append(line) continue if timeout: print >>sys.stderr, 'Duplicate timeouts:', job problems.append(job) break timeout = mat.group(1) else: if not timeout: problems.append(job) with open(test_infra('jobs/%s.env' % job), 'w') as fp: fp.write('\n'.join(lines)) values['args'].append('--timeout=%s' % timeout) with open(test_infra('jobs/config.json'), 'w') as fp: fp.write(json.dumps(configs, sort_keys=True, indent=2)) fp.write('\n') if not problems: sys.exit(0) print >>sys.stderr, '%d problems' % len(problems) print '\n'.join(problems) if __name__ == '__main__': sort()
{ "content_hash": "439d8a1fa671fdfa74e9c3db8a367009", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 74, "avg_line_length": 32.666666666666664, "alnum_prop": 0.5338345864661654, "repo_name": "dchen1107/test-infra", "id": "7b482a0ba89cb570d751d5a753cc3e09ced3b34d", "size": "2473", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "jobs/move_timeout.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "10320" }, { "name": "Go", "bytes": "1625626" }, { "name": "HTML", "bytes": "57297" }, { "name": "JavaScript", "bytes": "70846" }, { "name": "Makefile", "bytes": "32985" }, { "name": "Nginx", "bytes": "1532" }, { "name": "Protocol Buffer", "bytes": "5614" }, { "name": "Python", "bytes": "729862" }, { "name": "Roff", "bytes": "13936" }, { "name": "Shell", "bytes": "122854" } ], "symlink_target": "" }
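Despite its module docstring, the main job of move_timeout.py above is a migration: for each kubernetes_e2e job it lifts KUBEKINS_TIMEOUT=<N>m out of the job's .env file and appends a matching --timeout=<N>m argument to that job's entry in config.json, rewriting config.json sorted along the way. A self-contained sketch of that per-job rule, using made-up sample data rather than the real jobs/ files:

import re

TIMEOUT_RE = re.compile(r'KUBEKINS_TIMEOUT=(\d+m)')


def migrate_timeout(env_text, args):
    """Drop the KUBEKINS_TIMEOUT line from env_text and append --timeout to args."""
    kept_lines = []
    timeout = None
    for line in env_text.split('\n'):
        match = TIMEOUT_RE.search(line)
        if match:
            timeout = match.group(1)
        else:
            kept_lines.append(line)
    if timeout:
        args = args + ['--timeout=%s' % timeout]
    return '\n'.join(kept_lines), args


if __name__ == '__main__':
    sample_env = 'FOO=bar\nKUBEKINS_TIMEOUT=75m\n'
    print(migrate_timeout(sample_env, ['--check-leaked-resources']))
    # -> ('FOO=bar\n', ['--check-leaked-resources', '--timeout=75m'])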
""" A few bits of helper functions for comment views. """ import textwrap try: from urllib.parse import urlencode except ImportError: # Python 2 from urllib import urlencode from django.http import HttpResponseRedirect from django.shortcuts import render_to_response, resolve_url from django.template import RequestContext from django.core.exceptions import ObjectDoesNotExist from django.utils.http import is_safe_url import django_comments def next_redirect(request, fallback, **get_kwargs): """ Handle the "where should I go next?" part of comment views. The next value could be a ``?next=...`` GET arg or the URL of a given view (``fallback``). See the view modules for examples. Returns an ``HttpResponseRedirect``. """ next = request.POST.get('next') if not is_safe_url(url=next, host=request.get_host()): next = resolve_url(fallback) if get_kwargs: if '#' in next: tmp = next.rsplit('#', 1) next = tmp[0] anchor = '#' + tmp[1] else: anchor = '' joiner = ('?' in next) and '&' or '?' next += joiner + urlencode(get_kwargs) + anchor return HttpResponseRedirect(next) def confirmation_view(template, doc="Display a confirmation view."): """ Confirmation view generator for the "comment was posted/flagged/deleted/approved" views. """ def confirmed(request): comment = None if 'c' in request.GET: try: comment = django_comments.get_model().objects.get(pk=request.GET['c']) except (ObjectDoesNotExist, ValueError): pass return render_to_response( template, {'comment': comment}, context_instance=RequestContext(request) ) confirmed.__doc__ = textwrap.dedent("""\ %s Templates: :template:`%s`` Context: comment The posted comment """ % (doc, template) ) return confirmed
{ "content_hash": "8e1283a96b84654325ce65be32a1560a", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 86, "avg_line_length": 26.894736842105264, "alnum_prop": 0.6056751467710372, "repo_name": "Maplecroft/django-contrib-comments", "id": "32e73def5e1efc7ed74e3106bdbe21972d66be55", "size": "2044", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "django_comments/views/utils.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "7622" }, { "name": "Python", "bytes": "142450" } ], "symlink_target": "" }
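For illustration, here is how a hypothetical comment-flagging view could lean on the two helpers above; the view name, fallback URL name, and template path are assumptions, not part of django_comments:

from django.views.decorators.http import require_POST

import django_comments
from django_comments.views.utils import confirmation_view, next_redirect

# A "comment was flagged" page generated by the helper; template path is assumed.
flag_done = confirmation_view(
    template='comments/flag_done.html',
    doc='Displays a "comment was flagged" success page.',
)


@require_POST
def flag_comment(request, comment_id):
    comment = django_comments.get_model().objects.get(pk=comment_id)
    # ... record the flag on `comment` here ...
    # Redirect to ?next=... when it is safe, otherwise to the named fallback URL,
    # carrying the comment id along as ?c=<pk> so the confirmation view can show it.
    return next_redirect(request, fallback='comments-flag-done', c=comment.pk)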
#import "AppDelegate.h"
// Header name assumed from the class used below; adjust if the controller lives elsewhere.
#import "LMJCollectionViewController.h"

@interface AppDelegate ()

@end

@implementation AppDelegate

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // Create the window, install the collection view controller as root and show it.
    self.window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
    self.window.rootViewController = [[LMJCollectionViewController alloc] init];
    [self.window makeKeyAndVisible];
    return YES;
}

- (void)applicationWillResignActive:(UIApplication *)application {
    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
    // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
    // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}

- (void)applicationWillTerminate:(UIApplication *)application {
    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}

@end

{ "content_hash": "2b286125432360c30ba79d817ba43aee", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 281, "avg_line_length": 49.292682926829265, "alnum_prop": 0.7778327560613557, "repo_name": "NJHu/FlowLayout", "id": "08ca0c57b7bc1e284fc3328a8564cb42c8a6590d", "size": "2235", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CollectionView 基础使用/collectionView的基本使用-1/AppDelegate.m", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "98945" }, { "name": "Ruby", "bytes": "202" } ], "symlink_target": "" }
from __future__ import absolute_import __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: zfs_delegate_admin short_description: Manage ZFS delegated administration (user admin privileges) description: - Manages ZFS file system delegated administration permissions, which allow unprivileged users to perform ZFS operations normally restricted to the superuser. - See the "zfs allow" section of C(zfs(1M)) for detailed explanations of options. This module attempts to adhere to the behavior of the command line tool as much as possible. requirements: - "A ZFS/OpenZFS implementation that supports delegation with `zfs allow`, including: Solaris >= 10, illumos (all versions), FreeBSD >= 8.0R, ZFS on Linux >= 0.7.0." version_added: "2.5" options: name: description: - File system or volume name e.g. C(rpool/myfs) required: true state: description: - Whether to allow (C(present)), or unallow (C(absent)) a permission. When set to C(present), at least one "entity" param of I(users), I(groups), or I(everyone) are required. When set to C(absent), removes permissions from the specified entities, or removes all permissions if no entity params are specified. required: true choices: [present, absent] users: description: - List of users to whom permission(s) should be granted groups: description: - List of groups to whom permission(s) should be granted everyone: description: - Apply permissions to everyone. default: false type: bool permissions: description: - The list of permission(s) to delegate (required if C(state) is C(present)) choices: ['allow','clone','create','destroy',...] local: description: - Apply permissions to C(name) locally (C(zfs allow -l)) default: null type: bool descendents: description: - Apply permissions to C(name)'s descendents (C(zfs allow -d)) default: null type: bool recursive: description: - Unallow permissions recursively (ignored when C(state) is C(present)) default: false type: bool author: "Nate Coraor (@natefoo)" ''' EXAMPLES = ''' # Grant `zfs allow` and `unallow` permission to the `adm` user with the default local+descendents scope - zfs_delegate_admin: name=rpool/myfs users=adm permissions=allow,unallow # Grant `zfs send` to everyone, plus the group `backup` - zfs_delegate_admin: name=rpool/myvol groups=backup everyone=yes permissions=send # Grant `zfs send,receive` to users `foo` and `bar` with local scope only - zfs_delegate_admin: name=rpool/myfs users=foo,bar permissions=send,receive local=yes # Revoke all permissions from everyone (permissions specifically assigned to users and groups remain) - zfs_delegate_admin: name=rpool/myfs state=absent everyone=yes ''' # This module does not return anything other than the standard # changed/state/msg/stdout RETURN = ''' ''' from itertools import product from ansible.module_utils.basic import AnsibleModule class ZfsDelegateAdmin(object): def __init__(self, module): self.module = module self.name = module.params.get('name') self.state = module.params.get('state') self.users = module.params.get('users') self.groups = module.params.get('groups') self.everyone = module.params.get('everyone') self.perms = module.params.get('permissions') self.scope = None self.changed = False self.initial_perms = None self.subcommand = 'allow' self.recursive_opt = [] self.run_method = self.update self.setup(module) def setup(self, module): """ Validate params and set up for run. 
""" if self.state == 'absent': self.subcommand = 'unallow' if module.params.get('recursive'): self.recursive_opt = ['-r'] local = module.params.get('local') descendents = module.params.get('descendents') if (local and descendents) or (not local and not descendents): self.scope = 'ld' elif local: self.scope = 'l' elif descendents: self.scope = 'd' else: self.module.fail_json(msg='Impossible value for local and descendents') if not (self.users or self.groups or self.everyone): if self.state == 'present': self.module.fail_json(msg='One of `users`, `groups`, or `everyone` must be set') elif self.state == 'absent': self.run_method = self.clear # ansible ensures the else cannot happen here self.zfs_path = module.get_bin_path('zfs', True) @property def current_perms(self): """ Parse the output of `zfs allow <name>` to retrieve current permissions. """ out = self.run_zfs_raw(subcommand='allow') perms = { 'l': {'u': {}, 'g': {}, 'e': []}, 'd': {'u': {}, 'g': {}, 'e': []}, 'ld': {'u': {}, 'g': {}, 'e': []}, } linemap = { 'Local permissions:': 'l', 'Descendent permissions:': 'd', 'Local+Descendent permissions:': 'ld', } scope = None for line in out.splitlines(): scope = linemap.get(line, scope) if not scope: continue try: if line.startswith('\tuser ') or line.startswith('\tgroup '): ent_type, ent, cur_perms = line.split() perms[scope][ent_type[0]][ent] = cur_perms.split(',') elif line.startswith('\teveryone '): perms[scope]['e'] = line.split()[1].split(',') except ValueError: self.module.fail_json(msg="Cannot parse user/group permission output by `zfs allow`: '%s'" % line) return perms def run_zfs_raw(self, subcommand=None, args=None): """ Run a raw zfs command, fail on error. """ cmd = [self.zfs_path, subcommand or self.subcommand] + (args or []) + [self.name] rc, out, err = self.module.run_command(cmd) if rc: self.module.fail_json(msg='Command `%s` failed: %s' % (' '.join(cmd), err)) return out def run_zfs(self, args): """ Run zfs allow/unallow with appropriate options as per module arguments. """ args = self.recursive_opt + ['-' + self.scope] + args if self.perms: args.append(','.join(self.perms)) return self.run_zfs_raw(args=args) def clear(self): """ Called by run() to clear all permissions. """ changed = False stdout = '' for scope, ent_type in product(('ld', 'l', 'd'), ('u', 'g')): for ent in self.initial_perms[scope][ent_type].keys(): stdout += self.run_zfs(['-%s' % ent_type, ent]) changed = True for scope in ('ld', 'l', 'd'): if self.initial_perms[scope]['e']: stdout += self.run_zfs(['-e']) changed = True return (changed, stdout) def update(self): """ Update permissions as per module arguments. """ stdout = '' for ent_type, entities in (('u', self.users), ('g', self.groups)): if entities: stdout += self.run_zfs(['-%s' % ent_type, ','.join(entities)]) if self.everyone: stdout += self.run_zfs(['-e']) return (self.initial_perms != self.current_perms, stdout) def run(self): """ Run an operation, return results for Ansible. 
""" exit_args = {'state': self.state} self.initial_perms = self.current_perms exit_args['changed'], stdout = self.run_method() if exit_args['changed']: exit_args['msg'] = 'ZFS delegated admin permissions updated' exit_args['stdout'] = stdout self.module.exit_json(**exit_args) def main(): module = AnsibleModule( argument_spec=dict( name=dict(required=True), state=dict(default='present', choices=['absent', 'present']), users=dict(default=[], type='list'), groups=dict(default=[], type='list'), everyone=dict(default=False, type='bool'), permissions=dict(default=[], type='list'), local=dict(default=None, type='bool'), descendents=dict(default=None, type='bool'), recursive=dict(default=False, type='bool') ), supports_check_mode=False, required_if=[('state', 'present', ['permissions'])] ) zfs_delegate_admin = ZfsDelegateAdmin(module) zfs_delegate_admin.run() if __name__ == '__main__': main()
{ "content_hash": "b726ed0997c45ec2208f52eaf493563d", "timestamp": "", "source": "github", "line_count": 243, "max_line_length": 118, "avg_line_length": 36.641975308641975, "alnum_prop": 0.5889487870619946, "repo_name": "galaxyproject/ansible-common-roles", "id": "749dba5b5a178e2d5df6d116a0cb0cfea666fe48", "size": "9645", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "paths/library/zfs_delegate_admin.py", "mode": "33188", "license": "mit", "language": [ { "name": "Jinja", "bytes": "2031" }, { "name": "Python", "bytes": "14543" }, { "name": "Shell", "bytes": "13492" } ], "symlink_target": "" }
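To make the option handling above concrete, here is a standalone sketch of the command line that run_zfs()/run_zfs_raw() assemble before handing it to run_command(); the zfs path and entity names are sample values and nothing is actually executed:

def build_zfs_command(zfs_path, subcommand, name, scope, perms,
                      recursive=False, entity_args=None):
    """Mirror one run_zfs() call; the module issues one command per entity type."""
    # entity_args is e.g. ['-u', 'foo,bar'], ['-g', 'backup'] or ['-e'].
    args = list(entity_args or [])
    # Recursive flag (unallow only), then scope: 'l', 'd' or 'ld'.
    args = (['-r'] if recursive else []) + ['-' + scope] + args
    if perms:
        args.append(','.join(perms))
    return [zfs_path, subcommand] + args + [name]


if __name__ == '__main__':
    print(' '.join(build_zfs_command(
        '/usr/sbin/zfs', 'allow', 'rpool/myfs',
        scope='ld', perms=['send', 'receive'], entity_args=['-u', 'foo,bar'])))
    # -> /usr/sbin/zfs allow -ld -u foo,bar send,receive rpool/myfs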
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>basic_signal_set::remove</title> <link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../../boost_asio.html" title="Boost.Asio"> <link rel="up" href="../basic_signal_set.html" title="basic_signal_set"> <link rel="prev" href="implementation_type.html" title="basic_signal_set::implementation_type"> <link rel="next" href="remove/overload1.html" title="basic_signal_set::remove (1 of 2 overloads)"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td> <td align="center"><a href="../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="implementation_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_signal_set.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="remove/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_asio.reference.basic_signal_set.remove"></a><a class="link" href="remove.html" title="basic_signal_set::remove">basic_signal_set::remove</a> </h4></div></div></div> <p> <a class="indexterm" name="idm45773651995520"></a> Remove a signal from a signal_set. 
</p> <pre class="programlisting"><span class="keyword">void</span> <a class="link" href="remove/overload1.html" title="basic_signal_set::remove (1 of 2 overloads)">remove</a><span class="special">(</span> <span class="keyword">int</span> <span class="identifier">signal_number</span><span class="special">);</span> <span class="emphasis"><em>&#187; <a class="link" href="remove/overload1.html" title="basic_signal_set::remove (1 of 2 overloads)">more...</a></em></span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <a class="link" href="remove/overload2.html" title="basic_signal_set::remove (2 of 2 overloads)">remove</a><span class="special">(</span> <span class="keyword">int</span> <span class="identifier">signal_number</span><span class="special">,</span> <span class="identifier">boost</span><span class="special">::</span><span class="identifier">system</span><span class="special">::</span><span class="identifier">error_code</span> <span class="special">&amp;</span> <span class="identifier">ec</span><span class="special">);</span> <span class="emphasis"><em>&#187; <a class="link" href="remove/overload2.html" title="basic_signal_set::remove (2 of 2 overloads)">more...</a></em></span> </pre> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2015 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="implementation_type.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_signal_set.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="remove/overload1.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "f82c6bfb07f565ce1efb1f601d6d541c", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 449, "avg_line_length": 78.21052631578948, "alnum_prop": 0.6469268730372364, "repo_name": "zjutjsj1004/third", "id": "9c9fce40259787c6295d4fba9427029f06ceb101", "size": "4458", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "boost/doc/html/boost_asio/reference/basic_signal_set/remove.html", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "224158" }, { "name": "Batchfile", "bytes": "33175" }, { "name": "C", "bytes": "5576593" }, { "name": "C#", "bytes": "41850" }, { "name": "C++", "bytes": "179595990" }, { "name": "CMake", "bytes": "28348" }, { "name": "CSS", "bytes": "331303" }, { "name": "Cuda", "bytes": "26521" }, { "name": "FORTRAN", "bytes": "1856" }, { "name": "Groff", "bytes": "1305458" }, { "name": "HTML", "bytes": "159660377" }, { "name": "IDL", "bytes": "15" }, { "name": "JavaScript", "bytes": "285786" }, { "name": "Lex", "bytes": "1290" }, { "name": "Makefile", "bytes": "1202020" }, { "name": "Max", "bytes": "37424" }, { "name": "Objective-C", "bytes": "3674" }, { "name": "Objective-C++", "bytes": "651" }, { "name": "PHP", "bytes": "60249" }, { "name": "Perl", "bytes": "37297" }, { "name": "Perl6", "bytes": "2130" }, { "name": "Python", "bytes": "1833677" }, { "name": "QML", "bytes": "613" }, { "name": "QMake", "bytes": "17385" }, { "name": "Rebol", "bytes": "372" }, { "name": "Shell", "bytes": "1144162" }, { "name": "Tcl", "bytes": "1205" }, { "name": "TeX", "bytes": "38313" }, { "name": "XSLT", "bytes": "564356" }, { "name": "Yacc", "bytes": "20341" } ], "symlink_target": "" }