text
stringlengths
4
5.48M
meta
stringlengths
14
6.54k
define(function(require) { 'use strict'; var ListItemButton = require('foreground/view/behavior/listItemButton'); var DeleteListItemButtonTemplate = require('text!template/listItemButton/deleteListItemButton.html'); var DeleteIconTemplate = require('text!template/icon/deleteIcon_18.svg'); var DeleteListItemButtonView = Marionette.LayoutView.extend({ template: _.template(DeleteListItemButtonTemplate), templateHelpers: { deleteIcon: _.template(DeleteIconTemplate)() }, behaviors: { ListItemButton: { behaviorClass: ListItemButton } }, attributes: { 'data-tooltip-text': chrome.i18n.getMessage('delete') }, listItem: null, initialize: function(options) { this.listItem = options.listItem; // Ensure that the user isn't able to destroy the model more than once. this._deleteListItem = _.once(this._deleteListItem); }, onClick: function() { this._deleteListItem(); }, _deleteListItem: function() { this.listItem.destroy(); } }); return DeleteListItemButtonView; });
{'content_hash': '76c09686b50b6a6abc095f5df334a566', 'timestamp': '', 'source': 'github', 'line_count': 42, 'max_line_length': 103, 'avg_line_length': 26.333333333333332, 'alnum_prop': 0.6763110307414105, 'repo_name': 'trsouz/StreamusChromeExtension', 'id': '5cd622228369bf9332005854cf38f4a7e057fbbc', 'size': '1108', 'binary': False, 'copies': '5', 'ref': 'refs/heads/Development', 'path': 'src/js/foreground/view/listItemButton/deleteListItemButtonView.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '45786'}, {'name': 'HTML', 'bytes': '27983'}, {'name': 'JavaScript', 'bytes': '1876978'}]}
package ij.plugin; import ij.*; import ij.gui.*; import ij.process.*; import ij.measure.*; import ij.plugin.filter.Analyzer; import java.awt.*; import java.awt.image.*; import java.awt.event.*; import java.util.*; /** This plugin generates gel profile plots that can be analyzed using the wand tool. It is similar to the "Gel Plotting Macros" in NIH Image. */ public class GelAnalyzer implements PlugIn { static final String OPTIONS = "gel.options"; static final String VSCALE = "gel.vscale"; static final String HSCALE = "gel.hscale"; static final int OD=1, PERCENT=2, OUTLINE=4, INVERT=8; static int saveID; static int nLanes, saveNLanes; static Rectangle firstRect; static final int MAX_LANES = 100; static int[] x = new int[MAX_LANES+1]; static PlotsCanvas plotsCanvas; static ImageProcessor ipLanes; static ImagePlus gel; static int plotHeight; static int options = (int)Prefs.get(OPTIONS, PERCENT+INVERT); static boolean uncalibratedOD = (options&OD)!=0; static boolean labelWithPercentages = (options&PERCENT)!=0;; static boolean outlineLanes; static boolean invertPeaks = (options&INVERT)!=0; static double verticalScaleFactor = Prefs.get(VSCALE, 1.0); static double horizontalScaleFactor = Prefs.get(HSCALE, 1.0); static Overlay overlay; boolean invertedLut; ImagePlus imp; Font f; double odMin=Double.MAX_VALUE, odMax=-Double.MAX_VALUE; static boolean isVertical; static boolean showLaneDialog = true; public void run(String arg) { if (arg.equals("options")) { showDialog(); return; } imp = WindowManager.getCurrentImage(); if (imp==null) { IJ.noImage(); return; } if (arg.equals("reset")) { nLanes = 0; saveNLanes = 0; saveID = 0; if (plotsCanvas!=null) plotsCanvas.reset(); ipLanes = null; overlay = null; if (gel!=null) { ImageCanvas ic = gel.getCanvas(); if (ic!=null) ic.setDisplayList(null); gel.draw(); } return; } if (arg.equals("percent") && plotsCanvas!=null) { plotsCanvas.displayPercentages(); return; } if (arg.equals("label") && plotsCanvas!=null) { if 
(plotsCanvas.counter==0) show("There are no peak area measurements."); else plotsCanvas.labelPeaks(); return; } if (imp.getID()!=saveID) { nLanes=0; ipLanes = null; saveID = 0; } if (arg.equals("replot")) { if (saveNLanes==0) { show("The data needed to re-plot the lanes is not available"); return; } nLanes = saveNLanes; plotLanes(gel, true); return; } if (arg.equals("draw")) { outlineLanes(); return; } Roi roi = imp.getRoi(); if (roi==null || roi.getType()!=Roi.RECTANGLE) { show("Rectangular selection required."); return; } Rectangle rect = roi.getBounds(); if (nLanes==0) { invertedLut = imp.isInvertedLut(); IJ.register(GelAnalyzer.class); // keeps this class from being GC'd } if (arg.equals("first")) { selectFirstLane(rect); return; } if (nLanes==0) { show("You must first use the \"Select First Lane\" command."); return; } if (arg.equals("next")) { selectNextLane(rect); return; } if (arg.equals("plot")) { if (( isVertical && (rect.x!=x[nLanes]) ) || ( !(isVertical) && (rect.y!=x[nLanes]) )) { selectNextLane(rect); } plotLanes(gel, false); return; } } void showDialog() { GenericDialog gd = new GenericDialog("Gel Analyzer"); gd.addNumericField("Vertical scale factor:", verticalScaleFactor, 1); gd.addNumericField("Horizontal scale factor:", horizontalScaleFactor, 1); gd.addCheckbox("Uncalibrated OD", uncalibratedOD); gd.addCheckbox("Label with percentages", labelWithPercentages); gd.addCheckbox("Invert peaks", invertPeaks); gd.addHelp(IJ.URL+"/docs/menus/analyze.html#gels"); gd.showDialog(); if (gd.wasCanceled()) return; verticalScaleFactor = gd.getNextNumber(); horizontalScaleFactor = gd.getNextNumber(); uncalibratedOD = gd.getNextBoolean(); labelWithPercentages = gd.getNextBoolean(); invertPeaks = gd.getNextBoolean(); options = 0; if (uncalibratedOD) options |= OD; if (labelWithPercentages) options |= PERCENT; if (invertPeaks) options |= INVERT; if (verticalScaleFactor==0.0) verticalScaleFactor=1.0; if (horizontalScaleFactor==0.0) horizontalScaleFactor=1.0; 
Prefs.set(OPTIONS, options); Prefs.set(VSCALE, verticalScaleFactor); Prefs.set(HSCALE, horizontalScaleFactor); } void selectFirstLane(Rectangle rect) { if (rect.width/rect.height>=2 || IJ.altKeyDown()) { if (showLaneDialog) { String msg = "Are the lanes really horizontal?\n \n"+ "ImageJ assumes the lanes are\n"+ "horizontal if the selection is more\n"+ "than twice as wide as it is tall. Note\n"+ "that the selection can only be moved\n"+ "vertically when the lanes are horizontal."; GenericDialog gd = new GenericDialog("Gel Analyzer"); gd.addMessage(msg); gd.setOKLabel("Yes"); gd.showDialog(); if (gd.wasCanceled()) return; showLaneDialog = false; } isVertical = false; } else isVertical = true; /* if ( (isVertical && (rect.height/rect.width)<2 ) || (!isVertical && (rect.width/rect.height)<2 ) ) { GenericDialog gd = new GenericDialog("Lane Orientation"); String[] orientations = {"Vertical","Horizontal"}; int defaultOrientation = isVertical?0:1; gd.addChoice("Lane Orientation:", orientations, orientations[defaultOrientation]); gd.showDialog(); if (gd.wasCanceled()) return; String orientation = gd.getNextChoice(); if(orientation.equals(orientations[0])) isVertical=true; else isVertical=false; } */ IJ.showStatus("Lane 1 selected ("+(isVertical?"vertical":"horizontal")+" lanes)"); firstRect = rect; nLanes = 1; saveNLanes = 0; if(isVertical) x[1] = rect.x; else x[1] = rect.y; gel = imp; saveID = imp.getID(); overlay = null; updateRoiList(rect); } void selectNextLane(Rectangle rect) { if (rect.width!=firstRect.width || rect.height!=firstRect.height) { show("Selections must all be the same size."); return; } if (nLanes<MAX_LANES) nLanes += 1; IJ.showStatus("Lane " + nLanes + " selected"); if(isVertical) x[nLanes] = rect.x; else x[nLanes] = rect.y; if (isVertical && rect.y!=firstRect.y) { rect.y = firstRect.y; gel.setRoi(rect); } else if (!isVertical && rect.x!=firstRect.x) { rect.x = firstRect.x; gel.setRoi(rect); } updateRoiList(rect); } void updateRoiList(Rectangle rect) { 
if (gel==null) return; if (overlay==null) { overlay = new Overlay(); overlay.drawLabels(true); overlay.setLabelColor(Color.white); overlay.drawBackgrounds(true); } overlay.add(new Roi(rect.x, rect.y, rect.width, rect.height, null)); gel.setOverlay(overlay); } void plotLanes(ImagePlus imp, boolean replot) { int topMargin = 16; int bottomMargin = 2; double min = Double.MAX_VALUE; double max = -Double.MAX_VALUE; int plotWidth; double[][] profiles; profiles = new double[MAX_LANES+1][]; IJ.showStatus("Plotting " + nLanes + " lanes"); ImageProcessor ipRotated = imp.getProcessor(); if (isVertical) ipRotated = ipRotated.rotateLeft(); ImagePlus imp2 = new ImagePlus("", ipRotated); imp2.setCalibration(imp.getCalibration()); if (uncalibratedOD && (imp2.getType()==ImagePlus.GRAY16 || imp2.getType()==ImagePlus.GRAY32)) new ImageConverter(imp2).convertToGray8(); if (invertPeaks) { ImageProcessor ip2 = imp2.getProcessor().duplicate(); ip2.invert(); imp2.setProcessor(null, ip2); } //imp2.show(); for (int i=1; i<=nLanes; i++) { if (isVertical) imp2.setRoi(firstRect.y, ipRotated.getHeight() - x[i] - firstRect.width, firstRect.height, firstRect.width); else imp2.setRoi(firstRect.x, x[i], firstRect.width, firstRect.height); ProfilePlot pp = new ProfilePlot(imp2); profiles[i] = pp.getProfile(); if (pp.getMin()<min) min = pp.getMin(); if (pp.getMax()>max) max = pp.getMax(); if (uncalibratedOD) profiles[i] = od(profiles[i]); } if (uncalibratedOD) { min = odMin; max = odMax; } if (isVertical) plotWidth = firstRect.height; else plotWidth = firstRect.width; if (plotWidth<650) plotWidth = 650; if (isVertical) { if (plotWidth>4*firstRect.height) plotWidth = 4*firstRect.height; } else { if (plotWidth>4*firstRect.width) plotWidth = 4*firstRect.width; } if (verticalScaleFactor==0.0) verticalScaleFactor=1.0; if (horizontalScaleFactor==0.0) horizontalScaleFactor=1.0; Dimension screen = IJ.getScreenSize(); if (plotWidth>screen.width-screen.width/6) plotWidth = screen.width - screen.width/6; 
plotWidth = (int)(plotWidth*horizontalScaleFactor); plotHeight = plotWidth/2; if (plotHeight<250) plotHeight = 250; // if (plotHeight>500) plotHeight = 500; plotHeight = (int)(plotHeight*verticalScaleFactor); ImageProcessor ip = new ByteProcessor(plotWidth, topMargin+nLanes*plotHeight+bottomMargin); ip.setColor(Color.white); ip.fill(); ip.setColor(Color.black); //draw border int h= ip.getHeight(); ip.moveTo(0,0); ip.lineTo(plotWidth-1,0); ip.lineTo(plotWidth-1, h-1); ip.lineTo(0, h-1); ip.lineTo(0, 0); ip.moveTo(0, h-2); ip.lineTo(plotWidth-1, h-2); String s = imp.getTitle()+"; "; Calibration cal = imp.getCalibration(); if (cal.calibrated()) s += cal.getValueUnit(); else if (uncalibratedOD) s += "Uncalibrated OD"; else s += "Uncalibrated"; ip.moveTo(5,topMargin); ip.drawString(s); double xScale = (double)plotWidth/profiles[1].length; double yScale; if ((max-min)==0.0) yScale = 1.0; else yScale = plotHeight/(max-min); for (int i=1; i<=nLanes; i++) { double[] profile = profiles[i]; int top = (i-1)*plotHeight + topMargin; int base = top+plotHeight; ip.moveTo(0, base); ip.lineTo((int)(profile.length*xScale), base); ip.moveTo(0, base-(int)((profile[0]-min)*yScale)); for (int j = 1; j<profile.length; j++) ip.lineTo((int)(j*xScale+0.5), base-(int)((profile[j]-min)*yScale+0.5)); } Line.setWidth(1); ImagePlus plots = new Plots(); plots.setProcessor("Plots of "+imp.getShortTitle(), ip); plots.changes = true; ip.setThreshold(0,0,ImageProcessor.NO_LUT_UPDATE); // Wand tool works better with threshold set if (cal.calibrated()) { double pixelsAveraged = isVertical?firstRect.width:firstRect.height; double scale = Math.sqrt((xScale*yScale)/pixelsAveraged); Calibration plotsCal = plots.getCalibration(); plotsCal.setUnit("unit"); plotsCal.pixelWidth = 1.0/scale; plotsCal.pixelHeight = 1.0/scale; } plots.show(); saveNLanes = nLanes; nLanes = 0; saveID = 0; //gel = null; ipLanes = null; Toolbar toolbar = Toolbar.getInstance(); toolbar.setColor(Color.black); 
toolbar.setTool(Toolbar.LINE); ImageWindow win = WindowManager.getCurrentWindow(); ImageCanvas canvas = win.getCanvas(); if (canvas instanceof PlotsCanvas) plotsCanvas = (PlotsCanvas)canvas; else plotsCanvas = null; } double[] od(double[] profile) { double v; for (int i=0; i<profile.length; i++) { v = 0.434294481*Math.log(255.0/(255.0-profile[i])); //v = 0.434294481*Math.log(255.0/v); if (v<odMin) odMin = v; if (v>odMax) odMax = v; profile[i] = v; } return profile; } void outlineLanes() { if (gel==null || overlay==null) { show("Data needed to outline lanes is no longer available."); return; } int lineWidth = (int)(1.0/gel.getCanvas().getMagnification()); if (lineWidth<1) lineWidth = 1; Font f = new Font("Helvetica", Font.PLAIN, 12*lineWidth); ImageProcessor ip = gel.getProcessor(); ImageProcessor ipLanes = ip.duplicate(); if (!(ipLanes instanceof ByteProcessor)) ipLanes = ipLanes.convertToByte(true); ipLanes.setFont(f); ipLanes.setLineWidth(lineWidth); setCustomLut(ipLanes); ImagePlus lanes = new ImagePlus("Lanes of "+gel.getShortTitle(), ipLanes); lanes.changes = true; lanes.setRoi(gel.getRoi()); gel.deleteRoi(); for (int i=0; i<overlay.size(); i++) { Roi roi = overlay.get(i); Rectangle r = roi.getBounds(); ipLanes.drawRect(r.x, r.y, r.width, r.height); String s = ""+(i+1); if(isVertical) { int yloc = r.y; if (yloc<lineWidth*12) yloc += lineWidth*14; ipLanes.drawString(s, r.x+r.width/2-ipLanes.getStringWidth(s)/2, yloc); } else { int xloc = r.x-ipLanes.getStringWidth(s)-2; if (xloc<lineWidth*10) xloc = r.x + 2; ipLanes.drawString(s, xloc, r.y+r.height/2+6); } } lanes.deleteRoi(); lanes.show(); } void setCustomLut(ImageProcessor ip) { IndexColorModel cm = (IndexColorModel)ip.getColorModel(); byte[] reds = new byte[256]; byte[] greens = new byte[256]; byte[] blues = new byte[256]; cm.getReds(reds); cm.getGreens(greens); cm.getBlues(blues); reds[1] =(byte) 255; greens[1] = (byte)0; blues[1] = (byte)0; ip.setColorModel(new IndexColorModel(8, 256, reds, greens, 
blues)); byte[] pixels = (byte[])ip.getPixels(); for (int i=0; i<pixels.length; i++) if ((pixels[i]&255)==1) pixels[i] = 0; ip.setColor(1); } void show(String msg) { IJ.showMessage("Gel Analyzer", msg); } } class Plots extends ImagePlus { /** Overrides ImagePlus.show(). */ public void show() { img = ip.createImage(); ImageCanvas ic = new PlotsCanvas(this); win = new ImageWindow(this, ic); IJ.showStatus(""); if (ic.getMagnification()==1.0) return; while(ic.getMagnification()<1.0) ic.zoomIn(0,0); Point loc = win.getLocation(); int w = getWidth()+20; int h = getHeight()+30; Dimension screen = IJ.getScreenSize(); if (loc.x+w>screen.width) w = screen.width-loc.x-20; if (loc.y+h>screen.height) h = screen.height-loc.y-30; win.setSize(w, h); win.validate(); repaintWindow(); } } class PlotsCanvas extends ImageCanvas { public static final int MAX_PEAKS = 200; double[] actual = {428566.00,351368.00,233977.00,99413.00,60057.00,31382.00, 14531.00,7843.00,2146.00,752.00,367.00}; double[] measured = new double[MAX_PEAKS]; Rectangle[] rect = new Rectangle[MAX_PEAKS]; int counter; ResultsTable rt; public PlotsCanvas(ImagePlus imp) { super(imp); } public void mousePressed(MouseEvent e) { super.mousePressed(e); Roi roi = imp.getRoi(); if (roi==null) return; if (roi.getType()==Roi.LINE) Roi.setColor(Color.blue); else Roi.setColor(Color.yellow); if (Toolbar.getToolId()!=Toolbar.WAND || IJ.spaceBarDown()) return; if (IJ.shiftKeyDown()) { IJ.showMessage("Gel Analyzer", "Unable to measure area because shift key is down."); imp.deleteRoi(); counter = 0; return; } ImageStatistics s = imp.getStatistics(); if (counter==0) { rt = ResultsTable.getResultsTable(); rt.reset(); } //IJ.setColumnHeadings(" \tArea"); double perimeter = roi.getLength(); String error = ""; double circularity = 4.0*Math.PI*(s.pixelCount/(perimeter*perimeter)); if (circularity<0.025) error = " (error?)"; double area = s.pixelCount+perimeter/2.0; // add perimeter/2 to account area under border Calibration cal = 
imp.getCalibration(); area = area*cal.pixelWidth*cal.pixelHeight; rect[counter] = roi.getBounds(); //area += (rect[counter].width/rect[counter].height)*1.5; // adjustment for small peaks from NIH Image gel macros int places = cal.scaled()?3:0; rt.incrementCounter(); rt.addValue("Area", area); rt.show("Results"); // IJ.write((counter+1)+"\t"+IJ.d2s(area, places)+error); measured[counter] = area; if (counter<MAX_PEAKS) counter++; } public void mouseReleased(MouseEvent e) { super.mouseReleased(e); Roi roi = imp.getRoi(); if (roi!=null && roi.getType()==Roi.LINE) { Undo.setup(Undo.FILTER, imp); imp.getProcessor().snapshot(); roi.drawPixels(); imp.updateAndDraw(); imp.deleteRoi(); } } void reset() { counter = 0; } void labelPeaks() { imp.deleteRoi(); double total = 0.0; for (int i=0; i<counter; i++) total += measured[i]; ImageProcessor ip = imp.getProcessor(); ip.setFont(new Font("SansSerif", Font.PLAIN, 9)); for (int i=0; i<counter; i++) { Rectangle r = rect[i]; String s; if (GelAnalyzer.labelWithPercentages) s = IJ.d2s((measured[i]/total)*100, 2); else s = IJ.d2s(measured[i], 0); int swidth = ip.getStringWidth(s); int x = r.x + r.width/2 - swidth/2; int y = r.y + r.height*3/4 + 9; int[] data = new int[swidth]; ip.getRow(x, y, data, swidth); boolean fits = true; for (int j=0; j<swidth; j++) if (data[j]!=255) { fits = false; break; } fits = fits && measured[i]>500; if (r.height>=(GelAnalyzer.plotHeight-11)) fits = true; if (!fits) y = r.y - 2; ip.drawString(s, x, y); //IJ.write(i+": "+x+" "+y+" "+s+" "+ip.StringWidth(s)/2); } imp.updateAndDraw(); displayPercentages(); //Toolbar.getInstance().setTool(Toolbar.RECTANGLE); reset(); } void displayPercentages() { ResultsTable rt = ResultsTable.getResultsTable(); rt.reset(); //IJ.setColumnHeadings(" \tarea\tpercent"); double total = 0.0; for (int i=0; i<counter; i++) total += measured[i]; if (IJ.debugMode && counter==actual.length) { debug(); return; } for (int i=0; i<counter; i++) { double percent = (measured[i]/total)*100; 
rt.incrementCounter(); rt.addValue("Area", measured[i]); rt.addValue("Percent", percent); //IJ.write((i+1)+"\t"+IJ.d2s(measured[i],3)+"\t"+IJ.d2s(percent,3)); } rt.show("Results"); } void debug() { for (int i=0; i<counter; i++) { double a = (actual[i]/actual[0])*100; double m = (measured[i]/measured[0])*100; IJ.write(IJ.d2s(a, 4)+" " +IJ.d2s(m, 4)+" " +IJ.d2s(((m-a)/m)*100, 4)); } } }
{'content_hash': '45d99d3ab2e619f3d412713282cf4e62', 'timestamp': '', 'source': 'github', 'line_count': 647, 'max_line_length': 102, 'avg_line_length': 27.479134466769708, 'alnum_prop': 0.6527926205073401, 'repo_name': 'steliann/objectj', 'id': '0f8c09eea25e6104be743ef05fd143abbef65ab4', 'size': '17779', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/ij/plugin/GelAnalyzer.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '2099'}, {'name': 'Java', 'bytes': '5585426'}]}
/* Includes ------------------------------------------------------------------*/ #include "stm32h7xx_hal.h" /** @addtogroup STM32H7xx_HAL_Driver * @{ */ /** @defgroup FLASHEx FLASHEx * @brief FLASH HAL Extension module driver * @{ */ #ifdef HAL_FLASH_MODULE_ENABLED /* Private typedef -----------------------------------------------------------*/ /* Private define ------------------------------------------------------------*/ /** @addtogroup FLASHEx_Private_Constants * @{ */ #define FLASH_TIMEOUT_VALUE 50000U/* 50 s */ /** * @} */ /* Private macro -------------------------------------------------------------*/ /* Private variables ---------------------------------------------------------*/ extern FLASH_ProcessTypeDef pFlash; /* Private function prototypes -----------------------------------------------*/ static void FLASH_MassErase(uint32_t VoltageRange, uint32_t Banks); void FLASH_Erase_Sector(uint32_t Sector, uint32_t Bank, uint32_t VoltageRange); static HAL_StatusTypeDef FLASH_OB_EnableWRP(uint32_t WRPSector, uint32_t Banks); static HAL_StatusTypeDef FLASH_OB_DisableWRP(uint32_t WRPSector, uint32_t Bank); static void FLASH_OB_GetWRP(uint32_t *WRPState, uint32_t *WRPSector, uint32_t Bank); static HAL_StatusTypeDef FLASH_OB_RDPConfig(uint32_t RDPLevel); static uint32_t FLASH_OB_GetRDP(void); static HAL_StatusTypeDef FLASH_OB_PCROPConfig(uint32_t PCROConfigRDP, uint32_t PCROPStartAddr, uint32_t PCROPEndAddr, uint32_t Banks); static void FLASH_OB_GetPCROP(uint32_t *PCROPConfig, uint32_t *PCROPStartAddr,uint32_t *PCROPEndAddr, uint32_t Bank); static HAL_StatusTypeDef FLASH_OB_BOR_LevelConfig(uint8_t Level); static uint32_t FLASH_OB_GetBOR(void); static HAL_StatusTypeDef FLASH_OB_UserConfig(uint32_t UserType, uint32_t UserConfig); static uint32_t FLASH_OB_GetUser(void); static HAL_StatusTypeDef FLASH_OB_BootAddConfig(uint32_t BootOption, uint32_t BootAddress0, uint32_t BootAddress1); static void FLASH_OB_GetBootAdd(uint32_t *BootAddress0, uint32_t *BootAddress1); static 
HAL_StatusTypeDef FLASH_OB_SecureAreaConfig(uint32_t SecureAreaConfig, uint32_t SecureAreaStartAddr, uint32_t SecureAreaEndAddr, uint32_t Banks); static void FLASH_OB_GetSecureArea(uint32_t *SecureAreaConfig, uint32_t *SecureAreaStartAddr, uint32_t *SecureAreaEndAddr, uint32_t Bank); /* Private functions ---------------------------------------------------------*/ /** @defgroup FLASHEx_Private_Functions Extended FLASH Private functions * @{ */ /** @defgroup FLASHEx_Group1 Extended IO operation functions * @brief Extended IO operation functions * @verbatim =============================================================================== ##### Extended programming operation functions ##### =============================================================================== [..] This subsection provides a set of functions allowing to manage the Extension FLASH programming operations Operations. @endverbatim * @{ */ /** * @brief Perform a mass erase or erase the specified FLASH memory sectors * @param[in] pEraseInit pointer to an FLASH_EraseInitTypeDef structure that * contains the configuration information for the erasing. 
* * @param[out] SectorError pointer to variable that * contains the configuration information on faulty sector in case of error * (0xFFFFFFFF means that all the sectors have been correctly erased) * * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Erase(FLASH_EraseInitTypeDef *pEraseInit, uint32_t *SectorError) { HAL_StatusTypeDef status = HAL_OK; uint32_t index = 0; /* Process Locked */ __HAL_LOCK(&pFlash); /* Check the parameters */ assert_param(IS_FLASH_TYPEERASE(pEraseInit->TypeErase)); assert_param(IS_VOLTAGERANGE(pEraseInit->VoltageRange)); assert_param(IS_FLASH_BANK(pEraseInit->Banks)); /* Wait for last operation to be completed */ if((pEraseInit->Banks & FLASH_BANK_1) == FLASH_BANK_1) { status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); } if((pEraseInit->Banks & FLASH_BANK_2) == FLASH_BANK_2) { status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } if(status == HAL_OK) { /*Initialization of SectorError variable*/ *SectorError = 0xFFFFFFFF; if(pEraseInit->TypeErase == FLASH_TYPEERASE_MASSERASE) { /*Mass erase to be done*/ FLASH_MassErase(pEraseInit->VoltageRange, pEraseInit->Banks); /* Wait for last operation to be completed */ if((pEraseInit->Banks & FLASH_BANK_1) == FLASH_BANK_1) { status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); /* if the erase operation is completed, disable the Bank1 BER Bit */ FLASH->CR1 &= (~FLASH_CR_BER); } if((pEraseInit->Banks & FLASH_BANK_2) == FLASH_BANK_2) { status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); /* if the erase operation is completed, disable the Bank2 BER Bit */ FLASH->CR2 &= (~FLASH_CR_BER); } } else { /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(pEraseInit->Banks)); assert_param(IS_FLASH_NBSECTORS(pEraseInit->NbSectors + pEraseInit->Sector)); /* Erase by sector by sector to be done*/ for(index = pEraseInit->Sector; index < (pEraseInit->NbSectors + 
pEraseInit->Sector); index++) { FLASH_Erase_Sector(index, pEraseInit->Banks, pEraseInit->VoltageRange); if((pEraseInit->Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); /* If the erase operation is completed, disable the SER Bit */ FLASH->CR1 &= (~(FLASH_CR_SER | FLASH_CR_SNB)); } if((pEraseInit->Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); /* If the erase operation is completed, disable the SER Bit */ FLASH->CR2 &= (~(FLASH_CR_SER | FLASH_CR_SNB)); } if(status != HAL_OK) { /* In case of error, stop erase procedure and return the faulty sector*/ *SectorError = index; break; } } } } /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } /** * @brief Perform a mass erase or erase the specified FLASH memory sectors with interrupt enabled * @param pEraseInit pointer to an FLASH_EraseInitTypeDef structure that * contains the configuration information for the erasing. 
* * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Erase_IT(FLASH_EraseInitTypeDef *pEraseInit) { HAL_StatusTypeDef status = HAL_OK; /* Process Locked */ __HAL_LOCK(&pFlash); /* Check the parameters */ assert_param(IS_FLASH_TYPEERASE(pEraseInit->TypeErase)); assert_param(IS_VOLTAGERANGE(pEraseInit->VoltageRange)); assert_param(IS_FLASH_BANK(pEraseInit->Banks)); if((pEraseInit->Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Clear bank 1 pending flags (if any) */ __HAL_FLASH_CLEAR_FLAG_BANK1(FLASH_FLAG_EOP_BANK1 | FLASH_FLAG_ALL_ERRORS_BANK1); /* Enable End of Operation and Error interrupts for Bank 1 */ __HAL_FLASH_ENABLE_IT_BANK1(FLASH_IT_EOP_BANK1 | FLASH_IT_WRPERR_BANK1 | FLASH_IT_PGSERR_BANK1 | \ FLASH_IT_STRBERR_BANK1 | FLASH_IT_INCERR_BANK1 | FLASH_IT_OPERR_BANK1); } if((pEraseInit->Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Clear bank 2 pending flags (if any) */ __HAL_FLASH_CLEAR_FLAG_BANK2(FLASH_FLAG_EOP_BANK2 | FLASH_FLAG_ALL_ERRORS_BANK2); /* Enable End of Operation and Error interrupts for Bank 2 */ __HAL_FLASH_ENABLE_IT_BANK2(FLASH_IT_EOP_BANK2 | FLASH_IT_WRPERR_BANK2 | FLASH_IT_PGSERR_BANK2 | \ FLASH_IT_STRBERR_BANK2 | FLASH_IT_INCERR_BANK2 | FLASH_IT_OPERR_BANK2); } if(pEraseInit->TypeErase == FLASH_TYPEERASE_MASSERASE) { /*Mass erase to be done*/ if(pEraseInit->Banks == FLASH_BANK_1) { pFlash.ProcedureOnGoing = FLASH_PROC_MASSERASE_BANK1; } else if(pEraseInit->Banks == FLASH_BANK_2) { pFlash.ProcedureOnGoing = FLASH_PROC_MASSERASE_BANK2; } else { pFlash.ProcedureOnGoing = FLASH_PROC_ALLBANK_MASSERASE; } FLASH_MassErase(pEraseInit->VoltageRange, pEraseInit->Banks); } else { /* Erase by sector to be done*/ /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(pEraseInit->Banks)); assert_param(IS_FLASH_NBSECTORS(pEraseInit->NbSectors + pEraseInit->Sector)); if(pEraseInit->Banks == FLASH_BANK_1) { pFlash.ProcedureOnGoing = FLASH_PROC_SECTERASE_BANK1; } else { pFlash.ProcedureOnGoing = FLASH_PROC_SECTERASE_BANK2; } pFlash.NbSectorsToErase = 
pEraseInit->NbSectors; pFlash.Sector = pEraseInit->Sector; pFlash.VoltageForErase = pEraseInit->VoltageRange; /*Erase 1st sector and wait for IT*/ FLASH_Erase_Sector(pEraseInit->Sector, pEraseInit->Banks, pEraseInit->VoltageRange); } return status; } /** * @brief Program option bytes * @param pOBInit pointer to an FLASH_OBInitStruct structure that * contains the configuration information for the programming. * * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_OBProgram(FLASH_OBProgramInitTypeDef *pOBInit) { HAL_StatusTypeDef status = HAL_OK; /* Process Locked */ __HAL_LOCK(&pFlash); /* Check the parameters */ assert_param(IS_OPTIONBYTE(pOBInit->OptionType)); pFlash.ErrorCode = HAL_FLASH_ERROR_NONE; /*Write protection configuration*/ if((pOBInit->OptionType & OPTIONBYTE_WRP) == OPTIONBYTE_WRP) { assert_param(IS_WRPSTATE(pOBInit->WRPState)); assert_param(IS_FLASH_BANK(pOBInit->Banks)); if(pOBInit->WRPState == OB_WRPSTATE_ENABLE) { /*Enable of Write protection on the selected Sector*/ status = FLASH_OB_EnableWRP(pOBInit->WRPSector,pOBInit->Banks); } else { /*Disable of Write protection on the selected Sector*/ status = FLASH_OB_DisableWRP(pOBInit->WRPSector, pOBInit->Banks); } if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /* Read protection configuration */ if((pOBInit->OptionType & OPTIONBYTE_RDP) != RESET) { /* Configure the Read protection level */ status = FLASH_OB_RDPConfig(pOBInit->RDPLevel); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /* User Configuration */ if((pOBInit->OptionType & OPTIONBYTE_USER) != RESET) { /* Configure the user option bytes */ status = FLASH_OB_UserConfig(pOBInit->USERType, pOBInit->USERConfig); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /* PCROP Configuration */ if((pOBInit->OptionType & OPTIONBYTE_PCROP) != RESET) { assert_param(IS_FLASH_BANK(pOBInit->Banks)); /*Configure the Proprietary code readout 
protection */ status = FLASH_OB_PCROPConfig(pOBInit->PCROPConfig, pOBInit->PCROPStartAddr, pOBInit->PCROPEndAddr, pOBInit->Banks); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /*BOR Level configuration*/ if((pOBInit->OptionType & OPTIONBYTE_BOR) == OPTIONBYTE_BOR) { status = FLASH_OB_BOR_LevelConfig(pOBInit->BORLevel); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /*Boot Address configuration*/ if((pOBInit->OptionType & OPTIONBYTE_BOOTADD) == OPTIONBYTE_BOOTADD) { status = FLASH_OB_BootAddConfig(pOBInit->BootConfig, pOBInit->BootAddr0, pOBInit->BootAddr1); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /*Bank1 secure area configuration*/ if((pOBInit->OptionType & OPTIONBYTE_SECURE_AREA) == OPTIONBYTE_SECURE_AREA) { status = FLASH_OB_SecureAreaConfig(pOBInit->SecureAreaConfig, pOBInit->SecureAreaStartAddr, pOBInit->SecureAreaEndAddr,pOBInit->Banks); if(status != HAL_OK) { /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } } /* Process Unlocked */ __HAL_UNLOCK(&pFlash); return status; } /** * @brief Get the Option byte configuration * @note The parameter Banks of the pOBInit structure must be exclusively FLASH_BANK_1 or FLASH_BANK_2 as this parameter is use to get the given Bank WRP, PCROP and secured area. * @param pOBInit pointer to an FLASH_OBInitStruct structure that * contains the configuration information for the programming. 
* * @retval None */ void HAL_FLASHEx_OBGetConfig(FLASH_OBProgramInitTypeDef *pOBInit) { /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(pOBInit->Banks)); pOBInit->OptionType = (OPTIONBYTE_WRP | OPTIONBYTE_RDP | \ OPTIONBYTE_USER | OPTIONBYTE_PCROP | \ OPTIONBYTE_BOR | OPTIONBYTE_BOOTADD | \ OPTIONBYTE_SECURE_AREA); /* Get write protection on the selected area */ FLASH_OB_GetWRP(&(pOBInit->WRPState), &(pOBInit->WRPSector), pOBInit->Banks); /* Get Read protection level */ pOBInit->RDPLevel = FLASH_OB_GetRDP(); /* Get the user option bytes */ pOBInit->USERConfig = FLASH_OB_GetUser(); /* Get the Proprietary code readout protection */ FLASH_OB_GetPCROP(&(pOBInit->PCROPConfig), &(pOBInit->PCROPStartAddr), &(pOBInit->PCROPEndAddr), pOBInit->Banks); /*Get BOR Level*/ pOBInit->BORLevel = FLASH_OB_GetBOR(); /*Get Boot Address*/ FLASH_OB_GetBootAdd(&(pOBInit->BootAddr0), &(pOBInit->BootAddr1)); /*Get Bank Secure area*/ FLASH_OB_GetSecureArea(&(pOBInit->SecureAreaConfig), &(pOBInit->SecureAreaStartAddr), &(pOBInit->SecureAreaEndAddr), pOBInit->Banks); } /** * @brief Unlock the FLASH Bank1 control registers access * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Unlock_Bank1(void) { if(READ_BIT(FLASH->CR1, FLASH_CR_LOCK) != RESET) { /* Authorize the FLASH A Registers access */ WRITE_REG(FLASH->KEYR1, FLASH_KEY1); WRITE_REG(FLASH->KEYR1, FLASH_KEY2); } else { return HAL_ERROR; } return HAL_OK; } /** * @brief Locks the FLASH Bank1 control registers access * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Lock_Bank1(void) { /* Set the LOCK Bit to lock the FLASH A Registers access */ SET_BIT(FLASH->CR1, FLASH_CR_LOCK); return HAL_OK; } /** * @brief Unlock the FLASH Bank2 control registers access * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Unlock_Bank2(void) { if(READ_BIT(FLASH->CR2, FLASH_CR_LOCK) != RESET) { /* Authorize the FLASH A Registers access */ WRITE_REG(FLASH->KEYR2, FLASH_KEY1); WRITE_REG(FLASH->KEYR2, FLASH_KEY2); } else { return 
HAL_ERROR; } return HAL_OK; } /** * @brief Locks the FLASH Bank2 control registers access * @retval HAL Status */ HAL_StatusTypeDef HAL_FLASHEx_Lock_Bank2(void) { /* Set the LOCK Bit to lock the FLASH A Registers access */ SET_BIT(FLASH->CR2, FLASH_CR_LOCK); return HAL_OK; } /** * @brief Full erase of FLASH memory sectors * @param VoltageRange The device program/erase parallelism. * This parameter can be one of the following values: * @arg FLASH_VOLTAGE_RANGE_1 : Flash program/erase by 8 bits * @arg FLASH_VOLTAGE_RANGE_2 : Flash program/erase by 16 bits * @arg FLASH_VOLTAGE_RANGE_3 : Flash program/erase by 32 bits * @arg FLASH_VOLTAGE_RANGE_4 : Flash program/erase by 64 bits * * @param Banks Banks to be erased * This parameter can be one of the following values: * @arg FLASH_BANK_1: Bank1 to be erased * @arg FLASH_BANK_2: Bank2 to be erased * @arg FLASH_BANK_BOTH: Bank1 and Bank2 to be erased * * @retval HAL Status */ static void FLASH_MassErase(uint32_t VoltageRange, uint32_t Banks) { /* Check the parameters */ assert_param(IS_FLASH_BANK(Banks)); assert_param(IS_VOLTAGERANGE(VoltageRange)); /* proceed to erase all sectors */ if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* reset Program/erase VoltageRange for Bank1 */ FLASH->CR1 &= (~FLASH_CR_PSIZE); /* Bank1 will be erased, and set voltage range*/ FLASH->CR1 |= FLASH_CR_BER | VoltageRange; FLASH->CR1 |= FLASH_CR_START; } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* reset Program/erase VoltageRange for Bank2 */ FLASH->CR2 &= (~FLASH_CR_PSIZE); /* Bank2 will be erased, and set voltage range*/ FLASH->CR2 |= FLASH_CR_BER | VoltageRange; FLASH->CR2 |= FLASH_CR_START; } } /** * @brief Erase the specified FLASH memory sector * @param Sector FLASH sector to erase * @param Banks Banks to be erased * This parameter can be one of the following values: * @arg FLASH_BANK_1: Bank1 to be erased * @arg FLASH_BANK_2: Bank2 to be erased * @arg FLASH_BANK_BOTH: Bank1 and Bank2 to be erased * @param VoltageRange The device 
program/erase parallelism. * This parameter can be one of the following values: * @arg FLASH_VOLTAGE_RANGE_1 : Flash program/erase by 8 bits * @arg FLASH_VOLTAGE_RANGE_2 : Flash program/erase by 16 bits * @arg FLASH_VOLTAGE_RANGE_3 : Flash program/erase by 32 bits * @arg FLASH_VOLTAGE_RANGE_4 : Flash program/erase by 62 bits * * @retval None */ void FLASH_Erase_Sector(uint32_t Sector, uint32_t Banks, uint32_t VoltageRange) { assert_param(IS_FLASH_BANK_EXCLUSIVE(Banks)); assert_param(IS_VOLTAGERANGE(VoltageRange)); assert_param(IS_FLASH_SECTOR(Sector)); if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* reset Program/erase VoltageRange for Bank1 */ FLASH->CR1 &= ~(FLASH_CR_PSIZE | FLASH_CR_SNB); FLASH->CR1 |= (FLASH_CR_SER | VoltageRange | (Sector << POSITION_VAL(FLASH_CR_SNB))); FLASH->CR1 |= FLASH_CR_START; } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* reset Program/erase VoltageRange for Bank2 */ FLASH->CR2 &= ~(FLASH_CR_PSIZE | FLASH_CR_SNB); FLASH->CR2 |= (FLASH_CR_SER | VoltageRange | (Sector << POSITION_VAL(FLASH_CR_SNB))); FLASH->CR2 |= FLASH_CR_START; } } /** * @brief Enable the write protection of the desired bank1 or bank 2 sectors * @param WRPSector specifies the sector(s) to be write protected. 
* This parameter can be one of the following values: * @arg WRPSector: A combination of OB_WRP_SECTOR_0 to OB_WRP_SECTOR_0 or OB_WRP_SECTOR_All * * @param Banks the specific bank to apply WRP sectors * This parameter can be one of the following values: * @arg FLASH_BANK_1: WRP enable on specified bank1 sectors * @arg FLASH_BANK_2: WRP enable on specified bank2 sectors * @arg FLASH_BANK_BOTH: WRP enable bank1 and bank2 specified sectors * * @retval HAL FLASH State */ static HAL_StatusTypeDef FLASH_OB_EnableWRP(uint32_t WRPSector, uint32_t Banks) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_FLASH_BANK(Banks)); if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { assert_param(IS_OB_WRP_SECTOR(WRPSector)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); if(status == HAL_OK) { FLASH->WPSN_PRG1 &= (~(WRPSector & FLASH_WPSN_WRPSN)); } } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { assert_param(IS_OB_WRP_SECTOR(WRPSector)); /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { FLASH->WPSN_PRG2 &= (~(WRPSector & FLASH_WPSN_WRPSN)); } } if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } return status; } /** * @brief Disable the write protection of the desired bank1 or bank 2 sectors * @param WRPSector specifies the sector(s) to disable write protection. 
* This parameter can be one of the following values: * @arg WRPSector: A combination of FLASH_OB_WRP_SECTOR_0 to FLASH_OB_WRP_SECTOR_7 or FLASH_OB_WRP_SECTOR_All * * @param Banks the specific bank to apply WRP sectors * This parameter can be one of the following values: * @arg FLASH_BANK_1: WRP disable on specified bank1 sectors * @arg FLASH_BANK_2: WRP disable on specified bank2 sectors * @arg FLASH_BANK_BOTH: WRP disable bank1 and bank2 specified sectors * * @retval HAL FLASH State */ static HAL_StatusTypeDef FLASH_OB_DisableWRP(uint32_t WRPSector, uint32_t Banks) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_FLASH_BANK(Banks)); assert_param(IS_OB_WRP_SECTOR(WRPSector)); if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); if(status == HAL_OK) { FLASH->WPSN_PRG1 |= (WRPSector & FLASH_WPSN_WRPSN); } } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { FLASH->WPSN_PRG2 |= (WRPSector & FLASH_WPSN_WRPSN); } } if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } return status; } /** * @brief Get the write protection of the given bank1 or bank 2 sectors * @param WRPState gives the write protection state on the given bank . * This parameter can be one of the following values: * @arg WRPState: OB_WRPSTATE_DISABLE or OB_WRPSTATE_ENABLE * @param WRPSector gives the write protected sector(s) on the given bank . 
* This parameter can be one of the following values: * @arg WRPSector: A combination of FLASH_OB_WRP_SECTOR_0 to FLASH_OB_WRP_SECTOR_7 or FLASH_OB_WRP_SECTOR_All * * @param Bank the specific bank to apply WRP sectors * This parameter can be exclusively one of the following values: * @arg FLASH_BANK_1: Get bank1 WRP sectors * @arg FLASH_BANK_2: Get bank2 WRP sectors * @arg FLASH_BANK_BOTH: note allowed in this functions * * @retval HAL FLASH State */ static void FLASH_OB_GetWRP(uint32_t *WRPState, uint32_t *WRPSector, uint32_t Bank) { uint32_t regvalue = 0; /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(Bank)); if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_1) { regvalue = FLASH->WPSN_CUR1; } if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_2) { regvalue = FLASH->WPSN_CUR2; } (*WRPSector) = (~(regvalue & FLASH_WPSN_WRPSN)) & FLASH_WPSN_WRPSN; if(*WRPSector == 0) { (*WRPState) = OB_WRPSTATE_DISABLE; } else { (*WRPState) = OB_WRPSTATE_ENABLE; } } /** * @brief Set the read protection level. * * @note To configure the RDP level, the option lock bit OPTLOCK must be * cleared with the call of the HAL_FLASH_OB_Unlock() function. * @note To validate the RDP level, the option bytes must be reloaded * through the call of the HAL_FLASH_OB_Launch() function. * @note !!! Warning : When enabling OB_RDP level 2 it's no more possible * to go back to level 1 or 0 !!! * * @param RDPLevel specifies the read protection level. 
* This parameter can be one of the following values: * @arg OB_RDP_LEVEL_0: No protection * @arg OB_RDP_LEVEL_1: Read protection of the memory * @arg OB_RDP_LEVEL_2: Full chip protection * * @retval HAL status */ static HAL_StatusTypeDef FLASH_OB_RDPConfig(uint32_t RDPLevel) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_OB_RDP_LEVEL(RDPLevel)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { /* Configure the RDP level in the option bytes register */ MODIFY_REG(FLASH->OPTSR_PRG, FLASH_OPTSR_RDP, RDPLevel); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } return status; } /** * @brief Get the read protection level. * @retval RDPLevel specifies the read protection level. * This parameter can be one of the following values: * @arg OB_RDP_LEVEL_0: No protection * @arg OB_RDP_LEVEL_1: Read protection of the memory * @arg OB_RDP_LEVEL_2: Full chip protection */ static uint32_t FLASH_OB_GetRDP(void) { return (FLASH->OPTSR_CUR & FLASH_OPTSR_RDP); } /** * @brief Program the FLASH User Option Byte. * * @note To configure the user option bytes, the option lock bit OPTLOCK must * be cleared with the call of the HAL_FLASH_OB_Unlock() function. * * @note To validate the user option bytes, the option bytes must be reloaded * through the call of the HAL_FLASH_OB_Launch() function. 
* * @param UserType The FLASH User Option Bytes to be modified : * a combination of @arg FLASH_OB_USER_Type * * @param UserConfig The FLASH User Option Bytes values: * IWDG_SW(Bit4), WWDG_SW(Bit 5), nRST_STOP(Bit 6), nRST_STDY(Bit 7), * FZ_IWDG_STOP(Bit 17), FZ_IWDG_SDBY(Bit 18), ST_RAM_SIZE(Bit[19:20]), * ePcROP_EN(Bit 21), SWAP_BANK_OPT(Bit 31) . * * @retval HAL status */ static HAL_StatusTypeDef FLASH_OB_UserConfig(uint32_t UserType, uint32_t UserConfig) { uint32_t optr_reg_val = 0; uint32_t optr_reg_mask = 0; HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_OB_USER_TYPE(UserType)); /* Wait for OB change operation to be completed */ status = FLASH_OB_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE); if(status == HAL_OK) { if((UserType & OB_USER_IWDG1_SW) != RESET) { /* IWDG_HW option byte should be modified */ assert_param(IS_OB_IWDG1_SOURCE(UserConfig & FLASH_OPTSR_IWDG1_SW)); /* Set value and mask for IWDG_HW option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_IWDG1_SW); optr_reg_mask |= FLASH_OPTSR_IWDG1_SW; } if((UserType & OB_USER_NRST_STOP_D1) != RESET) { /* NRST_STOP option byte should be modified */ assert_param(IS_OB_STOP_D1_RESET(UserConfig & FLASH_OPTSR_NRST_STOP_D1)); /* Set value and mask for NRST_STOP option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_NRST_STOP_D1); optr_reg_mask |= FLASH_OPTSR_NRST_STOP_D1; } if((UserType & OB_USER_NRST_STDBY_D1) != RESET) { /* NRST_STDBY option byte should be modified */ assert_param(IS_OB_STDBY_D1_RESET(UserConfig & FLASH_OPTSR_NRST_STBY_D1)); /* Set value and mask for NRST_STDBY option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_NRST_STBY_D1); optr_reg_mask |= FLASH_OPTSR_NRST_STBY_D1; } if((UserType & OB_USER_IWDG_STOP) != RESET) { /* IWDG_STOP option byte should be modified */ assert_param(IS_OB_USER_IWDG_STOP(UserConfig & FLASH_OPTSR_FZ_IWDG_STOP)); /* Set value and mask for IWDG_STOP option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_FZ_IWDG_STOP); 
optr_reg_mask |= FLASH_OPTSR_FZ_IWDG_STOP; } if((UserType & OB_USER_IWDG_STDBY) != RESET) { /* IWDG_STDBY option byte should be modified */ assert_param(IS_OB_USER_IWDG_STDBY(UserConfig & FLASH_OPTSR_FZ_IWDG_SDBY)); /* Set value and mask for IWDG_STDBY option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_FZ_IWDG_SDBY); optr_reg_mask |= FLASH_OPTSR_FZ_IWDG_SDBY; } if((UserType & OB_USER_SECURITY) != RESET) { /* SECURITY option byte should be modified */ assert_param(IS_OB_USER_SECURITY(UserConfig & FLASH_OPTSR_SECURITY)); /* Set value and mask for ePcROP_EN option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_SECURITY); optr_reg_mask |= FLASH_OPTSR_SECURITY; } if((UserType & OB_USER_SWAP_BANK) != RESET) { /* SWAP_BANK_OPT option byte should be modified */ assert_param(IS_OB_USER_SWAP_BANK(UserConfig & FLASH_OPTSR_SWAP_BANK_OPT)); /* Set value and mask for SWAP_BANK_OPT option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_SWAP_BANK_OPT); optr_reg_mask |= FLASH_OPTSR_SWAP_BANK_OPT; } if((UserType & OB_USER_IOHSLV) != RESET) { /* IOHSLV_OPT option byte should be modified */ assert_param(IS_OB_USER_IOHSLV(UserConfig & FLASH_OPTSR_IO_HSLV)); /* Set value and mask for IOHSLV_OPT option byte */ optr_reg_val |= (UserConfig & FLASH_OPTSR_IO_HSLV); optr_reg_mask |= FLASH_OPTSR_IO_HSLV; } /* Configure the option bytes register */ MODIFY_REG(FLASH->OPTSR_PRG, optr_reg_mask, optr_reg_val); } return status; } /** * @brief Return the FLASH User Option Byte value. * @retval The FLASH User Option Bytes values * IWDG_SW(Bit4), WWDG_SW(Bit 5), nRST_STOP(Bit 6), nRST_STDY(Bit 7), * FZ_IWDG_STOP(Bit 17), FZ_IWDG_SDBY(Bit 18), ST_RAM_SIZE(Bit[19:20]), * ePcROP_EN(Bit 21), SWAP_BANK_OPT(Bit 31) . 
*/ static uint32_t FLASH_OB_GetUser(void) { uint32_t userConfig = READ_REG(FLASH->OPTSR_CUR); userConfig &= (~(FLASH_OPTSR_BOR_LEV | FLASH_OPTSR_RDP)); return userConfig; } /** * @brief Configure the Proprietary code readout protection of the desired addresses * * @note To configure the PCROP options, the option lock bit OPTLOCK must be * cleared with the call of the HAL_FLASH_OB_Unlock() function. * @note To validate the PCROP options, the option bytes must be reloaded * through the call of the HAL_FLASH_OB_Launch() function. * * @param PCROPConfig specifies if the PCROP area for the given Bank shall be erased or not * when RDP level decreased from Level 1 to Level 0. * This parameter must be a value of @arg FLASH_OB_PCROP_RDP enumeration * * @param PCROPStartAddr specifies the start address of the Proprietary code readout protection * This parameter can be an address between begin and end of the bank * * @param PCROPEndAddr specifies the end address of the Proprietary code readout protection * This parameter can be an address between PCROPStartAddr and end of the bank * * @param Banks the specific bank to apply PCROP sectors * This parameter can be one of the following values: * @arg FLASH_BANK_1: PCROP on specified bank1 area * @arg FLASH_BANK_2: PCROP on specified bank2 area * @arg FLASH_BANK_BOTH: PCROP on specified bank1 and bank2 area (same config will be applied on both banks) * * @retval HAL Status */ static HAL_StatusTypeDef FLASH_OB_PCROPConfig(uint32_t PCROPConfig, uint32_t PCROPStartAddr, uint32_t PCROPEndAddr, uint32_t Banks) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_FLASH_BANK(Banks)); assert_param(IS_OB_PCROP_RDP(PCROPConfig)); assert_param(IS_FLASH_PROGRAM_ADDRESS(PCROPStartAddr)); assert_param(IS_FLASH_PROGRAM_ADDRESS(PCROPEndAddr)); if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK1(PCROPStartAddr)); assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK1(PCROPEndAddr)); /* Wait for 
last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE,FLASH_BANK_1); if(status == HAL_OK) { /* Configure the Proprietary code readout protection */ FLASH->PRAR_PRG1 = ((PCROPStartAddr - FLASH_BANK1_BASE) >> 8); FLASH->PRAR_PRG1 |= (((PCROPEndAddr - FLASH_BANK1_BASE) >> 8) << POSITION_VAL(FLASH_PRAR_PROT_AREA_END)) ; FLASH->PRAR_PRG1 |= PCROPConfig; /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); } } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK2(PCROPStartAddr)); assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK2(PCROPEndAddr)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE,FLASH_BANK_2); if(status == HAL_OK) { FLASH->PRAR_PRG2 = ((PCROPStartAddr - FLASH_BANK2_BASE) >> 8); FLASH->PRAR_PRG2 |= (((PCROPEndAddr - FLASH_BANK2_BASE) >> 8) << POSITION_VAL(FLASH_PRAR_PROT_AREA_END)) ; FLASH->PRAR_PRG2 |= PCROPConfig; /* Wait for last operation to be completed */ status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } } return status; } /** * @brief Get the Proprietary code readout protection configuration on a given Bank * * @param PCROPConfig gives if the PCROP area for the given Bank shall be erased or not * when RDP level decreased from Level 1 to Level 0 or during a mass erase. 
* * @param PCROPStartAddr gives the start address of the Proprietary code readout protection of the bank * * @param PCROPEndAddr gives the end address of the Proprietary code readout protection of the bank * * @param Bank the specific bank to apply PCROP sectors * This parameter can be exclusively one of the following values: * @arg FLASH_BANK_1: PCROP on specified bank1 area * @arg FLASH_BANK_2: PCROP on specified bank2 area * @arg FLASH_BANK_BOTH: is not allowed here * * @retval HAL Status */ static void FLASH_OB_GetPCROP(uint32_t *PCROPConfig, uint32_t *PCROPStartAddr,uint32_t *PCROPEndAddr, uint32_t Bank) { uint32_t regvalue = 0; uint32_t bankBase = 0; /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(Bank)); if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_1) { regvalue = FLASH->PRAR_CUR1; bankBase = FLASH_BANK1_BASE; } if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_2) { regvalue = FLASH->PRAR_CUR2; bankBase = FLASH_BANK2_BASE; } (*PCROPConfig) = (regvalue & FLASH_PRAR_DMEP); (*PCROPStartAddr) = ((regvalue & FLASH_PRAR_PROT_AREA_START) << 8) + bankBase; (*PCROPEndAddr) = (regvalue & FLASH_PRAR_PROT_AREA_END) >> POSITION_VAL(FLASH_PRAR_PROT_AREA_END) ; (*PCROPEndAddr) = ((*PCROPEndAddr) << 8) + bankBase; } /** * @brief Set the BOR Level. * @param Level specifies the Option Bytes BOR Reset Level. 
* This parameter can be one of the following values: * @arg OB_BOR_LEVEL1: Supply voltage ranges from 1.69V - 1.8V * @arg OB_BOR_LEVEL2: Supply voltage ranges from 1.94V - 2.1V * @arg OB_BOR_LEVEL3: Supply voltage ranges from 2.30V - 2.49V * @retval HAL Status */ static HAL_StatusTypeDef FLASH_OB_BOR_LevelConfig(uint8_t Level) { HAL_StatusTypeDef status = HAL_OK; assert_param(IS_OB_BOR_LEVEL(Level)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { /* Configure BOR_LEV option byte */ MODIFY_REG(FLASH->OPTSR_PRG, FLASH_OPTSR_BOR_LEV, Level ); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } return status; } /** * @brief Get the BOR Level. * @retval The Option Bytes BOR Reset Level. 
* This parameter can be one of the following values: * @arg OB_BOR_LEVEL1: Supply voltage ranges from 1.69V - 1.8V * @arg OB_BOR_LEVEL2: Supply voltage ranges from 1.94V - 2.1V * @arg OB_BOR_LEVEL3: Supply voltage ranges from 2.30V - 2.49V */ static uint32_t FLASH_OB_GetBOR(void) { return (FLASH->OPTSR_CUR & FLASH_OPTSR_BOR_LEV); } /** * @brief Set Boot address * @param BootOption Boot address option byte to be programmed, * This parameter must be a value of @ref FLASHEx_OB_BOOT_OPTION (OB_BOOT_ADD0, OB_BOOT_ADD1 or OB_BOOT_ADD_BOTH) * * @param BootAddress0 Specifies the Boot Address 0 * @param BootAddress1 Specifies the Boot Address 1 * @retval HAL Status */ static HAL_StatusTypeDef FLASH_OB_BootAddConfig(uint32_t BootOption, uint32_t BootAddress0, uint32_t BootAddress1) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_OB_BOOT_ADD_OPTION(BootOption)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { if((BootOption & OB_BOOT_ADD0) == OB_BOOT_ADD0) { /* Check the parameters */ assert_param(IS_BOOT_ADDRESS(BootAddress0)); /* Configure CM7 BOOT ADD0 */ MODIFY_REG(FLASH->BOOT_PRG, FLASH_BOOT_ADD0, (BootAddress0 >> 16)); } if((BootOption & OB_BOOT_ADD1) == OB_BOOT_ADD1) { /* Check the parameters */ assert_param(IS_BOOT_ADDRESS(BootAddress1)); /* Configure CM7 BOOT ADD1 */ MODIFY_REG(FLASH->BOOT_PRG, FLASH_BOOT_ADD1, BootAddress1 ); } /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } return status; } /** * @brief Get Boot address * @param BootAddress0 Specifies the Boot Address 0. * @param BootAddress1 Specifies the Boot Address 1. 
* @retval HAL Status */ static void FLASH_OB_GetBootAdd(uint32_t *BootAddress0, uint32_t *BootAddress1) { uint32_t regvalue = 0; regvalue = FLASH->BOOT_CUR; (*BootAddress0) = (regvalue & FLASH_BOOT_ADD0) << 16; (*BootAddress1) = (regvalue & FLASH_BOOT_ADD1); } /** * @brief Set secure area configuration * @param SecureAreaConfig specify if the secure area will be deleted or not during next mass-erase, * * @param SecureAreaStartAddr Specifies the secure area start address * @param SecureAreaEndAddr Specifies the secure area end address * @param Banks Specifies the Bank * @retval HAL Status */ static HAL_StatusTypeDef FLASH_OB_SecureAreaConfig(uint32_t SecureAreaConfig, uint32_t SecureAreaStartAddr, uint32_t SecureAreaEndAddr, uint32_t Banks) { HAL_StatusTypeDef status = HAL_OK; /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(Banks)); assert_param(IS_OB_SECURE_RDP(SecureAreaConfig)); if((Banks & FLASH_BANK_1) == FLASH_BANK_1) { /* Check the parameters */ assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK1(SecureAreaStartAddr)); assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK1(SecureAreaEndAddr)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); if(status == HAL_OK) { /* Configure the secure area */ FLASH->SCAR_PRG1 = ((SecureAreaStartAddr - FLASH_BANK1_BASE) >> 8); FLASH->SCAR_PRG1 |= (((SecureAreaEndAddr - FLASH_BANK1_BASE) >> 8) << POSITION_VAL(FLASH_SCAR_SEC_AREA_END)) ; FLASH->SCAR_PRG1 |= (SecureAreaConfig & FLASH_SCAR_DMES); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_1); } } if((Banks & FLASH_BANK_2) == FLASH_BANK_2) { /* Check the parameters */ assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK2(SecureAreaStartAddr)); assert_param(IS_FLASH_PROGRAM_ADDRESS_BANK2(SecureAreaEndAddr)); /* Wait for last operation to be completed */ status = FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); if(status == HAL_OK) { /* Configure the secure 
area */ FLASH->SCAR_PRG2 = ((SecureAreaStartAddr - FLASH_BANK2_BASE) >> 8); FLASH->SCAR_PRG2 |= (((SecureAreaEndAddr - FLASH_BANK2_BASE) >> 8) << POSITION_VAL(FLASH_SCAR_SEC_AREA_END)) ; FLASH->SCAR_PRG2 |= (SecureAreaConfig & FLASH_SCAR_DMES); status |= FLASH_WaitForLastOperation((uint32_t)FLASH_TIMEOUT_VALUE, FLASH_BANK_2); } } return status; } /** * @brief Set secure area configuration * @param SecureAreaConfig specify if the secure area will be deleted or not during next mass-erase, * * @param SecureAreaStartAddr Specifies the secure area start address * @param SecureAreaEndAddr Specifies the secure area end address * @param Bank Specifies the Bank * @retval HAL Status */ static void FLASH_OB_GetSecureArea(uint32_t *SecureAreaConfig, uint32_t *SecureAreaStartAddr, uint32_t *SecureAreaEndAddr, uint32_t Bank) { uint32_t regvalue = 0; uint32_t bankBase = 0; /* Check the parameters */ assert_param(IS_FLASH_BANK_EXCLUSIVE(Bank)); if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_1) { regvalue = FLASH->SCAR_CUR1; bankBase = FLASH_BANK1_BASE; } if((Bank & FLASH_BANK_BOTH) == FLASH_BANK_2) { regvalue = FLASH->SCAR_CUR2; bankBase = FLASH_BANK2_BASE; } (*SecureAreaConfig) = (regvalue & FLASH_SCAR_DMES); (*SecureAreaStartAddr) = ((regvalue & FLASH_SCAR_SEC_AREA_START) << 8) + bankBase; (*SecureAreaEndAddr) = (regvalue & FLASH_SCAR_SEC_AREA_END) >> POSITION_VAL(FLASH_SCAR_SEC_AREA_END) ; (*SecureAreaEndAddr) = ((*SecureAreaEndAddr) << 8) + bankBase; } /** * @} */ /** * @} */ #endif /* HAL_FLASH_MODULE_ENABLED */ /** * @} */ /** * @} */ /************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
{'content_hash': '7f2db0dd5471ad1600be8f3ac4019a4f', 'timestamp': '', 'source': 'github', 'line_count': 1279, 'max_line_length': 152, 'avg_line_length': 34.213448006254886, 'alnum_prop': 0.641468040860166, 'repo_name': 'yongli3/rt-thread', 'id': '576d12a14e5e254fb34ce891d3845b353db83347', 'size': '47881', 'binary': False, 'copies': '8', 'ref': 'refs/heads/master', 'path': 'bsp/stm32h743-nucleo/Libraries/STM32H7xx_HAL_Driver/Src/stm32h7xx_hal_flash_ex.c', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Assembly', 'bytes': '11333158'}, {'name': 'Batchfile', 'bytes': '11246'}, {'name': 'C', 'bytes': '531459994'}, {'name': 'C++', 'bytes': '4923297'}, {'name': 'CMake', 'bytes': '23011'}, {'name': 'CSS', 'bytes': '9978'}, {'name': 'DIGITAL Command Language', 'bytes': '13234'}, {'name': 'GDB', 'bytes': '11796'}, {'name': 'HTML', 'bytes': '4369259'}, {'name': 'Lex', 'bytes': '7026'}, {'name': 'Logos', 'bytes': '7078'}, {'name': 'M4', 'bytes': '17515'}, {'name': 'Makefile', 'bytes': '256896'}, {'name': 'Module Management System', 'bytes': '1548'}, {'name': 'Objective-C', 'bytes': '4097279'}, {'name': 'PAWN', 'bytes': '1427'}, {'name': 'Perl', 'bytes': '6931'}, {'name': 'Python', 'bytes': '983462'}, {'name': 'RPC', 'bytes': '14162'}, {'name': 'Roff', 'bytes': '4486'}, {'name': 'Ruby', 'bytes': '869'}, {'name': 'Shell', 'bytes': '407071'}, {'name': 'TeX', 'bytes': '3113'}, {'name': 'Yacc', 'bytes': '16084'}]}
<?php
namespace OCP\SystemTag;

/**
 * Public interface for a system-wide tag.
 *
 * @since 9.0.0
 */
interface ISystemTag {
	/**
	 * Returns the tag id
	 *
	 * @return string id
	 *
	 * @since 9.0.0
	 */
	public function getId();

	/**
	 * Returns the tag display name
	 *
	 * @return string tag display name
	 *
	 * @since 9.0.0
	 */
	public function getName();

	/**
	 * Returns whether the tag is visible for regular users
	 *
	 * @return bool true if visible, false otherwise
	 *
	 * @since 9.0.0
	 */
	public function isUserVisible();

	/**
	 * Returns whether the tag can be assigned to objects by regular users
	 *
	 * @return bool true if assignable, false otherwise
	 *
	 * @since 9.0.0
	 */
	public function isUserAssignable();

	/**
	 * Returns whether the tag can be edited by regular users
	 *
	 * @return bool true if editable, false otherwise
	 *
	 * @since 10.0.11
	 */
	public function isUserEditable();
}
{'content_hash': '98ac34ad0f663dc6b9fa598c2659c59d', 'timestamp': '', 'source': 'github', 'line_count': 57, 'max_line_length': 71, 'avg_line_length': 16.49122807017544, 'alnum_prop': 0.6382978723404256, 'repo_name': 'phil-davis/core', 'id': 'a25fc1fb49ce187ccdf9bba9281d615505cae888', 'size': '1685', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/public/SystemTag/ISystemTag.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Dockerfile', 'bytes': '262'}, {'name': 'Makefile', 'bytes': '473'}, {'name': 'Shell', 'bytes': '8644'}]}
import { get, uniq } from "lodash"; import { Dictionary, Identifier, ScopeDeclarationBodyItem, VariableDeclaration, Execute, } from "farmbot"; import { SequenceResource as Sequence, } from "farmbot/dist/resources/api_resources"; import { maybeTagStep } from "../../resources/sequence_tagging"; import { NOTHING_SELECTED } from "./handle_select"; // ======= TYPE DECLARATIONS ======= /** Less strict version of CeleryScript args. It's traversable, or unknown. */ type Args = Dictionary<Traversable | unknown>; type Body = Traversable[] | undefined; /** Less strict CeleryScript node used for the sake of recursion. */ export interface Traversable { kind: string; args: Args; body?: Body; } type TreeClimberCB = (item: Traversable) => void; type InterestingNodes = Identifier | Execute; // ======= END TYPE DECLARATIONS ======= // ======= CONST / LITERAL / DYNAMIC KEY DECLARATIONS ======= const ARGS: keyof InterestingNodes = "args"; const KIND: keyof InterestingNodes = "kind"; const IDENTIFIER: Identifier["kind"] = "identifier"; const EXECUTE: Execute["kind"] = "execute"; const OBJECT = "object"; const STRING = "string"; // ======= END CONST / LITERAL DECLARATIONS ======= /** Is it a fully-formed CeleryScript node? Can we continue recursing? */ const isTraversable = (x: unknown): x is Traversable => { const hasKind = typeof get(x, KIND, -1) == STRING; const hasArgs = typeof get(x, ARGS, -1) == OBJECT; return hasKind && hasArgs; }; /** Is it a variable (identifier)? */ const isIdentifier = (x: Traversable): x is Identifier => (x.kind === IDENTIFIER); /** Is it an execute block? */ const isExecute = (x: Traversable): x is Execute => { return !!((x.kind === EXECUTE) && (x as Execute).args.sequence_id); }; const newVar = (label: string): VariableDeclaration => ({ kind: "variable_declaration", args: { label, data_value: NOTHING_SELECTED } }); function climb(t: Traversable | unknown, cb: TreeClimberCB) { const climbArgs = /** RECURSION ALERT! 
*/ (a: Args) => Object.keys(a).map(arg => climb(a[arg], cb)); const climbBody = /** WEE OOO WEE OO */ (body: Traversable[]) => body.map(item => climb(item, cb)); if (isTraversable(t)) { t.body = t.body || []; climbArgs(t.args); climbBody(t.body); cb(t); } } interface SanitizationResult { thisSequence: Sequence; callsTheseSequences: number[]; } /* 1. Recursively tag all CeleryScript nodes with a `uuid` property to * prevent subtle React issues. SEE: Explanation in `sequence_tagging.ts` * 2. Add unbound variables to `locals` declaration (prevent NPEs). * 3. Remove unused variables from `locals` declaration. */ export const sanitizeNodes = (thisSequence: Sequence): SanitizationResult => { // Collect all *declared* variables. Required for fixing unbound vars. const declared: Dictionary<ScopeDeclarationBodyItem> = {}; (thisSequence.args.locals.body || []).map(var_ => declared[var_.args.label] = var_); const { id } = thisSequence; // Collect all *referenced* variables. Required for removing unused vars. const used: Dictionary<Identifier> = {}; const collectUniqVariables = (_id: Identifier) => used[_id.args.label] = _id; const idList: number[] = []; climb(thisSequence, node => { maybeTagStep(node); isIdentifier(node) && collectUniqVariables(node); if (isExecute(node)) { const { sequence_id } = node.args; // Recursion does not qualify as "in_use" (sequence_id != id) && idList.push(sequence_id); } }); // Add unbound variables to locals array. Unused variables magically disappear thisSequence.args.locals.body = Object.values(used) .map(({ args }) => declared[args.label] || newVar(args.label)) .map(node => { maybeTagStep(node); return node; }); return { thisSequence, callsTheseSequences: uniq(idList) }; };
{'content_hash': '0c5d27b4d07be21d35236bca0445498e', 'timestamp': '', 'source': 'github', 'line_count': 105, 'max_line_length': 86, 'avg_line_length': 36.61904761904762, 'alnum_prop': 0.6712613784135241, 'repo_name': 'gabrielburnworth/Farmbot-Web-App', 'id': '30b2791a9e4ba4767f5f3ccf4a2c44c39e4357aa', 'size': '3845', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'frontend/sequences/locals_list/sanitize_nodes.ts', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '92635'}, {'name': 'Dockerfile', 'bytes': '966'}, {'name': 'HTML', 'bytes': '96279'}, {'name': 'JavaScript', 'bytes': '14963'}, {'name': 'Ruby', 'bytes': '743494'}, {'name': 'Shell', 'bytes': '4027'}, {'name': 'TypeScript', 'bytes': '2370182'}]}
// FAKIcon subclass exposing one factory per glyph of the GitHub Octicons
// icon font. Each factory returns an instance rendering the named glyph at
// the given point size. Do not edit by hand — regenerate instead.
@interface FAKOcticons : FAKIcon
// Generated Code
+ (instancetype)alertIconWithSize:(CGFloat)size;
+ (instancetype)alignmentAlignIconWithSize:(CGFloat)size;
+ (instancetype)alignmentAlignedToIconWithSize:(CGFloat)size;
+ (instancetype)alignmentUnalignIconWithSize:(CGFloat)size;
+ (instancetype)arrowDownIconWithSize:(CGFloat)size;
+ (instancetype)arrowLeftIconWithSize:(CGFloat)size;
+ (instancetype)arrowRightIconWithSize:(CGFloat)size;
+ (instancetype)arrowSmallDownIconWithSize:(CGFloat)size;
+ (instancetype)arrowSmallLeftIconWithSize:(CGFloat)size;
+ (instancetype)arrowSmallRightIconWithSize:(CGFloat)size;
+ (instancetype)arrowSmallUpIconWithSize:(CGFloat)size;
+ (instancetype)arrowUpIconWithSize:(CGFloat)size;
+ (instancetype)beerIconWithSize:(CGFloat)size;
+ (instancetype)bookIconWithSize:(CGFloat)size;
+ (instancetype)bookmarkIconWithSize:(CGFloat)size;
+ (instancetype)briefcaseIconWithSize:(CGFloat)size;
+ (instancetype)broadcastIconWithSize:(CGFloat)size;
+ (instancetype)browserIconWithSize:(CGFloat)size;
+ (instancetype)bugIconWithSize:(CGFloat)size;
+ (instancetype)calendarIconWithSize:(CGFloat)size;
+ (instancetype)checkIconWithSize:(CGFloat)size;
+ (instancetype)checklistIconWithSize:(CGFloat)size;
+ (instancetype)chevronDownIconWithSize:(CGFloat)size;
+ (instancetype)chevronLeftIconWithSize:(CGFloat)size;
+ (instancetype)chevronRightIconWithSize:(CGFloat)size;
+ (instancetype)chevronUpIconWithSize:(CGFloat)size;
+ (instancetype)circleSlashIconWithSize:(CGFloat)size;
+ (instancetype)circuitBoardIconWithSize:(CGFloat)size;
+ (instancetype)clippyIconWithSize:(CGFloat)size;
+ (instancetype)clockIconWithSize:(CGFloat)size;
+ (instancetype)cloudDownloadIconWithSize:(CGFloat)size;
+ (instancetype)cloudUploadIconWithSize:(CGFloat)size;
+ (instancetype)codeIconWithSize:(CGFloat)size;
+ (instancetype)colorModeIconWithSize:(CGFloat)size;
+ (instancetype)commentAddIconWithSize:(CGFloat)size;
+ (instancetype)commentIconWithSize:(CGFloat)size;
+ (instancetype)commentDiscussionIconWithSize:(CGFloat)size;
+ (instancetype)creditCardIconWithSize:(CGFloat)size;
+ (instancetype)dashIconWithSize:(CGFloat)size;
+ (instancetype)dashboardIconWithSize:(CGFloat)size;
+ (instancetype)databaseIconWithSize:(CGFloat)size;
+ (instancetype)deviceCameraIconWithSize:(CGFloat)size;
+ (instancetype)deviceCameraVideoIconWithSize:(CGFloat)size;
+ (instancetype)deviceDesktopIconWithSize:(CGFloat)size;
+ (instancetype)deviceMobileIconWithSize:(CGFloat)size;
+ (instancetype)diffIconWithSize:(CGFloat)size;
+ (instancetype)diffAddedIconWithSize:(CGFloat)size;
+ (instancetype)diffIgnoredIconWithSize:(CGFloat)size;
+ (instancetype)diffModifiedIconWithSize:(CGFloat)size;
+ (instancetype)diffRemovedIconWithSize:(CGFloat)size;
+ (instancetype)diffRenamedIconWithSize:(CGFloat)size;
+ (instancetype)ellipsisIconWithSize:(CGFloat)size;
+ (instancetype)eyeUnwatchIconWithSize:(CGFloat)size;
+ (instancetype)eyeWatchIconWithSize:(CGFloat)size;
+ (instancetype)eyeIconWithSize:(CGFloat)size;
+ (instancetype)fileBinaryIconWithSize:(CGFloat)size;
+ (instancetype)fileCodeIconWithSize:(CGFloat)size;
+ (instancetype)fileDirectoryIconWithSize:(CGFloat)size;
+ (instancetype)fileMediaIconWithSize:(CGFloat)size;
+ (instancetype)filePdfIconWithSize:(CGFloat)size;
+ (instancetype)fileSubmoduleIconWithSize:(CGFloat)size;
+ (instancetype)fileSymlinkDirectoryIconWithSize:(CGFloat)size;
+ (instancetype)fileSymlinkFileIconWithSize:(CGFloat)size;
+ (instancetype)fileTextIconWithSize:(CGFloat)size;
+ (instancetype)fileZipIconWithSize:(CGFloat)size;
+ (instancetype)flameIconWithSize:(CGFloat)size;
+ (instancetype)foldIconWithSize:(CGFloat)size;
+ (instancetype)gearIconWithSize:(CGFloat)size;
+ (instancetype)giftIconWithSize:(CGFloat)size;
+ (instancetype)gistIconWithSize:(CGFloat)size;
+ (instancetype)gistSecretIconWithSize:(CGFloat)size;
+ (instancetype)gitBranchCreateIconWithSize:(CGFloat)size;
+ (instancetype)gitBranchDeleteIconWithSize:(CGFloat)size;
+ (instancetype)gitBranchIconWithSize:(CGFloat)size;
+ (instancetype)gitCommitIconWithSize:(CGFloat)size;
+ (instancetype)gitCompareIconWithSize:(CGFloat)size;
+ (instancetype)gitMergeIconWithSize:(CGFloat)size;
+ (instancetype)gitPullRequestAbandonedIconWithSize:(CGFloat)size;
+ (instancetype)gitPullRequestIconWithSize:(CGFloat)size;
+ (instancetype)globeIconWithSize:(CGFloat)size;
+ (instancetype)graphIconWithSize:(CGFloat)size;
+ (instancetype)heartIconWithSize:(CGFloat)size;
+ (instancetype)historyIconWithSize:(CGFloat)size;
+ (instancetype)homeIconWithSize:(CGFloat)size;
+ (instancetype)horizontalRuleIconWithSize:(CGFloat)size;
+ (instancetype)hourglassIconWithSize:(CGFloat)size;
+ (instancetype)hubotIconWithSize:(CGFloat)size;
+ (instancetype)inboxIconWithSize:(CGFloat)size;
+ (instancetype)infoIconWithSize:(CGFloat)size;
+ (instancetype)issueClosedIconWithSize:(CGFloat)size;
+ (instancetype)issueOpenedIconWithSize:(CGFloat)size;
+ (instancetype)issueReopenedIconWithSize:(CGFloat)size;
+ (instancetype)jerseyIconWithSize:(CGFloat)size;
+ (instancetype)jumpDownIconWithSize:(CGFloat)size;
+ (instancetype)jumpLeftIconWithSize:(CGFloat)size;
+ (instancetype)jumpRightIconWithSize:(CGFloat)size;
+ (instancetype)jumpUpIconWithSize:(CGFloat)size;
+ (instancetype)keyIconWithSize:(CGFloat)size;
+ (instancetype)keyboardIconWithSize:(CGFloat)size;
+ (instancetype)lawIconWithSize:(CGFloat)size;
+ (instancetype)lightBulbIconWithSize:(CGFloat)size;
+ (instancetype)linkIconWithSize:(CGFloat)size;
+ (instancetype)linkExternalIconWithSize:(CGFloat)size;
+ (instancetype)listOrderedIconWithSize:(CGFloat)size;
+ (instancetype)listUnorderedIconWithSize:(CGFloat)size;
+ (instancetype)locationIconWithSize:(CGFloat)size;
// Aliases retained for backwards compatibility with older glyph names.
+ (instancetype)gistPrivateIconWithSize:(CGFloat)size;
+ (instancetype)mirrorPrivateIconWithSize:(CGFloat)size;
+ (instancetype)gitForkPrivateIconWithSize:(CGFloat)size;
+ (instancetype)lockIconWithSize:(CGFloat)size;
+ (instancetype)logoGithubIconWithSize:(CGFloat)size;
+ (instancetype)mailIconWithSize:(CGFloat)size;
+ (instancetype)mailReadIconWithSize:(CGFloat)size;
+ (instancetype)mailReplyIconWithSize:(CGFloat)size;
+ (instancetype)markGithubIconWithSize:(CGFloat)size;
+ (instancetype)markdownIconWithSize:(CGFloat)size;
+ (instancetype)megaphoneIconWithSize:(CGFloat)size;
+ (instancetype)mentionIconWithSize:(CGFloat)size;
+ (instancetype)microscopeIconWithSize:(CGFloat)size;
+ (instancetype)milestoneIconWithSize:(CGFloat)size;
+ (instancetype)mirrorPublicIconWithSize:(CGFloat)size;
+ (instancetype)mirrorIconWithSize:(CGFloat)size;
+ (instancetype)mortarBoardIconWithSize:(CGFloat)size;
+ (instancetype)moveDownIconWithSize:(CGFloat)size;
+ (instancetype)moveLeftIconWithSize:(CGFloat)size;
+ (instancetype)moveRightIconWithSize:(CGFloat)size;
+ (instancetype)moveUpIconWithSize:(CGFloat)size;
+ (instancetype)muteIconWithSize:(CGFloat)size;
+ (instancetype)noNewlineIconWithSize:(CGFloat)size;
+ (instancetype)octofaceIconWithSize:(CGFloat)size;
+ (instancetype)organizationIconWithSize:(CGFloat)size;
+ (instancetype)packageIconWithSize:(CGFloat)size;
+ (instancetype)paintcanIconWithSize:(CGFloat)size;
+ (instancetype)pencilIconWithSize:(CGFloat)size;
+ (instancetype)personAddIconWithSize:(CGFloat)size;
+ (instancetype)personFollowIconWithSize:(CGFloat)size;
+ (instancetype)personIconWithSize:(CGFloat)size;
+ (instancetype)pinIconWithSize:(CGFloat)size;
+ (instancetype)playbackFastForwardIconWithSize:(CGFloat)size;
+ (instancetype)playbackPauseIconWithSize:(CGFloat)size;
+ (instancetype)playbackPlayIconWithSize:(CGFloat)size;
+ (instancetype)playbackRewindIconWithSize:(CGFloat)size;
+ (instancetype)plugIconWithSize:(CGFloat)size;
+ (instancetype)repoCreateIconWithSize:(CGFloat)size;
+ (instancetype)gistNewIconWithSize:(CGFloat)size;
+ (instancetype)fileDirectoryCreateIconWithSize:(CGFloat)size;
+ (instancetype)fileAddIconWithSize:(CGFloat)size;
+ (instancetype)plusIconWithSize:(CGFloat)size;
+ (instancetype)podiumIconWithSize:(CGFloat)size;
+ (instancetype)primitiveDotIconWithSize:(CGFloat)size;
+ (instancetype)primitiveSquareIconWithSize:(CGFloat)size;
+ (instancetype)pulseIconWithSize:(CGFloat)size;
+ (instancetype)puzzleIconWithSize:(CGFloat)size;
+ (instancetype)questionIconWithSize:(CGFloat)size;
+ (instancetype)quoteIconWithSize:(CGFloat)size;
+ (instancetype)radioTowerIconWithSize:(CGFloat)size;
+ (instancetype)repoDeleteIconWithSize:(CGFloat)size;
+ (instancetype)repoIconWithSize:(CGFloat)size;
+ (instancetype)repoCloneIconWithSize:(CGFloat)size;
+ (instancetype)repoForcePushIconWithSize:(CGFloat)size;
+ (instancetype)gistForkIconWithSize:(CGFloat)size;
+ (instancetype)repoForkedIconWithSize:(CGFloat)size;
+ (instancetype)repoPullIconWithSize:(CGFloat)size;
+ (instancetype)repoPushIconWithSize:(CGFloat)size;
+ (instancetype)rocketIconWithSize:(CGFloat)size;
+ (instancetype)rssIconWithSize:(CGFloat)size;
+ (instancetype)rubyIconWithSize:(CGFloat)size;
+ (instancetype)screenFullIconWithSize:(CGFloat)size;
+ (instancetype)screenNormalIconWithSize:(CGFloat)size;
+ (instancetype)searchSaveIconWithSize:(CGFloat)size;
+ (instancetype)searchIconWithSize:(CGFloat)size;
+ (instancetype)serverIconWithSize:(CGFloat)size;
+ (instancetype)settingsIconWithSize:(CGFloat)size;
+ (instancetype)logInIconWithSize:(CGFloat)size;
+ (instancetype)signInIconWithSize:(CGFloat)size;
+ (instancetype)logOutIconWithSize:(CGFloat)size;
+ (instancetype)signOutIconWithSize:(CGFloat)size;
+ (instancetype)splitIconWithSize:(CGFloat)size;
+ (instancetype)squirrelIconWithSize:(CGFloat)size;
+ (instancetype)starAddIconWithSize:(CGFloat)size;
+ (instancetype)starDeleteIconWithSize:(CGFloat)size;
+ (instancetype)starIconWithSize:(CGFloat)size;
+ (instancetype)stepsIconWithSize:(CGFloat)size;
+ (instancetype)stopIconWithSize:(CGFloat)size;
+ (instancetype)repoSyncIconWithSize:(CGFloat)size;
+ (instancetype)syncIconWithSize:(CGFloat)size;
+ (instancetype)tagRemoveIconWithSize:(CGFloat)size;
+ (instancetype)tagAddIconWithSize:(CGFloat)size;
+ (instancetype)tagIconWithSize:(CGFloat)size;
+ (instancetype)telescopeIconWithSize:(CGFloat)size;
+ (instancetype)terminalIconWithSize:(CGFloat)size;
+ (instancetype)threeBarsIconWithSize:(CGFloat)size;
+ (instancetype)thumbsdownIconWithSize:(CGFloat)size;
+ (instancetype)thumbsupIconWithSize:(CGFloat)size;
+ (instancetype)toolsIconWithSize:(CGFloat)size;
+ (instancetype)trashcanIconWithSize:(CGFloat)size;
+ (instancetype)triangleDownIconWithSize:(CGFloat)size;
+ (instancetype)triangleLeftIconWithSize:(CGFloat)size;
+ (instancetype)triangleRightIconWithSize:(CGFloat)size;
+ (instancetype)triangleUpIconWithSize:(CGFloat)size;
+ (instancetype)unfoldIconWithSize:(CGFloat)size;
+ (instancetype)unmuteIconWithSize:(CGFloat)size;
+ (instancetype)versionsIconWithSize:(CGFloat)size;
+ (instancetype)removeCloseIconWithSize:(CGFloat)size;
+ (instancetype)xIconWithSize:(CGFloat)size;
+ (instancetype)zapIconWithSize:(CGFloat)size;
@end
{'content_hash': '0c5530458966014bdd29c24e83ed1959', 'timestamp': '', 'source': 'github', 'line_count': 211, 'max_line_length': 66, 'avg_line_length': 51.73933649289099, 'alnum_prop': 0.8288907208940185, 'repo_name': 'Jamonek/School-For-Me', 'id': 'f1a2d558e33350de0be05e6f72e8d98ef751cec0', 'size': '10938', 'binary': False, 'copies': '18', 'ref': 'refs/heads/master', 'path': 'Pods/FontAwesomeKit/FontAwesomeKit/FAKOcticons.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Ruby', 'bytes': '432'}, {'name': 'Swift', 'bytes': '66718'}]}
package org.apache.lens.api.ml;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;

import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JAXB-serializable summary of a model test run: the input/output tables,
 * the columns involved, and the identifiers (model, report, Lens query)
 * needed to trace the run.
 *
 * Lombok generates the all-args constructor (arguments follow field
 * declaration order), the no-args constructor, and the getters.
 */
@XmlRootElement
@AllArgsConstructor
@NoArgsConstructor
public class TestReport {

  /** Table holding the test input data. */
  @XmlElement
  @Getter
  private String testTable;

  /** Table the predictions were written to. */
  @XmlElement
  @Getter
  private String outputTable;

  /** Column containing the predicted values. */
  @XmlElement
  @Getter
  private String outputColumn;

  /** Column containing the ground-truth labels. */
  @XmlElement
  @Getter
  private String labelColumn;

  /** Feature column names. */
  @XmlElement
  @Getter
  private String featureColumns;

  /** Algorithm used to train the model. */
  @XmlElement
  @Getter
  private String algorithm;

  /** Identifier of the tested model. */
  @XmlElement
  @Getter
  private String modelID;

  /** Identifier of this report. */
  @XmlElement
  @Getter
  private String reportID;

  /** Identifier of the Lens query that produced the predictions. */
  @XmlElement
  @Getter
  private String queryID;

  /*
   * (non-Javadoc)
   *
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    // Label/value pairs, in the exact order of the original report layout.
    final String[][] rows = {
      { "Input test table: ", testTable },
      { "Algorithm: ", algorithm },
      { "Report id: ", reportID },
      { "Model id: ", modelID },
      { "Lens Query id: ", queryID },
      { "Feature columns: ", featureColumns },
      { "Labelled column: ", labelColumn },
      { "Predicted column: ", outputColumn },
      { "Test output table: ", outputTable },
    };
    final StringBuilder report = new StringBuilder();
    for (final String[] row : rows) {
      report.append(row[0]).append(row[1]).append('\n');
    }
    return report.toString();
  }
}
{'content_hash': 'ce87a14974087b357117082d2c1840cc', 'timestamp': '', 'source': 'github', 'line_count': 108, 'max_line_length': 76, 'avg_line_length': 22.34259259259259, 'alnum_prop': 0.6560298383754662, 'repo_name': 'rajubairishetti/lens', 'id': '4c0bf5176a985869beba07fde488bf3e5a2ed0b1', 'size': '3221', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'lens-ml-lib/src/main/java/org/apache/lens/api/ml/TestReport.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '3124035'}, {'name': 'JavaScript', 'bytes': '284450'}, {'name': 'Shell', 'bytes': '9553'}]}
</div> <!-- #main --> </div> <!-- #main-container --> <div class="footer-container"> <footer class="wrapper"> <a href="{{.Site.BaseURL}}"><h3>{{ .Site.Title }}</h3></a> <script type="text/javascript" src='http://adn.ebay.com/files/js/min/jquery-1.6.2-min.js'></script> </footer> </div> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.2/jquery.min.js"></script> <script>window.jQuery || document.write('<script src="{{.Site.BaseURL}}/js/vendor/jquery-1.11.2.min.js"><\/script>')</script> <script src="{{.Site.BaseURL}}/js/main.js"></script> </script> </body> </html>
{'content_hash': 'e9d19010c79a01a8a04addb3b9b48511', 'timestamp': '', 'source': 'github', 'line_count': 19, 'max_line_length': 133, 'avg_line_length': 36.94736842105263, 'alnum_prop': 0.5256410256410257, 'repo_name': 'mana-planorama/site-cms-3', 'id': 'e46986e081c8120a958b1d29c36eff745742626d', 'size': '704', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'layouts/partials/footer.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '6487'}, {'name': 'HTML', 'bytes': '5937'}, {'name': 'JavaScript', 'bytes': '1'}, {'name': 'Shell', 'bytes': '1339'}]}
import mincemeat
import glob
import logging
import repr       # NOTE(review): Python 2 stdlib module (renamed reprlib in Py3)
import socket
import asyncore

"""
example-sf-election -- elects (self) as server, or become a client

    To run this test, simply start multiple instances of this script:

        python example-sf-election.py

    The first will become the server; the remainder will become clients.

    This is the initial phase of development for systems the dynamically
must decide if there is a server available, and spawn one if a server is
not found.

    Here is the original simple dictionary datasource:

data = ["Humpty Dumpty sat on a wall",
        "Humpty Dumpty had a great fall",
        "All the King's horses and all the King's men",
        "Couldn't put Humpty together again",
        ]

# The data source can be any dictionary-like object
datasource = dict(enumerate(data))

    Alternatively, here is a iterable that returns the contents of a set of
files designated by name (or a "glob" pattern match):
"""

class file_contents(object):
    """Dict-like view over the files matching a glob pattern.

    Iteration yields file names (the keys); indexing by a file name
    returns that file's full text (the value).
    """
    def __init__(self, pattern ):
        self.text_files = glob.glob( pattern )

    def __len__(self):
        return len(self.text_files)

    def __iter__(self):
        return iter(self.text_files)

    def __getitem__(self, key):
        # `key` is a file name as produced by __iter__, not an integer index.
        f = open(key)
        try:
            return f.read()
        finally:
            f.close()

# Obtain CD ISO from: http://www.gutenberg.org/cdproject/pgsfcd-032007.zip.torrent
datasource = file_contents( '../Gutenberg SF CD/Gutenberg SF/*moon*.txt' )

#
# Map Functions.
#
#     Take a name and corpus of data, and map it onto an iterable of
# (key,value) pairs.
#
def get_lower_split( name, corpus ):
    """Yield (word, 1) for every normalized word in `corpus`."""
    # Local import keeps the mapfn self-contained when shipped to clients.
    import string
    logging.debug( "Corpus: %-40s: %d bytes" %( name, len( corpus )))
    for line in corpus.split("\n"):
        # Treat '--' as a word separator before splitting on whitespace.
        for word in line.replace('--',' ').split():
            # Lower-case, and strip punctuation/whitespace/digits at the edges.
            word = word.lower().strip(string.punctuation+
                                      string.whitespace+
                                      string.digits)
            if "'" in word:
                # Drop common English contraction/possessive suffixes.
                for suffix in [ "'s", "'ll", "'d", "'ve" ]:
                    if word.endswith( suffix ):
                        word = word[:-len( suffix )]
            if word:
                yield word, 1

def get_lower_simple( k, v ):
    """Minimal mapper: yield (word, 1), lower-casing only."""
    for w in v.split():
        yield w.lower(), 1

#
# Collect, Reduce, or Finish Functions.
#
#     Take (key,value) or (key,[value,...]) pairs, or an iterable
# producing such, and return the single value mapped to that key.  The
# functional version returns just the value; the iterable version must
# return the (key,value) pair.
#
#     If the function is resilient to taking a value that is either an
# iterable OR a single value, then the same function may be used
# for any of the Collect, Reduce or Finish functions.  Collect and
# Reduce will always be provided with (key,[value,...]) arguments;
# Finish may be provided with (key,[value,...]) OR (key,value).  Try
# isinstance(vs,list) or hasattr(vs,'__iter__'), or use functions that
# throw TypeError on non-iterables, and catch the exception.
#
def sum_values( k, vs ):
    """Reduce a key's value list to its sum; pass scalars through as-is."""
    try:
        total = sum( vs )
    except TypeError:
        # vs was a single non-iterable (or non-summable) value.
        return vs
    return total

def sum_values_generator( kvi ):
    """Same contract as sum_values, but consumes/produces (key, values) pairs."""
    for key, values in kvi:
        try:
            summed = sum( values )
        except TypeError:
            # values was already a single scalar value.
            summed = values
        yield key, summed

#
# Map Phase
#
#     Each Map client runs a full pass of mapfn over the incoming data,
# followed (optionally) by a pass of collectfn over all values for each
# Map data_key:
#
#     mapfn( source_key, data )
#         --> { map_key1: [ value, ...] ), map_key2: [ value, ...], ... }
#     collectfn( map_key1, [ value, value ] )
#         --> data_key1: [ value ]
#
#     The optional collectfn would be appropriate to (for example)
# reduce the communication payload size (eg. store the map data in
# some global filesystem, and instead return the filesystem path.)
#
#     Or, if the mapfn is simple (doesn't retain information about the
# data corpus), the collectfn might collapse information about the
# result values.  For example, in the simple "word count" example, the
# mapfn returns lists of the form [ 1, 1, 1, ...., 1 ].  Instead of
# transmitting this, we should use the collect function to sum these
# counters, returning a list with a single value.
#
# The .collectfn may take a (key, values) tuple (must be a scalar,
# eg.
# int, string and an iterable, eg. list), and return a single
# scalar value, which will be returned as a single-entry list.  Or, it
# may take an iterator producing the key, values tuples, and must
# return an (key, values) list of the same types (eg. a scalar key,
# and an iterable value).
#

# The Map function shipped to every client.
mapfn = get_lower_split

# When the map function produces non-optimal results, it may be
# desirable to run a collect phase, to post-process the results before
# returning them to the server.  For example, the trivial map function
# for word counting produces a (very long) list of the form [1, 1,
# ..., 1]; it might be desirable to sum this list before returning.  A
# less contrived example might post-process the entire set of keys
# produced by the map; a generator-style collect function can retain
# state between invocations with each key, and may decide to modify
# (or even skip) keys, or return new/additional keys.  Try
# setting collectfn to sum_values or sum_values_generator to see the
# differences in the results of the map (dramatically smaller returned
# lists)
collectfn = None
#collectfn = sum_values
#collectfn = sum_values_generator

#
# Reduce Phase
#
#     The Reduce phase takes the output of Map:
#
#         mapped[key] = [ value, value, ... ]
#
# data, and produces:
#
#         result[key] = value
#
#     If no Server.reducefn is supplied, then the Reduce phase is skipped,
# and the mapped data is passed directly to the result:
#
#         result[key] = [ value, value, ... ]
#
#     Therefore, any supplied Server.finishfn() must be able to handle
# either a scalar value (indicating that Reduce has completed), or
# sequence values (indicating that the Reduce phase was skipped.)
#
# NOTE: In the case where the reduce function is trivial (such as in
# the word counting example), it will take *significantly* longer to
# run this test, than if you specify None for reducefn, and (instead)
# use the finishfn to run the entire reduce phase in the server...
# The results should be identical.
To see the difference, try # changing reducefn to None, and setting finishfn to sum_values or # sum_values_generator. # Skip the Reduce phase; use the Reduce function as Server.finishfn #reducefn = None reducefn = sum_values #reducefn = sum_values_generator finishfn = None #finishfn = sum_values #finishfn = sum_values_generator # Specify an externally visible server network interface name instead # of "localhost", if you wish to try this example accross multiple # hosts. Note that the empty string '' implies INADDR_ANY for bind, and # addr_info = { 'password': 'changeme', 'interface': 'localhost', 'port': mincemeat.DEFAULT_PORT } def server( credentials ): """ Run a Map-Reduce Server, and process a single Map-Reduce """ s = mincemeat.Server() s.datasource = datasource s.mapfn = mapfn s.collectfn = collectfn s.reducefn = reducefn s.finishfn = finishfn results = s.run_server( **credentials ) # Map-Reduce over 'datasource' complete. Enumerate results, # ordered both lexicographically and by count bycount = {} for k,v in results.items(): if v in bycount: bycount[v].append( k ) else: bycount[v] = [k] bycountlist = [] for k,l in sorted(bycount.items()): for w in sorted( l ): bycountlist.append( (k, w) ) for k, lt in zip( sorted( results.keys() ), bycountlist ): print "%8d %-40.40s %8d %s" % ( results[k], k, lt[0], lt[1] ) def client( credentials ): logging.debug( " socket_map at client startup: %s" % ( repr.repr( asyncore.socket_map ))) c = mincemeat.Client() c.conn( **credentials ) # Client communications with Server done; either server completed # success, or exited without completing our authentication. if not c.authenticated(): raise Exception( "No server authenticated!" ) if __name__ == '__main__': logging.basicConfig( level=logging.INFO ) try: logging.info( "Trying as client..." ) client( addr_info ) # If we get here, we succeeded in connecting and authenticating... 
logging.info( "Client terminating normally" ) except: logging.info( "Client connection failed; Trying as server..." ) server( addr_info )
{'content_hash': '0c66cd69fe3e704508828df888b503e8', 'timestamp': '', 'source': 'github', 'line_count': 261, 'max_line_length': 84, 'avg_line_length': 34.206896551724135, 'alnum_prop': 0.6472894265232975, 'repo_name': 'pjkundert/mincemeatpy', 'id': 'db26b2fcd361145fb6d932c36b68bdf700f676ea', 'size': '8950', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'example-sf-election.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Makefile', 'bytes': '814'}, {'name': 'Python', 'bytes': '226525'}]}
namespace media {
namespace cast {

// Abstract interface for the congestion-control policy the Cast sender uses
// to choose a transmission bitrate for encoded frames, driven by RTT and ACK
// feedback.
class CongestionControl {
 public:
  virtual ~CongestionControl();

  // Called with latest measured rtt value.
  virtual void UpdateRtt(base::TimeDelta rtt) = 0;

  // Called with an updated target playout delay value.
  virtual void UpdateTargetPlayoutDelay(base::TimeDelta delay) = 0;

  // Called when an encoded frame is enqueued for transport.
  virtual void SendFrameToTransport(FrameId frame_id,
                                    size_t frame_size_in_bits,
                                    base::TimeTicks when) = 0;

  // Called when we receive an ACK for a frame.
  virtual void AckFrame(FrameId frame_id, base::TimeTicks when) = 0;

  // Called when the RTP receiver received frames that have frame ID larger
  // than |last_acked_frame_|.
  virtual void AckLaterFrames(std::vector<FrameId> received_frames,
                              base::TimeTicks when) = 0;

  // Returns the bitrate we should use for the next frame.
  virtual int GetBitrate(base::TimeTicks playout_time,
                         base::TimeDelta playout_delay) = 0;
};

// Creates the adaptive implementation, which picks a bitrate within the
// configured [min, max] bounds.
CongestionControl* NewAdaptiveCongestionControl(const base::TickClock* clock,
                                                int max_bitrate_configured,
                                                int min_bitrate_configured,
                                                double max_frame_rate);

// Creates an implementation pinned to a single configured |bitrate|.
CongestionControl* NewFixedCongestionControl(int bitrate);

}  // namespace cast
}  // namespace media

#endif  // MEDIA_CAST_SENDER_CONGESTION_CONTROL_H_
{'content_hash': 'faebd8410235091ba0cac0d7729fbea7', 'timestamp': '', 'source': 'github', 'line_count': 42, 'max_line_length': 77, 'avg_line_length': 37.023809523809526, 'alnum_prop': 0.6231511254019293, 'repo_name': 'scheib/chromium', 'id': 'e63bbff8b2078da12b0423e4cd45c381116e2622', 'size': '1999', 'binary': False, 'copies': '5', 'ref': 'refs/heads/main', 'path': 'media/cast/sender/congestion_control.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []}
<?php

namespace Sylius\Bundle\PromotionBundle\Form\EventListener;

use Sylius\Component\Promotion\Model\PromotionActionInterface;

/**
 * This listener adds configuration form to a action,
 * if selected action requires one.
 *
 * @author Saša Stamenković <[email protected]>
 * @author Arnaud Langlade <[email protected]>
 */
final class BuildPromotionActionFormSubscriber extends AbstractConfigurationSubscriber
{
    /**
     * Returns the action's configuration, or an empty array when the value
     * is not a promotion action or carries no configuration.
     *
     * @param PromotionActionInterface $action
     *
     * @return array
     */
    protected function getConfiguration($action)
    {
        if (!$action instanceof PromotionActionInterface) {
            return [];
        }

        // FIX: call getConfiguration() once instead of twice (the original
        // invoked it both in the guard and in the return statement).
        $configuration = $action->getConfiguration();

        return null !== $configuration ? $configuration : [];
    }
}
{'content_hash': 'c1d5dcffc9e45ed3393fb590a70ec56e', 'timestamp': '', 'source': 'github', 'line_count': 31, 'max_line_length': 98, 'avg_line_length': 24.29032258064516, 'alnum_prop': 0.6945551128818062, 'repo_name': 'kiranthomas/sylius-docker', 'id': '186a74108a983ada7efcdaf408f21f18cc52b83e', 'size': '966', 'binary': False, 'copies': '8', 'ref': 'refs/heads/master', 'path': 'src/Sylius/Bundle/PromotionBundle/Form/EventListener/BuildPromotionActionFormSubscriber.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '601'}, {'name': 'CSS', 'bytes': '1792'}, {'name': 'Gherkin', 'bytes': '754844'}, {'name': 'HTML', 'bytes': '257794'}, {'name': 'JavaScript', 'bytes': '53396'}, {'name': 'PHP', 'bytes': '6067243'}, {'name': 'Shell', 'bytes': '27443'}]}
// NOTE(review): this file is a browserify-generated bundle of the
// MediaElement.js "skip back" plugin — edit the plugin source, not this
// bundle. The minified prelude below is the browserify module loader.
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
'use strict';

/**
 * Skip back button
 *
 * This feature creates a button to rewind media a specific number of seconds.
 *
 * Relies on globals provided by the host page: `mejs`,
 * `MediaElementPlayer` and `$` (presumably jQuery — confirm against the
 * page's script load order).
 */

// Feature configuration
Object.assign(mejs.MepDefaults, {
    /**
     * Number of seconds each click rewinds the media.
     * @type {Number}
     */
    skipBackInterval: 30,
    /**
     * Optional button title override; '%1' is replaced with the interval.
     * Empty string means "use the localized default title".
     * @type {String}
     */
    skipBackText: ''
});

Object.assign(MediaElementPlayer.prototype, {
    /**
     * Feature constructor.
     *
     * Always has to be prefixed with `build` and the name that will be used in MepDefaults.features list
     * @param {MediaElementPlayer} player
     * @param {$} controls
     * @param {$} layers
     * @param {HTMLElement} media
     */
    buildskipback: function buildskipback(player, controls, layers, media) {
        var t = this,
            defaultTitle = mejs.i18n.t('mejs.time-skip-back', t.options.skipBackInterval),
            skipTitle = t.options.skipBackText ? t.options.skipBackText.replace('%1', t.options.skipBackInterval) : defaultTitle;

        // Build the button markup with the interval as its visible label.
        $('<div class="' + t.options.classPrefix + 'button ' + t.options.classPrefix + 'skip-back-button">' + ('<button type="button" aria-controls="' + t.id + '" title="' + skipTitle + '" aria-label="' + skipTitle + '" tabindex="0">') + (t.options.skipBackInterval + '</button>') + '</div>')
        // append it to the toolbar
        .appendTo(controls)
        // add a click toggle event
        .click(function () {
            // Only seek once the media has a known duration; clamp at 0.
            if (media.duration) {
                media.setCurrentTime(Math.max(media.currentTime - t.options.skipBackInterval, 0));
                $(this).find('button').blur();
            }
        });
    }
});

},{}]},{},[1]);
{'content_hash': 'd9d595f41ce1cfc8deec93a03b47c5b7', 'timestamp': '', 'source': 'github', 'line_count': 52, 'max_line_length': 480, 'avg_line_length': 37.5, 'alnum_prop': 0.6487179487179487, 'repo_name': 'tonytomov/cdnjs', 'id': 'a5468e717ef9761478ecb2262ab966dae8827e73', 'size': '1950', 'binary': False, 'copies': '27', 'ref': 'refs/heads/master', 'path': 'ajax/libs/mediaelement-plugins/1.2.0/skip-back/skip-back.js', 'mode': '33188', 'license': 'mit', 'language': []}
import os
import pickle
import sys
# Alias the builtins module under one name for both Python 2 and 3, so the
# cross-process flags below can be stored in a single well-known place.
if sys.hexversion < 0x03000000:
    import __builtin__
else:
    import builtins as __builtin__
# PyPI imports
import pytest
# Putil imports
import putil.exh


###
# Functions
###
def log(line, append=True):
    """ xdist debugging function """
    # Appends by default; pass append=False to truncate and start fresh.
    with open(
            os.path.join(os.environ['HOME'], 'xdist-debug.log'),
            'a' if append else 'w'
    ) as fobj:
        fobj.write('{0}\n'.format(line))


def pytest_configure(config):
    """ Pytest configuration, both for the slave and master """
    # Master detected by the absence of slaveinput; nothing to do (yet).
    if not hasattr(config, "slaveinput"):
        # Master configuration
        pass


def pytest_configure_node(node):
    """ Per node configuration """
    # pylint: disable=W0613
    # Runs on the master for each slave node: forward the exception-doc
    # flags (set as builtins by the exdoc driver) into the slave via
    # slaveinput.  Values are pickled because slaveinput must be
    # marshallable.
    if hasattr(__builtin__, '_EXDOC_EXCLUDE'):
        node.slaveinput['exclude'] = pickle.dumps(__builtin__._EXDOC_EXCLUDE)
    if hasattr(__builtin__, '_EXDOC_FULL_CNAME'):
        node.slaveinput['full_cname'] = pickle.dumps(
            __builtin__._EXDOC_FULL_CNAME
        )
    if hasattr(__builtin__, '_EXDOC_CALLABLES_FNAME'):
        node.slaveinput['callables_fname'] = pickle.dumps(
            __builtin__._EXDOC_CALLABLES_FNAME
        )


def pytest_testnodedown(node, error):
    """
    Integrate received exception handler from sub-process into main one
    """
    if error:
        raise RuntimeError('Slave node reported an error')
    # The slave published its pickled exception handler under 'msg'
    # (see the fin() teardown in the exhobj fixture below); accumulate
    # one handler per slave in the master's _EXH_LIST builtin.
    if 'msg' in node.slaveoutput:
        obj = pickle.loads(node.slaveoutput['msg'])
        if not hasattr(__builtin__, '_EXH_LIST'):
            setattr(__builtin__, '_EXH_LIST', [obj])
        else:
            getattr(__builtin__, '_EXH_LIST').append(obj)


@pytest.fixture(autouse=True, scope="module")
def exhobj(request):
    """
    Fixture to a) get the global exception handler in sub-process and b)
    send the exception handler after tests done

    This fixture runs in the slave session with NO connection to master
    except through slaveinput/slaveoutput
    """
    # Only slaves have slaveinput; on the master this fixture is a no-op
    # apart from registering the (then inert) finalizer.
    xdist_run = hasattr(request.config, 'slaveinput')
    def fin():
        """ Tear down function """
        # Ship the slave's exception handler back to the master (picked up
        # by pytest_testnodedown above).
        if (hasattr(request.config, 'slaveoutput') and
           hasattr(request.module.__builtin__, '_EXH')):
            request.config.slaveoutput['msg'] = pickle.dumps(
                getattr(request.module.__builtin__, '_EXH')
            )
    request.addfinalizer(fin)
    if xdist_run: # sub-process
        # Make sure the test module can reach the builtins module under the
        # name __builtin__ regardless of Python version.
        modname = '__builtin__' if sys.hexversion < 0x03000000 else 'builtins'
        if not hasattr(request.module, '__builtin__'):
            setattr(request.module, '__builtin__', __import__(modname))
        # Unpickle the flags forwarded by pytest_configure_node; defaults
        # mirror putil.exh.ExHandle's expectations.
        exclude = (pickle.loads(request.config.slaveinput['exclude'])
                  if 'exclude' in request.config.slaveinput else
                  None)
        full_cname = (pickle.loads(request.config.slaveinput['full_cname'])
                     if 'full_cname' in request.config.slaveinput else
                     False)
        callables_fname = (
            pickle.loads(request.config.slaveinput['callables_fname'])
            if 'callables_fname' in request.config.slaveinput else
            None
        )
        # Install a fresh global exception handler for this slave.
        setattr(
            request.module.__builtin__,
            '_EXH',
            putil.exh.ExHandle(
                full_cname=full_cname,
                exclude=exclude,
                callables_fname=callables_fname
            )
        )
{'content_hash': '3a0f0f0338c2d62176df33d7450e201e', 'timestamp': '', 'source': 'github', 'line_count': 108, 'max_line_length': 78, 'avg_line_length': 31.333333333333332, 'alnum_prop': 0.5892434988179669, 'repo_name': 'pmacosta/putil', 'id': '7d4f7a7c9e24f8ebcfd25a01e836fbd06a116fa1', 'size': '3560', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'tests/conftest.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '16611'}, {'name': 'Makefile', 'bytes': '2425'}, {'name': 'PowerShell', 'bytes': '7209'}, {'name': 'Python', 'bytes': '1220525'}, {'name': 'Shell', 'bytes': '56372'}]}
package org.apache.camel.test.junit5.params; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) public @interface Parameters { String name() default "{index}"; }
{'content_hash': '77bba0e314d78f0f4b134c67233d3e2b', 'timestamp': '', 'source': 'github', 'line_count': 13, 'max_line_length': 44, 'avg_line_length': 26.384615384615383, 'alnum_prop': 0.8017492711370262, 'repo_name': 'nicolaferraro/camel', 'id': 'cabf2e521cb0689a378cba9f7c941040d4b042f0', 'size': '1145', 'binary': False, 'copies': '14', 'ref': 'refs/heads/master', 'path': 'components/camel-test-junit5/src/main/java/org/apache/camel/test/junit5/params/Parameters.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Apex', 'bytes': '6521'}, {'name': 'Batchfile', 'bytes': '2353'}, {'name': 'CSS', 'bytes': '5472'}, {'name': 'Elm', 'bytes': '10852'}, {'name': 'FreeMarker', 'bytes': '8015'}, {'name': 'Groovy', 'bytes': '14479'}, {'name': 'HTML', 'bytes': '916625'}, {'name': 'Java', 'bytes': '82748568'}, {'name': 'JavaScript', 'bytes': '100326'}, {'name': 'Makefile', 'bytes': '513'}, {'name': 'Shell', 'bytes': '17240'}, {'name': 'TSQL', 'bytes': '28835'}, {'name': 'Tcl', 'bytes': '4974'}, {'name': 'Thrift', 'bytes': '6979'}, {'name': 'XQuery', 'bytes': '546'}, {'name': 'XSLT', 'bytes': '275257'}]}
package org.opencloudb.sqlengine; import java.util.HashMap; import java.util.List; import java.util.Map; import org.opencloudb.net.mysql.FieldPacket; import org.opencloudb.net.mysql.RowDataPacket; public class OneRawSQLQueryResultHandler implements SQLJobHandler { private Map<String, Integer> fetchColPosMap; private final SQLQueryResultListener<SQLQueryResult<Map<String, String>>> callback; private final String[] fetchCols; private int fieldCount = 0; private Map<String, String> result ; public OneRawSQLQueryResultHandler(String[] fetchCols, SQLQueryResultListener<SQLQueryResult<Map<String, String>>> callBack) { this.fetchCols = fetchCols; this.callback = callBack; } public void onHeader(String dataNode, byte[] header, List<byte[]> fields) { fieldCount = fields.size(); fetchColPosMap = new HashMap<String, Integer>(); for (String watchFd : fetchCols) { for (int i = 0; i < fieldCount; i++) { byte[] field = fields.get(i); FieldPacket fieldPkg = new FieldPacket(); fieldPkg.read(field); String fieldName = new String(fieldPkg.name); if (watchFd.equalsIgnoreCase(fieldName)) { fetchColPosMap.put(fieldName, i); } } } } @Override public boolean onRowData(String dataNode, byte[] rowData) { RowDataPacket rowDataPkg = new RowDataPacket(fieldCount); rowDataPkg.read(rowData); result = new HashMap<String, String>(); for (String fetchCol : fetchCols) { Integer ind = fetchColPosMap.get(fetchCol); if (ind != null) { byte[] columnData = rowDataPkg.fieldValues.get(ind); String columnVal = new String(columnData); result.put(fetchCol, columnVal); } else { LOGGER.warn("cant't find column in sql query result " + fetchCol); } } return false; } @Override public void finished(String dataNode, boolean failed) { SQLQueryResult<Map<String, String>> queryRestl=new SQLQueryResult<Map<String, String>>(this.result,!failed); this.callback.onRestult(queryRestl); } }
{'content_hash': 'e406537a843faa9f2d681356814cdca8', 'timestamp': '', 'source': 'github', 'line_count': 68, 'max_line_length': 110, 'avg_line_length': 29.426470588235293, 'alnum_prop': 0.719640179910045, 'repo_name': 'aimer1027/Mycat-Server', 'id': '3d3145a9e2359ad3d20486fb687c37e0b8e87d38', 'size': '2001', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'src/main/java/org/opencloudb/sqlengine/OneRawSQLQueryResultHandler.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '4273'}, {'name': 'CSS', 'bytes': '5337'}, {'name': 'HTML', 'bytes': '15511'}, {'name': 'Java', 'bytes': '2302156'}, {'name': 'JavaScript', 'bytes': '3555'}, {'name': 'Shell', 'bytes': '8450'}]}
#import "NSObject.h" @interface NSObject (MFUtilities) - (id)mf_objectWithHighest:(id)arg1; @end
{'content_hash': 'ef299659023fe86e5ec562e02c09eafe', 'timestamp': '', 'source': 'github', 'line_count': 8, 'max_line_length': 36, 'avg_line_length': 12.625, 'alnum_prop': 0.7227722772277227, 'repo_name': 'matthewsot/CocoaSharp', 'id': 'c50ff7d8613d4387dafdefbf4f6b70a3c68b7cee', 'size': '241', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Headers/Frameworks/MessageUI/NSObject-MFUtilities.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '259784'}, {'name': 'C#', 'bytes': '2789005'}, {'name': 'C++', 'bytes': '252504'}, {'name': 'Objective-C', 'bytes': '24301417'}, {'name': 'Smalltalk', 'bytes': '167909'}]}
#include <private/qabstractprotocolhandler_p.h> #include <private/qhttpnetworkconnectionchannel_p.h> #ifndef QT_NO_HTTP QT_BEGIN_NAMESPACE QAbstractProtocolHandler::QAbstractProtocolHandler(QHttpNetworkConnectionChannel *channel) : m_channel(channel), m_reply(0), m_socket(m_channel->socket), m_connection(m_channel->connection) { Q_ASSERT(m_channel); Q_ASSERT(m_socket); Q_ASSERT(m_connection); } QAbstractProtocolHandler::~QAbstractProtocolHandler() { } void QAbstractProtocolHandler::setReply(QHttpNetworkReply *reply) { m_reply = reply; } QT_END_NAMESPACE #endif // QT_NO_HTTP
{'content_hash': '1acc94b9c4de2f48922eee8604c0ab40', 'timestamp': '', 'source': 'github', 'line_count': 29, 'max_line_length': 102, 'avg_line_length': 21.03448275862069, 'alnum_prop': 0.7573770491803279, 'repo_name': 'TanguyPatte/phantomjs-packaging', 'id': 'e72bb63236710d8f2c8bb42bf35585623b41fbb6', 'size': '2582', 'binary': False, 'copies': '18', 'ref': 'refs/heads/master', 'path': 'src/qt/qtbase/src/network/access/qabstractprotocolhandler.cpp', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'ASP', 'bytes': '825'}, {'name': 'ApacheConf', 'bytes': '268333'}, {'name': 'Assembly', 'bytes': '268988'}, {'name': 'Bison', 'bytes': '12791'}, {'name': 'C', 'bytes': '8681869'}, {'name': 'C#', 'bytes': '1101'}, {'name': 'C++', 'bytes': '126516996'}, {'name': 'CMake', 'bytes': '554470'}, {'name': 'CSS', 'bytes': '760708'}, {'name': 'DTrace', 'bytes': '1931'}, {'name': 'Diff', 'bytes': '9194'}, {'name': 'Emacs Lisp', 'bytes': '393'}, {'name': 'GAP', 'bytes': '194281'}, {'name': 'Groff', 'bytes': '570631'}, {'name': 'HTML', 'bytes': '5465225'}, {'name': 'Java', 'bytes': '339535'}, {'name': 'JavaScript', 'bytes': '9553650'}, {'name': 'Objective-C', 'bytes': '2820750'}, {'name': 'Objective-C++', 'bytes': '7499435'}, {'name': 'Perl', 'bytes': '1714307'}, {'name': 'Prolog', 'bytes': '14631'}, {'name': 'Protocol Buffer', 'bytes': '8758'}, {'name': 'Python', 'bytes': '5465147'}, {'name': 'QML', 'bytes': '170356'}, {'name': 'QMake', 'bytes': '584006'}, {'name': 'Ruby', 'bytes': '424059'}, {'name': 'Shell', 'bytes': '469059'}, {'name': 'XSLT', 'bytes': '1047'}]}
{% if page.author %} {% assign author = site.data.authors[page.author] %}{% else %}{% assign author = site.owner %} {% endif %} <div class="cover-card table-cell table-middle"> {% if author.avatar %} <img src="{{ site.url }}/img/{{ author.avatar }}" alt="" class="avatar"> {% endif %} <a href="{{ site.url }}/" class="author_name">{{ author.name }}</a> <span class="author_job">{{ author.job }}</span> <span class="author_bio mbm">{{ author.bio }}</span> <nav class="nav"> <ul class="nav-list"> <li class="nav-item"> <a href="{{ site.url }}/">首页</a> </li> {% for page in site.pages %} {% if page.title %} <li class="nav-item"> <a href="{{ site.url }}{{ page.url }}">{{ page.title }}</a> </li> {% endif %} {% endfor %} </ul> </nav> {% include social-links.html %} </div>
{'content_hash': '9428ccf8ff18e4b0ced138bef11b08dc', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 127, 'avg_line_length': 36.91304347826087, 'alnum_prop': 0.5312131919905771, 'repo_name': 'dearlancer/dearlancer.github.io', 'id': 'eda1c9de305a9df172626e86e714165501ac983f', 'size': '853', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_includes/sidebar.html', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '78827'}, {'name': 'HTML', 'bytes': '543766'}, {'name': 'JavaScript', 'bytes': '70'}]}
module Search class GroupService < Search::GlobalService attr_accessor :group def initialize(user, group, params) super(user, params) @group = group end def projects return Project.none unless group return @projects if defined? @projects @projects = super.inside_path(group.full_path) end end end
{'content_hash': '940172973c2b91d5efbe940e49427e22', 'timestamp': '', 'source': 'github', 'line_count': 18, 'max_line_length': 52, 'avg_line_length': 19.72222222222222, 'alnum_prop': 0.6647887323943662, 'repo_name': 'htve/GitlabForChinese', 'id': '29478e3251f39161939ac4d39a49b9a01f109a8f', 'size': '355', 'binary': False, 'copies': '6', 'ref': 'refs/heads/9-2-zh', 'path': 'app/services/search/group_service.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '499575'}, {'name': 'Gherkin', 'bytes': '140955'}, {'name': 'HTML', 'bytes': '979335'}, {'name': 'JavaScript', 'bytes': '1909827'}, {'name': 'Ruby', 'bytes': '10590735'}, {'name': 'Shell', 'bytes': '26903'}, {'name': 'Vue', 'bytes': '81150'}]}
tws-to-twee-converter ===================== Converts Twine .tws files to .twee files The python script takes a file as an argument, and prints the resulting Twee file to stdout. To use, do something like: ```` python twsimporter.py twinefile.tws > newtweefile.tw ```` Also accepts a `-twee2` option, which will include the position of each passage in `twee2` output file format: ```` python twsimporter.py twinefile.tws -twee2 > newtweefile.tw2 ````
{'content_hash': 'bd57de9d1401b961d0664219d3875f47', 'timestamp': '', 'source': 'github', 'line_count': 18, 'max_line_length': 110, 'avg_line_length': 25.333333333333332, 'alnum_prop': 0.7105263157894737, 'repo_name': 'v21/tws-to-twee-converter', 'id': 'dae08bd92109262cc03d563bc4b2bbe331641d02', 'size': '456', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '11531'}]}
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>pocklington: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.0 / pocklington - 8.5.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> pocklington <small> 8.5.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-04-16 14:32:00 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-04-16 14:32:00 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a 
GMP lib system installation coq 8.13.0 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.05.0 The OCaml compiler (virtual package) ocaml-base-compiler 4.05.0 Official 4.05.0 release ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/coq-contribs/pocklington&quot; license: &quot;LGPL 2&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/Pocklington&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.5&quot; &amp; &lt; &quot;8.6~&quot;} ] tags: [ &quot;keyword:Pocklington&quot; &quot;keyword:number theory&quot; &quot;keyword:prime numbers&quot; &quot;keyword:primality&quot; &quot;keyword:Fermat&#39;s little theorem&quot; &quot;category:Mathematics/Arithmetic and Number Theory/Number theory&quot; &quot;date:2000-11&quot; ] authors: [ &quot;Martijn Oostdijk &lt;&gt;&quot; &quot;Olga Caprotti &lt;&gt;&quot; ] bug-reports: &quot;https://github.com/coq-contribs/pocklington/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/pocklington.git&quot; synopsis: &quot;Pocklington&#39;s criterion&quot; description: &quot;&quot;&quot; Pocklington&#39;s criterion for checking primality for large natural numbers. 
Includes a proof of Fermat&#39;s little theorem.&quot;&quot;&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/pocklington/archive/v8.5.0.tar.gz&quot; checksum: &quot;md5=2c742f543d56b36a2812eb1a0e8473e7&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-pocklington.8.5.0 coq.8.13.0</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.0). The following dependencies couldn&#39;t be met: - coq-pocklington -&gt; coq &lt; 8.6~ -&gt; ocaml &lt; 4.03.0 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-pocklington.8.5.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are 
on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{'content_hash': '79d0ed497a0d11d53ee24726fcb49971', 'timestamp': '', 'source': 'github', 'line_count': 176, 'max_line_length': 159, 'avg_line_length': 41.4375, 'alnum_prop': 0.5521733168791992, 'repo_name': 'coq-bench/coq-bench.github.io', 'id': '5b45bab0276cc9ca31f7e8934612c56f8b8756c0', 'size': '7318', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'clean/Linux-x86_64-4.05.0-2.0.1/released/8.13.0/pocklington/8.5.0.html', 'mode': '33188', 'license': 'mit', 'language': []}
<?xml version="1.0" encoding="utf-8" ?> <update xmlns="http://ns.adobe.com/air/framework/update/description/1.0"> <version>1.0.0</version> <timestamp>20141224.2</timestamp> <AppTimestamp>20141224.1</AppTimestamp> <AppUrlMac>https://github.com/ikCourage/iSunday/raw/master/iSunday/iSunday.dmg</AppUrlMac> <AppUrlMac_crc32>1948CJ4</AppUrlMac_crc32> <AppUrlWin>https://github.com/ikCourage/iSunday/raw/master/iSunday/iSunday.exe</AppUrlWin> <AppUrlWin_crc32>1AUV0SS</AppUrlWin_crc32> <url>https://github.com/ikCourage/iSunday/raw/master/iSunday/iSunday.air</url> <mainUrl>https://github.com/ikCourage/iSunday/raw/master/iSunday/iSunday.swf</mainUrl> <description><![CDATA[iSunday]]></description> <fileList> <file crc32="OREGXN" path="/imoon.swf" url="https://github.com/ikCourage/iSunday/raw/master/iSunday/imoon.swf"></file> </fileList> <ingz> <file crc32="CX30RS" path="iPlayer.ingz" url="https://github.com/ikCourage/iSunday/raw/master/iSunday/ingz/iPlayer.ingz"></file> <file crc32="X439JI" path="AConsole.ingz" url="https://github.com/ikCourage/iSunday/raw/master/iSunday/ingz/AConsole.ingz"></file> </ingz> </update>
{'content_hash': '18422cb1c427aa5b5755203105d1973c', 'timestamp': '', 'source': 'github', 'line_count': 20, 'max_line_length': 132, 'avg_line_length': 57.0, 'alnum_prop': 0.7517543859649123, 'repo_name': 'ikCourage/iSunday', 'id': '283cb602787b463e149aaaaa7b124814eb911dd5', 'size': '1140', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'iSunday/iSundayUpdate.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': []}
from optionaldict import optionaldict from wechatpy.client.api.base import BaseWeChatAPI class MerchantOrder(BaseWeChatAPI): API_BASE_URL = "https://api.weixin.qq.com/" def get(self, order_id): res = self._post( "merchant/order/getbyid", data={"order_id": order_id}, result_processor=lambda x: x["order"], ) return res def get_by_filter(self, status=None, begin_time=None, end_time=None): filter_dict = optionaldict(status=status, begintime=begin_time, endtime=end_time) res = self._post( "merchant/order/getbyfilter", data=dict(filter_dict), result_processor=lambda x: x["order_list"], ) return res def set_delivery(self, order_id, company, track_no, need_delivery=1, is_others=0): return self._post( "merchant/order/setdelivery", data={ "order_id": order_id, "delivery_company": company, "delivery_track_no": track_no, "need_delivery": need_delivery, "is_others": is_others, }, ) def close(self, order_id): return self._post("merchant/order/close", data={"order_id": order_id})
{'content_hash': '00c6d41b0017ebb6323c765dcebb4de6', 'timestamp': '', 'source': 'github', 'line_count': 40, 'max_line_length': 89, 'avg_line_length': 31.9, 'alnum_prop': 0.5666144200626959, 'repo_name': 'wechatpy/wechatpy', 'id': 'a7935da8343133dca8c9205758da7ab590e30ccf', 'size': '1301', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'wechatpy/client/api/merchant/order.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '852103'}]}
-- MySQL dump 10.13 Distrib 5.6.27, for osx10.10 (x86_64) -- -- Host: localhost Database: gift_management -- ------------------------------------------------------ -- Server version 5.6.27 /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8 */; /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; /*!40103 SET TIME_ZONE='+00:00' */; /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- -- Table structure for table `book_goods_mapping` -- DROP TABLE IF EXISTS `book_goods_mapping`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `book_goods_mapping` ( `gift_book_id` int(10) unsigned NOT NULL COMMENT '礼册id', `gift_id` int(10) unsigned NOT NULL COMMENT '商品id', `gift_num` int(11) DEFAULT NULL COMMENT '商品数量', `ctime` datetime NOT NULL COMMENT '添加时间', PRIMARY KEY (`gift_book_id`,`gift_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `cancel_card_order` -- DROP TABLE IF EXISTS `cancel_card_order`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `cancel_card_order` ( `id` int(11) NOT NULL AUTO_INCREMENT, `sales_id` int(11) DEFAULT NULL COMMENT '销售员id,从user表获取', `custom_id` int(11) DEFAULT NULL COMMENT '客户id,从客户表取', `cancel_date` date DEFAULT NULL COMMENT '退卡日期', `end_user` varchar(20) DEFAULT NULL COMMENT '最终用户', `remark` text COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1:退卡不可重新销售 2: 退卡可重新销售', `modify_user` int(20) DEFAULT NULL COMMENT '操作人id', 
PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `cancel_order_card` -- DROP TABLE IF EXISTS `cancel_order_card`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `cancel_order_card` ( `cancel_id` int(11) NOT NULL COMMENT '退卡id', `start_code` bigint(20) DEFAULT NULL COMMENT '开始号码', `end_code` bigint(20) DEFAULT NULL COMMENT '结束号码', `num` int(11) DEFAULT NULL COMMENT '数量', PRIMARY KEY (`cancel_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `card_order` -- DROP TABLE IF EXISTS `card_order`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `card_order` ( `id` int(11) NOT NULL AUTO_INCREMENT, `order_name` varchar(45) DEFAULT NULL COMMENT '册1*2,册2*3', `sales_id` int(11) DEFAULT NULL COMMENT '销售员id,从user表获取', `custom_id` int(11) DEFAULT NULL COMMENT '客户id,从客户表取', `contact_person` varchar(20) DEFAULT NULL COMMENT '联系人', `delever_id` int(11) DEFAULT NULL COMMENT '快递公司id', `trade_date` date NOT NULL COMMENT '下单日期', `expire_date` date DEFAULT NULL COMMENT '失效日期', `cancel_date` date DEFAULT NULL COMMENT '退卡日期', `wechat_id` int(11) DEFAULT NULL COMMENT '微信模版id', `price` decimal(15,2) NOT NULL DEFAULT '0.00' COMMENT '总价格', `pay_status` tinyint(1) NOT NULL DEFAULT '2' COMMENT '付款状态(1:已付款,2:未付款)', `pay_remark` varchar(520) DEFAULT NULL COMMENT '付款备注', `end_user` varchar(20) DEFAULT NULL COMMENT '最终用户', `remark` text COMMENT '备注', `delivrer_num` varchar(45) DEFAULT NULL COMMENT '快递单号', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1:待审核 2: 待发货 3: 已完成 4作废 5已退卡', `modify_user` varchar(20) DEFAULT NULL COMMENT '操作人', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1000000 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- 
-- Table structure for table `change_order` -- DROP TABLE IF EXISTS `change_order`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `change_order` ( `id` int(11) NOT NULL AUTO_INCREMENT, `card_id` int(11) NOT NULL DEFAULT '0', `card_num` varchar(45) NOT NULL DEFAULT '', `gift_id` int(11) DEFAULT NULL COMMENT '选择兑换的商品id', `customer_name` varchar(45) DEFAULT NULL COMMENT '用户名称,收件人', `phone` varchar(45) DEFAULT NULL COMMENT '电话', `address` varchar(255) DEFAULT NULL COMMENT '收件地址', `postcode` varchar(45) DEFAULT NULL COMMENT '邮编', `deliver_id` int(11) DEFAULT NULL COMMENT '快递公司id', `deliver_date` datetime DEFAULT NULL COMMENT '发货日期', `remark` text COMMENT '备注', `status` int(11) DEFAULT NULL COMMENT '订单状态', `deliver_num` varchar(45) DEFAULT NULL COMMENT '快递单号', `order_source` int(11) NOT NULL COMMENT '订单来源,1: 电话,2: 官网,3:微信', `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, `end_user` varchar(32) DEFAULT NULL COMMENT '最终用户', PRIMARY KEY (`id`,`card_id`,`card_num`) ) ENGINE=InnoDB AUTO_INCREMENT=10000000 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `customer` -- DROP TABLE IF EXISTS `customer`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `customer` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '客户名称', `type` int(11) DEFAULT NULL COMMENT '1:代理商 2:企业大客户', `contact_person` varchar(45) DEFAULT NULL COMMENT '联系人', `phone` varchar(45) DEFAULT NULL COMMENT '手机号', `address` varchar(45) DEFAULT NULL COMMENT '地址', `status` int(11) NOT NULL DEFAULT '2' COMMENT '1: 启用 2:停用', `ctime` datetime DEFAULT NULL COMMENT '创建时间', `utime` datetime DEFAULT NULL COMMENT '更新时间', `email` varchar(45) DEFAULT NULL COMMENT '邮箱', `postcode` varchar(45) DEFAULT NULL COMMENT '邮编', `remark` text COMMENT '备注', PRIMARY KEY (`id`) ) ENGINE=InnoDB 
AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `deliver` -- DROP TABLE IF EXISTS `deliver`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `deliver` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT '快递自增ID', `name` varchar(20) NOT NULL COMMENT '快递名称', `status` tinyint(4) NOT NULL DEFAULT '1' COMMENT '状态(1:使用中,2:停用)', `remark` varchar(120) DEFAULT NULL COMMENT '备注', `ctime` datetime DEFAULT NULL COMMENT '创建时间', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `dim` -- DROP TABLE IF EXISTS `dim`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `dim` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '自增ID', `dim_id` int(10) unsigned NOT NULL COMMENT '维度类型ID', `dim_type` varchar(45) NOT NULL COMMENT '维度类型,gift_type: 商品类型,deliver:快递列表 3: wechat_style', `dim_value` varchar(45) NOT NULL COMMENT '维度值', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `entity_order` -- DROP TABLE IF EXISTS `entity_order`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `entity_order` ( `id` int(11) NOT NULL AUTO_INCREMENT, `sales` varchar(45) DEFAULT NULL, `deal_date` date DEFAULT NULL, `enduser` varchar(45) DEFAULT NULL, `expire_date` date DEFAULT NULL, `remark` varchar(255) DEFAULT NULL, `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, `order_name` varchar(255) DEFAULT NULL, `price` varchar(30) DEFAULT NULL, `status` int(11) DEFAULT NULL, `pay_remark` varchar(255) DEFAULT NULL, `oper_person` varchar(45) DEFAULT NULL, `customer_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`) ) 
ENGINE=InnoDB AUTO_INCREMENT=100000000 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `entity_order_book_map` -- DROP TABLE IF EXISTS `entity_order_book_map`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `entity_order_book_map` ( `id` int(11) NOT NULL AUTO_INCREMENT, `eorder_id` int(11) DEFAULT NULL, `book_id` int(11) DEFAULT NULL, `book_name` varchar(255) DEFAULT NULL, `price` int(11) DEFAULT NULL, `discount` int(11) DEFAULT NULL, `book_count` int(11) DEFAULT NULL, `sum_price` int(11) DEFAULT NULL, `book_remark` varchar(255) DEFAULT NULL, `status` int(11) DEFAULT NULL, `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `exchange_order_detail` -- DROP TABLE IF EXISTS `exchange_order_detail`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `exchange_order_detail` ( `id` int(11) NOT NULL AUTO_INCREMENT, `order_id` int(11) DEFAULT NULL, `to_gift` int(11) DEFAULT NULL, `diliver_money` int(11) DEFAULT NULL, `remark` varchar(255) DEFAULT NULL, `oper_person` varchar(45) DEFAULT NULL, `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, `from_gift` varchar(45) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `order_id_UNIQUE` (`order_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift` -- DROP TABLE IF EXISTS `gift`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `gift` ( `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '商品id', `name` varchar(40) NOT NULL COMMENT '商品名称', `groupid` varchar(320) NOT NULL COMMENT '仅针对组合商品,33*2,34*4. 
单品次字段为''''', `type` int(11) DEFAULT NULL COMMENT 'dim表id', `classify_id` int(11) DEFAULT NULL COMMENT '商品分类id', `brand_id` int(11) DEFAULT NULL COMMENT '商品品牌id', `supply_id` int(11) DEFAULT NULL COMMENT '商品供应商id', `sale_price` decimal(15,2) DEFAULT NULL COMMENT '销售价格', `buy_price` decimal(15,2) DEFAULT NULL COMMENT '采购价格', `store_num` int(11) DEFAULT NULL COMMENT '库存', `munit` varchar(45) DEFAULT NULL COMMENT '商品计量单位', `deliver_id` int(11) DEFAULT NULL COMMENT '快递id', `desciption` text COMMENT '商品描述', `pic_ids` varchar(255) DEFAULT NULL COMMENT '宣传图片id,用逗号的拼接列表', `remark` text COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '状态 1: 上架, 2 下架', `ctime` datetime DEFAULT NULL COMMENT '创建时间', `utime` datetime DEFAULT NULL COMMENT '更新时间', `sold_num` int(11) NOT NULL DEFAULT '0' COMMENT '售出数量,初始为0', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=10000 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift_book` -- DROP TABLE IF EXISTS `gift_book`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `gift_book` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '礼册名称', `theme_id` int(11) DEFAULT NULL COMMENT '礼册主题id', `set_id` int(11) DEFAULT NULL COMMENT '礼册系列id', `wechat_id` int(11) DEFAULT NULL COMMENT '微信模版id', `type_id` int(11) DEFAULT NULL COMMENT '礼册类型id 1: 普通 2: 年卡 3: 半年卡 4:季卡', `sale_price` float DEFAULT NULL COMMENT '销售价格', `group_ids` varchar(255) DEFAULT NULL COMMENT '33*3,34*2', `describe` text COMMENT '礼册描述', `pic_id` int(11) DEFAULT NULL COMMENT '上传后,需要保存到 media表', `remark` text COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1: 启用 2: 停用', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=10000 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift_brand` -- DROP TABLE IF EXISTS `gift_brand`; /*!40101 SET @saved_cs_client 
= @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `gift_brand` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '品牌名称', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1:使用 2:停用', `remark` text COMMENT '备注', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift_card` -- DROP TABLE IF EXISTS `gift_card`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `gift_card` ( `id` int(11) NOT NULL AUTO_INCREMENT, `num_code` int(11) DEFAULT NULL COMMENT '礼品卡号码', `password` varchar(12) DEFAULT NULL COMMENT '密码', `ctime` datetime DEFAULT NULL COMMENT '生成时间', `status` int(11) NOT NULL DEFAULT '1' COMMENT '状态 1: 未激活 2: 已激活 3:已使用 4: 已过期 5: 已退卡 6: 冻结', `book_id` int(11) DEFAULT NULL COMMENT '礼册id', `price` decimal(15,2) NOT NULL DEFAULT '0.00' COMMENT '价格', `discount` decimal(15,2) DEFAULT NULL COMMENT '折扣 0-10', `expire_date` datetime DEFAULT NULL COMMENT '失效时间', `is_draw` tinyint(1) NOT NULL DEFAULT '1' COMMENT '是否可兑换(0:不可兑换,1:可兑换)', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift_classify` -- DROP TABLE IF EXISTS `gift_classify`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `gift_classify` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '分类名称', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1:使用 2:停用', `remark` text COMMENT '备注', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `gift_supply` -- DROP TABLE IF EXISTS `gift_supply`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET 
character_set_client = utf8 */; CREATE TABLE `gift_supply` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '品牌名称', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1:使用 2:停用', `remark` text COMMENT '备注', `contact_person` varchar(45) DEFAULT NULL COMMENT '联系人', `phone` varchar(45) DEFAULT NULL COMMENT '手机号', `qq` varchar(45) DEFAULT NULL COMMENT 'qq号', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `map_order_card` -- DROP TABLE IF EXISTS `map_order_card`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `map_order_card` ( `id` int(11) NOT NULL AUTO_INCREMENT, `order_id` int(11) NOT NULL COMMENT '订单id', `card_id` int(11) NOT NULL COMMENT '礼品卡id', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `media` -- DROP TABLE IF EXISTS `media`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `media` ( `id` int(11) NOT NULL AUTO_INCREMENT, `path` varchar(255) NOT NULL COMMENT '存储路径,包括商品图片,多媒体管理里面的图片/视频/音频', `name` varchar(45) DEFAULT NULL COMMENT '名称', `status` int(11) NOT NULL DEFAULT '2' COMMENT '1:停用 2启用', `ctime` datetime DEFAULT NULL COMMENT '创建时间', `utime` datetime DEFAULT NULL COMMENT '更新时间', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `mediainfo` -- DROP TABLE IF EXISTS `mediainfo`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `mediainfo` ( `id` int(11) NOT NULL AUTO_INCREMENT, `media_id` int(11) NOT NULL COMMENT '存储路径,包括商品图片,多媒体管理里面的图片/视频/音频', `name` varchar(45) DEFAULT NULL COMMENT '名称', `status` int(11) NOT 
NULL DEFAULT '2' COMMENT '1:停用 2启用', `ctime` datetime DEFAULT NULL COMMENT '创建时间', `utime` datetime DEFAULT NULL COMMENT '更新时间', `type` int(11) DEFAULT NULL, `author` varchar(45) DEFAULT NULL, `remark` varchar(255) DEFAULT NULL, `expire_date` date DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `return_order_detail` -- DROP TABLE IF EXISTS `return_order_detail`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `return_order_detail` ( `id` int(11) NOT NULL AUTO_INCREMENT, `order_id` varchar(45) DEFAULT NULL, `return_amount` float DEFAULT NULL, `bank` int(11) DEFAULT NULL, `open_bank_address` varchar(255) DEFAULT NULL, `bank_card_num` varchar(45) DEFAULT NULL, `bank_card_name` varchar(45) DEFAULT NULL, `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, `oper_person` varchar(45) DEFAULT NULL, `remark` varchar(255) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `order_id_UNIQUE` (`order_id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `role` -- DROP TABLE IF EXISTS `role`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `role` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '角色ID', `name` varchar(45) NOT NULL COMMENT '角色名称', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `sales_order_book` -- DROP TABLE IF EXISTS `sales_order_book`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `sales_order_book` ( `id` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT '自增ID', `order_id` int(11) NOT NULL COMMENT '订单ID', `book_id` 
bigint(20) NOT NULL COMMENT '礼册ID', `book_name` varchar(40) NOT NULL COMMENT '礼册名', `price` decimal(15,2) NOT NULL DEFAULT '0.00' COMMENT '单价', `discount` decimal(15,2) DEFAULT NULL COMMENT '折扣', `scode` bigint(20) NOT NULL COMMENT '开始号码', `ecode` bigint(20) NOT NULL COMMENT '结束号码', `num` int(11) NOT NULL DEFAULT '0' COMMENT '数量', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `set` -- DROP TABLE IF EXISTS `set`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `set` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '系列ID', `name` varchar(45) NOT NULL COMMENT '系列名', `remark` text COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1: 启用 2: 停用', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `theme` -- DROP TABLE IF EXISTS `theme`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `theme` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '主题名', `remark` text COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1: 启用 2: 停用', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `user` -- DROP TABLE IF EXISTS `user`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `user` ( `id` int(11) NOT NULL AUTO_INCREMENT COMMENT '用户ID', `user_name` varchar(45) NOT NULL COMMENT '账号4-15数字字母任意组合', `nick_name` varchar(42) DEFAULT NULL, `password` varchar(45) DEFAULT NULL COMMENT '密码', `email` varchar(45) DEFAULT NULL COMMENT '邮箱', `phone` varchar(45) DEFAULT NULL COMMENT '手机号', `role` varchar(120) DEFAULT NULL 
COMMENT '角色身份id', `create_time` datetime DEFAULT NULL COMMENT '创建时间', PRIMARY KEY (`id`), UNIQUE KEY `user_name` (`user_name`) ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Temporary view structure for view `view_book_card` -- DROP TABLE IF EXISTS `view_book_card`; /*!50001 DROP VIEW IF EXISTS `view_book_card`*/; SET @saved_cs_client = @@character_set_client; SET character_set_client = utf8; /*!50001 CREATE VIEW `view_book_card` AS SELECT 1 AS `num_code`, 1 AS `expire_date`, 1 AS `book_id`, 1 AS `book_name`, 1 AS `sale_price`*/; SET character_set_client = @saved_cs_client; -- -- Temporary view structure for view `view_book_gift` -- DROP TABLE IF EXISTS `view_book_gift`; /*!50001 DROP VIEW IF EXISTS `view_book_gift`*/; SET @saved_cs_client = @@character_set_client; SET character_set_client = utf8; /*!50001 CREATE VIEW `view_book_gift` AS SELECT 1 AS `gift_id`, 1 AS `book_id`, 1 AS `gift_name`, 1 AS `sale_price`, 1 AS `store_num`, 1 AS `sold_num`*/; SET character_set_client = @saved_cs_client; -- -- Temporary view structure for view `view_eorder_customer_user` -- DROP TABLE IF EXISTS `view_eorder_customer_user`; /*!50001 DROP VIEW IF EXISTS `view_eorder_customer_user`*/; SET @saved_cs_client = @@character_set_client; SET character_set_client = utf8; /*!50001 CREATE VIEW `view_eorder_customer_user` AS SELECT 1 AS `id`, 1 AS `sales`, 1 AS `deal_date`, 1 AS `enduser`, 1 AS `expire_date`, 1 AS `remark`, 1 AS `order_name`, 1 AS `price`, 1 AS `status`, 1 AS `oper_person`, 1 AS `customer_id`, 1 AS `pay_remark`, 1 AS `customer_name`, 1 AS `contact_person`, 1 AS `phone`, 1 AS `address`, 1 AS `sales_name`*/; SET character_set_client = @saved_cs_client; -- -- Temporary view structure for view `view_order_gift_card` -- DROP TABLE IF EXISTS `view_order_gift_card`; /*!50001 DROP VIEW IF EXISTS `view_order_gift_card`*/; SET @saved_cs_client = @@character_set_client; SET character_set_client = utf8; /*!50001 
CREATE VIEW `view_order_gift_card` AS SELECT 1 AS `id`, 1 AS `card_num`, 1 AS `gift_id`, 1 AS `gift_name`, 1 AS `customer_name`, 1 AS `phone`, 1 AS `address`, 1 AS `deliver_id`, 1 AS `status`, 1 AS `deliver_num`, 1 AS `order_source`, 1 AS `book_id`*/; SET character_set_client = @saved_cs_client; -- -- Table structure for table `website` -- DROP TABLE IF EXISTS `website`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `website` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) DEFAULT NULL COMMENT '网站名称', `type` int(11) DEFAULT NULL COMMENT '1:兑换网站 2:礼册商城', `domain` varchar(45) DEFAULT NULL COMMENT '绑定域名', `hotline` varchar(45) DEFAULT NULL COMMENT '客服热线', `qq` varchar(45) DEFAULT NULL COMMENT 'qq号码', `expire_date` date DEFAULT NULL COMMENT '有效期', `pic_id` int(11) DEFAULT NULL COMMENT 'log id,来自呀media表', `description` text COMMENT '描述', `remark` varchar(45) DEFAULT NULL COMMENT '备注', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1: 启用 2:停用', `ctime` datetime DEFAULT NULL COMMENT '创建时间', `utime` datetime DEFAULT NULL COMMENT '更新时间', PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=36 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Table structure for table `wechat` -- DROP TABLE IF EXISTS `wechat`; /*!40101 SET @saved_cs_client = @@character_set_client */; /*!40101 SET character_set_client = utf8 */; CREATE TABLE `wechat` ( `id` int(11) NOT NULL AUTO_INCREMENT, `name` varchar(45) NOT NULL COMMENT '模版名称', `style` int(11) DEFAULT NULL COMMENT '样式,从dim 取wechat_style', `pic_id` int(11) DEFAULT NULL COMMENT '图片id', `audio_id` varchar(45) DEFAULT NULL COMMENT '音频id', `vedio_id` varchar(45) DEFAULT NULL COMMENT '视频id', `copywriter` text COMMENT '文案', `url` varchar(255) DEFAULT NULL COMMENT '网址', `expire_time` date DEFAULT NULL COMMENT '有效期', `status` int(11) NOT NULL DEFAULT '1' COMMENT '1: 启用 2:停用', `sender` varchar(45) DEFAULT NULL, `reciver` varchar(45) 
DEFAULT NULL, `remark` text COMMENT '备注', `ctime` datetime DEFAULT NULL, `utime` datetime DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8; /*!40101 SET character_set_client = @saved_cs_client */; -- -- Final view structure for view `view_book_card` -- /*!50001 DROP VIEW IF EXISTS `view_book_card`*/; /*!50001 SET @saved_cs_client = @@character_set_client */; /*!50001 SET @saved_cs_results = @@character_set_results */; /*!50001 SET @saved_col_connection = @@collation_connection */; /*!50001 SET character_set_client = utf8 */; /*!50001 SET character_set_results = utf8 */; /*!50001 SET collation_connection = utf8_general_ci */; /*!50001 CREATE ALGORITHM=UNDEFINED */ /*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */ /*!50001 VIEW `view_book_card` AS select `a`.`num_code` AS `num_code`,`a`.`expire_date` AS `expire_date`,`b`.`id` AS `book_id`,`b`.`name` AS `book_name`,`b`.`sale_price` AS `sale_price` from (`gift_card` `a` join `gift_book` `b` on((`a`.`book_id` = `b`.`id`))) */; /*!50001 SET character_set_client = @saved_cs_client */; /*!50001 SET character_set_results = @saved_cs_results */; /*!50001 SET collation_connection = @saved_col_connection */; -- -- Final view structure for view `view_book_gift` -- /*!50001 DROP VIEW IF EXISTS `view_book_gift`*/; /*!50001 SET @saved_cs_client = @@character_set_client */; /*!50001 SET @saved_cs_results = @@character_set_results */; /*!50001 SET @saved_col_connection = @@collation_connection */; /*!50001 SET character_set_client = utf8 */; /*!50001 SET character_set_results = utf8 */; /*!50001 SET collation_connection = utf8_general_ci */; /*!50001 CREATE ALGORITHM=UNDEFINED */ /*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */ /*!50001 VIEW `view_book_gift` AS select `a`.`id` AS `gift_id`,`b`.`gift_book_id` AS `book_id`,`a`.`name` AS `gift_name`,`a`.`sale_price` AS `sale_price`,`a`.`store_num` AS `store_num`,`a`.`sold_num` AS `sold_num` from (`gift` `a` join 
`book_goods_mapping` `b` on((`a`.`id` = `b`.`gift_id`))) */; /*!50001 SET character_set_client = @saved_cs_client */; /*!50001 SET character_set_results = @saved_cs_results */; /*!50001 SET collation_connection = @saved_col_connection */; -- -- Final view structure for view `view_eorder_customer_user` -- /*!50001 DROP VIEW IF EXISTS `view_eorder_customer_user`*/; /*!50001 SET @saved_cs_client = @@character_set_client */; /*!50001 SET @saved_cs_results = @@character_set_results */; /*!50001 SET @saved_col_connection = @@collation_connection */; /*!50001 SET character_set_client = utf8 */; /*!50001 SET character_set_results = utf8 */; /*!50001 SET collation_connection = utf8_general_ci */; /*!50001 CREATE ALGORITHM=UNDEFINED */ /*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */ /*!50001 VIEW `view_eorder_customer_user` AS select `a`.`id` AS `id`,`a`.`sales` AS `sales`,`a`.`deal_date` AS `deal_date`,`a`.`enduser` AS `enduser`,`a`.`expire_date` AS `expire_date`,`a`.`remark` AS `remark`,`a`.`order_name` AS `order_name`,`a`.`price` AS `price`,`a`.`status` AS `status`,`a`.`oper_person` AS `oper_person`,`a`.`customer_id` AS `customer_id`,`a`.`pay_remark` AS `pay_remark`,`b`.`name` AS `customer_name`,`b`.`contact_person` AS `contact_person`,`b`.`phone` AS `phone`,`b`.`address` AS `address`,`c`.`nick_name` AS `sales_name` from ((`entity_order` `a` join `customer` `b`) join `user` `c` on(((`a`.`customer_id` = `b`.`id`) and (`a`.`sales` = `c`.`id`)))) */; /*!50001 SET character_set_client = @saved_cs_client */; /*!50001 SET character_set_results = @saved_cs_results */; /*!50001 SET collation_connection = @saved_col_connection */; -- -- Final view structure for view `view_order_gift_card` -- /*!50001 DROP VIEW IF EXISTS `view_order_gift_card`*/; /*!50001 SET @saved_cs_client = @@character_set_client */; /*!50001 SET @saved_cs_results = @@character_set_results */; /*!50001 SET @saved_col_connection = @@collation_connection */; /*!50001 SET character_set_client = utf8 
*/; /*!50001 SET character_set_results = utf8 */; /*!50001 SET collation_connection = utf8_general_ci */; /*!50001 CREATE ALGORITHM=UNDEFINED */ /*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */ /*!50001 VIEW `view_order_gift_card` AS select `a`.`id` AS `id`,`a`.`card_num` AS `card_num`,`a`.`gift_id` AS `gift_id`,`b`.`name` AS `gift_name`,`a`.`customer_name` AS `customer_name`,`a`.`phone` AS `phone`,`a`.`address` AS `address`,`a`.`deliver_id` AS `deliver_id`,`a`.`status` AS `status`,`a`.`deliver_num` AS `deliver_num`,`a`.`order_source` AS `order_source`,`c`.`book_id` AS `book_id` from ((`change_order` `a` join `gift` `b`) join `gift_card` `c` on(((`a`.`gift_id` = `b`.`id`) and (`a`.`card_num` = `c`.`num_code`)))) */; /*!50001 SET character_set_client = @saved_cs_client */; /*!50001 SET character_set_results = @saved_cs_results */; /*!50001 SET collation_connection = @saved_col_connection */; /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; -- Dump completed on 2015-12-30 11:32:28
{'content_hash': '5db88e8845473422e3ef8bbf55891416', 'timestamp': '', 'source': 'github', 'line_count': 770, 'max_line_length': 680, 'avg_line_length': 39.346753246753245, 'alnum_prop': 0.6739941248308413, 'repo_name': 'neusdq/www', 'id': '97fae71ba3bb2bedb7ec81981f99df3756ae03ea', 'size': '31831', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'doc/schema.sql', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '1584'}, {'name': 'CSS', 'bytes': '269550'}, {'name': 'Go', 'bytes': '15680'}, {'name': 'HTML', 'bytes': '9598186'}, {'name': 'JavaScript', 'bytes': '668442'}, {'name': 'PHP', 'bytes': '2480996'}, {'name': 'Python', 'bytes': '12750'}]}
cd ./docker && ./deploy.sh The first time you run ./deploy.sh, it might take several minutes, but will take a few seconds on future runs. ## Destroying the environment docker-compose down ## Opening a shell Once Docker is set up (deployed), open a shell like this: cd ./docker && docker-compose exec scrapers-ca /bin/bash ## Usage To run a scraper, e.g. `ca_on`, open a shell as above, and: pupa update ca_on To wipe the database: docker-compose exec scrapers-ca sudo -u postgres dropdb pupa docker-compose exec scrapers-ca sudo -u postgres createdb pupa Then, open a shell as above, and: pupa dbinit ca To interact with the postgres db directly for debugging purposes: psql pupa root
{'content_hash': '590667f9fa8bd8f22411c255ce12b268', 'timestamp': '', 'source': 'github', 'line_count': 34, 'max_line_length': 110, 'avg_line_length': 21.58823529411765, 'alnum_prop': 0.7084468664850136, 'repo_name': 'opencivicdata/scrapers-ca', 'id': '6a80d711c2c87423f7a24b64eeb760ec947f4fa2', 'size': '754', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'docker/README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Dockerfile', 'bytes': '832'}, {'name': 'Python', 'bytes': '374889'}, {'name': 'Shell', 'bytes': '1759'}]}
package selinux import ( "testing" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/apis/extensions" ) func TestRunAsAnyOptions(t *testing.T) { _, err := NewRunAsAny(nil) if err != nil { t.Fatalf("unexpected error initializing NewRunAsAny %v", err) } _, err = NewRunAsAny(&extensions.SELinuxStrategyOptions{}) if err != nil { t.Errorf("unexpected error initializing NewRunAsAny %v", err) } } func TestRunAsAnyGenerate(t *testing.T) { s, err := NewRunAsAny(&extensions.SELinuxStrategyOptions{}) if err != nil { t.Fatalf("unexpected error initializing NewRunAsAny %v", err) } uid, err := s.Generate(nil, nil) if uid != nil { t.Errorf("expected nil uid but got %v", *uid) } if err != nil { t.Errorf("unexpected error generating uid %v", err) } } func TestRunAsAnyValidate(t *testing.T) { s, err := NewRunAsAny(&extensions.SELinuxStrategyOptions{ SELinuxOptions: &api.SELinuxOptions{ Level: "foo", }, }, ) if err != nil { t.Fatalf("unexpected error initializing NewRunAsAny %v", err) } errs := s.Validate(nil, nil) if len(errs) != 0 { t.Errorf("unexpected errors validating with ") } s, err = NewRunAsAny(&extensions.SELinuxStrategyOptions{}) if err != nil { t.Fatalf("unexpected error initializing NewRunAsAny %v", err) } errs = s.Validate(nil, nil) if len(errs) != 0 { t.Errorf("unexpected errors validating %v", errs) } }
{'content_hash': '71fcd166348aa8e72f7fbd89ac47cd9e', 'timestamp': '', 'source': 'github', 'line_count': 59, 'max_line_length': 63, 'avg_line_length': 23.491525423728813, 'alnum_prop': 0.6825396825396826, 'repo_name': 'sjpotter/kubernetes', 'id': '4f5db4e68fd53b78dde31b8fc1c400c5905c7497', 'size': '1975', 'binary': False, 'copies': '40', 'ref': 'refs/heads/master', 'path': 'pkg/security/podsecuritypolicy/selinux/runasany_test.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '998'}, {'name': 'Go', 'bytes': '25576375'}, {'name': 'HTML', 'bytes': '1193990'}, {'name': 'Makefile', 'bytes': '57660'}, {'name': 'Protocol Buffer', 'bytes': '231743'}, {'name': 'Python', 'bytes': '34565'}, {'name': 'SaltStack', 'bytes': '47867'}, {'name': 'Shell', 'bytes': '1363964'}]}
package ru.yandex.qatools.allure.config; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Arrays; import java.util.Collection; import static org.junit.Assert.assertEquals; import static ru.yandex.qatools.allure.config.AllureNamingUtils.replaceBadXmlCharactersBySpace; /** * @author Dmitry Baev [email protected] * Date: 18.03.15 */ @RunWith(Parameterized.class) public class ReplaceBadXmlCharactersBySpaceTest { @Parameterized.Parameter(0) public String string; @Parameterized.Parameter(1) public int off; @Parameterized.Parameter(2) public int len; @Parameterized.Parameter(3) public String expected; @Parameterized.Parameters public static Collection<Object[]> data() { return Arrays.asList( new Object[]{"asdzxcADSZXC", 0, 12, "asdzxcADSZXC"}, new Object[]{"апрячсАПРЯЧС", 0, 12, "апрячсАПРЯЧС"}, new Object[]{"hi\nall", 0, 6, "hi\nall"}, new Object[]{"hi\u0019all", 0, 6, "hi all"}, new Object[]{"hi all\u0019", 0, 6, "hi all\u0019"}, new Object[]{"hi\u0019 all\u0019", 5, 2, "hi\u0019 all\u0019"}, new Object[]{"hi\u0019 all\u0019", 5, 3, "hi\u0019 all "} ); } @Test public void shouldDetectBadCharacter() throws Exception { char[] cbuf = string.toCharArray(); replaceBadXmlCharactersBySpace(cbuf, off, len); String actual = String.copyValueOf(cbuf); assertEquals(expected, actual); } }
{'content_hash': '961e6eb6825ded38e771da90c13f0797', 'timestamp': '', 'source': 'github', 'line_count': 54, 'max_line_length': 95, 'avg_line_length': 29.62962962962963, 'alnum_prop': 0.643125, 'repo_name': 'allure-framework/allure1', 'id': 'b515d70108daab7ec200fedd427292a1ae557b88', 'size': '1624', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'allure-model/src/test/java/ru/yandex/qatools/allure/config/ReplaceBadXmlCharactersBySpaceTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '2521'}, {'name': 'CSS', 'bytes': '21989'}, {'name': 'FreeMarker', 'bytes': '536'}, {'name': 'Groovy', 'bytes': '121593'}, {'name': 'HTML', 'bytes': '20546'}, {'name': 'Java', 'bytes': '342254'}, {'name': 'JavaScript', 'bytes': '95541'}, {'name': 'Shell', 'bytes': '1788'}]}
[![Build Status](https://travis-ci.org/trustedanalytics/spark-tk.svg?branch=master)](https://travis-ci.org/trustedanalytics/spark-tk) # spark-tk **spark-tk** is a library which enhances the Spark experience by providing [a rich, easy-to-use API for Python and Scala](http://trustedanalytics.github.io/spark-tk/). It adds new machine learning capabilities and other operations, like working with DICOM images for example. ## Overview Spark-tk simplifies applying machine learning to big data for superior knowledge, discovery and predictive modeling across a wide variety of use cases and solutions. Its APIs span feature engineering, graph construction, and various types of machine learning. The APIs are geared at an abstraction level familiar to data scientists (similar to Python pandas, scikit-learn) and removes the complexity of cluster computing and parallel processing. The library works alongside Spark and makes it easier to program. The lower-level Spark APIs are also seamlessly exposed through the library. Applications written with Spark-tk will have access the best of both worlds for the given situation. All functionality operates at full scale according to the Spark configuration. ### Frame Interface Spark-tk uses a Frame object for its scalable data frame representation, which is familiar and intuitive to data researchers compared to low level HDFS file and Spark RDD/DataFrame/DataSet formats. The library provides an API to manipulate the data frames for feature engineering and exploration, such as joins and aggregations. User-defined transformations and filters can be written and applied to large data sets using distributed processing. ### Graph Analytics Spark-tk uses a Graph object for its scalable graph representation, based on a Frame holding vertices and another Frame holding edges. Graph representations are broadly useful. 
+ **Use Case:** linking disparate data with arbitrary edge types and then analyzing the connections for powerful predictive signals that can otherwise be missed with entity-based methods. Working with graph representations can often be more intuitive and computationally efficient for data sets where the connections between data observations are more numerous and more important than the data points alone. Spark-tk brings together the capabilities to create and analyze graphs, including engineering features and applying graph-based algorithms. Since the graphs are built using frames, Frame operations may be seamlessly applied to graphs. + **Use Case:** applying a clustering algorithm to a vertex list with features developed using graph analytics. Spark-tk supports importing and exporting graphs to the OrientDB's scalable graph database. Graph databases allow users to run real-time queries on their graph data. ### Machine Learning The toolkit provides algorithms for supervised, unsupervised, and semi-supervised machine learning using both entity and graphical machine learning tools. Examples include time-series analysis, recommender systems using collaborative filtering, topic modeling using Latent Dirichlet Allocation, clustering using K-means, and classification using logistic regression. Available graph algorithms such as label propagation and loopy belief propagation exploit the connections in the graph structure and provide powerful new methods of labeling or classifying graph data. Most of the Machine Learning is exposed through the Models API. The Models API provides a simplified interface for data scientists to create, train, and test the performance of their models. The trained models can then be used for predictions, classifications and recommendations. Data scientists can also persist models by using the model save and load methods. ### Image Processing Spark-tk includes support for ingesting and processing DICOM images in a distributed environment. 
DICOM is the international standard for medical images and related information (ISO 12052). Sparktk provides queries, filters, and analytics on collections of these images. ### Documentation API Reference pages for Python and Scala are located [here](http://trustedanalytics.github.io/spark-tk/). ## Example: Create a TkContext [//]:# "<skip>" >>> from sparktk import TkContext >>> tc = TkContext() [//]:# "</skip>" Upload some tabular data >>> frame1 = tc.frame.create(data=[[2, 3], ... [1, 4], ... [7, 1], ... [1, 1], ... [9, 2], ... [2, 4], ... [0, 4], ... [6, 3], ... [5, 6]], ... schema=[("a", int), ("b", int)]) Do a linear transform >>> frame1.add_columns(lambda row: row.a * 2 + row.b, schema=("c", int)) >>> frame1.inspect() [#] a b c ============= [0] 2 3 7 [1] 1 4 6 [2] 7 1 15 [3] 1 1 3 [4] 9 2 20 [5] 2 4 8 [6] 0 4 4 [7] 6 3 15 [8] 5 6 16 Train a K-Means model >>> km = tc.models.clustering.kmeans.train(frame1, "c", k=3, seed=5) >>> km.centroids [[5.6000000000000005], [15.333333333333332], [20.0]] Add cluster predictions to the frame >>> pf = km.predict(frame1) >>> pf.inspect() [#] a b c cluster ====================== [0] 2 3 7 0 [1] 1 4 6 0 [2] 7 1 15 1 [3] 1 1 3 0 [4] 9 2 20 2 [5] 2 4 8 0 [6] 0 4 4 0 [7] 6 3 15 1 [8] 5 6 16 1 Upload some new data and predict >>> frame2 = tc.frame.create([[3], [8], [16], [1], [13], [18]]) >>> pf2 = km.predict(frame2, 'C0') >>> pf2.inspect() [#] C0 cluster ================ [0] 3 0 [1] 8 0 [2] 16 1 [3] 1 0 [4] 13 1 [5] 18 2
{'content_hash': '533fb3493cb10886288fcf9c7c044b61', 'timestamp': '', 'source': 'github', 'line_count': 148, 'max_line_length': 134, 'avg_line_length': 41.24324324324324, 'alnum_prop': 0.6590760157273918, 'repo_name': 'Haleyo/spark-tk', 'id': '0ab95c9648156284ca1958d1a76b9910e3281b64', 'size': '6104', 'binary': False, 'copies': '9', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '31130'}, {'name': 'Python', 'bytes': '1699127'}, {'name': 'R', 'bytes': '2242'}, {'name': 'Scala', 'bytes': '2211252'}, {'name': 'Shell', 'bytes': '28677'}]}
import _extends from "@babel/runtime/helpers/esm/extends"; import _objectWithoutPropertiesLoose from "@babel/runtime/helpers/esm/objectWithoutPropertiesLoose"; import * as React from 'react'; import PropTypes from 'prop-types'; import clsx from 'clsx'; import withStyles from '../styles/withStyles'; export const styles = { /* Styles applied to the root element. */ root: { display: 'flex', alignItems: 'center', padding: 8 }, /* Styles applied to the root element if `disableSpacing={false}`. */ spacing: { '& > :not(:first-child)': { marginLeft: 8 } } }; const CardActions = React.forwardRef(function CardActions(props, ref) { const { disableSpacing = false, classes, className } = props, other = _objectWithoutPropertiesLoose(props, ["disableSpacing", "classes", "className"]); return /*#__PURE__*/React.createElement("div", _extends({ className: clsx(classes.root, className, !disableSpacing && classes.spacing), ref: ref }, other)); }); process.env.NODE_ENV !== "production" ? CardActions.propTypes = { // ----------------------------- Warning -------------------------------- // | These PropTypes are generated from the TypeScript type definitions | // | To update them edit the d.ts file and run "yarn proptypes" | // ---------------------------------------------------------------------- /** * The content of the component. */ children: PropTypes.node, /** * Override or extend the styles applied to the component. * See [CSS API](#css) below for more details. */ classes: PropTypes.object, /** * @ignore */ className: PropTypes.string, /** * If `true`, the actions do not have additional margin. */ disableSpacing: PropTypes.bool } : void 0; export default withStyles(styles, { name: 'MuiCardActions' })(CardActions);
{'content_hash': '27c2700addeb8b6740245c0b247f0708', 'timestamp': '', 'source': 'github', 'line_count': 64, 'max_line_length': 100, 'avg_line_length': 29.171875, 'alnum_prop': 0.614354579539368, 'repo_name': 'cdnjs/cdnjs', 'id': 'f2e173b0a60b9ecaf49fa15860d82ae39693194c', 'size': '1867', 'binary': False, 'copies': '9', 'ref': 'refs/heads/master', 'path': 'ajax/libs/material-ui/4.10.0/es/CardActions/CardActions.js', 'mode': '33188', 'license': 'mit', 'language': []}
<script src="/bower_components/webcomponentsjs/webcomponents.min.js"></script> <link rel="import" href="/bower_components/polymer/polymer.html"> <link href="file-uploader/assets/css/style.css" rel="stylesheet" /> <!-- JavaScript Includes --> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <script src="file-uploader/assets/js/jquery.knob.js"></script> <!-- jQuery File Upload Dependencies --> <script src="file-uploader/assets/js/jquery.ui.widget.js"></script> <script src="file-uploader/assets/js/jquery.iframe-transport.js"></script> <script src="file-uploader/assets/js/jquery.fileupload.js"></script> <!-- Our main JS file --> <script src="file-uploader/assets/js/script.js"></script> <!-- Only used for the demos. Please ignore and remove. --> <script src="http://cdn.tutorialzine.com/misc/enhance/v1.js" async></script> <dom-module id="file-uploader"> <template> <form id="upload" method="post" action="file-uploader/upload.php" enctype="multipart/form-data"> <div id="drop"> Drop Here <a>Browse</a> <input type="file" name="upl" multiple /> </div> <ul> <!-- The file uploads will be shown here --> </ul> </form> </template> <script> Polymer({ is: 'file-uploader', properties: { } }); </script> </dom-module>
{'content_hash': 'ad28922240849dd9103a98ec151c9056', 'timestamp': '', 'source': 'github', 'line_count': 43, 'max_line_length': 104, 'avg_line_length': 32.25581395348837, 'alnum_prop': 0.6438356164383562, 'repo_name': 'all15343/ProPro-Web', 'id': '751394626bf0e5aedb8bdd55bc958ef94307a603', 'size': '1387', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'elements/file-uploader.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '11802'}, {'name': 'HTML', 'bytes': '24848'}, {'name': 'JavaScript', 'bytes': '34899'}, {'name': 'PHP', 'bytes': '20784'}]}
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta content="text/html; charset=UTF-8" http-equiv="Content-Type" /> <meta content="2013-12-25 10:18:47 -0700" http-equiv="change-date" /> <title>1CR 1</title> <script src='../js/jquery-3.1.1.min.js' type='text/javascript' charset='utf-8'></script> <script src='../js/bpi.js' type="text/javascript" charset="utf-8"></script> <link rel="stylesheet" href='../css/bpi.css' > </head> <body> <div class="header"><h1 id="titulo">1º Crônicas 1<span id="trecho"></span></h1></div> <div id="passagem"> <div class="bible1 verses"> <p class="verse" verse="1"><sup>1</sup>Adão, Sete, Enos,</p> <p class="verse" verse="2"><sup>2</sup>Quenã, Maalalel, Jarede,</p> <p class="verse" verse="3"><sup>3</sup>Enoque, Matusalém, Lameque,</p> <p class="verse" verse="4"><sup>4</sup>Noé, Sem, Cam e Jafé.</p> <p class="verse" verse="5"><sup>5</sup>Os filhos de Jafé: Gomer, Magogue, Madai, Javã, Tubal, Meseque e Tiras.</p> <p class="verse" verse="6"><sup>6</sup>Os filhos de Gomer: Asquenaz, Rifate e Togarma.</p> <p class="verse" verse="7"><sup>7</sup>Os filhos de Javã: Elisá, Társis, Quitim e Dodanim.</p> <p class="verse" verse="8"><sup>8</sup>Os filhos de Cam: Cuche, Mizraim, Pute e Canaã.</p> <p class="verse" verse="9"><sup>9</sup>Os filhos de Cuche: Seba, Havilá, Sabtá, Raamá e Sabtecá; e os filhos de Raamá: Sebá e Dedã.</p> <p class="verse" verse="10"><sup>10</sup>Cuche foi pai de Ninrode, o qual foi o primeiro a ser poderoso na terra:</p> <p class="verse" verse="11"><sup>11</sup>De Mizraim descenderam os ludeus, os anameus, os leabeus, os naftueus,</p> <p class="verse" verse="12"><sup>12</sup>os patrusins, os casluins {dos quais procederam os filisteus} e os caftoreus.</p> <p class="verse" verse="13"><sup>13</sup>Canaã foi pai de Sidom, seu primogênito, e de Hete,</p> <p 
class="verse" verse="14"><sup>14</sup>e dos jebuseus, dos amorreus, dos girgaseus,</p> <p class="verse" verse="15"><sup>15</sup>dos heveus, dos arqueus, dos sineus,</p> <p class="verse" verse="16"><sup>16</sup>dos arvadeus, dos zemareus e dos hamateus.</p> <p class="verse" verse="17"><sup>17</sup>Os filhos de Sem: Elão, Assur, Arfaxade, Lude, Arã, Uz, Hul, Geter e Meseque.</p> <p class="verse" verse="18"><sup>18</sup>Arfaxade foi pai de Selá; e Selá foi pai de Eber.</p> <p class="verse" verse="19"><sup>19</sup>A Eber nasceram dois filhos: o nome dum foi Pelegue, pois nos seus dias se repartiu a terra; e o nome do seu irmão foi Joctã.</p> <p class="verse" verse="20"><sup>20</sup>Joctã foi pai de Almodá, Selefe, Hazarmavé, Jerá,</p> <p class="verse" verse="21"><sup>21</sup>Hadorão, Uzal, Diclá,</p> <p class="verse" verse="22"><sup>22</sup>Ebal, Abimael, Sebá,</p> <p class="verse" verse="23"><sup>23</sup>Ofir, Havilá e Jobabe; todos esses foram filhos de Joctã.</p> <p class="verse" verse="24"><sup>24</sup>Sem, Arfaxade, Selá;</p> <p class="verse" verse="25"><sup>25</sup>Eber, Pelegue, Reú;</p> <p class="verse" verse="26"><sup>26</sup>Serugue, Naor, Tera;</p> <p class="verse" verse="27"><sup>27</sup>Abrão, que é Abraão.</p> <p class="verse" verse="28"><sup>28</sup>Os filhos de Abraão: Isaque e Ismael.</p> <p class="verse" verse="29"><sup>29</sup>Estas são as suas gerações: o primogênito de Ismael, Nebaiote; depois Quedar, Adbeel, Mibsão,</p> <p class="verse" verse="30"><sup>30</sup>Misma, Dumá, Massá, Hadade, Tema,</p> <p class="verse" verse="31"><sup>31</sup>Jetur, Nafis e Quedemá; esses foram os filhos de Ismael.</p> <p class="verse" verse="32"><sup>32</sup>Quanto aos filhos de Quetura, concubina de Abraão, esta deu à luz Zinrã, Jocsã, Medã, Midiã, Isbaque e Suá. 
Os filhos de Jocsã foram Sebá e Dedã.</p> <p class="verse" verse="33"><sup>33</sup>Os filhos de Midiã: Efá, Efer, Hanoque, Abidá e Eldá; todos esses foram filhos de Quetura.</p> <p class="verse" verse="34"><sup>34</sup>Abraão foi pai de Isaque. Os filhos de Isaque: Esaú e Israel.</p> <p class="verse" verse="35"><sup>35</sup>Os filhos de Esaú: Elifaz, Reuel, Jeús, Jalão e Corá.</p> <p class="verse" verse="36"><sup>36</sup>Os filhos de Elifaz: Temã, Omar, Zefi, Gatã, Quenaz, Timna e Amaleque.</p> <p class="verse" verse="37"><sup>37</sup>Os filhos de Reuel: Naate, Zerá, Samá e Mizá.</p> <p class="verse" verse="38"><sup>38</sup>Os filhos de Seir: Lotã, Sobal, Zibeão, Anás, Disom, Eser e Disã.</p> <p class="verse" verse="39"><sup>39</sup>Os filhos de Lotã: Hori, e Homã; e a irmã de Lotã foi Timna.</p> <p class="verse" verse="40"><sup>40</sup>Os filhos de Sobal: Aliã, Manaate, Ebal, Sefi e Onã. Os filhos de Zibeão: Aías e Anás.</p> <p class="verse" verse="41"><sup>41</sup>Anás foi pai de Disom. Os filhos de Disom: Hanrão, Esbã, Itrã e Querã.</p> <p class="verse" verse="42"><sup>42</sup>Os filhos de Eser: Bilã, Zaavã e Jaacã. 
Os filhos de Disã: Uz e Arã.</p> <p class="verse" verse="43"><sup>43</sup>Estes foram os reis que reinaram na terra de Edom, antes que houvesse rei sobre os filhos de Israel: Belá, filho de Beor; e era o nome da sua cidade Dinabá.</p> <p class="verse" verse="44"><sup>44</sup>Morreu Belá, e reinou em seu lugar Jobabe, filho de Zerá, de Bozra.</p> <p class="verse" verse="45"><sup>45</sup>Morreu Jobabe, e reinou em seu lugar Husão, da terra dos temanitas.</p> <p class="verse" verse="46"><sup>46</sup>Morreu Husão, e reinou em seu lugar Hadade, filho de Bedade, que derrotou os midianitas no campo de Moabe; e era o nome da sua cidade Avite.</p> <p class="verse" verse="47"><sup>47</sup>Morreu Hadade, e reinou em seu lugar Sâmela, de Masreca.</p> <p class="verse" verse="48"><sup>48</sup>Morreu Sâmela, e reinou em seu lugar Saul, de Reobote junto ao rio.</p> <p class="verse" verse="49"><sup>49</sup>Morreu Saul, e reinou em seu lugar Baal-Ranã, filho de Acbor.</p> <p class="verse" verse="50"><sup>50</sup>Morreu Baal-Hanã, e Hadade reinou em seu lugar; e era o nome da sua cidade Paí. O nome de sua mulher era Meetabel, filha de Matrede, filha de Me-Zaabe.</p> <p class="verse" verse="51"><sup>51</sup>E morreu Hadade. Os príncipes de Edom foram: o príncipe Timna, o príncipe Aliá, o príncipe Jetete,</p> <p class="verse" verse="52"><sup>52</sup>o príncipe Aolíbama, o príncipe Elá, o príncipe Pinom,</p> <p class="verse" verse="53"><sup>53</sup>o príncipe Quenaz, o príncipe Temã, o príncipe Mibzar,</p> <p class="verse" verse="54"><sup>54</sup>o príncipe Magdiel, o príncipe lrã. Estes foram os príncipes de Edom.</p> </div> </div> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <br/> <p class="copyright">Almeida Revista e Atualizada© Copyright © 1993 Sociedade Bíblica do Brasil. Todos os direitos reservados. Texto bíblico utilizado com autorização. Saiba mais sobre a Sociedade Bíblica do Brasil. 
A Sociedade Bíblica do Brasil trabalha para que a Bíblia esteja, efetivamente, ao alcance de todos e seja lida por todos. A SBB é uma entidade sem fins lucrativos, dedicada a promover o desenvolvimento integral do ser humano.</p> <br/> <br/> <br/> <br/></body> </html>
{'content_hash': 'b10689dfaa50bb03e87007a6fde3a32d', 'timestamp': '', 'source': 'github', 'line_count': 87, 'max_line_length': 445, 'avg_line_length': 84.2183908045977, 'alnum_prop': 0.6680769755698103, 'repo_name': 'ahsbjunior/biblia-para-igrejas', 'id': '82c194565af3a01a2d330efd6c9d5925188a8499', 'size': '7465', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'ara/13-1.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '3643'}, {'name': 'HTML', 'bytes': '8559420'}, {'name': 'JavaScript', 'bytes': '23081'}, {'name': 'Ruby', 'bytes': '834'}]}
Ramer-Douglas-Peucker-JS-Geo ============================ Попытка реализовать алгоритм на сфере. Не проверялось. Attempt to implement the algorithm on the sphere. Not tested.
{'content_hash': '015bc608015e0b721468a81595826362', 'timestamp': '', 'source': 'github', 'line_count': 4, 'max_line_length': 61, 'avg_line_length': 43.75, 'alnum_prop': 0.6857142857142857, 'repo_name': 'miksir/Ramer-Douglas-Peucker-JS-Geo', 'id': 'e74435da2b3ce6225143d40cfe902277b171bb1e', 'size': '221', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '5834'}]}
/// Book metadata embedded in an inter-page detail item.
/// NOTE(review): superclass SR_BaseModel is declared elsewhere in the project;
/// this header assumes it is imported by the including file.
@interface SR_InterPageDetailItemBookModel : SR_BaseModel
/// Author of the book.
@property(nonatomic,copy)NSString * author;
/// Publishing company name (presumably; distinct from `publisher` below — TODO confirm).
@property(nonatomic,copy)NSString * company;
/// Identifier of the book.
@property(nonatomic,copy)NSString * book_id;
/// URL or path of the cover image — TODO confirm format against the API response.
@property(nonatomic,copy)NSString * picture;
/// Publisher name.
@property(nonatomic,copy)NSString * publisher;
/// Book title.
@property(nonatomic,copy)NSString * title;
@end


/// One module entry in the detail page's module list.
@interface SR_InterPageDetailItemMoudleListItemModel : SR_BaseModel
/// Free-form module content.
@property(nonatomic,copy)NSString * content;
/// Generic extra fields — semantics not visible here; verify against server schema.
@property(nonatomic,copy)NSString * field1;
@property(nonatomic,copy)NSString * field2;
/// Module item identifier (renamed from `id`, which is reserved in Objective-C).
@property(nonatomic,copy)NSString * moduleItemId;
/// Nested item list; element type is not declared here — TODO confirm.
@property(nonatomic,copy)NSArray * itemList;
/// Owning page identifier.
@property(nonatomic,copy)NSString * page_id;
/// Photo entries; presumably SR_InterPageDetailItemMoudleListItemPhotoListItemModel — verify.
@property(nonatomic,copy)NSArray * photoList;
/// Picture URL/path for the module.
@property(nonatomic,copy)NSString * picture;
/// Module title.
@property(nonatomic,copy)NSString * title;
/// Module type discriminator; valid values not visible here.
@property(nonatomic,copy)NSString * type;
@end


/// One photo entry inside a module's photo list.
@interface SR_InterPageDetailItemMoudleListItemPhotoListItemModel : SR_BaseModel
/// Photo identifier (renamed from `id`, which is reserved in Objective-C).
@property(nonatomic,copy)NSString * photoModelId;
/// Image references; `pic` vs `picture` distinction is not visible here — TODO confirm.
@property(nonatomic,copy)NSString * pic;
@property(nonatomic,copy)NSString * picture;
/// Sort order within the list (string-encoded).
@property(nonatomic,copy)NSString * sort;
/// Creation timestamp; presumably epoch seconds — TODO confirm unit.
@property(nonatomic,assign)NSInteger time_create;
/// Last-update timestamp; presumably epoch seconds — TODO confirm unit.
@property(nonatomic,assign)NSInteger time_update;
/// Photo title/caption.
@property(nonatomic,copy)NSString * title;
/// Link target associated with the photo.
@property(nonatomic,copy)NSString * url;
@end


/// Top-level model for an inter-page detail response.
@interface SR_InterPageDetailItemModel : SR_BaseModel
/// Associated book metadata.
@property(nonatomic,strong)SR_InterPageDetailItemBookModel * book;
/// Page body content.
@property(nonatomic,copy)NSString * content;
/// Description text (renamed from `description`, which collides with NSObject's method).
@property(nonatomic,copy)NSString * modelDescription;
/// Member count (string-encoded by the server).
@property(nonatomic,copy)NSString * member_total;
/// Module entries; presumably SR_InterPageDetailItemMoudleListItemModel — verify.
@property(nonatomic,copy)NSArray * moduleList;
/// Note count (string-encoded by the server).
@property(nonatomic,copy)NSString * note_total;
/// Header picture URL/path.
@property(nonatomic,copy)NSString * picture;
/// Page title.
@property(nonatomic,copy)NSString * title;
@end
{'content_hash': '2a1b8856bcfd6de19622780a23fdee6d', 'timestamp': '', 'source': 'github', 'line_count': 43, 'max_line_length': 80, 'avg_line_length': 41.02325581395349, 'alnum_prop': 0.8100907029478458, 'repo_name': 'zhaiyjgithub/scanreader', 'id': 'd25b038183df1b6bf0438225519970d038e6a87a', 'size': '1934', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'scanreader/Application/SR_InterPage/SR_InterPageDetailItemModel.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '220954'}, {'name': 'C++', 'bytes': '1638915'}, {'name': 'Objective-C', 'bytes': '706124'}, {'name': 'Objective-C++', 'bytes': '36849'}, {'name': 'Ruby', 'bytes': '878'}]}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_60-ea) on Tue Aug 16 17:15:32 EDT 2016 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>org.wildfly.swarm.topology.jgroups (Public javadocs 2016.8.1 API)</title> <meta name="date" content="2016-08-16"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.wildfly.swarm.topology.jgroups (Public javadocs 2016.8.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.8.1</div> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/wildfly/swarm/topology/deployment/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../org/wildfly/swarm/topology/webapp/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/wildfly/swarm/topology/jgroups/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;org.wildfly.swarm.topology.jgroups</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../../org/wildfly/swarm/topology/jgroups/JGroupsTopologyFraction.html" title="class in org.wildfly.swarm.topology.jgroups">JGroupsTopologyFraction</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a 
href="package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2016.8.1</div> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../../org/wildfly/swarm/topology/deployment/package-summary.html">Prev&nbsp;Package</a></li> <li><a href="../../../../../org/wildfly/swarm/topology/webapp/package-summary.html">Next&nbsp;Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/topology/jgroups/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2016 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
{'content_hash': '5b499e16dbc1db34ce4496b0ae5ca496', 'timestamp': '', 'source': 'github', 'line_count': 146, 'max_line_length': 194, 'avg_line_length': 37.294520547945204, 'alnum_prop': 0.6262626262626263, 'repo_name': 'wildfly-swarm/wildfly-swarm-javadocs', 'id': 'd847c5c44184fad7223cd70a7b394835f335fa67', 'size': '5445', 'binary': False, 'copies': '1', 'ref': 'refs/heads/gh-pages', 'path': '2016.8.1/apidocs/org/wildfly/swarm/topology/jgroups/package-summary.html', 'mode': '33188', 'license': 'apache-2.0', 'language': []}
angular.module('cookbookApp', [])
  /**
   * `<dateselect>` element directive.
   *
   * Renders three `<select>` controls (month / day / year) that are kept in
   * two-way sync with a Date object bound through the `model` attribute.
   */
  .directive('dateselect', function() {
    return {
      restrict: 'E',
      template: '<select ng-model="date.month" ' +
        'ng-options="month for month in months"></select>' +
        '<select ng-model="date.day" ' +
        'ng-options="day for day in days"></select>' +
        '<select ng-model="date.year" ' +
        'ng-options="year for year in years"></select>',
      scope: { model: '=' },
      controller: function($scope) {
        var i;
        // Decomposed view of the bound Date: {month, day, year}.
        $scope.date = {};
        // Option lists. NOTE(review): days is always 1..31 regardless of the
        // selected month, so an invalid pick (e.g. Feb 31) rolls forward into
        // the next month via Date normalisation.
        $scope.days = [];
        for (i = 1; i <= 31; i++) {
          $scope.days.push(i);
        }
        $scope.months = [];
        for (i = 1; i <= 12; i++) {
          $scope.months.push(i);
        }
        $scope.years = [];
        for (i = 1980; i <= (new Date().getFullYear()); i++) {
          $scope.years.push(i);
        }

        // Model -> view: mirror the Date into the select models.
        $scope.$watch('model', function(newDate) {
          $scope.date.month = newDate.getMonth() + 1;
          $scope.date.day = newDate.getDate();
          $scope.date.year = newDate.getFullYear();
        }, true);

        // View -> model: write year, month and day in a single call.
        // Bug fix: the previous setDate() / setMonth() / setFullYear()
        // sequence re-normalised after each setter, so e.g. a current day
        // of 31 combined with switching to a 30-day month rolled the date
        // over into the following month before the day was applied.
        // setFullYear(year, month, day) applies all three atomically.
        $scope.$watch('date', function(newDate) {
          $scope.model.setFullYear(newDate.year, newDate.month - 1, newDate.day);
        }, true);
      }
    };
  })
  .controller('MainController', function($scope) {
    // Demo controller: exposes "now" as the date bound to <dateselect>.
    $scope.current = new Date();
  });
{'content_hash': '5f06e8e2ecc16a9c3535733c6ef6c31a', 'timestamp': '', 'source': 'github', 'line_count': 48, 'max_line_length': 62, 'avg_line_length': 29.395833333333332, 'alnum_prop': 0.4982282069454288, 'repo_name': 'sbrink/angularjs-cookbook-code', 'id': '92bb9617153f857d63525fbbd05e1c95d9a5dfff', 'size': '1411', 'binary': False, 'copies': '1', 'ref': 'refs/heads/gh-pages', 'path': 'directives-birthdate-select/application.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '4012'}, {'name': 'JavaScript', 'bytes': '26467'}]}
using System.Collections.Generic;
using System.Threading.Tasks;
using Proverb.Data.CommandQuery.Interfaces;
using Proverb.Data.Models;
using Proverb.Services.Interfaces;

namespace Proverb.Services
{
    /// <summary>
    /// Service layer for <see cref="Sage"/> entities. Writes are delegated to
    /// <see cref="ISageCommand"/> and reads to <see cref="ISageQuery"/>.
    /// </summary>
    public class SageService : ISageService
    {
        private readonly ISageCommand _sageCommand;
        private readonly ISageQuery _sageQuery;

        public SageService(ISageCommand sageCommand, ISageQuery sageQuery)
        {
            _sageCommand = sageCommand;
            _sageQuery = sageQuery;
        }

        /// <summary>Creates a new sage and returns its identifier.</summary>
        public async Task<int> CreateAsync(Sage sage) =>
            await _sageCommand.CreateAsync(sage);

        /// <summary>Deletes the sage with the given identifier.</summary>
        public async Task DeleteAsync(int id) =>
            await _sageCommand.DeleteAsync(id);

        /// <summary>Retrieves all sages.</summary>
        public async Task<ICollection<Sage>> GetAllAsync() =>
            await _sageQuery.GetAllAsync();

        /// <summary>Retrieves a single sage by identifier.</summary>
        public async Task<Sage> GetByIdAsync(int id) =>
            await _sageQuery.GetByIdAsync(id);

        /// <summary>Persists changes to an existing sage.</summary>
        public async Task UpdateAsync(Sage sage) =>
            await _sageCommand.UpdateAsync(sage);
    }
}
{'content_hash': '5283f47e506707ed56ecc6170ff4599c', 'timestamp': '', 'source': 'github', 'line_count': 46, 'max_line_length': 74, 'avg_line_length': 25.195652173913043, 'alnum_prop': 0.6160483175150993, 'repo_name': 'johnnyreilly/proverb-offline', 'id': '395c66f61143f959fdbb9ee075e061bac37c0bf9', 'size': '1161', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'Proverb.Services/SageService.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP', 'bytes': '102'}, {'name': 'C#', 'bytes': '78770'}, {'name': 'CSS', 'bytes': '81116'}, {'name': 'HTML', 'bytes': '19361'}, {'name': 'JavaScript', 'bytes': '160058'}, {'name': 'TypeScript', 'bytes': '87625'}]}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.connector.dual;

import com.facebook.presto.spi.Connector;
import com.google.inject.Binder;
import com.google.inject.Module;

import static com.google.inject.multibindings.MapBinder.newMapBinder;

/**
 * Guice module that registers the {@link DualConnector} in the application's
 * {@code String -> Connector} multibinding under the key {@code "dual"}.
 */
public class DualModule
        implements Module
{
    @Override
    public void configure(Binder binder)
    {
        // Contribute to the shared connector MapBinder rather than creating a
        // plain binding, so other modules can register connectors under other keys.
        newMapBinder(binder, String.class, Connector.class).addBinding("dual").to(DualConnector.class);
    }
}
{'content_hash': 'fb1dbbad6800b585848803778c3275d0', 'timestamp': '', 'source': 'github', 'line_count': 30, 'max_line_length': 103, 'avg_line_length': 33.733333333333334, 'alnum_prop': 0.7470355731225297, 'repo_name': 'vishalsan/presto', 'id': '4feac85b958f728f4fb0530ba7b5364ef4b940bc', 'size': '1012', 'binary': False, 'copies': '8', 'ref': 'refs/heads/master', 'path': 'presto-main/src/main/java/com/facebook/presto/connector/dual/DualModule.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'GAP', 'bytes': '40560'}, {'name': 'HTML', 'bytes': '20033'}, {'name': 'Java', 'bytes': '5042248'}, {'name': 'Makefile', 'bytes': '6819'}, {'name': 'PLSQL', 'bytes': '7000'}, {'name': 'Python', 'bytes': '2102'}]}
package org.apache.carbondata.spark.testsuite.secondaryindex

import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.spark.testsuite.secondaryindex.TestSecondaryIndexUtils.isFilterPushedDownToSI

/**
 * test cases for testing reindex command on index table/main table/DB level
 *
 * Common scenario shape: load data, delete (and force-clean) some secondary
 * index (SI) segments so the SI is out of sync with the main table, verify the
 * segment counts diverge, run REINDEX, and verify the counts converge again.
 * isFilterPushedDownToSI checks whether the SI is used to prune the query plan.
 */
class TestIndexRepair extends QueryTest with BeforeAndAfterAll {

  override def beforeAll {
    // Start from a clean slate; enable forced CLEAN FILES so deleted
    // segments can be physically removed inside the tests.
    sql("drop index if exists indextable1 on maintable")
    sql("drop index if exists indextable2 on maintable")
    sql("drop table if exists maintable")
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_CLEAN_FILES_FORCE_ALLOWED, "true")
  }

  // REINDEX INDEX TABLE rebuilds all SI segments deleted from the index table.
  test("reindex command after deleting segments from SI table") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(0,1)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    val df1 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df1))
    val postDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments!=postDeleteSegments)
    sql("REINDEX INDEX TABLE indextable1 ON MAINTABLE")
    val df2 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    val postRepairSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    // After repair, SI segment count matches the pre-delete count and the SI
    // is used for pruning again.
    assert(preDeleteSegments == postRepairSegments)
    assert(isFilterPushedDownToSI(df2))
    sql("drop table if exists maintable")
  }

  // Same repair scenario, but addressing the table with an explicit database
  // qualifier (test.maintable) instead of switching via USE.
  test("reindex command after deleting segments from SI table on other database without use") {
    sql("drop table if exists test.maintable")
    sql("drop database if exists test cascade")
    sql("create database test")
    sql("CREATE TABLE test.maintable(a INT, b STRING, c STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table test.maintable(c) as 'carbondata'")
    sql("INSERT INTO test.maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO test.maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO test.maintable SELECT 1,'string1', 'string2'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
    sql("DELETE FROM TABLE test.INDEXTABLE1 WHERE SEGMENT.ID IN(0,1,2)")
    sql("CLEAN FILES FOR TABLE test.INDEXTABLE1 options('force'='true')")
    val df1 = sql("select * from test.maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df1))
    val postDeleteSegments = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
    assert(preDeleteSegments!=postDeleteSegments)
    sql("REINDEX INDEX TABLE indextable1 ON test.MAINTABLE")
    val df2 = sql("select * from test.maintable where c = 'string2'").queryExecution.sparkPlan
    val postRepairSegments = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
    assert(preDeleteSegments == postRepairSegments)
    assert(isFilterPushedDownToSI(df2))
    sql("drop index if exists indextable1 on test.maintable")
    sql("drop table if exists test.maintable")
    sql("drop database if exists test cascade")
  }

  // Segment-scoped repair: REINDEX ... WHERE SEGMENT.ID restores only the
  // listed segments, so repair can be done incrementally.
  test("reindex command using segment.id after deleting segments from SI table") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(0,1,2)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    val postDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments!=postDeleteSegments)
    val df1 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df1))
    // First pass repairs two of the three deleted segments...
    sql("REINDEX INDEX TABLE indextable1 ON MAINTABLE WHERE SEGMENT.ID IN (0,1)")
    val postFirstRepair = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(postDeleteSegments + 2 == postFirstRepair)
    val df2 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df2))
    // ...second pass repairs the remaining one, restoring the original count.
    sql("REINDEX INDEX TABLE indextable1 ON MAINTABLE WHERE SEGMENT.ID IN (2)")
    val postRepairSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments == postRepairSegments)
    val df3 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df3))
    sql("drop table if exists maintable")
  }

  // Segments deleted but NOT force-cleaned leave stale files behind; REINDEX
  // must still produce correct query results (no duplicate/missing rows).
  test("reindex command with stale files") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(0,1,2)")
    sql("REINDEX INDEX TABLE indextable1 ON MAINTABLE WHERE SEGMENT.ID IN (0,1)")
    assert(sql("select * from maintable where c = 'string2'").count() == 3)
    sql("drop table if exists maintable")
  }

  // A fresh INSERT into the main table loads its corresponding SI segment
  // even while older SI segments are still missing.
  test("insert command after deleting segments from SI table") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(1,2,3)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    val postDeleteSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments!=postDeleteSegments)
    val df1 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df1))
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2'")
    // Only the new load's segment is added; missing SI segments stay missing.
    val postLoadSegments = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments + 1 == postLoadSegments)
    val df2 = sql("select * from maintable where c = 'string2'").queryExecution.sparkPlan
    assert(isFilterPushedDownToSI(df2))
    sql("drop table if exists maintable")
  }

  // Table-level REINDEX repairs every SI of the main table in one command.
  test("reindex command on main table") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING, d STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("CREATE INDEX indextable2 on table maintable(d) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2', 'string3'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2', 'string3'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE MAINTABLE").count()
    // Damage the two SIs differently (one vs. both segments removed).
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(0)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    sql("DELETE FROM TABLE INDEXTABLE2 WHERE SEGMENT.ID IN(0,1)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE2 options('force'='true')")
    val postDeleteSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    val postDeleteSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE2").count()
    assert(preDeleteSegments!=postDeleteSegmentsIndexOne)
    assert(preDeleteSegments!=postDeleteSegmentsIndexTwo)
    sql("REINDEX ON TABLE MAINTABLE")
    val postRepairSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    val postRepairSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE2").count()
    assert(preDeleteSegments == postRepairSegmentsIndexOne)
    assert(preDeleteSegments == postRepairSegmentsIndexTwo)
    sql("drop table if exists maintable")
  }

  // Table-level REINDEX restricted by SEGMENT.ID, plus repair after a
  // STARTTIME-based delete that empties the SI.
  test("reindex command on main table with delete command") {
    sql("drop table if exists maintable")
    sql("CREATE TABLE maintable(a INT, b STRING, c STRING, d STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table maintable(c) as 'carbondata'")
    sql("CREATE INDEX indextable2 on table maintable(d) as 'carbondata'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2', 'string3'")
    sql("INSERT INTO maintable SELECT 1,'string1', 'string2', 'string3'")
    val preDeleteSegments = sql("SHOW SEGMENTS FOR TABLE MAINTABLE").count()
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.ID IN(0)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    sql("DELETE FROM TABLE INDEXTABLE2 WHERE SEGMENT.ID IN(1)")
    sql("CLEAN FILES FOR TABLE INDEXTABLE2 options('force'='true')")
    // var: re-measured again after the STARTTIME-based delete below.
    var postDeleteSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    val postDeleteSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE2").count()
    assert(preDeleteSegments != postDeleteSegmentsIndexOne)
    assert(preDeleteSegments != postDeleteSegmentsIndexTwo)
    sql("REINDEX ON TABLE MAINTABLE WHERE SEGMENT.ID IN(0,1)")
    var postRepairSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    val postRepairSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE2").count()
    assert(preDeleteSegments == postRepairSegmentsIndexOne)
    assert(preDeleteSegments == postRepairSegmentsIndexTwo)
    // A far-future STARTTIME cutoff removes every SI segment; repair again.
    sql("DELETE FROM TABLE INDEXTABLE1 WHERE SEGMENT.STARTTIME BEFORE '2099-01-01 01:00:00'")
    sql("CLEAN FILES FOR TABLE INDEXTABLE1 options('force'='true')")
    postDeleteSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments != postDeleteSegmentsIndexOne)
    sql("REINDEX ON TABLE MAINTABLE WHERE SEGMENT.ID IN(0,1)")
    postRepairSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE INDEXTABLE1").count()
    assert(preDeleteSegments == postRepairSegmentsIndexOne)
    sql("drop table if exists maintable")
  }

  // Database-level REINDEX repairs every SI of every table in the database.
  test("reindex command on database") {
    sql("drop database if exists test cascade")
    sql("create database test")
    sql("drop table if exists maintable1")
    // table 1
    sql("CREATE TABLE test.maintable1(a INT, b STRING, c STRING, d STRING) stored as carbondata")
    sql("CREATE INDEX indextable1 on table test.maintable1(c) as 'carbondata'")
    sql("CREATE INDEX indextable2 on table test.maintable1(d) as 'carbondata'")
    sql("INSERT INTO test.maintable1 SELECT 1,'string1', 'string2', 'string3'")
    sql("INSERT INTO test.maintable1 SELECT 1,'string1', 'string2', 'string3'")
    val preDeleteSegmentsTableOne = sql("SHOW SEGMENTS FOR TABLE test.MAINTABLE1").count()
    sql("DELETE FROM TABLE test.INDEXTABLE1 WHERE SEGMENT.ID IN(0)")
    sql("CLEAN FILES FOR TABLE test.INDEXTABLE1 options('force'='true')")
    sql("DELETE FROM TABLE test.INDEXTABLE2 WHERE SEGMENT.ID IN(0,1)")
    sql("CLEAN FILES FOR TABLE test.INDEXTABLE2 options('force'='true')")
    val postDeleteSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
    val postDeleteSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE2").count()
    // table 2
    sql("CREATE TABLE test.maintable2(a INT, b STRING, c STRING, d STRING) stored as carbondata")
    sql("CREATE INDEX indextable3 on table test.maintable2(c) as 'carbondata'")
    sql("CREATE INDEX indextable4 on table test.maintable2(d) as 'carbondata'")
    sql("INSERT INTO test.maintable2 SELECT 1,'string1', 'string2', 'string3'")
    sql("INSERT INTO test.maintable2 SELECT 1,'string1', 'string2', 'string3'")
    val preDeleteSegmentsTableTwo = sql("SHOW SEGMENTS FOR TABLE test.MAINTABLE2").count()
    sql("DELETE FROM TABLE test.INDEXTABLE3 WHERE SEGMENT.ID IN(1)")
    sql("CLEAN FILES FOR TABLE test.INDEXTABLE3 options('force'='true')")
    sql("DELETE FROM TABLE test.INDEXTABLE4 WHERE SEGMENT.ID IN(0,1)")
    sql("CLEAN FILES FOR TABLE test.INDEXTABLE4 options('force'='true')")
    val postDeleteSegmentsIndexThree = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE3").count()
    val postDeleteSegmentsIndexFour = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE4").count()
    assert(preDeleteSegmentsTableOne!=postDeleteSegmentsIndexOne)
    assert(preDeleteSegmentsTableOne!=postDeleteSegmentsIndexTwo)
    assert(preDeleteSegmentsTableTwo!=postDeleteSegmentsIndexThree)
    assert(preDeleteSegmentsTableTwo!=postDeleteSegmentsIndexFour)
    sql("REINDEX DATABASE TEST")
    val postRepairSegmentsIndexOne = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE1").count()
    val postRepairSegmentsIndexTwo = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE2").count()
    val postRepairSegmentsIndexThree = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE3").count()
    val postRepairSegmentsIndexFour = sql("SHOW SEGMENTS FOR TABLE test.INDEXTABLE4").count()
    assert(preDeleteSegmentsTableOne == postRepairSegmentsIndexOne)
    assert(preDeleteSegmentsTableOne == postRepairSegmentsIndexTwo)
    assert(preDeleteSegmentsTableTwo == postRepairSegmentsIndexThree)
    assert(preDeleteSegmentsTableTwo == postRepairSegmentsIndexFour)
    sql("drop index if exists indextable1 on test.maintable1")
    sql("drop index if exists indextable2 on test.maintable1")
    sql("drop table if exists test.maintable1")
    sql("drop index if exists indextable3 on test.maintable2")
    sql("drop index if exists indextable4 on test.maintable2")
    sql("drop table if exists test.maintable2")
    sql("drop database if exists test cascade")
  }

  override def afterAll {
    // Undo beforeAll: drop test artifacts and remove the force-clean override.
    sql("drop index if exists indextable1 on maintable")
    sql("drop index if exists indextable2 on maintable")
    sql("drop table if exists maintable")
    CarbonProperties.getInstance()
      .removeProperty(CarbonCommonConstants.CARBON_CLEAN_FILES_FORCE_ALLOWED)
  }
}
{'content_hash': '96c7917b595baf65c1d63cc5d8252b99', 'timestamp': '', 'source': 'github', 'line_count': 260, 'max_line_length': 106, 'avg_line_length': 56.81923076923077, 'alnum_prop': 0.7375617680904353, 'repo_name': 'zzcclp/carbondata', 'id': 'dd918f957c25757b9c5b6e9aac0c419ee5213e0e', 'size': '15573', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'index/secondary-index/src/test/scala/org/apache/carbondata/spark/testsuite/secondaryindex/TestIndexRepair.scala', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '16022'}, {'name': 'Batchfile', 'bytes': '1639'}, {'name': 'C#', 'bytes': '86'}, {'name': 'C++', 'bytes': '110888'}, {'name': 'CMake', 'bytes': '1555'}, {'name': 'Java', 'bytes': '7859129'}, {'name': 'Python', 'bytes': '368778'}, {'name': 'Scala', 'bytes': '12011736'}, {'name': 'Shell', 'bytes': '7259'}, {'name': 'Thrift', 'bytes': '23385'}]}
package service import model._ import scala.slick.driver.H2Driver.simple._ import Database.threadLocalSession import service.SystemSettingsService.SystemSettings import util.StringUtil._ import model.GroupMember import scala.Some import model.Account import util.LDAPUtil import org.slf4j.LoggerFactory trait AccountService { private val logger = LoggerFactory.getLogger(classOf[AccountService]) def authenticate(settings: SystemSettings, userName: String, password: String): Option[Account] = if(settings.ldapAuthentication){ ldapAuthentication(settings, userName, password) } else { defaultAuthentication(userName, password) } /** * Authenticate by internal database. */ private def defaultAuthentication(userName: String, password: String) = { getAccountByUserName(userName).collect { case account if(!account.isGroupAccount && account.password == sha1(password)) => Some(account) } getOrElse None } /** * Authenticate by LDAP. */ private def ldapAuthentication(settings: SystemSettings, userName: String, password: String): Option[Account] = { LDAPUtil.authenticate(settings.ldap.get, userName, password) match { case Right(ldapUserInfo) => { // Create or update account by LDAP information getAccountByUserName(ldapUserInfo.userName, true) match { case Some(x) if(!x.isRemoved) => { updateAccount(x.copy(mailAddress = ldapUserInfo.mailAddress, fullName = ldapUserInfo.fullName)) getAccountByUserName(ldapUserInfo.userName) } case Some(x) if(x.isRemoved) => { logger.info(s"LDAP Authentication Failed: Account is already registered but disabled..") defaultAuthentication(userName, password) } case None => getAccountByMailAddress(ldapUserInfo.mailAddress, true) match { case Some(x) if(!x.isRemoved) => { updateAccount(x.copy(fullName = ldapUserInfo.fullName)) getAccountByUserName(ldapUserInfo.userName) } case Some(x) if(x.isRemoved) => { logger.info(s"LDAP Authentication Failed: Account is already registered but disabled..") defaultAuthentication(userName, password) } case None => { 
createAccount(ldapUserInfo.userName, "", ldapUserInfo.fullName, ldapUserInfo.mailAddress, false, None) getAccountByUserName(ldapUserInfo.userName) } } } } case Left(errorMessage) => { logger.info(s"LDAP Authentication Failed: ${errorMessage}") defaultAuthentication(userName, password) } } } def getAccountByUserName(userName: String, includeRemoved: Boolean = false): Option[Account] = Query(Accounts) filter(t => (t.userName is userName.bind) && (t.removed is false.bind, !includeRemoved)) firstOption def getAccountByMailAddress(mailAddress: String, includeRemoved: Boolean = false): Option[Account] = Query(Accounts) filter(t => (t.mailAddress.toLowerCase is mailAddress.toLowerCase.bind) && (t.removed is false.bind, !includeRemoved)) firstOption def getAllUsers(includeRemoved: Boolean = true): List[Account] = if(includeRemoved){ Query(Accounts) sortBy(_.userName) list } else { Query(Accounts) filter (_.removed is false.bind) sortBy(_.userName) list } def createAccount(userName: String, password: String, fullName: String, mailAddress: String, isAdmin: Boolean, url: Option[String]): Unit = Accounts insert Account( userName = userName, password = password, fullName = fullName, mailAddress = mailAddress, isAdmin = isAdmin, url = url, registeredDate = currentDate, updatedDate = currentDate, lastLoginDate = None, image = None, isGroupAccount = false, isRemoved = false) def updateAccount(account: Account): Unit = Accounts .filter { a => a.userName is account.userName.bind } .map { a => a.password ~ a.fullName ~ a.mailAddress ~ a.isAdmin ~ a.url.? ~ a.registeredDate ~ a.updatedDate ~ a.lastLoginDate.? 
~ a.removed } .update ( account.password, account.fullName, account.mailAddress, account.isAdmin, account.url, account.registeredDate, currentDate, account.lastLoginDate, account.isRemoved) def updateAvatarImage(userName: String, image: Option[String]): Unit = Accounts.filter(_.userName is userName.bind).map(_.image.?).update(image) def updateLastLoginDate(userName: String): Unit = Accounts.filter(_.userName is userName.bind).map(_.lastLoginDate).update(currentDate) def createGroup(groupName: String, url: Option[String]): Unit = Accounts insert Account( userName = groupName, password = "", fullName = groupName, mailAddress = groupName + "@devnull", isAdmin = false, url = url, registeredDate = currentDate, updatedDate = currentDate, lastLoginDate = None, image = None, isGroupAccount = true, isRemoved = false) def updateGroup(groupName: String, url: Option[String], removed: Boolean): Unit = Accounts.filter(_.userName is groupName.bind).map(t => t.url.? ~ t.removed).update(url, removed) def updateGroupMembers(groupName: String, members: List[(String, Boolean)]): Unit = { Query(GroupMembers).filter(_.groupName is groupName.bind).delete members.foreach { case (userName, isManager) => GroupMembers insert GroupMember (groupName, userName, isManager) } } def getGroupMembers(groupName: String): List[GroupMember] = Query(GroupMembers) .filter(_.groupName is groupName.bind) .sortBy(_.userName) .list def getGroupsByUserName(userName: String): List[String] = Query(GroupMembers) .filter(_.userName is userName.bind) .sortBy(_.groupName) .map(_.groupName) .list def removeUserRelatedData(userName: String): Unit = { Query(GroupMembers).filter(_.userName is userName.bind).delete Query(Collaborators).filter(_.collaboratorName is userName.bind).delete Query(Repositories).filter(_.userName is userName.bind).delete } } object AccountService extends AccountService
{'content_hash': '4b7cf82f43abb0597fbbb2c86b3b200e', 'timestamp': '', 'source': 'github', 'line_count': 168, 'max_line_length': 151, 'avg_line_length': 38.26190476190476, 'alnum_prop': 0.6653702551337897, 'repo_name': 'ihad28/gitbucket', 'id': 'acd76c9d1159ac2705f1b5dba9ed1d014e7e7533', 'size': '6428', 'binary': False, 'copies': '9', 'ref': 'refs/heads/master', 'path': 'src/main/scala/service/AccountService.scala', 'mode': '33188', 'license': 'apache-2.0', 'language': []}
package com.evolvedbinary.j8fu.function; import com.evolvedbinary.j8fu.Either; import java.util.Objects; import java.util.function.Function; import static com.evolvedbinary.j8fu.Either.Left; import static com.evolvedbinary.j8fu.Either.Right; /** * Similar to {@link QuadFunctionE} but * permits six statically known Exceptions to be thrown * * @param <T> Function parameter 1 type * @param <U> Function parameter 2 type * @param <V> Function parameter 3 type * @param <W> Function parameter 4 type * @param <R> Function return type * @param <E1> Function throws exception type * @param <E2> Function throws exception type * @param <E3> Function throws exception type * @param <E4> Function throws exception type * @param <E5> Function throws exception type * @param <E6> Function throws exception type * * @author <a href="mailto:[email protected]">Adam Retter</a> */ @FunctionalInterface public interface QuadFunction6E<T, U, V, W, R, E1 extends Throwable, E2 extends Throwable, E3 extends Throwable, E4 extends Throwable, E5 extends Throwable, E6 extends Throwable> { /** * Applies this function to the given arguments. * * @param t the first function argument * @param u the second function argument * @param v the third function argument * @param w the fourth function argument * @return the function result * * @throws E1 An exception of type {@code E1} * @throws E2 An exception of type {@code E2} * @throws E3 An exception of type {@code E3} * @throws E4 An exception of type {@code E4} * @throws E5 An exception of type {@code E5} * @throws E6 An exception of type {@code E6} */ R apply(final T t, final U u, final V v, final W w) throws E1, E2, E3, E4, E5, E6; /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. 
* * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function6E<? super R, ? extends R2, ? extends E1, ? extends E2, ? extends E3, ? extends E4, ? extends E5, ? extends E6> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. * * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function5E<? super R, ? extends R2, ? extends E1, ? extends E2, ? extends E3, ? extends E4, ? extends E5> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. 
* * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function4E<? super R, ? extends R2, ? extends E1, ? extends E2, ? extends E3, ? extends E4> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. * * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function3E<? super R, ? extends R2, ? extends E1, ? extends E2, ? extends E3> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. 
* * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function2E<? super R, ? extends R2, ? extends E1, ? extends E2> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. * * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final FunctionE<? super R, ? extends R2, ? extends E1> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a composed function that first applies this function to * its input, and then applies the {@code after} function to the result. * If evaluation of either function throws an exception, it is relayed to * the caller of the composed function. 
* * @param <R2> the type of output of the {@code after} function, and of the * composed function * @param after the function to apply after this function is applied * @return a composed function that first applies this function and then * applies the {@code after} function * @throws NullPointerException if after is null */ default <R2> QuadFunction6E<T, U, V, W, R2, E1, E2, E3, E4, E5, E6> andThen(final Function<? super R, ? extends R2> after) { Objects.requireNonNull(after); return (T t, U u, V v, W w) -> after.apply(apply(t, u, v, w)); } /** * Returns a quad-function that applies this quad-function and returns the * result as an {@link Either}. * * @return a quad-function which will return either a throwable or the result {@code R}. */ default QuadFunction<T, U, V, W, Either<Throwable, R>> toQuadFunction() { return (T t, U u, V v, W w) -> { try { return Right(apply(t, u, v, w)); } catch (final Throwable e) { return Left(e); } }; } /** * Lifts a standard {@code QuadFunction<T, R>} to a {@code QuadFunction6E<T, R, E1, E2, E3, E4, E5, E6>}. * * @param function the function to lift. * * @return the QuadFunction6E. 
* * @param <T> the type of the first input object to the function * @param <U> the type of the second input object to the function * @param <V> the type of the third input object to the function * @param <W> the type of the fourth input object to the function * @param <R> the type of the output object to the function * @param <E1> An exception of type {@code E1} * @param <E2> An exception of type {@code E2} * @param <E3> An exception of type {@code E3} * @param <E4> An exception of type {@code E4} * @param <E5> An exception of type {@code E5} * @param <E6> An exception of type {@code E6} */ static <T, U, V, W, R, E1 extends Throwable, E2 extends Throwable, E3 extends Throwable, E4 extends Throwable, E5 extends Throwable, E6 extends Throwable> QuadFunction6E<T, U, V, W, R, E1, E2, E3, E4, E5, E6> lift(final QuadFunction<T, U, V, W, R> function) { return function::apply; } /** * Lifts an exception of type {@code <E>} to a {@code QuadFunction6E<T, T, E1, E2, E3, E4, E5, E6>} * which will always throw the exception. * * @param exception the exception to lift. * * @return the QuadFunction6E. * * @param <T> the type of the input object to the function * @param <U> the type of the second input object to the function * @param <V> the type of the third input object to the function * @param <W> the type of the fourth input object to the function * @param <R> the type of the output object to the function * @param <E1> An exception of type {@code E1} * @param <E2> An exception of type {@code E2} * @param <E3> An exception of type {@code E3} * @param <E4> An exception of type {@code E4} * @param <E5> An exception of type {@code E5} * @param <E6> An exception of type {@code E6} */ static <T, U, V, W, R, E1 extends Throwable, E2 extends Throwable, E3 extends Throwable, E4 extends Throwable, E5 extends Throwable, E6 extends Throwable> QuadFunction6E<T, U, V, W, R, E1, E2, E3, E4, E5, E6> lift(final E1 exception) { return (t, u, v, w) -> { throw exception; }; } }
{'content_hash': 'eca37589d9c017a25ff68c0f232d6f7e', 'timestamp': '', 'source': 'github', 'line_count': 240, 'max_line_length': 263, 'avg_line_length': 47.0875, 'alnum_prop': 0.6431289266436598, 'repo_name': 'adamretter/j8fu', 'id': 'abebd4d6e480e873d57506e16736d9725f323d85', 'size': '12908', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/evolvedbinary/j8fu/function/QuadFunction6E.java', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Java', 'bytes': '1070995'}]}
[![Gem Version](https://badge.fury.io/rb/ika.svg)](http://badge.fury.io/rb/ika) [![Circle CI](https://circleci.com/gh/Aqutras/ika.svg?style=shield)](https://circleci.com/gh/Aqutras/ika) [![Coverage Status](https://coveralls.io/repos/Aqutras/ika/badge.svg?branch=master)](https://coveralls.io/r/Aqutras/ika?branch=master) Ika implements the function that export/import ActiveModel data with json. Ika also supports [carrierwave](https://github.com/carrierwaveuploader/carrierwave). ## Installation In Rails, add it to your Gemfile: ```ruby gem 'ika' ``` ## Usage You can use `export` or `import` method on your model or relation. ### Example In case: `Group` has many tags and `User` belongs to multiple groups with `GroupUsers` such as below. ```ruby class User < ActiveRecord::Base has_many :group_users has_many :groups, through: :group_users end class GroupUsers < ActiveRecord::Base belongs_to :user belongs_to :group end class Group < ActiveRecord::Base has_many :group_users has_many :users, through: :group_users has_many :tags end class Tag < ActiveRecord::Base belongs_to :group end ``` Now you can export with `export` method on your model or relation and import with `import` method on your model. 
```ruby require 'json' # with no options JSON.parse User.export # => [{"id":1,"name":"iruca3"},{"id":2,"name":"inkling"}] JSON.parse User.where(id: 1).export # => [{"id":1,"name":"iruca3"}] JSON.parse User.find(id: 2).export # => {"id":2,"name":"inkling"} # with include option JSON.parse User.export(include: :groups) # => [{"id":1,"name":"iruca3","groups":[{"id":1,"name":"aqutras"},{"id":2,"name":"Splatoon"}]},{"id":2,"name":"inkling","groups":[{"id":2,"name":"Splatoon"}]}] data = JSON.parse(User.find(id: 1).export(include: [{groups: [:tags]}])) # => {"id":1,"name":"iruca3","groups":[{"id":1,"name":"aqutras","tags":[{"id":1,"name":"Company"}]},{"id":2,"name":"Splatoon","tags":[{"id":2,"name":"Game"},{"id":3,"name":"Inkling"}]}]} # import (id, created_at and updated_at are completely imported with the same value) User.destroy_all Group.destroy_all Tag.destroy_all User.import(data) # sync mode is available. User.import(User.where(id: 1).export, sync: true) User.exist?(id: 2) # => false ``` ## Sync mode Sync mode performs that ika deletes all data of importing models. For example, if exporting data includes that id is 1 and 2, and there are already exists data that id is 1, 2 and 3, sync importing deletes the data of id 3. ## Others * **DO NOT USE sync mode if you are using `include` option.** * If the same id exists, Ika uses `UPDATE`. * Uploaded files by `carrierwave` will be checked their md5 hash and do nothing if they exist and md5 is matched. * If there already exists `import` or `export` methods, you can use `ika_import` or `ika_export` methods. ## Contributing ### Test You need to run `bundle exec rake db:create db:migrate` on `spec/dummy` before testing. ## License Copyright (c) 2015-2016 Aqutras This project rocks and uses MIT-LICENSE.
{'content_hash': '96a9dddc037e03cf149d627e1e2cac6a', 'timestamp': '', 'source': 'github', 'line_count': 96, 'max_line_length': 223, 'avg_line_length': 31.510416666666668, 'alnum_prop': 0.696198347107438, 'repo_name': 'Aqutras/ika', 'id': '3f2517990a03e4547b869b33d76edac5c4f9bb62', 'size': '3031', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '686'}, {'name': 'HTML', 'bytes': '4883'}, {'name': 'JavaScript', 'bytes': '596'}, {'name': 'Ruby', 'bytes': '42300'}]}
sudo mkdir -p /opt/mongodb sudo chown ${USER} /opt/mongodb -R
{'content_hash': '93dc488e8afb3a7a2e1585a04b1bad65', 'timestamp': '', 'source': 'github', 'line_count': 2, 'max_line_length': 34, 'avg_line_length': 31.0, 'alnum_prop': 0.7096774193548387, 'repo_name': 'zodern/meteor-up', 'id': 'bb8ef9bd010ff88e94f8f53f9bd4bc0248ec4df8', 'size': '75', 'binary': False, 'copies': '5', 'ref': 'refs/heads/master', 'path': 'src/plugins/mongo/assets/mongo-setup.sh', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '229152'}, {'name': 'Shell', 'bytes': '31012'}]}
layout: post date: 2017-07-18 title: "Martina Liana Satin Wedding Dresses Style 677 Sleeveless Court Train Mermaid/Trumpet" category: Martina Liana tags: [Martina Liana,Mermaid/Trumpet,Sweetheart,Court Train,Sleeveless] --- ### Martina Liana Satin Wedding Dresses Style 677 Just **$329.99** ### Sleeveless Court Train Mermaid/Trumpet <table><tr><td>BRANDS</td><td>Martina Liana</td></tr><tr><td>Silhouette</td><td>Mermaid/Trumpet</td></tr><tr><td>Neckline</td><td>Sweetheart</td></tr><tr><td>Hemline/Train</td><td>Court Train</td></tr><tr><td>Sleeve</td><td>Sleeveless</td></tr></table> <a href="https://www.readybrides.com/en/martina-liana/13597-martina-liana-satin-wedding-dresses-style-677.html"><img src="//img.readybrides.com/31022/martina-liana-satin-wedding-dresses-style-677.jpg" alt="Martina Liana Satin Wedding Dresses Style 677" style="width:100%;" /></a> <!-- break --><a href="https://www.readybrides.com/en/martina-liana/13597-martina-liana-satin-wedding-dresses-style-677.html"><img src="//img.readybrides.com/31021/martina-liana-satin-wedding-dresses-style-677.jpg" alt="Martina Liana Satin Wedding Dresses Style 677" style="width:100%;" /></a> Buy it: [https://www.readybrides.com/en/martina-liana/13597-martina-liana-satin-wedding-dresses-style-677.html](https://www.readybrides.com/en/martina-liana/13597-martina-liana-satin-wedding-dresses-style-677.html)
{'content_hash': '3e643c802c8e83ee2218d3cf23607006', 'timestamp': '', 'source': 'github', 'line_count': 18, 'max_line_length': 255, 'avg_line_length': 79.0, 'alnum_prop': 0.729957805907173, 'repo_name': 'HOLEIN/HOLEIN.github.io', 'id': 'b5b9d0384bb0918e15d93567b23d89de95e8e8df', 'size': '1426', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '_posts/2017-07-18-Martina-Liana-Satin-Wedding-Dresses-Style-677-Sleeveless-Court-Train-MermaidTrumpet.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '83876'}, {'name': 'HTML', 'bytes': '14547'}, {'name': 'Ruby', 'bytes': '897'}]}
module HealthSeven::V2_7 class InuU05 < ::HealthSeven::Message attribute :msh, Msh, position: "MSH", require: true attribute :sfts, Array[Sft], position: "SFT", multiple: true attribute :uac, Uac, position: "UAC" attribute :equ, Equ, position: "EQU", require: true attribute :invs, Array[Inv], position: "INV", require: true, multiple: true attribute :rol, Rol, position: "ROL" end end
{'content_hash': '14261d789d3d627d2400d6cb835a587b', 'timestamp': '', 'source': 'github', 'line_count': 10, 'max_line_length': 77, 'avg_line_length': 39.7, 'alnum_prop': 0.7052896725440806, 'repo_name': 'niquola/health_seven', 'id': 'd3aa5df76075536c577245c3d923d9ce12428b42', 'size': '397', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/health_seven/2.7/messages/inu_u05.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '4124541'}]}
package org.jabref.logic.integrity; import java.util.Collections; import java.util.List; import java.util.Optional; import org.jabref.logic.l10n.Localization; import org.jabref.model.entry.BibEntry; import org.jabref.model.entry.field.StandardField; import org.jabref.model.entry.types.StandardEntryType; public class TypeChecker implements EntryChecker { @Override public List<IntegrityMessage> check(BibEntry entry) { Optional<String> value = entry.getField(StandardField.PAGES); if (value.isEmpty()) { return Collections.emptyList(); } if (StandardEntryType.Proceedings.equals(entry.getType())) { return Collections.singletonList(new IntegrityMessage( Localization.lang("wrong entry type as proceedings has page numbers"), entry, StandardField.PAGES)); } return Collections.emptyList(); } }
{'content_hash': 'b6c89b5e039fa4040b2110b035bec6f1', 'timestamp': '', 'source': 'github', 'line_count': 28, 'max_line_length': 120, 'avg_line_length': 32.285714285714285, 'alnum_prop': 0.7112831858407079, 'repo_name': 'sauliusg/jabref', 'id': 'f26db2a0e06341f2870e252524d3d0fb22ed4af9', 'size': '904', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'src/main/java/org/jabref/logic/integrity/TypeChecker.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ANTLR', 'bytes': '1751'}, {'name': 'AppleScript', 'bytes': '1378'}, {'name': 'Batchfile', 'bytes': '142'}, {'name': 'CSS', 'bytes': '47179'}, {'name': 'GAP', 'bytes': '1470'}, {'name': 'Groovy', 'bytes': '4948'}, {'name': 'Java', 'bytes': '7055949'}, {'name': 'PowerShell', 'bytes': '1635'}, {'name': 'Python', 'bytes': '8314'}, {'name': 'Ruby', 'bytes': '19971'}, {'name': 'Shell', 'bytes': '9434'}, {'name': 'TeX', 'bytes': '403476'}, {'name': 'XSLT', 'bytes': '2185'}]}
#ifndef _TIME_H_ #define _TIME_H_ #include <sys/param.h> #ifndef NULL #define NULL 0 #endif #if !defined(_CLOCK_T) #define _CLOCK_T typedef unsigned long clock_t; /* relative time in a specified resolution */ #endif #if !defined(_SIZE_T) #define _SIZE_T typedef unsigned int size_t; /* size of something in bytes */ #endif #if !defined(_TIME_T) #define _TIME_T typedef long time_t; /* time of day in seconds */ #endif struct tm { int tm_sec; /* seconds after the minute [0-60] */ int tm_min; /* minutes after the hour [0-59] */ int tm_hour; /* hours since midnight [0-23] */ int tm_mday; /* day of the month [1-31] */ int tm_mon; /* months since January [0-11] */ int tm_year; /* years since 1900 */ int tm_wday; /* days since Sunday [0-6] */ int tm_yday; /* days since January 1 [0-365] */ int tm_isdst; /* Daylight Savings Time flag */ }; #define CLK_TCK HZ /* ticks per second */ #define CLOCKS_PER_SEC HZ /* same with CLK_TCK */ #include <sys/cdefs.h> __BEGIN_DECLS char *asctime(const struct tm *); clock_t clock(void); char *ctime(const time_t *); double difftime(time_t, time_t); struct tm *gmtime(const time_t *); struct tm *localtime(const time_t *); time_t mktime(struct tm *); size_t strftime(char *, size_t, const char *, const struct tm *); time_t time(time_t *); char *asctime_r(const struct tm *, char *); struct tm *gmtime_r(const time_t *, struct tm *); struct tm *localtime_r(const time_t *, struct tm *); #ifndef _ANSI_SOURCE void tzset(void); #endif /* not ANSI */ #if !defined(_ANSI_SOURCE) && !defined(_POSIX_SOURCE) char *timezone(int, int); void tzsetwall(void); #endif /* neither ANSI nor POSIX */ __END_DECLS #endif /* !_TIME_H_ */
{'content_hash': '590fb4f1addcb31a007e853a325d78d6', 'timestamp': '', 'source': 'github', 'line_count': 70, 'max_line_length': 76, 'avg_line_length': 24.271428571428572, 'alnum_prop': 0.6539140670982931, 'repo_name': 'kimim/kimix', 'id': '532142f0ffd28d1ddd68bef76ec256b6ed375e4b', 'size': '3637', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'usr/include/time.h', 'mode': '33261', 'license': 'bsd-3-clause', 'language': [{'name': 'Assembly', 'bytes': '192952'}, {'name': 'Batchfile', 'bytes': '490'}, {'name': 'C', 'bytes': '2348888'}, {'name': 'C++', 'bytes': '20711'}, {'name': 'Emacs Lisp', 'bytes': '169'}, {'name': 'Makefile', 'bytes': '34415'}, {'name': 'Shell', 'bytes': '169'}]}
<!DOCTYPE html> <!-- Copyright (c) 2013 The Chromium Authors. All rights reserved. Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. --> <link rel="import" href="/core/timeline_viewport.html"> <script> 'use strict'; tv.b.unittest.testSuite(function() { test('memoization', function() { var vp = new tv.c.TimelineViewport(document.createElement('div')); var slice = { guid: 1 }; vp.modelTrackContainer = { addEventsToTrackMap: function(eventToTrackMap) { eventToTrackMap.addEvent(slice, 'track'); }, addEventListener: function() {} }; assert.isUndefined(vp.trackForEvent(slice)); vp.rebuildEventToTrackMap(); assert.equal(vp.trackForEvent(slice), 'track'); }); }); </script>
{'content_hash': 'eaf6b59f434d12cff454b967bcd4b9bf', 'timestamp': '', 'source': 'github', 'line_count': 34, 'max_line_length': 70, 'avg_line_length': 23.205882352941178, 'alnum_prop': 0.6730038022813688, 'repo_name': 'guorendong/iridium-browser-ubuntu', 'id': 'ca8826e079b2b2430c4ab63d2afd50aca27a3b00', 'size': '789', 'binary': False, 'copies': '2', 'ref': 'refs/heads/ubuntu/precise', 'path': 'third_party/trace-viewer/trace_viewer/core/timeline_viewport_test.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'AppleScript', 'bytes': '8402'}, {'name': 'Assembly', 'bytes': '256197'}, {'name': 'Batchfile', 'bytes': '34966'}, {'name': 'C', 'bytes': '15445429'}, {'name': 'C++', 'bytes': '276628399'}, {'name': 'CMake', 'bytes': '27829'}, {'name': 'CSS', 'bytes': '867238'}, {'name': 'Emacs Lisp', 'bytes': '3348'}, {'name': 'Go', 'bytes': '13628'}, {'name': 'Groff', 'bytes': '7777'}, {'name': 'HTML', 'bytes': '20250399'}, {'name': 'Java', 'bytes': '9950308'}, {'name': 'JavaScript', 'bytes': '13873772'}, {'name': 'LLVM', 'bytes': '1169'}, {'name': 'Logos', 'bytes': '6893'}, {'name': 'Lua', 'bytes': '16189'}, {'name': 'Makefile', 'bytes': '179129'}, {'name': 'Objective-C', 'bytes': '1871766'}, {'name': 'Objective-C++', 'bytes': '9674498'}, {'name': 'PHP', 'bytes': '42038'}, {'name': 'PLpgSQL', 'bytes': '163248'}, {'name': 'Perl', 'bytes': '63937'}, {'name': 'Protocol Buffer', 'bytes': '474121'}, {'name': 'Python', 'bytes': '11646662'}, {'name': 'Ragel in Ruby Host', 'bytes': '104923'}, {'name': 'Scheme', 'bytes': '10604'}, {'name': 'Shell', 'bytes': '1151673'}, {'name': 'Standard ML', 'bytes': '5034'}, {'name': 'VimL', 'bytes': '4075'}, {'name': 'nesC', 'bytes': '18347'}]}
from textwrap import dedent import unittest from ..query_formatting.graphql_formatting import pretty_print_graphql class GraphQLPrettyPrintTests(unittest.TestCase): def test_graphql_pretty_print_indentation(self) -> None: bad_query = """{ Animal { name @output(out_name: "name") } }""" four_space_output = dedent( """\ { Animal { name @output(out_name: "name") } } """ ) two_space_output = dedent( """\ { Animal { name @output(out_name: "name") } } """ ) self.assertEqual(four_space_output, pretty_print_graphql(bad_query)) self.assertEqual(two_space_output, pretty_print_graphql(bad_query, use_four_spaces=False)) def test_filter_directive_order(self) -> None: bad_query = """{ Animal @filter(value: ["$name"], op_name: "name_or_alias") { uuid @filter(value: ["$max_uuid"], op_name: "<=") out_Entity_Related { ...on Species{ name @output(out_name: "related_species") } } } }""" expected_output = dedent( """\ { Animal @filter(op_name: "name_or_alias", value: ["$name"]) { uuid @filter(op_name: "<=", value: ["$max_uuid"]) out_Entity_Related { ... on Species { name @output(out_name: "related_species") } } } } """ ) self.assertEqual(expected_output, pretty_print_graphql(bad_query)) def test_args_not_in_schema(self) -> None: bad_query = """{ Animal @filter(value: ["$name"], unknown_arg: "value", op_name: "name_or_alias") { uuid @filter(value: ["$max_uuid"], op_name: "<=") out_Entity_Related { ...on Species{ name @output(out_name: "related_species") } } } }""" expected_output = dedent( """\ { Animal @filter(op_name: "name_or_alias", value: ["$name"], unknown_arg: "value") { uuid @filter(op_name: "<=", value: ["$max_uuid"]) out_Entity_Related { ... 
on Species { name @output(out_name: "related_species") } } } } """ ) self.assertEqual(expected_output, pretty_print_graphql(bad_query)) def test_missing_args(self) -> None: bad_query = """{ Animal @filter(value: ["$name"]) { uuid @filter(value: ["$max_uuid"], op_name: "<=") out_Entity_Related { ...on Species{ name @output(out_name: "related_species") } } } }""" expected_output = dedent( """\ { Animal @filter(value: ["$name"]) { uuid @filter(op_name: "<=", value: ["$max_uuid"]) out_Entity_Related { ... on Species { name @output(out_name: "related_species") } } } } """ ) self.assertEqual(expected_output, pretty_print_graphql(bad_query)) def test_other_directive(self) -> None: bad_query = """{ Animal @filter(value: ["$name"]) { uuid @filter(value: ["$max_uuid"], op_name: "<=") out_Entity_Related @other(arg1: "val1", arg2: "val2") { ...on Species{ name @output(out_name: "related_species") } } } }""" expected_output = dedent( """\ { Animal @filter(value: ["$name"]) { uuid @filter(op_name: "<=", value: ["$max_uuid"]) out_Entity_Related @other(arg1: "val1", arg2: "val2") { ... on Species { name @output(out_name: "related_species") } } } } """ ) self.assertEqual(expected_output, pretty_print_graphql(bad_query))
{'content_hash': '3fbb2d1e331bbe3290bae9f2ce685549', 'timestamp': '', 'source': 'github', 'line_count': 153, 'max_line_length': 98, 'avg_line_length': 29.69281045751634, 'alnum_prop': 0.4131631080783623, 'repo_name': 'kensho-technologies/graphql-compiler', 'id': '78f38e5a158b625177f169e9748e7a88cbc9ecd2', 'size': '4594', 'binary': False, 'copies': '1', 'ref': 'refs/heads/main', 'path': 'graphql_compiler/tests/test_graphql_pretty_print.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Python', 'bytes': '8213336'}, {'name': 'Shell', 'bytes': '12556'}]}
namespace Microsoft.Azure.Management.DataFactory.Models { using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; /// <summary> /// Execute Synapse notebook activity. /// </summary> [Newtonsoft.Json.JsonObject("SynapseNotebook")] [Rest.Serialization.JsonTransformation] public partial class SynapseNotebookActivity : ExecutionActivity { /// <summary> /// Initializes a new instance of the SynapseNotebookActivity class. /// </summary> public SynapseNotebookActivity() { Notebook = new SynapseNotebookReference(); CustomInit(); } /// <summary> /// Initializes a new instance of the SynapseNotebookActivity class. /// </summary> /// <param name="name">Activity name.</param> /// <param name="notebook">Synapse notebook reference.</param> /// <param name="additionalProperties">Unmatched properties from the /// message are deserialized this collection</param> /// <param name="description">Activity description.</param> /// <param name="dependsOn">Activity depends on condition.</param> /// <param name="userProperties">Activity user properties.</param> /// <param name="linkedServiceName">Linked service reference.</param> /// <param name="policy">Activity policy.</param> /// <param name="sparkPool">The name of the big data pool which will be /// used to execute the notebook.</param> /// <param name="parameters">Notebook parameters.</param> /// <param name="executorSize">Number of core and memory to be used for /// executors allocated in the specified Spark pool for the session, /// which will be used for overriding 'executorCores' and /// 'executorMemory' of the notebook you provide. 
Type: string (or /// Expression with resultType string).</param> /// <param name="conf">Spark configuration properties, which will /// override the 'conf' of the notebook you provide.</param> /// <param name="driverSize">Number of core and memory to be used for /// driver allocated in the specified Spark pool for the session, which /// will be used for overriding 'driverCores' and 'driverMemory' of the /// notebook you provide. Type: string (or Expression with resultType /// string).</param> /// <param name="numExecutors">Number of executors to launch for this /// session, which will override the 'numExecutors' of the notebook you /// provide.</param> public SynapseNotebookActivity(string name, SynapseNotebookReference notebook, IDictionary<string, object> additionalProperties = default(IDictionary<string, object>), string description = default(string), IList<ActivityDependency> dependsOn = default(IList<ActivityDependency>), IList<UserProperty> userProperties = default(IList<UserProperty>), LinkedServiceReference linkedServiceName = default(LinkedServiceReference), ActivityPolicy policy = default(ActivityPolicy), BigDataPoolParametrizationReference sparkPool = default(BigDataPoolParametrizationReference), IDictionary<string, NotebookParameter> parameters = default(IDictionary<string, NotebookParameter>), object executorSize = default(object), object conf = default(object), object driverSize = default(object), int? numExecutors = default(int?)) : base(name, additionalProperties, description, dependsOn, userProperties, linkedServiceName, policy) { Notebook = notebook; SparkPool = sparkPool; Parameters = parameters; ExecutorSize = executorSize; Conf = conf; DriverSize = driverSize; NumExecutors = numExecutors; CustomInit(); } /// <summary> /// An initialization method that performs custom operations like setting defaults /// </summary> partial void CustomInit(); /// <summary> /// Gets or sets synapse notebook reference. 
/// </summary> [JsonProperty(PropertyName = "typeProperties.notebook")] public SynapseNotebookReference Notebook { get; set; } /// <summary> /// Gets or sets the name of the big data pool which will be used to /// execute the notebook. /// </summary> [JsonProperty(PropertyName = "typeProperties.sparkPool")] public BigDataPoolParametrizationReference SparkPool { get; set; } /// <summary> /// Gets or sets notebook parameters. /// </summary> [JsonProperty(PropertyName = "typeProperties.parameters")] public IDictionary<string, NotebookParameter> Parameters { get; set; } /// <summary> /// Gets or sets number of core and memory to be used for executors /// allocated in the specified Spark pool for the session, which will /// be used for overriding 'executorCores' and 'executorMemory' of the /// notebook you provide. Type: string (or Expression with resultType /// string). /// </summary> [JsonProperty(PropertyName = "typeProperties.executorSize")] public object ExecutorSize { get; set; } /// <summary> /// Gets or sets spark configuration properties, which will override /// the 'conf' of the notebook you provide. /// </summary> [JsonProperty(PropertyName = "typeProperties.conf")] public object Conf { get; set; } /// <summary> /// Gets or sets number of core and memory to be used for driver /// allocated in the specified Spark pool for the session, which will /// be used for overriding 'driverCores' and 'driverMemory' of the /// notebook you provide. Type: string (or Expression with resultType /// string). /// </summary> [JsonProperty(PropertyName = "typeProperties.driverSize")] public object DriverSize { get; set; } /// <summary> /// Gets or sets number of executors to launch for this session, which /// will override the 'numExecutors' of the notebook you provide. /// </summary> [JsonProperty(PropertyName = "typeProperties.numExecutors")] public int? NumExecutors { get; set; } /// <summary> /// Validate the object. 
/// </summary> /// <exception cref="ValidationException"> /// Thrown if validation fails /// </exception> public override void Validate() { base.Validate(); if (Notebook == null) { throw new ValidationException(ValidationRules.CannotBeNull, "Notebook"); } if (Notebook != null) { Notebook.Validate(); } if (SparkPool != null) { SparkPool.Validate(); } } } }
{'content_hash': '29f7d429c6d0f921792fbbe37adbdea6', 'timestamp': '', 'source': 'github', 'line_count': 150, 'max_line_length': 816, 'avg_line_length': 47.32666666666667, 'alnum_prop': 0.6330469080152135, 'repo_name': 'Azure/azure-sdk-for-net', 'id': '1a50ff8d86bbe6f3d5996deffbbd45b396cf9d75', 'size': '7452', 'binary': False, 'copies': '1', 'ref': 'refs/heads/main', 'path': 'sdk/datafactory/Microsoft.Azure.Management.DataFactory/src/Generated/Models/SynapseNotebookActivity.cs', 'mode': '33188', 'license': 'mit', 'language': []}
default_target: all .PHONY : default_target #============================================================================= # Special targets provided by cmake. # Disable implicit rules so canonical targets will work. .SUFFIXES: # Remove some rules from gmake that .SUFFIXES does not remove. SUFFIXES = .SUFFIXES: .hpux_make_needs_suffix_list # Suppress display of executed commands. $(VERBOSE).SILENT: # A target that is always out of date. cmake_force: .PHONY : cmake_force #============================================================================= # Set environment variables for the build. # The shell in which to execute make rules. SHELL = /bin/sh # The CMake executable. CMAKE_COMMAND = /usr/bin/cmake # The command to remove a file. RM = /usr/bin/cmake -E remove -f # Escaping for special characters. EQUALS = = # The program to use to edit the cache. CMAKE_EDIT_COMMAND = /usr/bin/cmake-gui # The top-level source directory on which CMake was run. CMAKE_SOURCE_DIR = /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33 # The top-level build directory on which CMake was run. CMAKE_BINARY_DIR = /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build #============================================================================= # Targets provided globally by CMake. # Special rule for the target edit_cache edit_cache: @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake cache editor..." /usr/bin/cmake-gui -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) .PHONY : edit_cache # Special rule for the target edit_cache edit_cache/fast: edit_cache .PHONY : edit_cache/fast # Special rule for the target install install: preinstall @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..." /usr/bin/cmake -P cmake_install.cmake .PHONY : install # Special rule for the target install install/fast: preinstall/fast @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Install the project..." 
/usr/bin/cmake -P cmake_install.cmake .PHONY : install/fast # Special rule for the target install/local install/local: preinstall @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing only the local directory..." /usr/bin/cmake -DCMAKE_INSTALL_LOCAL_ONLY=1 -P cmake_install.cmake .PHONY : install/local # Special rule for the target install/local install/local/fast: install/local .PHONY : install/local/fast # Special rule for the target install/strip install/strip: preinstall @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Installing the project stripped..." /usr/bin/cmake -DCMAKE_INSTALL_DO_STRIP=1 -P cmake_install.cmake .PHONY : install/strip # Special rule for the target install/strip install/strip/fast: install/strip .PHONY : install/strip/fast # Special rule for the target list_install_components list_install_components: @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Available install components are: \"Unspecified\" \"libassimp3.0-r1270-OGLtuts\"" .PHONY : list_install_components # Special rule for the target list_install_components list_install_components/fast: list_install_components .PHONY : list_install_components/fast # Special rule for the target rebuild_cache rebuild_cache: @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..." 
/usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) .PHONY : rebuild_cache # Special rule for the target rebuild_cache rebuild_cache/fast: rebuild_cache .PHONY : rebuild_cache/fast # The main all target all: cmake_check_build_system cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(CMAKE_COMMAND) -E cmake_progress_start /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build/CMakeFiles /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build/external/assimp-3.0.1270/contrib/zlib/CMakeFiles/progress.marks cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f CMakeFiles/Makefile2 external/assimp-3.0.1270/contrib/zlib/all $(CMAKE_COMMAND) -E cmake_progress_start /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build/CMakeFiles 0 .PHONY : all # The main clean target clean: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f CMakeFiles/Makefile2 external/assimp-3.0.1270/contrib/zlib/clean .PHONY : clean # The main clean target clean/fast: clean .PHONY : clean/fast # Prepare targets for installation. preinstall: all cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f CMakeFiles/Makefile2 external/assimp-3.0.1270/contrib/zlib/preinstall .PHONY : preinstall # Prepare targets for installation. preinstall/fast: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f CMakeFiles/Makefile2 external/assimp-3.0.1270/contrib/zlib/preinstall .PHONY : preinstall/fast # clear depends depend: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1 .PHONY : depend # Convenience name for target. 
external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/rule: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f CMakeFiles/Makefile2 external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/rule .PHONY : external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/rule # Convenience name for target. zlib: external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/rule .PHONY : zlib # fast build rule for target. zlib/fast: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build .PHONY : zlib/fast adler32.o: adler32.c.o .PHONY : adler32.o # target to build an object file adler32.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/adler32.c.o .PHONY : adler32.c.o adler32.i: adler32.c.i .PHONY : adler32.i # target to preprocess a source file adler32.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/adler32.c.i .PHONY : adler32.c.i adler32.s: adler32.c.s .PHONY : adler32.s # target to generate assembly for a file adler32.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/adler32.c.s .PHONY : adler32.c.s compress.o: compress.c.o .PHONY : compress.o # target to build an object file compress.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/compress.c.o .PHONY : compress.c.o compress.i: 
compress.c.i .PHONY : compress.i # target to preprocess a source file compress.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/compress.c.i .PHONY : compress.c.i compress.s: compress.c.s .PHONY : compress.s # target to generate assembly for a file compress.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/compress.c.s .PHONY : compress.c.s crc32.o: crc32.c.o .PHONY : crc32.o # target to build an object file crc32.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/crc32.c.o .PHONY : crc32.c.o crc32.i: crc32.c.i .PHONY : crc32.i # target to preprocess a source file crc32.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/crc32.c.i .PHONY : crc32.c.i crc32.s: crc32.c.s .PHONY : crc32.s # target to generate assembly for a file crc32.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/crc32.c.s .PHONY : crc32.c.s deflate.o: deflate.c.o .PHONY : deflate.o # target to build an object file deflate.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/deflate.c.o .PHONY : deflate.c.o deflate.i: deflate.c.i .PHONY : deflate.i # target to preprocess a source 
file deflate.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/deflate.c.i .PHONY : deflate.c.i deflate.s: deflate.c.s .PHONY : deflate.s # target to generate assembly for a file deflate.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/deflate.c.s .PHONY : deflate.c.s inffast.o: inffast.c.o .PHONY : inffast.o # target to build an object file inffast.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inffast.c.o .PHONY : inffast.c.o inffast.i: inffast.c.i .PHONY : inffast.i # target to preprocess a source file inffast.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inffast.c.i .PHONY : inffast.c.i inffast.s: inffast.c.s .PHONY : inffast.s # target to generate assembly for a file inffast.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inffast.c.s .PHONY : inffast.c.s inflate.o: inflate.c.o .PHONY : inflate.o # target to build an object file inflate.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inflate.c.o .PHONY : inflate.c.o inflate.i: inflate.c.i .PHONY : inflate.i # target to preprocess a source file inflate.c.i: cd 
/home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inflate.c.i .PHONY : inflate.c.i inflate.s: inflate.c.s .PHONY : inflate.s # target to generate assembly for a file inflate.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inflate.c.s .PHONY : inflate.c.s inftrees.o: inftrees.c.o .PHONY : inftrees.o # target to build an object file inftrees.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inftrees.c.o .PHONY : inftrees.c.o inftrees.i: inftrees.c.i .PHONY : inftrees.i # target to preprocess a source file inftrees.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inftrees.c.i .PHONY : inftrees.c.i inftrees.s: inftrees.c.s .PHONY : inftrees.s # target to generate assembly for a file inftrees.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/inftrees.c.s .PHONY : inftrees.c.s trees.o: trees.c.o .PHONY : trees.o # target to build an object file trees.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/trees.c.o .PHONY : trees.c.o trees.i: trees.c.i .PHONY : trees.i # target to preprocess a source file trees.c.i: cd 
/home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/trees.c.i .PHONY : trees.c.i trees.s: trees.c.s .PHONY : trees.s # target to generate assembly for a file trees.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/trees.c.s .PHONY : trees.c.s zutil.o: zutil.c.o .PHONY : zutil.o # target to build an object file zutil.c.o: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/zutil.c.o .PHONY : zutil.c.o zutil.i: zutil.c.i .PHONY : zutil.i # target to preprocess a source file zutil.c.i: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/zutil.c.i .PHONY : zutil.c.i zutil.s: zutil.c.s .PHONY : zutil.s # target to generate assembly for a file zutil.c.s: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(MAKE) -f external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/build.make external/assimp-3.0.1270/contrib/zlib/CMakeFiles/zlib.dir/zutil.c.s .PHONY : zutil.c.s # Help Target help: @echo "The following are some of the valid targets for this Makefile:" @echo "... all (the default if no target is provided)" @echo "... clean" @echo "... depend" @echo "... edit_cache" @echo "... install" @echo "... install/local" @echo "... install/strip" @echo "... list_install_components" @echo "... rebuild_cache" @echo "... zlib" @echo "... adler32.o" @echo "... adler32.i" @echo "... adler32.s" @echo "... compress.o" @echo "... compress.i" @echo "... compress.s" @echo "... 
crc32.o" @echo "... crc32.i" @echo "... crc32.s" @echo "... deflate.o" @echo "... deflate.i" @echo "... deflate.s" @echo "... inffast.o" @echo "... inffast.i" @echo "... inffast.s" @echo "... inflate.o" @echo "... inflate.i" @echo "... inflate.s" @echo "... inftrees.o" @echo "... inftrees.i" @echo "... inftrees.s" @echo "... trees.o" @echo "... trees.i" @echo "... trees.s" @echo "... zutil.o" @echo "... zutil.i" @echo "... zutil.s" .PHONY : help #============================================================================= # Special targets to cleanup operation of make. # Special rule to run CMake to check the build system integrity. # No rule that depends on this can have commands that come from listfiles # because they might be regenerated. cmake_check_build_system: cd /home/thijs/codes/BSshadow/OpenGL-tutorial_v0014_33/build && $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 .PHONY : cmake_check_build_system
{'content_hash': 'bf798fc3004db44b115a2386007999a8', 'timestamp': '', 'source': 'github', 'line_count': 424, 'max_line_length': 296, 'avg_line_length': 38.134433962264154, 'alnum_prop': 0.7313377450677222, 'repo_name': 'thijser/BSshadow', 'id': '6243564d4227d8fe76040ca0b40941b8c9b5737e', 'size': '16331', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'OpenGL-tutorial_v0014_33/build/external/assimp-3.0.1270/contrib/zlib/Makefile', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '12791'}, {'name': 'C#', 'bytes': '659'}, {'name': 'C++', 'bytes': '302711'}, {'name': 'CMake', 'bytes': '122089'}, {'name': 'GLSL', 'bytes': '10617'}, {'name': 'Makefile', 'bytes': '135'}, {'name': 'Shell', 'bytes': '119079'}]}
class RemoteRepoRevision < ActiveRecord::Base belongs_to :repo, class_name: 'RemoteRepo', foreign_key: 'remote_repo_id' belongs_to :author, class_name: 'User' belongs_to :committer, class_name: 'User' has_and_belongs_to_many :related_issues, class_name: 'Issue', join_table: 'remote_repo_related_issues' has_and_belongs_to_many :refs, class_name: 'RemoteRepoRef', join_table: 'remote_repo_refs_revs' has_many :applied_hooks, class_name: 'RemoteRepoHook', dependent: :nullify has_many :time_entries, dependent: :destroy validates :repo, presence: true validates :sha, presence: true def uri [repo.uri.chomp('/'), 'commit', sha].join('/') end def redmine_uri "\"#{short_sha}\":#{uri}" end def short_sha sha[0..7] end def branches refs.map(&:name).uniq end def ensure_issue_is_related(issue) related_issues << issue unless related_issues.exists?(issue.id) end def author_string "#{author_name} <#{author_email}>" end def committer_string "#{committer_name} <#{committer_email}>" end end
{'content_hash': '159acfd6f67029e0a422289c5336c5d7', 'timestamp': '', 'source': 'github', 'line_count': 41, 'max_line_length': 104, 'avg_line_length': 26.024390243902438, 'alnum_prop': 0.6776007497656982, 'repo_name': 'Restream/redmine_undev_git', 'id': '4f2cbfd90affb44ae4243b701406f297f207683a', 'size': '1067', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'app/models/remote_repo_revision.rb', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '1145'}, {'name': 'HTML', 'bytes': '15892'}, {'name': 'JavaScript', 'bytes': '514'}, {'name': 'Ruby', 'bytes': '280047'}]}
(function () { var defs = {}; // id -> {dependencies, definition, instance (possibly undefined)} // Used when there is no 'main' module. // The name is probably (hopefully) unique so minification removes for releases. var register_3795 = function (id) { var module = dem(id); var fragments = id.split('.'); var target = Function('return this;')(); for (var i = 0; i < fragments.length - 1; ++i) { if (target[fragments[i]] === undefined) target[fragments[i]] = {}; target = target[fragments[i]]; } target[fragments[fragments.length - 1]] = module; }; var instantiate = function (id) { var actual = defs[id]; var dependencies = actual.deps; var definition = actual.defn; var len = dependencies.length; var instances = new Array(len); for (var i = 0; i < len; ++i) instances[i] = dem(dependencies[i]); var defResult = definition.apply(null, instances); if (defResult === undefined) throw 'module [' + id + '] returned undefined'; actual.instance = defResult; }; var def = function (id, dependencies, definition) { if (typeof id !== 'string') throw 'module id must be a string'; else if (dependencies === undefined) throw 'no dependencies for ' + id; else if (definition === undefined) throw 'no definition function for ' + id; defs[id] = { deps: dependencies, defn: definition, instance: undefined }; }; var dem = function (id) { var actual = defs[id]; if (actual === undefined) throw 'module [' + id + '] was undefined'; else if (actual.instance === undefined) instantiate(id); return actual.instance; }; var req = function (ids, callback) { var len = ids.length; var instances = new Array(len); for (var i = 0; i < len; ++i) instances.push(dem(ids[i])); callback.apply(null, callback); }; var ephox = {}; ephox.bolt = { module: { api: { define: def, require: req, demand: dem } } }; var define = def; var require = req; var demand = dem; // this helps with minificiation when using a lot of global references var defineGlobal = function (id, ref) { define(id, [], function () { return ref; }); }; 
/*jsc ["tinymce.plugins.table.Plugin","tinymce.plugins.table.model.TableGrid","tinymce.plugins.table.util.Quirks","tinymce.plugins.table.selection.CellSelection","tinymce.plugins.table.ui.Dialogs","tinymce.plugins.table.ui.ResizeBars","tinymce.core.util.Tools","tinymce.core.dom.TreeWalker","tinymce.core.Env","tinymce.core.PluginManager","global!tinymce.util.Tools.resolve","tinymce.plugins.table.util.Utils","tinymce.plugins.table.model.SplitCols","tinymce.core.util.VK","tinymce.core.util.Delay"] jsc*/ defineGlobal("global!tinymce.util.Tools.resolve", tinymce.util.Tools.resolve); /** * ResolveGlobal.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ define( 'tinymce.core.util.Tools', [ 'global!tinymce.util.Tools.resolve' ], function (resolve) { return resolve('tinymce.util.Tools'); } ); /** * ResolveGlobal.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ define( 'tinymce.core.Env', [ 'global!tinymce.util.Tools.resolve' ], function (resolve) { return resolve('tinymce.Env'); } ); /** * Utils.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * Various utility functions. 
* * @class tinymce.table.util.Utils * @private */ define( 'tinymce.plugins.table.util.Utils', [ 'tinymce.core.Env' ], function (Env) { var setSpanVal = function (name) { return function (td, val) { if (td) { val = parseInt(val, 10); if (val === 1 || val === 0) { td.removeAttribute(name, 1); } else { td.setAttribute(name, val, 1); } } }; }; var getSpanVal = function (name) { return function (td) { return parseInt(td.getAttribute(name) || 1, 10); }; }; function paddCell(cell) { if (!Env.ie || Env.ie > 9) { if (!cell.hasChildNodes()) { cell.innerHTML = '<br data-mce-bogus="1" />'; } } } return { setColSpan: setSpanVal('colSpan'), setRowSpan: setSpanVal('rowspan'), getColSpan: getSpanVal('colSpan'), getRowSpan: getSpanVal('rowSpan'), setSpanVal: function (td, name, value) { setSpanVal(name)(td, value); }, getSpanVal: function (td, name) { return getSpanVal(name)(td); }, paddCell: paddCell }; } ); /** * SplitCols.js * * Released under LGPL License. * Copyright (c) 1999-2016 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * Contains logic for handling splitting of merged rows. * * @class tinymce.table.model.SplitCols * @private */ define( 'tinymce.plugins.table.model.SplitCols', [ 'tinymce.core.util.Tools', 'tinymce.plugins.table.util.Utils' ], function (Tools, Utils) { var getCellAt = function (grid, x, y) { return grid[y] ? grid[y][x] : null; }; var getCellElmAt = function (grid, x, y) { var cell = getCellAt(grid, x, y); return cell ? cell.elm : null; }; var countHoles = function (grid, x, y, delta) { var y2, cell, count = 0, elm = getCellElmAt(grid, x, y); for (y2 = y; delta > 0 ? 
y2 < grid.length : y2 >= 0; y2 += delta) { cell = getCellAt(grid, x, y2); if (elm !== cell.elm) { break; } count++; } return count; }; var findRealElm = function (grid, x, y) { var cell, row = grid[y]; for (var x2 = x; x2 < row.length; x2++) { cell = row[x2]; if (cell.real) { return cell.elm; } } return null; }; var getRowSplitInfo = function (grid, y) { var cell, result = [], row = grid[y]; for (var x = 0; x < row.length; x++) { cell = row[x]; result.push({ elm: cell.elm, above: countHoles(grid, x, y, -1) - 1, below: countHoles(grid, x, y, 1) - 1 }); x += Utils.getColSpan(cell.elm) - 1; } return result; }; var createCell = function (info, rowSpan) { var doc = info.elm.ownerDocument; var newCell = doc.createElement('td'); Utils.setColSpan(newCell, Utils.getColSpan(info.elm)); Utils.setRowSpan(newCell, rowSpan); Utils.paddCell(newCell); return newCell; }; var insertOrAppendCell = function (grid, newCell, x, y) { var realCellElm = findRealElm(grid, x + 1, y); if (!realCellElm) { realCellElm = findRealElm(grid, 0, y); realCellElm.parentNode.appendChild(newCell); } else { realCellElm.parentNode.insertBefore(newCell, realCellElm); } }; var splitAbove = function (grid, info, x, y) { if (info.above !== 0) { Utils.setRowSpan(info.elm, info.above); var cell = createCell(info, info.below + 1); insertOrAppendCell(grid, cell, x, y); return cell; } return null; }; var splitBelow = function (grid, info, x, y) { if (info.below !== 0) { Utils.setRowSpan(info.elm, info.above + 1); var cell = createCell(info, info.below); insertOrAppendCell(grid, cell, x, y + 1); return cell; } return null; }; var splitAt = function (grid, x, y, before) { var rowInfos = getRowSplitInfo(grid, y); var rowElm = getCellElmAt(grid, x, y).parentNode; var cells = []; Tools.each(rowInfos, function (info, x) { var cell = before ? 
splitAbove(grid, info, x, y) : splitBelow(grid, info, x, y); if (cell !== null) { cells.push(cells); } }); return { cells: cells, row: rowElm }; }; return { splitAt: splitAt }; } ); /** * TableGrid.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * This class creates a grid out of a table element. This * makes it a whole lot easier to handle complex tables with * col/row spans. * * @class tinymce.table.model.TableGrid * @private */ define( 'tinymce.plugins.table.model.TableGrid', [ 'tinymce.core.util.Tools', 'tinymce.core.Env', 'tinymce.plugins.table.util.Utils', 'tinymce.plugins.table.model.SplitCols' ], function (Tools, Env, Utils, SplitCols) { var each = Tools.each, getSpanVal = Utils.getSpanVal, setSpanVal = Utils.setSpanVal; return function (editor, table, selectedCell) { var grid, gridWidth, startPos, endPos, selection = editor.selection, dom = selection.dom; function removeCellSelection() { editor.$('td[data-mce-selected],th[data-mce-selected]').removeAttr('data-mce-selected'); } function isEditorBody(node) { return node === editor.getBody(); } function getChildrenByName(node, names) { if (!node) { return []; } names = Tools.map(names.split(','), function (name) { return name.toLowerCase(); }); return Tools.grep(node.childNodes, function (node) { return Tools.inArray(names, node.nodeName.toLowerCase()) !== -1; }); } function buildGrid() { var startY = 0; grid = []; gridWidth = 0; each(['thead', 'tbody', 'tfoot'], function (part) { var partElm = getChildrenByName(table, part)[0]; var rows = getChildrenByName(partElm, 'tr'); each(rows, function (tr, y) { y += startY; each(getChildrenByName(tr, 'td,th'), function (td, x) { var x2, y2, rowspan, colspan; // Skip over existing cells produced by rowspan if (grid[y]) { while (grid[y][x]) { x++; } } // Get col/rowspan from cell rowspan = getSpanVal(td, 'rowspan'); colspan 
= getSpanVal(td, 'colspan'); // Fill out rowspan/colspan right and down for (y2 = y; y2 < y + rowspan; y2++) { if (!grid[y2]) { grid[y2] = []; } for (x2 = x; x2 < x + colspan; x2++) { grid[y2][x2] = { part: part, real: y2 == y && x2 == x, elm: td, rowspan: rowspan, colspan: colspan }; } } gridWidth = Math.max(gridWidth, x + 1); }); }); startY += rows.length; }); } function fireNewRow(node) { editor.fire('newrow', { node: node }); return node; } function fireNewCell(node) { editor.fire('newcell', { node: node }); return node; } function cloneNode(node, children) { node = node.cloneNode(children); node.removeAttribute('id'); return node; } function getCell(x, y) { var row; row = grid[y]; if (row) { return row[x]; } } function getRow(grid, y) { return grid[y] ? grid[y] : null; } function getColumn(grid, x) { var out = []; for (var y = 0; y < grid.length; y++) { out.push(getCell(x, y)); } return out; } function isCellSelected(cell) { return cell && (!!dom.getAttrib(cell.elm, 'data-mce-selected') || cell == selectedCell); } function getSelectedRows() { var rows = []; each(table.rows, function (row) { each(row.cells, function (cell) { if (dom.getAttrib(cell, 'data-mce-selected') || (selectedCell && cell == selectedCell.elm)) { rows.push(row); return false; } }); }); return rows; } function countSelectedCols() { var cols = 0; each(grid, function (row) { each(row, function (cell) { if (isCellSelected(cell)) { cols++; } }); if (cols) { return false; } }); return cols; } function deleteTable() { var rng = dom.createRng(); if (isEditorBody(table)) { return; } rng.setStartAfter(table); rng.setEndAfter(table); selection.setRng(rng); dom.remove(table); } function cloneCell(cell) { var formatNode, cloneFormats = {}; if (editor.settings.table_clone_elements !== false) { cloneFormats = Tools.makeMap( (editor.settings.table_clone_elements || 'strong em b i span font h1 h2 h3 h4 h5 h6 p div').toUpperCase(), /[ ,]/ ); } // Clone formats Tools.walk(cell, function (node) { var curNode; 
if (node.nodeType == 3) { each(dom.getParents(node.parentNode, null, cell).reverse(), function (node) { if (!cloneFormats[node.nodeName]) { return; } node = cloneNode(node, false); if (!formatNode) { formatNode = curNode = node; } else if (curNode) { curNode.appendChild(node); } curNode = node; }); // Add something to the inner node if (curNode) { curNode.innerHTML = Env.ie && Env.ie < 10 ? '&nbsp;' : '<br data-mce-bogus="1" />'; } return false; } }, 'childNodes'); cell = cloneNode(cell, false); fireNewCell(cell); setSpanVal(cell, 'rowSpan', 1); setSpanVal(cell, 'colSpan', 1); if (formatNode) { cell.appendChild(formatNode); } else { Utils.paddCell(cell); } return cell; } function cleanup() { var rng = dom.createRng(), row; // Empty rows each(dom.select('tr', table), function (tr) { if (tr.cells.length === 0) { dom.remove(tr); } }); // Empty table if (dom.select('tr', table).length === 0) { rng.setStartBefore(table); rng.setEndBefore(table); selection.setRng(rng); dom.remove(table); return; } // Empty header/body/footer each(dom.select('thead,tbody,tfoot', table), function (part) { if (part.rows.length === 0) { dom.remove(part); } }); // Restore selection to start position if it still exists buildGrid(); // If we have a valid startPos object if (startPos) { // Restore the selection to the closest table position row = grid[Math.min(grid.length - 1, startPos.y)]; if (row) { selection.select(row[Math.min(row.length - 1, startPos.x)].elm, true); selection.collapse(true); } } } function fillLeftDown(x, y, rows, cols) { var tr, x2, r, c, cell; tr = grid[y][x].elm.parentNode; for (r = 1; r <= rows; r++) { tr = dom.getNext(tr, 'tr'); if (tr) { // Loop left to find real cell for (x2 = x; x2 >= 0; x2--) { cell = grid[y + r][x2].elm; if (cell.parentNode == tr) { // Append clones after for (c = 1; c <= cols; c++) { dom.insertAfter(cloneCell(cell), cell); } break; } } if (x2 == -1) { // Insert nodes before first cell for (c = 1; c <= cols; c++) { 
tr.insertBefore(cloneCell(tr.cells[0]), tr.cells[0]); } } } } } function split() { each(grid, function (row, y) { each(row, function (cell, x) { var colSpan, rowSpan, i; if (isCellSelected(cell)) { cell = cell.elm; colSpan = getSpanVal(cell, 'colspan'); rowSpan = getSpanVal(cell, 'rowspan'); if (colSpan > 1 || rowSpan > 1) { setSpanVal(cell, 'rowSpan', 1); setSpanVal(cell, 'colSpan', 1); // Insert cells right for (i = 0; i < colSpan - 1; i++) { dom.insertAfter(cloneCell(cell), cell); } fillLeftDown(x, y, rowSpan - 1, colSpan); } } }); }); } function findItemsOutsideOfRange(items, start, end) { var out = []; for (var i = 0; i < items.length; i++) { if (i < start || i > end) { out.push(items[i]); } } return out; } function getFakeCells(cells) { return Tools.grep(cells, function (cell) { return cell.real === false; }); } function getUniqueElms(cells) { var elms = []; for (var i = 0; i < cells.length; i++) { var elm = cells[i].elm; if (elms[elms.length - 1] !== elm) { elms.push(elm); } } return elms; } function reduceRowSpans(grid, startX, startY, endX, endY) { var count = 0; if (endY - startY < 1) { return 0; } for (var y = startY + 1; y <= endY; y++) { var allCells = findItemsOutsideOfRange(getRow(grid, y), startX, endX); var fakeCells = getFakeCells(allCells); if (allCells.length === fakeCells.length) { Tools.each(getUniqueElms(fakeCells), function (elm) { Utils.setRowSpan(elm, Utils.getRowSpan(elm) - 1); }); count++; } } return count; } function reduceColSpans(grid, startX, startY, endX, endY) { var count = 0; if (endX - startX < 1) { return 0; } for (var x = startX + 1; x <= endX; x++) { var allCells = findItemsOutsideOfRange(getColumn(grid, x), startY, endY); var fakeCells = getFakeCells(allCells); if (allCells.length === fakeCells.length) { Tools.each(getUniqueElms(fakeCells), function (elm) { Utils.setColSpan(elm, Utils.getColSpan(elm) - 1); }); count++; } } return count; } function merge(cell, cols, rows) { var pos, startX, startY, endX, endY, x, y, startCell, 
endCell, children, count, reducedRows, reducedCols; // Use specified cell and cols/rows if (cell) { pos = getPos(cell); startX = pos.x; startY = pos.y; endX = startX + (cols - 1); endY = startY + (rows - 1); } else { startPos = endPos = null; // Calculate start/end pos by checking for selected cells in grid works better with context menu each(grid, function (row, y) { each(row, function (cell, x) { if (isCellSelected(cell)) { if (!startPos) { startPos = { x: x, y: y }; } endPos = { x: x, y: y }; } }); }); // Use selection, but make sure startPos is valid before accessing if (startPos) { startX = startPos.x; startY = startPos.y; endX = endPos.x; endY = endPos.y; } } // Find start/end cells startCell = getCell(startX, startY); endCell = getCell(endX, endY); // Check if the cells exists and if they are of the same part for example tbody = tbody if (startCell && endCell && startCell.part == endCell.part) { // Split and rebuild grid split(); buildGrid(); reducedRows = reduceRowSpans(grid, startX, startY, endX, endY); reducedCols = reduceColSpans(grid, startX, startY, endX, endY); // Set row/col span to start cell startCell = getCell(startX, startY).elm; var colSpan = (endX - startX - reducedCols) + 1; var rowSpan = (endY - startY - reducedRows) + 1; // All cells in table selected then just make it a table with one cell if (colSpan === gridWidth && rowSpan === grid.length) { colSpan = 1; rowSpan = 1; } // Multiple whole rows selected then just make it one rowSpan if (colSpan === gridWidth && rowSpan > 1) { rowSpan = 1; } setSpanVal(startCell, 'colSpan', colSpan); setSpanVal(startCell, 'rowSpan', rowSpan); // Remove other cells and add it's contents to the start cell for (y = startY; y <= endY; y++) { for (x = startX; x <= endX; x++) { if (!grid[y] || !grid[y][x]) { continue; } cell = grid[y][x].elm; /*jshint loopfunc:true */ /*eslint no-loop-func:0 */ if (cell != startCell) { // Move children to startCell children = Tools.grep(cell.childNodes); each(children, function 
(node) { startCell.appendChild(node); }); // Remove bogus nodes if there is children in the target cell if (children.length) { children = Tools.grep(startCell.childNodes); count = 0; each(children, function (node) { if (node.nodeName == 'BR' && count++ < children.length - 1) { startCell.removeChild(node); } }); } dom.remove(cell); } } } // Remove empty rows etc and restore caret location cleanup(); } } function insertRow(before) { var posY, cell, lastCell, x, rowElm, newRow, newCell, otherCell, rowSpan, spanValue; // Find first/last row each(grid, function (row, y) { each(row, function (cell) { if (isCellSelected(cell)) { cell = cell.elm; rowElm = cell.parentNode; newRow = fireNewRow(cloneNode(rowElm, false)); posY = y; if (before) { return false; } } }); if (before) { return posY === undefined; } }); // If posY is undefined there is nothing for us to do here...just return to avoid crashing below if (posY === undefined) { return; } for (x = 0, spanValue = 0; x < grid[0].length; x += spanValue) { // Cell not found could be because of an invalid table structure if (!grid[posY][x]) { continue; } cell = grid[posY][x].elm; spanValue = getSpanVal(cell, 'colspan'); if (cell != lastCell) { if (!before) { rowSpan = getSpanVal(cell, 'rowspan'); if (rowSpan > 1) { setSpanVal(cell, 'rowSpan', rowSpan + 1); continue; } } else { // Check if cell above can be expanded if (posY > 0 && grid[posY - 1][x]) { otherCell = grid[posY - 1][x].elm; rowSpan = getSpanVal(otherCell, 'rowSpan'); if (rowSpan > 1) { setSpanVal(otherCell, 'rowSpan', rowSpan + 1); continue; } } } // Insert new cell into new row newCell = cloneCell(cell); setSpanVal(newCell, 'colSpan', cell.colSpan); newRow.appendChild(newCell); lastCell = cell; } } if (newRow.hasChildNodes()) { if (!before) { dom.insertAfter(newRow, rowElm); } else { rowElm.parentNode.insertBefore(newRow, rowElm); } } } function insertRows(before, num) { num = num || getSelectedRows().length || 1; for (var i = 0; i < num; i++) { insertRow(before); 
} } function insertCol(before) { var posX, lastCell; // Find first/last column each(grid, function (row) { each(row, function (cell, x) { if (isCellSelected(cell)) { posX = x; if (before) { return false; } } }); if (before) { return posX === undefined; } }); each(grid, function (row, y) { var cell, rowSpan, colSpan; if (!row[posX]) { return; } cell = row[posX].elm; if (cell != lastCell) { colSpan = getSpanVal(cell, 'colspan'); rowSpan = getSpanVal(cell, 'rowspan'); if (colSpan == 1) { if (!before) { dom.insertAfter(cloneCell(cell), cell); fillLeftDown(posX, y, rowSpan - 1, colSpan); } else { cell.parentNode.insertBefore(cloneCell(cell), cell); fillLeftDown(posX, y, rowSpan - 1, colSpan); } } else { setSpanVal(cell, 'colSpan', cell.colSpan + 1); } lastCell = cell; } }); } function insertCols(before, num) { num = num || countSelectedCols() || 1; for (var i = 0; i < num; i++) { insertCol(before); } } function getSelectedCells(grid) { return Tools.grep(getAllCells(grid), isCellSelected); } function getAllCells(grid) { var cells = []; each(grid, function (row) { each(row, function (cell) { cells.push(cell); }); }); return cells; } function deleteCols() { var cols = []; if (isEditorBody(table)) { if (grid[0].length == 1) { return; } if (getSelectedCells(grid).length == getAllCells(grid).length) { return; } } // Get selected column indexes each(grid, function (row) { each(row, function (cell, x) { if (isCellSelected(cell) && Tools.inArray(cols, x) === -1) { each(grid, function (row) { var cell = row[x].elm, colSpan; colSpan = getSpanVal(cell, 'colSpan'); if (colSpan > 1) { setSpanVal(cell, 'colSpan', colSpan - 1); } else { dom.remove(cell); } }); cols.push(x); } }); }); cleanup(); } function deleteRows() { var rows; function deleteRow(tr) { var pos, lastCell; // Move down row spanned cells each(tr.cells, function (cell) { var rowSpan = getSpanVal(cell, 'rowSpan'); if (rowSpan > 1) { setSpanVal(cell, 'rowSpan', rowSpan - 1); pos = getPos(cell); fillLeftDown(pos.x, pos.y, 
1, 1); } }); // Delete cells pos = getPos(tr.cells[0]); each(grid[pos.y], function (cell) { var rowSpan; cell = cell.elm; if (cell != lastCell) { rowSpan = getSpanVal(cell, 'rowSpan'); if (rowSpan <= 1) { dom.remove(cell); } else { setSpanVal(cell, 'rowSpan', rowSpan - 1); } lastCell = cell; } }); } // Get selected rows and move selection out of scope rows = getSelectedRows(); if (isEditorBody(table) && rows.length == table.rows.length) { return; } // Delete all selected rows each(rows.reverse(), function (tr) { deleteRow(tr); }); cleanup(); } function cutRows() { var rows = getSelectedRows(); if (isEditorBody(table) && rows.length == table.rows.length) { return; } dom.remove(rows); cleanup(); return rows; } function copyRows() { var rows = getSelectedRows(); each(rows, function (row, i) { rows[i] = cloneNode(row, true); }); return rows; } function pasteRows(rows, before) { var splitResult, targetRow, newRows; // indices of the rows where rowspans expire (a way to handle multiple rowspans in the same row) var rowSpansDueAt = []; // Nothing to paste if (!rows) { return; } splitResult = SplitCols.splitAt(grid, startPos.x, startPos.y, before); targetRow = splitResult.row; Tools.each(splitResult.cells, fireNewCell); newRows = Tools.map(rows, function (row) { return row.cloneNode(true); }); each(newRows, function (row, y, rows) { var x, cellCount = row.cells.length, cell, colCount = 0, rowSpan, colSpan; fireNewRow(row); for (x = 0; x < cellCount; x++) { cell = row.cells[x]; colSpan = getSpanVal(cell, 'colspan'); rowSpan = getSpanVal(cell, 'rowspan'); colCount += colSpan; if (rowSpan > 1) { colCount--; // decrement for every activated rowspan (count will be adjusted below) if (y + rowSpan > rows.length) { // adjust rowspan to the number of available rows rowSpan = rows.length - y; setSpanVal(cell, 'rowSpan', rowSpan); rowSpansDueAt.push(rows.length - 1); } else { rowSpansDueAt.push(y + rowSpan - 1); } } fireNewCell(cell); } // take into account currently active rowspans 
each(rowSpansDueAt, function (dueY) { if (y <= dueY) { colCount++; } }); // Needs more cells for (x = colCount; x < gridWidth; x++) { row.appendChild(cloneCell(row.cells[cellCount - 1])); } // Needs less cells for (x = gridWidth; x < colCount; x++) { cell = row.cells[row.cells.length - 1]; colSpan = getSpanVal(cell, 'colspan'); if (colSpan > 1) { setSpanVal(cell, 'colSpan', colSpan - 1); } else { dom.remove(cell); } } // Add before/after if (before) { targetRow.parentNode.insertBefore(row, targetRow); } else { targetRow = dom.insertAfter(row, targetRow); } }); removeCellSelection(); } function getPos(target) { var pos; each(grid, function (row, y) { each(row, function (cell, x) { if (cell.elm == target) { pos = { x: x, y: y }; return false; } }); return !pos; }); return pos; } function setStartCell(cell) { startPos = getPos(cell); } function findEndPos() { var maxX, maxY; maxX = maxY = 0; each(grid, function (row, y) { each(row, function (cell, x) { var colSpan, rowSpan; if (isCellSelected(cell)) { cell = grid[y][x]; if (x > maxX) { maxX = x; } if (y > maxY) { maxY = y; } if (cell.real) { colSpan = cell.colspan - 1; rowSpan = cell.rowspan - 1; if (colSpan) { if (x + colSpan > maxX) { maxX = x + colSpan; } } if (rowSpan) { if (y + rowSpan > maxY) { maxY = y + rowSpan; } } } } }); }); return { x: maxX, y: maxY }; } function setEndCell(cell) { var startX, startY, endX, endY, maxX, maxY, colSpan, rowSpan, x, y; endPos = getPos(cell); if (startPos && endPos) { // Get start/end positions startX = Math.min(startPos.x, endPos.x); startY = Math.min(startPos.y, endPos.y); endX = Math.max(startPos.x, endPos.x); endY = Math.max(startPos.y, endPos.y); // Expand end position to include spans maxX = endX; maxY = endY; // This logic tried to expand the selection to always be a rectangle // Expand startX /*for (y = startY; y <= maxY; y++) { cell = grid[y][startX]; if (!cell.real) { newX = startX - (cell.colspan - 1); if (newX < startX && newX >= 0) { startX = newX; } } } // Expand 
startY for (x = startX; x <= maxX; x++) { cell = grid[startY][x]; if (!cell.real) { newY = startY - (cell.rowspan - 1); if (newY < startY && newY >= 0) { startY = newY; } } }*/ // Find max X, Y for (y = startY; y <= endY; y++) { for (x = startX; x <= endX; x++) { cell = grid[y][x]; if (cell.real) { colSpan = cell.colspan - 1; rowSpan = cell.rowspan - 1; if (colSpan) { if (x + colSpan > maxX) { maxX = x + colSpan; } } if (rowSpan) { if (y + rowSpan > maxY) { maxY = y + rowSpan; } } } } } removeCellSelection(); // Add new selection for (y = startY; y <= maxY; y++) { for (x = startX; x <= maxX; x++) { if (grid[y][x]) { dom.setAttrib(grid[y][x].elm, 'data-mce-selected', '1'); } } } } } function moveRelIdx(cellElm, delta) { var pos, index, cell; pos = getPos(cellElm); index = pos.y * gridWidth + pos.x; do { index += delta; cell = getCell(index % gridWidth, Math.floor(index / gridWidth)); if (!cell) { break; } if (cell.elm != cellElm) { selection.select(cell.elm, true); if (dom.isEmpty(cell.elm)) { selection.collapse(true); } return true; } } while (cell.elm == cellElm); return false; } function splitCols(before) { if (startPos) { var splitResult = SplitCols.splitAt(grid, startPos.x, startPos.y, before); Tools.each(splitResult.cells, fireNewCell); } } table = table || dom.getParent(selection.getStart(true), 'table'); buildGrid(); selectedCell = selectedCell || dom.getParent(selection.getStart(true), 'th,td'); if (selectedCell) { startPos = getPos(selectedCell); endPos = findEndPos(); selectedCell = getCell(startPos.x, startPos.y); } Tools.extend(this, { deleteTable: deleteTable, split: split, merge: merge, insertRow: insertRow, insertRows: insertRows, insertCol: insertCol, insertCols: insertCols, splitCols: splitCols, deleteCols: deleteCols, deleteRows: deleteRows, cutRows: cutRows, copyRows: copyRows, pasteRows: pasteRows, getPos: getPos, setStartCell: setStartCell, setEndCell: setEndCell, moveRelIdx: moveRelIdx, refresh: buildGrid }); }; } ); /** * ResolveGlobal.js * * 
Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ define( 'tinymce.core.util.VK', [ 'global!tinymce.util.Tools.resolve' ], function (resolve) { return resolve('tinymce.util.VK'); } ); /** * ResolveGlobal.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ define( 'tinymce.core.util.Delay', [ 'global!tinymce.util.Tools.resolve' ], function (resolve) { return resolve('tinymce.util.Delay'); } ); /** * Quirks.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * This class includes fixes for various browser quirks. * * @class tinymce.table.util.Quirks * @private */ define( 'tinymce.plugins.table.util.Quirks', [ 'tinymce.core.util.VK', 'tinymce.core.util.Delay', 'tinymce.core.Env', 'tinymce.core.util.Tools', 'tinymce.plugins.table.util.Utils' ], function (VK, Delay, Env, Tools, Utils) { var each = Tools.each, getSpanVal = Utils.getSpanVal; return function (editor) { /** * Fixed caret movement around tables on WebKit. */ function moveWebKitSelection() { function eventHandler(e) { var key = e.keyCode; function handle(upBool, sourceNode) { var siblingDirection = upBool ? 'previousSibling' : 'nextSibling'; var currentRow = editor.dom.getParent(sourceNode, 'tr'); var siblingRow = currentRow[siblingDirection]; if (siblingRow) { moveCursorToRow(editor, sourceNode, siblingRow, upBool); e.preventDefault(); return true; } var tableNode = editor.dom.getParent(currentRow, 'table'); var middleNode = currentRow.parentNode; var parentNodeName = middleNode.nodeName.toLowerCase(); if (parentNodeName === 'tbody' || parentNodeName === (upBool ? 
'tfoot' : 'thead')) { var targetParent = getTargetParent(upBool, tableNode, middleNode, 'tbody'); if (targetParent !== null) { return moveToRowInTarget(upBool, targetParent, sourceNode); } } return escapeTable(upBool, currentRow, siblingDirection, tableNode); } function getTargetParent(upBool, topNode, secondNode, nodeName) { var tbodies = editor.dom.select('>' + nodeName, topNode); var position = tbodies.indexOf(secondNode); if (upBool && position === 0 || !upBool && position === tbodies.length - 1) { return getFirstHeadOrFoot(upBool, topNode); } else if (position === -1) { var topOrBottom = secondNode.tagName.toLowerCase() === 'thead' ? 0 : tbodies.length - 1; return tbodies[topOrBottom]; } return tbodies[position + (upBool ? -1 : 1)]; } function getFirstHeadOrFoot(upBool, parent) { var tagName = upBool ? 'thead' : 'tfoot'; var headOrFoot = editor.dom.select('>' + tagName, parent); return headOrFoot.length !== 0 ? headOrFoot[0] : null; } function moveToRowInTarget(upBool, targetParent, sourceNode) { var targetRow = getChildForDirection(targetParent, upBool); if (targetRow) { moveCursorToRow(editor, sourceNode, targetRow, upBool); } e.preventDefault(); return true; } function escapeTable(upBool, currentRow, siblingDirection, table) { var tableSibling = table[siblingDirection]; if (tableSibling) { moveCursorToStartOfElement(tableSibling); return true; } var parentCell = editor.dom.getParent(table, 'td,th'); if (parentCell) { return handle(upBool, parentCell, e); } var backUpSibling = getChildForDirection(currentRow, !upBool); moveCursorToStartOfElement(backUpSibling); e.preventDefault(); return false; } function getChildForDirection(parent, up) { var child = parent && parent[up ? 'lastChild' : 'firstChild']; // BR is not a valid table child to return in this case we return the table cell return child && child.nodeName === 'BR' ? 
editor.dom.getParent(child, 'td,th') : child; } function moveCursorToStartOfElement(n) { editor.selection.setCursorLocation(n, 0); } function isVerticalMovement() { return key == VK.UP || key == VK.DOWN; } function isInTable(editor) { var node = editor.selection.getNode(); var currentRow = editor.dom.getParent(node, 'tr'); return currentRow !== null; } function columnIndex(column) { var colIndex = 0; var c = column; while (c.previousSibling) { c = c.previousSibling; colIndex = colIndex + getSpanVal(c, "colspan"); } return colIndex; } function findColumn(rowElement, columnIndex) { var c = 0, r = 0; each(rowElement.children, function (cell, i) { c = c + getSpanVal(cell, "colspan"); r = i; if (c > columnIndex) { return false; } }); return r; } function moveCursorToRow(ed, node, row, upBool) { var srcColumnIndex = columnIndex(editor.dom.getParent(node, 'td,th')); var tgtColumnIndex = findColumn(row, srcColumnIndex); var tgtNode = row.childNodes[tgtColumnIndex]; var rowCellTarget = getChildForDirection(tgtNode, upBool); moveCursorToStartOfElement(rowCellTarget || tgtNode); } function shouldFixCaret(preBrowserNode) { var newNode = editor.selection.getNode(); var newParent = editor.dom.getParent(newNode, 'td,th'); var oldParent = editor.dom.getParent(preBrowserNode, 'td,th'); return newParent && newParent !== oldParent && checkSameParentTable(newParent, oldParent); } function checkSameParentTable(nodeOne, NodeTwo) { return editor.dom.getParent(nodeOne, 'TABLE') === editor.dom.getParent(NodeTwo, 'TABLE'); } if (isVerticalMovement() && isInTable(editor)) { var preBrowserNode = editor.selection.getNode(); Delay.setEditorTimeout(editor, function () { if (shouldFixCaret(preBrowserNode)) { handle(!e.shiftKey && key === VK.UP, preBrowserNode, e); } }, 0); } } editor.on('KeyDown', function (e) { eventHandler(e); }); } function fixBeforeTableCaretBug() { // Checks if the selection/caret is at the start of the specified block element function isAtStart(rng, par) { var doc = 
par.ownerDocument, rng2 = doc.createRange(), elm; rng2.setStartBefore(par); rng2.setEnd(rng.endContainer, rng.endOffset); elm = doc.createElement('body'); elm.appendChild(rng2.cloneContents()); // Check for text characters of other elements that should be treated as content return elm.innerHTML.replace(/<(br|img|object|embed|input|textarea)[^>]*>/gi, '-').replace(/<[^>]+>/g, '').length === 0; } // Fixes an bug where it's impossible to place the caret before a table in Gecko // this fix solves it by detecting when the caret is at the beginning of such a table // and then manually moves the caret infront of the table editor.on('KeyDown', function (e) { var rng, table, dom = editor.dom; // On gecko it's not possible to place the caret before a table if (e.keyCode == 37 || e.keyCode == 38) { rng = editor.selection.getRng(); table = dom.getParent(rng.startContainer, 'table'); if (table && editor.getBody().firstChild == table) { if (isAtStart(rng, table)) { rng = dom.createRng(); rng.setStartBefore(table); rng.setEndBefore(table); editor.selection.setRng(rng); e.preventDefault(); } } } }); } // Fixes an issue on Gecko where it's impossible to place the caret behind a table // This fix will force a paragraph element after the table but only when the forced_root_block setting is enabled function fixTableCaretPos() { editor.on('KeyDown SetContent VisualAid', function () { var last; // Skip empty text nodes from the end for (last = editor.getBody().lastChild; last; last = last.previousSibling) { if (last.nodeType == 3) { if (last.nodeValue.length > 0) { break; } } else if (last.nodeType == 1 && (last.tagName == 'BR' || !last.getAttribute('data-mce-bogus'))) { break; } } if (last && last.nodeName == 'TABLE') { if (editor.settings.forced_root_block) { editor.dom.add( editor.getBody(), editor.settings.forced_root_block, editor.settings.forced_root_block_attrs, Env.ie && Env.ie < 10 ? 
'&nbsp;' : '<br data-mce-bogus="1" />' ); } else { editor.dom.add(editor.getBody(), 'br', { 'data-mce-bogus': '1' }); } } }); editor.on('PreProcess', function (o) { var last = o.node.lastChild; if (last && (last.nodeName == "BR" || (last.childNodes.length == 1 && (last.firstChild.nodeName == 'BR' || last.firstChild.nodeValue == '\u00a0'))) && last.previousSibling && last.previousSibling.nodeName == "TABLE") { editor.dom.remove(last); } }); } // this nasty hack is here to work around some WebKit selection bugs. function fixTableCellSelection() { function tableCellSelected(ed, rng, n, currentCell) { // The decision of when a table cell is selected is somewhat involved. The fact that this code is // required is actually a pointer to the root cause of this bug. A cell is selected when the start // and end offsets are 0, the start container is a text, and the selection node is either a TR (most cases) // or the parent of the table (in the case of the selection containing the last cell of a table). var TEXT_NODE = 3, table = ed.dom.getParent(rng.startContainer, 'TABLE'); var tableParent, allOfCellSelected, tableCellSelection; if (table) { tableParent = table.parentNode; } allOfCellSelected = rng.startContainer.nodeType == TEXT_NODE && rng.startOffset === 0 && rng.endOffset === 0 && currentCell && (n.nodeName == "TR" || n == tableParent); tableCellSelection = (n.nodeName == "TD" || n.nodeName == "TH") && !currentCell; return allOfCellSelected || tableCellSelection; } function fixSelection() { var rng = editor.selection.getRng(); var n = editor.selection.getNode(); var currentCell = editor.dom.getParent(rng.startContainer, 'TD,TH'); if (!tableCellSelected(editor, rng, n, currentCell)) { return; } if (!currentCell) { currentCell = n; } // Get the very last node inside the table cell var end = currentCell.lastChild; while (end.lastChild) { end = end.lastChild; } // Select the entire table cell. Nothing outside of the table cell should be selected. 
if (end.nodeType == 3) { rng.setEnd(end, end.data.length); editor.selection.setRng(rng); } } editor.on('KeyDown', function () { fixSelection(); }); editor.on('MouseDown', function (e) { if (e.button != 2) { fixSelection(); } }); } /** * Delete table if all cells are selected. */ function deleteTable() { function placeCaretInCell(cell) { editor.selection.select(cell, true); editor.selection.collapse(true); } function clearCell(cell) { editor.$(cell).empty(); Utils.paddCell(cell); } editor.on('keydown', function (e) { if ((e.keyCode == VK.DELETE || e.keyCode == VK.BACKSPACE) && !e.isDefaultPrevented()) { var table, tableCells, selectedTableCells, cell; table = editor.dom.getParent(editor.selection.getStart(), 'table'); if (table) { tableCells = editor.dom.select('td,th', table); selectedTableCells = Tools.grep(tableCells, function (cell) { return !!editor.dom.getAttrib(cell, 'data-mce-selected'); }); if (selectedTableCells.length === 0) { // If caret is within an empty table cell then empty it for real cell = editor.dom.getParent(editor.selection.getStart(), 'td,th'); if (editor.selection.isCollapsed() && cell && editor.dom.isEmpty(cell)) { e.preventDefault(); clearCell(cell); placeCaretInCell(cell); } return; } e.preventDefault(); editor.undoManager.transact(function () { if (tableCells.length == selectedTableCells.length) { editor.execCommand('mceTableDelete'); } else { Tools.each(selectedTableCells, clearCell); placeCaretInCell(selectedTableCells[0]); } }); } } }); } /** * When caption is empty and we continue to delete, caption gets deleted along with the contents. * So, we take over delete operation (both forward and backward) and once caption is empty, we do * prevent it from disappearing. 
*/ function handleDeleteInCaption() { var ZWSP = '\uFEFF'; var isEmptyNode = function (node) { return editor.dom.isEmpty(node) || node.firstChild === node.lastChild && isCaretContainer(node.firstChild); }; var isTableCaption = function (node) { return node && node.nodeName == 'CAPTION' && node.parentNode.nodeName == 'TABLE'; }; var isTheHeirOf = function (heir, ancestor) { var node = ancestor.firstChild; do { if (node === heir) { return true; } } while ((node = node.firstChild)); return false; }; var isCaretContainer = function (node) { if (node.nodeType === 3) { if (node.data === ZWSP) { return true; } node = node.parentNode; } return node.nodeType === 1 && node.hasAttribute('data-mce-caret'); }; var caretIsAtTheLeftEdgeOf = function (node) { var rng = editor.selection.getRng(); return !rng.startOffset && !rng.startContainer.previousSibling && isTheHeirOf(rng.startContainer, node); }; var appendCaretContainer = function (node, isBlock) { var caretNode; if (!isBlock) { caretNode = node.ownerDocument.createTextNode(ZWSP); } else { caretNode = editor.dom.create('p', { 'data-mce-caret': 'after', 'data-mce-bogus': 'all' }, '<br data-mce-bogus="1">'); } node.appendChild(caretNode); }; var restoreCaretPlaceholder = function (container, insertCaret) { var lastChild = container.lastChild; var rng = editor.selection.getRng(); // save the current position of the caret to restore it later (IE requires this) var caretContainer = rng.startContainer; var caretOffset = rng.startOffset; // if container contains only debris, we replace the contents with inline caret placeholder, to avoid // vertical stretching of the caption if (isEmptyNode(container)) { container.innerHTML = ZWSP; // in IE caret springs off from the caption (to the first td), we need to bring it back caretContainer = container.lastChild; caretOffset = 0; } else if (!isCaretContainer(lastChild)) { appendCaretContainer(container, editor.dom.isBlock(lastChild)); } // in IE caret is off after restoration 
editor.selection.setCursorLocation(caretContainer, caretOffset); }; var contractSelectionTo = function (caption) { var rng = editor.selection.getRng(); var newRng = editor.dom.createRng(); var firstChild = caption.firstChild; if (rng.commonAncestorContainer === caption.parentNode && isTheHeirOf(rng.startContainer, caption)) { // ignore backward selections // rng.selectNodeContents() didn't work in IE newRng.setStart(caption, 0); if (firstChild.nodeType === 1) { newRng.setEnd(caption, caption.childNodes.length); } else { newRng.setEnd(firstChild, firstChild.nodeValue.length); } editor.selection.setRng(newRng); } }; editor.on('keydown', function (e) { if (e.keyCode !== VK.DELETE && e.keyCode !== VK.BACKSPACE || e.isDefaultPrevented()) { return; } var container = editor.dom.getParent(editor.selection.getStart(), 'caption'); if (!isTableCaption(container)) { return; } // in IE caption collapses if caret placeholder is deleted (and it is very much possible) if (!editor.selection.isCollapsed()) { // in Chrome triple click selects beyond the boundaries of the caption, if then delete is pressed, // contents are being removed with the whole caption, so make sure we stay in caption contractSelectionTo(container); // if the whole contents are selected, caret placeholder will be deleted too and we take // over delete operation here to do it manually and restore the placeholder if required editor.undoManager.transact(function () { editor.execCommand('Delete'); restoreCaretPlaceholder(container); }); e.preventDefault(); } else { restoreCaretPlaceholder(container); if (isEmptyNode(container) || e.keyCode === VK.BACKSPACE && caretIsAtTheLeftEdgeOf(container)) { // see TINY-979 e.preventDefault(); } } }); } handleDeleteInCaption(); deleteTable(); if (Env.webkit) { moveWebKitSelection(); fixTableCellSelection(); } if (Env.gecko) { fixBeforeTableCaretBug(); fixTableCaretPos(); } if (Env.ie > 9) { fixBeforeTableCaretBug(); fixTableCaretPos(); } }; } ); /** * ResolveGlobal.js * * 
Released under LGPL License.
 * Copyright (c) 1999-2017 Ephox Corp. All rights reserved
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

// Shim module: re-exports the globally registered tinymce.dom.TreeWalker.
define(
  'tinymce.core.dom.TreeWalker',
  [
    'global!tinymce.util.Tools.resolve'
  ],
  function (resolve) {
    return resolve('tinymce.dom.TreeWalker');
  }
);

/**
 * CellSelection.js
 *
 * Released under LGPL License.
 * Copyright (c) 1999-2017 Ephox Corp. All rights reserved
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

/**
 * This class handles table cell selection by faking it using a css class that gets applied
 * to cells when dragging the mouse from one cell to another.
 *
 * @class tinymce.table.selection.CellSelection
 * @private
 */
define(
  'tinymce.plugins.table.selection.CellSelection',
  [
    'tinymce.plugins.table.model.TableGrid',
    'tinymce.core.dom.TreeWalker',
    'tinymce.core.util.Tools'
  ],
  function (TableGrid, TreeWalker, Tools) {
    return function (editor, selectionChange) {
      // hasCellSelection starts true so the first clear() always strips any
      // stale data-mce-selected attributes.
      var dom = editor.dom, tableGrid, startCell, startTable, lastMouseOverTarget, hasCellSelection = true, resizing, dragging;

      // Removes the fake cell selection markers (data-mce-selected) and
      // re-enables native text selection in WebKit.
      function clear(force) {
        // Restore selection possibilities
        editor.getBody().style.webkitUserSelect = '';

        if (force || hasCellSelection) {
          editor.$('td[data-mce-selected],th[data-mce-selected]').removeAttr('data-mce-selected');
          hasCellSelection = false;
        }
      }

      // Forgets the drag-selection state and notifies listeners.
      var endSelection = function () {
        startCell = tableGrid = startTable = lastMouseOverTarget = null;
        selectionChange(false);
      };

      function isCellInTable(table, cell) {
        if (!table || !cell) {
          return false;
        }

        return table === dom.getParent(cell, 'table');
      }

      // mouseover handler that extends the fake selection while dragging from
      // the start cell across other cells of the same table.
      function cellSelectionHandler(e) {
        var sel, target = e.target, currentCell;

        if (resizing || dragging) {
          return;
        }

        // Fake mouse enter by keeping track of last mouse over
        if (target === lastMouseOverTarget) {
          return;
        }

        lastMouseOverTarget = target;

        if (startTable && startCell) {
          currentCell = dom.getParent(target, 'td,th');

          if (!isCellInTable(startTable, currentCell)) {
            currentCell = dom.getParent(startTable, 'td,th');
          }

          // Selection inside first cell is normal until we have expanded
          if (startCell === currentCell && !hasCellSelection) {
            return;
          }

          selectionChange(true);

          if (isCellInTable(startTable, currentCell)) {
            e.preventDefault();

            if (!tableGrid) {
              tableGrid = new TableGrid(editor, startTable, startCell);
              editor.getBody().style.webkitUserSelect = 'none';
            }

            tableGrid.setEndCell(currentCell);
            hasCellSelection = true;

            // Remove current selection
            sel = editor.selection.getSel();

            try {
              if (sel.removeAllRanges) {
                sel.removeAllRanges();
              } else {
                sel.empty();
              }
            } catch (ex) {
              // IE9 might throw errors here
            }
          }
        }
      }

      // While a fake cell selection is active, suppress native selection
      // change notifications (capture phase).
      editor.on('SelectionChange', function (e) {
        if (hasCellSelection) {
          e.stopImmediatePropagation();
        }
      }, true);

      // Add cell selection logic
      editor.on('MouseDown', function (e) {
        if (e.button != 2 && !resizing && !dragging) {
          clear();
          startCell = dom.getParent(e.target, 'td,th');
          startTable = dom.getParent(startCell, 'table');
        }
      });

      editor.on('mouseover', cellSelectionHandler);

      // NOTE(review): the handler was attached via editor.on('mouseover'),
      // but this unbinds via dom.unbind on the document — confirm the unbind
      // actually detaches the editor-level listener.
      editor.on('remove', function () {
        dom.unbind(editor.getDoc(), 'mouseover', cellSelectionHandler);
        clear();
      });

      editor.on('MouseUp', function () {
        var rng, sel = editor.selection, selectedCells, walker, node, lastNode;

        // Anchors the range start/end to the first meaningful node (non-empty
        // text node or BR) found walking forward (start) or backward (end).
        function setPoint(node, start) {
          var walker = new TreeWalker(node, node);

          do {
            // Text node
            if (node.nodeType == 3 && Tools.trim(node.nodeValue).length !== 0) {
              if (start) {
                rng.setStart(node, 0);
              } else {
                rng.setEnd(node, node.nodeValue.length);
              }

              return;
            }

            // BR element
            if (node.nodeName == 'BR') {
              if (start) {
                rng.setStartBefore(node);
              } else {
                rng.setEndBefore(node);
              }

              return;
            }
          } while ((node = (start ? walker.next() : walker.prev())));
        }

        // Move selection to startCell
        if (startCell) {
          if (tableGrid) {
            editor.getBody().style.webkitUserSelect = '';
          }

          // Try to expand text selection as much as we can, since only Gecko
          // supports real cell selection
          selectedCells = dom.select('td[data-mce-selected],th[data-mce-selected]');
          if (selectedCells.length > 0) {
            rng = dom.createRng();
            node = selectedCells[0];
            rng.setStartBefore(node);
            rng.setEndAfter(node);

            setPoint(node, 1);
            walker = new TreeWalker(node, dom.getParent(selectedCells[0], 'table'));

            // Walk to the last contiguous selected cell and end the range there.
            do {
              if (node.nodeName == 'TD' || node.nodeName == 'TH') {
                if (!dom.getAttrib(node, 'data-mce-selected')) {
                  break;
                }

                lastNode = node;
              }
            } while ((node = walker.next()));

            setPoint(lastNode);

            sel.setRng(rng);
          }

          editor.nodeChanged();
          endSelection();
        }
      });

      editor.on('KeyUp Drop SetContent', function (e) {
        clear(e.type == 'setcontent');
        endSelection();
        resizing = false;
      });

      editor.on('ObjectResizeStart ObjectResized', function (e) {
        resizing = e.type != 'objectresized';
      });

      editor.on('dragstart', function () {
        dragging = true;
      });

      editor.on('drop dragend', function () {
        dragging = false;
      });

      return {
        clear: clear
      };
    };
  }
);

/**
 * Dialogs.js
 *
 * Released under LGPL License.
 * Copyright (c) 1999-2017 Ephox Corp. All rights reserved
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

/*eslint dot-notation:0*/

/**
 * ...
* * @class tinymce.table.ui.Dialogs * @private */ define( 'tinymce.plugins.table.ui.Dialogs', [ 'tinymce.core.util.Tools', 'tinymce.core.Env' ], function (Tools, Env) { var each = Tools.each; return function (editor) { var self = this; function createColorPickAction() { var colorPickerCallback = editor.settings.color_picker_callback; if (colorPickerCallback) { return function () { var self = this; colorPickerCallback.call( editor, function (value) { self.value(value).fire('change'); }, self.value() ); }; } } function createStyleForm(dom) { return { title: 'Advanced', type: 'form', defaults: { onchange: function () { updateStyle(dom, this.parents().reverse()[0], this.name() == "style"); } }, items: [ { label: 'Style', name: 'style', type: 'textbox' }, { type: 'form', padding: 0, formItemDefaults: { layout: 'grid', alignH: ['start', 'right'] }, defaults: { size: 7 }, items: [ { label: 'Border color', type: 'colorbox', name: 'borderColor', onaction: createColorPickAction() }, { label: 'Background color', type: 'colorbox', name: 'backgroundColor', onaction: createColorPickAction() } ] } ] }; } function removePxSuffix(size) { return size ? 
size.replace(/px$/, '') : ""; } function addSizeSuffix(size) { if (/^[0-9]+$/.test(size)) { size += "px"; } return size; } function unApplyAlign(elm) { each('left center right'.split(' '), function (name) { editor.formatter.remove('align' + name, {}, elm); }); } function unApplyVAlign(elm) { each('top middle bottom'.split(' '), function (name) { editor.formatter.remove('valign' + name, {}, elm); }); } function buildListItems(inputList, itemCallback, startItems) { function appendItems(values, output) { output = output || []; Tools.each(values, function (item) { var menuItem = { text: item.text || item.title }; if (item.menu) { menuItem.menu = appendItems(item.menu); } else { menuItem.value = item.value; if (itemCallback) { itemCallback(menuItem); } } output.push(menuItem); }); return output; } return appendItems(inputList, startItems || []); } function updateStyle(dom, win, isStyleCtrl) { var data = win.toJSON(); var css = dom.parseStyle(data.style); if (isStyleCtrl) { win.find('#borderColor').value(css["border-color"] || '')[0].fire('change'); win.find('#backgroundColor').value(css["background-color"] || '')[0].fire('change'); } else { css["border-color"] = data.borderColor; css["background-color"] = data.backgroundColor; } win.find('#style').value(dom.serializeStyle(dom.parseStyle(dom.serializeStyle(css)))); } function appendStylesToData(dom, data, elm) { var css = dom.parseStyle(dom.getAttrib(elm, 'style')); if (css["border-color"]) { data.borderColor = css["border-color"]; } if (css["background-color"]) { data.backgroundColor = css["background-color"]; } data.style = dom.serializeStyle(css); } function mergeStyles(dom, elm, styles) { var css = dom.parseStyle(dom.getAttrib(elm, 'style')); each(styles, function (style) { css[style.name] = style.value; }); dom.setAttrib(elm, 'style', dom.serializeStyle(dom.parseStyle(dom.serializeStyle(css)))); } self.tableProps = function () { self.table(true); }; self.table = function (isProps) { var dom = editor.dom, tableElm, 
colsCtrl, rowsCtrl, classListCtrl, data = {}, generalTableForm, stylesToMerge; function onSubmitTableForm() { //Explore the layers of the table till we find the first layer of tds or ths function styleTDTH(elm, name, value) { if (elm.tagName === "TD" || elm.tagName === "TH") { dom.setStyle(elm, name, value); } else { if (elm.children) { for (var i = 0; i < elm.children.length; i++) { styleTDTH(elm.children[i], name, value); } } } } var captionElm; updateStyle(dom, this); data = Tools.extend(data, this.toJSON()); if (data["class"] === false) { delete data["class"]; } editor.undoManager.transact(function () { if (!tableElm) { tableElm = editor.plugins.table.insertTable(data.cols || 1, data.rows || 1); } editor.dom.setAttribs(tableElm, { style: data.style, 'class': data['class'] }); if (editor.settings.table_style_by_css) { stylesToMerge = []; stylesToMerge.push({ name: 'border', value: data.border }); stylesToMerge.push({ name: 'border-spacing', value: addSizeSuffix(data.cellspacing) }); mergeStyles(dom, tableElm, stylesToMerge); dom.setAttribs(tableElm, { 'data-mce-border-color': data.borderColor, 'data-mce-cell-padding': data.cellpadding, 'data-mce-border': data.border }); if (tableElm.children) { for (var i = 0; i < tableElm.children.length; i++) { styleTDTH(tableElm.children[i], 'border', data.border); styleTDTH(tableElm.children[i], 'padding', addSizeSuffix(data.cellpadding)); } } } else { editor.dom.setAttribs(tableElm, { border: data.border, cellpadding: data.cellpadding, cellspacing: data.cellspacing }); } if (dom.getAttrib(tableElm, 'width') && !editor.settings.table_style_by_css) { dom.setAttrib(tableElm, 'width', removePxSuffix(data.width)); } else { dom.setStyle(tableElm, 'width', addSizeSuffix(data.width)); } dom.setStyle(tableElm, 'height', addSizeSuffix(data.height)); // Toggle caption on/off captionElm = dom.select('caption', tableElm)[0]; if (captionElm && !data.caption) { dom.remove(captionElm); } if (!captionElm && data.caption) { captionElm = 
dom.create('caption'); captionElm.innerHTML = !Env.ie ? '<br data-mce-bogus="1"/>' : '\u00a0'; tableElm.insertBefore(captionElm, tableElm.firstChild); } unApplyAlign(tableElm); if (data.align) { editor.formatter.apply('align' + data.align, {}, tableElm); } editor.focus(); editor.addVisual(); }); } function getTDTHOverallStyle(elm, name) { var cells = editor.dom.select("td,th", elm), firstChildStyle; function checkChildren(firstChildStyle, elms) { for (var i = 0; i < elms.length; i++) { var currentStyle = dom.getStyle(elms[i], name); if (typeof firstChildStyle === "undefined") { firstChildStyle = currentStyle; } if (firstChildStyle != currentStyle) { return ""; } } return firstChildStyle; } firstChildStyle = checkChildren(firstChildStyle, cells); return firstChildStyle; } if (isProps === true) { tableElm = dom.getParent(editor.selection.getStart(), 'table'); if (tableElm) { data = { width: removePxSuffix(dom.getStyle(tableElm, 'width') || dom.getAttrib(tableElm, 'width')), height: removePxSuffix(dom.getStyle(tableElm, 'height') || dom.getAttrib(tableElm, 'height')), cellspacing: removePxSuffix(dom.getStyle(tableElm, 'border-spacing') || dom.getAttrib(tableElm, 'cellspacing')), cellpadding: dom.getAttrib(tableElm, 'data-mce-cell-padding') || dom.getAttrib(tableElm, 'cellpadding') || getTDTHOverallStyle(tableElm, 'padding'), border: dom.getAttrib(tableElm, 'data-mce-border') || dom.getAttrib(tableElm, 'border') || getTDTHOverallStyle(tableElm, 'border'), borderColor: dom.getAttrib(tableElm, 'data-mce-border-color'), caption: !!dom.select('caption', tableElm)[0], 'class': dom.getAttrib(tableElm, 'class') }; each('left center right'.split(' '), function (name) { if (editor.formatter.matchNode(tableElm, 'align' + name)) { data.align = name; } }); } } else { colsCtrl = { label: 'Cols', name: 'cols' }; rowsCtrl = { label: 'Rows', name: 'rows' }; } if (editor.settings.table_class_list) { if (data["class"]) { data["class"] = data["class"].replace(/\s*mce\-item\-table\s*/g, 
''); } classListCtrl = { name: 'class', type: 'listbox', label: 'Class', values: buildListItems( editor.settings.table_class_list, function (item) { if (item.value) { item.textStyle = function () { return editor.formatter.getCssText({ block: 'table', classes: [item.value] }); }; } } ) }; } generalTableForm = { type: 'form', layout: 'flex', direction: 'column', labelGapCalc: 'children', padding: 0, items: [ { type: 'form', labelGapCalc: false, padding: 0, layout: 'grid', columns: 2, defaults: { type: 'textbox', maxWidth: 50 }, items: (editor.settings.table_appearance_options !== false) ? [ colsCtrl, rowsCtrl, { label: 'Width', name: 'width' }, { label: 'Height', name: 'height' }, { label: 'Cell spacing', name: 'cellspacing' }, { label: 'Cell padding', name: 'cellpadding' }, { label: 'Border', name: 'border' }, { label: 'Caption', name: 'caption', type: 'checkbox' } ] : [ colsCtrl, rowsCtrl, { label: 'Width', name: 'width' }, { label: 'Height', name: 'height' } ] }, { label: 'Alignment', name: 'align', type: 'listbox', text: 'None', values: [ { text: 'None', value: '' }, { text: 'Left', value: 'left' }, { text: 'Center', value: 'center' }, { text: 'Right', value: 'right' } ] }, classListCtrl ] }; if (editor.settings.table_advtab !== false) { appendStylesToData(dom, data, tableElm); editor.windowManager.open({ title: "Table properties", data: data, bodyType: 'tabpanel', body: [ { title: 'General', type: 'form', items: generalTableForm }, createStyleForm(dom) ], onsubmit: onSubmitTableForm }); } else { editor.windowManager.open({ title: "Table properties", data: data, body: generalTableForm, onsubmit: onSubmitTableForm }); } }; self.merge = function (grid, cell) { editor.windowManager.open({ title: "Merge cells", body: [ { label: 'Cols', name: 'cols', type: 'textbox', value: '1', size: 10 }, { label: 'Rows', name: 'rows', type: 'textbox', value: '1', size: 10 } ], onsubmit: function () { var data = this.toJSON(); editor.undoManager.transact(function () { 
grid.merge(cell, data.cols, data.rows); }); } }); }; self.cell = function () { var dom = editor.dom, cellElm, data, classListCtrl, cells = []; function setAttrib(elm, name, value) { if (cells.length === 1 || value) { dom.setAttrib(elm, name, value); } } function setStyle(elm, name, value) { if (cells.length === 1 || value) { dom.setStyle(elm, name, value); } } function onSubmitCellForm() { updateStyle(dom, this); data = Tools.extend(data, this.toJSON()); editor.undoManager.transact(function () { each(cells, function (cellElm) { setAttrib(cellElm, 'scope', data.scope); setAttrib(cellElm, 'style', data.style); setAttrib(cellElm, 'class', data['class']); setStyle(cellElm, 'width', addSizeSuffix(data.width)); setStyle(cellElm, 'height', addSizeSuffix(data.height)); // Switch cell type if (data.type && cellElm.nodeName.toLowerCase() !== data.type) { cellElm = dom.rename(cellElm, data.type); } // Remove alignment if (cells.length === 1) { unApplyAlign(cellElm); unApplyVAlign(cellElm); } // Apply alignment if (data.align) { editor.formatter.apply('align' + data.align, {}, cellElm); } // Apply vertical alignment if (data.valign) { editor.formatter.apply('valign' + data.valign, {}, cellElm); } }); editor.focus(); }); } // Get selected cells or the current cell cells = editor.dom.select('td[data-mce-selected],th[data-mce-selected]'); cellElm = editor.dom.getParent(editor.selection.getStart(), 'td,th'); if (!cells.length && cellElm) { cells.push(cellElm); } cellElm = cellElm || cells[0]; if (!cellElm) { // If this element is null, return now to avoid crashing. 
return; } if (cells.length > 1) { data = { width: '', height: '', scope: '', 'class': '', align: '', style: '', type: cellElm.nodeName.toLowerCase() }; } else { data = { width: removePxSuffix(dom.getStyle(cellElm, 'width') || dom.getAttrib(cellElm, 'width')), height: removePxSuffix(dom.getStyle(cellElm, 'height') || dom.getAttrib(cellElm, 'height')), scope: dom.getAttrib(cellElm, 'scope'), 'class': dom.getAttrib(cellElm, 'class') }; data.type = cellElm.nodeName.toLowerCase(); each('left center right'.split(' '), function (name) { if (editor.formatter.matchNode(cellElm, 'align' + name)) { data.align = name; } }); each('top middle bottom'.split(' '), function (name) { if (editor.formatter.matchNode(cellElm, 'valign' + name)) { data.valign = name; } }); appendStylesToData(dom, data, cellElm); } if (editor.settings.table_cell_class_list) { classListCtrl = { name: 'class', type: 'listbox', label: 'Class', values: buildListItems( editor.settings.table_cell_class_list, function (item) { if (item.value) { item.textStyle = function () { return editor.formatter.getCssText({ block: 'td', classes: [item.value] }); }; } } ) }; } var generalCellForm = { type: 'form', layout: 'flex', direction: 'column', labelGapCalc: 'children', padding: 0, items: [ { type: 'form', layout: 'grid', columns: 2, labelGapCalc: false, padding: 0, defaults: { type: 'textbox', maxWidth: 50 }, items: [ { label: 'Width', name: 'width' }, { label: 'Height', name: 'height' }, { label: 'Cell type', name: 'type', type: 'listbox', text: 'None', minWidth: 90, maxWidth: null, values: [ { text: 'Cell', value: 'td' }, { text: 'Header cell', value: 'th' } ] }, { label: 'Scope', name: 'scope', type: 'listbox', text: 'None', minWidth: 90, maxWidth: null, values: [ { text: 'None', value: '' }, { text: 'Row', value: 'row' }, { text: 'Column', value: 'col' }, { text: 'Row group', value: 'rowgroup' }, { text: 'Column group', value: 'colgroup' } ] }, { label: 'H Align', name: 'align', type: 'listbox', text: 'None', 
minWidth: 90, maxWidth: null, values: [ { text: 'None', value: '' }, { text: 'Left', value: 'left' }, { text: 'Center', value: 'center' }, { text: 'Right', value: 'right' } ] }, { label: 'V Align', name: 'valign', type: 'listbox', text: 'None', minWidth: 90, maxWidth: null, values: [ { text: 'None', value: '' }, { text: 'Top', value: 'top' }, { text: 'Middle', value: 'middle' }, { text: 'Bottom', value: 'bottom' } ] } ] }, classListCtrl ] }; if (editor.settings.table_cell_advtab !== false) { editor.windowManager.open({ title: "Cell properties", bodyType: 'tabpanel', data: data, body: [ { title: 'General', type: 'form', items: generalCellForm }, createStyleForm(dom) ], onsubmit: onSubmitCellForm }); } else { editor.windowManager.open({ title: "Cell properties", data: data, body: generalCellForm, onsubmit: onSubmitCellForm }); } }; self.row = function () { var dom = editor.dom, tableElm, cellElm, rowElm, classListCtrl, data, rows = [], generalRowForm; function setAttrib(elm, name, value) { if (rows.length === 1 || value) { dom.setAttrib(elm, name, value); } } function setStyle(elm, name, value) { if (rows.length === 1 || value) { dom.setStyle(elm, name, value); } } function onSubmitRowForm() { var tableElm, oldParentElm, parentElm; updateStyle(dom, this); data = Tools.extend(data, this.toJSON()); editor.undoManager.transact(function () { var toType = data.type; each(rows, function (rowElm) { setAttrib(rowElm, 'scope', data.scope); setAttrib(rowElm, 'style', data.style); setAttrib(rowElm, 'class', data['class']); setStyle(rowElm, 'height', addSizeSuffix(data.height)); if (toType !== rowElm.parentNode.nodeName.toLowerCase()) { tableElm = dom.getParent(rowElm, 'table'); oldParentElm = rowElm.parentNode; parentElm = dom.select(toType, tableElm)[0]; if (!parentElm) { parentElm = dom.create(toType); if (tableElm.firstChild) { tableElm.insertBefore(parentElm, tableElm.firstChild); } else { tableElm.appendChild(parentElm); } } parentElm.appendChild(rowElm); if 
(!oldParentElm.hasChildNodes()) { dom.remove(oldParentElm); } } // Apply/remove alignment if (rows.length === 1) { unApplyAlign(rowElm); } if (data.align) { editor.formatter.apply('align' + data.align, {}, rowElm); } }); editor.focus(); }); } tableElm = editor.dom.getParent(editor.selection.getStart(), 'table'); cellElm = editor.dom.getParent(editor.selection.getStart(), 'td,th'); each(tableElm.rows, function (row) { each(row.cells, function (cell) { if (dom.getAttrib(cell, 'data-mce-selected') || cell == cellElm) { rows.push(row); return false; } }); }); rowElm = rows[0]; if (!rowElm) { // If this element is null, return now to avoid crashing. return; } if (rows.length > 1) { data = { height: '', scope: '', 'class': '', align: '', type: rowElm.parentNode.nodeName.toLowerCase() }; } else { data = { height: removePxSuffix(dom.getStyle(rowElm, 'height') || dom.getAttrib(rowElm, 'height')), scope: dom.getAttrib(rowElm, 'scope'), 'class': dom.getAttrib(rowElm, 'class') }; data.type = rowElm.parentNode.nodeName.toLowerCase(); each('left center right'.split(' '), function (name) { if (editor.formatter.matchNode(rowElm, 'align' + name)) { data.align = name; } }); appendStylesToData(dom, data, rowElm); } if (editor.settings.table_row_class_list) { classListCtrl = { name: 'class', type: 'listbox', label: 'Class', values: buildListItems( editor.settings.table_row_class_list, function (item) { if (item.value) { item.textStyle = function () { return editor.formatter.getCssText({ block: 'tr', classes: [item.value] }); }; } } ) }; } generalRowForm = { type: 'form', columns: 2, padding: 0, defaults: { type: 'textbox' }, items: [ { type: 'listbox', name: 'type', label: 'Row type', text: 'Header', maxWidth: null, values: [ { text: 'Header', value: 'thead' }, { text: 'Body', value: 'tbody' }, { text: 'Footer', value: 'tfoot' } ] }, { type: 'listbox', name: 'align', label: 'Alignment', text: 'None', maxWidth: null, values: [ { text: 'None', value: '' }, { text: 'Left', value: 'left' 
}, { text: 'Center', value: 'center' }, { text: 'Right', value: 'right' } ] }, { label: 'Height', name: 'height' }, classListCtrl ] }; if (editor.settings.table_row_advtab !== false) { editor.windowManager.open({ title: "Row properties", data: data, bodyType: 'tabpanel', body: [ { title: 'General', type: 'form', items: generalRowForm }, createStyleForm(dom) ], onsubmit: onSubmitRowForm }); } else { editor.windowManager.open({ title: "Row properties", data: data, body: generalRowForm, onsubmit: onSubmitRowForm }); } }; }; } ); /** * ResizeBars.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. All rights reserved * * License: http://www.tinymce.com/license * Contributing: http://www.tinymce.com/contributing */ /** * This class handles table column and row resizing by adding divs over the columns and rows of the table. * These divs are then manipulated using mouse events to resize the underlying table. * * @class tinymce.table.ui.ResizeBars * @private */ define( 'tinymce.plugins.table.ui.ResizeBars', [ 'tinymce.core.util.Tools', 'tinymce.core.util.VK' ], function (Tools, VK) { var hoverTable; return function (editor) { var RESIZE_BAR_CLASS = 'mce-resize-bar', RESIZE_BAR_ROW_CLASS = 'mce-resize-bar-row', RESIZE_BAR_ROW_CURSOR_STYLE = 'row-resize', RESIZE_BAR_ROW_DATA_ATTRIBUTE = 'data-row', RESIZE_BAR_ROW_DATA_INITIAL_TOP_ATTRIBUTE = 'data-initial-top', RESIZE_BAR_COL_CLASS = 'mce-resize-bar-col', RESIZE_BAR_COL_CURSOR_STYLE = 'col-resize', RESIZE_BAR_COL_DATA_ATTRIBUTE = 'data-col', RESIZE_BAR_COL_DATA_INITIAL_LEFT_ATTRIBUTE = 'data-initial-left', RESIZE_BAR_THICKNESS = 4, RESIZE_MINIMUM_WIDTH = 10, RESIZE_MINIMUM_HEIGHT = 10, RESIZE_BAR_DRAGGING_CLASS = 'mce-resize-bar-dragging'; var percentageBasedSizeRegex = new RegExp(/(\d+(\.\d+)?%)/), pixelBasedSizeRegex = new RegExp(/px|em/); var delayDrop, dragging, blockerElement, dragBar, lastX, lastY; // Get the absolute position's top edge. 
// Returns the absolute y position of a row's top edge, tagged with its index.
function getTopEdge(index, row) {
  return {
    index: index,
    y: editor.dom.getPos(row).y
  };
}

// Returns the absolute y position of a row's bottom edge, tagged with its index.
function getBottomEdge(index, row) {
  return {
    index: index,
    y: editor.dom.getPos(row).y + row.offsetHeight
  };
}

// Returns the absolute x position of a cell's left edge, tagged with its index.
function getLeftEdge(index, cell) {
  return {
    index: index,
    x: editor.dom.getPos(cell).x
  };
}

// Returns the absolute x position of a cell's right edge, tagged with its index.
function getRightEdge(index, cell) {
  return {
    index: index,
    x: editor.dom.getPos(cell).x + cell.offsetWidth
  };
}

function isRtl() {
  var dir = editor.getBody().dir;
  return dir === 'rtl';
}

function isInline() {
  return editor.inline;
}

// Element the resize bars are attached to: the page body for inline editors,
// the editor body otherwise.
// FIX: `isInline` was previously used as a bare function reference in the
// ternary condition (always truthy) instead of being called. For iframe-based
// editors both branches resolve to the same element, so behavior there is
// unchanged; the condition now actually distinguishes the two modes.
function getBody() {
  return isInline() ? editor.getBody().ownerDocument.body : editor.getBody();
}

// "Inner" edge of a column: left in LTR, right in RTL.
function getInnerEdge(index, cell) {
  return isRtl() ? getRightEdge(index, cell) : getLeftEdge(index, cell);
}

// "Outer" edge of a column: right in LTR, left in RTL.
function getOuterEdge(index, cell) {
  return isRtl() ? getLeftEdge(index, cell) : getRightEdge(index, cell);
}

// Percentage width of an element relative to its table, from computed styles.
function getPercentageWidthFallback(element, table) {
  return getComputedStyleSize(element, 'width') / getComputedStyleSize(table, 'width') * 100;
}

// Computed style size of a property, parsed to an integer pixel count.
function getComputedStyleSize(element, property) {
  var widthString = editor.dom.getStyle(element, property, true);
  var width = parseInt(widthString, 10);
  return width;
}

// Current table width as a percentage of its parent's width.
function getCurrentTablePercentWidth(table) {
  var tableWidth = getComputedStyleSize(table, 'width');
  var tableParentWidth = getComputedStyleSize(table.parentElement, 'width');
  return tableWidth / tableParentWidth * 100;
}

// Pixel delta converted to a percentage of the table's width.
function getCellPercentDelta(table, delta) {
  var tableWidth = getComputedStyleSize(table, 'width');
  return delta / tableWidth * 100;
}

// Pixel delta converted to a percentage of the table parent's width.
function getTablePercentDelta(table, delta) {
  var tableParentWidth = getComputedStyleSize(table.parentElement, 'width');
  return delta / tableParentWidth * 100;
}

// Find the left/right (ltr/rtl) or top side locations of the cells to measure.
// This is the location of the borders we need to draw over.
// Compute the positions where resize bars must be rendered for a list of
// measured rows/columns. There is no bar before the first entry, so measuring
// starts at index 1; a closing bar is appended after the last entry.
function findPositions(getInner, getOuter, thingsToMeasure) {
  var total = thingsToMeasure.length;
  var positions = [];

  // Skip index 0: no left (LTR), right (RTL) or top bar before the first entry.
  for (var idx = 1; idx < total; idx++) {
    // Re-zero the index so bars are numbered from 0.
    positions.push(getInner(idx - 1, thingsToMeasure[idx].element));
  }

  // Closing bar after the final row/column.
  var finalEntry = thingsToMeasure[total - 1];
  positions.push(getOuter(total - 1, finalEntry.element));

  return positions;
}

// Remove every resize bar currently rendered in the editor body.
function clearBars() {
  var existingBars = editor.dom.select('.' + RESIZE_BAR_CLASS, getBody());
  for (var i = 0; i < existingBars.length; i++) {
    editor.dom.remove(existingBars[i]);
  }
}

// Redraw all bars for the given table from scratch.
function refreshBars(tableElement) {
  clearBars();
  drawBars(tableElement);
}

// Build the attribute map for a single resize-bar <div> the editor will add.
function generateBar(classToAdd, cursor, left, top, height, width, indexAttr, index) {
  var styleParts = [
    'cursor: ' + cursor,
    'margin: 0',
    'padding: 0',
    'position: absolute',
    'left: ' + left + 'px',
    'top: ' + top + 'px',
    'height: ' + height + 'px',
    'width: ' + width + 'px'
  ];

  var bar = {
    'data-mce-bogus': 'all',
    'class': RESIZE_BAR_CLASS + ' ' + classToAdd,
    'unselectable': 'on',
    'data-mce-resize': false,
    style: styleParts.join('; ') + '; '
  };

  bar[indexAttr] = index;

  return bar;
}

// Render one horizontal bar, centred over each row border.
function drawRows(rowPositions, tableWidth, tablePosition) {
  for (var i = 0; i < rowPositions.length; i++) {
    var rowPosition = rowPositions[i];
    var barTop = rowPosition.y - RESIZE_BAR_THICKNESS / 2;

    editor.dom.add(getBody(), 'div',
      generateBar(RESIZE_BAR_ROW_CLASS, RESIZE_BAR_ROW_CURSOR_STYLE,
        tablePosition.x, barTop, RESIZE_BAR_THICKNESS, tableWidth,
        RESIZE_BAR_ROW_DATA_ATTRIBUTE, rowPosition.index));
  }
}

// Draw the column bars over the column borders.
// Render one vertical bar, centred over each column border.
function drawCols(cellPositions, tableHeight, tablePosition) {
  Tools.each(cellPositions, function (cellPosition) {
    var left = cellPosition.x - RESIZE_BAR_THICKNESS / 2,
      top = tablePosition.y,
      height = tableHeight,
      width = RESIZE_BAR_THICKNESS;

    editor.dom.add(getBody(), 'div',
      generateBar(RESIZE_BAR_COL_CLASS, RESIZE_BAR_COL_CURSOR_STYLE,
        left, top, height, width, RESIZE_BAR_COL_DATA_ATTRIBUTE, cellPosition.index));
  });
}

// Get a matrix of the cells in each row and the rows in the table, with each
// cell's rowspan/colspan parsed (defaulting to 1 when the attribute is absent).
function getTableDetails(table) {
  return Tools.map(table.rows, function (row) {
    var cells = Tools.map(row.cells, function (cell) {
      var rowspan = cell.hasAttribute('rowspan') ? parseInt(cell.getAttribute('rowspan'), 10) : 1;
      var colspan = cell.hasAttribute('colspan') ? parseInt(cell.getAttribute('colspan'), 10) : 1;

      return {
        element: cell,
        rowspan: rowspan,
        colspan: colspan
      };
    });

    return {
      element: row,
      cells: cells
    };
  });
}

// Get a grid model of the table: a lookup keyed by "row,col" where spanned
// cells occupy every grid slot they cover, plus the overall grid dimensions.
function getTableGrid(tableDetails) {
  // Grid slots are keyed "rowIndex,colIndex".
  function key(rowIndex, colIndex) {
    return rowIndex + ',' + colIndex;
  }

  function getAt(rowIndex, colIndex) {
    return access[key(rowIndex, colIndex)];
  }

  function getAllCells() {
    var allCells = [];
    Tools.each(rows, function (row) {
      allCells = allCells.concat(row.cells);
    });
    return allCells;
  }

  function getAllRows() {
    return rows;
  }

  var access = {};
  var rows = [];
  var maxRows = 0;
  var maxCols = 0;

  Tools.each(tableDetails, function (row, rowIndex) {
    var currentRow = [];

    Tools.each(row.cells, function (cell) {
      // Skip slots already claimed by a rowspan from an earlier row.
      var start = 0;

      while (access[key(rowIndex, start)] !== undefined) {
        start++;
      }

      var current = {
        element: cell.element,
        colspan: cell.colspan,
        rowspan: cell.rowspan,
        rowIndex: rowIndex,
        colIndex: start
      };

      // Claim every grid slot covered by this cell's colspan x rowspan.
      for (var i = 0; i < cell.colspan; i++) {
        for (var j = 0; j < cell.rowspan; j++) {
          var cr = rowIndex + j;
          var cc = start + i;
          access[key(cr, cc)] = current;
          maxRows = Math.max(maxRows, cr + 1);
          maxCols = Math.max(maxCols, cc + 1);
        }
      }

      currentRow.push(current);
    });

    rows.push({
      element: row.element,
      cells: currentRow
    });
  });

  return {
    grid: {
      maxRows: maxRows,
      maxCols: maxCols
    },
    getAt: getAt,
    getAllCells: getAllCells,
    getAllRows: getAllRows
  };
}

// Half-open integer range [start, end).
function range(start, end) {
  var r = [];

  for (var i = start; i < end; i++) {
    r.push(i);
  }

  return r;
}

// Attempt to get a representative single block for this column.
// If we can't find a single block, all blocks in this row/column are spanned
// and we'll need to fallback to getting the first cell in the row/column.
// NOTE(review): the loop does not break, so the LAST single-span candidate
// wins — confirm this is intentional.
function decide(getBlock, isSingle, getFallback) {
  var inBlock = getBlock();
  var singleInBlock;

  for (var i = 0; i < inBlock.length; i++) {
    if (isSingle(inBlock[i])) {
      singleInBlock = inBlock[i];
    }
  }

  return singleInBlock ? singleInBlock : getFallback();
}

// Attempt to get representative blocks for the width of each column: for each
// column pick a cell that starts in it, preferring one with colspan 1.
function getColumnBlocks(tableGrid) {
  var cols = range(0, tableGrid.grid.maxCols);
  var rows = range(0, tableGrid.grid.maxRows);

  return Tools.map(cols, function (col) {
    function getBlock() {
      var details = [];

      for (var i = 0; i < rows.length; i++) {
        var detail = tableGrid.getAt(i, col);
        if (detail && detail.colIndex === col) {
          details.push(detail);
        }
      }

      return details;
    }

    function isSingle(detail) {
      return detail.colspan === 1;
    }

    function getFallback() {
      var item;

      for (var i = 0; i < rows.length; i++) {
        item = tableGrid.getAt(i, col);
        if (item) {
          return item;
        }
      }

      return null;
    }

    return decide(getBlock, isSingle, getFallback);
  });
}

// Attempt to get representative blocks for the height of each row.
// Pick one representative cell per row for height measurement: prefer a cell
// that starts in the row with rowspan 1, otherwise fall back to the row's
// first grid slot.
function getRowBlocks(tableGrid) {
  var colCount = tableGrid.grid.maxCols;
  var rowIndices = range(0, tableGrid.grid.maxRows);

  return Tools.map(rowIndices, function (rowIndex) {
    // All cells that actually begin in this row.
    var collectCandidates = function () {
      var candidates = [];
      for (var col = 0; col < colCount; col++) {
        var cell = tableGrid.getAt(rowIndex, col);
        if (cell && cell.rowIndex === rowIndex) {
          candidates.push(cell);
        }
      }
      return candidates;
    };

    var spansOneRow = function (cell) {
      return cell.rowspan === 1;
    };

    var firstCellInRow = function () {
      return tableGrid.getAt(rowIndex, 0);
    };

    return decide(collectCandidates, spansOneRow, firstCellInRow);
  });
}

// Draw resize bars over the left/right (ltr/rtl) or top side locations of the
// cells to measure. This is the location of the borders we need to draw over.
function drawBars(table) {
  var grid = getTableGrid(getTableDetails(table));
  var rowReps = getRowBlocks(grid);
  var colReps = getColumnBlocks(grid);
  var origin = editor.dom.getPos(table);

  var rowEdges = [];
  var colEdges = [];

  if (rowReps.length > 0) {
    rowEdges = findPositions(getTopEdge, getBottomEdge, rowReps);
  }

  if (colReps.length > 0) {
    colEdges = findPositions(getInnerEdge, getOuterEdge, colReps);
  }

  drawRows(rowEdges, table.offsetWidth, origin);
  drawCols(colEdges, table.offsetHeight, origin);
}

// Attempt to deduce the width/height of a column/row that has more than one cell spanned.
// `deducables` holds the known edge positions (px) for each column/row, with
// falsy entries where a position could not be measured directly. The size at
// `index` is estimated from the nearest known positions on either side of it,
// averaged over the number of columns/rows separating those two positions.
// Returns '' when out of range or when no known neighbour exists on one side
// (same "no size" convention the callers already handle).
//
// Fixes over the previous version:
//  - the neighbour scans now stop at the NEAREST known value instead of
//    silently overwriting with the farthest one;
//  - the span between the two known positions is current.delta + next.delta
//    (it was next.delta - current.delta, which divides by zero or goes
//    negative whenever `current` is found before `index`);
//  - missing neighbours return '' instead of throwing on `.delta`.
function deduceSize(deducables, index, isPercentageBased, table) {
  if (index < 0 || index >= deducables.length - 1) {
    return "";
  }
  // Nearest known position at or before `index`; delta = distance from `index`.
  var current = deducables[index];
  if (current) {
    current = {
      value: current,
      delta: 0
    };
  } else {
    var reversedUpToIndex = deducables.slice(0, index).reverse();
    for (var i = 0; i < reversedUpToIndex.length && !current; i++) {
      if (reversedUpToIndex[i]) {
        current = {
          value: reversedUpToIndex[i],
          delta: i + 1
        };
      }
    }
  }
  // Nearest known position after `index`; delta = distance from `index`.
  var next = deducables[index + 1];
  if (next) {
    next = {
      value: next,
      delta: 1
    };
  } else {
    var rest = deducables.slice(index + 1);
    for (var j = 0; j < rest.length && !next; j++) {
      if (rest[j]) {
        next = {
          value: rest[j],
          delta: j + 1
        };
      }
    }
  }
  if (!current || !next) {
    // No measurable position on one side — nothing can be deduced.
    return "";
  }
  // The two known positions are (current.delta + next.delta) columns/rows
  // apart; the average size of one spanned slot is the distance divided by
  // that count.
  var extras = next.delta + current.delta;
  var pixelWidth = Math.abs(next.value - current.value) / extras;
  return isPercentageBased ? pixelWidth / getComputedStyleSize(table, 'width') * 100 : pixelWidth;
}
// Read a size from an element: inline style first, then the attribute, then
// the computed style as a last resort.
function getStyleOrAttrib(element, property) {
  var sizeString = editor.dom.getStyle(element, property);
  if (!sizeString) {
    sizeString = editor.dom.getAttrib(element, property);
  }
  if (!sizeString) {
    sizeString = editor.dom.getStyle(element, property, true);
  }
  return sizeString;
}
// Width of a single cell, honouring whether the table is percentage or pixel
// based; falls back to a computed/derived width when the declared width does
// not match the table's sizing model.
function getWidth(element, isPercentageBased, table) {
  var widthString = getStyleOrAttrib(element, 'width');
  var widthNumber = parseInt(widthString, 10);
  var getWidthFallback = isPercentageBased ? getPercentageWidthFallback(element, table) : getComputedStyleSize(element, 'width');
  // If this is percentage based table, but this cell isn't percentage based.
  // Or if this is a pixel based table, but this cell isn't pixel based.
  if (isPercentageBased && !isPercentageBasedSize(widthString) ||
    !isPercentageBased && !isPixelBasedSize(widthString)) {
    // set the widthnumber to 0
    widthNumber = 0;
  }
  return !isNaN(widthNumber) && widthNumber > 0 ? widthNumber : getWidthFallback;
}
// Attempt to get the css width from column representative cells.
// Attempt to get the css width from column representative cells.
function getWidths(tableGrid, isPercentageBased, table) {
  var blocks = getColumnBlocks(tableGrid);
  var edgePositions = Tools.map(blocks, function (block) {
    return getInnerEdge(block.colIndex, block.element).x;
  });
  var widths = [];
  for (var index = 0; index < blocks.length; index++) {
    var cell = blocks[index].element;
    var colspan = cell.hasAttribute('colspan') ? parseInt(cell.getAttribute('colspan'), 10) : 1;
    // A spanned representative has no direct width — deduce it from the edges.
    var width;
    if (colspan > 1) {
      width = deduceSize(edgePositions, index);
    } else {
      width = getWidth(cell, isPercentageBased, table);
    }
    // If everything's failed and we still don't have a width, use the minimum.
    widths.push(width ? width : RESIZE_MINIMUM_WIDTH);
  }
  return widths;
}
// Attempt to get the pixel height from a cell.
function getPixelHeight(element) {
  var rawHeight = getStyleOrAttrib(element, 'height');
  // Percentage heights don't count as pixel heights — treat them as absent.
  var parsed = isPercentageBasedSize(rawHeight) ? 0 : parseInt(rawHeight, 10);
  if (!isNaN(parsed) && parsed > 0) {
    return parsed;
  }
  return getComputedStyleSize(element, 'height');
}
// Attempt to get the css height from row representative cells.
function getPixelHeights(tableGrid) {
  var blocks = getRowBlocks(tableGrid);
  var edgePositions = Tools.map(blocks, function (block) {
    return getTopEdge(block.rowIndex, block.element).y;
  });
  var heights = [];
  for (var index = 0; index < blocks.length; index++) {
    var cell = blocks[index].element;
    var rowspan = cell.hasAttribute('rowspan') ? parseInt(cell.getAttribute('rowspan'), 10) : 1;
    var height;
    if (rowspan > 1) {
      height = deduceSize(edgePositions, index);
    } else {
      height = getPixelHeight(cell);
    }
    heights.push(height ? height : RESIZE_MINIMUM_HEIGHT);
  }
  return heights;
}
// Determine how much each column's css width will need to change.
// Sizes = result = pixels widths OR percentage based widths
// Work out, for each column, how much its size must change when the bar after
// `column` is dragged by `step`; every size is clamped below by `min`.
function determineDeltas(sizes, column, step, min, isPercentageBased) {
  var current = sizes.slice(0);

  // An array of `count` zero deltas.
  function zeros(count) {
    var filled = [];
    for (var z = 0; z < count; z++) {
      filled.push(0);
    }
    return filled;
  }

  function singleColumnDeltas() {
    if (isPercentageBased) {
      // A lone column in a percent based table always fills 100% of the table.
      return [100 - current[0]];
    }
    var clamped = Math.max(min, current[0] + step);
    return [clamped - current[0]];
  }

  // Dragging the bar between `index` and `next`: one grows, the other shrinks.
  function leftOrMiddleDeltas(index, next) {
    var before = zeros(index);
    var after = zeros(current.length - next - 1);
    if (step >= 0) {
      var shrunkNext = Math.max(min, current[next] - step);
      return before.concat([step, shrunkNext - current[next]]).concat(after);
    }
    var shrunkThis = Math.max(min, current[index] + step);
    var giveBack = current[index] - shrunkThis;
    return before.concat([shrunkThis - current[index], giveBack]).concat(after);
  }

  // Dragging the table's trailing bar only resizes the last column.
  function rightmostDeltas(index) {
    var before = zeros(index);
    if (step >= 0) {
      return before.concat([step]);
    }
    var clamped = Math.max(min, current[index] + step);
    return before.concat([clamped - current[index]]);
  }

  if (sizes.length === 0) {
    // No columns.
    return [];
  }
  if (sizes.length === 1) {
    // One column.
    return singleColumnDeltas();
  }
  if (column === 0) {
    // Left column.
    return leftOrMiddleDeltas(0, 1);
  }
  if (column > 0 && column < sizes.length - 1) {
    // Middle column.
    return leftOrMiddleDeltas(column, column + 1);
  }
  if (column === sizes.length - 1) {
    // Right column.
    return rightmostDeltas(column);
  }
  return [];
}
// Sum measures[start..end) — used to combine per-column/row sizes across spans.
function total(start, end, measures) {
  var sum = 0;
  for (var index = start; index < end; index++) {
    sum += measures[index];
  }
  return sum;
}
// Combine cell's css widths to determine widths of colspan'd cells.
// Combine each cell's column widths into the width of the (possibly
// colspan'd) cell itself.
function recalculateWidths(tableGrid, widths) {
  var allCells = tableGrid.getAllCells();
  return Tools.map(allCells, function (cell) {
    var width = total(cell.colIndex, cell.colIndex + cell.colspan, widths);
    return {
      element: cell.element,
      width: width,
      colspan: cell.colspan
    };
  });
}
// Combine cell's css heights to determine heights of rowspan'd cells.
function recalculateCellHeights(tableGrid, heights) {
  var allCells = tableGrid.getAllCells();
  return Tools.map(allCells, function (cell) {
    var height = total(cell.rowIndex, cell.rowIndex + cell.rowspan, heights);
    return {
      element: cell.element,
      height: height,
      rowspan: cell.rowspan
    };
  });
}
// Calculate row heights.
function recalculateRowHeights(tableGrid, heights) {
  var allRows = tableGrid.getAllRows();
  return Tools.map(allRows, function (row, i) {
    return {
      element: row.element,
      height: heights[i]
    };
  });
}
function isPercentageBasedSize(size) {
  return percentageBasedSizeRegex.test(size);
}
function isPixelBasedSize(size) {
  return pixelBasedSizeRegex.test(size);
}
// Adjust the width of the column of table at index, with delta.
function adjustWidth(table, delta, index) {
  var tableDetails = getTableDetails(table);
  var tableGrid = getTableGrid(tableDetails);
  // Write each cell's new width as an inline style; drop the legacy attribute.
  function setSizes(newSizes, styleExtension) {
    Tools.each(newSizes, function (cell) {
      editor.dom.setStyle(cell.element, 'width', cell.width + styleExtension);
      editor.dom.setAttrib(cell.element, 'width', null);
    });
  }
  // Only dragging the table's last bar changes the table's own width.
  function getNewTablePercentWidth() {
    return index < tableGrid.grid.maxCols - 1 ? getCurrentTablePercentWidth(table) :
      getCurrentTablePercentWidth(table) + getTablePercentDelta(table, delta);
  }
  function getNewTablePixelWidth() {
    return index < tableGrid.grid.maxCols - 1 ? getComputedStyleSize(table, 'width') :
      getComputedStyleSize(table, 'width') + delta;
  }
  function setTableSize(newTableWidth, styleExtension, isPercentBased) {
    if (index == tableGrid.grid.maxCols - 1 || !isPercentBased) {
      editor.dom.setStyle(table, 'width', newTableWidth + styleExtension);
      editor.dom.setAttrib(table, 'width', null);
    }
  }
  var percentageBased = isPercentageBasedSize(table.width) ||
    isPercentageBasedSize(table.style.width);
  var widths = getWidths(tableGrid, percentageBased, table);
  var step = percentageBased ? getCellPercentDelta(table, delta) : delta;
  // TODO: change the min for percentage maybe?
  var deltas = determineDeltas(widths, index, step, RESIZE_MINIMUM_WIDTH, percentageBased, table);
  var newWidths = [];
  for (var i = 0; i < deltas.length; i++) {
    newWidths.push(deltas[i] + widths[i]);
  }
  var newSizes = recalculateWidths(tableGrid, newWidths);
  var styleExtension = percentageBased ? '%' : 'px';
  var newTableWidth = percentageBased ? getNewTablePercentWidth() : getNewTablePixelWidth();
  editor.undoManager.transact(function () {
    setSizes(newSizes, styleExtension);
    setTableSize(newTableWidth, styleExtension, percentageBased);
  });
}
// Adjust the height of the row of table at index, with delta.
function adjustHeight(table, delta, index) {
  var tableDetails = getTableDetails(table);
  var tableGrid = getTableGrid(tableDetails);
  var heights = getPixelHeights(tableGrid);
  var newHeights = [], newTotalHeight = 0;
  for (var i = 0; i < heights.length; i++) {
    newHeights.push(i === index ? delta + heights[i] : heights[i]);
    // BUGFIX: was `newTotalHeight += newTotalHeight[i];`, which indexes a
    // number (undefined) and turned the total into NaN, so the table height
    // style was written as "NaNpx". Accumulate the new row heights instead.
    newTotalHeight += newHeights[i];
  }
  var newCellSizes = recalculateCellHeights(tableGrid, newHeights);
  var newRowSizes = recalculateRowHeights(tableGrid, newHeights);
  editor.undoManager.transact(function () {
    Tools.each(newRowSizes, function (row) {
      editor.dom.setStyle(row.element, 'height', row.height + 'px');
      editor.dom.setAttrib(row.element, 'height', null);
    });
    Tools.each(newCellSizes, function (cell) {
      editor.dom.setStyle(cell.element, 'height', cell.height + 'px');
      editor.dom.setAttrib(cell.element, 'height', null);
    });
    editor.dom.setStyle(table, 'height', newTotalHeight + 'px');
    editor.dom.setAttrib(table, 'height', null);
  });
}
// Dropping is delayed while the pointer is outside the blocker, so a brief
// mouseout (e.g. over an iframe border) doesn't cancel the drag instantly.
function scheduleDelayedDropEvent() {
  delayDrop = setTimeout(function () {
    drop();
  }, 200);
}
function cancelDelayedDropEvent() {
  clearTimeout(delayDrop);
}
// Full-viewport transparent element that captures mouse events during a drag.
function getBlockerElement() {
  var blocker = document.createElement('div');
  blocker.setAttribute('style', 'margin: 0; ' +
    'padding: 0; ' +
    'position: fixed; ' +
    'left: 0px; ' +
    'top: 0px; ' +
    'height: 100%; ' +
    'width: 100%;');
  blocker.setAttribute('data-mce-bogus', 'all');
  return blocker;
}
function bindBlockerEvents(blocker, dragHandler) {
  editor.dom.bind(blocker, 'mouseup', function () {
    drop();
  });
  editor.dom.bind(blocker, 'mousemove', function (e) {
    cancelDelayedDropEvent();
    if (dragging) {
      dragHandler(e);
    }
  });
  editor.dom.bind(blocker, 'mouseout', function () {
    scheduleDelayedDropEvent();
  });
}
// Finish a drag: compute the travelled delta from the bar's recorded start
// position and apply it to the hovered table.
function drop() {
  editor.dom.remove(blockerElement);
  if (dragging) {
    editor.dom.removeClass(dragBar, RESIZE_BAR_DRAGGING_CLASS);
    dragging = false;
    var index, delta;
    if (isCol(dragBar)) {
      var initialLeft = parseInt(editor.dom.getAttrib(dragBar, RESIZE_BAR_COL_DATA_INITIAL_LEFT_ATTRIBUTE), 10);
      var newLeft = editor.dom.getPos(dragBar).x;
      index = parseInt(editor.dom.getAttrib(dragBar, RESIZE_BAR_COL_DATA_ATTRIBUTE), 10);
      delta = isRtl() ? initialLeft - newLeft : newLeft - initialLeft;
      if (Math.abs(delta) >= 1) { // simple click with no real resize (<1px) must not add CSS properties
        adjustWidth(hoverTable, delta, index);
      }
    } else if (isRow(dragBar)) {
      var initialTop = parseInt(editor.dom.getAttrib(dragBar, RESIZE_BAR_ROW_DATA_INITIAL_TOP_ATTRIBUTE), 10);
      var newTop = editor.dom.getPos(dragBar).y;
      index = parseInt(editor.dom.getAttrib(dragBar, RESIZE_BAR_ROW_DATA_ATTRIBUTE), 10);
      delta = newTop - initialTop;
      if (Math.abs(delta) >= 1) { // simple click with no real resize (<1px) must not add CSS properties
        adjustHeight(hoverTable, delta, index);
      }
    }
    refreshBars(hoverTable);
    editor.nodeChanged();
  }
}
function setupBaseDrag(bar, dragHandler) {
  blockerElement = blockerElement ? blockerElement : getBlockerElement();
  dragging = true;
  editor.dom.addClass(bar, RESIZE_BAR_DRAGGING_CLASS);
  dragBar = bar;
  bindBlockerEvents(blockerElement, dragHandler);
  editor.dom.add(getBody(), blockerElement);
}
function isCol(target) {
  return editor.dom.hasClass(target, RESIZE_BAR_COL_CLASS);
}
function isRow(target) {
  return editor.dom.hasClass(target, RESIZE_BAR_ROW_CLASS);
}
// Move the column bar with the pointer while dragging.
function colDragHandler(event) {
  lastX = lastX !== undefined ? lastX : event.clientX; // we need a firstX
  var deltaX = event.clientX - lastX;
  lastX = event.clientX;
  var oldLeft = editor.dom.getPos(dragBar).x;
  editor.dom.setStyle(dragBar, 'left', oldLeft + deltaX + 'px');
}
// Move the row bar with the pointer while dragging.
function rowDragHandler(event) {
  lastY = lastY !== undefined ? lastY : event.clientY;
  var deltaY = event.clientY - lastY;
  lastY = event.clientY;
  var oldTop = editor.dom.getPos(dragBar).y;
  editor.dom.setStyle(dragBar, 'top', oldTop + deltaY + 'px');
}
function setupColDrag(bar) {
  lastX = undefined;
  setupBaseDrag(bar, colDragHandler);
}
function setupRowDrag(bar) {
  lastY = undefined;
  setupBaseDrag(bar, rowDragHandler);
}
function mouseDownHandler(e) {
  var target = e.target, body = editor.getBody();
  // Since this code is working on global events we need to work on a global hoverTable state
  // and make sure that the state is correct according to the events fired
  if (!editor.$.contains(body, hoverTable) && hoverTable !== body) {
    return;
  }
  if (isCol(target)) {
    e.preventDefault();
    var initialLeft = editor.dom.getPos(target).x;
    editor.dom.setAttrib(target, RESIZE_BAR_COL_DATA_INITIAL_LEFT_ATTRIBUTE, initialLeft);
    setupColDrag(target);
  } else if (isRow(target)) {
    e.preventDefault();
    var initialTop = editor.dom.getPos(target).y;
    editor.dom.setAttrib(target, RESIZE_BAR_ROW_DATA_INITIAL_TOP_ATTRIBUTE, initialTop);
    setupRowDrag(target);
  } else {
    clearBars();
  }
}
editor.on('init', function () {
  // Needs to be like this for inline mode, editor.on does not bind to elements in the document body otherwise
  editor.dom.bind(getBody(), 'mousedown', mouseDownHandler);
});
// If we're updating the table width via the old mechanic, we need to update the constituent cells' widths/heights too.
editor.on('ObjectResized', function (e) { var table = e.target; if (table.nodeName === 'TABLE') { var newCellSizes = []; Tools.each(table.rows, function (row) { Tools.each(row.cells, function (cell) { var width = editor.dom.getStyle(cell, 'width', true); newCellSizes.push({ cell: cell, width: width }); }); }); Tools.each(newCellSizes, function (newCellSize) { editor.dom.setStyle(newCellSize.cell, 'width', newCellSize.width); editor.dom.setAttrib(newCellSize.cell, 'width', null); }); } }); editor.on('mouseover', function (e) { if (!dragging) { var tableElement = editor.dom.getParent(e.target, 'table'); if (e.target.nodeName === 'TABLE' || tableElement) { hoverTable = tableElement; refreshBars(tableElement); } } }); // Prevents the user from moving the caret inside the resize bars on Chrome // Only does it on arrow keys since clearBars might be an epxensive operation // since it's querying the DOM editor.on('keydown', function (e) { switch (e.keyCode) { case VK.LEFT: case VK.RIGHT: case VK.UP: case VK.DOWN: clearBars(); break; } }); editor.on('remove', function () { clearBars(); editor.dom.unbind(getBody(), 'mousedown', mouseDownHandler); }); return { adjustWidth: adjustWidth, adjustHeight: adjustHeight, clearBars: clearBars, drawBars: drawBars, determineDeltas: determineDeltas, getTableGrid: getTableGrid, getTableDetails: getTableDetails, getWidths: getWidths, getPixelHeights: getPixelHeights, isPercentageBasedSize: isPercentageBasedSize, isPixelBasedSize: isPixelBasedSize, recalculateWidths: recalculateWidths, recalculateCellHeights: recalculateCellHeights, recalculateRowHeights: recalculateRowHeights }; }; } ); /** * ResolveGlobal.js * * Released under LGPL License. * Copyright (c) 1999-2017 Ephox Corp. 
All rights reserved
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

define(
  'tinymce.core.PluginManager',
  [
    'global!tinymce.util.Tools.resolve'
  ],
  function (resolve) {
    return resolve('tinymce.PluginManager');
  }
);

/**
 * Plugin.js
 *
 * Released under LGPL License.
 * Copyright (c) 1999-2017 Ephox Corp. All rights reserved
 *
 * License: http://www.tinymce.com/license
 * Contributing: http://www.tinymce.com/contributing
 */

/**
 * This class contains all core logic for the table plugin.
 *
 * @class tinymce.table.Plugin
 * @private
 */
define(
  'tinymce.plugins.table.Plugin',
  [
    'tinymce.plugins.table.model.TableGrid',
    'tinymce.plugins.table.util.Quirks',
    'tinymce.plugins.table.selection.CellSelection',
    'tinymce.plugins.table.ui.Dialogs',
    'tinymce.plugins.table.ui.ResizeBars',
    'tinymce.core.util.Tools',
    'tinymce.core.dom.TreeWalker',
    'tinymce.core.Env',
    'tinymce.core.PluginManager'
  ],
  function (TableGrid, Quirks, CellSelection, Dialogs, ResizeBars, Tools, TreeWalker, Env, PluginManager) {
    var each = Tools.each;

    function Plugin(editor) {
      var clipboardRows, self = this, dialogs = new Dialogs(editor), resizeBars;

      // Resize bars are only active when object resizing is enabled (globally
      // or specifically for tables) and not explicitly disabled.
      if (editor.settings.object_resizing && editor.settings.table_resize_bars !== false &&
        (editor.settings.object_resizing === true || editor.settings.object_resizing === 'table')) {
        resizeBars = ResizeBars(editor);
      }

      // Small factory: a click handler that executes the named editor command.
      function cmd(command) {
        return function () {
          editor.execCommand(command);
        };
      }

      // Insert a cols x rows table at the caret and fire newrow/newcell events
      // for every created row/cell; returns the inserted table element.
      function insertTable(cols, rows) {
        var y, x, html, tableElm;
        html = '<table id="__mce"><tbody>';
        for (y = 0; y < rows; y++) {
          html += '<tr>';
          for (x = 0; x < cols; x++) {
            html += '<td>' + (Env.ie && Env.ie < 10 ? '&nbsp;' : '<br>') + '</td>';
          }
          html += '</tr>';
        }
        html += '</tbody></table>';
        editor.undoManager.transact(function () {
          editor.insertContent(html);
          // The temporary id is only used to find the freshly inserted table.
          tableElm = editor.dom.get('__mce');
          editor.dom.setAttrib(tableElm, 'id', null);
          editor.$('tr', tableElm).each(function (index, row) {
            editor.fire('newrow', {
              node: row
            });
            editor.$('th,td', row).each(function (index, cell) {
              editor.fire('newcell', {
                node: cell
              });
            });
          });
          editor.dom.setAttribs(tableElm, editor.settings.table_default_attributes || {});
          editor.dom.setStyles(tableElm, editor.settings.table_default_styles || {});
        });
        return tableElm;
      }

      // Enable/disable a menu control depending on whether the selection is
      // inside `selector`; with `sameParts`, also require all selected cells
      // to live in the same table section (thead/tbody/tfoot).
      function handleDisabledState(ctrl, selector, sameParts) {
        function bindStateListener() {
          var selectedElm, selectedCells, parts = {}, sum = 0, state;
          selectedCells = editor.dom.select('td[data-mce-selected],th[data-mce-selected]');
          selectedElm = selectedCells[0];
          if (!selectedElm) {
            selectedElm = editor.selection.getStart();
          }
          // Make sure that we don't have a selection inside thead and tbody at the same time
          if (sameParts && selectedCells.length > 0) {
            each(selectedCells, function (cell) {
              return parts[cell.parentNode.parentNode.nodeName] = 1;
            });
            each(parts, function (value) {
              sum += value;
            });
            state = sum !== 1;
          } else {
            state = !editor.dom.getParent(selectedElm, selector);
          }
          ctrl.disabled(state);
          editor.selection.selectorChanged(selector, function (state) {
            ctrl.disabled(!state);
          });
        }
        if (editor.initialized) {
          bindStateListener();
        } else {
          editor.on('init', bindStateListener);
        }
      }

      function postRender() {
        /*jshint validthis:true*/
        handleDisabledState(this, 'table');
      }

      function postRenderCell() {
        /*jshint validthis:true*/
        handleDisabledState(this, 'td,th');
      }

      function postRenderMergeCell() {
        /*jshint validthis:true*/
        handleDisabledState(this, 'td,th', true);
      }

      // Build the 10x10 picker grid markup used by the "insert table" menu.
      function generateTableGrid() {
        var html = '';
        html = '<table role="grid" class="mce-grid mce-grid-border" aria-readonly="true">';
        for (var y = 0; y < 10; y++) {
          html += '<tr>';
          for (var x = 0; x < 10; x++) {
            html += '<td role="gridcell" tabindex="-1"><a id="mcegrid' + (y * 10 + x) + '" href="#" ' +
              'data-mce-x="' + x + '" data-mce-y="' + y + '"></a></td>';
          }
          html += '</tr>';
        }
        html += '</table>';
        html += '<div class="mce-text-center" role="presentation">1 x 1</div>';
        return html;
      }

      // Highlight the tx x ty selection in the picker grid (mirrored in RTL)
      // and update its "N x M" label; returns the focused cell's <td>.
      function selectGrid(tx, ty, control) {
        var table = control.getEl().getElementsByTagName('table')[0];
        var x, y, focusCell, cell, active;
        var rtl = control.isRtl() || control.parent().rel == 'tl-tr';
        table.nextSibling.innerHTML = (tx + 1) + ' x ' + (ty + 1);
        if (rtl) {
          tx = 9 - tx;
        }
        for (y = 0; y < 10; y++) {
          for (x = 0; x < 10; x++) {
            cell = table.rows[y].childNodes[x].firstChild;
            active = (rtl ? x >= tx : x <= tx) && y <= ty;
            editor.dom.toggleClass(cell, 'mce-active', active);
            if (active) {
              focusCell = cell;
            }
          }
        }
        return focusCell.parentNode;
      }

      // "Insert table" menu item: either a plain dialog or the visual grid
      // picker, depending on the table_grid setting.
      if (editor.settings.table_grid === false) {
        editor.addMenuItem('inserttable', {
          text: 'Table',
          icon: 'table',
          context: 'table',
          onclick: dialogs.table
        });
      } else {
        editor.addMenuItem('inserttable', {
          text: 'Table',
          icon: 'table',
          context: 'table',
          ariaHideMenu: true,
          onclick: function (e) {
            // Keyboard/AT activation falls back to the dialog.
            if (e.aria) {
              this.parent().hideAll();
              e.stopImmediatePropagation();
              dialogs.table();
            }
          },
          onshow: function () {
            selectGrid(0, 0, this.menu.items()[0]);
          },
          onhide: function () {
            var elements = this.menu.items()[0].getEl().getElementsByTagName('a');
            editor.dom.removeClass(elements, 'mce-active');
            editor.dom.addClass(elements[0], 'mce-active');
          },
          menu: [
            {
              type: 'container',
              html: generateTableGrid(),
              onPostRender: function () {
                this.lastX = this.lastY = 0;
              },
              onmousemove: function (e) {
                var target = e.target, x, y;
                if (target.tagName.toUpperCase() == 'A') {
                  x = parseInt(target.getAttribute('data-mce-x'), 10);
                  y = parseInt(target.getAttribute('data-mce-y'), 10);
                  if (this.isRtl() || this.parent().rel == 'tl-tr') {
                    x = 9 - x;
                  }
                  if (x !== this.lastX || y !== this.lastY) {
                    selectGrid(x, y, e.control);
                    this.lastX = x;
                    this.lastY = y;
                  }
                }
              },
              onclick: function (e) {
                var self = this;
                if (e.target.tagName.toUpperCase() == 'A') {
                  e.preventDefault();
                  e.stopPropagation();
                  self.parent().cancel();
                  editor.undoManager.transact(function () {
                    insertTable(self.lastX + 1, self.lastY + 1);
                  });
                  editor.addVisual();
                }
              }
            }
          ]
        });
      }

      editor.addMenuItem('tableprops', {
        text: 'Table properties',
        context: 'table',
        onPostRender: postRender,
        onclick: dialogs.tableProps
      });

      editor.addMenuItem('deletetable', {
        text: 'Delete table',
        context: 'table',
        onPostRender: postRender,
        cmd: 'mceTableDelete'
      });

      editor.addMenuItem('cell', {
        separator: 'before',
        text: 'Cell',
        context: 'table',
        menu: [
          { text: 'Cell properties', onclick: cmd('mceTableCellProps'), onPostRender: postRenderCell },
          { text: 'Merge cells', onclick: cmd('mceTableMergeCells'), onPostRender: postRenderMergeCell },
          { text: 'Split cell', onclick: cmd('mceTableSplitCells'), onPostRender: postRenderCell }
        ]
      });

      editor.addMenuItem('row', {
        text: 'Row',
        context: 'table',
        menu: [
          { text: 'Insert row before', onclick: cmd('mceTableInsertRowBefore'), onPostRender: postRenderCell },
          { text: 'Insert row after', onclick: cmd('mceTableInsertRowAfter'), onPostRender: postRenderCell },
          { text: 'Delete row', onclick: cmd('mceTableDeleteRow'), onPostRender: postRenderCell },
          { text: 'Row properties', onclick: cmd('mceTableRowProps'), onPostRender: postRenderCell },
          { text: '-' },
          { text: 'Cut row', onclick: cmd('mceTableCutRow'), onPostRender: postRenderCell },
          { text: 'Copy row', onclick: cmd('mceTableCopyRow'), onPostRender: postRenderCell },
          { text: 'Paste row before', onclick: cmd('mceTablePasteRowBefore'), onPostRender: postRenderCell },
          { text: 'Paste row after', onclick: cmd('mceTablePasteRowAfter'), onPostRender: postRenderCell }
        ]
      });

      editor.addMenuItem('column', {
        text: 'Column',
        context: 'table',
        menu: [
          { text: 'Insert column before', onclick: cmd('mceTableInsertColBefore'), onPostRender: postRenderCell },
          { text: 'Insert column after', onclick: cmd('mceTableInsertColAfter'), onPostRender: postRenderCell },
          { text: 'Delete column', onclick: cmd('mceTableDeleteCol'), onPostRender: postRenderCell }
        ]
      });

      // Compose the toolbar menu button from the menu items registered above.
      var menuItems = [];
      each("inserttable tableprops deletetable | cell row column".split(' '), function (name) {
        if (name == '|') {
          menuItems.push({ text: '-' });
        } else {
          menuItems.push(editor.menuItems[name]);
        }
      });

      editor.addButton("table", {
        type: "menubutton",
        title: "Table",
        menu: menuItems
      });

      // Select whole table if a table border is clicked
      if (!Env.isIE) {
        editor.on('click', function (e) {
          e = e.target;
          if (e.nodeName === 'TABLE') {
            editor.selection.select(e);
            editor.nodeChanged();
          }
        });
      }

      self.quirks = new Quirks(editor);

      editor.on('Init', function () {
        // Starting a cell selection hides the resize bars so they don't
        // interfere with the fake selection overlay.
        self.cellSelection = new CellSelection(editor, function (selecting) {
          if (selecting && resizeBars) {
            resizeBars.clearBars();
          }
        });
        self.resizeBars = resizeBars;
      });

      editor.on('PreInit', function () {
        // Remove internal data attributes
        editor.serializer.addAttributeFilter(
          'data-mce-cell-padding,data-mce-border,data-mce-border-color',
          function (nodes, name) {
            var i = nodes.length;
            while (i--) {
              nodes[i].attr(name, null);
            }
          });
      });

      // Register action commands
      each({
        mceTableSplitCells: function (grid) {
          grid.split();
        },
        mceTableMergeCells: function (grid) {
          var cell;
          cell = editor.dom.getParent(editor.selection.getStart(), 'th,td');
          if (!editor.dom.select('td[data-mce-selected],th[data-mce-selected]').length) {
            dialogs.merge(grid, cell);
          } else {
            grid.merge();
          }
        },
        mceTableInsertRowBefore: function (grid) {
          grid.insertRows(true);
        },
        mceTableInsertRowAfter: function (grid) {
          grid.insertRows();
        },
        mceTableInsertColBefore: function (grid) {
          grid.insertCols(true);
        },
        mceTableInsertColAfter: function (grid) {
          grid.insertCols();
        },
        mceTableDeleteCol: function (grid) {
          grid.deleteCols();
        },
        mceTableDeleteRow: function (grid) {
          grid.deleteRows();
        },
        mceTableCutRow: function (grid) {
          clipboardRows = grid.cutRows();
        },
        mceTableCopyRow: function (grid) {
          clipboardRows = grid.copyRows();
        },
        mceTablePasteRowBefore: function (grid) {
          grid.pasteRows(clipboardRows, true);
        },
        mceTablePasteRowAfter: function (grid) {
          grid.pasteRows(clipboardRows);
        },
        mceSplitColsBefore: function (grid) {
          grid.splitCols(true);
        },
        mceSplitColsAfter: function (grid) {
          grid.splitCols(false);
        },
        mceTableDelete: function (grid) {
          if (resizeBars) {
            resizeBars.clearBars();
          }
          grid.deleteTable();
        }
      }, function (func, name) {
        editor.addCommand(name, function () {
          var grid = new TableGrid(editor);
          if (grid) {
            func(grid);
            editor.execCommand('mceRepaint');
            self.cellSelection.clear();
          }
        });
      });

      // Register dialog commands
      each({
        mceInsertTable: dialogs.table,
        mceTableProps: function () {
          dialogs.table(true);
        },
        mceTableRowProps: dialogs.row,
        mceTableCellProps: dialogs.cell
      }, function (func, name) {
        editor.addCommand(name, function (ui, val) {
          func(val);
        });
      });

      // Toolbar buttons (used by the context toolbar and custom toolbars).
      function addButtons() {
        editor.addButton('tableprops', {
          title: 'Table properties',
          onclick: dialogs.tableProps,
          icon: 'table'
        });
        editor.addButton('tabledelete', {
          title: 'Delete table',
          onclick: cmd('mceTableDelete')
        });
        editor.addButton('tablecellprops', {
          title: 'Cell properties',
          onclick: cmd('mceTableCellProps')
        });
        editor.addButton('tablemergecells', {
          title: 'Merge cells',
          onclick: cmd('mceTableMergeCells')
        });
        editor.addButton('tablesplitcells', {
          title: 'Split cell',
          onclick: cmd('mceTableSplitCells')
        });
        editor.addButton('tableinsertrowbefore', {
          title: 'Insert row before',
          onclick: cmd('mceTableInsertRowBefore')
        });
        editor.addButton('tableinsertrowafter', {
          title: 'Insert row after',
          onclick: cmd('mceTableInsertRowAfter')
        });
        editor.addButton('tabledeleterow', {
          title: 'Delete row',
          onclick: cmd('mceTableDeleteRow')
        });
        editor.addButton('tablerowprops', {
          title: 'Row properties',
          onclick: cmd('mceTableRowProps')
        });
        editor.addButton('tablecutrow', {
          title: 'Cut row',
          onclick: cmd('mceTableCutRow')
        });
        editor.addButton('tablecopyrow', {
          title: 'Copy row',
          onclick: cmd('mceTableCopyRow')
        });
        editor.addButton('tablepasterowbefore', {
          title: 'Paste row before',
          onclick: cmd('mceTablePasteRowBefore')
        });
        editor.addButton('tablepasterowafter', {
          title: 'Paste row after',
          onclick: cmd('mceTablePasteRowAfter')
        });
        editor.addButton('tableinsertcolbefore', {
          title: 'Insert column before',
          onclick: cmd('mceTableInsertColBefore')
        });
        editor.addButton('tableinsertcolafter', {
          title: 'Insert column after',
          onclick: cmd('mceTableInsertColAfter')
        });
        editor.addButton('tabledeletecol', {
          title: 'Delete column',
          onclick: cmd('mceTableDeleteCol')
        });
      }

      // Context-toolbar predicate: element is a table inside the editor body.
      function isTable(table) {
        var selectorMatched = editor.dom.is(table, 'table') && editor.getBody().contains(table);
        return selectorMatched;
      }

      // Register the table context toolbar unless disabled via settings.
      function addToolbars() {
        var toolbarItems = editor.settings.table_toolbar;
        if (toolbarItems === '' || toolbarItems === false) {
          return;
        }
        if (!toolbarItems) {
          toolbarItems = 'tableprops tabledelete | ' +
            'tableinsertrowbefore tableinsertrowafter tabledeleterow | ' +
            'tableinsertcolbefore tableinsertcolafter tabledeletecol';
        }
        editor.addContextToolbar(
          isTable,
          toolbarItems
        );
      }

      function getClipboardRows() {
        return clipboardRows;
      }

      function setClipboardRows(rows) {
        clipboardRows = rows;
      }

      addButtons();
      addToolbars();

      // Enable tab key cell navigation
      if (editor.settings.table_tab_navigation !== false) {
        editor.on('keydown', function (e) {
          var cellElm, grid, delta;
          if (e.keyCode == 9) {
            cellElm = editor.dom.getParent(editor.selection.getStart(), 'th,td');
            if (cellElm) {
              e.preventDefault();
              grid = new TableGrid(editor);
              delta = e.shiftKey ? -1 : 1;
              editor.undoManager.transact(function () {
                // Tabbing forward out of the last cell appends a new row.
                if (!grid.moveRelIdx(cellElm, delta) && delta > 0) {
                  grid.insertRow();
                  grid.refresh();
                  grid.moveRelIdx(cellElm, delta);
                }
              });
            }
          }
        });
      }

      self.insertTable = insertTable;
      self.setClipboardRows = setClipboardRows;
      self.getClipboardRows = getClipboardRows;
    }

    PluginManager.add('table', Plugin);

    return function () { };
  }
);
dem('tinymce.plugins.table.Plugin')();
})();
{'content_hash': 'a076896118787fb7c9887589a3f910fd', 'timestamp': '', 'source': 'github', 'line_count': 4774, 'max_line_length': 493, 'avg_line_length': 29.662966066191874, 'alnum_prop': 0.51380189392067, 'repo_name': 'him2him2/cdnjs', 'id': 'e9aef5ac8ce5ca08c62fd987f86b9f70d30454fe', 'size': '141611', 'binary': False, 'copies': '48', 'ref': 'refs/heads/master', 'path': 'ajax/libs/tinymce/4.6.1/plugins/table/plugin.js', 'mode': '33188', 'license': 'mit', 'language': []}
using System; using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; namespace Orleans.Runtime { internal class ActivationDirectory : IEnumerable<KeyValuePair<ActivationId, ActivationData>> { private static readonly TraceLogger logger = TraceLogger.GetLogger("ActivationDirectory", TraceLogger.LoggerType.Runtime); private readonly ConcurrentDictionary<ActivationId, ActivationData> activations; // Activation data (app grains) only. private readonly ConcurrentDictionary<ActivationId, SystemTarget> systemTargets; // SystemTarget only. private readonly ConcurrentDictionary<GrainId, List<ActivationData>> grainToActivationsMap; // Activation data (app grains) only. private readonly ConcurrentDictionary<string, CounterStatistic> grainCounts; // simple statistics type->count private readonly ConcurrentDictionary<string, CounterStatistic> systemTargetCounts; // simple statistics systemTargetTypeName->count internal ActivationDirectory() { activations = new ConcurrentDictionary<ActivationId, ActivationData>(); systemTargets = new ConcurrentDictionary<ActivationId, SystemTarget>(); grainToActivationsMap = new ConcurrentDictionary<GrainId, List<ActivationData>>(); grainCounts = new ConcurrentDictionary<string, CounterStatistic>(); systemTargetCounts = new ConcurrentDictionary<string, CounterStatistic>(); } public int Count { get { return activations.Count; } } public IEnumerable<SystemTarget> AllSystemTargets() { return systemTargets.Values; } public ActivationData FindTarget(ActivationId key) { ActivationData target; return activations.TryGetValue(key, out target) ? target : null; } public SystemTarget FindSystemTarget(ActivationId key) { SystemTarget target; return systemTargets.TryGetValue(key, out target) ? 
target : null; } internal void IncrementGrainCounter(string grainTypeName) { if (logger.IsVerbose2) logger.Verbose2("Increment Grain Counter {0}", grainTypeName); CounterStatistic ctr = FindGrainCounter(grainTypeName); ctr.Increment(); } internal void DecrementGrainCounter(string grainTypeName) { if (logger.IsVerbose2) logger.Verbose2("Decrement Grain Counter {0}", grainTypeName); CounterStatistic ctr = FindGrainCounter(grainTypeName); ctr.DecrementBy(1); } private CounterStatistic FindGrainCounter(string grainTypeName) { CounterStatistic ctr; if (grainCounts.TryGetValue(grainTypeName, out ctr)) return ctr; var counterName = new StatisticName(StatisticNames.GRAIN_COUNTS_PER_GRAIN, grainTypeName); ctr = grainCounts[grainTypeName] = CounterStatistic.FindOrCreate(counterName, false); return ctr; } private CounterStatistic FindSystemTargetCounter(string systemTargetTypeName) { CounterStatistic ctr; if (systemTargetCounts.TryGetValue(systemTargetTypeName, out ctr)) return ctr; var counterName = new StatisticName(StatisticNames.SYSTEM_TARGET_COUNTS, systemTargetTypeName); ctr = systemTargetCounts[systemTargetTypeName] = CounterStatistic.FindOrCreate(counterName, false); return ctr; } public void RecordNewTarget(ActivationData target) { if (!activations.TryAdd(target.ActivationId, target)) return; grainToActivationsMap.AddOrUpdate(target.Grain, g => new List<ActivationData> { target }, (g, list) => { lock (list) { list.Add(target); } return list; }); } public void RecordNewSystemTarget(SystemTarget target) { systemTargets.TryAdd(target.ActivationId, target); if (!Constants.IsSingletonSystemTarget(target.GrainId)) { FindSystemTargetCounter(Constants.SystemTargetName(target.GrainId)).Increment(); } } public void RemoveSystemTarget(SystemTarget target) { SystemTarget ignore; systemTargets.TryRemove(target.ActivationId, out ignore); if (!Constants.IsSingletonSystemTarget(target.GrainId)) { FindSystemTargetCounter(Constants.SystemTargetName(target.GrainId)).DecrementBy(1); } } 
        /// <summary>
        /// Removes a grain activation from the directory and from its grain's activation list.
        /// Handles the race where another thread re-adds activations for the same grain
        /// concurrently with the list removal here.
        /// </summary>
        public void RemoveTarget(ActivationData target)
        {
            ActivationData ignore;
            if (!activations.TryRemove(target.ActivationId, out ignore))
                return;
            List<ActivationData> list;
            if (grainToActivationsMap.TryGetValue(target.Grain, out list))
            {
                lock (list)
                {
                    list.Remove(target);
                    if (list.Count == 0)
                    {
                        List<ActivationData> list2; // == list
                        if (grainToActivationsMap.TryRemove(target.Grain, out list2))
                        {
                            lock (list2)
                            {
                                if (list2.Count > 0)
                                {
                                    // A concurrent RecordNewTarget added entries after we
                                    // decided to remove the (then empty) list — merge the
                                    // survivors back into the map rather than losing them.
                                    grainToActivationsMap.AddOrUpdate(target.Grain,
                                        g => list2,
                                        (g, list3) =>
                                        {
                                            lock (list3)
                                            {
                                                list3.AddRange(list2);
                                            }
                                            return list3;
                                        });
                                }
                            }
                        }
                    }
                }
            }
        }

        // Returns null if no activations exist for this grain ID, rather than an empty list
        public List<ActivationData> FindTargets(GrainId key)
        {
            List<ActivationData> tmp;
            if (grainToActivationsMap.TryGetValue(key, out tmp))
            {
                lock (tmp)
                {
                    // Snapshot under the lock so callers can iterate without racing mutators.
                    return tmp.ToList();
                }
            }
            return null;
        }

        /// <summary>
        /// Returns per-grain-type activation counters, omitting types whose count is zero.
        /// </summary>
        public IEnumerable<KeyValuePair<string, long>> GetSimpleGrainStatistics()
        {
            return grainCounts
                .Select(s => new KeyValuePair<string, long>(s.Key, s.Value.GetCurrentValue()))
                .Where(p => p.Value > 0);
        }

        /// <summary>
        /// Logs a dump of every activation's status at Info level (bypassing log bulking/truncation).
        /// </summary>
        public void PrintActivationDirectory()
        {
            if (logger.IsInfo)
            {
                string stats = Utils.EnumerableToString(activations.Values.OrderBy(act => act.Name),
                    act => string.Format("++{0}", act.DumpStatus()), "\r\n");
                if (stats.Length > 0)
                {
                    logger.LogWithoutBulkingAndTruncating(Logger.Severity.Info, ErrorCode.Catalog_ActivationDirectory_Statistics,
                        String.Format("ActivationDirectory.PrintActivationDirectory(): Size = {0}, Directory:\n{1}", activations.Count, stats));
                }
            }
        }

        #region Implementation of IEnumerable

        public IEnumerator<KeyValuePair<ActivationId, ActivationData>> GetEnumerator()
        {
            return activations.GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        #endregion
    }
}
{'content_hash': '10bbae28a5bd12e240cbe357ab9bc9d2', 'timestamp': '', 'source': 'github', 'line_count': 193, 'max_line_length': 222, 'avg_line_length': 39.601036269430054, 'alnum_prop': 0.581577914431506, 'repo_name': 'akoeplinger/orleans', 'id': '11aac7db5a1232972cb84f343f5a32ee7a539f5f', 'size': '8793', 'binary': False, 'copies': '1', 'ref': 'refs/heads/mono', 'path': 'src/OrleansRuntime/Catalog/ActivationDirectory.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '74'}, {'name': 'C#', 'bytes': '3572249'}, {'name': 'F#', 'bytes': '745'}, {'name': 'Shell', 'bytes': '6491'}, {'name': 'Smalltalk', 'bytes': '2664'}, {'name': 'Visual Basic', 'bytes': '22831'}]}
package com.jetbrains.python.actions;

import com.intellij.ide.actions.QualifiedNameProvider;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.psi.PyClass;
import com.jetbrains.python.psi.PyFunction;
import com.jetbrains.python.psi.stubs.PyClassNameIndex;
import com.jetbrains.python.psi.stubs.PyFunctionNameIndex;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;

/**
 * Maps Python classes and functions to/from their dotted qualified names
 * for the IDE's "Copy Reference" style actions.
 */
public class PyQualifiedNameProvider implements QualifiedNameProvider {

  @Override
  public PsiElement adjustElementToCopy(PsiElement element) {
    // Only classes and functions have a copyable qualified name.
    if (element instanceof PyClass || element instanceof PyFunction) {
      return element;
    }
    return null;
  }

  @Nullable
  @Override
  public String getQualifiedName(PsiElement element) {
    if (element instanceof PyClass) {
      return ((PyClass)element).getQualifiedName();
    }
    if (element instanceof PyFunction) {
      return ((PyFunction)element).getQualifiedName();
    }
    return null;
  }

  @Nullable
  @Override
  public PsiElement qualifiedNameToElement(String fqn, Project project) {
    // Prefer a class match; fall back to a top-level or nested function.
    final PyClass matchingClass = PyClassNameIndex.findClass(fqn, project);
    if (matchingClass != null) {
      return matchingClass;
    }
    return findFunctionByQualifiedName(fqn, project);
  }

  // TODO make it part of PyPsiFacade similarly to createClassByQName()
  @Nullable
  private static PyFunction findFunctionByQualifiedName(@NotNull String qname, @NotNull Project project) {
    // The stub index is keyed by short name, so look up candidates by the
    // last component and then filter on the full qualified name.
    final String shortName = QualifiedName.fromDottedString(qname).getLastComponent();
    final Collection<PyFunction> candidates = PyFunctionNameIndex.find(shortName, project);
    return ContainerUtil.find(candidates, candidate -> qname.equals(candidate.getQualifiedName()));
  }
}
{'content_hash': 'a6a0f7bbeec394ff07907400361afca0', 'timestamp': '', 'source': 'github', 'line_count': 59, 'max_line_length': 120, 'avg_line_length': 33.610169491525426, 'alnum_prop': 0.767523953605648, 'repo_name': 'jwren/intellij-community', 'id': '66b5ab6458f31747f7e4ab7c73d8a6ddf51a177f', 'size': '2583', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'python/src/com/jetbrains/python/actions/PyQualifiedNameProvider.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []}
""" facilities.py ~~~~~~~~~~~~ This module implements settings HP OneView REST API """ from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() __title__ = 'facilities' __version__ = '0.0.1' __copyright__ = '(C) Copyright 2012-2015 Hewlett-Packard Development ' \ ' Development LP' __license__ = 'MIT' __status__ = 'Development' ### # (C) Copyright (2012-2015) Hewlett Packard Enterprise Development LP # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
###

from hpOneView.common import *
from hpOneView.connection import *
from hpOneView.activity import *
from hpOneView.exceptions import *


class facilities(object):
    """Client for the HP OneView facilities resources: datacenters, racks
    and power devices (including iPDU discovery)."""

    def __init__(self, con):
        # con: an authenticated hpOneView connection object.
        self._con = con
        self._activity = activity(con)

    def get_datacenters(self):
        """Return the collection of datacenter resources."""
        body = self._con.get(uri['datacenters'])
        return body

    def get_powerdevices(self):
        """Return the collection of power-device resources."""
        body = self._con.get(uri['powerDevices'])
        return body

    def get_racks(self):
        """Return the collection of rack resources."""
        body = self._con.get(uri['racks'])
        return body

    def _delete_resource(self, resource, force, blocking, verbose):
        """Shared DELETE implementation for datacenters, racks and power
        devices; identical behavior to the previously duplicated inline code.

        Returns the delete task (waited on when ``blocking`` is True).
        """
        if force:
            task, body = self._con.delete(resource['uri'] + '?force=True')
        else:
            task, body = self._con.delete(resource['uri'])
        if blocking is True:
            task = self._activity.wait4task(task, tout=600, verbose=verbose)
        return task

    def delete_datacenter(self, datacenter, force=False, blocking=True,
                          verbose=False):
        """Delete a datacenter resource; returns the delete task."""
        return self._delete_resource(datacenter, force, blocking, verbose)

    def delete_rack(self, rack, force=False, blocking=True, verbose=False):
        """Delete a rack resource; returns the delete task."""
        return self._delete_resource(rack, force, blocking, verbose)

    def delete_powerdevice(self, powerdevice, force=False, blocking=True,
                           verbose=False):
        """Delete a power-device resource; returns the delete task."""
        return self._delete_resource(powerdevice, force, blocking, verbose)

    def _add_resource(self, resource_uri, resource, blocking, verbose):
        """Shared POST implementation for the add_* methods.

        When ``blocking``, waits for the task to complete and returns the
        created resource (or the raw response body when the reply is not a
        Task). When not blocking, returns the task immediately.
        """
        task, body = self._con.post(resource_uri, resource)
        if blocking is True:
            task = self._activity.wait4task(task, tout=600, verbose=verbose)
            if task:
                if 'type' in task and task['type'].startswith('Task'):
                    entity = self._activity.get_task_associated_resource(task)
                    return self._con.get(entity['resourceUri'])
                return body
        return task

    def add_datacenter(self, datacenter, blocking=True, verbose=False):
        """Add a datacenter; returns the created resource when blocking."""
        return self._add_resource(uri['datacenters'], datacenter,
                                  blocking, verbose)

    def add_rack(self, rack, blocking=True, verbose=False):
        """Add a rack; returns the created resource when blocking."""
        return self._add_resource(uri['racks'], rack, blocking, verbose)

    def add_powerdevice(self, powerdevice, blocking=True, verbose=False):
        """Add a power device; returns the created resource when blocking."""
        return self._add_resource(uri['powerDevices'], powerdevice,
                                  blocking, verbose)

    def add_iPDU(self, host, user, passwd, blocking=True, verbose=False):
        """Discover and add an iPDU by hostname and credentials."""
        request = {'hostname': host, 'username': user, 'password': passwd}
        return self._add_resource(uri['powerDevicesDiscover'], request,
                                  blocking, verbose)

# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
{'content_hash': 'a1d084d6f17ab3dd8831e9b37aea384e', 'timestamp': '', 'source': 'github', 'line_count': 149, 'max_line_length': 79, 'avg_line_length': 39.19463087248322, 'alnum_prop': 0.6191780821917808, 'repo_name': 'ufcg-lsd/python-hpOneView', 'id': 'b475fa4662155cdecb4fa4e0d885cfd42653311b', 'size': '5865', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'hpOneView/facilities.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '195557'}]}
<!-- Library manifest for the tutorial overlay module (package
     jp.co.nohana.widget.tutorial). Declares no components;
     android:allowBackup="true" opts the application data into
     Android's automatic backup. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="jp.co.nohana.widget.tutorial">

    <application android:allowBackup="true"/>

</manifest>
{'content_hash': '3e724bf5bd31419bd2d046dcc3411a63', 'timestamp': '', 'source': 'github', 'line_count': 5, 'max_line_length': 62, 'avg_line_length': 35.0, 'alnum_prop': 0.72, 'repo_name': 'nohana/SimpleFeatureTutorialOverlayView', 'id': '54f5cc2fb6cd10900f26406d5f9093dcdc2793f5', 'size': '175', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'library/src/main/AndroidManifest.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '13880'}]}
package org.apache.kafka.streams.state.internals; import org.apache.kafka.streams.processor.ProcessorContext; import org.apache.kafka.streams.processor.StateStoreSupplier; import org.apache.kafka.streams.state.KeyValueStore; import org.apache.kafka.streams.state.Stores; import org.junit.Test; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; public class InMemoryKeyValueStoreTest extends AbstractKeyValueStoreTest { @SuppressWarnings("unchecked") @Override protected <K, V> KeyValueStore<K, V> createKeyValueStore( ProcessorContext context, Class<K> keyClass, Class<V> valueClass, boolean useContextSerdes) { StateStoreSupplier supplier; if (useContextSerdes) { supplier = Stores.create("my-store").withKeys(context.keySerde()).withValues(context.valueSerde()).inMemory().build(); } else { supplier = Stores.create("my-store").withKeys(keyClass).withValues(valueClass).inMemory().build(); } KeyValueStore<K, V> store = (KeyValueStore<K, V>) supplier.get(); store.init(context, store); return store; } @Test public void shouldRemoveKeysWithNullValues() { store.close(); // Add any entries that will be restored to any store // that uses the driver's context ... driver.addEntryToRestoreLog(0, "zero"); driver.addEntryToRestoreLog(1, "one"); driver.addEntryToRestoreLog(2, "two"); driver.addEntryToRestoreLog(3, "three"); driver.addEntryToRestoreLog(0, null); store = createKeyValueStore(driver.context(), Integer.class, String.class, true); context.restore(store.name(), driver.restoredEntries()); assertEquals(3, driver.sizeOf(store)); assertThat(store.get(0), nullValue()); } }
{'content_hash': '64f012046494e56a1d12e704d5fe1d8a', 'timestamp': '', 'source': 'github', 'line_count': 54, 'max_line_length': 130, 'avg_line_length': 36.0, 'alnum_prop': 0.6846707818930041, 'repo_name': 'zzwlstarby/mykafka', 'id': '541c0038261f17336f77b125e21de7f22e4f4bd7', 'size': '2742', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'streams/src/test/java/org/apache/kafka/streams/state/internals/InMemoryKeyValueStoreTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'HTML', 'bytes': '302893'}, {'name': 'Java', 'bytes': '10518581'}, {'name': 'Python', 'bytes': '600674'}, {'name': 'Scala', 'bytes': '4765404'}, {'name': 'Shell', 'bytes': '49310'}, {'name': 'XSLT', 'bytes': '7116'}]}
<!-- Wizard step 2 ("Profile Information"): collects name, surname, email and
     address into the Angular formData model; the Previous/Next anchors
     navigate the ui-router wizard states (step_one / step_three). -->
<fieldset>
    <h2>Profile Information</h2>
    <div class="row">
        <div class="col-lg-6">
            <div class="form-group">
                <label>First name *</label>
                <input id="name" name="name" ng-model="formData.name" type="text" class="form-control required">
            </div>
            <div class="form-group">
                <label>Last name *</label>
                <input id="surname" name="surname" ng-model="formData.lastName" type="text" class="form-control required">
            </div>
        </div>
        <div class="col-lg-6">
            <div class="form-group">
                <label>Email *</label>
                <input id="email" name="email" type="text" ng-model="formData.email" class="form-control required email">
            </div>
            <div class="form-group">
                <label>Address *</label>
                <input id="address" name="address" type="text" ng-model="formData.address" class="form-control">
            </div>
        </div>
    </div>
    <a ui-sref="forms.wizard.step_one" class="btn btn-default">Previous</a>
    <a ui-sref="forms.wizard.step_three" class="btn btn-primary">Next</a>
</fieldset>
{'content_hash': '7b579914abc39a0d16cea856cb5591c5', 'timestamp': '', 'source': 'github', 'line_count': 28, 'max_line_length': 122, 'avg_line_length': 42.607142857142854, 'alnum_prop': 0.5331098072087175, 'repo_name': 'Slukad/Matozap', 'id': '702577d6f9845ba5a4e4a0e6f4190f89c84e3aa7', 'size': '1193', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'src/Matozap.Web/wwwroot/Views/wizard/step_two.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '36'}, {'name': 'Batchfile', 'bytes': '65'}, {'name': 'C#', 'bytes': '204479'}, {'name': 'CSS', 'bytes': '736472'}, {'name': 'CoffeeScript', 'bytes': '3263'}, {'name': 'HTML', 'bytes': '2060306'}, {'name': 'JavaScript', 'bytes': '4520322'}, {'name': 'PHP', 'bytes': '4522'}]}
from __future__ import print_function, absolute_import, division from unittest2 import TestCase from mock import patch import json from pils.aws import get_lambda_config_property class PilsTests(TestCase): class Context(object): def __init__(self): self.invoked_function_arn = "42" self.function_version = "23" class BotoClient(object): def __init__(self, return_value): self.return_value = return_value def get_function_configuration(self, FunctionName=None, Qualifier=None): return self.return_value def setUp(self): self.context = self.Context() @patch("boto3.client") def test_get_lambda_config_property_with_property(self, mock_boto3_client): properties = { "key1": 42, "key2": "string", "key3": [2, 3] } property_dict = {"Description": json.dumps(properties)} mock_boto3_client.return_value = self.BotoClient(property_dict) self.assertEqual(get_lambda_config_property(self.context, "key1"), properties['key1']) @patch("boto3.client") def test_get_lambda_config_property_with_unknown_property(self, mock_boto3_client): properties = { "key1": 42, "key2": "string", "key3": [2, 3] } property_dict = {"Description": json.dumps(properties)} mock_boto3_client.return_value = self.BotoClient(property_dict) self.assertEqual(get_lambda_config_property(self.context, "key4"), None) @patch("boto3.client") def test_get_lambda_config_property_without_property(self, mock_boto3_client): properties = { "key1": 42, "key2": "string", "key3": [2, 3] } property_dict = {"Description": json.dumps(properties)} mock_boto3_client.return_value = self.BotoClient(property_dict) self.assertEqual(get_lambda_config_property(self.context), properties) @patch("boto3.client") def test_get_lambda_config_property_without_json(self, mock_boto3_client): property_dict = {"Description": "foobar"} mock_boto3_client.return_value = self.BotoClient(property_dict) self.assertRaises(ValueError, get_lambda_config_property, self.context)
{'content_hash': 'e68c948d85cd9c78d895a23953da762a', 'timestamp': '', 'source': 'github', 'line_count': 67, 'max_line_length': 94, 'avg_line_length': 34.343283582089555, 'alnum_prop': 0.6284224250325945, 'repo_name': 'ImmobilienScout24/pils', 'id': '15cc89800c4c452c420e6a6f9de65a80b7a8c991', 'size': '2301', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/unittest/python/aws_tests.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Python', 'bytes': '14868'}]}
package openstack

import (
	"context"
	"fmt"
	"net"
	"reflect"
	"strings"
	"time"

	"github.com/golang/glog"
	"github.com/gophercloud/gophercloud"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/external"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/layer3/floatingips"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/lbaas_v2/listeners"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/lbaas_v2/loadbalancers"
	v2monitors "github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/lbaas_v2/monitors"
	v2pools "github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/lbaas_v2/pools"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/security/groups"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/security/rules"
	"github.com/gophercloud/gophercloud/openstack/networking/v2/networks"
	neutronports "github.com/gophercloud/gophercloud/openstack/networking/v2/ports"
	"github.com/gophercloud/gophercloud/pagination"

	"k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/apimachinery/pkg/util/sets"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/kubernetes/pkg/api/v1/service"
	"k8s.io/kubernetes/pkg/cloudprovider"
)

// Note: when creating a new Loadbalancer (VM), it can take some time before it is ready for use,
// this timeout is used for waiting until the Loadbalancer provisioning status goes to ACTIVE state.
const (
	// loadbalancerActive* is configuration of exponential backoff for
	// going into ACTIVE loadbalancer provisioning status. Starting with 1
	// seconds, multiplying by 1.2 with each step and taking 19 steps at maximum
	// it will time out after 128s, which roughly corresponds to 120s
	loadbalancerActiveInitDelay = 1 * time.Second
	loadbalancerActiveFactor    = 1.2
	loadbalancerActiveSteps     = 19

	// loadbalancerDelete* is configuration of exponential backoff for
	// waiting for delete operation to complete. Starting with 1
	// seconds, multiplying by 1.2 with each step and taking 13 steps at maximum
	// it will time out after 32s, which roughly corresponds to 30s
	loadbalancerDeleteInitDelay = 1 * time.Second
	loadbalancerDeleteFactor    = 1.2
	loadbalancerDeleteSteps     = 13

	// Neutron LBaaS provisioning status values we react to.
	activeStatus = "ACTIVE"
	errorStatus  = "ERROR"

	// Service annotation selecting the external network used for floating IPs.
	ServiceAnnotationLoadBalancerFloatingNetworkID = "loadbalancer.openstack.org/floating-network-id"

	// ServiceAnnotationLoadBalancerInternal is the annotation used on the service
	// to indicate that we want an internal loadbalancer service.
	// If the value of ServiceAnnotationLoadBalancerInternal is false, it indicates that we want an external loadbalancer service. Default to false.
	ServiceAnnotationLoadBalancerInternal = "service.beta.kubernetes.io/openstack-internal-load-balancer"
)

// LbaasV2 is a LoadBalancer implementation for Neutron LBaaS v2 API
type LbaasV2 struct {
	LoadBalancer
}

type empty struct{}

// networkExtensions returns the set of Neutron extension aliases enabled on
// the cloud, as a membership map.
func networkExtensions(client *gophercloud.ServiceClient) (map[string]bool, error) {
	seen := make(map[string]bool)

	pager := extensions.List(client)
	err := pager.EachPage(func(page pagination.Page) (bool, error) {
		exts, err := extensions.ExtractExtensions(page)
		if err != nil {
			return false, err
		}
		for _, ext := range exts {
			seen[ext.Alias] = true
		}
		return true, nil
	})

	return seen, err
}

// getFloatingIPByPortID returns the floating IP attached to the given port.
// Returns ErrNotFound when none exists and ErrMultipleResults when the port
// unexpectedly has more than one floating IP.
func getFloatingIPByPortID(client *gophercloud.ServiceClient, portID string) (*floatingips.FloatingIP, error) {
	opts := floatingips.ListOpts{
		PortID: portID,
	}
	pager := floatingips.List(client, opts)

	floatingIPList := make([]floatingips.FloatingIP, 0, 1)

	err := pager.EachPage(func(page pagination.Page) (bool, error) {
		f, err := floatingips.ExtractFloatingIPs(page)
		if err != nil {
			return false, err
		}
		floatingIPList = append(floatingIPList, f...)
		// Bail out of pagination early once we know the result is ambiguous.
		if len(floatingIPList) > 1 {
			return false, ErrMultipleResults
		}
		return true, nil
	})
	if err != nil {
		if isNotFound(err) {
			return nil, ErrNotFound
		}
		return nil, err
	}

	if len(floatingIPList) == 0 {
		return nil, ErrNotFound
	} else if len(floatingIPList) > 1 {
		return nil, ErrMultipleResults
	}

	return &floatingIPList[0], nil
}

// getLoadbalancerByName returns the unique load balancer with the given name,
// or ErrNotFound / ErrMultipleResults.
func getLoadbalancerByName(client *gophercloud.ServiceClient, name string) (*loadbalancers.LoadBalancer, error) {
	opts := loadbalancers.ListOpts{
		Name: name,
	}
	pager := loadbalancers.List(client, opts)

	loadbalancerList := make([]loadbalancers.LoadBalancer, 0, 1)

	err := pager.EachPage(func(page pagination.Page) (bool, error) {
		v, err := loadbalancers.ExtractLoadBalancers(page)
		if err != nil {
			return false, err
		}
		loadbalancerList = append(loadbalancerList, v...)
		if len(loadbalancerList) > 1 {
			return false, ErrMultipleResults
		}
		return true, nil
	})
	if err != nil {
		if isNotFound(err) {
			return nil, ErrNotFound
		}
		return nil, err
	}

	if len(loadbalancerList) == 0 {
		return nil, ErrNotFound
	} else if len(loadbalancerList) > 1 {
		return nil, ErrMultipleResults
	}

	return &loadbalancerList[0], nil
}

// getListenersByLoadBalancerID returns all listeners attached to the given
// load balancer (a listener may be shared; only matching attachments count).
func getListenersByLoadBalancerID(client *gophercloud.ServiceClient, id string) ([]listeners.Listener, error) {
	var existingListeners []listeners.Listener
	err := listeners.List(client, listeners.ListOpts{LoadbalancerID: id}).EachPage(func(page pagination.Page) (bool, error) {
		listenerList, err := listeners.ExtractListeners(page)
		if err != nil {
			return false, err
		}
		for _, l := range listenerList {
			for _, lb := range l.Loadbalancers {
				if lb.ID == id {
					existingListeners = append(existingListeners, l)
					break
				}
			}
		}
		return true, nil
	})
	if err != nil {
		return nil, err
	}

	return existingListeners, nil
}

// get listener for a port or nil if does not exist
func getListenerForPort(existingListeners []listeners.Listener, port v1.ServicePort) *listeners.Listener {
	for _, l := range existingListeners {
		if listeners.Protocol(l.Protocol) == toListenersProtocol(port.Protocol) && l.ProtocolPort == int(port.Port) {
			// Returning &l is safe here because we return immediately
			// (the loop variable is not reused afterwards).
			return &l
		}
	}
	return nil
}

// Get pool for a listener. A listener always has exactly one pool.
func getPoolByListenerID(client *gophercloud.ServiceClient, loadbalancerID string, listenerID string) (*v2pools.Pool, error) {
	listenerPools := make([]v2pools.Pool, 0, 1)
	err := v2pools.List(client, v2pools.ListOpts{LoadbalancerID: loadbalancerID}).EachPage(func(page pagination.Page) (bool, error) {
		poolsList, err := v2pools.ExtractPools(page)
		if err != nil {
			return false, err
		}
		for _, p := range poolsList {
			for _, l := range p.Listeners {
				if l.ID == listenerID {
					listenerPools = append(listenerPools, p)
				}
			}
		}
		if len(listenerPools) > 1 {
			return false, ErrMultipleResults
		}
		return true, nil
	})
	if err != nil {
		if isNotFound(err) {
			return nil, ErrNotFound
		}
		return nil, err
	}

	if len(listenerPools) == 0 {
		return nil, ErrNotFound
	} else if len(listenerPools) > 1 {
		return nil, ErrMultipleResults
	}

	return &listenerPools[0], nil
}

// getMembersByPoolID returns all members of the given pool.
func getMembersByPoolID(client *gophercloud.ServiceClient, id string) ([]v2pools.Member, error) {
	var members []v2pools.Member
	err := v2pools.ListMembers(client, id, v2pools.ListMembersOpts{}).EachPage(func(page pagination.Page) (bool, error) {
		membersList, err := v2pools.ExtractMembers(page)
		if err != nil {
			return false, err
		}
		members = append(members, membersList...)

		return true, nil
	})
	if err != nil {
		return nil, err
	}

	return members, nil
}

// Check if a member exists for node
func memberExists(members []v2pools.Member, addr string, port int) bool {
	for _, member := range members {
		if member.Address == addr && member.ProtocolPort == port {
			return true
		}
	}

	return false
}

// popListener removes the listener with the given ID from the slice
// (swap-with-last removal; order is not preserved).
func popListener(existingListeners []listeners.Listener, id string) []listeners.Listener {
	for i, existingListener := range existingListeners {
		if existingListener.ID == id {
			existingListeners[i] = existingListeners[len(existingListeners)-1]
			existingListeners = existingListeners[:len(existingListeners)-1]
			break
		}
	}

	return existingListeners
}

// popMember removes every member matching addr:port from the slice
// (swap-with-last removal; order is not preserved).
func popMember(members []v2pools.Member, addr string, port int) []v2pools.Member {
	for i, member := range members {
		if member.Address == addr && member.ProtocolPort == port {
			members[i] = members[len(members)-1]
			members = members[:len(members)-1]
		}
	}

	return members
}

// getSecurityGroupName derives a deterministic security-group name from the
// service's UID, namespace and name.
func getSecurityGroupName(service *v1.Service) string {
	securityGroupName := fmt.Sprintf("lb-sg-%s-%s-%s", service.UID, service.Namespace, service.Name)
	//OpenStack requires that the name of a security group is shorter than 255 bytes.
	if len(securityGroupName) > 255 {
		securityGroupName = securityGroupName[:255]
	}

	return securityGroupName
}

// getSecurityGroupRules lists all security-group rules matching opts.
func getSecurityGroupRules(client *gophercloud.ServiceClient, opts rules.ListOpts) ([]rules.SecGroupRule, error) {
	pager := rules.List(client, opts)

	var securityRules []rules.SecGroupRule

	err := pager.EachPage(func(page pagination.Page) (bool, error) {
		ruleList, err := rules.ExtractRules(page)
		if err != nil {
			return false, err
		}
		securityRules = append(securityRules, ruleList...)

		return true, nil
	})
	if err != nil {
		return nil, err
	}

	return securityRules, nil
}

// waitLoadbalancerActiveProvisioningStatus polls the load balancer with
// exponential backoff until its provisioning status becomes ACTIVE (or ERROR,
// which fails immediately). Returns the last observed status.
func waitLoadbalancerActiveProvisioningStatus(client *gophercloud.ServiceClient, loadbalancerID string) (string, error) {
	backoff := wait.Backoff{
		Duration: loadbalancerActiveInitDelay,
		Factor:   loadbalancerActiveFactor,
		Steps:    loadbalancerActiveSteps,
	}

	var provisioningStatus string
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		loadbalancer, err := loadbalancers.Get(client, loadbalancerID).Extract()
		if err != nil {
			return false, err
		}
		provisioningStatus = loadbalancer.ProvisioningStatus
		if loadbalancer.ProvisioningStatus == activeStatus {
			return true, nil
		} else if loadbalancer.ProvisioningStatus == errorStatus {
			return true, fmt.Errorf("loadbalancer has gone into ERROR state")
		} else {
			return false, nil
		}
	})

	if err == wait.ErrWaitTimeout {
		err = fmt.Errorf("loadbalancer failed to go into ACTIVE provisioning status within alloted time")
	}
	return provisioningStatus, err
}

// waitLoadbalancerDeleted polls with exponential backoff until the load
// balancer can no longer be fetched (treats ErrNotFound as success).
func waitLoadbalancerDeleted(client *gophercloud.ServiceClient, loadbalancerID string) error {
	backoff := wait.Backoff{
		Duration: loadbalancerDeleteInitDelay,
		Factor:   loadbalancerDeleteFactor,
		Steps:    loadbalancerDeleteSteps,
	}
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		_, err := loadbalancers.Get(client, loadbalancerID).Extract()
		if err != nil {
			if err == ErrNotFound {
				return true, nil
			}
			return false, err
		}
		return false, nil
	})

	if err == wait.ErrWaitTimeout {
		err = fmt.Errorf("loadbalancer failed to delete within the alloted time")
	}

	return err
}

// toRuleProtocol maps a Kubernetes service protocol to a Neutron
// security-group rule protocol.
func toRuleProtocol(protocol v1.Protocol) rules.RuleProtocol {
	switch protocol {
	case v1.ProtocolTCP:
		return rules.ProtocolTCP
	case v1.ProtocolUDP:
		return rules.ProtocolUDP
	default:
		return rules.RuleProtocol(strings.ToLower(string(protocol)))
	}
}

// toListenersProtocol maps a Kubernetes service protocol to an LBaaS v2
// listener protocol.
func toListenersProtocol(protocol v1.Protocol) listeners.Protocol {
	switch protocol {
	case v1.ProtocolTCP:
		return listeners.ProtocolTCP
	default:
		return listeners.Protocol(string(protocol))
	}
}
// createNodeSecurityGroup adds ingress rules (both IPv4 and IPv6) to the
// nodes' security group allowing traffic on the given port/protocol from the
// load balancer's security group.
func createNodeSecurityGroup(client *gophercloud.ServiceClient, nodeSecurityGroupID string, port int, protocol v1.Protocol, lbSecGroup string) error {
	v4NodeSecGroupRuleCreateOpts := rules.CreateOpts{
		Direction:     rules.DirIngress,
		PortRangeMax:  port,
		PortRangeMin:  port,
		Protocol:      toRuleProtocol(protocol),
		RemoteGroupID: lbSecGroup,
		SecGroupID:    nodeSecurityGroupID,
		EtherType:     rules.EtherType4,
	}

	v6NodeSecGroupRuleCreateOpts := rules.CreateOpts{
		Direction:     rules.DirIngress,
		PortRangeMax:  port,
		PortRangeMin:  port,
		Protocol:      toRuleProtocol(protocol),
		RemoteGroupID: lbSecGroup,
		SecGroupID:    nodeSecurityGroupID,
		EtherType:     rules.EtherType6,
	}

	_, err := rules.Create(client, v4NodeSecGroupRuleCreateOpts).Extract()

	if err != nil {
		return err
	}

	_, err = rules.Create(client, v6NodeSecGroupRuleCreateOpts).Extract()

	if err != nil {
		return err
	}
	return nil
}

// createLoadBalancer creates a new LBaaS v2 load balancer on the configured
// subnet. The requested VIP address is only honored for internal services.
func (lbaas *LbaasV2) createLoadBalancer(service *v1.Service, name string, internalAnnotation bool) (*loadbalancers.LoadBalancer, error) {
	createOpts := loadbalancers.CreateOpts{
		Name:        name,
		Description: fmt.Sprintf("Kubernetes external service %s", name),
		VipSubnetID: lbaas.opts.SubnetID,
		Provider:    lbaas.opts.LBProvider,
	}

	loadBalancerIP := service.Spec.LoadBalancerIP
	if loadBalancerIP != "" && internalAnnotation {
		createOpts.VipAddress = loadBalancerIP
	}

	loadbalancer, err := loadbalancers.Create(lbaas.lb, createOpts).Extract()
	if err != nil {
		return nil, fmt.Errorf("error creating loadbalancer %v: %v", createOpts, err)
	}
	return loadbalancer, nil
}

// GetLoadBalancer returns whether the specified load balancer exists and its status
func (lbaas *LbaasV2) GetLoadBalancer(ctx context.Context, clusterName string, service *v1.Service) (*v1.LoadBalancerStatus, bool, error) {
	loadBalancerName := cloudprovider.GetLoadBalancerName(service)
	loadbalancer, err := getLoadbalancerByName(lbaas.lb, loadBalancerName)
	if err == ErrNotFound {
		return nil, false, nil
	}
	if loadbalancer == nil {
		return nil, false, err
	}

	status := &v1.LoadBalancerStatus{}

	// Prefer the floating IP bound to the VIP port; otherwise report the
	// VIP address itself.
	portID := loadbalancer.VipPortID
	if portID != "" {
		floatIP, err := getFloatingIPByPortID(lbaas.network, portID)
		if err != nil {
			return nil, false, fmt.Errorf("error getting floating ip for port %s: %v", portID, err)
		}

		status.Ingress = []v1.LoadBalancerIngress{{IP: floatIP.FloatingIP}}
	} else {
		status.Ingress = []v1.LoadBalancerIngress{{IP: loadbalancer.VipAddress}}
	}

	return status, true, err
}

// The LB needs to be configured with instance addresses on the same
// subnet as the LB (aka opts.SubnetID).  Currently we're just
// guessing that the node's InternalIP is the right address - and that
// should be sufficient for all "normal" cases.
func nodeAddressForLB(node *v1.Node) (string, error) {
	addrs := node.Status.Addresses
	if len(addrs) == 0 {
		return "", ErrNoAddressFound
	}

	for _, addr := range addrs {
		if addr.Type == v1.NodeInternalIP {
			return addr.Address, nil
		}
	}

	// No InternalIP found; fall back to the first address of any type.
	return addrs[0].Address, nil
}

//getStringFromServiceAnnotation searches a given v1.Service for a specific annotationKey and either returns the annotation's value or a specified defaultSetting
func getStringFromServiceAnnotation(service *v1.Service, annotationKey string, defaultSetting string) string {
	glog.V(4).Infof("getStringFromServiceAnnotation(%v, %v, %v)", service, annotationKey, defaultSetting)
	if annotationValue, ok := service.Annotations[annotationKey]; ok {
		//if there is an annotation for this setting, set the "setting" var to it
		// annotationValue can be empty, it is working as designed
		// it makes possible for instance provisioning loadbalancer without floatingip
		glog.V(4).Infof("Found a Service Annotation: %v = %v", annotationKey, annotationValue)
		return annotationValue
	}
	//if there is no annotation, set "settings" var to the value from cloud config
	glog.V(4).Infof("Could not find a Service Annotation; falling back on cloud-config setting: %v = %v", annotationKey, defaultSetting)
	return defaultSetting
}

// getSubnetIDForLB returns subnet-id for a specific node
func getSubnetIDForLB(compute *gophercloud.ServiceClient, node v1.Node) (string, error) {
	ipAddress, err := nodeAddressForLB(&node)
	if err != nil {
		return "", err
	}

	// ProviderID has the form ".../<instance-id>"; strip everything up to
	// and including the last slash.
	instanceID := node.Spec.ProviderID
	if ind := strings.LastIndex(instanceID, "/"); ind >= 0 {
		instanceID = instanceID[(ind + 1):]
	}

	interfaces, err := getAttachedInterfacesByID(compute, instanceID)
	if err != nil {
		return "", err
	}

	for _, intf := range interfaces {
		for _, fixedIP := range intf.FixedIPs {
			if fixedIP.IPAddress == ipAddress {
				return fixedIP.SubnetID, nil
			}
		}
	}

	return "", ErrNotFound
}

// getNodeSecurityGroupIDForLB lists node-security-groups for specific nodes
func getNodeSecurityGroupIDForLB(compute *gophercloud.ServiceClient, network *gophercloud.ServiceClient, nodes []*v1.Node) ([]string, error) {
	secGroupNames := sets.NewString()

	for _, node := range nodes {
		nodeName := types.NodeName(node.Name)
		srv, err := getServerByName(compute, nodeName, true)
		if err != nil {
			return []string{}, err
		}

		// use the first node-security-groups
		// case 0: node1:SG1  node2:SG1  return SG1
		// case 1: node1:SG1  node2:SG2  return SG1,SG2
		// case 2: node1:SG1,SG2  node2:SG3,SG4  return SG1,SG3
		// case 3: node1:SG1,SG2  node2:SG2,SG3  return SG1,SG2
		secGroupNames.Insert(srv.SecurityGroups[0]["name"].(string))
	}

	secGroupIDs := make([]string, secGroupNames.Len())
	for i, name := range secGroupNames.List() {
		secGroupID, err := groups.IDFromName(network, name)
		if err != nil {
			return []string{}, err
		}
		secGroupIDs[i] = secGroupID
	}

	return secGroupIDs, nil
}

// isSecurityGroupNotFound return true while 'err' is object of gophercloud.ErrResourceNotFound
func isSecurityGroupNotFound(err error) bool {
	// Compare by reflected type name rather than a type assertion;
	// matches any *.ErrResourceNotFound regardless of package.
	errType := reflect.TypeOf(err).String()
	errTypeSlice := strings.Split(errType, ".")
	errTypeValue := ""
	if len(errTypeSlice) != 0 {
		errTypeValue = errTypeSlice[len(errTypeSlice)-1]
	}
	if errTypeValue == "ErrResourceNotFound" {
		return true
	}

	return false
}

// getFloatingNetworkIDForLB returns a floating-network-id for cluster.
func getFloatingNetworkIDForLB(client *gophercloud.ServiceClient) (string, error) { var floatingNetworkIds []string type NetworkWithExternalExt struct { networks.Network external.NetworkExternalExt } err := networks.List(client, networks.ListOpts{}).EachPage(func(page pagination.Page) (bool, error) { var externalNetwork []NetworkWithExternalExt err := networks.ExtractNetworksInto(page, &externalNetwork) if err != nil { return false, err } for _, externalNet := range externalNetwork { if externalNet.External { floatingNetworkIds = append(floatingNetworkIds, externalNet.ID) } } if len(floatingNetworkIds) > 1 { return false, ErrMultipleResults } return true, nil }) if err != nil { if isNotFound(err) { return "", ErrNotFound } if err == ErrMultipleResults { glog.V(4).Infof("find multiple external networks, pick the first one when there are no explicit configuration.") return floatingNetworkIds[0], nil } return "", err } if len(floatingNetworkIds) == 0 { return "", ErrNotFound } return floatingNetworkIds[0], nil } // TODO: This code currently ignores 'region' and always creates a // loadbalancer in only the current OpenStack region. We should take // a list of regions (from config) and query/create loadbalancers in // each region. // EnsureLoadBalancer creates a new load balancer 'name', or updates the existing one. func (lbaas *LbaasV2) EnsureLoadBalancer(ctx context.Context, clusterName string, apiService *v1.Service, nodes []*v1.Node) (*v1.LoadBalancerStatus, error) { glog.V(4).Infof("EnsureLoadBalancer(%v, %v, %v, %v, %v, %v, %v)", clusterName, apiService.Namespace, apiService.Name, apiService.Spec.LoadBalancerIP, apiService.Spec.Ports, nodes, apiService.Annotations) if len(nodes) == 0 { return nil, fmt.Errorf("there are no available nodes for LoadBalancer service %s/%s", apiService.Namespace, apiService.Name) } if len(lbaas.opts.SubnetID) == 0 { // Get SubnetID automatically. 
// The LB needs to be configured with instance addresses on the same subnet, so get SubnetID by one node. subnetID, err := getSubnetIDForLB(lbaas.compute, *nodes[0]) if err != nil { glog.Warningf("Failed to find subnet-id for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err) return nil, fmt.Errorf("no subnet-id for service %s/%s : subnet-id not set in cloud provider config, "+ "and failed to find subnet-id from OpenStack: %v", apiService.Namespace, apiService.Name, err) } lbaas.opts.SubnetID = subnetID } ports := apiService.Spec.Ports if len(ports) == 0 { return nil, fmt.Errorf("no ports provided to openstack load balancer") } floatingPool := getStringFromServiceAnnotation(apiService, ServiceAnnotationLoadBalancerFloatingNetworkID, lbaas.opts.FloatingNetworkID) if len(floatingPool) == 0 { var err error floatingPool, err = getFloatingNetworkIDForLB(lbaas.network) if err != nil { glog.Warningf("Failed to find floating-network-id for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err) } } var internalAnnotation bool internal := getStringFromServiceAnnotation(apiService, ServiceAnnotationLoadBalancerInternal, "false") switch internal { case "true": glog.V(4).Infof("Ensure an internal loadbalancer service.") internalAnnotation = true case "false": if len(floatingPool) != 0 { glog.V(4).Infof("Ensure an external loadbalancer service, using floatingPool: %v", floatingPool) internalAnnotation = false } else { return nil, fmt.Errorf("floating-network-id or loadbalancer.openstack.org/floating-network-id should be specified when ensuring an external loadbalancer service") } default: return nil, fmt.Errorf("unknown service.beta.kubernetes.io/openstack-internal-load-balancer annotation: %v, specify \"true\" or \"false\" ", internal) } // Check for TCP protocol on each port // TODO: Convert all error messages to use an event recorder for _, port := range ports { if port.Protocol != v1.ProtocolTCP { return nil, fmt.Errorf("only 
TCP LoadBalancer is supported for openstack load balancers") } } sourceRanges, err := service.GetLoadBalancerSourceRanges(apiService) if err != nil { return nil, fmt.Errorf("failed to get source ranges for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err) } if !service.IsAllowAll(sourceRanges) && !lbaas.opts.ManageSecurityGroups { return nil, fmt.Errorf("source range restrictions are not supported for openstack load balancers without managing security groups") } affinity := apiService.Spec.SessionAffinity var persistence *v2pools.SessionPersistence switch affinity { case v1.ServiceAffinityNone: persistence = nil case v1.ServiceAffinityClientIP: persistence = &v2pools.SessionPersistence{Type: "SOURCE_IP"} default: return nil, fmt.Errorf("unsupported load balancer affinity: %v", affinity) } name := cloudprovider.GetLoadBalancerName(apiService) loadbalancer, err := getLoadbalancerByName(lbaas.lb, name) if err != nil { if err != ErrNotFound { return nil, fmt.Errorf("error getting loadbalancer %s: %v", name, err) } glog.V(2).Infof("Creating loadbalancer %s", name) loadbalancer, err = lbaas.createLoadBalancer(apiService, name, internalAnnotation) if err != nil { // Unknown error, retry later return nil, fmt.Errorf("error creating loadbalancer %s: %v", name, err) } } else { glog.V(2).Infof("LoadBalancer %s already exists", name) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } lbmethod := v2pools.LBMethod(lbaas.opts.LBMethod) if lbmethod == "" { lbmethod = v2pools.LBMethodRoundRobin } oldListeners, err := getListenersByLoadBalancerID(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("error getting LB %s listeners: %v", name, err) } for portIndex, port := range ports { listener := getListenerForPort(oldListeners, port) if listener == nil { 
glog.V(4).Infof("Creating listener for port %d", int(port.Port)) listener, err = listeners.Create(lbaas.lb, listeners.CreateOpts{ Name: fmt.Sprintf("listener_%s_%d", name, portIndex), Protocol: listeners.Protocol(port.Protocol), ProtocolPort: int(port.Port), LoadbalancerID: loadbalancer.ID, }).Extract() if err != nil { // Unknown error, retry later return nil, fmt.Errorf("error creating LB listener: %v", err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } glog.V(4).Infof("Listener for %s port %d: %s", string(port.Protocol), int(port.Port), listener.ID) // After all ports have been processed, remaining listeners are removed as obsolete. // Pop valid listeners. oldListeners = popListener(oldListeners, listener.ID) pool, err := getPoolByListenerID(lbaas.lb, loadbalancer.ID, listener.ID) if err != nil && err != ErrNotFound { // Unknown error, retry later return nil, fmt.Errorf("error getting pool for listener %s: %v", listener.ID, err) } if pool == nil { glog.V(4).Infof("Creating pool for listener %s", listener.ID) pool, err = v2pools.Create(lbaas.lb, v2pools.CreateOpts{ Name: fmt.Sprintf("pool_%s_%d", name, portIndex), Protocol: v2pools.Protocol(port.Protocol), LBMethod: lbmethod, ListenerID: listener.ID, Persistence: persistence, }).Extract() if err != nil { // Unknown error, retry later return nil, fmt.Errorf("error creating pool for listener %s: %v", listener.ID, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } glog.V(4).Infof("Pool for listener %s: %s", listener.ID, pool.ID) members, err := getMembersByPoolID(lbaas.lb, pool.ID) if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error getting pool members 
%s: %v", pool.ID, err) } for _, node := range nodes { addr, err := nodeAddressForLB(node) if err != nil { if err == ErrNotFound { // Node failure, do not create member glog.Warningf("Failed to create LB pool member for node %s: %v", node.Name, err) continue } else { return nil, fmt.Errorf("error getting address for node %s: %v", node.Name, err) } } if !memberExists(members, addr, int(port.NodePort)) { glog.V(4).Infof("Creating member for pool %s", pool.ID) _, err := v2pools.CreateMember(lbaas.lb, pool.ID, v2pools.CreateMemberOpts{ ProtocolPort: int(port.NodePort), Address: addr, SubnetID: lbaas.opts.SubnetID, }).Extract() if err != nil { return nil, fmt.Errorf("error creating LB pool member for node: %s, %v", node.Name, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } else { // After all members have been processed, remaining members are deleted as obsolete. 
members = popMember(members, addr, int(port.NodePort)) } glog.V(4).Infof("Ensured pool %s has member for %s at %s", pool.ID, node.Name, addr) } // Delete obsolete members for this pool for _, member := range members { glog.V(4).Infof("Deleting obsolete member %s for pool %s address %s", member.ID, pool.ID, member.Address) err := v2pools.DeleteMember(lbaas.lb, pool.ID, member.ID).ExtractErr() if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error deleting obsolete member %s for pool %s address %s: %v", member.ID, pool.ID, member.Address, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } monitorID := pool.MonitorID if monitorID == "" && lbaas.opts.CreateMonitor { glog.V(4).Infof("Creating monitor for pool %s", pool.ID) monitor, err := v2monitors.Create(lbaas.lb, v2monitors.CreateOpts{ PoolID: pool.ID, Type: string(port.Protocol), Delay: int(lbaas.opts.MonitorDelay.Duration.Seconds()), Timeout: int(lbaas.opts.MonitorTimeout.Duration.Seconds()), MaxRetries: int(lbaas.opts.MonitorMaxRetries), }).Extract() if err != nil { return nil, fmt.Errorf("error creating LB pool healthmonitor: %v", err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } monitorID = monitor.ID } else if lbaas.opts.CreateMonitor == false { glog.V(4).Infof("Do not create monitor for pool %s when create-monitor is false", pool.ID) } if monitorID != "" { glog.V(4).Infof("Monitor for pool %s: %s", pool.ID, monitorID) } } // All remaining listeners are obsolete, delete for _, listener := range oldListeners { glog.V(4).Infof("Deleting obsolete listener %s:", listener.ID) // get pool for listener pool, err := getPoolByListenerID(lbaas.lb, 
loadbalancer.ID, listener.ID) if err != nil && err != ErrNotFound { return nil, fmt.Errorf("error getting pool for obsolete listener %s: %v", listener.ID, err) } if pool != nil { // get and delete monitor monitorID := pool.MonitorID if monitorID != "" { glog.V(4).Infof("Deleting obsolete monitor %s for pool %s", monitorID, pool.ID) err = v2monitors.Delete(lbaas.lb, monitorID).ExtractErr() if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error deleting obsolete monitor %s for pool %s: %v", monitorID, pool.ID, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } // get and delete pool members members, err := getMembersByPoolID(lbaas.lb, pool.ID) if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error getting members for pool %s: %v", pool.ID, err) } if members != nil { for _, member := range members { glog.V(4).Infof("Deleting obsolete member %s for pool %s address %s", member.ID, pool.ID, member.Address) err := v2pools.DeleteMember(lbaas.lb, pool.ID, member.ID).ExtractErr() if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error deleting obsolete member %s for pool %s address %s: %v", member.ID, pool.ID, member.Address, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } } glog.V(4).Infof("Deleting obsolete pool %s for listener %s", pool.ID, listener.ID) // delete pool err = v2pools.Delete(lbaas.lb, pool.ID).ExtractErr() if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error deleting obsolete pool %s for listener %s: %v", pool.ID, listener.ID, err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, 
fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } } // delete listener err = listeners.Delete(lbaas.lb, listener.ID).ExtractErr() if err != nil && !isNotFound(err) { return nil, fmt.Errorf("error deleteting obsolete listener: %v", err) } provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID) if err != nil { return nil, fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err) } glog.V(2).Infof("Deleted obsolete listener: %s", listener.ID) } portID := loadbalancer.VipPortID floatIP, err := getFloatingIPByPortID(lbaas.network, portID) if err != nil && err != ErrNotFound { return nil, fmt.Errorf("error getting floating ip for port %s: %v", portID, err) } if floatIP == nil && floatingPool != "" && !internalAnnotation { glog.V(4).Infof("Creating floating ip for loadbalancer %s port %s", loadbalancer.ID, portID) floatIPOpts := floatingips.CreateOpts{ FloatingNetworkID: floatingPool, PortID: portID, } loadBalancerIP := apiService.Spec.LoadBalancerIP if loadBalancerIP != "" { floatIPOpts.FloatingIP = loadBalancerIP } floatIP, err = floatingips.Create(lbaas.network, floatIPOpts).Extract() if err != nil { return nil, fmt.Errorf("error creating LB floatingip %+v: %v", floatIPOpts, err) } } status := &v1.LoadBalancerStatus{} if floatIP != nil { status.Ingress = []v1.LoadBalancerIngress{{IP: floatIP.FloatingIP}} } else { status.Ingress = []v1.LoadBalancerIngress{{IP: loadbalancer.VipAddress}} } if lbaas.opts.ManageSecurityGroups { err := lbaas.ensureSecurityGroup(clusterName, apiService, nodes, loadbalancer) if err != nil { // cleanup what was created so far _ = lbaas.EnsureLoadBalancerDeleted(ctx, clusterName, apiService) return status, err } } return status, nil } // ensureSecurityGroup ensures security group exist for specific loadbalancer service. // Creating security group for specific loadbalancer service when it does not exist. 
func (lbaas *LbaasV2) ensureSecurityGroup(clusterName string, apiService *v1.Service, nodes []*v1.Node, loadbalancer *loadbalancers.LoadBalancer) error {
	// find node-security-group for service
	var err error
	if len(lbaas.opts.NodeSecurityGroupIDs) == 0 {
		lbaas.opts.NodeSecurityGroupIDs, err = getNodeSecurityGroupIDForLB(lbaas.compute, lbaas.network, nodes)
		if err != nil {
			return fmt.Errorf("failed to find node-security-group for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
		}
	}
	glog.V(4).Infof("find node-security-group %v for loadbalancer service %s/%s", lbaas.opts.NodeSecurityGroupIDs, apiService.Namespace, apiService.Name)

	// get service ports
	ports := apiService.Spec.Ports
	if len(ports) == 0 {
		return fmt.Errorf("no ports provided to openstack load balancer")
	}

	// get service source ranges
	sourceRanges, err := service.GetLoadBalancerSourceRanges(apiService)
	if err != nil {
		return fmt.Errorf("failed to get source ranges for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
	}

	// ensure security group for LB
	lbSecGroupName := getSecurityGroupName(apiService)
	lbSecGroupID, err := groups.IDFromName(lbaas.network, lbSecGroupName)
	if err != nil {
		// If the security group of LB not exist, create it later
		if isSecurityGroupNotFound(err) {
			lbSecGroupID = ""
		} else {
			return fmt.Errorf("error occurred finding security group: %s: %v", lbSecGroupName, err)
		}
	}
	if len(lbSecGroupID) == 0 {
		// create security group
		lbSecGroupCreateOpts := groups.CreateOpts{
			Name:        getSecurityGroupName(apiService),
			Description: fmt.Sprintf("Security Group for %s/%s Service LoadBalancer in cluster %s", apiService.Namespace, apiService.Name, clusterName),
		}

		lbSecGroup, err := groups.Create(lbaas.network, lbSecGroupCreateOpts).Extract()
		if err != nil {
			return fmt.Errorf("failed to create Security Group for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
		}
		lbSecGroupID = lbSecGroup.ID

		// add one ingress rule per (service port, source range) pair,
		// choosing IPv4 or IPv6 ethertype from the CIDR form
		for _, port := range ports {
			for _, sourceRange := range sourceRanges.StringSlice() {
				ethertype := rules.EtherType4
				network, _, err := net.ParseCIDR(sourceRange)

				if err != nil {
					return fmt.Errorf("error parsing source range %s as a CIDR: %v", sourceRange, err)
				}

				if network.To4() == nil {
					ethertype = rules.EtherType6
				}

				lbSecGroupRuleCreateOpts := rules.CreateOpts{
					Direction:      rules.DirIngress,
					PortRangeMax:   int(port.Port),
					PortRangeMin:   int(port.Port),
					Protocol:       toRuleProtocol(port.Protocol),
					RemoteIPPrefix: sourceRange,
					SecGroupID:     lbSecGroup.ID,
					EtherType:      ethertype,
				}

				_, err = rules.Create(lbaas.network, lbSecGroupRuleCreateOpts).Extract()

				if err != nil {
					return fmt.Errorf("error occurred creating rule for SecGroup %s: %v", lbSecGroup.ID, err)
				}
			}
		}

		lbSecGroupRuleCreateOpts := rules.CreateOpts{
			Direction:      rules.DirIngress,
			PortRangeMax:   4, // ICMP: Code - Values for ICMP "Destination Unreachable: Fragmentation Needed and Don't Fragment was Set"
			PortRangeMin:   3, // ICMP: Type
			Protocol:       rules.ProtocolICMP,
			RemoteIPPrefix: "0.0.0.0/0", // The Fragmentation packet can come from anywhere along the path back to the sourceRange - we need to allow this from all sources (required for PMTU discovery)
			SecGroupID:     lbSecGroup.ID,
			EtherType:      rules.EtherType4,
		}

		_, err = rules.Create(lbaas.network, lbSecGroupRuleCreateOpts).Extract()

		if err != nil {
			return fmt.Errorf("error occurred creating rule for SecGroup %s: %v", lbSecGroup.ID, err)
		}

		lbSecGroupRuleCreateOpts = rules.CreateOpts{
			Direction:      rules.DirIngress,
			PortRangeMax:   0, // ICMP: Code - Values for ICMP "Packet Too Big"
			PortRangeMin:   2, // ICMP: Type
			Protocol:       rules.ProtocolICMP,
			RemoteIPPrefix: "::/0", // The Fragmentation packet can come from anywhere along the path back to the sourceRange - we need to allow this from all sources (IPv6 equivalent of the rule above)
			SecGroupID:     lbSecGroup.ID,
			EtherType:      rules.EtherType6,
		}

		_, err = rules.Create(lbaas.network, lbSecGroupRuleCreateOpts).Extract()

		if err != nil {
			return fmt.Errorf("error occurred creating rule for SecGroup %s: %v", lbSecGroup.ID, err)
		}

		// get security groups of port
		portID := loadbalancer.VipPortID
		port, err := getPortByID(lbaas.network, portID)
		if err != nil {
			return err
		}

		// ensure the vip port has the security groups
		found := false
		for _, portSecurityGroups := range port.SecurityGroups {
			if portSecurityGroups == lbSecGroup.ID {
				found = true
				break
			}
		}

		// update loadbalancer vip port
		if !found {
			port.SecurityGroups = append(port.SecurityGroups, lbSecGroup.ID)
			updateOpts := neutronports.UpdateOpts{SecurityGroups: &port.SecurityGroups}
			res := neutronports.Update(lbaas.network, portID, updateOpts)
			if res.Err != nil {
				msg := fmt.Sprintf("Error occurred updating port %s for loadbalancer service %s/%s: %v", portID, apiService.Namespace, apiService.Name, res.Err)
				return fmt.Errorf(msg)
			}
		}
	}

	// ensure rules for every node security group
	for _, port := range ports {
		for _, nodeSecurityGroupID := range lbaas.opts.NodeSecurityGroupIDs {
			opts := rules.ListOpts{
				Direction:     string(rules.DirIngress),
				SecGroupID:    nodeSecurityGroupID,
				RemoteGroupID: lbSecGroupID,
				PortRangeMax:  int(port.NodePort),
				PortRangeMin:  int(port.NodePort),
				Protocol:      string(port.Protocol),
			}
			secGroupRules, err := getSecurityGroupRules(lbaas.network, opts)
			if err != nil && !isNotFound(err) {
				msg := fmt.Sprintf("Error finding rules for remote group id %s in security group id %s: %v", lbSecGroupID, nodeSecurityGroupID, err)
				return fmt.Errorf(msg)
			}
			if len(secGroupRules) != 0 {
				// Do not add rule when find rules for remote group in the Node Security Group
				continue
			}

			// Add the rules in the Node Security Group
			err = createNodeSecurityGroup(lbaas.network, nodeSecurityGroupID, int(port.NodePort), port.Protocol, lbSecGroupID)
			if err != nil {
				return fmt.Errorf("error occurred creating security group for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
			}
		}
	}

	return nil
}

// UpdateLoadBalancer updates hosts under the specified load balancer.
func (lbaas *LbaasV2) UpdateLoadBalancer(ctx context.Context, clusterName string, service *v1.Service, nodes []*v1.Node) error {
	loadBalancerName := cloudprovider.GetLoadBalancerName(service)
	glog.V(4).Infof("UpdateLoadBalancer(%v, %v, %v)", clusterName, loadBalancerName, nodes)

	if len(lbaas.opts.SubnetID) == 0 && len(nodes) > 0 {
		// Get SubnetID automatically.
		// The LB needs to be configured with instance addresses on the same subnet, so get SubnetID by one node.
		subnetID, err := getSubnetIDForLB(lbaas.compute, *nodes[0])
		if err != nil {
			glog.Warningf("Failed to find subnet-id for loadbalancer service %s/%s: %v", service.Namespace, service.Name, err)
			return fmt.Errorf("no subnet-id for service %s/%s : subnet-id not set in cloud provider config, "+
				"and failed to find subnet-id from OpenStack: %v", service.Namespace, service.Name, err)
		}
		lbaas.opts.SubnetID = subnetID
	}

	ports := service.Spec.Ports
	if len(ports) == 0 {
		return fmt.Errorf("no ports provided to openstack load balancer")
	}

	loadbalancer, err := getLoadbalancerByName(lbaas.lb, loadBalancerName)
	if err != nil {
		return err
	}
	if loadbalancer == nil {
		return fmt.Errorf("loadbalancer %s does not exist", loadBalancerName)
	}

	// Get all listeners for this loadbalancer, by "port key".
	type portKey struct {
		Protocol listeners.Protocol
		Port     int
	}
	var listenerIDs []string
	lbListeners := make(map[portKey]listeners.Listener)
	allListeners, err := getListenersByLoadBalancerID(lbaas.lb, loadbalancer.ID)
	if err != nil {
		return fmt.Errorf("error getting listeners for LB %s: %v", loadBalancerName, err)
	}
	for _, l := range allListeners {
		key := portKey{Protocol: listeners.Protocol(l.Protocol), Port: l.ProtocolPort}
		lbListeners[key] = l
		listenerIDs = append(listenerIDs, l.ID)
	}

	// Get all pools for this loadbalancer, by listener ID.
	lbPools := make(map[string]v2pools.Pool)
	for _, listenerID := range listenerIDs {
		pool, err := getPoolByListenerID(lbaas.lb, loadbalancer.ID, listenerID)
		if err != nil {
			return fmt.Errorf("error getting pool for listener %s: %v", listenerID, err)
		}
		lbPools[listenerID] = *pool
	}

	// Compose Set of member (addresses) that _should_ exist
	addrs := map[string]empty{}
	for _, node := range nodes {
		addr, err := nodeAddressForLB(node)
		if err != nil {
			return err
		}
		addrs[addr] = empty{}
	}

	// Check for adding/removing members associated with each port
	for _, port := range ports {
		// Get listener associated with this port
		listener, ok := lbListeners[portKey{
			Protocol: toListenersProtocol(port.Protocol),
			Port:     int(port.Port),
		}]
		if !ok {
			return fmt.Errorf("loadbalancer %s does not contain required listener for port %d and protocol %s", loadBalancerName, port.Port, port.Protocol)
		}

		// Get pool associated with this listener
		pool, ok := lbPools[listener.ID]
		if !ok {
			return fmt.Errorf("loadbalancer %s does not contain required pool for listener %s", loadBalancerName, listener.ID)
		}

		// Find existing pool members (by address) for this port
		getMembers, err := getMembersByPoolID(lbaas.lb, pool.ID)
		if err != nil {
			return fmt.Errorf("error getting pool members %s: %v", pool.ID, err)
		}
		members := make(map[string]v2pools.Member)
		for _, member := range getMembers {
			members[member.Address] = member
		}

		// Add any new members for this port
		for addr := range addrs {
			if _, ok := members[addr]; ok && members[addr].ProtocolPort == int(port.NodePort) {
				// Already exists, do not create member
				continue
			}
			_, err := v2pools.CreateMember(lbaas.lb, pool.ID, v2pools.CreateMemberOpts{
				Address:      addr,
				ProtocolPort: int(port.NodePort),
				SubnetID:     lbaas.opts.SubnetID,
			}).Extract()
			if err != nil {
				return err
			}
			// Each mutation must wait for the LB to return to ACTIVE
			// before the next API call is issued.
			provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
			if err != nil {
				return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
			}
		}

		// Remove any old members for this port
		for _, member := range members {
			if _, ok := addrs[member.Address]; ok && member.ProtocolPort == int(port.NodePort) {
				// Still present, do not delete member
				continue
			}
			err = v2pools.DeleteMember(lbaas.lb, pool.ID, member.ID).ExtractErr()
			if err != nil && !isNotFound(err) {
				return err
			}
			provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
			if err != nil {
				return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
			}
		}
	}

	if lbaas.opts.ManageSecurityGroups {
		err := lbaas.updateSecurityGroup(clusterName, service, nodes, loadbalancer)
		if err != nil {
			return fmt.Errorf("failed to update Security Group for loadbalancer service %s/%s: %v", service.Namespace, service.Name, err)
		}
	}

	return nil
}

// updateSecurityGroup updating security group for specific loadbalancer service.
func (lbaas *LbaasV2) updateSecurityGroup(clusterName string, apiService *v1.Service, nodes []*v1.Node, loadbalancer *loadbalancers.LoadBalancer) error {
	originalNodeSecurityGroupIDs := lbaas.opts.NodeSecurityGroupIDs

	var err error
	lbaas.opts.NodeSecurityGroupIDs, err = getNodeSecurityGroupIDForLB(lbaas.compute, lbaas.network, nodes)
	if err != nil {
		return fmt.Errorf("failed to find node-security-group for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
	}
	glog.V(4).Infof("find node-security-group %v for loadbalancer service %s/%s", lbaas.opts.NodeSecurityGroupIDs, apiService.Namespace, apiService.Name)

	// "removals" are node security groups that were previously tracked but
	// are no longer attached to any current node; their LB rules get deleted.
	original := sets.NewString(originalNodeSecurityGroupIDs...)
	current := sets.NewString(lbaas.opts.NodeSecurityGroupIDs...)
	removals := original.Difference(current)

	// Generate Name
	lbSecGroupName := getSecurityGroupName(apiService)
	lbSecGroupID, err := groups.IDFromName(lbaas.network, lbSecGroupName)
	if err != nil {
		return fmt.Errorf("error occurred finding security group: %s: %v", lbSecGroupName, err)
	}

	ports := apiService.Spec.Ports
	if len(ports) == 0 {
		return fmt.Errorf("no ports provided to openstack load balancer")
	}

	for _, port := range ports {
		for removal := range removals {
			// Delete the rules in the Node Security Group
			opts := rules.ListOpts{
				Direction:     string(rules.DirIngress),
				SecGroupID:    removal,
				RemoteGroupID: lbSecGroupID,
				PortRangeMax:  int(port.NodePort),
				PortRangeMin:  int(port.NodePort),
				Protocol:      string(port.Protocol),
			}
			secGroupRules, err := getSecurityGroupRules(lbaas.network, opts)
			if err != nil && !isNotFound(err) {
				return fmt.Errorf("error finding rules for remote group id %s in security group id %s: %v", lbSecGroupID, removal, err)
			}

			for _, rule := range secGroupRules {
				res := rules.Delete(lbaas.network, rule.ID)
				if res.Err != nil && !isNotFound(res.Err) {
					return fmt.Errorf("error occurred deleting security group rule: %s: %v", rule.ID, res.Err)
				}
			}
		}

		for _, nodeSecurityGroupID := range lbaas.opts.NodeSecurityGroupIDs {
			opts := rules.ListOpts{
				Direction:     string(rules.DirIngress),
				SecGroupID:    nodeSecurityGroupID,
				RemoteGroupID: lbSecGroupID,
				PortRangeMax:  int(port.NodePort),
				PortRangeMin:  int(port.NodePort),
				Protocol:      string(port.Protocol),
			}
			secGroupRules, err := getSecurityGroupRules(lbaas.network, opts)
			if err != nil && !isNotFound(err) {
				return fmt.Errorf("error finding rules for remote group id %s in security group id %s: %v", lbSecGroupID, nodeSecurityGroupID, err)
			}
			if len(secGroupRules) != 0 {
				// Do not add rule when find rules for remote group in the Node Security Group
				continue
			}

			// Add the rules in the Node Security Group
			err = createNodeSecurityGroup(lbaas.network, nodeSecurityGroupID, int(port.NodePort), port.Protocol, lbSecGroupID)
			if err != nil {
				return fmt.Errorf("error occurred creating security group for loadbalancer service %s/%s: %v", apiService.Namespace, apiService.Name, err)
			}
		}
	}

	return nil
}

// EnsureLoadBalancerDeleted deletes the specified load balancer
func (lbaas *LbaasV2) EnsureLoadBalancerDeleted(ctx context.Context, clusterName string, service *v1.Service) error {
	loadBalancerName := cloudprovider.GetLoadBalancerName(service)
	glog.V(4).Infof("EnsureLoadBalancerDeleted(%v, %v)", clusterName, loadBalancerName)

	loadbalancer, err := getLoadbalancerByName(lbaas.lb, loadBalancerName)
	if err != nil && err != ErrNotFound {
		return err
	}
	if loadbalancer == nil {
		// Already gone; deletion is idempotent.
		return nil
	}

	if loadbalancer.VipPortID != "" {
		portID := loadbalancer.VipPortID

		floatingIP, err := getFloatingIPByPortID(lbaas.network, portID)
		if err != nil && err != ErrNotFound {
			return err
		}
		if floatingIP != nil {
			err = floatingips.Delete(lbaas.network, floatingIP.ID).ExtractErr()
			if err != nil && !isNotFound(err) {
				return err
			}
		}
	}

	// get all listeners associated with this loadbalancer
	listenerList, err := getListenersByLoadBalancerID(lbaas.lb, loadbalancer.ID)
	if err != nil {
		return fmt.Errorf("error getting LB %s listeners: %v", loadbalancer.ID, err)
	}

	// get all pools (and health monitors) associated with this loadbalancer
	var poolIDs []string
	var monitorIDs []string
	for _, listener := range listenerList {
		pool, err := getPoolByListenerID(lbaas.lb, loadbalancer.ID, listener.ID)
		if err != nil && err != ErrNotFound {
			return fmt.Errorf("error getting pool for listener %s: %v", listener.ID, err)
		}
		if pool != nil {
			poolIDs = append(poolIDs, pool.ID)
			// If create-monitor of cloud-config is false, pool has not monitor.
			if pool.MonitorID != "" {
				monitorIDs = append(monitorIDs, pool.MonitorID)
			}
		}
	}

	// get all members associated with each poolIDs
	var memberIDs []string
	for _, pool := range poolIDs {
		membersList, err := getMembersByPoolID(lbaas.lb, pool)
		if err != nil && !isNotFound(err) {
			return fmt.Errorf("error getting pool members %s: %v", pool, err)
		}
		for _, member := range membersList {
			memberIDs = append(memberIDs, member.ID)
		}
	}

	// delete all monitors
	for _, monitorID := range monitorIDs {
		err := v2monitors.Delete(lbaas.lb, monitorID).ExtractErr()
		if err != nil && !isNotFound(err) {
			return err
		}
		provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
		if err != nil {
			return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
		}
	}

	// delete all members and pools
	// NOTE(review): memberIDs is flattened across ALL pools, so every member
	// ID is attempted against every pool; mismatches are tolerated via
	// isNotFound. Looks intentional-but-wasteful — confirm before changing.
	for _, poolID := range poolIDs {
		// delete all members for this pool
		for _, memberID := range memberIDs {
			err := v2pools.DeleteMember(lbaas.lb, poolID, memberID).ExtractErr()
			if err != nil && !isNotFound(err) {
				return err
			}
			provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
			if err != nil {
				return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
			}
		}

		// delete pool
		err := v2pools.Delete(lbaas.lb, poolID).ExtractErr()
		if err != nil && !isNotFound(err) {
			return err
		}
		provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
		if err != nil {
			return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
		}
	}

	// delete all listeners
	for _, listener := range listenerList {
		err := listeners.Delete(lbaas.lb, listener.ID).ExtractErr()
		if err != nil && !isNotFound(err) {
			return err
		}
		provisioningStatus, err := waitLoadbalancerActiveProvisioningStatus(lbaas.lb, loadbalancer.ID)
		if err != nil {
			return fmt.Errorf("failed to loadbalance ACTIVE provisioning status %v: %v", provisioningStatus, err)
		}
	}

	// delete loadbalancer
	err = loadbalancers.Delete(lbaas.lb, loadbalancer.ID).ExtractErr()
	if err != nil && !isNotFound(err) {
		return err
	}
	err = waitLoadbalancerDeleted(lbaas.lb, loadbalancer.ID)
	if err != nil {
		return fmt.Errorf("failed to delete loadbalancer: %v", err)
	}

	// Delete the Security Group
	if lbaas.opts.ManageSecurityGroups {
		err := lbaas.EnsureSecurityGroupDeleted(clusterName, service)
		if err != nil {
			return fmt.Errorf("Failed to delete Security Group for loadbalancer service %s/%s: %v", service.Namespace, service.Name, err)
		}
	}

	return nil
}

// EnsureSecurityGroupDeleted deleting security group for specific loadbalancer service.
func (lbaas *LbaasV2) EnsureSecurityGroupDeleted(clusterName string, service *v1.Service) error {
	// Generate Name
	lbSecGroupName := getSecurityGroupName(service)
	lbSecGroupID, err := groups.IDFromName(lbaas.network, lbSecGroupName)
	if err != nil {
		if isSecurityGroupNotFound(err) {
			// It is OK when the security group has been deleted by others.
			return nil
		}
		return fmt.Errorf("Error occurred finding security group: %s: %v", lbSecGroupName, err)
	}

	lbSecGroup := groups.Delete(lbaas.network, lbSecGroupID)
	if lbSecGroup.Err != nil && !isNotFound(lbSecGroup.Err) {
		return lbSecGroup.Err
	}

	if len(lbaas.opts.NodeSecurityGroupIDs) == 0 {
		// Just happen when nodes have not Security Group, or should not happen
		// UpdateLoadBalancer and EnsureLoadBalancer can set lbaas.opts.NodeSecurityGroupIDs when it is empty
		// And service controller call UpdateLoadBalancer to set lbaas.opts.NodeSecurityGroupIDs when controller manager service is restarted.
		glog.Warningf("Can not find node-security-group from all the nodes of this cluster when delete loadbalancer service %s/%s", service.Namespace, service.Name)
	} else {
		// Delete the rules in the Node Security Group
		for _, nodeSecurityGroupID := range lbaas.opts.NodeSecurityGroupIDs {
			opts := rules.ListOpts{
				SecGroupID:    nodeSecurityGroupID,
				RemoteGroupID: lbSecGroupID,
			}
			secGroupRules, err := getSecurityGroupRules(lbaas.network, opts)
			if err != nil && !isNotFound(err) {
				msg := fmt.Sprintf("Error finding rules for remote group id %s in security group id %s: %v", lbSecGroupID, nodeSecurityGroupID, err)
				return fmt.Errorf(msg)
			}

			for _, rule := range secGroupRules {
				res := rules.Delete(lbaas.network, rule.ID)
				if res.Err != nil && !isNotFound(res.Err) {
					return fmt.Errorf("Error occurred deleting security group rule: %s: %v", rule.ID, res.Err)
				}
			}
		}
	}

	return nil
}
{'content_hash': 'a1e10a052ae4cb7630f364a93e95423e', 'timestamp': '', 'source': 'github', 'line_count': 1551, 'max_line_length': 204, 'avg_line_length': 35.032882011605416, 'alnum_prop': 0.714425058892815, 'repo_name': 'kjvalencik/kubernetes', 'id': '9e170deee32b67737352821a3a1bac16b85c0a59', 'size': '54905', 'binary': False, 'copies': '1', 'ref': 'refs/heads/auth0', 'path': 'pkg/cloudprovider/providers/openstack/openstack_loadbalancer.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '2840'}, {'name': 'Dockerfile', 'bytes': '54785'}, {'name': 'Go', 'bytes': '41537570'}, {'name': 'HTML', 'bytes': '1199467'}, {'name': 'Makefile', 'bytes': '78700'}, {'name': 'Python', 'bytes': '2747876'}, {'name': 'Ruby', 'bytes': '1780'}, {'name': 'Shell', 'bytes': '1421716'}, {'name': 'sed', 'bytes': '10263'}]}
package org.gradle.internal.component.external.model;

import org.gradle.api.internal.artifacts.ivyservice.NamespaceId;

import java.util.Map;

/**
 * Meta-data for a module version resolved from an Ivy repository.
 */
public interface IvyModuleResolveMetaData extends ModuleComponentResolveMetaData {
    /**
     * Returns the branch attribute for the module.
     *
     * @return the branch attribute for the module
     */
    String getBranch();

    /**
     * Returns the extra info for the module, as declared in the Ivy descriptor,
     * keyed by the namespaced id of each extra-info element.
     *
     * @return the extra info for the module
     */
    Map<NamespaceId, String> getExtraInfo();
}
{'content_hash': 'ee9199a384e21e11a9a42afee8935bdc', 'timestamp': '', 'source': 'github', 'line_count': 25, 'max_line_length': 82, 'avg_line_length': 24.64, 'alnum_prop': 0.6948051948051948, 'repo_name': 'Artificial-Engineering/lycheeJS', 'id': '3b2831a6fa8dde8bdf1a15a7cec4cba998d5870b', 'size': '1231', 'binary': False, 'copies': '6', 'ref': 'refs/heads/development', 'path': 'lycheejs/bin/runtime/html-webview/android-toolchain/gradle/src/dependency-management/org/gradle/internal/component/external/model/IvyModuleResolveMetaData.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'AppleScript', 'bytes': '258'}, {'name': 'CSS', 'bytes': '21259'}, {'name': 'HTML', 'bytes': '56420'}, {'name': 'JavaScript', 'bytes': '1206074'}, {'name': 'Shell', 'bytes': '38111'}, {'name': 'Smarty', 'bytes': '5714'}]}
package com.thoughtworks.go.domain;

import static com.thoughtworks.go.config.RunIfConfig.PASSED;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import com.thoughtworks.go.config.RunIfConfig;
import org.junit.Test;

/**
 * Tests for {@link RunIfConfigs}: which job results a run-if configuration
 * matches, and how validation errors propagate to the collection and its
 * individual {@link RunIfConfig} entries.
 */
public class RunIfConfigsTest {
    @Test
    public void shouldMatchWhenContainsCondition() {
        // A config holding only PASSED matches a passed result and nothing else.
        RunIfConfigs configs = new RunIfConfigs(PASSED);
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Passed.toLowerCase())), is(true));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Failed.toLowerCase())), is(false));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Cancelled.toLowerCase())), is(false));
    }

    @Test
    public void shouldMatchAnyWhenAnyIsDefined() {
        // ANY matches every terminal job result.
        RunIfConfigs configs = new RunIfConfigs(RunIfConfig.ANY);
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Passed.toLowerCase())), is(true));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Failed.toLowerCase())), is(true));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Cancelled.toLowerCase())), is(true));
    }

    @Test
    public void testOnlyMatchPassedWhenNoneIsDefined() {
        // An empty collection defaults to matching only passed results.
        RunIfConfigs configs = new RunIfConfigs();
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Passed.toLowerCase())), is(true));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Failed.toLowerCase())), is(false));
        assertThat(configs.match(RunIfConfig.fromJobResult(JobResult.Cancelled.toLowerCase())), is(false));
    }

    @Test
    public void shouldAddErrorsToErrorCollectionOfTheCollectionAsWellAsEachRunIfConfig() {
        // Errors added to a contained config and to the collection itself are
        // both retrievable from their respective error collections.
        RunIfConfigs configs = new RunIfConfigs();
        RunIfConfig config = new RunIfConfig("passed");
        config.addError("status", "some error");
        configs.add(config);
        configs.addError("key", "some error");
        assertThat(configs.errors().on("key"), is("some error"));
        assertThat(configs.get(0).errors().on("status"), is("some error"));
    }
}
{'content_hash': '9918a938817d3dda1d9075d081ba352e', 'timestamp': '', 'source': 'github', 'line_count': 48, 'max_line_length': 107, 'avg_line_length': 43.875, 'alnum_prop': 0.7241215574548908, 'repo_name': 'naveenbhaskar/gocd', 'id': '70bdd4c27691bbdeaf7d1d90517062a0ac8b3d7b', 'size': '2707', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'common/src/test/java/com/thoughtworks/go/domain/RunIfConfigsTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '6804'}, {'name': 'CSS', 'bytes': '757466'}, {'name': 'FreeMarker', 'bytes': '8662'}, {'name': 'Groovy', 'bytes': '1683044'}, {'name': 'HTML', 'bytes': '718498'}, {'name': 'Java', 'bytes': '20573725'}, {'name': 'JavaScript', 'bytes': '3000785'}, {'name': 'NSIS', 'bytes': '16898'}, {'name': 'PLSQL', 'bytes': '2984'}, {'name': 'PLpgSQL', 'bytes': '6074'}, {'name': 'PowerShell', 'bytes': '768'}, {'name': 'Ruby', 'bytes': '2638300'}, {'name': 'SQLPL', 'bytes': '9330'}, {'name': 'Shell', 'bytes': '186387'}, {'name': 'TypeScript', 'bytes': '1471077'}, {'name': 'XSLT', 'bytes': '183239'}]}
package com.navercorp.pinpoint.profiler.monitor.metric;

import com.navercorp.pinpoint.common.trace.ServiceType;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Registry holding one {@link RpcMetric} per statistics-recording service type,
 * plus a single {@link ContextMetric} for the owning WAS service type.
 *
 * @author emeroad
 */
public class MetricRegistry {

    // Keyed by ServiceType code; concurrent so racing threads can register
    // the same code safely via putIfAbsent.
    private final ConcurrentMap<Short, RpcMetric> rpcCache = new ConcurrentHashMap<Short, RpcMetric>();

    private final ContextMetric contextMetric;

    /**
     * @param serviceType the WAS service type tracked by this registry; must be non-null
     * @throws NullPointerException     if {@code serviceType} is null
     * @throws IllegalArgumentException if {@code serviceType} is not a WAS type
     */
    public MetricRegistry(ServiceType serviceType) {
        if (serviceType == null) {
            throw new NullPointerException("serviceType must not be null");
        }
        if (!serviceType.isWas()) {
            throw new IllegalArgumentException("illegal serviceType:" + serviceType);
        }
        this.contextMetric = new ContextMetric(serviceType);
    }

    /**
     * Returns the metric registered for the given service type, lazily
     * creating and caching one on first use.
     *
     * @throws NullPointerException     if {@code serviceType} is null
     * @throws IllegalArgumentException if {@code serviceType} does not record statistics
     */
    public RpcMetric getRpcMetric(ServiceType serviceType) {
        if (serviceType == null) {
            throw new NullPointerException("serviceType must not be null");
        }
        if (!serviceType.isRecordStatistics()) {
            throw new IllegalArgumentException("illegal serviceType:" + serviceType);
        }

        final Short code = serviceType.getCode();
        RpcMetric metric = rpcCache.get(code);
        if (metric == null) {
            final RpcMetric candidate = new DefaultRpcMetric(serviceType);
            final RpcMetric raced = rpcCache.putIfAbsent(code, candidate);
            // If another thread registered first, keep its instance; otherwise ours.
            metric = (raced != null) ? raced : candidate;
        }
        return metric;
    }

    public ContextMetric getResponseMetric() {
        return contextMetric;
    }

    public void addResponseTime(int mills, boolean error) {
        this.contextMetric.addResponseTime(mills, error);
    }

    /**
     * Flattens a snapshot of every cached RPC metric into a single collection.
     */
    public Collection<HistogramSnapshot> createRpcResponseSnapshot() {
        final List<HistogramSnapshot> snapshots = new ArrayList<HistogramSnapshot>(16);
        for (RpcMetric rpcMetric : rpcCache.values()) {
            snapshots.addAll(rpcMetric.createSnapshotList());
        }
        return snapshots;
    }

    public HistogramSnapshot createWasResponseSnapshot() {
        // Not implemented; callers currently receive null.
        return null;
    }
}
{'content_hash': '36f156de159dc2eaae96846cfa4986e1', 'timestamp': '', 'source': 'github', 'line_count': 74, 'max_line_length': 103, 'avg_line_length': 30.405405405405407, 'alnum_prop': 0.6706666666666666, 'repo_name': 'sbcoba/pinpoint', 'id': '5f2fdcb76ae8928a5b0e1f3ee66c271480376f4d', 'size': '2844', 'binary': False, 'copies': '12', 'ref': 'refs/heads/master', 'path': 'profiler/src/main/java/com/navercorp/pinpoint/profiler/monitor/metric/MetricRegistry.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '22853'}, {'name': 'CSS', 'bytes': '136502'}, {'name': 'CoffeeScript', 'bytes': '10124'}, {'name': 'Groovy', 'bytes': '1423'}, {'name': 'HTML', 'bytes': '465721'}, {'name': 'Java', 'bytes': '6641636'}, {'name': 'JavaScript', 'bytes': '3147748'}, {'name': 'Makefile', 'bytes': '5246'}, {'name': 'PLSQL', 'bytes': '4156'}, {'name': 'Python', 'bytes': '3523'}, {'name': 'Ruby', 'bytes': '943'}, {'name': 'Shell', 'bytes': '32073'}, {'name': 'Thrift', 'bytes': '7081'}]}
package ca.corefacility.bioinformatics.irida.ria.web.components.agGrid;

/**
 * AgGrid column is used to represent a generic AgGrid Column.
 *
 * This is a plain data holder whose fields mirror the ag-Grid JavaScript
 * column-definition properties; it is serialized for the UI grid.
 *
 * @see <a href="https://www.ag-grid.com/javascript-grid-column-properties/">Column Properties</a>
 */
public class AgGridColumn {
	/**
	 * The name to render in the column header.
	 */
	private String headerName;

	/**
	 * The field of the row to get the cells data from.
	 */
	private String field;

	/**
	 * Which type of column to render (date, text, etc...)
	 * TODO: Convert this into an enum?
	 */
	private String type;

	/**
	 * Set to true to hide this column initially.
	 */
	private boolean hide;

	/**
	 * Set to true to make column editable.
	 */
	private boolean editable;

	/**
	 * Set 'left' or 'right' to pin that side of the table.
	 */
	private String pinned;

	/**
	 * Set to true to always have column displayed first.
	 */
	private boolean lockPosition;

	/**
	 * Set to true to block pinning column via the UI.
	 */
	private boolean lockPinned;

	/**
	 * Set to true to render a selection checkbox in the column.
	 */
	private boolean checkboxSelection;

	/**
	 * Set to true to render a select all / none checkbox in the column header.
	 */
	private boolean headerCheckboxSelection;

	/**
	 * Type of column filter.
	 */
	private String filter;

	/**
	 * Suppress the ability to resize this column.
	 */
	private boolean resizable;

	/**
	 * Set to 'asc' or 'desc' to sort by this column by default.
	 */
	private String sort;

	/**
	 * No-argument constructor (e.g. for serialization frameworks).
	 *
	 * NOTE(review): unlike the full constructor below, this leaves
	 * {@code resizable} as false — confirm whether that asymmetry is intended.
	 */
	public AgGridColumn() {
	}

	/**
	 * Create a column header for a UI Ag Grid instance
	 *
	 * @param headerName {@link String} the text to display in the column header
	 * @param field      {@link String} the key to the row data
	 * @param type       {@link String} the type of column (date, text)
	 * @param hide       {@link Boolean} whether the column is visible or not
	 * @param editable   {@link Boolean} whether the contents of the cells in the column are editable.
	 */
	public AgGridColumn(String headerName, String field, String type, boolean hide, boolean editable) {
		this.headerName = headerName;
		this.field = field;
		this.type = type;
		this.hide = hide;
		this.editable = editable;
		// Default to be resizable unless explicitly set.
		this.resizable = true;
	}

	public String getField() {
		return field;
	}

	public String getHeaderName() {
		return headerName;
	}

	public String getType() {
		return type;
	}

	public void setHide(boolean hide) {
		this.hide = hide;
	}

	public boolean isHide() {
		return hide;
	}

	public boolean isEditable() {
		return editable;
	}

	public String getPinned() {
		return pinned;
	}

	public void setPinned(String position) {
		this.pinned = position;
	}

	public boolean isLockPosition() {
		return lockPosition;
	}

	public void setLockPosition(boolean lockPosition) {
		this.lockPosition = lockPosition;
	}

	public boolean isLockPinned() {
		return lockPinned;
	}

	public void setLockPinned(boolean lockPinned) {
		this.lockPinned = lockPinned;
	}

	public String getSort() {
		return sort;
	}

	public void setSort(String sort) {
		this.sort = sort;
	}

	public boolean isCheckboxSelection() {
		return checkboxSelection;
	}

	public void setCheckboxSelection(boolean checkboxSelection) {
		this.checkboxSelection = checkboxSelection;
	}

	public boolean isHeaderCheckboxSelection() {
		return headerCheckboxSelection;
	}

	public void setHeaderCheckboxSelection(boolean headerCheckboxSelection) {
		this.headerCheckboxSelection = headerCheckboxSelection;
	}

	public String getFilter() {
		return filter;
	}

	public void setFilter(String filter) {
		this.filter = filter;
	}

	public boolean isResizable() {
		return resizable;
	}

	public void setResizable(boolean resizable) {
		this.resizable = resizable;
	}
}
{'content_hash': 'f9b20ac36b3d9ada46b7abb53f120033', 'timestamp': '', 'source': 'github', 'line_count': 184, 'max_line_length': 100, 'avg_line_length': 20.48913043478261, 'alnum_prop': 0.6978779840848807, 'repo_name': 'phac-nml/irida', 'id': '10b7159562ad2414bdbd3621ab55aa13e0e963b4', 'size': '3770', 'binary': False, 'copies': '1', 'ref': 'refs/heads/development', 'path': 'src/main/java/ca/corefacility/bioinformatics/irida/ria/web/components/agGrid/AgGridColumn.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '22180'}, {'name': 'Dockerfile', 'bytes': '4941'}, {'name': 'HTML', 'bytes': '44649'}, {'name': 'Java', 'bytes': '5071675'}, {'name': 'JavaScript', 'bytes': '941290'}, {'name': 'Kotlin', 'bytes': '19144'}, {'name': 'Perl', 'bytes': '6980'}, {'name': 'Python', 'bytes': '8985'}, {'name': 'Shell', 'bytes': '13029'}, {'name': 'TypeScript', 'bytes': '344558'}]}
inline void *getDFSanArgTLSPtrForJIT() { extern __thread __attribute__((tls_model("initial-exec"))) void *__dfsan_arg_tls; return (void *)&__dfsan_arg_tls; } inline void *getDFSanRetValTLSPtrForJIT() { extern __thread __attribute__((tls_model("initial-exec"))) void *__dfsan_retval_tls; return (void *)&__dfsan_retval_tls; } #endif namespace llvm { class ModulePass; class FunctionPass; // Insert GCOV profiling instrumentation struct GCOVOptions { static GCOVOptions getDefault(); // Specify whether to emit .gcno files. bool EmitNotes; // Specify whether to modify the program to emit .gcda files when run. bool EmitData; // A four-byte version string. The meaning of a version string is described in // gcc's gcov-io.h char Version[4]; // Emit a "cfg checksum" that follows the "line number checksum" of a // function. This affects both .gcno and .gcda files. bool UseCfgChecksum; // Add the 'noredzone' attribute to added runtime library calls. bool NoRedZone; // Emit the name of the function in the .gcda files. This is redundant, as // the function identifier can be used to find the name from the .gcno file. 
bool FunctionNamesInData; }; ModulePass *createGCOVProfilerPass(const GCOVOptions &Options = GCOVOptions::getDefault()); // Insert AddressSanitizer (address sanity checking) instrumentation FunctionPass *createAddressSanitizerFunctionPass( bool CheckInitOrder = true, bool CheckUseAfterReturn = false, bool CheckLifetime = false, StringRef BlacklistFile = StringRef(), bool ZeroBaseShadow = false); ModulePass *createAddressSanitizerModulePass( bool CheckInitOrder = true, StringRef BlacklistFile = StringRef(), bool ZeroBaseShadow = false); // Insert MemorySanitizer instrumentation (detection of uninitialized reads) FunctionPass *createMemorySanitizerPass(bool TrackOrigins = false, StringRef BlacklistFile = StringRef()); // Insert ThreadSanitizer (race detection) instrumentation FunctionPass *createThreadSanitizerPass(StringRef BlacklistFile = StringRef()); // Insert DataFlowSanitizer (dynamic data flow analysis) instrumentation ModulePass *createDataFlowSanitizerPass(StringRef ABIListFile = StringRef(), void *(*getArgTLS)() = 0, void *(*getRetValTLS)() = 0); #if defined(__GNUC__) && defined(__linux__) inline ModulePass *createDataFlowSanitizerPassForJIT(StringRef ABIListFile = StringRef()) { return createDataFlowSanitizerPass(ABIListFile, getDFSanArgTLSPtrForJIT, getDFSanRetValTLSPtrForJIT); } #endif // BoundsChecking - This pass instruments the code to perform run-time bounds // checking on loads, stores, and other memory intrinsics. FunctionPass *createBoundsCheckingPass(); /// createDebugIRPass - Enable interactive stepping through LLVM IR in LLDB (or /// GDB) and generate a file with the LLVM IR to be /// displayed in the debugger. /// /// Existing debug metadata is preserved (but may be modified) in order to allow /// accessing variables in the original source. The line table and file /// information is modified to correspond to the lines in the LLVM IR. If /// Filename and Directory are empty, a file name is generated based on existing /// debug information. 
If no debug information is available, a temporary file /// name is generated. /// /// @param HideDebugIntrinsics Omit debug intrinsics in emitted IR source file. /// @param HideDebugMetadata Omit debug metadata in emitted IR source file. /// @param Directory Embed this directory in the debug information. /// @param Filename Embed this file name in the debug information. ModulePass *createDebugIRPass(bool HideDebugIntrinsics, bool HideDebugMetadata, StringRef Directory = StringRef(), StringRef Filename = StringRef()); /// createDebugIRPass - Enable interactive stepping through LLVM IR in LLDB /// (or GDB) with an existing IR file on disk. When creating /// a DebugIR pass with this function, no source file is /// output to disk and the existing one is unmodified. Debug /// metadata in the Module is created/updated to point to /// the existing textual IR file on disk. /// NOTE: If the IR file to be debugged is not on disk, use the version of this /// function with parameters in order to generate the file that will be /// seen by the debugger. ModulePass *createDebugIRPass(); } // End llvm namespace #endif
{'content_hash': 'a69928f6392317c0e4ef5eff0ca9753e', 'timestamp': '', 'source': 'github', 'line_count': 113, 'max_line_length': 80, 'avg_line_length': 42.83185840707964, 'alnum_prop': 0.677892561983471, 'repo_name': 'tangyibin/goblin-core', 'id': '8a1b34e488be77b069a875ee7cb0dcdff8e71d45', 'size': '5502', 'binary': False, 'copies': '10', 'ref': 'refs/heads/master', 'path': 'llvm/3.4.2/llvm-3.4.2.src/include/llvm/Transforms/Instrumentation.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'AppleScript', 'bytes': '1429'}, {'name': 'Assembly', 'bytes': '37219664'}, {'name': 'Awk', 'bytes': '1296'}, {'name': 'Bison', 'bytes': '769886'}, {'name': 'C', 'bytes': '121618095'}, {'name': 'C#', 'bytes': '12418'}, {'name': 'C++', 'bytes': '125510142'}, {'name': 'CMake', 'bytes': '708668'}, {'name': 'CSS', 'bytes': '43924'}, {'name': 'Cuda', 'bytes': '12393'}, {'name': 'D', 'bytes': '23091496'}, {'name': 'DTrace', 'bytes': '8533449'}, {'name': 'E', 'bytes': '3290'}, {'name': 'Eiffel', 'bytes': '2314'}, {'name': 'Elixir', 'bytes': '314'}, {'name': 'Emacs Lisp', 'bytes': '41146'}, {'name': 'FORTRAN', 'bytes': '377751'}, {'name': 'Forth', 'bytes': '4188'}, {'name': 'GAP', 'bytes': '21991'}, {'name': 'GDScript', 'bytes': '54941'}, {'name': 'Gnuplot', 'bytes': '446'}, {'name': 'Groff', 'bytes': '940592'}, {'name': 'HTML', 'bytes': '1118040'}, {'name': 'JavaScript', 'bytes': '24233'}, {'name': 'LLVM', 'bytes': '48362057'}, {'name': 'M', 'bytes': '2548'}, {'name': 'Makefile', 'bytes': '5469249'}, {'name': 'Mathematica', 'bytes': '5497'}, {'name': 'Matlab', 'bytes': '54444'}, {'name': 'Mercury', 'bytes': '1222'}, {'name': 'Nemerle', 'bytes': '141'}, {'name': 'OCaml', 'bytes': '748821'}, {'name': 'Objective-C', 'bytes': '4996482'}, {'name': 'Objective-C++', 'bytes': '1419213'}, {'name': 'Perl', 'bytes': '974117'}, {'name': 'Perl6', 'bytes': '80156'}, {'name': 'Pure Data', 'bytes': '22171'}, {'name': 'Python', 'bytes': '1375992'}, {'name': 'R', 'bytes': '627855'}, 
{'name': 'Rebol', 'bytes': '51929'}, {'name': 'Scheme', 'bytes': '4296232'}, {'name': 'Shell', 'bytes': '2237613'}, {'name': 'Standard ML', 'bytes': '5682'}, {'name': 'SuperCollider', 'bytes': '734239'}, {'name': 'Tcl', 'bytes': '2234'}, {'name': 'TeX', 'bytes': '601780'}, {'name': 'VimL', 'bytes': '26411'}]}
package com.badlogic.gdx.physics.bullet.collision;

import com.google.gwt.core.client.JavaScriptObject;

/**
 * GWT emulation of Bullet's btPairCachingGhostObject, backed by the
 * Ammo.js port ({@code $wnd.Ammo}).
 */
public class btPairCachingGhostObject extends btGhostObject {
	@Override
	public JavaScriptObject createMe() {
		// Delegate to the JSNI factory that instantiates the Ammo.js object.
		return createObj();
	}

	// JSNI: creates the native Ammo.js object and links it back to this
	// Java wrapper via the javaObject property.
	private native JavaScriptObject createObj() /*-{
		var obj = new $wnd.Ammo.btPairCachingGhostObject();
		obj.javaObject = this;
		return obj;
	}-*/;
}
{'content_hash': '0daaac1d883b59e25417bbe6be8490f6', 'timestamp': '', 'source': 'github', 'line_count': 17, 'max_line_length': 59, 'avg_line_length': 24.294117647058822, 'alnum_prop': 0.7239709443099274, 'repo_name': 'xpenatan/gdx-bullet-gwt', 'id': 'd9f18ce833f97e38b00fde5a298fd21470678641', 'size': '413', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'gdx-bullet-gwt/src/com/badlogic/gdx/physics/bullet/gwt/emu/com/badlogic/gdx/physics/bullet/collision/btPairCachingGhostObject.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '287424'}]}
package builders.loom.plugin.java;

import java.io.IOException;
import java.nio.file.Path;
import java.util.List;

/**
 * Cache of source-file state, presumably used to decide whether work on the
 * given sources can be skipped — confirm against implementations.
 */
public interface FileCacher {

    /**
     * Returns whether all of the given source paths are present in the cache.
     *
     * @param srcPaths source files to check
     * @throws IOException if reading the cache fails
     */
    boolean filesCached(List<Path> srcPaths) throws IOException;

    /**
     * Records the given source paths in the cache.
     *
     * @param srcPaths source files to record
     * @throws IOException if writing the cache fails
     */
    void cacheFiles(List<Path> srcPaths) throws IOException;
}
{'content_hash': '987aabff6ee43b27ca13135ae74a8ec5', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 64, 'avg_line_length': 18.533333333333335, 'alnum_prop': 0.7697841726618705, 'repo_name': 'loom-build-tool/loom', 'id': '5d7167c774ad29767648206ac3ea5ce90f533d18', 'size': '893', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'modules/plugin-java/src/main/java/builders/loom/plugin/java/FileCacher.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '1411'}, {'name': 'Java', 'bytes': '630304'}, {'name': 'Shell', 'bytes': '1760'}]}
<?php

// Call Zend_Dojo_View_Helper_SubmitButtonTest::main() if this source file is executed directly.
if (!defined("PHPUnit_MAIN_METHOD")) {
    define("PHPUnit_MAIN_METHOD", "Zend_Dojo_View_Helper_SubmitButtonTest::main");
}

/** Zend_Dojo_View_Helper_SubmitButton */
require_once 'Zend/Dojo/View/Helper/SubmitButton.php';

/** Zend_View */
require_once 'Zend/View.php';

/** Zend_Registry */
require_once 'Zend/Registry.php';

/** Zend_Dojo_View_Helper_Dojo */
require_once 'Zend/Dojo/View/Helper/Dojo.php';

/**
 * Test class for Zend_Dojo_View_Helper_SubmitButton.
 *
 * Verifies both declarative (dojoType attribute in markup) and programmatic
 * (dijit registered with the dojo() view helper) rendering modes.
 *
 * @category   Zend
 * @package    Zend_Dojo
 * @subpackage UnitTests
 * @copyright  Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @group      Zend_Dojo
 * @group      Zend_Dojo_View
 */
class Zend_Dojo_View_Helper_SubmitButtonTest extends PHPUnit_Framework_TestCase
{
    /**
     * Runs the test methods of this class.
     *
     * @return void
     */
    public static function main()
    {
        $suite  = new PHPUnit_Framework_TestSuite("Zend_Dojo_View_Helper_SubmitButtonTest");
        $result = PHPUnit_TextUI_TestRunner::run($suite);
    }

    /**
     * Sets up the fixture, for example, open a network connection.
     * This method is called before a test is executed.
     *
     * @return void
     */
    public function setUp()
    {
        // Reset global state and default to declarative dijit creation.
        Zend_Registry::_unsetInstance();
        Zend_Dojo_View_Helper_Dojo::setUseDeclarative();

        $this->view   = $this->getView();
        $this->helper = new Zend_Dojo_View_Helper_SubmitButton();
        $this->helper->setView($this->view);
    }

    /**
     * Tears down the fixture, for example, close a network connection.
     * This method is called after a test is executed.
     *
     * @return void
     */
    public function tearDown()
    {
    }

    // Builds a view with the Dojo helper path registered.
    public function getView()
    {
        require_once 'Zend/View.php';
        $view = new Zend_View();
        $view->addHelperPath('Zend/Dojo/View/Helper/', 'Zend_Dojo_View_Helper');
        return $view;
    }

    // Renders a submit button with a fixed id and label for the assertions below.
    public function getElement()
    {
        return $this->helper->submitButton(
            'elementId',
            'foo',
            array(),
            array()
        );
    }

    public function testShouldAllowDeclarativeDijitCreation()
    {
        $html = $this->getElement();
        // Declarative mode embeds the dojoType and label directly in the markup.
        $this->assertRegexp('/<input[^>]*(type="submit")/', $html, $html);
        $this->assertRegexp('/<input[^>]*(dojoType="dijit.form.Button")/', $html, $html);
        $this->assertRegexp('/<input[^>]*(label="foo")/', $html, $html);
    }

    public function testShouldAllowProgrammaticDijitCreation()
    {
        Zend_Dojo_View_Helper_Dojo::setUseProgrammatic();
        $html = $this->getElement();
        // Programmatic mode omits the markup attribute and registers the dijit instead.
        $this->assertNotRegexp('/<input[^>]*(dojoType="dijit.form.Button")/', $html);
        $this->assertNotNull($this->view->dojo()->getDijit('elementId'));
    }

    /**
     * @group ZF-4977
     */
    public function testHelperShouldRenderContentKeyAsLabelWhenPassed()
    {
        $html = $this->helper->submitButton('foo', '', array('content' => 'Label'));
        $this->assertRegexp('/<input[^>]*(value="Label")/', $html, $html);
    }
}

// Call Zend_Dojo_View_Helper_SubmitButtonTest::main() if this source file is executed directly.
if (PHPUnit_MAIN_METHOD == "Zend_Dojo_View_Helper_SubmitButtonTest::main") {
    Zend_Dojo_View_Helper_SubmitButtonTest::main();
}
{'content_hash': '0e95cc11534db28b37d7e9af23fb02fc', 'timestamp': '', 'source': 'github', 'line_count': 118, 'max_line_length': 96, 'avg_line_length': 29.720338983050848, 'alnum_prop': 0.6156258910749929, 'repo_name': 'JeancarloPerez/booking-system', 'id': '4bdfae2f137e98676a998263caa045d64365ff2e', 'size': '4222', 'binary': False, 'copies': '22', 'ref': 'refs/heads/master', 'path': 'tests/Zend/Dojo/View/Helper/SubmitButtonTest.php', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'ApacheConf', 'bytes': '566'}, {'name': 'Batchfile', 'bytes': '3330'}, {'name': 'CSS', 'bytes': '7453'}, {'name': 'Groff', 'bytes': '310'}, {'name': 'HTML', 'bytes': '146909'}, {'name': 'JavaScript', 'bytes': '30072'}, {'name': 'PHP', 'bytes': '47853338'}, {'name': 'PowerShell', 'bytes': '2056'}, {'name': 'Puppet', 'bytes': '871'}, {'name': 'Shell', 'bytes': '11283'}, {'name': 'TypeScript', 'bytes': '3445'}]}
'use strict';

import config from '../config';
import path from 'path';
import gulp from 'gulp';
import {Server} from 'karma';

// Register the `unit` task: build views first, then run the Karma test
// suite exactly once and exit.
gulp.task('unit', ['views'], () => {
  const karmaOptions = {
    configFile: path.resolve(__dirname, '../..', config.test.karma),
    singleRun: true
  };
  new Server(karmaOptions).start();
});
{'content_hash': '2da4b1b5bbe2224951f047054bd10f7e', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 68, 'avg_line_length': 20.2, 'alnum_prop': 0.6039603960396039, 'repo_name': 'capheshift/capheshift-angular', 'id': 'f1a36776014f9624efe854fbbe865c78ff93b6e3', 'size': '303', 'binary': False, 'copies': '12', 'ref': 'refs/heads/master', 'path': 'gulp/tasks/unit.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '1149'}, {'name': 'HTML', 'bytes': '3756'}, {'name': 'JavaScript', 'bytes': '21924'}]}
CSS left-to-right to right-to-left converter. Takes CSS, LESS, SASS files and converts them from a left-to-right orientation to a right-to-left orientation. Useful when trying to convert English / Latin / LTR-based stylesheets to support RTL languages like Hebrew / Arabic.

## Installation

Add this line to your application's Gemfile:

    gem 'rtlit'

And then execute:

    $ bundle

Or install it yourself as:

    $ gem install rtlit

## Usage

You can use RTLit in one of three ways:

### In your code

Include RTLit in your application and convert any CSS string to RTL

    require 'rtlit'

    ltr_css = File.open('/path/to/ltr/file.css','r') { |f| f.read }
    rtl_css = RTLit::Converter.to_rtl ltr_css

### As a Rake task

Convert a single file with RTLit

    rake rtlit:convert[/path/to/src.css,/path/to/dest.css]

Convert a directory with RTLit

    rake rtlit:convert[/path/to/src/,/path/to/dest/]

Convert files in a directory filtered by extension

    rake rtlit:convert[/path/to/src/,/path/to/dest/, less] # will convert only *.less files in /path/to/src/

### As a CLI command

    $ rtlit /path/to/src/file.css /path/to/dest/file-rtl.css # convert /path/to/src/file.css and output to /path/to/dest/file-rtl.css
    $ rtlit /path/to/src /path/to/dest # convert all files in /path/to/src/ and output to /path/to/dest
    $ rtlit -x less /path/to/src /path/to/dest # convert only *.less files in /path/to/src/

## Contributing

1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Added some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create a new Pull Request

Feel free to contribute to docs on Omniref

[![rtlit API Documentation](https://www.omniref.com/ruby/gems/rtlit.png)](https://www.omniref.com/ruby/gems/rtlit)
{'content_hash': '5462ea118dbd6b3d5e87915d4e04225a', 'timestamp': '', 'source': 'github', 'line_count': 62, 'max_line_length': 227, 'avg_line_length': 29.532258064516128, 'alnum_prop': 0.7105406881485528, 'repo_name': 'zohararad/rtlit', 'id': 'b05e6c17bc818ac98c3d344164fe771c7a96f5e8', 'size': '1840', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '1076'}, {'name': 'Ruby', 'bytes': '10488'}]}
// Express application bootstrap: wires up middleware, the MongoDB
// connection, route modules, and the 404/error handlers, then exports
// the configured app for bin/www (or tests) to serve.
const express = require('express');
const path = require('path');
const favicon = require('serve-favicon');
const logger = require('morgan');
const cookieParser = require('cookie-parser');
const bodyParser = require('body-parser');

const mongoose = require("mongoose");
require("./models/Posts");
require("./models/Comments");
require("./models/Users");
const passport = require("passport");
require("./config/passport");

const indexRoutes = require("./routes/index");
const partialsRoutes = require("./routes/partials");
const postsRoutes = require("./routes/posts");
const commentsRoutes = require("./routes/comments");
const authRoutes = require("./routes/auth");
const usersRoutes = require("./routes/users");

const app = express();

// Local development database:
//mongoose.connect("mongodb://localhost/news");
// SECURITY: database credentials are hard-coded in the connection string
// below and exposed to anyone who can read this repository. Move the URI
// into an environment variable (e.g. process.env.MONGODB_URI) and rotate
// the password.
mongoose.connect("mongodb://youbernewsdaddybase:[email protected]:52819/youbernews");

// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');

// uncomment after placing your favicon in /public
//app.use(favicon(__dirname + '/public/favicon.ico'));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use(passport.initialize());

// Route modules are all mounted at the root; each defines its own paths.
app.use("/", indexRoutes);
app.use("/", partialsRoutes);
app.use("/", postsRoutes);
app.use("/", commentsRoutes);
app.use("/", authRoutes);
app.use("/", usersRoutes);

// catch 404 and forward to error handler
app.use(function(req, res, next) {
  const err = new Error('Not Found');
  err.status = 404;
  next(err);
});

// error handlers
// NOTE: express recognizes error middleware by its 4-argument signature,
// so `next` must remain in the parameter list even though it is unused.

// development error handler
// will print stacktrace
if (app.get('env') === 'development') {
  app.use(function(err, req, res, next) {
    res.status(err.status || 500);
    res.render('error', {
      message: err.message,
      error: err
    });
  });
}

// production error handler
// no stacktraces leaked to user
app.use(function(err, req, res, next) {
  res.status(err.status || 500);
  res.render('error', {
    message: err.message,
    error: {}
  });
});

module.exports = app;
{'content_hash': '9da58692a7f46e213935799824311d1d', 'timestamp': '', 'source': 'github', 'line_count': 82, 'max_line_length': 91, 'avg_line_length': 25.75609756097561, 'alnum_prop': 0.6747159090909091, 'repo_name': 'AlexanderVanDamme/WebappsV---YoutubeNews', 'id': '8cb494211cc9d2518cffaf83db1eed74dbfb9ae3', 'size': '2112', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '111'}, {'name': 'HTML', 'bytes': '9126'}, {'name': 'JavaScript', 'bytes': '37630'}]}
package com.atguigu.servlet;

import java.io.IOException;

import javax.servlet.AsyncContext;
import javax.servlet.ServletException;
import javax.servlet.ServletResponse;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Demonstrates Servlet 3.0 asynchronous request processing: the container
 * thread returns immediately while the slow business logic runs on a
 * separate thread and completes the response later.
 *
 * NOTE(review): the println messages below are mojibake (mis-encoded
 * non-ASCII text) and are preserved byte-for-byte; fixing them would
 * require re-encoding the file.
 */
@WebServlet(value="/async",asyncSupported=true)
public class HelloAsyncServlet extends HttpServlet {
	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
		// 1. Async support is enabled via asyncSupported=true on @WebServlet above.
		// 2. Switch the request into asynchronous mode.
		System.out.println("Ö÷Ï߳̿ªÊ¼¡£¡£¡£"+Thread.currentThread()+"==>"+System.currentTimeMillis());
		AsyncContext startAsync = req.startAsync();

		// 3. Run the business logic asynchronously; the container thread
		//    exits doGet without waiting for it.
		startAsync.start(new Runnable() {
			@Override
			public void run() {
				try {
					System.out.println("¸±Ï߳̿ªÊ¼¡£¡£¡£"+Thread.currentThread()+"==>"+System.currentTimeMillis());
					sayHello();
					startAsync.complete();
					// Obtain the async context for this request.
					AsyncContext asyncContext = req.getAsyncContext();
					// 4. Obtain the response and write the async result.
					ServletResponse response = asyncContext.getResponse();
					response.getWriter().write("hello async...");
					System.out.println("¸±Ï߳̽áÊø¡£¡£¡£"+Thread.currentThread()+"==>"+System.currentTimeMillis());
				} catch (Exception e) {
					// NOTE(review): exceptions are silently swallowed here;
					// consider logging them and completing the context.
				}
			}
		});

		System.out.println("Ö÷Ï߳̽áÊø¡£¡£¡£"+Thread.currentThread()+"==>"+System.currentTimeMillis());
	}

	// Simulates a slow business operation (~3 second sleep).
	public void sayHello() throws Exception{
		System.out.println(Thread.currentThread()+" processing...");
		Thread.sleep(3000);
	}
}
{'content_hash': '046c799a1f28f5a795d0ce8046b12e06', 'timestamp': '', 'source': 'github', 'line_count': 48, 'max_line_length': 110, 'avg_line_length': 34.166666666666664, 'alnum_prop': 0.7146341463414634, 'repo_name': 'mayonghui2112/helloWorld', 'id': '539008ce6dfd72451469b629bf17304a7475baf9', 'size': '1640', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'sourceCode/testMaven/servlet3.0/src/main/java/com/atguigu/servlet/HelloAsyncServlet.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'AngelScript', 'bytes': '521'}, {'name': 'Batchfile', 'bytes': '39234'}, {'name': 'C', 'bytes': '22329'}, {'name': 'C++', 'bytes': '13466'}, {'name': 'CSS', 'bytes': '61000'}, {'name': 'Go', 'bytes': '6819'}, {'name': 'Groovy', 'bytes': '8821'}, {'name': 'HTML', 'bytes': '9234922'}, {'name': 'Java', 'bytes': '21874329'}, {'name': 'JavaScript', 'bytes': '46483'}, {'name': 'NSIS', 'bytes': '42042'}, {'name': 'Objective-C++', 'bytes': '26102'}, {'name': 'PLpgSQL', 'bytes': '3746'}, {'name': 'Perl', 'bytes': '13860'}, {'name': 'Python', 'bytes': '33132'}, {'name': 'Shell', 'bytes': '51005'}, {'name': 'TSQL', 'bytes': '50756'}, {'name': 'XSLT', 'bytes': '38702'}]}
package checkpoint import ( "fmt" apiv1 "k8s.io/api/core/v1" apiequality "k8s.io/apimachinery/pkg/api/equality" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" clientset "k8s.io/client-go/kubernetes" "k8s.io/kubernetes/pkg/api" utilcodec "k8s.io/kubernetes/pkg/kubelet/kubeletconfig/util/codec" utillog "k8s.io/kubernetes/pkg/kubelet/kubeletconfig/util/log" ) // RemoteConfigSource represents a remote config source object that can be downloaded as a Checkpoint type RemoteConfigSource interface { // UID returns the UID of the remote config source object UID() string // Download downloads the remote config source object returns a Checkpoint backed by the object, // or a sanitized failure reason and error if the download fails Download(client clientset.Interface) (Checkpoint, string, error) // Encode returns a []byte representation of the object behind the RemoteConfigSource Encode() ([]byte, error) // object returns the underlying source object. If you want to compare sources for equality, use EqualRemoteConfigSources, // which compares the underlying source objects for semantic API equality. object() interface{} } // NewRemoteConfigSource constructs a RemoteConfigSource from a v1/NodeConfigSource object, or returns // a sanitized failure reason and an error if the `source` is blatantly invalid. // You should only call this with a non-nil config source. 
func NewRemoteConfigSource(source *apiv1.NodeConfigSource) (RemoteConfigSource, string, error) { // exactly one subfield of the config source must be non-nil, toady ConfigMapRef is the only reference if source.ConfigMapRef == nil { reason := "invalid NodeConfigSource, exactly one subfield must be non-nil, but all were nil" return nil, reason, fmt.Errorf("%s, NodeConfigSource was: %#v", reason, source) } // validate the NodeConfigSource: // at this point we know we're using the ConfigMapRef subfield ref := source.ConfigMapRef // name, namespace, and UID must all be non-empty for ConfigMapRef if ref.Name == "" || ref.Namespace == "" || string(ref.UID) == "" { reason := "invalid ObjectReference, all of UID, Name, and Namespace must be specified" return nil, reason, fmt.Errorf("%s, ObjectReference was: %#v", reason, ref) } return &remoteConfigMap{source}, "", nil } // DecodeRemoteConfigSource is a helper for using the apimachinery to decode serialized RemoteConfigSources; // e.g. the objects stored in the .cur and .lkg files by checkpoint/store/fsstore.go func DecodeRemoteConfigSource(data []byte) (RemoteConfigSource, error) { // decode the remote config source obj, err := runtime.Decode(api.Codecs.UniversalDecoder(), data) if err != nil { return nil, fmt.Errorf("failed to decode, error: %v", err) } // for now we assume we are trying to load an apiv1.NodeConfigSource, // this may need to be extended if e.g. 
a new version of the api is born // convert it to the external NodeConfigSource type, so we're consistently working with the external type outside of the on-disk representation cs := &apiv1.NodeConfigSource{} err = api.Scheme.Convert(obj, cs, nil) if err != nil { return nil, fmt.Errorf("failed to convert decoded object into a v1 NodeConfigSource, error: %v", err) } source, _, err := NewRemoteConfigSource(cs) return source, err } // EqualRemoteConfigSources is a helper for comparing remote config sources by // comparing the underlying API objects for semantic equality. func EqualRemoteConfigSources(a, b RemoteConfigSource) bool { if a != nil && b != nil { return apiequality.Semantic.DeepEqual(a.object(), b.object()) } if a == nil && b == nil { return true } return false } // remoteConfigMap implements RemoteConfigSource for v1/ConfigMap config sources type remoteConfigMap struct { source *apiv1.NodeConfigSource } func (r *remoteConfigMap) UID() string { return string(r.source.ConfigMapRef.UID) } func (r *remoteConfigMap) Download(client clientset.Interface) (Checkpoint, string, error) { var reason string uid := string(r.source.ConfigMapRef.UID) utillog.Infof("attempting to download ConfigMap with UID %q", uid) // get the ConfigMap via namespace/name, there doesn't seem to be a way to get it by UID cm, err := client.CoreV1().ConfigMaps(r.source.ConfigMapRef.Namespace).Get(r.source.ConfigMapRef.Name, metav1.GetOptions{}) if err != nil { reason = fmt.Sprintf("could not download ConfigMap with name %q from namespace %q", r.source.ConfigMapRef.Name, r.source.ConfigMapRef.Namespace) return nil, reason, fmt.Errorf("%s, error: %v", reason, err) } // ensure that UID matches the UID on the reference, the ObjectReference must be unambiguous if r.source.ConfigMapRef.UID != cm.UID { reason = fmt.Sprintf("invalid ObjectReference, UID %q does not match UID of downloaded ConfigMap %q", r.source.ConfigMapRef.UID, cm.UID) return nil, reason, fmt.Errorf(reason) } checkpoint, err := 
NewConfigMapCheckpoint(cm) if err != nil { reason = fmt.Sprintf("invalid downloaded object") return nil, reason, fmt.Errorf("%s, error: %v", reason, err) } utillog.Infof("successfully downloaded ConfigMap with UID %q", uid) return checkpoint, "", nil } func (r *remoteConfigMap) Encode() ([]byte, error) { encoder, err := utilcodec.NewJSONEncoder(apiv1.GroupName) if err != nil { return nil, err } data, err := runtime.Encode(encoder, r.source) if err != nil { return nil, err } return data, nil } func (r *remoteConfigMap) object() interface{} { return r.source }
{'content_hash': 'e1a709b465f9a42c00dee30d790f2031', 'timestamp': '', 'source': 'github', 'line_count': 143, 'max_line_length': 146, 'avg_line_length': 38.60839160839161, 'alnum_prop': 0.7437058503894222, 'repo_name': 'wallrj/kubernetes', 'id': '14374a4cbf3f586d6b19a16a3e3b3cc98ce7c171', 'size': '6090', 'binary': False, 'copies': '10', 'ref': 'refs/heads/master', 'path': 'pkg/kubelet/kubeletconfig/checkpoint/download.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '2525'}, {'name': 'Go', 'bytes': '44589930'}, {'name': 'HTML', 'bytes': '2714044'}, {'name': 'Makefile', 'bytes': '74191'}, {'name': 'Nginx', 'bytes': '595'}, {'name': 'PowerShell', 'bytes': '4261'}, {'name': 'Protocol Buffer', 'bytes': '515103'}, {'name': 'Python', 'bytes': '2309285'}, {'name': 'Ruby', 'bytes': '1591'}, {'name': 'SaltStack', 'bytes': '52331'}, {'name': 'Shell', 'bytes': '1609181'}]}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<!-- Log4j 1.x test configuration for camel-infinispan: INFO-level output to
     both a file (target/camel-infinispan-test.log) and the console. -->
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="false">

  <!-- Appends to the test log file under target/ so repeated runs accumulate. -->
  <appender name="FILE" class="org.apache.log4j.FileAppender">
    <param name="File" value="target/camel-infinispan-test.log" />
    <param name="Append" value="true" />
    <param name="Threshold" value="INFO" />
    <layout class="org.apache.log4j.PatternLayout">
      <param name="ConversionPattern" value="%d [%-15.15t] %-5p %-30.30c{1} - %m%n" />
    </layout>
  </appender>

  <appender name="CONSOLE" class="org.apache.log4j.ConsoleAppender">
    <param name="Threshold" value="INFO" />
    <layout class="org.apache.log4j.PatternLayout">
      <param name="ConversionPattern" value="[%30.30t] %-30.30c{1} %-5p %m%n" />
    </layout>
  </appender>

  <!-- ================ -->
  <!-- Limit categories -->
  <!-- ================ -->

  <category name="org.jboss.arquillian">
    <priority value="INFO" />
  </category>

  <category name="org.infinispan.arquillian.core.WithRunningServerObserver">
    <priority value="INFO" />
  </category>

  <category name="org.infinispan.server.test.util.TestsuiteListener">
    <priority value="INFO" />
  </category>

  <category name="org.apache.http">
    <priority value="INFO" />
  </category>

  <category name="org.infinispan">
    <priority value="INFO" />
  </category>

  <category name="org.jgroups">
    <priority value="INFO" />
  </category>

  <category name="org.apache.commons.httpclient.auth">
    <priority value="INFO"/>
  </category>

  <category name="org.apache.auth">
    <priority value="INFO"/>
  </category>

  <!-- directory server is noisy at INFO, so it alone is capped at WARN -->
  <category name="org.apache.directory">
    <priority value="WARN"/>
  </category>

  <!-- ======================= -->
  <!-- Setup the Root category -->
  <!-- ======================= -->

  <root>
    <priority value="INFO" />
    <appender-ref ref="CONSOLE" />
  </root>

</log4j:configuration>
{'content_hash': '362e65a731f3ef24e7efc8fd6a7b77c7', 'timestamp': '', 'source': 'github', 'line_count': 70, 'max_line_length': 92, 'avg_line_length': 30.1, 'alnum_prop': 0.5647840531561462, 'repo_name': 'nboukhed/camel', 'id': 'd041f183f6e02f953679ce1d61b8f9ebec557862', 'size': '2107', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'components/camel-infinispan/src/test/resources/log4j.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Apex', 'bytes': '6519'}, {'name': 'Batchfile', 'bytes': '683'}, {'name': 'CSS', 'bytes': '30373'}, {'name': 'Elm', 'bytes': '10852'}, {'name': 'FreeMarker', 'bytes': '11410'}, {'name': 'Groovy', 'bytes': '51766'}, {'name': 'HTML', 'bytes': '183393'}, {'name': 'Java', 'bytes': '63250081'}, {'name': 'JavaScript', 'bytes': '90232'}, {'name': 'Makefile', 'bytes': '513'}, {'name': 'Protocol Buffer', 'bytes': '2126'}, {'name': 'Python', 'bytes': '36'}, {'name': 'Ruby', 'bytes': '4802'}, {'name': 'Scala', 'bytes': '323653'}, {'name': 'Shell', 'bytes': '17092'}, {'name': 'Tcl', 'bytes': '4974'}, {'name': 'XQuery', 'bytes': '546'}, {'name': 'XSLT', 'bytes': '284394'}]}
// Babel-compiled ES-module output of antd's Tree component (authored as
// ES2015 classes + JSX). The babel-runtime helpers below emulate class
// semantics (_inherits/_createClass/etc.) for pre-ES2015 targets.
import _extends from 'babel-runtime/helpers/extends';
import _classCallCheck from 'babel-runtime/helpers/classCallCheck';
import _createClass from 'babel-runtime/helpers/createClass';
import _possibleConstructorReturn from 'babel-runtime/helpers/possibleConstructorReturn';
import _inherits from 'babel-runtime/helpers/inherits';
import React from 'react';
import RcTree, { TreeNode } from 'rc-tree';
import animation from '../_util/openAnimation';

// Thin React.Component wrapper exported alongside Tree.
// NOTE(review): render() returns AntTreeNode itself, which would recurse
// infinitely if this component were ever mounted; presumably the original
// source intended to render rc-tree's TreeNode — verify against antd source
// before relying on this class (Tree.TreeNode below is set to rc-tree's
// TreeNode, not this class).
export var AntTreeNode = function (_React$Component) {
    _inherits(AntTreeNode, _React$Component);

    function AntTreeNode() {
        _classCallCheck(this, AntTreeNode);

        return _possibleConstructorReturn(this, (AntTreeNode.__proto__ || Object.getPrototypeOf(AntTreeNode)).apply(this, arguments));
    }

    _createClass(AntTreeNode, [{
        key: 'render',
        value: function render() {
            return React.createElement(AntTreeNode, this.props);
        }
    }]);

    return AntTreeNode;
}(React.Component);

// Ant Design Tree: delegates rendering to rc-tree, supplying antd's CSS
// prefix, open animation, and a styled checkbox inner element when
// `checkable` is truthy.
var Tree = function (_React$Component2) {
    _inherits(Tree, _React$Component2);

    function Tree() {
        _classCallCheck(this, Tree);

        return _possibleConstructorReturn(this, (Tree.__proto__ || Object.getPrototypeOf(Tree)).apply(this, arguments));
    }

    _createClass(Tree, [{
        key: 'render',
        value: function render() {
            var props = this.props;
            var prefixCls = props.prefixCls,
                className = props.className;
            var checkable = props.checkable;
            // When checkable, replace the boolean with antd's checkbox markup so
            // rc-tree renders a styled checkbox instead of a native one.
            return React.createElement(
                RcTree,
                _extends({}, props, { className: className, checkable: checkable ? React.createElement('span', { className: prefixCls + '-checkbox-inner' }) : checkable }),
                this.props.children
            );
        }
    }]);

    return Tree;
}(React.Component);

export default Tree;

// Re-export rc-tree's TreeNode so callers can write <Tree.TreeNode />.
Tree.TreeNode = TreeNode;

Tree.defaultProps = {
    prefixCls: 'ant-tree',
    checkable: false,
    showIcon: false,
    openAnimation: animation
};
{'content_hash': '8f0e08011f3e1f52385802a0fd2e0d90', 'timestamp': '', 'source': 'github', 'line_count': 64, 'max_line_length': 172, 'avg_line_length': 31.75, 'alnum_prop': 0.6496062992125984, 'repo_name': 'ligangwolai/blog', 'id': '3fd4362687914c11cc2381c8f33d008bf45fa624', 'size': '2032', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'node_modules/[email protected]@antd/es/tree/index.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '832'}, {'name': 'HTML', 'bytes': '282'}, {'name': 'JavaScript', 'bytes': '42612'}]}
// Returns a canned weather report tuple for the given location:
// [location, temperature (F), forecast text].
var weatherReport = function(location) {
  return [location, 72, "Mostly Sunny"];
};

// Unpack the report tuple into individual top-level bindings.
var ref = weatherReport("Berkeley, CA");
var city = ref[0];
var temp = ref[1];
var forecast = ref[2];
{'content_hash': '55fb7e9ba79a80c58053021d5b0a5f1c', 'timestamp': '', 'source': 'github', 'line_count': 7, 'max_line_length': 85, 'avg_line_length': 30.714285714285715, 'alnum_prop': 0.6744186046511628, 'repo_name': 'mekuriam/coffeescript', 'id': '82a9eb6ab664c80e56d9d4acba946db83b63951e', 'size': '250', 'binary': False, 'copies': '32', 'ref': 'refs/heads/master', 'path': 'documentation/coffee/multiple_return_values.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CoffeeScript', 'bytes': '469646'}, {'name': 'HTML', 'bytes': '180964'}, {'name': 'JavaScript', 'bytes': '79856'}]}
# Build script for the `graph` C++ extension module (invoked as
# `python setup.py build`/`install`).
# NOTE(review): distutils is deprecated and removed in Python 3.12;
# consider migrating to setuptools when the toolchain allows.
from distutils.core import setup, Extension

# Native extension compiled from graph.cpp, importable as `graph`.
module1 = Extension('graph', sources = ['graph.cpp'])

setup (name = 'igem2013',
        version = '1.0',
        description = 'This is a demo package',
        ext_modules = [module1])
{'content_hash': '2db9a746413158781a663fc5e0eae4b3', 'timestamp': '', 'source': 'github', 'line_count': 5, 'max_line_length': 108, 'avg_line_length': 41.8, 'alnum_prop': 0.69377990430622, 'repo_name': 'igemsoftware/SYSU-Software2013', 'id': 'e986d8a12b727e8596c468b5c95804606e45d274', 'size': '232', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'project/Python27_32/web/setup.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP', 'bytes': '4234'}, {'name': 'C', 'bytes': '2246655'}, {'name': 'C#', 'bytes': '30903'}, {'name': 'C++', 'bytes': '344228'}, {'name': 'CSS', 'bytes': '437211'}, {'name': 'F#', 'bytes': '9222'}, {'name': 'JavaScript', 'bytes': '7288480'}, {'name': 'Python', 'bytes': '55202181'}, {'name': 'Shell', 'bytes': '23510'}, {'name': 'Tcl', 'bytes': '3329368'}, {'name': 'Visual Basic', 'bytes': '4330'}, {'name': 'XSLT', 'bytes': '38160'}]}
<?php defined('C5_EXECUTE') or die("Access Denied.");?>
<?php
// Dashboard page for managing an Express entity's attributes.
// Expects $entity (the Express entity) in scope; $headerMenu and
// $attributeView are presumably provided by the controller — verify
// against the owning single-page controller.
?>
<?php if (!isset($headerMenu)) { ?>
    <div class="ccm-dashboard-header-buttons">
        <?php
        // Render the standard Express entity header menu only when the
        // controller has not already supplied one.
        $manage = new \Concrete\Controller\Element\Dashboard\Express\Menu($entity);
        $manage->render();
        ?>
    </div>
<?php } ?>
<div class="row">
    <?php /* Left column: navigation between the entity's detail pages. */ ?>
    <?php View::element('dashboard/express/detail_navigation', array('entity' => $entity))?>
    <div class="col-md-8">
        <?php /* Main column: the attribute list/editor view. */ ?>
        <?php $attributeView->render(); ?>
    </div>
</div>
{'content_hash': '2ffea557d20e9021bbc1b4f563c836d4', 'timestamp': '', 'source': 'github', 'line_count': 27, 'max_line_length': 92, 'avg_line_length': 18.37037037037037, 'alnum_prop': 0.5766129032258065, 'repo_name': 'drago2308/WaniKani-Classroom', 'id': 'd9f94d25cb6956f70feec7f09655aa5656e17296', 'size': '496', 'binary': False, 'copies': '14', 'ref': 'refs/heads/master', 'path': 'concrete/single_pages/dashboard/system/express/entities/attributes.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '303'}, {'name': 'Batchfile', 'bytes': '41'}, {'name': 'CSS', 'bytes': '496907'}, {'name': 'JavaScript', 'bytes': '825941'}, {'name': 'PHP', 'bytes': '8675208'}, {'name': 'Shell', 'bytes': '355'}]}
import { Operator } from '../Operator';
import { Subscriber } from '../Subscriber';
import { Subscription } from '../Subscription';
import { async } from '../scheduler/async';
import { Observable } from '../Observable';
import { ThrottleConfig, defaultThrottleConfig } from './throttle';
import { MonoTypeOperatorFunction, SchedulerLike, TeardownLogic } from '../types';

/**
 * Emits a value from the source Observable, then ignores subsequent source
 * values for `duration` milliseconds, then repeats this process.
 *
 * <span class="informal">Lets a value pass, then ignores source values for the
 * next `duration` milliseconds.</span>
 *
 * ![](throttleTime.png)
 *
 * `throttleTime` emits the source Observable values on the output Observable
 * when its internal timer is disabled, and ignores source values when the timer
 * is enabled. Initially, the timer is disabled. As soon as the first source
 * value arrives, it is forwarded to the output Observable, and then the timer
 * is enabled. After `duration` milliseconds (or the time unit determined
 * internally by the optional `scheduler`) has passed, the timer is disabled,
 * and this process repeats for the next source value. Optionally takes a
 * {@link SchedulerLike} for managing timers.
 *
 * ## Example
 * Emit clicks at a rate of at most one click per second
 * ```javascript
 * const clicks = fromEvent(document, 'click');
 * const result = clicks.pipe(throttleTime(1000));
 * result.subscribe(x => console.log(x));
 * ```
 *
 * @see {@link auditTime}
 * @see {@link debounceTime}
 * @see {@link delay}
 * @see {@link sampleTime}
 * @see {@link throttle}
 *
 * @param {number} duration Time to wait before emitting another value after
 * emitting the last value, measured in milliseconds or the time unit determined
 * internally by the optional `scheduler`.
 * @param {SchedulerLike} [scheduler=async] The {@link SchedulerLike} to use for
 * managing the timers that handle the throttling.
 * @param {Object} config a configuration object to define `leading` and
 * `trailing` behavior. Defaults to `{ leading: true, trailing: false }`.
 * @return {Observable<T>} An Observable that performs the throttle operation to
 * limit the rate of emissions from the source.
 * @method throttleTime
 * @owner Observable
 */
export function throttleTime<T>(duration: number, scheduler: SchedulerLike = async, config: ThrottleConfig = defaultThrottleConfig): MonoTypeOperatorFunction<T> {
  return (source: Observable<T>) => source.lift(new ThrottleTimeOperator(duration, scheduler, config.leading, config.trailing));
}

// Operator wrapper: packages the throttle parameters so `lift` can create a
// ThrottleTimeSubscriber per subscription.
class ThrottleTimeOperator<T> implements Operator<T, T> {
  constructor(private duration: number,
              private scheduler: SchedulerLike,
              private leading: boolean,
              private trailing: boolean) {
  }

  call(subscriber: Subscriber<T>, source: any): TeardownLogic {
    return source.subscribe(
      new ThrottleTimeSubscriber(subscriber, this.duration, this.scheduler, this.leading, this.trailing)
    );
  }
}

/**
 * We need this JSDoc comment for affecting ESDoc.
 * @ignore
 * @extends {Ignored}
 */
class ThrottleTimeSubscriber<T> extends Subscriber<T> {
  // Non-null while the throttle window is open (holds the scheduled timer).
  private throttled: Subscription;
  // Latest value seen during the window, buffered for trailing emission.
  private _hasTrailingValue: boolean = false;
  private _trailingValue: T = null;

  constructor(destination: Subscriber<T>,
              private duration: number,
              private scheduler: SchedulerLike,
              private leading: boolean,
              private trailing: boolean) {
    super(destination);
  }

  protected _next(value: T) {
    if (this.throttled) {
      // Window open: drop the value, or buffer it if trailing emission is on.
      if (this.trailing) {
        this._trailingValue = value;
        this._hasTrailingValue = true;
      }
    } else {
      // Window closed: open a new window; emit immediately only when leading.
      this.add(this.throttled = this.scheduler.schedule<DispatchArg<T>>(dispatchNext, this.duration, { subscriber: this }));
      if (this.leading) {
        this.destination.next(value);
      }
    }
  }

  protected _complete() {
    // Flush any buffered trailing value before completing.
    if (this._hasTrailingValue) {
      this.destination.next(this._trailingValue);
      this.destination.complete();
    } else {
      this.destination.complete();
    }
  }

  // Invoked by the scheduler when the window elapses: emit the buffered
  // trailing value (if any) and tear down the timer subscription.
  clearThrottle() {
    const throttled = this.throttled;
    if (throttled) {
      if (this.trailing && this._hasTrailingValue) {
        this.destination.next(this._trailingValue);
        this._trailingValue = null;
        this._hasTrailingValue = false;
      }
      throttled.unsubscribe();
      this.remove(throttled);
      this.throttled = null;
    }
  }
}

// Payload handed to the scheduler so dispatchNext can find its subscriber.
interface DispatchArg<T> {
  subscriber: ThrottleTimeSubscriber<T>;
}

// Scheduler callback: closes the throttle window on the owning subscriber.
function dispatchNext<T>(arg: DispatchArg<T>) {
  const { subscriber } = arg;
  subscriber.clearThrottle();
}
{'content_hash': '9cf513fb14e4d35fc51153c166111e25', 'timestamp': '', 'source': 'github', 'line_count': 136, 'max_line_length': 128, 'avg_line_length': 34.911764705882355, 'alnum_prop': 0.6823925863521483, 'repo_name': 'mrtequino/JSW', 'id': '73a03007de559d8ed2605eb8999de5e5e059686d', 'size': '4748', 'binary': False, 'copies': '11', 'ref': 'refs/heads/master', 'path': 'nodejs/BIGDATA/node_modules/rxjs/src/internal/operators/throttleTime.ts', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '243783'}, {'name': 'HTML', 'bytes': '137440'}, {'name': 'Java', 'bytes': '360339'}, {'name': 'JavaScript', 'bytes': '93395'}, {'name': 'TypeScript', 'bytes': '291910'}, {'name': 'Vue', 'bytes': '14811'}]}
package com.marshalchen.common.demoofui.easyandroidanimations;

import android.content.Intent;
import android.os.Bundle;
import android.app.Activity;

import com.marshalchen.common.demoofui.R;

/**
 * An activity representing a list of Animations. This activity has different
 * presentations for phones and tablets. On phones, the activity presents a list
 * of items, which when touched, lead to a {@link AnimationDetailActivity}
 * representing item details. On tablets, the activity presents the list of
 * items and item details side-by-side using two vertical panes.
 * <p>
 * The activity makes heavy use of fragments. The list of items is
 * {@link AnimationListFragment} and the item details (if present) is
 * {@link AnimationDetailFragment}.
 * <p>
 * This activity also implements the required
 * {@link AnimationListFragment.Callbacks} interface to listen for item
 * selections.
 */
public class EasyAnimationListActivity extends Activity implements
        AnimationListFragment.Callbacks {

    /**
     * Whether or not the activity is in two-pane mode, i.e. running on a tablet
     * device. Decided at runtime by whether the detail container view exists
     * in the inflated layout (large-screen layouts include it).
     */
    private boolean mTwoPane;

    /**
     * Inflates the list layout and detects one-pane vs. two-pane mode from the
     * presence of the detail container view.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.easy_animation_activity_animation_list);

        if (findViewById(R.id.animation_detail_container) != null) {
            // The detail container view will be present only in the
            // large-screen layouts (res/values-large and
            // res/values-sw600dp). If this view is present, then the
            // activity should be in two-pane mode.
            mTwoPane = true;

            // In two-pane mode, list items should be given the
            // 'activated' state when touched.
            ((AnimationListFragment) getFragmentManager().findFragmentById(
                    R.id.animation_list)).setActivateOnItemClick(true);
        }

        // TODO: If exposing deep links into your app, handle intents here.
    }

    /**
     * Callback method from {@link AnimationListFragment.Callbacks} indicating
     * that the item with the given ID was selected.
     */
    @Override
    public void onItemSelected(int id) {
        if (mTwoPane) {
            // In two-pane mode, show the detail view in this activity by
            // adding or replacing the detail fragment using a
            // fragment transaction.
            Bundle arguments = new Bundle();
            arguments.putInt(AnimationDetailFragment.ARG_ITEM_ID, id);
            AnimationDetailFragment fragment = new AnimationDetailFragment();
            fragment.setArguments(arguments);
            getFragmentManager().beginTransaction()
                    .replace(R.id.animation_detail_container, fragment)
                    .commit();

        } else {
            // In single-pane mode, simply start the detail activity
            // for the selected item ID.
            Intent detailIntent = new Intent(this, AnimationDetailActivity.class);
            detailIntent.putExtra(AnimationDetailFragment.ARG_ITEM_ID, id);
            startActivity(detailIntent);
        }
    }
}
{'content_hash': '9a7aaacb197797270081737b93531093', 'timestamp': '', 'source': 'github', 'line_count': 81, 'max_line_length': 80, 'avg_line_length': 35.23456790123457, 'alnum_prop': 0.7438682550805886, 'repo_name': 'sitexa/UltimateAndroid', 'id': '6560b4cc968ee168f12515a16a5a0df0f0703f52', 'size': '2854', 'binary': False, 'copies': '44', 'ref': 'refs/heads/master', 'path': 'UltimateAndroidGradle/demoofui/src/main/java/com/marshalchen/common/demoofui/easyandroidanimations/EasyAnimationListActivity.java', 'mode': '33261', 'license': 'apache-2.0', 'language': [{'name': 'HTML', 'bytes': '8171'}, {'name': 'Java', 'bytes': '14107035'}]}
<?php
/**
 * Customize API: WP_Customize_Nav_Menu_Control class
 *
 * @package WordPress
 * @subpackage Customize
 * @since 4.4.0
 */

/**
 * Customize Nav Menu Control Class.
 *
 * Renders the "Add Items" / "Reorder" / "Delete Menu" UI and the menu-location
 * checkboxes for a single nav menu in the Customizer. The control body is a
 * JS/Underscore template, so render_content() is intentionally empty.
 *
 * @since 4.3.0
 */
class WP_Customize_Nav_Menu_Control extends WP_Customize_Control {

	/**
	 * Control type.
	 *
	 * @since 4.3.0
	 * @access public
	 * @var string
	 */
	public $type = 'nav_menu';

	/**
	 * The nav menu setting.
	 *
	 * @since 4.3.0
	 * @access public
	 * @var WP_Customize_Nav_Menu_Setting
	 */
	public $setting;

	/**
	 * Don't render the control's content - it uses a JS template instead.
	 *
	 * @since 4.3.0
	 * @access public
	 */
	public function render_content() {}

	/**
	 * JS/Underscore template for the control UI.
	 *
	 * `{{ data.menu_id }}` placeholders are interpolated client-side by the
	 * Customizer's Underscore templating; the menu-location checkboxes are only
	 * printed when the active theme declares menu support.
	 *
	 * @since 4.3.0
	 * @access public
	 */
	public function content_template() {
		?>
		<button type="button" class="button add-new-menu-item" aria-label="<?php esc_attr_e( 'Add or remove menu items' ); ?>" aria-expanded="false" aria-controls="available-menu-items">
			<?php _e( 'Add Items' ); ?>
		</button>
		<button type="button" class="button-link reorder-toggle" aria-label="<?php esc_attr_e( 'Reorder menu items' ); ?>" aria-describedby="reorder-items-desc-{{ data.menu_id }}">
			<span class="reorder"><?php _e( 'Reorder' ); ?></span>
			<span class="reorder-done"><?php _e( 'Done' ); ?></span>
		</button>
		<p class="screen-reader-text" id="reorder-items-desc-{{ data.menu_id }}"><?php _e( 'When in reorder mode, additional controls to reorder menu items will be available in the items list above.' ); ?></p>
		<span class="menu-delete-item">
			<button type="button" class="button-link button-link-delete">
				<?php _e( 'Delete Menu' ); ?>
			</button>
		</span>
		<?php if ( current_theme_supports( 'menus' ) ) : ?>
			<ul class="menu-settings">
				<li class="customize-control">
					<span class="customize-control-title"><?php _e( 'Display Location' ); ?></span>
				</li>
				<?php foreach ( get_registered_nav_menus() as $location => $description ) : ?>
					<li class="customize-control customize-control-checkbox assigned-menu-location">
						<label>
							<input type="checkbox" data-menu-id="{{ data.menu_id }}" data-location-id="<?php echo esc_attr( $location ); ?>" class="menu-location" /> <?php echo $description; ?>
							<span class="theme-location-set"><?php
								/* translators: %s: menu name */
								printf( _x( '(Current: %s)', 'menu location' ),
									'<span class="current-menu-location-name-' . esc_attr( $location ) . '"></span>'
								);
							?></span>
						</label>
					</li>
				<?php endforeach; ?>
			</ul>
		<?php endif;
	}

	/**
	 * Return parameters for this control.
	 *
	 * Extends the parent's exported params with the menu's term ID so the
	 * client-side template can reference it as `data.menu_id`.
	 *
	 * @since 4.3.0
	 * @access public
	 *
	 * @return array Exported parameters.
	 */
	public function json() {
		$exported = parent::json();
		$exported['menu_id'] = $this->setting->term_id;

		return $exported;
	}
}
{'content_hash': '6699673b706c755f1491b2f23cb6a341', 'timestamp': '', 'source': 'github', 'line_count': 102, 'max_line_length': 203, 'avg_line_length': 28.058823529411764, 'alnum_prop': 0.6107617051013278, 'repo_name': '20steps/alexa', 'id': '04fa470f0b4ea7245f5cd27dbb81ca64b2259d5e', 'size': '2862', 'binary': False, 'copies': '97', 'ref': 'refs/heads/master', 'path': 'web/wp-includes/customize/class-wp-customize-nav-menu-control.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '4889348'}, {'name': 'CoffeeScript', 'bytes': '8023'}, {'name': 'Gherkin', 'bytes': '4336'}, {'name': 'HTML', 'bytes': '357026'}, {'name': 'JavaScript', 'bytes': '9076289'}, {'name': 'PHP', 'bytes': '33450039'}, {'name': 'Perl', 'bytes': '365'}, {'name': 'Ruby', 'bytes': '3054'}, {'name': 'Shell', 'bytes': '30158'}, {'name': 'TypeScript', 'bytes': '35051'}, {'name': 'VCL', 'bytes': '22958'}, {'name': 'XSLT', 'bytes': '5437'}]}
<!DOCTYPE html>
<!--
Copyright (c) 2014 Intel Corporation.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of works must retain the original copyright notice, this list
  of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
  may be used to endorse or promote products derived from this work without
  specific prior written permission.

THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Authors:
        Cao, Jun <[email protected]>

-->
<html>
  <head>
    <title>WebGL Test: webglrenderingcontext_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_value</title>
    <link rel="author" title="Intel" href="http://www.intel.com" />
    <link rel="help" href="https://www.khronos.org/registry/webgl/specs/1.0/" />
    <meta name="flags" content="" />
    <meta name="assert" content="Check if WebGLRenderingContext.FRAMEBUFFER_ATTACHMENT_OBJECT_NAME constant value is 0x8CD1"/>
    <script src="../resources/testharness.js"></script>
    <script src="../resources/testharnessreport.js"></script>
    <script src="support/webgl.js"></script>
  </head>
  <body>
    <div id="log"></div>
    <canvas id="canvas" width="200" height="100" style="border:1px solid #c3c3c3;">
        Your browser does not support the canvas element.
    </canvas>
    <script>
        <!-- Acquire a WebGL context (helper from support/webgl.js), then assert
             the GLenum constant value against the WebGL 1.0 specification. -->
        getwebgl();
        webgl_constant_value(webgl, 'FRAMEBUFFER_ATTACHMENT_OBJECT_NAME', 0x8CD1);
    </script>
  </body>
</html>
{'content_hash': '878ccadd64947448b260be55d7852379', 'timestamp': '', 'source': 'github', 'line_count': 54, 'max_line_length': 126, 'avg_line_length': 45.03703703703704, 'alnum_prop': 0.7467105263157895, 'repo_name': 'XiaosongWei/crosswalk-test-suite', 'id': '8dda2c272c0ad9ba0d25e7d58b5dacef6c39ce32', 'size': '2432', 'binary': False, 'copies': '18', 'ref': 'refs/heads/master', 'path': 'webapi/tct-webgl-nonw3c-tests/webgl/webglrenderingcontext_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_value.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Batchfile', 'bytes': '1693'}, {'name': 'C', 'bytes': '28136'}, {'name': 'CSS', 'bytes': '403677'}, {'name': 'CoffeeScript', 'bytes': '18978'}, {'name': 'Cucumber', 'bytes': '76562'}, {'name': 'GLSL', 'bytes': '6990'}, {'name': 'Groff', 'bytes': '12'}, {'name': 'HTML', 'bytes': '41078525'}, {'name': 'Java', 'bytes': '786204'}, {'name': 'JavaScript', 'bytes': '4639929'}, {'name': 'Logos', 'bytes': '12'}, {'name': 'Makefile', 'bytes': '1044'}, {'name': 'PHP', 'bytes': '45668'}, {'name': 'Python', 'bytes': '4057992'}, {'name': 'Shell', 'bytes': '850195'}]}
import ScrollArea from './components/ScrollArea'; export default ScrollArea;
{'content_hash': '0e880996683e6e8d05884582dc26ff9d', 'timestamp': '', 'source': 'github', 'line_count': 3, 'max_line_length': 49, 'avg_line_length': 26.0, 'alnum_prop': 0.8076923076923077, 'repo_name': 'xiCO2k/react-scroll-area', 'id': '89336957f5c12595d7bb08edd489a1d4997cf476', 'size': '78', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/index.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '1478'}, {'name': 'HTML', 'bytes': '1372'}, {'name': 'JavaScript', 'bytes': '49864'}]}
grafos ====== The MIT License (MIT) Copyright (c) 2013 gadld Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{'content_hash': '419ced15e0ac30fb844d924a9012e18b', 'timestamp': '', 'source': 'github', 'line_count': 22, 'max_line_length': 80, 'avg_line_length': 49.36363636363637, 'alnum_prop': 0.7983425414364641, 'repo_name': 'gadld/grafos', 'id': '2639ace635abbd5d78cb69ed8bb2af06012995f9', 'size': '1086', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '5349'}, {'name': 'C++', 'bytes': '2560'}, {'name': 'Objective-C', 'bytes': '325464'}]}
__doc__ = """hashlib module - A common interface to many hash functions. new(name, data=b'') - returns a new hash object implementing the given hash function; initializing the hash using the given binary data. Named constructor functions are also available, these are faster than using new(name): md5(), sha1(), sha224(), sha256(), sha384(), and sha512() More algorithms may be available on your platform but the above are guaranteed to exist. See the algorithms_guaranteed and algorithms_available attributes to find out what algorithm names can be passed to new(). NOTE: If you want the adler32 or crc32 hash functions they are available in the zlib module. Choose your hash function wisely. Some have known collision weaknesses. sha384 and sha512 will be slow on 32 bit platforms. Hash objects have these methods: - update(arg): Update the hash object with the bytes in arg. Repeated calls are equivalent to a single call with the concatenation of all the arguments. - digest(): Return the digest of the bytes passed to the update() method so far. - hexdigest(): Like digest() except the digest is returned as a unicode object of double length, containing only hexadecimal digits. - copy(): Return a copy (clone) of the hash object. This can be used to efficiently compute the digests of strings that share a common initial substring. For example, to obtain the digest of the string 'Nobody inspects the spammish repetition': >>> import hashlib >>> m = hashlib.md5() >>> m.update(b"Nobody inspects") >>> m.update(b" the spammish repetition") >>> m.digest() b'\\xbbd\\x9c\\x83\\xdd\\x1e\\xa5\\xc9\\xd9\\xde\\xc9\\xa1\\x8d\\xf0\\xff\\xe9' More condensed: >>> hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest() 'a4337bc45a8fc544c03f52dc550cd6e1e87021bc896588bd79e901e2' """ # This tuple and __get_builtin_constructor() must be modified if a new # always available algorithm is added. 
__always_supported = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') algorithms_guaranteed = set(__always_supported) algorithms_available = set(__always_supported) __all__ = __always_supported + ('new', 'algorithms_guaranteed', 'algorithms_available') def __get_builtin_constructor(name): try: if name in ('SHA1', 'sha1'): import _sha1 return _sha1.sha1 elif name in ('MD5', 'md5'): import _md5 return _md5.md5 elif name in ('SHA256', 'sha256', 'SHA224', 'sha224'): import _sha256 bs = name[3:] if bs == '256': return _sha256.sha256 elif bs == '224': return _sha256.sha224 elif name in ('SHA512', 'sha512', 'SHA384', 'sha384'): import _sha512 bs = name[3:] if bs == '512': return _sha512.sha512 elif bs == '384': return _sha512.sha384 except ImportError: pass # no extension module, this hash is unsupported. raise ValueError('unsupported hash type ' + name) def __get_openssl_constructor(name): try: f = getattr(_hashlib, 'openssl_' + name) # Allow the C module to raise ValueError. The function will be # defined but the hash not actually available thanks to OpenSSL. f() # Use the C function directly (very fast) return f except (AttributeError, ValueError): return __get_builtin_constructor(name) def __py_new(name, data=b''): """new(name, data=b'') - Return a new hashing object using the named algorithm; optionally initialized with data (which must be bytes). """ return __get_builtin_constructor(name)(data) def __hash_new(name, data=b''): """new(name, data=b'') - Return a new hashing object using the named algorithm; optionally initialized with data (which must be bytes). """ try: return _hashlib.new(name, data) except ValueError: # If the _hashlib module (OpenSSL) doesn't support the named # hash, try using our builtin implementations. # This allows for SHA224/256 and SHA384/512 support even though # the OpenSSL library prior to 0.9.8 doesn't provide them. 
return __get_builtin_constructor(name)(data) try: import _hashlib new = __hash_new __get_hash = __get_openssl_constructor algorithms_available = algorithms_available.union( _hashlib.openssl_md_meth_names) except ImportError: new = __py_new __get_hash = __get_builtin_constructor for __func_name in __always_supported: # try them all, some may not work due to the OpenSSL # version not supporting that algorithm. try: globals()[__func_name] = __get_hash(__func_name) except ValueError: import logging logging.exception('code for hash %s was not found.', __func_name) # Cleanup locals() del __always_supported, __func_name, __get_hash del __py_new, __hash_new, __get_openssl_constructor
{'content_hash': '6c0b73d2956d055e33c861014b977a65', 'timestamp': '', 'source': 'github', 'line_count': 144, 'max_line_length': 83, 'avg_line_length': 36.138888888888886, 'alnum_prop': 0.6354727132974635, 'repo_name': 'methoxid/micropystat', 'id': '21454c7d3066f5fecb00c7ff678f323ca00a4a49', 'size': '5321', 'binary': False, 'copies': '31', 'ref': 'refs/heads/master', 'path': 'tests/bytecode/pylib-tests/hashlib.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Assembly', 'bytes': '50694'}, {'name': 'C', 'bytes': '20071807'}, {'name': 'C++', 'bytes': '3052203'}, {'name': 'HTML', 'bytes': '84456'}, {'name': 'Makefile', 'bytes': '47396'}, {'name': 'Objective-C', 'bytes': '9651'}, {'name': 'Python', 'bytes': '825056'}, {'name': 'Shell', 'bytes': '6229'}]}
package bdv.viewer.overlay; import java.awt.Color; import java.awt.Font; import java.awt.Graphics2D; import java.awt.font.FontRenderContext; import java.awt.font.TextLayout; import java.awt.geom.Rectangle2D; import java.text.DecimalFormat; import bdv.ui.UIUtils; import bdv.util.Affine3DHelpers; import bdv.util.Prefs; import bdv.viewer.Source; import bdv.viewer.SourceAndConverter; import bdv.viewer.ViewerState; import mpicbg.spim.data.sequence.VoxelDimensions; import net.imglib2.realtransform.AffineTransform3D; public class ScaleBarOverlayRenderer { private final DecimalFormat format = new DecimalFormat("0.####"); private final AffineTransform3D transform = new AffineTransform3D(); private final AffineTransform3D sourceTransform = new AffineTransform3D(); /** * Try to keep the scale bar as close to this length (in pixels) as possible. */ private final int targetScaleBarLength = 100; /** * For finding the value to display on the scalebar: into how many parts is * each power of ten divided? 
For example, 4 means the following are * possible values: * <em>..., 0.1, 0.25, 0.5, 0.75, 1, 2.5, 5, 7.5, 10, ...</em> */ private final int subdivPerPowerOfTen = 4; private double scaleBarLength; private double scale; private String unit; private boolean drawScaleBar; public synchronized void paint( final Graphics2D g ) { if ( drawScaleBar ) { final Font font = UIUtils.getFont( "defaultFont" ); final String scaleBarText = format.format( scale ) + " " + unit; final Color color = new Color( Prefs.scaleBarColor(), true ); final Color bgcolor = new Color( Prefs.scaleBarBgColor(), true ); // scalebar position final int x = 20; final int y = ( int ) g.getClipBounds().getHeight() - 30; // label position final FontRenderContext frc = g.getFontRenderContext(); final TextLayout layout = new TextLayout( scaleBarText, font, frc ); final Rectangle2D bounds = layout.getBounds(); final float tx = ( float ) ( 20 + ( scaleBarLength - bounds.getMaxX() ) / 2 ); final float ty = y - 5; // draw background g.setColor( bgcolor ); g.fillRect( x - 7, ( int ) ( ty - bounds.getHeight() - 3 ), ( int ) scaleBarLength + 14, ( int ) bounds.getHeight() + 25 ); // draw scalebar g.setColor( color ); g.fillRect( x, y, ( int ) scaleBarLength, 10 ); // draw label layout.draw( g, tx, ty ); } } private static final String[] lengthUnits = { "nm", "µm", "mm", "m", "km" }; /** * Update data to show in the overlay. */ @Deprecated public synchronized void setViewerState( final bdv.viewer.state.ViewerState state ) { synchronized ( state ) { setViewerState( state.getState() ); } } /** * Update data to show in the overlay. */ public synchronized void setViewerState( final ViewerState state ) { synchronized ( state ) { final SourceAndConverter< ? > current = state.getCurrentSource(); if ( current == null ) { drawScaleBar = false; return; } final Source< ? 
> spimSource = current.getSpimSource(); final VoxelDimensions voxelDimensions = spimSource.getVoxelDimensions(); if ( voxelDimensions == null ) { drawScaleBar = false; return; } drawScaleBar = true; state.getViewerTransform( transform ); final int t = state.getCurrentTimepoint(); spimSource.getSourceTransform( t, 0, sourceTransform ); transform.concatenate( sourceTransform ); final double sizeOfOnePixel = voxelDimensions.dimension( 0 ) / Affine3DHelpers.extractScale( transform, 0 ); // find good scaleBarLength and corresponding scale value final double sT = targetScaleBarLength * sizeOfOnePixel; final double pot = Math.floor( Math.log10( sT ) ); final double l2 = sT / Math.pow( 10, pot ); final int fracs = ( int ) ( 0.1 * l2 * subdivPerPowerOfTen ); final double scale1 = ( fracs > 0 ) ? Math.pow( 10, pot + 1 ) * fracs / subdivPerPowerOfTen : Math.pow( 10, pot ); final double scale2 = ( fracs == 3 ) ? Math.pow( 10, pot + 1 ) : Math.pow( 10, pot + 1 ) * ( fracs + 1 ) / subdivPerPowerOfTen; final double lB1 = scale1 / sizeOfOnePixel; final double lB2 = scale2 / sizeOfOnePixel; if ( Math.abs( lB1 - targetScaleBarLength ) < Math.abs( lB2 - targetScaleBarLength ) ) { scale = scale1; scaleBarLength = lB1; } else { scale = scale2; scaleBarLength = lB2; } // If unit is a known unit (such as nm) then try to modify scale // and unit such that the displayed string is short. // For example, replace "0.021 µm" by "21 nm". 
String scaleUnit = voxelDimensions.unit(); if ( "um".equals( scaleUnit ) ) scaleUnit = "µm"; int scaleUnitIndex = -1; for ( int i = 0; i < lengthUnits.length; ++i ) if ( lengthUnits[ i ].equals( scaleUnit ) ) { scaleUnitIndex = i; break; } if ( scaleUnitIndex >= 0 ) { int shifts = ( int ) Math.floor( ( Math.log10( scale ) + 1 ) / 3 ); int shiftedIndex = scaleUnitIndex + shifts; if ( shiftedIndex < 0 ) { shifts = -scaleUnitIndex; shiftedIndex = 0; } else if ( shiftedIndex >= lengthUnits.length ) { shifts = lengthUnits.length - 1 - scaleUnitIndex; shiftedIndex = lengthUnits.length - 1; } scale = scale / Math.pow( 1000, shifts ); unit = lengthUnits[ shiftedIndex ]; } else { unit = scaleUnit; } } } }
{'content_hash': 'f4ff39a7357929bc054c4abc94203dae', 'timestamp': '', 'source': 'github', 'line_count': 187, 'max_line_length': 130, 'avg_line_length': 28.844919786096256, 'alnum_prop': 0.6642565813867259, 'repo_name': 'bigdataviewer/bigdataviewer-core', 'id': 'a89b393d452ebc03f888e0914925510142150335', 'size': '6846', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'src/main/java/bdv/viewer/overlay/ScaleBarOverlayRenderer.java', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'HTML', 'bytes': '6036'}, {'name': 'Java', 'bytes': '1871259'}]}
/*jshint bitwise: false*/ /* This file is part of web3.js. web3.js is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. web3.js is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with web3.js. If not, see <http://www.gnu.org/licenses/>. */ /** * @file bloom.js * @author Bas van Kervel <[email protected]> * @date 2017 */ /** * Ethereum bloom filter support. * * TODO UNDOCUMENTED * * @module bloom * @class [bloom] bloom */ var utils = require("./utils.js"); function codePointToInt(codePoint) { if (codePoint >= 48 && codePoint <= 57) { /*['0'..'9'] -> [0..9]*/ return codePoint-48; } if (codePoint >= 65 && codePoint <= 70) { /*['A'..'F'] -> [10..15]*/ return codePoint-55; } if (codePoint >= 97 && codePoint <= 102) { /*['a'..'f'] -> [10..15]*/ return codePoint-87; } throw "invalid bloom"; } function testBytes(bloom, bytes) { var hash = utils.sha3(bytes).replace('0x',''); for (var i = 0; i < 12; i += 4) { // calculate bit position in bloom filter that must be active var bitpos = ((parseInt(hash.substr(i, 2), 16) << 8) + parseInt(hash.substr((i+2), 2), 16)) & 2047; // test if bitpos in bloom is active var code = codePointToInt(bloom.charCodeAt(bloom.length - 1 - Math.floor(bitpos/4))); var offset = 1 << (bitpos % 4); if ((code&offset) !== offset) { return false; } } return true; } /** * Returns true if address is part of the given bloom. * note: false positives are possible. 
* * @method testAddress * @param {String} hex encoded bloom * @param {String} address in hex notation * @returns {Boolean} topic is (probably) part of the block */ var testAddress = function(bloom, address) { if (!utils.isBloom(bloom)) { throw 'Invalid bloom given'; } if (!utils.isAddress(address)) { throw 'Invalid address given: "'+ address +'\"'; } return testBytes(bloom, address); }; /** * Returns true if the topic is part of the given bloom. * note: false positives are possible. * * @method hasTopic * @param {String} hex encoded bloom * @param {String} address in hex notation * @returns {Boolean} topic is (probably) part of the block */ var testTopic = function(bloom, topic) { if (!utils.isBloom(bloom)) throw "invalid bloom"; if (!utils.isTopic(topic)) throw "invalid topic"; return testBytes(bloom, topic); }; module.exports = { testAddress: testAddress, testTopic: testTopic };
{'content_hash': '8a05551fa55eb32c36efdd82613d7555', 'timestamp': '', 'source': 'github', 'line_count': 111, 'max_line_length': 107, 'avg_line_length': 26.81081081081081, 'alnum_prop': 0.6354166666666666, 'repo_name': 'hsavit1/gosofi_webpage', 'id': '0aa855ab902d9cd6128868e925f530eb13ad9b8e', 'size': '2976', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'node_modules/web3-utils/src/bloomFilter.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '2768'}, {'name': 'JavaScript', 'bytes': '14179050'}]}
"""Utilities for trainer binary.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import sys # Dependency imports from DLT2T import models # pylint: disable=unused-import from DLT2T.data_generators import all_problems # pylint: disable=unused-import from DLT2T.utils import data_reader from DLT2T.utils import decoding from DLT2T.utils import devices from DLT2T.utils import input_fn_builder from DLT2T.utils import model_builder from DLT2T.utils import registry import tensorflow as tf from tensorflow.contrib.learn.python.learn import learn_runner from tensorflow.python import debug flags = tf.flags FLAGS = flags.FLAGS flags.DEFINE_bool("registry_help", False, "If True, logs the contents of the registry and exits.") flags.DEFINE_bool("tfdbg", False, "If True, use the TF debugger CLI on train/eval.") flags.DEFINE_bool("export_saved_model", False, "Whether to export a SavedModel for serving.") flags.DEFINE_bool("dbgprofile", False, "If True, record the timeline for chrome://tracing/.") flags.DEFINE_string("model", "", "Which model to use.") flags.DEFINE_string("hparams_set", "", "Which parameters to use.") flags.DEFINE_string("hparams_range", "", "Parameters range.") flags.DEFINE_string( "hparams", "", """A comma-separated list of `name=value` hyperparameter values. This flag is used to override hyperparameter settings either when manually selecting hyperparameters or when using Vizier. 
If a hyperparameter setting is specified by this flag then it must be a valid hyperparameter name for the model.""") flags.DEFINE_string("problems", "", "Dash separated list of problems to " "solve.") flags.DEFINE_string("data_dir", None, "Directory with training data.") flags.DEFINE_string("train_mode", None,"Train mode: pretrain_A2B, pretrian_B2A, or dual.") flags.DEFINE_string("infer_mode", None,"Infer mode: A2B or B2A.") flags.DEFINE_integer("train_steps", 250000, "The number of steps to run training for.") flags.DEFINE_bool("eval_run_autoregressive", False, "Run eval autoregressively where we condition on previous" "generated output instead of the actual target.") flags.DEFINE_bool("eval_use_test_set", False, "Whether to use the '-test' data for EVAL (and PREDICT).") flags.DEFINE_integer("keep_checkpoint_max", 20, "How many recent checkpoints to keep.") flags.DEFINE_bool("experimental_optimize_placement", False, "Optimize ops placement with experimental session options.") flags.DEFINE_integer("keep_checkpoint_every_n_hours", 10000, "Number of hours between each checkpoint to be saved. " "The default value 10,000 hours effectively disables it.") flags.DEFINE_integer("save_checkpoints_secs", 0, "Save checkpoints every this many seconds. " "Default=0 means let tensorflow.contrib.learn.python.learn" " decide, which is currently set to 600 = 10 minutes.") flags.DEFINE_bool("log_device_placement", False, "Whether to log device placement.") # Distributed training flags flags.DEFINE_integer("local_eval_frequency", 2000, "Run evaluation every this steps during local training.") flags.DEFINE_bool("locally_shard_to_cpu", False, "Use CPU as a sharding device running locally. 
This allows " "to test sharded model construction on a machine with 1 GPU.") flags.DEFINE_bool("daisy_chain_variables", True, "copy variables around in a daisy chain") flags.DEFINE_bool("sync", False, "Sync compute on PS.") flags.DEFINE_string("worker_job", "/job:localhost", "name of worker job") flags.DEFINE_integer("worker_gpu", 1, "How many GPUs to use.") flags.DEFINE_integer("worker_replicas", 1, "How many workers to use.") flags.DEFINE_integer("worker_id", 0, "Which worker task are we.") flags.DEFINE_float("worker_gpu_memory_fraction", 0.95, "Fraction of GPU memory to allocate.") flags.DEFINE_integer("ps_gpu", 0, "How many GPUs to use per ps.") flags.DEFINE_string("gpu_order", "", "Optional order for daisy-chaining gpus." " e.g. \"1 3 2 4\"") flags.DEFINE_string("ps_job", "/job:ps", "name of ps job") flags.DEFINE_integer("ps_replicas", 0, "How many ps replicas.") # Decoding flags flags.DEFINE_string( "decode_hparams", "", "Comma-separated list of name=value pairs to control decode behavior. " "See decoding.decode_hparams for defaults.") def make_experiment_fn(data_dir, model_name, train_steps, eval_steps): """Returns experiment_fn for learn_runner. 
Wraps create_experiment.""" def experiment_fn(run_config, hparams): return create_experiment( data_dir, model_name=model_name, train_steps=train_steps, eval_steps=eval_steps, hparams=hparams, run_config=run_config) return experiment_fn def create_experiment(data_dir, model_name, train_steps, eval_steps, hparams, run_config): """Create Experiment.""" estimator, input_fns = create_experiment_components( data_dir=data_dir, model_name=model_name, hparams=hparams, run_config=run_config) train_monitors = [] eval_hooks = [] if FLAGS.tfdbg: hook = debug.LocalCLIDebugHook() train_monitors.append(hook) eval_hooks.append(hook) if FLAGS.dbgprofile: # Recorded traces can be visualized with chrome://tracing/ # The memory/tensor lifetime is also profiled train_monitors.append( tf.contrib.hooks.ProfilerHook( save_steps=10, output_dir=run_config.model_dir, show_dataflow=True, show_memory=True,)) optional_kwargs = {} if FLAGS.export_saved_model: assert len(hparams.problem_instances) == 1 problem = hparams.problem_instances[0] optional_kwargs["export_strategies"] = [ make_export_strategy(problem, hparams) ] return tf.contrib.learn.Experiment( estimator=estimator, train_input_fn=input_fns[tf.estimator.ModeKeys.TRAIN], eval_input_fn=input_fns[tf.estimator.ModeKeys.EVAL], train_steps=train_steps, eval_steps=eval_steps, min_eval_frequency=FLAGS.local_eval_frequency, train_monitors=train_monitors, eval_hooks=eval_hooks, **optional_kwargs) def make_export_strategy(problem, hparams): return tf.contrib.learn.make_export_strategy( lambda: data_reader.serving_input_fn(problem, hparams), as_text=True) def create_experiment_components(data_dir, model_name, hparams, run_config): """Constructs and returns Estimator and train/eval input functions.""" tf.logging.info("Creating experiment, storing model files in %s", run_config.model_dir) add_problem_hparams(hparams, FLAGS.problems) # hparams batch_size is used as minibatch size instead of tokens in batch batch_size = (hparams.use_fixed_batch_size 
and hparams.batch_size) or None num_datashards = devices.data_parallelism().n train_input_fn = input_fn_builder.build_input_fn( mode=tf.estimator.ModeKeys.TRAIN, train_mode=FLAGS.train_mode, infer_mode=FLAGS.infer_mode, hparams=hparams, data_dir=data_dir, num_datashards=num_datashards, worker_replicas=FLAGS.worker_replicas, worker_id=FLAGS.worker_id, batch_size=batch_size) eval_input_fn = input_fn_builder.build_input_fn( mode=tf.estimator.ModeKeys.EVAL, train_mode=FLAGS.train_mode, infer_mode=FLAGS.infer_mode, hparams=hparams, data_dir=data_dir, num_datashards=num_datashards, worker_replicas=FLAGS.worker_replicas, worker_id=FLAGS.worker_id, dataset_split="test" if FLAGS.eval_use_test_set else None) model_fn = model_builder.build_model_fn( model_name, train_mode=FLAGS.train_mode, infer_mode=FLAGS.infer_mode, problem_names=FLAGS.problems.split("-"), train_steps=FLAGS.train_steps, worker_id=FLAGS.worker_id, worker_replicas=FLAGS.worker_replicas, eval_run_autoregressive=FLAGS.eval_run_autoregressive, decode_hparams=decoding.decode_hparams(FLAGS.decode_hparams)) estimator = tf.estimator.Estimator( model_fn=model_fn, model_dir=run_config.model_dir, params=hparams, config=run_config) return estimator, { tf.estimator.ModeKeys.TRAIN: train_input_fn, tf.estimator.ModeKeys.EVAL: eval_input_fn } def log_registry(): if FLAGS.registry_help: tf.logging.info(registry.help_string()) sys.exit(0) def add_problem_hparams(hparams, problems): """Add problem hparams for the problems.""" hparams.problems = [] hparams.problem_instances = [] for problem_name in problems.split("-"): try: problem = registry.problem(problem_name) except LookupError: all_problem_names = sorted(registry.list_problems()) error_lines = ["%s not in the set of supported problems:" % problem_name ] + all_problem_names error_msg = "\n * ".join(error_lines) raise LookupError(error_msg) p_hparams = problem.get_hparams(hparams) hparams.problem_instances.append(problem) hparams.problems.append(p_hparams) def 
save_metadata(output_dir, hparams): """Saves FLAGS and hparams to output_dir.""" # Save FLAGS in txt file if hasattr(FLAGS, "flags_into_string"): flags_str = FLAGS.flags_into_string() t2t_flags_str = "\n".join([ "--%s=%s" % (f.name, f.value) for f in FLAGS.flags_by_module_dict()[ "DLT2T.utils.trainer_utils"] ]) else: flags_dict = FLAGS.__dict__["__flags"] flags_str = "\n".join( ["--%s=%s" % (name, str(f)) for (name, f) in flags_dict.items()]) t2t_flags_str = None flags_txt = os.path.join(output_dir, "flags.txt") with tf.gfile.Open(flags_txt, "w") as f: f.write(flags_str) if t2t_flags_str: t2t_flags_txt = os.path.join(output_dir, "flags_t2t.txt") with tf.gfile.Open(t2t_flags_txt, "w") as f: f.write(t2t_flags_str) # Save hparams as hparams.json hparams_fname = os.path.join(output_dir, "hparams.json") with tf.gfile.Open(hparams_fname, "w") as f: f.write(hparams.to_json()) def create_hparams(params_id, data_dir, passed_hparams=None): """Returns hyperparameters, including any flag value overrides. If the hparams FLAG is set, then it will use any values specified in hparams to override any individually-set hyperparameter. This logic allows tuners to override hyperparameter settings to find optimal values. Args: params_id: which set of parameters to choose (must be in _PARAMS above). data_dir: the directory containing the training data. passed_hparams: command-line overrides for some hparams. Returns: The hyperparameters as a tf.contrib.training.HParams object. """ hparams = registry.hparams(params_id)() hparams.add_hparam("data_dir", data_dir) # Command line flags override any of the preceding hyperparameter values. 
if passed_hparams: hparams = hparams.parse(passed_hparams) return hparams def create_run_config(output_dir): """Create a RunConfig object.""" run_config = tf.contrib.learn.RunConfig( model_dir=output_dir, master=FLAGS.master, gpu_memory_fraction=FLAGS.worker_gpu_memory_fraction, session_config=session_config(), keep_checkpoint_max=FLAGS.keep_checkpoint_max, keep_checkpoint_every_n_hours=FLAGS.keep_checkpoint_every_n_hours, save_checkpoints_secs=FLAGS.save_checkpoints_secs) return run_config def run(data_dir, model, output_dir, train_steps, eval_steps, schedule): """Runs an Estimator locally or distributed. Args: data_dir: The directory the data can be found in. model: The name of the model to use. output_dir: The directory to store outputs in. train_steps: The number of steps to run training for. eval_steps: The number of steps to run evaluation for. schedule: (str) The schedule to run. The value here must be the name of one of Experiment's methods. """ exp_fn = make_experiment_fn( data_dir=data_dir, model_name=model, train_steps=train_steps, eval_steps=eval_steps) # Create hparams and run_config run_config = create_run_config(output_dir) hparams = create_hparams( FLAGS.hparams_set, data_dir, passed_hparams=FLAGS.hparams) if is_chief(): save_metadata(output_dir, hparams) learn_runner.run( experiment_fn=exp_fn, schedule=schedule, run_config=run_config, hparams=hparams) def validate_flags(): if not FLAGS.model: raise ValueError("Must specify a model with --model.") if not FLAGS.problems: raise ValueError("Must specify a set of problems with --problems.") if not (FLAGS.hparams_set or FLAGS.hparams_range): raise ValueError("Must specify either --hparams_set or --hparams_range.") if not FLAGS.schedule: raise ValueError("Must specify --schedule.") if not FLAGS.output_dir: FLAGS.output_dir = "/tmp/DLT2T" tf.logging.warning("It is strongly recommended to specify --output_dir. 
" "Using default output_dir=%s.", FLAGS.output_dir) def is_chief(): schedules = ["train", "train_and_evaluate"] return FLAGS.worker_id == 0 and FLAGS.schedule in schedules def session_config(): """The TensorFlow Session config to use.""" graph_options = tf.GraphOptions(optimizer_options=tf.OptimizerOptions( opt_level=tf.OptimizerOptions.L1, do_function_inlining=False)) if FLAGS.experimental_optimize_placement: rewrite_options = tf.RewriterConfig(optimize_tensor_layout=True) rewrite_options.optimizers.append("pruning") rewrite_options.optimizers.append("constfold") rewrite_options.optimizers.append("layout") graph_options = tf.GraphOptions( rewrite_options=rewrite_options, infer_shapes=True) gpu_options = tf.GPUOptions( per_process_gpu_memory_fraction=FLAGS.worker_gpu_memory_fraction) config = tf.ConfigProto( allow_soft_placement=True, graph_options=graph_options, gpu_options=gpu_options, log_device_placement=FLAGS.log_device_placement) return config
{'content_hash': '361af3c17ed860265cb564c40b07b4fd', 'timestamp': '', 'source': 'github', 'line_count': 389, 'max_line_length': 90, 'avg_line_length': 37.095115681233935, 'alnum_prop': 0.6824670824670824, 'repo_name': 'renqianluo/DLT2T', 'id': '91c6b56593ee4556bbca2e8faa7fa15e008c2b17', 'size': '15028', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'DLT2T/utils/trainer_utils.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'JavaScript', 'bytes': '10699'}, {'name': 'Jupyter Notebook', 'bytes': '14574'}, {'name': 'Python', 'bytes': '1155627'}, {'name': 'Shell', 'bytes': '744'}]}
import os import stat import subprocess import re import netaddr from orderedattrdict import AttrDict from enum import Enum from filelock import Timeout, FileLock from socket import gethostbyname from time import sleep from random import random import lib.logger as logger from lib.ssh import SSH from lib.switch_exception import SwitchException from lib.genesis import get_switch_lock_path FILE_PATH = os.path.dirname(os.path.abspath(__file__)) SWITCH_LOCK_PATH = get_switch_lock_path() class SwitchCommon(object): ENABLE_REMOTE_CONFIG = 'configure terminal ; {} ' IFC_ETH_CFG = 'interface ethernet {} ' IFC_PORT_CH_CFG = 'interface port-channel {} ' NO_IFC_PORT_CH_CFG = 'no interface port-channel {} ' PORT_PREFIX = 'Eth' SEP = ';' SHOW_VLANS = 'show vlan' CREATE_VLAN = 'vlan {}' DELETE_VLAN = 'no vlan {}' SHOW_PORT = 'show interface brief' CLEAR_MAC_ADDRESS_TABLE = 'clear mac address-table dynamic' SHOW_MAC_ADDRESS_TABLE = 'show mac address-table ;' ENABLE_LACP = 'feature lacp' NO_CHANNEL_GROUP = 'no channel-group' CHANNEL_GROUP_MODE = 'channel-group {} mode {} ' SHOW_PORT_CHANNEL = 'show port-channel summary' SWITCHPORT_MODE = 'switchport mode {} ' SWITCHPORT_ACCESS_VLAN = 'switchport access vlan {} ' SWITCHPORT_TRUNK_NATIVE_VLAN = 'switchport trunk native vlan {} ' SWITCHPORT_TRUNK_ALLOWED_VLAN = 'switchport trunk allowed vlan {} {}' SET_MTU = 'mtu {}' NO_MTU = 'no mtu' SHUTDOWN = 'shutdown' NO_SHUTDOWN = 'no shutdown' FORCE = 'force' MGMT_INTERFACE_CONFIG = 'interface ip {}' SET_INTERFACE_IPADDR = ' ;ip address {}' SET_INTERFACE_MASK = ' ;ip netmask {}' SET_VLAN = ' ;vlan {}' SHOW_IP_INTERFACE_BRIEF = 'show ip interface brief' SHOW_INTERFACE = 'show interface vlan{}' SET_INTERFACE = ('feature interface-vlan ;' 'interface vlan {} ;' 'ip address {} {} ;' 'management ;' 'no shutdown') def __init__(self, host=None, userid=None, password=None, mode=None, outfile=None): self.log = logger.getlogger() pass class AllowOp(Enum): ADD = 'add' ALL = 'all' EXCEPT = 'except' NONE = 
'none' REMOVE = 'remove' class PortMode(Enum): ACCESS = 'access' FEX_FABRIC = 'fex-fabric' TRUNK = 'trunk' HYBRID = '' TRUNK_NATIVE = '' def send_cmd(self, cmd): if self.mode == 'passive': f = open(self.outfile, 'a+') f.write(cmd + '\n') f.close() return host_ip = gethostbyname(self.host) lockfile = os.path.join(SWITCH_LOCK_PATH, host_ip + '.lock') if not os.path.isfile(lockfile): os.mknod(lockfile) os.chmod(lockfile, stat.S_IRWXO | stat.S_IRWXG | stat.S_IRWXU) lock = FileLock(lockfile) cnt = 0 while cnt < 5 and not lock.is_locked: if cnt > 0: self.log.info('Waiting to acquire lock for switch {}'. format(self.host)) cnt += 1 try: lock.acquire(timeout=5, poll_intervall=0.05) # 5 sec, 50 ms sleep(0.01) # give switch a chance to close out comms except Timeout: pass if lock.is_locked: if self.ENABLE_REMOTE_CONFIG: cmd = self.ENABLE_REMOTE_CONFIG.format(cmd) self.log.debug(cmd) ssh = SSH() __, data, _ = ssh.exec_cmd( self.host, self.userid, self.password, cmd, ssh_log=True, look_for_keys=False) lock.release() # sleep 60 ms to give other processes a chance. sleep(0.06 + random() / 100) # lock acquire polls at 50 ms if lock.is_locked: self.log.error('Lock is locked. Should be unlocked') return data.decode("utf-8") else: self.log.error('Unable to acquire lock for switch {}'.format(self.host)) raise SwitchException('Unable to acquire lock for switch {}'. 
format(self.host)) def get_enums(self): return self.PortMode, self.AllowOp def show_ports(self, format='raw'): if self.mode == 'passive': return None ports = {} port_info = self.send_cmd(self.SHOW_PORT) if format == 'raw': return port_info elif format == 'std': port_info = port_info.splitlines() for line in port_info: match = re.search( r'Eth([0-9/]+)\s+(\d+)\s+\w+\s+(access|trunk)', line) if match: # mode, avlans = self._get_port_detail(match) ports[match.group(1)] = { 'mode': match.group(3), 'nvlan': match.group(2), 'avlans': ''} port_info = self.send_cmd('show interface trunk').split('Port') for item in port_info: if 'Vlans Allowed on Trunk' in item: item = item.splitlines() for line in item: match = re.search( r'Eth((?:\d+/)+\d+)\s+((?:\d+[,-])*\d+)', line) if match: ports[match.group(1)]['avlans'] = match.group(2) return ports def show_vlans(self): if self.mode == 'passive': return None self.log.debug(self.SHOW_VLANS) vlan_info = self.send_cmd(self.SHOW_VLANS) return vlan_info def show_native_vlan(self, port): if self.mode == 'passive': return None port = str(port) ports = self.show_ports(format='std') return ports[port]['nvlan'] def set_switchport_mode(self, port, mode, vlan=None): port = str(port) cmd = self.IFC_ETH_CFG.format(port) + self.SEP cmd += self.SWITCHPORT_MODE.format(mode.value) if vlan: if mode.value == 'trunk': cmd += self.SEP + self.SWITCHPORT_TRUNK_NATIVE_VLAN.format(vlan) if mode.value == 'access': cmd += self.SEP + self.SWITCHPORT_ACCESS_VLAN.format(vlan) self.send_cmd(cmd) ports = self.show_ports(format='std') if port not in ports: msg = 'Unable to verify setting of switchport mode' msg += 'for port {}. May already be in a channel group.' 
msg.format(port) self.log.debug(msg) return if self.mode == 'passive' or ports[port]['mode'] == mode.value: self.log.debug( 'Port {} is in {} mode'.format(port, mode.value)) else: raise SwitchException( 'Failed setting port {} to {} mode'.format(port, mode.value)) if vlan: if self.mode == 'passive' or str(vlan) == ports[port]['nvlan']: msg = 'PVID/Native vlan {} set on port {}'.format(vlan, port) self.log.debug(msg) else: msg = 'Failed setting PVID/Native vlan {} on port {}'.format( vlan, port) self.log.error(msg) raise SwitchException(msg) def is_port_in_trunk_mode(self, port): """Allows determination if a port is in 'trunk' mode. """ if self.mode == 'passive': return None port = str(port) ports = self.show_ports(format='std') return self.PortMode.TRUNK.value in ports[port]['mode'] def is_port_in_access_mode(self, port): if self.mode == 'passive': return None port = str(port) ports = self.show_ports('std') return self.PortMode.ACCESS.value in ports[port]['mode'] def allowed_vlans_port(self, port, operation, vlans=''): """ configure vlans on a port channel ARGS: operation (enum of AllowOp): add | all | except | none | remove vlan (str or tuple or list). if type string, can be of the following formats: '4' or '4,5,8' or '5-10' """ if isinstance(vlans, (tuple, list)): vlans = vlans[:] vlans = [str(vlans[i]) for i in range(len(vlans))] vlans = ','.join(vlans) else: vlans = str(vlans) cmd = self.IFC_ETH_CFG.format(port) + self.SEP + \ self.SWITCHPORT_TRUNK_ALLOWED_VLAN.format(operation.value, vlans) self.send_cmd(cmd) res = self.is_vlan_allowed_for_port(vlans, port) if operation.value == 'add': if res is None: return elif not res: msg = 'Not all vlans in {} were added to port {}'. \ format(vlans, port) self.log.error(msg) else: self.log.debug('vlans {} were added to port {}'. format(vlans, port)) if operation.value == 'remove': if res is None: return elif res: msg = 'Not all vlans in {} were removed from port {}'. 
\ format(vlans, port) self.log.error(msg) else: self.log.debug('vlans {} were removed from port {}'. format(vlans, port)) def is_vlan_allowed_for_port(self, vlans, port): """ Test that all vlans in vlans are allowed for the given port Args: vlans: (int or str) string can be of form 'n', 'n,m,p', 'n-p' port: (int or str) Returns True if all vlans in vlans argument are allowed for port """ if self.mode == 'passive': return None vlans = str(vlans) vlans = vlans.split(',') result = True port = str(port) ports = self.show_ports('std') if port not in ports: msg = 'Unable to verify setting of vlans ' msg += 'for port {}. May already be in a channel group.' msg = msg.format(port) self.log.debug(msg) return avlans = ports[port]['avlans'] avlans = avlans.split(',') for vlan in vlans: res = False for i, _vlans in enumerate(avlans): _vlans = _vlans.strip(' ') if not vlan: res = True break if not _vlans: break elif '-' in vlan and vlan == _vlans: res = True break elif int(vlan) >= int(_vlans.split('-')[0]) and \ int(vlan) <= int(_vlans.split('-')[-1]): res = True break else: pass result = result and res return result def create_vlan(self, vlan): self.send_cmd(self.CREATE_VLAN.format(vlan)) if self.mode == 'passive' or self.is_vlan_created(vlan): self.log.debug('Created VLAN {}'.format(vlan)) else: raise SwitchException('Failed creating VLAN {}'.format(vlan)) def delete_vlan(self, vlan): self.send_cmd(self.DELETE_VLAN.format(vlan)) if self.mode == 'active' and self.is_vlan_created(vlan): self.log.warning( 'Failed deleting VLAN {}'.format(vlan)) raise SwitchException( 'Failed deleting VLAN {}'.format(vlan)) self.log.info('vlan {} deleted.'.format(vlan)) return def is_vlan_created(self, vlan): if self.mode == 'passive': return None if re.search( r'^' + str(vlan), self.send_cmd(self.SHOW_VLANS), re.MULTILINE): return True return False def set_mtu_for_port(self, port, mtu): # Bring port down self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.SHUTDOWN) # Set MTU if mtu == 
0: self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.NO_MTU) else: self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.SET_MTU.format(mtu)) # Bring port up self.send_cmd( self.IFC_ETH_CFG.format(port) + self.SEP + self.NO_SHUTDOWN) def show_mac_address_table(self, format=False): """Get switch mac address table. The returned text string can be raw or optionally fomatted. Args: format (boolean) : set to 'dict' or 'std' to return a dictionary Returns: raw string if format=False dictionary of ports and mac address values in native switch form if format = 'dict'. ordered dictionary of ports and mac address values in a standard format if fmt = 'std'. """ if self.mode == 'passive': mac_info = {} try: with open(self.host, 'r') as f: mac_info = f.read() except IOError as error: self.log.error( 'Passive switch MAC address table file not found (%s)' % error) raise mac_info = self.get_port_to_mac(mac_info) return mac_info mac_info = self.send_cmd(self.SHOW_MAC_ADDRESS_TABLE) if not format or format == 'raw': return mac_info return self.get_port_to_mac(mac_info, format, self.PORT_PREFIX) def clear_mac_address_table(self): """Clear switch mac address table by writing the CLEAR_MAC_ADDRESS_TABLE string to the switch. Args: None. The CLEAR_MAC_ADDRESS_TABLE string can be over-ridden in the specific switch class module. Returns: Nothing """ self.send_cmd(self.CLEAR_MAC_ADDRESS_TABLE) def is_pingable(self): try: if self.mode == 'passive': return None output = subprocess.check_output( ['bash', '-c', 'ping -c2 -i.5 ' + self.host]).decode("utf-8") if '0% packet loss' in output: return True else: return False except subprocess.CalledProcessError as exc: self.log.error('Unable to ping switch. {}'.format(exc)) return False def get_port_to_mac(self, mac_address_table, fmt='std', port_prefix=' '): """Convert MAC address table to dictionary. Args: mac_address_table (string): MAC address table. Lines delimited with line feed. 
Assumes a header row with "Port" as a column header followed by a delimiter row composed of dashes ('-') which delimit columns. Handles MAC addresses formatted as 'cc:cc:cc:cc:cc:cc' or 'cccc.cccc.cccc' Returns: dictionary: Keys are string port numbers and values are a list of MAC addresses both in native switch format. """ import lib.logger as logger log = logger.getlogger() pos = None mac_dict = AttrDict() _mac_iee802 = r'([\dA-F]{2}[\.:-]){5}([\dA-F]{2})' _mac_cisco = r'([\dA-F]{4}\.){2}[\dA-F]{4}' _mac_all = "%s|%s" % (_mac_iee802, _mac_cisco) _mac_regex = re.compile(_mac_all, re.I) mac_address_table = mac_address_table.splitlines() p2 = re.compile('Port', re.IGNORECASE) for line in mac_address_table: # find row with 'Port' label match = p2.search(line) if match: pos = match.start() # find header seperator row if re.search(r'--+', line): log.debug('Found header seperator row: {}'.format(line)) iter = re.finditer(r'--+', line) for i, match in enumerate(iter): # find column aligned with 'Port' if (pos is not None and pos >= match.span()[0] and pos < match.span()[1]): port_span = (match.span()[0], match.span()[1]) # find rows with MACs match = _mac_regex.search(line) if match: line = self.sanitize_line(line) mac = match.group() log.debug('Found mac address: {}'.format(mac)) _mac = mac if fmt == 'std': _mac = mac[0:2] mac = re.sub(r'\.|\:', '', mac) for i in (2, 4, 6, 8, 10): _mac = _mac + ':' + mac[i:i + 2] # Extract port section of row port = line[port_span[0] - 1:port_span[1]].strip(' ') if fmt == 'std': port = port.replace(port_prefix, '') if port not in mac_dict.keys(): mac_dict[port] = [_mac] else: mac_dict[port].append(_mac) return mac_dict @staticmethod def sanitize_line(line): return line def enable_lacp(self): self.send_cmd(self.ENABLE_LACP) def show_port_channel_interfaces(self): return self.send_cmd(self.SHOW_PORT_CHANNEL) def remove_ports_from_port_channel_ifc(self, ports): # Remove interface from channel-group for port in ports: self.send_cmd( 
self.IFC_ETH_CFG.format(port) + self.SEP + self.NO_CHANNEL_GROUP) port_chan_summ = self.show_port_channel_interfaces() for port in ports: if re.findall(self.PORT_PREFIX + str(port) + r'[\s+|\(]', port_chan_summ): self.log.error('Port {} not removed from port channel'.format( port)) def remove_port_channel_ifc(self, lag_ifc): # Remove LAG interface cmd = self.NO_IFC_PORT_CH_CFG.format(lag_ifc) self.send_cmd(cmd) def create_port_channel_ifc(self, lag_ifc): # Create a LAG cmd = self.IFC_PORT_CH_CFG.format(lag_ifc) self.send_cmd(cmd) def set_port_channel_mode(self, port_ch, mode, nvlan=None): cmd = self.IFC_PORT_CH_CFG.format(port_ch) + self.SEP +\ self.SWITCHPORT_MODE.format(mode.value) if nvlan: cmd += self.SEP + self.SWITCHPORT_TRUNK_NATIVE_VLAN.format(nvlan) self.send_cmd(cmd) def add_ports_to_port_channel_ifc(self, ports, lag_ifc, mode='active'): # Map a physical port to the LAG in specified mode (active for LACP) for port in ports: cmd = self.IFC_ETH_CFG.format(port) + self.SEP + \ self.CHANNEL_GROUP_MODE.format(lag_ifc, mode) self.send_cmd(cmd) port_chan_summ = self.show_port_channel_interfaces() for port in ports: if not re.findall(self.PORT_PREFIX + str(port) + r'[\s+|\(]', port_chan_summ): self.log.error('Port {} not added to port channel {}'.format( port, lag_ifc)) raise SwitchException('Port {} not added to port channel {}'. format(port, lag_ifc)) def add_vlans_to_port_channel(self, port, vlans): """ DEPRECATED """ ports = self.show_ports('std') port = str(port) if port not in ports: raise SwitchException( 'Port inaccessible (may already be in port channel).' 
'\nFailed adding vlans {} to port {}'.format(vlans, port)) # Enable trunk mode for port self.send_cmd(self.SET_LAG_PORT_CHANNEL_MODE_TRUNK.format(port)) # Add VLANs to port for vlan in vlans: self.send_cmd( self.LAG_PORT_CHANNEL.format(port) + 'switchport trunk allowed vlan add {}'.format(vlan)) def allowed_vlans_port_channel(self, port, operation, vlans=''): """ configure vlans on a port channel ARGS: operation (str): add | all | except | none | remove vlan (str or tuple or list). if type string, can be of the following formats: '4' or '4,5,8' or '5-10' """ if isinstance(vlans, (tuple, list)): vlans = [str(vlans[i]) for i in range(len(vlans))] vlans = ','.join(vlans) else: vlans = str(vlans) cmd = self.IFC_PORT_CH_CFG.format(port) + self.SEP + \ self.SWITCHPORT_TRUNK_ALLOWED_VLAN.format(operation.value, vlans) self.send_cmd(cmd) def set_mtu_for_port_channel(self, port, mtu): # Set port-channel MTU if mtu == 0: self.send_cmd( self.LAG_PORT_CHANNEL.format(port) + 'no mtu ' + self.FORCE) else: self.send_cmd( self.LAG_PORT_CHANNEL.format(port) + self.SET_MTU.format(mtu) + ' ' + self.FORCE) def remove_interface(self, vlan, host, netmask): """Removes an in-band management interface. Args: host (string): hostname or ipv4 address in dot decimal notation netmask (string): netmask in dot decimal notation vlan (int or string): value between 1 and 4094. raises: SwitchException if unable to remove interface """ vlan = str(vlan) interfaces = self.show_interfaces(vlan, host, netmask, format='std') if interfaces[-1][0]['configured']: self.send_cmd('interface vlan {} ;no ip address {} {}'. format(vlan, host, netmask)) self.send_cmd('no interface vlan {}'.format(vlan)) interfaces = self.show_interfaces(vlan, host, netmask, format='std') if interfaces[-1][0]['configured']: self.log.debug('Failed to remove interface Vlan {}.'.format(vlan)) raise SwitchException('Failed to remove interface Vlan {}.'. 
format(vlan)) else: if interfaces[-1][0]['found vlan']: self.log.debug('Specified interface on vlan {} does not exist.'. format(vlan)) raise SwitchException('Failed to remove interface Vlan {}.'. format(vlan)) def show_interfaces(self, vlan='', host=None, netmask=None, format=None): """Gets from the switch a list of programmed in-band interfaces. The standard format consists of a list of lists. Each list entry contains the vlan number, the ip address, netmask and the number of the interface. which do not number the in-band interfaces, the last item in each list is set to '-'. When vlan, host and netmask are specified, the last list item contains 'True' or 'False' indicating whether an interface already exists with the specified vlan, host and netmask. For switches which do number the interfaces, (ie Lenovo) the last list item also contains the next available interface number and the number of the found interface. Args: vlan (string): String representation of integer between 1 and 4094. If none specified, usually the default vlan is used. host (string): hostname or ipv4 address in dot decimal notation netmask (string): netmask in dot decimal notation format (string): 'std' If format is not specified, The native (raw) format is returned. If format is set to 'std', a 'standard' format is returned. Returns: If format is unspecified, returns a raw string of data as it comes from the switch. If format == 'std' a standard format is returned. Standard format consists of a list of lists. Each list entry contains the vlan number, the ip address, netmask and the number of the interface. For switches which do not number the in-band interfaces, the last item in each list is set to '-'. When vlan, host and netmask are specified, the last list item contains a dictionary. The dictionary has three entries; 'configured' : set to True or False indicating whether an interface already exists with the specified vlan, host and netmask. 
'avail ifc' : For switches which do number the interfaces, (ie Lenovo) this dictioanary entry contains the next available interface number. 'found ifc' : For switches which do number the interfaces, this entry contains the number of the found interface. """ if self.mode == 'passive': return None ifcs = [] ifc_info = '' vlan = str(vlan) found, found_vlan = False, False ifc_info_brief = self.send_cmd(self.SHOW_IP_INTERFACE_BRIEF) vlan_ifcs = re.findall(r'Vlan(\d+)', ifc_info_brief, re.MULTILINE) for ifc in vlan_ifcs: ifc_info = ifc_info + self.send_cmd(self.SHOW_INTERFACE.format(ifc)) if format is None: return ifc_info ifc_info = ifc_info.split('Vlan') for line in ifc_info: match = re.search(r'(\d+).*Internet Address is\s+' r'((\w+.\w+.\w+.\w+)/\d+)', line, re.DOTALL) if match: mask = netaddr.IPNetwork(match.group(2)) mask = str(mask.netmask) ifcs.append( [match.group(1), match.group(3), mask, '-']) if (vlan, host, netmask, '-') == tuple(ifcs[-1]): found = True if vlan in ifcs[-1]: found_vlan = True ifcs.append([{'configured': found, 'found vlan': found_vlan}]) return ifcs def configure_interface(self, host, netmask, vlan=1, intf=None): """Configures a management interface. This implementation checks if the host ip is already in use. If it is, a check is made to see if it is configured as specified. If not, an exception is raised. Lenovo numbers interfaces. The specified vlan will be created if it does not already exist. When implementing this method for a new switch, minimally this method should configure (overwrite if necessary) the specified interface. Args: host (string): hostname or ipv4 address in dot decimal notation netmask (string): netmask in dot decimal notation vlan (string): String representation of integer between 1 and 4094. The management interface is created on the specified vlan intf (string): optional. String representation of integer between 1 and 128. 
raises: SwitchException if unable to program interface """ vlan = str(vlan) interfaces = self.show_interfaces(vlan, host, netmask, format='std') if interfaces[-1][0]['configured']: self.log.debug( 'Switch interface vlan {} already configured'.format(vlan)) return if interfaces[-1][0]['found vlan']: self.log.debug( 'Conflicting address. Interface vlan {} already configured'. format(vlan)) raise SwitchException( 'Conflicting address exists on interface vlan {}'.format(vlan)) return # create vlan if it does not already exist self.create_vlan(vlan) # create the interface self.send_cmd(self.SET_INTERFACE.format(vlan, host, netmask)) interfaces = self.show_interfaces(vlan, host, netmask, format='std') if not interfaces[-1][0]['configured']: raise SwitchException( 'Failed configuring management interface vlan {}'.format(vlan))
{'content_hash': '6b734f67df99e0fb0950c963aaf84e64', 'timestamp': '', 'source': 'github', 'line_count': 702, 'max_line_length': 84, 'avg_line_length': 40.31054131054131, 'alnum_prop': 0.5364690084104884, 'repo_name': 'open-power-ref-design-toolkit/cluster-genesis', 'id': 'c8cae4b444f1391fa54f66996904614f7bd236aa', 'size': '28897', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'scripts/python/lib/switch_common.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Python', 'bytes': '609082'}, {'name': 'Shell', 'bytes': '27326'}]}
using System; using System.Collections.Generic; using Amazon.DynamoDBv2.DocumentModel; using Amazon.DynamoDBv2.Model; namespace Amazon.DynamoDBv2.DataModel { /// <summary> /// Context interface for using the DataModel mode of DynamoDB. /// Used to interact with the service, save/load objects, etc. /// </summary> public partial interface IDynamoDBContext : IDisposable { #region Save/serialize /// <summary> /// Serializes an object to a Document. /// </summary> /// <typeparam name="T">Type to serialize as.</typeparam> /// <param name="value">Object to serialize.</param> /// <returns>Document with attributes populated from object.</returns> Document ToDocument<T>(T value); /// <summary> /// Serializes an object to a Document. /// </summary> /// <typeparam name="T">Type to serialize as.</typeparam> /// <param name="value">Object to serialize.</param> /// <param name="operationConfig">Config object which can be used to override the table used.</param> /// <returns>Document with attributes populated from object.</returns> Document ToDocument<T>(T value, DynamoDBOperationConfig operationConfig); #endregion #region Load/deserialize /// <summary> /// Deserializes a document to an instance of type T. /// </summary> /// <typeparam name="T">Type to populate.</typeparam> /// <param name="document">Document with properties to use.</param> /// <returns> /// Object of type T, populated with properties from the document. /// </returns> T FromDocument<T>(Document document); /// <summary> /// Deserializes a document to an instance of type T. /// </summary> /// <typeparam name="T">Type to populate.</typeparam> /// <param name="document">Document with properties to use.</param> /// <param name="operationConfig">Config object which can be used to override the table used.</param> /// <returns> /// Object of type T, populated with properties from the document. 
/// </returns> T FromDocument<T>(Document document, DynamoDBOperationConfig operationConfig); /// <summary> /// Deserializes a collections of documents to a collection of instances of type T. /// </summary> /// <typeparam name="T">Type to populate.</typeparam> /// <param name="documents">Documents to deserialize.</param> /// <returns> /// Collection of items of type T, each populated with properties from a corresponding document. /// </returns> IEnumerable<T> FromDocuments<T>(IEnumerable<Document> documents); /// <summary> /// Deserializes a collections of documents to a collection of instances of type T. /// </summary> /// <typeparam name="T">Type to populate.</typeparam> /// <param name="documents">Documents to deserialize.</param> /// <param name="operationConfig">Config object which can be used to override the table used.</param> /// <returns> /// Collection of items of type T, each populated with properties from a corresponding document. /// </returns> IEnumerable<T> FromDocuments<T>(IEnumerable<Document> documents, DynamoDBOperationConfig operationConfig); #endregion } }
{'content_hash': 'cf905e2344cf06ab71b0bcebf88ff9f0', 'timestamp': '', 'source': 'github', 'line_count': 84, 'max_line_length': 114, 'avg_line_length': 41.19047619047619, 'alnum_prop': 0.6294797687861272, 'repo_name': 'Shogan/Unity3D.CharacterCreator', 'id': 'fb331e63fefade3af20a160de83f661d8202734e', 'size': '4048', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Assets/AWSSDK/src/Services/DynamoDBv2/Custom/DataModel/IDynamoDBContext.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '3723750'}, {'name': 'Objective-C++', 'bytes': '1311'}]}
.. EN-Revision: none .. _zend.file.transfer.validators: Validateurs pour Zend\File\Transfer =================================== ``Zend\File\Transfer`` est fourni avec de multiples validateurs liés qui doivent être utiliser pour accroître la sécurité et prévenir les attaques possibles. Notez que les validateurs ne sont bons que si vous les utilisez. Tous les validateurs qui sont fournis avec ``Zend\File\Transfer`` peuvent être trouvés avec le composant ``Zend_Validator`` et sont nommés ``Zend\Validate\File\*``. Les validateurs suivants sont actuellement disponibles : - *Count*\  : ce validateur vérifie le nombre de fichiers. Il est possible de définir un minimum et un maximum et une erreur sera lancée s'ils sont dépassés. - *Crc32*\  : ce validateur vérifie la valeur de hachage crc32 du contenu d'un fichier. Il est basé sur le validateur *Hash* et en simplifiant son utilisation par le support unique du Crc32. - *ExcludeExtension*\  : ce validateur vérifie l'extension des fichiers. Il lancera une erreur quand un fichier aura une extension non souhaitée. Ainsi vous pouvez empêcher la validation de certaines extensions. - *ExcludeMimeType*\  : ce validateur vérifie le type *MIME* des fichiers. Il est aussi capable de valider un groupe de type *MIME* et générera une erreur quand le type *MIME* d'un fichier donné correspond. - *Exists*\  : ce validateur vérifie l'existence des fichiers. Il lancera une erreur quand un fichier n'existera pas. - *Extension*\  : ce validateur vérifie l'extension des fichiers. Il lancera une erreur quand un fichier n'aura pas l'extension définie. - *FilesSize*\  : ce validateur vérifie la taille complète de tous les fichiers à valider. Il conserve en mémoire la taille de tous les fichiers chargés et lance une erreur quand la somme de tous les fichiers dépasse la taille définie. Il est aussi possible de définir une taille minimum et maximum. - *ImageSize*\  : ce validateur vérifie la taille des images. 
Il valide la largeur et la hauteur et permet de paramétrer à la fois une valeur minimum et maximum. - *IsCompressed*\  : ce validateur vérifie si le fichier est compressé. Il est basé sur le validateur *MimeType* et valide les archives compressées comme zip ou arc. Vous pouvez aussi limiter à des types d'archives particuliers. - *IsImage*\  : ce validateur vérifie si un fichier est une image. Il est basé sur le validateur *MimeType* et valide les images comme jpg ou gif. Vous pouvez aussi limiter à des types d'images particuliers. - *Hash*\  : ce validateur vérifie la valeur de hachage md5 du contenu d'un fichier. Il supporte de multiples algorithmes. - *Md5*\  : ce validateur vérifie la valeur de hachage md5 du contenu d'un fichier. Il est basé sur le validateur *Hash* et en simplifiant son utilisation par le support unique du Md5. - *MimeType*\  : ce validateur vérifie le type *MIME* des fichiers. Il est aussi capable de valider des groupes de type *MIME* et de générer une erreur quand le type *MIME* d'un fichier donné ne correspond pas. - *NotExists*\  : ce validateur vérifie l'existence des fichiers. Il lancera une erreur quand un fichier existera déjà. - *Sha1*\  : ce validateur vérifie la valeur de hachage sha1 du contenu d'un fichier. Il est basé sur le validateur *Hash* et en simplifiant son utilisation par le support unique du Sha1. - *Size*\  : ce validateur permet de valider la taille d'un fichier en particulier. Il est possible de définir un minimum et un maximum et une erreur sera lancée s'ils sont dépassés. - *Upload*\  : ce validateur est interne, il vérifie si l'upload a produit une erreur. Vous ne devez pas le paramétrer, il est automatiquement activé par *Zend\File\Transfer* lui-même. Vous pouvez donc oublier ce validateur. Il faut juste savoir qu'il existe. - *WordCount*\  : ce validateur est capable de vérifier le nombre de mots à l'intérieur du fichier. 
Il permet de définir des valeurs minimum et maximum et émettra une erreur si l'un ou l'autre des seuils est dépassé. .. _zend.file.transfer.validators.usage: Utiliser les validateurs avec Zend\File\Transfer ------------------------------------------------ L'utilisation des validateurs est assez simple. Il existe de multiples méthodes pour ajouter et manipuler les validateurs. - ``isValid($files = null)``\  : vérifie le(s) fichier(s) fourni(s) avec tout le jeu de validateurs paramétrés. ``$files`` peut être soit un vrai nom de fichier, soit des noms d'éléments de formulaire ou des noms de fichiers temporaires. - *addValidator($validator, $breakChainOnFailure, $options = null, $files = null)*\  : ajoute le validateur à la pile des validateurs (optionnellement seul le(s) fichier(s) spécifié(s)). ``$validator`` peut être soit une instance d'un validateur réel, ou un nom court spécifiant le validateur (par exemple, "Count"). - ``addValidators(array $validators, $files = null)``\  : ajoute les validateurs à la pile des validateurs. Chaque entrée peut être soit une paire type/options, ou un tableau avec la clé "validator" spécifiant le validateur (tous les autres options seront considérées comme des options du validateur au moment de l'instanciation). - ``setValidators(array $validators, $files = null)``\  : surcharge les validateurs existants avec les validateurs spécifiés. Les validateurs doivent respecter la même syntaxe que ``addValidators()``. - ``hasValidator($name)``\  : indique si un validateur est enregistré. - ``getValidator($name)``\  : retourne un validateur préalablement enregistré. - ``getValidators($files = null)``\  : retourne les validateurs enregistrés ; si ``$files`` est fourni, retourne les validateurs pour ce fichier en particulier ou pour tous les fichiers. - ``removeValidator($name)``\  : enlève le validateur préalablement enregistré. - ``clearValidators()``\  : efface tous les validateurs. .. _zend.file.transfer.validators.usage.example: .. 
rubric:: Ajouter des validateurs au(x) fichier(s) transféré(s)

.. code-block:: php
   :linenos:

   $upload = new Zend\File\Transfer();

   // Paramètre un poids de fichier de 20000 octets
   $upload->addValidator('Size', false, 20000);

   // Paramètre un poids de fichier de 20 octets minimum
   // et de 20000 octets maximum
   $upload->addValidator('Size', false, array('min' => 20, 'max' => 20000));

   // Paramètre un poids de fichier de 20 octets minimum et
   // de 20000 octets maximum et un nombre de fichiers en une seule étape
   $upload->setValidators(array(
       'Size'  => array('min' => 20, 'max' => 20000),
       'Count' => array('min' => 1, 'max' => 3),
   ));

.. _zend.file.transfer.validators.usage.exampletwo:

.. rubric:: Limiter les validateurs à des fichiers uniques

``addValidator()``, ``addValidators()``, et ``setValidators()`` acceptent chacun un argument final
``$files``. Cet argument peut être utilisé pour spécifier un fichier en particulier ou un tableau de
fichiers sur lequel appliquer le validateur donné.

.. code-block:: php
   :linenos:

   $upload = new Zend\File\Transfer();

   // Paramètre un poids de fichier de 20000 octets et
   // limite celui-ci à 'file2'
   $upload->addValidator('Size', false, 20000, 'file2');

Généralement vous devriez simplement utiliser la méthode ``addValidators()``, qui peut être appelée
plusieurs fois.

.. _zend.file.transfer.validators.usage.examplemultiple:

.. rubric:: Ajouter des validateurs multiples

Souvent il est plus simple d'appeler plusieurs fois ``addValidator()``\  : un appel pour chaque validateur. Ceci
améliore aussi la lisibilité et rend votre code plus maintenable. Comme toutes les méthodes fournissent une
interface fluide, vous pouvez enchaîner les appels comme montré ci-dessous :

.. code-block:: php
   :linenos:

   $upload = new Zend\File\Transfer();

   // Paramètre un poids de fichier de 20000 octets
   $upload->addValidator('Size', false, 20000)
          ->addValidator('Count', false, 2)
          ->addValidator('Filessize', false, 25000);

..
note:: Notez que même si l'ajout du même validateur plusieurs fois est autorisé, faire ceci peut entraîner des problèmes si vous utilisez différentes options pour le même validateur. Et pour finir vous pouvez tout simplement vérifier le(s) fichier(s) en appelant ``isValid()``. .. _zend.file.transfer.validators.usage.exampleisvalid: .. rubric:: Valider les fichiers ``isValid()`` accepte les fichiers uploadés ou téléchargés, le nom de fichier temporaire et bien sûr le nom de l'élément de formulaire. Si aucun paramètre ou ``NULL`` est fourni, tous les fichiers seront vérifiés. .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Paramètre un poids de fichier de 20000 octets $upload->addValidator('Size', false, 20000) ->addValidator('Count', false, 2) ->addValidator('FilesSize', false, 25000); if (!$upload->isValid()) { print "Echec de validation"; } .. note:: Notez que ``isValid()`` sera automatiquement appelé quand vous recevez les fichiers et qu'il n'a pas été appelé auparavant. Quand une validation a échoué, c'est probablement intéressant d'avoir des informations sur les problèmes rencontrés. A cette fin, vous pouvez utiliser la méthode ``getMessages()`` qui retourne tous les messages de validation sous la forme d'un tableau, ``getErrors()`` qui retourne tous les codes d'erreurs et ``hasErrors()`` qui retourne ``TRUE`` dès qu'une erreur de validation est rencontrée. .. _zend.file.transfer.validators.count: Validateur Count ---------------- Le validateur *Count* vérifie le nombre de fichiers fournis. Il supporte les clés d'options suivantes : - *min*\  : paramètre le nombre minimum de fichiers à transférer. .. note:: Attention : quand vous utilisez cette option vous devez donner le nombre minimum au moment où vous appelez ce validateur la première fois ; sinon vous aurez une erreur en retour. Avec cette option vous pouvez définir le nombre de fichiers que vous souhaitez recevoir. - *max*\  : paramètre le nombre maximum de fichiers à transférer. 
Avec cette option vous pouvez limiter le nombre de fichiers que vous acceptez mais vous permet aussi de détecter une possible attaque quand plus de fichiers, que votre formulaire n'en définit, sont fournis. Vous pouvez initialiser ce validateur avec une chaîne ou un entier, la valeur sera utilisée en tant que *max*. Mais vous pouvez aussi utiliser les méthodes ``setMin()`` et ``setMax()`` pour paramétrer ces options plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. .. _zend.file.transfer.validators.count.example: .. rubric:: Utiliser le validateur Count .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite le nombre de fichiers à 2 maximum $upload->addValidator('Count', false, 2); // Limite le nombre de fichiers à 5 maximum, // tout en obligeant au moins 1 fichier $upload->addValidator('Count', false, array('min' =>1, 'max' => 5)); .. note:: Notez que ce validateur stocke le nombre de fichiers vérifiés en interne. Le fichier qui excédera le maximum sera retourné en tant qu'erreur. .. _zend.file.transfer.validators.crc32: Validateur Crc32 ---------------- Le validateur *Crc32* vérifie le contenu du fichier transféré en le hachant. Ce validateur utilise l'extension de hachage de *PHP* avec l'algorithme crc32. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Les valeurs seront utilisées pour vérifier la valeur de hachage. Vous pouvez paramétrer de multiples hachages en utilisant différentes clés. Chacun sera vérifié et seulement si tous échouent, la validation elle-même échouera. .. _zend.file.transfer.validators.crc32.example: .. rubric:: Utiliser le validateur Crc32 .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si le contenu d'un fichier uploadé correspond au hachage fourni $upload->addValidator('Crc32', false, '3b3652f'); // Limite ce validateur à deux différents hachages $upload->addValidator('Crc32', false, array('3b3652f', 'e612b69')); .. _zend.file.transfer.validators.excludeextension: Validateur ExcludeExtension --------------------------- Le validateur *ExcludeExtension* vérifie l'extension des fichiers fournis. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Les valeurs seront utilisées en tant qu'extensions à vérifier que le fichier n'utilise pas. - *case*\  : paramètre une validation qui tient compte de la casse. Par défaut, ce n'est pas sensible à la casse. Notez que cette clé est utilisée pour toutes les extensions. Ce validateur accepte des extensions multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setExtension()``, ``addExtension()``, et ``getExtension()`` pour paramétrer et récupérer les extensions. Dans certains cas, il est utile vérifier aussi la casse. A cette fin le constructeur autorise un second paramètre ``$case`` qui, s'il est réglé à ``TRUE``, validera l'extension en vérifiant aussi la casse. .. _zend.file.transfer.validators.excludeextension.example: .. rubric:: Utiliser le validateur ExcludeExtension .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Refuser les fichiers avec l'extension php ou exe $upload->addValidator('ExcludeExtension', false, 'php,exe'); // Refuser les fichiers avec l'extension php ou exe en utilisant // la notation de type tableau $upload->addValidator('ExcludeExtension', false, array('php', 'exe')); // Vérifier aussi la casse $upload->addValidator('ExcludeExtension', false, array('php', 'exe', 'case' => true)); .. 
note:: Notez que ce validateur ne vérifie que l'extension de fichier. Il ne vérifie pas le type *MIME* réel du fichier. .. _zend.file.transfer.validators.excludemimetype: Validateur ExcludeMimeType -------------------------- Le validateur *ExcludeMimeType* vérifie le type *MIME* des fichiers transférés. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre le type *MIME* à vérifier. Avec cette option vous pouvez définir le(s) type(s) *MIME* que vous souhaitez exclure. - *headerCheck*\  : si spécifié à ``TRUE``, cette option va vérifier l'information *HTTP* concernant le type de fichier quand les extensions **fileInfo** ou **mimeMagic** ne seront pas trouvées. La valeur par défaut de cette option est ``FALSE``. Ce validateur accepte des types *MIME* multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setMimeType()``, ``addMimeType()``, et ``getMimeType()`` pour paramétrer et récupérer les types *MIME*. .. _zend.file.transfer.validators.excludemimetype.example: .. rubric:: Utiliser le validateur ExcludeMimeType .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Refuser le type MIME d'image gif pour tous les fichiers $upload->addValidator('ExcludeMimeType', false, 'image/gif'); // Refuser le type MIME d'image gif et jpg pour tous les fichiers $upload->addValidator('ExcludeMimeType', false, array('image/gif', 'image/jpeg')); // Refuser les types MIME du groupe image pour tous les fichiers $upload->addValidator('ExcludeMimeType', false, 'image'); L'exemple ci-dessus montre qu'il est aussi possible de limiter le type *MIME* accepté à un groupe de type *MIME*. Pour refuser toutes les images utilisez simplement "image" en tant que type *MIME*. Ceci peut être appliqué à tous les groupes de type *MIME* comme "image", "audio", "video", "text" et plus encore. .. 
note:: Notez que refuser un groupe de type *MIME* refusera tous les membres de ce groupe même si ce n'est pas votre intention. Par exemple quand vous refusez "image", vous refusez donc "image/jpeg" ou "image/vasa". Quand vous n'êtes pas sûr de vouloir refuser tous les types, vous devriez définir individuellement les types *MIME* plutôt que le groupe complet. .. _zend.file.transfer.validators.exists: Validateur Exists ----------------- Le validateur *Exists* vérifie l'existence des fichiers fournis. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Vérifie si le fichier existe dans le dossier fourni. Ce validateur accepte des dossiers multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setDirectory()``, ``addDirectory()``, et ``getDirectory()`` pour paramétrer et récupérer les dossiers. .. _zend.file.transfer.validators.exists.example: .. rubric:: Utiliser le validateur Exists .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Ajoute le dossier temporaire à vérifier $upload->addValidator('Exists', false, '\temp'); // Ajoute deux dossiers en utilisant la notation de type tableau $upload->addValidator('Exists', false, array('\home\images', '\home\uploads')); .. note:: Notez que ce validateur vérifie si le fichier existe dans tous les dossiers fournis. La validation échoue si le fichier est manquant dans l'un des dossiers. .. _zend.file.transfer.validators.extension: Validateur Extension -------------------- Le validateur *Extension* vérifie les extensions des fichiers qui ont été fournis. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre l'extension à vérifier. - *case*\  : paramètre une validation sensible à la casse. Par défaut, la validation n'est pas sensible à la casse. 
Notez que cette clé est utilisée pour toutes les extensions. Ce validateur accepte des extensions multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setExtension()``, ``addExtension()``, et ``getExtension()`` pour paramétrer et récupérer les extensions. Dans certains cas, il est utile vérifier aussi la casse. A cette fin le constructeur autorise un second paramètre ``$case`` qui, s'il est réglé à ``TRUE``, validera l'extension en vérifiant aussi la casse. .. _zend.file.transfer.validators.extension.example: .. rubric:: Utiliser le validateur Extension .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite les extensions à jpg et png $upload->addValidator('Extension', false, 'jpg,png'); // Limite les extensions à jpg et png en utilisant // la notation de type tableau $upload->addValidator('Extension', false, array('jpg', 'png')); // Vérifie aussi la casse $upload->addValidator('Extension', false, array('mo', 'png', 'case' => true)); if (!$upload->isValid('C:\temp\myfile.MO')) { print 'Non valide à cause de MO au lieu de mo'; } .. note:: Notez que ce validateur ne vérifie que l'extension de fichier. Il ne vérifie pas le type *MIME* réel du fichier. .. _zend.file.transfer.validators.filessize: Validateur FilesSize -------------------- Le validateur *FilesSize* vérifie le poids total de tous les fichiers transférés. Il supporte les options suivantes : - *min*\  : paramètre le poids minimum de tous les fichiers. Avec cette option vous pouvez définir le poids minimum de tous les fichiers que vous souhaitez transférer. - *max*\  : paramètre le poids maximum de tous les fichiers. Avec cette option vous pouvez limiter le poids total des fichiers qui doivent être transférés, mais pas la taille individuelle de chaque fichier. 
- *bytestring*\  : définit si un échec est retourné avec un taille plus facilement lisible pour l'utilisateur, ou avec une taille de fichier brute. Avec cette option vous pouvez en fait définir si l'utilisateur récupérera "10864" ou "10MB". La valeur par défaut est ``TRUE`` qui retournera "10MB". Vous pouvez initialiser seulement avec une chaîne qui sera utilisée en tant que *max*. Mais vous pouvez aussi utiliser les méthodes ``setMin()`` et ``setMax()`` pour paramétrer ces options plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. La taille elle-même est acceptée en notation SI comme sur la plupart des systèmes d'exploitation. Au lieu de 20000 octets (NdT. : "bytes" en anglais), vous pouvez utiliser **20kB**. Toutes les unités sont converties en utilisant 1024 comme valeur de base. Les unités suivantes sont acceptées : *kB*, *MB*, *GB*, *TB*, *PB* et *EB*. Comme mentionné précédemment vous devez noter que 1kB équivaut à 1024 octets. .. _zend.file.transfer.validators.filessize.example: .. rubric:: Utiliser le validateur FilesSize .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite la taille de tous les fichiers à 40000 octets $upload->addValidator('FilesSize', false, 40000); // Limite la taille de tous les fichiers dans une plage de 10kB à 4MB $upload->addValidator('FilesSize', false, array('min' => '10kB', 'max' => '4MB')); // Comme ci-dessus, mais retourne la taille de fichier brute plutôt qu'une chaîne $upload->addValidator('FilesSize', false, array('min' => '10kB', 'max' => '4MB', 'bytestring' => false)); .. note:: Notez que ce validateur stocke le poids des fichiers vérifiés en interne. Le fichier qui excédera le poids maximum sera retourné en tant qu'erreur. .. _zend.file.transfer.validators.imagesize: Validateur ImageSize -------------------- Le validateur *ImageSize* vérifie la taille des images. Il supporte les options suivantes : - *minheight*\  : paramètre la hauteur minimum d'une image. 
- *maxheight*\  : paramètre la hauteur maximum d'une image. - *minwidth*\  : paramètre la largeur minimum d'une image. - *maxwidth*\  : paramètre la largeur maximum d'une image. Vous pouvez aussi utiliser les méthodes ``setImageMin()`` et ``setImageMax()`` pour régler les valeurs minimum et maximum plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. Par commodité, il existe aussi les méthodes *setImageWidth* et *setImageHeight* qui paramètrent la largeur et la hauteur minimum et maximum. Bien sûr les méthodes associées *getImageWidth* et *getImageHeight* sont aussi disponibles. Pour désactiver la validation d'une dimension, ne paramétrez pas l'option correspondante. .. _zend.file.transfer.validators.imagesize.example: .. rubric:: Utiliser le validateur ImageSize .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite la taille de l'image à une hauteur de 100 à 200 et // une largeur de 40 à 80 pixels $upload->addValidator('ImageSize', false, array('minwidth' => 40, 'maxwidth' => 80, 'minheight' => 100, 'maxheight' => 200)); // Autre possibilité de réglage $upload->setImageWidth(array('minwidth' => 20, 'maxwidth' => 200)); .. _zend.file.transfer.validators.iscompressed: Validateur IsCompressed ----------------------- Le validateur *IsCompressed* vérifie si un fichier transféré est une archive compressée comme zip ou arc. Ce validateur est basé sur le validateur *MimeType* et supporte les mêmes méthodes et options. Vous pouvez limiter ce validateur à des types de compression particuliers avec les méthodes décrites ci-dessous. .. _zend.file.transfer.validators.iscompressed.example: .. rubric:: Utiliser le validateur IsCompressed .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si un fichier uploadé est une archive compressée $upload->addValidator('IsCompressed', false); // Limite ce validateur aux fichiers zip seulement $upload->addValidator('IsCompressed', false, array('application/zip')); // Limite ce validateur aux fichiers zip mais avec la notation simplifiée $upload->addValidator('IsCompressed', false, 'zip'); .. note:: Notez qu'il n'y a pas de vérification si vous paramétrez un type de fichier qui n'est pas un type de compression. Ainsi il est donc possible de définir que les fichiers gif sont acceptés par ce validateur même si ce n'est pas logique. .. _zend.file.transfer.validators.isimage: Validateur IsImage ------------------ Le validateur *IsImage* vérifie si un fichier transféré est une image comme gif ou jpeg. Ce validateur est basée sur le validateur *MimeType* et supportent les mêmes méthodes et options. Vous pouvez limiter ce validateur à des types d'image particuliers avec les méthodes décrites ci-dessous. .. _zend.file.transfer.validators.isimage.example: .. rubric:: Utiliser le validateur IsImage .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si un fichier uploadé est une image $upload->addValidator('IsImage', false); // Limite ce validateur aux fichiers gif seulement $upload->addValidator('IsImage', false, array('application/gif')); // Limite ce validateur aux fichiers jpeg mais avec la notation simplifiée $upload->addValidator('IsImage', false, 'jpeg'); .. note:: Notez qu'il n'y a pas de vérification si vous paramétrez un type de fichier qui n'est pas un type d'image. Ainsi il est donc possible de définir que les fichiers zip sont acceptés par ce validateur même si ce n'est pas logique. .. _zend.file.transfer.validators.hash: Validateur Hash --------------- Le validateur *Hash* vérifie le contenu du fichier transféré en le hachant. Ce validateur utilise l'extension de hachage de *PHP*. 
Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre la valeur de hachage qui doit être vérifié. Vous pouvez paramétrer de multiples hachages en les fournissant sous la forme d'un tableau. Chacun sera vérifié et seulement si tous échouent, la validation elle-même échouera. - *algorithm*\  : paramètre l'algorithme à utiliser pour hacher le contenu. Vous pouvez paramétrer de multiples algorithmes en utilisant la méthode ``addHash()`` plusieurs fois. .. _zend.file.transfer.validators.hash.example: .. rubric:: Utiliser le validateur Hash .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si le contenu d'un fichier uploadé correspond au hachage fourni $upload->addValidator('Hash', false, '3b3652f'); // Limite ce validateur à deux différents hachages $upload->addValidator('Hash', false, array('3b3652f', 'e612b69')); // Paramètre un algorithme différent pour effectuer le hachage $upload->addValidator('Hash', false, array('315b3cd8273d44912a7', 'algorithm' => 'md5')); .. note:: Ce validateur supporte environ 34 algorithmes de hachage différents. Les plus connus sont "crc32", "md5" and "sha1". Si vous souhaitez connaître les autres algorithmes, voyez `la méthode hash_algos de PHP`_. .. _zend.file.transfer.validators.md5: Validateur Md5 -------------- Le validateur *Md5* vérifie le contenu du fichier transféré en le hachant. Ce validateur utilise l'extension de hachage de *PHP* avec l'algorithme md5. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre la valeur de hachage qui doit être vérifié. Vous pouvez paramétrer de multiples hachages en les fournissant sous la forme d'un tableau. Chacun sera vérifié et seulement si tous échouent, la validation elle-même échouera. .. _zend.file.transfer.validators.md5.example: .. rubric:: Utiliser le validateur Md5 .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si le contenu d'un fichier uploadé correspond au hachage fourni $upload->addValidator('Md5', false, '3b3652f336522365223'); // Limite ce validateur à deux différents hachages $upload->addValidator('Md5', false, array('3b3652f336522365223', 'eb3365f3365ddc65365')); .. _zend.file.transfer.validators.mimetype: Validateur MimeType ------------------- Le validateur *MimeType* vérifie le type *MIME* des fichiers transférés. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre le type *MIME* à contrôler. Avec cette option vous pouvez définir le type *MIME* des fichiers qui seront acceptés. - *headerCheck*\  : si spécifié à ``TRUE``, cette option va vérifier l'information *HTTP* concernant le type de fichier quand les extensions **fileInfo** ou **mimeMagic** ne seront pas trouvées. La valeur par défaut de cette option est ``FALSE``. - *magicfile*\  : le magicfile qui sera utilisé. Avec cette option vous pouvez définir le magicfile à utiliser. Quand il n'est pas utilisé ou vide, la constante ``MAGIC`` sera utilisée. Cette option est disponible à partir de la version 1.7.1 de Zend Framework. Ce validateur accepte des types *MIME* multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setMimeType()``, ``addMimeType()``, et ``getMimeType()`` pour paramétrer et récupérer les types *MIME*. Vous pouvez aussi paramétrer le magicfile qui sera utilisé par fileinfo avec l'option *magicfile*. De plus il existe les méthodes ``setMagicFile()`` et ``getMagicFile()`` qui permettent de paramétrer ou récupérer plus tard le paramètre *magicfile*. Ces méthodes sont disponibles à partir de la version Zend Framework 1.7.1. .. _zend.file.transfer.validators.mimetype.example: .. rubric:: Utiliser le validateur MimeType .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite le type MIME de tous les fichiers aux images gif $upload->addValidator('MimeType', false, 'image/gif'); // Limite le type MIME de tous les fichiers aux images jpeg et gif $upload->addValidator('MimeType', false, array('image/gif', 'image/jpeg')); // Limite le type MIME de tous les fichiers au groupe des images $upload->addValidator('MimeType', false, 'image'); // Utilise un magicfile différent $upload->addValidator('MimeType', false, array('image', 'magicfile' => '/path/to/magicfile.mgx')); L'exemple ci-dessus montre qu'il est aussi possible de limiter le type *MIME* accepté à un groupe de type *MIME*. Pour autoriser toutes les images utilisez simplement "image" en tant que type *MIME*. Ceci peut être appliqué à tous les groupes de type *MIME* comme "image", "audio", "video", "text" et plus encore. .. note:: Notez qu'autoriser un groupe de type *MIME* acceptera tous les membres de ce groupe même si votre application ne les supporte pas. Par exemple quand vous autorisez "image", vous autorisez donc "image/xpixmap" ou "image/vasa", ce qui peut être problématique. Quand vous n'êtes pas sûr que votre application supporte tous les types, vous devriez définir individuellement les types *MIME* plutôt que le groupe complet. .. note:: Ce composant utilise l'extension *fileinfo* si elle est disponible. Si ce n'est pas le cas, il utilisera alors la fonction *mime_content_type*. Et si l'appel de fonction échoue, il utilisera le type *MIME* fourni par *HTTP*. Vous devez cependant être averti de possibles problèmes de sécurité si, ni *fileinfo*, ni *mime_content_type* ne sont disponibles : le type *MIME* fourni par *HTTP* n'étant pas sécurisé et pouvant être facilement manipulé. .. _zend.file.transfer.validators.notexists: Validateur NotExists -------------------- Le validateur *NotExists* vérifie la non-existence des fichiers fournis. 
Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Vérifie si le fichier n'existe pas dans le dossier fourni. Ce validateur accepte des extensions multiples soit sous la forme d'une chaîne utilisant le caractère virgule (",") comme séparateur ou sous la forme d'un tableau. Vous pouvez aussi utiliser les méthodes ``setDirectory()``, ``addDirectory()``, et ``getDirectory()`` pour paramétrer et récupérer les extensions. .. _zend.file.transfer.validators.notexists.example: .. rubric:: Utiliser le validateur NotExists .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Ajoute le dossier temporaire à vérifier $upload->addValidator('NotExists', false, '\temp'); // Ajoute deux dossiers en utilisant la notation de type tableau $upload->addValidator('NotExists', false, array('\home\images', '\home\uploads')); .. note:: Notez que ce validateur vérifie si le fichier n'existe dans aucun des dossiers fournis. La validation échoue si le fichier existe dans l'un des dossiers. .. _zend.file.transfer.validators.sha1: Validateur Sha1 --------------- Le validateur *Sha1* vérifie le contenu du fichier transféré en le hachant. Ce validateur utilise l'extension de hachage de *PHP* avec l'algorithme sha1. Il supporte les options suivantes : - ***\  : vous pouvez paramétrer n'importe quelle clé ou utiliser un tableau numérique. Paramètre la valeur de hachage qui doit être vérifié. Vous pouvez paramétrer de multiples hachages en les fournissant sous la forme d'un tableau. Chacun sera vérifié et seulement si tous échouent, la validation elle-même échouera. .. _zend.file.transfer.validators.sha1.example: .. rubric:: Utiliser le validateur Sha1 .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Vérifie si le contenu d'un fichier uploadé correspond au hachage fourni $upload->addValidator('Sha1', false, '3b3652f336522365223'); // Limite ce validateur à deux différents hachages $upload->addValidator('Sha1', false, array('3b3652f336522365223', 'eb3365f3365ddc65365')); .. _zend.file.transfer.validators.size: Validateur Size --------------- Le validateur *Size* vérifie le poids d'un fichier unique. Il supporte les options suivantes : - *min*\  : paramètre le poids minimum du fichier. - *max*\  : paramètre le poids maximum du fichier. - *bytestring*\  : définit si un échec est retourné avec un taille plus facilement lisible pour l'utilisateur, ou avec une taille de fichier brute. Avec cette option vous pouvez en fait définir si l'utilisateur récupérera "10864" ou "10MB". La valeur par défaut est ``TRUE`` qui retournera "10MB". Vous pouvez initialiser seulement avec une chaîne qui sera utilisée en tant que *max*. Mais vous pouvez aussi utiliser les méthodes ``setMin()`` et ``setMax()`` pour paramétrer ces options plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. Quand seule une chaîne est fournie, elle est utilisée en tant que *max*. Mais vous pouvez aussi utiliser les méthodes ``setMin()`` et ``setMax()`` pour paramétrer ces options plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. La taille elle-même est acceptée en notation SI comme sur la plupart des systèmes d'exploitation. Au lieu de 20000 octets, vous pouvez utiliser **20kB**. Toutes les unités sont converties en utilisant 1024 comme valeur de base. Les unités suivantes sont acceptées : *kB*, *MB*, *GB*, *TB*, *PB* et *EB*. Comme mentionné précédemment vous devez noter que 1kB équivaut à 1024 octets. .. _zend.file.transfer.validators.size.example: .. rubric:: Utiliser le validateur Size .. 
code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite la taille d'un fichier à 40000 octets $upload->addValidator('Size', false, 40000); // Limite la taille du fichier 'uploadfile' dans une plage de 10kB à 4MB // Additionally returns the plain number in case of an error instead of a userfriendly one $upload->addValidator('Size', false, array('min' => '10kB', 'max' => '4MB', 'bytestring' => false)); .. _zend.file.transfer.validators.wordcount: Validateur WordCount -------------------- Le validateur *WordCount* vérifie le nombre de mots à l'intérieur des fichiers fournis. Il supporte les options suivantes : - *min*\  : spécifie le nombre de mots minimum qui doivent être trouvés. - *max*\  : spécifie le nombre de mots maximum qui doivent être trouvés. Si vous initialisez ce validateur avec une chaîne ou un entier, la valeur sera utilisée en tant que *max*. Mais vous pouvez aussi utiliser les méthodes ``setMin()`` et ``setMax()`` pour paramétrer ces options plus tard et ``getMin()`` et ``getMax()`` pour les récupérer. .. _zend.file.transfer.validators.wordcount.example: .. rubric:: Utiliser le validateur WordCount .. code-block:: php :linenos: $upload = new Zend\File\Transfer(); // Limite le nombre maximum de mots dans les fichiers à 2000 $upload->addValidator('WordCount', false, 2000); // Limite le nombre de mots dans les fichiers entre un minimum de 1000 // et un maximum de 5000 mots $upload->addValidator('WordCount', false, array('min' => 1000, 'max' => 5000)); .. _`la méthode hash_algos de PHP`: http://php.net/manual/fr/function.hash-algos.php
{'content_hash': '9de7ca648d9efc069b0a778ad87c6b99', 'timestamp': '', 'source': 'github', 'line_count': 915, 'max_line_length': 115, 'avg_line_length': 41.169398907103826, 'alnum_prop': 0.7207061322006902, 'repo_name': 'rettal/zf2-documentation', 'id': 'e0d0f74f4b4c1652e6ae1bc1a9e02d979e628615', 'size': '38359', 'binary': False, 'copies': '10', 'ref': 'refs/heads/master', 'path': 'docs/languages/fr/modules/zend.file.transfer.validators.rst', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Python', 'bytes': '8185'}, {'name': 'Shell', 'bytes': '281'}]}
<?php Yii::import('bootstrap.widgets.input.TbInput');

/**
 * Bootstrap active form widget.
 *
 * Extends CActiveForm with Bootstrap form layouts (vertical, inline,
 * horizontal, search) and a set of *Row() convenience methods that
 * delegate to inputRow() with the matching TbInput type constant.
 */
class TbActiveForm extends CActiveForm
{
    // Form types.
    const TYPE_VERTICAL = 'vertical';
    const TYPE_INLINE = 'inline';
    const TYPE_HORIZONTAL = 'horizontal';
    const TYPE_SEARCH = 'search';

    // Input classes.
    const INPUT_HORIZONTAL = 'bootstrap.widgets.input.TbInputHorizontal';
    const INPUT_INLINE = 'bootstrap.widgets.input.TbInputInline';
    const INPUT_SEARCH = 'bootstrap.widgets.input.TbInputSearch';
    const INPUT_VERTICAL = 'bootstrap.widgets.input.TbInputVertical';

    /**
     * @var string the form type. See class constants.
     */
    public $type = self::TYPE_VERTICAL;

    /**
     * @var string input class.
     */
    public $input;

    /**
     * @var boolean whether to display errors inline ("help-inline") rather
     * than as blocks ("help-block"). When not set, init() defaults it to
     * true for horizontal forms and false otherwise.
     */
    public $inlineErrors;

    /**
     * Initializes the widget.
     * This renders the form open tag.
     */
    public function init()
    {
        // Append the Bootstrap layout class (e.g. "form-horizontal") to any
        // user-supplied class attribute instead of overwriting it.
        if (!isset($this->htmlOptions['class']))
            $this->htmlOptions['class'] = 'form-'.$this->type;
        else
            $this->htmlOptions['class'] .= ' form-'.$this->type;

        // Horizontal forms show errors inline by default.
        if (!isset($this->inlineErrors))
            $this->inlineErrors = $this->type === self::TYPE_HORIZONTAL;

        if ($this->inlineErrors)
            $this->errorMessageCssClass = 'help-inline error';
        else
            $this->errorMessageCssClass = 'help-block error';

        parent::init();
    }

    /**
     * Renders a checkbox input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function checkBoxRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_CHECKBOX, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a checkbox list input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data the list data
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function checkBoxListRow($model, $attribute, $data = array(), $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_CHECKBOXLIST, $model, $attribute, $data, $htmlOptions);
    }

    /**
     * Renders a checkbox list inline input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data the list data
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function checkBoxListInlineRow($model, $attribute, $data = array(), $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_CHECKBOXLIST_INLINE, $model, $attribute, $data, $htmlOptions);
    }

    /**
     * Renders a drop-down list input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data the list data
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function dropDownListRow($model, $attribute, $data = array(), $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_DROPDOWN, $model, $attribute, $data, $htmlOptions);
    }

    /**
     * Renders a file field input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function fileFieldRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_FILE, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a password field input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function passwordFieldRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_PASSWORD, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a radio button input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function radioButtonRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_RADIO, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a radio button list input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data the list data
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function radioButtonListRow($model, $attribute, $data = array(), $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_RADIOLIST, $model, $attribute, $data, $htmlOptions);
    }

    /**
     * Renders a radio button list inline input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data the list data
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function radioButtonListInlineRow($model, $attribute, $data = array(), $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_RADIOLIST_INLINE, $model, $attribute, $data, $htmlOptions);
    }

    /**
     * Renders a text field input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function textFieldRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_TEXT, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a text area input row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     */
    public function textAreaRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_TEXTAREA, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a captcha row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     * @since 0.9.3
     */
    public function captchaRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_CAPTCHA, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders an uneditable text field row.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $htmlOptions additional HTML attributes
     * @return string the generated row
     * @since 0.9.5
     */
    public function uneditableRow($model, $attribute, $htmlOptions = array())
    {
        return $this->inputRow(TbInput::TYPE_UNEDITABLE, $model, $attribute, null, $htmlOptions);
    }

    /**
     * Renders a checkbox list for a model attribute.
     * This method is a wrapper of {@link CHtml::activeCheckBoxList}.
     * Please check {@link CHtml::activeCheckBoxList} for detailed information
     * about the parameters for this method.
     * @param CModel $model the data model
     * @param string $attribute the attribute
     * @param array $data value-label pairs used to generate the check box list.
     * @param array $htmlOptions additional HTML options.
* @return string the generated check box list * @since 0.9.5 */ public function checkBoxList($model, $attribute, $data, $htmlOptions = array()) { return $this->inputsList(true, $model, $attribute, $data, $htmlOptions); } /** * Renders a radio button list for a model attribute. * This method is a wrapper of {@link CHtml::activeRadioButtonList}. * Please check {@link CHtml::activeRadioButtonList} for detailed information * about the parameters for this method. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data value-label pairs used to generate the radio button list. * @param array $htmlOptions additional HTML options. * @return string the generated radio button list * @since 0.9.5 */ public function radioButtonList($model, $attribute, $data, $htmlOptions = array()) { return $this->inputsList(false, $model, $attribute, $data, $htmlOptions); } /** * Renders an input list. * @param boolean $checkbox flag that indicates if the list is a checkbox-list. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data value-label pairs used to generate the input list. * @param array $htmlOptions additional HTML options. * @return string the generated input list. * @since 0.9.5 */ protected function inputsList($checkbox, $model, $attribute, $data, $htmlOptions = array()) { CHtml::resolveNameID($model, $attribute, $htmlOptions); $select = CHtml::resolveValue($model, $attribute); if ($model->hasErrors($attribute)) { if (isset($htmlOptions['class'])) $htmlOptions['class'] .= ' '.CHtml::$errorCss; else $htmlOptions['class'] = CHtml::$errorCss; } $name = $htmlOptions['name']; unset($htmlOptions['name']); if (array_key_exists('uncheckValue', $htmlOptions)) { $uncheck = $htmlOptions['uncheckValue']; unset($htmlOptions['uncheckValue']); } else $uncheck = ''; $hiddenOptions = isset($htmlOptions['id']) ? 
array('id' => CHtml::ID_PREFIX.$htmlOptions['id']) : array('id' => false); $hidden = $uncheck !== null ? CHtml::hiddenField($name, $uncheck, $hiddenOptions) : ''; if (isset($htmlOptions['template'])) $template = $htmlOptions['template']; else $template = '<label class="{labelCssClass}">{input}{label}</label>'; unset($htmlOptions['template'], $htmlOptions['separator'], $htmlOptions['hint']); if ($checkbox && substr($name, -2) !== '[]') $name .= '[]'; unset($htmlOptions['checkAll'], $htmlOptions['checkAllLast']); $labelOptions = isset($htmlOptions['labelOptions']) ? $htmlOptions['labelOptions'] : array(); unset($htmlOptions['labelOptions']); $items = array(); $baseID = CHtml::getIdByName($name); $id = 0; $method = $checkbox ? 'checkBox' : 'radioButton'; $labelCssClass = $checkbox ? 'checkbox' : 'radio'; if (isset($htmlOptions['inline'])) { $labelCssClass .= ' inline'; unset($htmlOptions['inline']); } foreach ($data as $value => $label) { $checked = !is_array($select) && !strcmp($value, $select) || is_array($select) && in_array($value, $select); $htmlOptions['value'] = $value; $htmlOptions['id'] = $baseID.'_'.$id++; $option = CHtml::$method($name, $checked, $htmlOptions); $label = CHtml::label($label, $htmlOptions['id'], $labelOptions); $items[] = strtr($template, array( '{labelCssClass}' => $labelCssClass, '{input}' => $option, '{label}' => $label, )); } return $hidden.implode('', $items); } /** * Displays a summary of validation errors for one or several models. * This method is very similar to {@link CHtml::errorSummary} except that it also works * when AJAX validation is performed. * @param mixed $models the models whose input errors are to be displayed. This can be either * a single model or an array of models. 
* @param string $header a piece of HTML code that appears in front of the errors * @param string $footer a piece of HTML code that appears at the end of the errors * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag. * @return string the error summary. Empty if no errors are found. * @see CHtml::errorSummary */ public function errorSummary($models, $header = null, $footer = null, $htmlOptions = array()) { if (!isset($htmlOptions['class'])) $htmlOptions['class'] = 'alert alert-block alert-error'; // Bootstrap error class as default return parent::errorSummary($models, $header, $footer, $htmlOptions); } /** * Displays the first validation error for a model attribute. * @param CModel $model the data model * @param string $attribute the attribute name * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag. * @param boolean $enableAjaxValidation whether to enable AJAX validation for the specified attribute. * @param boolean $enableClientValidation whether to enable client-side validation for the specified attribute. * @return string the validation result (error display or success message). */ public function error($model, $attribute, $htmlOptions = array(), $enableAjaxValidation = true, $enableClientValidation = true) { if (!$this->enableAjaxValidation) $enableAjaxValidation = false; if (!$this->enableClientValidation) $enableClientValidation = false; if (!isset($htmlOptions['class'])) $htmlOptions['class'] = $this->errorMessageCssClass; if (!$enableAjaxValidation && !$enableClientValidation) return $this->renderError($model, $attribute, $htmlOptions); $id = CHtml::activeId($model,$attribute); $inputID = isset($htmlOptions['inputID']) ? 
$htmlOptions['inputID'] : $id; unset($htmlOptions['inputID']); if (!isset($htmlOptions['id'])) $htmlOptions['id'] = $inputID.'_em_'; $option = array( 'id'=>$id, 'inputID'=>$inputID, 'errorID'=>$htmlOptions['id'], 'model'=>get_class($model), 'name'=>CHtml::resolveName($model, $attribute), 'enableAjaxValidation'=>$enableAjaxValidation, 'inputContainer'=>'div.control-group', // Bootstrap requires this ); $optionNames = array( 'validationDelay', 'validateOnChange', 'validateOnType', 'hideErrorMessage', 'inputContainer', 'errorCssClass', 'successCssClass', 'validatingCssClass', 'beforeValidateAttribute', 'afterValidateAttribute', ); foreach ($optionNames as $name) { if (isset($htmlOptions[$name])) { $option[$name] = $htmlOptions[$name]; unset($htmlOptions[$name]); } } if ($model instanceof CActiveRecord && !$model->isNewRecord) $option['status'] = 1; if ($enableClientValidation) { $validators = isset($htmlOptions['clientValidation']) ? array($htmlOptions['clientValidation']) : array(); $attributeName = $attribute; if (($pos = strrpos($attribute, ']')) !== false && $pos !== strlen($attribute) - 1) // e.g. [a]name $attributeName = substr($attribute, $pos + 1); foreach ($model->getValidators($attributeName) as $validator) { if ($validator->enableClientValidation) if (($js = $validator->clientValidateAttribute($model, $attributeName)) != '') $validators[] = $js; } if ($validators !== array()) $option['clientValidation'] = "js:function(value, messages, attribute) {\n".implode("\n", $validators)."\n}"; } $html = $this->renderError($model, $attribute, $htmlOptions); if ($html === '') { if (isset($htmlOptions['style'])) $htmlOptions['style'] = rtrim($htmlOptions['style'], ';').'; display: none'; else $htmlOptions['style'] = 'display: none'; $html = CHtml::tag('span', $htmlOptions, ''); } $this->attributes[$inputID] = $option; return $html; } /** * Displays the first validation error for a model attribute. 
* @param CModel $model the data model * @param string $attribute the attribute name * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag. * @return string the error display. Empty if no errors are found. * @see CModel::getErrors * @see errorMessageCss */ protected static function renderError($model, $attribute, $htmlOptions = array()) { CHtml::resolveName($model, $attribute); // turn [a][b]attr into attr $error = $model->getError($attribute); return $error != '' ? CHtml::tag('span', $htmlOptions, $error) : ''; } /** * Creates an input row of a specific type. * @param string $type the input type * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the data for list inputs * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function inputRow($type, $model, $attribute, $data = null, $htmlOptions = array()) { ob_start(); $this->getOwner()->widget($this->getInputClassName(), array( 'form'=>$this, 'type'=>$type, 'model'=>$model, 'attribute'=>$attribute, 'data'=>$data, 'htmlOptions'=>$htmlOptions, )); return ob_get_clean(); } /** * Returns the input widget class name suitable for the form. * @return string the class name */ protected function getInputClassName() { if (isset($this->input)) return $this->input; else { switch ($this->type) { case self::TYPE_HORIZONTAL: return self::INPUT_HORIZONTAL; break; case self::TYPE_INLINE: return self::INPUT_INLINE; break; case self::TYPE_SEARCH: return self::INPUT_SEARCH; break; case self::TYPE_VERTICAL: default: return self::INPUT_VERTICAL; break; } } } }
{'content_hash': 'ee8a9e0a631388a2fe01dba9a5f36b68', 'timestamp': '', 'source': 'github', 'line_count': 526, 'max_line_length': 128, 'avg_line_length': 33.243346007604565, 'alnum_prop': 0.6600709138739563, 'repo_name': 'pablo-trejo/proyectosoftware', 'id': 'de5fc9a034e1d6aac99c203d6f4983c283372ea6', 'size': '17749', 'binary': False, 'copies': '45', 'ref': 'refs/heads/master', 'path': 'protected/extensions/bootstrap/widgets/TbActiveForm.php', 'mode': '33261', 'license': 'apache-2.0', 'language': []}
define([ "dojo/_base/declare", "dojo/_base/lang", "dojo/_base/window", "dojo/dom-class", "dojo/dom-construct", "dojo/dom-style", "dojo/dom-attr", "./sniff", "./_ItemBase", "dojo/has!dojo-bidi?dojox/mobile/bidi/ToolBarButton" ], function(declare, lang, win, domClass, domConstruct, domStyle, domAttr, has, ItemBase, BidiToolBarButton){ // module: // dojox/mobile/ToolBarButton var ToolBarButton = declare(has("dojo-bidi") ? "dojox.mobile.NonBidiToolBarButton" : "dojox.mobile.ToolBarButton", ItemBase, { // summary: // A button widget which is placed in the Heading widget. // description: // ToolBarButton is a button which is typically placed in the // Heading widget. It is a subclass of dojox/mobile/_ItemBase just // like ListItem or IconItem. So, unlike Button, it has basically // the same capability as ListItem or IconItem, such as icon // support, transition, etc. // selected: Boolean // If true, the button is in the selected state. selected: false, // arrow: [const] String // Specifies "right" or "left" to be an arrow button. // Note that changing the value of the property after the widget // creation has no effect. arrow: "", // light: [const] Boolean // If true, this widget produces only a single `<span>` node when it // has only an icon or only a label, and has no arrow. In that // case, you cannot have both icon and label, or arrow even if you // try to set them. // Note that changing the value of the property after the widget // creation has no effect. light: true, // defaultColor: String // CSS class for the default color. // Note: If this button has an arrow (typically back buttons on iOS), // the class selector used for it is the value of defaultColor + "45". // For example, by default the arrow selector is "mblColorDefault45". defaultColor: "mblColorDefault", // selColor: String // CSS class for the selected color. // Note: If this button has an arrow (typically back buttons on iOS), // the class selector used for it is the value of selColor + "45". 
// For example, by default the selected arrow selector is "mblColorDefaultSel45". selColor: "mblColorDefaultSel", /* internal properties */ baseClass: "mblToolBarButton", _selStartMethod: "touch", _selEndMethod: "touch", buildRendering: function(){ if(!this.label && this.srcNodeRef){ this.label = this.srcNodeRef.innerHTML; } this.label = lang.trim(this.label); this.domNode = (this.srcNodeRef && this.srcNodeRef.tagName === "SPAN") ? this.srcNodeRef : domConstruct.create("span"); domAttr.set(this.domNode, "role", "button"); this.inherited(arguments); if(this.light && !this.arrow && (!this.icon || !this.label)){ this.labelNode = this.tableNode = this.bodyNode = this.iconParentNode = this.domNode; domClass.add(this.domNode, this.defaultColor + " mblToolBarButtonBody" + (this.icon ? " mblToolBarButtonLightIcon" : " mblToolBarButtonLightText")); return; } this.domNode.innerHTML = ""; if(this.arrow === "left" || this.arrow === "right"){ this.arrowNode = domConstruct.create("span", { className: "mblToolBarButtonArrow mblToolBarButton" + (this.arrow === "left" ? "Left" : "Right") + "Arrow " + (has("ie") < 10 ? "" : (this.defaultColor + " " + this.defaultColor + "45")) }, this.domNode); domClass.add(this.domNode, "mblToolBarButtonHas" + (this.arrow === "left" ? "Left" : "Right") + "Arrow"); } this.bodyNode = domConstruct.create("span", {className:"mblToolBarButtonBody"}, this.domNode); this.tableNode = domConstruct.create("table", {cellPadding:"0",cellSpacing:"0",border:"0",role:"presentation"}, this.bodyNode); if(!this.label && this.arrow){ // The class mblToolBarButtonText is needed for arrow shape too. // If the button has a label, the class is set by _setLabelAttr. If no label, do it here. 
this.tableNode.className = "mblToolBarButtonText"; } var row = this.tableNode.insertRow(-1); this.iconParentNode = row.insertCell(-1); this.labelNode = row.insertCell(-1); this.iconParentNode.className = "mblToolBarButtonIcon"; this.labelNode.className = "mblToolBarButtonLabel"; if(this.icon && this.icon !== "none" && this.label){ domClass.add(this.domNode, "mblToolBarButtonHasIcon"); domClass.add(this.bodyNode, "mblToolBarButtonLabeledIcon"); } domClass.add(this.bodyNode, this.defaultColor); }, startup: function(){ if(this._started){ return; } this.connect(this.domNode, "onkeydown", "_onClick"); // for desktop browsers this.inherited(arguments); if(!this._isOnLine){ this._isOnLine = true; // retry applying the attribute for which the custom setter delays the actual // work until _isOnLine is true. this.set("icon", this._pendingIcon !== undefined ? this._pendingIcon : this.icon); // Not needed anymore (this code executes only once per life cycle): delete this._pendingIcon; } }, _onClick: function(e){ // summary: // Internal handler for click events. // tags: // private if(e && e.type === "keydown" && e.keyCode !== 13){ return; } if(this.onClick(e) === false){ return; } // user's click action this.defaultClickAction(e); }, onClick: function(/*Event*/ /*===== e =====*/){ // summary: // User defined function to handle clicks // tags: // callback }, _setLabelAttr: function(/*String*/text){ // summary: // Sets the button label text. this.inherited(arguments); domClass.toggle(this.tableNode, "mblToolBarButtonText", text || this.arrow); // also needed if only arrow }, _setSelectedAttr: function(/*Boolean*/selected){ // summary: // Makes this widget in the selected or unselected state. 
var replace = function(node, a, b){ domClass.replace(node, a + " " + a + "45", b + " " + b + "45"); }; this.inherited(arguments); if(selected){ domClass.replace(this.bodyNode, this.selColor, this.defaultColor); if(!(has("ie") < 10) && this.arrowNode){ replace(this.arrowNode, this.selColor, this.defaultColor); } }else{ domClass.replace(this.bodyNode, this.defaultColor, this.selColor); if(!(has("ie") < 10) && this.arrowNode){ replace(this.arrowNode, this.defaultColor, this.selColor); } } domClass.toggle(this.domNode, "mblToolBarButtonSelected", selected); domClass.toggle(this.bodyNode, "mblToolBarButtonBodySelected", selected); } }); return has("dojo-bidi") ? declare("dojox.mobile.ToolBarButton", [ToolBarButton, BidiToolBarButton]) : ToolBarButton; });
{'content_hash': 'e57bbe50a22b788f621f0f565f285333', 'timestamp': '', 'source': 'github', 'line_count': 178, 'max_line_length': 130, 'avg_line_length': 37.359550561797754, 'alnum_prop': 0.6688721804511278, 'repo_name': 'victorynox/TestR', 'id': '65cd9886e986948a2e964802794a26dd9a255db1', 'size': '6650', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'public/scripts/dojo/dojox/mobile/ToolBarButton.js', 'mode': '33261', 'license': 'bsd-3-clause', 'language': [{'name': 'ActionScript', 'bytes': '21071'}, {'name': 'ApacheConf', 'bytes': '709'}, {'name': 'Batchfile', 'bytes': '2292'}, {'name': 'CSS', 'bytes': '2648729'}, {'name': 'Groff', 'bytes': '934'}, {'name': 'HTML', 'bytes': '13479298'}, {'name': 'Java', 'bytes': '131059'}, {'name': 'JavaScript', 'bytes': '21070551'}, {'name': 'Makefile', 'bytes': '1824'}, {'name': 'PHP', 'bytes': '639715'}, {'name': 'Perl', 'bytes': '6881'}, {'name': 'Python', 'bytes': '1386'}, {'name': 'Ruby', 'bytes': '911'}, {'name': 'Shell', 'bytes': '21731'}, {'name': 'XQuery', 'bytes': '799'}, {'name': 'XSLT', 'bytes': '104207'}]}
require 'rubygems' require 'helper' class TestRealTime < Test::Unit::TestCase include MovableInk def test_unathorized_access MovableInkClient.token = nil assert_raise ArgumentError do response = MovableInkClient.live_pics end end def test_live_pics MovableInkClient.token = ::API_KEY live_pics = MovableInkClient.live_pics assert !live_pics.nil? assert live_pics.kind_of?(Array) assert live_pics.first.kind_of?(LivePic) assert !live_pics.first.url.nil? end def test_live_pic MovableInkClient.token = ::API_KEY response = MovableInkClient.live_pic ::LIVE_PIC_ID assert !response.nil? assert response.kind_of?(LivePic) assert !response.url.nil? end def test_live_pic_stats MovableInkClient.token = ::API_KEY response = MovableInkClient.live_pic_stats ::LIVE_PIC_ID assert !response.nil? assert response.kind_of?(LivePicStats) assert !response.impressions.nil? end def test_live_pic_wrapper lp = LivePic.new lp.name = "My Name" lp.height = "10" params = lp.to_params assert params["live_pic[name]"], "My Name" assert params["live_pic[height]"], "10" end def test_create_live_pic MovableInkClient.token = ::API_KEY lp = LivePic.new lp.name = "My Name" lp.height = "10" lp.width = "10" lp.external_url = "http://google.com/" lp.x_offset = "20" lp.y_offset = "20" lp.target_url = 'http://google.com/' new_live_pic = MovableInkClient.create_live_pic lp assert new_live_pic.kind_of?(LivePic) assert new_live_pic.name, "My Name" assert new_live_pic.height, "10" end def test_update_live_pic MovableInkClient.token = ::API_KEY lp = MovableInkClient.live_pics.first updated_pic_id = lp.id name = "Updated Name - #{Time.now.to_i}" lp.name = name MovableInkClient.update_live_pic lp updated_pic = MovableInkClient.live_pic updated_pic_id assert updated_pic.name, name end def test_delete_live_pic MovableInkClient.token = ::API_KEY lp = MovableInkClient.live_pics.first deleted = MovableInkClient.delete_live_pic lp assert deleted end end
{'content_hash': '0621e3102dd96f2c42c9e8c8abdeedf4', 'timestamp': '', 'source': 'github', 'line_count': 84, 'max_line_length': 60, 'avg_line_length': 26.047619047619047, 'alnum_prop': 0.6704753199268738, 'repo_name': 'briandoll/movableink-api', 'id': '082e9ae05e96e26959dc3debde65bb52040b9e9b', 'size': '2188', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/test_real_time.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '15355'}]}
TEST_F(ChromeManifestTest, DefaultPathForExtent) { scoped_refptr<extensions::Extension> extension( LoadAndExpectSuccess("default_path_for_extent.json")); ASSERT_EQ(1u, extension->web_extent().patterns().size()); EXPECT_EQ("/*", extension->web_extent().patterns().begin()->path()); EXPECT_TRUE(extension->web_extent().MatchesURL( GURL("http://www.google.com/monkey"))); }
{'content_hash': '3fbcd28797f9a9d7e896becc8e7b47fd', 'timestamp': '', 'source': 'github', 'line_count': 9, 'max_line_length': 70, 'avg_line_length': 43.55555555555556, 'alnum_prop': 0.6989795918367347, 'repo_name': 'nwjs/chromium.src', 'id': '28ce760e3229701c3ab3a84c1395ad94b75d58ad', 'size': '699', 'binary': False, 'copies': '6', 'ref': 'refs/heads/nw70', 'path': 'chrome/common/extensions/manifest_tests/extension_manifests_default_extent_path_unittest.cc', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []}
namespace blink { bool packImageData(Image* image, WebGLImageConversion::ImageHtmlDomSource domSource, unsigned width, unsigned height, Vector<uint8_t>& data) { WebGLImageConversion::ImageExtractor imageExtractor(image, domSource, false, false); if (!imageExtractor.imagePixelData()) return false; WebGLImageConversion::DataFormat sourceDataFormat = imageExtractor.imageSourceFormat(); WebGLImageConversion::AlphaOp alphaOp = imageExtractor.imageAlphaOp(); const void* imagePixelData = imageExtractor.imagePixelData(); unsigned imageSourceUnpackAlignment = imageExtractor.imageSourceUnpackAlignment(); return WebGLImageConversion::packImageData(image, imagePixelData, GL_RGBA, GL_UNSIGNED_BYTE, false, alphaOp, sourceDataFormat, width, height, imageSourceUnpackAlignment, data); } bool WebCLHTMLUtil::extractDataFromCanvas(HTMLCanvasElement* canvas, Vector<uint8_t>& data, size_t& canvasSize, ExceptionState& es) { // Currently the data is read back from gpu to cpu, and uploaded from cpu to gpu // when OpenCL kernel funtion is assigned to run on GPU device. // TODO(junmin-zhu): should directly copy or share gpu memory in that case. 
if (!canvas) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } if (!packImageData(canvas->copiedImage(BackBuffer, PreferAcceleration).get(), WebGLImageConversion::HtmlDomCanvas, canvas->width(), canvas->height(), data)) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } canvasSize = data.size(); if (!data.data() || !canvasSize) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } return true; } bool WebCLHTMLUtil::extractDataFromImage(HTMLImageElement* image, Vector<uint8_t>& data, size_t& imageSize, ExceptionState& es) { if (!image || !image->cachedImage()) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } if (!packImageData(image->cachedImage()->image(), WebGLImageConversion::HtmlDomImage, image->width(), image->height(), data)) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } imageSize = data.size(); if (!data.data() || !imageSize) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } return true; } bool WebCLHTMLUtil::extractDataFromImageData(ImageData* srcPixels, void*& hostPtr, size_t& pixelSize, ExceptionState& es) { if (!srcPixels && !srcPixels->data() && !srcPixels->data()->data()) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } pixelSize = srcPixels->data()->length(); hostPtr = static_cast<void*>(srcPixels->data()->data()); if (!hostPtr || !pixelSize) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } return true; } bool WebCLHTMLUtil::extractDataFromVideo(HTMLVideoElement* video, Vector<uint8_t>& data, size_t& videoSize, ExceptionState& es) { // 
Currently the data is read back from gpu to cpu, and uploaded from cpu to gpu // when OpenCL kernel funtion is assigned to run on GPU device. // TODO(junmin-zhu): should directly copy or share gpu memory in that case. if (!video) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } RefPtr<Image> image = videoFrameToImage(video); if (!image) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } if (!packImageData(image.get(), WebGLImageConversion::HtmlDomVideo, video->clientWidth(), video->clientHeight(), data)) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } videoSize = data.size(); if (!data.data() || !videoSize) { es.throwWebCLException(WebCLException::INVALID_HOST_PTR, WebCLException::invalidHostPTRMessage); return false; } return true; } PassRefPtr<Image> WebCLHTMLUtil::videoFrameToImage(HTMLVideoElement* video) { if (!video || !video->clientWidth() || !video->clientHeight()) return nullptr; IntSize size(video->clientWidth(), video->clientHeight()); ImageBuffer* imageBufferObject = m_generatedImageCache.imageBuffer(size); if (!imageBufferObject) return nullptr; IntRect destRect(0, 0, size.width(), size.height()); video->paintCurrentFrame(imageBufferObject->canvas(), destRect, nullptr); return imageBufferObject->newImageSnapshot(); } WebCLHTMLUtil::WebCLHTMLUtil(unsigned capacity) : m_generatedImageCache(capacity) { } WebCLHTMLUtil::ImageBufferCache::ImageBufferCache(unsigned capacity) : m_capacity(capacity) { m_buffers.reserveCapacity(capacity); } // Get the imageBuffer with the same size as input argument, and swtich it to front for reusing. 
ImageBuffer* WebCLHTMLUtil::ImageBufferCache::imageBuffer(const IntSize& size) { unsigned i; for (i = 0; i < m_buffers.size(); ++i) { ImageBuffer* buf = m_buffers[i].get(); if (buf->size() != size) continue; if (i > 0) m_buffers[i].swap(m_buffers[0]); return buf; } OwnPtr<ImageBuffer> temp = ImageBuffer::create(size); if (!temp) return nullptr; if (i < m_capacity - 1) { m_buffers.append(temp.release()); } else { m_buffers[m_capacity - 1] = temp.release(); i = m_capacity - 1; } ImageBuffer* buf = m_buffers[i].get(); if (i > 0) m_buffers[i].swap(m_buffers[0]); return buf; } } // blink
{'content_hash': 'd839019f7d6d28572fc875ad8e8fd643', 'timestamp': '', 'source': 'github', 'line_count': 166, 'max_line_length': 180, 'avg_line_length': 36.74096385542169, 'alnum_prop': 0.6955238563698967, 'repo_name': 'joone/chromium-crosswalk', 'id': 'dd90e4f73a85b798a38514948e0f85b83dc908f4', 'size': '6708', 'binary': False, 'copies': '5', 'ref': 'refs/heads/2016.04.css-round-display-edtior-draft-1', 'path': 'third_party/WebKit/Source/modules/webcl/WebCLHTMLUtil.cpp', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []}
<?php use Symfony\Component\DependencyInjection\Argument\RewindableGenerator; use Symfony\Component\DependencyInjection\ContainerInterface; use Symfony\Component\DependencyInjection\Container; use Symfony\Component\DependencyInjection\Exception\InvalidArgumentException; use Symfony\Component\DependencyInjection\Exception\LogicException; use Symfony\Component\DependencyInjection\Exception\RuntimeException; use Symfony\Component\DependencyInjection\ParameterBag\FrozenParameterBag; use Symfony\Component\DependencyInjection\ParameterBag\ParameterBagInterface; /** * This class has been auto-generated * by the Symfony Dependency Injection Component. * * @final */ class Symfony_DI_PhpDumper_Test_Aliases_Deprecation extends Container { private $parameters = []; public function __construct() { $this->services = $this->privates = []; $this->methodMap = [ 'foo' => 'getFooService', 'alias_for_foo_deprecated' => 'getAliasForFooDeprecatedService', ]; $this->aliases = [ 'alias_for_foo_non_deprecated' => 'foo', ]; } public function compile(): void { throw new LogicException('You cannot compile a dumped container that was already compiled.'); } public function isCompiled(): bool { return true; } public function getRemovedIds(): array { return [ 'Psr\\Container\\ContainerInterface' => true, 'Symfony\\Component\\DependencyInjection\\ContainerInterface' => true, ]; } /** * Gets the public 'foo' shared service. * * @return \stdClass */ protected function getFooService() { return $this->services['foo'] = new \stdClass(); } /** * Gets the public 'alias_for_foo_deprecated' alias. * * @return object The "foo" service. */ protected function getAliasForFooDeprecatedService() { @trigger_error('The "alias_for_foo_deprecated" service alias is deprecated. You should stop using it, as it will be removed in the future.', E_USER_DEPRECATED); return $this->get('foo'); } }
{'content_hash': '69326669b80f909dae75864ca324d82f', 'timestamp': '', 'source': 'github', 'line_count': 73, 'max_line_length': 168, 'avg_line_length': 29.643835616438356, 'alnum_prop': 0.6686691312384473, 'repo_name': 'zerkms/symfony', 'id': '3434cfc61847f4cd2f75ddb1229113163b2006c7', 'size': '2164', 'binary': False, 'copies': '10', 'ref': 'refs/heads/4.4', 'path': 'src/Symfony/Component/DependencyInjection/Tests/Fixtures/php/container_alias_deprecation.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '48219'}, {'name': 'HTML', 'bytes': '16735'}, {'name': 'Hack', 'bytes': '48'}, {'name': 'JavaScript', 'bytes': '28639'}, {'name': 'PHP', 'bytes': '20620388'}, {'name': 'Shell', 'bytes': '3136'}, {'name': 'Twig', 'bytes': '390641'}]}
#import "TiViewProxy.h" @interface ComQnypTisvprogresshubViewProxy : TiViewProxy { } @end
{'content_hash': '80113b3ab4e9dc8c36ba3a7e806607c9', 'timestamp': '', 'source': 'github', 'line_count': 8, 'max_line_length': 58, 'avg_line_length': 11.75, 'alnum_prop': 0.7659574468085106, 'repo_name': 'mochiz/TiSVProgressHUD', 'id': 'edd0d17a1acc4b358d3e65478bf539eaba2d1b2f', 'size': '334', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Classes/ComQnypTisvprogresshubViewProxy.h', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '3266'}, {'name': 'Objective-C', 'bytes': '45150'}, {'name': 'Python', 'bytes': '8647'}]}
package com.renodino.adapter; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.Inet4Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.NetworkInterface; import java.net.Socket; import java.net.SocketException; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Properties; import java.util.Vector; import org.apache.log4j.Logger; class RDServicePeerConnection extends RDCoordinatedClient { static final Logger m_log = Logger.getLogger(RDServicePeerConnection.class); private int m_listen_port; protected String m_listen_addr; protected int m_num_peers; // number of expected service connections for // this adapter instance protected Vector<SocketChannel> m_peers; protected RDServiceCommandThread m_cmd_thread; static final private long SETUP_TIMEOUT = 10 * 60 * 1000; // for now, wait 10 minutes, maybe tunable in future protected Selector m_sktselector; protected ServerSocketChannel m_listenchan; protected RDTupleMetadata m_metadata; protected RDCommandInterface m_commander; protected String m_schema; static final private byte[] m_empty_schema = { 0,0,0,0 }; private HashMap<String, String> m_listen_map; // simple inner class for running the peer command in the background protected class RDServiceCommandThread extends Thread { private boolean m_done; public RDServiceCommandThread() { m_done = false; } public void run() { // execute the query try { m_commander.start_command(); } catch (RDAdapterException rde) { m_log.error("Can't start background command: " + rde); } m_done = true; } public boolean is_done() { return m_done; } } public RDServicePeerConnection(Properties rdprops) throws 
RDAdapterException { super(rdprops); m_schema = null; m_peers = new Vector<SocketChannel>(); get_net_interface_address(rdprops.getProperty("renodino.service.NET_DEVICE")); m_listenchan = open_listener(); m_listen_port = m_listenchan.socket().getLocalPort(); } // waits for all expected peer connections, then closes the listener public void init(Properties rdprops) throws RDAdapterException { // connect to coordinator register_client(rdprops); // provide instance name using listen address:port m_coordclient.set_instance_name(m_listen_addr + ":" + m_listen_port); // we should have the schema string by now, so create metadata for serde m_metadata = RDTupleMetadata.parse_metadata_string(m_schema); wait_for_service(m_listenchan); // done with the listener m_listenchan.keyFor(m_sktselector).cancel(); try { m_listenchan.close(); } catch (IOException ioe) {} // don't care at this point } @Override public byte[] start_master(Properties rdprops) { // if we're master, then we've recv'd the listener list, and need to provide it // to the command handler in case its needed (eg, for Hive table properties) rdprops.setProperty("renodino.service.LISTENER_LIST", compress_listen_list()); // now we can create the command handler try { m_commander = RDCommandHandlerFactory.get_command_handler(rdprops); m_commander.setup_command(); } catch (RDAdapterException rde) { m_log.error("Can't create command handler: " + rde); return null; } byte[] schemabytes = get_schema_bytes(); if (null == schemabytes) return null; // something went amiss! 
// master starts the Service query executor m_cmd_thread = new RDServiceCommandThread(); m_cmd_thread.start(); return schemabytes; } // default is for import jobs: return empty schema, since all isntances get the schema // property directly protected byte[] get_schema_bytes() { return m_empty_schema; } private void get_net_interface_address(String netifstr) throws RDAdapterException { NetworkInterface netif = null; if ((null != netifstr) && !netifstr.trim().equals("")) { // get address of specified interface try { netif = NetworkInterface.getByName(netifstr); } catch (SocketException se) { throw new RDAdapterException(se); } if (null == netif) throw new RDAdapterException("Specified network interface " + netifstr + " does not exist."); try { if (!netif.isUp()) throw new RDAdapterException("Specified network interface " + netifstr + " is not currently online."); } catch (SocketException se) { throw new RDAdapterException(se); } } else { // no explicit interface, so lookup eth0, bind0, or tun0, and return // the 1st try { netif = NetworkInterface.getByName("bond0"); if ((null == netif) || !netif.isUp()) netif = NetworkInterface.getByName("eth0"); if ((null == netif) || !netif.isUp()) netif = NetworkInterface.getByName("tun0"); if ((null == netif) || !netif.isUp()) throw new RDAdapterException("No online external network interface found."); } catch (SocketException se) { throw new RDAdapterException(se); } } Enumeration<InetAddress> inetAddresses = netif.getInetAddresses(); for (InetAddress inetAddress : Collections.list(inetAddresses)) { // for now we prefer ipv4 addresses (but tolerate ipv6), so scan the // list until we get // an ipv4 m_listen_addr = inetAddress.getHostAddress(); if (inetAddress instanceof Inet4Address) return; } } private ServerSocketChannel open_listener() throws RDAdapterException { // open a listener on an ephemeral port // NOTE: is this appropriate ? what if the data source/target is remote // and comes thru a firewall ? // !!! 
we may need to create a portmux on a single well known port, and // hand off connections // grabs ephemeral port on INADDR_ANY // NOTE: if an explicit interface was requested, we should probably only // bind to it... // open listen socket in nonblocking mode ServerSocketChannel listenchan = null; try { listenchan = ServerSocketChannel.open(); listenchan.socket().bind(new InetSocketAddress(0)); listenchan.configureBlocking(false); m_sktselector = Selector.open(); listenchan.register(m_sktselector, SelectionKey.OP_ACCEPT); } catch (IOException ioe) { throw new RDAdapterException("Can't init adapter listener socket: " + ioe); } return listenchan; } // collect the instance context to be stored in the instance ZK node // default just stores the listen address/port @Override protected byte[] get_registration_context() { String addr = m_listen_addr + ":" + m_listen_port; try { return addr.getBytes("UTF8"); } catch (UnsupportedEncodingException uee) { return null; } } // wait up to SETUP_TIMEOUT for all expected connections, while also // checking for coordinator events and query manager exit // note that the 1st data recv'd from the peer is the number of peer // connection this instance should expect private void wait_for_service(ServerSocketChannel listenchan) throws RDAdapterException { m_log.debug("Waiting for service peers"); long expires = System.currentTimeMillis() + SETUP_TIMEOUT; m_peers.clear(); try { while ((m_num_peers > m_peers.size()) && (System.currentTimeMillis() <= expires)) { // bust out every 5 secs to check if query manager has died // or coordinator has a control update if (0 != m_sktselector.select(5000)) { m_log.debug("Got service peer connection indication"); Iterator<SelectionKey> keys = m_sktselector.selectedKeys().iterator(); while (keys.hasNext()) { SelectionKey key = keys.next(); keys.remove(); if (key.isAcceptable()) { // accept Service connection SocketChannel sktchan = listenchan.accept(); Socket skt = sktchan.socket(); 
skt.setTcpNoDelay(true); m_peers.add(sktchan); // exports get number of expected connections as 1st integer // on each connection; imports just return current num_peers m_num_peers = get_num_peers(sktchan); m_log.debug("Requested number of peers: " + m_num_peers); } } // end while keys } // end if select if ((null != m_cmd_thread) && !m_cmd_thread.isAlive()) throw new RDAdapterException("Service query failed: " + m_commander.get_error()); check_coordinator(); } // end while } catch (IOException ioe) { throw new RDAdapterException(ioe); } } // close everything on exit public void close() { super.close(); try { m_sktselector.close(); } catch (Exception e) { // do nothing - server failed } for (SocketChannel p : m_peers) { try { p.close(); } catch (Exception e) { // do nothing - server failed } } if (null != m_cmd_thread) { if (m_cmd_thread.isAlive()) { m_cmd_thread.interrupt(); // in case its still executing the command } boolean joined = false; while (!joined) { try { m_cmd_thread.join(); joined = true; } catch (InterruptedException ie) { } } try { m_commander.teardown_command(); m_commander.close(); } catch (Exception e) { } // / don't care } } // collect global context: by default we only expect the schema, and only for export jobs // (import jobs provide the schema as property string) @Override public void set_registration_context(byte[] context) throws RDAdapterException { if (0 == context.length) return; ByteArrayInputStream bin = new ByteArrayInputStream(context); DataInputStream din = new DataInputStream(bin); byte[] schemabytes = null; try { int schemalen = din.readInt(); if (0 != schemalen) { schemabytes = new byte[schemalen]; din.read(schemabytes); } } catch (IOException ioe) // should never happen { } if (null == schemabytes) return; try { m_schema = new String(schemabytes, "UTF8"); } catch (UnsupportedEncodingException e) { // should never happen! } } // accumulate all the listen addresses // !!! 
probably should compress this (ie group instances on same node under same address) @Override public void add_instance_context(byte[] context) { String listenaddr = null; try { listenaddr = new String(context, "UTF8"); } catch (UnsupportedEncodingException uee) {} // should never happen! String[] parts = listenaddr.split(":"); if (2 != parts.length) { m_log.warn("Invalid instance context: " + listenaddr); return; } int port = 0; try { port = Integer.parseInt(parts[1]); } catch (Exception e) { m_log.warn("Invalid listen port: " + parts[1]); return; } if ((port < 1025) || (port > 65535)) { m_log.warn("Invalid listen port: " + parts[1]); return; } if (null == m_listen_map) m_listen_map = new HashMap<String, String>(); String ports = m_listen_map.get(parts[0]); if (null == ports) m_listen_map.put(parts[0], parts[1]); else m_listen_map.put(parts[0], ports + "," + parts[1]); } protected int get_num_peers(SocketChannel sktchan) throws RDAdapterException { return m_num_peers; } // convert current listen list to a compressed version protected String compress_listen_list() { StringBuilder str = new StringBuilder(); for (String addr : m_listen_map.keySet()) { if (0 != str.length()) str.append(','); str.append(addr).append('[').append(m_listen_map.get(addr)).append(']'); } return str.toString(); } }
{'content_hash': '7aa5da9f3dbe25403e87ec29922402c4', 'timestamp': '', 'source': 'github', 'line_count': 442, 'max_line_length': 111, 'avg_line_length': 26.574660633484164, 'alnum_prop': 0.6817640047675805, 'repo_name': 'renodino/rdadapter', 'id': '17be436dc38af9fe73e254a0158173f4a58ced21', 'size': '12752', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/renodino/adapter/RDServicePeerConnection.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '3254'}, {'name': 'Java', 'bytes': '221874'}, {'name': 'Makefile', 'bytes': '3242'}, {'name': 'Perl', 'bytes': '18617'}, {'name': 'Shell', 'bytes': '4943'}]}
ABAPSyntaxHighlighter (PrismABAP): ===================== It's a lightweight, syntax highlighter for SAP's programming language ABAP and it's written in JavaScript - ( Duh!....ABAP is not written in JavaScript but just this syntax highlighter ) <br> This is an extension of open source Syntax Highlighter [Prism](http://www.prismjs.com). ### Why ABAPSyntaxHighlighter ( PrismABAP )? ------------------------------------------------------------------------------------------------------------------------ I wanted to put together a JavaScript library that can achieve effective syntax-highlighting for ABAP Code. ABAPSyntaxHighlighter ( PrismABAP ) offers a practical solution with acceptable margins of error. Goal is to have an easy to install and useable JavaScript Library for the purpose. ### How to use ABAP syntax-highlighter on a website / blog: ------------------------------------------------------------------------------------------------------------------------ * Site/Blog Administrator/owner should add the [JavaScript](http://www.pragmatiqa.com/docs/ABAPSyntaxHighlighter/Final/prismABAP.js) and [CSS](http://www.pragmatiqa.com/docs/ABAPSyntaxHighlighter/Final/prismABAP.css) files as below (both files can be downloaded from this repository). Obviously this is one time activity.<br /> ```html <head> .... <link href='http://www.pragmatiqa.com/docs/ABAPSyntaxHighlighter/Final/prismABAP.css' rel='stylesheet'/> <script src='http://www.pragmatiqa.com/docs/ABAPSyntaxHighlighter/Final/prismABAP.js'></script> .... </head> ``` * Post writer will enclose the ABAP code with `<pre>` and `<code>` and specify the CSS class name: line-numbers language-abap as below: ```html <pre class="line-numbers language-abap"><code> * Actual ABAP Code lines should be pasted here - site/blog post editor should take care of converting any starting html brackets in ABAP code to &amp;lt; for example field-symbols etc. 
</code></pre> ``` A sample highlighted code and some details are available at [this Blog Post](http://sapblog.rmtiwari.com/2014/01/hacking-together-abap-syntax.html). If you want to try on your Blog/site – it’s free unless you are hell-bent on paying for it :)
{'content_hash': 'af6b4eb5921be3c4e768c9b8024ab37e', 'timestamp': '', 'source': 'github', 'line_count': 35, 'max_line_length': 327, 'avg_line_length': 62.628571428571426, 'alnum_prop': 0.6729014598540146, 'repo_name': 'rmtiwari/ABAPSyntaxHighlighter', 'id': '6b0e6c7272b06a3476659203f6a09c1a6dde6883', 'size': '2196', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Awk', 'bytes': '20'}, {'name': 'CSS', 'bytes': '4566'}, {'name': 'HTML', 'bytes': '4342'}, {'name': 'JavaScript', 'bytes': '28689'}]}