Dataset columns:
  query            string    (lengths 9 to 9.05k)
  document         string    (lengths 10 to 222k)
  metadata         dict
  negatives        sequence  (length 30)
  negative_scores  sequence  (length 30)
  document_score   string    (lengths 4 to 10)
  document_rank    string    (2 classes)
Azimuth in DD (decimal degrees) format validation.
def validate_azm_dd(a):
    if REGEX_AZM_DD.match(a):
        is_valid = VALID
    else:
        is_valid = NOT_VALID
    return is_valid
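The snippet relies on module-level names (REGEX_AZM_DD, VALID, NOT_VALID) that it does not define. A minimal sketch of plausible definitions and usage — the pattern and constants below are assumptions, not part of the original:

import re

VALID = 'VALID'
NOT_VALID = 'NOT_VALID'
# Assumed: decimal-degree azimuth in [0, 360], e.g. "137.25"
REGEX_AZM_DD = re.compile(r'^(?:[0-2]?\d?\d(?:\.\d+)?|3[0-5]\d(?:\.\d+)?|360(?:\.0+)?)$')

print(validate_azm_dd("137.25"))  # 'VALID'
print(validate_azm_dd("361"))     # 'NOT_VALID'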
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkAltAz ( rawAltAz ):\n #-- 1 --\n # [ if rawAltAz contains either a '+' or a '-' ->\n # m := a re.match instance describing the first matching\n # character\n # else ->\n # sys.stderr +:= error message\n # stop execution ]\n m = SIGN_PAT.search ( rawAltAz )\n if m is None:\n usage ( \"Equatorial coordinates must be separated by \"\n \"'+' or '-'.\" )\n #-- 2 --\n # [ rawAz := rawAltAz up to the match described by m\n # sign := characters matched by m\n # rawAlt := rawAltAz past the match described by m ]\n rawAz = rawAltAz[:m.start()]\n sign = m.group()\n rawAlt = rawAltAz[m.end():]\n\n #-- 3 --\n # [ if rawAz is a valid angle ->\n # az := that angle as radians\n # else ->\n # sys.stderr +:= error message\n # stop execution ]\n try:\n az = sidereal.parseAngle ( rawAz )\n except SyntaxError, detail:\n usage ( \"Azimuth '%s' should have the form \"\n \"'NNNd[NNm[NN.NNNs]]'.\" % rawAz )\n\n #-- 4 --\n # [ if rawAlt is a valid angle ->\n # alt := that angle as radians\n # else ->\n # sys.stderr +:= error message\n # stop execution ]\n try:\n absAlt = sidereal.parseAngle ( rawAlt )\n except SyntaxError, detail:\n usage ( \"Altitude '%s' should have the form \"\n \"'NNd[NNm[NN.NNNs]]'.\" % rawAlt )\n\n #-- 5 --\n if sign == '-': alt = - absAlt\n else: alt = absAlt\n\n #-- 6 --\n return sidereal.AltAz ( alt, az )", "def get_compass_dir_azimuth(azimuth, resolution='intercardinal', format='short'):\n if azimuth < 0:\n azimuth += 360\n if format not in ['short', 'long']:\n raise KeyError(f'Direction format {format} is not supported')\n if resolution not in ['cardinal', 'intercardinal', 'meteorological']:\n raise KeyError(f'Direction resolution {resolution} is not supported')\n if resolution == 'cardinal':\n angles = np.arange(0, 360 + 90, 90)\n if format == 'long':\n points = LONG_CARDINAL_POINTS\n else:\n points = SHORT_CARDINAL_POINTS\n elif resolution == 'intercardinal':\n angles = np.arange(0, 360 + 45, 45)\n if format == 'long':\n points = LONG_INTERCARDINAL_POINTS\n else:\n points = SHORT_INTERCARDINAL_POINTS\n elif resolution == 'meteorological':\n angles = np.arange(0, 360 + 22.5, 22.5)\n if format == 'long':\n points = LONG_METEOROLOGICAL_POINTS\n else:\n points = SHORT_METEOROLOGICAL_POINTS\n\n adiff = abs(azimuth - angles)\n i = adiff.argmin()\n return points[i]", "def azimuth(vv, v0, v1):\n with np.errstate(divide='ignore', invalid='ignore'):\n n0 = np.cross(v0, v1)\n n0 /= np.dual.norm(n0, axis=-1)[..., np.newaxis]\n nn = np.cross(v0, vv)\n nn /= np.dual.norm(nn, axis=-1)[..., np.newaxis]\n\n azi = np.arccos(np.sum(nn * n0, -1))\n if len(np.shape(azi)) > 0:\n azi[np.dot(vv, n0) < 0] *= -1\n # arbitrary angle where vv is (anti)parallel to v0\n azi[np.isnan(azi)] = 0\n elif np.isnan(azi):\n return 0\n elif np.dot(vv, v0) < 1 and azi > 0:\n azi *= -1\n\n return azi", "def check(self, X):\n if (np.min(X) < -90.) 
or (np.max(X) > 90.):\n print \"Warning: X may be defined in degrees instead of radians\"", "def check_doa(geometry, doa):\n if doa < 0:\n return False\n if geometry == \"linear\" and doa > 180:\n return False\n if geometry == \"circular\" and doa >= 360:\n return False\n return True", "def parse_sexagesimal_angle(angle):\n if diag:\n print \"parse_sexagesimal_angle: >\"+angle+\"<\"\n if re.search('\\.',angle):\n DDMMSS,second_fraction = angle.split('.')\n second_fraction = float(\"0.\"+second_fraction)\n else:\n DDMMSS = angle\n second_fraction = 0.\n # At the very least, the first four digits should be unambiguous\n if diag:\n print \"parse_sexagesimal_angle: >\"+DDMMSS+\"<\"\n DD = int(DDMMSS[:2])\n if len(DDMMSS) == 2:\n # This should never happen for right ascension\n aa = DD\n elif len(DDMMSS) == 3:\n # This should never happen for right ascension\n aa = DD + float(DDMMSS[2])/10\n else:\n MM = int(DDMMSS[2:4])\n aa = DD + MM/60.\n if len(DDMMSS) == 6:\n # It should be this if there was a decimal\n SS = int(DDMMSS[4:6])\n aa += (SS+second_fraction)/3600.\n elif len(DDMMSS) == 5:\n MM += float(DDMMSS[4])/10\n aa += MM/60.\n elif len(DDMMSS) > 6:\n SS = int(DDMMSS[4:6])\n extra_digits = len(DDMMSS) - 6\n divisor = pow(10,extra_digits)\n second_fraction = float(DDMMSS[6:])/divisor\n aa += (SS+second_fraction)/3600.\n return aa", "def _is_trivial_angle(rad: float, atol: float) -> bool:\n return abs(rad) < atol or abs(abs(rad) - np.pi / 4) < atol", "def test_str_phaseangledeg(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"phaseangledeg\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0xC5,\n 0x25,\n 0x13,\n 0x38,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), -2641.201171875)\n self.assertEqual(sensor.unit_of_measurement(), \"°\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_rotation_angle(self):\n\n self.test_shape.azimuth_placement_angle = [45, 135, 225, 315]\n test_volume = self.test_shape.volume()\n self.test_shape.rotation_angle = 180\n assert self.test_shape.volume() == pytest.approx(test_volume * 0.5)", "def looks_azimuth(self) -> Optional[int]:\n return self._get_property(LOOKS_AZIMUTH_PROP, int)", "def angle(z):", "def check_angle(self):\n self.find_pixels()\n alpha_theta=np.deg2rad(70)\n alpha_phi=np.deg2rad(70)\n extreme_values=self.compute_extreme_values(alpha_phi, alpha_theta)\n x=np.linspace(extreme_values[0], extreme_values[1], self.number_of_pix[1])\n y=np.linspace(extreme_values[2], extreme_values[3], self.number_of_pix[0])\n phi_0=20\n phi_0=np.deg2rad(phi_0)\n j, diff=self.compute_phi(\"find_orient.png\")\n print \"j=\", j\n print \"diff=\", diff", "def fun_azimuth(self):\n\n energy_kev = self.energy_kev.get()\n hkl = self.hkl_magnetic.get()\n hkl = hkl.replace(',', ' ') # remove commas\n hkl = hkl.replace('(', '').replace(')', '') # remove brackets\n hkl = hkl.replace('[', '').replace(']', '') # remove brackets\n hkl = np.fromstring(hkl, sep=' ')\n\n azi = self.azim_zero.get()\n azi = azi.replace(',', ' ') # remove commas\n azi = azi.replace('(', '').replace(')', '') # remove brackets\n azi = azi.replace('[', '').replace(']', '') # remove brackets\n azi = np.fromstring(azi, sep=' ')\n\n pol = self.polval.get()\n if pol == u'\\u03c3-\\u03c3':\n pol = 's-s'\n elif pol == u'\\u03c3-\\u03c0':\n pol = 's-p'\n elif pol == u'\\u03c0-\\u03c3':\n pol = 'p-s'\n else:\n pol = 'p-p'\n\n F0 = self.resF0.get()\n F1 = self.resF1.get()\n F2 = self.resF2.get()\n\n isres = self.isres.get()\n if 
isres:\n # Resonant scattering\n self.xtl.Plot.simulate_azimuth_resonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol,\n F0=F0, F1=F1, F2=F2)\n plt.show()\n else:\n # Non-Resonant scattering\n self.xtl.Plot.simulate_azimuth_nonresonant(\n hkl,\n energy_kev=energy_kev,\n azim_zero=azi,\n polarisation=pol)\n plt.show()", "def parse_azimuth_elevation(filename):\n match = REGEX.match(filename)\n return int(match.group(1)), int(match.group(2))", "def _check_ra_dec(ra, dec):\n ra = np.atleast_1d(ra)\n dec = np.atleast_1d(dec)\n msg = []\n if (ra < 0).any():\n msg.append('RA must be >= 0, %f' % ra[ra < 0][0])\n if (ra >= 360).any():\n msg.append('RA must be < 360, %f' % ra[ra >= 360][0])\n if (dec < -90).any():\n msg.append('Dec must be >= -90, %f' % dec[dec < -90][0])\n if (dec > 90).any():\n msg.append('Dec must be <= 90, %f' % dec[dec > 90][0])\n\n if msg:\n raise ValueError('\\n'.join(msg))", "def test_float_input_angles_and_phase(self):\n decomposer = OneQubitEulerDecomposer(\"PSX\")\n input_matrix = np.array(\n [\n [0.70710678, 0.70710678],\n [0.70710678, -0.70710678],\n ],\n dtype=np.float64,\n )\n (theta, phi, lam, gamma) = decomposer.angles_and_phase(input_matrix)\n expected_theta = 1.5707963267948966\n expected_phi = 0.0\n expected_lam = 3.141592653589793\n expected_gamma = -0.7853981633974483\n self.assertAlmostEqual(theta, expected_theta)\n self.assertAlmostEqual(phi, expected_phi)\n self.assertAlmostEqual(lam, expected_lam)\n self.assertAlmostEqual(gamma, expected_gamma)", "def calc_surface_azimuth(xdir, ydir, B):\n B = radians(B)\n teta_z = degrees(asin(xdir / sin(B)))\n # set the surface azimuth with on the sing convention (E,N)=(+,+)\n if xdir < 0:\n if ydir <0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (-,-)\n else: surface_azimuth = 360 + teta_z # (xdir,ydir) = (-,+)\n elif ydir < 0:\n surface_azimuth = 180 + teta_z # (xdir,ydir) = (+,-)\n else: surface_azimuth = teta_z # (xdir,ydir) = (+,+)\n return surface_azimuth # degree", "def resolution_azimuth(self) -> Optional[float]:\n return self._get_property(RESOLUTION_AZIMUTH_PROP, float)", "def check_heading(heading):\n if not (heading >= -180 and heading <= 360):\n raise CompassRangeError(\n 'Not in a valid compass range of -180 to 180 degrees, or '\n '0 to 360 degrees')", "def azimuth(source):\n srcAzEl = subarrayControl.s.azel(source, 0.0);\n return srcAzEl[0];", "def test_str_angle_deg(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"angle_deg\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x44,\n 0x5C,\n 0x20,\n 0x2B,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 880.5026245117188)\n self.assertEqual(sensor.unit_of_measurement(), \"°\")\n self.assertEqual(sensor.ha_device_class(), None)", "def parse_decimal_angle(angle):\n if re.search('.',angle) or len(angle) == 2 or len(angle) == 3:\n da = float(angle)\n elif len(angle) > 3:\n exponent = len(angle)-3\n divisor = pow(10,exponent)\n da = float(angle)/divisor\n else:\n # Must be a missing leading zero.\n da = float(angle)\n return da", "def set_azimuth(self):\n self.azimuth = self.Calculations.convert_to_azimuth( self.declination, self.right_ascension, self.Latitude, self.LHA)\n if self.azimuth < 0:\n self.azimuth = self.azimuth + 360.0\n return self.azimuth\n else:\n pass\n return self.azimuth\n print('azimuth set to', self.azimuth)", "def rad2deg(a):", "def test_is_unital_depolarizing_choi_true():\n np.testing.assert_equal(is_unital(depolarizing(4)), True)", "def 
get_azimuth(self):\n self.degrees = self.azimuth_encoder.get_degrees()\n self.tele_azimuth = self.Calculations.convert_degrees(self.degrees)\n return self.tele_azimuth", "def getAzimuthAngle(self):\n return self._azimuth", "def azimuth_update(self):\n self.current_azimuth = self.azimuth_encoder.get_degrees()\n azimuth_error = self.azimuth - float(self.current_azimuth)\n # print('goal azimuth', self.azimuth, 'current azimuth', self.azimuth_encoder.get_degrees(), 'difference in azimuth', azimuth_error)\n if azimuth_error >0:\n # print('positive azimuth')\n self.azimuth_motor.set_direction(1)\n elif azimuth_error > 0:\n # print('negative azimuth')\n self.azimuth_motor.set_direction(0)\n azimuth_error = abs(azimuth_error)\n self.azimuth_error = azimuth_error\n if azimuth_error >= 0:\n self.azimuth_motor.set_speed(0)\n if azimuth_error >= 35:\n self.azimuth_motor.set_speed(1)\n if azimuth_error >= 40:\n self.azimuth_motor.set_speed(2)\n if azimuth_error >= 80:\n self.azimuth_motor.set_speed(3)\n if azimuth_error >= 160:\n self.azimuth_motor.set_speed(4)\n if azimuth_error >= 280:\n self.azimuth_motor.set_speed(5)\n self.azimuth_error = azimuth_error\n print('debug_azimuth', self.current_azimuth, self.azimuth_error, self.azimuth_motor.speed)\n return self.azimuth_error", "def check_degrees(degrees):\n type_check(degrees, (numbers.Number, list, tuple), \"degrees\")\n if isinstance(degrees, numbers.Number):\n check_value(degrees, (0, float(\"inf\")), \"degrees\")\n elif isinstance(degrees, (list, tuple)):\n if len(degrees) == 2:\n type_check_list(degrees, (numbers.Number,), \"degrees\")\n if degrees[0] > degrees[1]:\n raise ValueError(\"degrees should be in (min,max) format. Got (max,min).\")\n else:\n raise TypeError(\"If degrees is a sequence, the length must be 2.\")", "def assertAngle(self, angle: float):\n assert isinstance(angle, float), f\"bad angle instance, type of angle is {type(angle)}\"\n assert -180 <= angle <= 180" ]
[ "0.65725535", "0.5832522", "0.5805457", "0.57412726", "0.5735171", "0.5672594", "0.56711775", "0.5633739", "0.56279016", "0.55926555", "0.5545153", "0.5520772", "0.5499564", "0.54983383", "0.5493291", "0.54628223", "0.53977174", "0.5384473", "0.5372744", "0.5369902", "0.53441924", "0.534307", "0.5338827", "0.5335255", "0.5321239", "0.53144026", "0.52888966", "0.5257386", "0.52139306", "0.5199641" ]
0.62599325
1
Magnetic variation validation. Format: decimal degrees with an E or W prefix (eastern or western magnetic variation).
def validate_magvar(mv):
    result = VALID
    mag_var = None
    try:
        mag_var = float(mv)
        if (mag_var > 360) or (mag_var < -360):
            result = NOT_VALID
    except ValueError:
        try:
            prefix = mv[0]
            # Check that the remainder is an unsigned number, e.g. 1.5, not -1.5
            if REGEX_MAGVAR_DD.match(mv[1:]):
                mag_var = float(mv[1:])
                if prefix == 'W':
                    result = -mag_var
                elif prefix == 'E':
                    result = mag_var
                else:
                    result = NOT_VALID
            else:
                result = NOT_VALID
            if (mag_var is not None) and ((mag_var > 360) or (mag_var < -360)):
                result = NOT_VALID
        except ValueError:
            result = NOT_VALID
            mag_var = None
    return result, mag_var
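The validator references an undefined REGEX_MAGVAR_DD. A minimal sketch under the assumption that it matches an unsigned decimal number (the sign comes from the E/W prefix), with the return values this implies:

import re

VALID = 'VALID'
NOT_VALID = 'NOT_VALID'
# Assumed pattern: unsigned decimal degrees, e.g. "3.5"
REGEX_MAGVAR_DD = re.compile(r'^\d{1,3}(\.\d+)?$')

print(validate_magvar("12.5"))  # ('VALID', 12.5)      -- plain float path
print(validate_magvar("W3.5"))  # (-3.5, 3.5)          -- western variation negated
print(validate_magvar("E361"))  # ('NOT_VALID', 361.0) -- out of range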
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testCalspecMags(self):\n std = MKIDStd.MKIDStd()\n bFilter = std.filters['B']\n vFilter = std.filters['V']\n\n # BD17\n bd17Flux = std.load(\"bd17\")\n B = std.getVegaMag(bd17Flux, bFilter)\n V = std.getVegaMag(bd17Flux, vFilter)\n self.assertAlmostEqual(B-V, 0.44, places=1, msg=\"value=%f\"%B)\n self.assertAlmostEqual(B, 9.47, places=0, msg=\"value=%f\"%B)", "def test_mag_form_fac_case1():\n ion = MagneticFormFactor('Fe')\n formfac, _temp = ion.calc_mag_form_fac()[0], ion.calc_mag_form_fac()[1:]\n del _temp\n assert (abs(np.sum(formfac) - 74.155233575216599) < 1e-12)", "def test_mag_form_fac():\n ion = MagneticFormFactor('Fe')\n formfac, _temp = ion.calc_mag_form_fac(q=1.)[0], ion.calc_mag_form_fac(q=1.)[1:]\n del _temp\n assert (abs(formfac - 0.932565) < 1e-6)", "def test_str_magnetic_moment(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"magnetic_moment\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0xC3,\n 0x8E,\n 0x7F,\n 0x73,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), -284.9956970214844)\n self.assertEqual(sensor.unit_of_measurement(), \"A m²\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_str_magnetic_field_strength(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"magnetic_field_strength\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x44,\n 0x15,\n 0xF1,\n 0xAD,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 599.7761840820312)\n self.assertEqual(sensor.unit_of_measurement(), \"A/m\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_str_electromagnetic_moment(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"electromagnetic_moment\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x45,\n 0x82,\n 0x48,\n 0xAE,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 4169.0849609375)\n self.assertEqual(sensor.unit_of_measurement(), \"A m²\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_setMassFrac(self):\n target35 = 0.2\n self.fuel.setMassFrac(\"U235\", target35)\n self.assertAlmostEqual(self.fuel.getMassFrac(\"U235\"), target35)", "def testGetVegaMag(self):\n std = MKIDStd.MKIDStd()\n vegaFlux = std.load(\"vega\")\n bd17Flux = std.load(\"bd17\")\n for filter in ['U','B','V','R','I']:\n aFilter = std.filters[filter] \n mag = std.getVegaMag(vegaFlux, aFilter)\n self.assertAlmostEqual(0.03, mag, msg=\"filter=%s mag=%f\"%(filter,mag))", "def test_maf(self):\n # Flipping the strand\n self.assertAlmostEqual(self.g.maf(), 8 / 18)", "def test_mag_form_fac_case2():\n ion = MagneticFormFactor('Fe')\n formfac, _temp = ion.calc_mag_form_fac(qrange=[0, 2])[0], ion.calc_mag_form_fac(qrange=[0, 2])[1:]\n del _temp\n assert (abs(np.sum(formfac) - 74.155233575216599) < 1e-12)", "def test_str_magnetic_flux(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx, \"TestSensor\", group_address_state=\"1/2/3\", value_type=\"magnetic_flux\"\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0xC5,\n 0xCB,\n 0x3C,\n 0x98,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), -6503.57421875)\n self.assertEqual(sensor.unit_of_measurement(), \"Wb\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_str_electric_field_strength(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"electric_field_strength\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 
0xC6,\n 0x17,\n 0x1C,\n 0x39,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), -9671.0556640625)\n self.assertEqual(sensor.unit_of_measurement(), \"V/m\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_float(self):\n self.assertFalse(validate_measure_input('0.0', self.measures))\n self.assertFalse(validate_measure_input('1.0', self.measures))\n self.assertFalse(validate_measure_input('1.1', self.measures))", "def ST_zero_mag(self):\n return 21.1", "def ST_zero_mag(self):\n return 21.1", "def test_setMassFracOnComponentMaterial(self):\n # Negative value is not acceptable.\n with self.assertRaises(ValueError):\n self.fuel.material.setMassFrac(\"U235\", -0.1)\n\n # Greater than 1.0 value is not acceptable.\n with self.assertRaises(ValueError):\n self.fuel.material.setMassFrac(\"U235\", 1.1)\n\n # String is not acceptable.\n with self.assertRaises(TypeError):\n self.fuel.material.setMassFrac(\"U235\", \"\")\n\n # `NoneType` is not acceptable.\n with self.assertRaises(TypeError):\n self.fuel.material.setMassFrac(\"U235\", None)\n\n # Zero is acceptable\n self.fuel.material.setMassFrac(\"U235\", 0.0)\n self.assertAlmostEqual(self.fuel.material.getMassFrac(\"U235\"), 0.0)\n\n # One is acceptable\n self.fuel.material.setMassFrac(\"U235\", 1.0)\n self.assertAlmostEqual(self.fuel.material.getMassFrac(\"U235\"), 1.0)", "def test_norm(self):\n self.assertEqual(\"Maximaal 3 dagen. Meer dan 7 dagen is rood.\", self.__metric.norm())", "def test_str_magnetic_flux_density(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"magnetic_flux_density\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x45,\n 0xB6,\n 0xBD,\n 0x42,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 5847.6572265625)\n self.assertEqual(sensor.unit_of_measurement(), \"T\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_dry_static_energy():\n dse = dry_static_energy(1000 * units.m, 25 * units.degC)\n assert_almost_equal(dse, 309.3479 * units('kJ/kg'), 4)", "def test_convert(self):\n height = 1.6 * self.meter\n foot = .305 * self.meter\n inch = 1 / 12 * foot\n\n self.assertTrue(abs(height / foot - 5.246) < .001)\n self.assertTrue(abs(height / inch - 62.951) < .001)\n\n newton = self.kgram * self.meter / (self.second ** 2)\n pound = 4.448222 * newton\n accel = 9.8 * self.meter / (self.second ** 2)\n\n weight = 150 * pound\n mass = weight / accel\n self.assertTrue(abs(mass / self.kgram - 68.085) < .001)", "def test_str_electric_potential_difference(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"electric_potential_difference\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0xC6,\n 0xF,\n 0x1D,\n 0x6,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), -9159.255859375)\n self.assertEqual(sensor.unit_of_measurement(), \"V\")\n self.assertEqual(sensor.ha_device_class(), None)", "def test_spectral_density_vega_wf(wf, fluxd, to):\n v = fluxd.to(to.unit, spectral_density_vega(wf))\n assert v.unit == to.unit\n if to.unit in (VEGAmag, JMmag):\n assert np.isclose(v.value, to.value, atol=0.001)\n else:\n assert np.isclose(v.value, to.value, rtol=0.001)", "def V_magEarth(alpha,a_p,d):\n V = 5.*np.log10(a_p*d) - 3.99 - 1.060e-3*alpha + 2.054e-4*alpha**2.\n return V", "def _random_magnitude_error(self):\n magnitude = random.uniform(MIN_MAG, MAX_MAG)\n snr = random.uniform(MIN_SNR, MAX_SNR)\n merror = snr_to_error(snr)[1] # the positive error\n return magnitude, merror", 
"def convert_mass(self, event):\n try:\n #Compare other unit to one unit(kilograms)\n current_value, current_unit = float(\"0\" + str(self.v.get())), self.dropdown.get()\n unit_comp = {\"Earth masses\": 5.97219e+24, \"Solar masses\": 1.9890000000000002e+30, \"carats\": 0.0002, \"cental\": 45.359237, \"decagrams\": 0.01, \"femtograms\": 1e-18, \"grains\": 6.479891000000001e-05, \"grams\": 0.001, \"hectograms\": 0.1, \"hundredweights\": 50.802345, \"kilograms\": 1.0, \"kilotonnes\": 1000000.0, \"megatonnes\": 1000000000.0, \"micrograms\": 1e-09, \"milligrams\": 1e-06, \"nanograms\": 1e-12, \"ounces(US & UK)\": 0.02835, \"ounces(precious metals)\": 0.031103, \"picograms\": 1e-15, \"pounds(US & UK)\": 0.453592, \"pounds(precious metals)\": 0.373242, \"slugs\": 14.593903, \"stones\": 6.350293, \"tonnes(metric)\": 1000.0, \"tons(UK)\": 1016.046909, \"tons(US)\": 907.18474}\n value_comp, printer = current_value * unit_comp[current_unit], \"\"\n unit_list = sorted(unit_comp.keys())\n unit_list.remove(current_unit)\n for unit in unit_list:\n printer += \"To %s \" % unit + \" \" * (max([len(i) for i in unit_list]) - len(unit)) + str(value_comp / unit_comp[unit]) + [\"\", \"\\n\"][unit_list[-1] != unit]\n except ValueError: #In case user enter the other type of value, not Int or Float\n printer = \"Value is invalid.\"\n self.print_text(printer)", "def test_str(self):\n self.assertEqual(str(self.meter), \"1 * m\")\n\n newton = self.kgram * self.meter / (self.second ** 2)\n\n self.assertEqual(str(newton), \"1.0 * kg * m * s^-2\")", "def human_m(v):\n if v < 1e-2:\n return (v*1.0e3, 'mm')\n if v < 1:\n return (v*1.0e2, 'cm')\n if v < 1000:\n return (v, 'm')\n return (v/1.0e3, 'km')", "def mass_handling_gear(\n design_mass_TOGW: float,\n):\n return 3e-4 * design_mass_TOGW", "def test_str_electric_dipole_moment(self):\n xknx = XKNX()\n sensor = Sensor(\n xknx,\n \"TestSensor\",\n group_address_state=\"1/2/3\",\n value_type=\"electric_dipole_moment\",\n )\n sensor.sensor_value.payload = DPTArray(\n (\n 0x45,\n 0x58,\n 0xF1,\n 0x73,\n )\n )\n\n self.assertEqual(sensor.resolve_state(), 3471.090576171875)\n self.assertEqual(sensor.unit_of_measurement(), \"C m\")\n self.assertEqual(sensor.ha_device_class(), None)", "def difficulty(mag):\n mag = float(mag)\n if mag <= -4:\n return \"Visible in daytime.\"\n elif mag <= 6:\n return \"Visible at night.\"\n else:\n flux = mag_def(\"%s x\" % mag)\n needed_flux = mag_def(\"6 x\")\n eye_area = math.pi * (0.005**2)\n needed_power = needed_flux * eye_area\n diameter = 2 * math.sqrt(needed_power / (flux*math.pi))\n return \"%s m telescope needed.\" % diameter" ]
[ "0.6224968", "0.60714537", "0.6064959", "0.60200757", "0.60145694", "0.5896334", "0.5814267", "0.5782454", "0.5778193", "0.5773811", "0.56517756", "0.55824465", "0.5580473", "0.555041", "0.555041", "0.5545234", "0.55397755", "0.55223715", "0.54866207", "0.5440148", "0.54325366", "0.54229546", "0.5417857", "0.53764385", "0.53711724", "0.53590405", "0.5355462", "0.53508407", "0.5332281", "0.53258455" ]
0.66408545
0
Creates a temporary layer name from the current date.
def tmp_layer_name():
    curr_time = datetime.datetime.now()
    tmp_lyr_name = str(curr_time).replace('-', '')
    tmp_lyr_name = tmp_lyr_name.replace(':', '')
    tmp_lyr_name = tmp_lyr_name.replace(' ', '_')
    tmp_lyr_name = tmp_lyr_name[:15]
    return tmp_lyr_name + '_tmp_memory'
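Usage note (the snippet assumes import datetime; the output depends on the clock): str(datetime.datetime.now()) looks like '2024-05-01 12:34:56.789012', so after the replacements the first 15 characters are 'YYYYMMDD_HHMMSS':

import datetime

# At 2024-05-01 12:34:56 this prints '20240501_123456_tmp_memory'
print(tmp_layer_name())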
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_layer_name(self, layer):\n label = '{}-{}'.format(layer.label, layer.rank)\n if label not in self.naming_map:\n self.naming_map[label] = {}\n\n if layer not in self.naming_map[label].keys():\n self.naming_map[label][layer] = len(self.naming_map[label]) + 1\n return '{}-{}'.format(label, self.naming_map[label][layer])", "def CreateLayer(self,layername):\n\t\treturn self.acad.ActiveDocument.Layers.Add(layername)", "def name_layer_factory(num=0, name_prefix=\"\", name_suffix=\"\"):\n def name_layer_fn(layer):\n return '{}{}{}-{}'.format(name_prefix, layer, name_suffix, num)\n \n return name_layer_fn", "def layer_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"layer_name\")", "def generate_name(self, name):\n return \"{}/{}.{}\".format(self.name, self._layer_counter, name)", "def layer_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"layer_name\")", "def nice_layer_name(weight_key):\n return \"Layer {num} {name}\".format(num=weight_key[1] + 1, name=weight_key[0])", "def create_tmp_layer(self, l_name):\n output_lyr = QgsVectorLayer('Point?crs=epsg:4326', l_name, 'memory')\n prov = output_lyr.dataProvider()\n output_lyr.startEditing()\n prov.addAttributes([QgsField(\"ID\", QVariant.Int),\n QgsField(\"P_NAME\", QVariant.String),\n QgsField(\"LAT_DMS\", QVariant.String),\n QgsField(\"LON_DMS\", QVariant.String)])\n output_lyr.commitChanges()\n QgsMapLayerRegistry.instance().addMapLayers([output_lyr])", "def create(self):\n\n if rs.IsLayer(self.name):\n\n return self\n\n mom = \"\"\n \n for s in self.path:\n \n son = s if (mom == \"\") else (mom + \"::\" + s)\n\n mommy = None if mom == \"\" else mom\n\n if not rs.IsLayer(son):\n\n rs.AddLayer(s, color = None, visible = True, locked = False, parent = mommy)\n\n mom = son\n \n return self", "def create_layers_table():\n\n table_name = f\"{BQ_LAYERS_TABLE}\"", "def layer_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"layer_name\")", "def add_layer(self, layer_name, layer_def):\n\n layer_idx, datatype = layer_def.split(\"/\")\n layer_idx = int(layer_idx)\n datatype = int(datatype)\n self.layers[layer_name] = LayerInfo(layer_idx, datatype, layer_name)", "def createRenderLayer(*args, empty: bool=True, g: bool=True, makeCurrent: bool=True, name:\n AnyStr=\"\", noRecurse: bool=True, number: int=0, **kwargs)->AnyStr:\n pass", "def init_name(self):\r\n try:\r\n rval = self.name\r\n except AttributeError:\r\n if 0:\r\n l = []\r\n for n in self.fgraph.toposort():\r\n if hasattr(n.op, \"name\") and n.op.name is not None:\r\n v = n.op.name\r\n if v.startswith(\"Composite\"):\r\n v = v[len(\"Composite\"):]\r\n else:\r\n v = n.op.__class__.__name__\r\n l.append(v)\r\n rval = \"Composite{\" + \",\".join(l) + \"}\"\r\n else:\r\n for i, r in enumerate(self.fgraph.inputs):\r\n r.name = 'i%i' % i\r\n for i, r in enumerate(self.fgraph.outputs):\r\n r.name = 'o%i' % i\r\n io = set(self.fgraph.inputs + self.fgraph.outputs)\r\n for i, r in enumerate(self.fgraph.variables):\r\n if r not in io and len(r.clients) > 1:\r\n r.name = 't%i' % i\r\n rval = \"Composite{%s}\" % str(self.fgraph)\r\n self.name = rval", "def name(self):\n return 'Greenlet-%d' % (self.minimal_ident,)", "def get_unique_name(self, prefix):\n\t\tident = sum(t.startswith(prefix) for t, _ in self.layers.items()) + 1\n\t\treturn '%s_%d' % (prefix, ident)", "def GenTemp(root=\"obj-\"):\n global _GT_NAMES_\n if root in _GT_NAMES_.keys():\n _GT_NAMES_[root]+=1\n else:\n _GT_NAMES_[root]=0\n return \"%s%d\" % (root, _GT_NAMES_[root])", "def 
_build_name(name_id):\n return \"xp_%08d\" % name_id", "def _build_name(self):\n self.ctrl_name = NameUtils.get_unique_name(self.asset,\n self.side,\n self.part,\n \"ctrl\")", "def name(self):\n return \"Separate Layers\"", "def make_tex_name(self, end):\n tex_name = \"\"\n if hasattr(self, 'labels'):\n if self.labels.dict['data_name'] == '':\n tex_name += \"data_\"\n else:\n tex_name += \"true_%s_\"%self.labels.dict['data_name']\n if self.detector is not None:\n tex_name += \"%s_\"%self.detector\n if self.selection is not None:\n tex_name += \"%s_\"%self.selection\n tex_name += end\n tex_name += \".tex\"\n return tex_name", "def make_layers(self):\r\n #assuming temporal field is always the first column!\r\n timeCol = self.data.columns[0]\r\n times = self.data[timeCol].unique() \r\n lat = self.data.lat.unique()\r\n lon = self.data.lon.unique()\r\n shape = (len(lat), len(lon))\r\n depths, hours = [None], [None]\r\n if 'depth' in self.data.columns:\r\n depths = self.data.depth.unique()\r\n if 'hour' in self.data.columns:\r\n hours = self.data.hour.unique()\r\n layers, titles = [], []\r\n for t in times:\r\n for h in hours:\r\n for z in depths:\r\n frame = self.data[self.data[timeCol] == t]\r\n\r\n if timeCol == 'time':\r\n sub = self.variable + self.unit + ', ' + str(datetime.strptime(t, '%Y-%m-%dT%H:%M:%S').date())\r\n else:\r\n sub = self.variable + self.unit + ', ' + timeCol + ': ' + str(t) \r\n\r\n if h != None:\r\n frame = frame[frame['hour'] == h]\r\n sub = sub + ', hour: ' + str(h) + 'hr'\r\n if z != None:\r\n frame = frame[frame['depth'] == z] \r\n sub = sub + ', depth: %2.2f' % z + ' [m]' \r\n try: \r\n layers.append(frame[self.variable].values.reshape(shape))\r\n titles.append(sub)\r\n except Exception as e:\r\n continue \r\n return layers, titles, lat, lon", "def create_layer(layer_name, table, flds=None, where=None, shp_prefix=None):\n # if list of names provided, convert to dictionary\n if isinstance(flds, list):\n t = {}\n for item in flds:\n t[item] = item\n flds = t\n\n # add shape fields if desired\n if shp_prefix is not None:\n desc = arcpy.Describe(table)\n if desc.shapeType == \"Polygon\":\n flds[desc.AreaFieldName] = shp_prefix + '_area'\n\n # create field definitions\n fi = arcpy.FieldInfo()\n for fld in arcpy.ListFields(table):\n fld_name = fld.name\n if flds is None:\n fi.addField(fld_name, fld_name, 'VISIBLE', '')\n else:\n value = flds.get(fld_name, None)\n if value is not None:\n fi.addField(fld_name, value, 'VISIBLE', '')\n else:\n fi.addField(fld_name, fld_name, 'HIDDEN', '')\n\n # create the feature layer\n if where is None:\n arcpy.MakeFeatureLayer_management(table, layer_name, field_info=fi)\n else:\n arcpy.MakeFeatureLayer_management(table, layer_name, where, field_info=fi)", "def create_name(name, epochs, lr, lr_decay_step, dilation, batch_size):\n\treturn '{}_ep-{}_lr-{}_de-{}_di-{}_bs-{}'.format(name, epochs, lr, lr_decay_step, sum(dilation), batch_size)", "def tilefilename(self, x, y, z):\n\n tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z]\n return os.path.join(\"TileGroup%.0f\" % math.floor( tileIndex / 256 ),\n \"%s-%s-%s.%s\" % ( z, x, y, self.tileformat))", "def create_savename(self):\n \n savename = self.config.get('grid', 'dir') + self.fname.split('/')[-1]\n newsuffix = '_gridded_%ix%ix%i.nc' % (self.nx, self.ny, self.nz)\n savename = savename.replace('.nc', newsuffix)\n \n return savename", "def createEmptyLayer(self):\n # , wt.greeting: False , wt.ects: False, wt.preReqs: False, wt.courseCodeMentioned: False\n layer = 
{wt.questionWord: \"\", wt.pronoun: \"\", wt.verb: \"\", wt.websiteName: \"\", wt.timeWord: \"\", wt.about: \"\",\n wt.weather: \"\", wt.when: \"\", wt.keywords: [], wt.courseID: \"\", wt.structureUnitCode: \"\",\n wt.sentence: [], wt.hangman: \"\", wt.what: \"\"}\n return layer", "def getDefaultLayerName(self):\n\t\treturn self._fileSystem.getDefaultLayerName()", "def ogrCreateLayer(sourceLayer, pgConn, destinationLayer):\r\n print \" Creating {0}\".format(destinationLayer)\r\n newLayer = pgConn.CreateLayer(destinationLayer)\r\n\r\n lyrDefn = sourceLayer.GetLayerDefn()\r\n for i in range( lyrDefn.GetFieldCount() ):\r\n ##print \"Creating field: {0}\".format(lyrDefn.GetFieldDefn( i ).GetName())\r\n\r\n fieldName = lyrDefn.GetFieldDefn( i ).GetName()\r\n fieldType = lyrDefn.GetFieldDefn( i ).GetType()\r\n newField = ogr.FieldDefn(fieldName, fieldType)\r\n newLayer.CreateField(newField)", "def generate_surface_temp(self):\n tcl_name = output_folder + \"/surface_output_\" + str(self.input_pdb_path).split(\"/\")[-1][0:-4] + \"_\" + str(self.current_chain) + \".tcl\"\n opened_file = open(tcl_name, \"w\")\n writeable_string = surface(self.input_pdb_path).surface_template(chain = str(self.current_chain))\n opened_file.write(writeable_string)" ]
[ "0.65522903", "0.6454748", "0.64497757", "0.6411795", "0.64012676", "0.63176304", "0.62226367", "0.61490446", "0.6055188", "0.59703803", "0.5958316", "0.5849348", "0.5835588", "0.58297", "0.58277553", "0.5794272", "0.5777519", "0.57633513", "0.5748867", "0.572543", "0.5639063", "0.5631172", "0.5625015", "0.5604598", "0.5594685", "0.5587945", "0.55594385", "0.5551681", "0.55332273", "0.55197066" ]
0.72437567
0
Select output csv file
def select_output_file(self):
    output_file = QFileDialog.getSaveFileName(self.dlg, "Select output file ", "", '*.csv')
    self.dlg.leOutCsv.setText(output_file)
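Note the API difference across Qt bindings: in PyQt4, QFileDialog.getSaveFileName returns a string (as this snippet expects), while in PyQt5 it returns a (path, selected_filter) tuple. A hedged PyQt5-compatible sketch, keeping the self.dlg.leOutCsv widget assumed by the original:

from PyQt5.QtWidgets import QFileDialog

def select_output_file(self):
    # PyQt5 returns (path, selected_filter); unpack and ignore the filter
    output_file, _ = QFileDialog.getSaveFileName(
        self.dlg, "Select output file", "", "CSV files (*.csv)")
    if output_file:  # empty string if the user cancelled
        self.dlg.leOutCsv.setText(output_file)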
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def export_csv(self):\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".csv\",\n filetypes=((\"comma seperated values\", \"*.csv\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n tabledata = self.tabs.window.aistracker.create_table_data()\n export.write_csv_file(tabledata, outputfile)\n else:\n raise ExportAborted('Export cancelled by user.')", "def file_select(self):\n fname = QFileDialog.getSaveFileName(self,\n 'select file',\n '/home/pi/Documents/output.csv',\n \"csv file (*.csv)\")\n self.ui.qtBrowse.clear()\n self.ui.qtBrowse.setText(fname)", "def saveCSV(self):\n filename=tkFileDialog.asksaveasfilename(defaultextension='.csv',\n initialdir=os.getcwd(),\n filetypes=[(\"csv\",\"*.csv\"),(\"All files\",\"*.*\")])\n if not filename:\n return\n for m in self.matrices:\n matrix = self.matrices[m] \n if matrix != None: \n c=matrix.csvRepresentation()\n f=open(filename,'w')\n f.write(c)\n f.close()\n return", "def export_to_csv(da_locals, selection_widget, out):\n df_name = selection_widget.value\n da_locals[df_name].to_csv(\"output/{}.csv\".format(df_name), index=False)\n out.clear_output()\n out.append_display_data(FileLinks(\"output\"))", "def save_csv_file():\n global output_on_display, import_lst, column_names, data\n if data_base == '':\n mistake_load_table()\n else:\n column_names = data[0]\n save_name = asksaveasfilename(title=\"Select file\", filetypes=((\"CSV\", \"*.csv\"), (\"all files\", \"*.*\")),\n confirmoverwrite=True, defaultextension='.csv')\n step = len(column_names)\n data_csv = import_lst\n if len(data_csv[0]) == step:\n pass\n else:\n data_csv = import_lst[step::]\n\n with open(save_name, 'w+') as csv_file:\n csv_writer = csv.writer(csv_file)\n csv_writer.writerow(column_names)\n csv_writer.writerows(data_csv)", "def exportCSV(self, log, csvFile):\n return 0", "def export_to_csv(self, outputfilepath, separator):\n\n con = fdb.connect(\n database=self.db_filepath,\n # dsn='localhost:~/test/CGI.vvv', #localhost:3050\n user='sysdba', password='masterkey'\n #charset='UTF8' # specify a character set for the connection\n )\n cur = con.cursor()\n statement = \"select * from FILES\"\n cur.execute(statement)\n # Retrieve all rows as a sequence and print that sequence:\n print(cur.fetchall())\n\n # VVV export format: Volume,Path,Name,Size,Ext,Last modified,Description\n\n with open(outputfilepath, 'w') as csvfile:\n writer = csv.writer(csvfile)\n writer.writerow([ i[0] for i in cur.description ]) \n writer.writerows(cur.fetchall())", "def csv(self, outname=None):\n # the first line is the vertical band line and is thus excluded\n profiles = self.ax2.get_lines()[1:]\n if len(profiles) == 0:\n return\n \n if outname is None:\n root = Tk()\n # Hide the main window\n root.withdraw()\n outname = filedialog.asksaveasfilename(initialdir=os.path.expanduser('~'),\n defaultextension='.csv',\n filetypes=(('csv', '*.csv'),\n ('all files', '*.*')))\n if outname is None:\n return\n \n with open(outname, 'w') as csv:\n csv.write('id;bandname;row;column;xdata;ydata\\n')\n for i in range(0, len(profiles)):\n line = profiles[i]\n xdata = line.get_xdata()\n ydata = line.get_ydata()\n \n # get the row and column indices of the profile\n legend_text = self.ax2.get_legend().texts[i].get_text()\n legend_items = re.sub('[xy: ]', '', legend_text).split(';')\n col, row = [int(x) for x in legend_items]\n \n for j in range(0, self.bands):\n entry = '{};{};{};{};{};{}\\n'.format(i + 1, self.bandnames[j], row, col, xdata[j], ydata[j])\n csv.write(entry)\n csv.close()", "def 
file(self):\n result = []\n completePath = CompletePath(self.path, self.filename) \n with open(completePath.path(), 'w', newline='') as csvfile:\n fieldnames = ['Activity', 'Points']\n writer = csv.DictWriter(csvfile, fieldnames = fieldnames)\n writer.writeheader()\n for i in range ( len( self.groupPriority.rows() ) ):\n tmp = self.groupPriority.rows()[i]\n self.log.info ( \"FinalCSV\", \"file\",\"data {0},{1}\".format( tmp.activity(), tmp.points() ) )\n writer.writerow({'Activity': tmp.activity(), 'Points': tmp.points()})\n self.log.info(\"FinalCSV\", \"file\", \"Elaborated file: {0}\".format ( completePath.path() ) )", "def export_csv(self, outpath):\n\n\t\tself.df.to_csv(outpath)", "def write_csv(self):\n self.tableView.df.to_csv('Data export.csv', index=False)\n print('CSV file exported')", "def _write_csv(self):\n\n # add the label to the header\n if self.input_data.get_value(InputType.TIME_PERIOD) == 'all':\n self.header.append('Date')\n else:\n self.header.append('sample id')\n\n key_list = []\n\n for i, cube in enumerate(self.cube_list):\n if self.input_data.get_value(InputType.TIME_PERIOD) == 'all':\n self._write_sample_with_date(cube, i, key_list)\n else:\n self._write_sample(cube, i, key_list)\n\n output_data_file_path = self._get_full_file_name()\n self._write_data_dict(output_data_file_path, key_list)\n\n return [output_data_file_path]", "def write_csv(self, out_file_name, header):\n\n with open(out_file_name, 'wb') as outf:\n writer = csv.writer(outf, quoting=csv.QUOTE_ALL)\n writer.writerow(header)\n writer.writerows(self.records)", "def save_csv(self, filename): # DONE\n self.data.to_csv(filename)", "def generate_csv(self, output_file):\n try: # We are going to \"try\" something\n csv_file = open(output_file, 'w+') # open \"output_file\" as a writable file and return a handle called \"csv_file\"\n except OSError as err: # If something goes wrong with the open, we catch the exception\n fatal(\"{0}\".format(err), -1) # exit with something other than 0 so the shell knows something went wrong\n \n writer = csv.writer(csv_file) # create a CSV writing object that's pointing at our open file handle\n\n writer.writerow([\"Question\",\"Answers\"]) # Let's write the top row\n for k in self.questions.keys(): # Let's walk down the directory by key\n # write the \"key\" (which is the question) and then let's take the list of answers and create a comma delmited list.\n # this is likely totally wrong since you could have an answer in it that also has a comma...\n writer.writerow([k, \",\".join(self.questions[k].answers)]) # insert a key (which is the question) and then let's take the array of \n\n csv_file.close() # close the csv_file file handle", "def csv_file(data,output_dir,filename,order = [],head = True):\n with open(output_dir + filename + '.csv', 'w') as f:\n write = csv.writer(f)\n write.writerows(manip.dic_to_list(data,order,head),)\n return None", "def write_csv(self, directory = None):\n if ((directory is None) and\n (self._session.config.folder_basename is not None)):\n directory = self._session.results._full_path\n else:\n return\n \n file = CSV_file(self, directory)\n file.write()\n return file", "def on_export_button(self, event):\n wildcard = \"Filtered _iso_res_filt.csv file (*_iso_res_filt.csv)|*_iso_res_filt.csv|\"\\\n \"All files (*.*)|*.*|\"\n defFile = self.datafile[:-4]+'_filt.csv'\n dlg = wx.FileDialog(\n self, message=\"Save file as ...\", \n defaultDir=self.currentDirectory, \n defaultFile=defFile, wildcard=wildcard, style=wx.SAVE\n )\n if dlg.ShowModal() == 
wx.ID_OK:\n path = dlg.GetPath()\n self.recalcAll()\n self.redrawAll()\n self.dataFrame['priorFilter'] = self.dataFrame['allFPass']\n self.dataFrame.to_csv(path, index=False)\n summaryCSVPath = path.split('.')[0] + '_median_[' + ''.join(self.calcNum) + ']_[' + ''.join(self.calcDen) + '].csv'\n self.writeSummaryCSV(summaryCSVPath)\n \n dlg.Destroy()", "def write_csv(self, filename, cutoff=2):\n f = csv.writer(open(filename, 'wb'))\n for row in self.rows(cutoff=cutoff):\n f.writerow(row)", "def csv_file(input_file):\n\n current_dir = os.getcwd()\n directory_name = current_dir + '\\\\' + 'data' + '\\\\'\n csv_out = directory_name + input_file\n return csv_out", "def prepare_out_csv(output_dir, filename):\n out_columns_pi = ['fasta_file', 'acc.code',\n 'organism', 'EC.code', 'species',\n 'note', 'pi', 'modification', 'category']\n string = ''\n for i in out_columns_pi:\n if i == out_columns_pi[-1]:\n string += i\n else:\n string += i+','\n string += '\\n'\n with open(output_dir+filename, 'w') as f:\n f.write(string)", "def csv(self, destination_path):\n # todo - test for single and duplicate base cases\n to_csv(self._axl_data, destination_path)", "def export(self, out_filename='RESULT.csv'):\n\n query = input(\n \"Enter output filename (Default = {}):\\n>\".format(out_filename))\n # Set filename to the default value if user doesn't enter one\n if query == '':\n query = out_filename\n\n with open(query, encoding='utf-8', mode='w', newline='\\n') as out_file:\n writer = csv.DictWriter(out_file, fieldnames=self.headers)\n\n writer.writeheader() # Write the headers to the first row\n for record in self.records:\n writer.writerow(record.attributes)\n\n print(\"Successfully output to {}\".format(query))", "def generate_dataset_csv(request):\n\n response = csv_export(request,Dataset)\n return response", "def outputLevelCsv(self):\n # extract level information from result info\n extract_level = []\n extract_level = [item for item in self._result_info if self._result_info[2][0:5]=='LEVEL']\n if extract_level == []:\n print('No Result of LEVEL')\n return None\n # copy need information\n for i, item in enumerate(extract_level):\n self._level_csv_list[i][0] = item[1]\n self._level_csv_list[i][1] = item[2].split('-')[1]\n self._level_csv_list[i][2] = item[2].split('-')[2]\n self._level_csv_list[i][3] = item[4]\n # set csv file name\n csv_file_name = self._filename.rsplit('.', 1)[1] + '.csv'\n # write csv\n with open(csv_file_name, 'w') as csv_file:\n writer = csv.writer(csv_file)\n writer.writerows(self._level_csv_list)", "def write_csv(self, file: str, table: str, libref: str =\"\", nosub: bool =False, dsopts: dict = None, opts: dict = None) -> 'The LOG showing the results of the step':\n dsopts = dsopts if dsopts is not None else {}\n opts = opts if opts is not None else {}\n\n code = \"filename x \\\"\"+file+\"\\\";\\n\"\n code += \"options nosource;\\n\"\n code += \"proc export data=\"\n\n if len(libref):\n code += libref+\".\"\n\n code += \"'\"+table.strip()+\"'n \"+self._sb._dsopts(dsopts)+\" outfile=x dbms=csv replace; \"\n code += self._sb._expopts(opts)+\" run\\n;\"\n code += \"options source;\\n\"\n\n if nosub:\n print(code)\n else:\n ll = self.submit(code, \"text\")\n return ll['LOG']", "def save_csv(self, filename: str, type='n', **args):\n if type == 'n':\n df = self.export_nodes()\n else:\n df = self.export_edges()\n df.to_csv(filename, index=False)", "def to_csv(self, path):\n results = self.all()\n if self.stop_check is not None and self.stop_check():\n return\n results.to_csv(path)", "def 
create_csv_file(self):\r\n # Create a new csv-file\r\n with open(self.fname, 'w') as f:\r\n writer = csv.writer(f, dialect='excel')\r\n writer.writerow(['set_time',\r\n 'read_time_P_ac',\r\n 'read_time_P_bat',\r\n 'soc',\r\n 'set_value',\r\n 'P_ac',\r\n 'P_bat'])", "def write_to_file(self, results):\n with open(self.outputFilename, \"w\") as csvFile:\n csvWriter = csv.writer(csvFile, delimiter=',') \n title_row = ('asset_id', 'component_id', 'latitude', 'longitude', 'installation_date', 'commissioning_date', 'street_name', 'cabinet_id', 'nominal_wattage', 'current_time', 'current_LogValue', 'current_IsLogValueOff') \n csvWriter.writerow(title_row)\n for record in results:\n csvWriter.writerow(record)" ]
[ "0.7275462", "0.6781139", "0.66611564", "0.6605735", "0.65933424", "0.6562594", "0.6442843", "0.64320594", "0.6408648", "0.63245755", "0.62903845", "0.62317413", "0.62267977", "0.6220033", "0.6213078", "0.620042", "0.6174863", "0.61640847", "0.6160434", "0.61497813", "0.6149688", "0.6128555", "0.61273986", "0.6127008", "0.6112569", "0.61038965", "0.6081243", "0.6080904", "0.60654664", "0.6047348" ]
0.7275964
0
Asks the Azure Face API to detect faces from a file. Specifically, it requests the emotion attribute and asks Azure to use the 'recognition_02' model, which is slightly more accurate than the default 'recognition_01'.
def request():
    return face_client.face.detect_with_stream(
        image=open("frame.png", 'rb'),
        return_face_attributes=[emotion_attribute],
        recognition_model='recognition_02')
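The snippet assumes a configured face_client and an emotion_attribute name. A minimal setup sketch using the azure-cognitiveservices-vision-face SDK — the endpoint and key are placeholders:

from azure.cognitiveservices.vision.face import FaceClient
from msrest.authentication import CognitiveServicesCredentials

emotion_attribute = 'emotion'  # attribute requested via return_face_attributes
face_client = FaceClient(
    "https://<your-resource>.cognitiveservices.azure.com/",   # placeholder endpoint
    CognitiveServicesCredentials("<your-subscription-key>"))  # placeholder key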
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def detect_face(face_file, max_results=4):\n image_content = face_file.read()\n batch_request = [{\n 'image': {\n 'content': base64.b64encode(image_content).decode('utf-8')\n },\n 'features': [{\n 'type': 'FACE_DETECTION',\n 'maxResults': max_results,\n }]\n }]\n\n service = get_vision_service()\n request = service.images().annotate(body={\n 'requests': batch_request,\n })\n loop = asyncio.get_event_loop()\n response = await loop.run_in_executor(None, request.execute)\n\n return response['responses'][0]['faceAnnotations'] if 'faceAnnotations' in response['responses'][0] else None", "def detect_face(face_file, max_results=4):\n client = vision.ImageAnnotatorClient()\n\n content = face_file.read()\n image = types.Image(content=content)\n\n return client.face_detection(image=image).face_annotations", "def detect_face(face_file, max_results=10):\n client = vision.ImageAnnotatorClient()\n\n content = face_file.read()\n image = types.Image(content=content)\n\n return client.face_detection(image=image).face_annotations", "def detect_faces_in_video(self):\r\n logger.debug('Executing face detection')\r\n\r\n use_eyes_position = c.USE_EYES_POSITION\r\n\r\n if ((self.params is not None) and\r\n (c.USE_EYES_POSITION_KEY in self.params)):\r\n use_eyes_position = self.params[c.USE_EYES_POSITION_KEY]\r\n\r\n det_loaded = False\r\n\r\n # Try to load YAML file with detection results\r\n if os.path.exists(self.det_file_path):\r\n\r\n print 'Loading YAML file with detection results'\r\n logger.debug('Loading YAML file with detection results')\r\n\r\n det_faces = utils.load_YAML_file(self.det_file_path)\r\n\r\n if det_faces:\r\n self.detected_faces = det_faces\r\n\r\n print 'YAML file with detection results loaded'\r\n logger.debug('YAML file with detection results loaded')\r\n\r\n det_loaded = True\r\n\r\n if not det_loaded:\r\n\r\n # Check existence of frame list\r\n if len(self.frame_list) == 0:\r\n\r\n # Try to load YAML file with frame list\r\n if os.path.exists(self.frames_file_path):\r\n\r\n print 'Loading YAML file with frame list'\r\n logger.debug('Loading YAML file with frame list')\r\n\r\n f_list = utils.load_YAML_file(self.frames_file_path)\r\n\r\n if f_list:\r\n\r\n self.frame_list = f_list\r\n\r\n print 'YAML file with frame list loaded'\r\n logger.debug('YAML file with frame list loaded')\r\n\r\n else:\r\n\r\n print 'Warning! Error in loading file!'\r\n logger.warning('Error in loading file!')\r\n\r\n else:\r\n\r\n print 'Warning! 
No frame list found!'\r\n logger.warning('No frame list found!')\r\n\r\n return\r\n\r\n print '\\n\\n### Face detection ###\\n'\r\n logger.debug('\\n\\n### Face detection ###\\n')\r\n\r\n # Save processing time\r\n start_time = cv2.getTickCount()\r\n\r\n if not (os.path.exists(self.det_path)):\r\n # Create directory for this video\r\n\r\n os.makedirs(self.det_path)\r\n\r\n if not (os.path.exists(self.align_path)):\r\n # Create directory with aligned faces\r\n\r\n os.makedirs(self.align_path)\r\n\r\n frame_counter = 0\r\n self.detected_faces = []\r\n\r\n detection_results = []\r\n\r\n # Build list of frame names, frame paths and elapsed time\r\n frame_name_list = []\r\n\r\n frame_path_list = []\r\n\r\n elapsed_s_list = []\r\n\r\n for frame_dict in self.frame_list:\r\n frame_name = frame_dict[c.SAVED_FRAME_NAME_KEY]\r\n\r\n frame_name_list.append(frame_name)\r\n\r\n frame_path = os.path.join(self.frames_path, frame_name)\r\n\r\n frame_path_list.append(frame_path)\r\n\r\n elapsed_s = frame_dict[c.ELAPSED_VIDEO_TIME_KEY]\r\n\r\n elapsed_s_list.append(elapsed_s)\r\n\r\n # Iterate through frame paths in list\r\n for frame_path in frame_path_list:\r\n self.progress = 100 * (frame_counter / self.saved_frames)\r\n\r\n print('progress: ' + str(self.progress) + ' % \\r'),\r\n\r\n detection_result = fd.detect_faces_in_image(\r\n frame_path, self.align_path, self.params, False)\r\n\r\n detection_results.append(detection_result)\r\n\r\n frame_counter += 1\r\n\r\n frame_counter = 0\r\n\r\n # Iterate through detection results\r\n for detection_result in detection_results:\r\n\r\n detection_error = detection_result[c.ERROR_KEY]\r\n\r\n detection_dict = {\r\n c.SAVED_FRAME_NAME_KEY: frame_name_list[frame_counter],\r\n c.FRAME_COUNTER_KEY: frame_counter}\r\n\r\n elapsed_s = elapsed_s_list[frame_counter]\r\n\r\n detection_dict[c.ELAPSED_VIDEO_TIME_KEY] = elapsed_s\r\n\r\n faces = []\r\n if not detection_error:\r\n\r\n det_faces = detection_result[c.FACES_KEY]\r\n\r\n for det_face in det_faces:\r\n\r\n face_dict = {c.BBOX_KEY: det_face[c.BBOX_KEY]}\r\n\r\n if use_eyes_position:\r\n face_dict[c.LEFT_EYE_POS_KEY] = (\r\n det_face[c.LEFT_EYE_POS_KEY])\r\n\r\n face_dict[c.RIGHT_EYE_POS_KEY] = (\r\n det_face[c.RIGHT_EYE_POS_KEY])\r\n\r\n face_dict[c.NOSE_POSITION_KEY] = (\r\n det_face[c.NOSE_POSITION_KEY])\r\n\r\n face_dict[c.ALIGNED_FACE_FILE_NAME_KEY] = (\r\n det_face[c.ALIGNED_FACE_FILE_NAME_KEY])\r\n\r\n faces.append(face_dict)\r\n\r\n detection_dict[c.FACES_KEY] = faces\r\n\r\n self.detected_faces.append(detection_dict)\r\n\r\n frame_counter += 1\r\n\r\n # Save detection results in YAML file\r\n\r\n utils.save_YAML_file(self.det_file_path, self.detected_faces)\r\n\r\n time_in_clocks = cv2.getTickCount() - start_time\r\n time_in_seconds = time_in_clocks / cv2.getTickFrequency()\r\n\r\n print 'Time for face detection: ', time_in_seconds, 's\\n'\r\n logger.debug('Time for face detection: ', time_in_seconds, 's\\n')\r\n\r\n self.anal_times[c.FACE_DETECTION_TIME_KEY] = time_in_seconds\r\n\r\n utils.save_YAML_file(self.analysis_file_path, self.anal_times)", "def validate(model: str = \"hog\"):\n for filepath in Path(\"validation\").rglob(\"*\"):\n if filepath.is_file():\n recognize_faces(\n image_location=str(filepath.absolute()), model=model\n )", "def recognize(self, options):\n params = {\n 'faceRectangles': options['faceRectangles'] if 'faceRectangles' in options else ''\n }\n\n return Base._postWithOptions(self, _emotionRecognizeUrl, options, params)", "def detect_face_task(img):\n\n # paramter for detect\n # 
image_size = 160\n # margin = 44\n minsize = 20 # minimum size of face\n threshold = [0.6, 0.7, 0.7] # three steps's threshold\n factor = 0.709 # scale factor\n\n # caffe model\n pnet = caffe_model.get_pnet()\n rnet = caffe_model.get_rnet()\n onet = caffe_model.get_onet()\n\n bounding_boxes, _ = detect_face.detect_face(img, minsize, pnet, rnet, onet, threshold, factor)\n print('detect bounding: ', bounding_boxes)\n print('Find faces: ', bounding_boxes.shape[0])\n\n # all_faces is faces information list, include face bytes, face position\n all_faces = []\n for face_position in bounding_boxes:\n face_position = face_position.astype(int)\n print('face position: ', face_position)\n\n # each face information, include position, face image\n head_rect = face_position[:4].tolist() # numpy array to python list\n head_img = misc.toimage(img).crop(head_rect)\n head_img_io = StringIO.StringIO()\n head_img.save(head_img_io, format='JPEG')\n head_img_b64 = base64.b64encode(head_img_io.getvalue())\n\n # construct response\n face_info = {}\n face_info['rect'] = head_rect\n face_info['image'] = head_img_b64\n\n all_faces.append(face_info)\n\n return all_faces", "def detectFaces():\n faceEngine = VLFaceEngine()\n detector = faceEngine.createFaceDetector(DetectorType.FACE_DET_V3)\n\n imageWithOneFace = VLImage.load(filename=EXAMPLE_O)\n pprint.pprint(detector.detectOne(imageWithOneFace, detect5Landmarks=False, detect68Landmarks=False).asDict())\n detection = detector.detectOne(imageWithOneFace, detect5Landmarks=False, detect68Landmarks=False)\n pprint.pprint(detector.redetectOne(image=imageWithOneFace, bBox=detection))\n pprint.pprint(detector.redetectOne(image=imageWithOneFace, bBox=detection.boundingBox.rect))\n\n imageWithSeveralFaces = VLImage.load(filename=EXAMPLE_SEVERAL_FACES)\n severalFaces = detector.detect([imageWithSeveralFaces], detect5Landmarks=False, detect68Landmarks=False)\n\n pprint.pprint(\n detector.redetect(\n images=[\n ImageForRedetection(imageWithSeveralFaces, [face.boundingBox.rect for face in severalFaces[0]]),\n ImageForRedetection(imageWithOneFace, [detection.boundingBox.rect]),\n ImageForRedetection(imageWithOneFace, [Rect(0, 0, 1, 1)]),\n ]\n )\n )", "def recognize_faces(image_file_path):\n image_pil = Image.open(image_file_path)\n draw = ImageDraw.Draw(image_pil)\n\n known_face_encodings_dict = get_known_face_encodings_dict()\n known_names = list(known_face_encodings_dict.keys())\n known_face_encodings = list(known_face_encodings_dict.values())\n\n del known_face_encodings_dict\n\n for face_location in face_detection.get_face_locations(image_file_path):\n face_encoding = get_face_encodings(\n image_file_path, known_face_locations=[face_location]\n )[0]\n\n recognition_flags = face_recognition.compare_faces(\n known_face_encodings, face_encoding\n )\n\n for flag, name in zip(recognition_flags, known_names):\n if not flag:\n continue\n\n top, right, bottom, left = face_location\n draw.rectangle((left, top, right, bottom), outline=\"#FF1493\")\n text_width, text_height = draw.textsize(name)\n draw.rectangle(\n (left, bottom, right, bottom + text_height + 10),\n fill=\"#FF1493\",\n outline=\"#FF1493\",\n )\n draw.text((left + 6, bottom + 5), name, fill=\"white\")\n\n del draw # conserve resources\n image_pil.show()", "def recognize_faces(\n image_location: str,\n model: str = \"hog\",\n encodings_location: Path = DEFAULT_ENCODINGS_PATH,\n) -> None:\n with encodings_location.open(mode=\"rb\") as f:\n loaded_encodings = pickle.load(f)\n\n input_image = 
face_recognition.load_image_file(image_location)\n\n input_face_locations = face_recognition.face_locations(\n input_image, model=model\n )\n input_face_encodings = face_recognition.face_encodings(\n input_image, input_face_locations\n )\n\n pillow_image = Image.fromarray(input_image)\n draw = ImageDraw.Draw(pillow_image)\n\n for bounding_box, unknown_encoding in zip(\n input_face_locations, input_face_encodings\n ):\n name = _recognize_face(unknown_encoding, loaded_encodings)\n if not name:\n name = \"Unknown\"\n _display_face(draw, bounding_box, name)\n\n del draw\n pillow_image.show()", "def faces(self, image):\n\n response = self._send_request(\"faces\", files=dict(image=image))\n return response['objectdetection']", "def _recognize_face(unknown_encoding, loaded_encodings):\n boolean_matches = face_recognition.compare_faces(\n loaded_encodings[\"encodings\"], unknown_encoding\n )\n votes = Counter(\n name\n for match, name in zip(boolean_matches, loaded_encodings[\"names\"])\n if match\n )\n if votes:\n return votes.most_common(1)[0][0]", "def recognize_person(known_face_encodings, known_face_names):\n\n # Initialize model for body detection\n detection_graph = tf.Graph()\n with detection_graph.as_default():\n od_graph_def = tf.GraphDef()\n with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:\n serialized_graph = fid.read()\n od_graph_def.ParseFromString(serialized_graph)\n tf.import_graph_def(od_graph_def, name='')\n\n label_map = label_map_util.load_labelmap(PATH_TO_LABELS)\n categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES,\n use_display_name=True)\n\n category_index = label_map_util.create_category_index(categories)\n\n # Initialize connect with server\n credentials = pika.PlainCredentials(USER, PASSWORD)\n parameters = pika.ConnectionParameters(IP, PORT, credentials=credentials)\n connection = pika.BlockingConnection(parameters)\n channel = connection.channel()\n\n # Initialize parameters for logging\n last_visible = np.array([False for _ in range(0, len(known_face_names))], dtype=np.bool)\n last_visible_time = [datetime.datetime.min for _ in range(0, len(known_face_names))]\n\n last_no_face = False\n last_no_face_time = datetime.datetime.min\n\n last_unknown = False\n last_unknown_time = datetime.datetime.min\n\n last_update_face_base = datetime.datetime(1, 1, 1, 0, 0, 0)\n update_time = time.time() + TIMEOUT_UPDATE\n\n process_this_frame = True\n\n # Get video stream and processed frame\n camera = cv2.VideoCapture(CAMERA_ID)\n\n with detection_graph.as_default():\n with tf.Session(graph=detection_graph) as sess:\n while True:\n # Check for timeout for updating database\n if time.time() > update_time:\n update_time = time.time() + TIMEOUT_UPDATE\n if (datetime.datetime.now() - last_update_face_base).days >= TIME_TO_UPDATE:\n known_face_encodings, known_face_names = read_known_faces()\n last_update_face_base = datetime.datetime.now()\n\n # Get picture from stream\n ret, frame = camera.read()\n small_frame = cv2.resize(frame, (0, 0), fx=1/DECREASING_LEVEL, fy=1/DECREASING_LEVEL)\n rgb_small_frame = small_frame[:, :, ::-1]\n\n if process_this_frame:\n # Get detected objects (bodies and faces)\n image_np_expanded = np.expand_dims(frame, axis=0)\n image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')\n boxes = detection_graph.get_tensor_by_name('detection_boxes:0')\n scores = detection_graph.get_tensor_by_name('detection_scores:0')\n classes = detection_graph.get_tensor_by_name('detection_classes:0')\n num_detections = 
detection_graph.get_tensor_by_name('num_detections:0')\n (boxes, scores, classes, num_detections) = sess.run(\n [boxes, scores, classes, num_detections],\n feed_dict={image_tensor: image_np_expanded})\n\n n_body = 0\n for i in range(0, scores.shape[1]):\n if scores[0][i] > 0.5:\n n_body += 1\n else:\n break\n\n # Get coordinates of box around faces\n face_locations = face_recognition.face_locations(rgb_small_frame)\n\n now_no_face = False\n\n # Check number of detected faces and bodies\n n_faces = len(face_locations)\n if n_body > n_faces:\n # Send alarm if anybody try to hide face\n now_no_face = True\n now = datetime.datetime.now()\n if not last_no_face:\n last_no_face_time = now\n else:\n if last_no_face_time != datetime.datetime.min:\n delta = now - last_no_face_time\n if delta.seconds > TIMEOUT:\n with open(\"logging.txt\", \"a+\") as log_file:\n user_id = None\n send_data = {\"userId\": user_id,\n \"cameraId\": str(CAMERA_ID)}\n json_send_data = json.dumps(send_data)\n\n channel.basic_publish(exchange='', routing_key='users', body=json_send_data)\n\n log_file.write(\"\\nALARM NO FACE at \" + now.strftime(\"%H:%M:%S %d-%m-%Y\"))\n last_no_face_time = datetime.datetime.min\n\n # Get identified faces embeddings\n face_encodings = face_recognition.face_encodings(rgb_small_frame, face_locations)\n face_names = []\n now_visible = np.array([False for _ in range(0, len(known_face_names))], dtype=np.bool)\n now_unknown = False\n\n # Find similar face from database\n for face_encoding in face_encodings:\n name = \"Unknown\"\n matches = face_recognition.compare_faces(known_face_encodings, face_encoding)\n\n face_distances = face_recognition.face_distance(known_face_encodings, face_encoding)\n best_match_index = np.argmin(face_distances)\n if matches[best_match_index]:\n # Current face was recognized - send record about it\n name = known_face_names[best_match_index]\n now_visible[best_match_index] = True\n now = datetime.datetime.now()\n if not last_visible[best_match_index]:\n last_visible_time[best_match_index] = now\n else:\n if last_visible_time[best_match_index] != datetime.datetime.min:\n delta = now - last_visible_time[best_match_index]\n if delta.seconds > TIMEOUT:\n with open(\"logging.txt\", \"a+\") as log_file:\n user_id = name.split('_')[0]\n send_data = {\"userId\": user_id, \"cameraId\": CAMERA_ID}\n json_send_data = json.dumps(send_data)\n\n channel.basic_publish(exchange='', routing_key='users', body=json_send_data)\n\n log_file.write(\n \"\\nRecognize \" + name + \" at \" + now.strftime(\"%H:%M:%S %d-%m-%Y\"))\n last_visible_time[best_match_index] = datetime.datetime.min\n else:\n # Current face was NOT recognized - send alarm about it\n now_unknown = True\n now = datetime.datetime.now()\n if not last_unknown:\n last_unknown_time = now\n else:\n if last_unknown_time != datetime.datetime.min:\n delta = now - last_unknown_time\n if delta.seconds > TIMEOUT:\n with open(\"logging.txt\", \"a+\") as log_file:\n user_id = None\n send_data = {\"userId\": user_id, \"cameraId\": CAMERA_ID}\n json_send_data = json.dumps(send_data)\n\n channel.basic_publish(exchange='', routing_key='users', body=json_send_data)\n\n log_file.write(\"\\nALARM at \" + now.strftime(\"%H:%M:%S %d-%m-%Y\"))\n last_unknown_time = datetime.datetime.min\n\n face_names.append(name)\n\n last_visible = copy.deepcopy(now_visible)\n last_no_face = now_no_face\n last_unknown = now_unknown\n\n process_this_frame = not process_this_frame\n\n # Visualize box around person\n 
vis_util.visualize_boxes_and_labels_on_image_array(frame, np.squeeze(boxes),\n np.squeeze(classes).astype(np.int32),\n np.squeeze(scores), category_index,\n use_normalized_coordinates=True,\n line_thickness=8, skip_labels=True,\n skip_scores=True)\n\n # Visualize box around face with name\n for (face_top, face_right, face_bottom, face_left), name in zip(face_locations, face_names):\n face_coordinates = {\"top\": face_top * DECREASING_LEVEL,\n \"right\": face_right * DECREASING_LEVEL,\n \"bottom\": face_bottom * DECREASING_LEVEL,\n \"left\": face_left * DECREASING_LEVEL\n }\n\n if name == \"Unknown\":\n color = RED_COLOR\n else:\n color = BLUE_COLOR\n\n # Get face's coordinates\n cv2.rectangle(frame, (face_coordinates[\"left\"], face_coordinates[\"top\"]),\n (face_coordinates[\"right\"], face_coordinates[\"bottom\"]), color, 2)\n\n # Visualize person's name if he was recognized\n text_coordinates = get_text_coordinates(name, face_coordinates)\n cv2.rectangle(frame, (text_coordinates[\"left\"] - 5, face_coordinates[\"bottom\"]),\n (text_coordinates[\"right\"] + 5, text_coordinates[\"bottom\"] + 8),\n color, cv2.FILLED)\n cv2.putText(frame, name, (text_coordinates[\"left\"], text_coordinates[\"bottom\"] + 4),\n TEXT_FONT, 1.0, WHITE_COLOR, 1)\n\n cv2.imshow('Video', frame)\n\n # Press 'q' to quit\n if cv2.waitKey(1) & 0xFF == ord('q'):\n break\n\n process_this_frame = not process_this_frame\n\n connection.close()\n camera.release()\n cv2.destroyAllWindows()\n\n return known_face_encodings, known_face_names", "def extract_face_detections(self):\n self.detector.setInput(self.image_blob)\n self.detections = self.detector.forward()", "def make_face_recognition(update: Update, _: CallbackContext) -> None:\n # message.photo is a list of PhotoSize objects,\n # which represent different sizes of the same photo\n\n # print(\"Enter to make_face_recognition\")\n img_from_user = update.message.photo[-1].get_file()\n img_file = io.BytesIO()\n img_from_user.download(out=img_file)\n img_array = face_recognition.load_image_file(img_file)\n # Find all the faces in the image\n face_locations = face_recognition.face_locations(img_array)\n # print(face_locations)\n img_with_rects = _make_rects(img_array, face_locations)\n out_file = 'tmp.jpg'\n Image.fromarray(img_with_rects, 'RGB').save(out_file, format=\"JPEG\")\n update.message.bot.send_photo(\n update.message.chat_id,\n photo=open(out_file, 'rb'))", "def track_faces_in_video(self):\r\n\r\n logger.debug('Executing face tracking')\r\n\r\n track_loaded = False\r\n\r\n # Try to load YAML file with tracking results\r\n if os.path.exists(self.track_file_path):\r\n\r\n print 'Loading YAML file with tracking results'\r\n logger.debug('Loading YAML file with tracking results')\r\n\r\n track_faces = utils.load_YAML_file(self.track_file_path)\r\n\r\n if track_faces:\r\n self.tracked_faces = track_faces\r\n\r\n print 'YAML file with tracking results loaded'\r\n logger.debug('YAML file with tracking results loaded')\r\n\r\n track_loaded = True\r\n\r\n if not track_loaded:\r\n\r\n # Check existence of detection results\r\n\r\n if len(self.detected_faces) == 0:\r\n\r\n # Try to load YAML file\r\n if os.path.exists(self.det_file_path):\r\n\r\n print 'Loading YAML file with detection results'\r\n logger.debug('Loading YAML file with detection results')\r\n\r\n with open(self.det_file_path) as f:\r\n\r\n self.detected_faces = yaml.load(f)\r\n\r\n print 'YAML file with detection results loaded'\r\n logger.debug('YAML file with detection results loaded')\r\n\r\n else:\r\n\r\n 
print 'Warning! No detection results found!'\r\n logger.warning('No detection results found!')\r\n\r\n return\r\n\r\n # Get shot cuts\r\n self.calc_hist_diff()\r\n\r\n print '\\n\\n### Face tracking ###\\n'\r\n logger.debug('\\n\\n### Face tracking ###\\n')\r\n\r\n # Save processing time\r\n start_time = cv2.getTickCount()\r\n\r\n self.tracked_faces = []\r\n\r\n self.disc_tracked_faces = []\r\n\r\n # Counter for frames with detected faces\r\n frame_counter = 0\r\n\r\n # If a reduced frame rate is used, frames are less\r\n use_or_fps = c.USE_ORIGINAL_FPS\r\n used_fps = c.USED_FPS\r\n min_segment_duration = c.MIN_SEGMENT_DURATION\r\n tracking_min_int_area = c.TRACKING_MIN_INT_AREA\r\n min_size_width = c.FACE_DETECTION_MIN_SIZE_WIDTH\r\n min_size_height = c.FACE_DETECTION_MIN_SIZE_HEIGHT\r\n max_fr_with_miss_det = c.MAX_FR_WITH_MISSED_DET\r\n use_aligned_face = c.USE_ALIGNED_FACE_IN_TRACKING\r\n\r\n if self.params is not None:\r\n if c.USE_ORIGINAL_FPS_KEY in self.params:\r\n use_or_fps = self.params[c.USE_ORIGINAL_FPS_KEY]\r\n if c.USED_FPS_KEY in self.params:\r\n used_fps = self.params[c.USED_FPS_KEY]\r\n if c.MIN_SEGMENT_DURATION_KEY in self.params:\r\n min_segment_duration = self.params[\r\n c.MIN_SEGMENT_DURATION_KEY]\r\n if c.TRACKING_MIN_INT_AREA_KEY in self.params:\r\n tracking_min_int_area = self.params[\r\n c.TRACKING_MIN_INT_AREA_KEY]\r\n if c.MIN_SIZE_WIDTH_KEY in self.params:\r\n min_size_width = self.params[c.MIN_SIZE_WIDTH_KEY]\r\n if c.MIN_SIZE_HEIGHT_KEY in self.params:\r\n min_size_height = self.params[c.MIN_SIZE_HEIGHT_KEY]\r\n if c.MAX_FR_WITH_MISSED_DET_KEY in self.params:\r\n max_fr_with_miss_det = self.params[\r\n c.MAX_FR_WITH_MISSED_DET_KEY]\r\n if c.USE_ALIGNED_FACE_IN_TRACKING_KEY in self.params:\r\n use_aligned_face = self.params[\r\n c.USE_ALIGNED_FACE_IN_TRACKING_KEY]\r\n\r\n # Minimum duration of a segment in frames\r\n min_segment_frames = int(\r\n math.ceil(self.fps * min_segment_duration))\r\n\r\n if not use_or_fps:\r\n min_segment_frames = int(\r\n math.ceil((used_fps + 1) * min_segment_duration))\r\n\r\n # Make copy of detected faces\r\n detection_list = list(self.detected_faces)\r\n\r\n # Iterate through frames in detected_faces\r\n for detection_dict in detection_list:\r\n\r\n self.progress = 100 * (frame_counter / self.saved_frames)\r\n\r\n print('progress: ' + str(self.progress) + ' % \\r'),\r\n\r\n elapsed_s = detection_dict[c.ELAPSED_VIDEO_TIME_KEY]\r\n\r\n frame_name = detection_dict[c.SAVED_FRAME_NAME_KEY]\r\n\r\n faces = detection_dict[c.FACES_KEY]\r\n\r\n face_counter = 0\r\n\r\n # Iterate though faces in frame\r\n for face_dict in faces:\r\n\r\n track_window = face_dict[c.BBOX_KEY]\r\n\r\n left_eye_pos = face_dict[c.LEFT_EYE_POS_KEY]\r\n\r\n right_eye_pos = face_dict[c.RIGHT_EYE_POS_KEY]\r\n\r\n nose_pos = face_dict[c.NOSE_POSITION_KEY]\r\n\r\n file_name = face_dict[c.ALIGNED_FACE_FILE_NAME_KEY]\r\n\r\n # Counter for faces in segment\r\n segment_face_counter = 1\r\n\r\n segment_frame_list = []\r\n\r\n # Start new segment\r\n segment_frame_dict = {c.FRAME_COUNTER_KEY: frame_counter,\r\n c.ELAPSED_VIDEO_TIME_KEY: elapsed_s,\r\n c.DETECTION_BBOX_KEY: track_window,\r\n c.TRACKING_BBOX_KEY: track_window,\r\n c.LEFT_EYE_POS_KEY: left_eye_pos,\r\n c.RIGHT_EYE_POS_KEY: right_eye_pos,\r\n c.NOSE_POSITION_KEY: nose_pos,\r\n c.ALIGNED_FACE_FILE_NAME_KEY: file_name,\r\n c.DETECTED_KEY: True,\r\n c.SAVED_FRAME_NAME_KEY: frame_name}\r\n\r\n segment_frame_list.append(segment_frame_dict)\r\n\r\n aligned_file_path = None\r\n rgb_roi = None\r\n if 
use_aligned_face:\r\n # Use the aligned face as the\r\n # Region of Interest for tracking\r\n complete_file_name = file_name + '.png'\r\n aligned_file_path = os.path.join(\r\n self.align_path, complete_file_name)\r\n\r\n rgb_roi = cv2.imread(\r\n aligned_file_path, cv2.IMREAD_COLOR)\r\n\r\n else:\r\n # Use detected face as the\r\n # Region of Interest for tracking\r\n x0 = track_window[0]\r\n y0 = track_window[1]\r\n w = track_window[2]\r\n h = track_window[3]\r\n x1 = x0 + w\r\n y1 = y0 + h\r\n\r\n frame_path = os.path.join(\r\n self.frames_path, frame_name)\r\n\r\n # Whole frame\r\n rgb = cv2.imread(frame_path, cv2.IMREAD_COLOR)\r\n\r\n # Face\r\n rgb_roi = rgb[y0:y1, x0:x1]\r\n\r\n if rgb_roi is None:\r\n print('Warning! Face to be tracked is None')\r\n\r\n if use_aligned_face:\r\n logger.warning(\r\n 'Face ' + aligned_file_path + ' is None')\r\n else:\r\n logger.warning(\r\n 'Face from frame ' + frame_name + ' is None')\r\n\r\n face_counter += 1\r\n\r\n continue\r\n\r\n # Convert image to hsv\r\n hsv_roi = cv2.cvtColor(rgb_roi, cv2.COLOR_BGR2HSV)\r\n\r\n mask_roi = cv2.inRange(\r\n hsv_roi, np.array((0., 60., 32.)),\r\n np.array((180., 255., 255.)))\r\n\r\n hist = cv2.calcHist(\r\n [hsv_roi], [0], mask_roi, [16], [0, 180])\r\n\r\n cv2.normalize(hist, hist, 0, 255, cv2.NORM_MINMAX)\r\n hist = hist.reshape(-1)\r\n\r\n # Face should not be considered anymore\r\n del (detection_list[frame_counter]\r\n [c.FACES_KEY][face_counter])\r\n\r\n sub_frame_counter = frame_counter + 1\r\n\r\n missed_det_counter = 0\r\n\r\n # Iterate through subsequent frames\r\n for sub_det_dict in detection_list[sub_frame_counter:]:\r\n\r\n # Check if a new shot begins\r\n if sub_frame_counter in self.cut_idxs:\r\n break\r\n\r\n sub_frame_name = sub_det_dict[c.SAVED_FRAME_NAME_KEY]\r\n\r\n sub_frame_path = os.path.join(\r\n self.frames_path, sub_frame_name)\r\n\r\n # Read image from given path\r\n sub_image = cv2.imread(\r\n sub_frame_path, cv2.IMREAD_COLOR)\r\n\r\n if sub_image is None:\r\n print('Warning! 
Image is None')\r\n logger.warning(\r\n 'Image ' + sub_frame_path + ' is None')\r\n\r\n continue\r\n\r\n # Convert image to hsv\r\n sub_hsv = cv2.cvtColor(sub_image, cv2.COLOR_BGR2HSV)\r\n\r\n sub_mask = cv2.inRange(sub_hsv,\r\n np.array((0., 60., 32.)),\r\n np.array((180., 255., 255.)))\r\n\r\n # Apply meanshift to get the new location\r\n prob = cv2.calcBackProject(\r\n [sub_hsv], [0], hist, [0, 180], 1)\r\n prob &= sub_mask\r\n term_crit = (cv2.TERM_CRITERIA_EPS\r\n | cv2.TERM_CRITERIA_COUNT, 10, 1)\r\n\r\n track_box, track_window = cv2.CamShift(\r\n prob, track_window, term_crit)\r\n\r\n track_x0 = track_window[0]\r\n track_y0 = track_window[1]\r\n track_w = track_window[2]\r\n track_h = track_window[3]\r\n\r\n # Check size of track window\r\n if ((track_w <= min_size_width)\r\n or (track_h <= min_size_height)):\r\n\r\n break\r\n\r\n segment_frame_dict = {}\r\n\r\n track_list = (\r\n int(track_x0), int(track_y0), int(track_w),\r\n int(track_h))\r\n\r\n segment_frame_dict[c.TRACKING_BBOX_KEY] = track_list\r\n\r\n sub_faces = sub_det_dict[c.FACES_KEY]\r\n\r\n sub_face_counter = 0\r\n\r\n sim = False\r\n\r\n det_bbox = None\r\n\r\n for sub_face_dict in sub_faces:\r\n\r\n det_bbox = sub_face_dict[c.BBOX_KEY]\r\n\r\n # If track window corresponds to\r\n # a detected face,\r\n # delete detection from list\r\n\r\n (sim, int_area, int_area_pct) = utils.is_rect_similar(\r\n track_window, det_bbox, tracking_min_int_area)\r\n\r\n if sim:\r\n # det_face_counter = det_face_counter + 1\r\n\r\n track_window = det_bbox\r\n\r\n break\r\n\r\n sub_face_counter += 1\r\n\r\n t_x0 = track_window[0]\r\n t_y0 = track_window[1]\r\n t_w = track_window[2]\r\n t_h = track_window[3]\r\n\r\n segment_frame_dict[c.DETECTION_BBOX_KEY] = det_bbox\r\n\r\n # If a detected face corresponds to track window\r\n # delete detected face from detection list\r\n\r\n if sim:\r\n\r\n missed_det_counter = 0\r\n\r\n segment_frame_dict[c.DETECTED_KEY] = True\r\n\r\n segment_frame_dict[c.LEFT_EYE_POS_KEY] = (\r\n sub_face_dict[c.LEFT_EYE_POS_KEY])\r\n segment_frame_dict[c.RIGHT_EYE_POS_KEY] = (\r\n sub_face_dict[c.RIGHT_EYE_POS_KEY])\r\n\r\n segment_frame_dict[c.NOSE_POSITION_KEY] = (\r\n sub_face_dict[c.NOSE_POSITION_KEY])\r\n\r\n segment_frame_dict[c.ALIGNED_FACE_FILE_NAME_KEY] = (\r\n sub_face_dict[c.ALIGNED_FACE_FILE_NAME_KEY])\r\n\r\n del (detection_list[sub_frame_counter]\r\n [c.FACES_KEY][sub_face_counter])\r\n\r\n else:\r\n\r\n # Check if distance from last detection\r\n # is too big\r\n missed_det_counter += 1\r\n\r\n if missed_det_counter > max_fr_with_miss_det:\r\n\r\n # Remove last frames and\r\n # interrupt tracking\r\n for i in range(0, max_fr_with_miss_det):\r\n segment_frame_list.pop()\r\n\r\n segment_face_counter = (\r\n segment_face_counter - max_fr_with_miss_det)\r\n\r\n break\r\n\r\n segment_frame_dict[c.DETECTED_KEY] = False\r\n\r\n elapsed_ms = sub_det_dict[c.ELAPSED_VIDEO_TIME_KEY]\r\n\r\n # Update list of frames for segment\r\n segment_frame_dict[\r\n c.FRAME_COUNTER_KEY] = sub_frame_counter\r\n segment_frame_dict[\r\n c.ELAPSED_VIDEO_TIME_KEY] = elapsed_ms\r\n\r\n track_list = (\r\n int(t_x0), int(t_y0), int(t_w), int(t_h))\r\n\r\n segment_frame_dict[c.TRACKING_BBOX_KEY] = track_list\r\n segment_frame_dict[\r\n c.SAVED_FRAME_NAME_KEY] = sub_frame_name\r\n\r\n segment_frame_list.append(segment_frame_dict)\r\n\r\n del sub_image\r\n\r\n sub_frame_counter += 1\r\n\r\n segment_face_counter += 1\r\n\r\n # Segment must be considered only if its number\r\n # of frames is greater or equals than a minimum\r\n if 
segment_face_counter >= min_segment_frames:\r\n\r\n segments = self.divide_segment_by_face(\r\n segment_frame_list)\r\n\r\n if len(segments) > 0:\r\n self.tracked_faces.extend(segments)\r\n\r\n else:\r\n\r\n segment_dict = {c.FRAMES_KEY: segment_frame_list}\r\n\r\n self.disc_tracked_faces.append(segment_dict)\r\n\r\n # Check histograms of detected faces and\r\n # divide segment accordingly\r\n\r\n face_counter += 1\r\n\r\n frame_counter += 1\r\n\r\n # Create directory for this video\r\n\r\n if not (os.path.exists(self.track_path)):\r\n os.makedirs(self.track_path)\r\n\r\n # Save tracking result in YAML file\r\n utils.save_YAML_file(self.track_file_path, self.tracked_faces)\r\n\r\n # Save processing time\r\n time_in_clocks = cv2.getTickCount() - start_time\r\n time_in_seconds = time_in_clocks / cv2.getTickFrequency()\r\n\r\n print 'Time for face tracking:', time_in_seconds, 's\\n'\r\n logger.debug('Time for face tracking:', time_in_seconds, 's\\n')\r\n\r\n self.anal_times[c.FACE_TRACKING_TIME_KEY] = time_in_seconds\r\n\r\n utils.save_YAML_file(self.analysis_file_path, self.anal_times)", "def get_faces():\n detected_faces = request()\n\n if not detected_faces:\n raise FaceNotDetectedError()\n return detected_faces", "def read_known_faces():\n known_face_encodings = []\n known_face_names = []\n\n for file_name in glob.glob(DATASET_FOLDER + \"/*.jpg\"):\n face_encoding = read_face_encoding(file_name)\n\n known_face_encodings.append(face_encoding)\n\n name = file_name.split('.jpg')[0].split('/')[-1]\n if len(name.split('_')) != 2:\n raise Exception(\"\\n\\nERROR: file \\'\" + file_name + \"\\' has incorrect name\\n\\n\")\n\n known_face_names.append(name)\n\n return known_face_encodings, known_face_names", "def face_detector(img):\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # faceCascade imports in the previously made classifier\n faceCascade = cv2.CascadeClassifier('src/face_detection/haarcascade_frontalface_default.xml')\n faces = faceCascade.detectMultiScale(\n gray, \n scaleFactor=1.2,\n minNeighbors=1, \n minSize=(100, 100)\n )\n\n return faces", "def face_rec(file):\n for name, known_file in known_faces:\n try:\n if compare_faces(known_file, file):\n age, female, male = dex.estimate(known_file)\n # gender = \"woman: {:.3f}, man: {:.3f}\".format(female, male)\n _age = \"{:.0f}\".format(age)\n if \"{:.3f}\".format(female) > \"{:.3f}\".format(male):\n gender = \"Female\"\n else:\n gender = \"Male\"\n return {\n \"name\": name,\n # \"gender\": gender,\n # \"age\": _age,\n }\n except :\n return 'Unknown'", "async def asyncRedetectFaces():\n faceEngine = VLFaceEngine()\n detector = faceEngine.createFaceDetector(DetectorType.FACE_DET_V3)\n\n imageWithSeveralFaces = VLImage.load(filename=EXAMPLE_SEVERAL_FACES)\n severalFaces = detector.detect([imageWithSeveralFaces], detect5Landmarks=False, detect68Landmarks=False)\n\n detections = await detector.redetect(\n images=[\n ImageForRedetection(imageWithSeveralFaces, [face.boundingBox.rect for face in severalFaces[0]]),\n ],\n asyncEstimate=True,\n )\n pprint.pprint(detections)\n task1 = detector.redetect(\n images=[\n ImageForRedetection(imageWithSeveralFaces, [severalFaces[0][0].boundingBox.rect]),\n ],\n asyncEstimate=True,\n )\n task2 = detector.redetect(\n images=[\n ImageForRedetection(imageWithSeveralFaces, [severalFaces[0][1].boundingBox.rect]),\n ],\n asyncEstimate=True,\n )\n for task in (task1, task2):\n pprint.pprint(task.get())", "def detect_face(image):\n cascadePath = \"haarcascade_frontalface_default.xml\"\n faceCascade = 
cv2.CascadeClassifier(cascadePath)\n faces = faceCascade.detectMultiScale(image)\n if len(faces)>=1:#Should be == , not >=\n return True\n return False", "def detect(self, audio_file: str) -> SpeechDetectedResponse:\n pass", "def detect_faces(self, img):\n with tf.Graph().as_default():\n gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=.7)\n sess = tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, log_device_placement=False))\n with sess.as_default():\n pnet, rnet, onet = detect_face.create_mtcnn(sess, None)\n\n minsize = 20 # minimum size of face\n threshold = [ 0.6, 0.7, 0.7 ] # three steps's threshold\n factor = 0.709 # scale factor\n\n bounding_boxes, _ = detect_face.detect_face(img, minsize, pnet, rnet, onet, threshold, factor)\n\n nrof_faces = bounding_boxes.shape[0]\n img_size = np.asarray(img.shape)[0:2]\n\n faces = []\n faces_rects = []\n\n for i in range(nrof_faces):\n det = bounding_boxes[i,0:4]\n bb = np.zeros(4, dtype=np.int32)\n bb[0] = np.maximum(det[0]-5/2, 0)\n bb[1] = np.maximum(det[1]-5/2, 0)\n bb[2] = np.minimum(det[2]+5/2, img_size[1])\n bb[3] = np.minimum(det[3]+5/2, img_size[0])\n faces.append(img[bb[1]:bb[3], bb[0]:bb[2], :])\n faces_rects.append({'name': 'none', 'x': bb[0], 'y': bb[1], 'w': bb[2]-bb[0], 'h': bb[3]-bb[1]})\n\n return [img, faces, faces_rects]", "def face_detection_image(self, image_path):\n body = {'file': (image_path, open(image_path, 'rb'), \"multipart/form-data\")}\n url = self.base_url + '/vision-service/phoenix-vision/face-detection/image'\n headers = {\"ApiKey\": self.api_key}\n response = requests.post(url=url, files=body, headers=headers).text\n return response", "def detect_face(self, img):\n #convert the test image to gray image as opencv face detector expects gray images\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n #let's detect multiscale (some images may be closer to camera than others) images\n #result is a list of faces\n faces = self.face_cascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5);\n\n #if no faces are detected then return None\n if (len(faces) == 0):\n return None, None\n\n #under the assumption that there will be only one face,\n #extract the face area\n (x, y, w, h) = faces[0]\n\n #return only the face part of the image\n return gray[y:y+w, x:x+h], faces[0]", "def recognize(self, file_name=None,\n file_object=None,\n content_type=None,\n content_callback=None,\n event_loop=None,\n model=None,\n customization_id=None,\n inactivity_timeout=None,\n keywords=None,\n keywords_threshold=None,\n max_alternatives=None,\n word_alternatives_threshold=None,\n word_confidence=None,\n timestamps=None,\n profanity_filter=None,\n smart_formatting=None,\n speaker_labels=None):\n\n if event_loop is None:\n event_loop = asyncio.get_event_loop()\n\n if model is None:\n model = 'en-US_BroadbandModel'\n\n authstring = \"{0}:{1}\".format(self.username, self.password)\n encoded_auth = base64.b64encode(authstring.encode('utf-8')).decode('utf-8')\n\n headers = {'Authorization': 'Basic {0}'.format(encoded_auth)}\n\n unfiltered_options = {\n 'content_type': content_type,\n 'inactivity_timeout': inactivity_timeout,\n 'interim_results': True,\n 'inactivity_timeout': inactivity_timeout,\n 'word_confidence': word_confidence,\n 'timestamps': timestamps,\n 'max_alternatives': max_alternatives,\n 'word_alternatives_threshold': word_alternatives_threshold,\n 'profanity_filter': profanity_filter,\n 'smart_formatting': smart_formatting,\n 'keywords': keywords,\n 'keywords_threshold': keywords_threshold,\n 
'max_alternatives': max_alternatives,\n 'speaker_labels': speaker_labels}\n\n options = dict([(k, unfiltered_options[k])\n for k\n in unfiltered_options.keys()\n if unfiltered_options[k] is not None])\n if file_object is None:\n with open(file_name, 'rb') as audiofile:\n return event_loop.run_until_complete(\n self._convert(audiofile,\n headers,\n options,\n model,\n content_callback))\n else:\n return event_loop.run_until_complete(\n self._convert(file_object,\n headers,\n options,\n model,\n content_callback))", "def extract_faces(image_path: str, pk: int):\n image = Image.open(image_path)\n image = np.array(image)\n\n if image.shape[0] <= 0 or image.shape[1] <= 0:\n return None\n\n import mtcnn\n\n # detect faces from image\n face_detector = mtcnn.MTCNN()\n detections = face_detector.detect_faces(image)\n\n if len(detections) < 1:\n return None\n\n from deepface.basemodels.Facenet import InceptionResNetV2\n\n # load InceptionResNet model provided by deepface\n facenet_model = InceptionResNetV2()\n facenet_model.load_weights(get_weights(\"facenet\"))\n\n # normalize faces and get embeddings\n faces = [normalize_face(image, face) for face in detections]\n embeddings = facenet_model.predict(np.vstack(faces), batch_size=len(faces))\n\n for i in range(len(faces)):\n person_id = recognize_person(embeddings[i])\n print(person_id, flush=True)\n face_obj = models.Face.objects.create(\n confidence=detections[i]['confidence'],\n left=detections[i]['box'][0],\n top=detections[i]['box'][1],\n width=detections[i]['box'][2],\n height=detections[i]['box'][3],\n photo_id=pk,\n person_id=person_id\n )\n\n save_embeddings(embeddings[i], face_obj.id, person_id)", "def camera_operation(self):\r\n ret, self.frame = self.cap.read() #get frame/ read from camera\r\n\r\n #try finding faces\r\n try:\r\n gray = cv2.cvtColor(self.frame, cv2.COLOR_BGR2GRAY)\r\n faces = FACE_CASCADE.detectMultiScale(gray, scaleFactor = 1.5, minNeighbors = 5)\r\n #print(faces)\r\n for(x, y, w, h) in faces:\r\n #print(x, y, w, h) \r\n self.roi_gray = gray[y: y+h, x: x+w] #region of interest is face\r\n #Drawing Rectangle\r\n color = (255, 0, 0)\r\n stroke = 2\r\n end_cord_x = x+w\r\n end_cord_y = y+h\r\n cv2.rectangle(self.frame, (x,y), (end_cord_x, end_cord_y), color, stroke)\r\n self.FACE_FOUND = True\r\n\r\n \"\"\"While training if more than one face detected\"\"\"\r\n if (self.TRAIN_FLAG == True) and (len(faces) > 1):\r\n self.pop_window(title=\"Warning\", msg=\"Training takes only one face. 
\\nMultiple face detected.\")\r\n self.FACE_FOUND = False\r\n\r\n \"\"\"recognize faces, show with name\"\"\"\r\n if self.RECOGNIZE_FLAG == True:\r\n Id, confidence = RECOGNIZER.predict(self.roi_gray)\r\n print(confidence)\r\n \r\n name = self.names[Id-1] #get corresponding name\r\n\r\n \"\"\"if id not found, lock the screen\"\"\"\r\n if (confidence > CONFIDENCE_THRESHOLD) and (self.RECOGNIZE_FLAG == True):\r\n subprocess.call(LOCK_CODE)\r\n print(\"Unknown\")\r\n\r\n \"\"\"put name with face bouding box\"\"\"\r\n #if confidence value less than threshold value,\r\n #the smalller the value the better the accuracy\r\n if (name in self.names) and (confidence < CONFIDENCE_THRESHOLD) and (self.TRAIN_FLAG == False):\r\n cv2.putText(self.frame, name, (x, y+w+20), cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, (250, 250, 250))\r\n print(Id)\r\n\r\n\r\n\r\n \r\n except:\r\n #self.FACE_FOUND = False\r\n pass #run anyway\r\n \r\n\r\n #_______________________Check record flag____________________________________\r\n #print(self.RECORD_FLAG)\r\n if self.RECORD_FLAG == True:\r\n print(\"Recording man!\")\r\n self.video_writer.write(self.frame)\r\n #notify on image about recording\r\n cv2.putText(self.frame, \"Recording..\", (5, 380), cv2.FONT_HERSHEY_SCRIPT_SIMPLEX, 0.5, (0, 255, 0), 1, cv2.LINE_AA)\r\n\r\n #_______________________Train model with new face____________________________\r\n #print(self.TRAIN_FLAG)\r\n if self.TRAIN_FLAG == True:\r\n #print(\"Training Mode\")\r\n #notify about Training\r\n cv2.putText(self.frame, \"Training Mode\", (5, 75), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 1, cv2.LINE_AA)\r\n #put sample number on screen\r\n cv2.putText(self.frame, str(self.sample_num), (10, 300), cv2.FONT_HERSHEY_COMPLEX, 4, (255, 255, 255), 2, cv2.LINE_AA)\r\n \r\n self.counter += 1 #start counter\r\n #print(self.counter)\r\n \r\n if self.sample_num == MAX_SAMPLE_COLLECTION_NUM: #reached max sample number\r\n cv2.putText(self.frame, \"Training, wait!\", (10, 350), cv2.FONT_HERSHEY_COMPLEX, 2, (255, 255, 255), 1, cv2.LINE_AA)\r\n self.update_img_label(self.frame)\r\n self.sample_num = 0 #set sample number to zero\r\n self.TRAIN_FLAG = False #stop saving\r\n self.pop_window(title=\"INFO\", msg=\"Sample images collected, Train?\")\r\n\r\n self.train()\r\n\r\n\r\n elif (self.counter == 12) and (self.FACE_FOUND == True): #after 1 sec and if face found\r\n print(\"saving roi\")\r\n self.sample_num += 1 #increment sample number\r\n cv2.imwrite(f\"{PARENT_PATH}\\\\{DATASET_DIR}\\\\user.{self.id}.{self.sample_num}.jpg\", self.roi_gray)\r\n \r\n self.counter = 0 #make it zero\r\n self.FACE_FOUND = False #False, wait for next face confirmation\r\n\r\n elif self.counter == 12:\r\n print(\"Waiting for face\")\r\n self.counter = 0\r\n \r\n\r\n \r\n #_______________set current frame in QLabel___________________\r\n self.update_img_label(self.frame)", "def detect_faces(image):\n\n face_locations = face_recognition.face_locations(image)\n return face_locations" ]
[ "0.70216525", "0.66169727", "0.6613031", "0.6373871", "0.6334004", "0.62129205", "0.619253", "0.613904", "0.6095122", "0.6089204", "0.59748024", "0.5890032", "0.58845556", "0.5798849", "0.5774772", "0.5732005", "0.56989664", "0.56987834", "0.5698466", "0.5687578", "0.5671011", "0.56524575", "0.56468576", "0.56440455", "0.5635756", "0.56004936", "0.5592731", "0.55796576", "0.55792356", "0.55597895" ]
0.7085617
0
Looks into the response from Azure and compiles an easy-to-read dictionary of emotion values for the first detected face.
def get_emotion(detected_faces): result = {} for face in detected_faces: emotions = face.face_attributes.emotion result[face.face_id] = {'anger': emotions.anger, 'contempt': emotions.contempt, 'disgust': emotions.disgust, 'fear': emotions.fear, 'happiness': emotions.happiness, 'neutral': emotions.neutral, 'sadness': emotions.sadness, 'surprise': emotions.surprise} break # !! IMPORTANT !!: We are only checking the first face that has been detected. return result
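A minimal usage sketch for the `get_emotion` document above — a hedged illustration, not part of the dataset record. It assumes the azure-cognitiveservices-vision-face SDK and an already-authenticated `FaceClient` instance named `face_client` (both assumptions); the `detect_with_stream` call mirrors the `request()` helper that appears among this record's negatives below:

    from azure.cognitiveservices.vision.face.models import FaceAttributeType

    # Assumption: `face_client` is an authenticated azure FaceClient.
    with open("frame.png", "rb") as image:
        detected_faces = face_client.face.detect_with_stream(
            image=image,
            return_face_attributes=[FaceAttributeType.emotion],
            recognition_model="recognition_02",
        )

    # Maps the first face's id to its eight emotion scores, e.g.
    # {'<face_id>': {'anger': 0.0, ..., 'happiness': 0.97, ..., 'surprise': 0.01}}
    emotion_scores = get_emotion(detected_faces)

Because `get_emotion` breaks out of its loop after the first face, `emotion_scores` holds at most one entry even when Azure returns several detected faces.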
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def request():\n return face_client.face.detect_with_stream(image=open(\"frame.png\", 'rb'),\n return_face_attributes=[emotion_attribute],\n recognition_model='recognition_02')", "def predict(self, face):\r\n # Resize the face to the model input size\r\n face = resize(image=rgb2gray(face),output_shape=self.input_size)\r\n # Predict the probabilities of each emotion\r\n probabilities = self.network.predict(face[None,...,None])[0]\r\n # Take the most probable emotion\r\n max_prob = probabilities.argmax()\r\n # Take this label if the confidence is high enough, or Missing Value (None) elsewhere.\r\n emotion = EMOTIONS[max_prob] if probabilities[max_prob] > MIN_CONFIDENCE else None\r\n return emotion", "def faces_info_export(frame):\n faces_info_dict = {}\n faces_info_dict.setdefault('name', [])\n # faces_info_dict.setdefault('Info', [])\n faces_info_dict.setdefault('time_mark', [])\n # faces_info_dict.setdefault('image_info', [])\n faces_info_dict.setdefault('accuracy', [])\n faces_info_dict.setdefault('face_on_cam', [])\n # faces_info_dict.setdefault('employee_info', [])\n\n # path_of_img = frame\n #print(frame)\n # frame = cv2.imread(path_of_img)\n #cv2.imshow('parh', frame)\n #print(frame)\n # Для более быстрой обработки измениним размер в 1/4 раза\n #small_frame = cv2.resize(frame, (0, 0), fx=1, fy=1)\n # Конвертируем цветвоую схему получаемого изображения\n #rgb_small_frame = small_frame[:, :, ::-1]\n try:\n rgb_frame = frame[:,:,::-1].copy()\n recognize_faces_params = [config_gettype('recognize_faces', 'FRS.ini', param) for param in\n inspect.getfullargspec(recognize_faces)[0]]\n recognize_faces_params.remove('rgb_small_frame')\n recognize_faces_params.insert(0, rgb_frame)\n predictions = recognize_faces(*recognize_faces_params)\n\n # txt_path = [config_gettype('read_txt', 'FRS.ini', 'path')]\n # txt_path=str(txt_path)\n for name, _, accur, rec in predictions:\n faces_info_dict['name'].append(name)\n faces_info_dict['time_mark'].append(datetime.now())\n # faces_info_dict['image_info'].append(str(path_of_img))\n faces_info_dict['accuracy'].append(float(accur))\n faces_info_dict['face_on_cam'].append(bool(rec))\n\n # if name != 'unknown':\n # faces_info_dict['employee_info'].append(str(open(read_txt(txt_path)[name]).read()))\n # else:\n # faces_info_dict['employee_info'].append('no_info')\n\n faces_info_df = pd.DataFrame.from_dict(faces_info_dict)\n faces_info_df.to_csv('faces_info_csv')\n return faces_info_df\n except TypeError as e:\n print('None')", "def detect_face_api(self, img):\n\n curr_face_loc, name_list, info_list = load_encode_loc(img, self.kwn_names,\n self.kwn_encoding,\n self.status_list, self.since_list)\n print('Current value is ', curr_face_loc, name_list)\n face_list = []\n face_area = []\n print('face loc', curr_face_loc)\n if len(curr_face_loc):\n\n for (top, right, bottom, left), name in zip(curr_face_loc, name_list):\n print(top, right, bottom, left)\n cv2.rectangle(img, (top, right), (bottom, left), (0, 255, 2), 2)\n\n w = right - left\n h = bottom - top\n cx = left + w // 2\n cy = top + h // 2\n area = w * h\n\n for idx, info in enumerate(info_list):\n cv2.putText(img, info, (bottom, int(left * idx * 0.2)),\n cv2.FONT_HERSHEY_COMPLEX, 1,\n (0, 0, 255), 1)\n\n face_list.append([cx, cy])\n face_area.append(area)\n\n i = face_area.index(max(face_area))\n\n return img, [face_list[i], face_area[i]]\n\n else:\n return img, [[0, 0], 0]", "def test_face_detector_measure():\n dict_results = {}\n video_capture = cv2.VideoCapture(config.CAM_SRC)\n success, frame = 
video_capture.read()\n while success:\n FaceDetector().run(frame, dict_results)\n print(dict_results)\n success, frame = video_capture.read()", "def get_frame(self):\r\n\r\n # Reading the Video and grasping the Frames\r\n _, frame = self.video.read()\r\n\r\n # Converting the Color image to Gray Scale\r\n gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\r\n\r\n # Image size is reduced by 30% at each image scale.\r\n scaleFactor = 1.3\r\n\r\n # 5 neighbors should be present for each rectangle to be retained.\r\n minNeighbors = 5\r\n\r\n # Detect the Faces in the given Image and store it in faces.\r\n faces = facec.detectMultiScale(gray_frame, scaleFactor, minNeighbors)\r\n\r\n # Iterating through all the faces detected\r\n for (x, y, w, h) in faces:\r\n\r\n # Taking the Face part in the Image as Region of Interest.\r\n roi = gray_frame[y:y+h, x:x+w]\r\n\r\n # Let us resize the Image accordingly to use pretrained model.\r\n roi = cv2.resize(roi, (48, 48))\r\n\r\n # Let us make the Prediction of Emotion present in the Image.\r\n prediction = model.predict_emotion(\r\n roi[np.newaxis, :, :, np.newaxis])\r\n\r\n # Custom Symbols to print with text of emotion.\r\n Symbols = {\"Happy\": \":)\", \"Sad\": \":}\", \"Surprise\": \"!!\",\r\n \"Angry\": \"?\", \"Disgust\": \"#\", \"Neutral\": \".\", \"Fear\": \"~\"}\r\n\r\n # Defining the Parameters for putting Text on Image\r\n Text = str(prediction) + Symbols[str(prediction)]\r\n Text_Color = (180, 105, 255)\r\n\r\n Thickness = 2\r\n Font_Scale = 1\r\n Font_Type = cv2.FONT_HERSHEY_SIMPLEX\r\n\r\n # Inserting the Text on Image\r\n cv2.putText(frame, Text, (x, y), Font_Type,\r\n Font_Scale, Text_Color, Thickness)\r\n\r\n # Finding the Coordinates and Radius of Circle\r\n xc = int((x + x+w)/2)\r\n yc = int((y + y+h)/2)\r\n radius = int(w/2)\r\n\r\n # Drawing the Circle on the Image\r\n cv2.circle(frame, (xc, yc), radius, (0, 255, 0), Thickness)\r\n\r\n # Encoding the Image into a memory buffer\r\n _, jpeg = cv2.imencode('.jpg', frame)\r\n\r\n # Returning the image as a bytes object\r\n return jpeg.tobytes()", "def face_sentiment(frame):\n\n # initial function call with multithreading won't have a frame\n if frame is None:\n return ''\n\n # Convert to an image, then write to a buffer.\n image_from_frame = Image.fromarray(np.uint8(frame))\n buffer = io.BytesIO()\n image_from_frame.save(buffer, format='PNG')\n buffer.seek(0)\n\n # Use the buffer like a file.\n content = buffer.read()\n\n image = vision.Image(content=content)\n\n response = client.face_detection(image=image)\n faces = response.face_annotations\n\n if faces:\n\n # get first face\n face = faces[0]\n\n # score emotions of the face\n emotions = {'anger': int(face.anger_likelihood),\n 'joy': int(face.joy_likelihood),\n 'surprise': int(face.surprise_likelihood),\n 'sorrow': int(face.sorrow_likelihood)}\n\n # select most prominent emotion\n most_expressed_emotion = max(emotions, key=emotions.get)\n else:\n most_expressed_emotion = ''\n\n if response.error.message:\n raise Exception(\n '{}\\nFor more info on error messages, check: '\n 'https://cloud.google.com/apis/design/errors'.format(\n response.error.message))\n\n return most_expressed_emotion", "def renderResultOnImage(self, result, img ):\n \n for currFace in result:\n faceRectangle = currFace['faceRectangle']\n currEmotion = max(currFace['scores'].items(), key=operator.itemgetter(1))[0]\n '''if currEmotion == 'anger':\n logo = cv2.imread('/home/chang/图片/anger.jpg')\n elif currEmotion == 'contempt':\n logo = 
cv2.imread('/home/chang/图片/contempt.jpg')\n elif currEmotion == 'disgust':\n logo = cv2.imread('/home/chang/图片/disgust.jpg')\n elif currEmotion == 'fear':\n logo = cv2.imread('/home/chang/图片/fear.jpg')\n elif currEmotion == 'happiness':\n logo = cv2.imread('/home/chang/图片/happiness.jpg')\n elif currEmotion == 'neutral':\n logo = cv2.imread('/home/chang/图片/neutral.jpg')\n elif currEmotion == 'sadness':\n logo = cv2.imread('/home/chang/图片/sadness.jpg')\n elif currEmotion == 'surprise':\n logo = cv2.imread('/home/chang/图片/surprise.jpg')\n width=faceRectangle['width']#171\n top=faceRectangle['top']#130\n left=faceRectangle['left']#101\n height=faceRectangle['height']#169\n\n\n logo=cv2.resize(logo,((width-10),(height-10)),interpolation=cv2.INTER_CUBIC)#rows=189 cols=191\n\n logo_gray = cv2.cvtColor(logo, cv2.COLOR_BGR2GRAY)\n rows, cols, channels = logo.shape#\n \n roi = img[(top):(top+rows),(left):(cols+left)]#row:188 col:190\n \n # binary & mask\n ret, mask = cv2.threshold(logo_gray, 220, 255, cv2.THRESH_BINARY)\n # dst\n dst = roi\n re_row,re_col,re_channel = dst.shape\n \n\n for r in xrange(re_row):#0-188\n for c in xrange(re_col):#0-190\n if mask[r, c] == 0:\n dst[r, c, :] = logo[r, c, :]\n #img[(top):(top+rows),(left):(cols+left)] = dst\n #textToWrite = \"%s\" % ( currEmotion )\n #cv2.putText( img, textToWrite, (faceRectangle['left'],faceRectangle['top']-10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,0,0), 1 )\n '''\n return currEmotion", "def detect_emotions(self, img, face_rectangles) -> list:\n if img is None or not hasattr(img, \"shape\"):\n raise InvalidImage(\"Image not valid.\")\n\n gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n emotions = []\n for temp, face_coordinates in enumerate(face_rectangles):\n (startX, startY) = face_coordinates[0], face_coordinates[1]\n (endX, endY) = face_coordinates[2], face_coordinates[3]\n gray_face = gray_img[startY:endY, startX:endX]\n try:\n gray_face = cv2.resize(gray_face, self.__emotion_target_size)\n except Exception as e:\n print(\"{} resize failed\".format(gray_face.shape))\n continue\n\n if not self.deployment:\n\n # Local Keras model\n gray_face = self.__preprocess_input(gray_face, True)\n gray_face = np.expand_dims(gray_face, 0)\n gray_face = np.expand_dims(gray_face, -1)\n emotion_prediction = self.__emotion_classifier.predict(\n gray_face)[0]\n labelled_emotions = {\n self.__labels[idx]: round(score, 2)\n for idx, score in enumerate(emotion_prediction)\n }\n elif self.deployment:\n emotion_prediction = self.__emotion_classifier.predict(\n gray_face)\n labelled_emotions = {\n emotion: round(score, 2)\n for emotion, score in emotion_prediction.items()\n }\n else:\n raise NotImplemented()\n\n emotions.append({\n 'coordinates': face_coordinates,\n 'emotions': labelled_emotions\n })\n return emotions", "def emotion(input_list, output_dict):\n\temotion_mapping = 
{\"😀\":\"มีความสุข\",\n\t\"😃\":\"มีความสุข\",\n\t\"😄\":\"มีความสุข\",\n\t\"😁\":\"มีความสุข\",\n\t\"😆\":\"มีความสุข\",\n\t\"😅\":\"มีความสุข\",\n\t\"😂\":\"มีความสุข\",\n\t\"🤣\":\"มีความสุข\",\n\t\"😊\":\"มีความสุข\",\n\t\"😇\":\"มีความสุข\",\n\t\"🙂\":\"มีความสุข\",\n\t\"🙃\":\"มีความสุข\",\n\t\"😉\":\"มีความสุข\",\n\t\"😌\":\"มีความสุข\",\n\t\"😍\":\"มีความสุข\",\n\t\"😘\":\"มีความสุข\",\n\t\"😗\":\"มีความสุข\",\n\t\"😙\":\"มีความสุข\",\n\t\"😚\":\"มีความสุข\",\n\t\"😋\":\"มีความสุข\",\n\t\"😜\":\"มีความสุข\",\n\t\"😝\":\"มีความสุข\",\n\t\"😛\":\"มีความสุข\",\n\t\"🤗\":\"มีความสุข\",\n\t\"🤓\":\"มีความสุข\",\n\t\"😎\":\"มีความสุข\",\n\t\"🤡\":\"มีความสุข\",\n\t\"🤠\":\"มีความสุข\",\n\t\"😒\":\"ไม่มีความสุข\",\n\t\"😞\":\"ไม่มีความสุข\",\n\t\"😔\":\"ไม่มีความสุข\",\n\t\"😟\":\"ไม่มีความสุข\",\n\t\"☹\" :\"ไม่มีความสุข\",\n\t\"😕\":\"ไม่มีความสุข\",\n\t\"🙁\":\"ไม่มีความสุข\",\n\t\"😣\":\"ไม่มีความสุข\",\n\t\"😖\":\"ไม่มีความสุข\",\n\t\"😫\":\"ไม่มีความสุข\",\n\t\"😩\":\"ไม่มีความสุข\",\n\t\"😤\":\"ไม่มีความสุข\",\n\t\"😠\":\"ไม่มีความสุข\",\n\t\"😡\":\"ไม่มีความสุข\",\n\t\"😐\":\"ไม่มีความสุข\",\n\t\"😑\":\"ไม่มีความสุข\",\n\t\"😯\":\"ไม่มีความสุข\",\n\t\"😦\":\"ไม่มีความสุข\",\n\t\"😧\":\"ไม่มีความสุข\",\n\t\"😮\":\"มีความสุข\",\n\t\"😲\":\"มีความสุข\",\n\t\"😵\":\"ไม่มีความสุข\",\n\t\"😳\":\"มีความสุข\",\n\t\"😱\":\"ไม่มีความสุข\",\n\t\"😨\":\"ไม่มีความสุข\",\n\t\"😰\":\"ไม่มีความสุข\",\n\t\"😢\":\"ไม่มีความสุข\",\n\t\"😥\":\"ไม่มีความสุข\",\n\t\"🤤\":\"ไม่มีความสุข\",\n\t\"😭\":\"ไม่มีความสุข\",\n\t\"😓\":\"ไม่มีความสุข\",\n\t\"😪\":\"ไม่มีความสุข\",\n\t\"😴\":\"ไม่มีความสุข\",\n\t\"🤥\":\"ไม่มีความสุข\",\n\t\"😬\":\"มีความสุข\",\n\t\"🤢\":\"ไม่มีความสุข\",\n\t\"🤧\":\"ไม่มีความสุข\",\n\t\"😷\":\"ไม่มีความสุข\",\n\t\"🤒\":\"ไม่มีความสุข\",\n\t\"🤕\":\"ไม่มีความสุข\",\n\t\"😈\":\"ไม่มีความสุข\",\n\t\"👿\":\"ไม่มีความสุข\",\n\t\"👹\":\"ไม่มีความสุข\",\n\t\"👺\":\"ไม่มีความสุข\",\n\t\"💩\":\"ไม่มีความสุข\",\n\t\"😺\":\"มีความสุข\",\n\t\"😸\":\"มีความสุข\",\n\t\"😹\":\"มีความสุข\",\n\t\"😻\":\"มีความสุข\",\n\t\"😼\":\"มีความสุข\",\n\t\"😽\":\"มีความสุข\",\n\t\"🙀\":\"มีความสุข\",\n\t\"😿\":\"ไม่มีความสุข\",\n\t\"😾\":\"ไม่มีความสุข\",\n\t\"👏\":\"มีความสุข\",\n\t\"👍\":\"มีความสุข\",\n\t\"👎\":\"ไม่มีความสุข\",\n\t\"🖕\":\"ไม่มีความสุข\",\n\t\"❤\":\"มีความสุข\",\n\t\"💛\":\"มีความสุข\",\n\t\"💚\":\"มีความสุข\",\n\t\"💙\":\"มีความสุข\",\n\t\"💜\":\"มีความสุข\",\n\t\"🖤\":\"ไม่มีความสุข\",\n\t\"💕\":\"มีความสุข\",\n\t\"💞\":\"มีความสุข\",\n\t\"💓\":\"มีความสุข\",\n\t\"💗\":\"มีความสุข\",\n\t\"💖\":\"มีความสุข\",\n\t\"💘\":\"มีความสุข\",\n\t\"💝\":\"มีความสุข\",\n\t\"💟\":\"มีความสุข\"}\n\n\tfor emo in input_list:\n\t\tif emo in emotion_mapping:\n\t\t\tfeeling = emotion_mapping[emo]\n\t\t\tif feeling not in output_dict:\n\t\t\t\toutput_dict[feeling] = 1\n\t\t\telse:\n\t\t\t\toutput_dict[feeling] += 1\n\n\treturn output_dict", "def lookup_known_face(self, face_encoding, known_face_encodings, known_face_metadata):\n metadata = None\n\n # If our known face list is empty, just return nothing since we can't possibly have seen this face.\n if len(known_face_encodings) == 0:\n return metadata\n\n # Calculate the face distance between the unknown face and every face on in our known face list\n # This will return a floating point number between 0.0 and 1.0 for each known face. The smaller the number,\n # the more similar that face was to the unknown face.\n face_distances = face_recognition.face_distance(known_face_encodings, face_encoding)\n\n # Get the known face that had the lowest distance (i.e. 
most similar) from the unknown face.\n best_match_index = np.argmin(face_distances)\n # best_match_index = np.argmax(face_distances)\n\n # print('best_match_index=%i' % best_match_index)\n\n # If the face with the lowest distance had a distance under 0.6, we consider it a face match.\n # 0.6 comes from how the face recognition model was trained. It was trained to make sure pictures\n # of the same person always were less than 0.6 away from each other.\n # Here, we are loosening the threshold a little bit to 0.65 because it is unlikely that two very similar\n # people will come up to the door at the same time.\n ident_limit = 0.6\n if face_distances[best_match_index] < ident_limit:\n # If we have a match, look up the metadata we've saved for it (like the first time we saw it, etc)\n metadata = known_face_metadata[best_match_index]\n\n metadata[\"face_distance\"] = face_distances[best_match_index]\n\n # print('metadata:')\n # print(metadata)\n\n return metadata", "def parse(self, message):\n resp = json.loads((self.send_api_request(message)).decode('utf-8'))\n\n nlu_response = NLUResponse()\n nlu_response.text = message\n intent_schema = IntentSchema()\n if resp[\"result\"][\"metadata\"]:\n intent_schema.name = resp[\"result\"][\"metadata\"][\"intentName\"]\n intent_schema.confidence = resp[\"result\"][\"score\"]\n else: # fallback if no intent is given by the nlu\n intent_schema.name = \"greet\"\n intent_schema.confidence = 0.0\n nlu_response.intent = intent_schema\n print(\"Recognized Intent by Dialogflow {}\".format(intent_schema.name ))\n\n pp = pprint.PrettyPrinter(indent=4)\n #pp.pprint(resp)\n\n try:\n nlu_response.entities = []\n entities = resp[\"result\"][\"parameters\"]\n resolved_query = resp[\"result\"][\"resolvedQuery\"]\n\n for key, value in entities.items():\n if value:\n entity_schema = EntitiesSchema()\n entity_schema.start = resolved_query.find(value)\n entity_schema.end = resolved_query.find(value) + len(value)\n entity_schema.entity = key\n entity_schema.value = value\n nlu_response.entities.append(entity_schema)\n #print(\"Key: {}, Value: {}\".format(key, value))\n except Exception as err:\n logging.warning('No Entites extracted {}'.format(err))\n\n schema = RasaNLUSchema()\n data, error = schema.dump(nlu_response)\n\n return data", "def get_data(self):\n global CAM\n count = 0\n while CAM.isOpened():\n count += 1\n print('COUNT' + str(count))\n _, frame = CAM.read()\n\n # cropped face\n cropped_face, bbox_coordinate, anchor_coordinate = detect_faces(frame)\n if cropped_face is None:\n print(\"NONE FACE DETECTED\")\n sleep(1)\n continue\n\n # get fake face\n fake_face, profile_feature_vector = generate_frontal_face(cropped_face)\n\n cropped_face = cv2.cvtColor(cropped_face, cv2.COLOR_BGR2RGB)\n fake_face = cv2.cvtColor(fake_face, cv2.COLOR_BGR2RGB)\n\n # face matching\n face_matcher = FaceMatcher()\n matched_face, matched_name, matched_front_fake_face, matched_diff = \\\n face_matcher.match(cropped_face, fake_face, profile_feature_vector)\n\n matched_face = cv2.cvtColor(matched_face, cv2.COLOR_BGR2RGB)\n matched_front_fake_face = cv2.cvtColor(matched_front_fake_face, cv2.COLOR_BGR2RGB)\n\n _, cropped_face_jpeg = cv2.imencode('.jpg', cropped_face)\n _, fake_face_jpeg = cv2.imencode('.jpg', fake_face)\n _, matched_face_jpeg = cv2.imencode('.jpg', matched_face)\n _, matched_front_fake_face_jpeg = cv2.imencode('.jpg', matched_front_fake_face)\n\n encoded_cropped_face = \"data:image/jpg;base64,\" + str(\n base64.b64encode(cropped_face_jpeg.tobytes()).decode())\n 
encoded_fake_face = \"data:image/jpg;base64,\" + str(\n base64.b64encode(fake_face_jpeg.tobytes()).decode())\n\n encoded_matched_face = \"data:image/jpg;base64,\" + str(\n base64.b64encode(matched_face_jpeg.tobytes()).decode())\n encoded_matched_front_fake_face = \"data:image/jpg;base64,\" + str(\n base64.b64encode(matched_front_fake_face_jpeg.tobytes()).decode())\n\n # get detection model return here and send to face frontalization model\n SIO.emit('detection', {'cropped_face': encoded_cropped_face,\n 'fake_face': encoded_fake_face,\n 'matched_face': encoded_matched_face,\n 'matched_name': matched_name,\n 'matched_front_fake_face': encoded_matched_front_fake_face,\n 'id': uuid.uuid4().hex},\n namespace='/detections')\n sleep(self.delay)", "def face_rec(file):\n for name, known_file in known_faces:\n try:\n if compare_faces(known_file, file):\n age, female, male = dex.estimate(known_file)\n # gender = \"woman: {:.3f}, man: {:.3f}\".format(female, male)\n _age = \"{:.0f}\".format(age)\n if \"{:.3f}\".format(female) > \"{:.3f}\".format(male):\n gender = \"Female\"\n else:\n gender = \"Male\"\n return {\n \"name\": name,\n # \"gender\": gender,\n # \"age\": _age,\n }\n except :\n return 'Unknown'", "def recognize(self, options):\n params = {\n 'faceRectangles': options['faceRectangles'] if 'faceRectangles' in options else ''\n }\n\n return Base._postWithOptions(self, _emotionRecognizeUrl, options, params)", "async def detect_face(face_file, max_results=4):\n image_content = face_file.read()\n batch_request = [{\n 'image': {\n 'content': base64.b64encode(image_content).decode('utf-8')\n },\n 'features': [{\n 'type': 'FACE_DETECTION',\n 'maxResults': max_results,\n }]\n }]\n\n service = get_vision_service()\n request = service.images().annotate(body={\n 'requests': batch_request,\n })\n loop = asyncio.get_event_loop()\n response = await loop.run_in_executor(None, request.execute)\n\n return response['responses'][0]['faceAnnotations'] if 'faceAnnotations' in response['responses'][0] else None", "def detect_face_task(img):\n\n # paramter for detect\n # image_size = 160\n # margin = 44\n minsize = 20 # minimum size of face\n threshold = [0.6, 0.7, 0.7] # three steps's threshold\n factor = 0.709 # scale factor\n\n # caffe model\n pnet = caffe_model.get_pnet()\n rnet = caffe_model.get_rnet()\n onet = caffe_model.get_onet()\n\n bounding_boxes, _ = detect_face.detect_face(img, minsize, pnet, rnet, onet, threshold, factor)\n print('detect bounding: ', bounding_boxes)\n print('Find faces: ', bounding_boxes.shape[0])\n\n # all_faces is faces information list, include face bytes, face position\n all_faces = []\n for face_position in bounding_boxes:\n face_position = face_position.astype(int)\n print('face position: ', face_position)\n\n # each face information, include position, face image\n head_rect = face_position[:4].tolist() # numpy array to python list\n head_img = misc.toimage(img).crop(head_rect)\n head_img_io = StringIO.StringIO()\n head_img.save(head_img_io, format='JPEG')\n head_img_b64 = base64.b64encode(head_img_io.getvalue())\n\n # construct response\n face_info = {}\n face_info['rect'] = head_rect\n face_info['image'] = head_img_b64\n\n all_faces.append(face_info)\n\n return all_faces", "def get_gender(image_file=\"/data/datasets/CelebA-HQ/celeba-1024/000004.jpg\"):\n cap = cv.VideoCapture(image_file)\n padding = 20\n while cv.waitKey(1) < 0:\n # Read frame\n t = time.time()\n hasFrame, frame = cap.read()\n if not hasFrame:\n cv.waitKey()\n break\n\n frameFace, bboxes = getFaceBox(faceNet, 
frame)\n if not bboxes:\n print(\"No face Detected, Checking next frame\")\n continue\n\n for bbox in bboxes:\n # print(bbox)\n face = frame[max(0, bbox[1] - padding):min(bbox[3] + padding, frame.shape[0] - 1),\n max(0, bbox[0] - padding):min(bbox[2] + padding, frame.shape[1] - 1)]\n\n blob = cv.dnn.blobFromImage(face, 1.0, (227, 227), MODEL_MEAN_VALUES, swapRB=False)\n genderNet.setInput(blob)\n genderPreds = genderNet.forward()\n gender = genderList[genderPreds[0].argmax()]\n confidence = genderPreds[0].max()\n # print(\"Gender Output : {}\".format(genderPreds))\n print(\"Gender : {}, conf = {:.3f}\".format(gender, confidence))\n return gender, confidence", "def detect_face(self, img):\n # Fetch face location from the frame with 128 encoding of face landmarks\n curr_face_loc, name_list, info_list = load_encode_loc(img, self.kwn_names,\n self.kwn_encoding,\n self.status_list, self.since_list)\n print('Current value is ', curr_face_loc, name_list)\n face_list = []\n face_area = []\n print('face loc', curr_face_loc)\n if len(curr_face_loc):\n\n for (top, right, bottom, left), name in zip(curr_face_loc, name_list):\n print(top, right, bottom, left)\n cv2.rectangle(img, (top, right), (bottom, left), (0, 255, 2), 2)\n\n w = right - left\n h = bottom - top\n cx = left + w // 2\n cy = top + h // 2\n area = w * h\n\n for idx, info in enumerate(info_list):\n cv2.putText(img, info, (bottom, int(left * idx * 0.2)),\n cv2.FONT_HERSHEY_COMPLEX, 1,\n (0, 0, 255), 1)\n\n face_list.append([cx, cy])\n face_area.append(area)\n\n i = face_area.index(max(face_area))\n\n return img, [face_list[i], face_area[i]]\n\n else:\n return img, [[0, 0], 0]", "def response(speech_message):\n return {\n 'version': '1.0',\n 'response': speech_message\n }", "def response(speech_message):\n return {\n 'version': '1.0',\n 'response': speech_message\n }", "def classify_face(im):\r\n faces = get_encoded_faces()\r\n faces_encoded = list(faces.values())\r\n known_face_names = list(faces.keys())\r\n\r\n img = cv2.imread(im, 1)\r\n \"\"\"\r\n Resize optinal \r\n \"\"\"\r\n #img = cv2.resize(img, (0, 0), fx=0.5, fy=0.5)\r\n #img = img[:,:,::-1]\r\n face_locations = face_recognition.face_locations(img)\r\n unknown_face_encodings = face_recognition.face_encodings(img, face_locations)\r\n\r\n face_names = []\r\n for face_encoding in unknown_face_encodings:\r\n # See if the face is a match for the known face(s)\r\n matches = face_recognition.compare_faces(faces_encoded, face_encoding)\r\n name = \"Unknown\"\r\n\r\n # use the known face with the smallest distance to the new face\r\n face_distances = face_recognition.face_distance(faces_encoded, face_encoding)\r\n best_match_index = np.argmin(face_distances)\r\n if matches[best_match_index]:\r\n name = known_face_names[best_match_index]\r\n\r\n face_names.append(name)\r\n\r\n \"\"\"\r\n All the photo lables in the faces foler end with (number) so a simiple .find(\"(\") command takes the () away from\r\n the label leaving us with the full name of the person\r\n\r\n \"\"\"\r\n\r\n result = name.find('(') \r\n fullname = (name[:result])\r\n \"\"\"\r\n If face_recogntion module recognizes a face but that face is not in the faces module then \r\n it will print unknown and we print 12345678 to use it on the start attednace program \r\n\r\n \"\"\"\r\n if (name == \"Unknown\"):\r\n print(\"12345678\")\r\n else:\r\n \"\"\"\r\n f'{len(face_locayion)}-people - will return the number of people in photo taken by Nao'\r\n \"\"\"\r\n print (f'{len(face_locations)}-people')\r\n print (fullname)\r\n 
print(courseid)\r\n print (lateornot)\r\n c34 = fullname.find(' ')\r\n firstname = (fullname[:c34])\r\n lastname = (fullname[c34:])\r\n \"\"\"\r\n We get all the data courseid , fristname , lastname, datetime1,and late or not and submited on the website \r\n \r\n\r\n \"\"\"\r\n login_data = {\r\n\t 'Course': courseid,\r\n\t 'FirstName': firstname,\r\n\t 'LastName': lastname,\r\n\t 'Date': datetime2,\r\n\t 'Attendance': 'on',\r\n\t 'Late': latev,\r\n\t 'submitbutton': 'Submit'\r\n }\r\n if(fullname == \"Unknow\"):\r\n \tprint(\"I-dont-know-you\")\r\n else:\r\n \r\n with requests.Session() as s:\r\n \turl = \"https://rbattendance.000webhostapp.com/update.php\"\r\n \tr = s.get(url)\r\n \tsoup = BeautifulSoup(r.content, 'html5lib')\r\n \tr = s.post(url, data = login_data)\r\n \t#print(r.content)\r\n \r\n \r\n\r\n\r\n\r\n\r\n \"\"\"\r\n This for loop is reponsible for drawing on the image \r\n \"\"\"\r\n\r\n for (top, right, bottom, left), name in zip(face_locations, face_names):\r\n # Draw a box around the face\r\n cv2.rectangle(img, (left-20, top-20), (right+20, bottom+20), (255, 0, 0), 2)\r\n\r\n # Draw a label with a name below the face\r\n cv2.rectangle(img, (left-20, bottom -15), (right+20, bottom+20), (255, 0, 0), cv2.FILLED)\r\n font = cv2.FONT_HERSHEY_DUPLEX\r\n cv2.putText(img, name, (left -20, bottom + 15), font, 1.0, (255, 255, 255), 2)\r\n\r\n\r\n # Display the resulting image\r\n \r\n \r\n while True:\r\n #cv2.imshow('Video', img)\r\n #if cv2.waitKey(1) & 0xFF == ord('q'):\r\n return face_names", "def get_response(image):\n encoded = base64.b64encode(image.read())\n GOOGLE_CLOUD_VISION_API_URL = 'https://vision.googleapis.com/v1/images:annotate?key='\n API_KEY = 'AIzaSyCKFsYnfYoLFeD2OHpvcjky9opfhHKFnP0'\n api_url = GOOGLE_CLOUD_VISION_API_URL + API_KEY\n header = {'Content-Type': 'application/json'}\n body = json.dumps({\n\t\t\t'requests': [{\n\t\t\t\t'image': {\n\t\t\t\t\t'content': encoded.decode(\"utf-8\"),\n\t\t\t\t},\n\t\t\t\t'features': [{\n\t\t\t\t\t'type': 'DOCUMENT_TEXT_DETECTION',\n\t\t\t\t}]\n\t\t\t}]\n\t\t})\n d = requests.post(api_url,data=body).json()\n return d", "def show_frame(self):\n\n # sets the ad page with empty amount of people initially\n # no detection set premium ad\n\n if not self.db.check(self.db.select()):\n self.p3.ad = classify([])\n\n # reads the frame\n _, frame = self.cap.read()\n # gets image to be ready for face detection\n frame = cv2.flip(frame, 1)\n input_img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)\n img_h, img_w, _ = np.shape(input_img)\n # detects\n detected = self.detector(input_img, 1)\n faces = np.empty((len(detected), 96, 96, 3))\n # if detected enter here\n if len(detected) > 0:\n\n # for every face get their location of x and y with dimensions\n for i, d in enumerate(detected):\n x1, y1, x2, y2, w, h = d.left(), d.top(), d.right() + 1, d.bottom() + 1, d.width(), d.height()\n xw1 = max(int(x1 - self.margin * w), 0)\n yw1 = max(int(y1 - self.margin * h), 0)\n xw2 = min(int(x2 + self.margin * w), img_w - 1)\n yw2 = min(int(y2 + self.margin * h), img_h - 1)\n cv2.rectangle(frame, (x1, y1), (x2, y2), (255, 0, 0), 2)\n faces[i, :, :, :] = cv2.resize(frame[yw1:yw2 + 1, xw1:xw2 + 1, :], (self.img_size, self.img_size))\n\n if len(faces) > 0:\n # predict ages and genders of the detected faces\n results = self.model.predict(faces)\n predicted_genders = results[0]\n ages = np.arange(0, 101).reshape(101, 1)\n predicted_ages = results[1].dot(ages).flatten()\n\n face_list = []\n for i, d in enumerate(detected):\n # predicts the gender\n if 
predicted_genders[i][0] > 0.7:\n gend = \"F\"\n else:\n gend = \"M\"\n\n # sets the label and draws it\n label = \"{}, {}\".format(int(predicted_ages[i]), gend)\n self.draw_label(frame, (d.left(), d.top()), label)\n # append to the list\n face_list.append([gend, int(predicted_ages[i])])\n\n # calculate list and give results to the ad page\n if not self.db.check(self.db.select()):\n self.p3.ad = classify(face_list)\n\n # show the frame and save the image shown onto the label\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = PIL.Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(image=img)\n self.p2.label.imgtk = imgtk\n self.p2.label.configure(image=imgtk)\n # refresh after 10ms\n self.p2.label.after(10, self.show_frame)", "def _recognize_face(unknown_encoding, loaded_encodings):\n boolean_matches = face_recognition.compare_faces(\n loaded_encodings[\"encodings\"], unknown_encoding\n )\n votes = Counter(\n name\n for match, name in zip(boolean_matches, loaded_encodings[\"names\"])\n if match\n )\n if votes:\n return votes.most_common(1)[0][0]", "def guest_identify_func(self, pic_display):\n (pic_height, pic_width) = pic_display.shape[:2]\n\n # Create OpenCV image blob\n image_blob = cv2.dnn.blobFromImage(\n cv2.resize(pic_display, (300, 300)),\n 1.0,\n (300, 300),\n self.trainRBGavg,\n swapRB=False,\n crop=False)\n\n # Use previously loaded face detector on the blob\n self.detector.setInput(image_blob)\n detections = self.detector.forward()\n\n self.guest_ids = {}\n for i in range(0, detections.shape[2]):\n # Determine detection confidence\n curr_confidence = detections[0, 0, i, 2]\n\n # Threshold confidence via configuration file\n if curr_confidence > self.min_detec_conf:\n # Return bounding box (x,y)-coordinates\n bound_box = detections[0, 0, i, 3:7] * np.array([pic_width,\n pic_height,\n pic_width,\n pic_height])\n (x_start, y_start, x_end, y_end) = bound_box.astype(\"int\")\n\n # Return face region of interest dimensions\n face = pic_display[y_start:y_end, x_start:x_end]\n\n # Skip faces below a min size\n (face_height, face_width) = face.shape[:2]\n if face_height < self.min_face_px[0] \\\n or face_width < self.min_face_px[1]:\n continue\n # Only detect faces fully in the frame\n if x_start < 0 or y_start < 0:\n continue\n if x_end > self.image_width or y_end > self.image_width:\n continue\n\n # Create OpenCV blob for face region of interest\n face_blob = cv2.dnn.blobFromImage(cv2.resize(face, (96, 96)),\n 1.0 / 255,\n (96, 96),\n (0, 0, 0),\n swapRB=True,\n crop=False)\n # Pass face blob into embedder,\n # return 128-D describing vector\n self.embedder.setInput(face_blob)\n face_vec = self.embedder.forward()\n\n # Use previously loaded recognizer on the face blob\n # to recognize the face\n preds = self.recognizer.predict_proba(face_vec)[0]\n max_pred_ind = np.argmax(preds)\n prob = preds[max_pred_ind]\n guest_id = self.label_encoder.classes_[max_pred_ind]\n\n # Filter out low classification probabilies\n # I.e. 
camera images must have a facial detection\n # of min_detect_conf and facial recognition\n # classification probability of min_recog_prob\n\n #print(curr_confidence, # Keep for optimizing the detect/recog %\n # prob,\n # self.determine_guest_info(self.known_guest_meta,\n # guest_id))\n if prob >= self.min_recog_prob:\n # Store guest_id info as dict of {guest_id:prob}\n self.guest_ids[guest_id] = round(prob, 4)\n\n # Print guest_info from known_guest_meta data\n guest_info = self.determine_guest_info(self.known_guest_meta,\n guest_id)\n\n # Write out guest_info and recog probability\n text = \"{:.2f}%: {}\".format(round(prob*100, 2), guest_info)\n y = y_start - 15 if y_start - 15 > 15 else y_start + 15\n cv2.rectangle(pic_display,\n (x_start, y_start),\n (x_end, y_end),\n (17, 190, 252),\n 2)\n cv2.putText(pic_display,\n text,\n (x_start, y),\n cv2.FONT_HERSHEY_SIMPLEX,\n 0.45,\n (17, 190, 252),\n 2)\n return pic_display # Show the output frame", "def make_face_recognition(update: Update, _: CallbackContext) -> None:\n # message.photo is a list of PhotoSize objects,\n # which represent different sizes of the same photo\n\n # print(\"Enter to make_face_recognition\")\n img_from_user = update.message.photo[-1].get_file()\n img_file = io.BytesIO()\n img_from_user.download(out=img_file)\n img_array = face_recognition.load_image_file(img_file)\n # Find all the faces in the image\n face_locations = face_recognition.face_locations(img_array)\n # print(face_locations)\n img_with_rects = _make_rects(img_array, face_locations)\n out_file = 'tmp.jpg'\n Image.fromarray(img_with_rects, 'RGB').save(out_file, format=\"JPEG\")\n update.message.bot.send_photo(\n update.message.chat_id,\n photo=open(out_file, 'rb'))", "def get_entity(doc):\n doc = clean_doc(doc)\n length = len(doc)\n num = length // 63\n if num < 1: num = 1\n header = {\n 'content-type': 'application/json'\n }\n enti = []\n for i in range(num):\n time.sleep(1)\n chaxun = doc[i * 63: (i + 1)*63].strip()\n try:\n res = requests.post('https://aip.baidubce.com/rpc/2.0/kg/v1/cognitive/entity_annotation', \n params={'access_token': token},\n headers=header,\n json={\"data\": chaxun}).json()['entity_annotation']\n for item in res:\n enti.append(item['mention'])\n except KeyError as e:\n print(e)\n print('chauxn:', chaxun)\n continue\n return enti", "def extract_video_features():\r\n\r\n # Face feature extraction from Openface output file\r\n file = open(\"Extracted_Features/\"+input_video[:len(input_video)-4]+\"_Features/\"+input_video[:len(input_video)-4]+\".csv\")\r\n reader = csv.DictReader(file)\r\n features = {}\r\n\r\n for row in reader:\r\n\r\n # Taking only good frames where faces have been detected with a confidence higher than 0.8 (Openface standard)\r\n if int(row[' success']) == 1 and float(row[' confidence']) > 0.5:\r\n face_id = int(row[' face_id'])\r\n frame = int(row['frame']) - 1\r\n\r\n features.setdefault(frame, {})\r\n face_features = []\r\n\r\n # Mouth LandMarks\r\n for i in range(0, 68):\r\n face_features.append(float(row[' x_' + str(i)]))\r\n\r\n for i in range(0, 68):\r\n face_features.append(float(row[' y_' + str(i)]))\r\n\r\n if f_type == \"AU\":\r\n au = [\"10\", \"12\", \"14\", \"15\", \"17\", \"20\", \"23\", \"25\", \"26\"]\r\n for i in au:\r\n face_features.append(float(row[' AU' + i + '_r']))\r\n\r\n features[frame][face_id] = face_features\r\n\r\n return features", "def face_detection_json(self, image_path):\n body = {'file': (image_path, open(image_path, 'rb'), \"multipart/form-data\")}\n url = self.base_url + 
'/vision-service/phoenix-vision/face-detection/json'\n headers = {\"ApiKey\": self.api_key}\n response = requests.post(url=url, files=body, headers=headers).json()\n return response" ]
[ "0.6500252", "0.60568655", "0.5718272", "0.570259", "0.56329", "0.5624102", "0.5542544", "0.55339795", "0.552066", "0.5511222", "0.55075353", "0.5460417", "0.54255056", "0.5361626", "0.53300476", "0.5326978", "0.5318767", "0.5302779", "0.52869296", "0.5283752", "0.5283752", "0.52819705", "0.52785707", "0.52302814", "0.5216282", "0.521247", "0.5202305", "0.52020246", "0.5188986", "0.51805127" ]
0.67391
0
Returns the ship count of all Fleet objects in this set
def ship_count(self): return sum(f.ship_count for f in self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_active_ships_count(self):\n active_ship_count = 0\n for row_index in range(self.rows):\n for column_index in range(self.columns):\n cell = self.grid[row_index][column_index]\n if cell.has_active_ship():\n active_ship_count += 1\n\n return active_ship_count", "def get_destroyed_ships_count(self):\n destroyed_ships_count = 0\n for row_index in range(self.rows):\n for column_index in range(self.columns):\n cell = self.grid[row_index][column_index]\n if cell.has_destroyed_ship():\n destroyed_ships_count += 1\n\n return destroyed_ships_count", "def tracking(self):\n cnt = 0\n self.untrackedufos = deepcopy(self.ufoLocations)\n\n for ship in self.ships:\n if ship in self.untrackedufos:\n cnt += 1\n self.trackingship[ship] = self.untrackedufos[ship].pop()\n if not self.untrackedufos[ship]:\n del self.untrackedufos[ship]\n\n return cnt", "def ship_size(coordinates, field):\r\n return len(ship_coordinates(coordinates, field))\\\r\n if has_ship(coordinates, field) else 0", "def numnems(self):\n count = 0\n for o in self._objs.values():\n count += len(o.netifs())\n return count", "def getShips(self):\n\n return self.ships", "def count(self, cls=None):\n return len(self.all(cls))", "def count(self):\n objects = self.all()\n return len(objects)", "def lives_counter(self):\n count = 15\n for row in self.board:\n for column in row:\n if column == HITSHIP:\n count -= 1\n self.lives = count\n return self.lives", "def getNbStations(self) :\n return len(self._stations)", "def Points_Counting(self):\n return len(self.__traectory_list)", "def count(self):\n return sum(1 for _ in self)", "def get_count(cls):\n total = 0\n for counter in SimpleCounterShard.objects.all():\n total += counter.count\n return total", "def get_population(self):\n population = 0\n for i in self:\n population += i.count(self.cell_state['alive'])\n return population", "def get_number_of_cheeses(self):\n number = 0\n for i in range(len(self._stools)):\n number += len(self._stools[i])\n return number", "def count(self):\n return len(self.deck)", "def count(self):\n return self.size()", "def __len__(self):\n count = 0\n for recovery_set in self.recovery_sets.values():\n count += len(recovery_set.packets)\n return count", "def getNrStations(self):\n return len(self.stationData)", "def get_num_goats(self) -> int:\n return len(self.get_all_goat_positions())", "def size(self) -> int:\n return sum(ob.size for ob in self.objects.ravel())", "def count(self):\n return len(self)", "def count(self):\n return len(self.objects)", "def __len__(self):\n\n if self.is_finite_set:\n size = 0\n for set in self.sets:\n size += len(set)\n return size\n else:\n raise ValueError(\"'%s' is not a finite set.\" % self)", "def getNumShrines(self, iPlayer):\n\t\tiNumShrines = 0\n\t\tapCityList = PyPlayer(iPlayer).getCityList()\n\t\tfor pCity in apCityList:\n\t\t\tif pCity.getNumBuilding(con.iCatholicShrine): iNumShrines += 1\n\t\t\tif pCity.getNumBuilding(con.iOrthodoxShrine): iNumShrines += 1\n\t\t\tif pCity.getNumBuilding(con.iHinduShrine): iNumShrines += 1\n\t\t\tif pCity.getNumBuilding(con.iSunniShrine): iNumShrines += 1\n\t\t\tif pCity.getNumBuilding(con.iShiaShrine): iNumShrines += 1\n\t\treturn iNumShrines", "def all_valid(self, tower) -> int:\r\n count = 0\r\n for layer in range(1, len(tower.tower)):\r\n for index in range(1, 4):\r\n if self.is_valid(layer, index, tower):\r\n count += 1\r\n \r\n return count", "def counts(self):\n return sum(self.counter.values()), len(self.visited)", "def counts(self):\n return sum(self.counter.values()), 
len(self.visited)", "def getTotalShips(session):\n html = session.get()\n return int(re.search(r'maxTransporters\">(\\d+)<', html).group(1))", "def num_species_on_map(self):\n # tot_herbivores = 0\n # tot_carnivores = 0\n # for cells in itertools.chain.from_iterable(self.map):\n # curr_herbivore, curr_carnivore = cells.num_species_per_cell()\n # tot_herbivores += curr_herbivore\n # tot_carnivores += curr_carnivore\n\n return (sum(x) for x in zip(*[cells.num_species_per_cell() for cells in itertools.chain.from_iterable(self.map)]))\n\n # (sum(x) for x in zip(*[cells.num_species_per_cell() for cells in itertools.chain.from_iterable(self.map)]))" ]
[ "0.7439754", "0.7292249", "0.6891802", "0.6331213", "0.6320823", "0.6243501", "0.61365724", "0.6089166", "0.6058078", "0.60143656", "0.60038716", "0.6002114", "0.60006964", "0.5988058", "0.5968548", "0.59447414", "0.5932767", "0.5919404", "0.5915653", "0.5890712", "0.5857965", "0.5857877", "0.5846865", "0.5815831", "0.5803271", "0.5800854", "0.5799456", "0.5799456", "0.5793879", "0.57833266" ]
0.88019377
0
Returns an iterator that yields tuples of (turns_to_wait, Attacks) for all Subfleets that arrive in that many turns, in ascending order (use reverse=True for descending).
def arrivals(self, reverse=False):
    turn_getter = attrgetter("turns_to_wait")
    for k, attacks in groupby(sorted(self, key=turn_getter, reverse=reverse), turn_getter):
        yield (k, Attacks(attacks))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def available_moves(self):\n\n heaps = range(len(self.heaps))\n return [(h, take) for h in range(len(self.heaps))\n for take in range(1, self.heaps[h] + 1)]", "def _get_children(self) -> Sequence[\"HallwayState\"]:\n if self.cur_pos < self.end_pos:\n if self.cur_pos > 0: # Stop the hallway from going negative\n yield self.new(self.cur_pos - 1)\n yield self.new(self.cur_pos + 1)", "def obstacle_iterator(self):\n for obstacle in self.tmx_data.get_layer_by_name(\"obstacles\"):\n yield obstacle", "def enumerate_transfers_for_update(self):\n transfer_batches = self.single_robot_transfer_batches_for_update()\n for transfer_batch in transfer_batches:\n for transfer in transfer_batch.transfers:\n yield transfer", "def get_tiger_capturing_moves(self) -> List[tuple]:\n tuples: List[tuple] = []\n for pos in self.get_all_tiger_positions():\n for landing_pos, goat_pos in pos.piece._get_capturing_positions():\n tuples.append((landing_pos, goat_pos))\n\n return tuples", "def get_next_activities(self, n=None):\n\n if n > self.available():\n # !!! This is not quite as specified (see method docs) !!!\n raise IllegalState('not enough elements available in this list')\n else:\n next_list = []\n x = 0\n while x < n:\n try:\n next_list.append(next(self))\n except Exception: # Need to specify exceptions here!\n raise OperationFailed()\n x = x + 1\n return next_list", "def get_stages(self):\n current_stage = self.wf['root']\n\n while current_stage:\n yield current_stage\n next_stage = set()\n for n in current_stage:\n next_stage.update(self.wf['action'][n].get('next', set()))\n current_stage = next_stage", "def successors(self):\n next_states = []\n if self.stock_pile:\n tableau = self.tableau\n stock = self.stock_pile[1:]\n waste = self.stock_pile[0]\n next_states.append(State(tableau, stock, waste))\n for i, card in enumerate(self.tableau):\n if self.is_face_up(i) and self.can_be_moved(card):\n tableau = tuple(c if c != card else None for c in self.tableau)\n stock = self.stock_pile\n waste = card\n next_states.append(State(tableau, stock, waste))\n return next_states", "def lookahead(self, n=1):\n while len(self._remaining) < n:\n self._remaining.append(next(self))\n return [self._remaining[i] for i in range(n)]", "def enumerate_moves(self):\n add_ew = lambda x: [x+'e', x+'w']\n allowed_catches = add_ew(self._directions[0])\n moves = []\n # First add the one/two step forward moves\n new_slot = self._board.get_dir(self._current_space, self._directions[0])\n if new_slot and new_slot.is_free():\n moves.append(ChessMove(self._current_space, new_slot))\n if (self._side == BLACK and new_slot.row == self._board.size - 1) or \\\n (self._side == WHITE and new_slot.row == 0):\n moves[-1].add_promotion()\n if (self._side == BLACK and self._current_space.row == 1) or \\\n (self._side == WHITE and self._current_space.row == self._board.size -2):\n new_slot = self._board.get_dir(new_slot, self._directions[0])\n if new_slot and new_slot.is_free():\n moves.append(ChessMove(self._current_space, new_slot))\n\n # Now add all the captures.\n for direction in allowed_catches:\n new_slot = self._board.get_dir(self._current_space, direction)\n if new_slot and new_slot.has_opponent(self._side):\n moves.append(ChessMove(self._current_space, new_slot, [new_slot]))\n if (self._side == BLACK and new_slot.row == self._board.size - 1) or \\\n (self._side == WHITE and new_slot.row == 0):\n moves[-1].add_promotion()\n return moves", "def iterate(self): # pragma: no mccabe\n for case_result in self.suite_result.passed:\n for 
scenario_result in case_result.passed:\n yield scenario_result, True, True\n for scenario_result in case_result.failed:\n yield scenario_result, False, True # pragma: no cover\n\n for case_result in self.suite_result.failed:\n for scenario_result in case_result.passed:\n yield scenario_result, True, False # pragma: no cover\n for scenario_result in case_result.failed:\n yield scenario_result, False, False", "def game_sequence(self, upper_limit):\n for i in range(1, upper_limit):\n response = self.build_text_response_for_number_(i)\n yield response if response else i", "def __find_all_moves(self, tower) -> list:\r\n choice = []\r\n for height in range(1,len(tower.tower)-2):\r\n for index in range(1,4):\r\n if self.stat_brain.is_valid(height, index, tower):\r\n choice.append((height, index))\r\n \r\n r.shuffle(choice)\r\n return choice", "def __iter__(self):\n for run in self.runs:\n yield run", "def possible_moves(self, side: models.Side) -> typing.Iterator[\n typing.Tuple[models.Piece, int, int]]:\n raise NotImplementedError", "def iterate():\n # States are of the form (coordinates, word so far, used spots)\n # Load the initial states into the stack\n global theStack\n for r,layer in enumerate(honeycomb):\n for e,el in enumerate(layer):\n theStack.append( ((e,r), [el],set([(e,r)])) )\n \n while (len(theStack) != 0):\n #pop the next run\n (e,r),soFar,used=theStack[-1]\n theStack=theStack[:-1]\n #run it!\n step((e,r),soFar,used)", "def window_trey_best(iterable, n):\n iterator = iter(iterable)\n from collections import deque\n from itertools import islice\n current = deque(islice(iterator, n), maxlen=n)\n yield tuple(current)\n for item in iterator:\n current.append(item)\n yield tuple(current)", "def iterate(self):\n yield self\n for x in self:\n for y in x.iterate():\n yield y", "def descendants(self):\n for child in self.children:\n yield child\n if isinstance(child, LoggedAction):\n for descendant in child.descendants():\n yield descendant", "def bees_for(self, player: PlayerID) -> Iterable[Bee]:\n for bee in self.bees:\n if bee.player == player:\n yield bee", "def split_trials(self) -> tuple[list[Trial], list[Trial]]:\n\n trials: list[Trial] = []\n for trial in self.registry:\n if trial.status != \"completed\":\n trial = self.strategy.infer(trial)\n\n if trial is not None:\n trials.append(trial)\n # NOTE: This assumes that all trials have an objective. Making assumption explicit.\n assert all(trial.objective is not None for trial in trials)\n sorted_trials = sorted(trials, key=lambda trial: trial.objective.value) # type: ignore\n\n split_index = int(numpy.ceil(self.gamma * len(sorted_trials)))\n\n below = sorted_trials[:split_index]\n above = sorted_trials[split_index:]\n\n return below, above", "def iterate_retries(self, state=None):\n for atom in self.iterate_nodes((co.RETRY,)):\n if not state or self.get_state(atom) == state:\n yield atom", "def next(self):\n # first, am i legal? 
Do I exit the bounds of 3x3x3, or do i intersect an\n # already filled block?\n\n piece_len = pieces[self.index]\n final_pos = tuple(self.pos[i] + (self.direction[i] * piece_len) for i in range(3))\n \n # the only values should be 0,1,2\n if not all(0 <= val <= 2 for val in final_pos):\n return []\n \n # next, lets update fill_state, checking that its not already filled\n for i in range(piece_len):\n self.pos = tuple(self.pos[i] + self.direction[i] for i in range(3))\n # check that the currnt value is 0\n if self.fill_state.get(self.pos):\n return []\n # mark the box as filled\n self.fill_state[self.pos] = True\n \n # And if we made it this far, we know now that the Step is valid\n\n # sanity check that we're where we should be\n assert final_pos == self.pos\n\n # Next we need to find the four directions we can turn\n next_steps = []\n for i in range(len(self.direction)):\n # if we're moving in this direction (pos or neg), we can't turn that way\n if self.direction[i] != 0:\n continue\n for posneg in [1, -1]:\n direction = [0, 0, 0]\n direction[i] = posneg\n next_steps.append(\n Step(self.index+1, final_pos, direction, self.fill_state.copy(), self)\n )\n\n return next_steps", "def get_possible_moves(self) -> list:\n if self.p1_turn:\n name = '2'\n else:\n name = '1'\n\n count = 0\n for i in self.claim:\n if i == name:\n count += 1\n over = count >= 0.5 * len(self.claim)\n\n moves = []\n if not over:\n for i in self.letters:\n if i.isalpha():\n moves.append(i)\n return moves", "def find_moves(self):\n\n from itertools import product\n free_position = self.find_free()\n return [list(free_position+i) for i in [[0,1],[1,0],[-1,0],[0,-1]] if tuple(i+free_position) in product(range(self.size),repeat=2)]", "def iter_chains(self):\n return iter(self.chain_list)", "def iter_segments_(self, X):\n\n # heuristic that tries to avoid highly-overlapping sub-segments\n # (i.e. with more than 50% overlap on average) for short speech turns\n n_samples = len(X)\n n = (n_samples - self.n_samples_) // (self.n_samples_ // 2) + 1\n n = min(n, self.per_turn)\n\n # choose (and yield) n sub-segments at random\n for i in np.random.randint(0, n_samples - self.n_samples_, n):\n yield X[i: i + self.n_samples_]", "def thermals(self) -> Iterator[\"Flight\"]:\n self = cast(\"Flight\", self)\n all_segments = (\n self.unwrap()\n .diff(\"track_unwrapped\")\n .agg_time(\"1T\", vertical_rate=\"max\", track_unwrapped_diff=\"median\")\n .abs(track_unwrapped_diff_median=\"track_unwrapped_diff_median\")\n .query(\"vertical_rate_max > 2 and track_unwrapped_diff_median > 5\")\n )\n if all_segments is not None:\n yield from all_segments.split(\"1T\")", "def traverseRook(self):\n\t\tmoves = np.empty(14, dtype=object)\n\t\tcnt = [0]\n\t\tPiece.traverse(self, cnt, moves, -1, 0)\n\t\tPiece.traverse(self, cnt, moves, 1, 0)\n\t\tPiece.traverse(self, cnt, moves, 0, -1)\n\t\tPiece.traverse(self, cnt, moves, 0, 1)\n\t\treturn moves[:cnt[0]]", "def swissPairings():\n\n # Ok This is where things get interesting, how in the world should i solve this problem\n # A question to the udacity reviewer. 
Shouldn't standings be passed in to this function since weve already called it in tournament_test.testPairings\n\n #anyways\n\n nextRoundPlayers = []\n standings = playerStandings()\n \n # since our players are ordered by wins, first place first and we have an even number of players,\n # this seems like a no-brainer to just have every 2 tuples starting from the beginning to be the next match\n # however this needs to to be implemented algorithmically\n \n #loop through our players and when we get to an even index, we get the previous two players and assign their ids and names to the next tuple \n #in nextRoundPlayers\n \n i = 0\n while i < len(standings):\n if i % 2 == 0:\n id1 = standings[i-1][0]\n name1 = standings[i-1][1]\n\n id2 = standings[i-2][0]\n name2 = standings[i-2][1]\n\n nextRoundPlayers.append((id1, name1, id2, name2))\n\n i += 1\n \n return nextRoundPlayers" ]
[ "0.5195459", "0.5156163", "0.5128677", "0.51036954", "0.5094777", "0.50646555", "0.5048493", "0.5037157", "0.5025919", "0.49986604", "0.4967178", "0.49644658", "0.4949757", "0.4949631", "0.49480528", "0.4945935", "0.4902823", "0.49011064", "0.4890652", "0.4874848", "0.48563534", "0.48465014", "0.4840531", "0.48365545", "0.4836444", "0.4833482", "0.48300683", "0.48280382", "0.48164248", "0.4804862" ]
0.7062301
0
draw sky on screen
def draw_sky(self): win.blit(self.sky, (0, 0))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw(self):\n\n surf = self.get_oxygen_surface()\n surf.set_alpha(255)\n self.screen.blit(surf, self.pos)", "def draw():\n screen.fill((0, 0, 0))\n alien.draw()", "def draw():", "def draw_environment():\n rect(screen, LIGHT_GRAY, (0, 0, 800, 450)) # grey sky\n rect(screen, WHITE, (0, 450, 800, 1000)) # white ground", "def draw(self, screen):", "def draw(self, screen):\n self.draw_left_zone(screen)\n self.draw_middle_zone(screen)\n self.draw_right_zone(screen)", "def draw_ground(self):\r\n win.blit(self.ground, (0, 400))", "def draw(self, screen):\n\t\tpygame.draw.circle(screen, self.color, self.pos, self.radius)", "def draw(self):\n self.scene.draw(self.screen)", "def drawSimple(self, screen):\r\n self.worlds[0].renderer.render(screen)", "def background():\n sky_color = (66, 170, 255) # color of the sky\n grass_color = (0, 128, 0) # color of the grass\n\n rect(screen, sky_color, (0, 0, 500, 250), 0) # sky\n rect(screen, grass_color, (0, 250, 500, 250), 0) # grass", "def draw(self):\n self.screen.fill(BACKGROUND_COLOR)\n self.cannon.draw(self.screen)\n self.objects.draw(self.screen)", "def draw_kame(self):\r\n #pygame.draw.rect(self.screen, self.color, self.rect, self.image)\r\n self.screen.blit(self.image, self.rect)", "def draw(self, screen):\n screen.blit(self.surface, self.rect)", "def draw(self):\n self.draw_body()\n shadow_surface = self.transparent_subsurface(self.surface, self.rect)\n rect = shadow_surface.get_rect()\n # works with topleft, but doesn't seems to work with center\n center, radius = rect.topleft, int(self.rect.w/2)\n pg.draw.circle(shadow_surface, (0, 0, 0, 100), center, radius)\n self.surface.blit(shadow_surface, self.rect.topleft)", "def render(self, screen):\n # print(\"Drawing scene {}\".format(self.imgname))\n screen.fill(self.color)", "def render(self, screen):\n x,y = self.getBallPos()\n pygame.draw.circle(screen, (255, 255, 255), (x, y), self.RADIUS)", "def draw_scene():\n # Place the camera\n camera.placeCamera()\n \n \n # Set up the global ambient light. (Try commenting out.)\n amb = [ 0*brightness, 0*brightness, 0*brightness, 1.0 ]\n glLightModelfv(GL_LIGHT_MODEL_AMBIENT, amb)\n\n # Set up the main light (LIGHT0)... 
or not.\n if is_light_on:\n place_blue_light()\n place_red_light()\n place_green_light()\n place_lamp_light()\n else:\n glDisable(GL_LIGHT0)\n glDisable(GL_LIGHT1)\n glDisable(GL_LIGHT2)\n glDisable(GL_LIGHT3)\n\n if lamp_light:\n place_lamp_light()\n else:\n glDisable(GL_LIGHT3)\n\n if headlamp_is_on:\n place_headlamp_light()\n else:\n glDisable(GL_LIGHT4)\n\n # Now spin the world around the y-axis (for effect).\n glRotated(angle_movement, 0, 1, 0)\n draw_objects()", "def draw(self):\n if (libt.map_is_in_fov(self.handler.fov_map, self.x, self.y) or \n self.handler.world.map[self.x][self.y].seen and self.visible_in_fog):\n libt.console_set_default_foreground(self.handler.game_map, self.colour)\n libt.console_put_char(self.handler.game_map, self.x, self.y, \n self.char, libt.BKGND_NONE)", "def draw(self):\n self.screen_surf.fill(BKGD_COLOUR)\n self.all_tiles.draw(self.screen_surf) # Tiles before other sprites.\n self.nests.draw(self.screen_surf) # Nests before chipmunks.\n self.chipmunks.draw(self.screen_surf)\n self.acorns.draw(self.screen_surf)\n self.screen_surf.blit(self.acorn_surf, self.acorn_surf.get_rect())\n self.screen_surf.blit(self.timer_surf, self.timer_rect)", "def draw_sun():\n lisandro.penup()\n lisandro.goto(40, 90)\n lisandro.begin_fill()\n lisandro.circle(150) # draws out a circle with a radius of 150 for the sun.\n lisandro.end_fill()\n lisandro.hideturtle()", "def draw(self, screen):\n \n # Draw the background\n screen.fill(CAVE)\n screen.blit(self.background,(self.world_shift // 3,0))\n \n # Draw all the sprite lists that we have\n self.platform_list.draw(screen)\n #self.enemy_list.draw(screen)\n self.enemy_list.draw(screen)", "def on_draw():\n window.clear()\n world.draw()", "def draw( self ):\n\t\t\t\n\t\ttransposition = lambda point: (point[0] + WINDOW_X, WINDOW_Y - point[1])\n\t\t\t \n\t\tx, y = transposition( self.position.xy )\n\t\tpygame.draw.circle(self.screen, self.color, ( int(x + 0.5), int(y + 0.5) ), self.r)", "def draw(self):\n self.bufferX = (self.appWidth/2) - self.viewX\n self.bufferY = (self.appHeight/2) - self.viewY\n anwp.sl.engine.clear()\n anwp.sl.engine.drawImage(0, 0, self.appWidth, self.appHeight, self.backgroundImage)\n self.drawWarpLines()\n \n # render engine\n anwp.sl.engine.render()\n self.drawSystemInfo()\n self.drawWarpGateInfo()\n self.drawWarpTradeInfo()", "def draw(self, screen):\n halfScale = int(self.screenScale / 2)\n\n x = int(self.x)\n y = int(self.y)\n for i in range(-halfScale, halfScale):\n for j in range(-halfScale, halfScale):\n\n pygame.Surface.set_at(\n screen, (x * self.screenScale + i, y * self.screenScale + j), self.color)", "def render_shore_noise(self, points):\n point_list = [(x + 50, -y + 800) for x, y in points] # Up is -ve\n pygame.draw.line(self.surface, CYAN, (50, 800), (410, 800), 1) # x-axis\n pygame.draw.line(self.surface, CYAN, (50, 800), (50, 700), 1) # y-axis\n\n for x, y in point_list: # points\n self.surface.set_at((int(x), int(y)), RED)", "def on_draw(self):\n # Clearing the buffers\n self.clear()\n self.set3d()\n # Makes it so color can be added\n glColor3d(1, 1, 1)\n\n self.push(self.player.pos, self.player.rot)\n self.model.draw()\n glPopMatrix()\n self.model.process_queue_slowly()\n\n # Draws the crosshairs on the screen\n self.set2d()\n self.draw_position_label()\n self.draw_reticle()", "def draw(self, screen):\n screen.blit(self.rotate_surface, [self.x_pos, self.y_pos])\n self.draw_radar(screen)", "def __draw(self, screen):\n\n pygame.draw.rect(screen, (200, 255, 200), (self.x, self.y, self.width, 
self.height))" ]
[ "0.7317279", "0.7185691", "0.7096785", "0.706331", "0.7061475", "0.6913213", "0.68679565", "0.680892", "0.67900133", "0.6760028", "0.67510056", "0.67199045", "0.6709187", "0.66909724", "0.6651719", "0.6638934", "0.66279453", "0.66244906", "0.65821725", "0.6575303", "0.6527863", "0.6527514", "0.6513638", "0.6472861", "0.6423807", "0.64152426", "0.6413423", "0.641171", "0.6400596", "0.6399458" ]
0.853518
0
draw ground on screen
def draw_ground(self): win.blit(self.ground, (0, 400))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def draw_ground():\n for i in range(3):\n groundturtle.forward(1450)\n groundturtle.left(90)\n groundturtle.forward(25)\n groundturtle.left(90)\n groundturtle.forward(1450)\n groundturtle.right(90)\n groundturtle.forward(25)\n groundturtle.right(90)", "def drawBackground(self,screen):\n pygame.draw.rect(screen,(240,240,240),(self.basepos[0],self.basepos[1],204,504))\n pygame.draw.rect(screen,(0,0,0),(self.basepos[0]+2,self.basepos[1]+2,200,500))", "def draw(self):\n self.screen.fill(BACKGROUND_COLOR)\n self.cannon.draw(self.screen)\n self.objects.draw(self.screen)", "def draw(self, background):\n background.blit(self.image, (self.x_pos, self.y_pos))", "def draw():\n screen.fill((0, 0, 0))\n alien.draw()", "def draw(self):\n self.screen.fill(pygame.Color(0,0,0))\n for brick in self.model.bricks:\n pygame.draw.rect(self.screen, brick.color, pygame.Rect(brick.x,brick.y,brick.width,brick.height))\n pygame.draw.rect(self.screen, pygame.Color(255,255,255), pygame.Rect(self.model.paddle.x,self.model.paddle.y,self.model.paddle.width,self.model.paddle.height))\n pygame.draw.ellipse(self.screen, pygame.Color(128,128,128),(self.model.ball.x-self.model.ball.r, self.model.ball.y-self.model.ball.r, 2*self.model.ball.r,2*self.model.ball.r))\n pygame.display.update()", "def draw(self):\n \n # Draw the background\n self.world.fill(BLUE)\n \n # Draw all the sprite lists that we have\n self.wall_list.draw(self.world)\n self.enemy_list.draw(self.world)\n self.sludge.draw(self.world)\n self.consumeable.draw(self.world)\n self.can_climb.draw(self.world)", "def draw(self, screen):\n self.draw_left_zone(screen)\n self.draw_middle_zone(screen)\n self.draw_right_zone(screen)", "def draw(self, draw_surface):\n draw_surface.blit(self.background_frame, (0, 120))", "def draw(self, screen):\n if self.status == self.STATUS_RUNNING: \n self.update()\n\n screen.blit(pygame.transform.scale(self.background_img, (self.background_rect.width,self.background_rect.height)), self.background_rect)\n screen.blit(self.platform, pygame.Rect((self.left_landing_bounds, self.bottom_limit - 48), (128, 48)))\n \n if abs(self.velocity_x) > 4 or self.velocity_y > 4: \n screen.blit(self.shipfast, self.rect)\n else:\n screen.blit(self.shipmed, self.rect)", "def draw(self, screen):", "def draw():", "def draw(self, screen):\n screen.blit(self.surface, self.rect)", "def draw_environment():\n rect(screen, LIGHT_GRAY, (0, 0, 800, 450)) # grey sky\n rect(screen, WHITE, (0, 450, 800, 1000)) # white ground", "def draw(self):\n self.screen_surf.fill(BKGD_COLOUR)\n self.all_tiles.draw(self.screen_surf) # Tiles before other sprites.\n self.nests.draw(self.screen_surf) # Nests before chipmunks.\n self.chipmunks.draw(self.screen_surf)\n self.acorns.draw(self.screen_surf)\n self.screen_surf.blit(self.acorn_surf, self.acorn_surf.get_rect())\n self.screen_surf.blit(self.timer_surf, self.timer_rect)", "def draw_background(self):\n backgrounds = {\n \"forest\": (38, 106, 46),\n \"desert\": (194, 178, 128)\n }\n self.background_surface.fill(backgrounds[self.geography])", "def draw(self, screen):\n for branch_points in self.branches:\n pygame.draw.polygon(screen, self.branch_color, branch_points)\n for bottom_points in self.bottom:\n pygame.draw.polygon(screen, self.bottom_color, bottom_points)", "def __draw(self, screen):\n\n pygame.draw.rect(screen, (200, 255, 200), (self.x, self.y, self.width, self.height))", "def draw(self):\n self.screen.blit(self.image, self.rect)", "def setup_ground(self):\n\n ground_rect1 = ground_step.Ground(0, 200, 2953, 60)\n 
ground_rect2 = ground_step.Ground(3048, 200, 635, 60)\n ground_rect3 = ground_step.Ground(3819, 200, 2735, 60)\n ground_rect4 = ground_step.Ground(6647, 200, 2300, 60)\n\n self.ground_group = pygame.sprite.Group(ground_rect1,\n ground_rect2,\n ground_rect3,\n ground_rect4)", "def draw(self):\n self.ball_sprite.draw()", "def draw(self):\n self.game.screen.blit(self.image, self.game.off(self.pos))", "def draw_a50(self):\r\n\t\tpg.draw.rect(self.image, (100, 200, 100), self.rect)\r\n\t\r\n\t\t#self.display_surface.blit(self.image, self.rect)\r", "def draw(self):\n arcade.draw_rectangle_filled(self.center.x, self.center.y, PADDLE_WIDTH, PADDLE_HEIGHT, PADDLE_COLOR)\n pass", "def draw_brick(self, x, y):\n pygame.draw.rect(self.main_surface, self.color, (x, y, self.width, self.height), 0)\n pygame.display.update()", "def draw(self):\n self.screen.fill((0,51,102))\n # get the new drawables\n self.drawables = (self.game_model.get_background_drawables()\n + self.game_model.get_plane_drawables()\n + self.game_model.get_bullet_drawables()\n + self.game_model.get_enemy_drawables())\n for d in self.drawables:\n rect = d.get_rect()\n surf = d.get_surface()\n surf.set_colorkey((255,255,255))\n self.screen.blit(surf, rect)", "def draw(self):\r\n arcade.draw_rectangle_filled(self.center.x, self.center.y, self.radius, self.radius, TARGET_SAFE_COLOR)", "def draw_bg(self):\n self.screen.fill(self.bg)", "def draw(self, screen):\n \n # Draw the background\n screen.fill(CAVE)\n screen.blit(self.background,(self.world_shift // 3,0))\n \n # Draw all the sprite lists that we have\n self.platform_list.draw(screen)\n #self.enemy_list.draw(screen)\n self.enemy_list.draw(screen)", "def draw_kame(self):\r\n #pygame.draw.rect(self.screen, self.color, self.rect, self.image)\r\n self.screen.blit(self.image, self.rect)" ]
[ "0.7634187", "0.72167027", "0.71854854", "0.71728796", "0.7116057", "0.7026043", "0.70170915", "0.698256", "0.69724965", "0.6947137", "0.69152075", "0.6908207", "0.690745", "0.6899846", "0.68936545", "0.686858", "0.68661445", "0.6832472", "0.678626", "0.67738503", "0.6763737", "0.67635643", "0.6742641", "0.67340446", "0.67308676", "0.67202264", "0.66896284", "0.6687539", "0.6679385", "0.667054" ]
0.8539729
0
show user's current score
def scorer(self, current_score):
    text = self.field.render("Score: " + str(current_score // 2), True, BLACK_COLOUR)
    win.blit(text, (0, 0))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def disp_score():", "def print_scores(self):\n print(\"scores: \", self.get_scores())", "def update_score(self, engine, *args):\n #pdb.set_trace()\n self.score_label.text = \"Gold: {}/{}\".format(str(engine.score),\n str(engine.win_score))", "def score(self):\n return self.client.call('GET', self.name + 'score')", "def to_score(self):\n self._bottom_tab(2)\n self._goto(\"score\")", "def show_score(x, y):\n score = font.render(\"Score: \" + str(score_value), True, (255, 255, 255))\n screen.blit(score, (x, y))", "def update_score():\n pass", "def get_score(self):\n\n sql = \"SELECT score FROM Users WHERE username = '\" + self.username + \"'\"\n self.cursor.execute(sql)\n return self.cursor.fetchall()[0][0]", "def score(self):\n score_message = {\n 'Onewins': \"\\nThe Winner is Player 1!\",\n 'Twowins': \"\\nThe Winner is Player 2!\",\n 'Tie': \"\\nTie! Looks like everyone's a winner!\",\n 'Nowinner': \"\\nYikes, neither of you win!\"\n }\n if self.pone_score > self.ptwo_score:\n print(score_message['Onewins'])\n elif self.pone_score < self.ptwo_score:\n print(score_message['Twowins'])\n elif self.pone_score == 0 and self.ptwo_score == 0:\n print(score_message['Nowinner'])\n else:\n print(score_message['Tie'])", "def print_scores(self):\n ### FILL IN ###", "def your_score(score):\n value = score_font.render(\"Your Score: \" + str(score), True, green)\n dis.blit(value, [0, 0])", "def add_score(score):\n global SCORE\n SCORE = SCORE + score\n # update the display\n mvaddstr(1, 2, \"Score:\", color_pair(HEADING_COLOUR) | A_BOLD)\n mvaddstr(1, 9, \"%d\" % SCORE, color_pair(TEXT_COLOUR) | A_BOLD)", "def show_score(self, display, score):\n text = self.FONT.render(f'Score: {int(score)}', True, c.WHITE, c.BLACK)\n text_rect = text.get_rect()\n text_rect.centerx = self.SCORE_POS_X\n text_rect.centery = self.SCORE_POS_Y\n display.blit(text, text_rect)\n pygame.display.update()", "def show_score(self):\n self._pause = True # pause the game when you check the score\n score_list = self.get_high_score(self._filename) # get the record\n top = tk.Toplevel() # create a Toplevel\n top.title('Score Board')\n # create a text label for notification\n title = tk.Label(top, text='High Scored Player in This Level', width=70)\n title.pack(side=tk.TOP, ipady=1)\n if score_list is None: # check whether the record is empty\n tk.Label(top, text='No record in this level yet!', width=70).pack(side=tk.TOP, ipady=1)\n else: # if not empty\n for record in score_list: # shows up all the detail\n tk.Label(top, text=record[0] + ' : ' + record[1]).pack(side=tk.TOP, ipady=1)", "def get_score(self):\n return self.score", "def get_score(self):\n return self.score", "def get_score(self):\n return self.score", "def get_score(self):\n return self.score", "def display_score(self, win, player, computer):\n font = pygame.font.SysFont('comicsans', 70)\n if player < 10 and computer < 10:\n pygame.draw.rect(win, black, (150, 30, 75, 50))\n pygame.draw.rect(win, black, (295, 30, 75, 50))\n text1 = font.render(str(player), 1, white)\n text2 = font.render(str(computer), 1, white)\n win.blit(text1, (185, 35))\n win.blit(text2, (297, 35))", "def update_score(self):\n score_text = ' ' + str(self.x_score) + ' - ' + str(self.o_score) + ' '\n self.Score_Label.configure(text=score_text, foreground='#FFFFFF')", "def draw_score(self):\n score_text = \"Score: {}\".format(self.score)\n start_x = 10\n start_y = (SCREEN_HEIGHT - 20)\n arcade.draw_text(score_text, start_x=start_x, start_y=start_y, font_size=12, color=arcade.color.BLACK)", "def 
showtopscores(self):\n top_scores = LeaderBoard.gettopscorerslist(CURRENT_GAME_LEVEL)\n level_string = \"\"\n if CURRENT_GAME_LEVEL == DifficultyLevel.ExpertLevel:\n level_string = \"Expert level\"\n elif CURRENT_GAME_LEVEL == DifficultyLevel.BeginnerLevel:\n level_string = \"Beginner level\"\n else:\n level_string = \"Intermediate level\"\n leaderboard = \"Rank\".ljust(10) + \"Player Name\".ljust(30) + \"Score\".ljust(10) + '\\n'\n print leaderboard,\n rank = 1\n for score in top_scores:\n score = str(rank).ljust(10) + score\n print score,\n leaderboard = leaderboard + score\n rank = rank + 1\n QtGui.QMessageBox.about(self, \"Leaderboard for \" + level_string, leaderboard)", "def view_scores(jenni, input):\n scores.view_scores(jenni, input)", "def display_score(self):\n if self.args:\n return self.display_score_for_group()\n return self.display_top_donor_for_each_group()", "def draw_score(self):\r\n score_text = \"Score: {}\".format(self.score)\r\n start_x = 10\r\n start_y = SCREEN_HEIGHT - 20\r\n arcade.draw_text(score_text, start_x=start_x, start_y=start_y, font_size=12, color=arcade.color.NAVY_BLUE)", "def draw_score(self):\n score_text = \"Score: {}\".format(self.score)\n start_x = 10\n start_y = SCREEN_HEIGHT - 20\n arcade.draw_text(score_text, start_x=start_x, start_y=start_y, font_size=12, color=arcade.color.NAVY_BLUE)", "def score(self):", "def print_current_scores(self, round_num, index):\n print(f'\\n{self._players_list[index].name.upper()} '\n f'YOUR TURN. ROUND: {round_num + 1}')\n\n print('-'*21)\n print('ROLL SCORES'.rjust(16))\n self._players_list[index].print_stacked_score_dict()\n\n print('-'*21)\n print('TOP SCORE BONUS'.rjust(19))\n print(f\"Top Score:\".ljust(16) +\n f\"{self._players_list[index].get_top_score()}\".rjust(3))\n print(f\"Top Bonus Score:\".ljust(16) +\n f\"{self._players_list[index].get_top_bonus_score()}\".rjust(3))\n\n print('-'*21)\n print('TOTAL SCORES'.rjust(19))\n print(f\"Total Top:\".ljust(16) +\n f\"{self._players_list[index].get_total_top_score()}\".rjust(3))\n print(f\"Total Bottom:\".ljust(16) +\n f\"{self._players_list[index].get_total_bottom_score()}\".rjust(3))\n\n print('-'*21)\n print(f\"GRAND TOTAL:\".ljust(16) +\n f\"{self._players_list[index].get_grand_total_score()}\".rjust(3))", "def display_scores(scores):\n\n\t#print(\"\\t\\t\\tScores: \", list(scores))\n\tprint(\"\\t\\t\\tMean: \", scores.mean())\n\tprint(\"\\t\\t\\tStandard Deviation: \", scores.std())", "def show_scores(self):\n for text in self.score_text:\n text.draw()" ]
[ "0.77043575", "0.72004783", "0.71493894", "0.706976", "0.70139277", "0.700324", "0.6931816", "0.6891288", "0.6878507", "0.68636274", "0.6858917", "0.68321425", "0.68244326", "0.67983705", "0.6768852", "0.6762979", "0.6762979", "0.6762979", "0.6751136", "0.6742192", "0.67155415", "0.6702235", "0.6699891", "0.6681637", "0.6681627", "0.6681549", "0.66700166", "0.6668742", "0.6633821", "0.663268" ]
0.73849154
1
Override the replay prefix
def set_replay_path(self, replay_folder, replay_prefix):
    self.replay_prefix = replay_prefix
    self.replay_folder = replay_folder
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def default_prefix(self) -> str:", "def prefix(self, prefix):\n\n self._prefix = prefix", "def prefix(self, prefix):\n\n self._prefix = prefix", "def replay():\n roku_master.replay()", "def pause_review(self, prefix: Nibbles) -> None:\n self._active_prefixes.add(prefix)", "def test_prefix():\n\n dispatcher = ntelebot.dispatch.Dispatcher()\n dispatcher.add_prefix('prefix', lambda ctx: 'PREFIX')\n ctx = MockContext()\n ctx.type = 'message'\n assert dispatcher(ctx) is False\n ctx.prefix = 'prefix'\n assert dispatcher(ctx) == 'PREFIX'", "def llm_prefix(self) -> str:\n return \"Thought:\"", "def prefix(self, group):\n return", "def _temp_prefix(cls) -> str:\n pass", "async def prefix(self, ctx, *, prefix=None):\n\n current = self.bot.prefix\n embed = Embed(\n title=\"Current prefix\", color=self.bot.main_color, description=f\"{current}\"\n )\n\n if prefix is None:\n await ctx.send(embed=embed)\n else:\n embed.title = \"Changed prefix!\"\n embed.description = f\"Set prefix to `{prefix}`\"\n self.bot.config[\"prefix\"] = prefix\n await self.bot.config.update()\n await ctx.send(embed=embed)", "def prefix(self, prefix, *args):\n new_prefix = '%s%s' % (self.prefixes[-1], prefix % args)\n self.prefixes.append(new_prefix)\n try:\n yield\n finally:\n assert self.prefixes.pop() == new_prefix", "def set_prefix(prefix):\n PLUGINS.set_prefix(prefix)", "def prefixed(self, prefix):\n if not prefix:\n return self.clone()\n else:\n return self.using(join(prefix, self))", "def prefix(self):\n return self._prefix", "def prefix(self):\n return self._prefix", "def prefix(self):\n return self._prefix", "def prefix(self, prefix):\n self._path_prefix = prefix", "def setPrefix(self, *args):\n return _libsbml.ASTBasePlugin_setPrefix(self, *args)", "def prefix(self):\n return self[\"prefix\"]", "def prefix(self):\n return self[\"prefix\"]", "def default_prefix(self) -> str:\n return \"\"", "def default_prefix(self) -> str:\n return \"\"", "async def prefix(self, ctx, prefix):\n # Get the server language\n lang = getLang(ctx.message.guild.id)\n\n if len(prefix) > 10:\n with open(f\"embeds/{lang}/prefix.json\", \"r\") as f:\n await ctx.reply(embed=discord.Embed.from_dict(json.load(f)['len-error']), delete_after=20)\n\n # Change prefix\n with open('serverconfig/prefixes.json', 'r') as f:\n prefixes = json.load(f)\n old_prefix = prefixes[str(ctx.guild.id)]\n prefixes[str(ctx.guild.id)] = prefix\n with open('serverconfig/prefixes.json', 'w') as f:\n json.dump(prefixes, f, indent=4)\n\n # Get the embed of the right language and send with replaced variable\n with open(f\"embeds/{lang}/prefix.json\", \"r\") as f:\n embed = json.load(f)['embed']\n\n embed['description'] = embed['description'].replace(\"%VAR\", prefix)\n await ctx.reply(embed=discord.Embed.from_dict(embed), mention_author=False, delete_after=20)", "def _init_prefix(self):\n self._.prefix = \"v%x\" % (hash(self) % Integer(2)**32)", "def prefix_id(self, name):\n if \":\" in name: return name\n return self.prefix + \":\" + name", "def set_prefix(self, prefix):\n self._prefix = prefix\n self._update_layout()", "def get_prefix(self):\n return self.prefix", "async def prefix(self, _bot, message: discord.Message):\n mention = [self.user.mention + ' ', f'<@!{self.user.id}> ']\n additional_prefixes = await self.get_prefixes(message.guild)\n return self.cfg['bot']['prefixes'] + mention + additional_prefixes", "def resetPrefix(self):\n pp = self.rendererWindow.getCurrentPipelinePage()\n\n if pp is None:\n filename = \"\"\n\n else:\n filename = pp.filename\n\n guess = 
self.guessFilePrefix(filename)\n\n self.fileprefix.setText(guess)", "def append_prefix(self, prefix):\n self._prefix_stack.append(prefix)\n return Record._add_prefix_context()" ]
[ "0.63496584", "0.6304424", "0.6304424", "0.62225986", "0.62107915", "0.6182443", "0.61162794", "0.59398854", "0.5909899", "0.5886368", "0.5860298", "0.58344734", "0.5813779", "0.57819796", "0.57819796", "0.57819796", "0.5765664", "0.57609695", "0.5745051", "0.5745051", "0.57254565", "0.57254565", "0.57106197", "0.567296", "0.5668594", "0.5618018", "0.56105953", "0.558628", "0.55784875", "0.5572876" ]
0.666854
0
Gets the items of this InlineResponse200.
def items(self) -> List[InlineResponse200Items]: return self._items
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_items_from_response(self, response):\n raise NotImplementedError", "def get_items(self):\n return self.items", "def get_items(self):\n return self.item_list", "def get_items(self):\n return (item for item in self.items)", "def get_items():\n return requester.perform_request(Uri.items)", "def __call__(self):\n return self.get_items()", "def items(self):\n return self._headers[:]", "def get_items(self):\n return self.item_ids", "def get_items(self) -> list:\r\n return self._items", "def get_items(self):\n return []", "def items(self):\n return self._items", "def get(self):\n return self._items", "def get(self):\n return self._items", "def data(self) -> List[InlineResponse2002]:\n return self._data", "def __init__(self, items: List[InlineResponse200Items]=None): # noqa: E501\n self.swagger_types = {\n 'items': List[InlineResponse200Items]\n }\n\n self.attribute_map = {\n 'items': 'items'\n }\n self._items = items", "def get_items(self):\r\n return self.items()", "def items(self, items: List[InlineResponse200Items]):\n if items is None:\n raise ValueError(\"Invalid value for `items`, must not be `None`\") # noqa: E501\n\n self._items = items", "def get_all(self) -> tuple:\n items = self.model.get_all()\n\n return {'items': list_map(items)}, 200", "def all_items_handler():\n items = getAllItems()\n return jsonify(items=[i.serialize for i in items])", "def items(self) -> List[Item]:\n return self._items", "def get_all_items(self):\n return self.api.state['items']", "def items(self):\n return list(self.items_generator())", "def all(self):\n return self.client.request_with_method(Methods.LIST % self.name)['items']", "def items(self):\n return [x.item for x in self]", "def invoiceitems(self):\r\n return InvoiceItems(self)", "def get_items(self):\n return self.order_items", "def items(self):\n return self._as_dict().items()", "def items(self):\r\n return self._as_dict().items()", "def items(self):\n return self.__items(())", "def get_response_content_iterator(self):\n return self.__response.iter_lines()" ]
[ "0.7298296", "0.682365", "0.6715298", "0.6598859", "0.6588794", "0.65261364", "0.64762795", "0.6466019", "0.64575994", "0.6452882", "0.6352481", "0.63157487", "0.63157487", "0.6306265", "0.62805", "0.62279594", "0.60577524", "0.60419965", "0.60099256", "0.60069144", "0.60065466", "0.5992721", "0.5979651", "0.59493166", "0.5916227", "0.591619", "0.59158844", "0.5885075", "0.58615816", "0.58419985" ]
0.8648576
0
Sets the items of this InlineResponse200.
def items(self, items: List[InlineResponse200Items]):
    if items is None:
        raise ValueError("Invalid value for `items`, must not be `None`")  # noqa: E501

    self._items = items
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, items: List[InlineResponse200Items]=None): # noqa: E501\n self.swagger_types = {\n 'items': List[InlineResponse200Items]\n }\n\n self.attribute_map = {\n 'items': 'items'\n }\n self._items = items", "def items(self) -> List[InlineResponse200Items]:\n return self._items", "def set_all(self, value):\n self.__items = value", "def setitems(self, items):\n self.clear()\n # FIXME: this allows you to pass in an OrderedDict as well :-)\n self.update(items)", "def preset_items(self):\r\n\r\n raise NotImplementedError", "def get_items_from_response(self, response):\n raise NotImplementedError", "def SetItems(self, items: Union[Iterable, dict]):\n if not items:\n return\n if isinstance(items, dict):\n items = [[key, str(value)] for key, value in items.items()]\n if self._sorted:\n items = sorted(items, key=lambda x: x[1])\n self._items = [key for key, _ in items]\n super().SetItems([value for _, value in items])\n else:\n if self._sorted:\n self._items = tuple(sorted(items))\n else:\n self._items = tuple(items)\n super().SetItems([str(v) for v in self._items])\n self.SetSelection(0)", "def get_paginated_response(self, data, status_code=200, **kwargs): # pylint: disable=arguments-differ\n resp = super().get_paginated_response(data)\n for (key, value) in kwargs.items():\n resp.data[key] = value\n resp.status_code = status_code\n return resp", "def line_items(self, line_items):\n\n self._line_items = line_items", "def put_response(self, item):\n self.export.put_response(item)", "def inventory_items(self, inventory_items):\n\n self._inventory_items = inventory_items", "def data(self, data: List[InlineResponse2002]):\n\n self._data = data", "def yield_item(self, response):\n item = BrobotBotsItem()\n item.update(self.data)\n yield item", "def yield_item(self, response):\n item = BrobotBotsItem()\n item.update(self.data)\n yield item", "def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None, client=None,\r\n herd=True):\r\n if client is None:\r\n client = self.get_client(write=True)\r\n\r\n set_function = self.set if herd else super(HerdClient, self).set\r\n\r\n try:\r\n pipeline = client.pipeline()\r\n for key, value in data.items():\r\n set_function(key, value, timeout, version=version, client=pipeline)\r\n pipeline.execute()\r\n except ConnectionError:\r\n raise ConnectionInterrupted(connection=client)", "def _itemsToResponse(self, items):\n itemsToSend = []\n count = 0\n if items:\n size = 0\n while size < self._maxSize:\n try:\n item = items.pop()\n except (KeyError, IndexError):\n # We're done.\n # Note: because records is an iterable (list or set)\n # we're catching both KeyError and IndexError.\n break\n size = size + len(item)\n itemsToSend.append(item)\n count += 1\n\n response = {\"items\": itemsToSend}\n\n if items:\n response[\"continuation\"] = self._storeContinuation(items, \"items\")\n\n return response", "def response_values(self, response_values):\n\n self._response_values = response_values", "def update_items(self, request, *a, **kw):\n item_def = request.data\n cpdoc = self.get_object()\n item_def['id'] = cpdoc.id\n\n item_ser = self.get_serializer(instance=obj_cp, data=item_def)\n item_ser.is_valid(raise_exception=True)\n item_obj = item_ser.save()\n headers = self.get_success_headers(item_ser.data)\n return response.Response(item_ser.data, headers=headers)", "def set_response_list(self, r_list):\n self.response_list = r_list", "def set_response_list(cls,response_list):\n if not isinstance(response_list,list):\n raise ValueError('response_list arg must be 
a list')\n\n # Run through the responses\n for response in response_list:\n if not isinstance(response,(list,dict)):\n raise ValueError('response_list entry must be a list or dict')\n\n if isinstance(response,list):\n if len(response) != 3:\n raise ValueError('response_list entry must have 3 fields')\n\n # Grab each fields\n url = response[0]\n method = 'POST'\n status = response[1]\n type = 'xml'\n message = response[2]\n\n else:\n # response is a dict\n url = response.get('base_url')\n method = response.get('method','POST')\n status = response.get('status')\n type = response.get('type','xml')\n message = response.get('message')\n\n response_list = BonitaMockedServerImpl.get_response_list()\n response_list.add_or_augment_response_list(url,method,status,type,message)", "def set_attachments(self):\n self.response['attachments'] = []", "def update(self, items: Mapping[Any, Any]) -> None:\n self.extend(list(items.values()))\n return", "def set_item(self, item):\n self.item = item", "def set_item(self, item):\n self.item = item", "def process_new_items(self, new_items):\n self.items_hat = np.hstack([self.items_hat, new_items])", "def set_invocation_metadata(self, items: Tuple[Tuple[str, str]]):\n self._invocation_metadata = items", "def __init__(self, items, abort_on_error=False):\n self._items = items\n self.abort_on_error = abort_on_error", "def Set(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def items(self, value):\n if value is None:\n self._items = None\n self.active = None\n else:\n self._items = value\n self.active = [True] * len(self._items)", "def set_srv_response(self, srvs):\n with self._context.lock:\n self._context.data[\"services\"] = srvs" ]
[ "0.7157411", "0.64879584", "0.5950706", "0.5830931", "0.5624914", "0.55170625", "0.5450405", "0.54015255", "0.54010475", "0.5339431", "0.53360486", "0.53321445", "0.5307274", "0.5307274", "0.5305917", "0.52636474", "0.52428657", "0.5224288", "0.52015966", "0.5196767", "0.5181081", "0.50616", "0.50376314", "0.50376314", "0.50297767", "0.50141823", "0.49986187", "0.49929652", "0.49798945", "0.4974186" ]
0.6958488
1
shifts data so as to minimize distance in latitude. Let B be stationary while index-shifting A and C forward so that A[t] -> A[t + t_BA], B[t] -> B[t], C[t] -> C[t + t_BC]
def shifter(self):
    #self.BA_shift = self.timeshift_latitude(self.latB, self.latA)
    #self.BC_shift = self.timeshift_latitude(self.latB, self.latC)
    self.shifted = True #changing boolean to True when function is called.
    secondsA = self.secondsA
    secondsB = self.secondsB
    secondsC = self.secondsC

    NeA = self.holefill(self.NeA, secondsA)
    NeB = self.holefill(self.NeB, secondsB)
    NeC = self.holefill(self.NeC, secondsC)

    start = 0
    stop = len(NeA) - np.max(np.array([self.BA_shift, self.BC_shift]))
    startA = start + self.BA_shift
    stopA = stop + self.BA_shift
    startC = start + self.BC_shift
    stopC = stop + self.BC_shift

    NeA = NeA[startA:stopA]
    NeB = NeB[start:stop]
    NeC = NeC[startC:stopC]

    longA = self.holefill(self.longA, secondsA)
    longB = self.holefill(self.longB, secondsB)
    longC = self.holefill(self.longC, secondsC)
    longA = longA[startA:stopA]
    longB = longB[start:stop]
    longC = longC[startC:stopC]

    latA = self.holefill(self.latA, secondsA)
    latB = self.holefill(self.latB, secondsB)
    latC = self.holefill(self.latC, secondsC)
    latA = latA[startA:stopA]
    latB = latB[start:stop]
    latC = latC[startC:stopC]

    radA = self.holefill(self.radA, secondsA)
    radB = self.holefill(self.radB, secondsB)
    radC = self.holefill(self.radC, secondsC)
    radA = radA[startA:stopA]
    radB = radB[start:stop]
    radC = radC[startC:stopC]

    velA = self.holefill(self.velA, secondsA)
    velB = self.holefill(self.velB, secondsB)
    velC = self.holefill(self.velC, secondsC)
    velA = velA[startA:stopA]
    velB = velB[start:stop]
    velC = velC[start:stop]

    altA = self.holefill(self.altA, secondsA)
    altB = self.holefill(self.altB, secondsB)
    altC = self.holefill(self.altC, secondsC)
    altA = altA[startA:stopA]
    altB = altB[start:stop]
    altC = altC[startC:stopC]

    mlatA = self.holefill(self.mlatA, secondsA)
    mlatB = self.holefill(self.mlatB, secondsB)
    mlatC = self.holefill(self.mlatC, secondsC)
    mlatA = mlatA[startA:stopA]
    mlatB = mlatB[start:stop]
    mlatC = mlatC[startC:stopC]

    mlongA = self.holefill(self.mlongA, secondsA)
    mlongB = self.holefill(self.mlongB, secondsB)
    mlongC = self.holefill(self.mlongC, secondsC)
    mlongA = mlongA[startA:stopA]
    mlongB = mlongB[start:stop]
    mlongC = mlongC[startC:stopC]

    mltA = self.holefill(self.mltA, secondsA)
    mltB = self.holefill(self.mltB, secondsB)
    mltC = self.holefill(self.mltC, secondsC)
    mltA = mltA[startA:stopA]
    mltB = mltB[start:stop]
    mltC = mltC[startC:stopC]

    secondsA = self.holefill(secondsA, secondsA)
    secondsB = self.holefill(secondsB, secondsB)
    secondsC = self.holefill(secondsC, secondsC)
    secondsA = secondsA[startA:stopA]
    secondsB = secondsB[start:stop]
    secondsC = secondsC[startC:stopC]

    indsA = np.nonzero(secondsA)[0]
    indsB = np.nonzero(secondsB)[0]
    indsC = np.nonzero(secondsC)[0]
    inds = np.intersect1d(indsA, indsB)
    inds = np.intersect1d(inds, indsC)

    self.NeA = NeA[inds]
    self.NeB = NeB[inds]
    self.NeC = NeC[inds]
    self.longA = longA[inds]
    self.longB = longB[inds]
    self.longC = longC[inds]
    self.latA = latA[inds]
    self.latB = latB[inds]
    self.latC = latC[inds]
    self.radA = radA[inds]
    self.radB = radB[inds]
    self.radC = radC[inds]
    self.velA = velA[inds]
    self.velB = velB[inds]
    self.velC = velC[inds]
    self.altA = altA[inds]
    self.altB = altB[inds]
    self.altC = altC[inds]
    self.mlatA = mlatA[inds]
    self.mlatB = mlatB[inds]
    self.mlatC = mlatC[inds]
    self.mlongA = mlongA[inds]
    self.mlongB = mlongB[inds]
    self.mlongC = mlongC[inds]
    self.mltA = mltA[inds]
    self.mltB = mltB[inds]
    self.mltC = mltC[inds]
    self.secondsA = secondsA[inds]
    self.secondsB = secondsB[inds]
    self.secondsC = secondsC[inds]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def minimizeTimes(self):\n from copy import deepcopy as dcp\n tmin = self.get_tmin()\n for t in self.srcData: \n old = dcp(self.srcData[t])\n new_t = t - tmin\n self.outData[new_t] = old", "def topsort_lat(lat, random_shift=False, max_state=None):\n\n V = {arc[STATE_FROM] for arc in lat} | {arc[STATE_TO] for arc in lat}\n A = {i: set() for i in V}\n for arc in lat:\n A[arc[STATE_TO]].add(arc[STATE_FROM])\n newid2oldid = [0]\n while len(newid2oldid) <= len(V):\n vs = [i for i, v in A.items() if len(v) == 0]\n if len(vs) == 0:\n print(f\"Lat: {lat}\")\n print(f\"V: {V}\")\n print(f\"A: {A}\")\n print(f\"newid2oldid: {newid2oldid}\")\n raise RuntimeError(f\"Topsort error.\")\n i = np.random.choice(vs)\n A.pop(i)\n newid2oldid.append(i)\n for a in A.values():\n a.discard(i)\n old2new = {i_old: i_new for i_new, i_old in enumerate(newid2oldid)}\n if random_shift:\n shift=0\n max_shift = max_state - len(old2new)\n max_step = max_state // len(old2new)\n for k,v in old2new.items():\n if v == 0 or v == 1:\n continue\n new_shift = random.randint(0, min(max_step, max_shift))\n shift += new_shift\n max_shift -= new_shift\n old2new[k] += shift\n\n sorted_lat = np.array([(arc[0], old2new[arc[1]], old2new[arc[2]]) for arc in lat])\n return sorted_lat", "def measure(self, A, B, start_index): \n #code modifed from wikipedia\n Dlp = lambda x,y: abs(x-y)\n timeSB = np.arange(1,len(B)+1)\n timeSA = np.arange(1,len(A)+1)\n nu = self.v\n _lambda = self.gamma\n # Reference :\n # Marteau, P.; F. (2009). \"Time Warp Edit Distance with Stiffness Adjustment for Time Series Matching\".\n # IEEE Transactions on Pattern Analysis and Machine Intelligence. 31 (2): 306–318. arXiv:cs/0703033\n # http://people.irisa.fr/Pierre-Francois.Marteau/\n\n # Check if input arguments\n if len(A) != len(timeSA):\n print(\"The length of A is not equal length of timeSA\")\n return None, None\n \n if len(B) != len(timeSB):\n print(\"The length of B is not equal length of timeSB\")\n return None, None\n\n if nu < 0:\n print(\"nu is negative\")\n return None, None\n\n # Add padding\n A = np.array([0] + list(A))\n timeSA = np.array([0] + list(timeSA))\n B = np.array([0] + list(B))\n timeSB = np.array([0] + list(timeSB))\n\n n = len(A)\n m = len(B)\n # Dynamical programming\n DP = np.zeros((n, m))\n\n # Initialize DP Matrix and set first row and column to infinity\n DP[0, :] = np.inf\n DP[:, 0] = np.inf\n DP[0, 0] = 0\n\n # Compute minimal cost\n for i in range(1, n):\n for j in range(1, m):\n # Calculate and save cost of various operations\n C = np.ones((3, 1)) * np.inf\n # Deletion in A\n C[0] = (\n DP[i - 1, j]\n + Dlp(A[i - 1], A[i])\n + nu * (timeSA[i] - timeSA[i - 1])\n + _lambda\n )\n # Deletion in B\n C[1] = (\n DP[i, j - 1]\n + Dlp(B[j - 1], B[j])\n + nu * (timeSB[j] - timeSB[j - 1])\n + _lambda\n )\n # Keep data points in both time series\n C[2] = (\n DP[i - 1, j - 1]\n + Dlp(A[i], B[j])\n + Dlp(A[i - 1], B[j - 1])\n + nu * (abs(timeSA[i] - timeSB[j]) + abs(timeSA[i - 1] - timeSB[j - 1]))\n )\n # Choose the operation with the minimal cost and update DP Matrix\n DP[i, j] = np.min(C)\n distance = DP[n - 1, m - 1]\n self.M = DP\n self.decision_scores_.append((start_index, distance))\n return distance", "def forward_committor_sensitivity(T, A, B, index):\n\n n = len(T)\n set_X = numpy.arange(n) # set(range(n))\n set_A = numpy.unique(A) # set(A)\n set_B = numpy.unique(B) # set(B)\n set_AB = numpy.union1d(set_A, set_B) # set_A | set_B\n notAB = numpy.setdiff1d(set_X, set_AB, True) # list(set_X - set_AB)\n m = len(notAB)\n\n K = T - 
numpy.diag(numpy.ones(n))\n\n U = K[numpy.ix_(notAB.tolist(), notAB.tolist())]\n\n v = numpy.zeros(m)\n\n # for i in xrange(0, m):\n # for k in xrange(0, len(set_B)):\n # v[i] = v[i] - K[notAB[i], B[k]]\n v[:] = v[:] - K[notAB[:], B[:]]\n\n qI = numpy.linalg.solve(U, v)\n\n q_forward = numpy.zeros(n)\n #q_forward[set_A] = 0 # double assignment.\n q_forward[set_B] = 1\n #for i in range(len(notAB)):\n q_forward[notAB[:]] = qI[:]\n\n target = numpy.eye(1, n, index)\n target = target[0, notAB]\n\n UinvVec = numpy.linalg.solve(U.T, target)\n Siab = numpy.zeros((n, n))\n\n for i in range(m):\n Siab[notAB[i]] = - UinvVec[i] * q_forward\n\n return Siab", "def shift_30min_up(da):\n return da.assign_coords(time=da.coords[\"time\"] - pd.to_timedelta(\"30m\"))", "def moving(filtertype, S0, n):\n print('-------------------------- moving')\n \n # Constants:\n S = S0.copy() # Avoid overwritting data:\n S_new = np.zeros(len(S))\n nzero = np.zeros(2*n+1)\n \n # Moving median filter:\n if filtertype=='median':\n print 'Moving median filter'\n # Interval: d[n, 1+n, ... , N-1, N-n]\n for i in range(len(S)-2*n): \n S_new[n+i] = np.median(S[range((n+i)-n, (n+i)+n+1)])\n for i in range(n):\n # Interval: d[-n, -(n-1), ... , n-1, n] - Low end of data\n low = nzero\n low[range(n-i)] = S[0]*np.ones(n-i)\n low[-(n+1+i):] = S[range(0, n+1+i)]\n S_new[i] = np.median(low)\n # Interval: d[N-n, N-(n-1), ... , N+(n-1), N+n] - High end of data\n high = nzero\n high[range(n+1+i)] = S[range(len(S)-(n+i+1), len(S))]\n high[-(n-i):] = S[-1]*np.ones(n-i)\n S_new[len(S)-1-i] = np.median(high)\n\n # Moving mean filter:\n if filtertype=='mean':\n print 'Moving mean filter'\n # Interval: d[n, 1+n, ... , N-1, N-n]\n for i in range(len(S)-2*n): \n S_new[n+i] = np.mean(S[range((n+i)-n, (n+i)+n+1)])\n for i in range(n):\n # Interval: d[-n, -(n-1), ... , n-1, n] - Low end of data\n low = nzero\n low[range(n-i)] = S[0]*np.ones(n-i)\n low[-(n+1+i):] = S[range(0, n+1+i)]\n S_new[i] = np.mean(low)\n # Interval: d[N-n, N-(n-1), ... , N+(n-1), N+n] - High end of data\n high = nzero\n high[range(n+1+i)] = S[range(len(S)-(n+1+i), len(S))]\n high[-(n-i):] = S[-1]*np.ones(n-i)\n S_new[len(S)-1-i] = np.mean(high)\n\n # Output:\n return S_new", "def test_gleckler_index(self):\n\n # generate sample data\n # sample data\n tmp = np.zeros((5, 3, 1))\n tmp[:,0,0] = np.ones(5)*1.\n tmp[:,1,0] = np.ones(5)*2.\n tmp[:,2,0] = np.ones(5)*5.\n\n # The data is like ...\n #| 1 | 2 | 5 |\n #| 1 | 2 | 5 |\n #| 1 | 2 | 5 |\n #| 1 | 2 | 5 |\n #| 1 | 2 | 5 |\n\n x = self.D.copy()\n x._temporal_subsetting(0, 4)\n\n x.data = np.ma.array(tmp, mask=tmp!=tmp)\n x.std = np.ones(x.data.shape)\n x.time[0] = pl.datestr2num('2000-02-15')\n x.time[1] = pl.datestr2num('2000-03-15')\n x.time[2] = pl.datestr2num('2000-04-15')\n x.time[3] = pl.datestr2num('2000-05-15')\n x.time[4] = pl.datestr2num('2000-06-15')\n\n y = self.D.copy()\n y._temporal_subsetting(0, 4)\n tmp = np.ones(x.data.shape) # sample data 2\n y.data = np.ma.array(tmp, mask=tmp!=tmp)\n y.time[0] = pl.datestr2num('2000-02-15')\n y.time[1] = pl.datestr2num('2000-03-15')\n y.time[2] = pl.datestr2num('2000-04-15')\n y.time[3] = pl.datestr2num('2000-05-15')\n y.time[4] = pl.datestr2num('2000-06-15')\n\n # Case 1: same area weights\n # cell area\n tmp = np.ones((3, 1))\n x.cell_area = tmp*1.\n\n #| 1-1 | 2-1 | 5-1 |\n #| 1-1 | 2-1 | 5-1 |\n #| 1-1 | 2-1 | 5-1 |\n #| 1-1 | 2-1 | 5-1 |\n #| 1-1 | 2-1 | 5-1 |\n #===================\n #| 0 | 5 | 5*4**2=5*16. 
= 80 |\n #==> E2 = sqrt(85./(15.))\n D = GlecklerPlot()\n r = D.calc_index(x, y, 'a', 'b', time_weighting=False)\n\n wt = np.ones(5) / 5.\n ref = np.sqrt(((85./15.) * wt).sum())\n t = np.abs(1. - r / ref)\n self.assertLess(t, 0.000001) # relative error\n\n D = GlecklerPlot()\n r = D.calc_index(x, y, 'a', 'b')\n\n wt = np.asarray([29., 31., 30., 31., 30.])\n wt = wt / wt.sum()\n ref = np.sqrt(((85./15.) * wt).sum())\n t = np.abs(1. - r / ref)\n self.assertLess(t, 0.000001) # relative error\n\n\n\n # Case 2: Different area weights\n # cell area\n tmp = np.ones((3, 1))\n tmp[1, 0] = 2.\n x.cell_area = tmp*1.\n\n #| 1-1=0 | 2-1=1 | 5-1=16 |\n #| 1-1=0 | 2-1=1 | 5-1=16 |\n #| 1-1=0 | 2-1=1 | 5-1=16 |\n #| 1-1=0 | 2-1=1 | 5-1=16 |\n #| 1-1=0 | 2-1=1 | 5-1=16 |\n #--------------------------\n # w = 0.25 w = 0.5 w=0.25|\n #--------------------------\n\n # 0.25*0 + 0.5 * 1 + 0.25 * 16 = 0 + 0.5 + 4 = 4.5\n # the mean of that is 4.5 for each timestep\n # mean because the overall weights are calculated as such that\n # they give a total weight if 1\n\n # diagnostic\n D = GlecklerPlot()\n r = D.calc_index(x, y, 'a', 'b', time_weighting=False)\n\n wt = np.ones(5) / 5.\n ref = np.sqrt((4.5 * wt).sum())\n t = np.abs(1. - r / ref)\n self.assertLess(t, 0.000001) # relative error\n\n wt = np.asarray([29., 31., 30., 31., 30.])\n wt = wt / wt.sum()\n ref = np.sqrt((4.5 * wt).sum())\n t = np.abs(1. - r / ref)\n self.assertLess(t, 0.000001) # relative error\n\n # Case 3: use different std\n x.std = np.ones(x.data.shape)\n x.std[:, 2, 0] = 0.5\n\n #| 1-1=0 | 2-1=1 | 5-1=16 / 0.5 |\n #| 1-1=0 | 2-1=1 | 5-1=16 / 0.5 |\n #| 1-1=0 | 2-1=1 | 5-1=16 / 0.5 |\n #| 1-1=0 | 2-1=1 | 5-1=16 / 0.5 |\n #| 1-1=0 | 2-1=1 | 5-1=16 / 0.5 |\n #--------------------------------\n # w = 0.25 w = 0.5 w=0.25|\n # 0 + 0.5 + 0.25*32 = 0.5 + 8 = 8.5\n\n D = GlecklerPlot()\n r = D.calc_index(x, y, 'a', 'b', time_weighting=False)\n\n wt = np.ones(5) / 5.\n ref = np.sqrt((8.5 * wt).sum())\n t = np.abs(1. - r / ref)\n self.assertLess(t, 0.000001) # relative error\n\n wt = np.asarray([29., 31., 30., 31., 30.])\n wt = wt / wt.sum()\n ref = np.sqrt((8.5 * wt).sum())\n t = np.abs(1. 
- r / ref)\n self.assertLess(t, 0.000001) # relative error", "def find_best_shift(l_x_as, l_y_as, l_yp_as, r_x_orig_as, r_y_as, r_yp_as, x_stride):\n logg = logging.getLogger(f\"c.{__name__}.find_best_shift\")\n # logg.debug(f\"Start find_best_shift\")\n\n shift_start = timer()\n\n # find how much the right segment can shift\n shift_11 = l_x_as[-1] - r_x_orig_as[-1] - (l_x_as[-1] - l_x_as[0]) / 2\n shift_10 = l_x_as[-1] - r_x_orig_as[0]\n # align the shift on the stride grid: now if you sum the shift to l_x_as\n # the points are still aligned.\n shift_a_11 = math.floor(shift_11 / x_stride) * x_stride\n shift_a_10 = math.ceil(shift_10 / x_stride) * x_stride\n shift_range = np.arange(shift_a_11, shift_a_10 + x_stride / 2, x_stride)\n # recap = f\"shift_11: {shift_11} shift_10: {shift_10}\"\n # recap += f\" shift_a_11: {shift_a_11} shift_a_10: {shift_a_10}\"\n # logg.debug(recap)\n\n best_dist_x_touch = float(\"inf\")\n best_shift = None\n best_r_x_as = None\n best_l_tang_y_as = None\n\n tangent_times = []\n\n for shift in shift_range:\n r_x_as = r_x_orig_as + shift\n # logg.debug(f\"\\nNew shift r_x_as[0]: {r_x_as[0]} r_x_as[-1]: {r_x_as[-1]}\")\n\n # ax.plot(r_x_as, r_y_as, color=\"y\", ls=\"-\", marker=\"\")\n # ax.plot(r_x_as, r_y_as, color=\"y\", ls=\"\", marker=\".\")\n\n # find the indexes where the tangent touches the curves\n l_xid, r_xid, l_tang_y_as, tangent_time = find_lower_tangent(\n l_x_as, l_y_as, r_x_as, r_y_as, r_yp_as\n )\n\n tangent_times.append(tangent_time)\n\n if l_xid == -1:\n # logg.debug(f\"Tangent not found\")\n continue\n\n # find where the tangent touches the segments\n l_x_touch = l_x_as[l_xid]\n r_x_touch = r_x_as[r_xid]\n\n if r_x_touch < l_x_touch:\n # logg.debug(f\"Tangent goes the wrong way\")\n continue\n\n # compute how far are the two contacts\n dist_x_touch = r_x_touch - l_x_touch\n\n # if this shift does not improve the distance, go to the next\n if dist_x_touch >= best_dist_x_touch:\n continue\n\n # save info about the current shift\n best_dist_x_touch = dist_x_touch\n best_shift = shift\n best_r_x_as = r_x_as\n best_l_tang_y_as = l_tang_y_as\n\n # extend the points of contact\n best_l_x_ext = l_x_touch - dist_x_touch / 2\n best_r_x_ext = r_x_touch + dist_x_touch / 2\n # recap = f\"l_x_touch: {l_x_touch:.4f} r_x_touch {r_x_touch:.4f}\"\n # recap += f\" dist_x_touch: {dist_x_touch:.4f}\"\n # recap += f\" best_l_x_ext: {best_l_x_ext:.4f} best_r_x_ext {best_r_x_ext:.4f}\"\n # logg.debug(recap)\n\n tangent_time_mean = sum(tangent_times) / len(tangent_times)\n logg.debug(f\"Mean tangent time: {tangent_time_mean:.6f}\")\n\n # extract the best value as current (r_x_as = r_x_orig_as + best_shift)\n r_x_as = best_r_x_as\n\n # find the index of the touch point on the left segment\n l_lower_x = l_x_as < best_l_x_ext\n # argmin returns the *first* occurrence of the min value\n l_id_e_x = np.argmin(l_lower_x)\n # for symmetry, if we can, we keep the previous index (the last of the True)\n if l_id_e_x > 0:\n l_id_e_x -= 1\n\n # find the index of the touch point on the right segment\n r_lower_x = r_x_as < best_r_x_ext\n r_id_e_x = np.argmin(r_lower_x)\n\n # recap = f\"l_id_e_x: {l_id_e_x}\"\n # recap += f\" l_x_as[l_id_e_x]: {l_x_as[l_id_e_x]:.4f}\"\n # recap += f\" r_id_e_x: {r_id_e_x}\"\n # recap += f\" r_x_as[r_id_e_x]: {r_x_as[r_id_e_x]:.4f}\"\n # logg.debug(recap)\n\n # find the extended contact point\n l_p_ext = OrientedPoint(\n l_x_as[l_id_e_x], l_y_as[l_id_e_x], slope2deg(l_yp_as[l_id_e_x])\n )\n r_p_ext = OrientedPoint(\n r_x_as[r_id_e_x], r_y_as[r_id_e_x], 
slope2deg(r_yp_as[r_id_e_x])\n )\n _, ext_x_as, ext_y_as, _ = compute_aligned_cubic_segment(\n l_p_ext,\n r_p_ext,\n x_stride,\n )\n\n # recap = f\"l_id_e_x: {l_id_e_x}\"\n # recap += f\" l_x_as[l_id_e_x]: {l_x_as[l_id_e_x]:.4f}\"\n # recap += f\" ext_x_as[0]: {ext_x_as[0]:.4f}\"\n # recap += f\" ext_x_as[-1]: {ext_x_as[-1]:.4f}\"\n # recap += f\" r_id_e_x: {r_id_e_x}\"\n # recap += f\" r_x_as[r_id_e_x]: {r_x_as[r_id_e_x]:.4f}\"\n # logg.debug(recap)\n\n # show id to use when plotting\n l_id_s_x = l_id_e_x\n r_id_s_x = r_id_e_x\n\n # fix the ext ids, there is a gap of 1 (one) stride missing on one side\n if not math.isclose(l_x_as[l_id_e_x], ext_x_as[0]):\n logg.debug(f\"Left not close\")\n # check that is not the last\n if l_id_e_x < l_x_as.shape[0] - 1:\n l_id_s_x = l_id_e_x + 1\n\n if not math.isclose(r_x_as[r_id_e_x], ext_x_as[-1]):\n logg.debug(f\"Right not close\")\n # check that is not the first\n if r_id_e_x > 0:\n r_id_s_x = r_id_e_x - 1\n\n shift_end = timer()\n logg.debug(f\"Time to find optimal shift: {shift_end - shift_start:.6f}\")\n\n return (\n best_shift,\n best_r_x_as,\n best_l_tang_y_as,\n l_id_s_x,\n r_id_s_x,\n l_p_ext,\n r_p_ext,\n ext_x_as,\n ext_y_as,\n )", "def shift_observable(self,M):\n u = np.array([[1]])\n for i in range(0,minsite):\n M[i] = np.tensordot(u, M[i],axes=(-1,1)).transpose(1,0,2)\n l,u = self.left_cannonical(M[i])\n M[i] = l", "def perform_fast_marching(D,start_points):\n D_temp = np.copy(D)\n D_temp[start_points[0,:],start_points[1,:]] = 0\n return fmm.distance(D_temp) + 1e-15", "def alignshift(self, hits):\n return hits.shift(self.horizon, axis=0) \\\n .align(self.truth, axis=0, join='right')[0]", "def place(data, times, out, startx, starty):\n if times > 3 or times < 0:\n times = times % 4\n s0, s1 = data.shape\n o0, o1 = out.shape\n if startx < 0 or starty < 0:\n raise ValueError('startx and starty must be postive')\n if times == 0 or times == 2:\n if startx + s0 > o0 or starty + s1 > o1:\n raise ValueError('out too small, will not fit')\n elif startx + s1 > o0 or starty + s0 > o1:\n raise ValueError('out to small, will not fit')\n\n if times == 0:\n for i in numba.prange(s0):\n for j in range(s1):\n out[startx + i, starty + j] = data[i, j]\n return\n elif times == 1:\n for j in numba.prange(s0):\n for i in range(s1):\n out[startx + i, starty + j] = data[j, s1 - i - 1]\n return\n elif times == 2:\n for i in numba.prange(s0):\n for j in range(s1):\n out[startx + i, starty + j] = data[s0 - i - 1, s1 - j - 1]\n return\n elif times == 3:\n for j in numba.prange(s0):\n for i in range(s1):\n out[startx + i, starty + j] = data[s0 - j - 1, i]\n return", "def test_lifted_index():\n pressure = np.array([1014., 1000., 997., 981.2, 947.4, 925., 914.9, 911.,\n 902., 883., 850., 822.3, 816., 807., 793.2, 770.,\n 765.1, 753., 737.5, 737., 713., 700., 688., 685.,\n 680., 666., 659.8, 653., 643., 634., 615., 611.8,\n 566.2, 516., 500., 487., 484.2, 481., 475., 460.,\n 400.]) * units.hPa\n temperature = np.array([24.2, 24.2, 24., 23.1, 21., 19.6, 18.7, 18.4,\n 19.2, 19.4, 17.2, 15.3, 14.8, 14.4, 13.4, 11.6,\n 11.1, 10., 8.8, 8.8, 8.2, 7., 5.6, 5.6,\n 5.6, 4.4, 3.8, 3.2, 3., 3.2, 1.8, 1.5,\n -3.4, -9.3, -11.3, -13.1, -13.1, -13.1, -13.7, -15.1,\n -23.5]) * units.degC\n dewpoint = np.array([23.2, 23.1, 22.8, 22., 20.2, 19., 17.6, 17.,\n 16.8, 15.5, 14., 11.7, 11.2, 8.4, 7., 4.6,\n 5., 6., 4.2, 4.1, -1.8, -2., -1.4, -0.4,\n -3.4, -5.6, -4.3, -2.8, -7., -25.8, -31.2, -31.4,\n -34.1, -37.3, -32.3, -34.1, -37.3, -41.1, -37.7, -58.1,\n -57.5]) * units.degC\n parcel_prof 
= parcel_profile(pressure, temperature[0], dewpoint[0])\n li = lifted_index(pressure, temperature, parcel_prof)\n assert_almost_equal(li, -7.9115691 * units.delta_degree_Celsius, 2)", "def filter(self):\n M, p, q = self.M, self.p, self.q\n x = self.x\n idx = len(self.x) - (p + 1)\n x_ = self.x_prev + (x[idx + p] - x[idx - q]) / M\n self.t_.append(self.t[idx])\n self.t_filtered.append(self.t[idx])\n self.x_.append(x_)\n self.x_filtered.append(x_)\n self.x_prev = x_", "def distort(signals, labels, amps_p, amps_t):\n assert len(signals.shape) == 3\n assert len(labels.shape) == 3\n segments0 = []\n labels0 = []\n label_len = labels.shape[2]\n assert label_len == 8 or label_len == 9\n for l in range(signals.shape[0]):\n segments1 = []\n labels1 = []\n for k in range(signals.shape[1]):\n signal = signals[l, k, :]\n label = labels[l, k, :]\n p = label[1]\n if label_len == 8:\n t = label[6]\n else:\n t = label[7]\n index_array = np.arange(0, len(signal))\n\n width_p_l = 2\n width_p_r = 0.5\n a_l_p = index_array[0:p]\n a_r_p = index_array[p:]\n center_l_p = max((label[1] + label[0]) / 2 - 5, 0)\n center_r_p = min((label[2] + label[1]) / 2 + 5, len(signal) - 1)\n a_l_p = 1 / (1 + np.exp(-(a_l_p - center_l_p) / width_p_l))\n a_r_p = 1 - 1 / (1 + np.exp(-(a_r_p - center_r_p) / width_p_r))\n\n width_t_l = 10\n width_t_r = 10\n a_l_t = index_array[0: t]\n a_r_t = index_array[t:]\n if label_len == 8:\n center_r_t = min((label[6] + label[7]) / 2 + 5, len(signal) - 1)\n else:\n center_r_t = min((label[7] + label[8]) / 2 + 5, len(signal) - 1)\n center_l_t = 2 * t - center_r_t\n a_l_t = 1 / (1 + np.exp(-(a_l_t - center_l_t) / width_t_l))\n a_r_t = 1 - 1 / (1 + np.exp(-(a_r_t - center_r_t) / width_t_r))\n\n for amp_p in amps_p:\n for amp_t in amps_t:\n a_p = np.concatenate([a_l_p * amp_p, a_r_p * amp_p])\n raw_segment_p = np.multiply(signal, a_p)\n a_t = np.concatenate([a_l_t * amp_t, a_r_t * amp_t])\n raw_segment_t = np.multiply(signal, a_t)\n raw_segment = raw_segment_p + raw_segment_t + signal\n segments1.append(np.expand_dims(raw_segment, axis=0))\n labels1.append(np.expand_dims(label, axis=0))\n\n segments0.append(np.concatenate(segments1, axis=0))\n labels0.append(np.concatenate(labels1, axis=0))\n\n return np.array(segments0), np.array(labels0)", "def lt_inplace(a,b):", "def add_shortest_route(df):\n\n df['gmaps_dist'] = df.apply(lambda row: gmaps.getTotDist((row['pick_lon'], row['pick_lat']), (row['drop_lon'], row['drop_lat'])), axis=1)\n df['gmaps_dur'] = df.apply(lambda row: gmaps.getTotDur((row['pick_lon'], row['pick_lat']), (row['drop_lon'], row['drop_lat'])), axis=1)", "def _data_move_in_mc_on_w(tik_inst, dst, src, data_pos_info):\n\n sub_h_size, sub_w_size, h_size, w_size, w_offset = data_pos_info\n data_cnt_one_block = _get_elment_cnt_one_block(src.dtype)\n sub_w_block = _ceil_div(sub_w_size, data_cnt_one_block)\n sub_h_align_block_size = sub_h_size // data_cnt_one_block * data_cnt_one_block\n sub_h_left = sub_h_size % data_cnt_one_block\n is_not_w_block_align = w_size % data_cnt_one_block > 0\n is_h_size_smaller_one_block = h_size < data_cnt_one_block\n\n def _move_in_one_more_block():\n \"\"\"\n move in one more block of h when h > sub_h and sub_h is not block align\n \"\"\"\n with tik_inst.for_range(0, sub_h_align_block_size) as sub_h_idx:\n tik_inst.data_move(dst[sub_w_block * data_cnt_one_block * sub_h_idx],\n src[w_offset + w_size * sub_h_idx], 0, 1, sub_w_block, 0, 0)\n # in order to avoid dirty data when multiple core\n with tik_inst.for_range(0, data_cnt_one_block) as sub_h_idx_1:\n 
tik_inst.data_move(dst[sub_w_block * data_cnt_one_block *\n (sub_h_align_block_size + sub_h_idx_1)],\n src[w_offset +\n w_size * (sub_h_size - data_cnt_one_block + sub_h_idx_1)],\n 0, 1, sub_w_block, 0, 0)\n\n with tik_inst.if_scope(is_not_w_block_align):\n # sub_h is block align or h is not enough one block\n with tik_inst.if_scope(tik.any(sub_h_left == 0, is_h_size_smaller_one_block)):\n with tik_inst.for_range(0, sub_h_size) as sub_h_idx:\n tik_inst.data_move(dst[sub_w_block * data_cnt_one_block * sub_h_idx],\n src[w_offset + w_size * sub_h_idx], 0, 1, sub_w_block, 0, 0)\n with tik_inst.else_scope():\n _move_in_one_more_block()\n\n with tik_inst.else_scope():\n with tik_inst.if_scope(tik.any(sub_h_left == 0, is_h_size_smaller_one_block)):\n src_strides = w_size // data_cnt_one_block - sub_w_block\n # mte max strides value is 65535\n with tik_inst.if_scope(src_strides > MTE_STRIDES):\n with tik_inst.for_range(0, sub_h_size) as sub_h_idx_2:\n tik_inst.data_move(dst[sub_w_size * sub_h_idx_2],\n src[w_offset + w_size * sub_h_idx_2],\n 0, 1, sub_w_block, 0, 0)\n with tik_inst.else_scope():\n tik_inst.data_move(dst, src[w_offset], 0, sub_h_size, sub_w_block, src_strides, 0)\n with tik_inst.else_scope():\n _move_in_one_more_block()", "def backward_committor_sensitivity(T, A, B, index):\n\n # This is really ugly to compute. The problem is, that changes in T induce changes in\n # the stationary distribution and so we need to add this influence, too\n # I implemented something which is correct, but don't ask me about the derivation\n\n n = len(T)\n\n trT = numpy.transpose(T)\n\n one = numpy.ones(n)\n eq = stationary_distribution(T)\n\n mEQ = numpy.diag(eq)\n mIEQ = numpy.diag(1.0 / eq)\n mSEQ = numpy.diag(1.0 / eq / eq)\n\n backT = numpy.dot(mIEQ, numpy.dot(trT, mEQ))\n\n qMat = forward_committor_sensitivity(backT, A, B, index)\n\n matA = trT - numpy.identity(n)\n matA = numpy.concatenate((matA, [one]))\n\n phiM = numpy.linalg.pinv(matA)\n\n phiM = phiM[:, 0:n]\n\n trQMat = numpy.transpose(qMat)\n\n d1 = numpy.dot(mSEQ, numpy.diagonal(numpy.dot(numpy.dot(trT, mEQ), trQMat), 0))\n d2 = numpy.diagonal(numpy.dot(numpy.dot(trQMat, mIEQ), trT), 0)\n\n psi1 = numpy.dot(d1, phiM)\n psi2 = numpy.dot(-d2, phiM)\n\n v1 = psi1 - one * numpy.dot(psi1, eq)\n v3 = psi2 - one * numpy.dot(psi2, eq)\n\n part1 = numpy.outer(eq, v1)\n part2 = numpy.dot(numpy.dot(mEQ, trQMat), mIEQ)\n part3 = numpy.outer(eq, v3)\n\n sensitivity = part1 + part2 + part3\n\n return sensitivity", "def ge_inplace(a,b):", "def restrict_lat( mv, latmin, latmax ):\n if latmin==-90: latmin = -91 # just to make sure\n if latmax==90: latmax = 91\n\n # axes\n latax,idx = latAxis2(mv)\n if latax is None: return None\n imin = min( [i for i in range(len(latax)) if latax[i]>=latmin and latax[i]<=latmax ] )\n imax = max( [i for i in range(len(latax)) if latax[i]>=latmin and latax[i]<=latmax ] )\n newlatax = latax.subaxis( imin, imax+1 )\n # TO DO: use latax.bounds (if present) for newlatax.bounds\n # At the moment, I'm working with data for which latax.bounds doesn't exist.\n # At the moment, we don't need bounds. 
This would get us through if necessary:\n # newlatax.bounds = newlatax.genGenericBounds()\n newaxes = list( allAxes(mv) ) # shallow copy\n newaxes[idx] = newlatax\n\n # shrink the data to match the shrunk lat axis\n newmv_shape = list( mv.shape )\n newmv_shape[idx] = imax+1 - imin\n if imin>0:\n nd = numpy.delete( mv.data, slice(0,imin), idx ) # doesn't change mv\n else:\n nd = mv\n lenidx = nd.shape[idx]\n if lenidx > newmv_shape[idx]:\n newdata = numpy.delete( nd.data, slice(imax+1-imin,lenidx), idx )\n else:\n newdata = nd\n\n # new variable\n newmv = cdms2.createVariable( newdata, copy=True, axes=newaxes, id=mv.id )\n newmv.units = mv.units\n return newmv", "def solve_tsp(list_of_locations, list_of_homes, starting_car_location, adjacency_matrix, params=[]):\n drop_off_dict = {}\n car_path = []\n home_map = {}\n home_indexes = convert_locations_to_indices(list_of_homes, list_of_locations)\n\n start = list_of_locations.index(starting_car_location)\n graph, msg = adjacency_matrix_to_graph(adjacency_matrix)\n all_paths = dict(nx.all_pairs_dijkstra(graph))\n\n start_in_home = start in home_indexes\n if start in home_indexes:\n home_indexes.remove(start)\n home_indexes.insert(0, start)\n home_count = 0;\n\n for home in home_indexes:\n #print(home, end = \" \")\n home_map[home_count] = home\n home_count += 1\n # Instantiate the data problem.\n #print(len(home_map))\n data = create_data_model(home_indexes, 0)\n\n # Create the routing index manager.\n manager = pywrapcp.RoutingIndexManager(len(data['locations']),\n data['num_vehicles'], data['depot'])\n\n #print(manager.NodeToIndex(15))\n # Create Routing Model.\n routing = pywrapcp.RoutingModel(manager)\n\n def distance_callback(from_index, to_index):\n \"\"\"Returns the distance between the two nodes.\"\"\"\n # Convert from routing variable Index to distance matrix NodeIndex.\n #print(home_map[to_index], end = \" \")\n from_index = manager.IndexToNode(from_index)\n to_index = manager.IndexToNode(to_index)\n dist_to = all_paths.get(home_map[from_index])[0][home_map[to_index]]\n #if from_index >= 25 or to_index >= 25:\n # print(\"from\" if from_index >= 25 else \"to\", end = \" \")\n #dist_to = all_paths[from_index][0][to_index]\n return dist_to\n\n transit_callback_index = routing.RegisterTransitCallback(distance_callback)\n\n # Define cost of each arc.\n routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)\n\n # Setting first solution heuristic.\n \"\"\"\n search_parameters = pywrapcp.DefaultRoutingSearchParameters()\n search_parameters.first_solution_strategy = (\n routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)\n \"\"\"\n\n search_parameters = pywrapcp.DefaultRoutingSearchParameters()\n search_parameters.local_search_metaheuristic = (\n routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)\n search_parameters.time_limit.seconds = 3\n #search_parameters.log_search = True\n\n # Solve the problem.\n assignment = routing.SolveWithParameters(search_parameters)\n\n # if assignment:\n # print_solution(manager, routing, assignment)\n # Print solution on console.\n\n if start in home_indexes:\n drop_off_dict[start] = [start]\n\n\n index = routing.Start(0)\n car_path.append(start)\n\n while not routing.IsEnd(index):\n previous_index = manager.IndexToNode(index)\n index = assignment.Value(routing.NextVar(index))\n\n car_path.pop();\n to_index = manager.IndexToNode(index)\n path_to = all_paths.get(home_map[previous_index])[1][home_map[to_index]]\n drop_off_dict[home_map[to_index]] = [home_map[to_index]]\n 
#print(to_index, end = ' ')\n car_path.extend(path_to)\n #route_distance += routing.GetArcCostForVehicle(previous_index, index, 0)\n # for i in car_path:\n # print(i)\n if start in drop_off_dict.keys() and not start_in_home:\n drop_off_dict.pop(start, None)\n\n return car_path, drop_off_dict", "def shift(self, arr, shift_amt, pre_context, post_context):\n result = arr[pre_context - shift_amt:arr.shape[0] - post_context -\n shift_amt, :]\n return result", "def compute_backpointers(s0, s1): #Tillverkar en array med backpointrs\r\n if s0 == None or s1 == None:\r\n raise Exception('Both s0 and s1 have to be set')\r\n rows = len(s0)+1 # antalet rader\r\n columns = len(s1)+1 # antalet kolumner\r\n\r\n ####### Tillverkar Levenshtein matrisen ########\r\n # Gör en tom matris med nollor\r\n distance = [[0 for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # Gör de yttre lagrerna i matrisen 0 -> len(str) vertikalt och horisontellt\r\n for i in range(1,rows):\r\n distance[i][0] = i\r\n for i in range(1,columns):\r\n distance[0][i] = i\r\n\r\n # Beräknar kostnaderna för varje plats inne i matrisen och sätter in dem\r\n # kollar om bokstaven på indexet i de två orden är samma i sådana fall kostar det 0\r\n # och skall ha samma värde som diagonalt innan, annars kostar det 1 från över eller underself.\r\n for column in range(1,columns):\r\n for row in range(1,rows): # kolla varje rad i vare column\r\n if s0[row-1] == s1[column -1]: # om det är samma bokstav kostar det 0\r\n c = 0\r\n else: # annars kostar det 2\r\n c = 2\r\n distance[row][column] = min(distance[row-1][column] + 1,distance[row][column-1] + 1,distance[row-1][column-1] + c)\r\n # raden över säger att det minsta värdet av över eller bredvid + 1 eller diagonalt innan plus (0 eller 2)\r\n # skall sättas in på platsen i matrisen.\r\n\r\n # det minsta avståndet är\r\n cost = distance[row][column]\r\n print(\"totalkostnaden är\")\r\n print(cost)\r\n\r\n\r\n ####### Tillverkar backptr-matrisen ########\r\n # Tillverkar en tom matris med [0,0] för till backptr-matrisen\r\n backptr = [[[0, 0] for y in range(len(s1)+1)] for x in range(len(s0)+1)]\r\n\r\n # går igenom platserna i Levenshtein matrisen bakirfrån\r\n for column in range(columns-1,0,-1):\r\n for row in range(rows-1,0,-1):\r\n # Om värdet till vänster är det minsta: peka vänster\r\n if distance[row][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row\r\n backptr[row][column][1] = column -1\r\n # Om värdet över är det minsta: peka upp\r\n if distance[row-1][column] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row -1\r\n backptr[row][column][1] = column\r\n # om värdet diagonalt är minst: peka på diagonalt\r\n if distance[row-1][column-1] == min(distance[row-1][column-1],distance[row][column-1],distance[row-1][column]):\r\n backptr[row][column][0] = row-1\r\n backptr[row][column][1] = column -1\r\n\r\n # Gör yttervärdena i matrisen, (OBS behövs ej)\r\n for i in range(0,rows):\r\n j = i-1\r\n backptr[i][0][0] = j\r\n backptr[i][0][1] = 0\r\n for i in range(0,columns):\r\n j = i-1\r\n backptr[0][i][1] = j\r\n backptr[0][i][0] = 0\r\n\r\n return backptr", "def minimum_inplace(a, b):", "def manual_930_adjust(raw: pd.DataFrame):\n # SC offset = UTC <-> Eastern offset\n sc_offsets = (\n raw.index.tz_convert(\"US/Eastern\").to_series().apply(lambda s: s.utcoffset())\n )\n # After Dec 31, 2020, the offset is 0\n sc_offsets[\"2020-12-31 
00:00:00+00\":] = timedelta(0)\n # make new data so we don't mess up other data indexing\n sc_dat = raw[get_columns(\"SC\", raw.columns)].copy()\n sc_idx = pd.DatetimeIndex(sc_dat.index + sc_offsets) # make shifted dates\n sc_dat.index = sc_idx # use shifted dates\n sc_dat = sc_dat[~sc_dat.index.duplicated(keep=\"first\")]\n # exchange old rows with new\n raw = raw.drop(columns=sc_dat.columns)\n raw = pd.concat([raw, sc_dat], axis=\"columns\")\n\n # PJM, CISO, TEPC: shift by one hour\n for ba in [\"PJM\", \"CISO\", \"TEPC\"]:\n cols = get_columns(ba, raw.columns)\n new = raw[cols].shift(1, freq=\"H\")\n raw = raw.drop(columns=cols)\n raw = pd.concat([raw, new], axis=\"columns\")\n\n # Interchange sign. Do before we change interchange time for PJM, because\n # identification of sign shift is based on raw data\n cols = get_int_columns(\n \"PJM\", raw.columns, [\"CPLE\", \"CPLW\", \"DUK\", \"LGEE\", \"MISO\", \"NYIS\", \"TVA\"]\n )\n raw.loc[raw.index < \"2019-10-31 04:00:00+00\", cols] = (\n raw.loc[raw.index < \"2019-10-31 04:00:00+00\", cols] * -1\n )\n\n # Interchange AZPS - SRP is wonky before 6/1/2020 7:00 UTC. Use SRP - AZPS (inverted)\n azps_srp = get_int_columns(\"AZPS\", raw.columns, [\"SRP\"])\n srp_azps = get_int_columns(\"SRP\", raw.columns, [\"AZPS\"])\n replacement = (raw.loc[:, srp_azps] * (-1)).rename(\n columns={srp_azps[0]: azps_srp[0]} # rename so Pandas will do the right thing\n )\n raw.loc[:\"2020-06-01 07:00:00+00\", azps_srp] = replacement[\n :\"2020-06-01 07:00:00+00\"\n ]\n # Update total interchange\n all_cols = [c for c in get_int_columns(\"AZPS\", raw.columns) if \"ALL\" not in c]\n total_col = \"EBA.AZPS-ALL.TI.H\"\n raw.loc[:\"2020-06-01 07:00:00+00\", total_col] = raw.loc[\n :\"2020-06-01 07:00:00+00\", all_cols\n ].sum(axis=1)\n\n # Interchange TEPC is uniformly lagged\n cols = get_int_columns(\"TEPC\", raw.columns)\n new = raw[cols].shift(-7, freq=\"H\")\n raw = raw.drop(columns=cols)\n raw = pd.concat([raw, new], axis=\"columns\")\n\n # Interchange PJM is lagged differently across DST boundary\n is_dst = raw.index.tz_convert(\"US/Eastern\").to_series().apply(\n lambda s: s.utcoffset()\n ) == timedelta(hours=-4)\n pjm_offset = [\n timedelta(hours=-3) if is_d else timedelta(hours=-4) for is_d in is_dst\n ]\n\n # make new data so we don't mess up other data indexing\n pjm_dat = raw[\n get_int_columns(\n \"PJM\",\n raw.columns,\n [\"CPLE\", \"CPLW\", \"DUK\", \"LGEE\", \"MISO\", \"NYIS\", \"TVA\", \"ALL\"],\n )\n ].copy()\n # make shifted dates\n pjm_idx = pd.DatetimeIndex(pjm_dat.index + pd.Series(pjm_offset))\n pjm_dat.index = pjm_idx # use shifted dates\n # delete duplicates\n pjm_dat = pjm_dat[~pjm_dat.index.duplicated(keep=\"first\")]\n # exchange old rows with new\n raw = raw.drop(columns=pjm_dat.columns)\n raw = pd.concat([raw, pjm_dat], axis=\"columns\")\n\n # Shift all -1 hour to make start-of-hour\n return raw.shift(-1, freq=\"H\")", "def forward(self,i,direction):\n \"\"\"the direction argument is used to dertermine the direcrtion of the forward function, designed for the equilibrium of the two classes of the datasets\"\"\"\n if(direction):\n self.mask_A = self.netG_Amask[self.orders[i]](self.real_A)\n self.A = self.netG_A[self.orders[i]](self.real_A)\n self.fake_B = self.A.mul(self.mask_A\n )+(1-self.mask_A).mul(self.real_A) # G_A(A)\n self.mask_B = self.netG_Bmask[self.orders[i]](self.fake_B)\n self.B = self.netG_B[self.orders[i]](self.fake_B)\n self.rec_A = self.B.mul(self.mask_B)+(1-self.mask_B).mul(self.fake_B) # G_B(G_A(A))\n else:\n self.mask_A 
= self.netG_Bmask[self.orders_rev[i]](self.real_A)\n self.A = self.netG_B[self.orders_rev[i]](self.real_A)\n self.fake_B = self.A.mul(self.mask_A\n )+(1-self.mask_A).mul(self.real_A) # G_A(A)\n self.mask_B = self.netG_Amask[self.orders_rev[i]](self.fake_B)\n self.B = self.netG_A[self.orders_rev[i]](self.fake_B)\n self.rec_A = self.B.mul(\n self.mask_B)+(self.mask_B).mul(1-self.fake_B) # G_B(G_A(A))", "def synchronize_data(self, is_master):\n\n if is_master:\n self.data_mapping.append(self.invalid_map)\n\n #\n # Default offsets before synchronization begins\n #\n if self.first_sync:\n if is_master and (len(self.band_1.timestamps) > 0):\n self.first_offset = len(self.band_1.timestamps) - 1\n elif (not is_master) and (len(self.band_2.timestamps) > 0):\n self.second_offset = len(self.band_2.timestamps) - 1\n\n #\n # Data needs to be synchronized\n #\n if (self.first_offset is not None) and (self.second_offset is not None):\n\n # Find minimum (timestamp) distance from first device, relative to second device's first timestamp\n # Note: Timestamps from both devices are enforced to be strictly increasing (elsewhere)\n #\n if self.first_sync:\n self.first_sync = False\n first_timestamp = self.band_1.timestamps[self.first_offset]\n sec_timestamp = self.band_2.timestamps[self.second_offset]\n min_distance = abs(first_timestamp - sec_timestamp)\n\n while self.first_offset > 0:\n temp_idx = self.first_offset - 1\n temp_timestamp = self.band_1.timestamps[temp_idx]\n new_distance = abs(temp_timestamp - sec_timestamp)\n\n if new_distance > min_distance:\n break\n else:\n self.first_offset = temp_idx\n min_distance = new_distance\n\n self.data_mapping[self.first_offset] = self.second_offset\n\n else:\n\n #\n # Need to wait for new data to arrive\n #\n if ((self.first_offset + 1 >= len(self.band_1.timestamps) - 1) or\n (self.second_offset + 1 >= len(self.band_2.timestamps) - 1)):\n return\n\n self.first_offset += 1\n self.second_offset += 1\n first_timestamp = self.band_1.timestamps[self.first_offset]\n sec_timestamp = self.band_2.timestamps[self.second_offset]\n min_distance = first_timestamp - sec_timestamp\n in_sync = False\n\n if abs(min_distance) < COPY_THRESHOLD:\n in_sync = True\n else:\n\n if min_distance > 0:\n while self.second_offset < len(self.band_2.timestamps) - 1:\n temp_timestamp = self.band_2.timestamps[self.second_offset]\n temp_distance = first_timestamp - temp_timestamp\n\n if abs(temp_distance) > abs(min_distance):\n break\n else:\n self.second_offset += 1\n min_distance = temp_distance\n\n if abs(min_distance) < COPY_THRESHOLD:\n in_sync = True\n\n # min_distance <= (-1) * COPY_THRESHOLD\n else:\n while self.first_offset < len(self.band_1.timestamps) - 1:\n temp_timestamp = self.band_1.timestamps[self.first_offset]\n temp_distance = temp_timestamp - sec_timestamp\n\n if abs(temp_distance) > abs(min_distance):\n break\n else:\n self.first_offset += 1\n min_distance = temp_distance\n\n if abs(min_distance) < COPY_THRESHOLD:\n in_sync = True\n\n # Data from two devices is (now) in sync\n if in_sync:\n self.data_mapping[self.first_offset] = self.second_offset", "def shift_1_cust(self, sol_in1, cust, c_loc, curr_temp, sol_type1, sa_lns):\r\n\r\n route_ing = copy.deepcopy(sol_in1[c_loc[0]])\r\n route_new = route_ing\r\n move_to_route = c_loc[0]\r\n orgn_type1 = sol_type1[c_loc[0]]\r\n origin_cost1 = check_violation(route_ing, orgn_type1)[1]\r\n route_ing.remove(cust) # move c in the current route\r\n new_type1 = route_type(route_ing)\r\n adjust_cost1 = check_violation(route_ing, 
new_type1)[1]\r\n best_cut_cost0 = -1000\r\n best_cut_cost = best_cut_cost0 # best cost cut of moving this customer\r\n for j, rou in enumerate(sol_in1):\r\n orgn_type2 = sol_type1[j]\r\n origin_cost2 = check_violation(rou, orgn_type2)[1]\r\n if j == c_loc[0]: # moving in the same route\r\n for k in range(1, len(route_ing)):\r\n if k == c_loc[1]:\r\n continue # do not put it at the original position\r\n rou_test = route_ing[:k] + [cust] + route_ing[k:]\r\n if check_violation(rou_test, orgn_type2)[0]:\r\n adjust_cost2 = check_violation(rou_test, orgn_type2)[1]\r\n cost_cut_test = origin_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n else: # moving to a different route\r\n for k in range(1, len(rou)):\r\n rou_test = rou[:k] + [cust] + rou[k:]\r\n\r\n if check_violation(rou_test, 5)[0]:\r\n new_type2 = route_type(rou_test)\r\n adjust_cost2 = check_violation(rou_test, new_type2)[1]\r\n cost_cut_test = origin_cost1 + origin_cost2 - adjust_cost1 - adjust_cost2\r\n if cost_cut_test > best_cut_cost:\r\n best_cut_cost = cost_cut_test\r\n route_new = rou_test\r\n move_to_route = j\r\n\r\n\r\n if best_cut_cost > 1e-5:\r\n # print('shift1 good', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n elif sa_lns and best_cut_cost < -1e-5:\r\n prb = random.uniform(0, 1)\r\n if np.exp(best_cut_cost/curr_temp) > prb:\r\n # print('shift1', best_cut_cost)\r\n sol_in1[move_to_route] = route_new\r\n sol_type1[move_to_route] = route_type(route_new)\r\n if move_to_route != c_loc[0]: # moving to a different route\r\n sol_in1[c_loc[0]] = route_ing\r\n sol_type1[c_loc[0]] = route_type(route_ing)\r\n\r\n\r\n\r\n # return sol_in1\r", "def _compute(self, w_beg, w_end, signal, station_availability):\n\n avail_idx = np.where(station_availability == 1)[0]\n sige = signal[0]\n sign = signal[1]\n sigz = signal[2]\n\n p_onset_raw, p_onset = self._compute_p_onset(sigz,\n self.sampling_rate)\n s_onset_raw, s_onset = self._compute_s_onset(sige, sign,\n self.sampling_rate)\n self.data.p_onset = p_onset\n self.data.s_onset = s_onset\n self.data.p_onset_raw = p_onset_raw\n self.data.s_onset_raw = s_onset_raw\n\n ps_onset = np.concatenate((self.data.p_onset, self.data.s_onset))\n ps_onset[np.isnan(ps_onset)] = 0\n\n p_ttime = self.lut.fetch_index(\"TIME_P\", self.sampling_rate)\n s_ttime = self.lut.fetch_index(\"TIME_S\", self.sampling_rate)\n ttime = np.c_[p_ttime, s_ttime]\n del p_ttime, s_ttime\n\n nchan, tsamp = ps_onset.shape\n\n pre_smp = int(round(self.pre_pad * int(self.sampling_rate)))\n pos_smp = int(round(self.post_pad * int(self.sampling_rate)))\n nsamp = tsamp - pre_smp - pos_smp\n\n # Prep empty 4-D coalescence map and run C-compiled ilib.migrate()\n ncell = tuple(self.lut.cell_count)\n map_4d = np.zeros(ncell + (nsamp,), dtype=np.float64)\n ilib.migrate(ps_onset, ttime, pre_smp, pos_smp, nsamp, map_4d,\n self.n_cores)\n\n # Prep empty coa and loc arrays and run C-compiled ilib.find_max_coa()\n max_coa = np.zeros(nsamp, np.double)\n grid_index = np.zeros(nsamp, np.int64)\n ilib.find_max_coa(map_4d, max_coa, grid_index, 0, nsamp, self.n_cores)\n\n # Get max_coa_norm\n sum_coa = np.sum(map_4d, axis=(0, 1, 2))\n max_coa_norm = max_coa / sum_coa\n max_coa_norm = max_coa_norm * map_4d.shape[0] * map_4d.shape[1] * \\\n 
map_4d.shape[2]\n\n tmp = np.arange(w_beg + self.pre_pad,\n w_end - self.post_pad + (1 / self.sampling_rate),\n 1 / self.sampling_rate)\n daten = [x.datetime for x in tmp]\n\n # Calculate max_coa (with correction for number of stations)\n max_coa = np.exp((max_coa / (len(avail_idx) * 2)) - 1.0)\n\n loc = self.lut.xyz2index(grid_index, inverse=True)\n\n return daten, max_coa, max_coa_norm, loc, map_4d" ]
[ "0.54937327", "0.5475339", "0.53958184", "0.5358197", "0.51967794", "0.5139213", "0.51332134", "0.512268", "0.5112759", "0.5105039", "0.5103316", "0.50753844", "0.5063595", "0.50197667", "0.5012734", "0.49768087", "0.4920332", "0.4919076", "0.4918012", "0.49093962", "0.49030602", "0.49000055", "0.48881462", "0.48879468", "0.48850128", "0.4865227", "0.4863151", "0.48604912", "0.48535845", "0.48479778" ]
0.64903045
0
plots an example of index shifting
def index_shift_plot():
    file = "Data/matfiles/20131221.mat"
    object = MatReader(file)
    assert not object.shifted, "shifter must be commented out"

    start = 1920  # index of 16 minutes
    stop = 3120   # index of 26 minutes

    NeA = object.NeA[start:stop]
    NeB = object.NeB[start:stop]
    NeC = object.NeC[start:stop]
    NeA = object.meanie(NeA, 5)
    NeB = object.meanie(NeB, 5)
    NeC = object.meanie(NeC, 5)
    secondsA = object.secondsA[start:stop]
    secondsB = object.secondsB[start:stop]
    secondsC = object.secondsC[start:stop]

    plt.plot(secondsB, NeB, "r")
    plt.plot(secondsB, NeA, "g")
    plt.plot(secondsB, NeC, "b")
    plt.xlabel("Seconds since midnight UTC of satellite B")
    plt.ylabel("Electron density [cm$^{-3}$]")
    plt.legend(["Satellite B", "Satellite A", "Satellite C"])
    plt.title("Electron density data before index-shift")
    plt.savefig("Figures/preshift_example.pdf")
    plt.show()

    object.shifter()

    NeA = object.NeA[start:stop]
    NeB = object.NeB[start:stop]
    NeC = object.NeC[start:stop]
    NeA = object.meanie(NeA, 5)
    NeB = object.meanie(NeB, 5)
    NeC = object.meanie(NeC, 5)
    secondsA = object.secondsA[start:stop]
    secondsB = object.secondsB[start:stop]
    secondsC = object.secondsC[start:stop]

    plt.plot(secondsB, NeB, "r")
    plt.plot(secondsB, NeA, "g")
    plt.plot(secondsB, NeC, "b")
    plt.xlabel("Seconds since midnight UTC of satellite B")
    plt.ylabel("Electron density [cm$^{-3}$]")
    plt.legend(["Satellite B", "Satellite A", "Satellite C"])
    plt.title("Electron density data after index-shift")
    plt.savefig("Figures/postshift_example.pdf")
    plt.show()
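This plot routine and the shifter record above both call helpers that the dump never defines: holefill, which puts a series on a gap-free per-second grid before slicing, and meanie, which smooths the densities before plotting. A plausible minimal reading of each is sketched below; the exact semantics (zero-filled gaps, a centred moving average) are assumptions inferred from how the helpers are called, not taken from the source.

import numpy as np

def holefill(values, seconds):
    # Assumed behaviour: spread `values` onto a contiguous one-sample-per-second
    # grid, leaving zeros in the gaps so np.nonzero() later flags valid samples.
    seconds = np.asarray(seconds, dtype=int)
    grid = np.zeros(seconds[-1] - seconds[0] + 1)
    grid[seconds - seconds[0]] = values
    return grid

def meanie(values, width):
    # Assumed behaviour: centred moving average over a `width`-sample window.
    kernel = np.ones(width) / width
    return np.convolve(values, kernel, mode="same")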
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def refractive_index(self):\n wd = np.arange(80,820,10)\n nd = self.boundary.imat.refractive_index(wd) \n\n plt.plot(wd, nd)\n\n return wd, nd", "def plotOfSlice(self,index=0):\n\t\tj=index;\n\t\t[n,m]=_np.shape(self._data)\n\t\ty=_np.zeros(n);\n\t\tfor i in range(0,n):\n\t\t\t\ty[i]=self._data[i][j]*1e4\n\t\tp1=_plot.plot(shotno=[self.shotno],\n\t\t\t\t\t title=self.title+', t='+str(self.time[j]*1000)+'ms.')\n\t\tphi=_np.linspace(0,_np.pi*2,100)\n\t\tn1Fit=self._x[0,j]+self._x[1,j]*_np.sin(phi)+self._x[2,j]*_np.cos(phi)\n\t\tn2Fit=self._x[0,j]+self._x[3,j]*_np.sin(2*phi)+self._x[4,j]*_np.cos(2*phi)\n\t\tfitTotal=self._x[0,j]+self._x[1,j]*_np.sin(phi)+self._x[2,j]*_np.cos(phi)+self._x[3,j]*_np.sin(2*phi)+self._x[4,j]*_np.cos(2*phi)\n\n\t\t# plot\n\t\tp1.addTrace(yData=y,xData=self._phi,\n\t\t\t\t\tmarker='x',linestyle='',yLegendLabel='raw') \n\t\tp1.addTrace(yData=n1Fit,xData=phi,\n\t\t\t\t\tyLegendLabel='n=1') \n\t\tp1.addTrace(yData=n2Fit,xData=phi,\n\t\t\t\t\tyLegendLabel='n=2') \n\t\tp1.addTrace(yData=fitTotal,xData=phi,\n\t\t\t\t\tyLegendLabel='Superposition') \n\t\treturn p1", "def animate_scatters(iteration, ax, pos):\n \n # Change viewing angle\n ax.view_init(pos[iteration][0], pos[iteration][1])", "def test_showalter_index():\n pressure = units.Quantity(np.array([931.0, 925.0, 911.0, 891.0, 886.9, 855.0, 850.0, 825.6,\n 796.3, 783.0, 768.0, 759.0, 745.0, 740.4, 733.0, 715.0,\n 700.0, 695.0, 687.2, 684.0, 681.0, 677.0, 674.0, 661.9,\n 657.0, 639.0, 637.6, 614.0, 592.0, 568.9, 547.4, 526.8,\n 500.0, 487.5, 485.0]), 'hPa')\n temps = units.Quantity(np.array([18.4, 19.8, 20.0, 19.6, 19.3, 16.8, 16.4, 15.1, 13.4,\n 12.6, 11.2, 10.4, 8.6, 8.3, 7.8, 5.8, 4.6, 4.2, 3.4, 3.0,\n 3.0, 4.4, 5.0, 5.1, 5.2, 3.4, 3.3, 2.4, 1.4, -0.4, -2.2,\n -3.9, -6.3, -7.6, -7.9]), 'degC')\n dewp = units.Quantity(np.array([9.4, 8.8, 6.0, 8.6, 8.4, 6.8, 6.4, 4.0, 1.0, -0.4, -1.1,\n -1.6, 1.6, -0.2, -3.2, -3.2, -4.4, -2.8, -3.6, -4.0, -6.0,\n -17.6, -25.0, -31.2, -33.8, -29.6, -30.1, -39.0, -47.6,\n -48.9, -50.2, -51.5, -53.3, -55.5, -55.9]), 'degC')\n\n result = showalter_index(pressure, temps, dewp)\n assert_almost_equal(result, units.Quantity(7.6024, 'delta_degC'), 4)", "def _index(orig, off):\n orig_x, orig_y = orig\n off_x, off_y = off\n return (orig_y - off_y) * self.ncols + (orig_x - off_x)", "def plot_tseries_index(*args, **kwargs) :\n data = kwargs.pop('data')\n return data.dropna().plot(y=args[0], **kwargs)", "def plotOfSlice(self,index=0):\n\t\tj=index;\n\t\t[n,m]=_np.shape(self._data)\n\t\ty=_np.zeros(n);\n\t\tfor i in range(0,n):\n\t\t\ty[i]=self._data[i][j]*1e4\n\t\tp1=_plot.plot(title='t=%.3f ms. 
%s ' % (self.time[j]*1000, self.title),\n\t\t\t\t\t shotno=self.shotno)\n\t\ttheta=_np.linspace(self._theta[0],self._theta[-1],100)\n#\t\tm0Fit=self._x[0,j]\n\t\tm1Fit=self._x[0,j]+self._x[1,j]*_np.sin(theta)+self._x[2,j]*_np.cos(theta)\n\t\tm2Fit=self._x[0,j]+self._x[3,j]*_np.sin(2*theta)+self._x[4,j]*_np.cos(2*theta)\n\t\tm3Fit=self._x[0,j]+self._x[5,j]*_np.sin(3*theta)+self._x[6,j]*_np.cos(3*theta)\n\t\tm4Fit=self._x[0,j]+self._x[7,j]*_np.sin(4*theta)+self._x[8,j]*_np.cos(4*theta)\n\t\tm5Fit=self._x[0,j]+self._x[9,j]*_np.sin(5*theta)+self._x[10,j]*_np.cos(5*theta)\n\t\tfitTotal=(-4.)*self._x[0,j]+m1Fit+m2Fit+m3Fit+m4Fit+m5Fit # the -4 corrects for the 4 extra offsets added from the preview 5 fits\n\t\t\n\t\tp1.addTrace(yData=y,xData=self._theta,\n\t\t\t\t\tlinestyle='',marker='.',yLegendLabel='raw')\n\t\tp1.addTrace(yData=m1Fit,xData=theta,\n\t\t\t\t\tyLegendLabel='m=1')\n\t\tp1.addTrace(yData=m2Fit,xData=theta,\n\t\t\t\t\tyLegendLabel='m=2')\n\t\tp1.addTrace(yData=m3Fit,xData=theta,\n\t\t\t\t\tyLegendLabel='m=3')\n\t\tp1.addTrace(yData=m4Fit,xData=theta,\n\t\t\t\t\tyLegendLabel='m=4')\n\t\tp1.addTrace(yData=m5Fit,xData=theta,\n\t\t\t\t\tyLegendLabel='m=5')\n\t\tp1.addTrace(yData=fitTotal,xData=theta,\n\t\t\t\t\tyLegendLabel='m=1-5')\n\t\treturn p1", "def setPlotShift(x,y):\n dislin.trfshf(x,y)", "def plot_index_vs_Z(ax, Z, index, index_name, alpha_fe=0.0, age=13.0):\n\n #reshape the arrays to get them into [alpha, age, Z]\n index=np.transpose(index.reshape(4, 6, 20), (0, 2, 1))\n Z=np.transpose(Z.reshape(4, 6, 20), (0, 2, 1))\n\n #get the indices corresponding to the alpha and age we want\n alpha_fe_ind=get_numpy_indices_for_params(alpha_fe=alpha_fe)\n age_ind=get_numpy_indices_for_params(age=age)\n \n \n \n \n ax.plot(Z[alpha_fe_ind, age_ind, :], index2[alpha_fe_ind, age_ind, :], label=r\"$\\alpha$/Fe={}, age={}\".format(alpha_fe, age), linewidth=3.0)\n\n ax.set_xlabel(\"Z/H\")\n ax.set_ylabel(\"{}\".format(index_name))", "def plot_cell_indices(adata, key='group', basis='diffmap', components=[1, 2],\n legend_loc='top_right', tools='pan, reset, wheel_zoom, save'):\n from bokeh.layouts import column\n from bokeh.models import ColumnDataSource\n from bokeh.plotting import figure, show\n from bokeh.palettes import viridis\n from bokeh.models.widgets.buttons import Button\n from bokeh.models.callbacks import CustomJS\n from bokeh.models import LabelSet, CategoricalColorMapper\n\n if key not in adata.obs:\n raise ValueError(f'{key} not found in adata.obs')\n\n if f'X_{basis}' not in adata.obsm_keys():\n raise ValueError(f'basis `X_{basis}` not found in adata.obsm')\n\n if not isinstance(components, type(np.array)):\n components = np.array(components)\n\n df = pd.DataFrame(adata.obsm[f'X_{basis}'][:, components - (0 if basis == 'diffmap' else 1)], columns=['x', 'y'])\n df[key] = list(adata.obs[key])\n df['index'] = range(len(df))\n\n palette = adata.uns.get(f'{key}_colors', viridis(len(df[key].unique())))\n\n p = figure(title=f'{key}', tools=tools)\n key_col = adata.obs[key].astype('category') if adata.obs[key].dtype.name != 'category' else adata.obs[key]\n for c, color in zip(key_col.cat.categories, palette):\n data = ColumnDataSource(df[df[key] == c])\n p.scatter(x='x', y='y', size=10, color=color, legend=str(c), source=data)\n\n p.legend.location = legend_loc\n p.xaxis.axis_label = f'{basis}_{components[0]}'\n p.yaxis.axis_label = f'{basis}_{components[1]}'\n\n source = ColumnDataSource(df)\n labels = LabelSet(x='x', y='y', text='index',\n x_offset=4, y_offset=4,\n level='glyph',\n 
source=source, render_mode='canvas')\n labels.visible = False\n p.add_layout(labels)\n\n button = Button(label='Toggle Indices', button_type='primary')\n button.callback = CustomJS(args=dict(l=labels), code='l.visible = !l.visible;')\n\n show(column(button, p))", "def animate(i):\r\n plot_x.set_data(history_samples[i][:, 0], history_samples[i][:, 1])", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='green')", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='red')", "def plot(self): \n\t\txandy = sep_xy(self.start, self.end)\n\t\tplt.plot(xandy[0], xandy[1], 'k-', lw=1, color='blue')", "def _plot_reconstructed_position(self, index):\n\n x = (0, np.real(np.exp(self.event_df.loc[index, 'angle'] * 1j)) * 400)\n y = (0, np.imag(np.exp(self.event_df.loc[index, 'angle'] * 1j)) * 400)\n angle = self.hv.Curve((x, y)).opts(\n color='orange',\n line_dash='dashed',\n xlim=(-350, 350),\n ylim=(-350, 350))\n return angle", "def plot_series_and_differences(series, ax, num_diff, title=''):\n plt.xticks(rotation=40)\n ax[0].plot(series.index, series)\n ax[0].set_title('Raw series: {}'.format(title))\n ax[0].set_xticklabels(labels=series.index.date, rotation=45)\n for i in range(1, num_diff+1):\n diff = series.diff(i)\n ax[i].plot(series.index, diff)\n ax[i].set_title('Difference # {}'.format(str(i)))\n ax[i].set_xticklabels(labels=series.index.date, rotation=45)", "def plotwithslices(data,figsize=(16,4),slices=5, RollingDistance=20,rollingwidth=1,stdwidth = 1, Testoutput = 0, doubleaxis=True):\r\n print(\"\\n -- Plotting Data with %i Slices and RollingDist of %i and %i-- \" %(slices,RollingDistance,RollingDistance*5))\r\n totallen=len(data)\r\n slicelens=int(totallen/slices)\r\n fig, ax1=plt.subplots(figsize=figsize)\r\n plt.plot(data,label = 'Data',linewidth = 2)\r\n plt.plot(data.rolling(RollingDistance).mean(),label = '%i R Mean' %(RollingDistance), linestyle=':', linewidth=rollingwidth)\r\n plt.plot(data.rolling(RollingDistance*5).mean(),label = '%i R Mean' %(RollingDistance*5), linestyle=':', linewidth=rollingwidth);plt.legend()\r\n if (doubleaxis==True):\r\n ax2 = ax1.twinx()\r\n plt.plot(data.rolling(RollingDistance).std(),color='r',linewidth=stdwidth, label = '%i R Std' %(RollingDistance-1))\r\n for i in range(slices-1): \r\n plt.axvline(x=data.index[int(totallen/slices)*(i+1)],color='r',linestyle='--',linewidth=1)\r\n plt.legend();\r\n plt.show()\r\n \r\n if (Testoutput == True):\r\n print(' Slice 0 S: %4i E: %4i ' %(0, totallen), end=\"\")\r\n DickeyFullerPrint(data[0:totallen])\r\n normalitytest(data[0:totallen])\r\n for i in range(slices): \r\n print('\\n Slice %2i S: %4i E: %4i ' %(i+1, slicelens*(i), slicelens*(i+1)), end=\"\")\r\n DickeyFullerPrint(data[slicelens*(i):slicelens*(i+1)])\r\n normalitytest(data[slicelens*(i):slicelens*(i+1)])\r\n return", "def Bubbleplot(data,indexes='show',headers='show',aspect_size=200., value_max=100,marker='s',\r\n ax=None,legend='normal',legend_title=None, color='blue'):\r\n data=data.T\r\n\r\n try:\r\n sns.set_style(\"whitegrid\")\r\n except:\r\n pass\r\n\r\n #NORMALIZE\r\n size=data.values.ravel()/float(value_max)\r\n\r\n y,x=np.arange(data.shape[0]-1,-1,-1), np.arange(data.shape[1])\r\n X,Y=plt.meshgrid(x,y)\r\n X,Y=X.ravel(), Y.ravel()\r\n\r\n\r\n if ax is None:\r\n ax = plt.subplot(111)\r\n\r\n ax.yaxis.tick_right()\r\n ax.scatter(X,Y,s=size*float(aspect_size),marker=marker, color= color)\r\n\r\n #AXES\r\n if headers is 
None:\r\n ax.set_xticklabels(ax.get_xticklabels(), visible=False)\r\n\r\n\r\n elif headers=='show':\r\n ax.set_xticks(x+0.5)\r\n ax.set_xticklabels(data.columns.values, ha='right',rotation=90)\r\n ax.set_xlim([-.5,max(x)+.5])\r\n\r\n elif headers=='hide':\r\n ax.set_xticks(x+0.5)\r\n ax.set_xlim([-.5,max(x)+.5])\r\n ax.set_xticklabels(data.columns.values,rotation=90, visible=False)\r\n\r\n\r\n\r\n if indexes=='show':\r\n ax.set_yticks(y-0.5)\r\n ax.set_yticklabels(data.index.values,va='bottom')\r\n else:\r\n ax.yaxis.set_visible(False)\r\n ax.set_ylim([-.5,max(y)+.5])\r\n\r\n #LEGEND\r\n if legend!='hide':\r\n sizes = np.array([0.1,0.5,1.])\r\n labels=(sizes*value_max).astype(type(value_max))\r\n\r\n\r\n lines=[plt.scatter([],[], s=s*aspect_size, edgecolors='none',marker=marker, color= color) for s in sizes]\r\n legend_prop=dict(ncol=len(labels), frameon=False,loc = 2,scatterpoints = 1,\r\n bbox_to_anchor=(1, 0), borderaxespad=0.25,title=legend_title)\r\n if legend=='normal':\r\n\r\n leg = ax.legend(lines, labels, fontsize=12, handlelength=2, borderpad = 1.8, handletextpad=1 ,**legend_prop)\r\n elif legend=='slim':\r\n leg = ax.legend(lines, labels, fontsize='x-small', borderpad = 0.7,**legend_prop)\r\n return ax", "def plot_index_vs_age(ax, age, index, index_name, alpha_fe=0.0, Z=0.0):\n\n index=index.reshape(4, 6, 20)\n age=age.reshape(4, 6, 20)\n\n alpha_fe_ind=get_numpy_indices_for_params(alpha_fe=alpha_fe)\n Z_ind=get_numpy_indices_for_params(Z=Z)\n\n \n ax.plot(age[alpha_fe_ind, Z_ind, :], index[alpha_fe_ind, Z_ind, :], label=r\"Z/H={}, $\\alpha$/Fe={}\".format(Z, alpha_fe))\n\n ax.set_xlabel(\"Age\")\n ax.set_ylabel(\"{}\".format(index_name))", "def pic_scatter():\n vu.pic_scatter(annual_report_indexes, 'annual_report')", "def plot_slide(\n df,\n values,\n cmap='viridis',\n colorbar=False,\n vmin=None,\n vmax=None,\n title=None,\n ax=None,\n figure=None,\n ticks=True,\n dsize=37,\n colorticks=None,\n row_key='row',\n col_key='col',\n cat_palette=None,\n spot_borders=False,\n border_color='black',\n border_size=0.3\n ):\n\n y = -1 * np.array(df[row_key])\n x = df[col_key]\n\n if ax is None:\n if colorbar:\n width = 7\n else:\n width = 5\n figure, ax = plt.subplots(\n 1,\n 1,\n figsize=(width,5)\n )\n\n #if spot_borders:\n # if border_size is None:\n # border_size = dsize+5\n # _plot_slide_one_color(\n # df,\n # border_color,\n # row_key=row_key,\n # col_key=col_key,\n # dsize=border_size,\n # ax=ax\n # )\n \n if cmap == 'categorical':\n if cat_palette is None:\n pal = PALETTE_MANY \n else:\n pal = cat_palette\n\n val_to_index = {\n val: ind\n for ind, val in enumerate(sorted(set(values)))\n }\n colors = [\n pal[val_to_index[val]]\n for val in values\n ]\n patches = [\n mpatches.Patch(color=pal[val_to_index[val]], label=val)\n for val in sorted(set(values))\n ]\n if spot_borders:\n ax.scatter(x,y,c=colors, s=dsize, edgecolors=border_color, linewidths=border_size)\n else:\n ax.scatter(x,y,c=colors, s=dsize)\n if colorbar:\n ax.legend(handles=patches, bbox_to_anchor=(1.05, 1), loc='upper left',)\n else:\n if spot_borders:\n im = ax.scatter(x,y,c=values, cmap=cmap, s=dsize, vmin=vmin, vmax=vmax, edgecolors=border_color, linewidths=border_size)\n else:\n im = ax.scatter(x,y,c=values, cmap=cmap, s=dsize, vmin=vmin, vmax=vmax)\n if colorbar:\n if vmin is None or vmax is None:\n figure.colorbar(im, ax=ax, ticks=colorticks)\n else:\n figure.colorbar(im, ax=ax, boundaries=np.linspace(vmin,vmax,100), ticks=colorticks)\n if title is not None:\n ax.set_title(title)\n if not ticks:\n 
ax.set_xticks([])\n ax.set_yticks([])", "def exercise_indexes():\n print(exercise_indexes.__doc__)\n print(\"The indexes of 'data' are:\", data.index)\n print(data, \"\\n\")\n print(\"Changing the indexes of 'data'\")\n print(data.reindex([2, 0, 1]), \"\\n\")\n print(\"Changing the indexes of 'data' randomly\")\n print(data.reindex(np.random.permutation(data.index)))", "def show(self):\n \n \n \n \n \n \n r = 4\n f, axarr = plt.subplots(r, r, figsize=(8,8))\n counter = 0\n for i in range(r):\n for j in range(r):\n temp = self.x[counter,:]\n counter += 1\n img = self.x[counter,:]\n axarr[i][j].imshow(img)\n #######################################################################\n # #\n # #\n # TODO: YOUR CODE HERE #\n # #\n # #\n #######################################################################", "def receptive_fields_visualization(W):\n W = W.cpu()\n \n hidden_dim = int(np.sqrt(W.shape[1]))\n side_dim = 10\n indices = [np.random.randint(0,W.shape[0]) for _ in range(side_dim**2)]\n \n fig = plt.figure(figsize=(10,10))\n for i in range(len(indices)):\n ax = fig.add_subplot(side_dim, side_dim, i+1, xticks = [], yticks = [])\n ax.imshow(W[i,:].view(hidden_dim, hidden_dim),cmap = 'gray')\n plt.subplots_adjust(wspace=0.01, hspace=0.01)\n #end\n \n plt.show()\n plt.close('all')", "def add_figure(self,sig,index,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(sig)", "def add_figure1(self,x,y,index=1,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(x,y)", "def analyze_on_axis(phase_space, id_begin, id_end, ds_slice, zplot):\n\n ps = phase_space[:, (id_begin-1):id_end, :]\n # print(np.shape(ps))\n # ps = ps[numpy.logical_not(numpy.isnan(ps))]\n\n x = ps[0, :, :]\n px = ps[1, :, :]\n y = ps[2, :, :]\n py = ps[3, :, :]\n\n id_on_axis = np.zeros((4, int(id_end-id_begin+1)))\n\n for n in range(int(id_end-id_begin+1)):\n x_this = x[n, :]\n px_this = px[n, :]\n y_this = y[n, :]\n py_this = py[n, :]\n\n # Remove all NAN elements in the phase space array\n x_this = x_this[np.logical_not(np.isnan(x_this))]\n px_this = px_this[np.logical_not(np.isnan(px_this))]\n y_this = y_this[np.logical_not(np.isnan(y_this))]\n py_this = py_this[np.logical_not(np.isnan(py_this))]\n\n ## Plot X\n plt.subplot(2, 2, 1)\n plt.plot(zplot[0:len(x_this)]*1e+6, x_this*1e+6)\n plt.ylabel('Position in X/ $\\mu$m', fontsize=10)\n\n ## Plot Y\n plt.subplot(2, 2, 2)\n plt.plot(zplot[0:len(y_this)]*1e+6, y_this*1e+6)\n plt.ylabel('Position in Y/ $\\mu$m', fontsize=10)\n\n ## Plot px\n plt.subplot(2, 2, 3)\n plt.plot(zplot[0:len(px_this)]*1e+6, px_this)\n plt.ylabel('Angle in X', fontsize=10)\n\n ## Plot py\n plt.subplot(2, 2, 4)\n plt.plot(zplot[0:len(py_this)]*1e+6, py_this)\n plt.ylabel('Angle in Y', fontsize=10)\n\n\n # plt.xlabel('Longitudianl Direction of the Bunch $s$/ $\\mu$m')\n # plt.title('First Undulator Section')\n # plt.title('Second Undulator Section')\n # plt.title('Third Undulator Section')\n\n id_on_axis[0, n] = np.argmin(np.abs(x_this))\n id_on_axis[1, n] = np.argmin(np.abs(px_this))\n id_on_axis[2, n] = np.argmin(np.abs(y_this))\n id_on_axis[3, n] = np.argmin(np.abs(py_this))\n\n fig = plt.gcf()\n fig.set_size_inches(13.5, 9)\n ax = plt.gca()\n ax.yaxis.get_major_formatter().set_powerlimits((0,1))\n fig.savefig('phase_space_U3_new.png', 
dpi=100)\n plt.show()\n\n\n s_on_axis = np.average(id_on_axis[2:4,:])*ds_slice\n\n return id_on_axis, s_on_axis", "def plot_data(self):", "def plot_spectrumxichange(self):\n countgood = 0 ; countbad = 0\n for idata in self.datarg:\n if idata[-1, 0] == 1.: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'b') \n countgood += 1\n print countgood , 'good solution'\n else: \n self.fig.axes[0].plot(idata[0:,0], idata[0: ,1] ,'r') \n print countbad, 'bad solution'\n countbad += 1\n print 'We found %g good solutions and %g tda startdistributions that broke down before xi = 1, we hope that\\'s what you expected' %(countgood,countbad)\n #Create custom artistsr[goodline,badline],['solution','breakdown']\n goodline = pl.Line2D((0,1),(0,0), color='b') \n badline = pl.Line2D((0,1),(0,0), color='r')\n self.layout(self.reader.depvar['depvar'] , r'energy spectrum (a.u.)' , tit = r'All tda start distributions $\\xi$' , legendhand = [goodline , badline] , legendlab = ['solution', 'breakdown'] )\n self.savefig('xispec')", "def axis(ind):\n return ind % 15, ind // 15" ]
[ "0.62381613", "0.6047222", "0.58126885", "0.576814", "0.57115436", "0.5666652", "0.56549144", "0.5643994", "0.56199855", "0.56148964", "0.5598178", "0.5544485", "0.5536026", "0.55217725", "0.5493668", "0.54794174", "0.5448919", "0.5438047", "0.5414845", "0.5381245", "0.5355028", "0.5319329", "0.5314977", "0.53014207", "0.52958196", "0.5279616", "0.527703", "0.5265322", "0.5264716", "0.52585375" ]
0.6937431
0
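A quick aside on the size-legend idiom in the dot-plot snippet among the negatives above: empty `plt.scatter([], [], s=...)` calls draw nothing but return handles whose marker sizes can be labelled with representative data values, giving a legend that explains what marker area means. The sketch below is a minimal, self-contained version of that pattern; `value_max` and `aspect_size` are assumed placeholders standing in for the snippet's own variables.

```python
import numpy as np
import matplotlib.pyplot as plt

value_max = 100.0    # assumed maximum of the plotted values
aspect_size = 200.0  # assumed value-to-marker-area scaling factor

sizes = np.array([0.1, 0.5, 1.0])        # fractions of the maximum
labels = (sizes * value_max).astype(int)  # legend labels in data units

fig, ax = plt.subplots()
# Empty scatters: nothing is drawn, but each returns a handle whose
# marker size encodes one of the reference values for the legend.
handles = [ax.scatter([], [], s=s * aspect_size, marker='o',
                      color='k', edgecolors='none') for s in sizes]
ax.legend(handles, labels, ncol=len(labels), frameon=False,
          scatterpoints=1, loc=2, bbox_to_anchor=(1, 0),
          borderaxespad=0.25, title='value')
plt.show()
```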
Generate markdown for checklist
def _markdown(checklist): checklist = json.load(open(checklist), object_pairs_hook=OrderedDict) mdFile = MdUtils(file_name='Ikigai-Checklist', title='PDP 2019 Checklist') mdFile.new_paragraph(checklist['overview']) sections = sorted(checklist['checklist'].values(), key=lambda s: int(s['no'])) for section in sections: mdFile.new_header(level=1, title=section['title']) for subject in section['subjects'].values(): mdFile.new_header(level=2, title=subject['title']) mdFile.new_paragraph(subject['description']) mdFile.new_paragraph("Reference: " + subject['provisions']) mdFile.new_paragraph("Functions: " + ", ".join(subject['functions'])) mdFile.new_paragraph("Groups: " + ", ".join(subject['groups'])) actions = [['No','Description', 'Tags', 'Check']] actions += [[a['no'], a['description'], ", ".join(a['tags']),''] for a in subject['actions'].values()] rows = len(actions) actions = flatten(actions) mdFile.new_table(columns=4, rows=rows, text=actions, text_align='left') mdFile.create_md_file()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checklist_line(line, day, checklist):\n\n output_lines = f'<b>Checklist items due before class on {day}</b>'\n output_lines += f'<ul class=\"checklist\" id=\"check-list-{day}\">'\n for item in line[2:].split(' | '):\n item = item.strip()\n item_without_markdown_link = re.sub(r\"\\[(.+)\\]\\(.+\\)\", r\"\\1\", item)\n if day in checklist and item_without_markdown_link in checklist[day]:\n output_lines += f'<li class=\"unchecked checked\">{item}</li>'\n else:\n output_lines += f'<li class=\"unchecked\">{item}</li>'\n if item_without_markdown_link == 'Practice (no points) worksheet demo.':\n print(output_lines)\n output_lines += '</ul>'\n return output_lines", "def pc_md_to_html(data_list):\n pcrenderer = PanelCodeRenderer()\n markdown = mistune.Markdown(renderer=pcrenderer)\n label = '<p style=\"font-size:x-small\"><em>panelcode: markdown processor (mistune)</em></p>\\n'\n return markdown(\"\\n\".join(data_list) + label)", "def test_with_big_lists(self):\n\n self.check_markdown(\n '''\n - List\n\n ??? note \"Details\"\n\n - Paragraph\n\n Paragraph\n\n - Paragraph\n\n paragraph\n ''',\n '''\n <ul>\n <li>\n <p>List</p>\n <details class=\"note\">\n <summary>Details</summary>\n <ul>\n <li>\n <p>Paragraph</p>\n <p>Paragraph</p>\n </li>\n <li>\n <p>Paragraph</p>\n <p>paragraph</p>\n </li>\n </ul>\n </details>\n </li>\n </ul>\n ''',\n True\n )", "def _generate_conclusion_markdown(self, data):\n pass", "def test_with_complex_lists(self):\n\n self.check_markdown(\n '''\n - List\n\n ??? note \"Details\"\n\n - Paragraph\n\n ??? note \"Details\"\n\n 1. Paragraph\n\n Paragraph\n ''',\n '''\n <ul>\n <li>\n <p>List</p>\n <details class=\"note\">\n <summary>Details</summary>\n <ul>\n <li>\n <p>Paragraph</p>\n <details class=\"note\">\n <summary>Details</summary>\n <ol>\n <li>\n <p>Paragraph</p>\n <p>Paragraph</p>\n </li>\n </ol>\n </details>\n </li>\n </ul>\n </details>\n </li>\n </ul>\n ''',\n True\n )", "def status_create_markdown():\n def write_properties(property_files):\n markdown.append(\"\\n| Property file | Keys | Keys translated | Keys not translated | % translated |\\n\")\n markdown.append(\"| ------------- | ---- | --------------- | ------------------- | ------------ |\\n\")\n\n for file in property_files:\n lines = read_file(file)\n keys = get_translations_as_dict(lines=lines)\n keys_missing_value = get_empty_keys(lines=lines)\n\n num_keys = len(keys)\n num_keys_missing_value = len(keys_missing_value)\n num_keys_translated = num_keys - num_keys_missing_value\n percent_translated = int((num_keys_translated / float(num_keys)) * 100) if num_keys != 0 else 0\n\n markdown.append(\"| {file} | {num_keys} | {num_keys_translated} | {num_keys_missing} | {percent_translated} |\\n\"\n .format(file=get_filename(filepath=file), num_keys=num_keys, num_keys_translated=num_keys_translated, num_keys_missing=num_keys_missing_value, percent_translated=percent_translated))\n\n markdown = []\n date = datetime.datetime.now().strftime(\"%Y-%m-%d %H:%M\")\n markdown.append(\"### Localization files status ({date} - Branch `{branch}` `{hash}`)\\n\".format(date=date, branch=get_current_branch(), hash=get_current_hash_short()))\n\n write_properties(property_files=get_all_jabref_properties())\n write_properties(property_files=get_all_menu_properties())\n write_file(STATUS_FILE, markdown)\n logger.ok(\"Current status written to {}\".format(STATUS_FILE))\n open_file(STATUS_FILE)", "def get_markdown(tools: List[Dict[str, Any]]) -> str:\n padding = 43\n\n # First Row\n markdown = \"| {name: <{padding}}| PHP 
\".format(name=\"Tool\", padding=padding)\n markdown += \" | PHP \".join(PHP_VERSIONS)\n markdown += \" |\\n\"\n # Second Row\n markdown += \"|{name:-<{padding}}-|\".format(name=\"\", padding=padding)\n for php in PHP_VERSIONS:\n markdown += \"---------|\"\n markdown += \"\\n\"\n for tool in tools:\n markdown += \"| {name: <{padding}}|\".format(\n name=\"[\" + tool[\"name\"] + \"][lnk_\" + tool[\"name\"] + \"]\", padding=padding\n )\n for php in PHP_VERSIONS:\n if str(php) in tool[\"exclude\"]:\n markdown += \" |\"\n else:\n markdown += \" ✓ |\"\n markdown += \"\\n\"\n\n markdown += \"\\n\"\n for tool in tools:\n markdown += \"[lnk_\" + tool[\"name\"] + \"]: \" + tool[\"dir\"] + \"\\n\"\n\n return markdown", "def test_description_markdown_with_custom_options() -> None:\n soup = generate_case(\n \"description_markdown\",\n GenerationConfiguration(\n markdown_options={\n \"cuddled-lists\": True,\n }\n ),\n )\n\n assert (\n str(soup.find(\"span\", class_=\"description\"))\n == \"\"\"<span class=\"description\"><p>DOC </p> <ul> <li>List 1</li> <li>List 2</li> </ul> </span>\"\"\"\n )", "def test_definition_list(self):\n\n self.check_markdown(\n '''\n - List\n\n ??? note \"Details\"\n\n Term\n\n : Definition\n\n More text\n\n : Another\n definition\n\n Even more text\n ''',\n '''\n <ul>\n <li>\n <p>List</p>\n <details class=\"note\">\n <summary>Details</summary>\n <dl>\n <dt>Term</dt>\n <dd>\n <p>Definition</p>\n <p>More text</p>\n </dd>\n <dd>\n <p>Another\n definition</p>\n <p>Even more text</p>\n </dd>\n </dl>\n </details>\n </li>\n </ul>\n ''',\n True\n )", "def markdown(value):\n return Markup(md(value))", "def _generate_markdown(self, case):\n # Lucene query generation\n lucene_dict = {\n \"sources\": case[\"detection\"][\"sources\"],\n \"data\": case[\"input_arguments\"],\n }\n case[\"lucene_query\"] = self.templates[\"lucene\"].render(lucene_dict)\n # AWS CLI command generation\n command_template = jinja2.Template(case[\"executors\"][\"sh\"][\"code\"])\n if case[\"input_arguments\"]:\n aws_cli_render_args = {}\n for arg in case[\"input_arguments\"]:\n aws_cli_render_args[arg] = case[\"input_arguments\"][arg][\"value\"]\n case[\"compiled_command\"] = command_template.render(aws_cli_render_args)\n else:\n case[\"compiled_command\"] = command_template.render()\n\n render_dict = {\"case\": case}\n return self.templates[\"markdown\"].render(render_dict)", "def to_markdown(self):\n s = \"[\" + self.label + \"]\"\n if self.is_reflink:\n s += \": \" + self.url\n else:\n s += \"(\" + self.url + \")\"\n return s", "def workbench_scenarios():\n return [\n (\"Markdown\",\n \"\"\"<markdowna />\n \"\"\")\n ]", "def description():\n #rule = request.url_rule\n #print(rule)\n file = open('./contest/content/description.md', 'r')\n rawText = file.read()\n file.close()\n content = Markup(markdown(rawText, \n extensions=['markdown.extensions.fenced_code', 'markdown.extensions.tables']))\n return render_template('markdowntemplate.html', \n title='Description', \n content=content)", "def gen_md(self):\n # https://pythonhosted.org/Markdown/extensions/index.html\n extensions = ['extra', 'codehilite', 'admonition',\n 'toc', 'smarty', 'sane_lists', 'wikilinks']\n # TODO\n extension_configs = {'toc': {\n 'anchorlink': False,\n 'permalink': False\n }\n }\n output_format = 'html5'\n md = markdown.Markdown(extensions=extensions,\n extension_configs=extension_configs,\n output_format=output_format)\n html = md.convert(self.md)\n toc = getattr(md, 'toc', '')\n if toc:\n toc = process_toc(toc)\n return html, toc", "def 
rules():\n file = open('./contest/content/rules.md', 'r')\n rawText = file.read()\n file.close()\n content = Markup(markdown(rawText, \n extensions=['markdown.extensions.fenced_code', 'markdown.extensions.tables']))\n return render_template('markdowntemplate.html', \n title='Rules', \n content=content)", "def task_generate_sc_markdown():\n for dept in Department.list():\n yield {\n 'name': dept.name,\n 'file_dep': [],\n 'targets': [dept.sc_markdown_path],\n 'actions': [dept.generate_sc_markdown],\n 'clean': True,\n 'uptodate': [False],\n }", "def paragraph_p12(candidates_tup, return_html=False):\n\n elim_list = \"\"\n for i, c in candidates_tup:\n elim_list += f\"<dt><b>{i:>2}: {c}</b></dt>\"\n \n text = \"\"\"<h3>* Insights from Problems 1 and 2</h3><p style=\"font-size:110%;\">\"\"\"\n text += \"\"\"On the basis of Figures 1 and 2, which show the number of new nodes created, \n and the time spent by each search function, respectively, the searches that are candidates \n for elimination for more complex problems are those at the intersection of the average-ranked \n costliest sets viz new nodes creation and search time.<br>These searches are:</p><pre><dl>\"\"\"\n text += f\"<dl>{elim_list}</dl></p></pre>\"\n \n if return_html:\n return text\n else:\n return Markdown(text)", "def html_from_markdown(content): \n\n \"\"\"\n Bold \n \"\"\" \n # Convert to <strong></strong>\n regx = re.compile(r\"^\\*\\*(.*?)\\*\\*\", re.MULTILINE)\n content = regx.sub(r\"<strong>\\1</strong>\",content) \n\n \"\"\"\n Link \n \"\"\" \n # Convert to <a>\n regx = re.compile(r\"\\[(.*)\\]\\((.*)\\)\", re.MULTILINE)\n content = regx.sub(r\"<a href=\\2>\\1</a>\",content) \n\n \"\"\"\n Paragraph \n \"\"\" \n new_content = \"\"\n for line in content.splitlines():\n line = re.sub(r'^(?!#|\\*)(.+)', r'<p>\\1</p>', line)\n new_content = new_content + line + \"\\n\"\n content = new_content\n\n \"\"\"\n Unordered lists\n \"\"\" \n new_content = \"\" \n u_list = False\n for line in content.splitlines():\n\n if len(line) > 0: # Check the line is not empty\n\n l = line[:2]\n if u_list and l!=\"* \": # check if there and unordered list to be closed.\n new_content = new_content + \"</ul>\"\n u_list = False # Flag indicates the unordered list has finished\n\n #if line[0]!=\"#\" and line[0]!=\"*\": # Add the paragraph to the line\n # line = \"<p>\" + line + \"</p>\\n\"\n\n if line[:2]==\"* \": # Check if the lins is an unordered list\n if not u_list: # Check if it´s the first item of the list\n line = \"<ul><li>\" + line [2:] + \"</li>\"\n u_list = True # Flag indicates the unordered list has started.\n else:\n line = \"<li>\" + line [2:] + \"</li>\"\n\n new_content = new_content + line + \"\\n\"\n\n if u_list : # in case still have an unordered list to be closed.\n new_content = new_content + \"</ul>\"\n\n content = new_content\n\n \"\"\"\n Headers \n \"\"\" \n # Convert to h1\n regx = re.compile(r\"^#\\s(.*?)\\n\", re.MULTILINE)\n content = regx.sub(r\"<h1>\\1</h1>\\n\",content) \n\n # Convert to h2\n regx = re.compile(r\"^##\\s(.*?)\\n\", re.MULTILINE)\n content = regx.sub(r\"<h2>\\1</h2>\\n\",content) \n\n # Convert to h3\n regx = re.compile(r\"^###\\s(.*?)\\n\", re.MULTILINE)\n content = regx.sub(r\"<h3>\\1</h3>\\n\",content) \n\n # Convert to h4\n regx = re.compile(r\"^####\\s(.*?)\\n\", re.MULTILINE)\n content = regx.sub(r\"<h4>\\1</h4>\\n\",content) \n\n # Convert to h5\n regx = re.compile(r\"^#####\\s(.*?)\\n\", re.MULTILINE)\n content = regx.sub(r\"<h5>\\1</h5>\\n\",content) \n\n # Convert to h6\n regx = 
re.compile(r\"^######\\s(.*?)\\n\", re.MULTILINE) \n content = regx.sub(r\"<h6>\\1</h6>\\n\",content) \n\n\n return content", "def enml_to_markdown(enml):\n pass", "def unordered_list_html(list_items: List[str]) -> str:\n return \"<ul>{}</ul>\".format(\"\".join(list_items))", "def build_readme(comic: str, readme: str) -> str:\n img = f\"{START_COMMENT}\\n{comic}\\n{END_COMMENT}\"\n return re.sub(listReg, img, readme)", "def markdown_table(self, which):\n if which == 'C':\n coef = 'C'\n elif which == 'c':\n coef = 'c'\n elif which == 'f':\n coef = 'f'\n str = '|order|'\n for i in range(1,N+1):\n str = str + '$%s_{%d}$ |' % (coef,i)\n str = str + '\\n|'\n for i in range(1,N+1):\n str = str + '-|'\n str = str + '\\n'\n for i in range(1,self.N+1):\n str = str + (self.dat[i]).markdown_row(self.N, which)\n return str", "def readme_md(cls):\n\n template = Helpers.File(Settings.readme_me_template).read()\n\n template = Helpers.Regex(\n template, r\"%%version%%\", replace_with=Settings.version\n ).replace()\n template = Helpers.Regex(\n template, r\"%%lenHosts%%\", replace_with=format(len(Settings.domains), \",d\")\n ).replace()\n template = Helpers.Regex(\n template, r\"%%lenIPs%%\", replace_with=format(len(Settings.ips), \",d\")\n ).replace()\n template = Helpers.Regex(\n template,\n r\"%%lenHostsIPs%%\",\n replace_with=format(len(Settings.ips) + len(Settings.domains), \",d\"),\n ).replace()\n\n print(\"Generation of %s\" % Settings.readme_md_file, end=\" \")\n Helpers.File(Settings.readme_md_file).write(template, overwrite=True)\n print(Settings.done)", "def _get_delta_markdown_string(self):\n markdown_string = \"\"\n\n if self.is_commit_test is True:\n if len(self.delta_fp_string_dict.delta_dict[\"commits\"]) > 0:\n markdown_string += (\n os.linesep\n + \"## Commit history SHA1 for this analysis:\"\n + os.linesep\n )\n for sha1_commit in self.delta_fp_string_dict.delta_dict[\"commits\"]:\n markdown_string += \"- `\" + sha1_commit + \"`\" + os.linesep\n markdown_string += os.linesep\n elif self.is_branch_test is True:\n if len(self.delta_fp_string_dict.delta_dict[\"branches\"]) > 0:\n markdown_string += (\n os.linesep + \"## Branches under analysis:\" + os.linesep\n )\n for branch in self.delta_fp_string_dict.delta_dict[\"branches\"]:\n markdown_string += \"- \" + branch + os.linesep\n markdown_string += os.linesep\n\n # Added files block\n markdown_string += \"## Added Files\" + os.linesep\n if len(self.delta_fp_string_dict.delta_dict[\"added\"]) > 0:\n for added_file in self.delta_fp_string_dict.delta_dict[\"added\"]:\n markdown_string += \"- \" + added_file + os.linesep\n else:\n markdown_string += \"- None\" + os.linesep\n\n # Deleted files block\n markdown_string += os.linesep + os.linesep + \"## Deleted Files\" + os.linesep\n if len(self.delta_fp_string_dict.delta_dict[\"deleted\"]) > 0:\n for deleted_file in self.delta_fp_string_dict.delta_dict[\"deleted\"]:\n markdown_string += \"- \" + deleted_file + os.linesep\n else:\n markdown_string += \"- None\" + os.linesep\n\n # Modified files block\n markdown_string += os.linesep + os.linesep + \"## Modified Files\" + os.linesep\n if len(self.delta_fp_string_dict.delta_dict[\"modified\"]) > 0:\n for modified_file in self.delta_fp_string_dict.delta_dict[\"modified\"]:\n markdown_string += \"- \" + modified_file + os.linesep\n else:\n markdown_string += \"- None\" + os.linesep\n\n # Project URL + version footer\n markdown_string += (\n os.linesep\n + os.linesep\n + \"---\"\n + os.linesep\n + \"[ufodiff](https://github.com/source-foundry/ufodiff) 
v\"\n + major_version\n + \".\"\n + minor_version\n + \".\"\n + patch_version\n )\n\n return markdown_string", "def test_get_checklists_html(self):\r\n response = self.client.get(self.checklists_url, HTTP_ACCEPT='text/html')\r\n self.assertContains(response, \"Getting Started With Studio\")\r\n # The HTML generated will define the handler URL (for use by the Backbone model).\r\n self.assertContains(response, self.checklists_url)", "def __str__(self):\n\n index_start = 1\n display_list = []\n max_name_len = 20\n additional_signs = 9\n\n heading = self.create_table_heading()\n underline = \"-\" * (max_name_len + additional_signs)\n display_list.append(\"\".join(heading))\n\n for index, item in enumerate(self.todo_items, index_start):\n display_list.append(\"| \" + str(index) + \" | \" + str(item.name) + \" \" + item.is_done_mark + \"\\n\")\n display_list.append(underline + \"\\n\")\n return \"\".join(display_list)", "def _generate_pr_comment_markdown(self, data):\n pass", "def test_markdown(self):\n with sphinx_build('pyexample'):\n with open('_build/text/docfx_yaml/example.example.Foo.yml') as yml_file:\n data = yaml.safe_load(yml_file)\n for item in data['items']:\n if item['uid'] == 'example.example.Foo.method_markdown':\n self.assertEqual(\n item['summary'],\n 'Check out our '\n '[site](http://sphinx-docfx-yaml.readthedocs.io/en/latest/)'\n ' for more info.',\n )", "def generate():\n\n # Verify if directory exists\n if not os.path.isdir(config.techniques_markdown_path):\n os.mkdir(config.techniques_markdown_path)\n\n #Write the technique index.html page\n with open(os.path.join(config.techniques_markdown_path, \"overview.md\"), \"w\", encoding='utf8') as md_file:\n md_file.write(config.technique_overview_md)\n\n for domain in config.domains:\n generate_domain_markdown(domain)" ]
[ "0.6669581", "0.6598202", "0.65573514", "0.65129644", "0.6231226", "0.62290126", "0.6188656", "0.6103123", "0.60594684", "0.6040152", "0.60326", "0.5967212", "0.5893543", "0.57857776", "0.5730832", "0.56839097", "0.5666371", "0.5651796", "0.565042", "0.5640402", "0.56363374", "0.5629589", "0.56254613", "0.5570131", "0.5568795", "0.55622387", "0.5561522", "0.5558062", "0.55531883", "0.5526697" ]
0.8231156
0
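The `_markdown` document above drives `mdutils` to turn a JSON checklist into a Markdown file. Below is a minimal sketch of the same pattern with made-up checklist content; it uses only calls that appear in the document itself (`MdUtils`, `new_header`, `new_paragraph`, `new_table`, `create_md_file`). Note that `new_table` expects a flat, row-major list of cells, which is why the document flattens its `actions` rows before the call.

```python
from mdutils.mdutils import MdUtils

md = MdUtils(file_name='Example-Checklist', title='Example Checklist')
md.new_header(level=1, title='Section 1')
md.new_paragraph('Short description of the section.')

# Header row plus one row per action, flattened row-major for new_table().
rows = [['No', 'Description', 'Check'],
        ['1', 'First action', ''],
        ['2', 'Second action', '']]
cells = [cell for row in rows for cell in row]
md.new_table(columns=3, rows=len(rows), text=cells, text_align='left')
md.create_md_file()  # writes Example-Checklist.md next to the script
```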
beam search for better preds (not alignment) with MAP. We add a length penalty to overcome poor precision; one erroneous prediction within the boundary will bring two edit errors.
def beam_search_MAP(logits, beam_size=20, lp=50.0): inf = 1e10 distribution = tf.nn.softmax(logits) B, T, V = distribution.shape aligns = tf.zeros([B * beam_size, 0], tf.int32) scores = tf.constant([0.0] + [-inf]*(beam_size-1), dtype=tf.float32) # [beam_size] scores = tf.tile(scores, multiples=[B]) # [B x beam_size] base_indices = tf.reshape(tf.tile(tf.range(B)[:, None], multiples=[1, beam_size]), [-1]) preds_prev = -1 * tf.ones([B * beam_size, beam_size], tf.int32) lengths = tf.zeros([B * beam_size], tf.int32) # marks_token = tf.zeros([B * beam_size, 0], tf.int32) prev = time() for t in range(T): p_prior = tf.ones([B*beam_size, V]) / V p_past = tf.ones([B*beam_size, V]) / V p_cur = tf.reshape(tf.tile(distribution[:, t, :], [1, beam_size]), [B*beam_size, V]) p_log = tf.math.log(p_past) + tf.math.log(p_cur) - tf.math.log(p_prior) scores_cur, preds_cur = tf.nn.top_k(p_log, k=beam_size, sorted=True) # current scores scores = scores[:, None] + scores_cur # [B x beam_size, beam_size] scores = tf.reshape(scores, [B, beam_size ** 2]) # current predicts marks_cur = tf.cast(tf.not_equal(preds_cur, preds_prev), tf.int32) # length penalty lengths = lengths[:, None] + marks_cur lp_score = tf.reshape(tf.pow((5+tf.cast(lengths, tf.float32))/6, lp), [B, beam_size ** 2]) # lp_score = 1.0 scores /= lp_score # pruning _, k_indices = tf.nn.top_k(scores, k=beam_size) k_indices = base_indices * beam_size * beam_size + tf.reshape(k_indices, [-1]) # [B x beam_size] # # update marks_token # marks_cur = tf.reshape(marks_cur, [-1]) # marks_cur = tf.gather(marks_cur, k_indices) # marks_token = tf.gather(marks_token, k_indices // beam_size) # marks_token = tf.concat([marks_token, marks_cur[:, None]], 1) # update lengths lengths = tf.reshape(lengths, [-1]) lengths = tf.gather(lengths, k_indices) # print('lengths:', (lengths - tf.reduce_sum((marks_token), -1)).numpy()) # Update scores scores = tf.reshape(scores, [-1]) scores = tf.gather(scores, k_indices) # update preds preds_prev = preds_cur preds_cur = tf.reshape(preds_cur, [-1]) preds_cur = tf.gather(preds_cur, k_indices) # k_indices: [0~B x beam_size x beam_size], preds: [0~B x beam_size] aligns = tf.gather(aligns, k_indices // beam_size) aligns = tf.concat([aligns, preds_cur[:, None]], -1) print(time() - prev, 's') prev = time() aligns = aligns[::beam_size, :] # marks_token = marks_token[::beam_size, :] # lengths = lengths[::beam_size] # max_len = tf.reduce_max(lengths) # predicts = [] # for b in range(B): # predict = tf.reshape(tf.gather(aligns[b, :], tf.where(marks_token[b, :]>0)), [-1]) # pad = tf.zeros([max_len - lengths[b]], tf.int32) # predicts.append(tf.concat([predict, pad], 0)) # tf.stack(predicts, 0) return aligns
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def beam_search(X, u, w, b, relLabels):\n\n candidate_paths = [[] for _ in range(10)] # contains the candidate label sets\n candidate_vals =[[] for _ in range(10)] # contains the label values (-1/1) for each candidate set\n candidate_scores = [0. for _ in range(10)]\n min_score = -1000\n\n iter = 0\n start = 0\n while True:\n # print(\"Iter: \", iter)\n intermediate_paths = {}\n # intermediate_paths_val = []\n interim_scores = []\n hash_table = {}\n\n cnt_paths = 0\n for cp in range(5):\n labels_curr = candidate_paths[cp]\n labels_val_curr = candidate_vals[cp]\n scores_curr = candidate_scores[cp]\n Y = -np.ones((10, 1))\n for lv in range(len(labels_val_curr)):\n Y[labels_curr[lv]] = labels_val_curr[lv]\n\n for l in range(10):\n candidate_interim = labels_curr[:]\n candidate_vals_interim = labels_val_curr[:]\n # if l in labels_curr:\n # continue\n\n temp_relLabels = []\n for lc in range(len(labels_curr)):\n temp_relLabels.extend(relLabels[labels_curr[lc]])\n\n # temp_relLabels = np.array(list(set(temp_relLabels)))\n temp_relLabels = np.array(list(set(relLabels[l]).intersection(set(labels_curr))))\n model_pos = returnModelVal(X, Y, 1.0, u[l], u[l], b[l][0], np.array(temp_relLabels))\n candidate_interim.append(l)\n\n if model_pos < 0:\n # print('hello')\n candidate_vals_interim.append(-1)\n interim_scores.append(-model_pos)\n else:\n candidate_vals_interim.append(1)\n interim_scores.append(model_pos)\n\n hash_table[cnt_paths] = candidate_interim\n intermediate_paths[cnt_paths] = candidate_vals_interim\n cnt_paths += 1\n # For the first iteration, just iterate once - all labels in one iteration\n if start == 0:\n start = 1\n break\n\n temp_paths = intermediate_paths\n interim_zip = zip(intermediate_paths, interim_scores)\n sorted_scores = sorted(interim_zip, key=lambda x: x[1], reverse=True)[:5]\n intermediate_paths, scores = zip(*sorted_scores)\n\n temp_cand = []\n temp_val = []\n for i in range(len(intermediate_paths)):\n temp_cand.append(hash_table[intermediate_paths[i]])\n temp_val.append(temp_paths[intermediate_paths[i]])\n # candidate_scores[i] += scores[i]\n\n candidate_paths = temp_cand\n candidate_vals = temp_val\n print(candidate_paths)\n print(candidate_vals)\n # print(scores)\n # candidate_scores = scores\n\n # Exit condition from loop\n # if max(interim_scores) < min_score:\n # break\n #\n # min_score = min(interim_scores)\n\n iter += 1\n if iter > 5:\n break\n\n candidate_dict = {}\n for i in range(5):\n for c in range(len(candidate_paths[i])):\n if candidate_paths[i][c] not in candidate_dict:\n candidate_dict[candidate_paths[i][c]] = candidate_vals[i][c]\n elif candidate_dict[candidate_paths[i][c]] != 2:\n if candidate_dict[candidate_paths[i][c]] != candidate_vals[i][c]:\n candidate_dict[candidate_paths[i][c]] = 2.\n\n print(candidate_dict)\n exit()\n return candidate_dict", "def _beam_search(self,\n source: mx.nd.NDArray,\n bucket_key: int,\n max_output_length: int) -> Tuple[mx.nd.NDArray, mx.nd.NDArray, mx.nd.NDArray, mx.nd.NDArray]:\n # Length of encoded sequence (may differ from initial input length)\n encoded_source_length = self.models[0].encoder.get_encoded_seq_len(bucket_key)\n utils.check_condition(all(encoded_source_length ==\n model.encoder.get_encoded_seq_len(bucket_key) for model in self.models),\n \"Models must agree on encoded sequence length\")\n\n lengths = mx.nd.zeros((self.beam_size, 1), ctx=self.context)\n finished = mx.nd.zeros((self.beam_size,), dtype='int32', ctx=self.context)\n # sequences: (beam_size, output_length)\n sequences = 
mx.nd.array(np.full((self.beam_size, max_output_length), C.PAD_ID), dtype='int32', ctx=self.context)\n # attentions: (beam_size, output_length, encoded_source_length)\n attentions = mx.nd.zeros((self.beam_size, max_output_length, encoded_source_length), ctx=self.context)\n\n # best_hyp_indices: row indices of smallest scores (ascending).\n best_hyp_indices = mx.nd.zeros((self.beam_size,), ctx=self.context)\n # best_word_indices: column indices of smallest scores (ascending).\n best_word_indices = mx.nd.zeros((self.beam_size,), ctx=self.context, dtype='int32')\n # scores_accumulated: chosen smallest scores in scores (ascending).\n scores_accumulated = mx.nd.zeros((self.beam_size, 1), ctx=self.context)\n\n # reset all padding distribution cells to np.inf\n self.pad_dist[:] = np.inf\n\n # (0) encode source sentence\n model_states = self._encode(source, bucket_key)\n\n for t in range(0, max_output_length):\n\n # (1) obtain next predictions and advance models' state\n # scores: (beam_size, target_vocab_size)\n # attention_scores: (beam_size, bucket_key)\n scores, attention_scores, model_states = self._decode_step(model_states)\n\n # (2) compute length-normalized accumulated scores in place\n if t == 0: # only one hypothesis at t==0\n scores = scores[:1]\n else:\n # renormalize scores by length+1 ...\n scores = (scores + scores_accumulated * lengths) / (lengths + 1)\n # ... but not for finished hyps.\n # their predicted distribution is set to their accumulated scores at C.PAD_ID.\n self.pad_dist[:, C.PAD_ID] = scores_accumulated\n # this is equivalent to doing this in numpy:\n # self.pad_dist[finished, :] = np.inf\n # self.pad_dist[finished, C.PAD_ID] = scores_accumulated[finished]\n scores = mx.nd.where(finished, self.pad_dist, scores)\n\n # (3) get beam_size winning hypotheses\n # TODO(fhieber): once mx.nd.topk is sped-up no numpy conversion necessary anymore.\n (best_hyp_indices[:], best_word_indices_np), scores_accumulated_np = utils.smallest_k(scores.asnumpy(),\n self.beam_size)\n scores_accumulated[:] = np.expand_dims(scores_accumulated_np, axis=1)\n best_word_indices[:] = best_word_indices_np\n\n # (4) get hypotheses and their properties for beam_size winning hypotheses (ascending)\n sequences = mx.nd.take(sequences, best_hyp_indices)\n lengths = mx.nd.take(lengths, best_hyp_indices)\n finished = mx.nd.take(finished, best_hyp_indices)\n attention_scores = mx.nd.take(attention_scores, best_hyp_indices)\n attentions = mx.nd.take(attentions, best_hyp_indices)\n\n # (5) update best hypotheses, their attention lists and lengths (only for non-finished hyps)\n sequences[:, t] = mx.nd.expand_dims(best_word_indices, axis=1)\n attentions[:, t, :] = mx.nd.expand_dims(attention_scores, axis=1)\n lengths += mx.nd.cast(1 - mx.nd.expand_dims(finished, axis=1), dtype='float32')\n\n # (6) determine which hypotheses in the beam are now finished\n finished = ((best_word_indices == C.PAD_ID) + (best_word_indices == self.vocab_target[C.EOS_SYMBOL]))\n if mx.nd.sum(finished).asscalar() == self.beam_size: # all finished\n break\n\n # (7) update models' state with winning hypotheses (ascending)\n for ms in model_states:\n ms.sort_state(best_hyp_indices, best_word_indices)\n\n return sequences, attentions, scores_accumulated, lengths", "def _forward_beam_search(self, state: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:\n batch_size = state[\"source_mask\"].size()[0]\n start_predictions = state[\"source_mask\"].new_full(\n (batch_size,), fill_value=self._start_index)\n\n # shape (all_top_k_predictions): 
(batch_size, beam_size, num_decoding_steps)\n # shape (log_probabilities): (batch_size, beam_size)\n all_top_k_predictions, log_probabilities = self._beam_search.search(\n start_predictions, state, self.take_step)\n\n output_dict = {\n \"class_log_probabilities\": log_probabilities,\n \"predictions\": all_top_k_predictions\n }\n return output_dict", "def translate_beam_search(source_sentence: List[int], model: Seq2SeqAttentionModel,\n beam_width: int, max_length=10) -> Tuple[List[int], float]:\n encoder_hiddens = encode_all(source_sentence, model)\n beam_elems = []\n # stack x hid_dim\n prev_hidden = encoder_hiddens[-1]\n prev_context = torch.zeros(model.hidden_dim)\n\n beam_elems= [([SOS_token], float(0), prev_hidden, prev_context)]\n candidate_translations = []\n available_width = beam_width\n\n for i in range(max_length):\n if available_width >0:\n candidate_beam_elems = []\n for b in range(len(beam_elems)):\n prev_predict, prev_log_prob, prev_hidden, prev_context = beam_elems[b]\n probs, prev_hidden, prev_context, _ = decode(prev_hidden, encoder_hiddens, prev_context,\n prev_predict[-1], model)\n log_probs = torch.log(probs)\n top_log_probs, top_preds = torch.topk(log_probs,available_width)\n for k in range(len(top_log_probs)):\n curr_log_prob = prev_log_prob + top_log_probs[k].item()\n curr_pred_list = prev_predict + [top_preds[k].item()]\n candidate = (curr_pred_list, curr_log_prob, prev_hidden, prev_context)\n candidate_pos = -1\n for pos in range(len(candidate_beam_elems)):\n if curr_log_prob > candidate_beam_elems[pos][1]:\n candidate_pos = pos\n if not candidate_pos == -1:\n candidate_beam_elems.insert(candidate_pos+1, candidate)\n elif len(candidate_beam_elems) < available_width:\n candidate_beam_elems.append(candidate)\n if len(candidate_beam_elems) > available_width:\n candidate_beam_elems.pop()\n\n beam_elems = []\n for candidate in candidate_beam_elems:\n if candidate[0][-1] == EOS_token or i==(max_length-1):\n candidate_translations.append(candidate)\n available_width -= 1\n else:\n beam_elems.append(candidate)\n\n max_prob = -math.inf\n best_elem = -1\n for pos in range(len(candidate_translations)):\n norm_prob = candidate_translations[pos][1]/len(candidate_translations[pos][0])\n if norm_prob > max_prob:\n max_prob = norm_prob\n best_elem = pos\n\n # remove SOS token from the beginning\n del candidate_translations[best_elem][0][0]\n\n return candidate_translations[best_elem][0], candidate_translations[best_elem][1]", "def beam_search(decoding_function,\n initial_ids,\n initial_memories,\n int_dtype,\n float_dtype,\n translation_maxlen,\n batch_size,\n beam_size,\n vocab_size,\n eos_id,\n normalization_alpha):\n\n def _extend_hypotheses(current_time_step, alive_sequences, alive_log_probs, alive_memories):\n \"\"\" Generates top-k extensions of the alive beam candidates from the previous time-step, which are subsequently\n used to update the alive and finished sets at the current time-step; top-k = 2 s* beam_size \"\"\"\n # Get logits for the current prediction step\n next_ids = alive_sequences[:, :, -1] # [batch_size, beam_size]\n next_ids = tf.transpose(next_ids, [1, 0]) # [beam_size, batch_size]; transpose to match model\n next_ids = tf.reshape(next_ids, [-1, 1]) # [beam_size * batch_size, 1]\n\n step_logits, alive_memories = decoding_function(next_ids, current_time_step, alive_memories)\n step_logits = tf.reshape(step_logits, [beam_size, batch_size, -1]) # [beam_size, batch_size, num_words]\n step_logits = tf.transpose(step_logits, [1, 0, 2]) # [batch_size, beam_size, 
num_words]; transpose back\n\n # Calculate the scores for all possible extensions of alive hypotheses\n candidate_log_probs = tf.nn.log_softmax(step_logits, axis=-1)\n curr_log_probs = candidate_log_probs + tf.expand_dims(alive_log_probs, axis=2)\n\n # Apply length normalization\n length_penalty = 1.\n if normalization_alpha > 0.:\n length_penalty = ((5. + tf.to_float(current_time_step)) ** normalization_alpha) / \\\n ((5. + 1.) ** normalization_alpha)\n curr_scores = curr_log_probs / length_penalty\n\n # Select top-k highest scores\n flat_curr_scores = tf.reshape(curr_scores, [batch_size, -1])\n top_scores, top_ids = tf.nn.top_k(flat_curr_scores, k=beam_size ** 2)\n\n # Recover non-normalized scores for tracking\n top_log_probs = top_scores * length_penalty\n\n # Determine the beam from which the top-scoring items originate and their identity (i.e. token-ID)\n top_beam_indices = top_ids // vocab_size\n top_ids %= vocab_size\n\n # Determine the location of top candidates\n batch_index_matrix = compute_batch_indices(batch_size, beam_size ** 2) # [batch_size, beam_size * factor]\n top_coordinates = tf.stack([batch_index_matrix, top_beam_indices], axis=2)\n\n # Extract top decoded sequences\n top_sequences = tf.gather_nd(alive_sequences, top_coordinates) # [batch_size, beam_size * factor, sent_len]\n top_sequences = tf.concat([top_sequences, tf.expand_dims(top_ids, axis=2)], axis=2)\n\n # Extract top memories\n top_memories = gather_memories(alive_memories, top_coordinates)\n # top_memories = alive_memories\n\n # Check how many of the top sequences have terminated\n top_eos_flags = tf.equal(top_ids, eos_id) # [batch_size, beam_size * factor]\n\n # Diversify beams at the outset of the generation\n init_top_sequences = tf.reshape(\n tf.reshape(top_sequences, [batch_size, beam_size, beam_size, -1])[:, :, 1, :], [batch_size, beam_size, -1])\n init_top_log_probs = \\\n tf.reshape(tf.reshape(top_log_probs, [batch_size, beam_size, beam_size])[:, :, 1], [batch_size, beam_size])\n init_top_scores = \\\n tf.reshape(tf.reshape(top_scores, [batch_size, beam_size, beam_size])[:, :, 1], [batch_size, beam_size])\n init_top_eos_flags = \\\n tf.reshape(tf.reshape(top_eos_flags, [batch_size, beam_size, beam_size])[:, :, 1], [batch_size, beam_size])\n\n top_sequences, top_log_probs, top_scores, top_eos_flags = \\\n tf.cond(tf.equal(current_time_step, 1),\n lambda: [init_top_sequences, init_top_log_probs, init_top_scores, init_top_eos_flags],\n lambda: [top_sequences, top_log_probs, top_scores, top_eos_flags])\n\n return top_sequences, top_log_probs, top_scores, top_eos_flags, top_memories\n\n def _update_alive(top_sequences, top_scores, top_log_probs, top_eos_flags, top_memories):\n \"\"\" Assembles an updated set of unfinished beam candidates from the set of top-k translation hypotheses\n generated at the current time-step; top-k for the incoming sequences in 2 * beam_size \"\"\"\n # Exclude completed sequences from the alive beam by setting their scores to a large negative value\n top_scores += tf.to_float(top_eos_flags) * (-1. 
* 1e7)\n # Update the alive beam\n updated_alive_sequences, updated_alive_log_probs, updated_alive_eos_flags, updated_alive_memories = \\\n gather_top_sequences(top_sequences,\n top_scores,\n top_log_probs,\n top_eos_flags,\n top_memories,\n beam_size,\n batch_size,\n 'alive')\n\n return updated_alive_sequences, updated_alive_log_probs, updated_alive_eos_flags, updated_alive_memories\n\n def _update_finished(finished_sequences, finished_scores, finished_eos_flags, top_sequences, top_scores,\n top_eos_flags):\n \"\"\" Updates the list of completed translation hypotheses (i.e. ones terminating in <EOS>) on the basis of the\n top-k hypotheses generated at the current time-step; top-k for the incoming sequences in 2 * beam_size \"\"\"\n # Match the length of the 'finished sequences' tensor with the length of the 'finished scores' tensor\n zero_padding = tf.zeros([batch_size, beam_size, 1], dtype=int_dtype)\n finished_sequences = tf.concat([finished_sequences, zero_padding], axis=2)\n # Exclude incomplete sequences from the finished beam by setting their scores to a large negative value\n top_scores += (1. - tf.to_float(top_eos_flags)) * (-1. * 1e7)\n # Combine sequences finished at previous time steps with the top sequences from current time step, as well as\n # their scores and eos-flags, for the selection of a new, most likely, set of finished sequences\n top_finished_sequences = tf.concat([finished_sequences, top_sequences], axis=1)\n top_finished_scores = tf.concat([finished_scores, top_scores], axis=1)\n top_finished_eos_flags = tf.concat([finished_eos_flags, top_eos_flags], axis=1)\n # Update the finished beam\n updated_finished_sequences, updated_finished_scores, updated_finished_eos_flags, _ = \\\n gather_top_sequences(top_finished_sequences,\n top_finished_scores,\n top_finished_scores,\n top_finished_eos_flags,\n None,\n beam_size,\n batch_size,\n 'finished')\n\n return updated_finished_sequences, updated_finished_scores, updated_finished_eos_flags\n\n def _decoding_step(current_time_step,\n alive_sequences,\n alive_log_probs,\n finished_sequences,\n finished_scores,\n finished_eos_flags,\n alive_memories):\n \"\"\" Defines a single step of greedy decoding. \"\"\"\n # 1. Get the top sequences/ scores/ flags for the current time step\n top_sequences, top_log_probs, top_scores, top_eos_flags, top_memories = \\\n _extend_hypotheses(current_time_step,\n alive_sequences,\n alive_log_probs,\n alive_memories)\n\n # 2. Update the alive beam\n alive_sequences, alive_log_probs, alive_eos_flags, alive_memories = \\\n _update_alive(top_sequences,\n top_scores,\n top_log_probs,\n top_eos_flags,\n top_memories)\n\n # 3. Update the finished beam\n finished_sequences, finished_scores, finished_eos_flags = \\\n _update_finished(finished_sequences,\n finished_scores,\n finished_eos_flags,\n top_sequences,\n top_scores,\n top_eos_flags)\n\n return current_time_step + 1, alive_sequences, alive_log_probs, finished_sequences, finished_scores, \\\n finished_eos_flags, alive_memories\n\n def _continue_decoding(curr_time_step,\n alive_sequences,\n alive_log_probs,\n finished_sequences,\n finished_scores,\n finished_eos_flags,\n alive_memories):\n \"\"\" Returns 'True' if all of the sequences in the extended hypotheses exceeded the maximum specified\n length or if none of the extended hypotheses are more likely than the lowest scoring finished hypothesis. 
\"\"\"\n # Check if the maximum prediction length has been reached\n length_criterion = tf.greater(curr_time_step, translation_maxlen)\n\n # Otherwise, check if the most likely alive hypothesis is less likely than the least probable completed sequence\n # Calculate the best possible score of the most probably sequence currently alive\n max_length_penalty = 1.\n if normalization_alpha > 0.:\n max_length_penalty = ((5. + tf.to_float(translation_maxlen)) ** normalization_alpha) / \\\n ((5. + 1.) ** normalization_alpha)\n\n highest_alive_score = alive_log_probs[:, 0] / max_length_penalty\n # Calculate the score of the least likely sequence currently finished\n lowest_finished_score = tf.reduce_min(finished_scores * tf.cast(finished_eos_flags, float_dtype), axis=1)\n # Account for the case in which none of the sequences in 'finished' have terminated so far;\n # In that case, each of the unfinished sequences is assigned a high negative probability, so that the\n # termination condition is not met\n mask_unfinished = (1. - tf.to_float(tf.reduce_any(finished_eos_flags, 1))) * (-1. * 1e7)\n lowest_finished_score += mask_unfinished\n\n # Check is the current highest alive score is lower than the current lowest finished score\n likelihood_criterion = tf.reduce_all(tf.greater(lowest_finished_score, highest_alive_score))\n\n # Decide whether to continue the decoding process\n return tf.logical_not(tf.logical_or(length_criterion, likelihood_criterion))\n\n # Initialize alive sequence and score trackers and expand to beam size\n alive_log_probs = tf.zeros([batch_size, beam_size])\n\n # Initialize decoded sequences\n alive_sequences = tf.expand_dims(batch_to_beam(initial_ids, beam_size), 2)\n\n # Initialize finished sequence, score, and flag trackers\n finished_sequences = tf.expand_dims(batch_to_beam(initial_ids, beam_size), 2)\n finished_scores = tf.ones([batch_size, beam_size]) * (-1. * 1e7) # initialize to a low value\n finished_eos_flags = tf.zeros([batch_size, beam_size], dtype=tf.bool)\n\n # Initialize memories\n alive_memories = initial_memories\n\n # Execute the auto-regressive decoding step via while loop\n _, alive_sequences, alive_log_probs, finished_sequences, finished_scores, finished_eos_flags, _ = \\\n tf.while_loop(\n _continue_decoding,\n _decoding_step,\n [tf.constant(1), alive_sequences, alive_log_probs, finished_sequences, finished_scores, finished_eos_flags,\n alive_memories],\n shape_invariants=[tf.TensorShape([]),\n tf.TensorShape([None, None, None]),\n alive_log_probs.get_shape(),\n tf.TensorShape([None, None, None]),\n finished_scores.get_shape(),\n finished_eos_flags.get_shape(),\n get_memory_invariants(alive_memories)],\n parallel_iterations=10,\n swap_memory=False,\n back_prop=False)\n\n alive_sequences.set_shape((None, beam_size, None))\n finished_sequences.set_shape((None, beam_size, None))\n\n # Account for the case in which a particular sequence never terminates in <EOS>;\n # in that case, copy the contents of the alive beam for that item into the finished beam (sequence + score)\n # tf.reduce_any(finished_eos_flags, 1) is False if there exists no completed translation hypothesis for a source\n # sentence in either of the beams , i.e. 
no replacement takes place if there is at least one finished translation\n finished_sequences = tf.where(tf.reduce_any(finished_eos_flags, 1), finished_sequences, alive_sequences)\n # Attention: alive_scores are not length normalized!\n finished_scores = tf.where(tf.reduce_any(finished_eos_flags, 1), finished_scores, alive_log_probs)\n # Truncate initial <GO> in finished sequences\n finished_sequences = finished_sequences[:, :, 1:]\n\n return finished_sequences, finished_scores", "def _generate_beam_search(\n self,\n input_ids,\n cur_len,\n max_length,\n min_length,\n do_sample,\n early_stopping,\n temperature,\n top_k,\n top_p,\n repetition_penalty,\n no_repeat_ngram_size,\n bad_words_ids,\n pad_token_id,\n eos_token_id,\n batch_size,\n num_return_sequences,\n length_penalty,\n num_beams,\n vocab_size,\n encoder_outputs,\n attention_mask,\n use_cache,\n model_specific_kwargs,\n ):\n\n # generated hypotheses\n generated_hyps = [\n BeamHypotheses(num_beams, max_length, length_penalty, early_stopping=early_stopping)\n for _ in range(batch_size)\n ]\n\n # scores for each sentence in the beam\n beam_scores = torch.zeros((batch_size, num_beams), dtype=torch.float, device=input_ids.device)\n\n # for greedy decoding it is made sure that only tokens of the first beam are considered to avoid sampling the exact same tokens three times\n if do_sample is False:\n beam_scores[:, 1:] = -1e9\n beam_scores = beam_scores.view(-1) # shape (batch_size * num_beams,)\n\n # cache compute states\n past = (encoder_outputs, None) if encoder_outputs is not None else None\n\n # done sentences\n done = [False for _ in range(batch_size)]\n\n while cur_len < max_length:\n model_inputs = self.model.prepare_inputs_for_generation(\n input_ids, past=past, attention_mask=attention_mask, use_cache=use_cache, **model_specific_kwargs\n )\n outputs = self.model(**model_inputs) # (batch_size * num_beams, cur_len, vocab_size)\n next_token_logits = outputs[0][:, -1, :] # (batch_size * num_beams, vocab_size)\n\n # if model has past, then set the past variable to speed up decoding\n if self.model._use_cache(outputs, use_cache):\n past = outputs[1]\n if self.model.config.is_encoder_decoder and do_sample is False:\n # TODO (PVP) still a bit hacky here - there might be a better solution\n next_token_logits = self.model.adjust_logits_during_generation(\n next_token_logits, cur_len=cur_len, max_length=max_length\n )\n\n scores = F.log_softmax(next_token_logits, dim=-1) # (batch_size * num_beams, vocab_size)\n\n scores = self.model.postprocess_next_token_scores(\n scores=scores,\n input_ids=input_ids,\n no_repeat_ngram_size=no_repeat_ngram_size,\n bad_words_ids=bad_words_ids,\n cur_len=cur_len,\n min_length=min_length,\n max_length=max_length,\n eos_token_id=eos_token_id,\n repetition_penalty=repetition_penalty,\n batch_size=batch_size,\n num_beams=num_beams,\n )\n\n assert scores.shape == (batch_size * num_beams, vocab_size), \"Shapes of scores: {} != {}\".format(\n scores.shape, (batch_size * num_beams, vocab_size)\n )\n\n if do_sample:\n _scores = scores + beam_scores[:, None].expand_as(scores) # (batch_size * num_beams, vocab_size)\n # Temperature\n if temperature != 1.0:\n _scores = _scores / temperature\n # Top-p/top-k filtering\n _scores = top_k_top_p_filtering(\n _scores, top_k=top_k, top_p=top_p, min_tokens_to_keep=2\n ) # (batch_size * num_beams, vocab_size)\n # re-organize to group the beam together to sample from all beam_idxs\n _scores = _scores.contiguous().view(\n batch_size, num_beams * vocab_size\n ) # (batch_size, 
num_beams * vocab_size)\n\n # Sample 2 next tokens for each beam (so we have some spare tokens and match output of greedy beam search)\n probs = F.softmax(_scores, dim=-1)\n next_tokens = torch.multinomial(probs, num_samples=2 * num_beams) # (batch_size, num_beams * 2)\n # Compute next scores\n next_scores = torch.gather(_scores, -1, next_tokens) # (batch_size, num_beams * 2)\n # sort the sampled vector to make sure that the first num_beams samples are the best\n next_scores, next_scores_indices = torch.sort(next_scores, descending=True, dim=1)\n next_tokens = torch.gather(next_tokens, -1, next_scores_indices) # (batch_size, num_beams * 2)\n\n else:\n next_scores = scores + beam_scores[:, None].expand_as(scores) # (batch_size * num_beams, vocab_size)\n\n # re-organize to group the beam together (we are keeping top hypothesis accross beams)\n next_scores = next_scores.view(\n batch_size, num_beams * vocab_size\n ) # (batch_size, num_beams * vocab_size)\n\n next_scores, next_tokens = torch.topk(next_scores, 2 * num_beams, dim=1, largest=True, sorted=True)\n\n assert next_scores.size() == next_tokens.size() == (batch_size, 2 * num_beams)\n\n # next batch beam content\n next_batch_beam = []\n\n # for each sentence\n for batch_idx in range(batch_size):\n\n # if we are done with this sentence, add a pad token\n if done[batch_idx]:\n assert (\n len(generated_hyps[batch_idx]) >= num_beams\n ), \"Batch can only be done if at least {} beams have been generated\".format(num_beams)\n assert (\n eos_token_id is not None and pad_token_id is not None\n ), \"generated beams >= num_beams -> eos_token_id and pad_token have to be defined\"\n next_batch_beam.extend([(0, pad_token_id, 0)] * num_beams) # pad the batch\n continue\n\n # next sentence beam content, this will get added to next_batch_beam\n next_sent_beam = []\n\n # next tokens for this sentence\n for beam_token_rank, (beam_token_id, beam_token_score) in enumerate(\n zip(next_tokens[batch_idx], next_scores[batch_idx])\n ):\n # get beam and token IDs\n beam_id = beam_token_id // vocab_size\n token_id = beam_token_id % vocab_size\n\n effective_beam_id = batch_idx * num_beams + beam_id\n # add to generated hypotheses if end of sentence\n if (eos_token_id is not None) and (token_id.item() == eos_token_id):\n # if beam_token does not belong to top num_beams tokens, it should not be added\n is_beam_token_worse_than_top_num_beams = beam_token_rank >= num_beams\n if is_beam_token_worse_than_top_num_beams:\n continue\n generated_hyps[batch_idx].add(\n input_ids[effective_beam_id].clone(), beam_token_score.item(),\n )\n else:\n # add next predicted token since it is not eos_token\n next_sent_beam.append((beam_token_score, token_id, effective_beam_id))\n\n # once the beam for next step is full, don't add more tokens to it.\n if len(next_sent_beam) == num_beams:\n break\n\n # Check if we are done so that we can save a pad step if all(done)\n done[batch_idx] = done[batch_idx] or generated_hyps[batch_idx].is_done(\n next_scores[batch_idx].max().item(), cur_len\n )\n\n # update next beam content\n assert len(next_sent_beam) == num_beams, \"Beam should always be full\"\n next_batch_beam.extend(next_sent_beam)\n assert len(next_batch_beam) == num_beams * (batch_idx + 1), \"We should have added num_beams each step\"\n\n # stop when we are done with each sentence\n if all(done):\n break\n\n # sanity check / prepare next batch\n assert len(next_batch_beam) == batch_size * num_beams\n beam_scores = beam_scores.new([x[0] for x in next_batch_beam])\n beam_tokens = 
input_ids.new([x[1] for x in next_batch_beam])\n beam_idx = input_ids.new([x[2] for x in next_batch_beam])\n\n # re-order batch and update current length\n input_ids = input_ids[beam_idx, :]\n input_ids = torch.cat([input_ids, beam_tokens.unsqueeze(1)], dim=-1)\n cur_len = cur_len + 1\n\n # re-order internal states\n if past is not None:\n past = self.model._reorder_cache(past, beam_idx)\n\n # extend attention_mask for new generated input if only decoder\n if self.model.config.is_encoder_decoder is False:\n attention_mask = torch.cat(\n [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1\n )\n\n # finalize all open beam hypotheses and add to generated hypotheses\n for batch_idx in range(batch_size):\n if done[batch_idx]:\n continue\n\n # test that beam scores match previously calculated scores if not eos and batch_idx not done\n if eos_token_id is not None and all(\n (token_id % vocab_size).item() != eos_token_id for token_id in next_tokens[batch_idx]\n ):\n assert torch.all(\n next_scores[batch_idx, :num_beams] == beam_scores.view(batch_size, num_beams)[batch_idx]\n ), \"If batch_idx is not done, final next scores: {} have to equal to accumulated beam_scores: {}\".format(\n next_scores[:, :num_beams][batch_idx], beam_scores.view(batch_size, num_beams)[batch_idx],\n )\n\n # need to add best num_beams hypotheses to generated hyps\n for beam_id in range(num_beams):\n effective_beam_id = batch_idx * num_beams + beam_id\n final_score = beam_scores[effective_beam_id].item()\n final_tokens = input_ids[effective_beam_id]\n generated_hyps[batch_idx].add(final_tokens, final_score)\n\n # depending on whether greedy generation is wanted or not define different output_batch_size and output_num_return_sequences_per_batch\n output_batch_size = batch_size if do_sample else batch_size * num_return_sequences\n output_num_return_sequences_per_batch = 1 if do_sample else num_return_sequences\n\n # select the best hypotheses\n sent_lengths = input_ids.new(output_batch_size)\n best = []\n\n # retrieve best hypotheses\n for i, hypotheses in enumerate(generated_hyps):\n sorted_hyps = sorted(hypotheses.beams, key=lambda x: x[0])\n for j in range(output_num_return_sequences_per_batch):\n effective_batch_idx = output_num_return_sequences_per_batch * i + j\n best_hyp = sorted_hyps.pop()[1]\n sent_lengths[effective_batch_idx] = len(best_hyp)\n best.append(best_hyp)\n\n # shorter batches are padded\n if sent_lengths.min().item() != sent_lengths.max().item():\n assert pad_token_id is not None, \"`Pad_token_id` has to be defined\"\n sent_max_len = min(sent_lengths.max().item() + 1, max_length)\n decoded = input_ids.new(output_batch_size, sent_max_len).fill_(pad_token_id)\n\n # fill with hypothesis and eos_token_id if necessary\n for i, hypo in enumerate(best):\n decoded[i, : sent_lengths[i]] = hypo\n if sent_lengths[i] < max_length:\n decoded[i, sent_lengths[i]] = eos_token_id\n else:\n # none of the hypotheses have an eos_token\n assert (len(hypo) == max_length for hypo in best)\n # decoded = torch.stack(best).type(torch.long).to(next(self.parameters()).device)\n decoded = torch.stack(best).type_as(input_ids)\n\n return decoded", "def beam_test_case(sess, model, data, onset='VALIDATION'):\n def pad_list(lst, pad=-1):\n inner_max_len = max(map(len, lst))\n map(lambda x: x.extend([pad]*(inner_max_len-len(x))), lst)\n return np.array(lst)\n\n print '#'*20, 'ON '+onset+' SET START ', '#'*20\n pred, true_label, beam_seq, beam_prob = model.beam_predict(sess, data)\n \n true_label = 
pad_list(true_label)\n true_label = np.array(true_label)\n pred = pad_list(pred, pad=0)\n pred = np.array(pred)\n \n true_label += 1\n true_label = np.concatenate([true_label, np.zeros([np.shape(true_label)[0], 1], dtype=np.int32)], axis=1)\n true_label = true_label.tolist()\n pred = pred.tolist()\n accuracy = helper.calculate_accuracy_seq(pred, true_label, eos_id=0)\n# helper.print_pred_seq(pred[:10], true_label[:10])\n with open(model.weight_path+'pred_beam', 'wb') as fd:\n pkl.dump(beam_seq, fd)\n pkl.dump(beam_prob, fd)\n \n print 'Overall '+onset+' accuracy is: {}'.format(accuracy)\n logging.info('Overall '+onset+' accuracy is: {}'.format(accuracy))\n print '#'*20, 'ON '+onset+' SET END ', '#'*20\n \n return pred, true_label, accuracy", "def model_lookup2(taskid, beam):\n # Assumes running on happili-05:\n model_dir = '/data/kutkin/cbeams/'\n\n weekly_gaussian_regression = False\n if weekly_gaussian_regression == True:\n all_dates = get_dates()\n all_beam_stats = get_beam_stats(all_dates)\n if beam > all_beam_stats.shape[1] - 1:\n print(\"\\t{}: Pick a valid beam number 0-39.\".format(beam))\n exit()\n beam_stats = all_beam_stats[:, beam]\n\n # Divide into before & after beam attenuation on October 1st (big impact on beam quality)!\n taskid = str(taskid)[:6]\n if int(taskid) < 191001:\n # *** Until we have a full beam complement ***:\n index = np.where(all_dates == '190821')[0][0]\n # index = np.where(all_dates == '190916')[0][0]\n dates = all_dates[:index + 1]\n beams = beam_stats[:index + 1]\n else:\n # index = np.where(all_dates == '191002')[0][0]\n index = np.where(all_dates == '191023')[0][0]\n dates = all_dates[index:]\n beams = beam_stats[index:]\n\n print(\"[MODEL_LOOKUP] Searching for appropriate beam model for beam {}.\".format(beam))\n if np.all(beams == 0):\n print(\"\\tNo good beam model options for period when this was observed. Do more drift scans (or edit code).\")\n exit()\n elif len(beams[beams == 1]) == 1:\n # If only one good beam model exists, use it.\n best = dates[beams == 1][0]\n else:\n # Use nearest. Don't have enough beam statistics for floor, I think.\n dates = dates[beams == 1]\n best = nearest_date(dates, taskid)\n\n # *** Until we have a full beam complement ***:\n if beam >= 32:\n # best = '191002'\n best = '191023'\n\n model = model_dir + '{:02}_gp_avg_orig.fits'.format(beam)\n\n return model", "def predict_MAP(self,new_times: ndarray) -> ndarray:\n return self._pred_mag(self.map_params,new_times)", "def beam_search(\n encoder_outputs,\n init, step, update,\n length_penalty=no_length_penalty,\n **kwargs\n):\n K = kwargs.get('beam', 5)\n mxlen = kwargs.get('mxlen', 100)\n bsz = encoder_outputs.output.size(0)\n device = encoder_outputs.output.device\n with torch.no_grad():\n extra = init(encoder_outputs, K)\n paths = torch.full((bsz, K, 1), Offsets.GO, dtype=torch.long, device=device)\n # This tracks the log prob of each beam. This is distinct from score which\n # is based on the log prob and penalties.\n log_probs = torch.zeros((bsz, K), dtype=torch.float, device=device)\n # Tracks the lengths of the beams, unfinished beams have a lengths of zero.\n lengths = torch.zeros((bsz, K), dtype=torch.long, device=device)\n last = paths[:, :, -1] # [B, K]\n\n for i in range(mxlen - 1):\n probs, extra = step(paths, extra)\n V = probs.size(-1)\n probs = probs.view((bsz, K, V)) # [B, K, V]\n if i > 0:\n # This mask is for all beams that are done.\n done_mask = (lengths != 0).unsqueeze(-1) # [B, K, 1]\n # Can creating this mask be moved out of the loop? 
It never changes but we don't have V\n # This mask selects the EOS token\n eos_mask = torch.zeros((1, 1, V), dtype=torch.uint8, device=device)\n eos_mask[:, :, Offsets.EOS] = 1\n # This mask selects the EOS token of only the beams that are done.\n mask = done_mask & eos_mask\n # Put all probability mass on the EOS token for finished beams.\n # Otherwise as the other beams get longer they will all give\n # up and eventually select this beam and all outputs become\n # the same.\n probs = probs.masked_fill(done_mask, -np.inf)\n probs = probs.masked_fill(mask, 0)\n probs = log_probs.unsqueeze(-1) + probs # [B, K, V]\n # Calculate the score of the beam based on the current length.\n path_scores = probs / length_penalty(lengths.masked_fill(lengths==0, i+1))\n else:\n # On the first step we only look at probabilities for the first beam.\n # If we don't then the probs will be the same for each beam\n # This means the same token will be selected for each beam\n # And we won't get any diversity.\n # Using only the first beam ensures K different starting points.\n path_scores = probs[:, 0, :]\n\n flat_scores = path_scores.view(bsz, -1) # [B, K * V]\n best_scores, best_idx = flat_scores.topk(K, 1)\n # Get the log_probs of the best scoring beams\n log_probs = probs.view(bsz, -1).gather(1, best_idx).view(bsz, K)\n\n best_beams = best_idx / V # Get which beam it came from\n best_idx = best_idx % V # Get the index of the word regardless of which beam it is.\n\n # Best Beam index is relative within the batch (only [0, K)).\n # This makes the index global (e.g. best beams for the second\n # batch example is in [K, 2*K)).\n offsets = torch.arange(bsz, dtype=torch.long, device=device) * K\n offset_beams = best_beams + offsets.unsqueeze(-1)\n flat_beams = offset_beams.view(bsz * K)\n # Select the paths to extend based on the best beams\n flat_paths = paths.view(bsz * K, -1)\n new_paths = flat_paths[flat_beams, :].view(bsz, K, -1)\n # Add the selected outputs to the paths\n paths = torch.cat([new_paths, best_idx.unsqueeze(-1)], dim=2)\n\n # Select the lengths to keep tracking based on the valid beams left.\n lengths = lengths.view(-1)[flat_beams].view((bsz, K))\n\n extra = update(flat_beams, extra)\n\n # Updated lengths based on if we hit EOS\n last = paths[:, :, -1]\n eoses = (last == Offsets.EOS)\n lengths = update_lengths(lengths, eoses, i + 1)\n if (lengths != 0).all():\n break\n else:\n # This runs if the loop didn't break meaning one beam hit the max len\n # Add an EOS to anything that hasn't hit the end. 
This makes the scores real.\n probs, extra = step(paths, extra)\n\n V = probs.size(-1)\n probs = probs.view((bsz, K, V))\n probs = probs[:, :, Offsets.EOS] # Select the score of EOS\n # If any of the beams are done mask out the score of this EOS (they already had an EOS)\n probs = probs.masked_fill((lengths != 0), 0)\n log_probs = log_probs + probs\n end_tokens = torch.full((bsz, K, 1), Offsets.EOS, device=device, dtype=paths.dtype)\n paths = torch.cat([paths, end_tokens], dim=2)\n lengths = update_lengths(lengths, torch.ones_like(lengths) == 1, mxlen)\n best_scores = log_probs / length_penalty(lengths).squeeze(-1)\n\n # Slice off the Offsets.GO token\n paths = paths[:, :, 1:]\n return paths, lengths, best_scores", "def compute_precision(gold_dict, src_word2ind, trg_word2ind, src_words, trg_words, scores, args, BATCH_SIZE=20, verbose = False):\n oov = set()\n correct = 0\n n,m = scores.shape # The actual size of mapping computed, might be smaller that total size of dict\n\n precisions = {}\n if verbose:\n print('@{:2} {:10} {:30} {:30}'.format('k', 'Src','Predicted','Gold'))\n print_row = '{:2} {:10} {:30} {:30} {}'\n\n for k in [1,5,10]:\n correct = 0\n for src_idx,tgt_idx in gold_dict.items():\n if src_idx > n or np.all([e>m for e in tgt_idx]):#Src word not in mapping\n oov.add(src_idx)\n continue\n else:\n knn = np.argpartition(scores[src_idx,:], -k)[-k:] # argpartition returns top k not in order\n knn_sort = knn[np.argsort(-scores[src_idx,knn])] # With - to get descending order\n if set(knn_sort).intersection(tgt_idx):\n correct +=1\n correct_string = ' '\n else:\n correct_string = 'X'\n if verbose:\n src_str = src_words[src_idx]\n pred_str = ','.join([trg_words[k] for k in knn_sort])\n gold_str = ','.join([trg_words[k] for k in tgt_idx])\n print(print_row.format(k,src_str,pred_str,gold_str,correct_string))\n\n\n coverage = len(gold_dict.keys()) / (len(gold_dict.keys()) + len(oov))\n if verbose:\n print('Coverage: {:7.2%} Precision @{:2}:{:7.2%}'.format(coverage, k, correct / len(gold_dict)))\n precisions[k] = correct / len(gold_dict)\n return precisions", "def beam_search_with_heuristics(model, inputs, beam_size, max_steps, from_cond=True):\n orig_inputs = inputs['orig_inputs'][0]\n #inference_state, next_choices = model.inference(inputs, orig_inputs.db)\n inference_state, next_choices = model(inputs, db=orig_inputs.db, is_train=False)\n beam = [Hypothesis4Filtering(inference_state, next_choices)]\n\n cached_finished_seqs = [] # cache filtered trajectories\n beam_prefix = beam\n while True:\n # search prefixes with beam search\n prefixes2fill_from = []\n for step in range(max_steps):\n if len(prefixes2fill_from) >= beam_size:\n break\n\n candidates = []\n for hyp in beam_prefix:\n if hyp.inference_state.cur_item.state == hyp.inference_state.State.CHILDREN_APPLY \\\n and hyp.inference_state.cur_item.node_type == \"from\":\n # 仅差 from 未填充,单独保存,后续逻辑专门处理\n prefixes2fill_from.append(hyp)\n else:\n candidates += [(hyp, choice, choice_score.numpy().item(), hyp.score + choice_score.numpy().item())\n for choice, choice_score in hyp.next_choices]\n # 排序并限定 beam_size 个候选(减去待填充 from 的候选)\n candidates.sort(key=operator.itemgetter(3), reverse=True)\n candidates = candidates[:beam_size - len(prefixes2fill_from)]\n\n # Create the new hypotheses from the expansions\n beam_prefix = []\n for hyp, choice, choice_score, cum_score in candidates:\n inference_state = hyp.inference_state.clone()\n\n # cache column choice\n column_history = hyp.column_history[:]\n if hyp.inference_state.cur_item.state == 
hyp.inference_state.State.POINTER_APPLY and \\\n hyp.inference_state.cur_item.node_type == \"column\":\n column_history = column_history + [choice]\n\n # beam 内的每个候选执行一次 step(),获取下一步 choices\n next_choices = inference_state.step(choice)\n assert next_choices is not None\n beam_prefix.append(\n Hypothesis4Filtering(inference_state, next_choices, cum_score,\n hyp.choice_history + [choice],\n hyp.score_history + [choice_score],\n column_history))\n\n prefixes2fill_from.sort(key=operator.attrgetter('score'), reverse=True)\n # assert len(prefixes) == beam_size\n\n # emuerating \n beam_from = prefixes2fill_from\n max_size = 6\n unfiltered_finished = []\n prefixes_unfinished = []\n for step in range(max_steps):\n if len(unfiltered_finished) + len(prefixes_unfinished) > max_size:\n break\n\n candidates = []\n for hyp in beam_from:\n if step > 0 and hyp.inference_state.cur_item.state == hyp.inference_state.State.CHILDREN_APPLY \\\n and hyp.inference_state.cur_item.node_type == \"from\":\n prefixes_unfinished.append(hyp)\n else:\n candidates += [(hyp, choice, choice_score.numpy().item(),\n hyp.score + choice_score.numpy().item())\n for choice, choice_score in hyp.next_choices]\n candidates.sort(key=operator.itemgetter(3), reverse=True)\n candidates = candidates[:max_size - len(prefixes_unfinished)]\n\n beam_from = []\n for hyp, choice, choice_score, cum_score in candidates:\n inference_state = hyp.inference_state.clone()\n\n # cache table choice\n table_history = hyp.table_history[:]\n key_column_history = hyp.key_column_history[:]\n if hyp.inference_state.cur_item.state == hyp.inference_state.State.POINTER_APPLY:\n if hyp.inference_state.cur_item.node_type == \"table\":\n table_history = table_history + [choice]\n elif hyp.inference_state.cur_item.node_type == \"column\":\n key_column_history = key_column_history + [choice]\n\n next_choices = inference_state.step(choice)\n if next_choices is None:\n unfiltered_finished.append(Hypothesis4Filtering(\n inference_state,\n None,\n cum_score,\n hyp.choice_history + [choice],\n hyp.score_history + [choice_score],\n hyp.column_history, table_history,\n key_column_history))\n else:\n beam_from.append(\n Hypothesis4Filtering(inference_state, next_choices, cum_score,\n hyp.choice_history + [choice],\n hyp.score_history + [choice_score],\n hyp.column_history, table_history,\n key_column_history))\n # [END] for step in range(max_steps)\n\n unfiltered_finished.sort(key=operator.attrgetter('score'), reverse=True)\n\n # filtering\n filtered_finished = []\n for hyp in unfiltered_finished:\n mentioned_column_ids = set(hyp.column_history)\n mentioned_key_column_ids = set(hyp.key_column_history)\n mentioned_table_ids = set(hyp.table_history)\n\n # duplicate tables\n if len(mentioned_table_ids) != len(hyp.table_history):\n continue\n\n # the foreign key should be correctly used\n # NOTE: the new version does not predict conditions in FROM clause anymore\n if from_cond:\n covered_tables = set()\n must_include_key_columns = set()\n candidate_table_ids = sorted(mentioned_table_ids)\n start_table_id = candidate_table_ids[0]\n for table_id in candidate_table_ids[1:]:\n if table_id in covered_tables:\n continue\n try:\n path = nx.shortest_path(\n orig_inputs.db.foreign_key_graph, source=start_table_id, target=table_id)\n except (nx.NetworkXNoPath, nx.NodeNotFound):\n covered_tables.add(table_id)\n continue\n\n for source_table_id, target_table_id in zip(path, path[1:]):\n if target_table_id in covered_tables:\n continue\n if target_table_id not in mentioned_table_ids:\n 
continue\n col1, col2 = orig_inputs.db.foreign_key_graph[source_table_id][target_table_id]['columns']\n must_include_key_columns.add(col1)\n must_include_key_columns.add(col2)\n if not must_include_key_columns == mentioned_key_column_ids:\n continue\n\n # tables whose columns are mentioned should also exist\n must_table_ids = set()\n for col in mentioned_column_ids:\n tab_ = orig_inputs.db.columns[col].table\n if tab_ is not None:\n must_table_ids.add(tab_.id)\n if not must_table_ids.issubset(mentioned_table_ids):\n continue\n\n filtered_finished.append(hyp)\n\n filtered_finished.sort(key=operator.attrgetter('score'), reverse=True)\n # filtered.sort(key=lambda x: x.score / len(x.choice_history), reverse=True)\n prefixes_unfinished.sort(key=operator.attrgetter('score'), reverse=True)\n # new_prefixes.sort(key=lambda x: x.score / len(x.choice_history), reverse=True)\n\n prefixes_, filtered_ = merge_beams(prefixes_unfinished, filtered_finished, beam_size)\n\n if filtered_:\n cached_finished_seqs = cached_finished_seqs + filtered_\n cached_finished_seqs.sort(key=operator.attrgetter('score'), reverse=True)\n\n if prefixes_ and len(prefixes_[0].choice_history) < 200:\n beam_prefix = prefixes_\n for hyp in beam_prefix:\n hyp.table_history = []\n hyp.column_history = []\n hyp.key_column_history = []\n elif cached_finished_seqs:\n return cached_finished_seqs[:beam_size]\n else:\n return unfiltered_finished[:beam_size]", "def _generate_beam_search(\n self,\n input_ids,\n cur_len,\n max_length,\n min_length,\n do_sample,\n early_stopping,\n temperature,\n top_k,\n top_p,\n repetition_penalty,\n no_repeat_ngram_size,\n bad_words_ids,\n pad_token_id,\n eos_token_id,\n batch_size,\n num_return_sequences,\n length_penalty,\n num_beams,\n vocab_size,\n encoder_outputs,\n attention_mask,\n use_cache,\n model_specific_kwargs,\n):\n # generated hypotheses\n eos_token_id = 198 # newline\n generated_hyps = [\n BeamHypotheses(num_beams, max_length, length_penalty, early_stopping=early_stopping)\n for _ in range(batch_size)\n ]\n\n # scores for each sentence in the beam\n beam_scores = torch.zeros((batch_size, num_beams), dtype=torch.float, device=input_ids.device)\n\n # for greedy decoding it is made sure that only tokens of the first beam are considered to avoid sampling the exact same tokens three times\n if do_sample is False:\n beam_scores[:, 1:] = -1e9\n beam_scores = beam_scores.view(-1) # shape (batch_size * num_beams,)\n\n # cache compute states\n past = (encoder_outputs, None) if encoder_outputs is not None else None\n\n # done sentences\n done = [False for _ in range(batch_size)]\n\n while cur_len < max_length:\n model_inputs = self.prepare_inputs_for_generation(\n input_ids, past=past, attention_mask=attention_mask, use_cache=use_cache, **model_specific_kwargs\n )\n outputs = self(**model_inputs) # (batch_size * num_beams, cur_len, vocab_size)\n next_token_logits = outputs[0][:, -1, :] # (batch_size * num_beams, vocab_size)\n\n # if model has past, then set the past variable to speed up decoding\n if self._use_cache(outputs, use_cache):\n past = outputs[1]\n if self.config.is_encoder_decoder and do_sample is False:\n # TODO (PVP) still a bit hacky here - there might be a better solution\n next_token_logits = self.adjust_logits_during_generation(\n next_token_logits, cur_len=cur_len, max_length=max_length\n )\n\n scores = F.log_softmax(next_token_logits, dim=-1) # (batch_size * num_beams, vocab_size)\n\n scores = self.postprocess_next_token_scores(\n scores=scores,\n input_ids=input_ids,\n 
no_repeat_ngram_size=no_repeat_ngram_size,\n bad_words_ids=bad_words_ids,\n cur_len=cur_len,\n min_length=min_length,\n max_length=max_length,\n eos_token_id=eos_token_id,\n repetition_penalty=repetition_penalty,\n batch_size=batch_size,\n num_beams=num_beams,\n )\n\n assert scores.shape == (batch_size * num_beams, vocab_size), \"Shapes of scores: {} != {}\".format(\n scores.shape, (batch_size * num_beams, vocab_size)\n )\n\n if do_sample:\n _scores = scores + beam_scores[:, None].expand_as(scores) # (batch_size * num_beams, vocab_size)\n # Temperature\n if temperature != 1.0:\n _scores = _scores / temperature\n # Top-p/top-k filtering\n _scores = top_k_top_p_filtering(\n _scores, top_k=top_k, top_p=top_p, min_tokens_to_keep=2\n ) # (batch_size * num_beams, vocab_size)\n # re-organize to group the beam together to sample from all beam_idxs\n _scores = _scores.contiguous().view(\n batch_size, num_beams * vocab_size\n ) # (batch_size, num_beams * vocab_size)\n\n # Sample 2 next tokens for each beam (so we have some spare tokens and match output of greedy beam search)\n probs = F.softmax(_scores, dim=-1)\n next_tokens = torch.multinomial(probs, num_samples=2 * num_beams) # (batch_size, num_beams * 2)\n # Compute next scores\n next_scores = torch.gather(_scores, -1, next_tokens) # (batch_size, num_beams * 2)\n # sort the sampled vector to make sure that the first num_beams samples are the best\n next_scores, next_scores_indices = torch.sort(next_scores, descending=True, dim=1)\n next_tokens = torch.gather(next_tokens, -1, next_scores_indices) # (batch_size, num_beams * 2)\n\n else:\n next_scores = scores + beam_scores[:, None].expand_as(scores) # (batch_size * num_beams, vocab_size)\n\n # re-organize to group the beam together (we are keeping top hypothesis accross beams)\n next_scores = next_scores.view(\n batch_size, num_beams * vocab_size\n ) # (batch_size, num_beams * vocab_size)\n\n next_scores, next_tokens = torch.topk(next_scores, 2 * num_beams, dim=1, largest=True, sorted=True)\n\n assert next_scores.size() == next_tokens.size() == (batch_size, 2 * num_beams)\n\n # next batch beam content\n next_batch_beam = []\n\n # for each sentence\n for batch_idx in range(batch_size):\n\n # if we are done with this sentence, add a pad token\n if done[batch_idx]:\n assert (\n len(generated_hyps[batch_idx]) >= num_beams\n ), \"Batch can only be done if at least {} beams have been generated\".format(num_beams)\n assert (\n eos_token_id is not None and pad_token_id is not None\n ), \"generated beams >= num_beams -> eos_token_id and pad_token have to be defined\"\n next_batch_beam.extend([(0, pad_token_id, 0)] * num_beams) # pad the batch\n continue\n\n # next sentence beam content, this will get added to next_batch_beam\n next_sent_beam = []\n\n # next tokens for this sentence\n for beam_token_rank, (beam_token_id, beam_token_score) in enumerate(\n zip(next_tokens[batch_idx], next_scores[batch_idx])\n ):\n # get beam and token IDs\n beam_id = beam_token_id // vocab_size\n token_id = beam_token_id % vocab_size\n\n effective_beam_id = batch_idx * num_beams + beam_id\n # add to generated hypotheses if end of sentence (eos or newline)\n if ((eos_token_id is not None) and (token_id.item() == eos_token_id)):\n # if beam_token does not belong to top num_beams tokens, it should not be added\n is_beam_token_worse_than_top_num_beams = beam_token_rank >= num_beams\n if is_beam_token_worse_than_top_num_beams:\n continue\n generated_hyps[batch_idx].add(\n input_ids[effective_beam_id].clone(), 
beam_token_score.item(),\n )\n else:\n # add next predicted token since it is not eos_token\n next_sent_beam.append((beam_token_score, token_id, effective_beam_id))\n\n # once the beam for next step is full, don't add more tokens to it.\n if len(next_sent_beam) == num_beams:\n break\n\n # Check if we are done so that we can save a pad step if all(done)\n done[batch_idx] = done[batch_idx] or generated_hyps[batch_idx].is_done(\n next_scores[batch_idx].max().item(), cur_len\n )\n\n # update next beam content\n assert len(next_sent_beam) == num_beams, \"Beam should always be full\"\n next_batch_beam.extend(next_sent_beam)\n assert len(next_batch_beam) == num_beams * (batch_idx + 1), \"We should have added num_beams each step\"\n\n # stop when we are done with each sentence\n if all(done):\n break\n\n # sanity check / prepare next batch\n assert len(next_batch_beam) == batch_size * num_beams\n beam_scores = beam_scores.new([x[0] for x in next_batch_beam])\n beam_tokens = input_ids.new([x[1] for x in next_batch_beam])\n beam_idx = input_ids.new([x[2] for x in next_batch_beam])\n\n # re-order batch and update current length\n input_ids = input_ids[beam_idx, :]\n input_ids = torch.cat([input_ids, beam_tokens.unsqueeze(1)], dim=-1)\n cur_len = cur_len + 1\n\n # re-order internal states\n if past is not None:\n past = self._reorder_cache(past, beam_idx)\n\n # extend attention_mask for new generated input if only decoder\n if self.config.is_encoder_decoder is False:\n attention_mask = torch.cat(\n [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1\n )\n\n # finalize all open beam hypotheses and add to generated hypotheses\n for batch_idx in range(batch_size):\n if done[batch_idx]:\n continue\n\n # test that beam scores match previously calculated scores if not eos and batch_idx not done\n if eos_token_id is not None and all(\n ((token_id % vocab_size).item() != eos_token_id) for token_id in next_tokens[batch_idx]\n ):\n assert torch.all(\n next_scores[batch_idx, :num_beams] == beam_scores.view(batch_size, num_beams)[batch_idx]\n ), \"If batch_idx is not done, final next scores: {} have to equal to accumulated beam_scores: {}\".format(\n next_scores[:, :num_beams][batch_idx], beam_scores.view(batch_size, num_beams)[batch_idx],\n )\n\n # need to add best num_beams hypotheses to generated hyps\n for beam_id in range(num_beams):\n effective_beam_id = batch_idx * num_beams + beam_id\n final_score = beam_scores[effective_beam_id].item()\n final_tokens = input_ids[effective_beam_id]\n generated_hyps[batch_idx].add(final_tokens, final_score)\n\n # depending on whether greedy generation is wanted or not define different output_batch_size and output_num_return_sequences_per_batch\n output_batch_size = batch_size if do_sample else batch_size * num_return_sequences\n output_num_return_sequences_per_batch = 1 if do_sample else num_return_sequences\n\n # select the best hypotheses\n sent_lengths = input_ids.new(output_batch_size)\n best = []\n scores = []\n\n # retrieve best hypotheses\n for i, hypotheses in enumerate(generated_hyps):\n sorted_hyps = sorted(hypotheses.beams, key=lambda x: x[0])\n for j in range(output_num_return_sequences_per_batch):\n effective_batch_idx = output_num_return_sequences_per_batch * i + j\n score, best_hyp = sorted_hyps.pop()\n sent_lengths[effective_batch_idx] = len(best_hyp)\n best.append(best_hyp)\n scores.append(score)\n\n scores = torch.exp(torch.tensor(scores))\n return best, scores", "def fit_map(self,data,maps):\n fname = 
data.filename.split('/')[-1]\n\n mjd0 = data['level1/spectrometer/MJD'][0]\n\n\n # If the source is Jupiter we will use the beam model\n self.model.set_fixed(**{'x0':True,'y0':True,'phi':True,'sigx':True,'sigy_scale':True})\n def limfunc(P):\n A,sigx,sigy,B = P\n if (sigx < 0) | (sigy < 0):\n return True\n return False\n\n self.map_parameters = self.model.get_param_names()\n \n nFeeds, nBands, nChans = maps.shape\n # Setup fit containers\n self.map_fits ={'Values': np.zeros((nFeeds,\n nBands,\n nChans,\n self.model.nparams)),\n 'Errors': np.zeros((nFeeds,\n nBands,\n nChans,\n self.model.nparams)),\n 'Chi2': np.zeros((nFeeds,\n nBands,\n nChans,\n 2)),\n 'MJD':mjd0}\n\n\n self.free_parameters = ['A','B']\n self.fixed_parameters = ['x0','sigx','y0','sigy_scale','phi']\n pbar = tqdm(total=nFeeds*nBands*nChans,desc=f'{self.name}:fit_map:{self.source}')\n for ifeed in self.feedlist: \n for isb in range(nBands):\n for ichan in range(nChans):\n try:\n m,c,x,y,P0 = self.prepare_maps(maps[ifeed,isb,ichan]['map'],\n maps[ifeed,isb,ichan]['cov'],\n maps[ifeed,isb,ichan]['xygrid'])\n except AssertionError:\n pbar.update(1)\n continue\n\n if np.nansum(m) == 0:\n pbar.update(1)\n continue\n\n P0_priors = {}\n\n self.model.set_defaults(x0 =self.avg_map_fits[isb]['Values'][1],\n sigx=self.avg_map_fits[isb]['Values'][2],\n y0 =self.avg_map_fits[isb]['Values'][3],\n sigy_scale=self.avg_map_fits[isb]['Values'][4],\n phi =self.avg_map_fits[isb]['Values'][5])\n\n try:\n gd = (c != 0) & np.isfinite(m) & np.isfinite(c)\n\n result, error,samples, min_chi2, ddof = self.model(P0, (x[gd],y[gd]), m[gd], c[gd],\n P0_priors=P0_priors,return_array=True)\n\n self.map_fits['Values'][ifeed,isb,ichan,:] = result\n self.map_fits['Errors'][ifeed,isb,ichan,:] = error\n self.map_fits['Chi2'][ifeed,isb,ichan,:] = min_chi2, ddof\n \n except ValueError as e:\n pbar.update(1)\n result = 0\n error = 0\n try:\n self.logger(f'{fname}:emcee:{e}',error=e)\n except TypeError:\n self.logger(f'{fname}:emcee:{e}')\n\n pbar.update(1)\n pbar.close()\n return self.map_fits", "def beam_search(self, src_sent, beam_size=5, max_decoding_time_step=70):\n src_sents_var = to_tensor(self.vocabs.src, [src_sent], device=self.device)\n\n src_encodings, dec_init_vec = self.encoder(src_sents_var, [len(src_sent)])\n src_encodings_att_linear = self.decoder.attn_projection(src_encodings)\n\n h_tm1 = dec_init_vec\n att_tm1 = torch.zeros(1, self.decoder.hidden_size, device=self.device)\n\n hypotheses = [[\"<sos>\"]]\n hyp_scores = torch.zeros(len(hypotheses), dtype=torch.float, device=self.device)\n completed_hypotheses = []\n\n t = 0\n while len(completed_hypotheses) < beam_size and t < max_decoding_time_step:\n t += 1\n hyp_num = len(hypotheses)\n\n exp_src_encodings = src_encodings.expand(hyp_num,\n src_encodings.size(1),\n src_encodings.size(2))\n\n exp_src_encodings_att_linear = src_encodings_att_linear.expand(hyp_num,\n src_encodings_att_linear.size(1),\n src_encodings_att_linear.size(2))\n\n y_tm1 = torch.tensor([self.vocabs.tgt.w2i[hyp[-1]] for hyp in hypotheses], dtype=torch.long, device=self.device)\n y_t_embed = self.decoder.embedding(y_tm1)\n\n x = torch.cat([y_t_embed, att_tm1], dim=-1)\n\n (h_t, cell_t), att_t, _ = self.decoder.step(x, h_tm1,\n exp_src_encodings, exp_src_encodings_att_linear, encoder_masks=None)\n\n # log probabilities over target words\n log_p_t = F.log_softmax(self.decoder.vocab_projection(att_t), dim=-1)\n\n live_hyp_num = beam_size - len(completed_hypotheses)\n contiuating_hyp_scores = 
(hyp_scores.unsqueeze(1).expand_as(log_p_t) + log_p_t).view(-1)\n top_cand_hyp_scores, top_cand_hyp_pos = torch.topk(contiuating_hyp_scores, k=live_hyp_num)\n\n prev_hyp_ids = top_cand_hyp_pos / len(self.vocabs.tgt)\n hyp_word_ids = top_cand_hyp_pos % len(self.vocabs.tgt)\n\n new_hypotheses = []\n live_hyp_ids = []\n new_hyp_scores = []\n\n for prev_hyp_id, hyp_word_id, cand_new_hyp_score in zip(prev_hyp_ids, hyp_word_ids, top_cand_hyp_scores):\n prev_hyp_id = prev_hyp_id.item()\n hyp_word_id = hyp_word_id.item()\n cand_new_hyp_score = cand_new_hyp_score.item()\n\n hyp_word = self.vocabs.tgt.i2w[hyp_word_id]\n new_hyp_sent = hypotheses[prev_hyp_id] + [hyp_word]\n if hyp_word == \"<eos>\":\n completed_hypotheses.append(Hypothesis(value=new_hyp_sent[1:-1],\n score=cand_new_hyp_score))\n else:\n new_hypotheses.append(new_hyp_sent)\n live_hyp_ids.append(prev_hyp_id)\n new_hyp_scores.append(cand_new_hyp_score)\n\n if len(completed_hypotheses) == beam_size:\n break\n\n live_hyp_ids = torch.tensor(live_hyp_ids, dtype=torch.long, device=self.device)\n h_tm1 = (h_t[live_hyp_ids], cell_t[live_hyp_ids])\n att_tm1 = att_t[live_hyp_ids]\n\n hypotheses = new_hypotheses\n hyp_scores = torch.tensor(new_hyp_scores, dtype=torch.float, device=self.device)\n\n if len(completed_hypotheses) == 0:\n completed_hypotheses.append(Hypothesis(value=hypotheses[0][1:],\n score=hyp_scores[0].item()))\n\n completed_hypotheses.sort(key=lambda hyp: hyp.score, reverse=True)\n\n return completed_hypotheses", "def __init__(self, models, tgt_dict, beam_size=1, minlen=1, maxlen=None, stop_early=True, normalize_scores=True, len_penalty=0, unk_reward=0, lexicon_reward=0, retain_dropout=False, word_reward=0, model_weights=None, use_char_source=False, align_to=1):\n self.models = models\n self.pad = tgt_dict.pad()\n self.unk = tgt_dict.unk()\n self.eos = tgt_dict.eos()\n self.vocab_size = len(tgt_dict)\n self.beam_size = beam_size\n self.minlen = minlen\n max_decoder_len = min(m.max_decoder_positions() for m in self.models)\n self.maxlen = max_decoder_len if maxlen is None else min(maxlen, max_decoder_len)\n self.stop_early = stop_early\n self.normalize_scores = normalize_scores\n self.len_penalty = len_penalty\n self.unk_reward = unk_reward\n self.lexicon_reward = lexicon_reward\n self.lexicon_indices = tgt_dict.lexicon_indices_list()\n self.retain_dropout = retain_dropout\n self.word_reward = word_reward\n if model_weights is not None:\n assert len(models) == len(model_weights)\n self.model_weights = model_weights\n else:\n self.model_weights = [1.0 / len(models)] * len(models)\n self.use_char_source = use_char_source", "def compute_sw_threshold(flanking_reads, paf_dict, fasta_dict, window_size):\n\n max_scores = []\n for query, target in itertools.product(flanking_reads, flanking_reads):\n\n if str(query + target) in paf_dict:\n overlap_info = paf_dict[query+target]\n elif str(target + query) in paf_dict:\n # get info and swap them\n overlap_info = paf_dict[target+query]\n query, target = target, query\n else:\n continue\n\n query_start = overlap_info['query_start']\n query_end = overlap_info['query_end']\n target_start = overlap_info['target_start']\n target_end = overlap_info['target_end']\n\n query_seq = fasta_dict[query][query_start:query_end]\n target_seq = fasta_dict[target][target_start:target_end]\n\n # Get scores for this pair; store in cur_scores\n cur_scores = []\n if window_size:\n # Use rolling window\n min_len = min(len(query_seq), len(target_seq))\n for start, end in utils.pairwise(range(0, min_len, 
window_size)):\n qs = query_seq[start:end]\n ts = target_seq[start:end]\n score = smith_waterman.smith_waterman(qs, ts)\n cur_scores.append(score)\n\n if cur_scores:\n score = max(cur_scores)\n max_scores.append(score)\n else:\n # No rolling window\n score = smith_waterman.smith_waterman(query_seq, target_seq)\n max_scores.append(score)\n\n threshold = 0.9 * max(max_scores)\n\n print(\"using {} as threshold\".format(threshold))\n\n plt.subplot(2, 3, 2)\n plt.hist(max_scores)\n plt.title(\"FLANKING READS\\nhistogram of num_gaps / len(aligned_sequence)\\nthreshold = {}\\nwindow_size = {}\\nshowing {} scores\"\n .format(threshold, window_size, len(max_scores)))\n\n\n\n return threshold", "def predict_proba(self):\n ...", "def model_lookup(taskid, beam):\n # Assumes running on happili-05:\n model_dir = '/tank/apertif/driftscans/fits_files/'\n all_dates = get_dates()\n all_beam_stats = get_beam_stats(all_dates)\n if beam > all_beam_stats.shape[1] - 1:\n print(\"\\t{}: Pick a valid beam number 0-39.\".format(beam))\n exit()\n beam_stats = all_beam_stats[:, beam]\n\n # Divide into before & after beam attenuation on October 1st (big impact on beam quality)!\n taskid = str(taskid)[:6]\n if int(taskid) < 191001:\n # *** Until we have a full beam complement ***:\n index = np.where(all_dates == '190821')[0][0]\n # index = np.where(all_dates == '190916')[0][0]\n dates = all_dates[:index + 1]\n beams = beam_stats[:index + 1]\n else:\n # index = np.where(all_dates == '191002')[0][0]\n index = np.where(all_dates == '191023')[0][0]\n dates = all_dates[index:]\n beams = beam_stats[index:]\n\n print(\"[MODEL_LOOKUP] Searching for appropriate beam model for beam {}.\".format(beam))\n if np.all(beams == 0):\n print(\"\\tNo good beam model options for period when this was observed. Do more drift scans (or edit code).\")\n exit()\n elif len(beams[beams == 1]) == 1:\n # If only one good beam model exists, use it.\n best = dates[beams == 1][0]\n else:\n # Use nearest. 
Don't have enough beam statistics for floor, I think.\n dates = dates[beams == 1]\n best = nearest_date(dates, taskid)\n\n # *** Until we have a full beam complement ***:\n if beam >= 32:\n # best = '191002'\n best = '191023'\n\n model = model_dir + '{}/beam_models/chann_9/{}_{:02}_I_model.fits'.format(best, best, beam)\n\n return model", "def solvePostOverlap(targetNum, defenders, dRewards, dPenalties, dCosts, aTypes, aRewards, aPenalties, q):\n \"\"\"Contains a dummy target for defenders and attackers\"\"\"\n # Add the extra dummy target\n _dRewards = copy.deepcopy(dRewards)\n _dPenalties = copy.deepcopy(dPenalties)\n _dCosts = copy.deepcopy(dCosts)\n _aRewards = copy.deepcopy(aRewards)\n _aPenalties = copy.deepcopy(aPenalties)\n for m in defenders:\n for defenderCount in defenders:\n _dRewards[m].append(0)\n _dPenalties[m].append(0)\n _dCosts[m].append(0)\n for lam in aTypes:\n _aRewards[lam].append(0)\n _aPenalties[lam].append(0)\n targetNumWithDummies = len(_dRewards[0])\n targetRange = list(range(targetNumWithDummies))\n attackerActions = targetRange\n placements = getPlacements(defenders, targetNumWithDummies)\n omegaKeys = getOmegaKeys(aTypes, placements, attackerActions)\n\n # Build the model\n model = Model('PrimalWithOverlap')\n w = model.continuous_var_dict(keys=omegaKeys, lb=0, ub=1, name=\"w\")\n objectiveFunction = sum([q[lam] * sum([w[s,a,lam] * defenderSocialUtility(s,a,defenders,_dRewards,_dCosts,_dPenalties) for s in placements for a in attackerActions]) for lam in aTypes])\n c1 = [sum([w[s,a,lam] * aUtility(s,a,lam,_aPenalties,_aRewards) for s in placements]) \\\n >= sum([w[s,a,lam] * aUtility(s,b,lam,_aPenalties,_aRewards) for s in placements])\n for lam in aTypes for a in attackerActions for b in attackerActions if a != b]\n c1 = [constraint for constraint in c1 if not isinstance(constraint, bool)]\n c1 = model.add_constraints(c1)\n c2 = model.add_constraints([sum([q[lam] * sum([w[s,a,lam] * utilityM(d,s,a,m,_dRewards,_dPenalties,_dCosts) for a in attackerActions for s in placements if s[m] == d]) for lam in aTypes]) \\\n >= sum([q[lam] * sum([w[s,a,lam] * utilityM(e,s,a,m,_dRewards,_dPenalties,_dCosts) for a in attackerActions for s in placements if s[m] == d]) for lam in aTypes])\n for m in defenders for d in targetRange for e in targetRange if d!=e])\n c3 = model.add_constraints([sum([w[(s,a,lam)] for s in placements for a in attackerActions]) == 1 for lam in aTypes])\n # Solve the model\n model.maximize(objectiveFunction)\n model.solve()\n # Now that w contains all the outcomes and their probabilities, sum the attacker utilities up.\n utilityPerAttacker = 0\n for k,v in w.items():\n prob = float(v)\n s,a,lam = k\n utilityPerAttacker += aUtility(s,a,lam,_aPenalties,_aRewards) * prob\n utilityPerAttacker /= len(aTypes)\n utilityPerDefender = model.solution.get_objective_value()\n utilityPerDefender /= len(defenders)\n\n return utilityPerDefender, utilityPerAttacker, None", "def pslMap( options ):\n\n if options.format == \"gtf\":\n use_copy = False\n else:\n use_copy = True\n\n ninput, noutput, ndiscarded, nskipped, nskipped_small_queries = 0, 0, 0, 0, 0\n\n min_length = options.min_aligned\n\n for match, qx, tx in iterator_psl_intervals( options ):\n\n map_query2target = match.getMapQuery2Target()\n\n ninput += 1\n\n ## if no filter on qx or tx, use full segment\n if qx == None:\n qx = [ (match.mQueryFrom,match.mQueryTo,0) ]\n elif tx == None:\n tx = [ (match.mSbjctFrom,match.mSbjctTo,0) ]\n\n ## if no overlap: return\n if not qx or not tx: \n nskipped += 1\n 
continue\n\n for query in qx:\n\n qstart, qend, qval = query\n\n # skip elements that are too small\n if qend - qstart < min_length: \n E.debug( \"query too small - skipped at %s:%i-%i\" % (match.mQueryId, qstart, qend) )\n nskipped_small_queries += 1\n continue\n\n E.debug( \"working on query %s:%i-%i\" % (match.mQueryId, qstart, qend) )\n\n mqstart, mqend = ( map_query2target.mapRowToCol(qstart, \n alignlib_lite.py_RIGHT), \n map_query2target.mapRowToCol(qend, \n alignlib_lite.py_LEFT) )\n \n \n if match.strand == \"-\":\n qstart, qend = match.mQueryLength - qend, match.mQueryLength - qstart\n\n for target in tx:\n\n tstart, tend, tval = target\n if tstart >= mqend or tend <= mqstart: continue\n if tend - tstart < min_length: continue\n\n new = alignlib_lite.py_makeAlignmentBlocks()\n \n if use_copy:\n # do copy with range filter\n if options.loglevel >= 3:\n\n mtstart, mtend = map_query2target.mapColToRow(tstart), map_query2target.mapColToRow(tend) \n E.debug( \"query: %i-%i (len=%i)-> %i-%i(len=%i); target: %i-%i (len=%i)-> %i-%i (len=%i)\" % \\\n (qstart, qend,\n qend - qstart,\n mqstart, mqend,\n mqend - mqstart,\n tstart, tend,\n tend - tstart,\n mtstart, mtend,\n mtend - mtstart ) )\n \n alignlib_lite.py_copyAlignment( \n new, \n map_query2target,\n qstart, qend,\n tstart, tend )\n else:\n # do copy with alignment filter\n map_query = qval\n if map_query:\n tmp = alignlib_lite.py_makeAlignmentBlocks() \n alignlib_lite.py_copyAlignment( tmp, map_query2target, map_query, alignlib_lite.py_RR )\n if options.loglevel >= 5:\n options.stdlog.write( \"######## mapping query ###########\\n\" )\n options.stdlog.write( \"# %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( map_query2target ) ))\n options.stdlog.write( \"# %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( map_query ) ))\n options.stdlog.write( \"# %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( tmp ) ))\n else:\n tmp = map_query2target\n \n map_target = tval\n if map_target:\n new = alignlib_lite.py_makeAlignmentBlocks()\n alignlib_lite.py_copyAlignment( new, tmp, map_target, alignlib_lite.py_CR ) \n if options.loglevel >= 5:\n options.stdlog.write( \"######## mapping target ###########\\n\" )\n options.stdlog.write( \"# before: %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( tmp ) ))\n options.stdlog.write( \"# map : %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( map_target ) ))\n options.stdlog.write( \"# after : %s\\n\" % str(alignlib_lite.py_AlignmentFormatEmissions( new ) ))\n else:\n new = tmp\n\n if options.loglevel >= 4:\n E.debug(\"putative match with intervals: %s and %s: %i-%i\" % \\\n (str(query), str(target), qstart, qend ))\n if options.loglevel >= 5:\n E.debug( \"input : %s\" % str(alignlib_lite.py_AlignmentFormatEmissions( map_query2target ) ))\n E.debug( \"final : %s\" % str(alignlib_lite.py_AlignmentFormatEmissions( new ) ) )\n\n if new.getLength() > 0:\n n = match.copy()\n n.fromMap( new, use_strand = True )\n E.info( \"match : %s\" % (str(n)))\n\n if new.getNumAligned() > options.min_aligned:\n n = match.copy()\n n.fromMap( new, use_strand = True )\n options.stdout.write( str(n) + \"\\n\" )\n noutput += 1\n else:\n ndiscarded += 1\n\n E.info( \"map: ninput=%i, noutput=%i, nskipped=%i, ndiscarded=%i, nsmall_queries=%i\" % \\\n (ninput, noutput, nskipped, ndiscarded, nskipped_small_queries) )", "def beam_search_step(state, logits, eos_id, beam_width, is_first_step, length_penalty):\n _, vocab_size = logits.shape\n\n bsz, beam_width = state.log_probs.shape\n onehot_eos 
= paddle.cast(nn.functional.one_hot(paddle.ones([1], 'int64') * eos_id, vocab_size), 'int64') #[1, V]\n\n probs = paddle.log(nn.functional.softmax(logits)) #[B*W, V]\n probs = mask_prob(probs, onehot_eos, state.finished) #[B*W, V]\n allprobs = paddle.reshape(state.log_probs, [-1, 1]) + probs #[B*W, V]\n\n not_finished = 1 - paddle.reshape(state.finished, [-1, 1]) #[B*W,1]\n not_eos = 1 - onehot_eos\n length_to_add = not_finished * not_eos #[B*W,V]\n alllen = paddle.reshape(state.lengths, [-1, 1]) + length_to_add\n\n allprobs = paddle.reshape(allprobs, [-1, beam_width * vocab_size])\n alllen = paddle.reshape(alllen, [-1, beam_width * vocab_size])\n allscore = hyp_score(allprobs, alllen, length_penalty)\n if is_first_step:\n allscore = paddle.reshape(allscore, [bsz, beam_width, -1])[:, 0, :] # first step only consiter beam 0\n scores, idx = paddle.topk(allscore, k=beam_width) #[B, W]\n next_beam_id = idx // vocab_size #[B, W]\n next_word_id = idx % vocab_size\n\n gather_idx = paddle.concat([paddle.nonzero(idx != -1)[:, :1], paddle.reshape(idx, [-1, 1])], 1)\n next_probs = paddle.reshape(paddle.gather_nd(allprobs, gather_idx), idx.shape)\n next_len = paddle.reshape(paddle.gather_nd(alllen, gather_idx), idx.shape)\n\n gather_idx = paddle.concat([paddle.nonzero(next_beam_id != -1)[:, :1], paddle.reshape(next_beam_id, [-1, 1])], 1)\n next_finished = paddle.reshape(paddle.gather_nd(state.finished, gather_idx),\n state.finished.shape) #[gather new beam state according to new beam id]\n\n next_finished += paddle.cast(next_word_id == eos_id, 'int64')\n next_finished = paddle.cast(next_finished > 0, 'int64')\n\n next_state = BeamSearchState(log_probs=next_probs, lengths=next_len, finished=next_finished)\n output = BeamSearchOutput(scores=scores, predicted_ids=next_word_id, beam_parent_ids=next_beam_id)\n\n return output, next_state", "def calc_doc_ret_MAP():\n \n# docs_norm_scores_dicts_path = linux_base_path+\"/docs_norm_scores_dicts\"\n claim_rel_docno_dict = read_pickle(\"claim_rel_docno_dict\") #key is clm, value is a set of the relevant docno\n# nDCG_MAP_res = base_path +\"\\\\nDCG_MAP_res\\\\\"\n# docs_norms_path = base_path+\"\\\\docs_norm_scores_dicts\\\\\"\n docs_norms_path = linux_base_path+\"/docs_norm_scores_dicts/\"\n nDCG_MAP_res = linux_base_path +\"/nDCG_MAP_res/\"\n \n AP_cut_off = 1000\n k_val = 100\n p = 10\n log = open(\"calc_doc_avg_nDCG_MAP_log_k_top_docs_\"+str(k_val)+\"_at_\"+str(p),\"wb\")\n res_file = open(nDCG_MAP_res+\"doc_ret_nDCG_MAP_res_k_top_docs_\"+str(k_val)+\"_at_\"+str(p),\"wb\")\n# each_params_AVGnDCG_MAP_dict = {} #key is alpha,beta,k_docs,lambda and val is the avg nDCG and MAP across all claims together\n each_params_MAP_dict = {}\n# NDCG_AP_all_claims_all_param_values = {}\n AP_all_claims_all_param_values = {}\n best_avg_nDCG = 0\n best_MAP = 0 #across all claims in a given configuration, find the max measures\n \n# docs_norms_path = base_path+\"\\\\docs_norm_scores_dicts\\\\\"\n claims_dict = read_pickle(\"claim_dict\")\n #count the number of sentences that were retrived that are in the true data....sum for each claim, then average.\n \n \n for alpha in range(0,11,1): #change just for test!\n for beta in range(0,10,1):\n (alpha_f,beta_f) = turn_to_float([alpha,beta])\n NDCG_all_claims= {} #key is a claim, value is the nDCG\n AP_all_claims= {} \n AP_cut_off = 1000\n curr_filename = docs_norms_path+\"clm_key_ranked_list_of_docs_alpha_\"+str(alpha_f)+\"_beta_\"+str(beta_f)\n clm_key_ranked_list_of_docs_baseline = read_pickle(curr_filename) #key is qID and docid\n 
#need to turn it to a list of docs\n for clm in claim_list:\n try:\n# nDCG_score = utils_linux.calc_doc_emp_NDCG(curr_source,str(clm),clm_key_ranked_list_of_docs_baseline[clm],claim_rel_docno_dict[claims_dict[str(clm)]],p)\n# NDCG_all_claims[clm] = nDCG_score\n AP_score = utils_linux.calc_doc_AP_relevance(AP_cut_off,curr_source,clm,clm_key_ranked_list_of_docs_baseline[clm],claim_rel_docno_dict[claims_dict[str(clm)]])\n AP_all_claims[clm] = AP_score\n AP_all_claims_all_param_values[clm,alpha_f,beta_f,k_val] = AP_score\n except Exception as err: \n log.write('problem in calculations: in source: '+ curr_source+' in clm '+ claims_dict[str(clm)]+\" alpha:\"+str(alpha_f)+ \"beta:\"+str(beta_f)+\" \\n\" ) \n for arg in err.args:\n log.write(arg+\" \") \n log.write(\"\\n\") \n# average_NDCG = float(float(sum(NDCG_all_claims.values()))/float(len(NDCG_all_claims))) #across all claims...\n# if average_NDCG > best_avg_nDCG:\n# best_avg_nDCG = average_NDCG\n# best_avg_nDCG_configuration = (alpha_f,beta_f,k_val)\n MAP = float(float(sum(AP_all_claims.values()))/float(len(AP_all_claims)))\n if MAP > best_MAP:\n best_MAP = MAP\n best_MAP_configuration = (alpha_f,beta_f,k_val)\n each_params_MAP_dict[alpha_f,beta_f,k_val] = MAP\n utils_linux.save_pickle(nDCG_MAP_res+\"doc_ret_NDCG_all_claims_alpha_\"+str(alpha_f)+\"_beta_\"+str(beta_f)+\"_top_k_docs_\"+str(k_val)+\"_at_\"+str(p),NDCG_all_claims)\n utils_linux.save_pickle(nDCG_MAP_res+\"doc_ret_AP_all_claims_alpha_\"+str(alpha_f)+\"_beta_\"+str(beta_f)+\"_top_k_docs_\"+str(k_val),AP_all_claims)\n# res_file.write(\"alpha_\"+str(alpha_f)+\"_beta_\"+str(beta_f)+\"_top_k_docs_\"+str(k_val)+\"|\"+\"AnDCG_\"+str(average_NDCG)+\"_MAP_\"+str(MAP)+\"\\n\")\n res_file.write(\"alpha_\"+str(alpha_f)+\"_beta_\"+str(beta_f)+\"_top_k_docs_\"+str(k_val)+\"|_MAP_\"+str(MAP)+\"\\n\")\n save_pickle(nDCG_MAP_res+\"doc_ret_NDCG_AP_all_claims_all_param_values_k_top_docs_\"+str(k_val)+\"_at_\"+str(p),AP_all_claims_all_param_values)\n save_pickle(nDCG_MAP_res+\"doc_ret_each_params_AVGnDCG_MAP_dict_k_top_docs_\"+str(k_val)+\"_at_\"+str(p),each_params_MAP_dict)\n# best_row = \"best_avg_nDCG|\"+str(best_avg_nDCG)+\"|best_avg_nDCG_configuration|\"+str(best_avg_nDCG_configuration[0])+\",\"+str(best_avg_nDCG_configuration[1])+\",\"+str(best_avg_nDCG_configuration[3])+\",\"+str(best_avg_nDCG_configuration[2])+\"|\"\n best_row = \"best_MAP|\" +str(best_MAP)+\"|best_MAP_configuration|\"+str(best_MAP_configuration[0])+\",\"+str(best_MAP_configuration[1])+\",\"+str(best_MAP_configuration[2])\n res_file.write(best_row)\n res_file.close()\n log.close()", "def solvePostNoOverlap(targetNum, defenders, dRewards, dPenalties, dCosts, aTypes, aRewards, aPenalties, q):\n \"\"\"Contains as many dummy targets as defenders, for defenders and attackers\"\"\"\n # Add the extra dummy targets\n _dRewards = copy.deepcopy(dRewards)\n _dPenalties = copy.deepcopy(dPenalties)\n _dCosts = copy.deepcopy(dCosts)\n _aRewards = copy.deepcopy(aRewards)\n _aPenalties = copy.deepcopy(aPenalties)\n for m in defenders:\n for defenderCount in defenders:\n _dRewards[m].append(0)\n _dPenalties[m].append(0)\n _dCosts[m].append(0)\n for lam in aTypes:\n _aRewards[lam].append(0)\n _aPenalties[lam].append(0)\n targetNumWithDummies = len(_dRewards[0])\n targetRange = list(range(targetNumWithDummies))\n attackerActions = targetRange\n # Get the suggestions that occur with no overlap\n overlapPlacements = getPlacements(defenders, targetNumWithDummies)\n placements = list(filter(lambda x: len(set(x)) == len(x), overlapPlacements))\n 
omegaKeys = getOmegaKeys(aTypes, placements, attackerActions)\n\n # Build the model\n model = Model('PrimalWithOverlap')\n w = model.continuous_var_dict(keys=omegaKeys, lb=0, ub=1, name=\"w\")\n objectiveFunction = sum([q[lam] * sum([w[s,a,lam] * defenderSocialUtility(s,a,defenders,_dRewards,_dCosts,_dPenalties) for s in placements for a in attackerActions]) for lam in aTypes])\n c1 = [sum([w[s,a,lam] * aUtility(s,a,lam,_aPenalties,_aRewards) for s in placements]) \\\n >= sum([w[s,a,lam] * aUtility(s,b,lam,_aPenalties,_aRewards) for s in placements])\n for lam in aTypes for a in attackerActions for b in attackerActions if a != b]\n c1 = [constraint for constraint in c1 if not isinstance(constraint, bool)]\n c1 = model.add_constraints(c1)\n c2 = model.add_constraints([sum([q[lam] * sum([w[s,a,lam] * utilityM(d,s,a,m,_dRewards,_dPenalties,_dCosts) for a in attackerActions for s in placements if s[m] == d]) for lam in aTypes]) \\\n >= sum([q[lam] * sum([w[s,a,lam] * utilityM(e,s,a,m,_dRewards,_dPenalties,_dCosts) for a in attackerActions for s in placements if s[m] == d]) for lam in aTypes])\n for m in defenders for d in targetRange for e in targetRange if d!=e])\n c3 = model.add_constraints([sum([w[(s,a,lam)] for s in placements for a in attackerActions]) == 1 for lam in aTypes])\n # Solve the model\n model.maximize(objectiveFunction)\n model.solve()\n # Now that w contains all the outcomes and their probabilities, sum the attacker utilities up.\n utilityPerAttacker = 0\n for k,v in w.items():\n prob = float(v)\n s,a,lam = k\n utilityPerAttacker += aUtility(s,a,lam,_aPenalties,_aRewards) * prob\n utilityPerAttacker /= len(aTypes)\n utilityPerDefender = model.solution.get_objective_value()\n utilityPerDefender /= len(defenders)\n return utilityPerDefender, utilityPerAttacker, None", "def predict_hydrophobic_length( targets ):\n\n\t# Steps of this script\n\t# 1. read all of the targets in as a long list of oriented PDB files\n\t# 2. add the membrane framework\n\t# 3. Backwards compute for t range 0-30 what \n\t# 4. \n\t# 5. 
", "def forward_mbr(self, eouts, elens, ys_ref, recog_params, idx2token):\n bs, xmax, xdim = eouts.size()\n nbest = recog_params.get('recog_beam_width')\n assert nbest >= 2\n assert idx2token is not None\n scaling_factor = 1.0\n training = self.training\n self.eval()\n with torch.no_grad():\n nbest_hyps_id, _, scores = self.beam_search(eouts, elens, params=recog_params, nbest=nbest, exclude_eos=True)\n exp_wer = 0\n nbest_hyps_id_batch = []\n grad_list = []\n for b in range(bs):\n nbest_hyps_id_b = [np.fromiter(y, dtype=np.int64) for y in nbest_hyps_id[b]]\n nbest_hyps_id_batch += nbest_hyps_id_b\n scores_b = np2tensor(np.array(scores[b], dtype=np.float32), eouts.device)\n probs_b_norm = torch.softmax(scaling_factor * scores_b, dim=-1)\n wers_b = np2tensor(np.array([(compute_wer(ref=idx2token(ys_ref[b]).split(' '), hyp=idx2token(nbest_hyps_id_b[n]).split(' '))[0] / 100) for n in range(nbest)], dtype=np.float32), eouts.device)\n exp_wer_b = (probs_b_norm * wers_b).sum()\n grad_list += [(probs_b_norm * (wers_b - exp_wer_b)).sum()]\n exp_wer += exp_wer_b\n exp_wer /= bs\n if training:\n self.train()\n eouts_expand = eouts.unsqueeze(1).expand(-1, nbest, -1, -1).contiguous().view(bs * nbest, xmax, xdim)\n elens_expand = elens.unsqueeze(1).expand(-1, nbest).contiguous().view(bs * nbest)\n ys_in, ys_out, ylens = append_sos_eos(nbest_hyps_id_batch, self.eos, self.eos, self.pad, eouts.device)\n dstates = self.zero_state(bs * nbest)\n cv = eouts.new_zeros(bs * nbest, 1, self.enc_n_units)\n self.score.reset()\n aw = None\n lmout, lmstate = None, None\n ys_emb = self.embed_token_id(ys_in)\n src_mask = make_pad_mask(elens_expand).unsqueeze(1)\n logits = []\n for i in range(ys_in.size(1)):\n if self.lm is not None:\n lmout, lmstate, _ = self.lm.predict(ys_in[:, i:i + 1], lmstate)\n dstates, cv, aw, attn_state, attn_v = self.decode_step(eouts_expand, dstates, cv, ys_emb[:, i:i + 1], src_mask, aw, lmout, mode='parallel')\n logits.append(attn_v)\n if self.attn_type in ['gmm', 'sagmm']:\n aw = attn_state['myu']\n logits = self.output(torch.cat(logits, dim=1))\n log_probs = torch.log_softmax(logits, dim=-1)\n eos = ys_in.new_zeros((1,)).fill_(self.eos)\n nbest_hyps_id_batch_pad = pad_list([torch.cat([np2tensor(y, eouts.device), eos], dim=0) for y in nbest_hyps_id_batch], self.pad)\n grad = eouts.new_zeros(bs * nbest, nbest_hyps_id_batch_pad.size(1), self.vocab)\n for b in range(bs):\n onehot = torch.eye(self.vocab)[nbest_hyps_id_batch_pad[b * nbest:(b + 1) * nbest]]\n grad[b * nbest:(b + 1) * nbest] = grad_list[b] * onehot\n grad = grad.masked_fill_((nbest_hyps_id_batch_pad == self.pad).unsqueeze(2), 0)\n loss_mbr = self.mbr(log_probs, nbest_hyps_id_batch_pad, exp_wer, grad)\n loss_ce = torch.zeros((1,), dtype=torch.float32, device=eouts.device)\n if self.mbr_ce_weight > 0:\n loss_ce = self.forward_att(eouts, elens, ys_ref)[0]\n loss_ce = loss_ce.unsqueeze(0)\n return loss_mbr, loss_ce", "def constrain(self, gamut_map, processed, to_process):\n gamut_map[self._referent] = range_intersect(\n gamut_map.get(self._referent, None),\n self.gamut\n )\n if self._referent not in processed:\n to_process.add(self._referent)", "def mend(aligns_dict, predictions, bound_info):\n wav_names, bound_indices, times = zip(*bound_info)\n print('bound_info length: %d' % len(bound_info))\n print('predictions length: %d' % len(predictions))\n df = pd.DataFrame({'wav_names': wav_names, 'bound_indices': bound_indices,\n 'times': times, 'predictions': predictions})\n bound_dict = load_boundaries(aligns_dict)\n\n bound_count = 0\n 
bound_moved = 0\n move_dist_sum = 0\n\n for (name, idx), group in df[['predictions', 'times']].groupby([wav_names, bound_indices]):\n preds = list(group.iloc[:, 0])\n assert len(preds) == 3\n '''judge three predictions, decide new boundary time and frame distance'''\n old_time, last_phone, next_phone, old_frame_dist = bound_dict[name][idx]\n '''make new boundaries'''\n new_time, new_frame_dist, moved, move_dist = AlignMender.__update_boundary(preds, old_frame_dist, old_time)\n bound_dict[name][idx] = (new_time, last_phone, next_phone, new_frame_dist)\n '''statistic move info'''\n if moved:\n bound_moved += 1\n move_dist_sum += move_dist\n bound_count += 1\n move_dist_mean = move_dist_sum/bound_moved if bound_moved != 0 else 0\n\n '''refresh boundaries of align_dict'''\n new_align_dict = AlignMender.__apply_boundaries(aligns_dict, bound_dict)\n return new_align_dict, bound_dict, bound_count, bound_moved, move_dist_mean", "def recognize_beam(self, h, recog_args, rnnlm=None, target_left_mask=-1):\n beam = recog_args.beam_size\n k_range = min(beam, self.odim)\n nbest = recog_args.nbest\n normscore = recog_args.score_norm_transducer\n\n if rnnlm:\n kept_hyps = [\n {\"score\": 0.0, \"yseq\": [self.blank], \"cache\": None, \"lm_state\": None}\n ]\n else:\n kept_hyps = [{\"score\": 0.0, \"yseq\": [self.blank], \"cache\": None}]\n\n for i, hi in enumerate(h):\n hyps = kept_hyps\n kept_hyps = []\n\n while True:\n new_hyp = max(hyps, key=lambda x: x[\"score\"])\n hyps.remove(new_hyp)\n\n ys = to_device(self, torch.tensor(new_hyp[\"yseq\"]).unsqueeze(0))\n if target_left_mask > -1:\n ys_mask = to_device(\n self, subsequent_mask_limit(len(new_hyp[\"yseq\"]), target_left_mask).unsqueeze(0)\n )\n else:\n ys_mask = to_device(\n self, subsequent_mask(len(new_hyp[\"yseq\"])).unsqueeze(0)\n )\n y, c = self.forward_one_step(ys, ys_mask, new_hyp[\"cache\"])\n\n ytu = torch.log_softmax(self.joint(hi, y[0]), dim=0)\n\n if rnnlm:\n rnnlm_state, rnnlm_scores = rnnlm.predict(\n new_hyp[\"lm_state\"], ys[:, -1]\n )\n\n for k in six.moves.range(self.odim):\n beam_hyp = {\n \"score\": new_hyp[\"score\"] + float(ytu[k]),\n \"yseq\": new_hyp[\"yseq\"][:],\n \"cache\": new_hyp[\"cache\"],\n }\n\n if rnnlm:\n beam_hyp[\"lm_state\"] = new_hyp[\"lm_state\"]\n\n if k == self.blank:\n kept_hyps.append(beam_hyp)\n else:\n beam_hyp[\"yseq\"].append(int(k))\n beam_hyp[\"cache\"] = c\n\n if rnnlm:\n beam_hyp[\"lm_state\"] = rnnlm_state\n beam_hyp[\"score\"] += (\n recog_args.lm_weight * rnnlm_scores[0][k]\n )\n\n hyps.append(beam_hyp)\n\n if len(kept_hyps) >= k_range:\n break\n\n if normscore:\n nbest_hyps = sorted(\n kept_hyps, key=lambda x: x[\"score\"] / len(x[\"yseq\"]), reverse=True\n )[:nbest]\n else:\n nbest_hyps = sorted(kept_hyps, key=lambda x: x[\"score\"], reverse=True)[\n :nbest\n ]\n\n return nbest_hyps", "def experiment_BeamSearch(input_dir, output_fname, train_size, thinking_budget,\n beam_width, children_count, loss_type,\n seed, homogeneous=False, print_vector=False,\n num_cpus=1):\n # load the data\n cover_X, cover_Y, val_X, val_Y, test_X, test_Y, kernel_matrix = \\\n load_dataset(input_dir)\n\n loss_function = loss_01 if loss_type == '01' else loss_logistic\n\n # parameters\n max_K, alpha = 1, 0.05\n learner_params = {'random_state': seed, 'fit_intercept': not homogeneous}\n\n search_alg = BeamSearch(load_lr_learner, fit_lr_learner, learner_params,\n select_instances, cover_X, cover_Y,\n val_X, val_Y, kernel_matrix, max_K, loss_function,\n train_size, beam_width, children_count,\n 
thinking_budget=thinking_budget, alpha=alpha,\n output_fname=output_fname,\n seed=seed, num_cpus=num_cpus)\n results = search_alg.search_optimum_teaching_seq()\n\n print('\\n\\nInput Arguments')\n print('Input Dir: {}'.format(input_dir))\n print('Output File: {}'.format(output_fname))\n print('Size of train set: {}'.format(train_size))\n print('Thinking budget: {}'.format(thinking_budget))\n print('Beam width: {}'.format(beam_width))\n print('Children count: {}'.format(children_count))\n print('Loss Function: {}'.format(loss_type))\n print('Seed: {}'.format(seed))\n print('Homogeneous: {}'.format(homogeneous))\n print('Print Vector: {}'.format(print_vector))\n\n print('\\n\\nOutput')\n print('Length of optimal sequence: {}'.format(len(results['opt_indices'])))\n\n # get the error on after training\n learner = load_lr_learner(learner_params)\n opt_indices = results['opt_indices']\n train_X, train_Y = select_instances(cover_X, cover_Y, opt_indices)\n learner.fit(train_X, train_Y)\n cover_error, val_error, test_error = \\\n 1.0 - learner.score(cover_X, cover_Y), \\\n 1.0 - learner.score(val_X, val_Y), \\\n 1.0 - learner.score(test_X, test_Y)\n print('01 loss: Cover error: {}, Validation error: {}, Test error: {}'.format(cover_error, val_error, test_error))\n cover_error, val_error, test_error = \\\n loss_logistic(learner, cover_X, cover_Y), \\\n loss_logistic(learner, val_X, val_Y), \\\n loss_logistic(learner, test_X, test_Y)\n print('logistic loss: Cover error: {}, Validation error: {}, Test error: {}'.format(cover_error, val_error,\n test_error))\n\n # count positive and negative instances\n pos_count = 0\n for y in train_Y:\n if y == 1:\n pos_count += 1\n print('Pos count: {}, Neg count: {}'.format(pos_count,\n len(train_Y) - pos_count))\n\n # print vectors\n if print_vector:\n if not homogeneous:\n weights = np.zeros(len(learner.coef_[0]) + 1)\n weights[0] = learner.intercept_[0]\n weights[1:] = learner.coef_[0]\n print('Weights: {}'.format(weights))\n else:\n print('Weights: {}'.format(learner.coef_[0]))" ]
[ "0.60872704", "0.580832", "0.57483256", "0.5723475", "0.5720976", "0.5689994", "0.56269765", "0.56127894", "0.5611332", "0.5581564", "0.5544426", "0.55419797", "0.55245256", "0.54159904", "0.5393713", "0.5388387", "0.5363377", "0.5335509", "0.53137904", "0.52877265", "0.52820575", "0.52797556", "0.52691245", "0.5245048", "0.52377456", "0.5234968", "0.52331036", "0.5212582", "0.5206955", "0.51981556" ]
0.68497777
0
Start a Clipper instance.
def start(self):
    with hide("output", "warnings", "running"):
        self._execute_standard("rm -f docker-compose.yml")
        self._execute_append(
            "docker-compose.yml",
            yaml.dump(
                DOCKER_COMPOSE_DICT, default_flow_style=False))
        self._execute_root("docker-compose up -d query_frontend")
        print("Clipper is running")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def start(self):\n self.p.start()", "def start(self):\n control_process = mp.Process(target = self._start, args = [])\n control_process.start()", "def run(self):\n self.process.start()", "def start():\n global running\n # os.system('python3 /Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py')\n global process\n process = Popen(['python3', '/Users/bowenwaugh/Documents/GA/GA_Puzzles/simple.py'])\n running = True", "def start(self):\n self.j_pump.start()\n return self", "def run():\n from ._gui import WaveTool\n WaveTool().run()", "def start_pupil():\n # runs Pupil Capture from source\n path = os.path.abspath(\"../../pupil/pupil_src/capture/main.py\")\n return subprocess.call('python ' + shellformat(path), shell=True)\n \n # if running Pupil Capture using the app, comment the above code and uncomment below:\n '''\n path = os.path.abspath(\"../pupil_capture_0.4.1_mac.app\")\n return subprocess.call('open ' + shellformat(path), shell=True)\n '''", "def Start(self):\n\n\n\n assert not self._process, 'Start() can only be called once'\n self._process = subprocess.Popen(self._args)", "def launch(**kwargs):\n logger.info('launch dream command')\n launch_gui()", "def start_client(self):\n if self.client is not None:\n return\n\n # Arguments for the client\n browser = self.vim.vars.get('markdown_composer_browser')\n open_browser = (\n self.vim.vars.get('markdown_composer_open_browser', 1) == 1)\n syntax_theme = self.vim.vars.get('markdown_composer_syntax_theme')\n current_buffer = '\\n'.join(self.vim.current.buffer)\n\n plugin_root = Path(__file__).parents[3]\n args = ['cargo', 'run', '--release', '--']\n if browser:\n args.append('--browser=%s' % browser)\n\n if not open_browser:\n args.append('--no-browser')\n\n if syntax_theme:\n args.append('--highlight-theme=%s' % syntax_theme)\n\n args.append('--working-directory=%s' % os.getcwd())\n\n if os.path.isfile(self.vim.current.buffer.name):\n args.append(self.vim.current.buffer.name)\n\n self.client = subprocess.Popen(args,\n bufsize=0,\n cwd=str(plugin_root),\n stdout=subprocess.PIPE,\n stdin=subprocess.PIPE)", "def make_clips(self):\n print('starting to make clips!')\n #TODO parallelize this with multiprocessing\n clip_number = 1\n \n for requested_clip in self.requested_clips:\n streamer_output_file_location = output_file_location + requested_clip['source_clip'] + '/'\n streamer_clips_output_file_location = streamer_output_file_location + 'clips/'\n\n print('opening file ' + streamer_output_file_location+requested_clip['source_clip']+'.mkv')\n entire_stream_clip = VideoFileClip(streamer_output_file_location+requested_clip['source_clip']+'.mkv')\n print('requested time: ' + str(requested_clip['start_time'].total_seconds()))\n print('requested end time: ' + str(requested_clip['end_time'].total_seconds()))\n print('clip duration:'+ str(entire_stream_clip.duration))\n clip = None\n if(requested_clip['end_time'].total_seconds()>entire_stream_clip.duration):\n #longer time than clip specified, use end of clip as end time\n clip = entire_stream_clip.subclip(requested_clip['start_time'].total_seconds(),entire_stream_clip.duration)\n else:\n clip = entire_stream_clip.subclip(requested_clip['start_time'].total_seconds(),requested_clip['end_time'].total_seconds())\n \n if not os.path.exists(streamer_clips_output_file_location):\n print('No directory found for given streamer, making new dir...')\n os.makedirs(streamer_clips_output_file_location)\n print(\"now rendering clip \" + self.requested_clips[0]['source_clip']+str(clip_number)+'.mp4 out of ' + 
str(len(self.requested_clips)))\n clip.write_videofile(streamer_clips_output_file_location + str(clip_number)+'.mp4')\n clip_number = clip_number + 1", "def __run(self):\n # init snake show\n self.__init_snake()\n self.__introduction.hide()\n # start ticktock for snake moving\n self.__ticker.start()\n # enable key press\n self.__enable_key = True", "def start_box(self, autorun=True, interactive=False):\n if len(self.sources) == 0:\n print(\"No Sources Attached. Exiting...\")\n return\n\n self.start_hooks.fire(StartEvent())\n if interactive:\n self.start_interactive()\n elif autorun:\n self.start_auto()", "def spawn(cls, *args, **kwargs):\n g = cls(*args, **kwargs)\n g.start()\n return g", "def start (self):\n pass", "def start (self):\n pass", "def start():", "def start():", "def start():", "def start():", "def activate(self):\n self.start()", "def run(self):\n self.started()", "def start(self, _=False):\n if not self._stop:\n self._current_execution += 1\n flags = self.flags\n if '--write' not in flags:\n flags.extend(['--write', self.writepath])\n if '--output-format' not in flags:\n flags.extend(['--output-format', 'csv'])\n line = [\"airodump-ng\"] + flags + self.arguments + [self.interface]\n self._proc = Popen(line, bufsize=0,\n env={'PATH': os.environ['PATH']},\n stderr=DEVNULL, stdin=DEVNULL, stdout=DEVNULL)\n os.system('stty sane')\n\n time.sleep(5)\n watcher = threading.Thread(target=self.watch_process)\n watcher.start()", "def start(self) -> None:\n ...", "def start(self) -> None:\n ...", "def start(self):\n ...", "def start(self):\n self.active = True", "def start(self):\n pass", "def start(self):\n pass", "def start(self):\n pass" ]
[ "0.6104004", "0.5790243", "0.56691045", "0.5641376", "0.5600039", "0.5569598", "0.5557139", "0.5544228", "0.5495919", "0.5495538", "0.5490122", "0.5458819", "0.54459316", "0.5409652", "0.5348426", "0.5348426", "0.53407854", "0.53407854", "0.53407854", "0.53407854", "0.531293", "0.53100634", "0.5309564", "0.5307759", "0.5307759", "0.5288295", "0.52801186", "0.5275258", "0.5275258", "0.5275258" ]
0.6762592
0
List the names of all applications registered with Clipper.

Returns
-------
str
    The string describing each registered application. If no applications are found, an empty string is returned.
def list_apps(self): with hide("output", "running"): result = local(("redis-cli -h {host} -p 6379 -n {db} keys \"*\"" .format(host=self.host, db=REDIS_APPLICATION_DB_NUM)), capture=True) if len(result.stdout) > 0: return result.stdout else: print("Clipper has no applications registered") return ""
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_apps(self) -> list:\n apps = self.app.list_apps()\n app_list = [app[\"title\"] for app in apps]\n return app_list", "def ls():\n cfgmgr = ConfigManager()\n apps = cfgmgr['apps']\n for i in apps:\n print(fc(\"- {g}{appname}{rst}\", appname=i))", "def get_app_list(self):\n return self.get_setting('applications', 'installed_apps')", "def listapps(self):\n return jsoncall.do_call(\"listapps\", {'modelname':self.modelname,\\\n 'user':self.user,\\\n 'password':self.password},\n self.connection)", "def get_apps(self) -> List[str]:\n return list(self.config[\"apps\"].keys())", "async def app_list(self) -> List[interface.App]:\n return await self.relay(\"app_list\")()", "def app_names(self):\n return self.get_app_names()", "def applications(self):\n return [self.app] + self.mounts.values()", "def RApps(self):\n\t\treturn self.acad.ActiveDocument.RegisteredApplications", "def RAppNames(self):\n\t\tnames=[]\n\t\tfor item in range(self.rApps.Count):\n\t\t\tnames.append(self.rApps.Item(item).Name)\n\t\treturn names", "def get_app_list(self):\n\n return self._get().keys()", "def list(ctx):\n # pylint: disable=redefined-builtin\n _list_apps(ctx.obj['config'], ctx.obj['client'])", "def get_all_apps(self):\n return list(self.apps.values())", "def applications():\n storeapps = APP.config[\"storage\"]\n base_url = request.host_url + \"application/\"\n\n response = {\"applications\": []}\n for application in nativeapps.io.ls(storeapps, r\".*\\.(apk|ipa)$\"):\n tokens = application.decode(\"utf-8\").split(os.path.sep)\n directory = tokens[-2]\n name, version = os.path.basename(directory).split(\"-\", 1)\n meta_path = os.path.join(os.path.dirname(application), \"metadata.json\")\n\n link = base_url + \"/\".join(tokens[-3:])\n if application.endswith(\".ipa\"):\n link = \"itms-services://?action=download-manifest&url=\" + \\\n base_url + \"/\".join(tokens[-3:-1]) + \"/\" + \"manifest.plist\"\n\n response[\"applications\"].append({\n \"url\": base_url + \"/\".join(tokens[-3:]),\n \"name\": name,\n \"version\": version,\n \"metadata\": nativeapps.io.readfile(meta_path),\n \"link\": link,\n \"type\": application.split(\".\")[-1],\n })\n return flask.jsonify(response)", "def applications(self):\r\n return applications.Applications(self)", "def all_registered_appnames():\n yield from sorted(Registry.monomers.keys())", "def apps(self):\n return list(self.ctx.keys())", "def app_list(self, third_only=False):\n return self.adb.app_list(third_only)", "def list_apps(self, ns_name):\n\n return self.helm_client.list(namespace=ns_name)", "def ListApps(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def get_applications(rest, sessionsArg, option):\n applications = []\n if option == 'heartbeat':\n appsString = rest.get_environment_applications(sessionsArg).strip();\n else:\n appsString = rest.get_all_applications().strip();\n rawList = appsString.split('\\n<\\n')\n for raw in rawList:\n if printtrace: print '_' * 20\n if applicationdataok(raw):\n attributes = [a.split(': ')[1] for a in raw.split('\\n')]\n if printtrace: print attributes\n\n a = Application()\n a.sessionId = attributes[0]\n a.nameInEnvironmentView = attributes[1]\n a.fileName = attributes[2]\n a.processString = attributes[3]\n a.discoveryChecks = attributes[4:]\n a.isgeneric = a.nameInEnvironmentView == 'generic application' or a.fileName.find('generic-application') > 0\n if not a.isgeneric:\n applications.append(a)\n return applications", "def dock_app_list(data):\n apps = []\n count = data['extra_dock'] + 1\n 
for i in range(count):\n name = data['app_name_%s' % str(i)]\n path = data['app_path_%s' % str(i)]\n if name not in [None, '']:\n apps.append({'name': name, 'path': path})\n return apps", "def get_applications():\n applications = []\n with codecs.open('input.txt', 'r', encoding='UTF-8') as file:\n for string in file.readlines():\n applications.append(string.strip())\n\n return applications", "def get_apps(self):\n return self.apps", "def selected_applications(self) -> Optional[pulumi.Input['NamespacedNamesArgs']]:\n return pulumi.get(self, \"selected_applications\")", "def selected_applications(self) -> Optional[pulumi.Input['NamespacedNamesArgs']]:\n return pulumi.get(self, \"selected_applications\")", "def installed_appnames():\n appnames = set()\n for finder in sys.meta_path:\n if hasattr(finder, 'appname'):\n appnames.add(finder.appname)\n return appnames", "def applebooks(self) -> str:\n\n return self._get_via_app_bundle(path=\"/System/Applications/Books.app\")", "def list_programs():\n return list(INFO)", "def _generateApplicationName(self, obj, **args):\n result = []\n try:\n result.append(obj.getApplication().name)\n except:\n pass\n return result" ]
[ "0.7535104", "0.7332687", "0.72032297", "0.7056077", "0.7032159", "0.7025359", "0.6980595", "0.6800879", "0.67648196", "0.6708339", "0.6688796", "0.6655295", "0.66490656", "0.66431016", "0.6637069", "0.6617001", "0.6543629", "0.6511506", "0.6457436", "0.64264077", "0.64050114", "0.6368894", "0.6359245", "0.63313985", "0.6316343", "0.6316343", "0.6293679", "0.6292658", "0.6286793", "0.62613416" ]
0.8137001
0
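
As a usage sketch for the record above: `list_apps` shells out to `redis-cli` and returns the raw key listing. The same lookup can be written against the `redis` Python client — a substitution on my part, not the sample's approach — and the application DB number is not visible in this dump, so the value below is a placeholder.

```python
import redis

REDIS_APPLICATION_DB_NUM = 1  # placeholder; the dump only shows the constant's name

def list_apps(host: str) -> str:
    """Return one registered application name per line, or '' if none."""
    r = redis.Redis(host=host, port=6379, db=REDIS_APPLICATION_DB_NUM)
    names = [k.decode() for k in r.keys("*")]  # mirrors `redis-cli ... keys "*"`
    if names:
        return "\n".join(names)
    print("Clipper has no applications registered")
    return ""
```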
Gets detailed information about a registered application.
def get_app_info(self, name): with hide("output", "running"): result = local("redis-cli -h {host} -p 6379 -n {db} hgetall {name}".format( host=self.host, name=name, db=REDIS_APPLICATION_DB_NUM), capture=True) if len(result.stdout) > 0: splits = result.stdout.split("\n") fmt_result = dict([(splits[i], splits[i+1]) for i in range(0, len(splits), 2)]) pp = pprint.PrettyPrinter(indent=2) pp.pprint(fmt_result) return fmt_result else: warn("Application \"%s\" not found" % name) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getAppInfo(self):\n data = self._client.Application.find(self.app_id)\n return data", "def android_app_info(self) -> 'outputs.AndroidAppInfoResponse':\n return pulumi.get(self, \"android_app_info\")", "def get_app_details(self, app_id):\n app_data = AppData.objects.get(uid=app_id)\n return model_to_dict(app_data)", "def ios_app_info(self) -> 'outputs.IosAppInfoResponse':\n return pulumi.get(self, \"ios_app_info\")", "def get(self):\n app_info = {\n 'developedBy': 'This app was developed by the Melbourne eResearch Group (www.eresearch.unimelb.edu.au) within the School of Computing and Information Systems (https://cis.unimelb.edu.au) at The University of Melbourne (www.unimelb.edu.au). ',\n 'description': 'The app uses artificial intelligence (convolutional neural networks) to identify the age, gender, and emotion of the people.',\n 'contact': 'https://eresearch.unimelb.edu.au',\n 'developedByHTML': '<p>This app was developed by the Melbourne eResearch Group (<a href=\\\"www.eresearch.unimelb.edu.au\\\" target=\\\"_blank\\\">www.eresearch.unimelb.edu.au</a>) within the School of Computing and Information Systems (<a href=\\\"https://cis.unimelb.edu.au\\\" target=\\\"_blank\\\">https://cis.unimelb.edu.au</a>) at The University of Melbourne (<a href=\\\"www.unimelb.edu.au\\\" target=\\\"_blank\\\">www.unimelb.edu.au</a>).</p>',\n 'descriptionHTML': '<p>The app uses artificial intelligence (convolutional neural networks) to identify the age, gender, and emotion of the people.</p>',\n 'contactHTML': '<p>Please contact us at: <a href=\\\"eresearch.unimelb.edu.au\\\" target=\\\"_blank\\\">eresearch.unimelb.edu.au</a></p>'\n }\n\n return send_json_response(app_info, 200)", "def applicationsdetails():\n appdicts = db.hgetall('applications')\n finaldict = OrderedDict()\n for appname in sorted(appdicts):\n instances = json.loads(appdicts.get(appname))\n instance_map = OrderedDict()\n for key in sorted(instances):\n instance_map.__setitem__(key,instances.get(key))\n finaldict.__setitem__(appname,instance_map)\n return render_template('robots.html', appdicts=finaldict)", "def process_app_info(self):\n pass", "def _fetch_app_info(app_id):\n try:\n assert len(app_id), \"Empty string\"\n lookup_url = \"https://itunes.apple.com/lookup?id=\"\n target_url = lookup_url + app_id\n if sys.version_info < (3, 5):\n response = urllib2.urlopen(target_url)\n else:\n response = urllib.request.urlopen(target_url)\n data = response.read() # a `bytes` object\n text = data.decode('utf-8')\n app_info = json.loads(text)\n return app_info\n except AssertionError as e:\n print(e)\n sys.exit(\"Exit script with error code %s\" % e)\n except urllib2.URLError as e:\n print(e)\n sys.exit(\"Exit script with error code %s\" % e)\n except urllib.error.URLError as e:\n print(e)\n sys.exit(\"Exit script with error code %s\" % e)\n except urllib2.HTTPError as e:\n print(e)\n sys.exit(\"Exit script with error code %s\" % e)\n\n except:\n e = sys.exc_info()[0]\n print(\"Error: %s\" % e)\n sys.exit(\"Exit script with error code %s\" % e)", "def get_app_info(app_list, info_list):\n\n app_names = [app.__name__ for app in app_list]\n for app in info_list:\n if app in app_names:\n class_obj = next(i for i in app_list if i.__name__ == app)\n print(app)\n print(' {}'.format(class_obj.__doc__))\n print(' setup args: {}'.format(ARGS.get(app)))\n print(' setup kwargs: {}'.format(KWARGS.get(app)))\n print('')\n\n else:\n print('App {} does not exist'.format(app.__name__))", "async def get_app(self, app_id: str) -> dict:\r\n return await 
self.get(API_APP.format(app_id=app_id))", "def _get_app_info(self):\n info_plist = None\n\n for data in self.filelist:\n if re.match(self.info_plist_regex, data.filename):\n info_plist = data\n\n if not info_plist:\n self._raise_ipa_error()\n\n info_plist = self.read(info_plist)\n self.app_info = readPlistFromString(info_plist)\n\n return self.app_info", "def _get_app_info_Primary(self):\n return self._Primary_app_info", "def show(ctx, appeui):\n if '.' in appeui:\n appeui = str(hexStringInt(str(appeui)))\n \n # Form the url and payload\n server = ctx.obj['server']\n payload = {'token': ctx.obj['token']}\n url = 'http://{}/api/v{}'.format(server, str(version))\n url += '/apps' if appeui == 'all' else '/app/{}'.format(appeui)\n \n # Make the request\n data = restRequest(server, url, 'get', payload, 200)\n if data is None:\n return\n \n # Single application\n if appeui != 'all':\n a = data\n indent = ' ' * 10\n if a['appinterface_id'] == 0:\n a['appinterface_id'] = '-'\n if a['domain'] is None:\n a['domain'] = '-'\n click.echo('Application EUI: ' + euiString(a['appeui']))\n click.echo('{}name: {}'.format(indent, a['name']))\n click.echo('{}domain: {}'.format(indent, a['domain']))\n click.echo('{}fport: {}'.format(indent, a['fport']))\n click.echo('{}interface: {}'.format(indent, a['appinterface_id']))\n if a['appinterface_id'] != '-':\n click.echo('{}Properties:'.format(indent))\n properties = sorted(a['properties'].values(), key=lambda k: k['port'])\n for p in properties:\n click.echo('{} {} {}:{}'.format(indent, p['port'], p['name'], p['type']))\n return\n \n # All applications\n click.echo('{:14}'.format('Application') + \\\n '{:24}'.format('AppEUI') + \\\n '{:15}'.format('Domain') + \\\n '{:6}'.format('Fport') + \\\n '{:10}'.format('Interface'))\n for i,a in data.iteritems():\n if a['appinterface_id'] == 0:\n a['appinterface_id'] = '-'\n if a['domain'] is None:\n a['domain'] = '-'\n click.echo('{:13.13}'.format(a['name']) + ' ' + \\\n '{:23}'.format(euiString(a['appeui'])) + ' ' + \\\n '{:14.14}'.format(a['domain']) + ' ' + \\\n '{:5.5}'.format(str(a['fport'])) + ' ' + \\\n '{:10}'.format(str(a['appinterface_id'])))", "def _app_info(self):\n redirect_url = parse.urlparse(self._redirect_url())\n if re.search(\"okta\", redirect_url.hostname):\n app_info = re.match(\n r\"^\\/app\\/(\\w+)\\/(\\w+)\\/sso/saml$\",\n redirect_url.path\n )\n return app_info.groups(0)", "def info ():\n\n info = {\n 'name' : app.config['APPLICATION_NAME'],\n 'short_name' : app.config['APPLICATION_SHORT_NAME'],\n 'main_page_url' : app.config['APPLICATION_MAIN_URL'],\n # 'css_url' : app.config.get ('APPLICATION_CSS_URL', ''),\n 'css' : 'span.smalltext { font-size: smaller }',\n 'supported_langs_query' : [ LANG ],\n }\n return make_json_response (info)", "def print_app_data(self):\n print(\"===================================\")\n print(\"== RESULTS: ==\")\n print(\"===================================\")\n\n # Analog application results\n print(\"--------------------------\")\n print(\"-- Analog applications --\")\n print(\"--------------------------\")\n print(\"Number of analog application processed: {}\".format(len(self.analog_apps)))\n if (self.verbose):\n for app in self.analog_apps:\n print(\" Application data:\")\n print(\" - - - - - - - - - - - - -\")\n print(' - EPICS PREFIX: MPLN:{}:{}:{}'.format(app[\"link_node_area\"].upper(), app[\"link_node_location\"].upper(), app[\"card_index\"]))\n print(\" - App ID : {}\".format(app[\"app_id\"]))\n print(\" - Cpu name : {}\".format(app[\"cpu_name\"]))\n print(\" - 
Crate ID : {}\".format(app[\"crate_id\"]))\n print(\" - Slot number : {}\".format(app[\"slot_number\"]))\n print(\" - Link node name : {}\".format(app[\"link_node_name\"]))\n print(\" - Link node area : {}\".format(app[\"link_node_area\"]))\n print(\" - Link node location : {}\".format(app[\"link_node_location\"]))\n print(\" - Card index : {}\".format(app[\"card_index\"]))\n print(\" - Number of devices : {}\".format(len(app[\"devices\"])))\n for device in app[\"devices\"]:\n print(\" Device data:\")\n print(\" .....................\")\n print(\" - EPICS PREFIX: {}:{}:{}\".format(device[\"type_name\"], device[\"area\"], device[\"position\"]))\n print(\" - Type name : {}\".format(device[\"type_name\"]))\n print(\" - Bay number : {}\".format(device[\"bay_number\"]))\n print(\" - Channel number : {}\".format(device[\"channel_number\"]))\n print(\" - Area : {}\".format(device[\"area\"]))\n print(\" - Position : {}\".format(device[\"position\"]))\n print(\" - Number of faults : {}\".format(len(device[\"faults\"])))\n for fault_id,fault_data in device[\"faults\"].items():\n print(\" Fault data:\")\n print(\" . . . . . . . . . . . . \")\n print(\" - EPICS PREFIX: {}_T{}\".format(fault_data[\"name\"], fault_data[\"bit_positions\"][0]))\n print(\" - ID : {}\".format(fault_id))\n print(\" - Name : {}\".format(fault_data[\"name\"]))\n print(\" - Description : {}\".format(fault_data[\"description\"]))\n print(\" - Bit positions : {}\".format(fault_data[\"bit_positions\"]))\n print(\" . . . . . . . . . . . . \")\n print(\" .....................\")\n print(\" - - - - - - - - - - - - -\")\n print(\"\")\n print(\"--------------------------\")\n\n # Digital application result\n print(\"----------------------------\")\n print(\"-- Digital applications --\")\n print(\"----------------------------\")\n print(\"Number of digital application processed: {}\".format(len(self.digital_apps)))\n if (self.verbose):\n for app in self.digital_apps:\n print(\" Application data:\")\n print(\" - - - - - - - - - - - - -\")\n print(' - EPICS PREFIX: MPLN:{}:{}:{}'.format(app[\"link_node_area\"].upper(), app[\"link_node_location\"].upper(), app[\"card_index\"]))\n print(\" - App ID : {}\".format(app[\"app_id\"]))\n print(\" - Cpu name : {}\".format(app[\"cpu_name\"]))\n print(\" - Crate ID : {}\".format(app[\"crate_id\"]))\n print(\" - Slot number : {}\".format(app[\"slot_number\"]))\n print(\" - Link node name : {}\".format(app[\"link_node_name\"]))\n print(\" - Link node area : {}\".format(app[\"link_node_area\"]))\n print(\" - Link node location : {}\".format(app[\"link_node_location\"]))\n print(\" - Card index : {}\".format(app[\"card_index\"]))\n print(\" - Number of devices : {}\".format(len(app[\"devices\"])))\n for device in app[\"devices\"]:\n print(\" Device data:\")\n print(\" .....................\")\n print(\" - EPICS PREFIX: {}:{}:{}\".format(device[\"type_name\"], device[\"area\"], device[\"position\"]))\n print(\" - Type name : {}\".format(device[\"type_name\"]))\n print(\" - Area : {}\".format(device[\"area\"]))\n print(\" - Position : {}\".format(device[\"position\"]))\n print(\" - Number of inputs : {}\".format(len(device[\"inputs\"])))\n for input in device[\"inputs\"]:\n print(\" Input data:\")\n print(\" . . . . . . . . . . . . 
\")\n print(\" - EPICS PREFIX: {}\".format(input[\"name\"]))\n print(\" - Name : {}\".format(input[\"name\"]))\n print(\" - Bit position : {}\".format(input[\"bit_position\"]))\n print(\" - Zero name : {}\".format(input[\"zero_name\"]))\n print(\" - One name : {}\".format(input[\"one_name\"]))\n print(\" - Alarm state : {}\".format(input[\"alarm_state\"]))\n print(\" - Debounce : {}\".format(input[\"debounce\"]))\n print(\" . . . . . . . . . . . . \")\n print(\" .....................\")\n print(\" - - - - - - - - - - - - -\")\n print(\"\")\n print(\"----------------------------\")\n\n\n print(\"===================================\")\n\n print('Found {} link nodes:'.format(len(self.link_nodes)))\n for k,v in self.link_nodes.items():\n print('{}: {}'.format(k, v['type']))", "def _get_app_info_Secondary(self):\n return self._Secondary_app_info", "def ios_app_info(self) -> Optional[pulumi.Input['IosAppInfoArgs']]:\n return pulumi.get(self, \"ios_app_info\")", "def app_details(request, object_id):\n app = Application.objects.get(pk=object_id)\n app_class = BOOTSTRAP_LABEL.get(app.app_status.all()[0].name, '') # all()[0] for bogus M2M\n rels = Application.objects.filter(acronym=app.acronym).values('id', 'release', 'app_status__name').order_by('release').distinct() # worthless 'distinct'\n releases = []\n # Is there a away to do this to 'rels' in place, or with a comprehension?\n for rel in rels:\n rel.update({'app_class': BOOTSTRAP_LABEL.get(rel.pop('app_status__name'))})\n releases.append(rel)\n return render_to_response('application/application_details.html',\n {'app': app,\n 'app_class': app_class,\n 'releases': releases,\n 'bootstrap_label': BOOTSTRAP_LABEL,\n 'search_suggestions': _search_suggestions(),\n },\n context_instance=RequestContext(request));", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def apps_information(self):\n with open(self.app_data_path, 'r') as app_csv_file:\n csv_reader = csv.reader(app_csv_file)\n apps = [self.AppInformation(app[0], app[1], app[2], app[3], app[4], app[5]) for app in csv_reader]\n return apps", "def AppGetApp(self, request, context):\n context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)", "def get(self):\n apps = Application.objects()\n\n # TODO return more information\n apps_clean = []\n for app in apps:\n # don't include invalid apps\n if app[\"validated\"] is True:\n apps_clean.append(\n {\"name\": app[\"name\"]}\n )\n\n return apps_clean, 200", "def get_app(self, app_id):\n return req(self.logger, self.access_token, 'GET', '/apps/'+app_id, {})", "def android_app_info(self) -> Optional[pulumi.Input['AndroidAppInfoArgs']]:\n return pulumi.get(self, \"android_app_info\")", "def info(self):\n resp = requests.get(\"%s/api/info\"%self.urlbase, verify=False)\n return resp.json", "def get_application_info( tree ):\n application_name = None\n # most machines store the machine name string in the tag 'ApplicationName'\n for application_name in tree.getroot().iter( 'ApplicationName' ):\n application_name = application_name.text\n break\n # NovaSeq stores the machine name string in the tag 'Application'\n if( application_name == None ):\n for application_name in tree.getroot().iter( 'Application' ):\n application_name = application_name.text\n break\n if( application_name == None ):\n raise ValueError( 'Unable to find Application* element in BCL RunParameters.xml' )\n\n application_version = None\n for application_version in tree.getroot().iter( 'ApplicationVersion' ):\n application_version = application_version.text\n break\n if( 
application_version == None ):\n raise ValueError( 'ApplicationVersion element missing in BCL RunParameters.xml' )\n\n re_models = '|'.join( application_name_dict.keys() )\n re_pattern = '(%s)' % re_models\n mobj = re.match( re_pattern, application_name )\n if( mobj == None ):\n raise ValueError( 'unrecognized ApplicationName in RunParameters.xml file' )\n instrument_model = application_name_dict[mobj.group( 1 )]\n\n # Distinguish between HiSeq models 3000 and 4000 using Andrew's(?) method.\n # Note: the p5 index orientations differ between these two models.\n if( instrument_model == 'HiSeq' ):\n application_major_version = int(application_version.split('.')[0])\n if application_major_version > 2:\n instrument_model = 'HiSeq4000'\n else:\n instrument_model = 'HiSeq3000'\n\n return( instrument_model, application_version )", "def info(self):\n return self._fetch_json('/api/info')", "def getApp(self):\n return self.serviceClass.app", "def AppGetApp(self, request, timeout, metadata=None, with_call=False, protocol_options=None):\n raise NotImplementedError()" ]
[ "0.7732737", "0.6940945", "0.6844895", "0.67889345", "0.6697233", "0.664308", "0.66361076", "0.6622556", "0.66064054", "0.66047084", "0.6589803", "0.65076977", "0.64865416", "0.6436909", "0.6332687", "0.62825936", "0.6240869", "0.6210926", "0.6195001", "0.61817044", "0.6177552", "0.61709976", "0.61695606", "0.61254287", "0.6124704", "0.61066836", "0.60885453", "0.60671216", "0.60521764", "0.60316926" ]
0.7246406
1
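
The document above relies on one parsing trick worth isolating: `redis-cli hgetall` prints field and value on alternating lines, and the dict is rebuilt by pairing even- and odd-indexed lines. A minimal standalone version of that step (the example fields are illustrative, not taken from the sample):

```python
def parse_hgetall(stdout: str) -> dict:
    """Pair alternating field/value lines from `redis-cli hgetall` output.

    Assumes well-formed output, i.e. an even number of non-empty lines.
    """
    if not stdout.strip():
        return {}
    lines = [line.strip() for line in stdout.strip().split("\n")]
    return {lines[i]: lines[i + 1] for i in range(0, len(lines), 2)}

# parse_hgetall("input_type\ndoubles\npolicy\ndefault_policy")
# -> {'input_type': 'doubles', 'policy': 'default_policy'}
```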
Create a new container for an existing model. Starts a new container for a model that has already been added to Clipper. Note that models are uniquely identified by both name and version, so this method will fail if you have not already called `Clipper.add_model()` for the specified name and version.
def add_container(self, model_name, model_version): with hide("warnings", "output", "running"): # Look up model info in Redis model_key = "{mn}:{mv}".format(mn=model_name, mv=model_version) result = local("redis-cli -h {host} -p 6379 -n {db} hgetall {key}".format( host=self.host, key=model_key, db=REDIS_MODEL_DB_NUM), capture=True) if "nil" in result.stdout: # Model not found warn( "Trying to add container but model {mn}:{mv} not in " "Redis".format( mn=model_name, mv=model_version)) return False splits = result.stdout.split("\n") model_metadata = dict([(splits[i].strip(), splits[i + 1].strip()) for i in range(0, len(splits), 2)]) image_name = model_metadata["container_name"] model_data_path = model_metadata["model_data_path"] model_input_type = model_metadata["input_type"] # Start container add_container_cmd = ( "docker run -d --network={nw} -v {path}:/model:ro " "-e \"CLIPPER_MODEL_NAME={mn}\" -e \"CLIPPER_MODEL_VERSION={mv}\" " "-e \"CLIPPER_IP=query_frontend\" -e \"CLIPPER_INPUT_TYPE={mip}\" " "{image}".format( path=model_data_path, nw=DOCKER_NW, image=image_name, mn=model_name, mv=model_version, mip=model_input_type)) result = self._execute_root(add_container_cmd) return result.return_code == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_model(ModelName=None, PrimaryContainer=None, Containers=None, ExecutionRoleArn=None, Tags=None, VpcConfig=None, EnableNetworkIsolation=None):\n pass", "def createNewModel(self, modelName):\n try:\n storage = FileSystemStorage(join(settings.MEDIA_ROOT, 'models'))\n\n folderSufix = 1\n new_model_name = modelName\n while storage.exists(join(storage.base_location, new_model_name)):\n folderSufix += 1\n new_model_name = f'{modelName}_{folderSufix}'\n\n folder_path = join(storage.base_location, new_model_name)\n model_file = join(folder_path, f'{new_model_name}.ppl')\n\n if not storage.exists(folder_path):\n os.mkdir(folder_path)\n\n calcEngine = CalcEngine.factory(self.client_session)\n if calcEngine.createNewModel(model_file, new_model_name):\n self.closeModel()\n return self.openModel(join(storage.base_location, new_model_name, f'{new_model_name}.ppl'))\n except Exception as ex:\n raise ex", "def container(name, ostemplate, **kwargs):\n if not openvz.exists(name):\n ctid = openvz.get_available_ctid()\n openvz.create(ctid, ostemplate=ostemplate, **kwargs)\n openvz.set(ctid, name=name)\n return Container(name)", "def add_instance(self,name):\n new = self.create_instance(name)\n self.model.append(new)\n return new", "def build(model_name):\n return pretrain.factory.create(model_name)", "def create(self):\n\n if rs.IsLayer(self.name):\n\n return self\n\n mom = \"\"\n \n for s in self.path:\n \n son = s if (mom == \"\") else (mom + \"::\" + s)\n\n mommy = None if mom == \"\" else mom\n\n if not rs.IsLayer(son):\n\n rs.AddLayer(s, color = None, visible = True, locked = False, parent = mommy)\n\n mom = son\n \n return self", "def deploy_model(self, name, version, model_data, container_name, labels, input_type, num_containers=1):\n with hide(\"warnings\", \"output\", \"running\"):\n if isinstance(model_data, base.BaseEstimator):\n fname = name.replace(\"/\", \"_\")\n pkl_path = '/tmp/%s/%s.pkl' % (fname, fname)\n model_data_path = \"/tmp/%s\" % fname\n try:\n os.mkdir(model_data_path)\n except OSError:\n pass\n joblib.dump(model_data, pkl_path)\n elif isinstance(model_data, str):\n # assume that model_data is a path to the serialized model\n model_data_path = model_data\n else:\n warn(\"%s is invalid model format\" % str(type(model)))\n return False\n\n if (not self._put_container_on_host(container_name)):\n return False\n\n # Put model parameter data on host\n vol = \"{model_repo}/{name}/{version}\".format(\n model_repo=MODEL_REPO, name=name, version=version)\n with hide(\"warnings\", \"output\", \"running\"):\n self._execute_standard(\"mkdir -p {vol}\".format(vol=vol))\n\n with cd(vol):\n with hide(\"warnings\", \"output\", \"running\"):\n if model_data_path.startswith(\"s3://\"):\n with hide(\"warnings\", \"output\", \"running\"):\n aws_cli_installed = self._execute_standard(\n \"dpkg-query -Wf'${db:Status-abbrev}' awscli 2>/dev/null | grep -q '^i'\",\n warn_only=True).return_code == 0\n if not aws_cli_installed:\n self._execute_root(\"apt-get update -qq\")\n self._execute_root(\"apt-get install -yqq awscli\")\n if self._execute_root(\n \"stat ~/.aws/config\",\n warn_only=True).return_code != 0:\n self._execute_standard(\"mkdir -p ~/.aws\")\n self._execute_append(\"~/.aws/config\", aws_cli_config.format(\n access_key=os.environ[\"AWS_ACCESS_KEY_ID\"],\n secret_key=os.environ[\"AWS_SECRET_ACCESS_KEY\"]))\n\n self._execute_standard(\"aws s3 cp {model_data_path} {dl_path} --recursive\".format(\n model_data_path=model_data_path, dl_path=os.path.join(\n vol, 
os.path.basename(model_data_path))))\n else:\n with hide(\"output\", \"running\"):\n self._execute_put(model_data_path, vol)\n\n print(\"Copied model data to host\")\n if not self._publish_new_model(name, version, labels, input_type,\n container_name,\n os.path.join(vol, os.path.basename(model_data_path))):\n return False\n else:\n print(\"Published model to Clipper\")\n # aggregate results of starting all containers\n return all([self.add_container(name, version)\n for r in range(num_containers)])", "def add(self, model):\n assert isinstance(model, self.model_class) # it's a homogeneous collection\n m_id = str(model.get_id())\n assert m_id != None # needs a real id or cid\n # If the models have already been loaded, verify the model being added is\n # not already in the set. This allows for create_child to be used before a potential\n # lazy load has happened, which might load the newly created child from the DB again.\n if self._loaded:\n assert m_id not in self._models # collision\n model._set_parent(self)\n self._models[m_id] = model\n return model", "def mount(xpu, model):\n # Unwrap the core model if necessary\n model = xpu.raw(model)\n model = xpu.move(model)\n if xpu._device_ids and len(xpu._device_ids) > 1:\n model = ContainerDataParallel(\n model, device_ids=xpu._device_ids,\n output_device=xpu._main_device_id)\n else:\n model = DataSerial(model)\n return model", "def _create_container(self, container_name):\n try:\n container = self.swift.head_container(container_name)\n except client.ClientException:\n self.swift.put_container(container_name)\n else:\n return container", "def addModel(self, model, inputs, name=None, outputFilter=Identity, inputChecker=NullChecker):\n if name is None:\n import dill\n name = dill.source.getname(model)\n if name is None:\n for i in range(len(self._names)+1):\n name = 'model'+str(i)\n if name not in self._names: break\n elif name in self._names:\n print \"Model [%s] already in database.\" % name\n raise AssertionError\n self._names.append(name)\n self._forwardFactories.append(model)\n self._inputs.append(inputs)\n self._outputFilters.append(outputFilter)\n self._inputCheckers.append(inputChecker)", "def create(self, req, body):\n context = req.environ['meteos.context']\n\n if not self.is_valid_body(body, 'model'):\n raise exc.HTTPUnprocessableEntity()\n\n model = body['model']\n\n LOG.debug(\"Create model with request: %s\", model)\n\n try:\n experiment = self.engine_api.get_experiment(\n context, model['experiment_id'])\n utils.is_valid_status(experiment.__class__.__name__,\n experiment.status,\n constants.STATUS_AVAILABLE)\n template = self.engine_api.get_template(\n context, experiment.template_id)\n except exception.NotFound:\n raise exc.HTTPNotFound()\n except exception.InvalidStatus:\n raise\n\n display_name = model.get('display_name')\n display_description = model.get('display_description')\n experiment_id = model.get('experiment_id')\n source_dataset_url = model.get('source_dataset_url')\n dataset_format = model.get('dataset_format', 'csv')\n model_type = model.get('model_type')\n model_params = model.get('model_params')\n swift_tenant = model.get('swift_tenant')\n swift_username = model.get('swift_username')\n swift_password = model.get('swift_password')\n\n new_model = self.engine_api.create_model(context,\n display_name,\n display_description,\n source_dataset_url,\n dataset_format,\n model_type,\n model_params,\n template.id,\n template.job_template_id,\n experiment_id,\n experiment.cluster_id,\n swift_tenant,\n swift_username,\n 
swift_password)\n\n return self._view_builder.detail(req, new_model)", "def create_new_component(self, cname):\n while True:\n try:\n self.model.get_component_by_name(cname)\n cname += u'_'\n except KeyError:\n # Component with this name doesn't exist\n break\n # Create the component\n comp = cellml_component.create_new(self.model, cname)\n self.model._add_component(comp)\n return comp", "def request_new_instance( self ):\n\n request_id = self.az_commands.invoke( \"add\",\n background=True,\n bg_callback=self.process_new_instance,\n name=\"{0}-{1}\".format( self.instance_name, self.next_host_id ),\n registrypassword=self.docker_auth,\n image=self.instance_type,\n query=\"'{id:id, name:name, ip:ipAddress.ip, type:containers[0].image, status:provisioningState}'\")[0]\n\n # create a new host object\n hobj = hostObject.HostObject(self.next_host_id, hostObject.HostObject.TYPE_CONTAINER)\n\n self.active_request[ request_id ] = hobj\n self.instances.append( hobj )\n\n self.next_host_id += 1", "def create_container(cls, s_instance, *args):\n if hasattr(s_instance, 'content'):\n if hasattr(s_instance.content, 'viewManager'):\n return s_instance.content.viewManager.CreateContainerView(*args)\n\n return ValueError", "def CreateService(self, name=\"default_model_container\", type=None, arguments=None):\n\n service = self._service_registry.Create(name, type, arguments)\n\n self._loaded_services.append(service)\n\n return service", "def create(self, name, image, fetch_image=False, network=None, volumes={},\n **kwargs):\n create_kwargs = {\n 'detach': True,\n }\n\n # Convert network & volume models to IDs\n network = self._network_for_container(network, kwargs)\n if network is not None:\n network_id, network = (\n self._network_helper._get_id_and_model(network))\n create_kwargs['network'] = network_id\n\n if volumes:\n create_kwargs['volumes'] = self._volumes_for_container(volumes)\n\n create_kwargs.update(kwargs)\n\n if fetch_image:\n self._image_helper.fetch(image)\n\n container = super().create(name, image, **create_kwargs)\n\n if network is not None:\n self._connect_container_network(container, network, aliases=[name])\n\n return container", "async def CreateContainer(self, *args):\n # Create and configure a LXD container for use with a proxy charm.\n (model, application, _, _) = args\n\n debug(\"[CreateContainer] {}\".format(args))\n container = None\n\n try:\n # Execute 'get-ssh-public-key' primitive and get returned value\n uuid = await self.n2vc.ExecutePrimitive(\n model,\n application,\n \"get-ssh-public-key\",\n None,\n )\n\n result = await self.n2vc.GetPrimitiveOutput(model, uuid)\n pubkey = result['pubkey']\n\n container = create_lxd_container(\n public_key=pubkey,\n name=os.path.basename(__file__)\n )\n\n return container\n except Exception as ex:\n debug(\"Error creating container: {}\".format(ex))\n pass\n\n return None", "def add_object(self, model_name, position={\"x\": 0, \"y\": 0, \"z\": 0}, rotation={\"x\": 0, \"y\": 0, \"z\": 0}, env_id=0):\n\n object_id = Controller.get_unique_id()\n self.communicate({\"$type\": \"add_object\",\n \"env_id\": env_id,\n \"model_name\": model_name,\n \"position\": position,\n \"rotation\": rotation,\n \"id\": object_id})\n return object_id", "def _spawn_model(self, model_xml: str):\n self.spawn_publisher.publish(f'<sdf version=\"1.7\">{model_xml}</sdf>')", "def __init__(self, *args):\n this = _libsbml.new_ModelCreator(*args)\n try: self.this.append(this)\n except: self.this = this", "def create_model(self):\n pass", "def create_model(self):\n pass", 
"def create_model(self, **inputs):\n raise NotImplementedError('This method has to be overwritten.')", "def pyfunc_build_image(model_uri, extra_args=None):\n name = uuid.uuid4().hex\n cmd = [\"mlflow\", \"models\", \"build-docker\", \"-m\", model_uri, \"-n\", name]\n if extra_args:\n cmd += extra_args\n p = subprocess.Popen(cmd,)\n assert p.wait() == 0, \"Failed to build docker image to serve model from %s\" % model_uri\n return name", "def _constructInstance(self, container, id, *args, **kw):\n file, title = None, ''\n id = container.manage_addProduct['OFSP'].manage_addImage(id, file, title)\n return container.get(id, None)", "def create_model(configuration):\n model = find_model_using_name(configuration['model_name'])\n instance = model(configuration)\n print(\"model [{0}] was created\".format(type(instance).__name__))\n return instance", "def make_model():\n m = model_class(*argv[2:-1])\n modelobj[\"model\"] = m", "def create_new_volume(self, volumeInfo, change_name=True):\n size = volumeInfo.get(SVC_KEY_VDISK_CAPACITY)\n if (change_name):\n new_volume_name = self._get_new_volume_name(\n volumeInfo.get(SVC_KEY_VDISK_NAME))\n else:\n new_volume_name = volumeInfo.get(SVC_KEY_VDISK_NAME)\n if SVC_KEY_VOLUME_GROUP in volumeInfo:\n volumeGroup = volumeInfo.get(SVC_KEY_VOLUME_GROUP)\n elif self.dft_stg_pool:\n volumeGroup = self.dft_stg_pool\n else:\n volumeGroup = self.get_mdisk_grp_by_size(size)\n\n if volumeGroup is None:\n raise SVCNoSANStoragePoolException\n\n # iogrp parameter should not use name since it could be\n # customized. It is always safe to use iogrp 0.\n cmd = \"svctask mkvdisk -name %s -iogrp 0 -mdiskgrp %s \" \\\n \"-size %s -unit b\" % (new_volume_name, volumeGroup, size)\n\n output, err_output = self._svc_command(cmd)\n\n volume_uid = self.get_uid(new_volume_name)\n\n # Check if it got created\n if not volume_uid:\n # The SVC message of out of space is not really user friendly.\n # So, we will manully check whether the pool ran out of space\n free_capacity = self.get_mdisk_grp_size(volumeGroup)\n\n if float(size) > float(free_capacity):\n ex_args = {'pool_name': volumeGroup,\n 'size': size,\n 'free_capacity': free_capacity}\n raise SVCVolumeGroupOutOfSpace(**ex_args)\n if err_output:\n ex_args = {'new_volume_name': new_volume_name,\n 'err_output': err_output}\n raise SVCVolumeCreationFailed(**ex_args)\n else:\n # failed to create volume but with no error msg\n # really shouldn't hit this condition\n ex_args = {'cmd': cmd,\n 'e': _(\"No error available\")}\n raise SVCCommandException(**ex_args)\n\n return new_volume_name, volume_uid", "def add_model(self, label):\n s = label.replace(' ', '_')\n if s not in self.models:\n self.models.append(s)" ]
[ "0.62423486", "0.59782505", "0.580102", "0.57813376", "0.5492073", "0.5476516", "0.54686433", "0.54077226", "0.5275172", "0.52743244", "0.52491695", "0.5195662", "0.5185085", "0.51352257", "0.5109263", "0.5091158", "0.5048849", "0.5037702", "0.5011328", "0.5009769", "0.49816322", "0.49746954", "0.49746954", "0.49647695", "0.4953032", "0.49414945", "0.49334624", "0.48995763", "0.4898497", "0.4883818" ]
0.737132
0
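
The core of `add_container` above is assembling a `docker run` line from the model hash stored under `<name>:<version>`. Pulled out as a sketch — the hash keys are the ones the sample reads, and the `clipper_nw` default matches the network name the `cleanup` record further down removes:

```python
def container_run_cmd(meta: dict, model_name: str, model_version: str,
                      network: str = "clipper_nw") -> str:
    """Build the `docker run` command for a model container from its Redis hash."""
    return (
        "docker run -d --network={nw} -v {path}:/model:ro "
        '-e "CLIPPER_MODEL_NAME={mn}" -e "CLIPPER_MODEL_VERSION={mv}" '
        '-e "CLIPPER_IP=query_frontend" -e "CLIPPER_INPUT_TYPE={mip}" '
        "{image}"
    ).format(nw=network,
             path=meta["model_data_path"],
             mn=model_name,
             mv=model_version,
             mip=meta["input_type"],
             image=meta["container_name"])
```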
Fetches metrics from the running Clipper instance.

Returns
-------
str
    The JSON string containing the current set of metrics for this instance. On error, the string will be an error message (not JSON formatted).
def inspect_instance(self): url = "http://%s:1337/metrics" % self.host r = requests.get(url) try: s = r.json() except TypeError: s = r.text return s
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics(self):\r\n url = '{0}/{1}'.format(self.get_url(), 'metrics')\r\n\r\n return http.Request('GET', url), parsers.parse_json", "def metrics(self) -> pulumi.Output['outputs.RuntimeMetricsResponse']:\n return pulumi.get(self, \"metrics\")", "def get_prom_metrics(self):\n base_url = self.get_config().get(\"prometheus_endpoint\", PROM_BASE_URL).rstrip(\"/\")\n\n url = \"%s%slabel/__name__/values\" % (base_url, PROM_API_PATH)\n\n self.debug(\"Getting url: \", url)\n r = requests.get(url)\n\n assert r.status_code == 200, \"Prometheus server returned http code: \" + str(r.status_code)\n\n try:\n data = r.json()\n except:\n raise Exception(\"Failed to parse Prometheus JSON response\")\n\n self.debug(\"Got reponse data: \", data)\n\n assert (\"status\" in data and data[\"status\"] == \"success\"), \"Prometheus server did not return status success\"\n assert \"data\" in data, \"Prometheus server did not return data in output\"\n assert len(data[\"data\"]) > 0, \"Prometheus server returned no metrics\"\n\n known_metrics = data[\"data\"]\n assert isinstance(known_metrics, list)", "async def get(self):\n config = self.settings['nbresuse_display_config']\n cur_process = psutil.Process()\n all_processes = [cur_process] + cur_process.children(recursive=True)\n limits = {}\n\n mem_usage = dict(line.split() for line in list(open('/sys/fs/cgroup/memory/memory.stat').readlines()))\n\n # Get memory information\n rss = int(mem_usage['rss'])\n mem_limit = int(memory_stats['hierarchical_memory_limit'])\n\n # A better approach would use cpu_affinity to account for the\n # fact that the number of logical CPUs in the system is not\n # necessarily the same as the number of CPUs the process\n # can actually use. But cpu_affinity isn't available for OS X.\n cpu_count = psutil.cpu_count()\n\n if config.track_cpu_percent:\n self.cpu_percent = await self.update_cpu_percent(all_processes)\n\n limits['memory'] = {\n 'rss': mem_limit\n }\n if config.mem_warning_threshold != 0:\n limits['memory']['warn'] = (mem_limit - rss) < (mem_limit * config.mem_warning_threshold)\n\n # Optionally get CPU information\n if config.track_cpu_percent:\n self.cpu_percent = await self.update_cpu_percent(all_processes)\n\n if config.cpu_limit != 0:\n limits['cpu'] = {\n 'cpu': config.cpu_limit\n }\n if config.cpu_warning_threshold != 0:\n limits['cpu']['warn'] = (config.cpu_limit - self.cpu_percent) < (config.cpu_limit * config.cpu_warning_threshold)\n\n metrics = {\n 'rss': rss,\n 'limits': limits,\n }\n if config.track_cpu_percent:\n metrics.update(cpu_percent=self.cpu_percent,\n cpu_count=self.cpu_count)\n\n self.log.debug(\"NBResuse metrics: %s\", metrics)\n self.write(json.dumps(metrics))", "def get_metrics(self):\n self.logger.debug(\"Fetching metrics.\")\n return self._api_query(\"metrics\")['metrics']", "def mymetrics(): \n _update_metric_counters()\n logging.debug(prom_objects_seen.collect())\n return flask.Response(generate_latest(), mimetype='text/plain')", "def get_metrics(self) -> dict:\n return self.metric_dict", "def get_metrics(self) -> Dict[str, base.Number]:\n return self._metrics", "def get(self):\r\n query = self.build_query()\r\n metrics = query.all()\r\n result = self.schema_collection.dump(metrics)\r\n return success(result)", "def get_metrics(self):\n return None", "def stats():\n return jsonify(shorten.get_stats(get_db(), app.config['MINI_URL_BASE']))", "def fetch_metrics():\n try:\n s = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)\n log_verbose('Connected to Redis at %s:%s' % (REDIS_HOST, REDIS_PORT))\n 
except socket.error, e:\n collectd.error('redis_metrics plugin: Error connecting to %s:%d - %r'\n % (REDIS_HOST, REDIS_PORT, e))\n return None\n log_verbose('Retrieving data')\n data = s.hgetall(METRICS_HASH)\n log_verbose('Recieved data: %s' % data)\n\n return data", "def metrics(self):\n return self.__metrics", "def metrics(self):\n return self._metrics", "def metrics(self):\n return self._metrics", "def stats(self):\n url = client.build_url('stats')\n _, res_json = client.get(url, headers=self.headers)\n\n return res_json", "def metrics(self):\n self.metrics = []\n \n self.clients()\n\n if len(self.metrics) > 0:\n return self.metrics\n else:\n return []", "def summarize_as_json(self):\n return json.dumps({\n 'total_time': self.total_time,\n 'std_dev_total_time': self.std_dev_total_time,\n 'max_memory': self.max_memory,\n 'std_dev_max_memory': self.std_dev_max_memory,\n 'average_memory': self.average_memory,\n 'std_dev_average_memory': self.std_dev_average_memory,\n 'average_cpu': self.average_cpu,\n 'std_dev_average_cpu': self.std_dev_average_cpu,\n }, indent=2)", "def metrics_get(period):\n return flask.jsonify({\"message\": \"noop\"}), 200", "def getReportMetrics(self):\n return self.__unwrapResults().reportMetrics", "def metrics(self):\n\n data = requests.get(\n f\"http://{self.prometheus_host}:{self.prometheus_port}/metrics\"\n ).content.decode()\n lines = [line for line in data.split(\"\\n\") if not line.startswith(\"#\")]\n metrics = {}\n for line in lines:\n if not line:\n continue\n\n name, value = line.split(\" \")\n\n try:\n value = int(value) # type: ignore\n except ValueError:\n value = float(value) # type: ignore\n\n if \"{\" in name and \"}\" in name:\n base = name[: name.index(\"{\")]\n tags = name[name.index(\"{\") + 1 : -1]\n tags = [tag.split(\"=\") for tag in tags.split(\",\")]\n tags = [(key, val.replace('\"', \"\")) for key, val in tags]\n\n name = base + \"#\" + \",\".join(f\"{k}:{v}\" for k, v in sorted(tags))\n\n metrics[name] = value\n\n return metrics", "def list_metrics(self):\n pass", "def getCurrentMetrics(self):\n self.notifyPut('Obtaining Current Display Metrics')\n try:\n data = []\n data = win32api.EnumDisplayMonitors(None, None)\n screens = {}\n scrNum = 0\n for screen in data:\n screens[scrNum] = screen[2]\n scrNum += 1\n return screens \n except Exception, e:\n self.logQ.put('{0} - Unable to capture current metrics'.format(e))", "def get_metrics() -> Response:\n\n try:\n with get_cursor(db_creds, commit=False) as cur:\n data = get_sensors_data(cur)\n return jsonify(status_code=200, data=data)\n except psycopg2.Error as e:\n return jsonify(\n message=f\"Psycopg2 driver error: {type(e)}\",\n args=e.args,\n status_code=500,\n error_type=\"Internal Server Error\",\n )\n except Exception as e:\n return jsonify(\n message=f\"Internal Server Error: {type(e)}\",\n args=e.args,\n status_code=500,\n error_type=\"Internal Server Error\",\n )", "def metrics(self):\r\n return Metrics(self)", "def metrics(self, request):\n return OtterMetrics(self.store).app.resource()", "def metrics(env):\n envs = environments()\n check_env(env, envs)\n\n metrics = get_or_abort(puppetdb._query, 'mbean')\n return render_template('metrics.html',\n metrics=sorted(metrics.keys()),\n envs=envs,\n current_env=env)", "def prometheus_metrics(request):\n if not settings.DEBUG:\n return HttpResponseNotFound()\n\n # DEPRECATED: prometheus_multiproc_dir has been replaced by PROMETHEUS_MULTIPROC_DIR\n if \"PROMETHEUS_MULTIPROC_DIR\" in os.environ or \"prometheus_multiproc_dir\" in 
os.environ:\n registry = prometheus_client.CollectorRegistry()\n multiprocess.MultiProcessCollector(registry)\n else:\n registry = prometheus_client.REGISTRY\n metrics_page = prometheus_client.generate_latest(registry)\n return HttpResponse(\n metrics_page, content_type=prometheus_client.CONTENT_TYPE_LATEST\n )", "def metrics(self):\n raise NotImplementedError(\"metrics\")", "def read_metrics(self):\n raise NotImplementedError()" ]
[ "0.6661778", "0.6530863", "0.62208194", "0.6195161", "0.6111613", "0.6066885", "0.58400106", "0.5813678", "0.58036125", "0.5755982", "0.5711879", "0.5672189", "0.5664587", "0.56643957", "0.56643957", "0.5648688", "0.56091595", "0.5596024", "0.55851555", "0.5569048", "0.55676854", "0.5561622", "0.55431515", "0.5489346", "0.5482248", "0.54777914", "0.5462921", "0.5457199", "0.54256916", "0.54011077" ]
0.697125
0
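
A note on the `inspect_instance` record: `requests.Response.json()` raises `ValueError` (not `TypeError`) on a non-JSON body in current versions of the library, so a faithful fallback today looks like the sketch below. The exception class is my adjustment; everything else mirrors the sample.

```python
import requests

def inspect_instance(host: str):
    """GET Clipper's metrics endpoint; fall back to raw text if it isn't JSON."""
    r = requests.get("http://%s:1337/metrics" % host)
    try:
        return r.json()
    except ValueError:  # covers requests' JSONDecodeError subclass
        return r.text
```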
Cleans up all Docker artifacts. This will stop and remove all Docker containers and images from the host and destroy the Docker network Clipper uses.
def cleanup(self): with hide("output", "warnings", "running"): self.stop_all() self._execute_standard("rm -rf {model_repo}".format(model_repo=MODEL_REPO)) self._execute_root("docker rmi --force $(docker images -q)", warn_only=True) self._execute_root("docker network rm clipper_nw", warn_only=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cleanup():\n cmd='docker rmi --force $(docker images -a -q)'\n bash_command(\"Deleting all images\", cmd)", "def clean(all):\n docker_clean(all)", "def destroy_all(self) -> None:\n try:\n containers = self.docker.containers.list(\n all=True,\n filters={\n 'label': LABEL_TASK_ID,\n },\n )\n\n for container in containers:\n container.remove(force=True)\n\n except requests.exceptions.ConnectionError:\n raise ProviderError('Docker engine unavailable')", "def prune():\n if not gce_instance_id():\n log.warning('Not cleaning up docker on non-gce machines to prevent '\n 'deleting containers in dev')\n return\n dkr = docker.from_env()\n dkr.api.prune_containers()\n dkr.api.prune_images()\n check_disk_usage()", "def cli(ctx):\n stopped = click.style(\"Stopped\", fg=\"red\")\n removed = click.style(\"Removed\", fg=\"blue\")\n for container in ctx.docker.get_containers():\n name = container.hostname\n node_name = ''.join([i for i in name if not i.isdigit()])\n image_name = container.dictionary['Config']['Image']\n if node_name in TO_KILL:\n container.stop(timeout=0)\n else:\n container.stop(timeout=5)\n # container.execute(\"poweroff\", \"root\", \"/\", False)\n # container.wait()\n ctx.log(\"Container %s --> %s\" % (name, stopped))\n container.remove(v=False, link=False, force=True)\n ctx.log(\"Container %s --> %s\" % (name, removed))\n ctx.state['containers'].remove(container.short_id)\n ctx.state.fast_dump()\n # remove untagged image\n if not image_name.startswith(ctx.prefix):\n ctx.docker.remove_image(image_name, force=True)\n ctx.docker.remove_network()", "def clean_docker(c):\n c.run(f'docker image rm -f $(docker image ls --filter reference={docker_repo} -q) || true')", "def cleanup_networks(self):\n for network in self.networks:\n try:\n network.remove()\n network.client.api.close()\n network.client.close()\n self.log_message(\n f'{dateutils.get_current_time()} '\n f'destroying docker network {network}'\n )\n except Exception:\n self.log_message(\n f'{dateutils.get_current_time()} ERROR: Could not remove docker '\n f'network {network}'\n )\n self.networks.clear()", "def containers_reset(driver='openstack', skipswarm=False):\n\n mach = Dockerizing(driver)\n # Find machines in list which are based on this driver\n for node in mach.list(with_driver=driver):\n # Clean containers inside those machines\n mach.prepare(node)\n mach.destroy_all(skipswarm)\n mach.exit()\n _logger.info(\"Completed\")", "def clean():\n clean_flatbuffer_binaries()\n clean_webp_textures()", "def cleanup(self):\n logging.debug(\"cleanup called\")\n self.delete_networks()\n self.delete_machines()", "def stop_all(self):\n print(\"Stopping Clipper and all running models...\")\n with hide(\"output\", \"warnings\", \"running\"):\n self._execute_root(\"docker-compose stop\", warn_only=True)\n self._execute_root(\"docker stop $(docker ps -a -q)\", warn_only=True)\n self._execute_root(\"docker rm $(docker ps -a -q)\", warn_only=True)", "def destroy(self):\r\n for container in self._containers.copy():\r\n container.destroy()\r\n\r\n assert len(self._containers) == 0", "def clean(c):\n clean_docker(c)\n clean_repo(c)", "def rm_network(c):\n print('Stopping local test network and removing containers')\n with c.cd('images'):\n c.run('sudo docker-compose down -v', hide='stderr')\n\n c.run('sudo rm -rf volumes/stellar-core/opt/stellar-core/buckets')\n c.run('sudo rm -f volumes/stellar-core/opt/stellar-core/*.log')\n c.run('sudo rm -rf volumes/stellar-core/tmp')", "def cleanup(self):\n self.qemu.clean_run_files()\n for tmp in 
glob.glob(self.configfile + \"?*\"):\n os.unlink(tmp)", "def clean(params) -> None:\n print(\"Cleaning docker image...\")\n cmd = \"docker rmi bg_changer >/dev/null 2>&1\"\n if os.system(cmd) == 0:\n print(\" Success !\")\n else:\n print(\" Failure !\")", "def cleanup(self):\n\n # NOTE(jbresnah) call stop on each of the servers instead of\n # checking the pid file. stop() will wait until the child\n # server is dead. This eliminates the possibility of a race\n # between a child process listening on a port actually dying\n # and a new process being started\n servers = [self.api_server, self.conductor_server, ]\n for s in servers:\n try:\n s.stop()\n except Exception:\n pass\n\n for f in self.files_to_destroy:\n if os.path.exists(f):\n os.unlink(f)", "def clean_up(self):\n dist.destroy_process_group()", "def cleanup():\n dist.destroy_process_group()", "def terminate(self):\r\n deferreds = []\r\n\r\n for container in self._containers.copy():\r\n deferreds.append(container.remote_destroy())\r\n\r\n if deferreds:\r\n deferredList = DeferredList(deferreds)\r\n deferredList.addCallback(self._cleanPackageDir)\r\n return deferredList\r\n else:\r\n self._cleanPackageDir()", "def kill_all_docker_containers():\n running_container_ids = subprocess.check_output(['docker', 'ps', '-q'])\n running_container_ids = running_container_ids.strip().split() # Remove trailing \\n and convert to list\n\n if running_container_ids:\n subprocess.call(['docker', 'kill'] + running_container_ids)", "def stop_all():\n subprocess.check_call(\n ['./run.py --down'], shell=True,\n cwd=orc8_docker_path,\n )\n subprocess.check_call(\n 'docker-compose down', shell=True,\n cwd=feg_docker_integ_test_path,\n )\n subprocess.check_call(\n 'vagrant halt magma', shell=True,\n cwd=agw_path,\n )", "def network_cleanup(self, args):\n pass", "def cleanup():\n management.call_command('cleanup')", "def teardown(self):\n self.containers._teardown()\n self.networks._teardown()\n self.volumes._teardown()\n\n # We need to close the underlying APIClient explicitly to avoid\n # ResourceWarnings from unclosed HTTP connections.\n self._client.api.close()", "def cleanUp(self):\r\n for group in self._groups.values():\r\n group.destroy()\r\n\r\n assert len(self._groups) == 0\r\n\r\n for machine in self._machines.copy():\r\n self.destroyMachine(machine)\r\n\r\n assert len(self._machines) == 0\r\n\r\n self.unregisterIAASHook()", "def clean(context):\n print(f\"Attempting to forcefully remove image {IMAGE_NAME}:{IMAGE_VER}\")\n context.run(f\"docker rmi {IMAGE_NAME}:{IMAGE_VER} --force\")\n print(f\"Successfully removed image {IMAGE_NAME}:{IMAGE_VER}\")", "def server_clean(self):\n # Kill any doas servers running on the hosts\n self.kill()\n # Clean up any files that exist on the hosts\n self.clean_files()", "def _delete_all_containers(self):\n for container_ref in self.created_entities['container']:\n self.barbicanclient.containers.delete(container_ref)", "def dist_cleanup():\n dist.destroy_process_group()" ]
[ "0.7861326", "0.7753446", "0.74676013", "0.73080105", "0.7184034", "0.7155834", "0.7131024", "0.695112", "0.6933661", "0.6863181", "0.6823708", "0.67930967", "0.6702913", "0.66411686", "0.66257644", "0.65616155", "0.65316635", "0.6490241", "0.6471442", "0.6443562", "0.63980955", "0.63963914", "0.636048", "0.6339552", "0.63342077", "0.63291353", "0.63021404", "0.6256531", "0.6254775", "0.62499106" ]
0.8082461
0
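
The teardown order in the `cleanup` record is worth spelling out: stop and remove containers first (via `stop_all`, whose commands appear among the negatives of the `add_container` record), drop the model repository, force-remove images, and delete the network last. A plain-`subprocess` sketch under those assumptions — the model repo path is a placeholder, and failures are tolerated the way the sample's `warn_only` flag does:

```python
import subprocess

def cleanup(model_repo: str = "/tmp/clipper-models"):  # placeholder path
    steps = [
        "docker stop $(docker ps -a -q)",          # stop_all: halt every container
        "docker rm $(docker ps -a -q)",            # ...then remove them
        "rm -rf %s" % model_repo,                  # drop copied model data
        "docker rmi --force $(docker images -q)",  # force-remove all images
        "docker network rm clipper_nw",            # network goes last
    ]
    for cmd in steps:
        subprocess.call(cmd, shell=True)  # shell=True for the $(...) substitutions
```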
Puts the provided container on the host.
def _put_container_on_host(self, container_name): with hide("output", "warnings", "running"): # first see if container is already present on host host_result = self._execute_root( "docker images -q {cn}".format(cn=container_name)) if len(host_result.stdout) > 0: print("Found %s on host" % container_name) return True # now try to pull from Docker Hub hub_result = self._execute_root("docker pull {cn}".format(cn=container_name), warn_only=True) if hub_result.return_code == 0: print("Found %s in Docker hub" % container_name) return True # assume container_name refers to a local container and # copy it to host local_result = local( "docker images -q {cn}".format(cn=container_name)) if len(local_result.stdout) > 0: saved_fname = container_name.replace("/", "_") subprocess.call("docker save -o /tmp/{fn}.tar {cn}".format( fn=saved_fname, cn=container_name)) tar_loc = "/tmp/{fn}.tar".format(fn=saved_fname) self._execute_put(tar_loc, tar_loc) self._execute_root("docker load -i {loc}".format(loc=tar_loc)) # self._execute_root("docker tag {image_id} {cn}".format( # image_id=image_id, cn=cn)) # now check to make sure we can access it host_result = self._execute_root( "docker images -q {cn}".format(cn=container_name)) if len(host_result.stdout) > 0: print("Successfuly copied %s to host" % container_name) return True else: warn( "Problem copying container %s to host" % container_name) return False # out of options warn("Could not find %s, please try with a valid " "container docker image") return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def put_container(self, account, container):\n \n pass", "def put_container(self, container):\n if self.onest.create_bucket(container):\n LOG.debug('put_container, create success. '\n 'Container: %s.', container)\n else:\n # If return false, means exist\n LOG.info(_LI('put_container, '\n 'container(%s) exist, just use it.'), container)", "def container(self, container):\n if not container.is_public():\n container.make_public()\n self._container = container", "def register_this_container(cache, db):\n\n # Get container id.\n # bash_command = \"\"\"head -1 /proc/self/cgroup|cut -d/ -f3\"\"\"\n # output = str(subprocess.check_output(['bash','-c', bash_command]), \"utf-8\").strip()\n\n # logger.info(output)\n\n my_host_name = socket.gethostname()\n my_ip = socket.gethostbyname(my_host_name)\n cache[\"ip\"] = my_ip\n cache[\"host\"] = my_host_name\n\n free_cpu, free_mem = get_resources()\n\n logger.info({\"host_name\": my_host_name, \"ip\": my_ip})\n try:\n pipe = db.pipeline()\n pipe.sadd(SET_NAME, my_ip).hset(my_ip, mapping={\"host_id\": my_host_name, \"cpu\": free_cpu, \"mem\": free_mem})\n pipe.execute()\n except Exception as e:\n logger.error(e)\n raise e", "def DeployContainer(self, name, container_spec):\n raise NotImplementedError()", "def _update_container(self):\n client = docker.from_env()\n self.container = client.containers.get(self.session.container_id)", "def container(self, container):\n\n self._container = container", "def container(self, container):\n\n self._container = container", "def docker_enter(self, user, host, container):\n import os\n logging.debug(\"\")\n logging.debug(\"************************************************************\")\n ssh_host = user+\"@\"+host\n ssh_timeout = \"5\"\n ssh_options = \"-A -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o ConnectTimeout=\"+ssh_timeout+\" -o ConnectionAttempts=1 -tt\"\n docker_cmd = \"\\\"/opt/bin/docker-enter \"+container+\"\\\" \"\n cmd = \"ssh \"+ssh_options+\" \"+ssh_host+\" \"+docker_cmd\n logging.debug(\"Executing Command: %s\" % (cmd))\n returned = os.system(cmd)\n logging.debug(\"docker_enter func Exiting with code %i\" % (returned))\n sys.exit(returned)", "def test_add_container(self):\n with DockerHost('host', dind=False) as host:\n # Create a container with --net=none, add a calico interface to\n # it then check felix programs a route.\n node = host.create_workload(\"node\", network=NET_NONE)\n host.calicoctl(\"container add %s 192.168.1.1\" % node)\n\n # Create the profile, get the endpoint IDs for the containers and\n # add the profile to the endpoint so felix will pick it up.\n host.calicoctl(\"profile add TEST_GROUP\")\n ep = host.calicoctl(\"container %s endpoint-id show\" % node)\n host.calicoctl(\"endpoint %s profile set TEST_GROUP\" % ep)\n\n # Wait for felix to program down the route.\n check_route = partial(host.execute,\n \"ip route | grep '192\\.168\\.1\\.1'\")\n retry_until_success(check_route, ex_class=CalledProcessError)", "def DeployContainer(self, base_name, container_spec):\n name = base_name + str(len(self.containers[base_name]))\n container = KubernetesContainer(container_spec=container_spec, name=name)\n self.containers[base_name].append(container)\n container.Create()", "def create_container(self, container_name):\n response = self.client.put_container(container_name)\n return response", "def put(self, key, value):\n self.container[key] = value", "def run(self, container_config: ContainerConfig) -> Container:", "def _execute_container(self):\n pass", "def post(self, 
run=False, **container_dict):\n context = pecan.request.context\n compute_api = pecan.request.compute_api\n policy.enforce(context, \"container:create\",\n action=\"container:create\")\n\n try:\n run = strutils.bool_from_string(run, strict=True)\n except ValueError:\n msg = _('Valid run values are true, false, 0, 1, yes and no')\n raise exception.InvalidValue(msg)\n try:\n container_dict['tty'] = strutils.bool_from_string(\n container_dict.get('tty', False), strict=True)\n container_dict['stdin_open'] = strutils.bool_from_string(\n container_dict.get('stdin_open', False), strict=True)\n except ValueError:\n msg = _('Valid tty and stdin_open values are ''true'', '\n '\"false\", True, False, \"True\" and \"False\"')\n raise exception.InvalidValue(msg)\n\n # NOTE(mkrai): Intent here is to check the existence of image\n # before proceeding to create container. If image is not found,\n # container create will fail with 400 status.\n images = compute_api.image_search(context, container_dict['image'],\n True)\n if not images:\n raise exception.ImageNotFound(container_dict['image'])\n container_dict['project_id'] = context.project_id\n container_dict['user_id'] = context.user_id\n name = container_dict.get('name') or \\\n self._generate_name_for_container()\n container_dict['name'] = name\n if container_dict.get('memory'):\n container_dict['memory'] = \\\n str(container_dict['memory']) + 'M'\n if container_dict.get('restart_policy'):\n self._check_for_restart_policy(container_dict)\n container_dict['status'] = fields.ContainerStatus.CREATING\n new_container = objects.Container(context, **container_dict)\n new_container.create(context)\n\n if run:\n compute_api.container_run(context, new_container)\n else:\n compute_api.container_create(context, new_container)\n # Set the HTTP Location Header\n pecan.response.location = link.build_url('containers',\n new_container.uuid)\n pecan.response.status = 202\n return view.format_container(pecan.request.host_url, new_container)", "def _create_container(self, container_name):\n try:\n container = self.swift.head_container(container_name)\n except client.ClientException:\n self.swift.put_container(container_name)\n else:\n return container", "def run_container(self,\n name: str,\n command: Optional[str] = None,\n env: Optional[StrDict] = None,\n volumes: Optional[Dict[str, StrDict]] = None,\n ports: Optional[Dict[Any, Any]] = None,\n dns: Optional[List[str]] = None,\n pid_mode: Optional[str] = None,\n entrypoint: Optional[str] = None):\n if volumes is None:\n volumes = {}\n if dns is None:\n dns = []\n\n expose = []\n port_bindings = {}\n for port in ports.keys():\n if isinstance(port, tuple):\n proto = port[1] if len(port) == 2 else \"tcp\"\n key = \"%d/%s\" % (port[0], proto)\n else:\n key = port\n port_bindings[key] = ports[port]\n expose.append(port)\n\n result = self._client.create_container(\n name, command=command, environment=env,\n volumes=[volume['bind'] for volume in volumes.values()],\n ports=expose,\n entrypoint=entrypoint)\n\n container = result[\"Id\"]\n result = self._client.start(container, binds=volumes,\n port_bindings=port_bindings, dns=dns,\n pid_mode=pid_mode)\n response = self._client.inspect_container(container)\n return response", "def start(self, container: Container):", "def control_container(client, cont_name, action):\n try:\n container = client.containers.get(cont_name)\n print(\"{}ing container {}\".format(action, cont_name))\n if action == \"start\":\n container.start(wait=True)\n elif action == \"stop\":\n 
container.stop(wait=True)\n else:\n raise NameError(\"Uknown action specified: {}\".format(action))\n\n except pylxd.exceptions.LXDAPIException as err:\n print(\"error when trying to {} container {}\".format(action, cont_name))\n raise err", "def DeployContainerService(self, name, container_spec, num_containers):\n raise NotImplementedError()", "def put_container(self, filesystem, acc_dir, cont_dir, \\\n account, container, metadata, req):\n try:\n # create path\n path = self.create_path(filesystem, acc_dir, cont_dir, account, container)\n # Remove this after container library update\n self.logger.debug(('PUT container called for path: %(path)s'),\n {'path' : path})\n if not os.path.exists(path):\n os.makedirs(path)\n timestamp = normalize_timestamp(req.headers['x-timestamp'])\n created_at = normalize_timestamp(time.time())\n # create container stat object\n cont_stat = ContainerStat(account, container, created_at, \\\n timestamp, '0', 0, 0, '', str(uuid4()), 'ADDED', '', metadata)\n\t #get component number\n\t component_name = req.headers['x-component-number']\n # call container library to create container\n status_obj = self.__create_cont(path, filesystem, cont_stat, component_name)\n status = status_obj.get_return_status()\n self.logger.info(('Status from container library comes '\n 'out to be: %(status)s'),\n {'status' : status})\n return status, cont_stat\n except Exception as err:\n self.logger.error(('PUT request failed for account/container:'\n ' %(account)s/%(container)s '\n 'close failure: %(exc)s : %(stack)s'),\n {'account' : account, 'container' : container,\n 'exc': err, 'stack': ''.join(traceback.format_stack())})\n raise err", "def container(self, image: str, **kwargs) -> container.Container:\n raise NotImplementedError", "def container_name(self, container_name):\n\n self._container_name = container_name", "def start_container(client, work_package, load_saved):\n package_path = os.path.join(PATH, \"work_packages\")\n\n client.containers.run(image=\"scrape_light\",\n environment=[\"PACKAGE=\"+work_package, \"LOAD_FILE=\" + load_saved,\n \"[email protected]\", \"PASSWORD=LA#kYs1#o:`Z\"],\n detach=True, tty=True, stdin_open=True,\n sysctls={\"net.ipv4.conf.all.rp_filter\": 2},\n privileged=True,\n devices=[\"/dev/net/tun\"],\n name=\"scrape_\" + str(work_package),\n cap_add=[\"NET_ADMIN\", \"SYS_MODULE\"],\n volumes={package_path: {\"bind\": \"/work_packages\"}})", "def DeployContainerService(self, name, container_spec):\n service = KubernetesContainerService(container_spec, name)\n self.services[name] = service\n service.Create()", "def docker_container():\n if SETUP_SPLASH:\n dm = DockerManager()\n dm.start_container()\n\n try:\n requests.post('{}/_gc'.format(SPLASH_URL))\n except requests.exceptions.RequestException:\n pass\n\n yield", "def container(name, ostemplate, **kwargs):\n if not openvz.exists(name):\n ctid = openvz.get_available_ctid()\n openvz.create(ctid, ostemplate=ostemplate, **kwargs)\n openvz.set(ctid, name=name)\n return Container(name)", "def create_container_if_missing(container, swift_conn, options):\n try:\n swift_conn.head_container(container)\n except swift_client.ClientException, e:\n if e.http_status == httplib.NOT_FOUND:\n add_container = config.get_option(options,\n 'swift_store_create_container_on_put',\n type='bool', default=False)\n if add_container:\n try:\n swift_conn.put_container(container)\n except ClientException, e:\n msg = _(\"Failed to add container to Swift.\\n\"\n \"Got error from Swift: %(e)s\") % locals()\n raise 
glance.store.BackendException(msg)\n else:\n msg = (_(\"The container %(container)s does not exist in \"\n \"Swift. Please set the \"\n \"swift_store_create_container_on_put option\"\n \"to add container to Swift automatically.\")\n % locals())\n raise glance.store.BackendException(msg)\n else:\n raise", "def move_container(i3, name, monitor, container_id=None):\n i3.command(f'move container to workspace {name}')\n i3.command(f'workspace {name}, move workspace to output {monitor}')\n if container_id:\n i3.command(f'[con_id=\"{container_id}\"] focus')" ]
[ "0.777786", "0.70849335", "0.6411199", "0.62735075", "0.62369967", "0.6106427", "0.60992134", "0.60992134", "0.59490246", "0.58224803", "0.573343", "0.57095087", "0.5694259", "0.5600121", "0.55540144", "0.5498277", "0.5437863", "0.54335755", "0.5419815", "0.5414302", "0.5357078", "0.53528535", "0.5313023", "0.52970314", "0.52700305", "0.5235344", "0.5225639", "0.5220213", "0.5192418", "0.5170318" ]
0.7162493
1
Get the info dictionary that contains additional metadata about the model.
def info(model: str = None) -> dict:
    model_instance = get_model(model)
    log.debug("Get info for " + str(model_instance))
    return model_instance.info()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def model_info():\n pass", "def get_model_meta_info(model_name):\n return dict(dict(model_meta_info)[model_name])", "def info(self):\n meta = {\n \"name\": self.name,\n \"description\": self.description,\n \"version\": self.version,\n \"labels\": self.labels,\n \"models\": {k: v.info() for k, v in self._infers.items() if v.is_valid()},\n \"trainers\": {k: v.info() for k, v in self._trainers.items()},\n \"strategies\": {k: v.info() for k, v in self._strategies.items()},\n \"scoring\": {k: v.info() for k, v in self._scoring_methods.items()},\n \"train_stats\": {k: v.stats() for k, v in self._trainers.items()},\n \"datastore\": self._datastore.status(),\n }\n\n # If labels are not provided, aggregate from all individual infers\n if not self.labels:\n merged = []\n for labels in [v.get(\"labels\", []) for v in meta[\"models\"].values()]:\n if labels and isinstance(labels, dict):\n labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])] # type: ignore\n for label in labels:\n if label not in merged:\n merged.append(label)\n meta[\"labels\"] = merged\n\n return meta", "def info_view(self):\n view_dict = dict(\n env_class=self.env_class,\n model_structure=None,\n model_kwargs={key: value for key, value in self.model_kwargs.items() \n if isinstance(value,(str,int,float,tuple,list,dict)) \n and len(str(value))<100},\n model_class=self.model_class,\n hyperparams=self.hyperparams)\n return view_dict", "def info(self) -> dict:", "def ExtraInfo(self) -> object:", "def info(self):\n return {\n \"dimension_x\": self.dimension_x,\n \"dimension_y\": self.dimension_y,\n \"api_level\": self.api_level,\n \"device_model\": self.model,\n }", "def model_info(self):\n if not self._model_info:\n self._load_model_info()\n try:\n data = json.loads(self._model_info)\n except (TypeError, ValueError):\n data = {}\n return data", "def get_info(self) -> Optional[Dict[str, Any]]:", "def get_info(self):\n return {}", "def get_info(self):\n pass", "def get_info(self):\n pass", "def get_main_information(self) -> Dict:\n if self.information is None:\n self.information = self.orthanc.get_instance_information(\n self.identifier\n )\n\n return self.information", "def model_information(self):\n return self._read(MX_MODEL_INFORMATION)", "def info(self):\n return {}", "def _get_information(self):\n pass", "def detail(self):\n info = self.info()\n return info", "def car_info(manufacturer, model_name, **other_info):\r\n car_Profile = {}\r\n car_Profile['manufacturer'] = manufacturer.title()\r\n car_Profile['model'] = model_name.title()\r\n for key, value in other_info.items():\r\n car_Profile[key] = value\r\n return car_Profile", "def info(self):\r\n return self._get('info', {})", "def info() -> Dict[str, Any]:", "def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta", "def get_info_dict(self):\n return {\n 'bidi': self.bidi,\n 'code': self.code,\n 'name': self.name,\n 'name_local': self.name_local\n }", "def get_info(self, info):\r\n pass", "def getInfo():", "def get_info(self): \n return {\n \"ident\": self.ident,\n \"interval\": self._interval,\n \"exception\": self._exception,\n \"execute\": self._execute,\n \"args\": self._args,\n \"kwargs\": self._kwargs}", "def _metadata(self) -> Dict[str, Any]:\n return self.__metadata", "def metadata(self) -> dict:\n meta = {}\n meta['name'] = 
self.name\n meta['id'] = self.id\n meta['family'] = self.family\n \n meta['ptd_type'] = []\n meta['pos'] = []\n meta['atype'] = []\n meta['db_vect'] = []\n meta['scale'] = []\n for cp in self.parameters:\n meta['ptd_type'].append(cp.get('ptd_type', None))\n meta['pos'].append(cp.get('pos', None))\n meta['atype'].append(cp.get('atype', None))\n meta['db_vect'].append(cp.get('db_vect', None))\n meta['scale'].append(cp.get('scale', None))\n \n return meta", "def model_info(model):\n return juju.CLIENT.Client(request=\"ModelInfo\",\n params={\"Name\": model})", "def get_metadata(self):\n return {}", "def extra(self) -> Dict[str, Any]:\n extra = self.extras.copy()\n if isinstance(self.author, str):\n extra['Author'] = self.author\n if isinstance(self.email, str):\n extra['Email'] = self.email\n if isinstance(self.description, str):\n extra['Description'] = self.description\n return extra" ]
[ "0.74650854", "0.73257154", "0.72857666", "0.72088987", "0.71202224", "0.7080812", "0.7052952", "0.7009202", "0.6969219", "0.6927327", "0.68372357", "0.68372357", "0.6695758", "0.66955394", "0.6636088", "0.66226965", "0.66111636", "0.65891314", "0.65803796", "0.6525537", "0.6519215", "0.64995587", "0.6493927", "0.6493675", "0.64922094", "0.64907473", "0.64710367", "0.6468055", "0.6442558", "0.6426798" ]
0.7487704
0
Extend author datafield by CDS authority id and Beard tag. Extends the author datafield by the MARC subfields
def extend_author_field(author_field, cds_id):
    cds_authority_id = "AUTHOR|(CDS){0}".format(cds_id)
    if cds_authority_id not in field_get_subfield_values(author_field, '0'):
        field_add_subfield(author_field, "0", cds_authority_id)
        field_add_subfield(author_field, "9", "#BEARD#")
        return True

    return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_record(record_id, authors):\n record = get_record(record_id)\n record_author = record_get_field_instances(record, \"100\")\n record_coauthors = record_get_field_instances(record, \"700\")\n\n if len(record_author) > 1:\n print (\"Oops: several '100' (main author) fields have been found in \"\n \"record '{0}'\".format(record_id))\n return \"\"\n\n datafields = \"\"\n author = False\n for author_field in record_author:\n try:\n author_name = field_get_subfield_values(author_field, 'a')[0]\n try:\n cds_id = authors[author_name]\n if extend_author_field(author_field, cds_id):\n datafields += field_xml_output(author_field, \"100\")\n author = True\n except KeyError:\n pass\n except IndexError:\n # Author field (`100`) does not have subfield `a`\n pass\n\n if len(authors) > 1 or not author:\n for coauthor_field in record_coauthors:\n try:\n coauthor_name = field_get_subfield_values(\n coauthor_field, 'a')[0]\n try:\n cds_id = authors[coauthor_name]\n if extend_author_field(coauthor_field, cds_id):\n author = True\n except KeyError:\n pass\n except IndexError:\n # Co-author field (`700`) does not have subfield `a`\n pass\n datafields += field_xml_output(coauthor_field, \"700\")\n\n # Nothing to update\n if not author:\n # print \"No authors to update in record '{0}'\".format(record_id)\n return \"\"\n\n record = ('<record><controlfield tag=\"001\">{0}</controlfield>{1}'\n '</record>'.format(record_id, datafields))\n return record", "def customizations(record):\n #record = bibtexparser.customization.author(record)\n record = bibtexparser.customization.add_plaintext_fields(record)\n record = bibtexparser.customization.doi(record)\n\n return record", "def addSetAuthor(self,val):\n self.bookAuthor = val", "def extra_bibparse(db):\n for key,entry in db.entries.items():\n for auth in entry.persons[\"author\"]:\n if (\"Harrison\" not in auth.first_names or\n \"Chapman\" not in auth.last_names):\n entry.add_person(auth, \"otherauthor\")", "def setAuthor(self,value):\n self.PDFreactorConfiguration.in1[\"author\"] = value", "def author(self, value):\n self._set_attr('author', value)", "def __add_author(self, key_name, others_names, personal_information):\n for name in others_names:\n self.author_to_authorID[name] = (key_name, personal_information)", "def set_author(self, author):\n self.author = author\n self.opf.author = author", "def writeAuthor(self,author):\n author = author[:min(32,len(author))]\n self.tes3.hedr.author = author\n self.tes3.hedr.setChanged()\n self.writeHedr()", "def svn_client_commit_info_t_author_set(svn_client_commit_info_t_self, char_author): # real signature unknown; restored from __doc__\n pass", "def author(self, author):\n self._author = author", "def addAuthor(author):\n author_dict = dict()\n # author_dict['id'] = \"{}/api/{}\".format(DOMAIN, author.id)\n author_dict['id'] = \"{}/api/author/{}\".format(DOMAIN, author.id)\n author_dict['host'] = \"{}/api/\".format(author.host_url)\n author_dict['displayName'] = author.username\n author_dict['github'] = author.github_url\n author_dict['url'] = \"{}/api/author/{}\".format(DOMAIN, author.id)\n\n # Optional Attributes\n if author.github_url:\n author_dict['github'] = author.github_url\n if author.user.first_name:\n author_dict['firstName'] = author.user.first_name\n if author.user.last_name:\n author_dict['lastName'] = author.user.last_name\n if author.user.email:\n author_dict['email'] = author.user.email\n if author.bio:\n author_dict['bio'] = author.bio\n\n return author_dict", "def alt_authors(self, key, 
value):\n _authors = self.get(\"authors\", [])\n if _authors:\n for i, v in enumerate(force_list(value)):\n _authors[i].update({\"alternative_names\": clean_val(\"a\", v, str)})\n return _authors", "def marc21_to_identifiedBy_from_field_020(self, key, value):\n def build_identifier_from(subfield_data, status=None):\n subfield_data = subfield_data.strip()\n identifier = {'value': subfield_data}\n subfield_c = value.get('c', '').strip()\n if subfield_c:\n identifier['acquisitionTerms'] = subfield_c\n if value.get('q'): # $q is repetitive\n identifier['qualifier'] = \\\n ', '.join(utils.force_list(value.get('q')))\n\n match = re.search(r'^(.+?)\\s*\\((.*)\\)$', subfield_data)\n if match:\n # match.group(2): parentheses content\n identifier['qualifier'] = ', '.join(\n filter(\n None,\n [match.group(2), identifier.get('qualifier', '')]\n )\n )\n # value without parenthesis and parentheses content\n identifier['value'] = match.group(1)\n if status:\n identifier['status'] = status\n identifier['type'] = 'bf:Isbn'\n identifiedBy.append(identifier)\n\n identifiedBy = self.get('identifiedBy', [])\n isbns = list_of_identifiers(identifiedBy, 'bf:isbn')\n\n subfield_a = value.get('a')\n if subfield_a:\n for isbn_value in utils.force_list(subfield_a):\n if isbn_value not in isbns:\n build_identifier_from(isbn_value)\n\n subfield_z = value.get('z')\n if subfield_z:\n for isbn_value in utils.force_list(subfield_z):\n if isbn_value not in isbns:\n build_identifier_from(isbn_value,\n status='invalid or cancelled')\n\n return None", "def orcid_for_inspire_author(self, author):\n ids = author.get(\"ids\", [])\n for id in ids:\n if id[\"schema\"] == \"ORCID\":\n return id[\"value\"]", "def remoteAddAuthor(author):\n author_dict = dict()\n author_dict['id'] = author.get('id')\n author_dict['host'] = author.get('host')\n author_dict['displayName'] = author.get('displayName')\n author_dict['github'] = author.get('github')\n author_dict['url'] = author.get('url')\n\n # Optional Attributes\n if author.get('github_url'):\n author_dict['github'] = author.get('github_url')\n if author.get('firstName'):\n author_dict['firstName'] = author.get('firstName')\n if author.get('lastName'):\n author_dict['lastName'] = author.get('lastName')\n if author.get('email'):\n author_dict['email'] = author.get('email')\n if author.get('bio'):\n author_dict['bio'] = author.get('bio')\n\n return author_dict", "def update(self, instance, validated_data):\n artno = validated_data['article'].artno\n color = validated_data['color'].code\n category = validated_data['category']\n validated_data['artid'] = '-'.join([artno, color, category])\n validated_data['mcategory'] = categorize(value=category)\n\n article_info = super().update(instance, validated_data)\n return article_info", "def author_id(self, author_id):\n\n self._author_id = author_id", "def author(self, author):\n\n self._author = author", "def author(self, author):\n\n self._author = author", "def authors(self, key, value):\n _authors = self.get(\"authors\", [])\n item = build_ils_contributor(value)\n if item and item not in _authors:\n _authors.append(item)\n try:\n if \"u\" in value:\n other = [\"et al.\", \"et al\"]\n val_u = list(force_list(value.get(\"u\")))\n if [i for i in other if i in val_u]:\n self[\"other_authors\"] = True\n except UnexpectedValue:\n pass\n return _authors", "def _cookIdCore(self, ref, **kwargs):\n\n # AUTHORS\n namepart='nobody'\n if self._refHasAuthorNames(ref):\n lastnames = []\n for each in ref['authors']:\n if each.get('lastname', None):\n 
lastnames.append(each['lastname'])\n\n if len(lastnames) > 1:\n namepart = '%s' % ''.join([ lastname[0] for lastname in lastnames ])\n elif len(lastnames) == 1:\n namepart = lastnames[0][:3]\n else:\n pass\n\n # PUBLICATION YEAR\n if ref.get('publication_year', None):\n yearpart = str(ref['publication_year'])\n else:\n yearpart = \"1000\"\n\n return namepart + yearpart", "def add_authors(self, author_data, instance):\n for idx, author in enumerate(author_data):\n Author.objects.create(dataset=instance, order=idx, author=author)", "def set_author(self, **kwargs):\n self.author_name = kwargs.get('name')\n self.author_url = kwargs.get('url')\n self.author_icon = kwargs.get('icon_url')", "def add_author_node(a, nodes, retval, size=0):\n if a.id not in nodes:\n nodes[a.id] = len(nodes)\n retval[\"nodes\"].append({\"id\": str(a.id), \"title\": a.name, \"size\": size})", "def to_internal_value(self, data):\n authors = []\n for author in data:\n path = urlparse(author).path\n resolved_func, __, resolved_kwargs = resolve(path)\n person = resolved_func.cls.queryset.get(pk=resolved_kwargs['pk'])\n authors.append(person)\n\n return {'authors': authors}", "def __init__(self, *args, **kwargs):\n\n # Construct the base instance.\n super(FilterAuthor, self).__init__(*args, **kwargs)\n\n # Construct a regular expression tag evaluator.\n regextag = self.thistag.find('AuthorRegex')\n if regextag == None:\n raise ValueError('Required tag missing: AuthorRegex')\n self.regex = RegexTag(regextag, re.IGNORECASE)\n\n # Get the author of the transaction or revision. This will\n # cache the author name in the \"Author\" token.\n self.author = self.context.get_author()\n logger.debug('author = \"{0}\"'.format(self.author))", "def author(self, author: str):\n\n self._author = author", "def set_author (self, author):\n self.author = author", "def get_author_data(authors):\n\n try:\n author = authors.author.name.cdata.encode(\"utf8\")\n author_id = int(authors.author.id.cdata.encode(\"utf8\"))\n except: # FIXME: running into errors when book has multiple authors\n author = authors.author[0].cdata.encode(\"utf8\")\n author_id = authors.author[0].cdata.encode(\"utf8\")\n\n return (author, author_id)" ]
[ "0.59337986", "0.5925962", "0.59027773", "0.5860587", "0.57954335", "0.57940024", "0.5690788", "0.561277", "0.5400082", "0.5397839", "0.53956693", "0.5353431", "0.52892834", "0.52811795", "0.5277077", "0.5275605", "0.5230356", "0.51902807", "0.51870674", "0.51870674", "0.51565665", "0.5148006", "0.51439875", "0.5134714", "0.51305884", "0.5129792", "0.51220405", "0.5112446", "0.51107424", "0.5036835" ]
0.7661265
0
Update authors in CDS record.
def update_record(record_id, authors):
    record = get_record(record_id)
    record_author = record_get_field_instances(record, "100")
    record_coauthors = record_get_field_instances(record, "700")

    if len(record_author) > 1:
        print ("Oops: several '100' (main author) fields have been found in "
               "record '{0}'".format(record_id))
        return ""

    datafields = ""
    author = False
    for author_field in record_author:
        try:
            author_name = field_get_subfield_values(author_field, 'a')[0]
            try:
                cds_id = authors[author_name]
                if extend_author_field(author_field, cds_id):
                    datafields += field_xml_output(author_field, "100")
                    author = True
            except KeyError:
                pass
        except IndexError:
            # Author field (`100`) does not have subfield `a`
            pass

    if len(authors) > 1 or not author:
        for coauthor_field in record_coauthors:
            try:
                coauthor_name = field_get_subfield_values(
                    coauthor_field, 'a')[0]
                try:
                    cds_id = authors[coauthor_name]
                    if extend_author_field(coauthor_field, cds_id):
                        author = True
                except KeyError:
                    pass
            except IndexError:
                # Co-author field (`700`) does not have subfield `a`
                pass
            datafields += field_xml_output(coauthor_field, "700")

    # Nothing to update
    if not author:
        # print "No authors to update in record '{0}'".format(record_id)
        return ""

    record = ('<record><controlfield tag="001">{0}</controlfield>{1}'
              '</record>'.format(record_id, datafields))
    return record
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateAuthors(self,event=None):\r\n self.popAuthors()\r\n self.primeAuthor.updateVals(self.authorList)\r\n self.coAuthor.updateVals(self.authorList)\r\n self.correspond.updateVals(self.authorList)", "def authors(self, authors):\n\n self._authors = authors", "def update_author():\n try:\n key = list(request.args.keys())[0]\n val = request.args[key].strip('\"')\n data = request.get_json()\n filter = {key: val}\n except IndexError:\n queryVal = request.form.to_dict()\n filter_val, change_to_val = parse_filter_newValue(queryVal)\n filter = {filter_val[0]: filter_val[1]}\n print(filter)\n data = {change_to_val[0]: change_to_val[1]}\n print(data)\n if all(value == '' for value in data.values()) or all(value == '' for value in filter.values()):\n return render_template('error.html', message=\"Please enter both fields\"), 400\n new_values = {'$set': data}\n mongo.db.Authors.update_one(filter, new_values, upsert=False)\n return render_template('updated_author.html', message=\"Author has been Updated\")", "def addSetAuthor(self,val):\n self.bookAuthor = val", "def author(self, value):\n self._set_attr('author', value)", "def add_authors(self, author_data, instance):\n for idx, author in enumerate(author_data):\n Author.objects.create(dataset=instance, order=idx, author=author)", "def writeAuthor(self,author):\n author = author[:min(32,len(author))]\n self.tes3.hedr.author = author\n self.tes3.hedr.setChanged()\n self.writeHedr()", "def setAuthor(self,value):\n self.PDFreactorConfiguration.in1[\"author\"] = value", "def update(self, instance, validated_data):\n\n # Use an atomic transaction for managing dataset and authors\n with transaction.atomic():\n # pop off the authors data\n if \"authors\" in validated_data.keys():\n author_data = validated_data.pop('authors')\n\n instance._change_reason = 'Adding Authors to Dataset Metadata'\n # remove the existing authors\n Author.objects.filter(dataset_id=instance.id).delete() # delete first\n self.add_authors(author_data, instance)\n\n instance._change_reason = 'Update Dataset Metadata'\n\n # Update Dataset metadata\n super(self.__class__, self).update(instance=instance, validated_data=validated_data)\n\n return instance", "def author(self, author):\n self._author = author", "def set_author(self, author):\n self.author = author\n self.opf.author = author", "def __add_author(self, key_name, others_names, personal_information):\n for name in others_names:\n self.author_to_authorID[name] = (key_name, personal_information)", "def update():\n from dojson.contrib.marc21.utils import create_record\n from inspirehep.dojson.hepnames import hepnames\n\n recid = request.values.get('recid', 0, type=int)\n\n data = {}\n if recid:\n try:\n url = os.path.join(\n current_app.config[\"AUTHORS_UPDATE_BASE_URL\"],\n \"record\", str(recid), \"export\", \"xm\")\n xml = requests.get(url)\n record_regex = re.compile(\n r\"\\<record\\>.*\\<\\/record\\>\", re.MULTILINE + re.DOTALL)\n xml_content = record_regex.search(xml.content).group()\n\n data = strip_empty_values(\n hepnames.do(create_record(xml_content))) # .encode(\"utf-8\")\n convert_for_form(data)\n except requests.exceptions.RequestException:\n pass\n data[\"recid\"] = recid\n else:\n return redirect(url_for(\"inspirehep_authors_holdingpen.new\"))\n form = AuthorUpdateForm(data=data, is_update=True)\n ctx = {\n \"action\": url_for('.submitupdate'),\n \"name\": \"authorUpdateForm\",\n \"id\": \"authorUpdateForm\",\n }\n\n # FIXME create template in authors module\n return 
render_template('authors/forms/update_form.html', form=form, **ctx)", "def set_author (self, author):\n self.author = author", "def editAuthorByID(id: int, name: str, birth: str):\n if not id:\n abort(400)\n author = Author.query.get(id)\n if not author:\n abort(404, \"Author is not found\")\n if name:\n author.name = name\n if birth:\n author.birth = birth\n db.session.commit()\n app.logger.info(f\"The author {id} has been edited\")", "def author(self, author):\n\n self._author = author", "def author(self, author):\n\n self._author = author", "def corporate_authors(self, key, value):\n _corporate_authors = self.get(\"authors\", [])\n\n for v in force_list(value):\n if key == \"710__\":\n if \"a\" in v:\n _corporate_authors.append(\n {\n \"full_name\": clean_val(\"a\", v, str),\n \"type\": \"ORGANISATION\",\n }\n )\n else:\n self[\"authors\"] = collaborations(self, key, value)\n raise IgnoreKey(\"corporate_authors\")\n else:\n _corporate_authors.append(\n {\"full_name\": clean_val(\"a\", v, str), \"type\": \"ORGANISATION\"}\n )\n return _corporate_authors", "def update_by_id(cls, id, name, surname):\n\t\tauthor = Author.query.get(id)\n\t\tauthor.name = name\n\t\tauthor.surname = surname\n\t\tdb.session.commit()", "def alt_authors(self, key, value):\n _authors = self.get(\"authors\", [])\n if _authors:\n for i, v in enumerate(force_list(value)):\n _authors[i].update({\"alternative_names\": clean_val(\"a\", v, str)})\n return _authors", "def author_name(self, author_name):\n\n self._author_name = author_name", "def svn_client_commit_info_t_author_set(svn_client_commit_info_t_self, char_author): # real signature unknown; restored from __doc__\n pass", "def extend_author_field(author_field, cds_id):\n cds_authority_id = \"AUTHOR|(CDS){0}\".format(cds_id)\n if cds_authority_id not in field_get_subfield_values(author_field, '0'):\n field_add_subfield(author_field, \"0\", cds_authority_id)\n field_add_subfield(author_field, \"9\", \"#BEARD#\")\n return True\n\n return False", "def author_id(self, author_id):\n\n self._author_id = author_id", "def author(self, author: str):\n\n self._author = author", "def modified_author(self, modified_author):\n\n self._modified_author = modified_author", "def authors(self, key, value):\n _authors = self.get(\"authors\", [])\n item = build_ils_contributor(value)\n if item and item not in _authors:\n _authors.append(item)\n try:\n if \"u\" in value:\n other = [\"et al.\", \"et al\"]\n val_u = list(force_list(value.get(\"u\")))\n if [i for i in other if i in val_u]:\n self[\"other_authors\"] = True\n except UnexpectedValue:\n pass\n return _authors", "def _on_authors_list(self, evt):\n \n # raise authors management dialog\n dlg = AuthorsView(self, self._library)\n response = dlg.ShowModal()\n dlg.Destroy()\n \n # check response\n if response != wx.ID_OK:\n return\n \n # refresh collections view\n self._collections_view.UpdateCounts()\n \n # refresh articles view\n self._articles_view.ShowArticles()", "def authors():\n\tclick.clear()\n\trich.print(\"[bold]IDT[/bold] was initially made by [bold magenta]Deliton Junior[/bold magenta] and [bold red]Misael Kelviny[/bold red]\")", "def popAuthors(self):\r\n# cur = self.dbConn.execute(\"SELECT * FROM People WHERE PersonID>0 ORDER BY Lastname\")\r\n# res = cur.fetchall()\r\n res = self.dbConn.execute(\"SELECT * FROM People WHERE PersonID>0 ORDER BY Lastname\").fetchall()\r\n\r\n self.authorList = [formatNameSQL(ln) for ln in res]\r\n self.quickAuthors = [ln[\"Lastname\"].lower() for ln in res]\r\n vals = [ln[\"PersonID\"] 
for ln in res]\r\n \r\n self.authorLookup = dict(zip(self.authorList,vals))" ]
[ "0.731476", "0.6796486", "0.6614002", "0.6487669", "0.6476942", "0.64210784", "0.6345062", "0.6321634", "0.6265239", "0.6230697", "0.61914086", "0.61902046", "0.61164945", "0.6104606", "0.6094158", "0.6070964", "0.6070964", "0.605973", "0.6028583", "0.60037154", "0.59922695", "0.59580064", "0.5955667", "0.59375626", "0.59362525", "0.5918481", "0.5883263", "0.5725412", "0.5716847", "0.5702109" ]
0.7023796
1
indexes of target nodes in the integral graph
def target_nodes_indexes(self) -> _TargetNodes:
    return self.__target_nodes_indexes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all_sampled_nodes_indexes(self) -> torch.LongTensor:\n all_sampled_nodes_indexes: _typing.Any = self.__all_sampled_nodes_indexes\n return all_sampled_nodes_indexes", "def eligible_edges_with_indexes(self):\n return enumerate(self.edges)", "def reference_nodes_idx(self) -> Dict[str, torch.Tensor]:\n return self.node_idx_references", "def output_node_ids(self):\n return [\n i\n for i in range(\n self.n_inputs + self.n_hidden,\n self.n_inputs + self.n_hidden + self.n_outputs,\n )\n ]", "def all_node_ids(self):\n return [i for i in range(0, self.n_inputs + self.n_hidden + self.n_outputs)]", "def reference_nodes_graph_idx(self) -> Dict[str, torch.Tensor]:\n return self.node_graph_idx_reference", "def master_ndindex(self): # itermaster_indices(self):\n return itertools_product(\n *[range(*r) for r in self.location]\n ) # TODO check", "def indices(self):\n i, j, _edge = self.indicesAndEdge()\n return i, j", "def hoggar_indices():\n return list(product([0,1], repeat=6))", "def input_node_ids(self):\n return [i for i in range(self.n_inputs)]", "def get_indegrees(graph: Graph):\n transpose = get_transpose_graph(graph)\n return {node: len(target_nodes) for node, target_nodes in transpose.items()}", "def narration_target(self):", "def getNeighbours(seg,meta,inversedIndex):\n return np.unique(np.fromiter( (inversedIndex[x] for x in np.concatenate([meta.loc[seg]['ins'],meta.loc[seg]['outs']])),dtype=np.int))", "def getLandmarkindices(self):\n return self.subsetnodes_indices", "def _get_target_index(self):\n return (self.index + self.source_window * (not self.overlapping) +\n self.offset)", "def hidden_node_ids(self):\n return [i for i in range(self.n_inputs, self.n_inputs + self.n_hidden)]", "def _get_index_mapper_list(graph, next_graph, cur_node_start_idx, next_node_start_idx):\n cur_idx = []\n next_idx = []\n\n next_graph_tag = next_graph.ndata['tag']\n ally_node_index = get_filtered_node_index_by_type(graph, NODE_ALLY)\n for cn_index in graph.nodes()[ally_node_index]:\n curr_tag = graph.ndata['tag'][cn_index]\n next_graph_index = (next_graph_tag == curr_tag).nonzero().squeeze().int()\n if next_graph_index.nelement() == 0:\n pass\n elif next_graph_index.nelement() == 1:\n cur_idx.append((cn_index + cur_node_start_idx).tolist())\n next_idx.append((next_graph_index + next_node_start_idx).tolist())\n else:\n raise RuntimeError(\"Existing multiple units with same tag in next graph\")\n return cur_idx, next_idx", "def get_pulling_indices(self, weight):\n pass", "def nodeid_to_index(G):\n\n d = {node_id: i for i, node_id in enumerate(G.nodes)}\n\n return d", "def get_main_points(neuron):\n (branch_index,) = np.where(neuron.branch_order[neuron.n_soma:] == 2)\n (endpoint_index,) = np.where(neuron.branch_order[neuron.n_soma:] == 0)\n selected_index = np.union1d(branch_index + neuron.n_soma,\n endpoint_index + neuron.n_soma)\n selected_index = np.append(range(neuron.n_soma), selected_index)\n return selected_index", "def get_gt_hom_idxs(alt_num):\n last = -1\n hom_idxs = []\n for a in range(alt_num + 1):\n last = last + (a + 1)\n hom_idxs.append(last)\n return hom_idxs", "def eligible_edges_with_indexes(self):\n return list(map(lambda e: (self.edges.index(e), e), self.eligible_edges))", "def index_nodes(self):\n index_nodes = []\n for node in self.nodes:\n if 'indexnode' == node.get('infos').get('type'):\n index_nodes.append(node)\n return index_nodes", "def agent_locs_idx(self):\n return tuple(self.agent_locs.T)", "def vir_indices(self):\n indices = []\n for index,item in enumerate(self):\n if 
item==0:\n indices.append(index)\n return indices", "def run_adding_edges(self):\n indices = np.where(self.X==0)\n idx=[]\n for i in range(len(indices[0])):\n idx.append((indices[0][i],indices[1][i]))\n idx = np.array(idx)\n return self.node_equivalent(idx)", "def mainIndices(self):\n return self.i1, self.i2", "def ind_nodes(self, graph=None):\n if graph is None:\n graph = self.graph\n\n dependent_nodes = set(\n node for dependents in graph.values() for node in dependents\n )\n return [node for node in graph.keys() if node not in dependent_nodes]", "def indices(self):\n return range(len(self))", "def get_indices_input_target(num_obs, input_len, step_size, forecast_horizon, target_len):\n input_len = round(input_len) # just a precaution\n start_position = 0\n stop_position = num_obs - 1\n\n inpseq_first_idx = start_position\n inpseq_last_idx = inpseq_first_idx + input_len\n target_first_idx = inpseq_last_idx + forecast_horizon\n target_last_idx = target_first_idx + target_len\n print(\"target_last_idx = {}\".format(target_last_idx))\n print(\"stop_position = {}\".format(stop_position))\n indices = []\n while target_last_idx <= stop_position:\n indices.append((inpseq_first_idx, inpseq_last_idx, target_first_idx, target_last_idx))\n inpseq_first_idx += step_size\n inpseq_last_idx += step_size\n target_first_idx += inpseq_last_idx + forecast_horizon\n target_last_idx += target_first_idx + target_len\n return indices" ]
[ "0.6612173", "0.6522753", "0.65165037", "0.64686227", "0.6402145", "0.6356008", "0.63395274", "0.632982", "0.6309057", "0.62649596", "0.62639195", "0.62190074", "0.6187654", "0.61834425", "0.614431", "0.6137135", "0.6136961", "0.6086665", "0.6084775", "0.6081723", "0.6038501", "0.59883875", "0.5970336", "0.5969782", "0.5963428", "0.5930912", "0.5912919", "0.5877759", "0.5868496", "0.58622515" ]
0.7760791
0
indexes of all sampled nodes in the integral graph
def all_sampled_nodes_indexes(self) -> torch.LongTensor:
    all_sampled_nodes_indexes: _typing.Any = self.__all_sampled_nodes_indexes
    return all_sampled_nodes_indexes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getLandmarkindices(self):\n return self.subsetnodes_indices", "def all_node_ids(self):\n return [i for i in range(0, self.n_inputs + self.n_hidden + self.n_outputs)]", "def getVertexNumbers(self):\n return self.vertexIndex.keys()", "def input_node_ids(self):\n return [i for i in range(self.n_inputs)]", "def vir_indices(self):\n indices = []\n for index,item in enumerate(self):\n if item==0:\n indices.append(index)\n return indices", "def target_nodes_indexes(self) -> _TargetNodes:\n return self.__target_nodes_indexes", "def master_ndindex(self): # itermaster_indices(self):\n return itertools_product(\n *[range(*r) for r in self.location]\n ) # TODO check", "def get_indices(self):\r\n return self._indices", "def output_node_ids(self):\n return [\n i\n for i in range(\n self.n_inputs + self.n_hidden,\n self.n_inputs + self.n_hidden + self.n_outputs,\n )\n ]", "def get_node_count(self) -> Iterable:\n return self._g.V().count().toList()[0]", "def indices(self):\n i, j, _edge = self.indicesAndEdge()\n return i, j", "def iter_node_map(self):\n return self.d_inv.keys()", "def eligible_edges_with_indexes(self):\n return enumerate(self.edges)", "def hoggar_indices():\n return list(product([0,1], repeat=6))", "def indices(self):\n _indices = []\n for h in self.miller.indices():\n _indices.append(self.indices_hkl(*h)[0])\n return _indices", "def index_nodes(self):\n index_nodes = []\n for node in self.nodes:\n if 'indexnode' == node.get('infos').get('type'):\n index_nodes.append(node)\n return index_nodes", "def get_indexes(self, dataset: BaseDataset) -> int:\n\n index = [np.random.randint(0, len(dataset)) for _ in range(1)]\n\n return index", "def getGlobalIdxVals( self, i : int ):\n return range(self._layout.starts[i],self._layout.ends[i])", "def hidden_node_ids(self):\n return [i for i in range(self.n_inputs, self.n_inputs + self.n_hidden)]", "def get_active_register_indices(self):\n assert self.sketch.ndim == 1, 'Currently only support 1-dimensional sketch.'\n return np.flatnonzero(self.sketch)", "def indices(self) -> np.ndarray:\n return self.impl.indices", "def indices(self):\n return range(len(self))", "def indices(self):\n return self.index.indices", "def reference_nodes_idx(self) -> Dict[str, torch.Tensor]:\n return self.node_idx_references", "def get_unprescribed_indexes(self):\n total_dof = DOF_PER_NODE_STRUCTURAL * len(self.preprocessor.nodes)\n all_indexes = np.arange(total_dof)\n return np.delete(all_indexes, self.prescribed_indexes)", "def idx_adjacency_lists(self) -> List[List[int]]:\n result = []\n\n for intersection in self._intersection_list:\n nbs = []\n\n for nb in self.adj_dict[intersection]:\n nbs.append(self._intersection_to_idx[nb])\n\n result.append(nbs)\n\n return result", "def get_node_indices_and_levels(nd: np.ndarray):\n indices = []\n lvs = []\n for j in range(1, nd.shape[0]):\n if j == 1:\n indices = nd[j]\n lvs = nd[j + 1]\n elif j % 2 != 0 and j > 1:\n indices = np.append(indices, nd[j])\n elif j % 2 == 0 and j > 2:\n lvs = np.append(lvs, nd[j])\n return indices, lvs", "def get_prescribed_indexes(self):\n global_prescribed = []\n for node in self.preprocessor.nodes.values():\n if node.there_are_prescribed_dofs:\n starting_position = node.global_index * DOF_PER_NODE_STRUCTURAL\n dofs = np.array(node.get_prescribed_dofs_bc_indexes()) + starting_position\n global_prescribed.extend(dofs)\n return global_prescribed", "def get_indexes(self):\n indexes = []\n for c in self.components:\n indexes.extend(c.get_indexes())\n return indexes", "def 
getNeighbours(seg,meta,inversedIndex):\n return np.unique(np.fromiter( (inversedIndex[x] for x in np.concatenate([meta.loc[seg]['ins'],meta.loc[seg]['outs']])),dtype=np.int))" ]
[ "0.6573811", "0.6432765", "0.6346587", "0.6314584", "0.6272244", "0.62536925", "0.6206393", "0.61999273", "0.6198595", "0.61503786", "0.61315864", "0.6097065", "0.60742855", "0.6071434", "0.6031599", "0.60140324", "0.6012557", "0.60074216", "0.59971064", "0.5995", "0.5981299", "0.59749836", "0.5970037", "0.59690106", "0.59269416", "0.59221846", "0.5911864", "0.59001976", "0.5896882", "0.5883945" ]
0.7695562
0
Sample edges for one specific layer, expected to be implemented in subclass.
def _sample_edges_for_layer(
        self,
        __current_layer_target_nodes_indexes: torch.LongTensor,
        __top_layer_target_nodes_indexes: torch.LongTensor,
        layer_argument: _typing.Any,
        *args, **kwargs
) -> _typing.Tuple[torch.LongTensor, _typing.Optional[torch.Tensor]]:
    raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __filterEdges(self):", "def test_edges(self):\n return self._test_edges", "def sample_edge_uniform(adj_list, degrees, n_triplets, sample_size):\n all_edges = np.arange(n_triplets)\n return np.random.choice(all_edges, sample_size, replace=False)", "def sample_edge_neighborhood(adj_list, degrees, n_triplets, sample_size):\n edges = np.zeros((sample_size), dtype=np.int32)\n\n # initialize\n sample_counts = np.array([d for d in degrees])\n picked = np.array([False for _ in range(n_triplets)])\n seen = np.array([False for _ in degrees])\n\n for i in range(0, sample_size):\n weights = sample_counts * seen\n\n if np.sum(weights) == 0:\n weights = np.ones_like(weights)\n weights[np.where(sample_counts == 0)] = 0\n\n probabilities = (weights) / np.sum(weights)\n chosen_vertex = np.random.choice(np.arange(degrees.shape[0]),\n p=probabilities)\n\n chosen_adj_list = adj_list[chosen_vertex]\n seen[chosen_vertex] = True\n\n chosen_edge = np.random.choice(np.arange(chosen_adj_list.shape[0]))\n chosen_edge = chosen_adj_list[chosen_edge]\n edge_number = chosen_edge[0]\n\n while picked[edge_number]:\n chosen_edge = np.random.choice(np.arange(chosen_adj_list.shape[0]))\n chosen_edge = chosen_adj_list[chosen_edge]\n edge_number = chosen_edge[0]\n\n edges[i] = edge_number\n other_vertex = chosen_edge[1]\n picked[edge_number] = True\n sample_counts[chosen_vertex] -= 1\n sample_counts[other_vertex] -= 1\n seen[other_vertex] = True\n\n return edges", "def _pick_edge(self, edges):\n total_weight = sum([e.weight for e in edges])\n r = rand.random() * total_weight\n running_total = 0\n for e in edges:\n running_total += e.weight\n if running_total >= r:\n return e", "def test_iter0(self):\n test_edges = self.read_edges('data/ring_0.mat')\n\n img_gray = self.read_image('images/ring.tif')\n edges = subpixel_edges(img_gray, 25, 0, 2)\n\n assert np.array_equiv(edges.position, test_edges.position - 1)\n assert np.allclose(edges.x, test_edges.x - 1)\n assert np.allclose(edges.y, test_edges.y - 1)\n assert np.allclose(edges.nx, test_edges.nx)\n assert np.allclose(edges.ny, test_edges.ny)\n assert np.allclose(edges.curv, test_edges.curv)\n assert np.allclose(edges.i0, test_edges.i0)\n assert np.allclose(edges.i1, test_edges.i1)", "def subsample_graph(graph, max_degree,\n rng):\n edges = sampler.get_adjacency_lists(graph)\n edges = sampler.sample_adjacency_lists(edges, graph.train_nodes, max_degree,\n rng)\n senders = []\n receivers = []\n for u in edges:\n for v in edges[u]:\n senders.append(u)\n receivers.append(v)\n\n graph.senders = senders\n graph.receivers = receivers\n return graph", "def sample(self, shape):\n\t\traise NotImplementedError()", "def edges( self ):\n raise NotImplementedError(\"edges\");", "def _sample(self, geometry: Geometry) -> math.Tensor:\n raise NotImplementedError(self)", "def sample(self, graph: nx.classes.graph.Graph) -> nx.classes.graph.Graph:\n self._nodes = set()\n self._edges = set()\n self._check_graph(graph)\n self._graph = graph\n self._create_initial_seed_set()\n while len(self._nodes) < self.number_of_nodes:\n self._reweight()\n self._do_update()\n new_graph = nx.from_edgelist(self._edges)\n return new_graph", "def _get_sample(self):\n return [layer._get_sample() for layer in self.layers]", "def edges(self):\r\n return self.__generate_edges()", "def edges(self):\n for e in self._edges:\n yield e", "def test_iter1(self):\n test_edges = self.read_edges('data/ring_1.mat')\n\n img_gray = self.read_image('images/ring.tif')\n edges = subpixel_edges(img_gray, 25, 1, 2)\n\n mask = 
np.ones(len(edges.position), dtype=bool)\n # Excluded values that are known to be different\n mask[[258]] = False\n\n assert np.array_equiv(edges.position[mask], test_edges.position[mask] - 1)\n assert np.allclose(edges.x[mask], test_edges.x[mask] - 1)\n assert np.allclose(edges.y[mask], test_edges.y[mask] - 1)\n assert np.allclose(edges.nx[mask], test_edges.nx[mask])\n assert np.allclose(edges.ny[mask], test_edges.ny[mask])\n assert np.allclose(edges.curv[mask], test_edges.curv[mask])\n assert np.allclose(edges.i0[mask], test_edges.i0[mask])\n assert np.allclose(edges.i1[mask], test_edges.i1[mask])", "def sample(self):\n raise NotImplementedError", "def sample(self):\n raise NotImplementedError", "def regular_subsample(neuron):\n # select all the main points\n selected_index = get_main_points(neuorn)\n\n # Computing the parent id of the selected nodes\n neuron = neuron_with_selected_nodes(selected_index)\n return neuron", "def edges(self):\n return self.generate_edges()", "def sample(self):\r\n raise NotImplementedError", "def test_sample_from_extra_bounds_good(self):\n dim = Real(\"yolo\", \"norm\", 0, 2, low=-5, high=+5, shape=(4, 4))\n for _ in range(8):\n samples = dim.sample(8)\n for sample in samples:\n assert sample in dim", "def choose_random(self, exclude):\n other_edges = list(set(self.vertices()) - set(exclude))\n return random.choice(other_edges)", "def sample(self, batch_size):\n raise NotImplementedError", "def __init__(self, sample_size, neighbours, lengths, offsets, seed=0):\n self.sample_size = sample_size\n self.seed, self.seed2 = random_seed.get_seed(seed)\n self.neighbours = neighbours\n self.lengths = lengths\n self.offsets = offsets\n super(UniformEdgeDataset, self).__init__()", "def edges(self, layout):\n return", "def sample(self):\n raise NotImplementedError(\"Override me!\")", "def edges(self):\n return self.__generate_edges()", "def edges(self):\n return self.__generate_edges()", "def edges(self):\n return self.__generate_edges()", "def test_sampling(self):\n dim = Fidelity(\"epoch\", 1, 2)\n assert dim.sample() == [2]\n dim = Fidelity(\"epoch\", 1, 5)\n assert dim.sample() == [5]\n dim = Fidelity(\"epoch\", 1, 5)\n assert dim.sample(4) == [5] * 4" ]
[ "0.5996003", "0.58144414", "0.57052094", "0.56026417", "0.55999064", "0.55881745", "0.55750936", "0.5531893", "0.55224186", "0.5510966", "0.55064124", "0.5459333", "0.5412571", "0.5396114", "0.53951234", "0.53936285", "0.53936285", "0.5372117", "0.5318894", "0.5314331", "0.52587354", "0.52501816", "0.5228887", "0.5226325", "0.52152574", "0.52090704", "0.51935446", "0.51935446", "0.51935446", "0.5185882" ]
0.6477802
0
Initializes the class by setting up the root based on the given name and tags
def __init__(self, xml_file, root_name, tags=[]):
    self.xml_file = xml_file
    self.tree = ET.ElementTree(ET.Element(root_name))
    self.root = self.tree.getroot()
    for tag in tags:
        self.root.set(tag[0], tag[1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, name: str) -> None:\n\t\t# Set variables to blank values\n\t\tself.name = name\n\t\tself.has_subfile = False\n\t\tself.root_nodes: List[RootNode] = []\n\t\tself.root_names: Set[str] = set()\n\t\tself.node_names: Set[str] = set()\n\t\tself.subnode_names: Dict[str, Set[str]] = {}", "def _setup(self) -> None:\n\t\t# Set name from tag\n\t\tself.name = self._element.tag\n\n\t\t# Get attributes and add them to the attributes list\n\t\tfor attribute in self._element.attrib:\n\t\t\tself.attributes.append(attribute)\n\n\t\t# Get Children, add them to their respective lists\n\t\tchild: ET.Element\n\t\tfor child in self._element:\n\t\t\t# Determine if child is a SubNode or a Node\n\t\t\t# If child has children or attributes it is a Node\n\t\t\tif len(child) or len(child.attrib):\n\t\t\t\t# Add Node\n\t\t\t\tself.add_node(Node(child))\n\t\t\telse:\n\t\t\t\tself.add_subnode(SubNode(child))", "def __init__(self, root=None):\n self.set_root(root)", "def __init__(self,tag,attributes=None,children=None): \n self.tag = mapping[tag]\n if attributes is None:\n self.attributes = []\n else:\n self.attributes = attributes\n if children is None:\n self.children = []\n else:\n self.children = children", "def __init__(self, tags=''):\n self.tags = tags", "def __init__(self, root):\n self.root = root", "def __init__(self, root):\n self.root = root", "def __init__(self):\n \n r\"\"\"\n Actually ElementTree root element\n \n Following is the header as given in zend-config::\n \n /**\n * XML Reader instance.\n *\n * @var XMLReader\n */\n \"\"\"\n self.root = None\n \n r\"\"\"\n Following is the header as given in zend-config::\n \n /**\n * Directory of the JSON file\n *\n * @var string\n */\n \"\"\"\n self.directory = ''", "def __init__(self, tags):\n self.tags = tags", "def __init__(self,\r\n name=None,\r\n assets=None,\r\n drivers=None,\r\n machines=None,\r\n parent_tag_id=None,\r\n sensors=None,\r\n vehicles=None):\r\n\r\n # Initialize members of the class\r\n self.assets = assets\r\n self.drivers = drivers\r\n self.machines = machines\r\n self.name = name\r\n self.parent_tag_id = parent_tag_id\r\n self.sensors = sensors\r\n self.vehicles = vehicles", "def __init__(self, source):\n self.tree = ET.parse(source)\n self.root = self.tree.getroot()", "def __init__(self, name):\n debug.printMsg(\"We Initiated a BST with no root node\")\n self.name = name\n self.root = None\n self.size = 0", "def __init__(self, root, style=DocstringStyle.SPHINX):\n # type: (Union[CykNode, str], DocstringStyle) -> None # noqa: E501\n if isinstance(root, CykNode):\n self.root = root # type: Optional[CykNode]\n else:\n self.root = parse(condense(lex(root)))\n self._lookup = self._discover()", "def __init__(self, root, branches=None):\n self.tree_dict = {}\n self.directory = Path(root)\n self.start = str(self.directory).rfind(os.sep) + 1\n self.branches = branches\n self.get()", "def __init__(self, app, root=None, name=\"\"):\n self.app = app\n self.root=root\n self.name = str(name)", "def __init__(self):\n self.root = Node('')", "def __init__(self):\n self.root = self.Node(None)", "def __init__(self):\n self.root = Node(\"\")", "def __init__(self):\n self.root = Node(\"\")", "def __init__(self):\n self.root = self.Node()", "def setUpClass(cls):\n filename = 'root://eosatlas.cern.ch//eos/atlas/user/t/turra/user.blenzi.4956574.EXT0._000001.AOD.pool.root'\n if (not ROOT.xAOD.Init().isSuccess()):\n print(\"Failed xAOD.Init()\")\n\n treeName = \"CollectionTree\"\n\n f = ROOT.TFile.Open(filename)\n if not f:\n print(\"file %s 
not found\" % filename)\n cls.tree = ROOT.xAOD.MakeTransientTree(f, treeName)", "def __init__(self, tag):\n self.tag = tag.lower()\n self.attrs = {}\n self.contents = ()", "def _initialize_trees(self):", "def __init__(self):\n self.root = {}", "def __init__(self):\n self.root = {}", "def __init__(self):\n self.root = {}", "def __init__(self):\n self.root = {}", "def __init__(self):\n self.root = {}", "def __init__(self):\n\n self.root = Node(name='root',children=set())\n\n self.map = {}\n self.map['root'] = self.root\n\n self.jsonStr = \"\"", "def __init__(self, root):\r\n self.root = root\r\n self.nodes = [root]\r\n self.nodes.extend(Node.all_descendants(self.root))\r\n self.node_ids = [ n.id for n in self.nodes ]" ]
[ "0.6723491", "0.67032516", "0.65701365", "0.6407076", "0.6393281", "0.6339078", "0.6339078", "0.6282168", "0.6281084", "0.6245782", "0.6195248", "0.6186578", "0.6186554", "0.6166327", "0.61432064", "0.6107229", "0.6097506", "0.6090887", "0.6090887", "0.6085682", "0.6081844", "0.6069891", "0.6056137", "0.60393614", "0.60393614", "0.60393614", "0.60393614", "0.60393614", "0.6001983", "0.5995853" ]
0.7406777
0
Init required project's objects using defined configuration. Returns an object dict that is added to the project object. This method is called in the server load step.
def init_objects(config_dict):
    # only testing purposes
    obj_list = dict()
    obj_list['input_cfg'] = config_dict
    return obj_list
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init():\n defaults = _project_defaults()\n\n if Project.prompt:\n defaults['name'] = prompt(\"Enter the project's name:\", defaults['name'])\n defaults['package'] = prompt(\"Enter the project's package:\", defaults['package'])\n defaults['author'] = prompt(\"Enter the project's author:\", defaults['author'])\n defaults['author_email'] = prompt(\"Enter the project's author's email:\", defaults['author_email'])\n defaults['description'] = prompt(\"Enter the project's description:\", defaults['description'])\n\n # print(\"defaults:\\n{defaults}\".format(defaults=pformat(defaults)))\n\n if Project.use_templates:\n\n template = Template()\n\n for template_dir in [os.path.abspath(os.path.join(herringlib, 'herringlib', 'templates'))\n for herringlib in HerringFile.herringlib_paths]:\n\n info(\"template directory: %s\" % template_dir)\n # noinspection PyArgumentEqualDefault\n template.generate(template_dir, defaults, overwrite=False)", "def init_project(self,project_name,project_dir):\n projectkey = id_generator(10)\n if \"towercrane\" not in os.listdir(project_dir):\n print(f'Initializing project:\"{project_name}\" with projectkey: \"{projectkey}\" ')\n self.TowercraneConfig = {\"project_name\":project_name,\n \"projectkey\":projectkey,\n \"publicurl\":\"private_project\"\n }\n write_config(project_dir,self.TowercraneConfig)\n project_insert_report = self.db.create_project(project_name,project_dir,projectkey)\n print(project_insert_report)\n \n elif \"towercrane\" in os.listdir(project_dir):\n self.TowercraneConfig = read_config(project_dir)\n print(f'project:\"{self.TowercraneConfig[\"project_name\"]}\" with projectkey: \"{self.TowercraneConfig[\"projectkey\"]}\" Already Exists')", "def __init__(self):\n\n # open json config file that reads in information\n config_path = open(\"config.json\", \"r\")\n config_json = config_path.read()\n config_dict = json.loads(config_json)\n\n # assign object variables\n self.project_id = config_dict[\"project-id\"]\n self.bucket_name = config_dict[\"bucket-name\"]\n self.location_id = config_dict[\"key-location\"]\n self.key_ring_id = config_dict[\"key-ring-id\"]\n self.crypto_key_id = config_dict[\"crypto-key-id\"]\n self.service_account_email = config_dict[\"service-account-email\"]\n\n # close the file\n config_path.close()", "def pre_project_create(self, resource_dict):\n pass", "def _prepare_projects(self):\n self._projects = {}\n self._cfgs = {}\n self._plugins = {}\n\n working_bins = []\n for b in self._seed_bins:\n if any([nb in b for nb in self._ignore_bins]):\n continue\n\n log.info(\"Building %s CFG (this may take some time)\" % b.split('/')[-1])\n try:\n blob = False\n try:\n self._projects[b] = angr.Project(b, auto_load_libs=False)\n except:\n log.info(\"We got a blob\")\n self._projects[b] = angr.Project(b, auto_load_libs=False, load_options={'main_opts': {'custom_arch': self.config['arch'], 'backend': 'blob', 'custom_base_addr': int(self.config['base_addr'], 16)}})\n blob = True\n\n self._cfgs[b] = self._projects[b].analyses.CFG(collect_data_references=True, extra_cross_references=True)\n\n self._plugins[b] = []\n\n if blob:\n memcplike = find_memcmp_like(self._projects[b], self._cfgs[b])\n else:\n memcplike = []\n\n for plugin in self._enabled_plugins:\n self._plugins[b].append(plugin(self._projects[b], self._cfgs[b], self._fw_path, memcmp_like_functions=memcplike,log=log))\n working_bins.append(b)\n except Exception as e:\n log.warning(\"Skipping binary %s\" % b)\n import ipdb; ipdb.set_trace()\n self._seed_bins = list(working_bins)", 
"def __init__(self, config):\n\n self.mode9 = config[sC.PROJECT_DETAILS][sC.MODE] == '9'\n self.admins = eval(handler.config[sC.PROJECT_DETAILS][sC.ADMIN_IDS])\n self.approvers = eval(handler.config[sC.COUNTER_STRIKE_ADMINS][sC.APPROVERS])", "def init(projectfolder, projectname, example):\n\n productline_dir = path.join(projectfolder, \"productline\")\n configs_path = path.join(productline_dir, \"configs\")\n bddfeatures_path = path.join(projectfolder, \"bddfeatures\")\n testreports_path = path.join(projectfolder, \"testreports\")\n\n if not path.exists(productline_dir):\n makedirs(productline_dir)\n\n if not path.exists(configs_path):\n makedirs(configs_path)\n\n if not path.exists(bddfeatures_path):\n makedirs(bddfeatures_path)\n\n if not path.exists(testreports_path):\n makedirs(testreports_path)\n\n model_src = pkg_resources.resource_filename(__name__, \"templates/model.xml\")\n model_dst = path.join(productline_dir, \"model.xml\")\n shutil.copyfile(model_src, model_dst)\n utilities.sed_inplace(model_dst, \"{{PROJECT_NAME}}\", projectname.replace(\" \", \"\"))\n\n configtemplate_src = pkg_resources.resource_filename(__name__, 'templates/aplet.yml')\n configtemplate_dst = path.join(projectfolder, \"aplet.yml\")\n shutil.copyfile(configtemplate_src, configtemplate_dst)\n utilities.sed_inplace(configtemplate_dst, \"{{PROJECT_NAME}}\", projectname)\n\n # copy docs templates from aplet application into projectfolder\n lektortemplates_path = pkg_resources.resource_filename(__name__, 'templates/lektor')\n doc_templates_path = path.join(projectfolder, \"doc_templates\")\n if not path.exists(doc_templates_path):\n shutil.copytree(lektortemplates_path, doc_templates_path)\n\n\n if example:\n examples_dir = \"templates/exampleproject\"\n model_src = pkg_resources.resource_filename(__name__, path.join(examples_dir, \"model.xml\"))\n shutil.copyfile(model_src, model_dst)\n exampleconfig_src = pkg_resources.resource_filename(__name__, path.join(examples_dir, \"ExampleProduct.config\"))\n shutil.copyfile(exampleconfig_src, path.join(configs_path, \"ExampleProduct.config\"))\n configtemplate_src = pkg_resources.resource_filename(__name__, path.join(examples_dir, \"aplet.yml\"))\n shutil.copyfile(configtemplate_src, configtemplate_dst)", "def pub_init(args, project=\"\", account=\"\", base_url=\"\",\n api_key=\"\", dj_api_key=\"\"):\n #pylint:disable=too-many-arguments,unused-argument\n _, _, updated = get_project_connect(\n 'djaodjin',\n base_url=DEFAULT_API_ENDPOINT,\n api_key=dj_api_key)\n project, account, updated_next = get_project_account(\n project=project, account=account)\n updated |= updated_next\n project, base_url, api_key, updated_next = get_project_config(\n project=project, base_url=base_url, api_key=api_key)\n updated |= updated_next\n if updated:\n save_config()", "def init_configs(self):\n\n # get current location\n self.script_dir = os.path.dirname(__file__)\n\n # load configuration file\n with open(os.path.join(self.script_dir, \"config.json\")) as f:\n self.configs = json.load(f)\n \n # load some configs as attributes\n self.resource_folder = os.path.join(self.script_dir, self.configs[\"resource_path\"], self.resource_type, self.language)\n self.pre_processed_folder = os.path.join(self.resource_folder, self.configs[\"pre_processed_path\"])\n self.results_folder = os.path.join(self.resource_folder, self.configs[\"results_path\"])\n self.chunk_size = self.configs[\"resources\"][self.resource_type][\"chunk_size\"]", "def _prepare(self):\n\n # Set configuration defaults and save to the 
project document\n self.config.setdefault('PAGINATION', True)\n self.config.setdefault('PER_PAGE', 25)\n\n # Create and configure the Flask application\n self.app = self._create_app(self.config)\n\n # Add assets and routes\n self.assets = self._create_assets()\n self._register_routes()\n\n # Add module assets and routes\n self._module_assets = []\n for module in self.modules:\n try:\n module.register(self)\n except Exception as e:\n logger.error('Error while registering {} module: {}'.format(\n module.name, e))\n logger.error('Removing module {} from dashboard.'.format(\n module.name))\n self.modules.remove(module)\n\n # Clear dashboard and project caches.\n self.update_cache()", "def _initialize_project_variables(self):\n self.Source = ''\n self.Regional = ''\n self.Vernacular = ''\n self.Fallback = dict()\n self.New_Target = dict()\n self.Biblical_Terms = dict()\n self.Old_Target = dict()\n\n# self.list_projects = []\n# self.project_lines = []\n# self.indent = 0\n# self.Treed = False\n self.root = etree.Element('root')\n# #add child 'settings', all user configurable bits under here\n self.settings = etree.SubElement(self.root, \"settings\")\n# self.old_mode = dict()\n# self.spreferred = etree.SubElement(self.settings, \"preferred\")\n# self.smode = etree.SubElement(self.settings, \"mode\")\n# self.stemp = etree.SubElement(self.settings, \"template\")\n self.sf0 = etree.SubElement(self.settings, \"f0\")\n self.sf1 = etree.SubElement(self.settings, \"f1\")\n self.sf2 = etree.SubElement(self.settings, \"f2\")\n self.trout = etree.SubElement(self.root, \"tree\")", "def __init__(self, project):\n super(NovaExtractor, self).__init__(project)\n\n self.nova = self._get_nova_client()\n self.glance = self._get_glance_client()\n self.neutron = self._get_neutron_client()\n\n self.flavors = self._get_flavors()\n self.images = self._get_images()", "def __initialize(self):\n self.__object = None\n \n self.__mainAct = None\n self.__mainMenu = None\n \n self.__e5project = e5App().getObject(\"Project\")\n \n self.__supportedVariants = []", "def __init__(self):\n\t\tself.instances = {}\n\t\twith open(os.path.join(os.path.dirname(__file__), 'conf', 'parliaments.json'), 'r') as f:\n\t\t\tparliaments = json.load(f)\n\t\tfor c, cp in parliaments.items():\n\t\t\tfor p in cp:\n\t\t\t\tpfx = c + '/' + p['code']\n\t\t\t\tself.instances[pfx] = create_app(c, p)", "def initialize_project():\n # Initialize work flow\n wk_flow = vdapi.VDriveAPI('VULCAN')\n archive_root = '/SNS/VULCAN'\n if os.path.exists(archive_root) is False:\n archive_root = None\n wk_flow.set_data_root_directory(archive_root)\n wk_flow.set_working_directory('~/Temp/VDriveTest/')\n\n # Set to my_data\n my_data.set(wk_flow)\n\n return", "def init(self):\n # Create the default project files\n self.create_from_templates()\n\n # Add all the newly created files to the git staging area\n self.add_all_untracked()\n\n # Check that a compatible version of Python is available; install it if not\n self._pyenv.ensure_python(self.get_python_version())\n\n # Create virtualenv\n self._pyenv.create_virtualenv(self.name, self.get_python_version())", "def _initialize(self):\n configured_providers = self.domain.config[\"DATABASES\"]\n provider_objects = {}\n\n if configured_providers and isinstance(configured_providers, dict):\n if \"default\" not in configured_providers:\n raise ConfigurationError(\"You must define a 'default' provider\")\n\n for provider_name, conn_info in configured_providers.items():\n provider_full_path = conn_info[\"PROVIDER\"]\n 
provider_module, provider_class = provider_full_path.rsplit(\n \".\", maxsplit=1\n )\n\n provider_cls = getattr(\n importlib.import_module(provider_module), provider_class\n )\n provider = provider_cls(provider_name, self.domain, conn_info)\n\n provider_objects[provider_name] = provider\n\n self._providers = provider_objects", "def initilize(self):\n if not self.project_path.exists():\n self.project_path.mkdir()", "def load_project(self, path):\n base = self._get_base(path)\n\n filename, pathname, description = imp.find_module('tarbell_config', [path])\n project = imp.load_module('project', filename, pathname, description)\n\n try:\n self.key = project.SPREADSHEET_KEY\n self.client = get_drive_api()\n except AttributeError:\n self.key = None\n self.client = None\n\n try:\n project.CREATE_JSON\n except AttributeError:\n project.CREATE_JSON = False\n\n try:\n project.S3_BUCKETS\n except AttributeError:\n project.S3_BUCKETS = {}\n\n project.EXCLUDES = list(set(EXCLUDES + getattr(project, 'EXCLUDES', []) + getattr(base, 'EXCLUDES', [])))\n\n # merge project template types with defaults\n project.TEMPLATE_TYPES = set(getattr(project, 'TEMPLATE_TYPES', [])) | set(TEMPLATE_TYPES)\n\n try:\n project.DEFAULT_CONTEXT\n except AttributeError:\n project.DEFAULT_CONTEXT = {}\n\n project.DEFAULT_CONTEXT.update({\n \"PROJECT_PATH\": self.path,\n \"ROOT_URL\": \"127.0.0.1:5000\",\n \"SPREADSHEET_KEY\": self.key,\n \"BUCKETS\": project.S3_BUCKETS,\n \"SITE\": self,\n })\n\n # Set up template loaders\n template_dirs = [path]\n if base:\n template_dirs.append(base.base_dir)\n error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates')\n template_dirs.append(error_path)\n\n self.app.jinja_loader = TarbellFileSystemLoader(template_dirs)\n\n # load the project blueprint, if it exists\n if hasattr(project, 'blueprint') and isinstance(project.blueprint, Blueprint):\n self.app.register_blueprint(project.blueprint, site=self)\n\n return project, base", "def __init__(self):\n \n # base_path == this floder\n base_path = os.path.dirname(os.path.abspath(__file__))\n #\n self._curvetypes = app.app_utils.read_json_file(\n os.path.join(base_path, CURVETYPES_FILE)\n )\n # \n self._parametersdict = app.app_utils.read_json_file(\n os.path.join(base_path, PARAMETERS_FILE)\n )", "def obj_initialization(cls):\n listimdata = cls.retrieve_json()\n for elem in listimdata:\n CloudCtx.retrieve_from_json(elem)", "def init_hiero_project(self, event):\n if event.project not in hiero.core.projects(hiero.core.Project.kStartupProjects) and event.project.name() != 'Tag Presets': # Why is the \"Tag Presets\" project not of type kStartupProjects?????\n # Remove OnCreate callback so it doesn't double up with the kAfterProjectLoad event\n all_clips = []\n for clip in event.project.clips():\n all_clips.extend([v.item() for v in clip.binItem().items()])\n \n logger.debug('Registering %s clips from loaded project \"%s\" (currently holding %s clips)', len(all_clips),\n event.project.name(),\n self.model.rowCount())\n for i, loaded_clip in enumerate(all_clips):\n self.model.add_item(NodeItem(loaded_clip.readNode(), loaded_clip))\n logger.debug('registered {}/{} clips'.format(i, len(all_clips)))\n\n logger.debug('Finished registering loaded clips')\n # make sure new clips will be added as well:\n if self.session_has_callbacks:\n logger.debug('Adding back onCreate callback that was temporarily removed')\n nuke.addOnCreate(nuke.localizationPanelSignals.register_node)\n logger.debug('Already registered all other callbacks, 
skipping...')\n else:\n self.add_callbacks()\n self.session_has_callbacks = True", "def __init__(self, projects=None, users=None, deployments=None, deployment_versions=None, pipelines=None, pipeline_versions=None, local_vars_configuration=None): # noqa: E501 # noqa: E501\n if local_vars_configuration is None:\n local_vars_configuration = Configuration()\n self.local_vars_configuration = local_vars_configuration\n\n self._projects = None\n self._users = None\n self._deployments = None\n self._deployment_versions = None\n self._pipelines = None\n self._pipeline_versions = None\n self.discriminator = None\n\n if projects is not None:\n self.projects = projects\n if users is not None:\n self.users = users\n if deployments is not None:\n self.deployments = deployments\n if deployment_versions is not None:\n self.deployment_versions = deployment_versions\n if pipelines is not None:\n self.pipelines = pipelines\n if pipeline_versions is not None:\n self.pipeline_versions = pipeline_versions", "def initialize(context):\n from thet.helsinki.project.content import project\n\n content_types, constructors, ftis = atapi.process_types(\n atapi.listTypes(config.PROJECTNAME),\n config.PROJECTNAME)\n\n for atype, constructor in zip(content_types, constructors):\n utils.ContentInit(\"%s: %s\" % (config.PROJECTNAME, atype.portal_type),\n content_types = (atype,),\n permission = config.ADD_PERMISSIONS[atype.portal_type],\n extra_constructors = (constructor,),\n ).initialize(context)", "def __init__(self):\n self.machines = {}\n self.configs = {}\n self.systems = {}\n self.jobs = {}\n self.benchmarks = {}\n self.projects = {}", "def _createModuleObj(self):\n ModuleInitialCondition.__init__(self)", "def initialize(self, **kwargs):\n\n # Defining the configuration object\n self.config = kwargs.get('config')", "def __setup(self):\n\n backupFolder = self.config['destination']\n self.__createBackupFolder(backupFolder)\n\n # create the project based backup folder\n today = date.today()\n\n if 'projects' in self.config:\n for project in self.config['projects'].iterkeys():\n timestamp = datetime.now().strftime('%d-%H-%M-%S')\n backupDestination = os.path.join(backupFolder, project, str(today.year), today.strftime('%m'), timestamp)\n self.__createBackupFolder(backupDestination)\n self.config['projects'][project]['destination'] = backupDestination", "def __init__(self):\n\n # Primary configuration of the module is via the container environment.\n # We need to recognise that some or all of these may not be defined.\n # All run-time config that's required is given a __CFG prefix to\n # simplify checking whether all that's required has been defined.\n #\n # The SQUONK2_SLUG is limited to 10 characters, when combined with\n # \"Fragalysis {SLUG} \", this leaves (80-22) 58 characters for the\n # use with the target-access-string and session project strings\n # to form Squonk2 Unit and Project names.\n self.__CFG_SQUONK2_ASAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_ASAPI_URL')\n self.__CFG_SQUONK2_DMAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_DMAPI_URL')\n self.__CFG_SQUONK2_UI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_UI_URL')\n self.__CFG_SQUONK2_ORG_UUID: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_UUID')\n self.__CFG_SQUONK2_UNIT_BILLING_DAY: Optional[str] =\\\n os.environ.get('SQUONK2_UNIT_BILLING_DAY')\n self.__CFG_SQUONK2_PRODUCT_FLAVOUR: Optional[str] =\\\n os.environ.get('SQUONK2_PRODUCT_FLAVOUR')\n self.__CFG_SQUONK2_SLUG: Optional[str] =\\\n os.environ.get('SQUONK2_SLUG', 
'')[:_MAX_SLUG_LENGTH]\n self.__CFG_SQUONK2_ORG_OWNER: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER')\n self.__CFG_SQUONK2_ORG_OWNER_PASSWORD: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER_PASSWORD')\n self.__CFG_OIDC_AS_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_AS_CLIENT_ID')\n self.__CFG_OIDC_DM_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_DM_CLIENT_ID')\n self.__CFG_OIDC_KEYCLOAK_REALM: Optional[str] = \\\n os.environ.get('OIDC_KEYCLOAK_REALM')\n\n # Optional config (no '__CFG_' prefix)\n self.__DUMMY_TARGET_TITLE: Optional[str] =\\\n os.environ.get('DUMMY_TARGET_TITLE')\n self.__DUMMY_USER: Optional[str] =\\\n os.environ.get('DUMMY_USER')\n self.__DUMMY_TAS: Optional[str] =\\\n os.environ.get('DUMMY_TAS')\n self.__SQUONK2_VERIFY_CERTIFICATES: Optional[str] = \\\n os.environ.get('SQUONK2_VERIFY_CERTIFICATES')\n\n # The integer billing day, valid if greater than zero\n self.__unit_billing_day: int = 0\n # True if configured...\n self.__configuration_checked: bool = False\n self.__configured: bool = False\n # Ignore cert errors? (no)\n self.__verify_certificates: bool = True\n\n # The record ID of the Squonk2Org for this deployment.\n # Set on successful 'pre-flight-check'\n self.__org_record: Optional[Squonk2Org] = None\n\n self.__org_owner_as_token: str = ''\n self.__org_owner_dm_token: str = ''\n self.__keycloak_hostname: str = ''\n self.__keycloak_realm: str = ''\n\n # The Safe QuerySet from the security module.\n # Used when we are given a tas (target access string).\n # It allows us to check that a user is permitted to use the access ID\n # and relies on ISPyB credentials present in the environment.\n self.__ispyb_safe_query_set: ISpyBSafeQuerySet = ISpyBSafeQuerySet()", "def _get_initial_config(self):\r\n config = GeneralConfiguration()\r\n caching_config = CacheBaseyearConfiguration()\r\n config.merge(caching_config)\r\n return config" ]
[ "0.6469144", "0.64588916", "0.62322617", "0.6208262", "0.61827815", "0.6177319", "0.6113617", "0.6104447", "0.6062008", "0.6061132", "0.604619", "0.6014433", "0.60107535", "0.6006592", "0.5994414", "0.5987781", "0.59561527", "0.59465915", "0.5927197", "0.58842665", "0.5858934", "0.5826249", "0.5808512", "0.5805246", "0.5801869", "0.5797123", "0.5785968", "0.57595927", "0.575931", "0.5747361" ]
0.6459655
1
SetActualXDimensionIsOddInput(itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 self, itkSimpleDataObjectDecoratorB _arg)
def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> "void": return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)", "def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):\n import itk\n instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def SetKernel(self, _arg: 'itkFlatStructuringElement2') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetKernel(self, _arg)", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def SetKernel(self, _arg: 'itkFlatStructuringElement2') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_SetKernel(self, _arg)", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def SetFullyConnected(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetFullyConnected(self, _arg)", "def SetInputNarrowBandwidth(self, _arg: 'double') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBandwidth(self, _arg)", "def SetFullyConnected(self, _arg: 'bool const') -> \"void\":\n return 
_itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_SetFullyConnected(self, _arg)", "def SetOutputNarrowBandwidth(self, _arg: 'double') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetOutputNarrowBandwidth(self, _arg)", "def SetInputNarrowBand(self, ptr: 'itkVectorContainerUILSNF2') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBand(self, ptr)", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def SetPreserveIntensities(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_SetPreserveIntensities(self, _arg)", "def SetInput(self, histogram: 'itkHistogramF') -> \"void\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_SetInput(self, histogram)", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def SetNarrowBanding(self, _arg: 'bool const') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetNarrowBanding(self, _arg)", "def SetPreserveIntensities(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetPreserveIntensities(self, _arg)", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, 
self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size,)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def __fill_real_return__(im, ax, real_return, origi_shape):\n\n if real_return == 'full':\n if type(ax) == tuple:\n ax = list(ax)\n axis = ax[-1] # axis of rfft;\n ax = ax[:-1] # axis of fft\n\n half = im.swapaxes(axis, -1)\n if np.mod(origi_shape[axis], 2) == 0:\n half = np.flipud(np.conjugate(half[1:-1]))\n else:\n half = np.flipud(np.conjugate(half[1:]))\n half = half.swapaxes(axis, -1)\n if len(ax) > 0:\n for a in ax:\n half = half.swapaxes(a, -1)\n half = half[::-1] # Reverse the other axis since the real fft is point symmetric\n half = np.roll(half, 1, 0) # for some reason one has to roll the axis, otherwise there will be one point wrong :(\n half = half.swapaxes(a, -1)\n return np.concatenate((im, half), axis)\n else:\n return (im)", "def SetInput(self, histogram: 'itkHistogramD') -> \"void\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF2_Superclass_SetInput(self, histogram)", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)" ]
[ "0.8969456", "0.86000365", "0.8532017", "0.8177083", "0.7982844", "0.76595676", "0.71240073", "0.6296384", "0.61453384", "0.5576631", "0.5311787", "0.51991284", "0.51852775", "0.51783836", "0.51714885", "0.51295906", "0.50509685", "0.4981996", "0.49758014", "0.4968456", "0.4968456", "0.4959381", "0.49411106", "0.49239343", "0.49190962", "0.4914457", "0.49068925", "0.4902486", "0.4894923", "0.48842877" ]
0.9319235
0
GetActualXDimensionIsOddInput(itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 self) -> itkSimpleDataObjectDecoratorB
def GetActualXDimensionIsOddInput(self) -> "itkSimpleDataObjectDecoratorB const *": return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):\n import itk\n instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size,)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n 
\"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def GetOutput(self) -> \"itkSimpleDataObjectDecoratorF *\":\n return _itkHistogramThresholdCalculatorPython.itkHistogramThresholdCalculatorHFF_GetOutput(self)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def test_shapes_dinn_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def test_shapes_dinn_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd 
z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def _imfilter(x_data, f_data):\n return pipe(f_data, ifftshift, fftn, lambda x: x * fftn(x_data), ifftn).real", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def divi_img(x, h1, h2):\n return x[:, :, :, h1:h2]", "def __check_type__(im, ft_axes, orig, name, real_axis=0, shift_axes=[]):\n if type(orig) == image.image:\n im = im.view(image.image) # note: view casting -> this is not the viewer!\n if type(orig.name) is str:\n im.name = name + ' of ' + orig.name\n im.info = orig.info\n pxs = []\n\n for a in ft_axes:\n if a not in orig.spectral_axes:\n im.spectral_axes += [a]\n im.shift_axes = shift_axes\n if type(orig.unit) is str:\n im.unit = ''\n for i in range(im.ndim):\n if i in ft_axes:\n if name == 'IRFT' and real_axis == i:\n pxs += [1 / (orig.pixelsize[i] * 2 * (orig.shape[i] - 1))]\n else:\n pxs += [1 / (orig.pixelsize[i] * orig.shape[i])]\n if type(orig.unit) is str:\n im.unit += orig.unit + '^-1 '\n else:\n try: # TODO: FIX THIS!!!\n pxs += [orig.pixelsize[i]]\n except:\n print('Error in setting pixel size')\n if type(orig.unit) is str:\n im.unit += orig.unit + ' '\n im.pixelsize = pxs\n return (im)\n else:\n return (im)\n\n # ifft shift", "def test_odd(self):\n actual = cm.ring_mask((5, 5), 1, 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, False, True, False],\n [True, False, False, False, True],\n [False, True, False, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def is_odd(self, x):\n return x%2", "def test_shapes_dinn_even(self):\n\n out_single, out_single_J = self.dINN_even(self.x_single_even, self.y_single)\n out_batch, out_batch_J = self.dINN_even(self.x_batch_even, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_even,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_even,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch 
between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def test_shapes_dinn_even(self):\n\n out_single, out_single_J = self.dINN_even(self.x_single_even, self.y_single)\n out_batch, out_batch_J = self.dINN_even(self.x_batch_even, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_even,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_even,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def _fake_only_visualize(fake_img, real_img, caption, num, num_per_caption, num_per_row, model):\n fake_img = _post_process(fake_img, model)\n grid = _make_grid(fake_img, cols=num_per_row)\n grid = tf.convert_to_tensor(grid, dtype=tf.uint8)\n return fake_img, grid", "def indicator_kernel(h: np.ndarray, Xi: np.ndarray, x: np.ndarray) -> np.ndarray:\n return (Xi - x) == 0", "def test_even(self):\n actual = cm.ring_mask((4, 4), 1.5, 2)\n expected = np.array([[False, True, True, False],\n [True, False, False, True],\n [True, False, False, True],\n [False, True, True, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def GetOutput(self) -> \"itkSimpleDataObjectDecoratorF *\":\n return _itkHistogramThresholdCalculatorPython.itkHistogramThresholdCalculatorHDF_GetOutput(self)" ]
[ "0.8846005", "0.84832937", "0.8194069", "0.80633116", "0.7708489", "0.7641021", "0.71924984", "0.5695571", "0.56210387", "0.5349565", "0.5323007", "0.52913964", "0.5235574", "0.5194547", "0.5194547", "0.51583934", "0.51583934", "0.5149409", "0.51164955", "0.51159143", "0.5115007", "0.5092312", "0.5024071", "0.49931595", "0.49931595", "0.49692413", "0.49597698", "0.4949836", "0.49451303", "0.49389037" ]
0.9107605
0
GetActualXDimensionIsOdd(itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 self) -> bool const &
def GetActualXDimensionIsOdd(self) -> "bool const &": return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)", "def is_odd(self, x):\n return x%2", "def is_odd(self):\n return S(self.parity()).is_odd", "def indicator_kernel(h: np.ndarray, Xi: np.ndarray, x: np.ndarray) -> np.ndarray:\n return (Xi - x) == 0", "def odd(self):\n return not self.even()", "def is_odd(x):\n return x % 2 != 0", "def is_odd(x):\n if (x%2==1):\n return True\n else:\n return False", "def is_odd(n):\r\n return not is_even(n)", "def is_even(self, x):\n return x%2 == 0", "def is_even(self):\n return S(self.parity()).is_even", "def ishomog(tr):\n \n return tr.shape == (4, 4)", "def test_fourier_dim_2():\n expected_res = np.array([[1 / np.sqrt(2), 1 / np.sqrt(2)], [1 / np.sqrt(2), -1 / np.sqrt(2)]])\n\n res = fourier(2)\n\n bool_mat = np.isclose(res, expected_res)\n np.testing.assert_equal(np.all(bool_mat), True)", "def is_even(self):\n return True", "def is_imaginary(self) -> bool:\n return self < 0", "def _verify(self, dimension):\n value = self.texture.__getattribute__(dimension)\n while value > 1:\n div_float = float(value) / 2.0\n div_int = int(div_float)\n if not (div_float == div_int):\n raise Exception('image %s is %d, which is not a power of 2' % (\n dimension, self.texture.__getattribute__(dimension)))\n value = div_int", "def test_odd(self):\n actual = cm.ring_mask((5, 5), 1, 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, False, True, False],\n [True, False, False, False, True],\n [False, True, False, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def is_odd(x):\n\n return (x & 1) == 1", "def is_isotropic(self):\n return self.fibres is None", "def test_even(self):\n actual = cm.ring_mask((4, 4), 1.5, 2)\n expected = np.array([[False, True, True, False],\n [True, False, False, True],\n [True, False, False, True],\n [False, True, True, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def inside(i,j,im,h=H): #X\n return i-h >=0 
and j-h >=0 and i+h+1<=im.shape[0] and j+h+1<=im.shape[1]", "def check_image_invert(image_data, border_width = 30):\n \n _, avg_intensity_borders, avg_intensity_inside = \\\n _auto_threshold_borders(image_data,border_width = border_width);\n \n # if image borders are darker than the mean image, it's a surface tension\n # image:\n if(avg_intensity_inside > avg_intensity_borders):\n return False;\n # else, it's a shadowgraph image:\n else:\n return True;", "def is_even(self):\n pass", "def is_odd(n):\n if n % 2:\n return True\n else:\n return False", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)" ]
[ "0.8801514", "0.8127065", "0.81071246", "0.7921351", "0.7688207", "0.7573678", "0.73435515", "0.5987669", "0.57538724", "0.5639284", "0.5608152", "0.5573267", "0.5572946", "0.5502984", "0.5374126", "0.53627056", "0.53548497", "0.53279", "0.53225464", "0.5297045", "0.5288904", "0.52789164", "0.52725095", "0.52426374", "0.5192637", "0.51704174", "0.51598835", "0.5129374", "0.5129285", "0.5114391" ]
0.9140366
0
GetSizeGreatestPrimeFactor(itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 self) -> unsigned long long
def GetSizeGreatestPrimeFactor(self) -> "unsigned long long": return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetSizeGreatestPrimeFactor(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetSizeGreatestPrimeFactor(self) -> \"unsigned long long\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetSizeGreatestPrimeFactor(self)", "def _get_max_image_bytes(self):\n raise NotImplementedError(\"Abstract method not implemented\")", "def find_optimal_threshold(self, hist):\n k = 256\n threshold = int(k / 2)\n lastexpected1 = lastexpected2 = 0\n\n while True:\n expected1 = expected2 = 0\n t_exp1 = sum(hist[:threshold])\n t_exp2 = sum(hist[threshold:])\n for i in range(threshold):\n expected1 += (hist[i] / t_exp1) * i\n\n for i in range(threshold, k):\n expected2 += (hist[i] / t_exp2) * i\n\n threshold = (expected1 + expected2) / 2\n if abs(expected1 - lastexpected1) != 0 and abs(expected2 - lastexpected2) != 0:\n break\n lastexpected1 = expected1\n lastexpected2 = expected2\n # print(expected2, expected1)\n return threshold", "def __len__(self):\n return _itkRGBAPixelPython.itkRGBAPixelF___len__(self)", "def get_largest_two_component(img, print_info = False, threshold = None):\n s = ndimage.generate_binary_structure(3,2) # iterate structure\n labeled_array, numpatches = ndimage.label(img,s) # labeling\n sizes = ndimage.sum(img,labeled_array,range(1,numpatches+1)) \n sizes_list = [sizes[i] for i in range(len(sizes))]\n sizes_list.sort()\n if(print_info):\n print('component size', sizes_list)\n if(len(sizes) == 1):\n out_img = img\n else:\n if(threshold):\n out_img = np.zeros_like(img)\n for temp_size in sizes_list:\n if(temp_size > threshold):\n temp_lab = np.where(sizes == temp_size)[0] + 1\n temp_cmp = labeled_array == temp_lab\n out_img = (out_img + temp_cmp) > 0\n return out_img\n else: \n max_size1 = sizes_list[-1]\n max_size2 = sizes_list[-2]\n max_label1 = np.where(sizes == max_size1)[0] + 1\n max_label2 = np.where(sizes == max_size2)[0] + 1\n component1 = labeled_array == max_label1\n component2 = labeled_array == max_label2\n if(max_size2*10 > max_size1):\n component1 = (component1 + component2) > 0\n out_img = component1\n return out_img", "def get_largest_two_component(img, print_info = False, threshold = None):\n s = ndimage.generate_binary_structure(3,2) # iterate structure\n labeled_array, numpatches = ndimage.label(img,s) # labeling\n sizes = ndimage.sum(img,labeled_array,range(1,numpatches+1)) \n sizes_list = [sizes[i] for i in range(len(sizes))]\n sizes_list.sort()\n if(print_info):\n print('component size', sizes_list)\n if(len(sizes) == 1):\n out_img = img\n else:\n if(threshold):\n out_img = np.zeros_like(img)\n for temp_size in sizes_list:\n if(temp_size > threshold):\n temp_lab = np.where(sizes == temp_size)[0] + 1\n temp_cmp = labeled_array == temp_lab\n out_img = (out_img + temp_cmp) > 0\n return out_img\n else: \n max_size1 = sizes_list[-1]\n max_size2 = sizes_list[-2]\n max_label1 = np.where(sizes == max_size1)[0] + 1\n max_label2 = np.where(sizes == max_size2)[0] + 1\n component1 = labeled_array == max_label1\n component2 = labeled_array == max_label2\n if(max_size2*10 > max_size1):\n component1 = (component1 + component2) > 0\n out_img = component1\n return out_img", "def get_size(img):\n ih, iw = img.shape[:2]\n return iw * ih", "def bitpix_size (bitpix):\n return abs(int(bitpix))", "def CalculateMaxImageSize(self, partition_size):\n raise NotImplementedError", "def size(img):\n\treturn img.size", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return 
_itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def subimage_size_from_inner_size(self, inner_size:int) -> int:\n return (2 ** self.n_folds) * (inner_size + 2) - 2", "def get_kernel_size(factor):\r\n return 2 * factor - factor % 2", "def get_image_size(self):", "def dimension(self):\r\n a = 0\r\n for x in self.faces():\r\n if (len(x) > a):\r\n a = len(x) \r\n return a-1", "def __len__(self):\n return self.flat_image.size", "def npixfeh(self):\n return len(self.fehedges)-1", "def get_largest_blob(img, invert):\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n \n gray_threshed = np.zeros_like(gray)\n if invert:\n gray_threshed[gray<np.mean(gray)] = 255\n else:\n gray_threshed[gray>np.mean(gray)] = 255\n\n # First, detect very large regions and remove them\n if DEBUG:\n print(\"gray_threshed start\")\n disp(gray_threshed)\n retval, labels, stats, centroids = cv2.connectedComponentsWithStats(gray_threshed)\n for i in range(len(stats)):\n show = show_cc(gray_threshed, stats[i])\n x0, y0, w, h, _ = stats[i]\n if (w*h) > 0.5*(gray_threshed.shape[0]*gray_threshed.shape[1]):\n gray_threshed[labels == i] = 0\n if DEBUG:\n print(\"gray_threshed large removed\")\n disp(gray_threshed)\n\n # ds = 2\n # element = cv2.getStructuringElement(cv2.MORPH_RECT, (ds*2+1, ds*2+1), (ds, ds))\n element = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 2), (1, 1))\n gray_threshed_dilated = cv2.dilate(gray_threshed, element)\n gray_threshed_dilated = cv2.erode(gray_threshed_dilated, element)\n\n if DEBUG:\n print(\"gray_threshed dilated\")\n disp(gray_threshed_dilated)\n # return\n\n retval, labels, stats, centroids = cv2.connectedComponentsWithStats(gray_threshed_dilated)\n sorted_indices = np.argsort(-1.0*stats[:,-1]) #Sort by size descending \n idx = sorted_indices[1] # Take the largest component aside from full the bounding box \n\n show = show_cc(img, stats[idx])\n if DEBUG:\n print('CC')\n disp(show)\n\n x0, y0, x1, y1, _ = stats[idx]\n x1 += x0 + 5\n y1 += y0 + 5\n x0 -= 5\n y0 -= 5\n x0 = max(0, x0)\n y0 = max(0, y0)\n x1 = min(x1, img.shape[1]-1)\n y1 = min(y1, img.shape[0]-1)\n\n return img[y0 : y1, x0 : x1, :]", "def calc_psf_size_inpix_quick(arr):\n\tarr1d = arr.sum(axis=0)\n\tx = np.arange(arr1d.size)\n\tspline = si.UnivariateSpline(x, arr1d-np.max(arr1d)/2, s=0)\n\tr1, r2 = spline.roots()\n\n\treturn np.absolute(r2 - r1)", "def GetInputNarrowBandwidth(self) -> \"double\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_GetInputNarrowBandwidth(self)", "def optimalBinSize(x):\n interquartile = np.diff(np.prctile(x, [25, 75]))\n return 2. 
* interquartile * len(x)**(-1./3)", "def find_optimal_threshold(self, hist):\n\n\n threshold = int((len(hist)-1)/2)\n ct = len(hist) - 1\n\n while True:\n if(ct < 1):\n break\n threshold1 = self.evalue(hist,0,threshold)\n threshold2 = self.evalue(hist,threshold,len(hist) - 2)\n nt = int((threshold1+threshold2)/2)\n ct = nt - threshold\n threshold = nt\n\n return threshold", "def get_bitsize(self) -> int:\n return self._surface.get_bitsize()", "def __len__(self):\n\n return math.ceil(len(self.img_files) * self.gen_count / self.batch_size)", "def get_img_output_length(width, height):\n def get_output_length(input_length):\n return input_length//16\n\n return get_output_length(width), get_output_length(height)", "def get_pixel_size(img):\n\tp1 = img.get_attr_default(\"apix_x\", -1.0)\n\tcc = img.get_attr_default(\"ctf\", None)\n\tif cc == None:\n\t\tp2 = -1.0\n\telse:\n\t\tp2 = round(cc.apix, 3)\n\tif p1 == -1.0 and p2 == -1.0:\n\t\tERROR(\"Pixel size not set\", \"get_pixel_size\", 0)\n\t\treturn -1.0\n\telif p1 > -1.0 and p2 > -1.0:\n\t\tif abs(p1-p2) >= 0.001:\n\t\t\tERROR(\"Conflict between pixel size in attribute and in ctf object\", \"get_pixel_size\", 0)\n\t\t# pixel size is positive, so what follows omits -1 problem\n\t\treturn max(p1, p2)\n\telse:\n\t\treturn max(p1, p2)", "def get_num_of_images(self):", "def _calculate_fp_metric(self, image, transform, size):\n self.count += 1\n radius = int(round(size/2))\n center = transform.trans\n angle = transform.rot\n\n rotated = image.rotate(angle, center)\n\n sx1, sy1 = center.x - radius, center.y - radius\n sx2, sy2 = center.x + radius, center.y + radius\n thick = int(round(size / 14))\n\n # Top\n x1, y1 = sx1, sy1\n x2, y2 = sx2, sy1 + thick\n top = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Left\n x1, y1 = sx1, sy1\n x2, y2 = sx1 + thick, sy2\n left = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Bottom\n x1, y1 = sx1, sy2 - thick\n x2, y2 = sx2, sy2\n bottom = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Right\n x1, y1 = sx2 - thick, sy1\n x2, y2 = sx2, sy2\n right = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Identify finder edges\n if top < bottom and left < right:\n val = top + left\n elif top < bottom and right < left:\n val = top + right\n elif bottom < top and left < right:\n val = bottom + left\n elif bottom < top and right < left:\n val = bottom + right\n else:\n val = 100000000\n\n return val", "def GetThreshold(self) -> \"float const &\":\n return _itkHistogramThresholdCalculatorPython.itkHistogramThresholdCalculatorHFF_GetThreshold(self)", "def feature_size(self):\n return self.fingerprint_length" ]
[ "0.8544165", "0.5794391", "0.57624966", "0.57615536", "0.5714787", "0.5714787", "0.56717813", "0.5517006", "0.55071205", "0.55050975", "0.5497939", "0.5494971", "0.54841167", "0.5444231", "0.54136324", "0.5411584", "0.53939754", "0.53831863", "0.53828776", "0.5376495", "0.5359352", "0.53461957", "0.5342108", "0.53394467", "0.5333097", "0.5332989", "0.53130853", "0.52934813", "0.5281966", "0.52772605" ]
0.8905682
0
cast(itkLightObject obj) -> itkHalfHermitianToRealInverseFFTImageFilterICF2IF2
def cast(obj: 'itkLightObject') -> "itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *": return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)", "def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return 
_itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC2_cast(obj)", "def itkHistogramToIntensityImageFilterHFIF2_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_cast(obj)", "def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterISS2 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterISS2_cast(obj)", "def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)", "def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2ISS2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)", "def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)" ]
[ "0.8758252", "0.8369405", "0.82023007", "0.7600542", "0.75206214", "0.7492559", "0.74633193", "0.7455476", "0.7385218", "0.7378154", "0.7370113", "0.7342949", "0.7299217", "0.72991484", "0.72808546", "0.7278873", "0.72355413", "0.7199508", "0.71967554", "0.71873283", "0.7185349", "0.71760637", "0.7170695", "0.7169957", "0.7168443", "0.7157959", "0.7148155", "0.71453685", "0.71371394", "0.71327645" ]
0.87814647
0
New() -> itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 Create a new object of the class itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter to the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'.
def New(*args, **kargs):
    obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj
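The query text above describes the New() calling convention; the following minimal sketch illustrates it. It assumes ITK's Python wrapping is installed, and the input image `freq_img` (a 2-D complex-float image in half-Hermitian layout) is a hypothetical placeholder, not something defined in this record.

import itk  # assumption: the installed ITK Python package

# Instantiate the template for complex<float> 2-D input -> float 2-D output.
InverseFFTType = itk.HalfHermitianToRealInverseFFTImageFilter[
    itk.Image[itk.complex[itk.F], 2], itk.Image[itk.F, 2]
]

# First non-named argument goes to the first input; named arguments are
# forwarded to the matching Set* methods, per the docstring above.
inv_fft = InverseFFTType.New(freq_img, ActualXDimensionIsOdd=True)
inv_fft.Update()
real_img = inv_fft.GetOutput()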
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHFIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return 
_itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF2IF2SE2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHistogramToIntensityImageFilterHDIF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
[ "0.7311116", "0.72815", "0.72209346", "0.7205358", "0.7124789", "0.7123721", "0.71095455", "0.71008086", "0.705505", "0.7032606", "0.70307887", "0.7022936", "0.70089126", "0.697373", "0.69708544", "0.69626844", "0.6954855", "0.69405806", "0.69339013", "0.6925278", "0.69092274", "0.68900234", "0.6876028", "0.6867661", "0.6848236", "0.684468", "0.68182784", "0.6790141", "0.67723", "0.6769359" ]
0.8440965
0
itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(itkLightObject obj) -> itkHalfHermitianToRealInverseFFTImageFilterICF2IF2
def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> "itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)
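As a brief aside on what this wrapper is for: SWIG exposes the filter through generic itkLightObject handles, and cast() recovers the concrete proxy type. A hedged sketch only, reusing the hypothetical InverseFFTType name from the previous example:

# Sketch: downcast a generic handle back to the concrete filter type.
generic_handle = InverseFFTType.New()           # usable wherever an itkLightObject is expected
concrete = InverseFFTType.cast(generic_handle)  # same object, typed as the filter proxy
assert concrete is not None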
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)", "def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)", "def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)", "def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)", "def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)", "def itkHistogramToIntensityImageFilterHFIF2_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_cast(obj)", "def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 
'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)", "def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF2 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_cast(obj)", "def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)", "def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)", "def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkClosingByReconstructionImageFilterIF2IF2SE2 *\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_cast(obj)", "def itkBinaryContourImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)", "def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS2ISS2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS2ISS2_cast(obj)" ]
[ "0.9269122", "0.84515977", "0.8448534", "0.79274684", "0.7901587", "0.78621644", "0.7820546", "0.77878016", "0.7710905", "0.7688822", "0.7686142", "0.76826346", "0.7652696", "0.7630173", "0.7629605", "0.7621622", "0.76134014", "0.76103306", "0.76072913", "0.75988936", "0.75794786", "0.7543886", "0.7482338", "0.7471546", "0.7440671", "0.7435547", "0.74262553", "0.73991317", "0.7394513", "0.73918855" ]
0.94871765
0
SetActualXDimensionIsOddInput(itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 self, itkSimpleDataObjectDecoratorB _arg)
def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> "void":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)", "def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):\n import itk\n instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, 
self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def __fill_real_return__(im, ax, real_return, origi_shape):\n\n if real_return == 'full':\n if type(ax) == tuple:\n ax = list(ax)\n axis = ax[-1] # axis of rfft;\n ax = ax[:-1] # axis of fft\n\n half = im.swapaxes(axis, -1)\n if np.mod(origi_shape[axis], 2) == 0:\n half = np.flipud(np.conjugate(half[1:-1]))\n else:\n half = np.flipud(np.conjugate(half[1:]))\n half = half.swapaxes(axis, -1)\n if len(ax) > 0:\n for a in ax:\n half = half.swapaxes(a, -1)\n half = half[::-1] # Reverse the other axis since the real fft is point symmetric\n half = np.roll(half, 1, 0) # for some reason one has to roll the axis, otherwise there will be one point wrong :(\n half = half.swapaxes(a, -1)\n return np.concatenate((im, half), axis)\n else:\n return (im)", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def SetInputNarrowBandwidth(self, _arg: 'double') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_SetInputNarrowBandwidth(self, _arg)", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size,)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def SetInputNarrowBandwidth(self, _arg: 'double') -> \"void\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF2_SetInputNarrowBandwidth(self, _arg)", "def SetPreserveIntensities(self, _arg: 'bool 
const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIF2IF2SE2_SetPreserveIntensities(self, _arg)", "def SetKernel(self, _arg: 'itkFlatStructuringElement3') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS3ISS3SE3_SetKernel(self, _arg)", "def SetPreserveIntensities(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterIUC2IUC2SE2_SetPreserveIntensities(self, _arg)", "def SetInput(self, histogram: 'itkHistogramF') -> \"void\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_SetInput(self, histogram)", "def SetFullyConnected(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetFullyConnected(self, _arg)", "def SetPreserveIntensities(self, _arg: 'bool const') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetPreserveIntensities(self, _arg)", "def SetKernel(self, _arg: 'itkFlatStructuringElement2') -> \"void\":\n return _itkClosingByReconstructionImageFilterPython.itkClosingByReconstructionImageFilterISS2ISS2SE2_SetKernel(self, _arg)", "def SetInput(self, histogram: 'itkHistogramD') -> \"void\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHDIF3_Superclass_SetInput(self, histogram)", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def SetInput(self, histogram: 'itkHistogramF') -> \"void\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF2_Superclass_SetInput(self, histogram)" ]
[ "0.9123958", "0.8442863", "0.8440207", "0.8404514", "0.83800495", "0.7514097", "0.75077826", "0.6148815", "0.5708541", "0.57080245", "0.5218581", "0.52097803", "0.49215868", "0.49215868", "0.49037904", "0.4875511", "0.48751906", "0.48587048", "0.48545232", "0.48149854", "0.48114434", "0.48101634", "0.4799619", "0.47941184", "0.47906503", "0.4787675", "0.47863203", "0.47782406", "0.47650555", "0.47620776" ]
0.9243138
0
GetActualXDimensionIsOddInput(itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 self) -> itkSimpleDataObjectDecoratorB
def GetActualXDimensionIsOddInput(self) -> "itkSimpleDataObjectDecoratorB const *":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)", "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):\n import itk\n instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def test_diin_sampling_odd(self):\n\n samples = self.dINN_odd.sample(self.y_single, self.z_sample_size)\n self.assertEqual(samples.shape[0], self.z_sample_size,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs.\")\n self.assertEqual(samples.shape[1], self.x_dim_odd,\n \"Sample shape mismatch in DeepInvertibleModel on odd inputs\")\n\n # Sample batch\n samples_batch = self.dINN_odd.sample(self.y_batch, self.z_sample_size,)\n self.assertEqual(samples_batch.shape[0], self.z_sample_size, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch outputs.\")\n self.assertEqual(samples_batch.shape[1], self.batch_size, \"Sample shape mismatch in \"\n 
\"DeepInvertibleModel on odd batch inputs.\")\n self.assertEqual(samples_batch.shape[2], self.x_dim_odd, \"Sample shape mismatch in \"\n \"DeepInvertibleModel on odd batch inputs.\")", "def GetOutput(self) -> \"itkSimpleDataObjectDecoratorF *\":\n return _itkHistogramThresholdCalculatorPython.itkHistogramThresholdCalculatorHFF_GetOutput(self)", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def __check_type__(im, ft_axes, orig, name, real_axis=0, shift_axes=[]):\n if type(orig) == image.image:\n im = im.view(image.image) # note: view casting -> this is not the viewer!\n if type(orig.name) is str:\n im.name = name + ' of ' + orig.name\n im.info = orig.info\n pxs = []\n\n for a in ft_axes:\n if a not in orig.spectral_axes:\n im.spectral_axes += [a]\n im.shift_axes = shift_axes\n if type(orig.unit) is str:\n im.unit = ''\n for i in range(im.ndim):\n if i in ft_axes:\n if name == 'IRFT' and real_axis == i:\n pxs += [1 / (orig.pixelsize[i] * 2 * (orig.shape[i] - 1))]\n else:\n pxs += [1 / (orig.pixelsize[i] * orig.shape[i])]\n if type(orig.unit) is str:\n im.unit += orig.unit + '^-1 '\n else:\n try: # TODO: FIX THIS!!!\n pxs += [orig.pixelsize[i]]\n except:\n print('Error in setting pixel size')\n if type(orig.unit) is str:\n im.unit += orig.unit + ' '\n im.pixelsize = pxs\n return (im)\n else:\n return (im)\n\n # ifft shift", "def test_shapes_dinn_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def test_shapes_dinn_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in DeepInvertibleModel')\n\n # Test shapes of Jacobian\n 
self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in DeepInvertibleModel')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in DeepInvertibleModel')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in DeepInvertibleModel')", "def _imfilter(x_data, f_data):\n return pipe(f_data, ifftshift, fftn, lambda x: x * fftn(x_data), ifftn).real", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def test_dinn_invertible_odd(self):\n\n out_single, out_single_J = self.dINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.dINN_odd(self.x_batch_odd, self.y_batch)\n\n rec_single = self.dINN_odd(out_single, self.y_single, inverse=True)\n rec_batch = self.dINN_odd(out_batch, self.y_batch, inverse=True)\n\n self.assertTrue(np.allclose(self.x_single_odd.numpy(), rec_single.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on a single instance with odd z')\n self.assertTrue(np.allclose(self.x_batch_odd.numpy(), rec_batch.numpy(), atol=1e-4),\n 'Could not invert DeepInvertibleModel on batch instance with odd z')", "def test_odd(self):\n actual = cm.ring_mask((5, 5), 1, 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, False, True, False],\n [True, False, False, False, True],\n [False, True, False, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def divi_img(x, h1, h2):\n return x[:, :, :, h1:h2]", "def _fake_only_visualize(fake_img, real_img, caption, num, num_per_caption, num_per_row, model):\n fake_img = _post_process(fake_img, model)\n grid = _make_grid(fake_img, cols=num_per_row)\n grid = tf.convert_to_tensor(grid, dtype=tf.uint8)\n return fake_img, grid", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def accept_numpy_array_like(image_filter):\n import numpy as np\n import itk\n\n @functools.wraps(image_filter)\n def image_filter_wrapper(*args, **kwargs):\n have_array_like_input = False\n\n args_list = list(args)\n for index, arg in enumerate(args):\n if is_arraylike(arg):\n have_array_like_input = True\n array = np.asarray(arg)\n image = itk.image_view_from_array(array)\n args_list[index] = image\n\n potential_image_input_kwargs = ('input', 'inputimage', 'input_image', 'input1', 'input2', 'input3')\n for key, value in kwargs.items():\n if key.lower() in potential_image_input_kwargs and is_arraylike(value):\n have_array_like_input = True\n array = np.asarray(value)\n image = itk.image_view_from_array(array)\n 
kwargs[key] = image\n\n if have_array_like_input:\n # Convert output itk.Image's to numpy.ndarray's\n output = image_filter(*tuple(args_list), **kwargs)\n if isinstance(output, tuple):\n output_list = list(output)\n for index, value in output_list:\n if isinstance(value, itk.Image):\n array = itk.array_from_image(value)\n output_list[index] = array\n return tuple(output_list)\n else:\n if isinstance(output, itk.Image):\n output = itk.array_from_image(output)\n return output\n else:\n return image_filter(*args, **kwargs)\n return image_filter_wrapper", "def test_whitley(self):\n fun = get_problem('whitley', self.dimension)\n self.assertEqual(fun(self.array2), 0.0)", "def test_shapes_cinn_odd(self):\n\n out_single, out_single_J = self.cINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.cINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in ConditionalInvertibleBlock')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in ConditionalInvertibleBlock')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in ConditionalInvertibleBlock')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in ConditonalInvertibleBlock')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in ConditonalInvertibleBlock')", "def test_shapes_cinn_odd(self):\n\n out_single, out_single_J = self.cINN_odd(self.x_single_odd, self.y_single)\n out_batch, out_batch_J = self.cINN_odd(self.x_batch_odd, self.y_batch)\n\n # Test shapes of output\n self.assertEqual(out_single.shape[0], 1,\n 'Batch shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_single.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_batch.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in ConditionalInvertibleBlock')\n self.assertEqual(out_batch.shape[1], self.x_dim_odd,\n 'Input/Output shape mismatch on a batch in ConditionalInvertibleBlock')\n\n # Test shapes of Jacobian\n self.assertEqual(out_single_J.shape[0], 1,\n 'Batch shape mismatch on single instance in ConditionalInvertibleBlock')\n self.assertEqual(out_batch_J.shape[0], self.batch_size,\n 'Batch shape mismatch on a batch in ConditionalInvertibleBlock')\n\n # Test equal batch sizes\n self.assertEqual(out_single.shape[0], out_single_J.shape[0],\n 'Batch shape mismatch between J and output in ConditonalInvertibleBlock')\n self.assertEqual(out_batch.shape[0], out_batch_J.shape[0],\n 'Batch shape mismatch between J and output in ConditonalInvertibleBlock')", "def indicator_kernel(h: np.ndarray, Xi: np.ndarray, x: np.ndarray) -> np.ndarray:\n return (Xi - x) == 0", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def 
is_odd(self, x):\n return x%2" ]
[ "0.90282", "0.8347846", "0.83297914", "0.7968715", "0.7855654", "0.7544492", "0.74166435", "0.55355155", "0.54117465", "0.530732", "0.5280623", "0.52173626", "0.52097636", "0.5165985", "0.51651883", "0.51651883", "0.51383734", "0.51095897", "0.51095897", "0.5096448", "0.50003666", "0.49986482", "0.49978325", "0.49556214", "0.4953388", "0.4943244", "0.4943244", "0.4938628", "0.4919324", "0.4912993" ]
0.8998347
1
GetActualXDimensionIsOdd(itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 self) -> bool const &
def GetActualXDimensionIsOdd(self) -> "bool const &":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOdd(self)
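A short note on why this flag exists: the half-Hermitian input keeps only about half of the X-axis frequency samples, so the parity of the original real image's X size cannot be recovered from the complex input alone and must be recorded through this getter/setter pair. A hedged sketch, with `inv_fft` and `original_x_size` as assumed placeholder names:

# Record whether the original real image had an odd X dimension before
# reconstructing it; the getter returns the plain bool shown in the signature.
inv_fft.SetActualXDimensionIsOdd(original_x_size % 2 == 1)
print(inv_fft.GetActualXDimensionIsOdd())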
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetActualXDimensionIsOdd(self) -> \"bool const &\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOdd(self)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOdd(self, *args)", "def GetActualXDimensionIsOddInput(self) -> \"itkSimpleDataObjectDecoratorB const *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetActualXDimensionIsOddInput(self)", "def SetActualXDimensionIsOdd(self, *args) -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOdd(self, *args)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_SetActualXDimensionIsOddInput(self, _arg)", "def SetActualXDimensionIsOddInput(self, _arg: 'itkSimpleDataObjectDecoratorB') -> \"void\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_SetActualXDimensionIsOddInput(self, _arg)", "def is_odd(self, x):\n return x%2", "def is_odd(self):\n return S(self.parity()).is_odd", "def indicator_kernel(h: np.ndarray, Xi: np.ndarray, x: np.ndarray) -> np.ndarray:\n return (Xi - x) == 0", "def is_odd(x):\n if (x%2==1):\n return True\n else:\n return False", "def odd(self):\n return not self.even()", "def is_odd(x):\n return x % 2 != 0", "def is_odd(n):\r\n return not is_even(n)", "def ishomog(tr):\n \n return tr.shape == (4, 4)", "def is_odd(x):\n\n return (x & 1) == 1", "def test_odd(self):\n actual = cm.ring_mask((5, 5), 1, 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, False, True, False],\n [True, False, False, False, True],\n [False, True, False, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def is_isotropic(self):\n return self.fibres is None", "def is_imaginary(self) -> bool:\n return self < 0", "def check_image_invert(image_data, border_width = 30):\n \n _, avg_intensity_borders, avg_intensity_inside = \\\n _auto_threshold_borders(image_data,border_width = border_width);\n \n # if image borders are darker than the mean image, it's a surface tension\n # image:\n if(avg_intensity_inside > avg_intensity_borders):\n return False;\n # else, it's a shadowgraph image:\n else:\n return True;", "def get_nsatpix( self, step ):\n \n return np.sum( self.get_image_step( step, divide_by_exptime=False ) >= 1.6e4 )", "def _verify(self, dimension):\n value = self.texture.__getattribute__(dimension)\n while value > 1:\n div_float = float(value) / 2.0\n div_int = int(div_float)\n if not (div_float == div_int):\n raise Exception('image %s is %d, which is not a power of 2' % (\n dimension, self.texture.__getattribute__(dimension)))\n value = div_int", "def inside(i,j,im,h=H): #X\n return i-h >=0 and j-h >=0 and i+h+1<=im.shape[0] and j+h+1<=im.shape[1]", "def is_even(self):\n return S(self.parity()).is_even", "def 
test_fourier_dim_2():\n expected_res = np.array([[1 / np.sqrt(2), 1 / np.sqrt(2)], [1 / np.sqrt(2), -1 / np.sqrt(2)]])\n\n res = fourier(2)\n\n bool_mat = np.isclose(res, expected_res)\n np.testing.assert_equal(np.all(bool_mat), True)", "def is_even(self, x):\n return x%2 == 0", "def test_odd(self):\n actual = cm.circle_mask((5, 5), 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, True, True, False],\n [True, True, True, True, True],\n [False, True, True, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))", "def isOdd (n):\n if type(n) != int:\n return False\n elif n%2 == 0:\n return False\n else:\n return True", "def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):\n import itk\n instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)\n return instance.__internal_call__()", "def test_even(self):\n actual = cm.ring_mask((4, 4), 1.5, 2)\n expected = np.array([[False, True, True, False],\n [True, False, False, True],\n [True, False, False, True],\n [False, True, True, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))" ]
[ "0.90134573", "0.8116078", "0.8011804", "0.8007661", "0.78912485", "0.7518797", "0.7378851", "0.5830744", "0.5698672", "0.55729944", "0.5495327", "0.54534477", "0.5449673", "0.54046744", "0.5294553", "0.5243625", "0.52380204", "0.51997244", "0.5157646", "0.51447916", "0.5131071", "0.51213735", "0.5101829", "0.5048878", "0.504607", "0.50272626", "0.5025524", "0.5009051", "0.50089455", "0.499975" ]
0.9105638
0
GetSizeGreatestPrimeFactor(itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 self) -> unsigned long long
def GetSizeGreatestPrimeFactor(self) -> "unsigned long long":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_GetSizeGreatestPrimeFactor(self)
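FFT backends generally support only image sizes whose prime factorizations stay below a backend-specific limit, which this getter reports. The helper below is not part of ITK; it is a hedged sketch of how the reported limit could be used, with `inv_fft` again an assumed placeholder:

def size_is_supported(n: int, greatest_prime: int) -> bool:
    # Divide out every admissible prime factor; n is supported iff nothing remains.
    for p in range(2, greatest_prime + 1):
        while n % p == 0:
            n //= p
    return n == 1

limit = inv_fft.GetSizeGreatestPrimeFactor()
print(size_is_supported(480, limit))  # e.g. 480 = 2^5 * 3 * 5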
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetSizeGreatestPrimeFactor(self) -> \"unsigned long long\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_GetSizeGreatestPrimeFactor(self)", "def _get_max_image_bytes(self):\n raise NotImplementedError(\"Abstract method not implemented\")", "def __len__(self):\n return _itkRGBAPixelPython.itkRGBAPixelF___len__(self)", "def find_optimal_threshold(self, hist):\n k = 256\n threshold = int(k / 2)\n lastexpected1 = lastexpected2 = 0\n\n while True:\n expected1 = expected2 = 0\n t_exp1 = sum(hist[:threshold])\n t_exp2 = sum(hist[threshold:])\n for i in range(threshold):\n expected1 += (hist[i] / t_exp1) * i\n\n for i in range(threshold, k):\n expected2 += (hist[i] / t_exp2) * i\n\n threshold = (expected1 + expected2) / 2\n if abs(expected1 - lastexpected1) != 0 and abs(expected2 - lastexpected2) != 0:\n break\n lastexpected1 = expected1\n lastexpected2 = expected2\n # print(expected2, expected1)\n return threshold", "def get_size(img):\n ih, iw = img.shape[:2]\n return iw * ih", "def bitpix_size (bitpix):\n return abs(int(bitpix))", "def dimension(self):\r\n a = 0\r\n for x in self.faces():\r\n if (len(x) > a):\r\n a = len(x) \r\n return a-1", "def get_largest_blob(img, invert):\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n \n gray_threshed = np.zeros_like(gray)\n if invert:\n gray_threshed[gray<np.mean(gray)] = 255\n else:\n gray_threshed[gray>np.mean(gray)] = 255\n\n # First, detect very large regions and remove them\n if DEBUG:\n print(\"gray_threshed start\")\n disp(gray_threshed)\n retval, labels, stats, centroids = cv2.connectedComponentsWithStats(gray_threshed)\n for i in range(len(stats)):\n show = show_cc(gray_threshed, stats[i])\n x0, y0, w, h, _ = stats[i]\n if (w*h) > 0.5*(gray_threshed.shape[0]*gray_threshed.shape[1]):\n gray_threshed[labels == i] = 0\n if DEBUG:\n print(\"gray_threshed large removed\")\n disp(gray_threshed)\n\n # ds = 2\n # element = cv2.getStructuringElement(cv2.MORPH_RECT, (ds*2+1, ds*2+1), (ds, ds))\n element = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 2), (1, 1))\n gray_threshed_dilated = cv2.dilate(gray_threshed, element)\n gray_threshed_dilated = cv2.erode(gray_threshed_dilated, element)\n\n if DEBUG:\n print(\"gray_threshed dilated\")\n disp(gray_threshed_dilated)\n # return\n\n retval, labels, stats, centroids = cv2.connectedComponentsWithStats(gray_threshed_dilated)\n sorted_indices = np.argsort(-1.0*stats[:,-1]) #Sort by size descending \n idx = sorted_indices[1] # Take the largest component aside from full the bounding box \n\n show = show_cc(img, stats[idx])\n if DEBUG:\n print('CC')\n disp(show)\n\n x0, y0, x1, y1, _ = stats[idx]\n x1 += x0 + 5\n y1 += y0 + 5\n x0 -= 5\n y0 -= 5\n x0 = max(0, x0)\n y0 = max(0, y0)\n x1 = min(x1, img.shape[1]-1)\n y1 = min(y1, img.shape[0]-1)\n\n return img[y0 : y1, x0 : x1, :]", "def size(img):\n\treturn img.size", "def __len__(self):\n return self.flat_image.size", "def get_largest_two_component(img, print_info = False, threshold = None):\n s = ndimage.generate_binary_structure(3,2) # iterate structure\n labeled_array, numpatches = ndimage.label(img,s) # labeling\n sizes = ndimage.sum(img,labeled_array,range(1,numpatches+1)) \n sizes_list = [sizes[i] for i in range(len(sizes))]\n sizes_list.sort()\n if(print_info):\n print('component size', sizes_list)\n if(len(sizes) == 1):\n out_img = img\n else:\n if(threshold):\n out_img = np.zeros_like(img)\n for temp_size in sizes_list:\n if(temp_size > threshold):\n temp_lab = np.where(sizes == 
temp_size)[0] + 1\n temp_cmp = labeled_array == temp_lab\n out_img = (out_img + temp_cmp) > 0\n return out_img\n else: \n max_size1 = sizes_list[-1]\n max_size2 = sizes_list[-2]\n max_label1 = np.where(sizes == max_size1)[0] + 1\n max_label2 = np.where(sizes == max_size2)[0] + 1\n component1 = labeled_array == max_label1\n component2 = labeled_array == max_label2\n if(max_size2*10 > max_size1):\n component1 = (component1 + component2) > 0\n out_img = component1\n return out_img", "def get_largest_two_component(img, print_info = False, threshold = None):\n s = ndimage.generate_binary_structure(3,2) # iterate structure\n labeled_array, numpatches = ndimage.label(img,s) # labeling\n sizes = ndimage.sum(img,labeled_array,range(1,numpatches+1)) \n sizes_list = [sizes[i] for i in range(len(sizes))]\n sizes_list.sort()\n if(print_info):\n print('component size', sizes_list)\n if(len(sizes) == 1):\n out_img = img\n else:\n if(threshold):\n out_img = np.zeros_like(img)\n for temp_size in sizes_list:\n if(temp_size > threshold):\n temp_lab = np.where(sizes == temp_size)[0] + 1\n temp_cmp = labeled_array == temp_lab\n out_img = (out_img + temp_cmp) > 0\n return out_img\n else: \n max_size1 = sizes_list[-1]\n max_size2 = sizes_list[-2]\n max_label1 = np.where(sizes == max_size1)[0] + 1\n max_label2 = np.where(sizes == max_size2)[0] + 1\n component1 = labeled_array == max_label1\n component2 = labeled_array == max_label2\n if(max_size2*10 > max_size1):\n component1 = (component1 + component2) > 0\n out_img = component1\n return out_img", "def get_image_size(self):", "def npixfeh(self):\n return len(self.fehedges)-1", "def get_pixel_size(img):\n\tp1 = img.get_attr_default(\"apix_x\", -1.0)\n\tcc = img.get_attr_default(\"ctf\", None)\n\tif cc == None:\n\t\tp2 = -1.0\n\telse:\n\t\tp2 = round(cc.apix, 3)\n\tif p1 == -1.0 and p2 == -1.0:\n\t\tERROR(\"Pixel size not set\", \"get_pixel_size\", 0)\n\t\treturn -1.0\n\telif p1 > -1.0 and p2 > -1.0:\n\t\tif abs(p1-p2) >= 0.001:\n\t\t\tERROR(\"Conflict between pixel size in attribute and in ctf object\", \"get_pixel_size\", 0)\n\t\t# pixel size is positive, so what follows omits -1 problem\n\t\treturn max(p1, p2)\n\telse:\n\t\treturn max(p1, p2)", "def get_bitsize(self) -> int:\n return self._surface.get_bitsize()", "def optimalBinSize(x):\n interquartile = np.diff(np.prctile(x, [25, 75]))\n return 2. 
* interquartile * len(x)**(-1./3)", "def GetThreshold(self) -> \"float const &\":\n return _itkHistogramThresholdCalculatorPython.itkHistogramThresholdCalculatorHFF_GetThreshold(self)", "def CalculateMaxImageSize(self, partition_size):\n raise NotImplementedError", "def get_num_of_images(self):", "def calc_psf_size_inpix_quick(arr):\n\tarr1d = arr.sum(axis=0)\n\tx = np.arange(arr1d.size)\n\tspline = si.UnivariateSpline(x, arr1d-np.max(arr1d)/2, s=0)\n\tr1, r2 = spline.roots()\n\n\treturn np.absolute(r2 - r1)", "def get_kernel_size(factor):\r\n return 2 * factor - factor % 2", "def feature_size(self):\n return self.fingerprint_length", "def get_pckthres(self, batch, imsize):\r\n if self.thres == 'bbox':\r\n bbox = batch['trg_bbox'].clone()\r\n bbox_w = (bbox[2] - bbox[0])\r\n bbox_h = (bbox[3] - bbox[1])\r\n pckthres = torch.max(bbox_w, bbox_h)\r\n elif self.thres == 'img':\r\n imsize_t = batch['trg_img'].size()\r\n pckthres = torch.tensor(max(imsize_t[1], imsize_t[2]))\r\n else:\r\n raise Exception('Invalid pck threshold type: %s' % self.thres)\r\n return pckthres.float()", "def __len__(self):\n\n return math.ceil(len(self.img_files) * self.gen_count / self.batch_size)", "def min_image_length(self):\n\n # Will contain the minimum number of super pixels on return.\n px = ct.c_int()\n self.lib.GetMinimumImageLength(ct.pointer(px))\n\n return px.value", "def find_optimal_threshold(self, hist):\n\n\n threshold = int((len(hist)-1)/2)\n ct = len(hist) - 1\n\n while True:\n if(ct < 1):\n break\n threshold1 = self.evalue(hist,0,threshold)\n threshold2 = self.evalue(hist,threshold,len(hist) - 2)\n nt = int((threshold1+threshold2)/2)\n ct = nt - threshold\n threshold = nt\n\n return threshold", "def _calculate_fp_metric(self, image, transform, size):\n self.count += 1\n radius = int(round(size/2))\n center = transform.trans\n angle = transform.rot\n\n rotated = image.rotate(angle, center)\n\n sx1, sy1 = center.x - radius, center.y - radius\n sx2, sy2 = center.x + radius, center.y + radius\n thick = int(round(size / 14))\n\n # Top\n x1, y1 = sx1, sy1\n x2, y2 = sx2, sy1 + thick\n top = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Left\n x1, y1 = sx1, sy1\n x2, y2 = sx1 + thick, sy2\n left = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Bottom\n x1, y1 = sx1, sy2 - thick\n x2, y2 = sx2, sy2\n bottom = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Right\n x1, y1 = sx2 - thick, sy1\n x2, y2 = sx2, sy2\n right = np.sum(rotated.img[y1:y2, x1:x2]) / (size * thick)\n\n # Identify finder edges\n if top < bottom and left < right:\n val = top + left\n elif top < bottom and right < left:\n val = top + right\n elif bottom < top and left < right:\n val = bottom + left\n elif bottom < top and right < left:\n val = bottom + right\n else:\n val = 100000000\n\n return val", "def GetInputNarrowBandwidth(self) -> \"double\":\n return _itkReinitializeLevelSetImageFilterPython.itkReinitializeLevelSetImageFilterIF3_GetInputNarrowBandwidth(self)", "def get_img_output_length(width, height):\n def get_output_length(input_length):\n return input_length//16\n\n return get_output_length(width), get_output_length(height)" ]
[ "0.87134635", "0.58126163", "0.57935023", "0.56478095", "0.5621576", "0.55769056", "0.55031663", "0.5494461", "0.5490276", "0.5477918", "0.5459333", "0.5459333", "0.54508764", "0.5442352", "0.5436269", "0.540579", "0.54017246", "0.5392374", "0.53905743", "0.5386647", "0.5386138", "0.5383353", "0.5378718", "0.53328925", "0.53150135", "0.53106695", "0.53055507", "0.530193", "0.5288913", "0.52859795" ]
0.8878656
0
cast(itkLightObject obj) -> itkHalfHermitianToRealInverseFFTImageFilterICF3IF3
def cast(obj: 'itkLightObject') -> "itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_cast(obj)", "def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIUC3 *\":\n return 
_itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterIF3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterIF3_cast(obj)", "def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)", "def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkScalarImageToRunLengthFeaturesFilterISS3 *\":\n return _itkScalarImageToRunLengthFeaturesFilterPython.itkScalarImageToRunLengthFeaturesFilterISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkTransformMeshFilterMF2MF2TF22 *\":\n return _itkTransformMeshFilterPython.itkTransformMeshFilterMF2MF2TF22_cast(obj)" ]
[ "0.8754373", "0.84519255", "0.8329674", "0.74115735", "0.73911655", "0.7372709", "0.73513615", "0.73473054", "0.7336441", "0.73128325", "0.72939885", "0.72838974", "0.72707695", "0.72588015", "0.72561187", "0.72537726", "0.72531945", "0.72531515", "0.7238233", "0.72148824", "0.719826", "0.7198234", "0.71774405", "0.71752834", "0.71750146", "0.7161302", "0.71598494", "0.71596473", "0.71542656", "0.71508265" ]
0.88050354
0
New() > itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 Create a new object of the class itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non-named parameters to the inputs of the new object in order: the first non-named parameter to the first input, the second to the second input, etc. Each named parameter is applied by calling the method of the same name prefixed by 'Set'.
def New(*args, **kargs):
    obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()
    import itkTemplate
    itkTemplate.New(obj, *args, **kargs)
    return obj
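For context, a hedged usage sketch of this factory in wrapped ITK. The template-indexing syntax and the variable complex_image are assumptions for illustration, not part of the record above:

import itk
InputType = itk.Image[itk.complex[itk.F], 3]
OutputType = itk.Image[itk.F, 3]
# Pass the input image as the first non-named argument, per the note above.
inverse_fft = itk.HalfHermitianToRealInverseFFTImageFilter[InputType, OutputType].New(complex_image)
inverse_fft.Update()
real_image = inverse_fft.GetOutput()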
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", 
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkTransformMeshFilterMF3MF3TF33.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def New(*args, **kargs):\n obj = itkClosingByReconstructionImageFilterIF3IF3SE3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj" ]
[ "0.74215084", "0.7370187", "0.736912", "0.73665327", "0.73401374", "0.7301532", "0.7293052", "0.72928876", "0.7292436", "0.72576916", "0.7249076", "0.7237967", "0.720986", "0.7209201", "0.71870464", "0.7181294", "0.7172013", "0.7157828", "0.7155288", "0.7106769", "0.7103403", "0.7032444", "0.70299363", "0.6991656", "0.69594353", "0.69529814", "0.6951653", "0.6944575", "0.6943916", "0.69399023" ]
0.83887285
0
itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(itkLightObject obj) > itkHalfHermitianToRealInverseFFTImageFilterICF3IF3
def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> "itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *":
    return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)
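A brief, hedged illustration of when such a cast helper is used: it downcasts a generic itkLightObject reference back to the concrete wrapped filter type. Here light_object is a placeholder assumed to actually hold an instance of this filter:

# Recover the concrete filter interface from a generic itkLightObject.
concrete = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(light_object)
concrete.Update()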
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)", "def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)", "def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)", "def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)", "def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)", "def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return 
_itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)", "def itkHuangThresholdImageFilterISS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUC3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterISS3ISS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterISS3ISS3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)", "def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)", "def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)", "def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)", "def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)", "def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)", "def itkHistogramToIntensityImageFilterHFIF3_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3 *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_cast(obj)", "def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)" ]
[ "0.91791695", "0.8398635", "0.8379627", "0.78276956", "0.77661604", "0.77512765", "0.774406", "0.77390754", "0.76507497", "0.7648699", "0.7641291", "0.7640236", "0.76136947", "0.760601", "0.76036775", "0.75992405", "0.75864094", "0.7560299", "0.7546588", "0.75093377", "0.74893326", "0.7477863", "0.7476016", "0.746664", "0.7465313", "0.745609", "0.7444049", "0.74406886", "0.7422916", "0.74199235" ]
0.9441769
0
Procedural interface for HalfHermitianToRealInverseFFTImageFilter
def half_hermitian_to_real_inverse_fft_image_filter(*args, **kwargs):
    import itk
    instance = itk.HalfHermitianToRealInverseFFTImageFilter.New(*args, **kwargs)
    return instance.__internal_call__()
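For illustration, a hedged one-liner using this procedural wrapper: the snake_case function constructs the filter, runs it, and returns the output in a single call. complex_image is an assumed, pre-existing input image:

import itk
real_image = itk.half_hermitian_to_real_inverse_fft_image_filter(complex_image)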
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)", "def itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF3IF3 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF3IF3_cast(obj)", "def filtering(self):\r\n # 1 ###########################################################################################################\r\n fft_image = np.fft.fft2(self.image)\r\n # 2 ###########################################################################################################\r\n fft_shift_image = np.fft.fftshift(fft_image)\r\n\r\n ###\r\n mag_dft = np.log(np.abs(fft_shift_image))\r\n mag_dft = (255 * (mag_dft / np.max(mag_dft))).astype(dtype='uint8')\r\n ###\r\n\r\n # 3 ###########################################################################################################\r\n if self.filter_name == 'butterworth_l' or self.filter_name == 'butterworth_h':\r\n mask = self.filter(fft_shift_image.shape, self.cutoff, self.order)\r\n else:\r\n mask = self.filter(fft_shift_image.shape, self.cutoff)\r\n # 4 ###########################################################################################################\r\n # multiply the dft (fft shift image) by the mask\r\n filtered_image = fft_shift_image * mask\r\n\r\n ###\r\n mag_filtered_image = mag_dft * mask\r\n ###\r\n\r\n # 5 ###########################################################################################################\r\n inverse_fft_shift_image = np.fft.ifftshift(filtered_image)\r\n # 6 ###########################################################################################################\r\n inverse_fft_image = np.fft.ifft2(inverse_fft_shift_image)\r\n # 7 ###########################################################################################################\r\n mag_image = np.zeros(inverse_fft_image.shape, dtype=complex)\r\n for i in range(inverse_fft_image.shape[0]):\r\n for j in range(inverse_fft_image.shape[1]):\r\n if inverse_fft_image[i][j] < 0:\r\n mag_image[i][j] = -1 * inverse_fft_image[i][j]\r\n else:\r\n mag_image[i][j] = inverse_fft_image[i][j]\r\n # magnitude of inverse fft is complete\r\n # 8 ###########################################################################################################\r\n full_contrast_image = self.post_process_image(mag_image)\r\n\r\n return [mag_dft, mag_filtered_image, full_contrast_image]", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def image_pre_filtering(left_img: np.ndarray, right_img: np.ndarray) -> tuple:\n\n def clahe(image: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply Contrast Limited Adaptive Histogram Equalization\n :param image: the image to be filtered\n 
:return: the image filtered with CLAHE\n \"\"\"\n clahe_filter = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))\n return clahe_filter.apply(image)\n\n def logarithmic(image: np.ndarray) -> np.ndarray:\n \"\"\"\n Apply Logarithmic Transform\n :param image: the image to be filtered\n :return: the image filtered with logarithmic transform\n \"\"\"\n c = max_disparity / math.log(1 + np.max(image))\n sigma = 1\n for i in range(0, image.shape[1]): # image width\n for j in range(0, image.shape[0]): # image height\n # compute logarithmic transform\n image[j, i] = int(c * math.log(1 + ((math.exp(sigma) - 1) * image[j, i])))\n return image\n\n def exponential(image: np.ndarray) -> np.ndarray:\n \"\"\"\n Perform pre-processing - raise to the power, as this subjectively appears\n to improve subsequent disparity calculation\n :param image:\n :return:\n \"\"\"\n return np.power(image, 0.75).astype('uint8')\n\n def apply_filter(image: np.ndarray) -> np.ndarray:\n \"\"\"\n Choose which filter to apply to both images, this could be a combination too\n :param image: the image to be filtered\n :return:\n \"\"\"\n # choose filters to apply\n return clahe(image)\n\n return apply_filter(left_img), apply_filter(right_img)", "def test_filter_image():\n\n model = Instafilter(\"Lo-Fi\")\n\n f_image = __local__ / \"Normal.jpg\"\n\n img0 = cv2.imread(str(f_image))\n img1 = model(f_image)\n\n diff = (img0 - img1).sum()\n\n assert abs(diff) > 0", "def highPassFilter(img, window=30):\n gray = grayscale(img)\n\tf = np.fft.fft2(gray)\n\tfshift = np.fft.fftshift(f)\n\trows, cols = gray.shape\n\tcrow, ccol = rows/2, cols/2\n\tfshift[crow-window:crow+window, ccol-window:ccol+window] = 0\n\tf_ishift = np.fft.ifftshift(fshift)\n\timg_back = np.fft.ifft2(f_ishift)\n\timg_back = np.abs(img_back)\n\treturn img_back", "def New(*args, **kargs):\n obj = itkHalfHermitianToRealInverseFFTImageFilterICF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj", "def filtering(self):\n from numpy import fft\n import numpy as np\n\n _image_dft = fft.fft2(self.image)\n _image_dft = fft.fftshift(_image_dft)\n # dft = DFT.DFT()\n # plt.figure(1) \n # plt.imshow(self.image)\n # plt.figure(2)\n # plt.imshow(20*np.log10(abs(_image_dft))) \n # print(_image_dft)\n # print(abs(_image_dft))\n # plt.show()\n filter = self.filter(self.image.shape, self.cutoff, self.order) \\\n if self.filter_name.startswith('butterworth') \\\n else self.filter(self.image.shape, self.cutoff)\n \n _image_dft_filtered = _image_dft * filter\n _image_filtered = abs(fft.ifft2(_image_dft_filtered))\n \n return [ self.post_process_image(_image_filtered), \\\n self.post_process_image(20*np.log10(abs(_image_dft)+.00001)), \\\n self.post_process_image(20*np.log10(abs(_image_dft_filtered)+.00001)) ]", "def fourier(img):\n return fourierCV(img)", "def _imfilter(x_data, f_data):\n return pipe(f_data, ifftshift, fftn, lambda x: x * fftn(x_data), ifftn).real", "def normalise(image):", "def filter_image(img):\n return cv2.bilateralFilter(img, 9, 50, 50)", "def apply_filter(self, image):\n pass", "def my_imfilter(image, filter):\n filtered_image = torch.Tensor()\n\n assert filter.shape[0] % 2 == 1\n assert filter.shape[1] % 2 == 1\n\n #############################################################################\n # TODO: YOUR CODE HERE\n ############################################################################\n ffilter = filter.float()\n K, J = ffilter.size()\n M, N, C = image.size()\n R1 = int((K - 1)/2)\n R2 = int((K + 1)/2)\n S1 = int((J 
- 1)/2)\n S2 = int((J + 1)/2)\n \n filtered_image = torch.zeros(M, N, C).float()\n padded_signal = torch.zeros(M + K, N + J, C)\n padded_signal[R1:M + K - R2, S1:N + J - S2, :] = image\n \n for i in range(int(M)):\n for j in range(int(N)):\n filtered_image[i, j, :] += torch.einsum('kjc,kj->c', padded_signal[i:i + K, j: j + J, :].float(), ffilter)\n \n #############################################################################\n # TODO: YOUR CODE HERE\n ############################################################################\n\n return filtered_image", "def ff_correct_image(image):\n pass", "def ff_correct_image(image):\n pass", "def preprocess_img_inv(img):\n img = img.data.numpy().copy()\n\n img[0] = img[0] * TORCH_IMG_STD[0] + TORCH_IMG_MEAN[0]\n img[1] = img[1] * TORCH_IMG_STD[1] + TORCH_IMG_MEAN[1]\n img[2] = img[2] * TORCH_IMG_STD[2] + TORCH_IMG_MEAN[2]\n img = img.transpose(1, 2, 0) * 255.0\n\n return img.round().astype('uint8')", "def convolve_im(im: np.array,\n kernel: np.array,\n verbose=True):\n ### START YOUR CODE HERE ### (You can change anything inside this block)\n \"\"\"\n\tcompared to the 4a solution this just adds padding to the filter if its smaller than the image\n\tthis is done by using the second parameter in fft.fft2 \n\t\n\tfirst it applies fourier transforms on the kernel and the image\n\tthen it sets the image to be the pointwise multiplication of the transforms\n\n the image is inverse fourier transformed and filtered for real values\n the domain image is shifted and taken the absolute value of\n the fourier transform of the image and kernel are also shifted and set to be the absolute value\n\tlastly everything is displayed in the subplots\n \"\"\"\n conv_result = im \n \n if verbose:\n fftKernel=np.fft.fft2(kernel,im.shape)\n fftImage=np.fft.fft2(conv_result)\n\t\t\n\t\t\n\t\t\n conv_result=np.multiply(fftImage,fftKernel)\n fftImageTransformed=conv_result\n\t\t\n \n conv_result=np.fft.ifft2(conv_result)\n \n conv_result=np.real(conv_result)\n\n fftImageTransformed=np.fft.fftshift(fftImageTransformed)\n fftImage=np.fft.fftshift(fftImage)\n fftKernel=np.fft.fftshift(fftKernel)\n\n fftImageTransformed=np.absolute(fftImageTransformed)\n fftImage=np.absolute(fftImage)\n fftKernel=np.absolute(fftKernel)\n\t\t\n\t\t\n # Use plt.subplot to place two or more images beside eachother\n plt.figure(figsize=(20, 4))\n # plt.subplot(num_rows, num_cols, position (1-indexed))\n plt.subplot(1, 5, 1)\n plt.imshow(im, cmap=\"gray\")\n plt.subplot(1, 5, 2)\n plt.imshow(fftImage, cmap=\"gray\")\n plt.subplot(1, 5, 3)\n plt.imshow(fftKernel, cmap=\"gray\")\n plt.subplot(1, 5, 4)\n plt.imshow(fftImageTransformed, cmap=\"gray\")\n plt.subplot(1, 5, 5)\n plt.imshow(conv_result, cmap=\"gray\")\n ### END YOUR CODE HERE ###\n return conv_result", "def create_hybrid_image(image1, image2, filter):\n\n hybrid_image = torch.Tensor()\n low_frequencies = torch.Tensor()\n high_frequencies = torch.Tensor()\n\n assert image1.shape[0] == image2.shape[0]\n assert image1.shape[1] == image2.shape[1]\n assert image1.shape[2] == image2.shape[2]\n assert filter.shape[0] <= image1.shape[0]\n assert filter.shape[1] <= image1.shape[1]\n assert filter.shape[0] % 2 == 1\n assert filter.shape[1] % 2 == 1\n\n #############################################################################\n # TODO: YOUR CODE HERE\n ############################################################################\n\n low_frequencies = my_imfilter(image1, filter)\n high_frequencies = image2 - my_imfilter(image2, filter)\n hybrid_image 
= torch.clamp(low_frequencies + high_frequencies, 0, 1)\n \n\n #############################################################################\n # TODO: YOUR CODE HERE\n ############################################################################\n\n return low_frequencies, high_frequencies, hybrid_image", "def inverse(im): \t \n x,y = np.shape(im)\n img = np.zeros([x,y])\n\t\n for i in range(x):\n for j in range(y):\n img[i,j] = 255 - im[i,j]\n return img", "def flip_h(image, gt):\n result_im = cv2.flip(image, 1)\n result_gt = cv2.flip(gt, 1)\n\n return result_im, result_gt", "def convolve_im(im: np.array,\n kernel: np.array,\n verbose=True):\n\t\n ### START YOUR CODE HERE ### (You can change anything inside this block) \n\t\n H,W = np.shape(im)\n h,w = np.shape(kernel)\n t_b = (H-h)//2\n l_r = (W-w)//2\n kernel_padded = np.pad(kernel, ((t_b, t_b+1),(l_r, l_r+1)), 'constant')\n kernel_padded = np.pad(kernel, ((0, 2*t_b),(0, 2*l_r)), 'constant')\n fft_kernel = np.fft.fft2(kernel_padded, s=None, axes=(-2, -1), norm=None)\n \n \n im_fft = np.fft.fft2(im, s=None, axes=(-2, -1), norm=None) \n im_filt = im_fft*fft_kernel \n conv_result = np.fft.ifft2(im_filt, s=None, axes=(-2, -1), norm=None).real \n\n if verbose:\n # Use plt.subplot to place two or more images beside eachother\n plt.figure(figsize=(12, 4))\n # plt.subplot(num_rows, num_cols, position (1-indexed))\n plt.subplot(1, 2, 1)\n plt.imshow(im, cmap=\"gray\")\n plt.subplot(1, 2, 2) \n plt.imshow(conv_result, cmap=\"gray\")\n\n ### END YOUR CODE HERE ###\n return conv_result", "def create_hybrid_image(image1, image2, filter):\n\n assert image1.shape[0] == image2.shape[0]\n assert image1.shape[1] == image2.shape[1]\n assert image1.shape[2] == image2.shape[2]\n\n low_frequencies = my_imfilter(image1,filter)\n im2blur = my_imfilter(image2,filter)\n high_frequencies = image2 - im2blur\n hybrid_image = low_frequencies + high_frequencies \n\n return low_frequencies, high_frequencies, hybrid_image", "def _get_fourier_filter(self):\n size = max(64, int(2 ** np.ceil(np.log2(2 * self.m[-1].item()))))\n\n pi = torch.acos(torch.zeros(1)).item() * 2.0\n n = torch.cat(\n [\n torch.arange(1, size // 2 + 1, 2, device=self.n.device),\n torch.arange(size // 2 - 1, 0, -2, device=self.n.device),\n ]\n )\n f = torch.zeros(size, device=self.n.device)\n f[0] = 0.25\n if self.flat:\n f[1::2] = -1 / (pi * n).pow(2)\n else:\n f[1::2] = -self.s_detect.abs().pow(2) / (\n pi\n * (self.d_source + self._d_detect())\n * torch.sin(\n n\n * self.s_detect.abs()\n / (self.d_source + self._d_detect())\n )\n ).pow(2)\n f = torch.stack(\n [f, torch.zeros(f.shape, device=self.n.device)], dim=-1\n )\n f = fftshift(f, dim=(-2,))\n\n filt = fft1(f)[..., 0]\n\n if self.filter_type == \"hamming\":\n # hamming filter\n fac = torch.tensor(\n np.hamming(size).astype(np.float32), device=f.device\n )\n elif self.filter_type == \"hann\":\n # hann filter\n fac = torch.tensor(\n np.hanning(size).astype(np.float32), device=f.device\n )\n elif self.filter_type == \"cosine\":\n # cosine filter\n fac = torch.sin(\n torch.linspace(0, pi, size + 1, device=f.device)[:-1]\n )\n else:\n # ramp / ram-lak filter\n fac = 1.0\n\n return fac * filt", "def invert(self, img):\n return self.inverse()(img)", "def filteringEngine(original, debug=False):\n\n processedImage1 = filterNotInRange(original, LABmin_healthy, LABmax_healthy, cv2.COLOR_BGR2LAB)\n processedImage2 = filterNotInRange(original, LABmin_terrain, LABmax_terrain, cv2.COLOR_BGR2LAB)\n # Image containing many FPs\n processedImage3 = 
filterNotInRange(original, HSVmin_yellow, HSVmax_yellow, cv2.COLOR_BGR2HSV)\n\n sum1 = cv2.add(processedImage1, processedImage2)\n sub1 = differentialNode(original, sum1)\n\n processedImage = filterNotInRange(sub1, LABmin, LABmax, cv2.COLOR_BGR2LAB)\n # sum2 = cv2.add(processedImage, processedImage3)\n\n kernel = np.ones((6, 6), np.uint8)\n temp = closing(processedImage, kernel)\n\n kernel = np.ones((3, 3), np.uint8)\n out = opening(temp, kernel)\n\n if debug:\n cv2.imshow('processedImage1', processedImage1)\n cv2.imshow('processedImage2', processedImage2)\n cv2.imshow('processedImage3', processedImage3)\n cv2.imshow('sum1', sum1)\n cv2.imshow('sub1', sub1)\n cv2.imshow('processedImage', processedImage)\n cv2.imshow('sum2', sum2)\n cv2.imshow('out', out)\n\n return out", "def filtering(image):\n output = np.array(image)\n for x in xrange(0,1):\n bilateralFilter_img = cv2.bilateralFilter(output,5, 75, 75)\n\n return bilateralFilter_img" ]
[ "0.7376454", "0.71049654", "0.70379233", "0.68949634", "0.6755386", "0.65881103", "0.63375324", "0.625885", "0.623645", "0.6208561", "0.61968803", "0.6196119", "0.610591", "0.605326", "0.6052592", "0.60317117", "0.5980117", "0.5919756", "0.5919756", "0.5908526", "0.58952224", "0.58919847", "0.5888873", "0.5768987", "0.575688", "0.5750642", "0.5750189", "0.574888", "0.57444876", "0.57385474" ]
0.82497996
0
Creates a MutateOperation for the sitelink campaign extension setting that will be removed.
def create_sitelink_campaign_extension_setting_mutate_operation(
    client, customer_id, campaign_id
):
    extension_type_enum = client.enums.ExtensionTypeEnum
    # Construct the campaign extension setting resource name, in format:
    # customers/{customer_id}/campaignExtensionSettings/{campaign_id}~{extension_type}
    resource_name = client.get_service(
        "CampaignExtensionSettingService"
    ).campaign_extension_setting_path(
        customer_id, campaign_id, extension_type_enum.SITELINK.name
    )
    # Create a MutateOperation for the campaign extension setting.
    mutate_operation = client.get_type("MutateOperation")
    mutate_operation.campaign_extension_setting_operation.remove = resource_name
    return mutate_operation
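A minimal calling sketch, assuming a configured GoogleAdsClient; the customer and campaign IDs are placeholders:

from google.ads.googleads.client import GoogleAdsClient

client = GoogleAdsClient.load_from_storage()  # reads the google-ads.yaml config
operation = create_sitelink_campaign_extension_setting_mutate_operation(
    client, "1234567890", "9876543210"
)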
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(client, customer_id, campaign_id):\n # Initialize an array of MutateOperations\n mutate_operations = []\n sitelink_campaign_extension_setting_mutate_operation = create_sitelink_campaign_extension_setting_mutate_operation(\n client, customer_id, campaign_id\n )\n mutate_operations.append(\n sitelink_campaign_extension_setting_mutate_operation\n )\n\n ga_service = client.get_service(\"GoogleAdsService\")\n extension_feed_item_resource_names = get_all_sitelink_extension_feed_items(\n client, ga_service, customer_id, campaign_id\n )\n extension_feed_item_mutate_operations = create_extension_feed_item_mutate_operations(\n client, extension_feed_item_resource_names\n )\n mutate_operations.extend(extension_feed_item_mutate_operations)\n\n # Issue a mutate request to remove the campaign extension setting and\n # its extension feed items.\n response = ga_service.mutate(\n customer_id=customer_id, mutate_operations=mutate_operations\n )\n mutate_operation_responses = response.mutate_operation_responses\n # The order of response messages corresponds to the order of operations\n # passed into the mutate method. Since the first operation sent in the\n # request was to remove a campaign extension setting, we can read the\n # resource name of that object in the first message in the response list.\n print(\n \"Removed a campaign extension setting with resource name \"\n f'\"{mutate_operation_responses[0].campaign_extension_setting_result.resource_name}\".'\n )\n # Since we read the result of the first remove operation above, next we\n # read the results for the remaining remove operations by iterating over all\n # but the first message in the response list.\n for mutate_operation_response in mutate_operation_responses[1:]:\n print(\n \"Removed an extension feed item with resource name \"\n f'\"{mutate_operation_response.extension_feed_item_result.resource_name}\".'\n )\n # [END remove_entire_sitelink_campaign_extension_setting]", "def create_extension_feed_item_mutate_operations(\n client, extension_feed_item_resource_names\n):\n mutate_operations = []\n # Create a MutateOperation for each extension feed item to remove.\n for resource_name in extension_feed_item_resource_names:\n mutate_operation = client.get_type(\"MutateOperation\")\n mutate_operation.extension_feed_item_operation.remove = resource_name\n mutate_operations.append(mutate_operation)\n\n return mutate_operations", "def remove(self, *args, **kwargs):\n raise InvalidEndpointOperation(\n 'Not a valid operation on this endpoint.'\n )", "def remove(self, *args, **kwargs):\n raise InvalidEndpointOperation(\n 'Not a valid operation on this endpoint.'\n )", "def unset(cls, client, resource, args) :\n\t\ttry :\n\t\t\tif type(resource) is not list :\n\t\t\t\tunsetresource = appfwlearningsettings()\n\t\t\t\tif type(resource) != type(unsetresource):\n\t\t\t\t\tunsetresource.profilename = resource\n\t\t\t\telse :\n\t\t\t\t\tunsetresource.profilename = resource.profilename\n\t\t\t\treturn unsetresource.unset_resource(client, args)\n\t\t\telse :\n\t\t\t\tif type(resource[0]) != cls :\n\t\t\t\t\tif (resource and len(resource) > 0) :\n\t\t\t\t\t\tunsetresources = [ appfwlearningsettings() for _ in range(len(resource))]\n\t\t\t\t\t\tfor i in range(len(resource)) :\n\t\t\t\t\t\t\tunsetresources[i].profilename = resource[i]\n\t\t\t\telse :\n\t\t\t\t\tif (resource and len(resource) > 0) :\n\t\t\t\t\t\tunsetresources = [ appfwlearningsettings() for _ in range(len(resource))]\n\t\t\t\t\t\tfor i in range(len(resource)) 
:\n\t\t\t\t\t\t\tunsetresources[i].profilename = resource[i].profilename\n\t\t\t\tresult = cls.unset_bulk_request(client, unsetresources, args)\n\t\t\treturn result\n\t\texcept Exception as e :\n\t\t\traise e", "def attr_remove(self):\n def _del_if_in(obj, attr):\n if attr in obj:\n del obj[attr]\n if self._modifier_exists(REMOVE_KEY):\n to_remove = self[CONFIG_KEY][SAMPLE_MODS_KEY][REMOVE_KEY]\n _LOGGER.debug(\"Removing attributes: {}\".format(to_remove))\n for attr in to_remove:\n [_del_if_in(s, attr) for s in self.samples]", "def remove_operation(self, name):\n\n del self.operations[name]", "def delop(self, mask, target, args):\n config = self.config\n try:\n del config[args['<mask>']]\n except KeyError:\n yield \"Operator not found!\"\n else:\n self.bot.db[self.key] = config\n yield \"Deleted operator.\"", "def MutateCampaignBidModifiers(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def remove(self, *values):\n\t\tif not self.unpacked: self._unpack()\n\t\tmap(self.permissions.discard, values)\n\t\treturn self", "def remove(self, key):\n key_str = self.optionxform(key)\n option_key = {\n 'product': self.product,\n 'section': self.name,\n 'option': key_str\n }\n try:\n setting = ProductSetting(self.env, keys=option_key)\n except ResourceNotFound:\n self.env.log.warning(\"No record for product option %s\", option_key)\n else:\n self._cache.pop(key, None)\n setting.delete()\n self.env.log.info(\"Removing product option %s\", option_key)", "def del_attrib(self, key):\n self.aux_attrib.pop(key)\n self.aux_attrib_args.pop(key)", "def _remove_operator(self, operator):", "def removeOnCreate(call, args=(), kwargs={}, nodeClass='*'):\n pass", "def unset(cls, client, resource, args) :\n\t\ttry :\n\t\t\tif type(resource) is not list :\n\t\t\t\tunsetresource = rewriteaction()\n\t\t\t\tif type(resource) != type(unsetresource):\n\t\t\t\t\tunsetresource.name = resource\n\t\t\t\telse :\n\t\t\t\t\tunsetresource.name = resource.name\n\t\t\t\treturn unsetresource.unset_resource(client, args)\n\t\t\telse :\n\t\t\t\tif type(resource[0]) != cls :\n\t\t\t\t\tif (resource and len(resource) > 0) :\n\t\t\t\t\t\tunsetresources = [ rewriteaction() for _ in range(len(resource))]\n\t\t\t\t\t\tfor i in range(len(resource)) :\n\t\t\t\t\t\t\tunsetresources[i].name = resource[i]\n\t\t\t\telse :\n\t\t\t\t\tif (resource and len(resource) > 0) :\n\t\t\t\t\t\tunsetresources = [ rewriteaction() for _ in range(len(resource))]\n\t\t\t\t\t\tfor i in range(len(resource)) :\n\t\t\t\t\t\t\tunsetresources[i].name = resource[i].name\n\t\t\t\tresult = cls.unset_bulk_request(client, unsetresources, args)\n\t\t\treturn result\n\t\texcept Exception as e :\n\t\t\traise e", "def modify_node(self, node):\n for kw in node.keywords:\n if kw.arg == \"script_mode\":\n node.keywords.remove(kw)\n return node", "def remove(self, *args):\n return _libsbml.XMLAttributes_remove(self, *args)", "def unset(bot, update, chat_data):\n if 'job' not in chat_data:\n update.message.reply_text('Sem notificacoes ativadas')\n return\n\n job = chat_data['job']\n job.schedule_removal()\n del chat_data['job']\n check = emojize(\":white_check_mark:\", use_aliases=True)\n update.message.reply_text('Notificacao cancelada com sucesso'+check+'')", "def __delitem__(self):\n raise ValueError(\"Dataset objects are immutable\")", "def test_remove_workflow_definition(self):\n pass", "def uninstall_hook_update_rule(cursor, registry):\n env = 
Environment(cursor, SUPERUSER_ID, {})\n for rule_xml_id, group_xml_id in MULTI_COMPANY_RULES.items():\n rule = env.ref(rule_xml_id)\n group = env.ref(group_xml_id)\n if group in rule.groups:\n rule.write({'groups':[(3, group.id)]})", "def delete(self):\n self.connection.deprecate_activity_type(self.domain.name, self.name, self.version)", "def remove_op(self, op):\n self._operations.remove(op)", "def remove_pilot_compute_service(self, pjs):\n self.pilot_job_services.remove(pjs)\n CoordinationAdaptor.update_cds(self.url, self)", "def removeOption(self, *args):\n return _libsbml.ConversionProperties_removeOption(self, *args)", "def clone(self, **kwargs):\n return super(AttentionWrapperState, self)._replace(**kwargs)", "def test_remove_a_single_attribute(self):\n pass", "def MutateAdGroupAds(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def remove_descriptor(self, uuid):", "def remove():" ]
[ "0.57828265", "0.54131603", "0.48680517", "0.48680517", "0.48087773", "0.47320238", "0.46879244", "0.46765333", "0.466588", "0.46614406", "0.46451315", "0.4598959", "0.45877948", "0.45646876", "0.45513937", "0.45418844", "0.45409346", "0.4482612", "0.44797063", "0.4463183", "0.44560787", "0.44487885", "0.4431297", "0.44211686", "0.44207096", "0.44077134", "0.4383901", "0.43817672", "0.4372546", "0.43621212" ]
0.6482961
0
Gets all sitelink extension feed items associated with the specified campaign extension setting.
def get_all_sitelink_extension_feed_items(
    client, ga_service, customer_id, campaign_id
):
    campaign_resource_name = client.get_service(
        "CampaignService"
    ).campaign_path(customer_id, campaign_id)
    extension_type_enum = client.enums.ExtensionTypeEnum
    extension_type_name = extension_type_enum.SITELINK.name

    # Construct the query.
    query = f"""
        SELECT
          campaign_extension_setting.campaign,
          campaign_extension_setting.extension_type,
          campaign_extension_setting.extension_feed_items
        FROM campaign_extension_setting
        WHERE
          campaign_extension_setting.campaign = '{campaign_resource_name}'
          AND campaign_extension_setting.extension_type = '{extension_type_name}'"""

    # Issue a search request using streaming.
    stream = ga_service.search_stream(customer_id=customer_id, query=query)

    extension_feed_item_resource_names = []
    # Iterate through each row and append the extension feed item resource
    # names to the return array.
    for batch in stream:
        for row in batch.results:
            extension_feed_item_resource_names.extend(
                row.campaign_extension_setting.extension_feed_items
            )

    if len(extension_feed_item_resource_names) == 0:
        print(
            "The specified campaign does not contain a sitelink campaign "
            "extension setting."
        )
        sys.exit(1)

    return extension_feed_item_resource_names
    # [END remove_entire_sitelink_campaign_extension_setting_1]
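A hedged usage sketch for the retrieval helper above. It assumes the client from the previous sketch, that sys is imported at module scope (as in the full example this function is excerpted from), and placeholder IDs:

ga_service = client.get_service("GoogleAdsService")
feed_item_resource_names = get_all_sitelink_extension_feed_items(
    client, ga_service, "1234567890", "9876543210"
)
print(f"Found {len(feed_item_resource_names)} sitelink extension feed items.")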
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fetch_entries(self):\n entries = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n [entries.append(FeedDocument(entry.get('title', ''), entry.get('summary', ''))) for entry in feed.get('entries', [])]\n return entries", "def get_all_feed_sources(request):\n feed_sources = FeedSource.objects.all().order_by('-id')\n return get_feed_sources_list(feed_sources)", "def get_listing():\n\n result_items = []\n\n rss_data = urllib.request.urlopen(ActivityURL)\n rss_xml = xml.dom.minidom.parse(rss_data)\n\n channel = rss_xml.getElementsByTagName('channel')[0]\n items = channel.getElementsByTagName('item')\n for item in items:\n # Most of these are hackish, but a result of using the RSS\n # feed instead of something nicer like a JSON API. This\n # listing method is specifically isolated so we can easily\n # swap out the implementation later.\n asset_id = item.getElementsByTagName('guid')[0].childNodes[0].data.split('/')[-1]\n img_url = item.getElementsByTagName('description')[0].childNodes[0].data\n # Get part after start of img src attribute\n split_href = img_url.split('src=\"', 1)[1]\n # Get part before closing quote\n img_url = split_href.split('\"', 1)[0]\n # FIXME\n zip_url = ''\n result_items.append( Asset(asset_id, img_url, zip_url) )\n\n return result_items", "def fetch(self):\n\n entries = []\n for activity in self.activities[\"entries\"]:\n entries.append(\n [\n element\n for element in [activity[\"title\"], activity[\"content\"][0][\"value\"]]\n ]\n )\n\n return entries[0 : self.max_entries]", "def list_feed(self):\n entities = []\n entities_j = self._get('strings/tags/module:inventory,feed:*')\n if entities_j and entities_j['feed']:\n for entity_j in entities_j['feed']:\n entities.append(Feed(entity_j, CanonicalPath('/f;{}'.format(entity_j))))\n return entities", "def extensions(self):\n return list(self._list(extension.Extension, paginated=False))", "def fetch_feeds(self):\n feed_list = []\n rss_list = self.__data_adapter.fetch_rss()\n for rss in rss_list:\n rss_href = rss.get('url', None)\n rss_title = rss.get('title', '-')\n if rss_href is not None:\n feed = feedparser.parse(rss_href)\n feed_list.append({\n 'title':rss_title,\n 'href':rss_href,\n 'status': feed.get('status', 400),\n 'updated': feed.get('updated', None),\n 'updated_parsed': feed.get('updated_parsed', None),\n 'encoding': feed.get('encoding', None),\n 'bozo': feed.get('bozo', None),\n 'headers': feed.get('headers', {}),\n 'etag': feed.get('etag', None),\n 'version': feed.get('version', None),\n 'entries': feed.get('entries', []),\n 'namespaces': feed.get('namespaces', None)\n })\n\n return feed_list", "def get_extension_feed_item(client, customer_id, feed_item_id):\n ga_service = client.get_service(\"GoogleAdsService\")\n\n query = f\"\"\"\n SELECT\n extension_feed_item.id,\n extension_feed_item.ad_schedules,\n extension_feed_item.device,\n extension_feed_item.status,\n extension_feed_item.start_date_time,\n extension_feed_item.end_date_time,\n extension_feed_item.targeted_campaign,\n extension_feed_item.targeted_ad_group,\n extension_feed_item.promotion_feed_item.discount_modifier,\n extension_feed_item.promotion_feed_item.final_mobile_urls,\n extension_feed_item.promotion_feed_item.final_url_suffix,\n extension_feed_item.promotion_feed_item.final_urls,\n extension_feed_item.promotion_feed_item.language_code,\n extension_feed_item.promotion_feed_item.money_amount_off.amount_micros,\n 
extension_feed_item.promotion_feed_item.money_amount_off.currency_code,\n extension_feed_item.promotion_feed_item.occasion,\n extension_feed_item.promotion_feed_item.orders_over_amount.amount_micros,\n extension_feed_item.promotion_feed_item.orders_over_amount.currency_code,\n extension_feed_item.promotion_feed_item.percent_off,\n extension_feed_item.promotion_feed_item.promotion_code,\n extension_feed_item.promotion_feed_item.promotion_end_date,\n extension_feed_item.promotion_feed_item.promotion_start_date,\n extension_feed_item.promotion_feed_item.promotion_target,\n extension_feed_item.promotion_feed_item.tracking_url_template\n FROM extension_feed_item\n WHERE\n extension_feed_item.extension_type = 'PROMOTION'\n AND extension_feed_item.id = {feed_item_id}\n LIMIT 1\"\"\"\n\n # Issue a search request to get the extension feed item contents.\n stream = ga_service.search_stream(customer_id=customer_id, query=query)\n\n try:\n stream_response = next(stream)\n except StopIteration:\n print(f\"Error: No ExtensionFeedItem found with ID {feed_item_id}.\")\n sys.exit(1)\n\n extension_feed_item = stream_response.results[0].extension_feed_item\n print(\n \"Retrieved details for ad extension with ID: {extension_feed_item.id}.\"\n )\n\n # Create a query to retrieve any URL customer parameters attached to the\n # extension feed item.\n url_custom_params_query = f\"\"\"\n SELECT\n feed_item.url_custom_parameters\n FROM feed_item\n WHERE feed_item.id = {extension_feed_item.id}\"\"\"\n\n # Issue a search request to get any URL custom parameters.\n stream = ga_service.search_stream(\n customer_id=customer_id, query=url_custom_params_query\n )\n\n try:\n url_stream_response = next(stream)\n except StopIteration:\n print(f\"Error: No FeedItems found with ID {feed_item_id}.\")\n sys.exit(1)\n\n feed_item = url_stream_response.results[0].feed_item\n parameters = feed_item.url_custom_parameters\n num_params = len(parameters)\n print(f\"Retrieved {num_params} attached URL custom parameters.\")\n\n if num_params > 0:\n extension_feed_item.promotion_feed_item.url_custom_parameters.extend(\n parameters\n )\n\n return extension_feed_item", "def getSubscriptionList(self):\r\n return self.feeds", "def fetch_feed_list(self, **args):\n return self.fetch(\"/feedlist\", **args)", "def feed(self):\n feed_dict = feedparser.parse(self.URL)\n return [self.entry_dict(entry) for entry in feed_dict['entries']]", "def insider_trading_rss_feed(\n apikey: str, limit: int = DEFAULT_LIMIT\n) -> typing.Optional[typing.List[typing.Dict]]:\n path = f\"insider-trading-rss-feed\"\n query_vars = {\"apikey\": apikey, \"limit\": limit}\n return __return_json_v4(path=path, query_vars=query_vars)", "def list_feed(self):\n entities = []\n entities_j = self._get('traversal/type=f')\n if entities_j:\n for entity_j in entities_j:\n entities.append(Feed(entity_j['id'], CanonicalPath(entity_j['path'])))\n return entities", "def get_feed(self):\n possible_endings = ('rss', 'rss/')\n if not self.url or not self.url.endswith(possible_endings):\n print('Please check URL(is RSS?) 
and Internet connection')\n sys.exit()\n try:\n data = feedparser.parse(self.url)\n except urllib.error.URLError:\n print('Please input correct URL')\n sys.exit()\n self.get_content(data)\n return self.items", "def get_rss(self):\r\n rssfiles = []\r\n \r\n rssfiles.append(feedparser.parse(self.url))\r\n return rssfiles", "def main(client, customer_id, campaign_id):\n # Initialize an array of MutateOperations\n mutate_operations = []\n sitelink_campaign_extension_setting_mutate_operation = create_sitelink_campaign_extension_setting_mutate_operation(\n client, customer_id, campaign_id\n )\n mutate_operations.append(\n sitelink_campaign_extension_setting_mutate_operation\n )\n\n ga_service = client.get_service(\"GoogleAdsService\")\n extension_feed_item_resource_names = get_all_sitelink_extension_feed_items(\n client, ga_service, customer_id, campaign_id\n )\n extension_feed_item_mutate_operations = create_extension_feed_item_mutate_operations(\n client, extension_feed_item_resource_names\n )\n mutate_operations.extend(extension_feed_item_mutate_operations)\n\n # Issue a mutate request to remove the campaign extension setting and\n # its extension feed items.\n response = ga_service.mutate(\n customer_id=customer_id, mutate_operations=mutate_operations\n )\n mutate_operation_responses = response.mutate_operation_responses\n # The order of response messages corresponds to the order of operations\n # passed into the mutate method. Since the first operation sent in the\n # request was to remove a campaign extension setting, we can read the\n # resource name of that object in the first message in the response list.\n print(\n \"Removed a campaign extension setting with resource name \"\n f'\"{mutate_operation_responses[0].campaign_extension_setting_result.resource_name}\".'\n )\n # Since we read the result of the first remove operation above, next we\n # read the results for the remaining remove operations by iterating over all\n # but the first message in the response list.\n for mutate_operation_response in mutate_operation_responses[1:]:\n print(\n \"Removed an extension feed item with resource name \"\n f'\"{mutate_operation_response.extension_feed_item_result.resource_name}\".'\n )\n # [END remove_entire_sitelink_campaign_extension_setting]", "def list_extensions(self, **_params):\r\n return self.get(self.extensions_path, params=_params)", "def request_channel_items(\n self, page, keyword='', show_linked=True, show_unlinked=True,\n show_on_page=50):\n request = GetChannelItems(\n self.api_session, self.channel_id, self.source, self.sub_source,\n show_linked=show_linked, show_unlinked=show_unlinked, page=page,\n keyword=keyword, show_on_page=50)\n linking_list = LinkingList([self.channel_item_type(\n self.api_session, self.channel, item) for item in\n request.response_dict])\n if len(linking_list) > 0:\n return linking_list\n else:\n return LinkingList([])", "def get_games():\r\n feed = feedparser.parse(FEED_URL)\r\n games = []\r\n for entry in feed.entries:\r\n games.append(Game(title = entry['title']\r\n , link = entry['link']\r\n ))\r\n return games", "def extensions(self):\n return self.properties.get('extensions',\n EntityCollection(self.context, Extension,\n ResourcePath(\"extensions\", self.resource_path)))", "def list_campaigns_extended(self, **kwargs) -> ApiResponse:\n return self._request(kwargs.pop('path'), params=kwargs)", "def get_feeds():\n feeds = {}\n for _configuration_key, _configuration in blogs.all():\n if not _configuration.use_generic_feeds:\n continue\n\n class 
EntryFeed(Feed):\n configuration = _configuration\n configuration_key = _configuration_key\n\n title_template = _configuration.feed_title_template_name\n description_template = \\\n _configuration.feed_description_template_name\n\n feed_type = feedgenerator.Rss201rev2Feed\n\n def get_site(self):\n if not hasattr(self, '_current_site'):\n self._current_site = Site.objects.get_current()\n return self._current_site\n\n def title(self):\n if self.configuration.feed_title is not None:\n return self.configuration.feed_title\n return self.get_site().name\n \n def link(self):\n if self.configuration.feed_link is not None:\n return self.configuration.feed_link\n return \"http://%s/\" % (self.get_site().domain)\n \n def description(self):\n if self.configuration.feed_description is not None:\n return self.configuration.feed_description\n return \"Latest entries on %s\" % self.get_site().name\n \n def items(self):\n items = self.configuration.model.live.all()\n return items[:self.configuration.feed_limit]\n \n def item_pubdate(self, obj):\n return obj.pub_date\n\n def item_link(self, obj):\n return self.configuration.get_entry_absolute_url(obj)\n\n if _configuration.feed_format == feed_formats.ATOM:\n # Alter the class to support Atom feeds instead of RSS.\n EntryFeed.feed_type = feedgenerator.Atom1Feed\n EntryFeed.subtitle = EntryFeed.description\n\n feeds[_configuration_key] = EntryFeed\n return feeds", "def getExternalLinks(self, force=False):\n if hasattr(self, \"extlinks\") and not force:\n return self.extlinks\n if self.pageid == 0 and not self.title:\n self.setPageInfo()\n if not self.exists:\n raise page.NoPage\n params = {\n 'action': 'query',\n 'prop': 'extlinks',\n 'ellimit': self.site.limit,\n }\n if self.pageid > 0:\n params['pageids'] = self.pageid\n else:\n params['titles'] = self.title\n req = api.APIRequest(self.site, params)\n response = req.query()\n self.extlinks = []\n\n def _extractToList(json, stuff):\n list = []\n if self.pageid == 0:\n self.pageid = json['query']['pages'].keys()[0]\n if stuff in json['query']['pages'][str(self.pageid)]:\n # items are a single value dict of ns:link\n for item in json['query']['pages'][str(self.pageid)][stuff]:\n list.extend(item.values())\n return list\n\n if isinstance(response, list): #There shouldn't be more than 5000 links on a page...\n for part in response:\n self.extlinks.extend(_extractToList(self, 'extlinks'))\n else:\n self.extlinks = _extractToList(response, 'extlinks')\n return self.extlinks", "def extensions(self):\n if \"extensions\" in self._prop_dict:\n return ExtensionsCollectionPage(self._prop_dict[\"extensions\"])\n else:\n return None", "def extensions(self):\n if \"extensions\" in self._prop_dict:\n return ExtensionsCollectionPage(self._prop_dict[\"extensions\"])\n else:\n return None", "def extensions(self):\n if \"extensions\" in self._prop_dict:\n return ExtensionsCollectionPage(self._prop_dict[\"extensions\"])\n else:\n return None", "def get_feed_entries_task():\n get_feed_entries()\n logger.info(\"Entries for Feed\")", "def iter_feed(gd_client):\n feed = gd_client.GetContactsFeed()\n while feed:\n for entry in feed.entry:\n yield entry\n # Check whether there is another page and if yes\n next_link = feed.GetNextLink()\n feed = None\n if next_link:\n feed = gd_client.GetContactsFeed(uri=next_link.href)", "def extensions(self, global_step):\n return []", "def extensions(self, global_step):\n return []" ]
[ "0.55385774", "0.53972065", "0.5338791", "0.5298385", "0.52451897", "0.5244143", "0.51899904", "0.51251996", "0.5124382", "0.5122065", "0.5023741", "0.50200367", "0.5009792", "0.49959934", "0.49743348", "0.49121657", "0.48833185", "0.48079076", "0.48065683", "0.47998428", "0.4769366", "0.4760323", "0.47561407", "0.47479835", "0.47479835", "0.47479835", "0.47197264", "0.47118413", "0.46650025", "0.46650025" ]
0.8327138
0
Creates MutateOperations for the sitelink extension feed items that will be removed.
def create_extension_feed_item_mutate_operations(
    client, extension_feed_item_resource_names
):
    mutate_operations = []
    # Create a MutateOperation for each extension feed item to remove.
    for resource_name in extension_feed_item_resource_names:
        mutate_operation = client.get_type("MutateOperation")
        mutate_operation.extension_feed_item_operation.remove = resource_name
        mutate_operations.append(mutate_operation)

    return mutate_operations
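To show how the three helpers compose, a hedged sketch mirroring the main() function that appears among the related snippets: the campaign extension setting removal goes first, followed by its feed item removals, all in one mutate call:

operations = [
    create_sitelink_campaign_extension_setting_mutate_operation(
        client, customer_id, campaign_id
    )
]
operations.extend(
    create_extension_feed_item_mutate_operations(
        client, feed_item_resource_names
    )
)
response = ga_service.mutate(
    customer_id=customer_id, mutate_operations=operations
)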
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_remove_yield(self, affiliate_items_url_factory, affiliate_network_factory):\n network = affiliate_network_factory(name='Network')\n\n with mock.patch('chiton.rack.affiliates.bulk.create_affiliate') as create_affiliate:\n affiliate = ValidatingAffiliate()\n affiliate.valid_tlds = ['com', 'org']\n create_affiliate.return_value = affiliate\n\n items = affiliate_items_url_factory(['biz', 'com', 'net', 'org'])\n for index, item in enumerate(items):\n item.name = 'Item %d' % (index + 1)\n item.network = network\n item.save()\n\n assert AffiliateItem.objects.count() == 4\n\n pruned = []\n for item_name, network_name, was_pruned in prune_affiliate_items(items.order_by('name')):\n pruned.append([item_name, network_name, was_pruned])\n\n assert pruned[0] == ['Item 1', 'Network', True]\n assert pruned[1] == ['Item 2', 'Network', False]\n assert pruned[2] == ['Item 3', 'Network', True]\n assert pruned[3] == ['Item 4', 'Network', False]", "def MutateFeedItemSetLinks(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def removeOldItems(self):\n pass", "def test_remove_invalid(self, affiliate_items_url_factory):\n with mock.patch('chiton.rack.affiliates.bulk.create_affiliate') as create_affiliate:\n affiliate = ValidatingAffiliate()\n affiliate.valid_tlds = ['com', 'org']\n create_affiliate.return_value = affiliate\n\n items = affiliate_items_url_factory(['biz', 'com', 'net', 'org'])\n assert AffiliateItem.objects.count() == 4\n\n assert len(list(prune_affiliate_items(items))) == 4\n assert sorted([i.affiliate_url.split('.')[-1] for i in AffiliateItem.objects.all()]) == ['com', 'org']", "def removeFromWatchlist(self, items):\n if not isinstance(items, list):\n items = [items]\n\n for item in items:\n if not self.onWatchlist(item):\n raise BadRequest(f'\"{item.title}\" is not on the watchlist')\n ratingKey = item.guid.rsplit('/', 1)[-1]\n self.query(f'{self.METADATA}/actions/removeFromWatchlist?ratingKey={ratingKey}', method=self._session.put)\n return self", "def remove(self, items, relative=True):\n if relative: items = self.items[items]\n self.items = np.setdiff1d(self.items, items)", "def removeItem(*args):", "def removeItem(*args):", "def batch_delete_attributes(self, items):\r\n return self.connection.batch_delete_attributes(self, items)", "def _clean_updated_items(sender, **kwargs):\n json_values = kwargs[\"json_values\"]\n stash = kwargs[\"stash\"]\n spot = kwargs[\"spot\"]\n\n stash[\"items_to_delete\"] = []\n stash[\"item_ei_to_delete\"] = []\n\n # if we don't have any json items, then return\n if spot is None or \"updated_items\" not in stash:\n return\n\n updated_items = stash[\"updated_items\"]\n\n updated_item_models = []\n updated_item_models_to_stash = {}\n\n # create the lists of items to delete\n items_to_delete = []\n item_ei_to_delete = []\n\n # get the old items\n old_items = spot.item_set.all()\n\n # create item models so we can use a hashmap for matching\n for item in updated_items:\n item_json = item.get_json()\n item_model = Item(\n name=item_json[\"name\"],\n item_category=item_json[\"item_category\"],\n item_subcategory=item_json[\"item_subcategory\"],\n id=item_json[\"id\"],\n spot=spot,\n )\n updated_item_models.append(item_model)\n updated_item_models_to_stash[item_model] = item\n\n # create a hashmap to match old to new, by using old:old\n lookup_hashmap = {}\n\n for old_item in old_items:\n lookup_hashmap[old_item] = old_item\n\n 
equality_hashmap = {}\n # create a hashmap matching new to old\n for updated_item in updated_item_models:\n if updated_item in lookup_hashmap:\n equality_hashmap[updated_item] = lookup_hashmap.pop(updated_item)\n\n # we should delete any missing from the PUT\n for item_to_delete in lookup_hashmap:\n items_to_delete.append(item_to_delete)\n\n # find items that haven't been updated and remove them\n for (\n updated_item_model,\n old_item,\n ) in equality_hashmap.items():\n\n updated_item = updated_item_models_to_stash[updated_item_model]\n updated_item_form = updated_item.get_form()\n updated_item.set_instance(old_item)\n updated_item_ei = updated_item.get_ei_forms()\n\n # clean up the EI\n old_ei_set = old_item.itemextendedinfo_set.all()\n ei_to_remove = clean_ei(old_ei_set, updated_item_ei)\n item_ei_to_delete += ei_to_remove\n\n # get rid of items that are all the same without EI\n if (\n updated_item_model.name == old_item.name\n and updated_item_model.item_category == old_item.item_category\n and updated_item_model.item_subcategory\n == old_item.item_subcategory\n and len(updated_item_ei) == 0\n ):\n updated_items.remove(updated_item)\n\n stash[\"items_to_delete\"] = items_to_delete\n stash[\"item_ei_to_delete\"] = item_ei_to_delete", "def decompose(self) -> typing.Generator[\"JsonPatchOperation\", None, None]:\n if self.op == JsonPatchOperation.Operation.remove:\n yield [self]\n return\n # else: add/replace\n\n if isinstance(self.value, dict):\n for k, v in self.value.items():\n sub_op = JsonPatchOperation(self.op, [*self.path, str(k)], v)\n for sub_sub_op in sub_op.decompose():\n yield sub_sub_op\n else:\n yield self", "def test_remove_item_test_remove_multiple_item():\n sc.menu = sc.default_menu\n sc.current.add_item('Fries', 3)\n sc.current.add_item('Steak', 1)\n sc.current.remove_item('Fries', 2)\n sc.current.receipt == {'subtotal': 3.28, 'Fries': 1, 'Steak': 1}", "def __delitem__(self):\n raise ValueError(\"Dataset objects are immutable\")", "def UpdateIds(self):\r\n removed = set(self.item_itemId.keys()) - set(self.data.keys())\r\n for item in removed:\r\n itemId = self.item_itemId[item]\r\n del self.item_itemId[item]\r\n del self.itemId_item[itemId]", "def middledelalllistitems(self):\n self._linklist.delete()", "def remove_ops(self):\n return self._remove_ops", "def recalculate_opinions(self):\r\n for i in range(len(self.news_feed)):\r\n idea = self.news_feed.pop()\r\n self._recalculate_opinions(idea)", "def clear_removed_items(self, doctype, items):\n\t\tif items:\n\t\t\tfrappe.db.delete(doctype, dict(parent=self.doc_type, custom=1, name=(\"not in\", items)))\n\t\telse:\n\t\t\tfrappe.db.delete(doctype, dict(parent=self.doc_type, custom=1))", "def remove_links(self, item):\r\n if item.get('link'):\r\n item.pop('link')\r\n if item.get('links'):\r\n item.pop('links')\r\n return item", "def difference_update(self, other):\n if not isinstance(other, (list, np.ndarray, IndexCollection)):\n other = [other]\n for item in other:\n self.discard(item)\n return self", "def remove_items(self, name, remove):\n items = self._get_itemmap(name, 'items')\n drop_item_names = [item for idx, item in enumerate(items, start=1)\n if idx in remove]\n keep_item_idxs = [idx for idx, item in enumerate(items, start=1)\n if idx not in remove]\n new_items = self._meta['masks'][name]['items']\n new_items = [item for idx, item in enumerate(new_items, start=1)\n if idx in keep_item_idxs]\n self._meta['masks'][name]['items'] = new_items\n for drop_item_name in drop_item_names:\n self._data.drop(drop_item_name, 
axis=1, inplace=True)\n del self._meta['columns'][drop_item_name]\n col_ref = 'columns@{}'.format(drop_item_name)\n if col_ref in self._meta['sets']['data file']['items']:\n self._meta['sets']['data file']['items'].remove(col_ref)\n self._meta['sets'][name]['items'].remove(col_ref)\n return None", "def remove():", "def removeAttr(atributes=('exp'), *items):\n for item in items:\n # check if item is pynode\n if not isinstance(item, pm.nodetypes.Transform):\n logger.debug('Create Pynode: %s, %s' % (item, type(item)))\n item = pm.PyNode(item)\n\n # deleteAttrs\n for attr in atributes:\n try:\n item.attr(attr).delete()\n logger.info('Remove attribute: %s.%s' % (item, attr))\n\n except:\n logger.info('Can not delete attr: %s' % attr)", "def remove_all(self, *items):\n for item in items:\n self.remove(item)", "def __delitem__(self, i):\n # An element of a policy function can't be deleted", "def remove (self, item):\n pass", "def remove(self, *args):\n return _libsbml.ListWrapperModelCreator_remove(self, *args)", "def remove_many_descriptors(self, uuids):", "def _remove_data(things, lst_remove=None):\n\n for data in things:\n data.pop(\"_sa_instance_state\", None)\n data.pop(\"user_id\", None)\n\n if lst_remove is not None:\n for str_remove in lst_remove:\n if str_remove in data:\n data.pop(str_remove, None)\n\n return things", "def delete_outlayers(input_list, price_diff):\n \n # Get maximum and minimum indices\n max_pos = price_diff.index(max(price_diff))\n min_pos = price_diff.index(min(price_diff))\n \n # Remove correspondant values from input list\n input_list.remove(input_list[max_pos])\n input_list.remove(input_list[min_pos])" ]
[ "0.5504989", "0.54735047", "0.53347915", "0.52744806", "0.5200508", "0.50634307", "0.50577694", "0.50577694", "0.5010065", "0.49496794", "0.4918461", "0.49112594", "0.49083838", "0.49023283", "0.49006012", "0.48826176", "0.48780647", "0.4877376", "0.4854148", "0.48365226", "0.4829849", "0.48276564", "0.4827215", "0.48081887", "0.48081318", "0.47964886", "0.47855496", "0.47833413", "0.47539982", "0.4750473" ]
0.7286365
0
Event handler for when a new player enters the game.
def handle_new_user_event(name, sid, methods=['GET', "POST"]):
    game.add_player(name, sid)
    print(f"there are {len(game.players)} players in the game")
    for player in game.players:
        print(player.name + " is in the game")
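A minimal sketch of the Flask-SocketIO wiring a handler like this appears to assume; nothing below is confirmed by the record. The event name "new user", the Game and Player classes, and the app setup are all stand-ins, and stock flask_socketio reads the caller's session id from flask.request.sid rather than receiving it as an argument.

from flask import Flask, request
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

class Player:
    def __init__(self, name, sid):
        self.name = name  # display name supplied by the client
        self.sid = sid    # Socket.IO session id, doubles as a private room

class Game:
    def __init__(self):
        self.players = []

    def add_player(self, name, sid):
        self.players.append(Player(name, sid))

game = Game()

@socketio.on("new user")  # hypothetical event name
def on_new_user(name):
    # mirrors handle_new_user_event above, but takes the session id
    # from the request context instead of an explicit argument
    game.add_player(name, request.sid)
    print(f"there are {len(game.players)} players in the game")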
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def on_client_enter(self, game) -> None:\n pass", "def handle_events(self):\n\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n sys.exit()\n if self.state == FIRST_ENTER:\n if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_ENTER:\n self.state = GAME\n self.player.event = event\n elif self.state == GAME:\n self.player.event = event\n elif self.state in [END, WIN]:\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_KP_ENTER:\n self.state = GAME\n elif event.key == pygame.K_n:\n self.running = False", "def newPlayer():\r\n pass", "def onenterready(self, event):\n print('onenterready; event: %s, %s->%s' % (event.event, event.src, event.dst))", "async def on_world_start(self, data, connection):\n player = self.plugins['player_manager'].get_player_by_name(\n connection.player.name)\n if hasattr(player, 'seen_before'):\n return True\n else:\n self.background(self._new_player_greeter(connection))\n self.background(self._new_player_gifter(connection))\n player.seen_before = True\n return True", "def on_game_go(self):\n print(\"============game go\")\n # create dict with players - defines the players that are playing\n self.on_data_to_all_clients({\"game\":{\"hide_all_dialogs\":\"\"}})\n\n for pl in self.player_list:\n if pl.client not in self.client_pl_dict :\n self.client_pl_dict[pl.client] = pl\n pl.override_direction(4)\n\n self.add_food_item()\n\n self.is_game_going = True\n #for pl in self.player_list:\n # pl.direction = 4", "def bcp_game_start(self, **kargs):\n self.bcp_player_add(number=1)\n self.bcp_player_turn_start(player=1)\n self.events.post('game_started', **kargs)", "def on_pre_enter(self):\n Logger.info('Application: Changed to the Combat screen.')\n Logger.info(\n 'Application: '\n 'The ship chosen for combat is the '\n '\"{}\" type.'.format(self.player.type)\n )\n Logger.info('Application: Stats: {}.'.format(self.player.stats))\n self.start_soundtrack()\n\n # Set the event interval of every frame\n self.updater = Clock.schedule_interval(self.update, 1.0/60.0)\n\n # Populate round/level objects and collidable lists\n self.collidables = []\n self.spaceships = []\n\n # Sets the combat stage/hostiles based on players level\n self.set_level_hostiles()\n\n\n for asteroid in self.asteroids:\n self.collidables.append(asteroid)\n for ship in self.spaceships:\n self.collidables.append(ship)", "def on_enter(self, userdata):\n pass", "def on_pre_enter(self):\n Logger.info('Application: Changed to the Intro screen.')\n self.start_soundtrack()", "def event_game_over(self):\n print('Game over!')\n self._cmd_exit()", "def visit_player(self, player):\n self.visit_character(player)", "def on_enter(self):\n raise NotImplemented(\"on_enter method should be implemented.\")", "def player_activate(event_var):\r\n debug.write(\"[SourceRPG] Handling player_activate\", 1)\r\n if \"PENDING\" in event_var['es_steamid']:\r\n debug.write(\"[SourceRPG] Player joining had a pending steamid, being kicked\")\r\n es.server.cmd('kickid %s \"We had an error with you joining, please reconnect\"' % event_var['userid'])\r\n else:\r\n debug.write(\"Player successfully joined and activated\", 1)\r\n players.addPlayer( event_var['userid'] )\r\n debug.write(\"[SourceRPG] player_activate handled\", 1)", "async def new_game():\n if enough_players():\n GAME.new_game()\n await update_players()", "def player_loop(self):\n\n while True:\n # send message to game that you are ready\n msg = self.receiver()\n if msg[\"game_over\"]:\n return", "async def handle_player_update(self, update: 
andesite.PlayerUpdate) -> None:\n ...", "def notify_game_over(self):\n self.is_game_over = True", "def start_of_game(self):\n pass", "def on_enter(self):\n\n super(BaseScene, self).on_enter()\n\n self.load_map()\n self.load_players()\n self.load_enemies()\n self.load_status_bar()\n\n self.enemies_layer.next_wave()", "def player(network, event) :\n\twhile event.is_set() :\n\t\t_, _, (V, P) = network.act()\n\t\t# print V, P\n\t\tnetwork.env.render()\n\t\tif (network.env.done) :\n\t\t\tnetwork.reset_game()\n\t\ttime.sleep(0.1)", "def playerDone(self, event):\n # set score\n try:\n self.handleScore(self.currentPlayer, int(self.scoreVar.get()))\n self.addScoreRow(self.currentPlayer)\n \n winner = self.goalCheck()\n if winner:\n self.victory(winner)\n else:\n newRound = not self.nextPlayer()\n if newRound:\n self.addRoundRow(self.currentRound)\n except ValueError:\n return", "def handle_event(self,event):\n if event.type != KEYDOWN:\n return\n if event.key == pygame.K_SPACE:\n self.model.addFaceToFoundation()\n # making the enter key only reset after a victory\n if (self.model.new_game == 1 and event.key == pygame.K_RETURN):\n self.model.reset_game = 1", "def onenterinit(self, event):\n print('onenterinit; event: %s, %s->%s' % (event.event, event.src, event.dst))", "def event_handler(self):\n for event in pygame.event.get():\n if event.type == pygame.QUIT:\n self.RENDER_FRAME = False\n\n elif event.type == pygame.KEYDOWN:\n if event.key == pygame.K_ESCAPE:\n # exit game on ESCAPE\n self.RENDER_FRAME = False\n\n elif event.key == pygame.K_a:\n self.player.move_direction[\"left\"] = True\n\n elif event.key == pygame.K_d:\n self.player.move_direction[\"right\"] = True\n\n elif event.key == pygame.K_SPACE:\n self.player.move_direction[\"up\"] = True\n\n elif event.key == pygame.K_h:\n self.player.move_direction[\"attack\"] = True\n\n elif event.key == pygame.K_k:\n # NOTE: testing purpose only\n self.player.health = 0\n\n elif event.type == pygame.KEYUP:\n if event.key == pygame.K_a:\n self.player.move_direction[\"left\"] = False\n elif event.key == pygame.K_d:\n self.player.move_direction[\"right\"] = False", "def update(self):\r\n if not self.tr.game_over and self.tr.turn_tracker:\r\n self.computer_play()", "def ev_windowenter(self, event: WindowEvent) -> None:", "def do_start_joined(self):\n\t\td = {\"state\": be.S_GAME,\n\t\t\t\t\"hosting\": False,\n\t\t\t\t\"uuid\": None,\n\t\t\t\t\"name\": self.game_name,\n\t\t\t\t\"nickname\": self.nickname,\n\t\t\t\t\"num_players\": self.num_players,\n\t\t\t\t\"boardsize\": self.boardsize}\n\t\tevent = pygame.event.Event(be.E_STATE, d)\n\t\tpygame.event.post(event)\n\n\t\tself.hide_all()\n\t\tself.renderer.color = (0, 0, 0, 0)", "def you_won(self):\n self.end_of_level()\n self.message_holder.add_widget(self.you_win_label)\n Clock.schedule_once(self.goto_next_level, 5)", "def event_player_wins(self) -> None:\n win_amount = self.user.bet\n print(\"Congratulations, you win:\", win_amount)\n self.user.win_balance(self.user.bet)" ]
[ "0.73703206", "0.6564488", "0.64533114", "0.64027756", "0.6338999", "0.63058937", "0.629707", "0.6246762", "0.6230514", "0.6219869", "0.61946213", "0.6177849", "0.617491", "0.61737484", "0.6160792", "0.61495346", "0.61304504", "0.61037457", "0.6088835", "0.60847485", "0.6083576", "0.6047838", "0.6029165", "0.59856176", "0.5958387", "0.59582686", "0.59525305", "0.5927901", "0.5920787", "0.5895829" ]
0.65749884
1
Event handler for when a new round starts, whether because all players have been entered and the "Start Game" button is clicked, or because play continues after a prior round has ended. Creates and shuffles the deck, deals out the cards, and determines the trump card. Note that the trump is passed to each player as part of the payload this function emits, but it is not part of their hand.
def handle_game_start_event(methods=["GET", "POST"]):
    if not game.round_already_started:
        game.round_already_started = True
        print("start the round")
        game.start_round()
        for player in game.ordered_players:
            hand_dict = {"trump": [game.trump_value, game.trump]}
            for i, card in enumerate(player.hand):
                hand_dict[i] = [card.value, card.suit]
            socketio.emit("deal hand", hand_dict, room=player.sid)
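To make the payload concrete, here is the shape of hand_dict that this handler emits per player, rebuilt with a stand-in Card class; only the value and suit attributes are taken from the handler, everything else is assumed.

from dataclasses import dataclass

@dataclass
class Card:
    value: int
    suit: str

trump = Card(5, "diamonds")                    # hypothetical trump card
hand = [Card(9, "hearts"), Card(1, "spades")]  # hypothetical dealt hand

hand_dict = {"trump": [trump.value, trump.suit]}  # trump rides along, outside the hand
for i, card in enumerate(hand):
    hand_dict[i] = [card.value, card.suit]        # integer keys index the hand

print(hand_dict)
# {'trump': [5, 'diamonds'], 0: [9, 'hearts'], 1: [1, 'spades']}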
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def beginRound(self):\n\t\tself.gameState = Table.PRE_FLOP\n\t\tfor p in self.getPlayers():\n\t\t\tif p.money <= 0:\n\t\t\t\tprint p.name\n\t\t\t\tself.playerRemoveList.append(p)\n\t\tself.removeFromPlayerList()\n\t\tif len(self.getPlayers()) == 1:\n\t\t\tself.isGameEnd = True\n\t\telse:\n\t\t\tself.roundNo += 1\n\t\t\tself.determineBlinds()\n\t\t\tself.curRaise = self.bigBlind\n\t\t\tself.collectSmallBlind()\n\t\t\tself.collectBigBlind()\n\t\t\tself.deal()\n\t\t\tself.setState()\n\t\t\tif self.noOfPlayers() == 2:\n\t\t\t\tself.turn = self.curDealerSeatNo\n\t\t\t\t_, self.roundEndSeat = self.findNthPlayerFromSeat(self.turn, 1)\n\t\t\telse:\n\t\t\t\t_, self.turn = self.findNthPlayerFromSeat(self.curDealerSeatNo, 3)\n\t\t\t\t_, self.roundEndSeat = self.findNthPlayerFromSeat(self.curDealerSeatNo, 2)", "def start_round(user, dealer, deck):\n print(\"\\n======== INITIAL DEAL ========\")\n draw_card(dealer, deck, 2)\n draw_card(user, deck, 2)\n print(f\"\\nThis concludes the initial deal:\\n\"\n f\"\\033[36mPlayer: {user.total}\\033[0m\\n\"\n f\"\\033[33mDealer: {dealer.total}\\033[0m\\n\")\n time.sleep(1.5)", "def play_round(user, dealer, deck):\n start_round(user=user, dealer=dealer, deck=deck)\n dealer_turn(dealer, deck)\n if dealer.total <= GOAL_TOTAL():\n player_turn(user, deck)", "def runGame(self):\n for player in self.game_state.player_list:\n self.player_hand_dict[player.name] = []\n\n \"\"\" Deal the hand out starting with the player after the Dealer \"\"\"\n print \"Dealing cards...\"\n self.dealCards(self.dealer)\n self.printPlayersHands()\n self.playHand()\n dealer = (self.dealer+1)%self.num_players\n\n \"\"\" Play until the termination conditions are met \"\"\"\n if self.isGameFinished() == False:\n \"\"\" Increment dealer \"\"\"\n self.dealer = (self.dealer+1)%self.num_players\n \"\"\" Reset the game state \"\"\"\n self.game_state.newGameState()\n self.decision_list = []\n self.runGame()\n else:\n print \"\"\n print \"Game over!\"", "def round(self):\n #player turn\n if self.started:\n self.started = False #registers the game as started then immediately turns that value false\n if self.initial_action:\n card = self.deck.deal()\n self.player.value += card.value\n if card.is_ace:\n self.player.usable_ace = True\n else:\n self.player.playing = False\n else: \n if self.apply_policy():\n card = self.deck.deal()\n self.player.value += card.value\n if card.is_ace:\n self.player.usable_ace = True\n else:\n self.player.playing = False\n\n #dealer turn\n if self.dealer.value < 17:\n card = self.deck.deal()\n self.dealer.value += card.value\n self.dealer.visible_value += card.value\n #allow people to reduce their scores by applying aces\n self.apply_ace()\n #check to see if anyone has bust by making bust people not _playing\n if self.player.value > 21:\n self.player.broke = True\n self.player.playing = False\n if self.dealer.value > 21:\n self.dealer.broke = True", "def setUpNextGameRound(self):\n\t\tself.pots = [0]\n\t\tself.currentBet = [0]\n\t\tself.reinitDeck()\n\t\tself.communityCards = []\n\t\tallPlayers = self.getPlayers()\n\t\tself.resetPlayerHands(allPlayers)\n\t\tself.resetPlayerBetAmount(allPlayers)\n\t\t_, seat = self.findNthPlayerFromSeat(self.curDealerSeatNo, 1)\n\t\tself.curDealerSeatNo = seat\n\t\tself.beginRound()", "def step(self, action):\n assert self.completed_rounds < self.num_rounds\n\n player = self.players[self.current_player_id]\n card = action\n\n if card not in player.hand:\n raise ValueError(\"Action not allowed because the card is not in the player's 
hand\")\n\n player.hand.remove(card)\n player.played.add(card)\n # print(f\"Player {self.current_player_id} with hand {[c.id for c in player.hand]} played the card {card.id}\")\n best_combination_on_the_table = self._get_best_combination(card)\n if best_combination_on_the_table:\n self.last_player_capturing_id = self.current_player_id\n player.captured.add(card)\n for c in best_combination_on_the_table:\n self.table.remove(c)\n player.captured.add(c)\n if not self.table and not (self._is_last_round and self._is_round_over()):\n player.scope += 1\n else:\n self.table.add(card)\n # print(f\"Cards on the table after play: {[c.id for c in self.table]}\")\n\n if self._is_round_over():\n self.completed_rounds += 1\n # print(f\"=========== Round {self.current_round} completed ============\")\n self.current_player_id = (self.current_player_id + 1) % self.num_players\n\n if self.is_over():\n last_player_capturing = self.players[self.last_player_capturing_id]\n # print(f\"Giving the remaining cards to player {last_player_capturing.player_id}\")\n for card in self.table:\n last_player_capturing.captured.add(card)\n self.table = set()\n assert all([len(p.played) == 10 for p in self.players])\n assert all([len(p.hand) == 0 for p in self.players])\n return self.get_state(), self.current_player_id", "def main():\n\t\n\t# Declare Variables\n\tDeck = []\n\tPlayerAHand = []\n\tPlayerBHand = []\n\tgameCounter = 0\n\tcardA = 0\n\tcardB = 0\n\trankA = 0\n\trankB = 0\n\t\n\n\t# Create deck. Cards are represented by an integer value\n\tfor i in range(52):\n\t\tDeck.append(i)\n\t\n\t# Shuffle the deck\n\trandom.shuffle(Deck)\n\t\n\t# Deal 1/2 the cards to each player\n\tfor x in range(26):\n\t\tPlayerAHand.append(Deck.pop())\n\t\tPlayerBHand.append(Deck.pop())\n\t\n\t# Main Gameplay\n\t\t\n\twhile len(PlayerAHand) > 0 and len(PlayerBHand) > 0:\n\t\tgameCounter += 1\n\t\tPlayerAHand, PlayerBHand = playRound(PlayerAHand, PlayerBHand)\n\t\t\n\t# End of game\n\t\n\tprint(\"There were \", gameCounter, \" rounds played\")", "def run():\n \n # Enter player name\n #player_name = raw_input(\"Put your Name: \\n \")\n player1 = Player(raw_input(\"Put Player 1 name: \\n \"))\n player2 = Player(raw_input(\"Put Player 2 name: \\n \")) \n \n # Generate Deck\n cards = gen_deck()\n \n game_on = True\n start_pl = 0\n while game_on == True :\n deck = copy(cards) # Cards being played this hand\n deal_cards(deck, player1, player2)\n \n play_set(player1, player2, start_pl) \n\n game_on = check_score(player1, player2, game_on)", "def next_round(self, succeeds_fold=False):\r\n\r\n self.pot = 0\r\n self.actions = 0\r\n self.previous_bet = self.small_blind\r\n self.initiate_blind(self.small_blind + self.big_blind)\r\n\r\n # Let the first player begin\r\n self.active_player = (self.active_player + 1) % len(self.players)\r\n self.players[self.active_player].active = True\r\n\r\n self.players[self.active_player-1].flip_cards()\r\n\r\n if not succeeds_fold:\r\n self.community_cards.flip_cards()\r\n if succeeds_fold:\r\n self.community_cards.flip_all_cards()\r\n\r\n # Create a new deck\r\n self.deck_model = DeckModel()\r\n\r\n # Creates new cards\r\n self.community_cards.new_cards(self.deck_model)\r\n for player in self.players:\r\n player.new_cards(self.deck_model)\r\n\r\n output_text = \"Initiating round.\\n{} post the big blind [${}]\\n{} post the small blind [${}]\".format(\r\n self.players[(self.active_player + 1) % len(self.players)].name, self.big_blind,\r\n self.players[self.active_player].name, self.small_blind)\r\n\r\n 
self.new_pot.emit()\r\n self.new_credits.emit()\r\n self.new_output.emit(output_text)", "def startGame(d_hand, p_hand, deck1):\n NUM_CARDS = 2\n\n for i in range(NUM_CARDS):\n d_hand.getCard(deck1.drawCard())\n p_hand.getCard(deck1.drawCard())", "def play_round(self) -> None:\r\n # Print round details:\r\n self.open_cards()\r\n print(Messages.OPEN_CARDS)\r\n print(self.card_stack)\r\n print(Messages.MONEY_IN_STACK + \" \", end=\"\")\r\n print(self.money_stack)\r\n\r\n start_player = self.cur_player # Helper for the round to end correctly.\r\n\r\n # The actual round\r\n while self.continue_round(start_player):\r\n if self.active_players == 1:\r\n break\r\n self.round_player_money += self.players[self.cur_player].play_round(self.round_player_money)\r\n if not self.players[self.cur_player].is_active():\r\n self.active_players -= 1\r\n self.next_active_player()\r\n self.players[start_player].set_already_raised(True) # Helper for the round to end correctly.\r\n self.end_round()", "def deal():\n \n # Update messages, score and the player's \"Hand\" status\n # as global variables.\n global outcome, outcome_plus, outcome_plus_plus, in_play, score, action \n outcome = outcome_plus = outcome_plus_plus = \"\"\n action = HIT_OR_STAND\n \n # If the \"Deal\" button is clicked during the middle of \n # a round the program reports that the \"Player\" lost \n # the round and updates the \"score\" appropriately.\n if in_play:\n outcome = PLAYER_LOSES \n outcome_plus = EARLY_DEAL_1\n outcome_plus_plus = EARLY_DEAL_2\n score -= SCORE_POINTS\n else:\n in_play = True\n \n # Create and shuffle the \"Deck\" (stored as a global \n # variable). Avoids the situation where the \"Deck\" \n # becomes empty during play.\n global deck_of_cards\n deck_of_cards = Deck()\n deck_of_cards.shuffle()\n \n # Create new \"Player\" and \"Dealer\" Hands (stored as \n # global variables). \n global player, dealer\n player = Hand()\n dealer = Hand()\n \n # Add two \"Cards\" to each \"Hand\". To transfer a \"Card\" \n # from the \"Deck\" to a \"Hand\", the \"deal_card()\" \n # method of the \"Deck\" class and the \"add_card()\" \n # method of \"Hand\" class are being used in \n # combination. 
\n player.add_card(deck_of_cards.deal_card())\n dealer.add_card(deck_of_cards.deal_card())\n player.add_card(deck_of_cards.deal_card())\n dealer.add_card(deck_of_cards.deal_card())\n \n # Print resulting \"Hands\" to the console with an \n # appropriate message indicating which \"Hand\" is which.\n # Remove comments if in DEBUG mode.\n #print \"Player: \" + str(player)\n #print \"Dealer: \" + str(dealer) \n \n return None", "def _transit_to_start_round(self, **kwargs):\n logging.info(\"in _transit_to_start_round\")\n handler = kwargs['handler']\n\n # Once placed, a vote will not be unset from within this 'voting' state,\n # though it could be overridden with another vote from the same person\n # before all votes are in (which is okay)\n plus_id = kwargs['plus_id']\n selected_card = kwargs['card_num']\n\n game = models.Hangout.get_by_id(self.hangout_id).current_game.get()\n if not game:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': \"Game for hangout %s not found\" % (self.hangout_id,)})\n return False\n else:\n logging.debug(\"using game: %s\", game)\n # check game state.\n # players can only select a card from the 'start_round' state.\n if not game.state == self.state_name:\n if handler:\n handler.render_jsonp(\n {'status' : 'ERROR',\n 'message': (\n \"Can't vote now, wrong game state %s.\" % (game.state,))})\n return False\n participant_key = model.Key(models.Participant, plus_id, parent=game.key)\n participant = participant_key.get()\n if not participant:\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': \"Could not retrieve indicated participant\"})\n return False\n sres = participant.select_card(selected_card)\n participant.put()\n if sres is None: # need to check explicitly, b/c of card 0\n if handler:\n handler.accumulate_response(\n {'status': 'ERROR',\n 'message': \"could not select card %s from hand\" % selected_card})\n return False\n\n # broadcast successful selection by player, but don't indicate the\n # card selected. 
(After all have selected, the shuffled set of\n # selections will be broadcast)\n self._cache_selection(\n plus_id, selected_card, game.key.id(),\n game.current_round)\n message = simplejson.dumps({'player_selection': \n {'participant': plus_id,\n 'game_id': game.key.id(),\n 'round': game.current_round}})\n logging.info(\"player selection channel msg: %s\", message)\n participants = game.participants()\n for p in participants:\n logging.info(\"sending channel msg to %s\", p.plus_id)\n channel.send_message(p.channel_id, message)\n \n return True", "def play_game(self):\n # need everyone to pass to move to next phase?\n self.deal_cards()\n self.plant_food()", "def setUpNextBetRound(self):\n\t\tif self.gameState == Table.PRE_FLOP:\n\t\t\tself.gameState = Table.FLOP\n\t\t\tself.dealCommunity(3)\n\t\t\tself.curRaise = self.bigBlind\n\t\t\tplayer = self.findNextSuitablePlayer(self.curDealerSeatNo)\n\t\t\t_, self.turn = self.findNextSuitablePlayer(self.curDealerSeatNo)\n\t\t\tself.roundEndSeat = self.curDealerSeatNo\n\t\telif self.gameState == Table.FLOP:\n\t\t\tself.gameState = Table.TURN\n\t\t\tself.dealCommunity(1)\n\t\t\tself.curRaise = self.bigBlind\n\t\t\t_, self.turn = self.findNextSuitablePlayer(self.curDealerSeatNo)\n\t\t\tself.roundEndSeat = self.curDealerSeatNo\n\t\telif self.gameState == Table.TURN:\n\t\t\tself.gameState = Table.RIVER\n\t\t\tself.dealCommunity(1)\n\t\t\tself.curRaise = self.bigBlind\n\t\t\t_, self.turn = self.findNextSuitablePlayer(self.curDealerSeatNo)\n\t\t\tself.roundEndSeat = self.curDealerSeatNo\n\t\telif self.gameState == Table.RIVER:\n\t\t\tself.gameState = Table.SHOWDOWN\n\t\t\tself.evaluateWinner()\n\t\t\tself.setUpNextGameRound()\n\t\t\t# Do showdown stuff here (evaluate hands, hand out pot, get ready for next game)", "def play_round(starter, cards):\n r = Round(starter)\n for who, card in cards:\n try:\n r.play(who, card)\n except AssertionError as e:\n print(e)\n return Round.winners", "def step(self, action):\n if self.allow_step_back:\n # First snapshot the current state\n r = copy(self.round)\n r_raised = copy(self.round.raised)\n gp = self.game_pointer\n r_c = self.round_counter\n d_deck = copy(self.dealer.deck)\n p = copy(self.public_card)\n ps = [copy(self.players[i]) for i in range(self.num_players)]\n ps_hand = [copy(self.players[i].hand) for i in range(self.num_players)]\n self.history.append((r, r_raised, gp, r_c, d_deck, p, ps, ps_hand))\n\n # Then we proceed to the next round\n self.game_pointer = self.round.proceed_round(self.players, action)\n\n # If a round is over, we deal more public cards\n if self.round.is_over():\n # For the first round, we deal 1 card as public card. 
Double the raise amount for the second round\n if self.round_counter == 0:\n self.dealer.shuffle()\n self.public_card = self.dealer.deal_card()\n self.round.raise_amount = 2 * self.raise_amount\n\n self.round_counter += 1\n self.round.start_new_round(self.game_pointer)\n\n state = self.get_state(self.game_pointer)\n\n return state, self.game_pointer", "def play_round():\n\n global hand1, hand2\n # get top cards\n card1 = hand1.pop(0)\n card2 = hand2.pop(0)\n\n if card1 > card2: # player 1 won\n # add cards to bottom of hand1\n hand1.append(card1)\n hand1.append(card2)\n\n else: # player 2 won\n # add cards to bottom of hand2\n hand2.append(card2)\n hand2.append(card1)\n\n return", "def play_poker(self) -> None:\r\n self.deal_opening_cards()\r\n for i in range(PokerRules.NUM_OF_ROUNDS):\r\n if self.active_players == 1:\r\n break\r\n self.play_round()\r\n PokerRules.winner(self.card_stack, self.money_stack, self.players)", "def game_setup(self):\n self.deck = Shoe(6)\n self.initial_draw()\n self.pot = ask_for_bet(self.player.money)\n show_table(self.player, self.dealer, self.pot)\n self.surrender_and_insurance()", "def main():\n number_of_players = get_number_of_players()\n number_of_decks = get_number_of_decks()\n game_data = setup_game(number_of_players)\n\n player_list = game_data[0]\n play_shoe = game_data[2]\n play_dealer = game_data[1]\n play_again = True\n\n while play_again:\n replay = play_game(play_shoe, player_list, play_dealer, number_of_decks)\n if replay:\n play_shoe = replay[1]\n else:\n play_again = False\n \n print(\"Thanks for playing\")", "def step(self, actions):\n if len(actions) != len(self._seats):\n raise error.Error('actions must be same shape as number of seats.')\n\n if self._current_player is None:\n raise error.Error('Round cannot be played without 2 or more players.')\n\n if self._round == 4:\n raise error.Error('Rounds already finished, needs to be reset.')\n\n players = [p for p in self._seats if p.playing_hand]\n if len(players) == 1:\n raise error.Error('Round cannot be played with one player.')\n\n self._last_player = self._current_player\n self._last_actions = actions\n\n if not self._current_player.playedthisround and len([p for p in players if not p.isallin]) >= 1:\n if self._current_player.isallin:\n self._current_player = self._next(players, self._current_player)\n return self._get_current_step_returns(False)\n\n move = self._current_player.player_move(\n self._output_state(self._current_player), actions[self._current_player.player_id])\n\n if move[0] == 'call':\n self._player_bet(self._current_player, self._tocall)\n if self._debug:\n print('Player', self._current_player.player_id, move)\n self._current_player = self._next(players, self._current_player)\n elif move[0] == 'check':\n self._player_bet(self._current_player, self._current_player.currentbet)\n if self._debug:\n print('Player', self._current_player.player_id, move)\n self._current_player = self._next(players, self._current_player)\n elif move[0] == 'raise':\n self._player_bet(self._current_player, move[1]+self._current_player.currentbet)\n if self._debug:\n print('Player', self._current_player.player_id, move)\n for p in players:\n if p != self._current_player:\n p.playedthisround = False\n self._current_player = self._next(players, self._current_player)\n elif move[0] == 'fold':\n self._current_player.playing_hand = False\n folded_player = self._current_player\n if self._debug:\n print('Player', self._current_player.player_id, move)\n self._current_player = self._next(players, 
self._current_player)\n players.remove(folded_player)\n self._folded_players.append(folded_player)\n # break if a single player left\n if len(players) == 1:\n self._resolve(players)\n if all([player.playedthisround for player in players]):\n self._resolve(players)\n\n terminal = False\n if all([player.isallin for player in players]):\n while self._round < 4:\n self._deal_next_round()\n self._round += 1\n if self._round == 4 or len(players) == 1:\n terminal = True\n self._resolve_round(players)\n return self._get_current_step_returns(terminal)", "def reset(self):\r\n self.player_hand.reset()\r\n self.dealer_hand.reset()\r\n self.player_hand.add(self.deck.deal())\r\n self.player_hand.add(self.deck.deal())\r\n self.dealer_hand.add(self.deck.deal())\r\n self.dealer_hand.add(self.deck.deal())\r\n # Checking for edge cases where player/dealer (or both) have two aces\r\n if self.player_hand.total == 22 and self.dealer_hand.total == 22:\r\n self.status_color = 'red'\r\n self.game_status = \"TIE Game... Press 'r' to start game\"\r\n self.in_progress = False\r\n elif self.player_hand.total == 22:\r\n self.status_color = 'red'\r\n self.game_status = \"Dealer WINS... Press 'r' to start game\"\r\n self.dealer_wins += 1\r\n self.in_progress = False\r\n elif self.dealer_hand.total == 22:\r\n self.status_color = 'red'\r\n self.game_status = \"Player WINS... Press 'r' to start game\"\r\n self.player_wins += 1\r\n self.in_progress = False\r\n else:\r\n self.game_status = 'In Progress...'\r\n self.status_color = 'green'\r\n self.in_progress = True\r\n self.refresh_canvas()", "def _new_game():\n\n global dealer_card_frame\n global player_card_frame\n global dealer_hand\n global player_hand\n\n dealer_card_frame.destroy()\n dealer_card_frame = tkinter.Frame(card_frame, background=\"green\")\n dealer_card_frame.grid(row=0, column=1, sticky=\"ew\", rowspan=2)\n\n player_card_frame.destroy()\n player_card_frame = tkinter.Frame(card_frame, background=\"green\")\n player_card_frame.grid(row=2, column=1, sticky=\"ew\", rowspan=2)\n\n # reset the result label\n result_text.set(\"\")\n\n # create a list to store dealer's and player's hands.\n dealer_hand = []\n player_hand = []\n\n # deal the first cards\n _initial_deal()", "def start_game(self):\n while self.can_deal:\n self.take_turn()", "def simulate(deck): \n \n # Initialize Banker and Player\n # player_third_card is initialized to -10 to signify that it doesn't exist.\n banker = 0\n player = 0\n player_third_card = -10\n \n# Deal out two hands of two cards\n player = (player + deck.pop()) % 10\n player = (player + deck.pop()) % 10\n \n banker = (banker + deck.pop()) % 10\n banker = (banker + deck.pop()) % 10\n \n# Check for natural\n if player >= 8 and banker >= 8:\n return 'tie'\n elif banker >= 8:\n return 'banker'\n elif player >= 8:\n return 'player'\n \n\n# Run through Player hand\n if player <= 5:\n player_third_card = deck.pop()\n player = (player + player_third_card) % 10\n \n\n# Run through Banker hand\n if player_third_card == -10 and banker < 6:\n banker = (banker + deck.pop()) % 10\n elif banker <= 2:\n banker = (banker + deck.pop()) % 10\n elif banker == 3 and player_third_card != 8:\n banker = (banker + deck.pop()) % 10\n elif banker == 4 and player_third_card >= 2 and player_third_card <=7:\n banker = (banker + deck.pop()) % 10\n elif banker == 5 and player_third_card >= 4 and player_third_card <=7:\n banker = (banker + deck.pop()) % 10\n elif banker == 6 and (player_third_card == 6 or player_third_card == 7):\n banker = (banker + deck.pop()) % 
10\n \n \n# Compare hands and return results\n if player > banker:\n return 'player'\n elif banker > player:\n return 'banker'\n else:\n return 'tie'", "def game(players: List[Player]):\n desk_pile = Deck()\n turn_number = 0\n\n while players[0].has_cards:\n turn_number += 1\n print(f\"Turn {turn_number}\")\n for player in players:\n played_card = player.turn()\n desk_pile.add_card(played_card)\n check_snap(desk_pile, players)\n sleep(DEFAULT_TURN_TIME_SECONDS)\n\n pile_sizes = [(player, player.pile_size) for player in players]\n # sort from maximum player pile size to minimum, first player in the list wins the round\n pile_sizes.sort(key=lambda x: x[1], reverse=True)\n game_winner: Player = pile_sizes[0][0]\n\n # print game results\n print(\"############################\")\n print(f\"Player {game_winner.name} WON!\")\n print(\"############################\")\n print(\n f\"Game results:\\n\"\n + \"\\n\".join(\n f\"{player.name}: {player.pile_size} cards\" for player in list(players)\n )\n )", "def player_turn(user, deck):\n print(f\"\\n======== PLAYER'S TURN ========\\n\\n\"\n f\"Your current hand is \\033[36m{user.total}\\033[0m.\\n\")\n while deck.cards and not bust(user) and user.total != GOAL_TOTAL():\n if player_draw():\n draw_card(user, deck)\n else:\n print(f\"\\nYou've chosen to Stand, this ends the round with your hand of \\033[36m{user.total}\\033[0m.\\n\")\n break\n time.sleep(1)", "def first_round():\n time.sleep(1)\n print(\"\\nWhat would you like to choose?\")\n print(\"\\nROCK, PAPER or SCISSORS?\\n\")\n user_choice = input()\n user_choice = user_choice.lower()\n # Duck randomly choses from options variable\n duck_choice = random.choice(options)\n # Function to be repeated after each round\n\n def calculating_result():\n # This prints user's choice and Duck's choice\n print(f\"\\nOK. You chose {user_choice}, and I chose {duck_choice}.\\n\")\n time.sleep(1)\n print(\"\\nCalculating result...\")\n time.sleep(2)\n print(\"...\")\n time.sleep(2)\n print(\"!\")\n time.sleep(1)\n # They must play again in case of a tie\n if user_choice == duck_choice:\n calculating_result()\n print(\"\\nOh! It's a tie! We have to play it again.\")\n time.sleep(2)\n first_round()\n # They move on to chat_before_game in either of these scenarios:\n elif user_choice == \"rock\":\n if duck_choice == \"scissors\":\n calculating_result()\n print(\"\\nDrat! I guess you won this turn.\")\n time.sleep(2)\n chat_before_game()\n else:\n calculating_result()\n print(\"\\nI win!\")\n time.sleep(2)\n chat_before_game()\n elif user_choice == \"paper\":\n if duck_choice == \"rock\":\n calculating_result()\n print(\"\\nOh no! I lost!\")\n time.sleep(2)\n chat_before_game()\n else:\n calculating_result()\n print(\"\\nI win!\")\n time.sleep(2)\n chat_before_game()\n elif user_choice == \"scissors\":\n if duck_choice == \"rock\":\n calculating_result()\n print(\"\\nI win\")\n time.sleep(2)\n chat_before_game()\n else:\n calculating_result()\n print(\"\\nI lost... :(\")\n time.sleep(2)\n chat_before_game()\n # Replay first_round in case of invalid input\n else:\n print(\"\\nPlease type a valid option!\\n\")\n time.sleep(1)\n first_round()" ]
[ "0.71307325", "0.6843485", "0.6780641", "0.66546196", "0.6519484", "0.65184325", "0.6504133", "0.6440025", "0.64300364", "0.6407379", "0.6402357", "0.6399686", "0.63804615", "0.63598305", "0.631902", "0.6294538", "0.6265746", "0.6260989", "0.62533665", "0.6244803", "0.6237576", "0.620601", "0.61408263", "0.6133322", "0.61276853", "0.61163276", "0.611295", "0.60859203", "0.6072887", "0.60671043" ]
0.6857728
1
Event handler for when a new bid must be requested. Asks the client of the player whose turn it is to display an input field where they can enter and submit their bid. Once all bids have been collected, this function instead switches the game state to "playing" and notifies the player whose turn it is that they may now start playing cards.
def handle_start_bidding_event(methods=["GET", "POST"]):
    if game.active_player_index == len(game.ordered_players):
        game.active_player_index = 0
        active_player = game.ordered_players[game.active_player_index]
        game.state = "playing"
        socketio.emit("your turn", room=active_player.sid)
    else:
        active_player = game.ordered_players[game.active_player_index]
        if not active_player.bid_active:
            socketio.emit("make bid field", room=active_player.sid)
            active_player.bid_active = True
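A toy walk-through of the turn rotation this handler implies, under the assumption that a separate bid-submission handler increments active_player_index after each bid (that increment is simulated inline below); the FakePlayer class and player names are stand-ins.

class FakePlayer:
    def __init__(self, name):
        self.name = name
        self.bid_active = False  # guards against emitting the bid field twice

players = [FakePlayer("alice"), FakePlayer("bob"), FakePlayer("cara")]
active_player_index = 0
state = "bidding"

while state == "bidding":
    if active_player_index == len(players):
        active_player_index = 0
        state = "playing"                       # every bid has been collected
        print(f"{players[0].name}: your turn")  # first bidder opens play
    else:
        p = players[active_player_index]
        if not p.bid_active:
            p.bid_active = True
            print(f"{p.name}: make bid field")  # one bid prompt per player
        active_player_index += 1                # in the app, the bid handler does this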
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bid(command):\n env_banner()\n\n game = Game.read()\n if game is None:\n game = Game.init_game()\n\n command = command.lower().strip()\n\n if command not in ['start', 'pause', 'resume']:\n click.echo('Valid options are start, pause or resume.')\n return\n\n if game.user_count == 0 or game.player_count == 0:\n click.echo('Init the game first by uploading player data.')\n return\n\n if game.player_to_bid == 0:\n click.echo('Bidding is complete')\n return\n\n if command == 'start':\n if game.bid_in_progress:\n click.echo('Bid is already in progress.')\n return\n if game.player_to_bid != game.player_count:\n click.echo('Bidding has already started. Use resume option.')\n return\n invite_bid()\n click.echo('Bidding has been started.')\n return\n\n if command == 'pause':\n if not game.bid_in_progress:\n click.echo('Bid is NOT in progress.')\n return\n game.bid_in_progress = False\n game.update()\n click.echo('Bidding has been paused.')\n return\n\n if command == 'resume':\n if game.bid_in_progress:\n click.echo('Bid is already in progress.')\n return\n if game.player_to_bid == game.player_count:\n click.echo('Bidding has not yet started. Use start option.')\n return\n game.bid_in_progress = True\n game.update()\n click.echo('Bidding has been resumed.')", "def game_bid():\n game = current_user.get_active_game()\n bid = request.form.get('bid', '')\n\n # Validate bid\n if not isinstance(bid, str):\n # Invalid input for bid, but no need to alert user\n return redirect(url_for('game_home'))\n bid = bid.strip()\n if not BID_REGEX.match(bid):\n flash('Your bid must be an integer bid from zero (0) to thirteen (13).')\n return redirect(url_for('game_home'))\n\n if game is None:\n flash('If you want to join a game, click the Join button.')\n return redirect(url_for('home'))\n else:\n hand = game.get_latest_hand()\n # Attempt to place the bid\n try:\n hand.place_bid(current_user.user_id, int(bid), game)\n except UserCanNotBidError:\n flash('Bidding is not available at this time for you.')\n return redirect(url_for('game_home'))\n except BadGameStateError:\n flash('An error occurred while trying to pace your bid. 
Please try again.')\n return redirect(url_for('game_home'))\n else:\n flash(f'Your bid of {bid} has been placed.')\n return redirect(url_for('game_home'))", "def makebid_individualplayer(self, playernumber, max_price_to_pay):\n status = self.checkState(\"transfermarket\")\n if status:\n # Click player\n playerbutton = \"/html/body/main/section/section/div[2]/div/div/section[1]/div/ul/li[\" + str(\n playernumber) + \"]/div\"\n self.driver.find_element_by_xpath(playerbutton)\n self.sleep_approx(1)\n self.driver.find_element_by_xpath(playerbutton).click()\n self.sleep_approx(1)\n\n # If conserve bids is on, bid at (user_buy_ceiling * .7)*max price to pay\n if (self.conserve_bids == 1):\n bid_to_make = round(int(max_price_to_pay*.7), -2)\n bid_price_box = self.driver.find_element_by_css_selector(\n 'input.numericInput.filled')\n bid_price_box.click()\n self.sleep_approx(1)\n bid_price_box.send_keys(Keys.CONTROL, \"a\", Keys.DELETE)\n self.sleep_approx(1)\n\n # Enter bid price of (0.85)*(0.8) * marketprice\n bid_price_box.send_keys(bid_to_make)\n self.sleep_approx(1)\n\n # Click make bid button #TODO read input price and check for max bid error\n self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section[2]/div/div/div[2]/div[2]/button[1]\").click()\n else:\n # Not in conserve mode - Don't enter price - just click make bid\n self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section[2]/div/div/div[2]/div[2]/button[1]\").click()\n\n self.user_bids_made += 1\n self.update_autobidder_logs()\n self.sleep_approx(1)", "def handle_bid(bid, sid, methods=[\"GET\", \"POST\"]):\n game.set_player_bid(bid, sid)\n game.active_player_index += 1\n socketio.emit(\"get next bid\")\n bid_dict = {}\n for player in game.ordered_players:\n print(f\"{player.name}'s bid is {player.bid}\")\n bid_dict[player.name.title()]=player.bid\n socketio.emit(\"show bidTable\", bid_dict)", "def add_bid(self, bid, player_id):\n\t\tglobal_id = self.globalize_id(player_id)\n\t\tassert len(self.bids) < self.data_size and global_id not in self.bids\n\t\tif bid == 0:\n\t\t\tbid = \"N\"\n\t\tself.bids[global_id] = bid", "def bid(self):\n # log.debug(\"{0} is bidding...\".format(self.label))\n for bid in range(5):\n if self.is_legal_bid(bid):\n self.send_bid(bid)\n return", "def accept_bid(self, bid, bidder):\n if bidder != \"Starting Bid\":\n print(f\"{bidder} bidded {bid} in response to {self._highest_bidder}'s bid of {self._highest_bid}!\")\n\n self._highest_bid = bid\n self._highest_bidder = bidder\n\n self._notify_bidders()", "def handle_bids(self, auction_round, bids):\n\n game_state = dict()\n game_state['finished'] = False\n\n if self.__over:\n # Game is already over\n game_state.update(self.__game_state)\n\n else:\n max_bid = dict()\n max_bid['amount'] = 0\n max_bid['bidder'] = None\n max_bid['received_time'] = None\n\n bid_item = self.auction_items[auction_round]\n\n for idx in range(len(bids)):\n player_id = bids[idx]['player']\n\n if self.players[player_id]['valid'] is False: # skip invalid players\n continue\n\n elif bids[idx]['timeout'] is True: # player was timed out during bidding\n self.players[player_id]['valid'] = False\n self.players[player_id]['remain_time'] = -1\n print(('Player {} was timed out on round {}.'\n .format(self.players[player_id]['name'], auction_round)))\n\n continue\n\n start_time = bids[idx]['start_time']\n received_time = bids[idx]['received_time']\n bid_summary = bids[idx]['bid']\n\n # handle timestamp checking\n 
self.players[player_id]['remain_time'] -= (received_time - start_time).total_seconds()\n\n if self.players[player_id]['wealth'] - bid_summary['bid_amount'] >= 0:\n\n bid_amt = bid_summary['bid_amount']\n\n # highest bidder or first bidder (if same bid amount)\n if bid_amt > max_bid['amount'] or (bid_amt == max_bid and max_bid['amount'] > 0 and\n received_time < max_bid['received_time']):\n max_bid['amount'] = bid_amt\n max_bid['bidder'] = player_id\n max_bid['received_time'] = received_time\n\n else:\n # invalid bid from player\n self.players[player_id]['valid'] = False\n print('Player {} made an invalid bid on round {}.'\n .format(self.players[player_id]['name'], auction_round))\n\n max_bidder = max_bid['bidder']\n\n if max_bid['amount'] > 0 and max_bidder is not None:\n\n self.players[max_bidder]['wealth'] -= max_bid['amount']\n self.bid_winners[bid_item][max_bidder] += 1\n\n if self.bid_winners[bid_item][max_bidder] >= self.__required_count:\n game_state['finished'] = True\n game_state['winner'] = max_bidder # wins\n game_state['reason'] = ('Player {} won the game! Congrats!'\n .format(self.players[max_bidder]['name']))\n\n game_state['bid_item'] = self.auction_items[auction_round]\n\n if max_bidder is not None:\n game_state['bid_winner'] = str(self.players[max_bidder]['name'])\n else:\n game_state['bid_winner'] = None\n\n game_state['winning_bid'] = max_bid['amount']\n game_state['auction_round'] = auction_round\n\n remain_player_count = 0\n for idx in range(len(self.players)):\n player_name = self.get_player_name(idx)\n game_state[player_name] = self.players[idx]._dict\n\n if self.players[idx]['valid'] is True:\n remain_player_count += 1\n\n game_state['remain_players'] = remain_player_count\n\n if remain_player_count == 0:\n # game ends\n game_state['finished'] = True\n game_state['reason'] = 'No valid players remaining'\n\n if auction_round == len(self.auction_items) - 1 and game_state['finished'] is False:\n # last round and no winner, then set game to finished\n game_state['finished'] = True\n game_state['reason'] = 'Draw Game. Out of Auction Items.'\n\n\n self.__game_state.update(game_state)\n self.__over = game_state['finished']\n self.print_status(game_state, auction_round)\n\n return game_state", "async def add_bj_game(self, user_id, bid, ctx, mode):\n await ex.conn.execute(\"INSERT INTO blackjack.games (player1, bid1, channelid) VALUES ($1, $2, $3)\", user_id, str(bid), ctx.channel.id)\n game_id = await self.get_game_by_player(user_id)\n if mode != \"bot\":\n await ctx.send(f\"> **There are currently 1/2 members signed up for BlackJack. 
To join the game, please type {await ex.get_server_prefix_by_context(ctx)}joingame {game_id} (bid)** \")", "def main():\n\n # values to receive from a user\n bidders = []\n item_name = \"\"\n num_of_bidders = 0\n starting_price = 0\n\n errors = []\n is_valid = False\n while not is_valid:\n is_valid = True\n errors.clear()\n item_name = input(\"Please type the name of the item: \")\n\n # Starting price\n try:\n starting_price = float(input(\"Please type the starting price: \"))\n except ValueError:\n errors.append(\"[Error] Starting price should be a decimal number.\")\n is_valid = False\n\n # Number of bidders\n try:\n num_of_bidders = int(input(\"Please type the number of bidders: \"))\n except ValueError:\n errors.append(\"[Error] Number of bidders should be an integer.\")\n is_valid = False\n\n # print input errors\n for error in errors:\n print(error)\n\n # Creating bidders\n num = 1\n bidder_name = \"\"\n is_valid = False\n while num <= int(num_of_bidders) or is_valid is False:\n print(f\"Please provide the details of the bidder {num}\")\n name = input(\"name: \")\n try:\n budget = float(input(\"budget: \"))\n except ValueError as e:\n print(\"[Error] Budget should be a decimal number\")\n else:\n is_valid = True\n inc_rate = random.random()\n bidders.append(Bidder(name, float(budget), (1 + inc_rate)))\n num += 1\n\n # Create Auction with the input values and Start the auction\n my_auction = Auction(bidders, item_name, float(starting_price))\n print(f\"\\nStarting Auction!!\\n----------------------\\n\"\n f\"Auctioning {bidder_name} starting at {starting_price}.\")\n my_auction.start_auction()\n\n # Print out the auction results\n my_auction.print_auction_result()", "def makebid_individualplayerWatchlist(self, playernumber, bidprice):\n # /html/body/div[4]/section/div/div/button[1]\n # https://i.gyazo.com/317c7fa554d3ab5e8fd6d48dd6337b41.png\n status = self.checkState(\"watchlist\")\n if status:\n try:\n # page = self.driver.find_elements_by_tag_name(\"h1.title\")\n page = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[1]/h1\").text\n\n self.sleep_approx(1)\n originalbid = bidprice\n\n playerbutton = \"/html/body/main/section/section/div[2]/div/div/div/section[1]/ul/li[\" + str(\n playernumber) + \"]/div\"\n\n self.driver.find_element_by_xpath(playerbutton)\n self.driver.find_element_by_xpath(playerbutton).click()\n self.sleep_approx(0.5)\n\n try:\n # Click make bid\n WebDriverWait(self.driver, 30).until(\n EC.visibility_of_element_located(\n (By.XPATH, '/html/body/main/section/section/div[2]/div/div/section/div/div/div[2]/div[2]/button[1]'))\n )\n self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section/div/div/div[2]/div[2]/button[1]\").click()\n\n self.sleep_approx(1)\n # Check if \"highest bidder\" glitch occurred\n overbid_glitch = self.check_exists_by_xpath(\n \"/html/body/div[4]/section/div/div/button[1]\")\n if overbid_glitch:\n cancel_btn = self.driver.find_element_by_xpath(\n \"/html/body/div[4]/section/div/div/button[1]\")\n cancel_btn.click()\n self.sleep_approx(1)\n except:\n log_event(self.queue, \"Bid method failed\")\n\n if (page == \"TRANSFER TARGETS\"):\n # self.sleep_approx(1)\n curbidprice_afterbidding = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section/div/div/div[2]/div[1]/div/div[2]/span[2]\").text\n if \",\" in curbidprice_afterbidding:\n curbidprice_afterbidding = curbidprice_afterbidding.replace(\n \",\", \"\")\n curbidprice_afterbidding = 
int(curbidprice_afterbidding)\n\n diff = originalbid - curbidprice_afterbidding\n\n if (diff == 0):\n return \"Failure\"\n else:\n self.user_bids_made += 1\n self.update_autobidder_logs()\n return \"Success\"\n\n self.sleep_approx(1)\n except:\n log_event(self.queue, \"makebid_individualplayerWatchlist error\")", "def step_bid(self, action_bid):\n if self.done_bidding:\n raise Exception(\"No more actions can be taken\")\n\n # action_bid must be in [0; AUCTION_SPACE_SIZE - 1]\n if action_bid < 0 or action_bid > AUCTION_SPACE_SIZE - 1:\n raise Exception(\"illegal action\")\n\n # what happens when we get a pass\n if action_bid == PASS_IDX:\n\n # we are not allowed to make a double for now\n self.elim_sig_bid[DOUBLE_IDX] = 0\n\n self.history_bid[action_bid] = 1\n\n if self.max_bid == -1:\n self.auction_history[self.n_pass] = 1\n elif self.n_pass < 2:\n self.auction_history[\n 3 + 8*self.max_bid + 3*(self.n_double + self.n_redouble) + self.n_pass + 1] = 1\n\n # incrementing the current number of passes\n self.n_pass += 1\n\n # what happens when we get a contract bid\n elif action_bid < PASS_IDX:\n\n if action_bid <= self.max_bid:\n raise Exception(\"illegal bidding.\")\n\n # resetting n_pass, n_double and n_redouble\n self.n_pass = 0\n self.n_double = 0\n self.n_redouble = 0\n self.max_bid = action_bid\n\n self.history_bid[action_bid] = 1\n self.history_bid[-1] = 0\n self.auction_history[3 + 8*self.max_bid] = 1\n\n # this action and all the actions below can no longer be performed\n self.elim_sig_bid[:(1 + self.max_bid)] = 0\n\n # doubles are now permitted, redoubles are not permitted\n self.elim_sig_bid[DOUBLE_IDX] = 1\n self.elim_sig_bid[REDOUBLE_IDX] = 0\n\n strain = convert_action2strain(action_bid)\n group = Seat2Group[self.turn_bid]\n if self.strain_declarer[group].get(strain, '') == '':\n self.strain_declarer[group][strain] = self.turn_bid # which one\n self.group_declarer = group # which group\n\n # what happens when we get a double\n elif action_bid == DOUBLE_IDX:\n # doubles are not permitted when\n # no contract bids have been made OR\n # a double bid has already been made OR\n # a redouble bid has been made\n if (self.max_bid == -1) or (self.n_double == 1) or (self.n_redouble == 1):\n raise Exception(\"double is not currently allowed\")\n\n self.n_double = 1\n self.elim_sig_bid[DOUBLE_IDX] = 0\n self.elim_sig_bid[REDOUBLE_IDX] = 1\n self.auction_history[3 + 8*self.max_bid + 3] = 1\n\n # what happens when we get a redouble\n elif action_bid == REDOUBLE_IDX:\n # redoubles are not permitted when\n # no contract bids have been made OR\n # a double bid has not been made OR\n # a redouble bid has already been made\n if (self.max_bid == -1) or (self.n_double == 0) or (self.n_redouble == 1):\n raise Exception(\"redouble is not currently allowed\")\n\n self.n_redouble = 1\n self.elim_sig_bid[DOUBLE_IDX] = 0\n self.elim_sig_bid[REDOUBLE_IDX] = 0\n self.auction_history[3 + 8*self.max_bid + 6] = 1\n\n # updating the index of the next bidding player\n self.turn_bid = (self.turn_bid + 1) % len(Seat)\n\n # move to the participant\n # NB: this code is only useful if not all players are bidding (i.e. 
self.bidding_seats\n # does not contain everybody)\n while True:\n if self.turn_bid not in self.bidding_seats:\n self.turn_bid = (self.turn_bid + 1) % len(Seat)\n self.n_pass += 1\n else:\n break\n\n hand = self.one_hot_deal[self.turn_bid]\n reward = 0\n\n # state is the next bidding player's state\n if (self.n_pass >= 3 and self.max_bid < 0) or self.max_bid == 34:\n\n if self.max_bid < 0:\n raise Exception(\"illegal bidding\")\n # extract the declarer, strain , level\n strain = convert_action2strain(self.max_bid)\n level = convert_action2level(self.max_bid)\n # single thread\n # reward = np.mean(Deal.score_st(dealer=self.deal, level=level, strain=strain, declarer=declarer, tries=self.nmc, mode=self.score_mode))\n # parallel threads\n\n # np.mean is moved to score\n declarer = self.strain_declarer[self.group_declarer][strain] # thise group's first declarer\n\n # TODO[ス: game rewards / scores will no longer be calculated during bidding - the next\n # bit of code needs to be removed\n reward = Deal.score(dealer=self.deal,\n level=level,\n strain=strain,\n declarer=declarer,\n tries=self.nmc,\n mode=self.score_mode)\n self.done_bidding = True\n\n # storing the contract\n self.contract.from_bid(bid=self.max_bid,\n double=(self.n_double > 0),\n redouble=(self.n_redouble > 0))\n\n # setting the index of the first player\n self.turn_play = (self.turn_bid + 1) % len(Seat)\n\n # since bidding is now done, we need to set the initial value of self.score_play\n self._update_score()\n\n # TODO[ス: remove the next lines - this method should no longer return anything\n state = (hand, self.history_bid)\n info = {\"turn\": Seat[self.turn_bid], \"max_bid\": self.max_bid}\n if self.debug:\n log_state(state, reward, self.done_bidding, info)\n\n return state, reward, self.done_bidding, info", "def start_new_bids(self):\n for bidder in self._bidders:\n if bidder != self._highest_current_bidder:\n bid_price = bidder(self)\n if bid_price > self.current_bid:\n self.update_bid(bid_price, bidder)", "async def slot(self, ctx: commands.Context, bid: int):\r\n author = ctx.author\r\n guild = ctx.guild\r\n channel = ctx.channel\r\n if await bank.is_global():\r\n valid_bid = await self.config.SLOT_MIN() <= bid <= await self.config.SLOT_MAX()\r\n slot_time = await self.config.SLOT_TIME()\r\n last_slot = await self.config.user(author).last_slot()\r\n else:\r\n valid_bid = (\r\n await self.config.guild(guild).SLOT_MIN()\r\n <= bid\r\n <= await self.config.guild(guild).SLOT_MAX()\r\n )\r\n slot_time = await self.config.guild(guild).SLOT_TIME()\r\n last_slot = await self.config.member(author).last_slot()\r\n now = calendar.timegm(ctx.message.created_at.utctimetuple())\r\n\r\n if (now - last_slot) < slot_time:\r\n await ctx.send(_(\"You're on cooldown, try again in a bit.\"))\r\n return\r\n if not valid_bid:\r\n await ctx.send(_(\"That's an invalid bid amount, sorry :/\"))\r\n return\r\n if not await bank.can_spend(author, bid):\r\n await ctx.send(_(\"You ain't got enough money, friend.\"))\r\n return\r\n if await bank.is_global():\r\n await self.config.user(author).last_slot.set(now)\r\n else:\r\n await self.config.member(author).last_slot.set(now)\r\n await self.slot_machine(author, channel, bid)", "def update_bid(self, bid_price, bidder):\n bidder_info = \"Starting Bid\"\n if self.current_bidder is not None:\n bidder_info = self.current_bidder.name\n print(f\"{bidder.name} bidded {bid_price} in response to \"\n f\"{bidder_info}'s bid of {self.current_bid}!\")\n self._highest_current_bid = bid_price\n 
self._highest_current_bidder = bidder\n self.start_new_bids()", "def execute_bid(n: int) -> None:\n sun_to_bid = game_state.get_current_player_usable_sun()[n]\n game_state.add_auction_sun(game_state.get_current_player(), sun_to_bid)\n\n if game_state.get_current_player() == game_state.get_auction_start_player():\n handle_auction_end()\n else:\n game_state.advance_current_player()", "async def auction(self, msg):\n\t\tminRaise = await self.cog.config.guild(self.ctx.guild).minRaise()\n\t\tmsg += (\n\t\t\tf'{TILENAME[self.tile[self.p]]} is now up for auction!\\n'\n\t\t\t'Anyone can bid by typing the value of their bid. '\n\t\t\tf'Bids must increase the price by ${minRaise}. '\n\t\t\t'After 15 seconds with no bids, the highest bid will win.'\n\t\t)\n\t\tawait self.ctx.send(file=discord.File(self.bprint()))\n\t\tawait self.ctx.send(msg)\n\t\thighest = None\n\t\thighp = None\n\t\tdef auctioncheck(m):\n\t\t\ttry:\n\t\t\t\tif highest is None:\n\t\t\t\t\treturn (\n\t\t\t\t\t\tm.author.id in self.uid\n\t\t\t\t\t\tand self.bal[self.uid.index(m.author.id)] >= int(m.content)\n\t\t\t\t\t\tand self.isalive[self.uid.index(m.author.id)]\n\t\t\t\t\t)\n\t\t\t\treturn (\n\t\t\t\t\tm.author.id in self.uid\n\t\t\t\t\tand self.bal[self.uid.index(m.author.id)] >= int(m.content)\n\t\t\t\t\tand self.isalive[self.uid.index(m.author.id)]\n\t\t\t\t\tand (highest + minRaise) <= int(m.content)\n\t\t\t\t)\n\t\t\texcept Exception:\n\t\t\t\treturn False\n\t\twhile True:\n\t\t\ttry:\n\t\t\t\tbid_msg = await self.bot.wait_for(\n\t\t\t\t\t'message',\n\t\t\t\t\tcheck=auctioncheck,\n\t\t\t\t\ttimeout=15\n\t\t\t\t)\n\t\t\texcept asyncio.TimeoutError:\n\t\t\t\tbreak\n\t\t\thighest = int(bid_msg.content)\n\t\t\thighp = self.uid.index(bid_msg.author.id)\n\t\t\tawait self.ctx.send(\n\t\t\t\tf'{bid_msg.author.display_name} has the highest bid with ${highest}.'\n\t\t\t)\n\t\tif highp is None:\n\t\t\tmsg = 'Nobody bid...\\n'\n\t\telse:\n\t\t\tmemwin = await self.get_member(self.uid[highp])\n\t\t\tself.bal[highp] -= highest\n\t\t\tself.ownedby[self.tile[self.p]] = highp\n\t\t\tmsg = (\n\t\t\t\tf'{memwin.display_name} wins with a bid of ${highest}!\\n'\n\t\t\t\tf'{memwin.display_name} now owns {TILENAME[self.tile[self.p]]} '\n\t\t\t\tf'and has ${self.bal[highp]}.\\n'\n\t\t\t)\n\t\treturn msg", "def hit(self):\n global in_play, deck, player_hand, dealer_hand, outcome, lost\n \n if in_play:\n player_hand.add_card(deck.deal_card())\n \n if player_hand.get_value() > 21:\n self.outcome.set(\"You have busted! Dealer wins. 
New deal?\")\n self.lost += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n in_play = False\n draw(canvas)\n\n print \"\\nPlayer hand: \", player_hand\n print \"Dealer hand: \", dealer_hand", "def doBuyIn(self):\n self.protocol.sendPacket(networkpackets.PacketPokerBuyIn(amount=self.max_buy_in, **self._serial_and_game_id))\n self.protocol.sendPacket(networkpackets.PacketPokerAutoBlindAnte(**self._serial_and_game_id))", "def search_market_gather_players(self, name, max_price_to_pay, bids_allowed, bids_made, futbindata, min_bid, max_bid):\n if (int(max_bid) < 400):\n max_bid = 400\n # Ensure bid box is visible, then clear previous params\n self.sleep_approx(2)\n input = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[6]/div[2]/input\")\n self.driver.execute_script(\"arguments[0].scrollIntoView(true);\", input)\n WebDriverWait(self.driver, 20).until(EC.element_to_be_clickable(\n (By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[6]/div[2]/input\"))).click()\n self.sleep_approx(1)\n input.send_keys(0)\n self.sleep_approx(1)\n\n clear = \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[1]/button\"\n maxbidbox = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[3]/div[2]/input\")\n minbidbox = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[2]/div[2]/input\")\n\n # CLEAR RESULTS BOX\n self.driver.find_element(By.XPATH, clear).click()\n self.sleep_approx(1)\n\n # insert max_bid here\n maxbidbox.click()\n self.sleep_approx(1)\n maxbidbox.send_keys(max_bid)\n self.sleep_approx(1)\n\n # insert min_bid here\n minbidbox.click()\n self.sleep_approx(1)\n minbidbox.send_keys(min_bid)\n self.sleep_approx(1)\n\n # search the pages, and bid on players under bid price\n self.clickSearch()\n sleep(3)\n\n keepgoing = True\n while keepgoing:\n # Each page, get user config\n self.getUserConfig()\n status = self.checkState(\"transfermarket\")\n if status:\n max_price_to_pay = int(max_price_to_pay)\n self.sleep_approx(4)\n\n # TODO understand why some eligible players fail to receive bids...\n players_on_page = self.getAllPlayerInfo()\n for card in players_on_page:\n playernumber = card[0]\n bidStatus = card[1]\n curbid = card[5]\n timeremainingseconds = card[7]\n timeremainingmins = timeremainingseconds/60\n playerid = card[8]\n buynow = card[6]\n\n if bids_made < bids_allowed-1:\n if \"highest-bid\" not in bidStatus:\n stopbidTime = int(self.bidexpiration_ceiling)\n if timeremainingmins < stopbidTime:\n if timeremainingmins >= 2:\n # Check if bid to make falls under ceiling\n if (curbid < 1000):\n curbidprice_afterbidding = curbid+50\n else:\n curbidprice_afterbidding = curbid+100\n if curbidprice_afterbidding < max_price_to_pay:\n if ((curbid*2)<self.user_num_coins):\n self.makebid_individualplayer(\n playernumber, max_price_to_pay)\n self.sleep_approx(2)\n bids_made += 1\n log_event(self.queue, \"Bids made on \" + str(name) +\n \": \" + str(bids_made) + \"/\" + str(bids_allowed))\n else:\n log_event(self.queue, \"not enough coins\")\n else:\n keepgoing = False\n else:\n keepgoing = False\n\n self.sleep_approx(3)\n log_event(self.queue, \"Going to next page\")\n try:\n self.driver.find_element_by_xpath(\n '/html/body/main/section/section/div[2]/div/div/section[1]/div/div/button[2]')\n self.driver.find_element_by_xpath(\n 
'/html/body/main/section/section/div[2]/div/div/section[1]/div/div/button[2]').click()\n self.user_requests_made += 1\n except:\n log_event(self.queue, \"No next page found, returning\")\n keepgoing = False\n self.clickBack()\n self.sleep_approx(1)\n return bids_made", "def _new_game():\n\n global dealer_card_frame\n global player_card_frame\n global dealer_hand\n global player_hand\n\n dealer_card_frame.destroy()\n dealer_card_frame = tkinter.Frame(card_frame, background=\"green\")\n dealer_card_frame.grid(row=0, column=1, sticky=\"ew\", rowspan=2)\n\n player_card_frame.destroy()\n player_card_frame = tkinter.Frame(card_frame, background=\"green\")\n player_card_frame.grid(row=2, column=1, sticky=\"ew\", rowspan=2)\n\n # reset the result label\n result_text.set(\"\")\n\n # create a list to store dealer's and player's hands.\n dealer_hand = []\n player_hand = []\n\n # deal the first cards\n _initial_deal()", "def event_player_blackjack(self) -> None:\n win_amount = self.user.bet + 1.5\n print(\"Congratulations, you win:\", win_amount)\n self.user.win_balance(win_amount)", "def __call__(self, auctioneer):\n possible_bid = self.bid_increase_perc * auctioneer.get_highest_bid()\n if possible_bid < self.budget and random.random() <= self.bid_probability:\n self.highest_bid = possible_bid\n auctioneer.accept_bid(possible_bid, self)", "def stay(self):\n global dealer_hand, deck, outcome, in_play\n \n if in_play:\n while dealer_hand.get_value() < 17:\n dealer_hand.add_card(deck.deal_card())\n \n if dealer_hand.get_value() > 21:\n # print \"Dealer is busted.\\nPlayer wins.\"\n self.outcome.set(\"Dealer is busted. Player wins. New deal?\")\n self.won += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n elif player_hand.get_value() > 21:\n # print \"Player is busted.\\nDealer wins.\"\n self.outcome.set(\"Player is busted. Dealer wins. New deal?\")\n self.lost += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n elif dealer_hand.get_value() >= player_hand.get_value():\n # print \"Dealer wins.\"\n self.outcome.set(\"Dealer wins. New deal?\")\n self.lost += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n else:\n # print \"Player wins.\"\n self.outcome.set(\"Player wins! New deal?\")\n self.won += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n in_play = False\n draw(canvas)", "def player_hit(self):\r\n if self.in_progress:\r\n self.player_hand.add(self.deck.deal())\r\n if self.player_hand.total > 21:\r\n self.status_color = 'red'\r\n self.game_status = \"Dealer WINS... Press 'r' to start game\"\r\n self.dealer_wins += 1\r\n self.in_progress = False\r\n self.refresh_canvas()", "def jack_wish(self):\n channel_layer = get_channel_layer()\n async_to_sync(channel_layer.group_send)(\n self.lobby_id,\n {\"type\": \"jack.wish\", \"player\": self.current_player.to_json()},\n )\n self.wait_for_card_wish = True", "def deal():\n \n # Update messages, score and the player's \"Hand\" status\n # as global variables.\n global outcome, outcome_plus, outcome_plus_plus, in_play, score, action \n outcome = outcome_plus = outcome_plus_plus = \"\"\n action = HIT_OR_STAND\n \n # If the \"Deal\" button is clicked during the middle of \n # a round the program reports that the \"Player\" lost \n # the round and updates the \"score\" appropriately.\n if in_play:\n outcome = PLAYER_LOSES \n outcome_plus = EARLY_DEAL_1\n outcome_plus_plus = EARLY_DEAL_2\n score -= SCORE_POINTS\n else:\n in_play = True\n \n # Create and shuffle the \"Deck\" (stored as a global \n # variable). 
Avoids the situation where the \"Deck\" \n # becomes empty during play.\n global deck_of_cards\n deck_of_cards = Deck()\n deck_of_cards.shuffle()\n \n # Create new \"Player\" and \"Dealer\" Hands (stored as \n # global variables). \n global player, dealer\n player = Hand()\n dealer = Hand()\n \n # Add two \"Cards\" to each \"Hand\". To transfer a \"Card\" \n # from the \"Deck\" to a \"Hand\", the \"deal_card()\" \n # method of the \"Deck\" class and the \"add_card()\" \n # method of \"Hand\" class are being used in \n # combination. \n player.add_card(deck_of_cards.deal_card())\n dealer.add_card(deck_of_cards.deal_card())\n player.add_card(deck_of_cards.deal_card())\n dealer.add_card(deck_of_cards.deal_card())\n \n # Print resulting \"Hands\" to the console with an \n # appropriate message indicating which \"Hand\" is which.\n # Remove comments if in DEBUG mode.\n #print \"Player: \" + str(player)\n #print \"Dealer: \" + str(dealer) \n \n return None", "async def draw_start(self):\n for i, player in enumerate(self.players):\n def bet_check(m):\n \"\"\"If the value can be converted to a float and is within the bounds return true, else false\"\"\"\n try:\n value = float(m.content)\n if 0 <= value <= player.coins:\n return True\n else:\n return False\n except:\n return False\n\n if not player.out:\n await self.ctx.send(f\"{self.users[i].name}, How much would you like to bet? You have {player.coins} in the bank: \")\n try:\n bet = await self.client.wait_for('message', timeout=120.0, check=bet_check)\n bet = float(bet.content)\n if bet == 0:\n player.out = True\n self.total_players_out += 1\n else:\n player.debit(bet)\n player.bet = bet\n except:\n await self.ctx.send(\"Timed Out!\")\n player.out = True\n self.total_players_out += 1\n # shuffle cards and dealer draws one, send the dealers hand to the channel, loop through all players that aren't out and show their hand\n # if all players arent out\n if self.total_players_out < len(self.players):\n self.deck.shuffle()\n self.dealer.clear()\n self.deck.move_cards(self.dealer, 1)\n\n embed_dealer = discord.Embed(title='Dealer', color=0x00ff00)\n embed_dealer.add_field(\n name=\"Hand\", value=self.dealer, inline=False)\n self.dealer_msg = await self.ctx.send(embed=embed_dealer)\n\n embed_players = discord.Embed(title='Players', color=0x0000fd)\n for i, player in enumerate(self.players):\n if not player.out:\n player.clear()\n self.deck.move_cards(player, 2)\n # name=their discord name and value = their hand\n embed_players.add_field(\n name=self.users[i].name, value=player, inline=True)\n if player.get_value() == 21:\n player.has_bj = True\n self.players_msg = await self.ctx.send(embed=embed_players)", "def run(self):\n print \"Welcome to the BlackJack game ......\" # print help function if needed\n deckObj = Deck()\n deckObj.shuffle()\n while(not self.checkGameComplete()):\n self.displayGame()\n card = deckObj.deal()\n # ask user for move\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n while(not isPass):\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n # update table\n self.updateTableAndDiscardLs(position,card)\n ### Score Game\n self.displayGame()\n score = self.scoreGame()\n print 'Congratulations! Your final score is:'\n print score\n print 'Game is done... 
Thank you!'", "async def blackjack(self, ctx, arg: int): \n db = sqlite3.connect('main.sqlite')\n cursor = db.cursor()\n cursor.execute(f'SELECT user_id, jacks FROM main WHERE user_id = {ctx.author.id}')\n result = cursor.fetchone()\n embed = discord.Embed(color=0x228b22, title=\"Blackjack\")\n if result is not None:\n if arg > result[1]:\n embed.add_field(name=\"Error\", value=f\"You can't bid more chips than you have!\", inline=False)\n embed.set_footer(text=\"You can check your balance using the *profile* command\")\n else:\n player, house = [],[]\n deck.deal(player,2)\n deck.deal(house, 2)\n embed.add_field(name=\"Your Hand:\", value=f\"```{deck.display_hand(player)}``` \\n Value: {deck.hand_value(player)}\")\n embed.add_field(name=\"Dealer's Hand:\", value=f\"```['{deck.display_hand(house)[1]}', '?'] ``` \\n Value: ?\")\n embed.set_footer(text=\"Type `hit` or `stay` to take your turn!\")\n await ctx.send(content=None, embed=embed)\n if deck.hand_value(house) != 21 and deck.hand_value(player) != 21:\n msg = await self.client.wait_for('message', check=lambda message: message.author == ctx.author)\n while msg.content.startswith(\"hit\") or msg.content.startswith(\"Hit\"):\n embed.remove_field(0)\n deck.deal(player)\n embed.insert_field_at(0, name=\"Your Hand:\", value=f\"```{deck.display_hand(player)}``` \\n Value: {deck.hand_value(player)}\")\n await ctx.send(content=None, embed=embed)\n if deck.hand_value(player) > 21:\n break\n msg = await self.client.wait_for('message', check=lambda message: message.author == ctx.author)\n embed.remove_field(1)\n embed.set_footer(text=\"\")\n deck.house_turn(house)\n embed.add_field(name=\"Dealer's Hand:\", value=f\"```{deck.display_hand(house)}``` \\n Value: {deck.hand_value(house)}\")\n if deck.hand_value(player) == 21:\n outcome = \"Blackjack!\"\n bal = \"won\"\n chips = int(result[1] + arg*1.5)\n elif deck.hand_value(player) > 21:\n outcome = \"Player bust, you lose\"\n bal = \"lost\"\n chips = int(result[1] - arg)\n elif deck.hand_value(house) > 21:\n outcome = \"Dealer bust, you win!\"\n bal = \"won\"\n chips = int(result[1] + arg)\n elif deck.hand_value(player) > deck.hand_value(house):\n outcome = \"Win!\"\n bal = \"won\"\n chips = int(result[1] + arg)\n elif deck.hand_value(player) == deck.hand_value(house):\n outcome = \"Push, chips back\"\n bal = \"gotten back your\"\n chips = int(result[1])\n else:\n outcome = \"Loss\"\n bal = \"lost\"\n chips = int(result[1] - arg)\n sql = (\"UPDATE main SET jacks = ? WHERE user_id = ?\")\n val = (chips, ctx.author.id)\n cursor.execute(sql, val)\n db.commit()\n cursor.close()\n db.close()\n if chips == int(result[1]):\n chips += arg\n embed.add_field(name=outcome, value=f\"You have {bal} <:chip:657253017262751767> **{abs(int(result[1] - chips))}** chips\", inline=False)\n await ctx.send(content=None, embed=embed)\n else:\n await ctx.send(\"You must register before you can play blackjack!\")" ]
[ "0.6610059", "0.6388831", "0.63772714", "0.63008505", "0.6248338", "0.61771005", "0.6061648", "0.60147685", "0.5865799", "0.5846793", "0.5810716", "0.5787701", "0.5722459", "0.560185", "0.5557072", "0.5485424", "0.54785204", "0.5419658", "0.5318205", "0.5303232", "0.5240301", "0.5233226", "0.5183848", "0.5148347", "0.50895983", "0.5071559", "0.5056755", "0.50566494", "0.49846265", "0.4975826" ]
0.69479686
0
Event handler for when a player submits their bid. Takes the user input and assigns it to player.bid, then emits a quick event back to the client so the client can relay the "request bid" prompt to the next client in turn order.
def handle_bid(bid, sid, methods=["GET", "POST"]):
    game.set_player_bid(bid, sid)
    game.active_player_index += 1
    socketio.emit("get next bid")
    bid_dict = {}
    for player in game.ordered_players:
        print(f"{player.name}'s bid is {player.bid}")
        bid_dict[player.name.title()] = player.bid
    socketio.emit("show bidTable", bid_dict)
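For readers tracing the handler above: a minimal sketch of the Game/Player state it leans on — set_player_bid matching the submitting socket to a player and recording the bid — is given below. The classes and fields are assumptions reconstructed from the calls in the handler, not part of this record's source.

from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Player:
    name: str
    sid: str                   # socket.io session id
    bid: Optional[int] = None  # bid recorded by handle_bid
    hand: List = field(default_factory=list)

class Game:
    def __init__(self, players):
        self.ordered_players = players  # fixed turn order for the hand
        self.active_player_index = 0    # whose bid we are waiting on

    def set_player_bid(self, bid, sid):
        # Match the submitting socket to its player and record the bid.
        for player in self.ordered_players:
            if player.sid == sid:
                player.bid = int(bid)
                break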
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_start_bidding_event(methods=[\"GET\", \"POST\"]):\n if game.active_player_index == len(game.ordered_players):\n game.active_player_index = 0\n active_player = game.ordered_players[game.active_player_index]\n game.state = \"playing\"\n socketio.emit(\"your turn\", room=active_player.sid)\n else:\n active_player = game.ordered_players[game.active_player_index]\n if not active_player.bid_active:\n socketio.emit(\"make bid field\", room=active_player.sid)\n active_player.bid_active = True", "def game_bid():\n game = current_user.get_active_game()\n bid = request.form.get('bid', '')\n\n # Validate bid\n if not isinstance(bid, str):\n # Invalid input for bid, but no need to alert user\n return redirect(url_for('game_home'))\n bid = bid.strip()\n if not BID_REGEX.match(bid):\n flash('Your bid must be an integer bid from zero (0) to thirteen (13).')\n return redirect(url_for('game_home'))\n\n if game is None:\n flash('If you want to join a game, click the Join button.')\n return redirect(url_for('home'))\n else:\n hand = game.get_latest_hand()\n # Attempt to place the bid\n try:\n hand.place_bid(current_user.user_id, int(bid), game)\n except UserCanNotBidError:\n flash('Bidding is not available at this time for you.')\n return redirect(url_for('game_home'))\n except BadGameStateError:\n flash('An error occurred while trying to pace your bid. Please try again.')\n return redirect(url_for('game_home'))\n else:\n flash(f'Your bid of {bid} has been placed.')\n return redirect(url_for('game_home'))", "def accept_bid(self, bid, bidder):\n if bidder != \"Starting Bid\":\n print(f\"{bidder} bidded {bid} in response to {self._highest_bidder}'s bid of {self._highest_bid}!\")\n\n self._highest_bid = bid\n self._highest_bidder = bidder\n\n self._notify_bidders()", "def bid(command):\n env_banner()\n\n game = Game.read()\n if game is None:\n game = Game.init_game()\n\n command = command.lower().strip()\n\n if command not in ['start', 'pause', 'resume']:\n click.echo('Valid options are start, pause or resume.')\n return\n\n if game.user_count == 0 or game.player_count == 0:\n click.echo('Init the game first by uploading player data.')\n return\n\n if game.player_to_bid == 0:\n click.echo('Bidding is complete')\n return\n\n if command == 'start':\n if game.bid_in_progress:\n click.echo('Bid is already in progress.')\n return\n if game.player_to_bid != game.player_count:\n click.echo('Bidding has already started. Use resume option.')\n return\n invite_bid()\n click.echo('Bidding has been started.')\n return\n\n if command == 'pause':\n if not game.bid_in_progress:\n click.echo('Bid is NOT in progress.')\n return\n game.bid_in_progress = False\n game.update()\n click.echo('Bidding has been paused.')\n return\n\n if command == 'resume':\n if game.bid_in_progress:\n click.echo('Bid is already in progress.')\n return\n if game.player_to_bid == game.player_count:\n click.echo('Bidding has not yet started. 
Use start option.')\n return\n game.bid_in_progress = True\n game.update()\n click.echo('Bidding has been resumed.')", "def add_bid(self, bid, player_id):\n\t\tglobal_id = self.globalize_id(player_id)\n\t\tassert len(self.bids) < self.data_size and global_id not in self.bids\n\t\tif bid == 0:\n\t\t\tbid = \"N\"\n\t\tself.bids[global_id] = bid", "def execute_bid(n: int) -> None:\n sun_to_bid = game_state.get_current_player_usable_sun()[n]\n game_state.add_auction_sun(game_state.get_current_player(), sun_to_bid)\n\n if game_state.get_current_player() == game_state.get_auction_start_player():\n handle_auction_end()\n else:\n game_state.advance_current_player()", "def makebid_individualplayer(self, playernumber, max_price_to_pay):\n status = self.checkState(\"transfermarket\")\n if status:\n # Click player\n playerbutton = \"/html/body/main/section/section/div[2]/div/div/section[1]/div/ul/li[\" + str(\n playernumber) + \"]/div\"\n self.driver.find_element_by_xpath(playerbutton)\n self.sleep_approx(1)\n self.driver.find_element_by_xpath(playerbutton).click()\n self.sleep_approx(1)\n\n # If conserve bids is on, bid at (user_buy_ceiling * .7)*max price to pay\n if (self.conserve_bids == 1):\n bid_to_make = round(int(max_price_to_pay*.7), -2)\n bid_price_box = self.driver.find_element_by_css_selector(\n 'input.numericInput.filled')\n bid_price_box.click()\n self.sleep_approx(1)\n bid_price_box.send_keys(Keys.CONTROL, \"a\", Keys.DELETE)\n self.sleep_approx(1)\n\n # Enter bid price of (0.85)*(0.8) * marketprice\n bid_price_box.send_keys(bid_to_make)\n self.sleep_approx(1)\n\n # Click make bid button #TODO read input price and check for max bid error\n self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section[2]/div/div/div[2]/div[2]/button[1]\").click()\n else:\n # Not in conserve mode - Don't enter price - just click make bid\n self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div/section[2]/div/div/div[2]/div[2]/button[1]\").click()\n\n self.user_bids_made += 1\n self.update_autobidder_logs()\n self.sleep_approx(1)", "def send_bid(self, publisher, task_id, bid_value, distance_to_dispenser, closest_dispenser_position_y, closest_dispenser_position_x):\n\n msg = auction_communication()\n msg.message_id = self.generateID()\n msg.agent_id = self._agent_name\n msg.task_id = task_id\n msg.bid_value = bid_value\n msg.distance_to_dispenser = distance_to_dispenser\n msg.closest_dispenser_position_x = closest_dispenser_position_x\n msg.closest_dispenser_position_y = closest_dispenser_position_y\n publisher.publish(msg)", "def update_bid(self, bid_price, bidder):\n bidder_info = \"Starting Bid\"\n if self.current_bidder is not None:\n bidder_info = self.current_bidder.name\n print(f\"{bidder.name} bidded {bid_price} in response to \"\n f\"{bidder_info}'s bid of {self.current_bid}!\")\n self._highest_current_bid = bid_price\n self._highest_current_bidder = bidder\n self.start_new_bids()", "def __call__(self, auctioneer):\n possible_bid = self.bid_increase_perc * auctioneer.get_highest_bid()\n if possible_bid < self.budget and random.random() <= self.bid_probability:\n self.highest_bid = possible_bid\n auctioneer.accept_bid(possible_bid, self)", "def place_bid():\n if not request.get_json():\n abort(400)\n data = request.get_json(force=True)\n\n if not data.get('userID'):\n abort(400)\n if not data.get('amount'):\n abort(400)\n if not data.get('petID'):\n abort(400)\n\n #new_uuid = str(uuid.uuid4())\n mod.place_a_bid(data['petID'], 
data['amount'], data['userID'])\n # HTTP 200 Created\n # return jsonify({\"id\": new_uuid}), 200\n resp = {\"status\": \"OK\"}\n return jsonify(resp)", "async def bet(message, user: ParamType.MIXER_USER, amount):\n\n username = user.username.lower()\n username_sender = message.username.lower()\n\n mixcord_user = await database.get_user(message.user_id)\n\n # handle if somebody is trying to accept or deny\n if amount == \"accept\" or amount == \"deny\":\n\n # get the pending bet\n bet = pending_bets.get(username)\n if bet is None or bet[\"username\"] != username_sender:\n return \"failed to find the bet you're responding to.\"\n\n # delete the pending bet, because we're handling it\n del pending_bets[username]\n\n # if the user wants to deny the bet, don't do anything\n if amount == \"deny\":\n return \"you have denied the pending bet from @{}.\".format(username)\n\n # if the user wants to accept the bet, continue\n if amount == \"accept\":\n\n # make sure they have enough money to accept\n if bet[\"amount\"] > mixcord_user[\"balance\"]:\n return \"you have insufficient funds to accept this bet.\"\n\n # make sure the issuer of the challenge still has enough money\n competitor_mixcord_user = await database.get_user(user.id)\n if bet[\"amount\"] > competitor_mixcord_user[\"balance\"]:\n return \"@{} no longer has sufficient funding to run this bet.\".format(username)\n\n # determine winner/loser\n pick = random.randint(0, 1) == 1\n winner_id = user.id if pick else message.user_id\n loser_id = message.user_id if pick else user.id\n winner_username = username if pick else username_sender\n loser_username = message.username if pick else username\n\n # affect balances accordingly\n await database.add_balance(winner_id, bet[\"amount\"])\n await database.add_balance(loser_id, -bet[\"amount\"])\n\n # end the bet!\n await chat.send_message(\"@{} has won {} {}! 
better luck next time, @{}.\".format(winner_username, bet[\"amount\"], currency_name, loser_username))\n return None\n\n # make sure the amount is numeric by converting it to an int\n amount = utils.get_positive_int(amount)\n if amount is None: return \"amount must be a positive integer.\"\n\n # make sure they're not trying to start a bet against themself :/\n if message.username == username:\n return \"you're not able to start a bet against yourself.\"\n\n # make sure we don't already have a pending bet\n if pending_bets.get(message.username) is not None:\n return \"you already have a pending bet.\"\n\n # make sure the challenger has enough money to start the bet\n if amount > mixcord_user[\"balance\"]:\n return \"you have insufficient funds to request this bet.\"\n\n # store challenge information\n pending_bets[message.username] = {\n \"username\": username,\n \"amount\": amount\n }\n\n # send messages indicating the challenge has been issued\n await chat.send_message(\"@{} has challenged @{} to a bet of {} {}!\".format(message.username, username, amount, currency_name))\n await asyncio.sleep(0.5)\n await chat.send_message(\"use {}bet @{} [accept/deny] to respond to your pending bet!\".format(chat.commands.prefix, message.username), username)\n\n # automatically timeout the bet in 30 seconds\n await asyncio.sleep(30)\n bet = pending_bets.get(message.username)\n if bet is not None:\n del pending_bets[message.username]\n await chat.send_message(\"@{} your pending bet has timed out.\".format(message.username))", "def handle_bids(self, auction_round, bids):\n\n game_state = dict()\n game_state['finished'] = False\n\n if self.__over:\n # Game is already over\n game_state.update(self.__game_state)\n\n else:\n max_bid = dict()\n max_bid['amount'] = 0\n max_bid['bidder'] = None\n max_bid['received_time'] = None\n\n bid_item = self.auction_items[auction_round]\n\n for idx in range(len(bids)):\n player_id = bids[idx]['player']\n\n if self.players[player_id]['valid'] is False: # skip invalid players\n continue\n\n elif bids[idx]['timeout'] is True: # player was timed out during bidding\n self.players[player_id]['valid'] = False\n self.players[player_id]['remain_time'] = -1\n print(('Player {} was timed out on round {}.'\n .format(self.players[player_id]['name'], auction_round)))\n\n continue\n\n start_time = bids[idx]['start_time']\n received_time = bids[idx]['received_time']\n bid_summary = bids[idx]['bid']\n\n # handle timestamp checking\n self.players[player_id]['remain_time'] -= (received_time - start_time).total_seconds()\n\n if self.players[player_id]['wealth'] - bid_summary['bid_amount'] >= 0:\n\n bid_amt = bid_summary['bid_amount']\n\n # highest bidder or first bidder (if same bid amount)\n if bid_amt > max_bid['amount'] or (bid_amt == max_bid and max_bid['amount'] > 0 and\n received_time < max_bid['received_time']):\n max_bid['amount'] = bid_amt\n max_bid['bidder'] = player_id\n max_bid['received_time'] = received_time\n\n else:\n # invalid bid from player\n self.players[player_id]['valid'] = False\n print('Player {} made an invalid bid on round {}.'\n .format(self.players[player_id]['name'], auction_round))\n\n max_bidder = max_bid['bidder']\n\n if max_bid['amount'] > 0 and max_bidder is not None:\n\n self.players[max_bidder]['wealth'] -= max_bid['amount']\n self.bid_winners[bid_item][max_bidder] += 1\n\n if self.bid_winners[bid_item][max_bidder] >= self.__required_count:\n game_state['finished'] = True\n game_state['winner'] = max_bidder # wins\n game_state['reason'] = ('Player {} won 
the game! Congrats!'\n .format(self.players[max_bidder]['name']))\n\n game_state['bid_item'] = self.auction_items[auction_round]\n\n if max_bidder is not None:\n game_state['bid_winner'] = str(self.players[max_bidder]['name'])\n else:\n game_state['bid_winner'] = None\n\n game_state['winning_bid'] = max_bid['amount']\n game_state['auction_round'] = auction_round\n\n remain_player_count = 0\n for idx in range(len(self.players)):\n player_name = self.get_player_name(idx)\n game_state[player_name] = self.players[idx]._dict\n\n if self.players[idx]['valid'] is True:\n remain_player_count += 1\n\n game_state['remain_players'] = remain_player_count\n\n if remain_player_count == 0:\n # game ends\n game_state['finished'] = True\n game_state['reason'] = 'No valid players remaining'\n\n if auction_round == len(self.auction_items) - 1 and game_state['finished'] is False:\n # last round and no winner, then set game to finished\n game_state['finished'] = True\n game_state['reason'] = 'Draw Game. Out of Auction Items.'\n\n\n self.__game_state.update(game_state)\n self.__over = game_state['finished']\n self.print_status(game_state, auction_round)\n\n return game_state", "def event_player_blackjack(self) -> None:\n win_amount = self.user.bet + 1.5\n print(\"Congratulations, you win:\", win_amount)\n self.user.win_balance(win_amount)", "def bid(self):\n # log.debug(\"{0} is bidding...\".format(self.label))\n for bid in range(5):\n if self.is_legal_bid(bid):\n self.send_bid(bid)\n return", "async def auction(self, msg):\n\t\tminRaise = await self.cog.config.guild(self.ctx.guild).minRaise()\n\t\tmsg += (\n\t\t\tf'{TILENAME[self.tile[self.p]]} is now up for auction!\\n'\n\t\t\t'Anyone can bid by typing the value of their bid. '\n\t\t\tf'Bids must increase the price by ${minRaise}. 
'\n\t\t\t'After 15 seconds with no bids, the highest bid will win.'\n\t\t)\n\t\tawait self.ctx.send(file=discord.File(self.bprint()))\n\t\tawait self.ctx.send(msg)\n\t\thighest = None\n\t\thighp = None\n\t\tdef auctioncheck(m):\n\t\t\ttry:\n\t\t\t\tif highest is None:\n\t\t\t\t\treturn (\n\t\t\t\t\t\tm.author.id in self.uid\n\t\t\t\t\t\tand self.bal[self.uid.index(m.author.id)] >= int(m.content)\n\t\t\t\t\t\tand self.isalive[self.uid.index(m.author.id)]\n\t\t\t\t\t)\n\t\t\t\treturn (\n\t\t\t\t\tm.author.id in self.uid\n\t\t\t\t\tand self.bal[self.uid.index(m.author.id)] >= int(m.content)\n\t\t\t\t\tand self.isalive[self.uid.index(m.author.id)]\n\t\t\t\t\tand (highest + minRaise) <= int(m.content)\n\t\t\t\t)\n\t\t\texcept Exception:\n\t\t\t\treturn False\n\t\twhile True:\n\t\t\ttry:\n\t\t\t\tbid_msg = await self.bot.wait_for(\n\t\t\t\t\t'message',\n\t\t\t\t\tcheck=auctioncheck,\n\t\t\t\t\ttimeout=15\n\t\t\t\t)\n\t\t\texcept asyncio.TimeoutError:\n\t\t\t\tbreak\n\t\t\thighest = int(bid_msg.content)\n\t\t\thighp = self.uid.index(bid_msg.author.id)\n\t\t\tawait self.ctx.send(\n\t\t\t\tf'{bid_msg.author.display_name} has the highest bid with ${highest}.'\n\t\t\t)\n\t\tif highp is None:\n\t\t\tmsg = 'Nobody bid...\\n'\n\t\telse:\n\t\t\tmemwin = await self.get_member(self.uid[highp])\n\t\t\tself.bal[highp] -= highest\n\t\t\tself.ownedby[self.tile[self.p]] = highp\n\t\t\tmsg = (\n\t\t\t\tf'{memwin.display_name} wins with a bid of ${highest}!\\n'\n\t\t\t\tf'{memwin.display_name} now owns {TILENAME[self.tile[self.p]]} '\n\t\t\t\tf'and has ${self.bal[highp]}.\\n'\n\t\t\t)\n\t\treturn msg", "def main():\n\n # values to receive from a user\n bidders = []\n item_name = \"\"\n num_of_bidders = 0\n starting_price = 0\n\n errors = []\n is_valid = False\n while not is_valid:\n is_valid = True\n errors.clear()\n item_name = input(\"Please type the name of the item: \")\n\n # Starting price\n try:\n starting_price = float(input(\"Please type the starting price: \"))\n except ValueError:\n errors.append(\"[Error] Starting price should be a decimal number.\")\n is_valid = False\n\n # Number of bidders\n try:\n num_of_bidders = int(input(\"Please type the number of bidders: \"))\n except ValueError:\n errors.append(\"[Error] Number of bidders should be an integer.\")\n is_valid = False\n\n # print input errors\n for error in errors:\n print(error)\n\n # Creating bidders\n num = 1\n bidder_name = \"\"\n is_valid = False\n while num <= int(num_of_bidders) or is_valid is False:\n print(f\"Please provide the details of the bidder {num}\")\n name = input(\"name: \")\n try:\n budget = float(input(\"budget: \"))\n except ValueError as e:\n print(\"[Error] Budget should be a decimal number\")\n else:\n is_valid = True\n inc_rate = random.random()\n bidders.append(Bidder(name, float(budget), (1 + inc_rate)))\n num += 1\n\n # Create Auction with the input values and Start the auction\n my_auction = Auction(bidders, item_name, float(starting_price))\n print(f\"\\nStarting Auction!!\\n----------------------\\n\"\n f\"Auctioning {bidder_name} starting at {starting_price}.\")\n my_auction.start_auction()\n\n # Print out the auction results\n my_auction.print_auction_result()", "def step_bid(self, action_bid):\n if self.done_bidding:\n raise Exception(\"No more actions can be taken\")\n\n # action_bid must be in [0; AUCTION_SPACE_SIZE - 1]\n if action_bid < 0 or action_bid > AUCTION_SPACE_SIZE - 1:\n raise Exception(\"illegal action\")\n\n # what happens when we get a pass\n if action_bid == PASS_IDX:\n\n # we are not allowed 
to make a double for now\n self.elim_sig_bid[DOUBLE_IDX] = 0\n\n self.history_bid[action_bid] = 1\n\n if self.max_bid == -1:\n self.auction_history[self.n_pass] = 1\n elif self.n_pass < 2:\n self.auction_history[\n 3 + 8*self.max_bid + 3*(self.n_double + self.n_redouble) + self.n_pass + 1] = 1\n\n # incrementing the current number of passes\n self.n_pass += 1\n\n # what happens when we get a contract bid\n elif action_bid < PASS_IDX:\n\n if action_bid <= self.max_bid:\n raise Exception(\"illegal bidding.\")\n\n # resetting n_pass, n_double and n_redouble\n self.n_pass = 0\n self.n_double = 0\n self.n_redouble = 0\n self.max_bid = action_bid\n\n self.history_bid[action_bid] = 1\n self.history_bid[-1] = 0\n self.auction_history[3 + 8*self.max_bid] = 1\n\n # this action and all the actions below can no longer be performed\n self.elim_sig_bid[:(1 + self.max_bid)] = 0\n\n # doubles are now permitted, redoubles are not permitted\n self.elim_sig_bid[DOUBLE_IDX] = 1\n self.elim_sig_bid[REDOUBLE_IDX] = 0\n\n strain = convert_action2strain(action_bid)\n group = Seat2Group[self.turn_bid]\n if self.strain_declarer[group].get(strain, '') == '':\n self.strain_declarer[group][strain] = self.turn_bid # which one\n self.group_declarer = group # which group\n\n # what happens when we get a double\n elif action_bid == DOUBLE_IDX:\n # doubles are not permitted when\n # no contract bids have been made OR\n # a double bid has already been made OR\n # a redouble bid has been made\n if (self.max_bid == -1) or (self.n_double == 1) or (self.n_redouble == 1):\n raise Exception(\"double is not currently allowed\")\n\n self.n_double = 1\n self.elim_sig_bid[DOUBLE_IDX] = 0\n self.elim_sig_bid[REDOUBLE_IDX] = 1\n self.auction_history[3 + 8*self.max_bid + 3] = 1\n\n # what happens when we get a redouble\n elif action_bid == REDOUBLE_IDX:\n # redoubles are not permitted when\n # no contract bids have been made OR\n # a double bid has not been made OR\n # a redouble bid has already been made\n if (self.max_bid == -1) or (self.n_double == 0) or (self.n_redouble == 1):\n raise Exception(\"redouble is not currently allowed\")\n\n self.n_redouble = 1\n self.elim_sig_bid[DOUBLE_IDX] = 0\n self.elim_sig_bid[REDOUBLE_IDX] = 0\n self.auction_history[3 + 8*self.max_bid + 6] = 1\n\n # updating the index of the next bidding player\n self.turn_bid = (self.turn_bid + 1) % len(Seat)\n\n # move to the participant\n # NB: this code is only useful if not all players are bidding (i.e. 
self.bidding_seats\n # does not contain everybody)\n while True:\n if self.turn_bid not in self.bidding_seats:\n self.turn_bid = (self.turn_bid + 1) % len(Seat)\n self.n_pass += 1\n else:\n break\n\n hand = self.one_hot_deal[self.turn_bid]\n reward = 0\n\n # state is the next bidding player's state\n if (self.n_pass >= 3 and self.max_bid < 0) or self.max_bid == 34:\n\n if self.max_bid < 0:\n raise Exception(\"illegal bidding\")\n # extract the declarer, strain , level\n strain = convert_action2strain(self.max_bid)\n level = convert_action2level(self.max_bid)\n # single thread\n # reward = np.mean(Deal.score_st(dealer=self.deal, level=level, strain=strain, declarer=declarer, tries=self.nmc, mode=self.score_mode))\n # parallel threads\n\n # np.mean is moved to score\n declarer = self.strain_declarer[self.group_declarer][strain] # thise group's first declarer\n\n # TODO[ス: game rewards / scores will no longer be calculated during bidding - the next\n # bit of code needs to be removed\n reward = Deal.score(dealer=self.deal,\n level=level,\n strain=strain,\n declarer=declarer,\n tries=self.nmc,\n mode=self.score_mode)\n self.done_bidding = True\n\n # storing the contract\n self.contract.from_bid(bid=self.max_bid,\n double=(self.n_double > 0),\n redouble=(self.n_redouble > 0))\n\n # setting the index of the first player\n self.turn_play = (self.turn_bid + 1) % len(Seat)\n\n # since bidding is now done, we need to set the initial value of self.score_play\n self._update_score()\n\n # TODO[ス: remove the next lines - this method should no longer return anything\n state = (hand, self.history_bid)\n info = {\"turn\": Seat[self.turn_bid], \"max_bid\": self.max_bid}\n if self.debug:\n log_state(state, reward, self.done_bidding, info)\n\n return state, reward, self.done_bidding, info", "def player_b_id(self, player_b_id):\n\n self._player_b_id = player_b_id", "async def slot(self, ctx: commands.Context, bid: int):\r\n author = ctx.author\r\n guild = ctx.guild\r\n channel = ctx.channel\r\n if await bank.is_global():\r\n valid_bid = await self.config.SLOT_MIN() <= bid <= await self.config.SLOT_MAX()\r\n slot_time = await self.config.SLOT_TIME()\r\n last_slot = await self.config.user(author).last_slot()\r\n else:\r\n valid_bid = (\r\n await self.config.guild(guild).SLOT_MIN()\r\n <= bid\r\n <= await self.config.guild(guild).SLOT_MAX()\r\n )\r\n slot_time = await self.config.guild(guild).SLOT_TIME()\r\n last_slot = await self.config.member(author).last_slot()\r\n now = calendar.timegm(ctx.message.created_at.utctimetuple())\r\n\r\n if (now - last_slot) < slot_time:\r\n await ctx.send(_(\"You're on cooldown, try again in a bit.\"))\r\n return\r\n if not valid_bid:\r\n await ctx.send(_(\"That's an invalid bid amount, sorry :/\"))\r\n return\r\n if not await bank.can_spend(author, bid):\r\n await ctx.send(_(\"You ain't got enough money, friend.\"))\r\n return\r\n if await bank.is_global():\r\n await self.config.user(author).last_slot.set(now)\r\n else:\r\n await self.config.member(author).last_slot.set(now)\r\n await self.slot_machine(author, channel, bid)", "def doBuyIn(self):\n self.protocol.sendPacket(networkpackets.PacketPokerBuyIn(amount=self.max_buy_in, **self._serial_and_game_id))\n self.protocol.sendPacket(networkpackets.PacketPokerAutoBlindAnte(**self._serial_and_game_id))", "def event_player_bust(self) -> None:\n print(f\"Your hand contains {min(self.user.hand.value)}, you're bust\")\n self.event_house_wins()", "def handleBuy(self, client_id, request_id, ticket_count,\n client_ip, client_port):\n if 
self.isLeader():\n # if the request is already in the list, ignore it\n if not self.requestInLog(client_id, request_id):\n self.log.append(LogEntry(self.current_term, len(self.log),\n {'client_id': client_id,\n 'request_id': request_id,\n 'ticket_count': ticket_count,\n 'client_ip': client_ip,\n 'client_port': client_port}))\n dictobj = {'current_term': self.current_term, 'voted_for': self.voted_for, 'log': self.log}\n filename = \"./state\"+self.datacenter_id+'.pkl'\n fileobj = open(filename, 'wb')\n pickle.dump(dictobj, fileobj)\n fileobj.close()\n else:\n # if there is a current leader, then send the request to\n # the leader, otherwise, ignore the request, the client\n # will eventually retry\n # need a way to know who to send the success message to\n message = ('BUY-FORWARD:\"{client_id}\",{request_id},' +\n '{ticket_count},\"{client_ip}\",{client_port}').format(\n client_id=client_id,\n request_id=request_id,\n ticket_count=ticket_count,\n client_ip=client_ip,\n client_port=client_port)\n logging.info('forward ticket request to leader {}'\n .format(self.leader_id))\n if self.leader_id:\n self.server.sendMessage(self.getMetaByID(self.leader_id),\n message)", "async def add_player_two(game_id, user_id, bid):\n await ex.conn.execute(\"UPDATE blackjack.games SET player2 = $1, bid2 = $2 WHERE gameid = $3 \", user_id, str(bid), game_id)", "def bid(self, value):\n if value is not None:\n self._bid = Decimal(value)", "def handle_event(self, event):\n if self.sub_event is not None:\n self.sub_event.handle_event(event)\n return\n\n if event.key == BattleActions.UP.value:\n if self.quant < self.max_quant:\n self.update_cursor_and_price(self.quant + 1)\n elif self.quant == self.max_quant:\n self.update_cursor_and_price(1)\n elif event.key == BattleActions.DOWN.value:\n if self.quant > 1:\n self.update_cursor_and_price(self.quant - 1)\n elif self.quant == 1:\n self.update_cursor_and_price(self.max_quant)\n elif event.key == BattleActions.SELECT.value:\n self.sub_event = ConfirmSell(self.player, self.item, self.quant)", "async def handle_pb(self, user: discord.User, brawler: str):\n\n # individual brawler\n trophies = await self.get_trophies(user=user, brawler_name=brawler)\n pb = await self.get_trophies(user=user, pb=True, brawler_name=brawler)\n\n if trophies > pb:\n await self.update_player_stat(\n user, 'brawlers', trophies, substat=brawler, sub_index='pb')", "def get_player_bet(self) -> None:\n print(\"Please enter the amount you want to bet.\")\n while self.user.bet == 0:\n input_ = input(\">>> \")\n try:\n input_ = float(input_)\n self.user.bet = input_\n except ValueError as e:\n print(str(e))\n continue", "async def add_bj_game(self, user_id, bid, ctx, mode):\n await ex.conn.execute(\"INSERT INTO blackjack.games (player1, bid1, channelid) VALUES ($1, $2, $3)\", user_id, str(bid), ctx.channel.id)\n game_id = await self.get_game_by_player(user_id)\n if mode != \"bot\":\n await ctx.send(f\"> **There are currently 1/2 members signed up for BlackJack. To join the game, please type {await ex.get_server_prefix_by_context(ctx)}joingame {game_id} (bid)** \")", "def event_player_push(self) -> None:\n print(f\"You got a push, your bet of {self.user.bet} is returned\")" ]
[ "0.6900256", "0.64509535", "0.6288491", "0.6142698", "0.61269826", "0.59916043", "0.59418434", "0.5737381", "0.5708696", "0.5684553", "0.5658267", "0.5652931", "0.5651575", "0.5596769", "0.55754954", "0.55639243", "0.55026704", "0.5463158", "0.53897315", "0.53704005", "0.5363362", "0.5252366", "0.525105", "0.5240292", "0.52351993", "0.51810336", "0.5165639", "0.5165077", "0.5165006", "0.5160155" ]
0.7031884
0
Event handler for when a player clicks on a card in their hand. First, checks to make sure that the game state is "playing" and that it is the respective client's turn. If not, it refuses to play the card. If it is, the card is added to the current trick on the table and priority passes to the next player in line. Also updates the hand of the player who just played a card.
def handle_card_click_event(index, sid, methods=["GET", "POST"]):
    if game.state == "playing":
        print(index)
        active_player = game.ordered_players[game.active_player_index]
        print(active_player.hand)
        if active_player.sid == sid:
            game.play_card(int(index), active_player)
            if game.card_played:
                game.card_played = False
                # Record every card in the current trick for the clients.
                for player, card in game.trick.items():
                    card_list = [card.value, card.suit]
                    game.trick_obj[player.name] = card_list
                # Rebuild the hand of the player who just played a card.
                hand_dict = {}
                for i, card in enumerate(active_player.hand):
                    hand_dict[i] = [card.value, card.suit]
                print(active_player.hand)
                print(active_player.sid)
                active_player.can_follow_suit = False
                socketio.emit("update hand", hand_dict, room=active_player.sid)
                socketio.emit("show trick", game.trick_obj)
                print(f"{game.trick_obj}")
                game.active_player_index += 1
                if game.active_player_index == len(game.players):
                    # Every player has played: end the trick and refresh the table.
                    trick_table_dict = {}
                    for player in game.ordered_players:
                        trick_table_dict[player.name.title()] = player.tricks
                    socketio.emit("end trick", game.winner_message)
                    socketio.emit("update trick table", trick_table_dict)
                else:
                    # Pass priority to the next player in line.
                    next_player = game.ordered_players[game.active_player_index]
                    socketio.emit("your turn", room=next_player.sid)
        else:
            print("It's not your turn")
    else:
        print("It's not time for that")
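A companion sketch of one plausible play_card, the call the handler above depends on: a legal play moves the card from the hand into the trick and sets card_played, while an illegal one leaves the flag False so the handler refuses it. The Card type and the follow-suit rule here are assumptions for illustration only, not the record's actual implementation.

from dataclasses import dataclass

@dataclass(frozen=True)
class Card:
    value: str
    suit: str

def play_card(game, index, player):
    # Hypothetical stand-in for game.play_card(index, player).
    card = player.hand[index]
    if game.trick and player.can_follow_suit:
        # Dicts keep insertion order, so the first entry is the lead card.
        lead_suit = next(iter(game.trick.values())).suit
        if card.suit != lead_suit:
            game.card_played = False  # handler will refuse the play
            return
    game.trick[player] = player.hand.pop(index)
    game.card_played = True

Under these assumptions, the handler's `if game.card_played:` branch fires only for legal plays, which is what lets it update hands and pass priority unconditionally inside that branch.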
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hit(self, hand):\n if hand == \"player\":\n self.player_hand.append(self.cards_list[self.top_card_int])\n self.calculate_value(\"player\")\n elif hand == \"dealer\":\n self.dealer_hand.append(self.cards_list[self.top_card_int])\n self.calculate_value(\"dealer\")\n self.top_card_int += 1\n self.update_card_positions()", "def hit(self, player):\n\n hit_card = self.deck.draw()\n hit_card.flip()\n player.take_card(hit_card)\n\n if self.verbose:\n print(player, 'receives', hit_card)", "def player_turn(self):\n while self.player.hand.value < 22:\n choice = player_choice()\n if choice == \"S\":\n break\n elif choice == \"D\":\n self.player.double_down(self.pot, self.deck)\n break\n else:\n self.player.take_card(self.deck)\n show_table(self.player, self.dealer, self.pot)", "def play_card(self, player, card):\n top_card = self.discard_pile[len(self.discard_pile) - 1]\n if player in self.players and not self.wait_for_card_wish:\n if not player.has_card(card):\n return False\n\n if player != self.current_player:\n return False\n\n if self.card_wished is not None:\n return self._play_wished_card(card)\n\n if top_card.value == \"7\" and self.current_draw_punishment > 1:\n return self._play_card_on_seven(card, top_card)\n\n return self._play_normal_card(card, top_card)\n return False", "def take_turn(self):\n \n self.card_1 = self.get_card()\n self.display_card_1()\n guess = self.player.higher_lower()\n self.card_2 = self.get_card()\n self.display_card_2()\n self.compare_cards(guess)\n self.player.print_score()\n if self.player.score > 0:\n self.can_deal = self.player.keep_playing()\n print(\"\\n\")\n else:\n self.can_deal = False\n print(\"Game overThanks for playing!\")", "def hit(self):\n global in_play, deck, player_hand, dealer_hand, outcome, lost\n \n if in_play:\n player_hand.add_card(deck.deal_card())\n \n if player_hand.get_value() > 21:\n self.outcome.set(\"You have busted! Dealer wins. 
New deal?\")\n self.lost += 1\n self.score.set(str(self.won) + \"/\" + str(self.lost))\n in_play = False\n draw(canvas)\n\n print \"\\nPlayer hand: \", player_hand\n print \"Dealer hand: \", dealer_hand", "def play_card(self, player_index, card_index):\n card = self.hands[player_index][card_index]\n color_index = COLOR.index(card[0])\n if self.is_card_playable(card):\n # the color and the number match, add the card\n self.firework[color_index].append(card)\n # if we complete the firework for a color, we get an extra\n # blue stone\n if len(self.firework[color_index]) == 5:\n self.nb_blue_stone = min(self.nb_blue_stone + 1,\n MAX_BLUE_STONE)\n else:\n # error, the card cannot be played, remove a red_stone\n if self.nb_red_stone == 0:\n raise GameOverError(\"The card \" + card + \" cannot be\\\n played and there is no red stone anymore\")\n self.nb_red_stone = self.nb_red_stone - 1\n self.hands[player_index][card_index] = self.draw_card()\n return self.hands[player_index][card_index]", "def user_play(play_shoe, player, dealer):\n print(\"\\nDealer shows:\" + dealer.get_viewable_hand())\n hit = True\n while hit == True:\n decision = \" \"\n if len(player.get_hand()) == 2:\n print(\"\\nPlayer \" + player.get_name() + \" your hand is:\" + player.get_viewable_hand())\n else:\n print(\"\\nYour hand is now:\" + str(player.get_viewable_hand()))\n decide_soft_score_print(player)\n if not(check_blackjack(player.get_score(), player.get_hand())):\n if not(player.check_bust()) and player.get_score() < 21:\n while not(decision[0] == \"h\") and not(decision[0] == \"s\"):\n decision = input(\"Would you like to Hit or Stand? \").lower()\n if decision[0]==\"h\":\n player.hit_hand(play_shoe)\n else:\n hit = False\n else:\n hit = False\n else:\n hit = False\n check_stand(player)", "def make_card_wish(self, symbol, player):\n if player == self.current_player:\n if symbol in \"s c h d\":\n self.wait_for_card_wish = False\n self.card_wished = symbol\n self.choose_next_player()\n return True\n return False", "def player_hit(self):\r\n if self.in_progress:\r\n self.player_hand.add(self.deck.deal())\r\n if self.player_hand.total > 21:\r\n self.status_color = 'red'\r\n self.game_status = \"Dealer WINS... Press 'r' to start game\"\r\n self.dealer_wins += 1\r\n self.in_progress = False\r\n self.refresh_canvas()", "def hit():\n \n # Update messages, score and the player's \"Hand\" status\n # as global variables.\n global outcome, outcome_plus, outcome_plus_plus, in_play, score, action \n \n # If the \"Hand\" is in play, hit the \"player\". 
\n if in_play:\n outcome = outcome_plus = outcome_plus_plus = \"\"\n player.add_card(deck_of_cards.deal_card())\n else:\n return None\n \n # If busted, update messages, score and the player's \n # \"Hand\" status.\n if player.get_value() > 21:\n outcome = PLAYER_BUSTED\n outcome_plus = outcome_plus_plus = \"\"\n action = NEW_DEAL \n score -= SCORE_POINTS\n in_play = False\n \n return None", "def hit(hand=bj.player1.hand):\r\n hand.append(bj.deck.remove_card())", "def play(self):\n\n start_player = random.choice(self.names)\n turn_order = self.player_order(start=start_player)\n\n \"\"\"Play card from player hand when it is empty\"\"\"\n while self.hands[start_player].hand.cards:\n for name in turn_order:\n self.hands[name].player_card()\n print()", "def step(self, action):\n assert self.completed_rounds < self.num_rounds\n\n player = self.players[self.current_player_id]\n card = action\n\n if card not in player.hand:\n raise ValueError(\"Action not allowed because the card is not in the player's hand\")\n\n player.hand.remove(card)\n player.played.add(card)\n # print(f\"Player {self.current_player_id} with hand {[c.id for c in player.hand]} played the card {card.id}\")\n best_combination_on_the_table = self._get_best_combination(card)\n if best_combination_on_the_table:\n self.last_player_capturing_id = self.current_player_id\n player.captured.add(card)\n for c in best_combination_on_the_table:\n self.table.remove(c)\n player.captured.add(c)\n if not self.table and not (self._is_last_round and self._is_round_over()):\n player.scope += 1\n else:\n self.table.add(card)\n # print(f\"Cards on the table after play: {[c.id for c in self.table]}\")\n\n if self._is_round_over():\n self.completed_rounds += 1\n # print(f\"=========== Round {self.current_round} completed ============\")\n self.current_player_id = (self.current_player_id + 1) % self.num_players\n\n if self.is_over():\n last_player_capturing = self.players[self.last_player_capturing_id]\n # print(f\"Giving the remaining cards to player {last_player_capturing.player_id}\")\n for card in self.table:\n last_player_capturing.captured.add(card)\n self.table = set()\n assert all([len(p.played) == 10 for p in self.players])\n assert all([len(p.hand) == 0 for p in self.players])\n return self.get_state(), self.current_player_id", "def hit(player):\n deal_random_card(player)", "def click_on_card(num_in_hand, card_idx):\n card_idx -= 1\n logger.debug(\"Clicking on hand card index {} with {} cards in hand\".\\\n format(card_idx, num_in_hand))\n coords = card_coords[num_in_hand]\n game_click(coords[card_idx])", "def _deal_player():\n\n # we append the dealed card to the player's hand.\n player_hand.append(_deal_card(player_card_frame))\n\n # calculate and return the score of the player's hand.\n player_score = _score_hand(player_hand)\n\n # set the score to the respective label.\n player_score_label.set(player_score)\n\n # if the score surpasses 21, dealer wins.\n if player_score > 21:\n result_text.set(\"Dealer wins!\")", "def nextEvent(self):\n\n if self.controller._state.rules.Shared_Board:\n self.num_wilds = len(self.controller.unassigned_wilds_dict.keys())\n if self.num_wilds > 0:\n self.nextEventWildsOnBoard()\n\n for self.event in pygame.event.get():\n if self.event.type == pygame.QUIT:\n # The window crashed, we should handle this\n print(\"pygame crash, AAAHHH\")\n pygame.quit()\n quit()\n\n if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n wild_instructions = 'Use the keyboard to designate your prepared wild cards 
\\r\\n '\n wild_instructions = wild_instructions + '(use 0 for 10 and J, Q, or K for facecards).'\n self.controller.note = wild_instructions\n pos = pygame.mouse.get_pos()\n\n if self.event.type == pygame.MOUSEBUTTONDOWN:\n self.RuleSetsButtons.ClickedButton(self, pos)\n for element in self.hand_info:\n # cannot select prepared cards, so not included in logic below.\n if element.img_clickable.isOver(pos):\n if element.status == 1:\n element.status = 0\n element.img_clickable.changeOutline(0)\n elif element.status == 0:\n element.status = 1\n element.img_clickable.changeOutline(2)\n\n elif self.event.type == pygame.MOUSEMOTION:\n self.RuleSetsButtons.MouseHiLight(self, pos)\n HandManagement.MouseHiLight(self.hand_info, pos)\n elif self.event.type == pygame.KEYDOWN:\n if self.controller._state.rules.Buy_Option:\n if self.controller.buying_opportunity:\n if self.event.key == pygame.K_y:\n self.controller.wantTopCard(True)\n self.controller.note = 'You have signaled you want to buy the card.'\n elif self.event.key == pygame.K_n:\n self.controller.wantTopCard(False)\n self.controller.note = 'You have signaled you do not want to buy the card.'\n if not self.controller._state.rules.Shared_Board and self.num_wilds > 0:\n HandManagement.ManuallyAssign(self)", "def hit(self, card):\n self.hand.append(card)", "def addCard(self, flag, player, card):\n self.board.flags[flag].add_card(player, card)\n self.engine.output_handler.play_action(player, card, flag + 1)\n self.latestPlayer = player", "def take_turn(self, deck, pile):\n # print player info\n print(self.name + \", it's your turn.\")\n print(pile)\n\n # get a list of cards that can be played\n topcard = pile.top_card()\n\n # check if need to draw 4 cards\n if topcard.kind == 'wild4':\n # draw cards\n for i in range(4):\n self.draw_card(deck)\n print(\"You have been skipped.\")\n # change for next player\n topcard.kind = 'wild'\n self.hand.append(topcard)\n self.play_card(topcard, pile)\n return # skip turn\n\n if self.name == 'computer':\n self.take_comp_turn(deck, pile)\n return # move on to next player\n\n print(self.name + \" Your hand: \" + str(len(self.hand)))\n print(self.get_hand())\n\n matches = [card for card in self.hand if card.is_match(topcard)]\n if len(matches) > 0: # can play\n for index in range(len(matches)):\n # print the playable cards with their number\n print(str(index+1) + \": \" + str(matches[index]))\n # get player's choice of which card to play\n choice = 0\n while choice < 1 or choice > len(matches):\n choicestr = input(\"Which do you want to play? \")\n if choicestr.isdigit():\n choice = int(choicestr)\n # play the chosen card from hand, add it to the pile\n self.play_card(matches[choice-1], pile)\n # chose a wild card\n if matches[choice-1].kind == 'wild' or matches[choice-1].kind == 'wild4':\n chosencolor = input(\"Which color would you like to change to? [red, green, yellow, blue]:\")\n while chosencolor not in ['red', 'yellow', 'green', 'blue']:\n chosencolor = input(\"Which color would you like to change to? 
[red, green, yellow, blue]: \")\n matches[choice-1].color = chosencolor\n print(\"The color is now \" + str(chosencolor) + \".\")\n print(str(self.name) + \" played \" + str(matches[choice-1]))\n else: # can't play\n print(\"You can't play, so you have to draw.\")\n input(\"Press enter to draw.\")\n # check if deck is empty -- if so, reset it\n if deck.is_empty():\n deck.reset_deck(pile)\n # draw a new card from the deck\n newcard = self.draw_card(deck)\n print(\"You drew: \"+str(newcard))\n if newcard.is_match(topcard): # can be played\n print(\"Good -- you can play that!\")\n self.play_card(newcard, pile)\n # drew a wild card\n if newcard.kind == 'wild':\n chosencolor = input(\"Which color would you like to change to? [red, green, yellow, blue]: \")\n newcard.color = chosencolor\n print(\"The color is now \" + str(chosencolor) + \".\")\n else: # still can't play\n print(\"Sorry, you still can't play.\")\n input(\"Press enter to continue.\")", "def play_card():\n game = current_user.get_active_game()\n card = request.form.get('card', '')\n\n if game is None:\n flash('If you want to join a game, click the Join button.')\n return redirect(url_for('home'))\n else:\n # Validate card\n if not isinstance(card, str):\n # Invalid input for card, but no need to alert user\n return redirect(url_for('game_home'))\n card = card.strip()\n if not CARD_REGEX.match(card):\n flash('Invalid card format.')\n return redirect(url_for('game_home'))\n\n hand = game.get_latest_hand()\n trick = hand.get_latest_trick(with_for_update=True)\n # Attempt to play the card\n try:\n trick.play_card(current_user.user_id, card, game, hand)\n except NotPlayersTurnError:\n flash('It is not your turn to play a card.')\n return redirect(url_for('game_home'))\n except CardNotInHandError:\n flash('The card \\'{0}\\' is not in your hand or has already been played.'\n ' Please play a card from your hand.'.format(card))\n return redirect(url_for('game_home'))\n except SpadesNotBrokenError:\n flash('Spades have not yet been broken. Please choose a different suit.')\n return redirect(url_for('game_home'))\n except NotFollowingLeadSuitError:\n flash('You must follow the lead suit whenever possible. Please choose a card with the lead suit.')\n return redirect(url_for('game_home'))\n except BadGameStateError:\n flash('An error occurred while trying to play your card. 
Please try again.')\n return redirect(url_for('game_home'))\n else:\n flash(f'You played {card} successfully.')\n if trick.winner is not None:\n flash(f'{trick.winner.value} won the trick.')\n if trick.trick_number == 13 and game.state == GameStateEnum.IN_PROGRESS:\n flash('A new hand has been dealt.')\n return redirect(url_for('game_home'))\n elif game.state == GameStateEnum.COMPLETED:\n if game.ns_win:\n flash('North/South team won the game.')\n else:\n flash('East/West team won the game.')\n # Redirect to game summary screen\n redirect(url_for('game_summary', game_id=game.game_id))\n return redirect(url_for('game_home'))", "def play(self, player, game):\n player.get_hand().get_cards().remove(self)\n card = game.pick_card()\n player.get_hand().add_cards(card)\n game.next_player()\n game.set_action(\"NO_ACTION\")", "def button_press(self, value):\r\n\r\n self.reset_hands()\r\n\r\n self.increment_round(self.try_again)\r\n\r\n self.player_hand(value)\r\n\r\n self.opponent_hand()\r\n\r\n self.compare_hands()", "def set_picked_card(self, player_id: int, game_round_id: int, slack_user_hash: str, position: int,\n card: PlayerHandCardType):\n with self.eng.session_mgr() as session:\n # Move card to player_pick\n session.add(TablePlayerPick(\n player_key=player_id,\n game_round_key=game_round_id,\n slack_user_hash=slack_user_hash,\n card_order=position,\n answer_card_key=card.answer_card_key\n ))\n # Mark card in the hand as picked\n session.query(TablePlayerHand).filter(and_(\n TablePlayerHand.player_key == player_id,\n TablePlayerHand.hand_id == card.hand_id\n )).update({\n TablePlayerHand.is_picked: True\n })\n # Increment times picked\n session.query(TableAnswerCard).filter(\n TableAnswerCard.answer_card_id == card.answer_card_key\n ).update({\n TableAnswerCard.times_picked: TableAnswerCard.times_picked + 1\n })", "def player_turn(user, deck):\n print(f\"\\n======== PLAYER'S TURN ========\\n\\n\"\n f\"Your current hand is \\033[36m{user.total}\\033[0m.\\n\")\n while deck.cards and not bust(user) and user.total != GOAL_TOTAL():\n if player_draw():\n draw_card(user, deck)\n else:\n print(f\"\\nYou've chosen to Stand, this ends the round with your hand of \\033[36m{user.total}\\033[0m.\\n\")\n break\n time.sleep(1)", "def play_card(game_id, player_id, card_id, selected_color=None):\n game_data = load_state(game_id)\n if not game_data:\n return False\n players = game_data.get('players')\n if player_id not in [p['id'] for p in players]:\n return False\n player = [p for p in players if p['id'] == player_id][0]\n if not player['active']:\n return False\n if card_id not in [c['id'] for c in player['hand']]:\n return False\n if not game_data['active']:\n return False\n card = [c for c in player['hand'] if c['id'] == card_id][0]\n msg = make_danger_message('You can\\'t play that card!')\n if not can_play_card(game_data, card):\n flash_player(game_data, player, msg)\n return False\n if card['value'] == 'WILD_DRAW_FOUR':\n if player_has_matching_color_card(game_data, player):\n flash_player(game_data, player, msg)\n return False\n if card['value'] in SPECIAL_CARDS:\n if selected_color not in CARD_COLORS:\n flash_player(game_data, player, msg)\n return False\n card['color'] = selected_color\n player['hand'].remove(card)\n if len(player['hand']) == 1:\n msg = make_info_message('Only one card to go!')\n alt_msg = make_warning_message(\n '{} only has one card left!'.format(player['name']))\n flash_player(game_data, player, msg, alt_msg)\n game_data['stack'].append(card)\n if card['value'] == 'REVERSE':\n 
game_data['reverse'] = not game_data['reverse']\n if len(game_data['players']) != 2:\n if game_data['reverse']:\n msg = make_info_message('Game order has been reversed')\n else:\n msg = make_info_message('Game order is back to normal')\n flash_broadcast(game_data, msg)\n if not player['hand']:\n set_round_winner(game_data, player)\n else:\n activate_next_player(game_data)\n save_state(game_data)\n return True", "def draw_card(self, card):\n self.current_hand.append(card)", "def play_card(self, trump: Card, first: Card, played: Dict[int, Card], players: List[Player],\n played_in_round: Dict[int, List[Card]], first_player_index: int):\n possible_actions = super().get_playable_cards(first)\n if not isinstance(possible_actions, list):\n possible_actions = list(possible_actions)\n card_to_play = random.choice(possible_actions)\n self.hand.remove(card_to_play)\n return card_to_play", "def play(self, player, game):\n player.get_hand().get_cards().remove(self)\n card = game.pick_card()\n player.get_hand().add_cards(card)\n game.set_action(\"NO_ACTION\")" ]
[ "0.6828019", "0.66834337", "0.6655729", "0.66178566", "0.6582696", "0.6566157", "0.64809275", "0.64504397", "0.6402687", "0.63628477", "0.63552636", "0.6353695", "0.6352674", "0.6339642", "0.6332877", "0.6318542", "0.62739736", "0.6272104", "0.62683296", "0.62102437", "0.619966", "0.6155702", "0.614749", "0.6133011", "0.6132338", "0.60909337", "0.60908073", "0.608535", "0.6067082", "0.6062413" ]
0.682233
1
Enqueue(val), enqueue(val2), dequeue = val, dequeue = val2.
def test_values_dequeue_two_values_in_correct_order():
    queue = Queue('ab')
    assert queue.dequeue() == 'a'
    assert queue.dequeue() == 'b'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_the_queue_dequeue_multi_values_phase_two(the_queue):\n the_queue.enqueue(2)\n the_queue.enqueue(3)\n the_queue.enqueue(4)\n the_queue.enqueue(5)\n the_queue.dequeue()\n assert (the_queue.dequeue(),\n the_queue._new_dll.tail.data) == (3, 4)", "def enqueue(self, val):\n self.q1.append(val)", "def test_the_queue_dequeue_multi_values_phase_one(the_queue):\n the_queue.enqueue(2)\n the_queue.enqueue(3)\n the_queue.enqueue(4)\n the_queue.enqueue(5)\n the_queue.dequeue()\n assert the_queue._new_dll.tail.data == 3", "def enqueue(tup):", "def dequeue(self):\n if self.q2:\n return self.q2.pop()\n if self.q1:\n while self.q1:\n self.q2.append(self.q1.pop())\n return self.q2.pop()\n return None", "def enqueue(self,value):\n pass", "def test_the_queue_enqueue_multi_values(the_queue):\n the_queue.enqueue(2)\n the_queue.enqueue(3)\n the_queue.enqueue(4)\n the_queue.enqueue(5)\n assert (the_queue._new_dll.head.data,\n the_queue._new_dll.tail.data,\n the_queue._new_dll.head.prior_node.data,\n the_queue._new_dll.tail.next_node.data) == (5, 2, 4, 3)", "def enqueue(self, val):\n self.stack1.push(val)", "def dequeue(self):", "def test_values_dequeue_in_correct_order():\n queue = Queue('ab')\n assert queue.dequeue() == 'a'", "def enqueue(self, val):\r\n self.queue.append(val)", "def enqueue(self, val):\n\n if self.front is None:\n self.back = self.front = Node(val)\n self._size += 1\n else:\n self.back._next = self.back = Node(val)\n self._size += 1\n return self.back", "def push(self, x):\n # enqueue new element to q2\n self.q2.enqueue(x)\n # dequeue from q1 and enqueue in q2\n\n while self.q1.qlist:\n self.q2.enqueue(self.q1.dequeue())\n # swap q1 and q2\n temp = self.q1.qlist\n self.q1.qlist = self.q2.qlist\n self.q2.qlist = temp", "def dequeue(self): ##################### <-\n value = self.lst[0]\n self.lst = self.lst[1:]\n return value", "def __init__(self, v1, v2):\n self.queue = [v for v in (v1, v2) if v]\n # 03092022\n # 初始化的时候,避免其中一个为None, 这样再叫next的时候\n # 则避免head.pop(0)的情况,一旦进入next, 则有if head判断\n if not v1:\n self.queue = [v2]\n elif not v2:\n self.queue = [v1]\n else:\n self.queue = [v1, v2]", "def enqueue(self, value): ################# <-\n self.lst = self.lst +[value]", "def dequeue(self): ##################### <-\n top = self.top\n def dQsub(top):\n if not top.next: return\n else:\n if top.next.next:\n topDQ = Node(top.value)\n topDQ.next = dQsub(top.next)\n else:\n topDQ = Node(top.value)\n return topDQ\n self.top = dQsub(top)", "def enqueue(self, item):\n while len(self._stack1) > 0:\n self._stack2.push(self._stack1.pop())\n self._stack2.push(item)", "def dequeue(self):\n if self.isEmpty():\n return -1\n if not self.s2.isEmpty():\n ele = self.s2.peek()\n self.s2.pop()\n return ele\n while not self.s1.isEmpty():\n self.s2.push(self.s1.pop())\n ele = self.s2.peek()\n self.s2.pop()\n return ele", "def test_dequeue(self):\r\n from numpy import random\r\n queue = Queue(shape=(11, 2, 3, 4), dtype='int16')\r\n for i in range(100):\r\n arr_in = random.randint(4096,size = (2,2,3,4))\r\n queue.enqueue(arr_in)\r\n arr_out = queue.dequeue(2)\r\n self.assertEqual((arr_in==arr_out).all(), True)\r\n self.assertEqual(queue.length,0)\r\n self.assertEqual(queue.global_rear,(i+1)*2)\r\n self.assertEqual(queue.rear,2*(i+1)-int(2*(i+1)/11)*11)\r\n\r\n from numpy import random\r\n queue = Queue(shape=(32, 2, 3, 4), dtype='int16')\r\n for i in range(100):\r\n arr_in = random.randint(4096,size = (1,2,3,4))\r\n queue.enqueue(arr_in)\r\n self.assertEqual(queue.length,1)\r\n arr_out = queue.dequeue(1)\r\n 
self.assertEqual((arr_in==arr_out).all(), True)\r\n self.assertEqual(queue.length,0)\r\n self.assertEqual(queue.global_rear,(i+1)*1)\r\n self.assertEqual(queue.rear,1*(i+1)-int(1*(i+1)/queue.shape[0])*queue.shape[0])", "def enqueue(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def enqueue(self, value): ################# <-\n self.top = Node(value, next=self.top)", "def dequeue(self):\n pass", "def dequeue(self):\n pass", "def testQueue():\n myQueue = Queue()\n myQueue.enqueue(1)\n myQueue.enqueue(2)\n myQueue.enqueue(3)\n print('Enqueue 1,2,3: ',myQueue)\n myQueue.enqueue(4)\n print('Peek: ',myQueue.peek())\n myQueue.dequeue()\n print('Enqueue 4+ dequeue: ',myQueue)\n myQueue.enqueue(5)\n print('Enqueue 5: ',myQueue)\n myQueue.enqueue(6)\n print('Enqueue 6: ',myQueue)\n myQueue.enqueue(7)\n print('Enqueue 7: ',myQueue)\n print('Peek: ',myQueue.peek())\n myQueue.dequeue()\n print('Dequeue: ',myQueue)", "def dequeue(Q):\n # x = Q.pop(0) # default is to pop from end (LIFO stack), param 0 indicates FIFO queue\n x = Q.get_nowait() # default is to pop from end (LIFO stack), param 0 indicates FIFO queue\n if debug: \n print(\"dequeue :\", end=\" \")\n show_queue(Q)\n return(Q, x)", "def test_the_queue_dequeue(the_queue):\n the_queue.enqueue(2)\n assert the_queue.dequeue() == 2", "def enqueue(self, value):\n node = Node(value)\n\n if not self.front:\n\n self.front = node\n self.rear = node\n else:\n self.rear.next = node\n self.rear = self.rear.next", "def dequeue(self): ##################### <-\n \"\"\"Llst -> lst, dequeue, lst -> Llst\"\"\"\n top = self.top\n def linkedLstToList(Llst):\n \"\"\"A list to a single node linked list\"\"\"\n if not Llst: return []\n return [Llst.value] + linkedLstToList(Llst.next)\n def lstToLinkedList(lst):\n \"\"\"A list to a single node linked list\"\"\"\n if not lst: return\n LinkedList = Node(lst[0])\n LinkedList.next = lstToLinkedList(lst[1:])\n return LinkedList\n self.top = lstToLinkedList(linkedLstToList(top)[:-1])\n return linkedLstToList(top)[-1]", "def enqueue(self, x):\n self.s1.push(x)" ]
[ "0.72554654", "0.69427395", "0.6906065", "0.6764085", "0.6579386", "0.65090865", "0.65050536", "0.6447959", "0.64349025", "0.63768375", "0.63478684", "0.6281892", "0.62638295", "0.6238328", "0.6197534", "0.61510164", "0.60696894", "0.6068618", "0.6031984", "0.5993058", "0.5987801", "0.5987801", "0.5980126", "0.5980126", "0.59776115", "0.5950593", "0.59373707", "0.5901261", "0.58850974", "0.58751774" ]
0.69951135
1
Peak on an empty queue returns None.
def test_peak_empty_queue_returns_none():
    queue = Queue()
    assert queue.peak() is None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def peak(self):\n pass", "def detect_peak(data):\n nonlocal last, ascent_dist, ascent_start\n if data > last:\n if ascent_start is None:\n ascent_start = last\n ascent_dist += 1\n else:\n if ascent_dist:\n peak = last\n ascent_dist = 0\n if (peak - ascent_start) > thresh:\n last = data\n ascent_start = None\n return peak\n ascent_start = None\n last = data\n return None", "def test_peak_returns_value_does_not_dequeue():\n queue = Queue('a')\n a = queue.peak()\n assert queue._queue.last_node == 'a'", "def unique_peaks(self):\n return(None)", "def peak(self):\n return self.peak", "def test_find_peaks_nopeak(self):\n noise_amp = 1.0\n num_points = 100\n np.random.seed(181819141)\n test_data = (np.random.rand(num_points) - 0.5)*(2*noise_amp)\n widths = np.arange(10, 50)\n found_locs = find_peaks_cwt(test_data, widths, min_snr=5, noise_perc=30)\n np.testing.assert_equal(len(found_locs), 0)", "def peak(self) -> Tuple[MeasureInput, MeasureResult]:\n assert self._data\n return self._data[0][2]", "def peak(self):\n\n return self._data[0]", "def isPeakAssigned(peak, fully=True):\n\n n = 0\n for peakDim in peak.peakDims:\n if len(peakDim.peakDimContribs) > 0:\n n +=1\n \n if n == len(peak.peakDims):\n return True\n \n elif n > 0:\n if fully:\n return False\n else:\n return True\n \n else:\n return False", "def peak(self) -> Point:\n return self.most_intense_point()", "def locateSinglePeak(self, start, end, list):\n for x in range(start,end):\n currentVal = list[x]\n if currentVal > self.maxFound:\n self.maxFound = currentVal\n diff = self.maxFound - currentVal\n if diff > self.maxDiffFound:\n self.maxDiffFound = diff\n if diff > self.maxFound/2:\n self.dipsBelowHalf += 1", "def test_peak_returns_value():\n queue = Queue('a')\n assert queue.peak() is 'a'", "def peak_idx(self) -> Optional[int]:\n if any(item.energy is None for item in self):\n logger.warning(\"An energy was None - cannot locate peak\")\n return None\n\n peaks = [i for i in range(1, len(self) - 1) if self.is_saddle(i)]\n\n if len(peaks) > 0:\n peak_rel_es = self.rel_energies[np.array(peaks, dtype=int)]\n logger.info(\n f\"Found peaks at {peaks} with relative energies \"\n f\"∆E = {np.round(peak_rel_es, 1)} kcal mol-1\"\n )\n\n # Return the highest energy peak i.e. sorted high -> low\n for peak_idx in sorted(peaks, key=lambda i: -self.energies[i]):\n return peak_idx\n\n return None", "def get_peak(self):\r\n \r\n sensor_1_list = []\r\n\r\n for i in self.return_data:\r\n sensor_1_list.append(i[0])\r\n\r\n sensor_peak = max(sensor_1_list)\r\n \r\n return(sensor_peak)", "def first_peak_detect(beam, start_point):\n logging.debug('running first_peak_detect function')\n for i in range(start_point, len(beam)):\n logging.debug('current value of i is %d', i)\n if beam[i-1] < beam[i] > beam[i+1]:\n logging.debug('value determined to be the center of the values %d, %d, %d', beam[i-1], beam[i], beam[i+1])\n return i\n\n logging.error(\"no peak was found. 
will try working with the length of the beam\")\n return len(beam)", "def get_peakiness(spot_data):\n return spot_data[3] / np.mean((spot_data[5], spot_data[6]))", "def test_peak_returns_correct_value():\n queue = Queue()\n queue.enqueue('a')\n queue.enqueue('b')\n assert queue.peak() is 'a'", "def reduce_peaks(self,peaks,odf_min):\n if len(peaks)==0:\n return -1 \n if odf_min<self.iso_thr*peaks[0]:\n #remove small peaks\n ismallp=np.where(peaks<self.peak_thr*peaks[0])\n if len(ismallp[0])>0:\n l=ismallp[0][0]\n else:\n l=len(peaks)\n else:\n return -1\n return l", "def peak(data, fft_data=None):\n return np.max(np.abs(data))", "def measure_peak(sig, use_inflection=True, return_allinfo=False):\n sig = np.array(sig)\n cr = locate_peak(sig)\n cr_crosszero = np.zeros_like(cr)\n cr_inflection = np.zeros_like(cr)\n\n # cross zero points\n cr_cr1 = -int_sign(sig[1:] * sig[:-1])\n cr_cr2 = -int_sign(sig[:-1] * sig[1:])\n cr_cr1[cr_cr1<0] = 0\n cr_cr2[cr_cr2<0] = 0\n cr_crosszero[1:] = cr_cr1\n cr_crosszero[:-1] += cr_cr2\n cr_crosszero = int_sign(cr_crosszero * sig) * 4\n\n # inflection points\n d2 = second_derivate(sig)\n d2p = locate_peak(d2)\n d2p[np.where( np.abs(d2p) != 1 )] = 0\n d2p[np.where( ((d2p==1) & (sig<0)) | ((d2p==-1) & (sig>0)) )] = 0\n cr_inflection[np.where(d2p==-1)] = 8\n cr_inflection[np.where(d2p==1)] = -8\n \n if use_inflection:\n cr_combine = cr + cr_inflection + cr_crosszero \n else:\n cr_combine = cr + cr_crosszero\n\n oned = False\n if len(np.shape(sig)) == 1:\n oned = True\n sig = sig[:, np.newaxis]\n \n peaks_list = []\n for i in range(np.shape(sig)[1]):\n pvs = np.where(np.abs(cr[:,i]) == 1)[0]\n lims = np.where(np.abs(cr_combine[:,i]) >= 2)[0]\n if len(pvs) == 0 :\n peaks_list.append([])\n continue\n if np.shape(lims)[0] == 0:\n lower_pos = pvs\n upper_pos = pvs\n else:\n lower_arr = (pvs > lims[:, np.newaxis])\n upper_arr = (pvs < lims[:, np.newaxis])\n lower_arr_r = np.flipud(lower_arr)\n upper_pos_i = np.argmax(upper_arr, axis=0)\n upper_pos = lims[(upper_pos_i, )]\n w_upper_none = np.where(upper_arr[-1,:] == False)\n upper_pos[w_upper_none] = pvs[w_upper_none]\n lower_pos_r_i = np.argmax(lower_arr_r, axis=0)\n lower_pos_i = len(lims) - 1 - lower_pos_r_i\n lower_pos = lims[(lower_pos_i, )]\n w_lower_none = np.where(lower_arr[0, :] == False)\n lower_pos[w_lower_none] = 0\n\n peaks = []\n for center, lower, upper in zip(pvs, lower_pos, upper_pos):\n depth = sig[center, i]\n sig_range = sig[lower:upper+1, i]\n sig_range[np.where(int_sign(sig_range) != int_sign(depth))] = 0.0\n volume = np.sum(sig_range)\n peaks.append(Peak(center=center, lower=lower, upper=upper, depth=depth, volume=volume))\n peaks_list.append(peaks)\n if oned:\n peaks_list = peaks_list[0]\n \n if return_allinfo:\n return peaks_list, cr, cr_crosszero, cr_inflection \n else:\n return peaks_list", "def test_peak_statistics(RE):\n x = 'motor'\n y = 'det'\n ps = PeakStats(x, y)\n RE.subscribe(ps)\n RE(scan([det], motor, -5, 5, 100))\n\n fields = [\"x\", \"y\", \"min\", \"max\", \"com\", \"cen\", \"crossings\", \"fwhm\", \"lin_bkg\"]\n for field in fields:\n assert hasattr(ps, field), f\"{field} is not an attribute of ps\"\n\n np.allclose(ps.cen, 0, atol=1e-6)\n np.allclose(ps.com, 0, atol=1e-6)\n fwhm_gauss = 2 * np.sqrt(2 * np.log(2)) # theoretical value with std=1\n assert np.allclose(ps.fwhm, fwhm_gauss, atol=1e-2)", "def find_empty(counts): \n for index,count in enumerate(counts):\n if count == 0:\n return index\n return None", "def peak_PSF(self):\n return self.compute_PSF(np.zeros(self.N_zern))", 
"def peak_finder(thresh=0):\n last = 0 # Track last input value\n ascent_dist = 0 # Distance from last trough.\n ascent_start = None # Last trough height\n\n def detect_peak(data):\n \"\"\" Returns initialized function to detect peaks on live streaming data.\n\n Args:\n data (numeric value): Input data point.\n\n Returns:\n If peak is detected return peak value, else return None\n \"\"\"\n nonlocal last, ascent_dist, ascent_start\n if data > last:\n if ascent_start is None:\n ascent_start = last\n ascent_dist += 1\n else:\n if ascent_dist:\n peak = last\n ascent_dist = 0\n if (peak - ascent_start) > thresh:\n last = data\n ascent_start = None\n return peak\n ascent_start = None\n last = data\n return None\n\n return detect_peak", "def find_peak(list_of_integers):\n\n if list_of_integers is None or len(list_of_integers) == 0:\n return None\n list_of_integers.sort()\n peak = list_of_integers[-1]\n \"\"\"\n for number in list_of_integers:\n if number > peak:\n peak = number\"\"\"\n return peak", "def getPeakValue( self ):\n nCurrentMax = max( self.data.max(), -self.data.min() )\n return float(nCurrentMax) / self.getSampleMaxValue()", "def foundPeak(self, peak):\n\n self.sequence.append({\"type\": \"foundPeak\", \"coord\": peak})", "def find_peak(list_of_integers):\n if list_of_integers == []:\n return None\n\n list_of_integers.sort()\n return list_of_integers[-1]", "def peak(self, lookahead=1):\n # type: (int) -> Optional[T]\n if lookahead > self.lookahead:\n raise Exception(\n 'Cannot peak to {}: beyond buffer lookahead {}'.format(\n lookahead, self.lookahead\n )\n )\n if lookahead > len(self.buffer):\n return None\n index = len(self.buffer) - lookahead\n return self.buffer[index]", "def empty(self):\n return self.q_size.current_value == 0" ]
[ "0.7312167", "0.69030046", "0.6676074", "0.6622437", "0.6607863", "0.6598427", "0.64396054", "0.64075404", "0.62937284", "0.62708354", "0.62581694", "0.6185751", "0.61741555", "0.6110209", "0.60893977", "0.60679144", "0.6043656", "0.60037297", "0.59912175", "0.5986642", "0.5900788", "0.58855975", "0.58821714", "0.5842552", "0.58230346", "0.580748", "0.5792964", "0.5787179", "0.5756359", "0.5744386" ]
0.7288681
1
Size of a new queue is 0.
def test_size_of_new_queue():
    queue = Queue()
    assert queue.size() == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_size_empty(the_queue):\n assert the_queue.size() == 0", "def test_new_queue_is_empty(self):\n queue = Queue_()\n self.assertTrue(queue.empty())\n self.assertEqual(queue.size(), 0)", "def test_size(self):\n queue = Queue()\n self.assertEqual(queue.size(), 0)\n queue.enqueue(1)\n self.assertEqual(queue.size(), 1)", "def make_empty_queue():\n return Queue(0, None, None)", "def empty_queue():\n return Queue()", "def empty_queue():\n return Queue()", "def __init__(self, size):\n self.size = size\n self.queue = []", "def size(self):\r\n return len(self.queue)", "def size(self):\r\n return len(self.queue)", "def test_insertion_of_value_increases_length(empty_queue):\n assert len(empty_queue) == 0\n empty_queue.enqueue(100)\n assert len(empty_queue) == 1", "def qsize(self):\r\n return len(self._queue)", "def test_size_increments_with_enqueue():\n queue = Queue()\n queue.enqueue('val')\n assert queue.size() == 1", "def test_the_queue_size(the_queue):\n the_queue.enqueue(1)\n the_queue.enqueue(2)\n the_queue.enqueue(3)\n assert the_queue.size() == 3", "def ctrlqueue_queue_size(self) -> int:\n return self.dss_obj.CtrlQueueI(ctypes.c_int32(10), ctypes.c_int32(0))", "def test_size_decrements_with_dequeue():\n queue = Queue()\n queue.enqueue('val')\n assert queue.size() == 1\n queue.dequeue()\n assert queue.size() == 0", "def __init__(self, size):\n self.size = size\n self.queue = []\n self.sum = 0", "def __len__(self):\n\n return len(self._queue)", "def __init__(self, queueLength):\r\n self.queueLength = queueLength\r\n self.data = []\r\n return", "def __init__(self, size):\n self.queue = collections.deque(maxlen=size)", "def size(self):\n return len(self.queue)", "def size(self):\n return len(self.queue)", "def size(self):\n return len(self.queue)", "def size(self):\n return len(self.queue)", "def reset_queueing(self):\n self._num_queued = 0", "def __init__(self, size):\n self.queue = collections.deque(maxlen = size)", "def test_for_size_0_when_empty(new_empty_deque):\n assert new_empty_deque.size() == 0", "def size(self):\n return len(self.__queue)", "def qsize(self) -> int:\n return len(self._queue)", "def __post_init__(self) -> None:\n self.gtex += [None]\n self.bm += [None]\n self._q: queue.Queue = queue.Queue(maxsize=self.maxsize)", "def __init__ (self, size: int):\n self.size = size\n self.queue = []\n self.sum = 0" ]
[ "0.749195", "0.7395883", "0.727389", "0.7206155", "0.7155715", "0.7155715", "0.70977354", "0.7025954", "0.7025954", "0.69922", "0.69795096", "0.6951921", "0.6944076", "0.6941982", "0.694079", "0.6927224", "0.692247", "0.69112027", "0.69060826", "0.68897873", "0.68897873", "0.68897873", "0.68897873", "0.688251", "0.68763494", "0.6872756", "0.68678004", "0.6833468", "0.68252563", "0.6770307" ]
0.80333865
0
After an enqueue followed by a dequeue, size is 0.
def test_size_decrements_with_dequeue():
    queue = Queue()
    queue.enqueue('val')
    assert queue.size() == 1
    queue.dequeue()
    assert queue.size() == 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enqueue(self,data): # total: O(n)\n # if pass capacity, double capacity and the array\n while self._size >= self._capacity: #O(n)\n self.doubleCapacity() #O(n)\n if self._size != 0: # O(1)\n self._end = (self._end+1)% self._capacity # O(1)\n self._queue[self._end] = data # O(1)\n self._size += 1 # O(1) ", "def dequeue(self):\n raise NotImplementedError(\"dequeue: You should have implemented this method!\")", "def runQueueEnqueue(self):\n raise NotImplementedError", "def dequeue(self):\n pass", "def dequeue(self):\n pass", "def dequeue(self):\r\n if self.size():\r\n self.queue.pop(0)\r\n else:\r\n raise IndexError(\"Queue is empty.\")", "def dequeue(self):", "def test_size_increments_with_enqueue():\n queue = Queue()\n queue.enqueue('val')\n assert queue.size() == 1", "def enqueue(self, item):\n # double size of array if necessary and recopy to front of array\n if self._N == len(self._q):\n self._resize(2*len(self._q)) # double size of array if necessary\n self._q[self._last] = item # add item\n self._last += 1\n if self._last == len(self._q):\n self._last = 0 # wrap-around\n self._N += 1", "def enqueue(self, val):\n if self.size+1 == self.capacity:\n self.grow() # double the array size\n #avail = (self.head + self.size) % len(self.data)\n self.data[self.tail] = val\n self.size += 1\n self.tail = (self.tail + 1) % self.capacity\n return None", "def test_dequeue():\n from parenthetics import Queue\n q = Queue()\n q.enqueue(0)\n assert q.dequeue() == 0", "def test_insertion_of_value_increases_length(empty_queue):\n assert len(empty_queue) == 0\n empty_queue.enqueue(100)\n assert len(empty_queue) == 1", "def enqueue(self, value):\n if len(self.data) == self.size:\n \"\"\"Current queue list:\"\"\"\n self.print_queue()\n raise Exception(\"Queue size limit reached maximum\")\n\n self.data.append(value)", "def test_dequeue_empty(self):\n \n r = self.store.dequeue('/queue/nonexist')\n assert r is None\n \n assert self.store.has_frames('/queue/nonexist') == False\n assert self.store.size('/queue/nonexist') == 0", "def test_enqueue(self):\n queue = Queue()\n self.assertEqual(queue.size(), 0)\n queue.enqueue(1)\n queue.enqueue(2)\n queue.enqueue(3)\n self.assertEqual(queue.size(), 3)", "def enqueue(queue, item):\n new_node = Node(item, None)\n if empty_queue(queue):\n queue.front = new_node\n queue.back = new_node\n else:\n queue.back.next = new_node\n queue.back = new_node\n queue.size = queue.size + 1", "def enqueue(Q, x):\n # Q.append(x)\n Q.put_nowait(x)\n if debug: \n print(\"enqueue\", x, \":\", end=\" \")\n show_queue(Q)\n return Q", "def main():\r\n print(\"Enqueuing\")\r\n q = Queue()\r\n q.enqueue(5)\r\n q.enqueue(6)\r\n q.enqueue(7)\r\n print(q)\r\n print(\"Peek:\", q.peek())\r\n print(\"\\nDequeuing...\")\r\n q.dequeue()\r\n print(q)\r\n print(\"Size:\", q.size())\r\n print(\"\\nDequeuing...\")\r\n q.dequeue()\r\n q.dequeue()\r\n print(q)\r\n print(\"Peek:\", q.peek())\r\n print(\"Size:\", q.size())", "def enqueue(self, e):\n newest = self._Node(e, None) # node will be new tail node\n if self.is_empty():\n self._head = newest # special case: previously empty\n else:\n self._tail._next = newest\n self._tail = newest # update reference to tail node\n self._size += 1", "def enqueue(self, elem):\r\n new_node = self._Node(elem, None)\r\n if self.is_empty():\r\n self._head = new_node\r\n else:\r\n self._tail._next = new_node\r\n self._tail = new_node\r\n self._size += 1", "def test_size(self):\n queue = Queue()\n self.assertEqual(queue.size(), 0)\n queue.enqueue(1)\n 
self.assertEqual(queue.size(), 1)", "def test_dequeue(self):\n queue = Queue()\n self.assertEqual(queue.dequeue(), None)\n queue.enqueue(1)\n queue.enqueue(2)\n queue.enqueue(3)\n self.assertEqual(queue.dequeue(), 1)\n self.assertEqual(queue.size(), 2)", "def testQueue():\n myQueue = Queue()\n myQueue.enqueue(1)\n myQueue.enqueue(2)\n myQueue.enqueue(3)\n print('Enqueue 1,2,3: ',myQueue)\n myQueue.enqueue(4)\n print('Peek: ',myQueue.peek())\n myQueue.dequeue()\n print('Enqueue 4+ dequeue: ',myQueue)\n myQueue.enqueue(5)\n print('Enqueue 5: ',myQueue)\n myQueue.enqueue(6)\n print('Enqueue 6: ',myQueue)\n myQueue.enqueue(7)\n print('Enqueue 7: ',myQueue)\n print('Peek: ',myQueue.peek())\n myQueue.dequeue()\n print('Dequeue: ',myQueue)", "def test_the_queue_dequeue(the_queue):\n the_queue.enqueue(2)\n assert the_queue.dequeue() == 2", "def enqueue(self, e):\n\t\tnewest = self._Node(e, None)\n\t\tif self.is_empty():\n\t\t\tself._head = newest\n\t\telse:\n\t\t\tself._tail._next = newest\n\t\tself._tail = newest\n\t\tself._size += 1", "def dequeue(self):\n if self.isEmpty():\n raise Exception(\"Queue underflow\")\n item = self._q[self._first]\n self._q[self._first] = None # to avoid loitering\n self._N -= 1\n self._first += 1\n if self._first == len(self._q):\n self._first = 0 # wrap-around\n # shrink size of array if necessary\n if self._N > 0 and self._N == len(self._q)/4:\n self._resize(len(self._q)/2)\n return item", "def dequeue(self):\r\n raise QueueException(\"Unimplemented Abstract Queue Function\")", "def enqueue(self,value):\n pass", "def enqueue(self, e):\n new_node = self._Node(e, None)\n if self.is_empty():\n self._head = new_node\n else:\n self._tail._next = new_node\n self._tail = new_node\n self._size += 1", "def enQueue(self, data_):\n\t\tif self.isFull():\n\t\t\tprint(\"Can't insert the data in the queue: Queue Full\")\n\t\t\texit(1)\n\n\t\t## This enqueuing logic using the concept of circular\n\t\t## movement to avoid the overhead of the transfer\n\n\t\tself.rear = (self.rear + 1) % self.capacity\n\t\tself.queue[self.rear] = data_\n\t\tself.size = self.size + 1" ]
[ "0.7157864", "0.703711", "0.70193213", "0.7013603", "0.7013603", "0.6990167", "0.6934225", "0.686834", "0.6846222", "0.683892", "0.68176216", "0.6813882", "0.680555", "0.67895997", "0.67571425", "0.6747387", "0.67188746", "0.66940105", "0.6679827", "0.66734767", "0.664942", "0.66480255", "0.66023695", "0.6591053", "0.65860456", "0.6552864", "0.6526645", "0.651582", "0.65090156", "0.6497045" ]
0.7311963
0
Creates a 2d or 3d axis.
def _create_axis(axis_type, variation="Linear", title=None):
    if axis_type not in ["3d", "2d"]:
        return None
    default_style = {
        "background": "rgb(230, 230, 230)",
        "gridcolor": "rgb(255, 255, 255)",
        "zerolinecolor": "rgb(255, 255, 255)",
    }
    if axis_type == "3d":
        return {
            "showbackground": True,
            "backgroundcolor": default_style["background"],
            "gridcolor": default_style["gridcolor"],
            "title": title,
            "type": variation,
            "zerolinecolor": default_style["zerolinecolor"],
        }
    if axis_type == "2d":
        return {
            "xgap": 10,
            "ygap": 10,
            "backgroundcolor": default_style["background"],
            "gridcolor": default_style["gridcolor"],
            "title": title,
            "zerolinecolor": default_style["zerolinecolor"],
            "color": "#444",
        }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_open3d_axis(axis_vector=None, origin=None, scale=1.0):\n if origin is None:\n origin = np.zeros(3)\n if axis_vector is None:\n axis_vector = np.array([0, 0, 1], dtype=np.float)\n axis_vector = axis_vector * scale\n axis_point = origin + axis_vector\n points = np.stack([origin, axis_point], axis=0)\n line = np.array([[0, 1]], dtype=np.long)\n axis = o3d.geometry.LineSet()\n axis.points = o3d.utility.Vector3dVector(points)\n axis.lines = o3d.utility.Vector2iVector(line)\n axis.paint_uniform_color(get_color(\"red\"))\n return axis", "def _generate_axes_(self):\n\n return AxesTuple(self._axis(i) for i in range(self.ndim))", "def setAxisLengths3D(x=2.,y=2.,z=2.):\n dislin.axis3d(x,y,z)", "def axis3D(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep,\\\n zlow,zhigh,zfirst,zstep):\n dislin.graf3d(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep,\\\n zlow,zhigh,zfirst,zstep)", "def _create_axis(\n self,\n range_terms: Sequence[float],\n axis_config: dict,\n length: float,\n ) -> NumberLine:\n axis_config[\"length\"] = length\n axis = NumberLine(range_terms, **axis_config)\n\n # without the call to _origin_shift, graph does not exist when min > 0 or max < 0\n # shifts the axis so that 0 is centered\n axis.shift(-axis.number_to_point(self._origin_shift([axis.x_min, axis.x_max])))\n return axis", "def create_plot_plane_2d(axis=(1.0, 1.0), origin=(0.0,0.0), size=(2.0,2.0)):\n ft = ImageFont.truetype (FONT_RESOURCES_DIR+\"/courier.ttf\", 12)\n gl_font = GlFont('', ft)\n gl_font.color = [0.0, 0, 0, 1.0]\n gl_plot = PlotPlane2d(gl_font)\n gl_plot.i_axis = axis\n gl_plot.i_origin = origin\n gl_plot.o_wh = size\n gl_plot.i_axis_units = (axis[0]/10, axis[1]/10)\n\n gl_plot.prepare()\n return gl_plot", "def addAxis(self, tag, name, minimum, maximum, default, warpMap=None):\n axisElement = ET.Element(\"axis\")\n axisElement.attrib['name'] = name\n axisElement.attrib['tag'] = tag\n axisElement.attrib['minimum'] = str(minimum)\n axisElement.attrib['maximum'] = str(maximum)\n axisElement.attrib['default'] = str(default)\n if warpMap is not None:\n for a, b in warpMap:\n warpPt = ET.Element(\"map\")\n warpPt.attrib['input'] = str(a)\n warpPt.attrib['output'] = str(b)\n axisElement.append(warpPt)\n self.root.findall('.axes')[0].append(axisElement)", "def __init__(self, axes=()):\n self._axes = []\n self._dimension = 0\n for axis in axes:\n self.add_axis(axis)", "def generate_axis(self):\n fg = plt.figure(figsize=(15, 15))\n ax = fg.add_axes([0.1, 0.1, 0.8, 0.8], projection='polar')\n norm = mpc.Normalize(0, 2*np.pi)\n t = np.linspace(0, 2*np.pi, 700) # 700 seems to be a sweet spot for no obvious lines, makes a smooth wheel\n r = np.linspace(0, 1, 2)\n rg, tg = np.meshgrid(r, t)\n c = tg\n ax.pcolormesh(t, r, c.T, norm=norm, cmap=cm.get_cmap('hsv', 2056))\n ax.set_yticklabels([])\n ax.set_xticklabels([])\n ax.spines['polar'].set_visible(True)\n self.ax = ax", "def draw_coordinate_axes(self):\n scale = 5.0\n glPushMatrix()\n\n glScalef(scale, scale, scale)\n origin = [0, 0, 0]\n\n xaxis = [1, 0, 0]\n yaxis = [0, 1, 0]\n zaxis = [0, 0, 1]\n\n glLineWidth(3.0)\n\n glBegin(GL_LINES)\n glColor3f(1, 0, 0)\n glVertex3fv(origin)\n glVertex3fv(xaxis)\n glColor3f(0, 1, 0)\n glVertex3fv(origin)\n glVertex3fv(yaxis)\n glColor3f(0, 0, 1)\n glVertex3fv(origin)\n glVertex3fv(zaxis)\n glEnd()\n glPopMatrix()", "def drawAxis(image, cameraMatrix, distCoeffs, rvec, tvec, length):\n pass", "def addExtraAxis(slab,newaxis=None,axis=0,verbose=False):\n\n import cdms2 as cdms\n import MV2 as MV\n\n if newaxis is None:\n 
newaxis=cdms.createAxis([1,])\n newaxis.units=''\n\n # add new axis to axis list of input <slab>\n axislist=slab.getAxisList()\n axislist.insert(axis,newaxis)\n\n #----------------Reshape----------------\n shape=list(slab.shape)\n shape.insert(axis,len(newaxis))\n slab2=MV.reshape(slab,shape)\n\n #------------Create variable------------\n att_dict=attribute_obj2dict(slab)\n slab2=cdms.createVariable(slab2,axes=axislist,attributes=att_dict,\\\n typecode='f')\n slab2.id=slab.id\n\n if verbose:\n print('\\n# <addExtraAxis>: Originial variable shape:',slab.shape)\n print('# <addExtraAxis>: New variable shape:',slab2.shape)\n\n return slab2", "def _handle_setup_axis(self, axis_args):\n axis_name = axis_args['name']\n axes_dict = self.server.axes\n\n if axis_name not in [name for name, _ in axes_dict.items()]:\n print \"Adding a new axis:\", axis_name\n axis_count = len(axes_dict)\n newaxis = self.server.figure.add_subplot(axis_count+1, 1, axis_count+1)\n axes_dict[axis_name] = newaxis\n axes_dict[axis_name].grid(True)\n axes_dict[axis_name].set_xlabel(axis_args['x_label'])\n axes_dict[axis_name].set_ylabel(axis_args['y_label'])\n # TODO: support *.set_title(\"Title\")\n if FLAGS.logy:\n axes_dict[axis_name].set_yscale('log', nonposy='clip')\n\n if axis_count != 0:\n # Resize other axes if the above wasn't the first.\n axis_count = len(axes_dict)\n for row,(name, _) in enumerate(axes_dict.items(), 1):\n print name, axis_count, row\n axes_dict[name].change_geometry(axis_count, 1, row)", "def draw_coordinate_axes(self):\n glDisable(GL_LIGHTING)\n scale = 1.8\n glPushMatrix()\n\n glScalef(scale, scale, scale)\n origin = [0, 0, 0]\n\n xaxis = [1, 0, 0]\n yaxis = [0, 1, 0]\n zaxis = [0, 0, 1]\n\n glLineWidth(3.0)\n\n glBegin(GL_LINES)\n glColor3f(1, 0, 0)\n glVertex3fv(origin)\n glVertex3fv(xaxis)\n glColor3f(0, 1, 0)\n glVertex3fv(origin)\n glVertex3fv(yaxis)\n glColor3f(0, 0, 1)\n glVertex3fv(origin)\n glVertex3fv(zaxis)\n glEnd()\n glPopMatrix()\n glEnable(GL_LIGHTING)", "def _make_axes(self):\n ax_idx = self.atlas.space.axes_order.index(\"frontal\")\n\n # make acustom axes dict\n atlas_shape = np.array(self.atlas.metadata[\"shape\"]) * np.array(\n self.atlas.metadata[\"resolution\"]\n )\n z_range = np.array([-atlas_shape[2], 0])\n z_ticks = [\n (-v, str(np.abs(v).astype(np.int32)))\n for v in np.linspace(\n 0,\n atlas_shape[ax_idx],\n 10,\n )\n ]\n\n if self.atlas.atlas_name == \"allen_human_500um\":\n z_range = None\n z_ticks = None\n logger.debug(\n \"RENDER: manually forcing axes size for human atlas, atlas needs fixing\"\n )\n\n # make custom axes dict\n axes = dict(\n axesLineWidth=3,\n tipSize=0,\n xtitle=\"AP (μm)\",\n ytitle=\"DV (μm)\",\n ztitle=\"LR (μm)\",\n textScale=0.8,\n xTitleRotation=180,\n zrange=z_range,\n zValuesAndLabels=z_ticks,\n xyGrid=False,\n yzGrid=False,\n zxGrid=False,\n xUseBounds=True,\n yUseBounds=True,\n zUseBounds=True,\n xLabelRotation=180,\n yLabelRotation=180,\n zLabelRotation=90,\n )\n\n return axes", "def axis2D(xlow,xhigh,xfirst,xstep,ylow,yhigh,yfirst,ystep):\n dislin.graf(xlow,xhigh,xfirst,xstep,\\\n ylow,yhigh,yfirst,ystep)", "def _gen_axes_patch(self):\n\n return Polygon([[0,0], [0.5,np.sqrt(3)/2], [1,0]], closed=True)", "def addAxes(self):\n numDims = len(self.relation.fieldNames) - 1\n angle = 360 / numDims\n axisDomains = self.relation.axisDomains\n for i in range(numDims):\n axis = PlotAxis(self)\n self.scene().addItem(axis)\n if self.axisAngles and i < len(self.axisAngles):\n axis.setRotation(self.axisAngles[i])\n else:\n axis.setRotation(angle * 
i)\n self.axes.append(axis)\n\n domain = axisDomains[i]\n text = PlotAxisLabel(\"{}\\n[{:.2f},{:.2f}]\".format(self.relation.fieldNames[i], domain[0], domain[1]))\n text.setFont(self.labelFont)\n self.axisLabels.append(text)\n text.setParentItem(axis)", "def get_2d_axes(xmin, xmax, ymin, ymax,\n axis_style='r-style',\n xscale=1.0, xbase=0.0, yscale=1.0, ybase=0.0,\n xticks=[], yticks=[],\n xtick_labels=None, ytick_labels=None,\n tick_font='normalsize', tick_size='semithick',\n label_font='Large', xlabel='x', ylabel='y',\n xlabel_offset=0.1, ylabel_offset=0.15,\n axis_size='thick', axis_color='gray',\n tick_frac=0.05, ):\n\n # Find the tick size\n tick_dim = min(tick_frac*(ymax - ymin)*yscale,\n tick_frac*(xmax - xmin)*xscale)\n\n # Draw the axes\n s = ''\n if axis_style == 'r-style':\n if len(xticks) >= 2:\n s += '\\\\draw[%s, color=%s] (%f, %f) -- (%f,%f) -- (%f,%f) -- (%f, %f);'%(\n axis_size, axis_color,\n xscale*(xticks[0] - xbase),\n yscale*(ymin - ybase) - tick_dim,\n xscale*(xticks[0] - xbase), yscale*(ymin - ybase),\n xscale*(xticks[-1] - xbase), yscale*(ymin - ybase),\n xscale*(xticks[-1] - xbase), yscale*(ymin - ybase) - tick_dim)\n if len(yticks) >= 2:\n s += '\\\\draw[%s, color=%s] (%f, %f) -- (%f,%f) -- (%f,%f) -- (%f, %f);'%(\n axis_size, axis_color,\n xscale*(xmin - xbase) - tick_dim, yscale*(yticks[0] - ybase),\n xscale*(xmin - xbase), yscale*(yticks[0] - ybase),\n xscale*(xmin - xbase), yscale*(yticks[-1] - ybase),\n xscale*(xmin - xbase) - tick_dim, yscale*(yticks[-1] - ybase))\n else:\n s += '\\\\draw[%s, color=%s] (%f,%f) -- (%f,%f);'%(\n axis_size, axis_color,\n xscale*(xmin - xbase), yscale*(ymin - ybase),\n xscale*(xmax - xbase), yscale*(ymin - ybase))\n s += '\\\\draw[%s, color=%s] (%f,%f) -- (%f,%f);'%(\n axis_size, axis_color,\n xscale*(xmin - xbase), yscale*(ymin - ybase),\n xscale*(xmin - xbase), yscale*(ymax - ybase))\n\n # Draw the x-label\n if xlabel is not None:\n s += '\\\\draw[font=\\\\%s] (%f, %f) node[below] {%s};'%(\n label_font, 0.5*xscale*(xmin + xmax - xbase),\n yscale*(ymin - xlabel_offset*(ymax - ymin) - ybase),\n xlabel)\n\n # Draw the y-label\n if ylabel is not None:\n s += '\\\\draw[font=\\\\%s] (%f, %f) node[rotate=90] {%s};'%(\n label_font, xscale*(xmin - ylabel_offset*(xmax - xmin) - xbase),\n 0.5*yscale*(ymin + ymax - ybase),\n ylabel)\n\n # Draw the ticks on the graph\n if axis_style == 'r-style':\n if xtick_labels is None:\n for i in range(len(xticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[below] {%g};\\n'%(\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase),\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase) - tick_dim,\n xticks[i])\n else:\n for i in range(len(xticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[below] {%s};\\n'%(\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase),\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase) - tick_dim,\n xtick_labels[i])\n\n # Draw the ticks on the graph\n if ytick_labels is None:\n for i in range(len(yticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[left] {%g};\\n'%(\n xscale*(xmin - xbase), yscale*(yticks[i] - ybase),\n xscale*(xmin - xbase) - tick_dim, yscale*(yticks[i] - ybase),\n yticks[i])\n else:\n for i in range(len(yticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, 
axis_color)\n s += '(%f, %f) -- (%f, %f) node[left] {%s};\\n'%(\n xscale*(xmin - xbase), yscale*(yticks[i] - ybase),\n xscale*(xmin - xbase) - tick_dim, yscale*(yticks[i] - ybase),\n ytick_labels[i])\n else:\n if xtick_labels is None:\n for i in range(len(xticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[below] {%g};\\n'%(\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase) + tick_dim,\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase),\n xticks[i])\n else:\n for i in range(len(xticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[below] {%s};\\n'%(\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase) + tick_dim,\n xscale*(xticks[i] - xbase), yscale*(ymin - ybase),\n xtick_labels[i])\n\n # Draw the ticks on the graph\n if ytick_labels is None:\n for i in range(len(yticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[left] {%g};\\n'%(\n xscale*(xmin - xbase) + tick_dim, yscale*(yticks[i] - ybase),\n xscale*(xmin - xbase), yscale*(yticks[i] - ybase),\n yticks[i])\n else:\n for i in range(len(yticks)):\n s += '\\\\draw[font=\\\\%s, %s, color=%s, text=black] '%(\n tick_font, tick_size, axis_color)\n s += '(%f, %f) -- (%f, %f) node[left] {%s};\\n'%(\n xscale*(xmin - xbase) + tick_dim, yscale*(yticks[i] - ybase),\n xscale*(xmin - xbase), yscale*(yticks[i] - ybase),\n ytick_labels[i])\n\n return s", "def set_equal_3d_axis(ax, x_lims, y_lims, z_lims):\n x_lims = np.asarray(x_lims)\n y_lims = np.asarray(y_lims)\n z_lims = np.asarray(z_lims)\n # compute max required range\n max_range = np.array([x_lims.max() - x_lims.min(),\n y_lims.max() - y_lims.min(),\n z_lims.max() - z_lims.min()]).max() / 2.0\n # compute mid-point along each axis\n mid_x = (x_lims.max() + x_lims.min()) * 0.5\n mid_y = (y_lims.max() + y_lims.min()) * 0.5\n mid_z = (z_lims.max() + z_lims.min()) * 0.5\n\n # set limits to axis\n ax.set_xlim(mid_x - max_range, mid_x + max_range)\n ax.set_ylim(mid_y - max_range, mid_y + max_range)\n ax.set_zlim(mid_z - max_range, mid_z + max_range)", "def new_axes(self, name):\n\n return self.figure.add_axes([0.05, 0.05, 0.9, 0.9], label=name)", "def __init__(self, axis=-1):\n self.axis = axis", "def setupVariableAxes(self):\n if self.var is None:\n return\n \n if (self.axisList is None):\n self.axisList = self.var.getAxisList()\n self.axisOrder = range(len(self.axisList))\n\n self.clear() \n self.setAxesNames()\n \n # Iterate through the variables axes & init each axis widget\n axisIndex = 0\n for axis, axisName in zip(self.axisList, self.axesNames):\n # Create the axis widget\n axisWidget = QAxis(axis, axisName, axisIndex, self)\n axisWidget.setAxisButtonText(axisName)\n self.axisWidgets.append(axisWidget)\n\n # Setup the layout for each axis\n row = self.gridLayout.rowCount()\n self.gridLayout.addWidget(axisWidget.getAxisButton(), row, 0)\n self.gridLayout.addWidget(axisWidget, row, 1) \n self.gridLayout.addWidget(axisWidget.getAxisOperationsButton(), row, 2)\n\n # Create separator line between each axis widget\n vline = QtGui.QFrame()\n vline.setFrameStyle(QtGui.QFrame.HLine | QtGui.QFrame.Sunken)\n self.gridLayout.addWidget(vline, row+1, 0, 1,\n self.gridLayout.columnCount())\n\n axisIndex += 1\n\n self.gridLayout.setRowStretch(self.gridLayout.rowCount(), 1)", "def axis(self):\n return np.array([np.cos(self._angle), 
np.sin(self._angle)])", "def zaxis ( self ) :\n return self.__zaxis", "def _make_axes(self, hdr, quiet=False, novec=False, vonly=False, simple=False):\n\n # PULL THE IMAGE/CUBE SIZES FROM THE HEADER\n naxis = int(hdr['NAXIS'])\n naxis1 = int(hdr['NAXIS1'])\n naxis2 = int(hdr['NAXIS2'])\n if naxis > 2:\n naxis3 = hdr['NAXIS3']\n\n ## EXTRACT FITS ASTROMETRY STRUCTURE\n ww = astropy.wcs.WCS(hdr)\n\n #IF DATASET IS A CUBE THEN WE MAKE THE THIRD AXIS IN THE SIMPLEST WAY POSSIBLE (NO COMPLICATED ASTROMETRY WORRIES FOR FREQUENCY INFORMATION)\n if naxis > 3:\n #GRAB THE RELEVANT INFORMATION FROM THE ASTROMETRY HEADER\n cd = ww.wcs.cd\n crpix = ww.wcs.crpix\n cdelt = ww.wcs.crelt\n crval = ww.wcs.crval\n\n if naxis > 2:\n # MAKE THE VELOCITY AXIS (WILL BE M/S)\n v = np.arange(naxis3) * 1.0\n vdif = v - (hdr['CRPIX3']-1)\n vaxis = (vdif * hdr['CDELT3'] + hdr['CRVAL3'])\n\n # CUT OUT HERE IF WE ONLY WANT VELOCITY INFO\n if vonly:\n return vaxis\n\n #IF 'SIMPLE' IS CALLED THEN DO THE REALLY TRIVIAL THING:\n if simple:\n print('Using simple aproach to make axes.')\n print('BE SURE THIS IS WHAT YOU WANT! It probably is not.')\n raxis = np.arange(naxis1) * 1.0\n rdif = raxis - (hdr['CRPIX1'] - 1)\n raxis = (rdif * hdr['CDELT1'] + hdr['CRVAL1'])\n\n daxis = np.arange(naxis2) * 1.0\n ddif = daxis - (hdr['CRPIX1'] - 1)\n daxis = (ddif * hdr['CDELT1'] + hdr['CRVAL1'])\n\n rimg = raxis # (fltarr(naxis2) + 1.)\n dimg = (np.asarray(naxis1) + 1.) # daxis\n return rimg, dimg\n\n # OBNOXIOUS SFL/GLS THING\n glspos = ww.wcs.ctype[0].find('GLS')\n if glspos != -1:\n ctstr = ww.wcs.ctype[0]\n newtype = 'SFL'\n ctstr.replace('GLS', 'SFL')\n ww.wcs.ctype[0] = ctstr\n print('Replaced GLS with SFL; CTYPE1 now =' + ww.wcs.ctype[0])\n\n glspos = ww.wcs.ctype[1].find('GLS')\n if glspos != -1:\n ctstr = ww.wcs.ctype[1]\n newtype = 'SFL'\n ctstr.replace('GLS', 'SFL')\n ww.wcs.ctype[1] = ctstr\n print('Replaced GLS with SFL; CTYPE2 now = ' + ww.wcs.ctype[1])\n\n # CALL 'xy2ad' TO FIND THE RA AND DEC FOR EVERY POINT IN THE IMAGE\n if novec:\n rimg = np.zeros((naxis1, naxis2))\n dimg = np.zeros((naxis1, naxis2))\n for i in range(naxis1):\n j = np.asarray([0 for i in xrange(naxis2)])\n\n pixcrd = np.array([[zip(float(i), float(j))]], numpy.float_)\n ra, dec = ww.all_pix2world(pixcrd, 1)\n\n rimg[i, :] = ra\n dimg[i, :] = dec\n else:\n ximg = np.arange(naxis1) * 1.0\n yimg = np.arange(naxis1) * 1.0\n X, Y = np.meshgrid(ximg, yimg, indexing='xy')\n ss = X.shape\n xx, yy = X.flatten(), Y.flatten()\n\n pixcrd = np.array(zip(xx, yy), np.float_)\n img_new = ww.all_pix2world(pixcrd, 0)\n rimg_new, dimg_new = img_new[:,0], img_new[:,1]\n\n rimg = rimg_new.reshape(ss)\n dimg = dimg_new.reshape(ss)\n\n # GET AXES FROM THE IMAGES. 
USE THE CENTRAL COLUMN AND CENTRAL ROW\n raxis = np.squeeze(rimg[:, naxis2/2])\n daxis = np.squeeze(dimg[naxis1/2, :])\n\n return rimg, dimg", "def axisinfo(unit, axis):\n if isinstance(unit, tuple):\n unit = unit[0]\n unit_obj = unit if isinstance(unit, Unit) else Unit(unit)\n name = unyt_arrayConverter._axisnames.get(axis, \"\")\n if unit_obj.is_dimensionless:\n label = name\n else:\n name += \" \"\n unit_str = unit_obj.latex_representation()\n if unyt_arrayConverter._labelstyle == \"[]\":\n label = name + \"$\\\\left[\" + unit_str + \"\\\\right]$\"\n elif unyt_arrayConverter._labelstyle == \"/\":\n axsym = \"$q_{\\\\rm\" + axis.axis_name + \"}$\"\n name = axsym if name == \" \" else name\n if \"/\" in unit_str:\n label = name + \"$\\\\;/\\\\;\\\\left(\" + unit_str + \"\\\\right)$\"\n else:\n label = name + \"$\\\\;/\\\\;\" + unit_str + \"$\"\n else:\n label = name + \"$\\\\left(\" + unit_str + \"\\\\right)$\"\n return AxisInfo(label=label.strip())", "def _add_axes(self, n):\n height = (self.top - self.bottom) / float(self.get_n())\n height = min(height, self.maxheight)\n width = self.right - self.left\n ax = self.figure.add_axes([self.left, self.bottom + (n - 1) * height, width, height])\n return ax", "def axes(*x: Iterable[int]):\n return [_ti_core.Axis(i) for i in x]", "def _appendAxisDefinition(self, axis):\n length = len(axis)\n\n self.na_dict[\"NX\"].append(length)\n self.na_dict[\"XNAME\"].append(xarray_utils.getBestName(axis))\n\n # If only one item in axis values\n if length < 2:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist()) \n return\n\n incr = xarray_utils.get_interval(axis, 0, 1)\n\n for i in range(1, length):\n if (axis[i] - axis[i - 1]) != incr:\n self.na_dict[\"DX\"].append(0)\n self.na_dict[\"NXDEF\"].append(length)\n self.na_dict[\"X\"].append(axis.data.tolist())\n break\n\n else: # If did not break out of the loop\n max_length = length\n if length > 3: \n max_length = 3\n\n self.na_dict[\"DX\"].append(incr)\n self.na_dict[\"NXDEF\"].append(max_length)\n self.na_dict[\"X\"].append(axis[:max_length])" ]
[ "0.6377895", "0.611665", "0.59663534", "0.5831629", "0.57575595", "0.5707427", "0.5611563", "0.55929464", "0.5588371", "0.5561071", "0.55527145", "0.55161554", "0.54822254", "0.54538625", "0.5445956", "0.54423445", "0.5383929", "0.53753966", "0.53407836", "0.531003", "0.5309422", "0.5306584", "0.5300296", "0.52657706", "0.5258121", "0.5241702", "0.52387166", "0.5230719", "0.52192175", "0.52084494" ]
0.67882353
0
This method is called automatically when Cinema 4D creates the Layout (display) of the Dialog.
def CreateLayout(self):
    # Defines the title of the Dialog
    self.SetTitle("A Custom Dialog with a Top Menu")

    # Flushes all the already existing menu to create our one. The content will be on the left.
    self.MenuFlushAll()

    # Creates a Sub menu begin to insert new menu entry
    self.MenuSubBegin("Left Menu")

    # Adds a string with a given ID, so it will trigger a call to Command once clicked
    self.MenuAddString(self.ID_LEFT_MENU_FIRST_ITEM, "Close")

    # Finalizes the Sub Menu
    self.MenuSubEnd()

    # Finalizes the menu
    self.MenuFinished()

    # Creates a Group in the Menu. The content will be on the right
    if self.GroupBeginInMenuLine():
        # Creates a BitmapButtonCustomGui with the find icon
        settings = c4d.BaseContainer()
        settings[c4d.BITMAPBUTTON_BUTTON] = True
        settings[c4d.BITMAPBUTTON_BORDER] = False
        settings[c4d.BITMAPBUTTON_TOGGLE] = True
        settings[c4d.BITMAPBUTTON_ICONID1] = c4d.RESOURCEIMAGE_SCENEBROWSER_FIND2
        settings[c4d.BITMAPBUTTON_ICONID2] = c4d.RESOURCEIMAGE_SCENEBROWSER_FIND1
        self.displayContentButtonDlg = self.AddCustomGui(self.ID_RIGHT_MENU_SHOW_CONTENT, c4d.CUSTOMGUI_BITMAPBUTTON, "", c4d.BFH_CENTER | c4d.BFV_CENTER, 0, 0, settings)
        self.GroupEnd()

    # Creates a group that will contain the content that will be hidden when the BitmapButton is pressed. It's
    # important to have a parent group to the group that needs to be hidden since you need to redraw this parent
    # group after the visibility definition.
    if self.GroupBegin(self.ID_MAIN_GROUP, c4d.BFH_LEFT | c4d.BFV_CENTER):
        # The group that will be hidden
        if self.GroupBegin(self.ID_HIDDEN_GROUP, c4d.BFH_LEFT | c4d.BFV_CENTER):
            # Adds the content you want to toggle
            self.AddStaticText(0, c4d.BFH_LEFT | c4d.BFV_CENTER, name="test")
            self.GroupEnd()
        self.GroupEnd()

    # Adds two buttons, Ok and Cancel
    self.AddDlgGroup(c4d.DLG_OK | c4d.DLG_CANCEL)
    return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_layout( self ):", "def _init_ui(self):\n\n hlayout = QtWidgets.QHBoxLayout()\n\n label = QtWidgets.QLabel('Kies een normtraject:')\n\n hlayout.addWidget(label)\n\n self.section_combobox = QtWidgets.QComboBox()\n self.section_combobox.setFixedWidth(60)\n self.section_ids = sorted([''] + io.geometry.import_section_ids(self.datadir))\n self.section_combobox.addItems(self.section_ids)\n\n hlayout.addWidget(self.section_combobox)\n\n self.add_button = QtWidgets.QPushButton('Toevoegen', clicked=self._add_flooddefence)\n\n hlayout.addWidget(self.add_button)\n\n vlayout = QtWidgets.QVBoxLayout()\n vlayout.addLayout(hlayout)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n self.close_button = QtWidgets.QPushButton('Sluiten', clicked=self.close)\n vlayout.addWidget(self.close_button, 0, QtCore.Qt.AlignRight)\n\n self.setLayout(vlayout)\n\n self.setWindowTitle(\"HB Havens: normtrajecten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)", "def _do_layout(self):\n return", "def _init_ui(self):\n hlayout = QtWidgets.QHBoxLayout()\n\n hlayout.addWidget(QtWidgets.QLabel('Kies een normtraject:'))\n\n self.section_combobox = QtWidgets.QComboBox()\n self.section_combobox.setFixedWidth(60)\n self._update_combobox()\n\n hlayout.addWidget(self.section_combobox)\n\n self.remove_button = QtWidgets.QPushButton('Verwijderen', clicked=self._del_flooddefence)\n hlayout.addWidget(self.remove_button)\n\n vlayout = QtWidgets.QVBoxLayout()\n vlayout.addLayout(hlayout)\n\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n self.close_button = QtWidgets.QPushButton('Sluiten', clicked=self.close)\n vlayout.addWidget(self.close_button, 0, QtCore.Qt.AlignRight)\n\n self.setLayout(vlayout)\n\n self.setWindowTitle(\"HB Havens: normtrajecten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)", "def __createLayout(self):\r\n self.__createCanvas()\r\n self.__createButton()\r\n self.__createInputFunction()\r\n self.__createLimits()\r\n self.__styleLayout()", "def _setup_ui(self):\n self.resize(750, 180)\n self.vertical_layout = QtWidgets.QVBoxLayout(self)\n\n # Dialog Label\n self.dialog_label = QtWidgets.QLabel(self)\n self.dialog_label.setText(\"%s Filename Template\" % self.mode)\n self.dialog_label.setStyleSheet(\"color: rgb(71, 143, 202);font: 18pt;\")\n self.vertical_layout.addWidget(self.dialog_label)\n\n # Title Line\n line = QtWidgets.QFrame(self)\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n self.vertical_layout.addWidget(line)\n\n # Form Layout\n self.form_layout = QtWidgets.QFormLayout()\n self.form_layout.setLabelAlignment(\n QtCore.Qt.AlignRight | QtCore.Qt.AlignTrailing | QtCore.Qt.AlignVCenter\n )\n self.vertical_layout.addLayout(self.form_layout)\n\n # ------------------------------------------------\n # Target Entity Type Field\n\n # label\n self.target_entity_type_label = QtWidgets.QLabel(\"Target Entity Type\", self)\n self.form_layout.setWidget(\n 0, QtWidgets.QFormLayout.LabelRole, self.target_entity_type_label\n )\n\n # field\n self.target_entity_type_combo_box = QtWidgets.QComboBox(self)\n self.form_layout.setWidget(\n 0, QtWidgets.QFormLayout.FieldRole, self.target_entity_type_combo_box\n )\n\n # ------------------------------------------------\n # Name Field\n self.name_label 
= QtWidgets.QLabel(\"Name\", self)\n self.form_layout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.name_label)\n self.name_fields_vertical_layout = QtWidgets.QVBoxLayout()\n self.name_validator_label = QtWidgets.QLabel(self)\n self.name_validator_label.setStyleSheet(\"color: rgb(255, 0, 0);\")\n\n from anima.ui.widgets import ValidatedLineEdit\n\n self.name_line_edit = ValidatedLineEdit(\n self, message_field=self.name_validator_label\n )\n\n self.name_fields_vertical_layout.addWidget(self.name_line_edit)\n self.name_fields_vertical_layout.addWidget(self.name_validator_label)\n self.form_layout.setLayout(\n 1, QtWidgets.QFormLayout.FieldRole, self.name_fields_vertical_layout\n )\n\n # ------------------------------------------------\n # Path Code Field\n self.path_label = QtWidgets.QLabel(\"Path\", self)\n self.form_layout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.path_label)\n\n self.path_line_edit = QtWidgets.QLineEdit(self)\n # set the default value to something useful\n self.form_layout.setWidget(\n 2, QtWidgets.QFormLayout.FieldRole, self.path_line_edit\n )\n\n # ------------------------------------------------\n # Filename Code Field\n self.filename_label = QtWidgets.QLabel(\"Filename\", self)\n self.form_layout.setWidget(\n 3, QtWidgets.QFormLayout.LabelRole, self.filename_label\n )\n\n self.filename_line_edit = QtWidgets.QLineEdit(self)\n self.form_layout.setWidget(\n 3, QtWidgets.QFormLayout.FieldRole, self.filename_line_edit\n )\n\n # ------------------------------------------------\n # Button Box\n self.button_box = QtWidgets.QDialogButtonBox(self)\n self.button_box.setOrientation(QtCore.Qt.Horizontal)\n self.button_box.setStandardButtons(\n QtWidgets.QDialogButtonBox.Cancel | QtWidgets.QDialogButtonBox.Ok\n )\n self.vertical_layout.addWidget(self.button_box)\n self.vertical_layout.setStretch(2, 1)\n\n # ------------------------------------------------\n # Default values\n self.target_entity_type_combo_box.addItems(\n [\"Task\", \"Asset\", \"Shot\", \"Sequence\"]\n )\n self.name_line_edit.set_invalid() # Empty field is not valid\n self.path_line_edit.setText(\n \"$REPO{{project.repository.code}}/{{project.code}}/\"\n \"{%- for parent_task in parent_tasks -%}{{parent_task.nice_name}}\"\n \"/{%- endfor -%}\"\n )\n self.filename_line_edit.setText(\n '{{version.nice_name}}_v{{\"%03d\"|format(version.version_number)}}'\n )\n\n # ------------------------------------------------\n # Disable Fields\n if self.mode == \"Update\":\n self.target_entity_type_combo_box.setEnabled(False)\n\n # ------------------------------------------------\n # Signals\n # Name\n QtCore.QObject.connect(\n self.name_line_edit,\n QtCore.SIGNAL(\"textChanged(QString)\"),\n self.name_line_edit_changed,\n )\n\n # Button box\n QtCore.QObject.connect(\n self.button_box, QtCore.SIGNAL(\"accepted()\"), self.accept\n )\n QtCore.QObject.connect(\n self.button_box, QtCore.SIGNAL(\"rejected()\"), self.reject\n )", "def inicialUI(self):\r\n\r\n self.setGeometry(500, 500, 500, 500)\r\n self.setWindownTitle(\"Pesquisa\")\r\n self.displayWidgets()\r\n\r\n self.show()", "def init_layout(self):\n pass", "def _initUI(self):\n\n vlayout = QtWidgets.QVBoxLayout()\n\n # Description\n #----------------------------------------------------------------\n hlayout = QtWidgets.QHBoxLayout()\n\n label = QtWidgets.QLabel()\n label.setText('Locatie:')\n label.setFixedWidth(100)\n hlayout.addWidget(label)\n\n label = QtWidgets.QLabel()\n label.setText(self.name)\n hlayout.addWidget(label)\n hlayout.setSpacing(10)\n\n 
vlayout.addLayout(hlayout)\n\n # Exportnaam\n #----------------------------------------------------------------\n self.exportname = ParameterInputLine(label='Exportnaam:', labelwidth=100)\n self.exportname.LineEdit.setMinimumWidth(200)\n vlayout.addLayout(self.exportname.layout)\n\n # Exportdatabase\n #----------------------------------------------------------------\n self.exportpath = ExtendedLineEdit(label='SQLite-database:', labelwidth=100, browsebutton=True)\n self.exportpath.BrowseButton.clicked.connect(self._get_path_database)\n vlayout.addLayout(self.exportpath.layout)\n\n # Line\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n vlayout.addWidget(line)\n\n # Buttons\n #----------------------------------------------------------------\n hbox = QtWidgets.QHBoxLayout()\n hbox.addItem(QtWidgets.QSpacerItem(0, 0, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum))\n # Add ok/close\n self.closebutton = QtWidgets.QPushButton('Sluiten')\n self.closebutton.clicked.connect(self.close)\n hbox.addWidget(self.closebutton)\n # Add ok/close\n self.savebutton = QtWidgets.QPushButton('Opslaan')\n self.savebutton.clicked.connect(self._save)\n hbox.addWidget(self.savebutton)\n\n vlayout.addLayout(hbox)\n\n # Add layout to widget\n self.setLayout(vlayout)", "def prepare_UI(self):", "def layout(self):\n # Create the layout\n boxLayout = QtGui.QGridLayout()\n\n # Add widgets to layout\n boxLayout.addWidget(self.magnitudeLabel,0,0)\n boxLayout.addWidget(self.magnitudeOption,0,1)\n boxLayout.addWidget(self.directionLabel,1,0)\n boxLayout.addWidget(self.directionOption,1,1)\n boxLayout.addWidget(self.horizontalLabel,2,0)\n boxLayout.addWidget(self.horizontalOption,2,1)\n boxLayout.addWidget(self.verticalLabel,3,0)\n boxLayout.addWidget(self.verticalOption,3,1)\n boxLayout.addWidget(self.closeButton,4,1)\n\n # Set layout to window\n self.setLayout(boxLayout)", "def _generate_layout(self):\n\n pass", "def do_layout(self):\n self.define_panel_structure()\n self.layout_selection()\n self.layout_data_list()\n self.layout_batch()\n self.layout_button()", "def layout(self):\n pass", "def init_layout(self):\n\t\tself.pack_start(self.edit, expand=True)\n\t\tself.pack_start(self.button, expand=False)\n\t\tself.show_all()", "def _init_ui(self):\n self.setWindowTitle(\"HB Havens: resultaten\")\n self.setWindowFlags(self.windowFlags() & ~QtCore.Qt.WindowContextHelpButtonHint)\n\n self.setLayout(QtWidgets.QVBoxLayout())\n\n # Create figure\n self.figure = Figure(figsize=(4,4))\n self.ax = self.figure.add_subplot()\n\n self.ax.grid()\n self.ax.spines['right'].set_visible(False)\n self.ax.spines['top'].set_visible(False)\n self.ax.tick_params(axis='y', color='0.75')\n self.ax.tick_params(axis='x', color='0.75')\n self.ax.set_aspect(1)\n\n # Add canvas\n self.canvas = FigureCanvasQTAgg(self.figure)\n\n # this is the Navigation widget\n # it takes the Canvas widget and a parent\n self.layout().addWidget(self.canvas)\n\n # Add location selection\n hbox = QtWidgets.QHBoxLayout()\n label = QtWidgets.QLabel('Locatie:')\n label.setFixedWidth(80)\n hbox.addWidget(label)\n self.location_combobox = QtWidgets.QComboBox()\n self.location_combobox.addItems(self.result_locations)\n self.location_combobox.setCurrentIndex(self.locid)\n self.location_combobox.currentIndexChanged.connect(self._set_location)\n hbox.addWidget(self.location_combobox)\n self.layout().addLayout(hbox)\n\n # Add parameter selection\n hbox = QtWidgets.QHBoxLayout()\n label = 
QtWidgets.QLabel('Parameter:')\n label.setFixedWidth(80)\n hbox.addWidget(label)\n self.parameter_combobox = QtWidgets.QComboBox()\n self.input_parameters = self.modelunctab.mainmodel.hydraulic_loads.result_columns[:]\n self.parameter_combobox.addItems(self.input_parameters)\n self.parameter_combobox.currentIndexChanged.connect(self._set_parameter)\n self.parameter_combobox.setCurrentIndex(0)\n self._set_parameter()\n self.figure.tight_layout()\n hbox.addWidget(self.parameter_combobox)\n self.layout().addLayout(hbox)\n\n # Line\n line = QtWidgets.QFrame()\n line.setFrameShape(QtWidgets.QFrame.HLine)\n line.setFrameShadow(QtWidgets.QFrame.Sunken)\n\n self.layout().addWidget(line)\n\n # Add ok/close\n self.closebutton = QtWidgets.QPushButton('Sluiten')\n self.closebutton.clicked.connect(self.close)\n self.layout().addWidget(self.closebutton, 0, QtCore.Qt.AlignRight)\n\n self.layout().setSizeConstraint(QtWidgets.QLayout.SetFixedSize)", "def setup_ui(self):\n self.setLayout(self.main_layout)\n\n self.pv_layout.addWidget(self.pv_protocol_cmb)\n self.pv_layout.addWidget(self.pv_name_line_edt)\n self.pv_layout.addWidget(self.pv_connect_push_btn)\n QTimer.singleShot(0, self.pv_name_line_edt.setFocus)\n\n self.curve_settings_tab.setLayout(self.curves_tab_layout)\n self.chart_settings_tab.setLayout(self.chart_settings_layout)\n self.setup_chart_settings_layout()\n\n self.tab_panel.addTab(self.curve_settings_tab, \"Curves\")\n self.tab_panel.addTab(self.chart_settings_tab, \"Chart\")\n self.tab_panel.hide()\n\n self.crosshair_settings_layout.addWidget(self.enable_crosshair_chk)\n self.crosshair_settings_layout.addWidget(self.cross_hair_coord_lbl)\n\n self.chart_control_layout.addWidget(self.auto_scale_btn)\n self.chart_control_layout.addWidget(self.view_all_btn)\n self.chart_control_layout.addWidget(self.reset_chart_btn)\n self.chart_control_layout.addWidget(self.pause_chart_btn)\n self.chart_control_layout.addLayout(self.crosshair_settings_layout)\n self.chart_control_layout.addWidget(self.import_data_btn)\n self.chart_control_layout.addWidget(self.export_data_btn)\n\n self.chart_control_layout.setStretch(4, 15)\n self.chart_control_layout.insertSpacing(5, 350)\n\n self.chart_layout.addWidget(self.chart)\n self.chart_layout.addLayout(self.chart_control_layout)\n\n self.chart_panel.setLayout(self.chart_layout)\n\n self.splitter.addWidget(self.chart_panel)\n self.splitter.addWidget(self.tab_panel)\n self.splitter.setStretchFactor(0, 0)\n self.splitter.setStretchFactor(1, 1)\n\n self.charting_layout.addWidget(self.splitter)\n\n self.body_layout.addLayout(self.pv_layout)\n self.body_layout.addLayout(self.charting_layout)\n self.body_layout.addLayout(self.chart_control_layout)\n self.main_layout.addLayout(self.body_layout)\n\n self.enable_chart_control_buttons(False)", "def layout_info_pane(self):\n self.param_layout = QtWidgets.QFormLayout()\n self.param_layout.addRow(QtWidgets.QLabel(\"<b>Beam Parameters</b>\"))\n self.param_layout.addRow(QtWidgets.QLabel(\"<i>(all radii are 1/e<sup>2</sup>)</i>\"))\n self.param_layout.addRow(QtWidgets.QWidget())\n self.param_layout.addRow(\"Semi-major radius:\", self.maj_radius)\n self.param_layout.addRow(\"Semi-minor radius:\", self.min_radius)\n self.param_layout.addRow(\"Average radius:\", self.avg_radius)\n self.param_layout.addRow(\"Ellipticity:\", self.ellipticity)\n self.param_layout.addRow(QtWidgets.QWidget())\n self.param_layout.addRow(\"X radius:\", self.x_radius)\n self.param_layout.addRow(\"Y radius:\", self.y_radius)\n 
self.param_layout.addRow(QtWidgets.QWidget())\n self.param_layout.addRow(\"X position:\", self.x_centroid)\n self.param_layout.addRow(\"Y position:\", self.y_centroid)\n self.param_layout.addRow(QtWidgets.QWidget())\n\n mark_x_label = QtWidgets.QLabel(\"Mark X:\")\n mark_y_label = QtWidgets.QLabel(\"Mark Y:\")\n dx_label = QtWidgets.QLabel(\"ΔX:\")\n dy_label = QtWidgets.QLabel(\"ΔY:\")\n self.mark_widgets.extend([\n mark_x_label, mark_y_label,\n # dx_label, dy_label,\n ])\n self.param_layout.addRow(self.mark, self.unmark)\n self.param_layout.addRow(mark_x_label, self.mark_x)\n self.param_layout.addRow(mark_y_label, self.mark_y)\n # self.param_layout.addRow(dx_label, self.x_delta)\n # self.param_layout.addRow(dy_label, self.y_delta)\n for w in self.mark_widgets:\n w.hide()\n\n self.param_widget = QtWidgets.QWidget()\n self.param_widget.setLayout(self.param_layout)\n\n self.info_pane_layout = QtWidgets.QVBoxLayout()\n self.info_pane_layout.setAlignment(QtCore.Qt.AlignTop)\n self.info_pane_layout.addWidget(self.start_acq)\n self.info_pane_layout.addWidget(self.single_acq)\n self.info_pane_layout.addWidget(self.stop_acq)\n self.info_pane_layout.addWidget(self.exposure)\n self.info_pane_layout.addStretch(1)\n self.info_pane_layout.addWidget(self.param_widget)\n self.info_pane_layout.addStretch(3)\n self.info_pane_layout.addWidget(self.fps)\n self.info_pane_layout.addWidget(self.message)\n self.info_pane_layout.addWidget(self.status)\n\n self.info_pane = QtWidgets.QWidget(self)\n self.info_pane.setLayout(self.info_pane_layout)", "def _initDialog(self):\n\n # ===== Configure focus policy ===== #\n self.setFocusPolicy(Qt.NoFocus)\n self.setFocus(False)\n\n # ===== Create movie parameters ===== #\n movieLabel = QLabel()\n self._movie = QMovie('./icons/loading.gif')\n movieLabel.setMovie(self._movie)\n\n # ===== Create layout ===== #\n layout = QVBoxLayout()\n layout.setContentsMargins(0, 0, 0, 0)\n layout.setSpacing(0)\n layout.addWidget(movieLabel)\n self.setLayout(layout)", "def setup_ui(self):\n\t\t\n\t\t# CREATION DU LAYOUT\n\t\tself.layout = QtWidgets.QHBoxLayout(self) #le layout prend la fenetre principal en argument donc notre self\n\t\t\n\t\t# CREATION DES WIDGETS\n\t\tself.cbb_devisesFrom = QtWidgets.QComboBox() #combobox (liste deroulante) pour choisir la devise From\n\t\tself.spn_montant = QtWidgets.QSpinBox() #spinbox (zone affichage) du montant a convertir\n\t\tself.cbb_devisesTo = QtWidgets.QComboBox() #cbb pour choisir la devise To\n\t\tself.spn_montantConverti = QtWidgets.QSpinBox() #spn du montant converti\n\t\tself.btn_inverser = QtWidgets.QPushButton(\"Inverser devises\") #bouton pour inverser les devises\n\t\t\n\t\t# AJOUT AU LAYOUT\n\t\tself.layout.addWidget(self.cbb_devisesFrom)\n\t\tself.layout.addWidget(self.spn_montant)\n\t\tself.layout.addWidget(self.cbb_devisesTo)\n\t\tself.layout.addWidget(self.spn_montantConverti)\n\t\tself.layout.addWidget(self.btn_inverser)", "def relayout(self): \n\t\t#self.urmaswin.Layout()\n\t\t#wx.CallAfter(self.urmaswin.Layout)\n\t\t#wx.CallAfter(self.visualizer.OnSize)", "def createUserInterface(self):\n\n\t\tself.__layout = self.__parent.createUserInterface()\n\n\t\tstep_label = qt.QLabel( 'Choose the volume you would like to threshold. If you are calculating a subtraction map, check the \\\"Calculate Subtraction Map\\\" box and select a post-contrast image.' 
)\n\t\tstep_label.setWordWrap(True)\n\t\tself.__primaryGroupBox = qt.QGroupBox()\n\t\tself.__primaryGroupBox.setTitle('Information')\n\t\tself.__primaryGroupBoxLayout = qt.QFormLayout(self.__primaryGroupBox)\n\n\t\tself.__subtractionMappingGroupBox = qt.QGroupBox()\n\t\tself.__subtractionMappingGroupBox.setTitle('Volume Selection')\n\t\tself.__subtractionMappingGroupBoxLayout = qt.QFormLayout(self.__subtractionMappingGroupBox)\n\n\t\tbaselineScanLabel = qt.QLabel( 'Primary / Pre-Contrast Image:' )\n\t\tself.__baselineVolumeSelector = slicer.qMRMLNodeComboBox()\n\t\tself.__baselineVolumeSelector.toolTip = \"Select the volume you wish to threshold. If you are calculating a subtraction map, this will be the pre-contrast scan.\"\n\t\tself.__baselineVolumeSelector.nodeTypes = ['vtkMRMLScalarVolumeNode']\n\t\tself.__baselineVolumeSelector.setMRMLScene(slicer.mrmlScene)\n\t\tself.__baselineVolumeSelector.addEnabled = 0\n\n\t\tsubtractionMappingLabel = qt.QLabel( 'Calculate Subtraction Map:' )\n\t\tself.__enableSubtractionMapping = qt.QCheckBox()\n\t\tself.__enableSubtractionMapping.checked = False\n\t\tself.__enableSubtractionMapping.setToolTip(\"Check if you would like to calculate a subtraction map\")\n\t\tself.__enableSubtractionMapping.connect('clicked()', self.setSubtractionMapping)\n\n\t\tfollowupScanLabel = qt.QLabel( 'Post-Contrast Image:' )\n\t\tself.__followupVolumeSelector = slicer.qMRMLNodeComboBox()\n\t\tself.__followupVolumeSelector.toolTip = \"Choose the post-contrast scan\"\n\t\tself.__followupVolumeSelector.nodeTypes = ['vtkMRMLScalarVolumeNode']\n\t\tself.__followupVolumeSelector.setMRMLScene(slicer.mrmlScene)\n\t\tself.__followupVolumeSelector.addEnabled = 0\n\t\tself.__followupVolumeSelector.enabled = 0\n\n\t\tself.__layout.addRow(self.__primaryGroupBox)\n\t\tself.__primaryGroupBoxLayout.addRow( step_label )\n\t\tself.__subtractionMappingGroupBoxLayout.addRow( baselineScanLabel, self.__baselineVolumeSelector )\n\n\t\tself.__layout.addRow(self.__subtractionMappingGroupBox)\n\t\tself.__subtractionMappingGroupBoxLayout.addRow( subtractionMappingLabel, self.__enableSubtractionMapping )\n\t\tself.__subtractionMappingGroupBoxLayout.addRow( followupScanLabel, self.__followupVolumeSelector )\n\n\t\tself.updateWidgetFromParameters(self.parameterNode())\n\n\t\t# This timer is a trick to wait for buttons to load BEFORE deleting them.\n\t\tqt.QTimer.singleShot(0, self.killButton)", "def opt_dialog(self, event):\n dialog = options.OptionsDialog(self, self.options)\n dialog.ShowModal()\n \n self.reconfigure()\n self.info_panel.Layout()\n self.main_panel.Layout()", "def createUI(self):\n\n q.getQItem(windowID, QtWidgets.QWidget)\n cmds.setParent(q.fullPath)\n\n # ################################################\n # Active Render Layer\n\n # cmds.separator(height=12, style='none')\n addFrameLayout(\n '%s_frameLayoutLayers' % windowID,\n 'Visible Render Layer', collapsable=False,\n labelVisible=False,\n marginHeight=0\n )\n\n addRowLayout(\n '%s_rowLayoutActiveRenderLayer' % windowID,\n 4,\n columnAlign4=('left', 'left', 'right', 'right'),\n columnAttach4=('left', 'both', 'right', 'right'),\n columnWidth4=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.775,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075\n )\n )\n\n\n addButton('%s_addNewLayer' % windowID, 'New', rsAddNewLayer,\n image='RS_create_layer', size=(21, 21))\n addOptionMenu('%s_selectActiveLayer' % windowID,\n 'Active Layer ', (), rsSelectActiveLayer)\n 
addButton('rsOpenRenderSetupWindow', 'Render Setup',\n rsOpenRenderSetupWindow, image='render_setup.png',\n size=(21, 21))\n addButton('rsOpenUnifiedRenderGlobals', 'Render Globals',\n rsOpenUnifiedRenderGlobals, image='render_setup.png',\n size=(21, 21))\n\n # ################################################\n # Work Render Layers\n\n cmds.setParent(q.fullPath)\n addFrameLayout('%s_frameLayoutLayersB' % windowID,\n 'Work Render Layer', collapsable=False,\n labelVisible=False, marginHeight=0)\n addRowLayout('%s_rowLayoutVisibleRenderLayer' % windowID, 3,\n columnAlign3=('left', 'left', 'right'),\n columnAttach3=('left', 'both', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.075, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.85,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075))\n\n cmds.separator()\n addOptionMenu('%s_selectVisibleLayer' % windowID,\n 'Visible Layer ', (), rsSelectVisibleLayer)\n cmds.separator()\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=12, style='none')\n\n # ################################################\n # Collections\n\n addFrameLayout('%s_frameLayout02' % windowID, 'Collections',\n labelVisible=False, marginHeight=0)\n\n addRowLayout(\n '%s_rowLayout02' % windowID,\n 6,\n columnAlign6=('left', 'left', 'left', 'left', 'left', 'left'),\n columnAttach6=('both', 'both', 'right', 'right', 'right', 'right'),\n columnWidth6=(\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.18,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.415,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.075,\n )\n )\n\n addButton('rsAddCollection', 'Add', rsAddCollection)\n addButton('rsRemoveCollection', 'Remove', rsRemoveCollection)\n addButton('rsSelectShapes', 'Select Shapes', rsSelectShapes,\n image='selectObject.png', size=(21, 21))\n addButton('rsRenameShader', 'Rename Shader', rsRenameShader,\n size=(21, 21), image='QR_rename.png')\n addButton('rsDuplicateShader', 'Duplicate Shader',\n duplicateShader, size=(21, 21), image='newPreset.png')\n addButton('rsRefreshUI', 'Refresh', rsRefreshUI, size=(21, 21),\n image='QR_refresh.png')\n\n # ###########################\n # Filter List\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout03' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.6, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.42))\n\n addTextField('%s_filterShaderList' % windowID, 'Search',\n rsFilterShaderList_off, rsFilterShaderList_off,\n window.updateUI)\n addOptionMenu('rsShaderGroups', '|', (), rsShaderGroups)\n\n # ###########################\n # The shaders scroll list\n\n cmds.setParent('%s_frameLayout02' % windowID)\n addRowLayout('%s_rowLayout04' % windowID, 1, columnAlign1='both', columnAttach1='both', columnWidth1=WINDOW_WIDTH\n + 12)\n addTextScrollList('%s_ShaderScrollList' % windowID, (),\n rsShaderScrollList_doubleClick,\n rsShaderScrollList_onSelect,\n rsShaderScrollList_deleteKey)\n\n # Add popup menu:\n\n cmds.popupMenu('rsShaderScrollListPopupMenu',\n parent='%s_ShaderScrollList' % windowID,\n allowOptionBoxes=False, markingMenu=True,\n postMenuCommand=postMenuCommand)\n cmds.menuItem('%s_popupMenuItem02' % windowID,\n label='Duplicate Shader', command=duplicateShader)\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem04' % windowID,\n label='Graph Shader')\n cmds.menuItem(divider=True)\n 
cmds.menuItem('%s_popupMenuItem03' % windowID,\n label='Select Shader')\n cmds.menuItem(divider=True)\n cmds.menuItem('%s_popupMenuItem05' % windowID,\n label='Select Assigned Shapes')\n cmds.menuItem('%s_popupMenuItem06' % windowID,\n label='Select Assigned Transforms')\n\n # ##################################################\n # Arnold Property Overrides\n\n cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout20' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n style='none')\n\n addRowLayout('%s_rowLayout05' % windowID, 2,\n columnAlign2=('left', 'both'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_textArnoldPropertyOverridesLabel' % windowID,\n 'Apply Arnold Property Overrides', 'plainLabelFont')\n addCheckBox('rsArnoldPropertyOverridesCheckBox', '',\n rsArnoldPropertyOverridesCheckBox,\n rsArnoldPropertyOverridesCheckBox)\n cmds.separator(parent='%s_columnLayout20' % windowID, height=4,\n style='none')\n\n # Column Layout to toggle\n\n cmds.setParent('%s_columnLayout20' % windowID)\n cmds.columnLayout(\n '%s_columnLayout02' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n\n addCheckboxes('%s_columnLayout02' % windowID)\n cmds.columnLayout('%s_columnLayout02' % windowID, edit=True,\n visible=False)\n\n # #################################################\n # Shader Override\n\n cmds.setParent('%s_frameLayout02' % windowID)\n cmds.columnLayout(\n '%s_columnLayout21' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('left', 0),\n adjustableColumn=False,\n rowSpacing=0,\n )\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n addRowLayout('%s_rowLayout06' % windowID, 2,\n columnAlign2=('left', 'right'),\n columnAttach2=('left', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.75, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.25))\n addText('%s_shaderOverrideLabel' % windowID, 'Shader Override',\n 'plainLabelFont')\n addCheckBox('%s_shaderOverrideCheckbox' % windowID, '',\n rsShaderOverrideCheckbox, rsShaderOverrideCheckbox)\n cmds.separator(parent='%s_columnLayout21' % windowID, height=4,\n style='none')\n\n cmds.setParent('%s_columnLayout21' % windowID)\n cmds.columnLayout(\n '%s_columnLayout03' % windowID,\n width=WINDOW_WIDTH - FRAME_MARGIN * 2,\n columnAlign='left',\n columnAttach=('both', 4),\n adjustableColumn=True,\n rowSpacing=0,\n )\n cmds.setParent('%s_columnLayout03' % windowID)\n addOptionMenu('%s_optionMenu02' % windowID, 'Select: ', (),\n rsShaderOverridesMenu)\n\n global selectedShaderOverride\n\n # default selection\n\n selectedShaderOverride = SHADER_OVERRIDE_OPTIONS[0]['ui']\n cmds.columnLayout('%s_columnLayout03' % windowID, edit=True,\n visible=False)\n\n # #################################################\n\n cmds.setParent(q.fullPath)\n cmds.separator(height=10, style='none')\n\n # #################################################\n # Extras\n\n addFrameLayout('%s_frameLayout50' % windowID, 'Extras',\n collapsable=True, marginHeight=0,\n labelVisible=False)\n\n # #################################################\n # Add & Assign Shader Groups\n\n addFrameLayout(\n '%s_frameLayout05' % 
windowID,\n 'Add & Assign Shader Groups',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=False,\n labelVisible=True,\n )\n\n # Add the renamer window\n\n self.gwCustomRenamer = CustomRenamer()\n self.gwCustomRenamer.createUI()\n\n # #################################################\n # AutoConnect\n\n cmds.setParent('%s_frameLayout50' % windowID)\n\n addFrameLayout(\n '%s_frameLayout03' % windowID,\n 'Adobe Connector',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout07', 3, columnAlign3=('left', 'left',\n 'left'), columnAttach3=('both', 'both', 'both'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addButton('updateConnections', '> Update Connections <',\n updateConnections)\n addButton('uvSnapshot', 'UV Snapshot', uvSnapshot)\n addButton('editTexture', 'Edit Texture', editTexture)\n\n # After Effects\n\n cmds.setParent('%s_frameLayout03' % windowID)\n addRowLayout('%s_rowLayout11' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'both'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.4, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.6))\n addText('%s_text90' % windowID, 'Send to After Effects:')\n addButton('makeCompButton', 'Send to After Effects', rsMakeComp)\n\n # #################################################\n # Render Setup /\n # Output settings\n\n cmds.setParent('%s_frameLayout50' % windowID)\n addFrameLayout(\n '%s_frameLayout04' % windowID,\n 'Output Settings',\n collapsable=True,\n marginWidth=0,\n marginHeight=0,\n collapse=True,\n labelVisible=True,\n )\n addRowLayout('%s_rowLayout08' % windowID, 1,\n columnAlign1='center', columnAttach1='both',\n columnWidth1=WINDOW_WIDTH - FRAME_MARGIN * 2)\n addButton('%s_revealOutputDirectory' % windowID,\n 'Output path not set yet', rsRevealOutputDirectory)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout09' % windowID, 3,\n columnAlign3=('left', 'right', 'right'),\n columnAttach3=('left', 'right', 'right'),\n columnWidth3=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.8, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.14,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.06))\n\n addOptionMenu('%s_optionMenu05' % windowID, '', (),\n rsSelectOutputTemplate)\n addOptionMenu('%s_outputVersionMenu' % windowID, '', (),\n rsSelectOutputVersion)\n cmds.menuItem(label='v001')\n\n cmds.setParent('%s_rowLayout09' % windowID)\n addButton('%s_incrementOutputVersionButton' % windowID, '+1',\n rsIncrementOutputVersion, size=(21, 21))\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout10' % windowID, 2,\n columnAlign2=('left', 'left'),\n columnAttach2=('both', 'right'),\n columnWidth2=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.7, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.3))\n addOptionMenu('%s_optionMenu03' % windowID, 'Format:', (),\n rsOutputTemplatesMenu)\n addOptionMenu('%s_optionMenu06' % windowID, '', (),\n rsSetFPSMenu)\n\n cmds.setParent('%s_frameLayout04' % windowID)\n addRowLayout('%s_rowLayout12' % windowID, 4,\n columnAlign4=('right', 'left', 'right', 'left'),\n columnAttach4=('both', 'both', 'both', 'both'),\n columnWidth4=((WINDOW_WIDTH - FRAME_MARGIN * 2)\n * 0.50, (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.20,\n (WINDOW_WIDTH - FRAME_MARGIN * 2) * 0.15))\n\n addText('%s_setInFrameLabel' % windowID, 'In Frame ')\n addTextField('%s_setInFrame' % windowID, 
'', setInFrame,\n setInFrame, setInFrame)\n\n addText('%s_setOutFrameLabel' % windowID, 'Out Frame ')\n addTextField('%s_setOutFrame' % windowID, '', setOutFrame,\n setOutFrame, setOutFrame)", "def setUI(self):\n \n l = QtGui.QLabel(\"Open file:\")\n browseButton = QtGui.QPushButton(\"Browse\")\n analyzeButton = QtGui.QPushButton(\"Analyse\")\n self.filelabel = QtGui.QLabel(\"\")\n self.messageLabel = QtGui.QLabel(\"\")\n \n #camera intrasec values\n self.fxlabel = QtGui.QLabel('focal x')\n self.fylabel = QtGui.QLabel('focal y')\n self.dist1label = QtGui.QLabel('K1')\n self.dist2label = QtGui.QLabel('K2')\n self.dist3label = QtGui.QLabel('P1')\n self.dist4label = QtGui.QLabel('P2')\n\n #set layout\n self.grid = QtGui.QGridLayout()\n a = self.grid.addWidget\n a(l, 0,0)\n a(browseButton, 0,2)\n a(self.filelabel,0,1)\n a(self.messageLabel, 1,0,1,4)\n a(analyzeButton, 2,0,1,4)\n\n a(self.fxlabel, 3,0)\n a(self.fylabel, 3,1)\n a(self.dist1label, 4,0)\n a(self.dist2label, 5,0)\n a(self.dist3label, 6,0)\n a(self.dist4label, 7,0)\n\n self.setLayout(self.grid)\n\n\n #connect signals to methods\n self.connect(browseButton, QtCore.SIGNAL('clicked()'), self.onOpenFileClicked)\n self.connect(analyzeButton, QtCore.SIGNAL('clicked()'), self.startAnalyze)", "def setup_ui(self):\n #super(Dialog, self).__init__()\n self.createFormGroupBox()\n\n buttonBox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)\n buttonBox.accepted.connect(self.check_input)\n buttonBox.rejected.connect(self.reject)\n\n mainLayout = QVBoxLayout()\n mainLayout.addWidget(self.formGroupBox)\n mainLayout.addWidget(buttonBox)\n self.setLayout(mainLayout)\n\n self.make_connections()\n\n self.setWindowTitle(\"Load Data ...\")", "def __init_ui(self):\n self.__maximize_button.setFixedSize(31, 31)\n self.__maximize_button.setIcon(QIcon(SystemInfo.RESOURCES + 'images/buttons/maximize.svg'))\n\n self.__diagram_group.setStyleSheet(\"QGroupBox { border: 1px solid gray; background: white; }\")\n self.__diagram_layout.addWidget(self.__diagram_group)\n\n self.__button_layout = QHBoxLayout()\n self.__button_layout.addWidget(self.__start_button)\n self.__button_layout.addStretch()\n self.__button_layout.addWidget(self.__maximize_button)\n\n main_layout = QVBoxLayout()\n main_layout.addLayout(self.__button_layout, 1)\n main_layout.addLayout(self.__diagram_layout, 1)\n main_layout.addStretch(0)\n\n self.setLayout(main_layout)", "def init_ui(self):\n self.set_title(TITLE)\n self.set_default_size(WINDOW_WIDTH, WINDOW_HEIGHT)\n self.set_resizable(False)\n\n fixed = Gtk.Fixed()\n self.add(fixed)\n\n darea = Gtk.DrawingArea()\n darea.connect(DRAW_EVENT, self.on_draw)\n darea.set_size_request(DRAWING_AREA_WIDTH, DRAWING_AREA_HEIGHT)\n self.darea = darea\n fixed.put(darea, DRAWING_AREA_X, DRAWING_AREA_Y)\n\n button = Gtk.Button.new_with_label(RUN_NEXT_TURN_MSG)\n button.connect(CLICKED_EVENT, self.on_next_turn_click)\n button.set_size_request(BUTTON_SIZE, BUTTON_SIZE)\n fixed.put(button, BUTTON_X, BUTTON_Y)\n\n scores = Gtk.TextView()\n scores.get_buffer().set_text(self.get_current_scores_buffer())\n scores.set_size_request(TEXT_VIEW_WIDTH, TEXT_VIEW_HEIGHT)\n self.scores = scores\n fixed.put(scores, TEXT_VIEW_X, TEXT_VIEW_Y)\n\n self.connect(DESTROY_EVENT, Gtk.main_quit)", "def initUI(self) -> None:\n ratio = 70\n width_to_set = (ratio * self.get_current_window_info()[0]) / 100.0\n height_to_set = (ratio * self.get_current_window_info()[1]) / 100.0\n self.setGeometry(200, 100, width_to_set, height_to_set)\n self.createTable()\n # Add box 
layout, add table to box layout and add box layout to widget\n self.layout = QVBoxLayout()\n self.layout.addWidget(self.tableWidget)\n self.setLayout(self.layout)\n self.setWindowTitle('View files')\n self.show()", "def _setup_ui(self):\n\n self.window = ui.Widget()\n self.window.dimensions = ui.normalize_dimension((\n 0, 0,\n self.normalized_screen_resolution[0],\n self.normalized_screen_resolution[1]\n ))\n self.window.background_color = ImageColor.getcolor('#000000', 'RGB')\n\n interface_frame = ui.Widget(parent=self.window)\n interface_frame.dimensions = ui.normalize_dimension((\n self.preview_renderer.window[2],\n 0,\n self.normalized_screen_resolution[0] - self.preview_renderer.window[2],\n self.normalized_screen_resolution[1]\n ))\n interface_frame.background_color = ImageColor.getcolor('#ffffff', 'RGB')\n\n number = ui.LabelWidget(\"\",\n name=NAME_GET_STARTED,\n parent=interface_frame,\n align=\"center\",\n font_color=(0, 0, 0, 255))\n number.dimensions = (\n 5, 5,\n interface_frame.width - 10,\n interface_frame.height - 10\n )" ]
[ "0.6830704", "0.6582636", "0.6569698", "0.65478736", "0.6520507", "0.6498965", "0.64796984", "0.6454324", "0.6438737", "0.6431905", "0.6370083", "0.6368099", "0.6312776", "0.63090515", "0.62615967", "0.6222714", "0.6211797", "0.6191078", "0.6169073", "0.6165413", "0.616442", "0.6146367", "0.6124647", "0.61158943", "0.60976815", "0.6086287", "0.60619074", "0.6029246", "0.6017262", "0.59970963" ]
0.709687
0
Adds a stamp over the plot signalling it is a placeholder plot
def make_placeholder(fig: Figure) -> None:
    fig.add_artist(FancyBboxPatch(
        xy = (0.35, 0.45),
        width = 0.3,
        height = 0.1,
        boxstyle = 'Round, pad=0.015',
        linewidth = 3,
        edgecolor = 'red',
        facecolor = 'lightpink',
        alpha = 0.5
    ))
    fig.text(
        x = 0.5,
        y = 0.5,
        s = "Placeholder",
        ha = "center",
        va = "center",
        fontsize = 'xx-large',
        fontweight = 'bold',
        alpha = 0.5
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_blank(self):\n self.figure_bmp.SetBitmap(self.controller.plot_blank())", "def createBlankPlot(self):\n\n fig = plt.figure(figsize=(8,6),dpi=80)\n fig.set_facecolor('#ededed')\n \n # Format plot\n ax = plt.subplot(111)\n \n fig.canvas.draw()\n \n return fig, ax", "def plot():\n pass", "def empty_figure() -> object:\n figure = go.Figure(go.Scatter(x=[], y=[]))\n figure.update_layout(template=None)\n figure.update_xaxes(showgrid=False, showticklabels=False, zeroline=False)\n figure.update_yaxes(showgrid=False, showticklabels=False, zeroline=False)\n\n return figure", "def on_plot(self, event=None):\n data_id, theory_id, state_id = self.set_data_helper()\n self.parent.plot_data(data_id=data_id,\n state_id=state_id,\n theory_id=theory_id,\n append=False)\n self.enable_remove_plot()", "def on_append_plot(self, event=None):\n self._on_plot_selection()\n data_id, theory_id, state_id = self.set_data_helper()\n self.parent.plot_data(data_id=data_id,\n state_id=state_id,\n theory_id=theory_id,\n append=True)", "def liveplot(x, y, xlim, ylim, title):\n plt.plot(x,y,'b.')\n plt.xlim(xlim)\n plt.ylim(ylim)\n plt.xlabel('North-South Axis')\n plt.ylabel('East-West Axis')\n plt.title(title)\n plt.show()", "def update_plot():\n pass", "def showPlot(self):\r\n self.plot = not self.plot\r\n if self.plot:\r\n self.plot_button['text'] = \"No Plot\"\r\n else:\r\n self.plot_button['text'] = \"Plot\"", "def _plot_init(self):\n pass", "def _plot_init(self):\n pass", "def peek(self, **kwargs):\n\n plt.figure()\n axes = plt.gca()\n data_lab=self.meta['OBS-FREQ'][0:2] + ' ' + self.meta['OBS-FREQ'][2:5]\n axes.plot(self.data.index,self.data,label=data_lab)\n axes.set_yscale(\"log\")\n axes.set_ylim(1e-4,1)\n axes.set_title('Nobeyama Radioheliograph')\n axes.set_xlabel('Start time: ' + self.data.index[0].strftime(TIME_FORMAT))\n axes.set_ylabel('Correlation')\n axes.legend()\n plt.show()", "def plot_refresh():\n figure.canvas.draw()", "def null_plot(self, dataset):\n sns_heatmap_plot = sns.heatmap(\n dataset.isnull(), cmap=\"Blues\", yticklabels=False\n )\n sns_heatmap_plot.figure.savefig(config.NULL_CHECK_HEATMAP)", "def plot(self, title, series, x, y, setup=None, xlabel='Epochs', ylabel=None):\n hr_min = datetime.datetime.now().strftime(\"%I:%M\")\n timestamp = datetime.datetime.now().strftime(\"%A, %B %d, %Y %I:%M%p\")\n self.vis.text(\n f'<b>LAST UPDATED</b><br>{time}', env=self.env, win='last_updated')\n\n # if setup.expname != 'NoName':\n # title += f\" ({setup.expname})\"\n # if setup.has_suggestion:\n # title += f\" ({setup.sugg_id})\"\n #title += f\" (Phase {setup.phaser.idx}) \"\n\n # if setup.config.sigopt:\n # display_title = f\"{display_title}:{setup.sugg_id}\"\n # if setup.config.mode is not None:\n # display_title += f\" ({setup.config.mode})\"\n\n display_title = f\"{title} ({hr_min})\"\n\n if title in self.plots: # update existing plot\n self.vis.line(\n X=np.array([x]),\n Y=np.array([y]),\n env=self.env,\n win=self.plots[title],\n name=series,\n update='append'\n )\n else: # new plot\n self.plots[title] = self.vis.line(\n X=np.array([x, x]),\n Y=np.array([y, y]),\n env=self.env,\n opts={\n 'legend': [series],\n 'title': display_title,\n 'xlabel': xlabel,\n 'ylabel': ylabel,\n })\n #mlb.gray(\"[plotted to visdom]\")", "def UpdatePlot(self):\n\n if self.first_time:\n for ID, plt in self.plotIDs.iteritems():\n if plt:\n tmp = FellesBaseClass.FindInstance(ID)\n self.plot_panel.oplot(\n np.array(tmp.data['time']),\n np.array(tmp.data['data']),\n draw = True,\n side ='left',\n label = 
tmp['label'],\n color = tmp['color'],\n xlabel = None, ylabel = None, y2label = None,\n title = None,\n dy = None,\n ylog_scale = False,\n xmin = None, xmax = None, ymin = None, ymax = None,\n refresh = True,\n show_legend= True, legend_loc='ur', legend_on= True,\n delay_draw = False,\n marker = 'None', markersize = None,\n autoscale=True,\n linewidth = 3, # default 2\n drawstyle = 'line', style = 'solid',\n grid = True,\n bgcolor= None, framecolor= None, gridcolor= None,\n labelfontsize= 10, # default 9\n legendfontsize= 12, # default 7\n fullbox=None, # 'box', 'open', 'bottom'\n axes_style=None,\n zorder=None,\n )\n self.first_time = False\n\n else:\n i = 0\n for ID,plt in self.plotIDs.iteritems():\n if plt:\n tmp = FellesBaseClass.FindInstance(ID)\n self.plot_panel.update_line(\n i,\n np.array(tmp.data['time']),\n np.array(tmp.data['data']),\n draw=True,\n )\n i += 1\n\n self.plot_panel.set_xylims(\\\n [\\\n floor( min( [ min( FellesBaseClass.FindInstance(ID).data['time'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n ceil( max( [ max( FellesBaseClass.FindInstance(ID).data['time'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n floor( min( [ min( FellesBaseClass.FindInstance(ID).data['data'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) ),\\\n ceil( max( [ max( FellesBaseClass.FindInstance(ID).data['data'] )\\\n for ID,plt in self.plotIDs.iteritems() if plt ] ) )\\\n ]\\\n )\n\n self.panel_sizer.Fit(self)", "def plot_scatter_points(self):\n self.plot(1)", "def createPlot(self, plotData=None, **kwargs):\n\t\treturn super().createPlot(plotData=plotData, **kwargs)", "def plot_insertsize():", "def test_make_plot_custom(self):\n print(sys._getframe().f_code.co_name)\n try:\n x = np.arange(0,6)*300000\n y = np.arange(0,6)\n pp.make_plot(x,y,plot_type='c',plot_title='test',ylabel='test',xlabel='test',xticks=[0,2,4,6],yticks=[0,2,4,6])\n except Exception as e:\n raise\n plt.close('all')", "def plot(self):\n pass", "def plot(self):\n\t\tself.plotOfHeatingCurrent().plot()", "def timer_plot_data_out(self, w):\n w.update_plot(self.getLaps())", "def plot(self, *args, **kwargs):\n pass", "def _update_plot(self) -> None:\n\n # Check if plotting is active\n if self._fig is None:\n return None\n LOG.debug(\"Updating plot.\")\n\n # Extract glaciated area\n hs_back = np.ma.masked_where(\n self.h <= 1,\n hillshade(\n self.ele, self.PLOT_HILLSHADE_AZIMUTH, self.PLOT_HILLSHADE_ALTITUDE\n ),\n )\n\n # Clear plot and draw axes\n self._fig.clear()\n ax = plt.subplot(121, facecolor=\"black\")\n ax.tick_params(axis=\"x\", colors=\"w\")\n ax.tick_params(axis=\"y\", colors=\"w\")\n ax.set(xlabel=\"X-coordinate [m]\", ylabel=\"Y-coordinate [m]\")\n ax.xaxis.label.set_color(\"w\")\n ax.yaxis.label.set_color(\"w\")\n title_text = f\"Year: {str(self.i)} ELA: {str(int(self.ela))} m.a.s.l.\"\n ax.set_title(title_text, color=\"white\", size=18)\n\n # Draw new image layers\n plt.imshow(self.hs, vmin=90, vmax=345, cmap=\"copper\", extent=self.extent)\n plt.imshow(255 - hs_back, vmin=1, vmax=150, cmap=\"Greys\", extent=self.extent)\n\n # Mass balance\n ax1 = plt.subplot(222, facecolor=\"black\")\n ax1.plot(self.mass_balance, color=\"w\")\n ax1.plot(self.mass_balance_trend, color=\"r\")\n ax1.set(ylabel=\"Mass balance [m]\")\n ax1.yaxis.label.set_color(\"w\")\n plt.setp(ax1.get_xticklabels(), visible=False)\n ax1.tick_params(axis=\"y\", colors=\"w\")\n ax1.set_title(f\"Gradient: {str(self.m)} m/m\", color=\"white\", size=18)\n\n # Plot mean thickness\n ax2 = plt.subplot(224, 
sharex=ax1, facecolor=\"black\")\n ax2.plot(self.mass, color=\"w\")\n ax2.set(xlabel=\"Year [a]\", ylabel=\"Mean thickness [m]\")\n ax2.xaxis.label.set_color(\"w\")\n ax2.yaxis.label.set_color(\"w\")\n ax2.tick_params(axis=\"x\", colors=\"w\")\n ax2.tick_params(axis=\"y\", colors=\"w\")\n\n # Draw new plot\n self._fig.canvas.draw()\n plt.pause(0.05)", "def draw_empty( self ):\n prefs = self.prefs\n fig = Figure()\n canvas = FigureCanvasAgg( fig )\n dpi = prefs['width'] /prefs['width_inches']\n height_inches = prefs['height'] / float(dpi)\n fig.set_size_inches( prefs['width_inches'], height_inches )\n fig.set_dpi( dpi )\n fig.set_facecolor('white')\n fig.text( .5, .5, \"No data returned by DB query.\", horizontalalignment='center' )\n self.ax = None\n self.fig = fig\n self.canvas = canvas", "def test_no_arguments(self):\n fig = plt.figure()\n ax = fig.add_subplot(projection='ternary')\n lines = ax.plot()\n assert lines == []", "def plot_waterfall(self, plot_id):\n self.rt_plot.plot_waterfall(plot_id)", "def plot(self):\n self.fig = plt.figure('black hole')\n self.fig.clf() #clear the graph to avoir superposing data from the same set (can be deactivated if need to superpose)\n self.ax = plt.subplot()\n\n if self.img2 is not None:\n self.ax.imshow(self.img2)\n else:\n print(\"No black hole deformation in the memory, displayed the original image instead.\")\n self.ax.imshow(self.img_debut)\n\n self.fig.canvas.set_window_title('Black hole')\n self.ax.set_title(\"scrool to zoom in or out \\nright click to add an offset in the background \\nleft click to refresh image \\n close the option windows to stop the program\")\n self.fig.canvas.mpl_connect('scroll_event', self.onscroll)\n self.fig.canvas.mpl_connect('button_press_event', self.onclick)\n self.fig.canvas.mpl_connect('axes_leave_event', self.disconnect)\n self.fig.canvas.mpl_connect('axes_enter_event', self.connect)\n\n self.draw()", "def add_figure(self,sig,index,title='',xlabel='',ylabel=''):\n self.last_index = index\n ax = self.fig.add_subplot(self.position+index)\n ax.set_title(title)\n ax.set_xlabel(xlabel)\n ax.set_ylabel(ylabel)\n ax.plot(sig)" ]
[ "0.6603313", "0.61756814", "0.6072145", "0.6055725", "0.602782", "0.59950686", "0.5900304", "0.5885722", "0.5854338", "0.5785014", "0.5785014", "0.5734856", "0.572567", "0.57251096", "0.5723055", "0.5715933", "0.5715183", "0.5704", "0.5690976", "0.5655721", "0.5644146", "0.559757", "0.55882776", "0.558694", "0.5580749", "0.5552615", "0.55379474", "0.55330205", "0.5504115", "0.54700893" ]
0.62764513
1
Tag common incidents using a regex
def tag_incident(incident):
    try:
        return re.findall(regex, incident)[0]
    except TypeError:
        return pd.np.nan
    except IndexError:
        return pd.np.nan
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_tags(self):\n nolf = self.unixtext.replace(\"\\n\", \" \")\n res = EMERGENCY_RE.findall(nolf)\n if res:\n # TODO: this can be based off the IBW Tags too\n self.is_emergency = True\n match = WINDHAIL.match(nolf)\n if match:\n gdict = match.groupdict()\n self.windtag = gdict['wind']\n self.windtagunits = gdict['windunits']\n self.haildirtag = gdict['haildir']\n self.winddirtag = gdict['winddir']\n self.hailtag = gdict['hail']\n\n match = WINDTAG.match(nolf)\n if match:\n gdict = match.groupdict()\n self.winddirtag = gdict['winddir']\n self.windtag = gdict['wind']\n self.windtagunits = gdict['windunits']\n\n match = HAILTAG.match(nolf)\n if match:\n gdict = match.groupdict()\n self.haildirtag = gdict['haildir']\n self.hailtag = gdict['hail']\n\n match = TORNADOTAG.match(nolf)\n if match:\n gdict = match.groupdict()\n self.tornadotag = gdict['tornado']\n\n match = TORNADODAMAGETAG.match(nolf)\n if match:\n gdict = match.groupdict()\n self.tornadodamagetag = gdict['damage']\n\n match = WATERSPOUTTAG.match(nolf)\n if match:\n gdict = match.groupdict()\n self.waterspouttag = gdict['waterspout']\n\n for token in FLOOD_TAGS.findall(self.unixtext):\n self.flood_tags[token[0]] = token[1]", "def figure_id_filter(text):\n def fn(m):\n s = m.group(1) or \"\"\n s += m.group(2)\n body = m.group(4)\n s += replace_body(body, type=m.group(3))\n s += m.group(5) or \"\" # Close parens if any\n return s\n\n return __ref_pattern.sub(fn,text)", "def categorize(project, rules, output):\n output = output.replace('\\r\\n', '\\n')\n matched, applicable = set(), set()\n for tag, rule_project, regexp in rules:\n if not rule_project or rule_project == project:\n applicable.add(tag)\n if re.search(regexp, output, re.MULTILINE | re.DOTALL):\n matched.add(tag)\n return (matched, applicable)", "def tag():\n iso_list = []\n tags = [\"spatial_entity\", \"place\", \"motion\", \"location\", \"signal\", \"qslink\", \"olink\"]\n for token in doc:\n if token.norm_ in tags:\n iso_list.append(token.norm_)\n setList = list(set(iso_list))\n my_dict = {i: iso_list.count(i) for i in setList}\n\n for i in tags:\n if i.lower() not in my_dict:\n my_dict[i] = 0\n print(my_dict)", "def recreate_missing_match_TAGs (cls, nffg, log=logging.getLogger(\"TAG\")):\n log.debug(\"Recreate missing TAG matching fields...\")\n for infra in nffg.infras:\n # Iterate over flowrules of the infra\n for flowrule in infra.flowrules():\n # Get the source in_port of the flowrule from match field\n splitted = flowrule.match.split(';', 1)\n in_port = splitted[0].split('=')[1]\n try:\n # Convert in_port to int if it is possible\n in_port = int(in_port)\n except ValueError:\n pass\n # If the port is an inter-domain port\n if infra.ports[in_port].get_property('type') == \"inter-domain\":\n log.debug(\"Found inter-domain port: %s\", infra.ports[in_port])\n if len(splitted) > 1:\n # There is one or more TAG in match\n tags = splitted[1].split(';')\n found = False\n for tag in tags:\n try:\n vlan = tag.split('|')[-1]\n except ValueError:\n continue\n # Found a TAG with the vlan\n if vlan == str(flowrule.id):\n found = True\n break\n if found:\n # If found the appropriate TAG -> skip adding\n continue\n log.debug(\"TAG with vlan: %s is not found in %s!\" % (flowrule.id,\n flowrule))\n match_vlan = \";TAG=<None>|<None>|%s\" % flowrule.id\n flowrule.match += match_vlan\n log.debug(\"Manually extended match field: %s\" % flowrule.match)", "def tag(ctx, model, cs, corpus, output):\n click.echo('chemdataextractor.dict.tag')\n tagger = 
CsDictCemTagger(model=model) if cs else CiDictCemTagger(model=model)\n for line in corpus:\n sentence = []\n goldsentence = []\n for t in line.split():\n token, tag = t.rsplit('/', 1)\n goldsentence.append((token, tag))\n sentence.append(token)\n if sentence:\n tokentags = tagger.tag(sentence)\n for i, tokentag in enumerate(tokentags):\n goldtokentag = goldsentence[i]\n if goldtokentag[1] not in {'B-CM', 'I-CM'} and tokentag[1] in {'B-CM', 'I-CM'}:\n print(line)\n print(tokentag[0])\n\n output.write(' '.join('/'.join(tokentag) for tokentag in tagger.tag(sentence)))\n output.write('\\n')\n else:\n output.write('\\n')", "def tag_mapping(sentences):\n tags = [[word[-1] for word in s] for s in sentences]\n dico = create_dico(tags)\n tag_to_id, id_to_tag = create_mapping(dico)\n print(\"Found %i unique named entity tags\" % len(dico))\n return dico, tag_to_id, id_to_tag", "def categorize_tags(title):\n\n tag_cats = {'valid_tags': [], 'invalid_tags': []}\n\n # this regex might be a little too heavy-handed but it does support the valid tag formats\n allowed_tag_values = re.compile(\"^(?:(?:vol(?:\\.|ume)?|p(?:ar)?t|pt\\.)?\\s?(?:[1-9][0-9]?|one|two|three|five|ten|eleven|twelve|fifteen|(?:(?:four|six|seven|eight|nine)(?:teen)?))|final|update(?:[ ]#?[0-9]*)?)$\")\n matches = [m.group() for m in re.finditer(\"\\[([^]]*)\\]|\\((.*?)\\)|\\{(.*?)\\}\", title)]\n # for each match check if it's in the accepted list of tags\n\n for m in matches:\n # remove the braces/brackets/parens\n text = m.lower()[1:-1].strip()\n if not allowed_tag_values.match(text):\n tag_cats['invalid_tags'].append(text)\n else:\n tag_cats['valid_tags'].append(text)\n\n return tag_cats", "def tag_mapping(sentences):\n tags = [[char[-1] for char in s] for s in sentences]\n dico = create_dico(tags)\n tag_to_id, id_to_tag = create_mapping(dico)\n print(\"Found %i unique named entity tags\" % len(dico))\n return dico, tag_to_id, id_to_tag", "def form_bios(paired_plant_match_in, text_in):\r\n\ttagged_corpus =[]\r\n\r\n\t# used sorted and filtered indices to bio tag corpus\r\n\tfor i, word in enumerate(text_in): # iterate over words in corpus\r\n\t\tif word: # if not white space\r\n\t\t\tfind = paired_plant_match_in.get(i) # check if word index is in plant match dict keys\r\n\t\t\t#print(find)\r\n\t\t\tif find: # if match\r\n\t\t\t\ttagged_corpus.append(word + ' ' + str(find)) # append word to list WITH plant match dict value tag \r\n\t\t\telse:\r\n\t\t\t\ttagged_corpus.append(word + ' O') # else append word to list with 'O' tag\r\n\r\n\t\telse:\r\n\t\t\ttagged_corpus.append(word)\r\n\r\n\treturn tagged_corpus", "def _extract_spans(self, tag_sequence: List[int]) -> Set[Tuple[Tuple[int, int], str]]:\n spans = set()\n span_start = 0\n span_end = 0\n active_conll_tag = None\n for index, integer_tag in enumerate(tag_sequence):\n # Actual BIO tag.\n string_tag = self._bio_vocabulary[integer_tag]\n bio_tag = string_tag[0]\n conll_tag = string_tag[2:]\n if bio_tag == \"O\" or conll_tag ==\"V\":\n # The span has ended.\n if active_conll_tag:\n spans.add(((span_start, span_end), active_conll_tag))\n active_conll_tag = None\n # We don't care about tags we are\n # told to ignore, so we do nothing.\n continue\n elif bio_tag == \"U\":\n # The U tag is used to indicate a span of length 1,\n # so if there's an active tag we end it, and then\n # we add a \"length 0\" tag.\n if active_conll_tag:\n spans.add(((span_start, span_end), active_conll_tag))\n spans.add(((index, index), conll_tag))\n active_conll_tag = None\n elif bio_tag == \"B\":\n # We are 
entering a new span; reset indices\n # and active tag to new span.\n if active_conll_tag:\n spans.add(((span_start, span_end), active_conll_tag))\n active_conll_tag = conll_tag\n span_start = index\n span_end = index\n elif bio_tag == \"I\" and conll_tag == active_conll_tag:\n # We're inside a span.\n span_end += 1\n else:\n # This is the case the bio label is an \"I\", but either:\n # 1) the span hasn't started - i.e. an ill formed span.\n # 2) The span is an I tag for a different conll annotation.\n # We'll process the previous span if it exists, but also\n # include this span. This is important, because otherwise,\n # a model may get a perfect F1 score whilst still including\n # false positive ill-formed spans.\n if active_conll_tag:\n spans.add(((span_start, span_end), active_conll_tag))\n active_conll_tag = conll_tag\n span_start = index\n span_end = index\n # Last token might have been a part of a valid span.\n if active_conll_tag:\n spans.add(((span_start, span_end), active_conll_tag))\n return spans", "def tags():", "def findTags(user_input, tagged_text):\n result = []\n for item in tagged_text:\n for w in user_input:\n if w[WORD] == item[WORD]:\n tup = (w[WORD], item[TAG])\n result.append(tup)\n continue\n\n return result", "def _suggest_regexes(content):\n # Grab all regular expressions and compile them\n suggested_regexes = set()\n regex_keywords = TagRegex.objects.all()\n\n # Look for our regular expressions in the content\n for r in regex_keywords:\n if re.search(r.regex, content):\n suggested_regexes.add(r.tag_id)\n\n return suggested_regexes", "def tags_to_spans(tags):\n spans = set()\n span_start = 0\n span_end = 0\n active_conll_tag = None\n for index, string_tag in enumerate(tags):\n # Actual BIO tag.\n bio_tag = string_tag[0]\n assert bio_tag in [\"B\", \"I\", \"O\"], \"Invalid Tag\"\n conll_tag = string_tag[2:]\n if bio_tag == \"O\":\n # The span has ended.\n if active_conll_tag:\n spans.add((active_conll_tag, (span_start, span_end)))\n active_conll_tag = None\n # We don't care about tags we are\n # told to ignore, so we do nothing.\n continue\n elif bio_tag == \"B\":\n # We are entering a new span; reset indices and active tag to new span.\n if active_conll_tag:\n spans.add((active_conll_tag, (span_start, span_end)))\n active_conll_tag = conll_tag\n span_start = index\n span_end = index\n elif bio_tag == \"I\" and conll_tag == active_conll_tag:\n # We're inside a span.\n span_end += 1\n else:\n # This is the case the bio label is an \"I\", but either:\n # 1) the span hasn't started - i.e. an ill formed span.\n # 2) We have IOB1 tagging scheme.\n # We'll process the previous span if it exists, but also include this\n # span. 
This is important, because otherwise, a model may get a perfect\n # F1 score whilst still including false positive ill-formed spans.\n if active_conll_tag:\n spans.add((active_conll_tag, (span_start, span_end)))\n active_conll_tag = conll_tag\n span_start = index\n span_end = index\n # Last token might have been a part of a valid span.\n if active_conll_tag:\n spans.add((active_conll_tag, (span_start, span_end)))\n # Return sorted list of spans\n return sorted(list(spans), key=lambda x: x[1][0])", "def _annotate_re(a_re, a_ising, a_wght):\n for itok, _ in a_ising.iteritems():\n if a_re.search(itok):\n a_ising[itok][FXD_WGHT_IDX] = a_wght\n a_ising[itok][HAS_FXD_WGHT] = 1", "def getMatch(data):\n if len(data) > 15:\n return 'date: {0} {1}, match => {2}, {3}, {4}| 1x2 => {5}, {6}, {7}| handicap => {8}, {9}, {10}, {11}| OU => {12}, {13}, {14}, {15}'.format(data[1], data[2], data[3], data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], data[12], data[13], data[14], data[15], data[16])\n return 'date: {0} {1}, match => {2}, {3}, {4}| handicap => {5}, {6}, {7}, {8}| OU => {9}, {10}, {11}, {12}'.format(data[1], data[2], data[3], data[4], data[5], data[6], data[7], data[8], data[9], data[10], data[11], data[12], data[13])", "def tag_conjunction_entities(annotated_pages):\n for page_id in annotated_pages:\n page = Page.objects(id=page_id).first()\n #page = db_conn.pages.find_one({\"_id\":page_id}) # TODO: refactor\n annotation_ids = [p.id for p in page[\"annotations_ids\"]]\n all_annotations = list(Annotation.objects(id__in=annotation_ids))\n # retrieve meta-annotations from that page\n meta_annotations = list(Annotation.objects(id__in=annotation_ids, entity_type=\"meta-annotation\"))\n #all_annotations = list(db_conn.annotations.find({\"_id\":{\"$in\":annotation_ids}})) # TODO: refactor\n #meta_annotations = list(db_conn.annotations.find({\"_id\":{\"$in\":annotation_ids} # TODO: refactor\n # ,\"entity_type\":\"meta-annotation\"}))\n if(len(meta_annotations)>0):\n logger.debug(\"Meta-annotations: %s\"%meta_annotations)\n for meta_annotation in meta_annotations:\n logger.info(\"Processing meta-annotation %s\"%meta_annotation[\"id\"])\n line_span = sorted(list(set([(position[\"page_id\"], position[\"line_n\"]) \n for position in meta_annotation[\"positions\"]])))\n top_entities_ids = [ann.id for ann in meta_annotation[\"top_entities\"]]\n top_entities = list(Annotation.objects(id__in=top_entities_ids))\n #top_entities = [db_conn.annotations.find_one({\"_id\":top_annotation_id}) \n # for top_annotation_id in meta_annotation[\"top_entities\"]]\n tokens = []\n for page_obj, line_n in line_span:\n page = Page.objects(id=page_obj.id).first()\n #page = db_conn.pages.find_one({\"_id\":page_id})\n for line in page[\"lines\"]:\n if line[\"line_number\"]==line_n:\n tokens.append((page_obj,line_n,line[\"tokens\"]))\n try:\n for entity in top_entities:\n assert entity is not None\n true_conjunctions = []\n meta_annotation_start = (top_entities[0][\"positions\"][0][\"page_id\"]\n ,top_entities[0][\"positions\"][0][\"line_n\"]\n ,top_entities[0][\"positions\"][0][\"start\"])\n meta_annotation_end = (top_entities[-1][\"positions\"][-1][\"page_id\"]\n ,top_entities[-1][\"positions\"][-1][\"line_n\"]\n ,top_entities[-1][\"positions\"][-1][\"end\"])\n conjunctions = [(token,page,line) for page,line,toks in tokens for token in toks\n if(token[\"offset_start\"] >= meta_annotation_start[2] and token[\"offset_end\"] <= meta_annotation_end[2])]\n true_conjunctions += [(page,line,token) for 
token,page,line in conjunctions \n if not is_annotated(page,line,token,all_annotations)]\n if(len(true_conjunctions)>0):\n logger.debug(\"Conjunctions found: %s\"%true_conjunctions)\n conjunction_annotations = []\n all_ann_ids = [annotation[\"ann_id\"] for annotation in all_annotations \n if '+' not in annotation[\"ann_id\"] ]\n identifier_counter = int(sorted(all_ann_ids, key=lambda x: int(x.replace('T','')))[-1].replace(\"T\",\"\"))\n logger.debug(sorted(all_ann_ids, key=lambda x: int(x.replace('T','')))[-1])\n for page_obj, line_n, token in true_conjunctions:\n identifier_counter += 1\n conjunction_annotation = Annotation(entity_type=\"conjunction\"\n , ingestion_timestamp=datetime.utcnow()\n , annotation_ingester_version=__version__\n , pageid=meta_annotation.pageid\n , filename=meta_annotation.filename\n , bid=meta_annotation.bid)\n conjunction_annotation.surface = token[\"surface\"]\n conjunction_annotation.ann_id = \"T%i\"%identifier_counter\n conjunction_annotation.positions.append(PagePosition(page_id = page_obj\n , start = token[\"offset_start\"]\n , end = token[\"offset_end\"]\n , line_n = line_n))\n conjunction_annotation.save()\n conjunction_annotations.append(conjunction_annotation)\n logger.info(\"(Page: %s) %i conjunction annotations were created and stored in MongoDB\"%(page_obj.id\n , len(conjunction_annotations)))\n logger.debug(\"N %i of top entities before adding conjunction entities\"%len(meta_annotation[\"top_entities\"]))\n meta_annotation[\"top_entities\"] += conjunction_annotations\n logger.debug(\"N %i of top entities after adding conjunction entities\"%len(meta_annotation[\"top_entities\"]))\n Annotation.objects(id=meta_annotation.id).update_one(set__top_entities = meta_annotation[\"top_entities\"])\n for conj_annotation in conjunction_annotations:\n for position in conj_annotation[\"positions\"]:\n page = Page.objects(id=position.page_id.id).first()\n page[\"annotations_ids\"].append(conj_annotation)\n page.save()\n except AssertionError as e:\n #raise e\n logger.warning(\"The meta-annotation %s has no top-level entities and generated the following error: %s\"%(meta_annotation[\"_id\"],e))\n except Exception as e:\n raise e", "def scan_individual_identifiers(text: str, cpf: bool = True) -> List[str]:\n if cpf:\n regex = re.compile(r\"\\w{3}\\.\\w{3}\\.\\w{3}\\-\\w{2}\")\n else:\n regex = re.compile(r\"\\w{2}\\.\\w{3}\\.\\w{3}/\\w{4}\\-\\w{2}\")\n\n identifiers = re.findall(regex, text)\n return identifiers", "def addresses( data ) :\n return list( set(chain.from_iterable( [ re.sub(r'\\[.*?\\]\\s+','',x['C1']).split('; ') for x in data ] )))", "def findall(pattern, string, overlapping=True, sensitive=True, regexp=False):\n if regexp:\n return SE.occurrences_re(pattern, string)\n if overlapping:\n return SE.occurrences(pattern, string, sensitive)\n else:\n return SE.full_words(pattern, string, sensitive)", "def match_all_cui(s,max_len = 10, Eterm_cui = Eterm_cui):\n if len(s) == 0: \n return []\n sub_label = np.zeros(len(s),dtype = 'int')\n location_term = {}\n i = 0\n while i < len(s):\n for j in range(max_len+1,0,-1):\n temp = ' '.join(s[i:i+j])\n if temp in Eterm_cui:\n sub_label[i:i+j] = 1\n location_term[i] = [Eterm_cui[temp]]\n break#matched maximum string, so break\n i += j\n output = []\n for i in range(len(s)):\n if sub_label[i] == 0:#no match\n output += [s[i]]\n elif i in location_term:\n for cui in location_term[i][: :-1]:\n output += [cui]\n return output", "def tag_range(str_text, i_from, i_to, lst_tag_types):\n rgx_tag = 
re.compile(target_pattern(lst_tag_types))\n return [o_match.span() for o_match in rgx_tag.finditer(str_text) if\n overlap((i_from, i_to), o_match.span())]", "def tag(comment):\n words = comment.split()\n doc = spacy.tokens.Doc(nlp.vocab, words)\n doc = nlp.tagger(doc)\n comment=\"\"\n for i in doc:\n comment = comment +\" \"+ i.text+\"/\"+i.tag_\n comment=comment[1:]\n return comment", "def tagger():", "def get_tagged_user(line, unique_users):\n tagged_user = None\n\n for user in unique_users:\n \n tagged_user = re.search(f\"@{user}\\s*\", line)\n \n if tagged_user != None:\n tagged_user = tagged_user.group(0).replace(\"@\", \"\").strip()\n line = line.replace(f\"@{user} \", \"\")\n break\n \n return (tagged_user, line)", "def test_combine_multiple(self):\n inv_search = 'author:\"gattringer, c*\" keyword:chiral keyword:symmetry -title:chiral'\n spi_search = \"find a c gattringer and k chiral symmetry and not title chiral\"\n self._compare_searches(inv_search, spi_search)", "def determine_keywords(self):\n\n split = dict()\n split['email_cc'] = re.compile(\"^\\s*CC[-_]?MAIL[:=]\\s*(.*)\")\n split['email_cc2'] = re.compile(\"^\\s*C[Cc][:=]\\s*(.*)\")\n split['fixed_in'] = re.compile(\"^\\s*FIXED[-_]?IN[:=]\\s*(.*)\")\n\n numeric = dict()\n numeric['bug_fixed'] = re.compile(\"^\\s*(?:BUGS?|FEATURE)[:=]\\s*(.+)\")\n numeric['bug_cc'] = re.compile(\"^\\s*CCBUGS?[:=]\\s*(.+)\")\n\n presence = dict()\n presence['email_gui'] = re.compile(\"^\\s*GUI:\")\n presence['silent'] = re.compile(\"(?:CVS|SVN|GIT|SCM).?SILENT\")\n presence['notes'] = re.compile(\"(?:Notes added by 'git notes add'|Notes removed by 'git notes remove')\")\n\n results = defaultdict(list)\n for line in self.commit.message.split(\"\\n\"):\n # If our line starts with Summary: (as it does when using Arcanist's default template) then strip this off\n # This allows for people to fill keywords in the Differential Summary and have this work smoothly for them\n line = re.sub(\"^Summary: (.+)\", \"\\g<1>\", line)\n\n # Start processing our keywords...\n for (name, regex) in split.iteritems():\n match = re.match( regex, line )\n if match:\n results[name] += [result.strip() for result in match.group(1).split(\",\")]\n\n for (name, regex) in numeric.iteritems():\n match = re.match( regex, line )\n if match:\n results[name] += re.findall(\"(\\d{1,10})\", match.group(1))\n\n for (name, regex) in presence.iteritems():\n if re.match( regex, line ):\n results[name] = True\n\n self.keywords = results", "def cve_match(self, string):\n pattern_list = [r'assigned the following CVE ID: (CVE-\\d+-\\d+)',\n r'ID for this vulnerability is: (CVE-\\d+-\\d+)']\n\n for pattern in pattern_list:\n match = re.search('{0}'.format(pattern), string)\n if match:\n cve = match.group(1)\n if is_correct_cve_id(cve):\n return cve\n return ''", "def intf_ENTCHTAG(E):\n if ( not inc.entid_or_LST_of_entids(E.The,3) or\n not inc.TXT(E,2) or not inc.TXT(E,1) ):\n print(\"Input Error: chtag\")\n print(intf_ENTCHTAG.__doc__)\n return # Without doing much of anything.\n refreshview= False # No need unless view attributes (@) have been affected.\n newtag= E.The.StackPop().val\n oldtag= E.The.StackPop().val\n myeids= E.The.StackPop().val\n if type(myeids)==type(list()):\n #myeids= map(lambda x:x.val, myeids) # Should now be a list of VALs.\n myeids= [x.val for x in myeids] # Should now be a list of VALs.\n else:\n myeids= [ myeids ] # Also a (1 item) list of ints.\n for myeid in myeids:\n if myeid in MMEL.El: # Check if eid exists.\n if MMEL.El[myeid].has_tag(oldtag):\n 
print(\"Untagging entity #%d with tag ''%s''\" % (myeid,oldtag))\n MMEL.El[myeid].del_tag(oldtag)\n MMEL.El[myeid].add_tag(newtag)\n if '@' in oldtag or '@' in newtag: refreshview= True\n else:\n print(\"Warning: No entity #%d. Skipping.\" % myeid)\n if refreshview: OUT.default(MMEL,E) # AUTODUMP " ]
[ "0.52708447", "0.5163899", "0.51446724", "0.5110537", "0.5087893", "0.5036303", "0.5027366", "0.502397", "0.50166464", "0.49869832", "0.4966351", "0.49521333", "0.49241787", "0.4894549", "0.4828666", "0.48126188", "0.48103958", "0.48088047", "0.47784507", "0.47735015", "0.47564182", "0.47264576", "0.4713877", "0.4713829", "0.47035778", "0.4703499", "0.47026485", "0.47024643", "0.47002947", "0.46854466" ]
0.545593
0
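A minimal, self-contained sketch of the BIO span decoding that the first negative above is a fragment of. This is an illustrative reconstruction of the technique, not the verbatim source of any record.

def bio_tags_to_spans(tags):
    """Decode BIO tags into (label, (start, end)) spans with inclusive indices."""
    spans = set()
    active_tag = None
    span_start = span_end = 0
    for index, tag in enumerate(tags):
        prefix, _, label = tag.partition("-")
        if prefix == "O":
            # An O tag closes any open span; this check is what keeps
            # ill-formed spans from inflating the F1 score.
            if active_tag:
                spans.add((active_tag, (span_start, span_end)))
            active_tag = None
        elif prefix == "B" or label != active_tag:
            # A B- tag, or an I- tag with a new label, starts a fresh span.
            if active_tag:
                spans.add((active_tag, (span_start, span_end)))
            active_tag, span_start, span_end = label, index, index
        else:
            # An I- tag continuing the active span extends its end.
            span_end = index
    if active_tag:
        # The last token might have been part of a valid span.
        spans.add((active_tag, (span_start, span_end)))
    return sorted(spans, key=lambda x: x[1][0])

Example: bio_tags_to_spans(["B-PER", "I-PER", "O", "B-LOC"]) returns [("PER", (0, 1)), ("LOC", (3, 3))].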
Allows subclasses to override variables in the scope before parsing. The default implementation does nothing. Parameter: scope, the global scope in which to override variables.
def override(self, scope):
    pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def beginScope():", "def scope(self):\n return 'global' if self.parent is None else 'local'", "def scope(self): # noqa: ANN201", "def scope_global(self):\n self.raw['scope'] = 'GLOBAL'\n return self", "def scope(self, name):\r\n raise NotImplementedError", "def __init__(self, name, parent=None):\n self.current_scope = Scope(name, parent)", "def enterScope(self, name):", "def default(self, line):\n try:\n exec(line) in self._locals, self._globals\n except Exception as e:\n print(e.__class__, \":\", e)", "def __init__(self, **variables):\n vars(self).update(variables)", "def init_locals(self):\n pass", "def __init__(self):\n self.variables = [] # List of all variables in certain scope.\n self.field_id = 0 # Id of next field varibale.\n self.argumen_id = 0 # Id of next argument variable.\n self.local_id = 0 # Id of next local variable.\n self.static_id = 0 # Id of next static variable.", "def variables_declared (self) :\r\n\t\treturn {}", "def name_scope(self):\n pass", "def set_vars():\n return dict()", "def activate_shell_scope(self):\n self.variables = {}\n self.prompt = 'cm> '\n self.active_scope = \"\"\n self.scopes = []\n self.scopeless = ['load', 'info', 'var', 'use', 'quit', 'q', 'help']\n # self.scopeless = ['use', 'quit', 'q', 'EOF', 'eof', 'help']", "def test_10_global_scope(self):\n\t\tinput = \"\"\"var x,y:integer;\n\t\tprocedure main(); var x:real; begin x:=foo(1,false); {OK} end\n\t\tfunction foo(a:real;b:boolean):real; begin return 1e2; end\n\t\tprocedure f(); var y:integer; y:real; begin end\"\"\"\n\t\texpect = \"Redeclared Variable: y\"\n\t\tself.assertTrue(TestChecker.test(input,expect,410))", "def __init__(self, name=None, scope=None, variables=None):\n self.name = name\n self.scope: 'VariableScope' = scope\n self.variables: Set[MemoryVariable] = variables or set()", "def __init__(self, **kwargs):\n\t\tself.vars = kwargs\n\t\tself.old_vars = None", "def default_environment():\n return dict(_VARS)", "def _init_vars(self):\n if not self._has(\"vars\"):\n if self._has(\"p\"):\n self._.vars = self._.p.variables()\n elif self._has(\"q\"):\n self._.vars = self._.q.variables()\n elif self._has(\"P\"):\n self._.vars = variables(self._.P)\n elif self._has(\"Q\"):\n self._.vars = variables(self._.Q)\n self._.vars_ordered = len(self._.vars) <= 1", "def vars(self, scope: str = '') -> VarCollection:\n vc = VarCollection()\n scope += f'({self.__class__.__name__})'\n for p, v in enumerate(self):\n if isinstance(v, BaseVar):\n vc[f'{scope}[{p}]'] = v\n elif isinstance(v, Module):\n vc.update(v.vars(scope=f'{scope}[{p}]'))\n return vc", "def vars(self, scope: str = '') -> VarCollection:\n vc = VarCollection()\n scope += f'({self.__class__.__name__}).'\n for k, v in self.__dict__.items():\n if isinstance(v, BaseVar):\n vc[scope + k] = v\n elif isinstance(v, Module):\n if k == '__wrapped__':\n vc.update(v.vars(scope=scope[:-1]))\n else:\n vc.update(v.vars(scope=scope + k))\n return vc", "def __init__(self, parent, isolated=True, function_name=None):\n self.parent = parent\n self.isolated = isolated\n self.function_name = function_name\n\n self.isolated_names = set()\n\n self.read = set()\n self.modified = set()\n self.deleted = set()\n\n self.bound = set()\n self.globals = set()\n self.nonlocals = set()\n self.annotations = set()\n\n self.params = weakref.WeakValueDictionary()\n\n # Certain fields can only be accessed after the scope and all its parent\n # scopes have been fully built. 
This field guards that.\n self.is_final = False", "def _normalize_variable_recurrent_scope(scope: 'Scope'):\n ret_scope = scope.copy()\n for scope_element in ret_scope:\n if scope_element.calling_module_class_name in [\"Recurrent\", \"VariableRecurrent\",\n \"VariableRecurrentReverse\"]:\n scope_element.calling_module_class_name = \"NormalizedName_Recurrent\"\n return ret_scope", "def set_rucio_scope( rucio_scope):\n global RUCIO_SCOPE\n RUCIO_SCOPE = rucio_scope", "def initialize_scope_settings(scope) -> None:\n scope.set_hi_res_mode()\n scope.set_single_acquisition_mode()\n scope.set_waveform_data_source_single_channel(1)\n scope.set_waveform_encoding_ascii()\n scope.set_waveform_start_point(1)\n scope.set_waveform_stop_point(scope.get_waveform_length())\n scope.set_waveform_start_point(1)\n scope.set_waveform_stop_point(10_000_000)\n scope.turn_off_all_measurements()\n scope.add_displayed_mean_measurement(1, 1)\n scope.add_displayed_mean_measurement(2, 2)\n scope.add_displayed_mean_measurement(3, 3)\n scope.zero_all_vertical_positions()\n scope.set_trigger_holdoff()\n return", "def __init__(self):\n\n self.var = None", "def vars(self, scope: str = '') -> VarCollection:\n if scope:\n return VarCollection((scope + k, v) for k, v in self.vc.items())\n return VarCollection(self.vc)", "def override_session_vars(self):\n self.session_overrides = self.parse_session_overrides_str(\n self.session_overrides_str\n )\n for var_name, var_value in self.session_overrides:\n log.info(\n \"Override session variable {} with value: {}\".format(\n var_name, var_value\n )\n )\n self.execute_sql(sql.set_session_variable(var_name), (var_value,))", "def __init__(self, varname, global_attrs, values=None):\n\n self.varname = varname\n self.attrs = global_attrs\n self.metric, self.g, parts = self._parse_varname()\n self.ref_ds, self.other_dss, self.metric_ds = self._named_attrs(parts)\n self.values = values" ]
[ "0.5940736", "0.582645", "0.5746078", "0.5648204", "0.5605894", "0.56055665", "0.5571537", "0.5530811", "0.55072176", "0.5491784", "0.54825747", "0.54637235", "0.54503393", "0.53918594", "0.5380209", "0.5379051", "0.5374612", "0.5317445", "0.5314528", "0.5310974", "0.5304187", "0.52938384", "0.5286956", "0.52778494", "0.5269433", "0.5219662", "0.51973456", "0.51847243", "0.51700735", "0.5160089" ]
0.7037989
0
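A minimal sketch of the "override variables before parsing" hook named in the query above. The Scope and Parser class shapes here are assumptions for illustration; only the override(self, scope) signature comes from the record.

class Scope:
    def __init__(self):
        self.variables = {}

    def set(self, name, value):
        self.variables[name] = value

class Parser:
    def parse(self, text, scope):
        # Give subclasses a chance to pre-populate the global scope.
        self.override(scope)
        return text, dict(scope.variables)  # stand-in for real parsing

    def override(self, scope):
        """Default implementation does nothing; subclasses may override."""

class DebugParser(Parser):
    def override(self, scope):
        scope.set("debug", True)

Example: DebugParser().parse("x = 1", Scope()) yields ("x = 1", {"debug": True}).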
Solve a parabolic partial differential equation with the Crank-Nicolson finite difference method
def crank_nicolson_fd(main_args, boundary_left_args, boundary_right_args, initial_func, min_x, max_x, max_t, step_x, step_t, boundary_approximation_func='first_order_two_points', theta=0.5): d = { 'first_order_two_points': _o1p2, # o - order, p - points 'second_order_two_points': _o2p2, 'second_order_three_points': _o2p3 } (complete_matrix, complete_vector) = d[boundary_approximation_func](main_args, boundary_left_args, boundary_right_args, step_x, step_t, min_x, max_x) m = int(max_t / step_t) + 1 n = int((max_x - min_x) / step_x) + 1 u = [None for _ in range(m)] u[0] = [initial_func(min_x + x * step_x) for x in range(n)] a, b, c, f = main_args A = a * (1 - theta) / step_x ** 2 - b * (1 - theta) / (2 * step_x) B = c * (1 - theta) - 2 * a * (1 - theta) / step_x ** 2 - 1 / step_t C = a * (1 - theta) / step_x ** 2 + b * (1 - theta) / (2 * step_x) X = b * theta / (2 * step_x) - a * theta / step_x ** 2 Y = 2 * a * theta / step_x ** 2 - c * theta - 1 / step_t Z = - a * theta / step_x ** 2 - b * theta / (2 * step_x) matrix_u_t = Matrix(size=(n, 3)) for i in range(1, n - 1): matrix_u_t[i] = [A, B, C] complete_matrix(matrix_u_t) for t in range(1, m): v = Vector(size=(n, 1)) for x in range(1, n - 1): v[x] = (u[t - 1][x - 1] * X + u[t - 1][x] * Y + u[t - 1][x + 1] * Z + (theta - 1) * f(min_x + x * step_x, t * step_t) - theta * f(min_x + x * step_x, (t - 1) * step_t)) complete_vector(v, t * step_t, matrix_u_t, u[t-1][0], u[t-1][-1]) u[t] = list(TDMA(mtrx=matrix_u_t, vec=v).solve()) return u
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solve_differential_equation(f_derivatives, initial, oldest=120):\n bunch = solve_ivp(f_derivatives, t_span=(0, oldest), y0=initial, vectorized=True, dense_output=True)\n return bunch.sol", "def work_dos():\n #potential = 2x**2+x**2y+y**2\n x1,y1 = (2, -3)\n x2,y2 = (-1, 2)\n p1 = (2*(x1**2)) + ((x1**2)*y1) + (y1**2)\n p2 = (2*(x2**2)) + ((x2**2)*y2) + (y2**2)\n sol = p1 - p2\n sol = abs(sol)\n print(f'The vector field F=(4x+2xy,x2+2y) \\n'\n 'along the curve C parametrized by r(t)=(3t−1,−5t+2) \\n '\n f'for 0 ≤ t ≤ 1 is: {sol}')", "def solve(self):\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n '''\n Its important to note that (par_est,) is the way to define a tuple\n with just ode element. When we put (par_est), the parenteses won't\n indicate a typle\n '''\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]", "def my_Newton( fct, df_dx, x0):\r\n xn = float(x0)\r\n eps = 1e-5\r\n N = 20\r\n i = 0\r\n while abs( fct( xn**(i + 1)) - fct( xn**i)) > eps and i < N:\r\n x_next = xn - fct(xn)/df_dx(xn)\r\n print( i, 'fct value', abs( fct(xn)), x_next)\r\n xn = x_next\r\n i += 1\r\n if abs( fct( xn)) < eps:\r\n return x_next\r\n else: #solution did not converge\r\n return np.nan", "def test_solver():\n # Choice of nonlinear coefficient\n m = 2\n\n def q(u):\n return (1+u)**m\n\n def Dq(u):\n return m*(1+u)**(m-1)\n\n u_exact = Expression(\n 'pow((pow(2, m+1)-1)*x[0] + 1, 1.0/(m+1)) - 1', m=m)\n linear_solver = 'direct'\n errors = []\n for method in 'alg_Newton', 'pde_Newton':\n for J_comp in 'manual', 'automatic':\n for degree in 1, 2, 3:\n error_prev = -1\n for divisions in [(10, 10), (20, 20), (40, 40)]:\n u = solver(\n q, Dq, f, divisions, degree,\n method, J_comp,\n linear_solver,\n abs_tol_Krylov=1E-10,\n rel_tol_Krylov=1E-10,\n abs_tol_Newton=1E-10,\n rel_tol_Newton=1E-10)\n\n # Find max error\n u_e = interpolate(u_exact, u.function_space())\n import numpy as np\n error = np.abs(u_e.vector().array() -\n u.vector().array()).max()\n # Expect convergence as h**(degree+1)\n if error_prev > 0:\n frac = abs(error - error_prev/2**(degree+1))\n errors.append(frac)\n error_prev = error\n tol = 4E-5\n for error_reduction in errors:\n assert error_reduction < tol, error_reduction", "def frac_delay(delta, N, w_max=0.9, C=4):\n\n # constraints\n N_C = int(C * N)\n w = np.linspace(0, w_max * np.pi, N_C)[:, np.newaxis]\n\n n = np.arange(N)\n\n try:\n from cvxopt import matrix, solvers\n except:\n raise ValueError(\n \"To use the frac_delay function, the cvxopt module is necessary.\"\n )\n\n f = np.concatenate((np.zeros(N), np.ones(1)))\n\n A = []\n b = []\n for i in range(N_C):\n Anp = np.concatenate(\n ([np.cos(w[i] * n), -np.sin(w[i] * n)], [[0], [0]]), axis=1\n )\n Anp = np.concatenate(([-f], Anp), axis=0)\n A.append(matrix(Anp))\n b.append(\n matrix(np.concatenate(([0], np.cos(w[i] * delta), -np.sin(w[i] * delta))))\n )\n\n solvers.options[\"show_progress\"] = False\n sol = solvers.socp(matrix(f), Gq=A, hq=b)\n\n h = np.array(sol[\"x\"])[:-1, 0]\n\n \"\"\"\n import matplotlib.pyplot as plt\n w = np.linspace(0, np.pi, 2*N_C)\n F = np.exp(-1j*w[:,np.newaxis]*n)\n Hd = np.exp(-1j*delta*w)\n plt.figure()\n plt.subplot(3,1,1)\n plt.plot(np.abs(np.dot(F,h) - Hd))\n plt.subplot(3,1,2)\n 
plt.plot(np.diff(np.angle(np.dot(F,h))))\n plt.subplot(3,1,3)\n plt.plot(h)\n \"\"\"\n\n return h", "def efSolver2(self):\n dx = self.dh[0] # dx\n dy = self.dh[1] # dy\n dz = self.dh[2] # dz\n \n \"\"\"\n for i in np.arange(0, self.ni):\n for j in np.arange(0, self.nj):\n for k in np.arange(0, self.nk):\n \"\"\"\n\n ##x-component#\n #if i==0: \n #x-component#\n \"\"\"\n if i==0: \n # forward\n self.ef[i][j][k][0] = -(-3*self.phi[i][j][k]+\\\n 4*self.phi[i+1][j][k]-\\\n self.phi[i+2][j][k])/(2*dx)\n \"\"\"\n \n # forward\n self.ef[0,0:self.nj,0:self.nk,0] = -(-3*self.phi[0,0:self.nj,0:self.nk]+\\\n 4*self.phi[1,0:self.nj,0:self.nk]-\\\n self.phi[2,0:self.nj,0:self.nk])/(2*dx)\n \n #elif i==self.ni-1: \n \"\"\"\n elif i==self.ni-1: \n # backward\n self.ef[i][j][k][0] = -(self.phi[i-2][j][k]-\\\n 4*self.phi[i-1][j][k]+\\\n 3*self.phi[i][j][k])/(2*dx)\n \"\"\" \n # backward\n self.ef[self.ni-1,0:self.nj,0:self.nk,0] = -(self.phi[self.ni-3,0:self.nj,0:self.nk]-\\\n 4*self.phi[self.ni-2,0:self.nj,0:self.nk]+\\\n 3*self.phi[self.ni-1,0:self.nj,0:self.nk])/(2*dx)\n \"\"\"\n else: \n #central\n self.ef[i][j][k][0] = -(self.phi[i+1][j][k] - \\\n self.phi[i-1][j][k])/(2*dx)\n \"\"\" \n #central\n self.ef[1:self.ni-1,0:self.nj,0:self.nk,0] = -(self.phi[2:self.ni,0:self.nj,0:self.nk] - \\\n self.phi[0:self.ni-2,0:self.nj,0:self.nk])/(2*dx)\n\n\n #y-component\n #if j==0:\n \"\"\"\n if j==0:\n self.ef[i][j][k][1] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j+1][k]-\\\n self.phi[i][j+2][k])/(2*dy)\n \n \"\"\"\n self.ef[0:self.ni,0,0:self.nk,1] = -(-3*self.phi[0:self.ni,0,0:self.nk] + \\\n 4*self.phi[0:self.ni,1,0:self.nk]-\\\n self.phi[0:self.ni,2,0:self.nk])/(2*dy)\n #elif j==self.nj-1:\n \"\"\"\n elif j==self.nj-1:\n self.ef[i][j][k][1] = -(self.phi[i][j-2][k] - \\\n 4*self.phi[i][j-1][k] +\\\n 3*self.phi[i][j][k])/(2*dy)\n \n \"\"\"\n self.ef[0:self.ni,self.nj-1,0:self.nk,1] = -(self.phi[0:self.ni,self.nj-3,0:self.nk] - \\\n 4*self.phi[0:self.ni,self.nj-2,0:self.nk] +\\\n 3*self.phi[0:self.ni,self.nj-1,0:self.nk])/(2*dy)\n #else:\n \"\"\"\n else:\n self.ef[i][j][k][1] = -(self.phi[i][j+1][k] - \\\n self.phi[i][j-1][k])/(2*dy)\n\n \"\"\"\n self.ef[0:self.ni,1:self.nj-1,0:self.nk,1] = -(self.phi[0:self.ni,2:self.nj,0:self.nk] - \\\n self.phi[0:self.ni,0:self.nj-2,0:self.nk])/(2*dy)\n\n #z-component\n '''\n if k==0:\n self.ef[i][j][k][2] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j][k+1]-\n self.phi[i][j][k+2])/(2*dz)\n \n '''\n #z-component\n #if k==0:\n self.ef[0:self.ni,0:self.nj,0,2] = -(-3*self.phi[0:self.ni,0:self.nj,0] + \\\n 4*self.phi[0:self.ni,0:self.nj,1]-\n self.phi[0:self.ni,0:self.nj,2])/(2*dz)\n\n \"\"\"\n elif k==self.nk-1:\n self.ef[i][j][k][2] = -(self.phi[i][j][k-2] - \\\n 4*self.phi[i][j][k-1] + \\\n 3*self.phi[i][j][k])/(2*dz)\n \"\"\"\n \n #elif k==self.nk-1:\n self.ef[0:self.ni,0:self.nj,self.nk-1,2] = -(self.phi[0:self.ni,0:self.nj,self.nk-3] - \\\n 4*self.phi[0:self.ni,0:self.nj,self.nk-2] + \\\n 3*self.phi[0:self.ni,0:self.nj,self.nk-1])/(2*dz) \n \"\"\"\n else:\n self.ef[i][j][k][2] = -(self.phi[i][j][k+1] - \\\n self.phi[i][j][k-1])/(2*dz)\n \"\"\"\n #else:\n self.ef[0:self.ni,0:self.nj,1:self.nk-1,2] = -(self.phi[0:self.ni,0:self.nj,2:self.nk] - \\\n self.phi[0:self.ni,0:self.nj,0:self.nk-2])/(2*dz)", "def my_Newton(fct, df_dx, x0):\r\n xn = float(x0)\r\n eps = 1e-5\r\n N = 20\r\n i = 0\r\n while abs(fct (xn)) > eps and i < N:\r\n x_next = xn - fct(xn)/df_dx(xn)\r\n print(i , 'fct_value', abs(fct(xn)), x_next)\r\n xn = x_next\r\n i += 1\r\n if abs(fct(xn)) < eps:\r\n 
return x_next\r\n else: #solution did not converge\r\n return np.nan", "def finite_diff(F, x0, v0, dt, M, K, C, T):\r\n\r\n ### INITIAL PARAMETERS ####\r\n\r\n # defining the number of steps of analysis = Ns\r\n Ns = int(T/dt)+1\r\n # step t0 (initial acceleration)\r\n ngl = np.shape(F)[0] # captures the number of degrees of freedom\r\n\r\n ### MODELLING THE DISPLACEMENTS ###\r\n\r\n x_before = np.zeros((ngl,1))\r\n # matrix that indicates the displacements, in each degree of freedom, along the time of \r\n # duration of analysis. Each column is a time step\r\n x = np.zeros((ngl, Ns))\r\n x[:,0] = x0[:,0]\r\n\r\n ### SOLVING INITIAL STEP ###\r\n\r\n # initial Force F0 is equivalent to the first column of the matrix of load vectors F along time\r\n aux1 = np.zeros((ngl,1))\r\n aux1[:,0] = np.copy(F[:,0])\r\n aux2 = aux1 - np.dot(C,v0) - np.dot(K,x0)\r\n a0 = np.dot(la.inv(M),aux2)\r\n # step t-1 (before initial condition)\r\n x_before = dt*dt*a0/2 - dt*v0 + x0 \r\n # step t+1 (after initial condition)\r\n C1 = M / (dt*dt) + C / (2*dt)\r\n C2 = K - 2*M / (dt*dt)\r\n C3 = M / (dt*dt) - C / (2*dt)\r\n aux3 = aux1 - np.dot(C2, x0) - np.dot(C3, x_before)\r\n x[:,1] = np.dot(la.inv(C1), aux3[:,0])\r\n\r\n ### INTEGRATING ALONG THE DURATION OS ANALYSIS ###\r\n\r\n i = 0\r\n aux4 = np.zeros((ngl,1))\r\n aux5 = np.zeros((ngl,1))\r\n aux6 = np.zeros((ngl,1))\r\n aux7 = np.zeros((ngl,1))\r\n for i in range(1,Ns-1):\r\n aux4[:,0] = np.copy(F[:,i])\r\n aux5[:,0] = np.copy(x[:,i])\r\n aux6[:,0] = np.copy(x[:,i-1])\r\n aux7[:,0] = np.copy(x[:,i+1])\r\n aux7 = np.dot(la.inv(C1), aux4 - np.dot(C2,aux5) - np.dot(C3,aux6))\r\n x[:,i+1] = np.copy(aux7[:,0])\r\n return x", "def solve(n=5000,C=-6*10**11,a=900,b=3):\n coeffs = np.zeros(n+2)\n coeffs[0] = a-b*n\n coeffs[1] = b*(n+1) - a\n coeffs[-3] = -C\n coeffs[-2] = 2*C - a\n coeffs[-1] = a+b-C\n mp.dps = 27\n roots = polyroots(coeffs)\n for root in roots:\n print root", "def nonlinear_electroelastodynamics(optimise=True):\n\n mesh = Mesh()\n mesh.Parallelepiped(upper_right_front_point=(1,1,0.001),nx=10,ny=10,nz=1, element_type=\"hex\")\n\n mu = 5.0e4\n mu1 = mu\n mu2 = mu\n eps_2 = 4.0*8.8541e-12\n v = 0.4\n lamb = 2.*mu*v/(1-2.*v)\n material = IsotropicElectroMechanics_108(3, mu1=mu1, mu2=mu2, lamb=lamb, eps_2=eps_2, rho=1200.)\n\n formulation = DisplacementPotentialFormulation(mesh)\n\n\n def dirichlet_function(mesh):\n\n boundary_data = np.zeros((mesh.points.shape[0],4))+np.NAN\n\n Z_0 = np.logical_and(np.isclose(mesh.points[:,0],0.),np.isclose(mesh.points[:,2],0.))\n boundary_data[Z_0,:3] = 0.\n Z_0 = np.logical_and(np.isclose(mesh.points[:,1],0.),np.isclose(mesh.points[:,2],0.))\n boundary_data[Z_0,:3] = 0.\n Z_0 = np.logical_and(np.isclose(mesh.points[:,0],1),np.isclose(mesh.points[:,2],0.))\n boundary_data[Z_0,:3] = 0.\n Z_0 = np.logical_and(np.isclose(mesh.points[:,1],1),np.isclose(mesh.points[:,2],0.))\n boundary_data[Z_0,:3] = 0.\n\n Z_0 = np.isclose(mesh.points[:,2],0.)\n boundary_data[Z_0,3] = 0.\n\n Z_0 = np.isclose(mesh.points[:,2],.001)\n boundary_data[Z_0,3] = 9e3\n\n return boundary_data\n\n boundary_condition = BoundaryCondition()\n boundary_condition.SetDirichletCriteria(dirichlet_function, mesh)\n\n nonlinear_static_solver = FEMSolver(total_time=60.,\n number_of_load_increments=25,\n analysis_nature=\"nonlinear\",\n analysis_type=\"static\",\n newton_raphson_tolerance=1e-5,\n newton_raphson_solution_tolerance=1e-11,\n optimise=optimise,\n print_incremental_log=True,\n )\n\n nonlinear_static_results = 
nonlinear_static_solver.Solve(formulation=formulation, mesh=mesh,\n material=material, boundary_condition=boundary_condition)\n\n\n nonlinear_dynamic_solver = FEMSolver(total_time=60.,\n number_of_load_increments=250,\n analysis_nature=\"nonlinear\",\n analysis_type=\"dynamic\",\n newton_raphson_tolerance=1e-5,\n newton_raphson_solution_tolerance=1e-11,\n optimise=optimise,\n print_incremental_log=True,\n compute_energy_dissipation=True,\n compute_linear_momentum_dissipation=True,\n )\n\n nonlinear_dynamic_results = nonlinear_dynamic_solver.Solve(formulation=formulation, mesh=mesh,\n material=material, boundary_condition=boundary_condition)\n\n\n # boundary_condition.__reset_state__()\n # boundary_condition.SetDirichletCriteria(dirichlet_function, mesh)\n\n # nonlinear_dynamic_solver_exp = FEMSolver(total_time=6.,\n # number_of_load_increments=200000,\n # save_frequency=200000,\n # analysis_nature=\"nonlinear\",\n # analysis_type=\"dynamic\",\n # analysis_subtype=\"explicit\",\n # newton_raphson_tolerance=1e-5,\n # newton_raphson_solution_tolerance=1e-11,\n # optimise=optimise,\n # print_incremental_log=True,\n # )\n\n # nonlinear_dynamic_results_exp = nonlinear_dynamic_solver_exp.Solve(formulation=formulation, mesh=mesh,\n # material=material, boundary_condition=boundary_condition)\n\n\n boundary_condition.__reset_state__()\n boundary_condition.SetDirichletCriteria(dirichlet_function, mesh)\n\n linear_static_solver = FEMSolver(total_time=60.,\n number_of_load_increments=250,\n analysis_nature=\"linear\",\n analysis_type=\"static\",\n newton_raphson_tolerance=1e-5,\n newton_raphson_solution_tolerance=1e-11,\n optimise=optimise,\n print_incremental_log=True,\n )\n\n linear_static_results = linear_static_solver.Solve(formulation=formulation, mesh=mesh,\n material=material, boundary_condition=boundary_condition)\n\n\n boundary_condition.__reset_state__()\n boundary_condition.SetDirichletCriteria(dirichlet_function, mesh)\n\n linear_dynamic_solver = FEMSolver(total_time=60.,\n number_of_load_increments=1000,\n analysis_nature=\"linear\",\n analysis_type=\"dynamic\",\n newton_raphson_tolerance=1e-5,\n newton_raphson_solution_tolerance=1e-11,\n optimise=optimise,\n print_incremental_log=True,\n break_at_increment=100,\n )\n\n linear_dynamic_results = linear_dynamic_solver.Solve(formulation=formulation, mesh=mesh,\n material=material, boundary_condition=boundary_condition)\n\n\n s1 = nonlinear_static_results.GetSolutionVectors()\n s2 = nonlinear_dynamic_results.GetSolutionVectors()\n # s3 = nonlinear_dynamic_results_exp.GetSolutionVectors()\n s4 = linear_static_results.GetSolutionVectors()\n s5 = linear_dynamic_results.GetSolutionVectors()\n\n norm = lambda x: np.linalg.norm(x[:,2,-1])\n assert norm(s1) > 0.13 and norm(s1) < 0.15\n assert norm(s2) > 0.13 and norm(s2) < 0.15\n assert norm(s4) > 0.13 and norm(s4) < 0.15", "def test_pde_vector():\n eq = PDE({\"u\": \"vector_laplace(u) + exp(-t)\"})\n grid = UnitGrid([8, 8])\n field = VectorField.random_normal(grid)\n\n res_a = eq.solve(field, t_range=1, dt=0.01, backend=\"numpy\", tracker=None)\n res_b = eq.solve(field, t_range=1, dt=0.01, backend=\"numba\", tracker=None)\n\n res_a.assert_field_compatible(res_b)\n np.testing.assert_allclose(res_a.data, res_b.data)", "def dpsi_dt(self, psi, t):\n#\t#To avoid doing anything twice. (odeint tends to do that.)\n#\t#---------------------------------------------------------\n#\tnovel, result = self.check_novelty(t,psi)\n#\tif not novel:\n#\t if self.my_id == 0:\n#\t\tprint \"Time: %2.2f / %2.2f au. 
Runtime: %2.2f---\"%(\n#\t\t t, self.total_duration, (time.time() - self.t_0)/60.)\n#\t\tself.debug_norm(t, psi, result)\t\n#\t\t\n#\t return result\n#\t##########################################################\n\n\t#Making a complex array. \n\tpsi_complex = psi[:len(psi)/2] + 1j * psi[len(psi)/2:] \n\t\n\tdp_dt_complex = zeros(psi_complex.shape, dtype = complex)\n\tdp_dt_buffer= zeros(psi_complex.shape, dtype = complex)\n\t\n\n\t#Do operations.\n\tmat_vec = self.mat_vec_product(psi_complex, t)\n\n\tdp_dt_complex[self.my_slice] = self.solve_overlap(-1j * mat_vec)\n\t\n\n\n\t#Add and redistribute.\n\tdp_dt_complex = pypar.reduce(dp_dt_complex, pypar.SUM, 0, buffer = dp_dt_buffer)\n\tdp_dt_buffer = dp_dt_complex.copy()\n\tdp_dt_complex = pypar.broadcast(dp_dt_buffer, 0)\n\t\n\n\n\t#Making a float array.\n\tdp_dt = r_[real(dp_dt_buffer), imag(dp_dt_buffer)] \n\t\n\tif self.my_id == 0:\n\t print \"Time: %2.2f / %2.2f au. Runtime: %2.2f\"%(\n\t\tt, self.total_duration, (time.time() - self.t_0)/60.)\n\t self.debug_norm(t, psi, dp_dt)\t\n\t\n\t#Store latest result. ----------------------------------\n\tself.prev_out = dp_dt\n\t############################3###########################3\n\treturn dp_dt", "def _redef_via_predef_eqn(self):\r\n time = self.current_T # + self.d_T\r\n\r\n self.Beta = (self.diff_scale * self.thermal_conductivity) / \\\r\n (self.convect_coeff) \r\n self.Epsilon = self.d_T * self.thermal_conductivity / \\\r\n (self.density * self.heat_capacity)\r\n\r\n # Source term.\r\n def F_func(elem, eta):\r\n x = elem.local_to_global(eta)\r\n F = elem.eval_elem(self.node_map, self.lst_tmp, [eta])[0]\r\n F -= self.Epsilon * self.redef_F_laplacian(x[0], x[1], time)\r\n F += self.redef_dTdt(x[0], x[1], time) * self.d_T\r\n return elem.funcs(eta) * F\r\n\r\n self.vF_vect_vol = et.elems_2_array(self.mesh,\r\n F_func,\r\n self.node_map,\r\n gauss_mult=2) # Use double gp_1D\r\n\r\n # Boundary term.\r\n def f_func(elem, eta):\r\n n = elem.guess_normal_vector_global(eta)\r\n f = elem.eval_elem(self.node_map, self.lst_tmp, [eta])[0]\r\n x = elem.local_to_global(eta)\r\n # Evaluate our boundary term.\r\n f += self.Beta * self.redef_f_norm_grad(x[0], x[1], time, n)\r\n f += self.redef_dTdt(x[0], x[1], time) * self.d_T\r\n return elem.funcs(eta) * f\r\n\r\n self.vf_vect_bound = et.edge_2_array(self.mesh,\r\n \"Boundary\",\r\n f_func,\r\n self.node_map,\r\n gauss_mult=2)", "def solve(self):\n \n # Definition of the parameters\n Q_pc = self.parameters.getParam(\"Q_pc\")\n V_c = self.parameters.getParam(\"V_c\")\n V_p = self.parameters.getParam(\"V_p\")\n CL = self.parameters.getParam(\"CL\")\n initial_conditions = [\n self.parameters.getParam(\"q_c0\"),\n self.parameters.getParam(\"q_p0\"),\n ]\n t_eval = np.linspace(0, self.timespan, self.nsteps)\n\n # Definition of the model ODEs\n def pk_iv_model(t, y, Q_pc, V_c, V_p, CL):\n \"\"\"Defines the differential equations for the PK IV model.\n\n Parameters:\n :param t: time (h)\n :param y: list of the state variables of the ODEs system, in the\n form [q_c, q_p]\n :param Q_pc: transition rate between central and peripheral\n compartments (mL/h)\n :param V_c: volume of central compartment (mL)\n :param V_p: volume of peripheral compartment (mL)\n :param CL: clearance/elimination rate from the central\n compartment (mL/h)\n\n The parameters (except for t and y) are extracted from the\n Parameter class, using getParam method.\n\n Returns list containing the differential equations, in the form:\n [dqc_dt, dqp_dt]\n \"\"\"\n q_c, q_p = y\n transfer = Q_pc * 
(q_c / V_c - q_p / V_p)\n dqc_dt = self.dosefunction(t) - q_c / V_c * CL - transfer\n dqp_dt = transfer\n return [dqc_dt, dqp_dt]\n\n # Solving the model\n sol = scipy.integrate.solve_ivp(\n fun=lambda t, y: pk_iv_model(t, y, Q_pc, V_c, V_p, CL),\n t_span=[t_eval[0], t_eval[-1]],\n y0=initial_conditions,\n t_eval=t_eval,\n )\n\n # Feeding the solution line by line to solution class\n t = sol.t\n y = sol.y\n N = t.shape[0]\n columnNames = [\"t\", \"dose\", \"q_c\", \"q_p\"]\n self.solution.begin(columnNames, N)\n for i in range(N):\n arr = np.zeros((len(columnNames), 1))\n arr[0] = t[i]\n arr[1] = self.dosefunction(t[i])\n arr[2:, 0] = y[:, i]\n self.solution.report(arr)", "def f(z):\n omega_m = 0.308\n omega_de = 0.692\n #omega = omega_m*(1+z)**3\n #return omega**0.6 + omega_de/70*(1+omega/2) # Dodelson approx\n\n omega = omega_m*(1+z)**3*H(0)**2/H(z)**2\n omega_de = omega_de*H(0)**2/H(z)**2\n return omega**(4/7) + omega_de/70*(1+omega/2) # Dodelson approx\n #return 5*omega/(2*(omega**(4/7) - omega_de + (1 + omega/2)*(1 + omega_de/70)))\n #return omega**0.55", "def block_solve_newton(\n r_j,\n A_j,\n a_1_j,\n a_2_j,\n m,\n b_j_init,\n C_j,\n I_j,\n ls_alpha=0.5,\n ls_beta=0.9,\n max_iters=20,\n tol=1e-8,\n verbose=False,\n):\n b_j = b_j_init\n k = 1\n pbar_stats = {} # stats for the progress bar\n pbar = tqdm.tqdm(\n desc=\"Solving block with Newton's method\", disable=not verbose, leave=False\n )\n\n while True:\n # First, compute the Newton step and decrement.\n q_b_j = r_j - A_j @ b_j\n b_j_norm = b_j.norm(p=2)\n grad_b_j = _grad_j(q_b_j, A_j, b_j, b_j_norm, a_1_j, a_2_j, m)\n hess_b_j = _hess_j(C_j, I_j, b_j, b_j_norm, a_1_j, a_2_j)\n hessinv_b_j = torch.inverse(hess_b_j)\n v_j = hessinv_b_j @ grad_b_j\n dec_j = grad_b_j @ (hessinv_b_j @ grad_b_j)\n\n # Check tolerance stopping criterion. 
Exit if dec_j / 2 is less than the\n # tolerance.\n if dec_j / 2 <= tol:\n break\n\n # Perform backtracking line search.\n t = 1\n f_b_j = _f_j(q_b_j, b_j_norm, a_1_j, a_2_j, m)\n k_j = grad_b_j @ v_j\n while True:\n # Compute the update and evaluate function at that point.\n bp_j = b_j - t * v_j\n q_bp_j = r_j - A_j @ bp_j\n bp_j_norm = bp_j.norm(p=2)\n f_bp_j = _f_j(q_bp_j, bp_j_norm, a_1_j, a_2_j, m)\n\n if f_bp_j <= f_b_j - ls_alpha * t * k_j:\n b_j = bp_j\n break\n t *= ls_beta\n\n # Make b_j non-zero if it is 0.\n if all(b_j.abs() < tol):\n b_j.fill_(1e-3)\n\n pbar_stats[\"t\"] = \"{:.2g}\".format(t)\n pbar_stats[\"1/2 newton decrement\"] = \"{:.2g}\".format(dec_j / 2)\n pbar.set_postfix(pbar_stats)\n pbar.update()\n\n # Check max iterations stopping criterion.\n if max_iters is not None and k == max_iters and k > 2:\n break\n k += 1\n\n pbar.close()\n return b_j", "def application_test():\n # Choice of nonlinear coefficient\n m = 2\n\n def q(u):\n return (1+u)**m\n\n def Dq(u):\n return m*(1+u)**(m-1)\n\n usage = 'manual|automatic Krylov|direct degree nx ny nz'\n try:\n import sys\n J_comp = sys.argv[1]\n linear_solver = sys.argv[2]\n degree = int(sys.argv[3])\n divisions = [int(arg) for arg in sys.argv[4:]]\n except:\n print('Usage: %s' % sys.argv[0], usage)\n sys.exit(0)\n\n u = solver(q, Dq, f, divisions, degree,\n 'pde_Newton', J_comp, linear_solver)\n\n # Find max error\n u_exact = Expression(\n 'pow((pow(2, m+1)-1)*x[0] + 1, 1.0/(m+1)) - 1', m=m)\n u_e = interpolate(u_exact, u.function_space())\n import numpy as np\n error = np.abs(u_e.vector().array() -\n u.vector().array()).max()\n print('error: %.2E' % error)", "def approximate_nonlinear_vector_field(dataset_path):\n\n file_X0 = \"nonlinear_vectorfield_data_x0.txt\"\n names_X0 = ['X0_x', 'X0_y']\n data_X0 = pd.read_csv(dataset_path / file_X0, sep=' ', names=names_X0).to_numpy()\n plt.scatter(data_X0[:, 0], data_X0[:, 1])\n\n names_X1 = ['X1_x', 'X1_y']\n file_X1 = \"nonlinear_vectorfield_data_x1.txt\"\n data_X1 = pd.read_csv(dataset_path / file_X1, sep=' ', names=names_X1).to_numpy()\n plt.scatter(data_X1[:, 0], data_X1[:, 1])\n plt.title(\"Given data set X0 and X1\")\n plt.show()\n\n \"\"\"\n Following block calculates the approximate values using differential\n solver solve_ivp\n \"\"\"\n V = (data_X1 - data_X0) / 0.1\n approx_func_At = np.linalg.inv(data_X0.T @ data_X0) @ data_X0.T @ V\n approx_values = []\n for i in range(data_X0.shape[0]):\n sol = solve_ivp(fun=derivative_func, t_span=[0, 10], t_eval=[0.1],\n y0=data_X0[i, :], args=(approx_func_At,))\n approx_values.append(sol.y)\n approx_values = np.array(approx_values)\n approx_values = approx_values.reshape((2000, 2))\n\n \"\"\"\n We now plot the original data of X1 and the newly approximated data.\n \"\"\"\n plt.scatter(data_X1[:, 0], data_X1[:, 1])\n plt.scatter(approx_values[:, 0], approx_values[:, 1], c='green')\n plt.title(\"Given X1 and approximated values\")\n plt.title(\"Approximated vector field\")\n plt.show()\n\n \"\"\"\n We now plot the vector filed and the phase portrait.\n \"\"\"\n x, y = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))\n u, v = np.zeros((10, 10)), np.zeros((10, 10))\n for i in range(0, 10):\n for j in range(0, 10):\n u[i, j] = approx_values.T[0, i]\n v[i, j] = approx_values.T[1, j]\n plt.quiver(x, y, u, v)\n plt.streamplot(x, y, u, v)\n plt.title(\"Approximated Vector field\")\n plt.show()\n\n \"\"\"\n Following block calculates the mean squared error of the X1 and calculate\n approximated values.\n \"\"\"\n MSE = 
np.square(data_X1 - approx_values).mean()\n print(MSE)", "def _orbit_dp(ring: Lattice, dp: float = None, guess: Orbit = None, **kwargs):\n # We seek\n # - f(x) = x\n # - g(x) = f(x) - x = 0\n # - g'(x) = f'(x) - 1\n # Use a Newton-Raphson-type algorithm:\n # - r_n+1 = r_n - g(r_n) / g'(r_n)\n # - r_n+1 = r_n - (f(r_n) - r_n) / (f'(r_n) - 1)\n #\n # (f(r_n) - r_n) / (f'(r_n) - 1) can be seen as x = b/a where we use least\n # squares fitting to determine x when ax = b\n # f(r_n) - r_n is denoted b\n # f'(r_n) is the 4x4 jacobian, denoted j4\n keep_lattice = kwargs.pop('keep_lattice', False)\n convergence = kwargs.pop('convergence', DConstant.OrbConvergence)\n max_iterations = kwargs.pop('max_iterations', DConstant.OrbMaxIter)\n xy_step = kwargs.pop('XYStep', DConstant.XYStep)\n rem = kwargs.keys()\n if len(rem) > 0:\n raise AtError(f'Unexpected keywords for orbit_dp: {\", \".join(rem)}')\n\n ref_in = numpy.zeros((6,)) if guess is None else numpy.copy(guess)\n ref_in[4] = 0.0 if dp is None else dp\n\n scaling = xy_step * numpy.array([1.0, 1.0, 1.0, 1.0])\n delta_matrix = numpy.zeros((6, 5), order='F')\n for i in range(4):\n delta_matrix[i, i] = scaling[i]\n id4 = numpy.asfortranarray(numpy.identity(4))\n change = 1\n itercount = 0\n while (change > convergence) and itercount < max_iterations:\n in_mat = ref_in.reshape((6, 1)) + delta_matrix\n _ = internal_lpass(ring, in_mat, refpts=[], keep_lattice=keep_lattice)\n # the reference particle after one turn\n ref_out = in_mat[:, 4]\n # 4x4 jacobian matrix from numerical differentiation:\n # f(x+d) - f(x) / d\n j4 = (in_mat[:4, :4] - in_mat[:4, 4:]) / scaling\n a = j4 - id4 # f'(r_n) - 1\n b = ref_out[:4] - ref_in[:4]\n b_over_a = numpy.linalg.solve(a, b)\n r_next = ref_in - numpy.append(b_over_a, numpy.zeros((2,)))\n # determine if we are close enough\n change = numpy.linalg.norm(r_next - ref_in)\n itercount += 1\n ref_in = r_next\n keep_lattice = True\n\n if itercount == max_iterations:\n warnings.warn(AtWarning('Maximum number of iterations reached. 
'\n 'Possible non-convergence'))\n return ref_in", "def dismod_solution(iota, rho, chi, omega):\n f_b = build_derivative_full(iota, rho, chi, omega)\n bunch = solve_differential_equation(f_b, initial=np.array([1.0 - 1e-6, 1e-6], dtype=np.float))\n S = lambda t: bunch(t)[0]\n C = lambda t: bunch(t)[1]\n return S, C", "def our_own_bvp_solve(f, a, b, n, y0, dim, bc, tol=1e-2):\n\n # interpolate the initial guess function y0 on Chebyshev points of the first kind\n cf0 = []\n for y0_i in y0:\n for thing in np.polynomial.chebyshev.Chebyshev(np.zeros(n), (a, b)).interpolate(y0_i, n, (a, b)):\n cf0.append(thing)\n\n solution = root(lambda u: fun(u, a, b, dim, n, f, bc), cf0, method='lm', tol=tol)\n if not solution.success:\n print('root finding failed')\n\n cf = solution.x\n cf = cf.reshape((dim, cf.size // dim))\n\n return [np.polynomial.chebyshev.Chebyshev(cf[i], (a, b)) for i in range(dim)]", "def approximate_nonlinear_vector_field_radial(dataset_path, L, epsilon):\n\n file_X0 = \"nonlinear_vectorfield_data_x0.txt\"\n names_X0 = ['X0_x', 'X0_y']\n data_X0 = pd.read_csv(dataset_path / file_X0, sep=' ', names=names_X0).to_numpy()\n\n names_X1 = ['X1_x', 'X1_y']\n file_X1 = \"nonlinear_vectorfield_data_x1.txt\"\n data_X1 = pd.read_csv(dataset_path / file_X1, sep=' ', names=names_X1).to_numpy()\n\n \"\"\"\n Following block calculates the values of phi_l's for each point in dataset of X0\n and form the corresponding phi_X matrix with the given value of L.\n \"\"\"\n phi = np.empty([2000, L])\n for l in range(L):\n phi_l = np.exp(-np.square(np.linalg.norm(data_X0 - data_X0[l],\n axis=1)) / epsilon ** 2)\n phi[:, l] = phi_l\n\n \"\"\"\n The following block performs the approximation of the vector field.\n \"\"\"\n V = (data_X1 - data_X0) / 0.1\n approx_func_Ct = np.linalg.inv(phi.T @ phi) @ phi.T @ V\n final = phi @ approx_func_Ct\n plt.scatter(final[:, 0], final[:, 1], c='green',\n label='approximated f(x)_hat values')\n plt.show()\n\n \"\"\"\n The following code plots the approximated vector field and the phase portrait.\n \"\"\"\n x, y = np.meshgrid(np.linspace(-5, 5, 10), np.linspace(-5, 5, 10))\n u, v = np.zeros((10, 10)), np.zeros((10, 10))\n for i in range(0, 10):\n for j in range(0, 10):\n u[i, j] = final.T[0, i]\n v[i, j] = final.T[1, j]\n plt.quiver(x, y, u, v)\n plt.streamplot(x, y, u, v)\n plt.title(\"Approximated Vector field\")\n plt.show()\n\n \"\"\"\n The following code calculates the MSE for the dataset X1 and the final values.\n \"\"\"\n MSE = np.square(data_X1 - final).mean()\n print(MSE)", "def define_ufl_equations_diff(self):\n\n # Derivatives of velocity integration equation.\n if self.f1 != 0:\n self.df1_du = dlf.derivative(self.f1, self.displacement, self.trial_vector)\n self.df1_dv = dlf.derivative(self.f1, self.velocity, self.trial_vector)\n else:\n self.df1_du = 0\n self.df1_dv = 0\n self.df1_dp = 0 # This is always zero.\n\n # Derivatives of momentum equation.\n if self.displacement != 0:\n self.df2_du = dlf.derivative(self.f2, self.displacement, self.trial_vector)\n else:\n self.df2_du = 0\n\n if self.velocity != 0:\n self.df2_dv = dlf.derivative(self.f2, self.velocity, self.trial_vector)\n else:\n self.df2_dv = 0\n\n if self.pressure != 0:\n self.df2_dp = dlf.derivative(self.f2, self.pressure, self.trial_scalar)\n else:\n self.df2_dp = 0\n\n # Derivatives of incompressibility equation.\n if self.f3 != 0:\n if self.displacement != 0:\n self.df3_du = dlf.derivative(self.f3, self.displacement, self.trial_vector)\n else:\n self.df3_du = 0\n\n if self.velocity != 0:\n 
self.df3_dv = dlf.derivative(self.f3, self.velocity, self.trial_vector)\n else:\n self.df3_dv = 0\n\n self.df3_dp = dlf.derivative(self.f3, self.pressure, self.trial_scalar)\n else:\n self.df3_du = 0\n self.df3_dv = 0\n self.df3_dp = 0\n\n return None", "def efSolver(self):\n dx = self.dh[0] # dx\n dy = self.dh[1] # dy\n dz = self.dh[2] # dz\n \n for i in np.arange(0, self.ni):\n for j in np.arange(0, self.nj):\n for k in np.arange(0, self.nk):\n\n #x-component#\n if i==0: \n # forward\n self.ef[i][j][k][0] = -(-3*self.phi[i][j][k]+\\\n 4*self.phi[i+1][j][k]-\\\n self.phi[i+2][j][k])/(2*dx)\n elif i==self.ni-1: \n # backward\n self.ef[i][j][k][0] = -(self.phi[i-2][j][k]-\\\n 4*self.phi[i-1][j][k]+\\\n 3*self.phi[i][j][k])/(2*dx)\n else: \n #central\n self.ef[i][j][k][0] = -(self.phi[i+1][j][k] - \\\n self.phi[i-1][j][k])/(2*dx)\n\n #y-component\n if j==0:\n self.ef[i][j][k][1] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j+1][k]-\\\n self.phi[i][j+2][k])/(2*dy)\n elif j==self.nj-1:\n self.ef[i][j][k][1] = -(self.phi[i][j-2][k] - \\\n 4*self.phi[i][j-1][k] +\\\n 3*self.phi[i][j][k])/(2*dy)\n else:\n self.ef[i][j][k][1] = -(self.phi[i][j+1][k] - \\\n self.phi[i][j-1][k])/(2*dy)\n\n #z-component\n if k==0:\n self.ef[i][j][k][2] = -(-3*self.phi[i][j][k] + \\\n 4*self.phi[i][j][k+1]-\n self.phi[i][j][k+2])/(2*dz)\n elif k==self.nk-1:\n self.ef[i][j][k][2] = -(self.phi[i][j][k-2] - \\\n 4*self.phi[i][j][k-1] + \\\n 3*self.phi[i][j][k])/(2*dz)\n else:\n self.ef[i][j][k][2] = -(self.phi[i][j][k+1] - \\\n self.phi[i][j][k-1])/(2*dz)", "def solve_part2(input, verbose=False):\n equations = parse(input)\n\n result = []\n for eq in equations:\n result.append(solve_equation_addition_precendence(eq, verbose))\n\n if verbose:\n print(f\"results: {result}\")\n\n return sum(result)", "def build_rhs():\n\n def div(\n coeff_rho,\n momentum_x,\n momentum_y,\n momentum_z,\n ):\n \"\"\"Computes the divergence of the velocity field.\"\"\"\n # Compute the fourth order derivative of the pressure for the face\n # velocity correction.\n p_corr = (\n states['p']\n if self._params.enable_rhie_chow_correction else states['dp'])\n d4p_dx4 = self._kernel_op.apply_kernel_op_x(p_corr, 'k4d2x')\n d4p_dy4 = self._kernel_op.apply_kernel_op_y(p_corr, 'k4d2y')\n d4p_dz4 = self._kernel_op.apply_kernel_op_z(p_corr, 'k4d2z',\n 'k4d2zsh')\n\n # Compute velocity gradient based on interpolated values on cell faces.\n coeff_x = dt / (4. * coeff_rho * dx**2)\n du = self._kernel_op.apply_kernel_op_x(momentum_x, 'kDx')\n du_dx = [\n du_i / (2. * dx) + coeff_x * d4p_dx4_i\n for du_i, d4p_dx4_i in zip(du, d4p_dx4)\n ]\n\n coeff_y = dt / (4. * coeff_rho * dy**2)\n dv = self._kernel_op.apply_kernel_op_y(momentum_y, 'kDy')\n dv_dy = [\n dv_i / (2. * dy) + coeff_y * d4p_dy4_i\n for dv_i, d4p_dy4_i in zip(dv, d4p_dy4)\n ]\n\n coeff_z = dt / (4. * coeff_rho * dz**2)\n dw = self._kernel_op.apply_kernel_op_z(momentum_z, 'kDz', 'kDzsh')\n dw_dz = [\n dw_i / (2. 
* dz) + coeff_z * d4p_dz4_i\n for dw_i, d4p_dz4_i in zip(dw, d4p_dz4)\n ]\n\n return [\n du_dx_i + dv_dy_i + dw_dz_i\n for du_dx_i, dv_dy_i, dw_dz_i in zip(du_dx, dv_dy, dw_dz)\n ]\n\n def add_factor(\n v,\n factor,\n ):\n return [factor * v_i for v_i in v]\n\n b_terms = {\n _B_TERM_SOURCE_RHO: add_factor(src_rho, inv_dt),\n }\n if isinstance(rho_info, ConstantDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(rho_info.rho, states['u'], states['v'], states['w']),\n inv_dt * rho_info.rho),\n _B_TERM_DRHO_DT: [\n tf.zeros_like(src_rho_i) for src_rho_i in src_rho\n ],\n })\n\n elif isinstance(rho_info, VariableDensityInfo):\n b_terms.update({\n _B_TERM_DIV:\n add_factor(\n div(1.0, states['rho_u'], states['rho_v'], states['rho_w']),\n inv_dt),\n _B_TERM_DRHO_DT:\n add_factor(rho_info.drho_dt, inv_dt),\n })\n\n else:\n raise ValueError('`rho_info` has to be either `ConstantDensityInfo` or '\n '`VariableDensityInfo`.')\n\n # pylint: disable=g-complex-comprehension\n return [(div_i + drho_dt_i - src_rho_i)\n for div_i, drho_dt_i, src_rho_i in zip(\n b_terms[_B_TERM_DIV],\n b_terms[_B_TERM_DRHO_DT],\n b_terms[_B_TERM_SOURCE_RHO],\n )], b_terms\n # pylint: enable=g-complex-comprehension", "def solve(self, F, u0=None, maxiter=100, rtol=1.e-6, rtol2=1.e-6 \\\n , verbose=False, update=False):\n # assembly the stifness matrix and bc terms\n poisson.assembly(self, update=update)\n\n # project u0 onto the discrete vectorial space\n self.initialize(u0=u0)\n\n # ...\n PDE = self\n V = PDE.space\n un = PDE.unknown\n rhs = self.rhs\n # ...\n\n rhs.func = F\n\n # ...\n from time import time\n list_Err = [1.e6]\n list_ErrH1 = [1.e6]\n un_values = un.get()\n normH1_old = np.dot(PDE.dot(un.get()), un.get())\n i = 0\n if verbose:\n tb = time()\n while (list_Err[-1] > rtol) and (list_ErrH1[-1] > rtol2) and (i < maxiter):\n U_old_values = un.get()\n# print \"-------\"\n# print \"solve\"\n# import matplotlib.pyplot as plt\n## Phi = PDE.G_W\n# Phi = PDE.unknown_dirichlet\n## Phi.plot(withpcolor=True) ; plt.colorbar() ; plt.show()\n# Phi.fast_plot() ; plt.colorbar() ; plt.show()\n# print \"-------\"\n\n # assembly the right hand side\n rhs.reset()\n self.update()\n # solve and update unew\n poisson.solve(self, rhs)\n\n U_values = un.get()\n err = np.linalg.norm(U_values-U_old_values)\n list_Err.append(err)\n\n normH1 = np.dot(PDE.dot(un.get()), un.get())\n list_ErrH1.append(np.abs(normH1-normH1_old))\n\n normH1_old = normH1\n\n i += 1\n if verbose:\n print(i, \": \",\" |F(x)| = \", list_Err[-1],\" |DF(x)| = \", list_ErrH1[-1])\n if verbose:\n te = time()\n print(\">> Elapsed time \", te-tb)\n\n list_Err = np.asarray(list_Err[1:])\n list_ErrH1 = np.asarray(list_ErrH1[1:])\n return list_Err, list_ErrH1", "def test_pde_scalar():\n eq = PDE({\"u\": \"laplace(u) + exp(-t) + sin(t)\"})\n grid = UnitGrid([8])\n field = ScalarField.random_normal(grid)\n\n res_a = eq.solve(field, t_range=1, dt=0.01, backend=\"numpy\", tracker=None)\n res_b = eq.solve(field, t_range=1, dt=0.01, backend=\"numba\", tracker=None)\n\n res_a.assert_field_compatible(res_b)\n np.testing.assert_allclose(res_a.data, res_b.data)", "def solve(self,\n notifications = False\n ):\n\n if notifications:\n print('[info]: Solving differential equations for '+self.name+' model. ')\n \n\n \n # getting the time values\n self.days_list = np.linspace(self.tbeg,self.tend,self.npoints)\n\n # calling the odeint method to solve the diff. 
equations\n self.x = odeint(self.diff_eq,self.x0,self.days_list,args = (self.par,))\n '''\n Its important to note that (par_est,) is the way to define a tuple\n with just one element. When we put (par_est), the parenteses won't\n indicate a tuple\n '''\n \n #setting the variables\n self.confirmed_list = self.x[:,1] + self.x[:,2] + self.x[:,3]\n self.recovered_list = self.x[:,2]\n self.death_list = self.x[:,3]" ]
[ "0.6431365", "0.6242809", "0.60729045", "0.60094076", "0.5977395", "0.58910745", "0.5847757", "0.5833228", "0.5826886", "0.5806057", "0.58032686", "0.57971686", "0.57969683", "0.57910645", "0.5788869", "0.5775308", "0.5765414", "0.57204235", "0.56981325", "0.5682662", "0.5675518", "0.5673673", "0.5655256", "0.5640836", "0.56332684", "0.5629773", "0.5616129", "0.5610296", "0.5606407", "0.5600484" ]
0.6265
1
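A worked companion to the theta-scheme record above: Crank-Nicolson for the plain heat equation u_t = a * u_xx with zero Dirichlet boundaries. This is a minimal sketch using dense solves for brevity, whereas the record's solver generalizes to a*u_xx + b*u_x + c*u + f and uses a tridiagonal (TDMA) solve.

import numpy as np

def crank_nicolson_heat(a, u0, step_x, step_t, n_steps):
    n = len(u0)
    r = a * step_t / (2.0 * step_x ** 2)
    tri = np.eye(n, k=1) + np.eye(n, k=-1)
    lhs = (1 + 2 * r) * np.eye(n) - r * tri   # applied to the unknown u^{t+1}
    rhs = (1 - 2 * r) * np.eye(n) + r * tri   # applied to the known u^t
    for m in (lhs, rhs):
        # Hold the boundary values fixed (zero Dirichlet for a u0 that
        # vanishes at both ends).
        m[0, :] = 0
        m[-1, :] = 0
        m[0, 0] = 1
        m[-1, -1] = 1
    u = np.asarray(u0, dtype=float)
    for _ in range(n_steps):
        u = np.linalg.solve(lhs, rhs @ u)
    return u

Example: with u0 = np.sin(np.linspace(0, np.pi, 51)) the profile decays toward exp(-a * t) * sin(x), matching the analytic heat-equation solution.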
This method builds the mimeData if the selection is correct
def mimeData(self, indices): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_mime(self, mime, def_mime='unk'):\n self.mime = def_mime\n if mime:\n self.mime = self.MIME_RE.split(mime, 1)[0]", "def build_post_data(self, file_):\n\n filemask = {\n 'uuid': str(uuid.uuid4()),\n 'filename': file_.filename,\n 'filesize': self.get_size(file_.file),\n 'mimetype': file_.type,\n }\n\n namespace = uuid.UUID(filemask['uuid'])\n name = str(hash(frozenset(filemask.items())))\n id_file = str(uuid.uuid3(namespace, name))\n filemask['id_file'] = id_file\n\n data = {\n 'id_doc': None,\n 'file': file_.file.read(),\n 'filetext': None,\n 'dt_ext_text': None\n }\n\n data.update(filemask)\n data.pop('uuid')\n return data, utils.object2json(filemask)", "def get_mime_encoded_user_data(self):\n # Split the frequencies\n index_underscore = find(self._frequency_id, '_')\n index_tilde = find(self._frequency_id, '~')\n min_freq = self._frequency_id[index_underscore + 1:index_tilde]\n max_freq = self._frequency_id[index_tilde + 1:]\n LOGGER.info('min_freq: {0}, max_freq: {1}'.format(min_freq, max_freq))\n\n # Build the mime message\n user_data = MIMEMultipart()\n user_data.attach(get_cloud_init())\n\n swap_size = self.get_swap_size()\n data_formatted = self._user_data.format(self._frequency_id, min_freq, max_freq, swap_size, PIP_PACKAGES)\n user_data.attach(MIMEText(self._setup_disks + data_formatted))\n return user_data.as_string()", "def drag_data_received(self, widget, context, x, y, sel_data, info, time):\n if not sel_data:\n return\n #modern file managers provide URI_LIST. For Windows split sel_data.data\n files = sel_data.get_uris()\n for file in files:\n if win():\n clean_string = conv_to_unicode(\n file.replace('\\0',' ').replace(\"\\r\", \" \").strip(),\n None)\n else:\n clean_string = file\n protocol, site, mfile, j, k, l = urlparse(clean_string)\n if protocol == \"file\":\n name = url2pathname(mfile)\n mime = get_type(name)\n if not is_valid_type(mime):\n return\n photo = MediaObject()\n self.uistate.set_busy_cursor(True)\n photo.set_checksum(create_checksum(name))\n self.uistate.set_busy_cursor(False)\n base_dir = cuni(media_path(self.dbstate.db))\n if os.path.exists(base_dir):\n name = relative_path(name, base_dir)\n photo.set_path(name)\n photo.set_mime_type(mime)\n basename = os.path.basename(name)\n (root, ext) = os.path.splitext(basename)\n photo.set_description(root)\n with DbTxn(_(\"Drag Media Object\"), self.dbstate.db) as trans:\n self.dbstate.db.add_object(photo, trans)\n widget.emit_stop_by_name('drag_data_received')", "def create_qt_mime_data(data):\n from PyQt5.QtCore import QByteArray, QDataStream, QIODevice, QMimeData\n\n item_data = QByteArray()\n data_stream = QDataStream(item_data, QIODevice.WriteOnly)\n\n qgraph_mime = {\n 'version': qmxgraph.constants.QGRAPH_DD_MIME_VERSION,\n }\n qgraph_mime.update(data)\n data_stream.writeString(json.dumps(qgraph_mime).encode('utf8'))\n\n mime_data = QMimeData()\n mime_data.setData(qmxgraph.constants.QGRAPH_DD_MIME_TYPE, item_data)\n\n return mime_data", "def getMimeTypeFileExtensions(mimeType):\n #getMimeTypeFileExtensions body\n\n if mimeType == applicationzlib:\n return [ \"zz\" ]\n\n if mimeType == applicationzstd:\n return [ \"zst\" ]\n\n if mimeType == applicationxzoo:\n return [ \"zoo\" ]\n\n if mimeType == applicationvndhandheldentertainment_xml:\n return [ \"zmm\" ]\n\n if mimeType == applicationvndzul:\n return [ \"zir\", \"zirz\" ]\n\n if mimeType == applicationzip:\n return [ \"zip\", \"zipx\" ]\n\n if mimeType == applicationxopenzim:\n return [ \"zim\" ]\n\n if mimeType == 
applicationvndzzazzdeck_xml:\n return [ \"zaz\" ]\n\n if mimeType == applicationxzmachine:\n return [ \"z1\", \"z2\", \"z3\", \"z4\", \"z5\", \"z6\", \"z7\", \"z8\" ]\n\n if mimeType == applicationxcompress:\n return [ \"z\" ]\n\n if mimeType == videovndyoutubeyt:\n return [ \"yt\" ]\n\n if mimeType == textxsuseymp:\n return [ \"ymp\" ]\n\n if mimeType == applicationyin_xml:\n return [ \"yin\" ]\n\n if mimeType == applicationyang:\n return [ \"yang\" ]\n\n if mimeType == applicationxyaml:\n return [ \"yaml\", \"yml\" ]\n\n if mimeType == applicationxxz:\n return [ \"xz\" ]\n\n if mimeType == chemicalxxyz:\n return [ \"xyz\" ]\n\n if mimeType == imagexxwindowdump:\n return [ \"xwd\" ]\n\n if mimeType == applicationvndmozillaxul_xml:\n return [ \"xul\" ]\n\n if mimeType == applicationxspf_xml:\n return [ \"xspf\" ]\n\n if mimeType == applicationvndsyncml_xml:\n return [ \"xsm\" ]\n\n if mimeType == applicationxslt_xml:\n return [ \"xsl\", \"xslt\" ]\n\n if mimeType == applicationprsxsf_xml:\n return [ \"xsf\" ]\n\n if mimeType == applicationvndinterconformnet:\n return [ \"xpw\", \"xpx\" ]\n\n if mimeType == applicationvndmsxpsdocument:\n return [ \"xps\" ]\n\n if mimeType == applicationvndisxpr:\n return [ \"xpr\" ]\n\n if mimeType == imagexxpixmap:\n return [ \"xpm\" ]\n\n if mimeType == applicationxproc_xml:\n return [ \"xpl\" ]\n\n if mimeType == applicationxxpinstall:\n return [ \"xpi\" ]\n\n if mimeType == applicationxop_xml:\n return [ \"xop\" ]\n\n if mimeType == applicationvndolpcsugar:\n return [ \"xo\" ]\n\n if mimeType == applicationxcapns_xml:\n return [ \"xns\" ]\n\n if mimeType == applicationxml:\n return [ \"xml\", \"xbl\", \"xsd\", \"rng\" ]\n\n if mimeType == textxxmi:\n return [ \"xmi\" ]\n\n if mimeType == audioxxmf:\n return [ \"xmf\" ]\n\n if mimeType == audioxxm:\n return [ \"xm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmltemplate:\n return [ \"xltx\" ]\n\n if mimeType == applicationvndmsexceltemplatemacroenabled12:\n return [ \"xltm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmlsheet:\n return [ \"xlsx\" ]\n\n if mimeType == applicationvndmsexcelsheetmacroenabled12:\n return [ \"xlsm\" ]\n\n if mimeType == applicationvndmsexcelsheetbinarymacroenabled12:\n return [ \"xlsb\" ]\n\n if mimeType == applicationvndmsexcel:\n return [ \"xls\", \"xlc\", \"xll\", \"xlm\", \"xlw\", \"xla\", \"xlt\", \"xld\" ]\n\n if mimeType == applicationxliff_xml:\n return [ \"xlf\", \"xliff\" ]\n\n if mimeType == applicationvndmsexceladdinmacroenabled12:\n return [ \"xlam\" ]\n\n if mimeType == imagevndxiff:\n return [ \"xif\" ]\n\n if mimeType == audioxxi:\n return [ \"xi\" ]\n\n if mimeType == applicationxhtml_xml:\n return [ \"xhtml\", \"xht\", \"html\", \"htm\" ]\n\n if mimeType == applicationvndpwgxhtmlprint_xml:\n return [ \"xhtm\" ]\n\n if mimeType == applicationvndxfdl:\n return [ \"xfdl\" ]\n\n if mimeType == applicationvndadobexfdf:\n return [ \"xfdf\" ]\n\n if mimeType == applicationpatchopserror_xml:\n return [ \"xer\" ]\n\n if mimeType == applicationxenc_xml:\n return [ \"xenc\" ]\n\n if mimeType == applicationxcapel_xml:\n return [ \"xel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworks:\n return [ \"xdw\" ]\n\n if mimeType == applicationdssc_xml:\n return [ \"xdssc\" ]\n\n if mimeType == applicationvndadobexdp_xml:\n return [ \"xdp\" ]\n\n if mimeType == applicationvndsyncmldm_xml:\n return [ \"xdm\" ]\n\n if mimeType == applicationxcapdiff_xml:\n return [ \"xdf\" ]\n\n if mimeType == 
applicationcalendar_xml:\n return [ \"xcs\" ]\n\n if mimeType == imagexcompressedxcf:\n return [ \"xcfgz\", \"xcfbz2\" ]\n\n if mimeType == imagexxcf:\n return [ \"xcf\" ]\n\n if mimeType == applicationxcapcaps_xml:\n return [ \"xca\" ]\n\n if mimeType == imagexxbitmap:\n return [ \"xbm\" ]\n\n if mimeType == applicationxxbel:\n return [ \"xbel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworksbinder:\n return [ \"xbd\" ]\n\n if mimeType == applicationxmsxbap:\n return [ \"xbap\" ]\n\n if mimeType == applicationxcapatt_xml:\n return [ \"xav\" ]\n\n if mimeType == applicationxxar:\n return [ \"xar\", \"pkg\" ]\n\n if mimeType == applicationxsilverlightapp:\n return [ \"xap\" ]\n\n if mimeType == applicationxaml_xml:\n return [ \"xaml\" ]\n\n if mimeType == imagexsigmax3f:\n return [ \"x3f\" ]\n\n if mimeType == modelx3d_vrml:\n return [ \"x3dv\", \"x3dvz\" ]\n\n if mimeType == modelx3d_binary:\n return [ \"x3db\", \"x3dbz\" ]\n\n if mimeType == modelx3d_xml:\n return [ \"x3d\", \"x3dz\" ]\n\n if mimeType == modelvndparasolidtransmittext:\n return [ \"x_t\" ]\n\n if mimeType == modelvndparasolidtransmitbinary:\n return [ \"x_b\" ]\n\n if mimeType == applicationxwwf:\n return [ \"wwf\" ]\n\n if mimeType == audioxwavpackcorrection:\n return [ \"wvc\" ]\n\n if mimeType == audioxwavpack:\n return [ \"wv\", \"wvp\" ]\n\n if mimeType == applicationvndwebturbo:\n return [ \"wtb\" ]\n\n if mimeType == applicationwspolicy_xml:\n return [ \"wspolicy\" ]\n\n if mimeType == applicationwsdl_xml:\n return [ \"wsdl\" ]\n\n if mimeType == applicationxwonderswancolorrom:\n return [ \"wsc\" ]\n\n if mimeType == applicationxwonderswanrom:\n return [ \"ws\" ]\n\n if mimeType == applicationxmswrite:\n return [ \"wri\" ]\n\n if mimeType == applicationvndwqd:\n return [ \"wqd\" ]\n\n if mimeType == applicationvndmswpl:\n return [ \"wpl\" ]\n\n if mimeType == applicationxwpg:\n return [ \"wpg\" ]\n\n if mimeType == applicationvndwordperfect:\n return [ \"wp\", \"wp4\", \"wp5\", \"wp6\", \"wpd\", \"wpp\" ]\n\n if mimeType == fontwoff2:\n return [ \"woff2\" ]\n\n if mimeType == fontwoff:\n return [ \"woff\" ]\n\n if mimeType == applicationxmswmz:\n return [ \"wmz\" ]\n\n if mimeType == videoxmswmv:\n return [ \"wmv\" ]\n\n if mimeType == applicationvndwapwmlscriptc:\n return [ \"wmlsc\" ]\n\n if mimeType == textvndwapwmlscript:\n return [ \"wmls\" ]\n\n if mimeType == applicationvndwapwmlc:\n return [ \"wmlc\" ]\n\n if mimeType == textvndwapwml:\n return [ \"wml\" ]\n\n if mimeType == imagewmf:\n return [ \"wmf\" ]\n\n if mimeType == applicationxmswmd:\n return [ \"wmd\" ]\n\n if mimeType == audioxmswma:\n return [ \"wma\" ]\n\n if mimeType == videoxmswm:\n return [ \"wm\" ]\n\n if mimeType == applicationxpartialdownload:\n return [ \"wkdownload\", \"crdownload\", \"part\" ]\n\n if mimeType == applicationxmswim:\n return [ \"wim\", \"swm\" ]\n\n if mimeType == applicationwatcherinfo_xml:\n return [ \"wif\" ]\n\n if mimeType == applicationwidget:\n return [ \"wgt\" ]\n\n if mimeType == applicationvndpmiwidget:\n return [ \"wg\" ]\n\n if mimeType == imagewebp:\n return [ \"webp\" ]\n\n if mimeType == applicationmanifest_json:\n return [ \"webmanifest\" ]\n\n if mimeType == videowebm:\n return [ \"webm\" ]\n\n if mimeType == applicationxwebappmanifest_json:\n return [ \"webapp\" ]\n\n if mimeType == audiowebm:\n return [ \"weba\" ]\n\n if mimeType == imagevndmsphoto:\n return [ \"wdp\" ]\n\n if mimeType == applicationvndmsworks:\n return [ \"wcm\", \"wdb\", \"wps\", \"xlr\" ]\n\n if mimeType == 
    if mimeType == applicationvndwapwbxml: return [ "wbxml" ]
    if mimeType == applicationvndcriticaltoolswbs_xml: return [ "wbs" ]
    if mimeType == imagevndwapwbmp: return [ "wbmp" ]
    if mimeType == applicationxquattropro: return [ "wb1", "wb2", "wb3" ]
    if mimeType == audioxwav: return [ "wav" ]
    if mimeType == applicationwasm: return [ "wasm" ]
    if mimeType == applicationjavaarchive: return [ "war", "ear" ]
    if mimeType == applicationvndsunwadl_xml: return [ "wadl" ]
    if mimeType == applicationxwiiwad: return [ "wad" ]
    if mimeType == applicationvoicexml_xml: return [ "vxml" ]
    if mimeType == modelvndvtu: return [ "vtu" ]
    if mimeType == textvtt: return [ "vtt" ]
    if mimeType == imagevndvalvesourcetexture: return [ "vtf" ]
    if mimeType == applicationvndmsvisiotemplatemain_xml: return [ "vstx" ]
    if mimeType == applicationvndmsvisiotemplatemacroenabledmain_xml: return [ "vstm" ]
    if mimeType == applicationvndmsvisiostencilmain_xml: return [ "vssx" ]
    if mimeType == applicationvndmsvisiostencilmacroenabledmain_xml: return [ "vssm" ]
    if mimeType == applicationvndvsf: return [ "vsf" ]
    if mimeType == applicationvndmsvisiodrawingmain_xml: return [ "vsdx" ]
    if mimeType == applicationvndmsvisiodrawingmacroenabledmain_xml: return [ "vsdm" ]
    if mimeType == applicationvndvisio: return [ "vsd", "vst", "vsw", "vss" ]
    if mimeType == modelvrml: return [ "vrm", "vrml", "wrl" ]
    if mimeType == applicationxvhddisk: return [ "vpc" ]
    if mimeType == audioxvoc: return [ "voc" ]
    if mimeType == applicationxvmdkdisk: return [ "vmdk" ]
    if mimeType == videovndvivo: return [ "viv", "vivo" ]
    if mimeType == applicationvndvisionary: return [ "vis" ]
    if mimeType == applicationxvhdxdisk: return [ "vhdx" ]
    if mimeType == textxvhdl: return [ "vhd", "vhdl" ]
    if mimeType == modelvndsapvds: return [ "vds" ]
    if mimeType == applicationxvdidisk: return [ "vdi" ]
    if mimeType == applicationvndvcx: return [ "vcx" ]
    if mimeType == textcalendar: return [ "vcs", "ics", "ifb" ]
    if mimeType == applicationvndgroovevcard: return [ "vcg" ]
    if mimeType == applicationxcdlink: return [ "vcd" ]
    if mimeType == textvcard: return [ "vcard", "vcf", "vct", "gcrd" ]
    if mimeType == textvbscript: return [ "vbs" ]
    if mimeType == applicationxvirtualboxvboxextpack: return [ "vbox-extpack" ]
    if mimeType == applicationxvirtualboxvbox: return [ "vbox" ]
    if mimeType == applicationxvirtualboyrom: return [ "vb" ]
    if mimeType == textxvala: return [ "vala", "vapi" ]
    if mimeType == textxverilog: return [ "v" ]
    if mimeType == applicationvnddecezip: return [ "uvz", "uvvz" ]
    if mimeType == applicationvnddeceunspecified: return [ "uvx", "uvvx" ]
    if mimeType == videovnddecevideo: return [ "uvv", "uvvv" ]
    if mimeType == videovnduvvump4: return [ "uvu", "uvvu" ]
    if mimeType == applicationvnddecettml_xml: return [ "uvt", "uvvt" ]
    if mimeType == videovnddecesd: return [ "uvs", "uvvs" ]
    if mimeType == videovnddecepd: return [ "uvp", "uvvp" ]
    if mimeType == videovnddecemobile: return [ "uvm", "uvvm" ]
    if mimeType == imagevnddecegraphic: return [ "uvi", "uvvi", "uvg", "uvvg" ]
    if mimeType == videovnddecehd: return [ "uvh", "uvvh" ]
    if mimeType == applicationvnddecedata: return [ "uvf", "uvvf", "uvd", "uvvd" ]
    if mimeType == audiovnddeceaudio: return [ "uva", "uvva" ]
    if mimeType == textxuuencode: return [ "uue", "uu" ]
    if mimeType == applicationvnduiqtheme: return [ "utz" ]
    if mimeType == applicationxustar: return [ "ustar" ]
    if mimeType == modelvndusdz_zip: return [ "usdz" ]
    if mimeType == applicationxmswinurl: return [ "url" ]
    if mimeType == texturilist: return [ "uri", "uris", "urls" ]
    if mimeType == applicationvnduoml_xml: return [ "uoml", "uo" ]
    if mimeType == applicationvndunity: return [ "unityweb" ]
    if mimeType == applicationvndumajin: return [ "umj" ]
    if mimeType == applicationxglulx: return [ "ulx" ]
    if mimeType == audioxmod: return [ "ult", "uni", "m15", "mtm", "669", "med" ]
    if mimeType == textxuil: return [ "uil" ]
    if mimeType == applicationxdesigner: return [ "ui" ]
    if mimeType == applicationxufraw: return [ "ufraw" ]
    if mimeType == applicationvndufdl: return [ "ufd", "ufdl" ]
    if mimeType == applicationubjson: return [ "ubj" ]
    if mimeType == messageglobal: return [ "u8msg" ]
    if mimeType == messageglobaldispositionnotification: return [ "u8mdn" ]
    if mimeType == messageglobalheaders: return [ "u8hdr" ]
    if mimeType == messageglobaldeliverystatus: return [ "u8dsn" ]
    if mimeType == modelu3d: return [ "u3d" ]
    if mimeType == textplain: return [ "txt", "text", "conf", "def", "list", "in", "ini" ]
    if mimeType == applicationvndmobiustxf: return [ "txf" ]
    if mimeType == applicationvndgenomatixtuxedo: return [ "txd" ]
    if mimeType == textxtwig: return [ "twig" ]
    if mimeType == applicationvndsimtechmindmapper: return [ "twd", "twds" ]
    if mimeType == applicationxfontttx: return [ "ttx" ]
    if mimeType == applicationttml_xml: return [ "ttml" ]
    if mimeType == textturtle: return [ "ttl" ]
    if mimeType == fontttf: return [ "ttf" ]
    if mimeType == fontcollection: return [ "ttc" ]
    if mimeType == audioxtta: return [ "tta" ]
    if mimeType == texttabseparatedvalues: return [ "tsv" ]
    if mimeType == applicationtimestampeddata: return [ "tsd" ]
    if mimeType == textvndtrolltechlinguist: return [ "ts" ]
    if mimeType == applicationxmsterminal: return [ "trm" ]
    if mimeType == applicationtrig: return [ "trig" ]
    if mimeType == applicationvndtrueapp: return [ "tra" ]
    if mimeType == texttroff: return [ "tr", "roff" ]
    if mimeType == applicationvndtridtpt: return [ "tpt" ]
    if mimeType == applicationvndgroovetooltemplate: return [ "tpl" ]
    if mimeType == applicationxbittorrent: return [ "torrent" ]
    if mimeType == applicationtoml: return [ "toml" ]
    if mimeType == applicationxcdrdaotoc: return [ "toc" ]
    if mimeType == applicationvndmstnef: return [ "tnef", "tnf", "winmaildat" ]
    if mimeType == applicationvndtmobilelivetv: return [ "tmo" ]
    if mimeType == imagetiff: return [ "tif", "tiff" ]
    if mimeType == applicationvndmsofficetheme: return [ "thmx" ]
    if mimeType == applicationxwindowsthemepack: return [ "themepack" ]
    if mimeType == applicationxtheme: return [ "theme" ]
    if mimeType == imagextga: return [ "tga", "icb", "tpic", "vda" ]
    if mimeType == imagetifffx: return [ "tfx" ]
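    # TeX/Texinfo sources, the tar.* compressed-archive variants, and Sun/OpenOffice.org XML documents.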
    if mimeType == applicationxtextfm: return [ "tfm" ]
    if mimeType == applicationthraud_xml: return [ "tfi" ]
    if mimeType == textxtexinfo: return [ "texi", "texinfo" ]
    if mimeType == textxtex: return [ "tex", "ltx", "sty", "cls", "dtx", "ins", "latex" ]
    if mimeType == applicationtei_xml: return [ "tei", "teicorpus" ]
    if mimeType == applicationvndsmartteacher: return [ "teacher" ]
    if mimeType == applicationurctargetdesc_xml: return [ "td" ]
    if mimeType == texttcl: return [ "tcl", "tk" ]
    if mimeType == applicationvnd3gpp2tcap: return [ "tcap" ]
    if mimeType == applicationxzstdcompressedtar: return [ "tarzst", "tzst" ]
    if mimeType == applicationxtarz: return [ "tarz", "taz" ]
    if mimeType == applicationxxzcompressedtar: return [ "tarxz", "txz" ]
    if mimeType == applicationxtzo: return [ "tarlzo", "tzo" ]
    if mimeType == applicationxlzmacompressedtar: return [ "tarlzma", "tlz" ]
    if mimeType == applicationxlz4compressedtar: return [ "tarlz4" ]
    if mimeType == applicationxlzipcompressedtar: return [ "tarlz" ]
    if mimeType == applicationxlrzipcompressedtar: return [ "tarlrz", "tlrz" ]
    if mimeType == applicationxcompressedtar: return [ "targz", "tgz" ]
    if mimeType == applicationxbzipcompressedtar: return [ "tarbz2", "tarbz", "tbz2", "tbz", "tb2" ]
    if mimeType == applicationxtar: return [ "tar", "gtar", "gem" ]
    if mimeType == imagevndtencenttap: return [ "tap" ]
    if mimeType == applicationvndtaointentmodulearchive: return [ "tao" ]
    if mimeType == audioxtak: return [ "tak" ]
    if mimeType == applicationvndmynfc: return [ "taglet" ]
    if mimeType == imaget38: return [ "t38" ]
    if mimeType == applicationxt3vmimage: return [ "t3" ]
    if mimeType == textxtxt2tags: return [ "t2t" ]
    if mimeType == textspreadsheet: return [ "sylk", "slk" ]
    if mimeType == applicationvndsunxmlwriter: return [ "sxw" ]
    if mimeType == applicationvndsunxmlmath: return [ "sxm" ]
    if mimeType == applicationvndsunxmlimpress: return [ "sxi" ]
    if mimeType == applicationvndsunxmlwriterglobal: return [ "sxg" ]
    if mimeType == applicationvndsunxmldraw: return [ "sxd" ]
    if mimeType == applicationvndsunxmlcalc: return [ "sxc" ]
    if mimeType == applicationswid_xml: return [ "swidtag" ]
    if mimeType == applicationvndaristanetworksswi: return [ "swi" ]
    if mimeType == applicationvndadobeflashmovie: return [ "swf", "spl" ]
    if mimeType == textxsvhdr: return [ "svh" ]
    if mimeType == imagesvg_xmlcompressed: return [ "svgz", "svggz" ]
    if mimeType == imagesvg_xml: return [ "svg" ]
    if mimeType == applicationvndsvd: return [ "svd" ]
    if mimeType == applicationvnddvbservice: return [ "svc" ]
    if mimeType == applicationxsv4crc: return [ "sv4crc" ]
    if mimeType == applicationxsv4cpio: return [ "sv4cpio" ]
    if mimeType == textxsvsrc: return [ "sv" ]
    if mimeType == applicationvndsuscalendar: return [ "sus", "susp" ]
    if mimeType == imagexsunraster: return [ "sun" ]
    if mimeType == textxmicrodvd: return [ "sub" ]
    if mimeType == textstylus: return [ "stylus", "styl" ]
    if mimeType == applicationvndsunxmlwritertemplate: return [ "stw" ]
    if mimeType == applicationvndpgformat: return [ "str" ]
    if mimeType == modelstep_zip: return [ "stpz" ]
    if mimeType == modelstepxml_zip: return [ "stpxz" ]
    if mimeType == modelstep_xml: return [ "stpx" ]
    if mimeType == audioxstm: return [ "stm" ]
    if mimeType == modelstl: return [ "stl" ]
    if mimeType == applicationhyperstudio: return [ "stk" ]
    if mimeType == applicationvndsunxmlimpresstemplate: return [ "sti" ]
    if mimeType == applicationvndwtstf: return [ "stf" ]
    if mimeType == applicationvndsunxmldrawtemplate: return [ "std" ]
    if mimeType == applicationvndsunxmlcalctemplate: return [ "stc" ]
    if mimeType == applicationvndsailingtrackertrack: return [ "st" ]
    if mimeType == applicationssml_xml: return [ "ssml" ]
    if mimeType == applicationvndepsonssf: return [ "ssf" ]
    if mimeType == applicationvndkodakdescriptor: return [ "sse" ]
    if mimeType == applicationssdl_xml: return [ "ssdl" ]
    if mimeType == textxssa: return [ "ssa", "ass" ]
    if mimeType == applicationsparqlresults_xml: return [ "srx" ]
    if mimeType == applicationsru_xml: return [ "sru" ]
    if mimeType == applicationxsubrip: return [ "srt" ]
    if mimeType == imagexsonysrf: return [ "srf" ]
    if mimeType == applicationxsourcerpm: return [ "srcrpm", "spm" ]
    if mimeType == applicationxwaissource: return [ "src" ]
    if mimeType == imagexsonysr2: return [ "sr2" ]
    if mimeType == applicationvndsquashfs: return [ "sqsh" ]
    if mimeType == applicationvndsqlite3: return [ "sqlite3" ]
    if mimeType == applicationxsqlite2: return [ "sqlite2" ]
    if mimeType == applicationsql: return [ "sql" ]
    if mimeType == applicationxapplesystemprofiler_xml: return [ "spx" ]
    if mimeType == applicationscvpvprequest: return [ "spq" ]
    if mimeType == applicationscvpvpresponse: return [ "spp" ]
    if mimeType == textvndin3dspot: return [ "spot" ]
    if mimeType == applicationvndyamahasmafphrase: return [ "spf" ]
    if mimeType == textxrpmspec: return [ "spec" ]
    if mimeType == textspdx: return [ "spdx" ]
    if mimeType == applicationxfontspeedo: return [ "spd" ]
    if mimeType == applicationxsharedlib: return [ "so", "so09" ]
    if mimeType == applicationxfontsnf: return [ "snf" ]
    if mimeType == applicationvndsnap: return [ "snap" ]
    if mimeType == applicationvndstepmaniapackage: return [ "smzip" ]
    if mimeType == videoxsmv: return [ "smv" ]
    if mimeType == applicationxsmsrom: return [ "sms" ]
    if mimeType == videovndradgamettoolssmacker: return [ "smk" ]
    if mimeType == applicationsmil_xml: return [ "smil", "smi", "sml", "kino" ]
    if mimeType == applicationvndstardivisionmath: return [ "smf" ]
    if mimeType == applicationvndstardivisionmail: return [ "smd" ]
    if mimeType == applicationvndstepmaniastepchart: return [ "sm" ]
    if mimeType == applicationvndepsonsalt: return [ "slt" ]
    if mimeType == applicationroutestsid_xml: return [ "sls" ]
    if mimeType == textslim: return [ "slim", "slm" ]
    if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslide: return [ "sldx" ]
    if mimeType == applicationvndmspowerpointslidemacroenabled12: return [ "sldm" ]
    if mimeType == applicationpgpkeys: return [ "skr", "pkr", "key" ]
    if mimeType == applicationvndkoan: return [ "skp", "skd", "skt", "skm" ]
    if mimeType == imagexskencil: return [ "sk", "sk1" ]
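    # StuffIt and Symbian installers, shell/SGML sources, and StarDivision (StarOffice) documents.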
    if mimeType == applicationsieve: return [ "siv", "sieve" ]
    if mimeType == applicationxstuffitx: return [ "sitx" ]
    if mimeType == applicationxstuffit: return [ "sit" ]
    if mimeType == xepocxsisxapp: return [ "sisx" ]
    if mimeType == applicationvndsymbianinstall: return [ "sis" ]
    if mimeType == audiosilk: return [ "sil" ]
    if mimeType == applicationpgpsignature: return [ "sig" ]
    if mimeType == audioprssid: return [ "sid", "psid" ]
    if mimeType == applicationxsiag: return [ "siag" ]
    if mimeType == texthtml: return [ "shtml" ]
    if mimeType == applicationxshorten: return [ "shn" ]
    if mimeType == applicationshf_xml: return [ "shf" ]
    if mimeType == textshex: return [ "shex" ]
    if mimeType == applicationxshar: return [ "shar" ]
    if mimeType == applicationxdiashape: return [ "shape" ]
    if mimeType == applicationxshellscript: return [ "sh" ]
    if mimeType == textsgml: return [ "sgml", "sgm" ]
    if mimeType == imagexsgi: return [ "sgi" ]
    if mimeType == applicationxgosgf: return [ "sgf" ]
    if mimeType == applicationxsg1000rom: return [ "sg" ]
    if mimeType == textxsfv: return [ "sfv" ]
    if mimeType == applicationvndspotfiresfs: return [ "sfs" ]
    if mimeType == applicationvndhydrostatixsofdata: return [ "sfd-hdstx" ]
    if mimeType == applicationvndnintendosnesrom: return [ "sfc", "smc" ]
    if mimeType == applicationsetregistrationinitiation: return [ "setreg" ]
    if mimeType == applicationsetpaymentinitiation: return [ "setpay" ]
    if mimeType == textxdbusservice: return [ "service" ]
    if mimeType == applicationjavaserializedobject: return [ "ser" ]
    if mimeType == applicationsensml_xml: return [ "sensmlx" ]
    if mimeType == applicationsenml_xml: return [ "senmlx" ]
    if mimeType == applicationvndsemf: return [ "semf" ]
    if mimeType == applicationvndsemd: return [ "semd" ]
    if mimeType == applicationvndsema: return [ "sema" ]
    if mimeType == applicationvndfdsnseed: return [ "seed", "dataless" ]
    if mimeType == applicationvndseemail: return [ "see" ]
    if mimeType == applicationxsea: return [ "sea" ]
    if mimeType == applicationvndstardivisionwriter: return [ "sdw", "vor", "sgl" ]
    if mimeType == applicationvndstardivisionchart: return [ "sds" ]
    if mimeType == applicationvndsolentsdkm_xml: return [ "sdkm", "sdkd" ]
    if mimeType == applicationvndstardivisionimpress: return [ "sdd", "sdp" ]
    if mimeType == applicationvndstardivisioncalc: return [ "sdc" ]
    if mimeType == applicationvndstardivisiondraw: return [ "sda" ]
    if mimeType == textvndcurlscurl: return [ "scurl" ]
    if mimeType == textxscss: return [ "scss" ]
    if mimeType == applicationscvpcvresponse: return [ "scs" ]
    if mimeType == applicationscvpcvrequest: return [ "scq" ]
    if mimeType == textxscons: return [ "sconstruct", "sconscript" ]
    if mimeType == applicationxgodotscene: return [ "scn", "tscn", "escn" ]
    if mimeType == textxscheme: return [ "scm", "ss" ]
    if mimeType == applicationxmsschedule: return [ "scd" ]
    if mimeType == textxscala: return [ "scala", "sc" ]
    if mimeType == applicationsbml_xml: return [ "sbml" ]
    if mimeType == applicationxspsssav: return [ "sav", "zsav" ]
    if mimeType == textxsass: return [ "sass" ]
    if mimeType == applicationxthomsonsapimage: return [ "sap" ]
    if mimeType == applicationxsami: return [ "sami" ]
    if mimeType == applicationxamipro: return [ "sam" ]
    if mimeType == textxsagemath: return [ "sage" ]
    if mimeType == applicationvndyamahasmafaudio: return [ "saf" ]
    if mimeType == audioxs3m: return [ "s3m" ]
    if mimeType == textxasm: return [ "s", "asm" ]
    if mimeType == imagexpanasonicrw2: return [ "rw2" ]
    if mimeType == videovndrnrealvideo: return [ "rv", "rvx" ]
    if mimeType == applicationrouteusd_xml: return [ "rusd" ]
    if mimeType == applicationxmakeself: return [ "run" ]
    if mimeType == textrichtext: return [ "rtx" ]
    if mimeType == applicationrtf: return [ "rtf" ]
    if mimeType == textvndrnrealtext: return [ "rt" ]
    if mimeType == textxrst: return [ "rst" ]
    if mimeType == applicationrss_xml: return [ "rss" ]
    if mimeType == applicationurcressheet_xml: return [ "rsheet" ]
    if mimeType == applicationrsd_xml: return [ "rsd" ]
    if mimeType == applicationatscrsat_xml: return [ "rsat" ]
    if mimeType == textrust: return [ "rs" ]
    if mimeType == applicationvndnokiaradiopreset: return [ "rpst" ]
    if mimeType == applicationvndnokiaradiopresets: return [ "rpss" ]
    if mimeType == applicationxrpm: return [ "rpm" ]
    if mimeType == applicationvndcloantorp9: return [ "rp9" ]
    if mimeType == imagevndrnrealpix: return [ "rp" ]
    if mimeType == applicationrpkiroa: return [ "roa" ]
    if mimeType == applicationrelaxngcompactsyntax: return [ "rnc" ]
    if mimeType == audioxpnrealaudioplugin: return [ "rmp" ]
    if mimeType == messagexgnurmail: return [ "rmail" ]
    if mimeType == applicationvndrnrealmedia: return [ "rm", "rmj", "rmm", "rms", "rmx", "rmvb" ]
    if mimeType == imagerle: return [ "rle" ]
    if mimeType == applicationresourcelistsdiff_xml: return [ "rld" ]
    if mimeType == imagevndfujixeroxedmicsrlc: return [ "rlc" ]
    if mimeType == applicationresourcelists_xml: return [ "rl" ]
    if mimeType == applicationxresearchinfosystems: return [ "ris" ]
    if mimeType == audiovndrip: return [ "rip" ]
    if mimeType == applicationreginfo_xml: return [ "rif" ]
    if mimeType == imagexrgb: return [ "rgb" ]
    if mimeType == applicationxgodotresource: return [ "res", "tres" ]
    if mimeType == applicationvndbusinessobjects: return [ "rep" ]
    if mimeType == applicationp2poverlay_xml: return [ "relo" ]
    if mimeType == textxreject: return [ "rej" ]
    if mimeType == textxmsregedit: return [ "reg" ]
    if mimeType == textxreadme: return [ "readme" ]
    if mimeType == applicationvnddatavisionrdz: return [ "rdz" ]
    if mimeType == applicationrdf_xml: return [ "rdf", "rdfs", "owl" ]
    if mimeType == applicationvndipunpluggedrcprofile: return [ "rcprofile" ]
    if mimeType == applicationxruby: return [ "rb" ]
    if mimeType == applicationxrawdiskimagexzcompressed: return [ "rawdiskimagexz", "imgxz" ]
    if mimeType == applicationxrawdiskimage: return [ "rawdiskimage", "img" ]
    if mimeType == imagexpanasonicrw: return [ "raw" ]
    if mimeType == imagexcmuraster: return [ "ras" ]
    if mimeType == applicationvndrar: return [ "rar" ]
    if mimeType == applicationrouteapd_xml: return [ "rapd" ]
    if mimeType == applicationraml_yaml: return [ "raml" ]
    if mimeType == applicationram: return [ "ram" ]
    if mimeType == imagexfujiraf: return [ "raf" ]
    if mimeType == audiovndrnrealaudio: return [ "ra", "rax" ]
    if mimeType == applicationvndquarkquarkxpress: return [ "qxd", "qxt", "qwd", "qwt", "qxl", "qxb" ]
    if mimeType == applicationxquicktimemedialink: return [ "qtl" ]
    if mimeType == imagexquicktime: return [ "qtif" ]
    if mimeType == applicationxqtiplot: return [ "qti", "qtigz" ]
    if mimeType == videoquicktime: return [ "qt", "mov", "moov", "qtvr" ]
    if mimeType == applicationsparqlquery: return [ "qs", "rq" ]
    if mimeType == applicationvndpublisharedeltatree: return [ "qps" ]
    if mimeType == applicationxqpress: return [ "qp" ]
    if mimeType == textxqml: return [ "qml", "qmltypes", "qmlproject" ]
    if mimeType == applicationxqw: return [ "qif" ]
    if mimeType == applicationvndintuqfx: return [ "qfx" ]
    if mimeType == applicationxqeddisk: return [ "qed" ]
    if mimeType == applicationxqemudisk: return [ "qcow2", "qcow" ]
    if mimeType == applicationvndintuqbo: return [ "qbo" ]
    if mimeType == applicationvndepsonquickanime: return [ "qam" ]
    if mimeType == textxpython: return [ "pyx", "wsgi" ]
    if mimeType == videovndmsplayreadymediapyv: return [ "pyv" ]
    if mimeType == applicationxpyspreadspreadsheet: return [ "pysu" ]
    if mimeType == applicationxpyspreadbzspreadsheet: return [ "pys" ]
    if mimeType == modelvndpythapyox: return [ "pyox" ]
    if mimeType == applicationxpythonbytecode: return [ "pyc", "pyo" ]
    if mimeType == audiovndmsplayreadymediapya: return [ "pya" ]
    if mimeType == textxpython3: return [ "py", "py3", "py3x", "pyi" ]
    if mimeType == applicationvnd3mpostitnotes: return [ "pwn" ]
    if mimeType == applicationxpw: return [ "pw" ]
    if mimeType == applicationvnd3gpppicbwvar: return [ "pvb" ]
    if mimeType == applicationvndmspublisher: return [ "pub" ]
    if mimeType == applicationvndpviptid1: return [ "ptid" ]
    if mimeType == imageprspti: return [ "pti" ]
    if mimeType == applicationxpocketword: return [ "psw" ]
    if mimeType == applicationpskc_xml: return [ "pskcxml" ]
    if mimeType == applicationxgzpostscript: return [ "psgz" ]
    if mimeType == audioxpsflib: return [ "psflib" ]
    if mimeType == applicationxgzfontlinuxpsf: return [ "psfgz" ]
    if mimeType == applicationxfontlinuxpsf: return [ "psf" ]
    if mimeType == imagevndadobephotoshop: return [ "psd" ]
    if mimeType == applicationxbzpostscript: return [ "psbz2" ]
    if mimeType == applicationvnd3gpppicbwsmall: return [ "psb" ]
    if mimeType == applicationpostscript: return [ "ps" ]
    if mimeType == applicationprovenance_xml: return [ "provx" ]
    if mimeType == applicationxgodotproject: return [ "projectgodot" ]
    if mimeType == applicationpicsrules: return [ "prf" ]
    if mimeType == applicationvndlotusfreelance: return [ "pre" ]
    if mimeType == applicationvndpalm: return [ "pqa", "oprc" ]
    if mimeType == applicationvndmspowerpoint: return [ "ppz", "ppt", "pps", "pot" ]
    if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlpresentation: return [ "pptx" ]
    if mimeType == applicationvndmspowerpointpresentationmacroenabled12: return [ "pptm" ]
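    # PowerPoint slideshow/template variants, portable pixel/gray/bit maps, Perl and PHP sources, and PDF (plain and compressed).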
    if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslideshow: return [ "ppsx" ]
    if mimeType == applicationvndmspowerpointslideshowmacroenabled12: return [ "ppsm" ]
    if mimeType == imagexportablepixmap: return [ "ppm" ]
    if mimeType == applicationvndcupsppd: return [ "ppd" ]
    if mimeType == applicationvndmspowerpointaddinmacroenabled12: return [ "ppam" ]
    if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmltemplate: return [ "potx" ]
    if mimeType == applicationvndmspowerpointtemplatemacroenabled12: return [ "potm" ]
    if mimeType == applicationvndmacportsportpkg: return [ "portpkg" ]
    if mimeType == applicationxspsspor: return [ "por" ]
    if mimeType == textxmaven_xml: return [ "pomxml", "settingsxml" ]
    if mimeType == textxgettexttranslation: return [ "po" ]
    if mimeType == imagexmacpaint: return [ "pntg" ]
    if mimeType == imagexportableanymap: return [ "pnm" ]
    if mimeType == imagepng: return [ "png" ]
    if mimeType == applicationvndctcposml: return [ "pml" ]
    if mimeType == audioxscpls: return [ "pls" ]
    if mimeType == applicationxplanperfect: return [ "pln" ]
    if mimeType == applicationvndpocketlearn: return [ "plf" ]
    if mimeType == applicationvndmobiusplc: return [ "plc" ]
    if mimeType == applicationvnd3gpppicbwlarge: return [ "plb" ]
    if mimeType == audioxiriverpla: return [ "pla" ]
    if mimeType == applicationxperl: return [ "pl", "pm", "al", "perl", "pod", "t" ]
    if mimeType == applicationvndapplepkpass: return [ "pkpass" ]
    if mimeType == applicationpkixpkipath: return [ "pkipath" ]
    if mimeType == applicationpkixcmp: return [ "pki" ]
    if mimeType == applicationxtexpk: return [ "pk" ]
    if mimeType == applicationxphp: return [ "php", "php3", "php4", "php5", "phps" ]
    if mimeType == applicationpgpencrypted: return [ "pgp", "gpg", "asc" ]
    if mimeType == applicationvndchesspgn: return [ "pgn" ]
    if mimeType == imagexportablegraymap: return [ "pgm" ]
    if mimeType == applicationfonttdpfr: return [ "pfr" ]
    if mimeType == applicationxfonttype1: return [ "pfa", "pfb", "gsf", "pfm" ]
    if mimeType == imagexpentaxpef: return [ "pef" ]
    if mimeType == applicationxxzpdf: return [ "pdfxz" ]
    if mimeType == applicationxlzpdf: return [ "pdflz" ]
    if mimeType == applicationxgzpdf: return [ "pdfgz" ]
    if mimeType == applicationxbzpdf: return [ "pdfbz2" ]
    if mimeType == applicationpdf: return [ "pdf" ]
    if mimeType == textxprocessing: return [ "pde" ]
    if mimeType == applicationxaportisdoc: return [ "pdb", "pdc" ]
    if mimeType == imagevndzbrushpcx: return [ "pcx" ]
    if mimeType == applicationvndcurlpcurl: return [ "pcurl" ]
    if mimeType == imagexpict: return [ "pct", "pict", "pict1", "pict2", "pic" ]
    if mimeType == applicationvndhppclxl: return [ "pclxl" ]
    if mimeType == applicationvndhppcl: return [ "pcl" ]
    if mimeType == applicationxfontpcf: return [ "pcf", "pcfz", "pcfgz" ]
    if mimeType == applicationxpcenginerom: return [ "pce" ]
    if mimeType == imagexphotocd: return [ "pcd" ]
    if mimeType == applicationvndtcpdumppcap: return [ "pcap", "cap", "dmp" ]
    if mimeType == imagexportablebitmap: return [ "pbm" ]
    if mimeType == applicationvndpowerbuilder6: return [ "pbd" ]
    if mimeType == applicationvndpawaafile: return [ "paw" ]
    if mimeType == imagexgimppat: return [ "pat" ]
    if mimeType == applicationxpar2: return [ "par2" ]
    if mimeType == applicationxpak: return [ "pak" ]
    if mimeType == applicationvndapplepages: return [ "pages" ]
    if mimeType == applicationxjavapack200: return [ "pack" ]
    if mimeType == applicationxnsproxyautoconfig: return [ "pac" ]
    if mimeType == applicationpkcs8encrypted: return [ "p8e" ]
    if mimeType == applicationpkcs8: return [ "p8" ]
    if mimeType == applicationpkcs7signature: return [ "p7s" ]
    if mimeType == applicationxpkcs7certreqresp: return [ "p7r" ]
    if mimeType == applicationpkcs7mime: return [ "p7c", "p7m" ]
    if mimeType == applicationxpkcs7certificates: return [ "p7b", "spc" ]
    if mimeType == applicationxpagemaker: return [ "p65", "pm6", "pmd" ]
    if mimeType == applicationpkcs12: return [ "p12", "pfx" ]
    if mimeType == applicationpkcs10: return [ "p10" ]
    if mimeType == textxpascal: return [ "p", "pas" ]
    if mimeType == applicationvndopenofficeorgextension: return [ "oxt" ]
    if mimeType == applicationoxps: return [ "oxps" ]
    if mimeType == applicationowl_xml: return [ "owx" ]
    if mimeType == applicationxvirtualboxovf: return [ "ovf" ]
    if mimeType == applicationovf: return [ "ova" ]
    if mimeType == applicationvndoasisopendocumenttexttemplate: return [ "ott" ]
    if mimeType == applicationvndoasisopendocumentspreadsheettemplate: return [ "ots" ]
    if mimeType == applicationvndoasisopendocumentpresentationtemplate: return [ "otp" ]
    if mimeType == applicationvndoasisopendocumentimagetemplate: return [ "oti" ]
    if mimeType == applicationvndoasisopendocumenttextweb: return [ "oth" ]
    if mimeType == applicationvndoasisopendocumentgraphicstemplate: return [ "otg" ]
    if mimeType == applicationvndoasisopendocumentformulatemplate: return [ "otf", "odft" ]
    if mimeType == applicationvndoasisopendocumentcharttemplate: return [ "otc" ]
    if mimeType == applicationvndopenstreetmapdata_xml: return [ "osm" ]
    if mimeType == applicationvndyamahaopenscoreformatosfpvg_xml: return [ "osfpvg" ]
    if mimeType == applicationvndyamahaopenscoreformat: return [ "osf" ]
    if mimeType == textorg: return [ "org" ]
    if mimeType == imagexolympusorf: return [ "orf" ]
    if mimeType == imageopenraster: return [ "ora" ]
    if mimeType == textxopml_xml: return [ "opml" ]
    if mimeType == applicationoebpspackage_xml: return [ "opf" ]
    if mimeType == textxooc: return [ "ooc" ]
    if mimeType == applicationonenote: return [ "onetoc", "onetoc2", "onetmp", "onepkg" ]
    if mimeType == applicationomdoc_xml: return [ "omdoc" ]
    if mimeType == applicationxoleo: return [ "oleo" ]
    if mimeType == applicationogg: return [ "ogx" ]
    if mimeType == videoogg: return [ "ogv" ]
    if mimeType == videoxogm_ogg: return [ "ogm" ]
    if mimeType == modelvndopengex: return [ "ogex" ]
    if mimeType == audioogg: return [ "oga", "ogg", "opus" ]
    if mimeType == applicationvndoasisopendocumenttext: return [ "odt" ]
    if mimeType == applicationvndoasisopendocumentspreadsheet: return [ "ods" ]
    if mimeType == applicationvndoasisopendocumentpresentation: return [ "odp" ]
    if mimeType == applicationvndoasisopendocumenttextmaster: return [ "odm" ]
    if mimeType == applicationvndoasisopendocumentimage: return [ "odi" ]
    if mimeType == applicationvndoasisopendocumentgraphics: return [ "odg" ]
    if mimeType == applicationvndoasisopendocumentformula: return [ "odf" ]
    if mimeType == applicationvndoasisopendocumentchart: return [ "odc" ]
    if mimeType == applicationvndoasisopendocumentdatabase: return [ "odb" ]
    if mimeType == applicationoda: return [ "oda" ]
    if mimeType == textxocl: return [ "ocl" ]
    if mimeType == applicationxtgif: return [ "obj" ]
    if mimeType == applicationvndopenbloxgame_xml: return [ "obgx" ]
    if mimeType == applicationxmsbinder: return [ "obd" ]
    if mimeType == applicationvndfujitsuoasys: return [ "oas" ]
    if mimeType == applicationvndfujitsuoasys3: return [ "oa3" ]
    if mimeType == applicationvndfujitsuoasys2: return [ "oa2" ]
    if mimeType == applicationxobject: return [ "o", "mod" ]
    if mimeType == applicationxnzb: return [ "nzb" ]
    if mimeType == applicationvndapplenumbers: return [ "numbers" ]
    if mimeType == applicationvndnitf: return [ "ntf", "nitf" ]
    if mimeType == applicationntriples: return [ "nt" ]
    if mimeType == videoxnsv: return [ "nsv" ]
    if mimeType == applicationvndlotusnotes: return [ "nsf" ]
    if mimeType == applicationxnetshowchannel: return [ "nsc" ]
    if mimeType == imagexnikonnrw: return [ "nrw" ]
    if mimeType == applicationnquads: return [ "nq" ]
    if mimeType == imagevndnetfpx: return [ "npx" ]
    if mimeType == applicationvndnoblenetweb: return [ "nnw" ]
    if mimeType == applicationvndnoblenetsealer: return [ "nns" ]
    if mimeType == applicationvndnoblenetdirectory: return [ "nnd" ]
    if mimeType == applicationvndenliven: return [ "nml" ]
    if mimeType == applicationvndneurolanguagenlu: return [ "nlu" ]
    if mimeType == applicationxneogeopocketrom: return [ "ngp" ]
    if mimeType == applicationvndnokiangagedata: return [ "ngdat" ]
    if mimeType == applicationxneogeopocketcolorrom: return [ "ngc" ]
    if mimeType == applicationvndnokiangagesymbianinstall: return [ "n-gage" ]
    if mimeType == textxnfo: return [ "nfo" ]
    if mimeType == applicationxnesrom: return [ "nes", "nez", "unf", "unif" ]
    if mimeType == imagexnikonnef: return [ "nef" ]
    if mimeType == applicationxnintendodsrom: return [ "nds" ]
    if mimeType == applicationxdtbncx_xml: return [ "ncx" ]
    if mimeType == applicationvndwolframplayer: return [ "nbp" ]
    if mimeType == applicationmathematica: return [ "nb", "ma", "mb" ]
    if mimeType == applicationxn64rom: return [ "n64", "z64", "v64" ]
    if mimeType == textn3: return [ "n3" ]
    if mimeType == applicationvndtriscapemxs: return [ "mxs" ]
    if mimeType == applicationxv_xml: return [ "mxml", "xhvml", "xvml", "xvm" ]
    if mimeType == audiomobilexmf: return [ "mxmf" ]
    if mimeType == applicationvndrecordaremusicxml: return [ "mxl" ]
    if mimeType == applicationmxf: return [ "mxf" ]
    if mimeType == applicationvndmfer: return [ "mwf" ]
    if mimeType == applicationvndmapboxvectortile: return [ "mvt" ]
    if mimeType == applicationxmsmediaview: return [ "mvb", "m13", "m14" ]
    if mimeType == applicationvndrecordaremusicxml_xml: return [ "musicxml" ]
    if mimeType == applicationmmtusd_xml: return [ "musd" ]
    if mimeType == applicationvndmusician: return [ "mus" ]
    if mimeType == textxmup: return [ "mup", "not" ]
    if mimeType == modelmtl: return [ "mtl" ]
    if mimeType == applicationxmsxrom: return [ "msx" ]
    if mimeType == applicationvndmuveestyle: return [ "msty" ]
    if mimeType == imagexmsod: return [ "msod" ]
    if mimeType == applicationvndmobiusmsl: return [ "msl" ]
    if mimeType == applicationxmsi: return [ "msi" ]
    if mimeType == modelmesh: return [ "msh", "mesh", "silo" ]
    if mimeType == applicationvndmsoutlook: return [ "msg" ]
    if mimeType == applicationvndepsonmsf: return [ "msf" ]
    if mimeType == applicationvndmseq: return [ "mseq" ]
    if mimeType == applicationvndfdsnmseed: return [ "mseed" ]
    if mimeType == applicationmediaservercontrol_xml: return [ "mscml" ]
    if mimeType == textxtroffms: return [ "ms" ]
    if mimeType == imagexminoltamrw: return [ "mrw" ]
    if mimeType == textxmrml: return [ "mrml", "mrl" ]
    if mimeType == applicationmarcxml_xml: return [ "mrcx" ]
    if mimeType == applicationmarc: return [ "mrc" ]
    if mimeType == applicationvndmobiusmqy: return [ "mqy" ]
    if mimeType == applicationvndibmminipay: return [ "mpy" ]
    if mimeType == applicationvndmsproject: return [ "mpt" ]
    if mimeType == applicationvndmophunapplication: return [ "mpn" ]
    if mimeType == applicationvndblueicemultipass: return [ "mpm" ]
    if mimeType == textxmpl2: return [ "mpl" ]
    if mimeType == applicationvndappleinstaller_xml: return [ "mpkg" ]
    if mimeType == applicationmediapolicydataset_xml: return [ "mpf" ]
    if mimeType == videompeg: return [ "mpeg", "mpg", "mpe", "vob", "090909vdr", "m1v", "m2v" ]
    if mimeType == applicationdash_xml: return [ "mpd" ]
    if mimeType == audioxmusepack: return [ "mpc", "mpp", "mp" ]
    if mimeType == applicationmp4: return [ "mp4s", "m4p" ]
    if mimeType == videomp4: return [ "mp4", "m4v", "f4v", "lrv", "mp4v", "mpg4" ]
    if mimeType == audiompeg: return [ "mp3", "mpga", "mp2a", "m2a", "m3a" ]
    if mimeType == audiomp2: return [ "mp2" ]
    if mimeType == videoxsgimovie: return [ "movie" ]
    if mimeType == textxmof: return [ "mof" ]
    if mimeType == applicationmods_xml: return [ "mods" ]
    if mimeType == textxmoc: return [ "moc" ]
    if mimeType == applicationxmobipocketebook: return [ "mobi", "prc" ]
    if mimeType == audioxmo3: return [ "mo3" ]
    if mimeType == applicationxmsmoney: return [ "mny" ]
    if mimeType == videoxmng: return [ "mng" ]
    if mimeType == imagevndfujixeroxedmicsmmr: return [ "mmr" ]
    if mimeType == applicationmathml_xml: return [ "mml", "mathml" ]
    if mimeType == applicationvndsmaf: return [ "mmf", "smaf" ]
    if mimeType == applicationvndchipnutskaraokemmd: return [ "mmd" ]
    if mimeType == textxobjc__src: return [ "mm" ]
    if mimeType == applicationvnddolbymlp: return [ "mlp" ]
    if mimeType == textxocaml: return [ "ml", "mli" ]
    if mimeType == videoxmatroska: return [ "mkv", "mks" ]
    if mimeType == audioxmatroska: return [ "mka" ]
    if mimeType == videoxmatroska3d: return [ "mk3d" ]
    if mimeType == videoxmjpeg: return [ "mjpeg", "mjpg" ]
    if mimeType == videomj2: return [ "mj2", "mjp2" ]
    if mimeType == audioxminipsf: return [ "minipsf" ]
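    # MIDI, MHTML archives, Markdown and Makefiles, the m3u/MPEG-TS/MP4 media families, and LZ* compressed data.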
    if mimeType == applicationxmif: return [ "mif" ]
    if mimeType == applicationxmie: return [ "mie" ]
    if mimeType == audiomidi: return [ "mid", "midi", "kar", "rmi" ]
    if mimeType == applicationxmimearchive: return [ "mhtml", "mht" ]
    if mimeType == applicationvndproteusmagazine: return [ "mgz" ]
    if mimeType == applicationxmagicpoint: return [ "mgp" ]
    if mimeType == applicationrpkimanifest: return [ "mft" ]
    if mimeType == applicationvndmfmp: return [ "mfm" ]
    if mimeType == applicationmets_xml: return [ "mets" ]
    if mimeType == applicationmetalink_xml: return [ "metalink" ]
    if mimeType == applicationmetalink4_xml: return [ "meta4" ]
    if mimeType == textxmeson: return [ "mesonbuild", "mesonoptionstxt" ]
    if mimeType == textxtroffme: return [ "me" ]
    if mimeType == imagevndmsmodi: return [ "mdi" ]
    if mimeType == applicationvndmsaccess: return [ "mdb" ]
    if mimeType == textmarkdown: return [ "md", "mkd", "markdown" ]
    if mimeType == textvndcurlmcurl: return [ "mcurl" ]
    if mimeType == applicationvndmcd: return [ "mcd" ]
    if mimeType == textvndsenxwarpscript: return [ "mc2" ]
    if mimeType == applicationvndmedcalcdata: return [ "mc1" ]
    if mimeType == applicationmbox: return [ "mbox" ]
    if mimeType == applicationvndmobiusmbk: return [ "mbk" ]
    if mimeType == textcachemanifest: return [ "manifest", "appcache" ]
    if mimeType == applicationxtroffman: return [ "man", "19" ]
    if mimeType == textxmakefile: return [ "makefile", "gnumakefile", "mk", "mak" ]
    if mimeType == applicationvndecowinchart: return [ "mag" ]
    if mimeType == applicationmmtaei_xml: return [ "maei" ]
    if mimeType == applicationmads_xml: return [ "mads" ]
    if mimeType == applicationxmarkaby: return [ "mab" ]
    if mimeType == applicationxthomsoncartridgememo7: return [ "m7" ]
    if mimeType == videoisosegment: return [ "m4s" ]
    if mimeType == audioxm4r: return [ "m4r" ]
    if mimeType == audioxm4b: return [ "m4b", "f4b" ]
    if mimeType == audiomp4: return [ "m4a", "f4a", "mp4a" ]
    if mimeType == applicationxm4: return [ "m4" ]
    if mimeType == audioxmpegurl: return [ "m3u", "m3u8", "vlc" ]
    if mimeType == videomp2t: return [ "m2t", "m2ts", "mts", "cpi", "clpi", "mpls", "bdm", "bdmv" ]
    if mimeType == applicationmp21: return [ "m21", "mp21" ]
    if mimeType == videovndmpegurl: return [ "m1u", "m4u", "mxu" ]
    if mimeType == textxobjcsrc: return [ "m" ]
    if mimeType == applicationxlzop: return [ "lzo" ]
    if mimeType == applicationxlzma: return [ "lzma" ]
    if mimeType == applicationxlz4: return [ "lz4" ]
    if mimeType == applicationxlzip: return [ "lz" ]
    if mimeType == applicationxlyx: return [ "lyx" ]
    if mimeType == textxlilypond: return [ "ly" ]
    if mimeType == imagexlws: return [ "lws" ]
    if mimeType == applicationvndlotuswordpro: return [ "lwp" ]
    if mimeType == imagexlwo: return [ "lwo", "lwob" ]
    if mimeType == audiovndlucentvoice: return [ "lvp" ]
    if mimeType == applicationxluabytecode: return [ "luac" ]
    if mimeType == textxlua: return [ "lua" ]
    if mimeType == applicationvndfrogansltf: return [ "ltf" ]
    if mimeType == applicationxlrzip: return [ "lrz" ]
    if mimeType == applicationvndmslrm: return [ "lrm" ]
    if mimeType == applicationlost_xml: return [ "lostxml" ]
    if mimeType == textxlog: return [ "log" ]
    if mimeType == audiousac: return [ "loas", "xhe" ]
    if mimeType == applicationxatarilynxrom: return [ "lnx" ]
    if mimeType == applicationxmsshortcut: return [ "lnk" ]
    if mimeType == textcoffeescript: return [ "litcoffee" ]
    if mimeType == applicationvndroute66link66_xml: return [ "link66" ]
    if mimeType == applicationxlhz: return [ "lhz" ]
    if mimeType == textxliteratehaskell: return [ "lhs" ]
    if mimeType == applicationxlha: return [ "lha", "lzh" ]
    if mimeType == applicationlgr_xml: return [ "lgr" ]
    if mimeType == textless: return [ "less" ]
    if mimeType == applicationvndhhelessonplayer: return [ "les" ]
    if mimeType == textxldif: return [ "ldif" ]
    if mimeType == applicationvndllamagraphicslifebalanceexchange_xml: return [ "lbe" ]
    if mimeType == applicationvndllamagraphicslifebalancedesktop: return [ "lbd" ]
    if mimeType == applicationvndlaslas_xml: return [ "lasxml" ]
    if mimeType == applicationxsharedlibraryla: return [ "la" ]
    if mimeType == applicationxkword: return [ "kwd", "kwt" ]
    if mimeType == applicationxkugar: return [ "kud" ]
    if mimeType == applicationvndkahootz: return [ "ktz", "ktr" ]
    if mimeType == imagektx2: return [ "ktx2" ]
    if mimeType == imagektx: return [ "ktx" ]
    if mimeType == textxkotlin: return [ "kt" ]
    if mimeType == textxkaitaistruct: return [ "ksy" ]
    if mimeType == applicationxkspread: return [ "ksp" ]
    if mimeType == applicationxkrita: return [ "kra", "krz" ]
    if mimeType == applicationvnddskeypoint: return [ "kpxx" ]
    if mimeType == applicationxkpresenter: return [ "kpr", "kpt" ]
    if mimeType == applicationxkpovmodeler: return [ "kpm" ]
    if mimeType == applicationxkontour: return [ "kon" ]
    if mimeType == applicationvndkinar: return [ "kne", "knp" ]
    if mimeType == applicationvndgoogleearthkmz: return [ "kmz" ]
    if mimeType == applicationvndgoogleearthkml_xml: return [ "kml" ]
    if mimeType == applicationxkillustrator: return [ "kil" ]
    if mimeType == applicationvndkidspiration: return [ "kia" ]
    if mimeType == applicationxkformula: return [ "kfo" ]
    if mimeType == applicationxkexiprojectshortcut: return [ "kexis" ]
    if mimeType == applicationxkexiconnectiondata: return [ "kexic" ]
    if mimeType == applicationxkexiprojectsqlite2: return [ "kexi" ]
    if mimeType == imagexkodakkdc: return [ "kdc" ]
    if mimeType == applicationxkeepass2: return [ "kdbx" ]
    if mimeType == applicationxkarbon: return [ "karbon" ]
    if mimeType == applicationxthomsoncassette: return [ "k7" ]
    if mimeType == imagexkodakk25: return [ "k25" ]
    if mimeType == imagejxss: return [ "jxss" ]
    if mimeType == imagejxsi: return [ "jxsi" ]
    if mimeType == imagejxsc: return [ "jxsc" ]
    if mimeType == imagejxs: return [ "jxs" ]
    if mimeType == imagejxrs: return [ "jxrs" ]
    if mimeType == imagejxra: return [ "jxra" ]
    if mimeType == imagejxr: return [ "jxr" ]
    if mimeType == imagejxl: return [ "jxl" ]
    if mimeType == textjsx: return [ "jsx" ]
    if mimeType == applicationjsonpatch_json: return [ "jsonpatch" ]
    if mimeType == applicationjsonml_json: return [ "jsonml" ]
    if mimeType == applicationld_json: return [ "jsonld" ]
    if mimeType == applicationjson5: return [ "json5" ]
    if mimeType == applicationjson: return [ "json", "map" ]
    if mimeType == textjavascript: return [ "js", "jsm", "mjs" ]
    if mimeType == applicationjrd_json: return [ "jrd" ]
    if mimeType == applicationxjbuilderproject: return [ "jpr", "jpx" ]
    if mimeType == imagejpm: return [ "jpm", "jpgm" ]
    if mimeType == imagejph: return [ "jph" ]
    if mimeType == videojpeg: return [ "jpgv" ]
    if mimeType == imagejpeg: return [ "jpg", "jpeg", "jpe" ]
    if mimeType == imagejpx: return [ "jpf" ]
    if mimeType == imagejp2: return [ "jp2", "jpg2" ]
    if mimeType == applicationvndjoostjodaarchive: return [ "joda" ]
    if mimeType == applicationxjavajnlpfile: return [ "jnlp" ]
    if mimeType == imagexjng: return [ "jng" ]
    if mimeType == applicationvndhpjlyt: return [ "jlt" ]
    if mimeType == imagejls: return [ "jls" ]
    if mimeType == applicationxjavakeystore: return [ "jks", "ks", "cacerts" ]
    if mimeType == applicationvndjisp: return [ "jisp" ]
    if mimeType == imagejphc: return [ "jhc" ]
    if mimeType == applicationxjavajcekeystore: return [ "jceks" ]
    if mimeType == textxjava: return [ "java" ]
    if mimeType == applicationxjavaarchivediff: return [ "jardiff" ]
    if mimeType == applicationxjavaarchive: return [ "jar" ]
    if mimeType == applicationvndjam: return [ "jam" ]
    if mimeType == textjade: return [ "jade" ]
    if mimeType == textvndsunj2meappdescriptor: return [ "jad" ]
    if mimeType == imagexjp2codestream: return [ "j2c", "j2k", "jpc" ]
    if mimeType == applicationvndimmervisionivu: return [ "ivu" ]
    if mimeType == applicationvndimmervisionivp: return [ "ivp" ]
    if mimeType == applicationits_xml: return [ "its" ]
    if mimeType == applicationvndshanainformedformtemplate: return [ "itp" ]
    if mimeType == applicationxit87: return [ "it87" ]
    if mimeType == audioxit: return [ "it" ]
    if mimeType == applicationxcdimage: return [ "iso", "iso9660" ]
    if mimeType == applicationvndirepositorypackage_xml: return [ "irp" ]
    if mimeType == applicationvndibmrightsmanagement: return [ "irm" ]
    if mimeType == applicationxipynb_json: return [ "ipynb" ]
    if mimeType == textxiptables: return [ "iptables" ]
    if mimeType == applicationxipspatch: return [ "ips" ]
    if mimeType == applicationvndshanainformedpackage: return [ "ipk" ]
    if mimeType == applicationipfix: return [ "ipfix" ]
    if mimeType == applicationvndastraeasoftwareiota: return [ "iota" ]
    if mimeType == textxinstall: return [ "install" ]
    if mimeType == applicationinkml_xml: return [ "ink", "inkml" ]
    if mimeType == textximelody: return [ "imy", "ime" ]
    if mimeType == applicationvndmsims: return [ "ims" ]
    if mimeType == applicationvndaccpacsimplyimp: return [ "imp" ]
    if mimeType == applicationvndshanainformedinterchange: return [ "iif" ]
    if mimeType == applicationvndmicrografxigx: return [ "igx" ]
    if mimeType == modeliges: return [ "igs", "iges" ]
    if mimeType == applicationvndinsorsigm: return [ "igm" ]
    if mimeType == applicationvndigloader: return [ "igl" ]
    if mimeType == applicationvndshanainformedformdata: return [ "ifm" ]
    if mimeType == imagexilbm: return [ "iff", "ilbm", "lbm" ]
    if mimeType == imageief: return [ "ief" ]
    if mimeType == textxidl: return [ "idl" ]
    if mimeType == imagevndmicrosofticon: return [ "ico" ]
    if mimeType == imagexicns: return [ "icns" ]
    if mimeType == xconferencexcooltalk: return [ "ice" ]
    if mimeType == applicationvndiccprofile: return [ "icc", "icm" ]
    if mimeType == applicationxica: return [ "ica" ]
    if mimeType == applicationvndintergeo: return [ "i2g" ]
    if mimeType == applicationxhwt: return [ "hwt" ]
    if mimeType == applicationxhwp: return [ "hwp" ]
    if mimeType == applicationvndyamahahvscript: return [ "hvs" ]
    if mimeType == applicationvndyamahahvvoice: return [ "hvp" ]
    if mimeType == applicationvndyamahahvdic: return [ "hvd" ]
    if mimeType == applicationvndkenameaapp: return [ "htke" ]
    if mimeType == textxcomponent: return [ "htc" ]
    if mimeType == imagehsj2: return [ "hsj2" ]
    if mimeType == textxhaskell: return [ "hs" ]
    if mimeType == applicationmacbinhex40: return [ "hqx" ]
    if mimeType == applicationvndhphps: return [ "hps" ]
    if mimeType == applicationvndhphpid: return [ "hpid" ]
    if mimeType == applicationvndhphpgl: return [ "hpgl" ]
    if mimeType == applicationwinhlp: return [ "hlp" ]
    if mimeType == applicationhjson: return [ "hjson" ]
    if mimeType == textxc__hdr: return [ "hh", "hp", "hpp", "h", "hxx" ]
    if mimeType == applicationxhfefloppyimage: return [ "hfe" ]
    if mimeType == applicationatscheld_xml: return [ "held" ]
    if mimeType == imagehej2k: return [ "hej2" ]
    if mimeType == imageheifsequence: return [ "heifs" ]
    if mimeType == imageheicsequence: return [ "heics" ]
    if mimeType == imageheif: return [ "heic", "heif", "hif" ]
    if mimeType == applicationxhdf: return [ "hdf", "hdf4", "h4", "hdf5", "h5" ]
    if mimeType == applicationxvirtualboxhdd: return [ "hdd" ]
    if mimeType == textxhandlebarstemplate: return [ "hbs" ]
    if mimeType == applicationvndhbci: return [ "hbci" ]
    if mimeType == applicationvndhal_xml: return [ "hal" ]
    if mimeType == videoh264: return [ "h264" ]
    if mimeType == videoh263: return [ "h263" ]
    if mimeType == videoh261: return [ "h261" ]
    if mimeType == applicationgzip: return [ "gz" ]
    if mimeType == applicationvndgeonext: return [ "gxt" ]
    if mimeType == applicationgxf: return [ "gxf" ]
    if mimeType == textxgcodegx: return [ "gx" ]
    if mimeType == textxgooglevideopointer: return [ "gvp" ]
    if mimeType == textvndgraphviz: return [ "gv" ]
    if mimeType == modelvndgtw: return [ "gtw" ]
    if mimeType == applicationvndgroovetoolmessage: return [ "gtm" ]
    if mimeType == audioxgsm: return [ "gsm" ]
    if mimeType == applicationvndgoogleappspresentation: return [ "gslides" ]
    if mimeType == applicationvndgoogleappsspreadsheet: return [ "gsheet" ]
    if mimeType == textxgenie: return [ "gs" ]
    if mimeType == applicationsrgs_xml: return [ "grxml" ]
    if mimeType == applicationvndgrooveinjector: return [ "grv" ]
    if mimeType == textxgroovy: return [ "groovy", "gvy", "gy", "gsh" ]
    if mimeType == applicationxgrampsxml: return [ "gramps" ]
    if mimeType == applicationsrgs: return [ "gram" ]
    if mimeType == textxgradle: return [ "gradle" ]
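    # Gnuplot/Gnumeric/GnuCash documents, gettext catalogs, glTF models, GIF images, and Game Boy/Game Gear/Genesis ROMs.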
    if mimeType == applicationxgraphite: return [ "gra" ]
    if mimeType == applicationvndgrafeq: return [ "gqf", "gqs" ]
    if mimeType == applicationgpx_xml: return [ "gpx" ]
    if mimeType == applicationvndflographit: return [ "gph" ]
    if mimeType == applicationxgnuplot: return [ "gp", "gplt", "gnuplot" ]
    if mimeType == textxgo: return [ "go" ]
    if mimeType == applicationxgnumeric: return [ "gnumeric" ]
    if mimeType == applicationxgnucash: return [ "gnucash", "gnc", "xac" ]
    if mimeType == applicationgnunetdirectory: return [ "gnd" ]
    if mimeType == applicationvndgmx: return [ "gmx" ]
    if mimeType == applicationxprofile: return [ "gmonout" ]
    if mimeType == applicationxgettexttranslation: return [ "gmo", "mo" ]
    if mimeType == applicationgml_xml: return [ "gml" ]
    if mimeType == modelgltf_json: return [ "gltf" ]
    if mimeType == modelgltfbinary: return [ "glb" ]
    if mimeType == applicationxglade: return [ "glade" ]
    if mimeType == applicationvndgrooveidentitymessage: return [ "gim" ]
    if mimeType == imagexgimpgih: return [ "gih" ]
    if mimeType == imagegif: return [ "gif" ]
    if mimeType == applicationvndgroovehelp: return [ "ghf" ]
    if mimeType == applicationvndgeogebratool: return [ "ggt" ]
    if mimeType == applicationvndgeogebrafile: return [ "ggb" ]
    if mimeType == applicationxgamegearrom: return [ "gg" ]
    if mimeType == applicationxtexgf: return [ "gf" ]
    if mimeType == applicationvndgeometryexplorer: return [ "gex", "gre" ]
    if mimeType == applicationgeo_json: return [ "geojson" ]
    if mimeType == applicationvnddynageo: return [ "geo" ]
    if mimeType == applicationxgenesisrom: return [ "gen", "sgd" ]
    if mimeType == applicationxgedcom: return [ "ged", "gedcom" ]
    if mimeType == applicationxgodotshader: return [ "gdshader" ]
    if mimeType == applicationvndgoogleappsdocument: return [ "gdoc" ]
    if mimeType == modelvndgdl: return [ "gdl" ]
    if mimeType == applicationxgdromcue: return [ "gdi" ]
    if mimeType == applicationxgdscript: return [ "gd" ]
    if mimeType == textxgcode: return [ "gcode" ]
    if mimeType == applicationxgcacompressed: return [ "gca" ]
    if mimeType == imagexgimpgbr: return [ "gbr" ]
    if mimeType == applicationxgameboycolorrom: return [ "gbc", "cgb" ]
    if mimeType == applicationxgbarom: return [ "gba", "agb" ]
    if mimeType == applicationxgameboyrom: return [ "gb", "sgb" ]
    if mimeType == applicationxtads: return [ "gam" ]
    if mimeType == applicationvndgrooveaccount: return [ "gac" ]
    if mimeType == applicationvndgeospace: return [ "g3w" ]
    if mimeType == imageg3fax: return [ "g3" ]
    if mimeType == applicationvndgeoplan: return [ "g2w" ]
    if mimeType == applicationvndfuzzysheet: return [ "fzs" ]
    if mimeType == applicationvndadobefxp: return [ "fxp", "fxpl" ]
    if mimeType == videoxjavafx: return [ "fxm" ]
    if mimeType == videovndfvt: return [ "fvt" ]
    if mimeType == applicationvndanserwebfundstransferinitiation: return [ "fti" ]
    if mimeType == applicationvndfluxtimeclip: return [ "ftc" ]
    if mimeType == imagevndfst: return [ "fst" ]
    if mimeType == applicationvndfscweblaunch: return [ "fsc" ]
    if mimeType == imagevndfpx: return [ "fpx" ]
    if mimeType == applicationvndoasisopendocumenttextflatxml: return [ "fodt" ]
    if mimeType == applicationvndoasisopendocumentspreadsheetflatxml: return [ "fods" ]
    if mimeType == applicationvndoasisopendocumentpresentationflatxml: return [ "fodp" ]
    if mimeType == applicationvndoasisopendocumentgraphicsflatxml: return [ "fodg" ]
    if mimeType == textxxslfo: return [ "fo", "xslfo" ]
    if mimeType == applicationvndfrogansfnc: return [ "fnc" ]
    if mimeType == applicationvndframemaker: return [ "fm", "frame", "maker", "book" ]
    if mimeType == textvndfly: return [ "fly" ]
    if mimeType == textvndfmiflexstor: return [ "flx" ]
    if mimeType == applicationxkivio: return [ "flw" ]
    if mimeType == videoxflv: return [ "flv" ]
    if mimeType == applicationvndmicrografxflo: return [ "flo" ]
    if mimeType == videoxflic: return [ "fli", "flc" ]
    if mimeType == applicationvndflatpakrepo: return [ "flatpakrepo" ]
    if mimeType == applicationvndflatpakref: return [ "flatpakref" ]
    if mimeType == applicationvndflatpak: return [ "flatpak", "xdgapp" ]
    if mimeType == audioflac: return [ "flac" ]
    if mimeType == applicationxfluid: return [ "fl" ]
    if mimeType == applicationfits: return [ "fits", "fit", "fts" ]
    if mimeType == imagexxfig: return [ "fig" ]
    if mimeType == imagexfreehand: return [ "fh", "fhc", "fh4", "fh5", "fh7" ]
    if mimeType == applicationvndfujitsuoasysgp: return [ "fg5" ]
    if mimeType == textxgherkin: return [ "feature" ]
    if mimeType == applicationvnddenovofcselayoutlink: return [ "fe_launch" ]
    if mimeType == applicationfdt_xml: return [ "fdt" ]
    if mimeType == applicationxfdsdisk: return [ "fds" ]
    if mimeType == applicationfdf: return [ "fdf" ]
    if mimeType == applicationxrawfloppydiskimage: return [ "fd", "qd" ]
    if mimeType == applicationvndisacfcs: return [ "fcs" ]
    if mimeType == applicationvndadobeformscentralfcdt: return [ "fcdt" ]
    if mimeType == imagevndfastbidsheet: return [ "fbs" ]
    if mimeType == applicationxzipcompressedfb2: return [ "fb2zip" ]
    if mimeType == applicationxfictionbook_xml: return [ "fb2" ]
    if mimeType == textxfortran: return [ "f", "f90", "f95", "for", "f77" ]
    if mimeType == applicationvndezpixpackage: return [ "ez3" ]
    if mimeType == applicationvndezpixalbum: return [ "ez2" ]
    if mimeType == applicationandrewinset: return [ "ez" ]
    if mimeType == applicationvndnovadigmext: return [ "ext" ]
    if mimeType == imagexexr: return [ "exr" ]
    if mimeType == applicationexpress: return [ "exp" ]
    if mimeType == applicationexi: return [ "exi" ]
    if mimeType == applicationxmsdosexecutable: return [ "exe" ]
    if mimeType == textxelixir: return [ "ex", "exs" ]
    if mimeType == applicationxenvoy: return [ "evy" ]
    if mimeType == applicationxeva: return [ "eva" ]
    if mimeType == textxsetext: return [ "etx" ]
    if mimeType == applicationxetheme: return [ "etheme" ]
    if mimeType == applicationvndepsonesf: return [ "esf" ]
    if mimeType == applicationvndosgisubsystem: return [ "esa" ]
    if mimeType == applicationvndeszigno3_xml: return [ "es3", "et3" ]
    if mimeType == applicationecmascript: return [ "es", "ecma" ]
    if mimeType == textxerlang: return [ "erl" ]
    if mimeType == applicationepub_zip: return [ "epub" ]
return [ \"epsgz\", \"epsigz\", \"epsfgz\" ]\n\n if mimeType == imagexbzeps:\n return [ \"epsbz2\", \"epsibz2\", \"epsfbz2\" ]\n\n if mimeType == imagexeps:\n return [ \"eps\", \"epsi\", \"epsf\" ]\n\n if mimeType == applicationvndmsfontobject:\n return [ \"eot\" ]\n\n if mimeType == audiovnddigitalwinds:\n return [ \"eol\" ]\n\n if mimeType == applicationxmlexternalparsedentity:\n return [ \"ent\" ]\n\n if mimeType == applicationxmsmetafile:\n return [ \"emz\" ]\n\n if mimeType == applicationvndemusicemusic_package:\n return [ \"emp\" ]\n\n if mimeType == applicationemotionml_xml:\n return [ \"emotionml\" ]\n\n if mimeType == applicationemma_xml:\n return [ \"emma\" ]\n\n if mimeType == messagerfc822:\n return [ \"eml\", \"mime\" ]\n\n if mimeType == imageemf:\n return [ \"emf\" ]\n\n if mimeType == textxemacslisp:\n return [ \"el\" ]\n\n if mimeType == applicationvndpgosasli:\n return [ \"ei6\" ]\n\n if mimeType == applicationxegon:\n return [ \"egon\" ]\n\n if mimeType == applicationvndpicsel:\n return [ \"efif\" ]\n\n if mimeType == applicationvndnovadigmedx:\n return [ \"edx\" ]\n\n if mimeType == applicationvndnovadigmedm:\n return [ \"edm\" ]\n\n if mimeType == audiovndnueraecelp9600:\n return [ \"ecelp9600\" ]\n\n if mimeType == audiovndnueraecelp7470:\n return [ \"ecelp7470\" ]\n\n if mimeType == audiovndnueraecelp4800:\n return [ \"ecelp4800\" ]\n\n if mimeType == textxeiffel:\n return [ \"e\", \"eif\" ]\n\n if mimeType == applicationvndspotfiredxp:\n return [ \"dxp\" ]\n\n if mimeType == imagevnddxf:\n return [ \"dxf\" ]\n\n if mimeType == imagevnddwg:\n return [ \"dwg\" ]\n\n if mimeType == modelvnddwf:\n return [ \"dwf\" ]\n\n if mimeType == applicationatscdwd_xml:\n return [ \"dwd\" ]\n\n if mimeType == applicationxgzdvi:\n return [ \"dvigz\" ]\n\n if mimeType == applicationxbzdvi:\n return [ \"dvibz2\" ]\n\n if mimeType == applicationxdvi:\n return [ \"dvi\" ]\n\n if mimeType == videovnddvbfile:\n return [ \"dvb\" ]\n\n if mimeType == videodv:\n return [ \"dv\" ]\n\n if mimeType == textxdevicetreesource:\n return [ \"dtsi\" ]\n\n if mimeType == audiovnddtshd:\n return [ \"dtshd\" ]\n\n if mimeType == audiovnddts:\n return [ \"dts\" ]\n\n if mimeType == applicationxmldtd:\n return [ \"dtd\" ]\n\n if mimeType == textxdevicetreebinary:\n return [ \"dtb\" ]\n\n if mimeType == applicationdssc_der:\n return [ \"dssc\" ]\n\n if mimeType == textxdsl:\n return [ \"dsl\" ]\n\n if mimeType == audioxdsf:\n return [ \"dsf\" ]\n\n if mimeType == textprslinestag:\n return [ \"dsc\" ]\n\n if mimeType == imagedicomrle:\n return [ \"drle\" ]\n\n if mimeType == audiovnddra:\n return [ \"dra\" ]\n\n if mimeType == applicationvnddpgraph:\n return [ \"dpg\" ]\n\n if mimeType == applicationvndosgidp:\n return [ \"dp\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentwordprocessingmltemplate:\n return [ \"dotx\" ]\n\n if mimeType == applicationvndmswordtemplatemacroenabled12:\n return [ \"dotm\" ]\n\n if mimeType == applicationmswordtemplate:\n return [ \"dot\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentwordprocessingmldocument:\n return [ \"docx\" ]\n\n if mimeType == applicationvndmsworddocumentmacroenabled12:\n return [ \"docm\" ]\n\n if mimeType == applicationmsword:\n return [ \"doc\" ]\n\n if mimeType == imagexadobedng:\n return [ \"dng\" ]\n\n if mimeType == applicationvnddna:\n return [ \"dna\" ]\n\n if mimeType == applicationxapplediskimage:\n return [ \"dmg\" ]\n\n if mimeType == imagevnddjvu:\n return [ \"djvu\", \"djv\" ]\n\n if mimeType == 
messagedispositionnotification:\n return [ \"disposition-notification\" ]\n\n if mimeType == applicationvndmobiusdis:\n return [ \"dis\" ]\n\n if mimeType == applicationxdirector:\n return [ \"dir\", \"dxr\", \"cst\", \"cct\", \"cxt\", \"w3d\", \"fgd\", \"swa\" ]\n\n if mimeType == textxpatch:\n return [ \"diff\", \"patch\" ]\n\n if mimeType == applicationdicom:\n return [ \"dicomdir\", \"dcm\" ]\n\n if mimeType == textxc:\n return [ \"dic\" ]\n\n if mimeType == applicationxdiadiagram:\n return [ \"dia\" ]\n\n if mimeType == applicationxdgccompressed:\n return [ \"dgc\" ]\n\n if mimeType == audioxdff:\n return [ \"dff\" ]\n\n if mimeType == applicationvnddreamfactory:\n return [ \"dfac\" ]\n\n if mimeType == applicationxdesktop:\n return [ \"desktop\", \"kdelnk\" ]\n\n if mimeType == applicationxx509cacert:\n return [ \"der\", \"crt\", \"cert\", \"pem\" ]\n\n if mimeType == applicationvnddebianbinarypackage:\n return [ \"deb\", \"udeb\" ]\n\n if mimeType == imagexdds:\n return [ \"dds\" ]\n\n if mimeType == applicationvndsyncmldmddf_xml:\n return [ \"ddf\" ]\n\n if mimeType == applicationvndfujixeroxddd:\n return [ \"ddd\" ]\n\n if mimeType == applicationvndomadd2_xml:\n return [ \"dd2\" ]\n\n if mimeType == textvndcurldcurl:\n return [ \"dcurl\" ]\n\n if mimeType == imagexkodakdcr:\n return [ \"dcr\" ]\n\n if mimeType == textxdcl:\n return [ \"dcl\" ]\n\n if mimeType == applicationxdocbook_xml:\n return [ \"dbk\", \"docbook\" ]\n\n if mimeType == applicationxdbf:\n return [ \"dbf\" ]\n\n if mimeType == applicationdavmount_xml:\n return [ \"davmount\" ]\n\n if mimeType == textxdart:\n return [ \"dart\" ]\n\n if mimeType == applicationxdar:\n return [ \"dar\" ]\n\n if mimeType == applicationvndmobiusdaf:\n return [ \"daf\" ]\n\n if mimeType == modelvndcollada_xml:\n return [ \"dae\" ]\n\n if mimeType == textxdsrc:\n return [ \"d\", \"di\" ]\n\n if mimeType == applicationprscww:\n return [ \"cww\" ]\n\n if mimeType == applicationcwl:\n return [ \"cwl\" ]\n\n if mimeType == applicationxappleworksdocument:\n return [ \"cwk\" ]\n\n if mimeType == textvndcurl:\n return [ \"curl\" ]\n\n if mimeType == imagexwinbitmap:\n return [ \"cur\" ]\n\n if mimeType == applicationxcue:\n return [ \"cue\" ]\n\n if mimeType == applicationcuseeme:\n return [ \"cu\" ]\n\n if mimeType == textcsvschema:\n return [ \"csvs\" ]\n\n if mimeType == textcsv:\n return [ \"csv\" ]\n\n if mimeType == textcss:\n return [ \"css\" ]\n\n if mimeType == applicationvndcommonspace:\n return [ \"csp\" ]\n\n if mimeType == applicationxcompressediso:\n return [ \"cso\" ]\n\n if mimeType == chemicalxcsml:\n return [ \"csml\" ]\n\n if mimeType == applicationvndcitationstylesstyle_xml:\n return [ \"csl\" ]\n\n if mimeType == applicationxcsh:\n return [ \"csh\" ]\n\n if mimeType == textxcsharp:\n return [ \"cs\" ]\n\n if mimeType == applicationvndrigcryptonote:\n return [ \"cryptonote\" ]\n\n if mimeType == applicationxchromeextension:\n return [ \"crx\" ]\n\n if mimeType == imagexcanoncrw:\n return [ \"crw\" ]\n\n if mimeType == applicationpkixcrl:\n return [ \"crl\" ]\n\n if mimeType == textxcredits:\n return [ \"credits\" ]\n\n if mimeType == applicationxmscardfile:\n return [ \"crd\" ]\n\n if mimeType == imagexcanoncr3:\n return [ \"cr3\" ]\n\n if mimeType == imagexcanoncr2:\n return [ \"cr2\" ]\n\n if mimeType == textxcrystal:\n return [ \"cr\" ]\n\n if mimeType == applicationmaccompactpro:\n return [ \"cpt\" ]\n\n if mimeType == textxc__src:\n return [ \"cpp\", \"cxx\", \"cc\", \"c\" ]\n\n if mimeType == applicationcpl_xml:\n 
return [ \"cpl\" ]\n\n if mimeType == applicationxcpiocompressed:\n return [ \"cpiogz\" ]\n\n if mimeType == applicationxcpio:\n return [ \"cpio\" ]\n\n if mimeType == applicationxcore:\n return [ \"core\" ]\n\n if mimeType == textxcopying:\n return [ \"copying\" ]\n\n if mimeType == applicationxmsdownload:\n return [ \"com\", \"bat\" ]\n\n if mimeType == applicationvndcoffeescript:\n return [ \"coffee\" ]\n\n if mimeType == applicationvndrimcod:\n return [ \"cod\" ]\n\n if mimeType == imagexcmx:\n return [ \"cmx\" ]\n\n if mimeType == applicationvndyellowrivercustommenu:\n return [ \"cmp\" ]\n\n if mimeType == chemicalxcml:\n return [ \"cml\" ]\n\n if mimeType == chemicalxcmdf:\n return [ \"cmdf\" ]\n\n if mimeType == applicationvndcosmocaller:\n return [ \"cmc\" ]\n\n if mimeType == textxcmake:\n return [ \"cmake\", \"cmakeliststxt\" ]\n\n if mimeType == applicationxmsclip:\n return [ \"clp\" ]\n\n if mimeType == applicationvndcrickclicker:\n return [ \"clkx\" ]\n\n if mimeType == applicationvndcrickclickerwordbank:\n return [ \"clkw\" ]\n\n if mimeType == applicationvndcrickclickertemplate:\n return [ \"clkt\" ]\n\n if mimeType == applicationvndcrickclickerpalette:\n return [ \"clkp\" ]\n\n if mimeType == applicationvndcrickclickerkeyboard:\n return [ \"clkk\" ]\n\n if mimeType == applicationxjava:\n return [ \"class\" ]\n\n if mimeType == applicationvndclaymore:\n return [ \"cla\" ]\n\n if mimeType == textxopenclsrc:\n return [ \"cl\" ]\n\n if mimeType == applicationnode:\n return [ \"cjs\" ]\n\n if mimeType == applicationvndmsartgalry:\n return [ \"cil\" ]\n\n if mimeType == applicationvndanserwebcertificateissueinitiation:\n return [ \"cii\" ]\n\n if mimeType == chemicalxcif:\n return [ \"cif\" ]\n\n if mimeType == applicationxkchart:\n return [ \"chrt\" ]\n\n if mimeType == applicationvndmshtmlhelp:\n return [ \"chm\" ]\n\n if mimeType == applicationxmamechd:\n return [ \"chd\" ]\n\n if mimeType == applicationxchat:\n return [ \"chat\" ]\n\n if mimeType == textxchangelog:\n return [ \"changelog\" ]\n\n if mimeType == imagecgm:\n return [ \"cgm\" ]\n\n if mimeType == applicationxcfscompressed:\n return [ \"cfs\" ]\n\n if mimeType == applicationpkixcert:\n return [ \"cer\" ]\n\n if mimeType == applicationvndcinderella:\n return [ \"cdy\" ]\n\n if mimeType == applicationvndchemdraw_xml:\n return [ \"cdxml\" ]\n\n if mimeType == chemicalxcdx:\n return [ \"cdx\" ]\n\n if mimeType == applicationvndcoreldraw:\n return [ \"cdr\" ]\n\n if mimeType == applicationcdmiqueue:\n return [ \"cdmiq\" ]\n\n if mimeType == applicationcdmiobject:\n return [ \"cdmio\" ]\n\n if mimeType == applicationcdmidomain:\n return [ \"cdmid\" ]\n\n if mimeType == applicationcdmicontainer:\n return [ \"cdmic\" ]\n\n if mimeType == applicationcdmicapability:\n return [ \"cdmia\" ]\n\n if mimeType == applicationvndmediastationcdkey:\n return [ \"cdkey\" ]\n\n if mimeType == applicationxdiscjugglercdimage:\n return [ \"cdi\" ]\n\n if mimeType == applicationcdfx_xml:\n return [ \"cdfx\" ]\n\n if mimeType == applicationxnetcdf:\n return [ \"cdf\", \"nc\" ]\n\n if mimeType == applicationvndcontactcmsg:\n return [ \"cdbcmsg\" ]\n\n if mimeType == applicationccxml_xml:\n return [ \"ccxml\" ]\n\n if mimeType == applicationxcocoa:\n return [ \"cco\" ]\n\n if mimeType == applicationxccmx:\n return [ \"ccmx\" ]\n\n if mimeType == applicationvndcomicbook_zip:\n return [ \"cbz\" ]\n\n if mimeType == applicationxcbt:\n return [ \"cbt\" ]\n\n if mimeType == applicationvndcomicbookrar:\n return [ \"cbr\" ]\n\n if mimeType == 
textxcobol:\n return [ \"cbl\", \"cob\" ]\n\n if mimeType == applicationxcbr:\n return [ \"cba\" ]\n\n if mimeType == applicationxcb7:\n return [ \"cb7\" ]\n\n if mimeType == applicationvndmspkiseccat:\n return [ \"cat\" ]\n\n if mimeType == applicationvndcurlcar:\n return [ \"car\" ]\n\n if mimeType == audioxcaf:\n return [ \"caf\" ]\n\n if mimeType == applicationvndmscabcompressed:\n return [ \"cab\" ]\n\n if mimeType == applicationvndclonkc4group:\n return [ \"c4g\", \"c4d\", \"c4f\", \"c4p\", \"c4u\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfigpkg:\n return [ \"c11amz\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfig:\n return [ \"c11amc\" ]\n\n if mimeType == applicationxbzip:\n return [ \"bz2\", \"bz\" ]\n\n if mimeType == imageprsbtif:\n return [ \"btif\", \"btf\" ]\n\n if mimeType == modelvndvalvesourcecompiledmap:\n return [ \"bsp\" ]\n\n if mimeType == applicationxbsdiff:\n return [ \"bsdiff\" ]\n\n if mimeType == applicationxbpspatch:\n return [ \"bps\" ]\n\n if mimeType == applicationxbzip2:\n return [ \"boz\" ]\n\n if mimeType == applicationvndpreviewsystemsbox:\n return [ \"box\" ]\n\n if mimeType == imagebmp:\n return [ \"bmp\", \"dib\" ]\n\n if mimeType == applicationvndbalsamiqbmml_xml:\n return [ \"bmml\" ]\n\n if mimeType == applicationvndbmi:\n return [ \"bmi\" ]\n\n if mimeType == applicationxblender:\n return [ \"blend\", \"blender\" ]\n\n if mimeType == applicationxblorb:\n return [ \"blb\", \"blorb\" ]\n\n if mimeType == applicationoctetstream:\n return [ \"bin\", \"dms\", \"lrf\", \"mar\", \"dist\", \"distz\", \"bpk\", \"dump\", \"elc\", \"deploy\", \"dll\", \"msp\", \"msm\", \"buffer\" ]\n\n if mimeType == videovndradgamettoolsbink:\n return [ \"bik\", \"bk2\" ]\n\n if mimeType == textxbibtex:\n return [ \"bib\" ]\n\n if mimeType == applicationvndfujitsuoasysprs:\n return [ \"bh2\" ]\n\n if mimeType == applicationvndrealvncbed:\n return [ \"bed\" ]\n\n if mimeType == applicationbdoc:\n return [ \"bdoc\" ]\n\n if mimeType == applicationxfontbdf:\n return [ \"bdf\" ]\n\n if mimeType == applicationxbcpio:\n return [ \"bcpio\" ]\n\n if mimeType == applicationxtrash:\n return [ \"bak\", \"old\", \"sik\" ]\n\n if mimeType == imagevndpcob16:\n return [ \"b16\" ]\n\n if mimeType == applicationvndamazonmobi8ebook:\n return [ \"azw3\", \"kfx\" ]\n\n if mimeType == applicationvndamazonebook:\n return [ \"azw\" ]\n\n if mimeType == imagevndairzipacceleratorazv:\n return [ \"azv\" ]\n\n if mimeType == applicationvndairzipfilesecureazs:\n return [ \"azs\" ]\n\n if mimeType == applicationvndairzipfilesecureazf:\n return [ \"azf\" ]\n\n if mimeType == videoannodex:\n return [ \"axv\" ]\n\n if mimeType == audioannodex:\n return [ \"axa\" ]\n\n if mimeType == applicationxawk:\n return [ \"awk\" ]\n\n if mimeType == audioamrwb:\n return [ \"awb\" ]\n\n if mimeType == applicationxapplixword:\n return [ \"aw\" ]\n\n if mimeType == imageavif:\n return [ \"avif\", \"avifs\" ]\n\n if mimeType == videoxmsvideo:\n return [ \"avi\", \"avf\", \"divx\" ]\n\n if mimeType == imageavcs:\n return [ \"avcs\" ]\n\n if mimeType == imageavci:\n return [ \"avci\" ]\n\n if mimeType == textxsystemdunit:\n return [ \"automount\", \"device\", \"mount\", \"path\", \"scope\", \"slice\", \"socket\", \"swap\", \"target\", \"timer\" ]\n\n if mimeType == textxauthors:\n return [ \"authors\" ]\n\n if mimeType == audiobasic:\n return [ \"au\", \"snd\" ]\n\n if mimeType == applicationvndantixgamecomponent:\n return [ \"atx\" ]\n\n if mimeType == applicationatomsvc_xml:\n return [ 
\"atomsvc\" ]\n\n if mimeType == applicationatomdeleted_xml:\n return [ \"atomdeleted\" ]\n\n if mimeType == applicationatomcat_xml:\n return [ \"atomcat\" ]\n\n if mimeType == applicationatom_xml:\n return [ \"atom\" ]\n\n if mimeType == applicationvndacucorp:\n return [ \"atc\", \"acutc\" ]\n\n if mimeType == audioxmsasx:\n return [ \"asx\", \"wax\", \"wvx\", \"wmx\" ]\n\n if mimeType == imageastc:\n return [ \"astc\" ]\n\n if mimeType == applicationxasp:\n return [ \"asp\" ]\n\n if mimeType == applicationvndaccpacsimplyaso:\n return [ \"aso\" ]\n\n if mimeType == applicationvndmsasf:\n return [ \"asf\" ]\n\n if mimeType == textxcommonlisp:\n return [ \"asd\", \"fasl\", \"lisp\", \"ros\" ]\n\n if mimeType == applicationxasar:\n return [ \"asar\" ]\n\n if mimeType == applicationxapplixspreadsheet:\n return [ \"as\" ]\n\n if mimeType == imagexsonyarw:\n return [ \"arw\" ]\n\n if mimeType == applicationxarj:\n return [ \"arj\" ]\n\n if mimeType == applicationxfreearc:\n return [ \"arc\" ]\n\n if mimeType == applicationvndlotusapproach:\n return [ \"apr\" ]\n\n if mimeType == applicationxmsapplication:\n return [ \"application\" ]\n\n if mimeType == applicationxiso9660appimage:\n return [ \"appimage\" ]\n\n if mimeType == imageapng:\n return [ \"apng\" ]\n\n if mimeType == applicationvndandroidpackagearchive:\n return [ \"apk\" ]\n\n if mimeType == audioxape:\n return [ \"ape\" ]\n\n if mimeType == applicationannodex:\n return [ \"anx\" ]\n\n if mimeType == videoxanim:\n return [ \"anim19j\" ]\n\n if mimeType == applicationxnavianimation:\n return [ \"ani\" ]\n\n if mimeType == audioxamzxml:\n return [ \"amz\" ]\n\n if mimeType == audioamr:\n return [ \"amr\" ]\n\n if mimeType == applicationvndamigaami:\n return [ \"ami\" ]\n\n if mimeType == applicationxalz:\n return [ \"alz\" ]\n\n if mimeType == applicationvnddvbait:\n return [ \"ait\" ]\n\n if mimeType == applicationvndadobeairapplicationinstallerpackage_zip:\n return [ \"air\" ]\n\n if mimeType == audioxaiff:\n return [ \"aiff\", \"aif\" ]\n\n if mimeType == audioxaifc:\n return [ \"aifc\", \"aiffc\" ]\n\n if mimeType == applicationillustrator:\n return [ \"ai\" ]\n\n if mimeType == applicationvndaheadspace:\n return [ \"ahead\" ]\n\n if mimeType == applicationvndage:\n return [ \"age\" ]\n\n if mimeType == imagexapplixgraphics:\n return [ \"ag\" ]\n\n if mimeType == applicationvndibmmodcap:\n return [ \"afp\", \"listafp\", \"list3820\" ]\n\n if mimeType == applicationxfontafm:\n return [ \"afm\" ]\n\n if mimeType == applicationvndaudiograph:\n return [ \"aep\" ]\n\n if mimeType == audioadpcm:\n return [ \"adp\" ]\n\n if mimeType == applicationxamigadiskformat:\n return [ \"adf\" ]\n\n if mimeType == textxadasrc:\n return [ \"adb\", \"ads\" ]\n\n if mimeType == applicationvndacucobol:\n return [ \"acu\" ]\n\n if mimeType == applicationxace:\n return [ \"ace\" ]\n\n if mimeType == applicationvndamericandynamicsacc:\n return [ \"acc\" ]\n\n if mimeType == audioac3:\n return [ \"ac3\" ]\n\n if mimeType == applicationpkixattrcert:\n return [ \"ac\" ]\n\n if mimeType == applicationxabiword:\n return [ \"abw\", \"abwcrashed\", \"abwgz\", \"zabw\" ]\n\n if mimeType == audiovndaudibleaax:\n return [ \"aax\" ]\n\n if mimeType == applicationxauthorwareseg:\n return [ \"aas\" ]\n\n if mimeType == applicationxauthorwaremap:\n return [ \"aam\" ]\n\n if mimeType == audioaac:\n return [ \"aac\", \"adts\" ]\n\n if mimeType == applicationxauthorwarebin:\n return [ \"aab\", \"x32\", \"u32\", \"vox\" ]\n\n if mimeType == audioxpnaudibleaudio:\n return [ 
\"aa\" ]\n\n if mimeType == applicationxatari7800rom:\n return [ \"a78\" ]\n\n if mimeType == applicationxatari2600rom:\n return [ \"a26\" ]\n\n if mimeType == applicationxarchive:\n return [ \"a\", \"ar\" ]\n\n if mimeType == applicationx7zcompressed:\n return [ \"7z\", \"7z001\" ]\n\n if mimeType == applicationxt602:\n return [ \"602\" ]\n\n if mimeType == model3mf:\n return [ \"3mf\" ]\n\n if mimeType == video3gpp:\n return [ \"3gp\", \"3gpp\", \"3ga\" ]\n\n if mimeType == video3gpp2:\n return [ \"3g2\", \"3gp2\", \"3gpp2\" ]\n\n if mimeType == applicationxnintendo3dsexecutable:\n return [ \"3dsx\" ]\n\n if mimeType == applicationxnintendo3dsrom:\n return [ \"3ds\", \"cci\" ]\n\n if mimeType == textvndin3d3dml:\n return [ \"3dml\" ]\n\n if mimeType == applicationxgenesis32xrom:\n return [ \"32x\", \"mdx\" ]\n\n if mimeType == applicationvnd1000mindsdecisionmodel_xml:\n return [ \"1km\" ]\n\n if mimeType == applicationvndlotus123:\n return [ \"123\", \"wk1\", \"wk3\", \"wk4\", \"wks\" ]\n \n return []", "def build_mimetype(self) -> None:\n logger.info(__('writing mimetype file...'))\n copy_asset_file(path.join(self.template_dir, 'mimetype'), self.outdir)", "def process_part(self, part):\n\t\tcontent_type = part.get_content_type()\n\t\tfilename = part.get_filename()\n\t\tif content_type == 'text/plain' and not filename:\n\t\t\tself.text_content += self.get_payload(part)\n\n\t\telif content_type == 'text/html':\n\t\t\tself.html_content += self.get_payload(part)\n\n\t\telif content_type == 'message/rfc822':\n\t\t\t# sent by outlook when another email is sent as an attachment to this email\n\t\t\tself.show_attached_email_headers_in_content(part)\n\n\t\telif content_type == 'text/calendar':\n\t\t\tself.set_calendar_invite(part)\n\n\t\telif filename or 'image' in content_type:\n\t\t\tself.get_attachment(part)", "def parse_mime(self, mtype):\n parts = mtype.split(';')\n params = OrderedDict()\n\n # Split parameters and convert numeric values to a Decimal object.\n for k, v in [param.split('=', 1) for param in parts[1:]]:\n k = k.strip().lower()\n v = v.strip().strip('\\'\"')\n\n if self._parm_val_lower:\n v = v.lower()\n\n try:\n v = Decimal(v)\n except InvalidOperation:\n if k == 'q':\n v = Decimal(\"1.0\")\n\n params[k] = v\n\n # Add/fix quality values.\n quality = params.get('q')\n\n if ('q' not in params\n or quality > Decimal(\"1.0\")\n or quality < Decimal(\"0.0\")):\n params['q'] = Decimal(\"1.0\")\n\n full_type = parts[0].strip().lower()\n\n # Fix non-standard single asterisk.\n if full_type == '*':\n full_type = '*/*'\n\n type, sep, subtype = full_type.partition('/')\n\n if '+' in subtype:\n idx = subtype.rfind('+')\n suffix = subtype[idx+1:].strip()\n subtype = subtype[:idx]\n else:\n suffix = ''\n\n return type.strip(), subtype.strip(), suffix, params", "def selection(self):\n self.file.seek(0)\n start = self.file.readline().rstrip().upper()\n if start[0] == \">\":\n self.file_type = \"fasta\"\n elif start[0:1] == \"ID\":\n self.file_type = \"embl_gcg\"\n elif start[0:5] == \"LOCUS\":\n self.file_type = \"genbank\"\n elif start[0] == \";\":\n self.file_type = \"ig\"\n elif not re.search('[^GATCN]', start):\n self.file_type = \"plain\"\n else:\n self.file_type = None\n return self.file_type", "def getMimeType(self, extension): #$NON-NLS-1$\r", "def getMimeTypes(self): #$NON-NLS-1$\r", "def mimetype(self):\n hcell = self._get_hcell2()\n mimetype = hcell.get(\"mimetype\")\n if mimetype is not None:\n return mimetype\n celltype = hcell[\"celltype\"]\n if celltype == \"code\":\n language 
= hcell[\"language\"]\n mimetype = language_to_mime(language)\n return mimetype\n if celltype == \"structured\":\n datatype = hcell[\"datatype\"]\n if datatype in (\"mixed\", \"binary\", \"plain\"):\n mimetype = get_mime(datatype)\n elif datatype in (\"float\", \"int\", \"str\", \"bool\"):\n mimetype = get_mime(\"plain\")\n else:\n mimetype = ext_to_mime(datatype)\n else:\n mimetype = get_mime(celltype)\n return mimetype", "def _check_mimetype(self):\n if self.mimetype in Config.aliases:\n mimetype = Config.aliases[self.mimetype]\n else:\n mimetype = self.mimetype\n expected_extensions = mimetypes.guess_all_extensions(mimetype,\n strict=False)\n if expected_extensions:\n if self.has_extension and self.extension not in expected_extensions:\n # LOG: improve this string\n self.make_dangerous('expected extensions')", "def to_internal_value(self, data):\n if isinstance(data, str) and data.startswith('data:image'):\n # Found image is encoded, and must be decoded\n format, imgstr = data.split(';base64,')\n ext = format.split('/')[-1] # Extract file extension\n id = uuid.uuid4()\n data = ContentFile(base64.b64decode(imgstr), name = id.urn[9:] + '.' + ext)\n return super(Base64ImageField, self).to_internal_value(data)", "def get_mime_property(data, key, mime):\n # type: (list, str, str) -> str\n try:\n return [item.get(key) for item in data if item.get(\"mimetype\") == mime][0]\n except (IndexError, KeyError):\n logger.debug(\"get_mime_property error: {} {}\".format(key, mime))\n return \"\"", "def _copy_attachment(self, name, data, mimetype, mfg_event):\n attachment = mfg_event.attachment.add()\n attachment.name = name\n attachment.value_binary = data\n if mimetype in test_runs_converter.MIMETYPE_MAP:\n attachment.type = test_runs_converter.MIMETYPE_MAP[mimetype]\n elif mimetype == test_runs_pb2.MULTIDIM_JSON:\n attachment.type = mimetype\n else:\n attachment.type = test_runs_pb2.BINARY", "def mime(mime):\n\n def dfn(fn):\n fn.mime = mime\n return fn\n\n return dfn", "def mime_types(self) -> FilebaseApiConfigMimeTypes:\n mime_types = self.get(\"mime_types\", {})\n if not isinstance(mime_types, FilebaseApiConfigMimeTypes):\n mime_types = FilebaseApiConfigMimeTypes(**mime_types)\n self[\"mime_types\"] = mime_types\n return mime_types", "def to_internal_value(self, data):\n try: # ToDo penetrate in order test if it has any security flaws\n decoded = base64.b64decode(data)\n mime_type = magic.from_buffer(decoded, mime=True)\n file_ext = mimetypes.guess_extension(mime_type)\n except TypeError:\n raise serializers.ValidationError(\n _('Not a valid base64 file')\n )\n\n if file_ext not in settings.VALID_FILE_EXTENSIONS:\n raise serializers.ValidationError(\n _('Forbidden file extension')\n )\n\n file_name = \"{0}{1}\".format(uuid.uuid4(), file_ext)\n data = ContentFile(decoded, name=file_name)\n return data", "def mime_allowed(self, mime_allowed: ConfigNodePropertyArray):\n\n self._mime_allowed = mime_allowed", "def to_image_data(data):\n \n # removing image\n if not data:\n return u''\n\n # image path (not changed)\n if data[0:5] != u'data:':\n return None\n \n # TODO: better MIME handling\n mime = data[5:data.index(u';')].lower()\n img = data[data.index(u',') + 1:].decode('base64')\n \n return mime, img", "def validate(self, data):\n logger.debug(data)\n upload = data['upload']\n config_type = data['config_type']\n content_type = validators.validate_content_type(upload, config_type)\n if config_type == 'PRESET':\n validators.validate_preset(upload)\n data['content_type'] = content_type\n fname = 
data['upload'].name\n data['filename'] = fname.replace(' ', '_').lower()\n return data", "def mime_type(self, type_t='application'):\n supported = ' '.join(MIME_TYPES.keys())\n\n if type_t not in list(MIME_TYPES.keys()):\n raise ValueError(\n 'Unsupported mime type! Use: {}'.format(supported))\n\n mime_type = self.random.choice(MIME_TYPES[type_t])\n return mime_type", "def checkFileType(self, ext, path, platformName):\r\n types_ = ['audio' for i in audio[:] if i == self.ext]\r\n if types_ and types_[0] == 'audio':\r\n return(path + '/Music')\r\n types_ = ['video' for i in video[:] if i == self.ext]\r\n if types_ and types_[0] == 'video':\r\n if(platformName == 'Darwin'):\r\n return (path + '/Movies')\r\n return (path + '/Videos')\r\n types_ = ['raster-image' for i in image[:] if i == self.ext]\r\n if types_ and types_[0] == 'raster-image':\r\n if platformName == 'Linux' or platformName == 'Darwin':\r\n return(path +'/Pictures')\r\n else:\r\n if(self.onedrive):\r\n return(path +'/OneDrive/Pictures')\r\n else:\r\n return(path +'/Pictures')\r\n types_ = ['document' for i in document[:] if i == self.ext]\r\n if types_ and types_[0] == 'document':\r\n if platformName == 'Linux' or platformName == 'Darwin':\r\n return(path+'/Documents')\r\n else:\r\n if(self.onedrive):\r\n return(path +'/OneDrive/Documents')\r\n else:\r\n return(path +'/Documents')\r\n if not types_:\r\n return('None')", "def drag_data_get_data(self, treeview, context, selection, target_id, etime):\n sel = treeview.get_selection()\n model, iter = sel.get_selected()\n data = model.get_value(iter, 1)\n if os.path.exists(data): data = 'file://'+os.path.abspath(data)\n # todo: multiselect\n #model, iter = sel.get_selected_rows()\n #data = []\n #for path in iter:\n # data += 'file://'+model.get_value(model.get_iter(path), 1)\n selection.set(selection.target, 8, data)", "def mime_type(self, mime_type):\n\n self._mime_type = mime_type", "def _approve_only_dd_mime_type(self, event):\n data = event.mimeData().data(constants.QGRAPH_DD_MIME_TYPE)\n if not data.isNull():\n event.acceptProposedAction()\n else:\n event.ignore()", "def _best_mime():\n supported = []\n renders = {}\n for renderer_cls in app.config.get(\"RENDERERS\"):\n renderer = import_from_string(renderer_cls)\n for mime_type in renderer.mime:\n supported.append(mime_type)\n renders[mime_type] = renderer\n\n if len(supported) == 0:\n abort(\n 500,\n description=debug_error_message(\n \"Configuration error: no supported mime types\"\n ),\n )\n\n best_match = request.accept_mimetypes.best_match(supported) or supported[0]\n return best_match, renders[best_match]", "def mime_allowed(self) -> ConfigNodePropertyArray:\n return self._mime_allowed" ]
[ "0.60108477", "0.5680776", "0.56193733", "0.5298136", "0.5222251", "0.5179363", "0.50925857", "0.4962017", "0.49526304", "0.49443462", "0.49148428", "0.48848096", "0.48705718", "0.48466402", "0.48276138", "0.48247182", "0.4824222", "0.48028073", "0.47541302", "0.47510785", "0.4739489", "0.4728229", "0.47236148", "0.46966264", "0.46947852", "0.4690782", "0.4663413", "0.46603996", "0.46558145", "0.46452993" ]
0.612524
0
The next function (isModelDirty) is a workaround. It should not be necessary, but it currently is because we set tooltips in the treeview, which triggers emitDataChanged, which in turn triggers a rebuild or repopulation of the property editor. The proper fix will be to use columns in the treeview where each column has its own static tooltip, so that tooltips are no longer dynamically set by the delegate (views/renderSetupDelegate.py) depending on the lastHitAction.
def isModelDirty(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isDirty(self):\n\t#@DEBUG christophe have to fix denoising optionnal issue prior to set isDirty() to True\n return False", "def _modelUpdated(self, *args, **kwargs):\n topLeft = self.index(column=0)\n bottomRight = self.index(column=1)\n model = self.model()\n if model is not None:\n model.dataChanged.emit(topLeft, bottomRight)", "def on_get_tooltip(model, tree_iter, tooltip):\n obj = model.get_object(tree_iter)\n doc = obj.document\n if doc and hasattr(doc, \"validation_result\"):\n errors = doc.validation_result[obj]\n if errors:\n tooltip.set_text(\"\\n\".join([e.msg for e in errors]))\n return True\n\n return False", "def modelChanged(self) -> None:\n ...", "def _dirty (self):\n pass", "def table_model_changed(self, table_model, index, old, new):\n self.undostack.push(EditCommand(table_model, index, old, new))\n # print(\"TableModel changed at [%s, %s]\" % (index.row(), index.column()))\n # print(\"Old Value: %s | New Value: %s\" % (old, new))", "def on_columnvalue_modified( self, *data ):\n\t\tif (len(data) == 4):\t( cell, path, model, user_data ) = data\n\t\telse:\t\t\t( cell, path, new_text, model, user_data ) = data\n\t\t(datatype,) = user_data\n\t\tcolid = self.window2.type2colid[datatype]\n\t\tif \t(datatype == \"combo\"):\n\t\t\tmodel[path][colid] = new_text\n\t\telif \t(datatype == \"spin\"):\n\t\t\tmodel[path][colid] = long(new_text)\n\t\telif \t(datatype == \"text\"):\n\t\t\tmodel[path][colid] = new_text\n\t\telif \t(datatype == \"check\"):\n\t\t\tmodel[path][colid] = not model[path][colid]", "def isDataChanged(self):\n return bool(self._dirty or self.ui.channelEditor.getQModel().isDataChanged() or self._dirtyMntGrps)", "def updateToolBar(self):\n cellWidget = self.sheet.getCell(self.row, self.col)\n for action in self.actions():\n action.needUpdateStatus.emit((self.sheet, self.row, self.col,\n cellWidget))", "def is_dirty(self):\n return self.dirty", "def is_dirty(self):\n return self.dirty", "def testDirtyRefresh(self):\n \n pass", "def should_save(self):\n return self.modified", "def is_dirty(self):\n return (\n self._momentary_follow_sense_prop.is_dirty\n or self._momentary_mode_on_prop.is_dirty\n or self._momentary_on_off_trigger_prop.is_dirty\n )", "def is_dirty(self):\n return True in [n.is_dirty for n in self.nodes]", "def _is_dirty(self):\n if self._get_dirty_fields():\n return True\n\n return False", "def is_dirty(self):\n return self._is_dirty", "def is_dirty(self):\n return self._is_dirty", "def is_modified(self):\n if self.any_parent_as_widget:\n return self._is_modified or self.defaults_not_set\n\n if self._is_modified or self.defaults_not_set:\n return True\n\n if self.is_overidable:\n return self.was_overriden != self.is_overriden\n else:\n return self.has_studio_override != self.had_studio_override", "def checkModel(self, model):\n # TODO", "def OnModified(self, evt):\n if self.VertEdit.Enabled:\n self.VertEdit.OnModified(evt)\n else:\n evt.Skip()", "def GetDirty(self, *args, **kwargs):\n pass", "def on_save(self):\n super(ToolSettings, self).on_save()\n #--- Parse Edited Items ---#\n for item in self.getEditedItems():\n self.log.detail(\"---> %s | %s\" % (item.parent().itemCode, item.itemCode))\n item.itemWidget.on_save()\n item.itemWidget.__edited__ = False\n #--- Refresh ---#\n self.rf_editedItemStyle()", "def GenColsByModel(modelo, indices,tree):\n nCols = 0\n for i in indices:\n if i[2] ==\"boo\":\n render = gtk.CellRendererToggle()\n if len(i) ==4:\n if i[3] != False:\n render.connect('toggled', i[3], modelo) \n elif len(i) ==5:\n if i[3] != 
False:\n render.connect('toggled', i[3], modelo,i[0]) \n else:\n render.connect('toggled', fixed_toggled, modelo,i[0])\n \n column = gtk.TreeViewColumn(i[1], render, active=i[0])\n if len(i) ==4:\n if i[3] != False:\n column.set_clickable(True)\n column.connect('clicked', column_click_ok,modelo, tree, i[0],nCols)\n else:\n column.set_clickable(True)\n column.connect('clicked', column_click_ok,modelo, tree, i[0],nCols)\n elif i[2] ==\"pboo\":\n render = gtk.CellRendererToggle()\n if len(i) ==4:\n if i[3] != False:\n render.connect('toggled', i[3], modelo) \n elif len(i) ==5:\n if i[3] != False:\n render.connect('toggled', i[3], modelo,i[0][0]) \n else:\n render.connect('toggled', fixed_toggled, modelo,i[0][0])\n \n column = gtk.TreeViewColumn(i[1], render, active=i[0][0])\n if len(i) ==4:\n if i[3] != False:\n column.set_clickable(True)\n column.connect('clicked', column_click_ok,modelo, tree, i[0][0],nCols)\n else:\n column.set_clickable(True)\n column.connect('clicked', column_click_ok,modelo, tree, i[0][0],nCols)\n pix = gtk.CellRendererPixbuf()\n #column = gtk.TreeViewColumn(i[1])\n #pix.set_property('cell-background', 'red')\n column.pack_start(pix, True)\n column.set_attributes(pix, stock_id=i[0][1])\n else:\n if i[2] == \"pix\":\n render = gtk.CellRendererPixbuf()\n else:\n render = gtk.CellRendererText()\n \n if len(i) >= 4:\n if len(i) == 5:\n render.set_property('mode',gtk.CELL_RENDERER_MODE_EDITABLE)\n render.connect(\"editing-started\",edited_cc,i[4])\n if len(i) == 6:\n render.connect(\"edited\",edited_cb,modelo,i[0],i[3],i[5])\n else:\n render.connect(\"edited\",edited_cb,modelo,i[0],i[3])\n render.set_property('editable',True)\n if i[2] == \"pix\":\n column = gtk.TreeViewColumn(i[1])\n column.pack_start(render, False)\n column.set_attributes(render, stock_id=i[0])\n else:\n column = gtk.TreeViewColumn(i[1], render, markup=i[0])\n column.set_resizable(True)\n #column.set_attributes(render,markup=i[0])\n if i[2] ==\"str\":#str\n column.set_cell_data_func(render, columna_utf8, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n elif i[2] ==\"pstr\":#str\n #column.set_cell_data_func(render, columna_utf8, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0][0],nCols)\n pix = gtk.CellRendererPixbuf()\n #column = gtk.TreeViewColumn(i[1])\n column.pack_start(pix, True)\n column.set_attributes(pix, stock_id=i[0][1])\n elif i[2] ==\"STR\":#str\n #column.set_cell_data_func(render, columna_utf8, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n elif i[2] ==\"dbl\":#float:\n column.set_cell_data_func(render, columna_real, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n elif i[2] ==\"int\":\n column.set_cell_data_func(render, columna_numerica, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n elif i[2] ==\"rut\":\n column.set_cell_data_func(render, columna_rut, i[0])\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n \n elif i[2] ==\"dte\":\n column.set_clickable(True)\n column.connect('clicked', column_click,modelo, tree, i[0],nCols)\n column.set_cell_data_func(render, columna_fecha, i[0])\n elif i[2] == \"pix\":\n pass\n \n tree.append_column(column)\n nCols = nCols +1 \n \n tree.set_model(modelo)", "def hasChanged(self):\r\n if self.is_updated:\r\n self.is_updated = False\r\n return 
True\r\n else:\r\n return False\r\n\r\n # if not self.hasBeenUpdatedOnce:\r\n # self.hasBeenUpdatedOnce = True\r\n # return True\r\n # else:\r\n # if BLENDER_MODE == 'BPY':\r\n # # for e in dir(self.obj): print(e)\r\n # # print(self.obj, self.obj.name, self.obj.is_updated, self.obj.is_updated_data)\r\n # # return self.obj.is_updated # DOESN't UPDATE A THING!\r\n # # return True\r\n # return self.is_updated\r\n\r\n # return False # no update in BGE mode\r", "def _set_dg_dirty(self):\n pm.dgdirty(self._object, self._futurePivot)", "def _check_is_editable(self, raise_error: bool = True) -> bool:", "def is_dirty(self):\n if self.sub_plugs:\n for sub_plug in self.sub_plugs.values():\n if sub_plug.is_dirty:\n return True\n return False\n return self._is_dirty", "def isdirty(self):\n\n return not not self._olddata", "def _itemChanged(self, event):\n if event == items.ItemChangedType.VISUALIZATION_MODE:\n item = self.sender()\n if item is not None: # This occurs with PySide/python2.7\n self.__isEnabled = item.isPropertyEnabled(self.__propertyName)\n self.__updateFlags()\n\n # Notify model\n model = self.model()\n if model is not None:\n begin = self.index(column=0)\n end = self.index(column=1)\n model.dataChanged.emit(begin, end)" ]
[ "0.58494514", "0.57748735", "0.5706154", "0.57018065", "0.5463952", "0.5334079", "0.5326089", "0.53073055", "0.52202743", "0.5202958", "0.5202958", "0.51957154", "0.5162258", "0.5160728", "0.51254827", "0.5123745", "0.505808", "0.505808", "0.5051105", "0.50360453", "0.5002459", "0.49995157", "0.49921343", "0.49849084", "0.49843317", "0.49658424", "0.49590573", "0.49582252", "0.49552315", "0.4953944" ]
0.66525507
0
Posts a ticker message with details about the crypto
async def ticker(ctx, crypto: str, currency="USD"):
    info = getTicker(crypto, currency.lower())
    currency = currency.upper()
    if info:
        if currency in info["prices"]:
            em = discord.Embed(
                title='{} ({}-{})'.format(info["name"], info["symbol"], currency),
                description="""**Ranked:** #{} \n**Current price:** {} {} \n**Price changes**:\n{} in the past hour\n{} in the past 24 hours\n{} in the past 7 days""".format(
                    info["rank"],
                    str(roundValue(info["prices"][currency])),
                    currency,
                    info["percent_change_1h"],
                    info["percent_change_24h"],
                    info["percent_change_7d"]),
                colour=0x00FF00,
                timestamp=datetime.strptime(info["last_updated"], "%Y-%m-%dT%H:%M:%S.%fZ"))
            await ctx.send(embed=em)
        else:
            await ctx.send("I couldn't find a currency called: {}".format(currency))
    else:
        await ctx.send("I couldn't find a crypto called: {}".format(crypto))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def crypto(self, ctx, ticker: str):\n ticker = ticker.upper()\n api_endpoint = \"https://min-api.cryptocompare.com/data/pricemultifull\"\\\n f\"?tsyms=USD&fsyms={ticker}\"\n api_json = await self.bot.aiojson(api_endpoint)\n if \"Message\" in api_json:\n await ctx.send(f\"Error from API: `{api_json['Message']}`\")\n return\n\n raw_data = api_json[\"RAW\"][ticker][\"USD\"]\n stylized_data = api_json[\"DISPLAY\"][ticker][\"USD\"]\n\n change_color = self.get_change_color(raw_data[\"CHANGEPCTDAY\"], 10)\n\n data_timestamp = datetime.datetime.utcfromtimestamp(\n raw_data[\"LASTUPDATE\"])\n\n coin_name = await self.get_crypto_name(ticker)\n\n embed = discord.Embed(color=change_color, timestamp=data_timestamp)\n\n embed.set_author(name=f\"Price info for {coin_name} from {stylized_data['MARKET']}\")\n embed.set_footer(text=\"Price info supplied by CryptoCompare. \" + self.legal_notice)\n\n embed.add_field(name=\"Current Price\", value=stylized_data[\"PRICE\"])\n embed.add_field(name=\"Opening Price\", value=stylized_data[\"OPENDAY\"])\n\n embed.add_field(name=\"Change\", value=f\"{stylized_data['CHANGEDAY']} \"\\\n f\"({stylized_data['CHANGEPCTDAY']}%)\")\n embed.add_field(name=\"Volume\", value=stylized_data[\"VOLUMEDAY\"])\n\n embed.add_field(name=\"High\", value=stylized_data[\"HIGHDAY\"])\n embed.add_field(name=\"Low\", value=stylized_data[\"LOWDAY\"])\n\n await ctx.send(embed=embed)", "def _on_op_private_ticker(self, msg):\r\n msg = msg[\"ticker\"]\r\n if msg[\"sell\"][\"currency\"] != self.curr_quote:\r\n return\r\n if msg[\"item\"] != self.curr_base:\r\n return\r\n bid = int(msg[\"buy\"][\"value_int\"])\r\n ask = int(msg[\"sell\"][\"value_int\"])\r\n\r\n self.debug(\" tick: %s %s\" % (\r\n self.quote2str(bid),\r\n self.quote2str(ask)\r\n ))\r\n self.signal_ticker(self, (bid, ask))", "async def crypto(args: list, message: discord.Message):\n usage = \"usage: !crypto <BTC/ETH/LTC etc> [convert <USD/AUD/EUR/GBP etc.>]\"\n convert = str()\n if \"convert\" in args:\n idx = args.index(\"convert\")\n try:\n convert = args[idx + 1]\n except KeyError as e:\n await client.send_message(message.channel, content=usage)\n ticker = coinmarketcap.ticker(convert=convert)\n embed = discord.Embed()\n try:\n symbol = args[1].upper()\n except:\n symbol = \"BTC\"\n for currency in ticker:\n if currency[\"symbol\"] == symbol:\n embed.title = currency[\"name\"] + \" Price Data\"\n embed.type = \"rich\"\n unit = convert if len(convert) > 0 else \"USD\"\n embed.description = \"\"\"```\n Current Price: {:,} {}\n Market Cap: {:,} {}\n24H Trade Volume: {:,} {}\n Last Updated: {} ```\n \"\"\".format(\n float(currency[\"price_\" + unit.lower()]),\n unit,\n float(currency[\"market_cap_\" + unit.lower()]),\n unit,\n float(currency[\"24h_volume_\" + unit.lower()]),\n unit,\n datetime.datetime.fromtimestamp(int(currency[\"last_updated\"]))\n )\n await client.send_message(message.channel, embed=embed)", "def _send_market_price_request(self, ric_name):\n mp_req_json = {\n 'ID': 2,\n 'Key': {\n 'Name': ric_name,\n 'Service': service\n },\n }\n self.web_socket_app.send(json.dumps(mp_req_json))\n print(\"SENT on \" + self.session_name + \":\")\n print(json.dumps(mp_req_json, sort_keys=True, indent=2, separators=(',', ':')))", "def line_sent(price):\n now = datetime.datetime.now()\n LINE_ACCESS_TOKEN = \" \" # Line Token\n url = \"https://notify-api.line.me/api/notify\"\n print(\"[%02i:%02i:%02i] Price Change : Send Message\" % (now.hour, now.minute, now.second))\n message = \"[%02i:%02i:%02i] Now BTC Price : %s\" % 
(now.hour, now.minute, now.second, price)\n msg = urllib.parse.urlencode({\"message\":message})\n LINE_HEADERS = {'Content-Type':'application/x-www-form-urlencoded',\"Authorization\":\"Bearer \"+LINE_ACCESS_TOKEN}\n session = requests.Session()\n send = session.post(url, headers=LINE_HEADERS, data=msg)\n print(\"[%02i:%02i:%02i] \" % (now.hour, now.minute, now.second), end=\"\")\n print(send.text)", "async def btc( ctx):\r\n await ctx.message.delete()\r\n r = requests.get(\r\n \"https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD,EUR,GBP\"\r\n )\r\n r = r.json()\r\n usd = r[\"USD\"]\r\n eur = r[\"EUR\"]\r\n gbp = r[\"GBP\"]\r\n em = discord.Embed(\r\n description=f\"USD: `{str(usd)}$`\\n\\nEUR: `{str(eur)}€`\\n\\nGBP: `{str(gbp)}£`\"\r\n )\r\n em.set_author(\r\n name=\"Bitcoin\",\r\n icon_url=\"https://cdn.pixabay.com/photo/2013/12/08/12/12/bitcoin-225079_960_720.png\",\r\n )\r\n await ctx.send(embed=em)\r\n ### I hope this code is so horrible I'm never allowed to code embeds again\r", "def callback_crypto_currency_market_data(message):\n body = json.loads(message.body.decode('utf-8'))\n \n # routing_key have view: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n # mask: *.*.*.#\n message_type = message.routing_key.split('.')[0]\n data_id = '.'.join(message.routing_key.split('.')[1:])\n\n if message_type == 'update':\n for observer in self.subscribers.get(data_id):\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n elif message_type == 'starting':\n # if exist waiters, send data and move waiters in subscribers\n if not self.waiters_first_msg.get(data_id):\n return\n\n new_subscribers = []\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n new_subscribers.append(observer)\n\n # if not subscribers on this data_id, init new dict-value, else append to exist array\n subscribers = self.subscribers.get(data_id, None)\n if not subscribers and new_subscribers:\n self.subscribers[data_id] = new_subscribers\n asyncio.get_event_loop().create_task(self._send_message_for_subscribe(data_id))\n else:\n for new_subscriber in new_subscribers:\n if new_subscriber not in self.subscribers[data_id]:\n self.subscribers[data_id].append(new_subscriber)", "def ticker(Symbol='tBTCUSD', **params):\n endpoint = f'ticker/{Symbol}'\n return request(authenticate=False, version=2, endpoint=endpoint, method='GET', query_params=params)", "def send_btc_price(message):\n\n bot_token = TOKEN\n chat_id = ID\n sendText = 'https://api.telegram.org/bot' + bot_token + '/sendMessage?chat_id=' + chat_id + '&parse_mode=Markdown&text=' + message\n\n response = requests.get(sendText)\n\n return response", "def subscribe_ticker(self, symbol, update_handler=None):\n pass", "async def eth(ctx):\r\n await ctx.message.delete()\r\n r = requests.get(\r\n \"https://min-api.cryptocompare.com/data/price?fsym=ETH&tsyms=USD,EUR,GBP\"\r\n )\r\n r = r.json()\r\n usd = r[\"USD\"]\r\n eur = r[\"EUR\"]\r\n gbp = r[\"GBP\"]\r\n em = discord.Embed(\r\n description=f\"USD: `{str(usd)}$`\\nEUR: `{str(eur)}€`\\n\\nGBP: `{str(gbp)}£`\"\r\n )\r\n em.set_author(\r\n name=\"Ethereum\",\r\n 
icon_url=\"https://cdn.discordapp.com/attachments/271256875205525504/374282740218200064/2000px-Ethereum_logo.png\",\r\n )\r\n await ctx.send(embed=em)", "def create_ticker_channel(self, symbol: str) -> str:", "def OnRtnDepthMarketData(self, data: dict) -> None:\n current_date = data[\"TradingDay\"]\n current_time = data[\"UpdateTime\"]\n dt = datetime.strptime(\n f'{current_date}-{current_time}', \"%Y%m%d-%H:%M:%S\"\n )\n dt = CHINA_TZ.localize(dt)\n\n tick = TickData(\n symbol=data[\"SecurityID\"],\n exchange=EXCHANGE_TORA2VT[bytes.decode(data[\"ExchangeID\"])],\n datetime=dt,\n name=data[\"SecurityName\"],\n volume=0,\n open_interest=data[\"OpenInterest\"],\n last_price=data[\"LastPrice\"],\n last_volume=data[\"Volume\"],\n limit_up=data[\"UpperLimitPrice\"],\n limit_down=data[\"LowerLimitPrice\"],\n open_price=data[\"OpenPrice\"],\n high_price=data[\"HighestPrice\"],\n low_price=data[\"LowestPrice\"],\n pre_close=data[\"PreClosePrice\"],\n bid_price_1=data[\"BidPrice1\"],\n ask_price_1=data[\"AskPrice1\"],\n bid_volume_1=data[\"BidVolume1\"],\n ask_volume_1=data[\"AskVolume1\"],\n gateway_name=self.gateway_name\n )\n\n if data[\"BidVolume2\"] or data[\"AskVolume2\"]:\n tick.bid_price_2 = data[\"BidPrice2\"]\n tick.bid_price_3 = data[\"BidPrice3\"]\n tick.bid_price_4 = data[\"BidPrice4\"]\n tick.bid_price_5 = data[\"BidPrice5\"]\n\n tick.ask_price_2 = data[\"AskPrice2\"]\n tick.ask_price_3 = data[\"AskPrice3\"]\n tick.ask_price_4 = data[\"AskPrice4\"]\n tick.ask_price_5 = data[\"AskPrice5\"]\n\n tick.bid_volume_2 = data[\"BidVolume2\"]\n tick.bid_volume_3 = data[\"BidVolume3\"]\n tick.bid_volume_4 = data[\"BidVolume4\"]\n tick.bid_volume_5 = data[\"BidVolume5\"]\n\n tick.ask_volume_2 = data[\"AskVolume2\"]\n tick.ask_volume_3 = data[\"AskVolume3\"]\n tick.ask_volume_4 = data[\"AskVolume4\"]\n tick.ask_volume_5 = data[\"AskVolume5\"]\n\n self.gateway.on_tick(tick)", "def OnRtnDepthMarketData(self, data: dict) -> None:\n current_date = data[\"TradingDay\"]\n current_time = data[\"UpdateTime\"]\n dt = datetime.strptime(\n f'{current_date}-{current_time}', \"%Y%m%d-%H:%M:%S\"\n )\n # dt = CHINA_TZ.localize(dt)\n\n tick = TickData(\n symbol=data[\"SecurityID\"],\n exchange=EXCHANGE_TORA2VT[bytes.decode(data[\"ExchangeID\"])],\n datetime=dt,\n name=data[\"SecurityName\"],\n volume=0,\n open_interest=data[\"OpenInterest\"],\n last_price=data[\"LastPrice\"],\n last_volume=data[\"Volume\"],\n limit_up=data[\"UpperLimitPrice\"],\n limit_down=data[\"LowerLimitPrice\"],\n open_price=data[\"OpenPrice\"],\n high_price=data[\"HighestPrice\"],\n low_price=data[\"LowestPrice\"],\n pre_close=data[\"PreClosePrice\"],\n bid_price_1=data[\"BidPrice1\"],\n ask_price_1=data[\"AskPrice1\"],\n bid_volume_1=data[\"BidVolume1\"],\n ask_volume_1=data[\"AskVolume1\"],\n gateway_name=self.gateway_name\n )\n\n if data[\"BidVolume2\"] or data[\"AskVolume2\"]:\n tick.bid_price_2 = data[\"BidPrice2\"]\n tick.bid_price_3 = data[\"BidPrice3\"]\n tick.bid_price_4 = data[\"BidPrice4\"]\n tick.bid_price_5 = data[\"BidPrice5\"]\n\n tick.ask_price_2 = data[\"AskPrice2\"]\n tick.ask_price_3 = data[\"AskPrice3\"]\n tick.ask_price_4 = data[\"AskPrice4\"]\n tick.ask_price_5 = data[\"AskPrice5\"]\n\n tick.bid_volume_2 = data[\"BidVolume2\"]\n tick.bid_volume_3 = data[\"BidVolume3\"]\n tick.bid_volume_4 = data[\"BidVolume4\"]\n tick.bid_volume_5 = data[\"BidVolume5\"]\n\n tick.ask_volume_2 = data[\"AskVolume2\"]\n tick.ask_volume_3 = data[\"AskVolume3\"]\n tick.ask_volume_4 = data[\"AskVolume4\"]\n tick.ask_volume_5 = 
data[\"AskVolume5\"]\n\n self.gateway.on_tick(tick)", "def subscribe_ticker(self, cancel=False, custom_id=None, **params):\n method = 'subscribeTicker'\n if cancel:\n method = 'un' + method\n self.conn.send(method, custom_id=custom_id, **params)", "async def watch_ticker(self, symbol: str, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n instrumentName = market['id']\n if market['spot']:\n instrumentName = market['baseId'] + '-' + market['quoteId']\n url = self.urls['api']['ws']\n messageHash = 'ticker:' + symbol\n request = {\n 'jsonrpc': '2.0',\n 'id': self.request_id(),\n 'method': '/public/subscribe',\n 'params': {\n 'channels': [\n 'ticker.' + instrumentName + '.raw',\n ],\n },\n }\n request = self.deep_extend(request, params)\n return await self.watch(url, messageHash, request, messageHash)", "def tapi(self,method,argc,**kwargs):\n url = self.btce_trade_url + argc + '/'\n kwargs['nonce'] = str(int(time.time()))\n kwargs['method'] = argc\n body = urllib.urlencode(kwargs)\n sign = self.hash_tapi( body )\n headers = dict( Sign = sign, Key = self.trade_key )\n if method == 'POST':\n response = requests.post( url,\n data = body,\n headers = headers,\n )\n elif method == 'GET':\n response = requests.get( url,\n headers = headers,\n )\n return response.text", "async def btc(self, ctx):\n try:\n btc_bitstamp_json = await self.bot.aiojson(\"https://www.bitstamp.net/api/ticker\")\n\n btc_currentprice_rate = Decimal(btc_bitstamp_json[\"last\"])\n btc_currentprice_string = self.format_currency(btc_currentprice_rate)\n\n btc_lastopen_rate = Decimal(btc_bitstamp_json[\"open\"])\n btc_lastopen_string = self.format_currency(btc_lastopen_rate)\n\n btc_high_string = self.format_currency(btc_bitstamp_json[\"high\"])\n btc_low_string = self.format_currency(btc_bitstamp_json[\"low\"])\n btc_bid_string = self.format_currency(btc_bitstamp_json[\"bid\"])\n btc_ask_string = self.format_currency(btc_bitstamp_json[\"ask\"])\n btc_volume_string = str(btc_bitstamp_json[\"volume\"]) + \" BTC\"\n\n btc_diff = btc_currentprice_rate - btc_lastopen_rate\n btc_change_percentage = (\n 100 * Decimal(btc_diff) / Decimal(btc_currentprice_rate))\n btc_change_percentage_string = f\"{str(btc_change_percentage)[:6]}%\"\n\n btc_change_color = self.get_change_color(btc_change_percentage, 10)\n\n btc_data_timestamp = datetime.datetime.utcfromtimestamp(\n int(btc_bitstamp_json[\"timestamp\"]))\n\n link = \"https://bitcoincharts.com/charts/chart.png?width=600&m=bitstampUSD&r=30\"\\\n f\"&t=S&v=1&cacheinval={int(time.time())}\"\n embed = discord.Embed(color=btc_change_color,\n timestamp=btc_data_timestamp)\n\n embed.set_author(name=\"30 Day BTC Chart and Info\",\n icon_url=\"https://bitcoin.org/img/icons/opengraph.png\")\n embed.set_image(url=link)\n embed.set_footer(text=\"Chart supplied by bitcoincharts.com under CC-BY-SA 3.0, \"\\\n \"price info supplied by BitStamp. 
\" + self.legal_notice)\n\n embed.add_field(name=\"Current Price\", value=btc_currentprice_string)\n embed.add_field(name=\"Opening Price\", value=btc_lastopen_string)\n\n embed.add_field(name=\"Change\", value=btc_change_percentage_string)\n embed.add_field(name=\"Volume\", value=btc_volume_string)\n\n embed.add_field(name=\"High\", value=btc_high_string)\n embed.add_field(name=\"Low\", value=btc_low_string)\n\n embed.add_field(name=\"Bid\", value=btc_bid_string)\n embed.add_field(name=\"Ask\", value=btc_ask_string)\n\n await ctx.send(embed=embed)\n except:\n await ctx.send(\"Error while fetching BTC data.\")\n self.bot.log.error(traceback.format_exc())", "def wepbuy(self):\n\t\tthismsg = \"\\r\\n\"+self.ESC+\"14C\"+self.ESC+\"1;34m\"+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.ESC+\"0;34m\"+self.A220+self.ESC+\"1m\"+self.A220+self.A220+self.A220+self.A220+self.ESC+\"0;34m\"+self.A220+self.ESC+\"1m\"+self.A220+self.ESC+\"0;34m\"+self.A220+self.A220+self.ESC+\"1m\"+self.A220+self.ESC+\"0;34m\"+self.A220+self.ESC+\"1m\"+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"46m\"+self.A178+self.ESC+\"40m\"+self.A223+self.ESC+\"2C\"+self.ESC+\"37m The\"+self.ESC+\"CSaga\"+self.ESC+\"Cof\"+self.ESC+\"Cthe\"+self.ESC+\"CRed\"+self.ESC+\"CDragon\"+self.ESC+\"C-\"+self.ESC+\"C\"+self.ESC+\"34mWeapons\"+self.ESC+\"CList \"+self.ESC+\"C\"+self.A220+self.ESC+\"46m\"+self.A178+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"46m\"+self.A178+self.ESC+\"44m\"+self.A219+self.ESC+\"40m\"+self.A223+self.A223+self.A223+self.A223+self.ESC+\"0;34m\"+self.A223+self.ESC+\"1m\"+self.A223+self.A223+self.A223+self.ESC+\"0;34m\"+self.A223+self.ESC+\"1m\"+self.A223+self.A223+self.ESC+\"0;34m\"+self.A223+self.ESC+\"1m\"+self.A223+self.ESC+\"0;34m\"+self.A223+self.A223+self.ESC+\"1m\"+self.A223+self.ESC+\"0;34m\"+self.A223+self.ESC+\"1m\"+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.A223+self.ESC+\"46m\"+self.A178+self.A178+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"46m\"+self.A219+self.ESC+\"2C\"+self.ESC+\"0;36mWeapons\"+self.ESC+\"27CPrice\"+self.ESC+\"7C\"+self.ESC+\"1;34;44m\"+self.A219+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"44m\"+self.A219+self.ESC+\"2C\"+self.ESC+\"40m1.\"+self.ESC+\"CStick\"+self.ESC+\"0;34m..................................\"+self.ESC+\"36m200\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A219+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"44m\"+self.A219+self.ESC+\"2C\"+self.ESC+\"40m2.\"+self.ESC+\"CDagger\"+self.ESC+\"0;34m...............................\"+self.ESC+\"36m1,000\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A219+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += 
self.ESC+\"14C\"+self.ESC+\"44m\"+self.A178+self.ESC+\"2C\"+self.ESC+\"40m3.\"+self.ESC+\"CShort\"+self.ESC+\"CSword\"+self.ESC+\"0;34m..........................\"+self.ESC+\"36m3,000\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A178+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"44m\"+self.A177+self.ESC+\"2C\"+self.ESC+\"40m4.\"+self.ESC+\"CLong\"+self.ESC+\"CSword\"+self.ESC+\"0;34m..........................\"+self.ESC+\"36m10,000\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A177+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"44m\"+self.A176+self.ESC+\"2C\"+self.ESC+\"40m5.\"+self.ESC+\"CHuge\"+self.ESC+\"CAxe\"+self.ESC+\"0;34m............................\"+self.ESC+\"36m30,000\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A176+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.ESC+\"0;34m\"+self.A223+self.ESC+\"2C\"+self.ESC+\"1m6.\"+self.ESC+\"CBone\"+self.ESC+\"CCruncher\"+self.ESC+\"0;34m......................\"+self.ESC+\"36m100,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A223+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A176+self.ESC+\"2C\"+self.ESC+\"1m7.\"+self.ESC+\"CTwin\"+self.ESC+\"CSwords\"+self.ESC+\"0;34m........................\"+self.ESC+\"36m150,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A176+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A177+self.ESC+\"2C\"+self.ESC+\"1m8.\"+self.ESC+\"CPower\"+self.ESC+\"CAxe\"+self.ESC+\"0;34m..........................\"+self.ESC+\"36m200,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A177+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A178+self.ESC+\"2C\"+self.ESC+\"1m9.\"+self.ESC+\"CAble's\"+self.ESC+\"CSword\"+self.ESC+\"0;34m.......................\"+self.ESC+\"36m400,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A178+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A219+self.ESC+\"C\"+self.ESC+\"1m10.\"+self.ESC+\"CWan's\"+self.ESC+\"CWeapon\"+self.ESC+\"0;34m.....................\"+self.ESC+\"36m1,000,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A219+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A223+self.ESC+\"C\"+self.ESC+\"1m11.\"+self.ESC+\"CSpear\"+self.ESC+\"COf\"+self.ESC+\"CGold\"+self.ESC+\"0;34m....................\"+self.ESC+\"36m4,000,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A219+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A219+self.ESC+\"C\"+self.ESC+\"1m12.\"+self.ESC+\"CCrystal\"+self.ESC+\"CShard\"+self.ESC+\"0;34m...................\"+self.ESC+\"36m10,000,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A220+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A219+self.ESC+\"C\"+self.ESC+\"1m13.\"+self.ESC+\"CNiras's\"+self.ESC+\"CTeeth\"+self.ESC+\"0;34m...................\"+self.ESC+\"36m40,000,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A219+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A219+self.ESC+\"C\"+self.ESC+\"1m14.\"+self.ESC+\"CBlood\"+self.ESC+\"CSword\"+self.ESC+\"0;34m....................\"+self.ESC+\"36m100,000,000\"+self.ESC+\"C\"+self.ESC+\"34m\"+self.A219+\"\\r\\n\"\n\t\tthismsg += self.ESC+\"14C\"+self.A219+self.ESC+\"C\"+self.ESC+\"1m15.\"+self.ESC+\"CDeath\"+self.ESC+\"CSword\"+self.ESC+\"0;34m....................\"+self.ESC+\"36m400,000,000\"+self.ESC+\"C\"+self.ESC+\"1;34;44m\"+self.A176+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += 
self.ESC+\"14C\"+self.ESC+\"44m\"+self.A176+self.ESC+\"0;34m\"+self.A219+self.A220+self.A220+self.A220+self.A220+self.A220+self.A178+self.ESC+\"C\"+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A220+self.A178+self.ESC+\"C\"+self.A220+self.A220+self.A220+self.ESC+\"C\"+self.A220+self.A220+self.ESC+\"C\"+self.A220+self.A176+self.ESC+\"C\"+self.A220+self.ESC+\"C\"+self.A220+self.A220+self.A220+self.A220+self.A178+self.A220+self.A220+self.A220+self.A220+self.A176+self.A220+self.A220+self.A219+self.ESC+\"1;44m\"+self.A176+self.ESC+\"40m\\r\\n\"\n\t\tthismsg += self.ESC+\"0m\\r\\n\";\n\t\treturn thismsg", "def ticket_submit_callback(self, data):\n self.output('staged order ticket submitted: %s' % repr(data))", "def ticker(self, symbol, **kwargs):\n pass", "def create_book_ticker_channel(self, symbol: str) -> str:", "async def stock(self, ctx, ticker: str):\n symbols = await self.bot.aiojson(\"https://api.robinhood.com/quotes/\"\\\n f\"?symbols={ticker.upper()}\")\n if not symbols:\n await ctx.send(\"Stock not found. This stock is probably not tradeable on robinhood.\")\n return\n symbols_result = symbols[\"results\"][0]\n instrument = await self.bot.aiojson(symbols_result[\"instrument\"])\n fundamentals = await self.bot.aiojson(\n f\"https://api.robinhood.com/fundamentals/{ticker.upper()}/\")\n\n current_price = (symbols_result[\"last_trade_price\"] if\n \"last_extended_hours_trade_price\" in symbols_result\n else symbols_result[\"last_extended_hours_trade_price\"])\n diff = Decimal(Decimal(current_price) -\n Decimal(symbols_result[\"previous_close\"]))\n percentage = str(100 * diff / Decimal(current_price))[:6]\n\n if not percentage.startswith(\"-\"):\n percentage = \"+\" + percentage\n\n current_price_string = self.format_currency(current_price)\n diff_string = self.format_currency(diff)\n bid_price_string = self.format_currency(Decimal(symbols_result[\"bid_price\"]))\n ask_price_string = self.format_currency(Decimal(symbols_result[\"ask_price\"]))\n tradeable_string = (\n \":white_check_mark:\" if instrument[\"tradeable\"] else \":x:\")\n\n update_timestamp = parser.parse(symbols_result[\"updated_at\"])\n\n symbol = symbols_result[\"symbol\"]\n change_color = await self.get_stock_change_color(symbol)\n\n embed = discord.Embed(title=f\"{symbol}'s stocks info\",\n color=change_color,\n timestamp=update_timestamp)\n\n embed.add_field(name=\"Name\", value=instrument[\"name\"])\n embed.add_field(name=\"Current Price\", value=current_price_string)\n embed.add_field(name=\"Change from yesterday\", value=f\"{diff_string} ({percentage}%)\")\n embed.add_field(name=\"Bid size\", value=f\"{symbols_result['bid_size']} ({bid_price_string})\")\n embed.add_field(name=\"Ask size\", value=f\"{symbols_result['ask_size']} ({ask_price_string})\")\n embed.add_field(name=\"Current Volume\", value=fundamentals[\"volume\"])\n embed.add_field(name=\"Average Volume\", value=fundamentals[\"average_volume\"])\n embed.add_field(name=\"Tradeable on Robinhood\", value=tradeable_string)\n embed.add_field(name=\"Country\", value=f\":flag_{instrument['country'].lower()}:\")\n\n await ctx.send(embed=embed)", "def ticker(self, ticker: str):\n\n self._ticker = ticker", "async def fancysay(self, ctx):", "def ticker(self):\n response = self.query('ticker')\n return response", "async def watch_ticker(self, symbol: str, params={}):\n await self.load_markets()\n market = self.market(symbol)\n symbol = market['symbol']\n name = 'rate'\n messageHash = name + 
'_' + market['id'].lower()\n url = self.urls['api']['ws'] + messageHash + '/' + self.options['uuid']\n subscription = {\n 'name': name,\n 'symbol': symbol,\n 'messageHash': messageHash,\n 'method': self.handle_ticker,\n }\n return await self.watch(url, messageHash, None, messageHash, subscription)", "def message(self, key):\n msg = '[ensime] ' + feedback[key]\n self.raw_message(msg)", "def ticker(self, ticker):\n\n self._ticker = ticker", "def create_tickers_channel(self) -> str:" ]
[ "0.6873399", "0.620606", "0.5890276", "0.5863005", "0.5817372", "0.58143044", "0.5677197", "0.5675489", "0.5656747", "0.5605821", "0.5585458", "0.5579567", "0.53901786", "0.53835154", "0.5364867", "0.5361006", "0.5358236", "0.5357463", "0.53430057", "0.5317439", "0.53008544", "0.52989143", "0.5298397", "0.5273488", "0.5262352", "0.52376425", "0.52288663", "0.52220243", "0.52152044", "0.52078986" ]
0.6571176
1
Posts an error message in case of an error with the ticker method.
async def ticker_error(ctx, error): print(error) if isinstance(error, commands.UserInputError): await ctx.send("Invalid input.") else: await ctx.send("Oops, something bad happened..")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error(self, msg, *args, **kwargs):\n pass", "def error(self, *args, **kwargs):\n\n message = self.get_message(*args, **kwargs)\n self.logger.error(message)", "def on_error(data):\n print('Market Data Error', data)", "def error(self, *args, **kwargs):\n self.msg(logging.ERROR, *args, **kwargs)", "def error_callback(bot, update, error):\n if isinstance(error, TelegramError):\n raise error # raise it for more sentry verbosity", "def error(self, tag, message, exc_info=False):\n \n self.log(logging.error,tag, message, exc_info)", "def send_error(self, conn, msg):\n print(\"ERROR PLACEHOLDER\")\n\n return", "def error(self, msg):\r\n self.logger.error(msg)", "def error ( self , message , *args , **kwargs ) :\n return self.logger.error ( message , *args , **kwargs )", "def error(_bot, update, error_):\n logger.warning('Update \"%s\" caused error \"%s\"', update, error_)", "def error(update: Update, context: CallbackContext):\n update.message.reply_text('Sorry! I don\\'t understand you!')", "def _error(msg):\n\n error(None, msg)", "def error(tag, message=None):\n Log._post(\"error\", tag, message)", "def error(self, msg):\n\n self.logger.error(msg)", "def queue_error(action, error_message):\n global ERRORS\n print(\"Error while {}: {}\".format(action, error_message))\n ERRORS[action] = error_message", "def error(self, msg):\n self.send_command('error', {\n 'msg': msg,\n })", "def error(self, message):\n return self.log(\"ERROR\", message)", "def error(self, error):\n pass", "def error(self, msg):\n\n self(msg, ERROR)", "async def ticket_error(self, ctx, error):\n embed: Embed = settings.get_ticket_error_embed()\n\n embed.set_footer(text=embed.footer.text,\n icon_url=self.bot.user.avatar_url)\n \n if isinstance(error, commands.MissingRequiredArgument):\n embed.description = f\"\\nUse **!ticket <ticketpanelname>**\"\n else:\n embed.description = f\"\\nYou don't have permissions for executing this command.\"\n\n await ctx.send(embed=embed)", "def error(self, msg, *args, **kwargs):\n self._logger.error(msg, *args, **kwargs)", "def error_cb(bot, update, error):\n logger.error(f\"Update {update} caused error {error}\")", "def error(update, context):\n logger.warning(f'caused error {context.error}')", "def error(self, msg: str):\n self._logger.error(msg)", "def error(msg):\n\n raise Exception(msg)", "def error(update: Update, context: CallbackContext):\n logging.warning('Update \"%s\" caused error \"%s\"', update, context.error)", "def error(self, *args, **kwargs):", "def error(bot, update, error):\n logger.warning('Update \"%s\" caused error \"%s\"', update, error)", "def error(bot, update, error):\n logger.warning('Update \"%s\" caused error \"%s\"', update, error)", "def error(bot, update, error):\n logger.warning('Update \"%s\" caused error \"%s\"', update, error)" ]
[ "0.6577208", "0.64844203", "0.64791065", "0.6470918", "0.6456576", "0.6378036", "0.63485813", "0.6337225", "0.6325072", "0.63216925", "0.6306876", "0.6291794", "0.6276781", "0.6269892", "0.6268414", "0.6248454", "0.62467206", "0.62421304", "0.62101525", "0.6194302", "0.6172539", "0.61659086", "0.6164166", "0.61621153", "0.61589897", "0.6155875", "0.6152967", "0.61500627", "0.61500627", "0.61500627" ]
0.7260296
0
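As context for this record, a handler with this signature is normally registered through discord.py's per-command `error` decorator. The sketch below shows that wiring only; the `ticker` command body and the bot setup are hypothetical stand-ins, not part of the dataset row.

```python
# Minimal discord.py sketch: how an error handler with this signature is
# usually attached to a command. The "ticker" command body is hypothetical.
import discord
from discord.ext import commands

bot = commands.Bot(command_prefix="!", intents=discord.Intents.default())

@bot.command()
async def ticker(ctx, symbol: str):
    await ctx.send(f"Looking up {symbol}...")

@ticker.error  # discord.py routes exceptions raised by the ticker command here
async def ticker_error(ctx, error):
    print(error)
    if isinstance(error, commands.UserInputError):
        await ctx.send("Invalid input.")
    else:
        await ctx.send("Oops, something bad happened..")
```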
Posts an error message in case of an error with the convert method.
async def convert_error(ctx, error): print(error) if isinstance(error, commands.UserInputError): await ctx.send("Invalid input.") else: await ctx.send("Oops, something bad happened..")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _error(msg):\n\n error(None, msg)", "def error(self, msg, *args, **kwargs):\n pass", "def error(self, message=None, show_help=True):", "def sendErrorMessage(msg): #@NoSelf", "def error(self, error):\n pass", "def error(self, *args, **kwargs):", "def handle_validation_error(self, error, bundle_errors):\n \n error_str = six.text_type(error)\n error_msg = self.help.format(error_msg=error_str) if self.help else error_str\n msg = {self.name: error_msg}\n\n if bundle_errors:\n return error, msg\n flask_restful.abort(400, message=msg)", "def set_error_message(msg):\n set_message(msg, TYPE_ERROR)", "def error(self, msg, transfers):\n self.validation_exceptions.extend(self._create_exceptions(msg, transfers, ValidationType.ERROR))", "def _get_submission_error(domain, instance_xml, error, auth_context):\n try:\n message = str(error)\n except UnicodeDecodeError:\n message = str(str(error), encoding='utf-8')\n\n xform = FormProcessorInterface(domain).submission_error_form_instance(instance_xml, message)\n xform.auth_context = auth_context\n return FormProcessingResult(xform)", "def _convert(string, type, message):\n try:\n return type(string)\n except ValueError as e:\n print(e)\n raise CharmmPSFError('Could not convert %s' % message)", "def handle_invalid_arguments(e):\n errors = e.message\n return generic_errors(errors, code=400)", "def _err_response(self, msg):\r\n return {'success': False, 'error': msg}", "def error(self, msg):\n\n self(msg, ERROR)", "def _r_send_error(self, result, protocol):\n error = result.value\n if not isinstance(error, MessageHandleError):\n raise error\n print(\"Error occurred: %s\" % result)\n msgid = None\n if error.original_message is not None:\n msgid = error.original_message.id\n msg = ResponseMessage(result_code=error.error_code, response_to=msgid, result=error.error_details)\n protocol.send_message(create_message_string(msg))", "def error(self, message):\n return self.log(\"ERROR\", message)", "def error(msg):\n\n raise Exception(msg)", "def __call__(self, *args, **kwargs):\r\n return self.error(*args, **kwargs)", "def error(self, msg):\n self.send_command('error', {\n 'msg': msg,\n })", "def sendErrorMessage(self, msg):\r\n self.sendMessage({'type' : types.ERROR, 'data' : msg})", "def handle_err(self):\n pass", "def postErrorMessage(self, message):\n self.postMessage(message)", "def send_error(self, conn, msg):\n print(\"ERROR PLACEHOLDER\")\n\n return", "def message_error(self, m):\n self.message(m, logging.ERROR)", "def error_handler(num, err):\n print(\"Error in input {}\".format(num))\n err = err.decode()\n raise Exception(err)", "def handle_error(self, data, **kwargs):\n logger.log_err(str(data))", "def error_wrapper(error, errorClass):\n http_status = 0\n if error.check(TwistedWebError):\n xml_payload = error.value.response\n if error.value.status:\n http_status = int(error.value.status)\n else:\n error.raiseException()\n if http_status >= 400:\n if not xml_payload:\n error.raiseException()\n try:\n fallback_error = errorClass(\n xml_payload, error.value.status, str(error.value),\n error.value.response)\n except (ParseError, AWSResponseParseError):\n error_message = http.RESPONSES.get(http_status)\n fallback_error = TwistedWebError(\n http_status, error_message, error.value.response)\n raise fallback_error\n elif 200 <= http_status < 300:\n return str(error.value)\n else:\n error.raiseException()", "def error(self, code, msg):\r\n self.status = code\r\n self.status_message = str(msg)", "def finish_error(self, msg: str = 'Unknown error'):\r\n\r\n 
self._is_error = True\r\n self._error_msg = msg", "def parse_error(self, message, exc_cls=VisualizerParseError):\n raise exc_cls(\"Error parsing %s '%s' (%s:%i): %s\" % \n (self.tag, self.ref, self.filename, self.lineno, message))" ]
[ "0.60691553", "0.60344666", "0.6002406", "0.6001901", "0.5961509", "0.59537053", "0.5941145", "0.59021723", "0.5863523", "0.58317477", "0.5790797", "0.5720247", "0.57159305", "0.5714528", "0.5691131", "0.5669286", "0.5664704", "0.56534857", "0.56486344", "0.5642917", "0.5634696", "0.56154305", "0.56096286", "0.56024224", "0.56023157", "0.55669904", "0.55662733", "0.5563288", "0.55530196", "0.5544" ]
0.6336882
0
Benchmark fully interacting system versus alchemically-modified system.
def run_benchmark(): import argparse parser = argparse.ArgumentParser(description='Benchmark alchemically modified system against unmodified system.') parser.add_argument('--platform', dest='platform_name', action='store', default=None, help='platform name to benchmark (default: None)') options = parser.parse_args() from sams.tests import testsystems for testsystem_name in ['AblImatinibExplicitAlchemical']: cls = getattr(testsystems, testsystem_name) testsystem = cls() factory_args = { 'ligand_atoms' : testsystem.alchemical_atoms, 'receptor_atoms' : range(0,4266) } benchmark(testsystem.system, testsystem.positions, platform_name=options.platform_name, nsteps=5000, timestep=1.0*unit.femtoseconds, factory_args=factory_args)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def faster(self):\n self.run_command('faster')", "def slower(self):\n self.run_command('slower')", "def speed():\r\n\r\n algo = ['logistic_sgd', 'logistic_cg', 'mlp', 'convolutional_mlp',\r\n 'dA', 'SdA', 'DBN', 'rbm', 'rnnrbm']\r\n to_exec = [True] * len(algo)\r\n# to_exec = [False] * len(algo)\r\n# to_exec[-1] = True\r\n do_float64 = True\r\n do_float32 = True\r\n do_gpu = True\r\n\r\n algo_executed = [s for idx, s in enumerate(algo) if to_exec[idx]]\r\n #Timming expected are from the buildbot that have an i7-920 @\r\n # 2.67GHz with hyperthread enabled for the cpu, 12G of ram. An GeForce GTX\r\n # 285 for the GPU. OS=Fedora 14, gcc=4.5.1, python/BLAS from EPD\r\n # 7.1-2 (python 2.7.2, mkl unknow). BLAS with only 1 thread.\r\n\r\n expected_times_64 = numpy.asarray([10.0, 22.5, 76.1, 73.7, 116.4,\r\n 346.9, 381.9, 558.1, 186.3])\r\n expected_times_32 = numpy.asarray([11.6, 29.6, 42.5, 66.5, 71,\r\n 191.2, 226.8, 432.8, 176.2])\r\n\r\n # Number with just 1 decimal are new value that are faster with\r\n # the Theano version 0.5rc2 Other number are older. They are not\r\n # updated, as we where faster in the past!\r\n # TODO: find why and fix this!\r\n\r\n# Here is the value for the buildbot on February 3th 2012.\r\n# sgd, cg mlp conv da\r\n# sda dbn rbm\r\n# gpu times[3.72957802, 9.94316864, 29.1772666, 9.13857198, 25.91144657,\r\n# 18.30802011, 53.38651466, 285.41386175]\r\n# expected [3.076634879, 7.555234910, 18.99226785, 9.58915591, 24.130070450,\r\n# 24.77524018, 92.66246653, 322.340329170]\r\n# sgd, cg mlp conv da\r\n# sda dbn rbm\r\n#expected/get [0.82492841, 0.75984178, 0.65092691, 1.04930573, 0.93125138\r\n# 1.35324519 1.7356905 1.12937868]\r\n expected_times_gpu = numpy.asarray([3.07663488, 7.55523491, 18.99226785,\r\n 9.6, 24.13007045,\r\n 20.4, 56, 302.6, 315.4])\r\n expected_times_64 = [s for idx, s in enumerate(expected_times_64)\r\n if to_exec[idx]]\r\n expected_times_32 = [s for idx, s in enumerate(expected_times_32)\r\n if to_exec[idx]]\r\n expected_times_gpu = [s for idx, s in enumerate(expected_times_gpu)\r\n if to_exec[idx]]\r\n\r\n def time_test(m, l, idx, f, **kwargs):\r\n if not to_exec[idx]:\r\n return\r\n print algo[idx]\r\n ts = m.call_time\r\n try:\r\n f(**kwargs)\r\n except Exception, e:\r\n print >> sys.stderr, 'test', algo[idx], 'FAILED', e\r\n l.append(numpy.nan)\r\n return\r\n te = m.call_time\r\n l.append(te - ts)\r\n\r\n def do_tests():\r\n m = theano.compile.mode.get_default_mode()\r\n l = []\r\n time_test(m, l, 0, logistic_sgd.sgd_optimization_mnist, n_epochs=30)\r\n time_test(m, l, 1, logistic_cg.cg_optimization_mnist, n_epochs=30)\r\n time_test(m, l, 2, mlp.test_mlp, n_epochs=5)\r\n time_test(m, l, 3, convolutional_mlp.evaluate_lenet5, n_epochs=5,\r\n nkerns=[5, 5])\r\n time_test(m, l, 4, dA.test_dA, training_epochs=2,\r\n output_folder='tmp_dA_plots')\r\n time_test(m, l, 5, SdA.test_SdA, pretraining_epochs=1,\r\n training_epochs=2, batch_size=300)\r\n time_test(m, l, 6, DBN.test_DBN, pretraining_epochs=1,\r\n training_epochs=2, batch_size=300)\r\n time_test(m, l, 7, rbm.test_rbm, training_epochs=1, batch_size=300,\r\n n_chains=1, n_samples=1, output_folder='tmp_rbm_plots')\r\n time_test(m, l, 8, rnnrbm.test_rnnrbm, num_epochs=1)\r\n return numpy.asarray(l)\r\n\r\n #test in float64 in FAST_RUN mode on the cpu\r\n import theano\r\n if do_float64:\r\n theano.config.floatX = 'float64'\r\n theano.config.mode = 'FAST_RUN'\r\n float64_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float64 times', float64_times\r\n 
print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n\r\n #test in float32 in FAST_RUN mode on the cpu\r\n theano.config.floatX = 'float32'\r\n if do_float32:\r\n float32_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n\r\n if do_float64:\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr\r\n print >> sys.stderr, 'Duplicate the timing to have everything in one place'\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'float64 times', float64_times\r\n print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr, 'expected float64/float32', (\r\n expected_times_64 / float32_times)\r\n\r\n #test in float32 in FAST_RUN mode on the gpu\r\n import theano.sandbox.cuda\r\n if do_gpu:\r\n theano.sandbox.cuda.use('gpu')\r\n gpu_times = do_tests()\r\n print >> sys.stderr, algo_executed\r\n print >> sys.stderr, 'gpu times', gpu_times\r\n print >> sys.stderr, 'gpu expected', expected_times_gpu\r\n print >> sys.stderr, 'gpu % expected/get', (\r\n expected_times_gpu / gpu_times)\r\n\r\n if do_float64:\r\n print >> sys.stderr, 'float64/gpu', float64_times / gpu_times\r\n\r\n if (do_float64 + do_float32 + do_gpu) > 1:\r\n print >> sys.stderr\r\n print >> sys.stderr, 'Duplicate the timing to have everything in one place'\r\n print >> sys.stderr, algo_executed\r\n if do_float64:\r\n print >> sys.stderr, 'float64 times', float64_times\r\n print >> sys.stderr, 'float64 expected', expected_times_64\r\n print >> sys.stderr, 'float64 % expected/get', (\r\n expected_times_64 / float64_times)\r\n if do_float32:\r\n print >> sys.stderr, 'float32 times', float32_times\r\n print >> sys.stderr, 'float32 expected', expected_times_32\r\n print >> sys.stderr, 'float32 % expected/get', (\r\n expected_times_32 / float32_times)\r\n if do_gpu:\r\n print >> sys.stderr, 'gpu times', gpu_times\r\n print >> sys.stderr, 'gpu expected', expected_times_gpu\r\n print >> sys.stderr, 'gpu % expected/get', (\r\n expected_times_gpu / gpu_times)\r\n\r\n if do_float64 and do_float32:\r\n print >> sys.stderr, 'float64/float32', (\r\n float64_times / float32_times)\r\n print >> sys.stderr, 'expected float64/float32', (\r\n expected_times_64 / float32_times)\r\n if do_float64 and do_gpu:\r\n print >> sys.stderr, 'float64/gpu', float64_times / gpu_times\r\n print >> sys.stderr, 'expected float64/gpu', (\r\n expected_times_64 / gpu_times)\r\n if do_float32 and do_gpu:\r\n print >> sys.stderr, 'float32/gpu', float32_times / gpu_times\r\n print >> sys.stderr, 'expected float32/gpu', (\r\n expected_times_32 / gpu_times)\r\n\r\n def compare(x, y):\r\n ratio = x / y\r\n # If there is more then 5% difference between the expected\r\n # time and the real time, we consider this an error.\r\n return sum((ratio < 0.95) + (ratio > 1.05))\r\n\r\n if do_float64:\r\n err 
= compare(expected_times_64, float64_times)\r\n print >> sys.stderr, 'speed_failure_float64=' + str(err)\r\n if do_float32:\r\n err = compare(expected_times_32, float32_times)\r\n print >> sys.stderr, 'speed_failure_float32=' + str(err)\r\n if do_gpu:\r\n err = compare(expected_times_gpu, gpu_times)\r\n print >> sys.stderr, 'speed_failure_gpu=' + str(err)\r\n\r\n assert not numpy.isnan(gpu_times).any()", "def execute(self):\n print_verbose_messages = (self.verbose\n and self.device.communicator.rank == 0)\n\n # Ensure that all ops are attached (needed for is_tuning_complete).\n self.run(0)\n\n if print_verbose_messages:\n print(f'Running {type(self).__name__} benchmark')\n\n if print_verbose_messages:\n print(f'.. warming up for {self.warmup_steps} steps')\n self.run(self.warmup_steps)\n\n if (isinstance(self.device, hoomd.device.GPU)\n and hasattr(self.sim.operations, 'is_tuning_complete')):\n while not self.sim.operations.is_tuning_complete:\n if print_verbose_messages:\n print('.. autotuning GPU kernel parameters for '\n f'{self.warmup_steps} steps')\n self.run(self.warmup_steps)\n\n if print_verbose_messages:\n print(f'.. running for {self.benchmark_steps} steps '\n f'{self.repeat} time(s)')\n\n # benchmark\n performance = []\n\n if isinstance(self.device, hoomd.device.GPU):\n with self.device.enable_profiling():\n for i in range(self.repeat):\n self.run(self.benchmark_steps)\n performance.append(self.get_performance())\n if print_verbose_messages:\n print(f'.. {performance[-1]} {self.units}')\n else:\n for i in range(self.repeat):\n self.run(self.benchmark_steps)\n performance.append(self.get_performance())\n if print_verbose_messages:\n print(f'.. {performance[-1]} {self.units}')\n\n return performance", "def test_compare(self):\n config = {\n 'num_components': 512,\n 'num_features': 128,\n 'covariance': 'spherical'\n }\n\n samples = self.generate_samples(config, 100_000)\n sklearn_time = np.mean([self.train_sklearn(config, samples) for _ in range(3)])\n ours_cpu_time = np.mean([self.train_ours(config, samples) for _ in range(3)])\n ours_gpu_time = np.mean([\n self.train_ours(config, samples.cuda(), gpu=True) for _ in range(3)\n ])\n\n print(f\"-------------------------------------\")\n print(f\"Speedup of CPU implementation: {sklearn_time / ours_cpu_time:.2f}\")\n print(f\"Speedup of GPU implementation: {sklearn_time / ours_gpu_time:.2f}\")\n print(f\"-------------------------------------\")", "def benchmark(self):\n logger.info(self.benchmark.__doc__)\n return self.run(self.benchmark_profile())", "def faster():\n try:\n ttsEng.faster()\n except Exception, e:\n logging.error(e)", "def performance_test():\n from timeit import Timer\n t = Timer(\"test()\", \"from __main__ import test\")\n print t.timeit(number=1)", "def benchmark(nx, tstop):\n Lx = 10\n Ly = 10\n c = 1.0\n ny = nx\n\n # our use of weave requires string formulas:\n Is = StringFunction('exp(-pow(x-Lx/2.0,2)/2.0 -pow(y-Ly/2.0,2)/2.0)',\n independent_variables=('x','y'),\n Lx=Lx, Ly=Ly, globals=globals())\n fs = StringFunction('0.0', independent_variables=('x', 'y', 't'),\n globals=globals())\n BCs = StringFunction('0.0', independent_variables=('x', 'y', 't'),\n globals=globals())\n\n def action(u, xv, yv, t):\n #print t\n pass\n\n implementation = {}\n cpu = []\n for ic in 'f77', 'vec', 'scalar', 'weave':\n for bc in 'f77', 'vec', 'scalar', 'weave':\n for inner in 'f77', 'vec', 'scalar', 'weave':\n implementation['ic'] = ic\n implementation['inner'] = inner\n implementation['bc'] = bc\n # optimize StringFunction 
functions for the non-weave case:\n # implementation:\n if 'weave' in (ic, bc, inner) or 'f77' in (ic, bc, inner):\n I = Is; f = fs; BC = BCs\n else:\n I = Is.__call__; f = fs.__call__; BC = BCs.__call__\n\n t0 = time.clock()\n dt, cpu_ic, cpu_inner, cpu_bc = \\\n solver(I, f, c, BC, Lx, Ly, nx, ny, 0, tstop,\n user_action=None,\n implementation=implementation,\n verbose=False)\n t1 = time.clock()\n cpu_total = cpu_ic + cpu_inner + cpu_bc\n overhead = (t1-t0)-cpu_total\n cpu.append([implementation.copy(), cpu_total,\n cpu_ic, cpu_inner, cpu_bc, overhead])\n print t1-t0, implementation, 'overhead:', overhead\n # normalize CPU-times:\n cpu_min = min([abs(c) for i, c, c1, c2, c3, c4 in cpu])\n print '\\n\\nMinimum CPU time:', cpu_min\n print 'no of time steps:', int(tstop/dt)\n print 'interior/boundary ratio:', int(nx*ny*1.0/max(nx,ny))\n for impl, cpu, cpu_ic, cpu_inner, cpu_bc, overhead in cpu:\n # normalized-CPU ic inner bc overhead\n print \"%8.2f\" % (cpu/cpu_min),\n print \"%-10s %8.2f; \" % (impl['ic'], cpu_ic),\n print \"%-10s %8.2f; \" % (impl['inner'], cpu_inner),\n print \"%-10s %8.2f; \" % (impl['bc'], cpu_bc),\n print \"%d%%\" % (overhead/cpu*100)", "def run_performance():\n # Create a Struct data instance from config\n inputs = Struct(config)\n inputs.throttle = throttle\n # Get oxidizer properties at the given temperature\n n2o = n2o_properties(inputs.ox.T_tank)\n # Our integration variables are oxidizer mass and liquid oxidizer volume\n Mox = n2o.rho_l*(inputs.ox.liquid_V) + n2o.rho_g*(inputs.ox.tank_V-inputs.ox.liquid_V)\n if inputs.options.output_on:\n print(\"Initial oxidizer mass: {} kg.\".format(Mox))\n\n start = time.perf_counter() # Start timer for integration\n\n time, record = integration(inputs) # Time = time for integration, record = output data\n F_thrust = record.F_thrust\n p_cc = record.p_cc\n p_oxtank = record.p_oxtank\n p_oxpresstank = record.p_oxpresstank\n p_fueltank = record.p_fueltank\n p_fuelpresstank = record.p_fuelpresstank\n p_oxmanifold = record.p_oxmanifold\n T_oxtank = record.T_oxtank\n T_cc = record.T_cc\n area_core = record.area_core\n OF = record.OF_i\n gamma_ex = record.gamma_ex\n m_dot_ox = record.m_dot_ox\n m_dot_fuel = record.m_dot_fuel\n p_crit = record.p_crit\n m_dot_ox_crit = record.m_dot_ox_crit\n M_e = record.M_e\n p_exit = record.p_exit\n p_shock = record.p_shock\n\n time_elapsed = start-time.perf_counter() # Stop the timer and print elapsed time\n if inputs.options.output_on:\n print(\"Time elapsed for this timestep: {} sec.\".format(time_elapsed))", "def benchmark(Algorithm_, Network_, test): \n \n def sample(Algorithm_, Network_, test):\n \"\"\"\n Runs the Algorithm on Networks of the given type, varying n.\n After every execution, runs test on the resultant Network_.\n\n @param Algorithm_: a subclass of Synchronous_Algorithm, the algorithm to test.\n @param Network_: a subclass of Network, the network on which to benchmark the algorithm.\n @param test: a function that may throw an assertion error \n @return: (size, time, comm) where size is a list of values of network size,\n and time and comm are lists of corresponding values of time and communication complexities.\n \"\"\"\n size = []\n time = []\n comm = []\n n, lgn = 2, 1\n max_time = 0\n max_comm = 0\n print \"Sampling n = ...\",\n while max(max_time, max_comm) < 10000 and n < 500:\n\n #Progress\n if n == 2:\n print \"\\b\\b\\b\\b\"+str(n)+\"...\",\n else:\n print \"\\b\\b\\b\\b, \"+str(n)+\"...\",\n\n cur_times = []\n cur_comms = []\n for i in xrange( max(4, 2+lgn) ):\n A = 
Algorithm_(params={'draw': False, 'verbosity': Algorithm.SILENT})\n x = Network_(n)\n A(x)\n try:\n test(x)\n except AssertionError, e:\n print \"Algorithm Failed\"\n return None\n else:\n size.append(n)\n cur_comms.append(A.message_count)\n comm.append(A.message_count)\n\n if issubclass(Algorithm_, Synchronous_Algorithm):\n cur_times.append(A.r)\n time.append(A.r)\n max_time = max(max_time, A.r)\n max_comm = max(max_comm, A.message_count)\n\n #TODO here, decide whether need more samples for this n, based on cur_times and cur_comms variance\n n*=2\n lgn += 1\n print \" DONE\"\n return size, comm, time\n\n def averages(x,y):\n \"\"\"\n Groups x's with the same value, averages corresponding y values.\n\n @param x: A sorted list of x values\n @param y: A list of corresponding y values\n @return: (x grouped by value, corresponding mean y values)\n \n Example:\n\n averages([1,1,2,2,2,3], [5,6,3,5,1,8]) --> ([1, 2, 3], [5.5, 3.0, 8.0])\n \n \"\"\"\n new_x = [x[0]]\n new_y = []\n\n cur_x = new_x[0]\n cur_ys = []\n for x_i, y_i in zip(x,y):\n if x_i == cur_x:\n cur_ys.append(y_i)\n else:\n new_y.append( sum(cur_ys)/float(len(cur_ys) ) )\n new_x.append( x_i )\n cur_ys = [y_i]\n cur_x = x_i\n new_y.append( sum(cur_ys)/float(len(cur_ys) ) )\n return new_x, new_y\n\n def plot(x, y, title):\n \"\"\"Plots the points (x[i],y[i]) for all i, fig.\"\"\"\n fig, ax = plt.subplots()\n\n x_ave,y_ave = averages(x,y)\n\n ax.scatter(x, y, label=\"data\", color='b')\n ax.scatter(x_ave, y_ave, label=\"means\", color='r')\n \n ax.set_xlim( xmin=0 ) \n ax.set_ylim( ymin=0 )\n ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))\n ax.set_title(title)\n ax.set_xlabel(Network_.__name__ +' size')\n\n data = sample(Algorithm_, Network_, test)\n if data == None: return\n size, comm, time = data\n \n if issubclass(Algorithm_, Synchronous_Algorithm):\n plot(size, time, Algorithm_.__name__ + ' Time Complexity')\n\n plot(size, comm, Algorithm_.__name__ + ' Communication Complexity')", "def fast():\n # Need a minimum of 10 total_timesteps for adversarial training code to pass\n # \"any update happened\" assertion inside training loop.\n total_timesteps = 10\n n_expert_demos = 1\n n_episodes_eval = 1\n algorithm_kwargs = dict(\n shared=dict(\n demo_batch_size=1,\n n_disc_updates_per_round=4,\n ),\n )\n gen_batch_size = 2\n parallel = False # easier to debug with everything in one process\n max_episode_steps = 5\n # SB3 RL seems to need batch size of 2, otherwise it runs into numeric\n # issues when computing multinomial distribution during predict()\n num_vec = 2\n init_rl_kwargs = dict(batch_size=2)", "def main(cls):\n parser = cls.make_argument_parser()\n args = parser.parse_args()\n args.device = make_hoomd_device(args)\n benchmark = cls(**vars(args))\n performance = benchmark.execute()\n\n if args.device.communicator.rank == 0:\n print(f'{numpy.mean(performance)}')", "def measure_mp_speedup():\n modes = [\n # name, function\n ('dSMC', ana.d_smc),\n ('dAMC', ana.d_amc),\n ('EDF-VD', ana.d_edf_vd),\n ('pSMC', ana.p_smc),\n ('pAMC-BB', ana.p_amc_bb),\n ('pAMC-BB+', ft.partial(ana.p_amc_bb, ignore_hi_mode=True))\n ]\n times_seq = {}\n task_sets_list = pickle.load(open(task_sets_path + 'task_sets_fairgen', 'rb'))\n start_total_seq = time()\n for name, func in modes:\n start_mode_seq = time()\n rates = []\n for task_sets in task_sets_list:\n results = []\n for task_set in task_sets:\n results.append(func(task_set))\n rates.append(100 * np.average(results))\n stop_mode_seq = time()\n times_seq[name] = stop_mode_seq - 
start_mode_seq\n stop_total_seq = time()\n times_seq['Overall'] = stop_total_seq - start_total_seq\n\n times_par = {}\n start_total_par = time()\n pool = mp.Pool()\n for name, func in modes:\n start_mode_par = time()\n rates = []\n for task_sets in task_sets_list:\n rates.append(100 * np.average(pool.map(func, task_sets)))\n stop_mode_par = time()\n times_par[name] = stop_mode_par - start_mode_par\n stop_total_par = time()\n times_par['Overall'] = stop_total_par - start_total_par\n\n speedups = {}\n for name, _ in modes:\n speedups[name] = times_seq[name] / times_par[name]\n speedups['Overall'] = times_seq['Overall'] / times_par['Overall']\n\n print(\"PERFORMANCE MEASUREMENTS\")\n print(\"Number of cores: %d\" % mp.cpu_count())\n print(\"Scheme: Sequential time / Parallel time / Speedup\")\n for name, _ in modes:\n print(\"%s: %.3fs / %.3fs / %.3f\" % (name, times_seq[name], times_par[name], speedups[name]))\n print(\"Overall: %.3fs / %.3fs / %.3f\" % (times_seq['Overall'], times_par['Overall'], speedups['Overall']))", "def test_cpu_total_work(self):\n import time\n from supvisors.statistics import instant_cpu_statistics, cpu_total_work\n # take 2 spaced instant cpu statistics\n ref_stats = instant_cpu_statistics()\n time.sleep(1)\n last_stats = instant_cpu_statistics()\n total_work = cpu_total_work(last_stats, ref_stats)\n # total work should be quite close to sleeping time\n self.assertAlmostEqual(1, total_work, 1)", "def test_instant_cpu_statistics(self):\n import multiprocessing\n from supvisors.statistics import instant_cpu_statistics\n stats = instant_cpu_statistics()\n # test number of results (number of cores + average)\n self.assertEqual(multiprocessing.cpu_count() + 1, len(stats))\n # test average value\n total_work = total_idle = 0\n for cpu in stats[1:]:\n self.assertEqual(2, len(cpu))\n work, idle = cpu\n total_work += work\n total_idle += idle\n self.assertAlmostEqual(stats[0][0], total_work / multiprocessing.cpu_count())\n self.assertAlmostEqual(stats[0][1], total_idle / multiprocessing.cpu_count())", "def test_benchmark1(capsys):\n student_1 = Student('114007245','Mario Castillo',2018, [\"INST 201\",\"INST 326\"])\n student_2 = Student('117006012', 'Joe Rogan', 2018, [\"MATH 115\",\"PSYC 100\"])\n student_3 = Student(\"117008490\", \"Kanye West\", 2018, [\"MATH 120\",\"STAT 003\"])\n student_4 = Student('118009044', \"Elon Musk\", 2018, [\"PSYC 100\",\"MATH 003\"])\n \n student_1.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken MATH 115 or higher.\\n'\n 'You have not taken PSYC 100.\\n')\n\n student_2.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == (f'You have completed all of your Benchmark I courses! 
'\n f'Congratulations, {student_2.student_name}!\\n')\n\n student_3.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out \n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken PSYC 100.\\n')\n\n student_4.benchmark_I()\n outerr = capsys.readouterr()\n out = outerr.out\n assert out == ('You have not completed the Benchmark I requirements.\\n'\n 'You have not taken MATH 115 or higher.\\n')", "def main(benchmark, size=None, backend=None, repetitions=None, burnin=1, device=\"cpu\"):\n try:\n bm_module, bm_identifier = get_benchmark_module(benchmark)\n except ImportError as e:\n click.echo(f\"Error while loading benchmark {benchmark}: {e!s}\", err=True)\n raise click.Abort()\n\n available_backends = set(bm_module.__implementations__)\n\n if len(backend) == 0:\n backend = available_backends.copy()\n else:\n backend = set(backend)\n\n unsupported_backends = [b for b in backend if b not in available_backends]\n\n for b in unsupported_backends:\n click.echo(\n f'Backend \"{b}\" is not supported by chosen benchmark (skipping)', err=True\n )\n backend.remove(b)\n\n for b in backend.copy():\n try:\n with setup_functions[b](device=device) as bmod:\n click.echo(f\"Using {b} version {bmod.__version__}\")\n except BackendNotSupported as e:\n click.echo(\n f'Setup for backend \"{b}\" failed (skipping), reason: {e!s}', err=True\n )\n backend.remove(b)\n\n try:\n check_backend_conflicts(backend, device)\n except BackendConflict as exc:\n click.echo(f\"Backend conflict: {exc!s}\", err=True)\n raise click.Abort()\n\n runs = sorted(itertools.product(backend, size))\n\n if len(runs) == 0:\n click.echo(\"Nothing to do\")\n return\n\n timings = {run: [] for run in runs}\n\n if repetitions is None:\n click.echo(\"Estimating repetitions...\")\n repetitions = {}\n\n for b, s in runs:\n # use end-to-end runtime for repetition estimation\n def run_func():\n run = bm_module.get_callable(b, s, device=device)\n with setup_functions[b](device=device):\n run()\n\n repetitions[(b, s)] = estimate_repetitions(run_func)\n else:\n repetitions = {(b, s): repetitions for b, s in runs}\n\n all_runs = list(\n itertools.chain.from_iterable(\n [run] * (repetitions[run] + burnin) for run in runs\n )\n )\n random.shuffle(all_runs)\n\n results = {}\n checked = {r: False for r in runs}\n\n pbar = click.progressbar(\n label=f\"Running {len(all_runs)} benchmarks...\", length=len(runs)\n )\n\n try:\n with pbar:\n for (b, size) in all_runs:\n with setup_functions[b](device=device):\n run = bm_module.get_callable(b, size, device=device)\n with Timer() as t:\n res = run()\n\n # YOWO (you only warn once)\n if not checked[(b, size)]:\n if size in results:\n is_consistent = check_consistency(\n results[size], convert_to_numpy(res, b, device)\n )\n if not is_consistent:\n click.echo(\n f\"\\nWarning: inconsistent results for size {size}\",\n err=True,\n )\n else:\n results[size] = convert_to_numpy(res, b, device)\n checked[(b, size)] = True\n\n timings[(b, size)].append(t.elapsed)\n pbar.update(1.0 / (repetitions[(b, size)] + burnin))\n\n # push pbar to 100%\n pbar.update(1.0)\n\n for run in runs:\n assert len(timings[run]) == repetitions[run] + burnin\n\n finally:\n stats = compute_statistics(timings)\n click.echo(format_output(stats, bm_identifier, device=device))", "def compare_hard():\n\n\n cores = [1, 2, 4, 8, 16]\n s_times = [[], [], []]\n g_times = [[], [], []]\n\n for simulator in ['g', 's']:\n for n_cores in cores:\n for i, size in enumerate([1, 10, 100]):\n ss = str(size) + 'k'\n\n 
print('analyzing', '-'.join(['data/bench', str(simulator), ss, str(n_cores)]) + '.out')\n data = split_output_file('-'.join(['data/bench', str(simulator), ss, str(n_cores)]) + '.out')\n if simulator == 'g':\n g_times[i].append(data['FOOTER']['total_time'])\n if simulator == 's':\n s_times[i].append(data['FOOTER']['total_time'])\n\n # absolute time plot\n fig, axs = plt.subplots(3)\n\n for i in range(3):\n axs[i].plot(cores, s_times[i], label='sequential')\n axs[i].plot(cores, g_times[i], label='GPU')\n # axs[i].set_yticks([])\n axs[i].set_xticks(cores)\n axs[i].set_title(str([1, 10, 100][i]) + 'k population size')\n axs[i].ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs[0].legend()\n axs[1].set_ylabel('Total simulation time [ms]')\n axs[2].set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 6, forward=True)\n plt.tight_layout()\n\n plt.show()\n\n # speedup plot\n fig, axs = plt.subplots(3)\n\n for i in range(3):\n axs[i].plot(cores, [s_times[i][0] / x / k for x, k in zip(s_times[i], cores)], label='sequential')\n axs[i].plot(cores, [g_times[i][0] / x / k for x, k in zip(g_times[i], cores)], label='GPU')\n # axs[i].plot([0, 16], [0, 16], label='theoretical')\n # axs[i].set_yticks([])\n axs[i].set_xticks(cores)\n axs[i].set_title(str([1.4, 14, 140][i]) + 'k population size')\n axs[i].ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs[0].legend()\n axs[1].set_ylabel('Relative speedup')\n axs[2].set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 6, forward=True)\n plt.tight_layout()\n\n plt.show()\n\n # scaling plot\n fig, axs = plt.subplots(1)\n\n axs.plot([1400, 14000, 140000], [s_times[i][0] for i in range(3)], label='seqential')\n axs.plot([1400, 14000, 140000], [g_times[i][0] for i in range(3)], label='GPU')\n axs.set_xticks(cores)\n axs.set_title(str([1.4, 14, 140][i]) + 'k population size')\n axs.ticklabel_format(style='sci', scilimits=(-128, 128))\n\n axs.legend()\n axs.set_ylabel('Relative speedup')\n axs.set_xlabel('Number of cores')\n\n fig.set_size_inches(4, 2, forward=True)\n plt.tight_layout()\n\n plt.show()", "def slower():\n try:\n ttsEng.slower()\n except Exception, e:\n logging.error(e)", "def bench_oneshot_2():\n sh(\"%s -Wa scripts\\\\internal\\\\bench_oneshot_2.py\" % PYTHON)", "def main():\n run_simulation(spectral=False, ml=False, num_procs=1)\n run_simulation(spectral=True, ml=False, num_procs=1)\n run_simulation(spectral=False, ml=True, num_procs=1)\n run_simulation(spectral=True, ml=True, num_procs=1)\n run_simulation(spectral=False, ml=True, num_procs=10)\n run_simulation(spectral=True, ml=True, num_procs=10)", "def emPerformance(filesAndDirectories='None', resultsFileName='None', iterationCount='3', modes='None', testTypes='None', viewports='None', verbose='False'):\n\n pass", "def main():\n known_args, unknown_args = parse_known_args()\n if not unknown_args:\n # return an error message if no command is provided\n sys.exit(\"Please provide a command to benchmark: $ humann_benchmark COMMAND\")\n try:\n process = subprocess.Popen(\" \".join(unknown_args),shell=True)\n except (EnvironmentError, subprocess.CalledProcessError):\n sys.exit(\"Unable to execute command: \" + \" \".join(unknown_args))\n pid=str(process.pid)\n start=time.time()\n max_memory=0\n while process.poll() is None:\n time.sleep(1)\n # while the process is running check on the memory use\n # get the pids of the main process and all children (and their children)\n pids=get_pids(pid)\n 
stdout=subprocess.check_output([\"ps\",\"--pid\",\",\".join(pids),\"-o\",\"pid,rss,command\"]).decode(\"utf-8\")\n print(\"\\n\"+stdout+\"\\n\")\n # remove the header from the process output\n status=[i.split() for i in filter(lambda x: x, stdout.split(\"\\n\")[1:])]\n # memory is the sum of all rss\n memory=sum(int(i[1]) for i in status)\n if memory > max_memory:\n max_memory=memory\n \n end=time.time()\n print(\"Time: {:.0f} minutes\".format((end-start)/60))\n print(\"Max Memory (RSS): {:.1f} GB\".format(max_memory*1.0/1024**2))", "def testTiming(s,iters=10,batchsize=99):\n\n qstart = s.getEndConfig();\n gxcall = \"gj\"\n amcall = (\"am\",0.004)+tuple(qstart)\n\n index = 0\n while index < iters:\n t0=time.time()\n res = s.call([gxcall]*batchsize)\n assert not 'Error' in res,'Error on batch '+str(gxcall)\n print(\"Time for %d gx's (batched): %f\"%(batchsize,time.time()-t0))\n \n t0=time.time()\n for i in xrange(batchsize):\n res = s.call(gxcall)\n assert res != 'Error','Error on '+str(gxcall)\n print(\"Time for %d gx's (unbatched): %f\"%(batchsize,time.time()-t0))\n\n t0=time.time()\n res = s.call([amcall]*batchsize)\n assert not 'Error' in res,'Error on batch '+str(amcall)\n print(\"Time for %d am's (batched): %f\"%(batchsize,time.time()-t0))\n t0=time.time()\n\n t0=time.time()\n s.call([amcall]*batchsize,wantReply=False)\n print(\"Time for %d am's (batched,noreply): %f\"%(batchsize,time.time()-t0))\n ret = s.echo(\"blah\")\n assert ret == \"blah\"\n print(\" Responsive after time %f\"%(time.time()-t0))\n\n t0=time.time()\n for i in xrange(batchsize):\n res=s.call(amcall)\n assert 'Error' != res,'Error on '+str(amcall)\n print(\"Time for %d am's (unbatched): %f\"%(batchsize,time.time()-t0))\n\n t0=time.time()\n for i in xrange(batchsize):\n s.call(amcall,wantReply=False)\n print(\"Time for %d am's (unbatched,noreply): %f\"%(batchsize,time.time()-t0))\n ret = s.echo(\"blah\")\n assert ret == \"blah\"\n print(\" Responsive after time %f\"%(time.time()-t0))\n \n index += 1", "def run_std(self):\n print \"Initialising grid\"\n self.initialise_grid(50, 100, 3)\n \n self.initialise_shadow_map()\n \n self.num_iterations = 500\n self.jump_length = 1\n \n self.pd_s = 0.6\n self.pd_ns = 0.4\n \n self.avcount = np.zeros(self.num_iterations + 1)\n \n \n before = time.time()\n self.main_loop()\n after = time.time()\n \n time_taken = after - before\n \n print \"Took %f seconds\", time_taken", "def test_client_twrr_performance(self):\n pass", "def run_delayed_ssa(system):\n \n #vars used in the simulation\n time = 0 #unitless\n end_time = system['sim-time']\n species = system['participants']\n parameters = system['parameters']\n events = system['events']\n prop_funcs = {}\n exec_funcs = {}\n props = {}\n delays = {}\n last_exec_time = {}\n \n #return values\n time_array = []\n species_array = []\n \n #populate results array\n time_array = [time]\n row = [0]*len(species)\n species_names = [''] * len(species)\n \n #create species vars so that rate code can be executed\n i = 0\n for name in species:\n species_names[i] = name\n exec( name + '=' + str(species[name]) )\n row[i] = species[name]\n i += 1\n species_array.append(row)\n \n #create parameter vars so that rate code can be executed\n for name in parameters:\n exec( name + '=' + str(parameters[name]) )\n\n #create (compile) functions from input strings for rates and events\n for name in events:\n if events[name].get('delay'):\n delays[name] = events[name]['delay']\n else:\n delays[name] = 0.0\n last_exec_time[name] = -1\n props[name] = 0.0\n 
prop_funcs[name] = compile(\"props['\" + name + \"'] = \" + str(events[name]['propensity']), 'prop_funcs_'+name, 'exec')\n exec_funcs[name] = compile(events[name]['consequence'], 'exec_funcs_'+name, 'exec')\n \n #MAIN LOOP\n while time < end_time:\n \n #calculate propensities\n for name in props:\n exec(prop_funcs[name])\n if delays[name] > 0 and delays[name] + last_exec_time[name] < time:\n print(name)\n props[name] = 0.0\n \n #calculate total of all propensities\n total_prop = 0\n for name in props:\n total_prop += props[name]\n \n \n u = random.uniform(0,total_prop)\n usum = 0\n lucky = None\n for name in props:\n usum += props[name]\n if usum > u:\n lucky = name\n break\n\n #fire that reaction\n if lucky:\n last_exec_time[lucky] = time\n exec(exec_funcs[lucky])\n \n \n row = [0]*len(species)\n i = 0\n for name in species:\n row[i] = eval(name)\n i += 1\n time_array.append(time)\n species_array.append(row)\n \n #update next time using exp distrib\n if total_prop == 0.0: #jump to next delay\n lowest_delay = inf\n for name in props:\n if delays[name] > 0 and delays[name] < lowest_delay:\n lowest_delay = delays[name]\n time += lowest_delay\n else:\n dt = random.exponential(1.0/total_prop)\n time += dt\n\n #END MAIN LOOP\n\n result = {'time':time_array, 'participants':species_array, 'headers': species_names}\n return result", "def do_workload(self):\n pass", "def benchmark():\n print defer.Deferred.__module__\n for func, args, iter in benchmarkFuncs:\n print func.__name__, args, timeit(func, iter, *args)" ]
[ "0.679934", "0.6775366", "0.6487424", "0.6471152", "0.6418339", "0.6378135", "0.6274181", "0.6261773", "0.6100292", "0.60983396", "0.6038468", "0.6008453", "0.59768164", "0.5960427", "0.5952964", "0.59192204", "0.5903947", "0.59022474", "0.5896368", "0.58835787", "0.588011", "0.5853271", "0.58510494", "0.5850899", "0.5838955", "0.5837292", "0.583665", "0.58358973", "0.58346105", "0.58187026" ]
0.7757142
0
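The `benchmark` helper called by this record's document is assumed to live in the surrounding sams package. As an illustration only, a generic timing harness for the same kind of comparison could look like the following sketch; both system objects are taken as given, and the import path follows the older simtk-era OpenMM namespace that the document's `unit` usage implies.

```python
# Hedged sketch: time n integration steps for a reference system and an
# alchemically modified system built from the same positions (both given).
# Newer OpenMM drops the simtk namespace; adjust the import accordingly.
import time
from simtk import openmm, unit

def time_system(system, positions, nsteps=500, timestep=1.0 * unit.femtoseconds):
    integrator = openmm.LangevinIntegrator(
        300 * unit.kelvin, 1.0 / unit.picoseconds, timestep)
    context = openmm.Context(system, integrator)
    context.setPositions(positions)
    integrator.step(10)               # warm-up steps before timing
    start = time.time()
    integrator.step(nsteps)
    context.getState(getEnergy=True)  # forces synchronization before stopping the clock
    elapsed = time.time() - start
    del context, integrator
    return elapsed

# slowdown = time_system(alchemical_system, pos) / time_system(reference_system, pos)
```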
This function takes a level (a numpy array) and places the object at random. It also takes an optional parameter called ideal_position to force the algorithm to place said object on said place if it is empty. If that place isn't empty, the algorithm will select an empty position at random. This algorithm modifies the level IN PLACE.
def position_object(level, object_string, placeable_positions=None, ideal_position=None): if ideal_position: if level[ideal_position] == EMPTY: level[ideal_position] = object_string return if placeable_positions == set([]): raise ValueError(f"There are no placeable positions for object {object_string} in {level}") if placeable_positions is None: placeable_positions = get_placeable_positions(level) if not placeable_positions: raise ValueError(f"The level has no placeable positions for the object {object_string}: {level}") obj_position = random.choice(list(placeable_positions)) placeable_positions.remove(obj_position) level[obj_position] = object_string
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def dispatch_items_randomly(self, level):\n for item in self.list:\n item.position = Item.define_random_position(item, level)", "def get_rand_tree(position):\n max_width = random.randint(50, 100)\n min_width = random.randint(5, 10)\n itter_width = random.randint(5, 20)\n itter_height = random.randint(5, 20)\n random_tree = Tree(max_width, min_width, itter_width,\n itter_height)\n random_tree.loc_translate(position)\n return random_tree", "def place_obj(self,\n obj,\n top=None,\n size=None,\n reject_fn=None,\n max_tries=math.inf\n ):\n\n if top is None:\n top = (0, 0)\n\n if size is None:\n size = (self.grid.width, self.grid.height)\n\n num_tries = 0\n\n while True:\n # This is to handle with rare cases where rejection sampling\n # gets stuck in an infinite loop\n if num_tries > max_tries:\n raise RecursionError('rejection sampling failed in place_obj')\n\n num_tries += 1\n\n pos = np.array((\n self._rand_int(top[0], top[0] + size[0]),\n self._rand_int(top[1], top[1] + size[1])\n ))\n\n # Don't place the object on top of another object\n if self.grid.get(*pos) != None:\n continue\n\n # Don't place the object where the agent is\n if np.array_equal(pos, self.start_pos):\n continue\n\n if np.array_equal(pos, self.start_dpos):\n continue\n\n # Check if there is a filtering criterion\n if reject_fn and reject_fn(self, pos):\n continue\n\n break\n\n self.grid.set(*pos, obj)\n\n if obj is not None:\n obj.init_pos = pos\n obj.cur_pos = pos\n\n return pos", "def random_pos(self, ):\n self.pos_item['needle'] = self.shuffle_pos()\n self.pos_item['ether'] = self.shuffle_pos()\n self.pos_item['tube'] = self.shuffle_pos()", "def cut_trees(self, )\n\n\n\n def random_spot(x_low, y_low, x_range, y_range):\n x = randint(x_low, x_low + x_range)\n y = randint(y_low, y_low + y_range)\n dur = random.uniform(0.5, 3.0)\n\n return pyautogui.moveTo(x, y, dur)", "def move_to_random_pos(self):\n newpos = [(np.random.rand() - 0.5) * 0.1,\n (np.random.rand() - 0.5) * 0.1,\n np.random.rand() * 0.9 + 0.2]\n self.move_to(newpos)", "def set_from_random(self, random: tuple, level: int=200) -> None:\n\t\tdimM, dimN = random\n\t\tself._set_dim(dimM, dimN)\n\t\t\n\t\tlevel_max = 1000\n\t\tself.matrix = [[1 if rnd.randrange(level_max) < level else 0 for _ in range(dimN)] for _ in range(dimM)]\n\t\t# reflexivity\n\t\tif self.isreflexive:\n\t\t\tself.matrix = [[self.matrix[m][n] if m != n else 1 for n in range(dimN)] for m in range(dimM)]", "def mutate(individual):\n for idx, q in enumerate(individual):\n rng = random.random()\n if rng < 1 / (len(individual)):\n pos = random.randint(1, len(individual))\n individual[idx] = pos\n return individual", "def move_egg(array):\n new_array = deepcopy(array)\n old_x, old_y = get_random_position(array, 1)\n new_x, new_y = get_random_position(array, 0)\n new_array[old_y][old_x] = 0\n new_array[new_y][new_x] = 1\n return new_array", "def random_place(board, player):\n available = possibilities(board)\n place(board, player, random.choice(available))", "def random_position():\n pos = np.random.randn(3)\n pos[2] = 0\n return pos", "def set_random_pos(self, which):\n available = [[r, c] for r, row in enumerate(self.maze)\n for c, value in enumerate(row) if value == ' ']\n choice = random.choice(available)\n if which == 'starting':\n self.current_pos = choice\n elif which == 'finishing':\n self.finish_pos = choice", "def generate_level(level):\n seed = level * 69420 # multiply by 69420 to not have the seeds too close to each other\n random.seed(seed)\n dimensions = get_map_size(level)\n 
level_map = np.full(dimensions, -1)\n while -1 in level_map:\n choice = random.choice(np.argwhere(level_map == -1))\n next_index = (choice[0], choice[1])\n # get indices of the tiles next to the current index\n left_index, up_index, right_index, down_index = get_direction_indices(next_index)\n left = tile_needs_connection(left_index, level_map, has_connection_right)\n up = tile_needs_connection(up_index, level_map, has_connection_down)\n right = tile_needs_connection(right_index, level_map, has_connection_left)\n down = tile_needs_connection(down_index, level_map, has_connection_up)\n level_map[next_index] = get_tile(left, up, right, down)\n return un_solve(level_map)", "def place_vision(self, probability=0.1):\r\n number = int((self.__nx * self.__ny) * probability) # probability of having a pit\r\n for i in range(number):\r\n x = random.randint(0, (self.__nx - 1))\r\n y = random.randint(0, (self.__ny - 1))\r\n if self.__maze[x][y] != self.entrance_room() and \\\r\n self.__maze[x][y] != self.exit_room():\r\n self.__maze[x][y].set_vision_potion(True)", "def scramble_prompt(level, prompt, scrambles=1):\n current_prompt_positions = get_positions(prompt)\n placeable_positions = get_placeable_positions(level)\n\n old_pos = random.choice(current_prompt_positions)\n new_pos = random.choice(list(placeable_positions))\n\n level[old_pos] = EMPTY\n level[new_pos] = prompt", "def generate(self, level):\n # TODO The dungeon's instances are spawned and loaded here.\n # fill map with \"blocked\" tiles\n level.maze = [[Tile(x, y, True) for y in range(level.height)] for x in range(level.width)]\n\n for r in range(level.max_rooms):\n # random width and height\n w = random.randint(level.min_room_size, level.max_room_size)\n h = random.randint(level.min_room_size, level.max_room_size)\n\n # random position without going out of the boundaries of the map\n x = random.randint(0, level.width - w - 1)\n y = random.randint(0, level.height - h - 1)\n\n # \"DungeonRoom\" class makes rectangles easier to work with\n new_room = Room(x, y, w, h)\n level.rooms.append(new_room)\n\n # run through the other rooms and see if they intersect with this one\n failed = False\n for other_room in level.rooms:\n if other_room is not new_room and new_room.intersect(other_room):\n failed = True\n break\n\n if not failed:\n # this means there are no intersections, so this room is valid\n\n # \"paint\" it to the map's tiles\n self._create_room(level, new_room)\n\n # center coordinates of new room, will be useful later\n new_x, new_y = new_room.center()\n\n if level.num_rooms > 0:\n # connect it to the previous room with a tunnel\n # center coordinates of previous room\n (prev_x, prev_y) = level.rooms[level.num_rooms - 1].center()\n\n # draw a coin (random number that is either 0 or 1)\n if random.randint(0, 1) == 1:\n # first move horizontally, then vertically\n self._create_h_tunnel(level, prev_x, new_x, prev_y)\n self._create_v_tunnel(level, prev_y, new_y, new_x)\n else:\n # first move vertically, then horizontally\n self._create_v_tunnel(level, prev_y, new_y, prev_x)\n self._create_h_tunnel(level, prev_x, new_x, new_y)\n\n # finally, append the new room to the list\n level.rooms.append(new_room)\n level.num_rooms += 1\n\n # connect them with a tunnel\n self._create_h_tunnel(level, 25, 55, 23)", "def make_random_move(self):\n s=set()\n for i in range(self.height):\n for j in range(self.width):\n s.add((i,j))\n\n s=s-self.mines-self.moves_made\n if s==set(): return None\n return random.choice(list(s))\n #raise NotImplementedError", 
"def new_tile(self):\r\n random_row = random.randrange(0, self._grid_height)\r\n random_col = random.randrange(0, self._grid_width)\r\n random_choice = random.choice([2]*90 + [4] * 10)\r\n \r\n if 0 in [num for elem in self._cells for num in elem]: \r\n if self._cells[random_row][random_col] == 0:\r\n self._cells[random_row][random_col] = random_choice \r\n else:\r\n self.new_tile()\r\n else:\r\n pass", "def reset_food_level(self):\n food_level = random() * FOOD_DEFAULT\n self.plant = food_level", "def random_pose(self):\n position = self._start\n while self[position].distance < np.sum(self._rooms.shape) * 2:\n position = np.array(\n [random.randrange(limit) for limit in self._rooms.shape]\n )\n direction = random.choice(self.exits(position))\n return (position, direction)", "def level_one(self):\n swap_spot = randint(0,len(self.current_recipe.toppings)-1)\n #if statements are to stay within food type of original recipe\n if self.current_recipe.recipe_type == 'soup':\n new_ingredient = choice(soup_ingredients)\n self.current_recipe.toppings[swap_spot] = new_ingredient\n if self.current_recipe.recipe_type == 'smoothie':\n new_ingredient = choice(smoothie_ingredients)\n self.current_recipe.toppings[swap_spot] = new_ingredient\n if self.current_recipe.recipe_type == 'salad':\n new_ingredient = choice(salad_ingredients)\n self.current_recipe.toppings[swap_spot] = new_ingredient\n if self.current_recipe.recipe_type == 'sandwich':\n new_ingredient = choice(sandwich_ingredients)\n self.current_recipe.toppings[swap_spot] = new_ingredient\n self.new_ingredients = self.current_recipe.toppings", "def place_goal(self, point = \"random\"):\n if point != \"random\":\n self.maze[point[0]][point[1]] = self.goal_value\n self.goal_position = tuple([point[0],point[1]])\n\n else:\n # Find blank spaces for goal placement\n curr_blanks = np.where(self.maze == self.blank_value)\n\n # In a 2d array, curr_blanks should produce two arrays of equal length\n # Call one value in the range of those arrays to index\n value = randint(0, len(curr_blanks[0])-1)\n\n self.maze[curr_blanks[0][value]][curr_blanks[1][value]] = self.goal_value\n self.goal_position = tuple([curr_blanks[0][value],curr_blanks[1][value]])", "def new_tile(self):\n\n # creating a random float variable that will roll a random value\n # if randomvalue > .90\n #\n\n tile_added = False\n while not tile_added:\n row = random.randint(0,self.grid_height - 1)\n col = random.randint(0,self.grid_width - 1)\n if self.board[row][col] == 0:\n tile_added = True\n random_tile = random.random()\n if random_tile < .90:\n self.board[row][col] = 2\n else:\n self.board[row][col] = 4", "def MoveRandom(self):\n r = random.randint(0,3)\n if r == 0: self.x += 1\n elif r == 1: self.y += 1\n elif r == 2: self.x -= 1\n elif r == 3: self.y -= 1", "def add_walls(level, amount):\n placeable_pos = list(get_placeable_positions_inc_path(level))\n random.shuffle(placeable_pos)\n for _ in range(min(len(placeable_pos), amount)):\n pos = placeable_pos.pop()\n level[pos] = WALL\n assert is_solvable(level), \"Level isn't solvable. 
There's a bug in add_walls\"", "def make_random_move(self):\n choice = None\n options = []\n #generate full moves list\n for i in range(self.width):\n for j in range(self.height):\n #make sure move has not been made\n if (i,j) not in self.moves_made:\n #make sure move is not a mine\n if (i,j) not in self.mines:\n options.append((i,j))\n #if there are no options, return None\n if len(options) == 0:\n return None\n\n #pick a random option from generated list\n choice = random.choice(options)\n return choice\n\n \"\"\"\n For kicks and giggles I wrote this extra bit to determine a\n rough intuitive probability for each option based on the knowledge\n base, so rather than picking a choice randomly the AI can choose\n the option that is, at least intuitively, least likely to blow up.\n Better to take the 1/8 chance than the 1/3 chance, right?\n \"\"\"\n best_chance = 1\n #iterate through generated options\n for option in options:\n #Could set chance to 1/8, but the AI wouldn't actually know that. I\n #only know it because I can read the code...But for the purposes of this\n #drill we'll say the AI doesn't know how many bombs are placed.\n #Better then to pick a square we know nothing about than one that\n #has a 1/8 chance of exploding. Gather more information that way.\n chance = 0\n for sentence in self.knowledge:\n #look to see if current option is in sentences\n if option in sentence.cells:\n #use sentence count and length of cell set to calculate probability\n prob = sentence.count / len(sentence.cells)\n if prob > chance:\n #Looking for the highest explosive probability for this square\n chance = prob\n if chance < best_chance:\n #If this option has lower odds of exploding than current best, it becomes\n #the optimal\n best_chance = chance\n choice = option\n\n #return choice", "def place_agent(\n self,\n top=None,\n size=None,\n rand_dir=True,\n max_tries=math.inf\n ):\n self.start_dpos = None\n self.start_pos = None\n pos = self.place_obj(None, top, size, max_tries=max_tries)\n self.start_pos = pos\n self.start_dpos = self.start_pos\n\n return pos", "def move(self):\n if self._z >= 75:\n a = random.random()\n print(str(a))\n if a < 0.2:\n self._z += 1\n if a > 0.2 and a < 0.9:\n self._z -= 1\n if a > 0.9:\n self._z = self._z\n else: \n self._z -= 1\n \n b = random.random()\n print(str(b))\n if b < 0.1:\n self._y += 1\n if b > 0.1 and b < 0.2:\n self._y -= 1\n if b > 0.2 and b < 0.25:\n self._x -= 1\n if b > 0.25:\n self._x += 1", "def make_random_move(self):\n #completely random move\n all_moves = set(itertools.product(range(self.height), range(self.width)))\n moves_left = list(all_moves - self.mines - self.moves_made)\n if not moves_left:\n return None\n return random.choice(moves_left)", "def update_position(self):\n new_position = []\n for i in range(self.num_literals):\n r = random()\n position_i = 1 if r < self.sigmoid(self.velocity[i]) else 0\n new_position.append(position_i)\n self.position = new_position" ]
[ "0.62657046", "0.58841485", "0.5780916", "0.57509255", "0.57381296", "0.5687422", "0.5588331", "0.55675876", "0.5532268", "0.55043423", "0.54832673", "0.5475558", "0.54514307", "0.5417708", "0.54086894", "0.5363276", "0.5360702", "0.5359083", "0.5353122", "0.535133", "0.5322105", "0.532184", "0.5315245", "0.53080577", "0.5267091", "0.5264659", "0.52545124", "0.52472377", "0.52471715", "0.52391887" ]
0.6344606
0
This function adds enemies of random types at random floor tiles, modifying the level in place.
def add_enemies(level, amount):
    floor_tiles = get_positions(level, EMPTY)
    random.shuffle(floor_tiles)
    for _ in range(min(len(floor_tiles), amount)):
        pos = floor_tiles.pop()
        enemy = random.choice([ENEMY1, ENEMY2, ENEMY3])
        level[pos] = enemy
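A minimal usage sketch for add_enemies, assuming a NumPy 2-D level array; the tile constants and the get_positions helper below are hypothetical stand-ins for the module's own definitions:

import random
import numpy as np

# Assumed tile constants; the real module's values may differ.
EMPTY, ENEMY1, ENEMY2, ENEMY3 = 0, 2, 3, 4

def get_positions(level, tile):
    # Hypothetical stand-in for the module helper: every (row, col)
    # index tuple whose cell holds the given tile value.
    return [tuple(p) for p in np.argwhere(level == tile)]

level = np.zeros((5, 5), dtype=int)   # a 5x5 level of floor tiles
add_enemies(level, amount=3)          # place three randomly typed enemies
print(np.isin(level, (ENEMY1, ENEMY2, ENEMY3)).sum())  # -> 3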
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def spawn_enemies():\n\n enemy_num = random.randint(1,5)\n spawn_box = spawn_boxes[random.randint(0, 3)]\n\n if spawn_box.y <= 0: start = [0, 128]\n elif spawn_box.y >= 640: start = [0, -128]\n elif spawn_box.x <= 0: start = [128, 0]\n elif spawn_box.x >= 640: start = [-128, 0]\n\n x = spawn_box.x\n y = spawn_box.y\n new_enemies = []\n for i in range(enemy_num):\n new_enemies.append(enemies.Wolf(x + 32, y + 32, grid, (x + 32 + start[0], y + 32 + start[1])))\n x += 64\n if not spawn_box.collidepoint(x, y):\n x = spawn_box.x\n y += 64\n\n all_enemies.add(new_enemies)\n all_sprites.add(new_enemies)", "def create_enemy():\n if randint(0, 20) == 5:\n try:\n check.check_life(common.COLS-1, common.MIDS_R, \"Enemy\")\n eitem = person.Enemy(common.COLS-1, common.MIDS_R)\n config.E_LIST.append(eitem)\n except (config.EnemyHere, config.GapHere):\n pass\n\n for i in config.E_LIST:\n try:\n i.move(i.x_pos-2, i.y_pos)\n except config.WallHere:\n pass\n except config.EnemyHere:\n config.E_LIST.remove(i)", "def _populate_level_with_enemies(self,\n map_layer_configuration,\n base_enemy_chance_cave: float = 0.006,\n base_enemy_chance_dungeon: float = 0.006,\n base_boss_chance: float = 0.003) -> None:\n enemy_chance_cave = self.generate_enemy_chance(base_enemy_chance_cave)\n enemy_chance_dungeon = self.generate_enemy_chance(base_enemy_chance_dungeon)\n boss_chance = self.generate_enemy_chance(base_boss_chance)\n for row in map_layer_configuration:\n for block in row:\n if block[0] == ' ':\n if np.random.rand() > (1 - enemy_chance_cave):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_enemies)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n elif block[0] == 'F':\n if np.random.rand() > (1 - enemy_chance_dungeon):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_enemies)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n elif np.random.rand() > (1 - boss_chance):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_bosses)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200, speed=0.7)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n self.sprites.drill_list.append(self.sprites.drill)\n\n for entity in self.sprites.entity_list:\n entity.setup_collision_engine([self.sprites.indestructible_blocks_list])", "def add_enemies_room(length, width, room):\r\n l = length - 3\r\n w = width - 3\r\n enemies = list()\r\n prob = 100\r\n # 60, 30, 15 percent chance of spawning enemy\r\n for i in range(0,5):\r\n if randint(0,100) < prob:\r\n enemies.append(True)\r\n else:\r\n enemies.append(False)\r\n prob = prob/2\r\n for enemy in enemies:\r\n if enemy == True:\r\n # random int with more padding\r\n y = randint(2,w)\r\n x = randint(2,l)\r\n #choose new x and y if space is occupied\r\n while room[x][y] != \".\":\r\n y = randint(2,w)\r\n x = randint(2,l)\r\n\r\n room[x][y] = \"E\"\r\n return room", "def create_enemies(self, count):\n self.enemies = []\n\n while count > 0:\n # Randomly select a cell\n x = int(random() * self.map_size[0])\n y = int(random() * self.map_size[1])\n\n # If it has 
been filled with something, choose another cell\n if self.is_filled(x, y):\n continue\n\n # Randomly select a type of enemy to generate\n r = int(random() * 10)\n if 4 < r and r < 8:\n enemy = self.create_enemy_bombeater_at(x, y)\n elif r == 8:\n enemy = self.create_enemy_flying_at(x, y)\n elif r == 9:\n enemy = self.create_enemy_bomber_at(x, y)\n else:\n enemy = self.create_enemy_normal_at(x, y)\n\n # Create dummy objects to prevent enemies \n # from concentrating at one place\n self.create_dummy_obj_at(x - 1, y)\n self.create_dummy_obj_at(x + 1, y)\n self.create_dummy_obj_at(x, y - 1)\n self.create_dummy_obj_at(x, y + 1)\n\n self.enemies.append(enemy)\n count -= 1", "def create_enemies(self):\n if not self.ENEMY_DIST:\n raise ValueError('You must initialize ENEMY_DIST. Import turnable.rooms.FightRoom and '\n 'call FightRoom.set_enemy_dist(your_dist).')\n\n amount = random.randint(1, 3)\n for c in range(amount):\n en = self._get_enemy()(pos=self.pos)\n en.game = self.game\n self.enemies.append(en)", "def new_tile(self):\n\n # creating a random float variable that will roll a random value\n # if randomvalue > .90\n #\n\n tile_added = False\n while not tile_added:\n row = random.randint(0,self.grid_height - 1)\n col = random.randint(0,self.grid_width - 1)\n if self.board[row][col] == 0:\n tile_added = True\n random_tile = random.random()\n if random_tile < .90:\n self.board[row][col] = 2\n else:\n self.board[row][col] = 4", "def spawn_enemies(self, number: int = None) -> None:\n\n # Make the enemies into rows of 6\n for j in range(2):\n self.enemies.add([EnemyShip(self.sensitivity, self.screen_width // 4 + i * self.screen_width // 10,\n self.screen_height // 2 - EnemyShip.sprites[0].get_height() * j,\n self.wave_random(), self.screen_width, self.screen_height,\n self.get_random_direction(), self.mob_bullet, self.debug) for i in range(6)])", "def _spawn_enemy(self):\n alien = Alien(self)\n alien_width = alien.rect.width\n alien.x = self.screen_width\n self.aliens.add(alien)", "def TrySpawnEnemy(self):\n if ((pygame.time.get_ticks() / self.clock.get_time()) % \n ENEMY_SPAWN_FREQUENCY == 0):\n lineNumber = 0 #here may be some random if there is more than one line\n type = 0 #here may be random also\n newEnemy = self.MakeZombie(0, self.fieldTop + \n lineNumber * VTAB_SIZE * self.height)\n self.enemies.add(newEnemy)\n return True\n return False", "def new_tile(self):\r\n # check if is zero or not\r\n new_tile_added = False\r\n # a list to 2 90% of the time and 4 10% of the time\r\n new_tile_list = [2,2,2,2,2,2,2,2,2,4]\r\n counter = 0\r\n while not new_tile_added:\r\n row_position = random.randrange(0,self.grid_height)\r\n col_position = random.randrange(0,self.grid_width)\r\n if self.grid[row_position][col_position] == 0:\r\n self.grid[row_position][col_position] = random.choice(new_tile_list)\r\n new_tile_added = True\r\n if counter > self.grid_width * self.grid_height:\r\n print 'you failed'\r\n break\r\n\r\n counter +=1", "def enemy_update(self, events):\n for event in events:\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_k:\n self.enemies.append(Enemy(1080, randint(0,256)))\n for enemy in self.enemies:\n enemy.update()\n if enemy.pos_x < 0:\n self.enemies = self.enemies[1:]\n elif enemy.is_dead(self.bullets):\n self.enemies = self.enemies[1:]\n if len(self.enemies) == 0:\n self.enemies.append(Enemy(1280, randint(100,356)))", "def new_tile(self):\r\n # replace with your code\r\n empty_square_lists = []\r\n for row in range(self._grid_height):\r\n for col in 
range(self._grid_width):\r\n if(self.get_tile(row, col) == 0):\r\n empty_square_lists.append((row, col))\r\n \r\n if len(empty_square_lists) == 0:\r\n return \"game over!\"\r\n \r\n random_cell = random.choice(empty_square_lists)\r\n random_cell_row = random_cell[0]\r\n random_cell_col = random_cell[1]\r\n \r\n values = [2] * 90 + [4] * 10\r\n value = random.choice(values)\r\n \r\n self.set_tile(random_cell_row, random_cell_col, value)", "def __spawn_enemy(self):\n enemy = BasicEnemy(self.__pos[0], self.__pos[1],\n 30, 30, self.__enemy_targets, BASIC_ENEMY_IMAGE)\n self.__timer = Timer(self.__spawn_time, self.__spawn_enemy)\n EventListener.fire_events(WORLD_ADD_OBJECT, enemy)", "def create_enemies_list(self):\n import random\n random.seed()\n enemies = [\"Assets/images/Inimigo_1_verde.png\", \"Assets/images/Inimigo_1_verm.png\", \"Assets/images/Inimigo_2.png\", \"Assets/images/Inimigo_3.png\"]\n start_x, start_y = 10, 25\n x, y = start_x, start_y\n tamanho = min(self.game.count_inimigos, self.running.colunas)\n self.game.count_inimigos += 1\n for j in range(tamanho): \n e_type = random.choice(enemies)\n enemy = Enemy(self.game, x, y, e_type)\n self.running.inimigos.append(enemy)\n self.running.game_images.append(enemy.game_image)\n x += self.running.x_space\n return", "def monster(x, y, i):\n screen.blit(monsterImg[i], (x, y))", "def enemy_load(ai, var, screen, ship, enemies):\r\n\tfor wav in range(2):\r\n\t\tif wav > 0:\r\n\t\t\tenemy = Enemy(ai, var, screen, ship, 4, (ai.height/3))\r\n\t\t\tenemy.y = (ai.height * wav) / (wav + 1)\r\n\t\t\tenemy.rect.centery = enemy.y\r\n\t\t\tenemies.add(enemy)", "def new_tile(self):\r\n random_row = random.randrange(0, self._grid_height)\r\n random_col = random.randrange(0, self._grid_width)\r\n random_choice = random.choice([2]*90 + [4] * 10)\r\n \r\n if 0 in [num for elem in self._cells for num in elem]: \r\n if self._cells[random_row][random_col] == 0:\r\n self._cells[random_row][random_col] = random_choice \r\n else:\r\n self.new_tile()\r\n else:\r\n pass", "def place_entrance(self):\r\n x = random.randint(0, (self.__nx - 1))\r\n y = random.randint(0, (self.__ny - 1))\r\n self.__current_room = x, y # places adventurer in dungeon at start of game\r\n self.__entrance_room = x, y\r\n self.__maze[x][y].set_entrance(True)", "def add_entity(self, ent):\n self.tiles[ent.position[x]][ent.position[y]].add_entity(ent)", "def new_tile(self):\n while True:\n random_row = random.randrange(self._grid_height)\n random_column = random.randrange(self._grid_width)\n if self._grid[random_row][random_column] == 0:\n self._grid[random_row][random_column] = random.choice([2] * 9 + [4])\n break", "def new_tile(self):\r\n rand_x = random.randrange(self.width)\r\n rand_y = random.randrange(self.height)\r\n while self.get_tile(rand_y, rand_x) != 0:\r\n rand_x = random.randrange(self.width)\r\n rand_y = random.randrange(self.height)\r\n value = random.choice([2,2,2,2,2,2,2,2,2,4])\r\n del self.board[rand_y][rand_x]\r\n self.board[rand_y].insert(rand_x,value)\r\n return self.board", "def etoile():\n x, y = random.randint(0, MAXW), random.randint(0, MAXH)\n cercle(x, y, 2, 'red')", "def make_attack(self):\n \n rand = random.randrange(0, 3)\n if rand == 1:\n self.attack = True\n elif rand == 2:\n self.attack = False\n \n if self.attack:\n if self.rect.y < 750:\n if self.rect.y == 200:\n self.rect.x += 100\n self.rect.y += 8\n self.rect.x += 5\n else:\n self.rect.y = self.originy\n self.rect.x = self.originx", "def put_item_random(self, x, y):\n r = int(random() * 10)\n if 3 < r 
and r <= 6:\n self.put_fireitem(x, y)\n elif 6 < r and r <= 9:\n self.put_bombitem(x, y)", "def add_tile(matrix):\n a = random.randint(0, len(matrix)-1)\n b = random.randint(0, len(matrix)-1)\n while matrix[a][b] != 0:\n a = random.randint(0, len(matrix)-1)\n b = random.randint(0, len(matrix)-1)\n\n # setting chance of getting tile : value 2 (80% chance) or 4 (20% chance), to the given matrix\n population = [2, 4]\n weights = [0.8, 0.2]\n matrix[a][b] = random.choices(population, weights)[0]\n\n return matrix", "def new_tile(self):\n col = random.choice(range(self.grid_width))\n row = random.choice(range(self.grid_height))\n if self.grid[row][col] == 0:\n if random.random() >= 0.9:\n self.grid[row][col] = 4\n else:\n self.grid[row][col] = 2\n else:\n self.new_tile()", "def testrandom(self):\n for i in range(100):\n WeaponAbility()", "def new_tile(self):\n\n if len(self._available_new_tiles) == 0:\n # Refill the _available_new_tiles after 10 moves\n self._available_new_tiles = TOTAL_AVAILABLE_MOVES[:]\n\n while True:\n # Checks for 0 in a random row and column\n row = random.randrange(self._grid_height)\n col = random.randrange(self._grid_width)\n if self._grid[row][col] == 0:\n break\n\n new_tile = random.choice(self._available_new_tiles)\n # Remove the selected tile from _available_new_tiles\n self._available_new_tiles.remove(new_tile)\n self._grid[row][col] = new_tile", "def remove_enemies(level, amount):\n enemy_pos = get_positions(level, ENEMY1)\n enemy_pos += get_positions(level, ENEMY2)\n enemy_pos += get_positions(level, ENEMY3)\n\n random.shuffle(enemy_pos)\n for _ in range(min(len(enemy_pos), amount)):\n pos = enemy_pos.pop()\n level[pos] = EMPTY" ]
[ "0.7069509", "0.6875109", "0.6687566", "0.6642939", "0.6589674", "0.63269234", "0.63249", "0.6316438", "0.6237066", "0.62076664", "0.61932665", "0.61864245", "0.6169646", "0.6110832", "0.60748166", "0.5997652", "0.59964746", "0.59939605", "0.59848565", "0.59831464", "0.59775543", "0.59686166", "0.59577656", "0.59497696", "0.5940162", "0.5937882", "0.5925916", "0.59217787", "0.5883414", "0.5866169" ]
0.7756488
0
This function removes enemies at random, replacing them with floor tiles, modifying the level in place.
def remove_enemies(level, amount):
    enemy_pos = get_positions(level, ENEMY1)
    enemy_pos += get_positions(level, ENEMY2)
    enemy_pos += get_positions(level, ENEMY3)

    random.shuffle(enemy_pos)
    for _ in range(min(len(enemy_pos), amount)):
        pos = enemy_pos.pop()
        level[pos] = EMPTY
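The same setup exercises remove_enemies; again the constants and helper are assumed stand-ins, not taken from the source:

import random
import numpy as np

EMPTY, ENEMY1, ENEMY2, ENEMY3 = 0, 2, 3, 4   # assumed constants

def get_positions(level, tile):
    # Hypothetical helper: every (row, col) index holding the tile value.
    return [tuple(p) for p in np.argwhere(level == tile)]

level = np.array([[ENEMY1, EMPTY],
                  [ENEMY3, ENEMY2]])
remove_enemies(level, amount=2)   # clear two random enemies back to floor
print((level == EMPTY).sum())     # -> 3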
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def take_remove_tile_turn(self, remove_tile_fxn):\n tilesAroundOpponents = []\n for player in self.board.players:\n if not player == self.player:\n x, y = player.x, player.y\n nearbyTiles = self.board.get_removable_tiles_around(x, y)\n tilesAroundOpponents.extend(nearbyTiles)\n tilesAroundOpponents = set(tilesAroundOpponents)\n x, y = self.player.x, self.player.y\n tilesAroundMe = set(self.board.get_removable_tiles_around(x, y)) # tiles around controlled player (me)\n safelyAroundOpponents = list(tilesAroundOpponents - tilesAroundMe) # tiles around opponents but not around me\n removableTiles = set(self.board.get_all_open_removable_tiles()) # all removable tiles\n safelyRemovable = list(removableTiles - tilesAroundMe) # all removable tiles except those around me\n try:\n if safelyAroundOpponents:\n target = random.choice(safelyAroundOpponents)\n elif tilesAroundOpponents: # likely that I'm next to other player. I'll have to remove a tile available for both of us\n target = random.choice(list(tilesAroundOpponents))\n else: # no open spots to remove around players can only happen if solid unremovable tiles exist\n target = random.choice(safelyRemovable)\n except IndexError: # this error will catch if last else statement possibly triggered it\n super(TileRemoveBot, self).take_remove_tile_turn(remove_tile_fxn)\n return\n remove_tile_fxn(target.x, target.y)", "def retreat(self, enemy: Enemy):\n self.__expedition.remove(enemy)", "def add_enemies(level, amount):\n floor_tiles = get_positions(level, EMPTY)\n random.shuffle(floor_tiles)\n for _ in range(min(len(floor_tiles), amount)):\n pos = floor_tiles.pop()\n enemy = random.choice([ENEMY1, ENEMY2, ENEMY3])\n level[pos] = enemy", "def take_remove_tile_turn(self, remove_tile_fxn):\n removableTiles = list(self.board.get_all_open_removable_tiles()) # all removable tiles\n target = random.choice(removableTiles)\n remove_tile_fxn(target.x, target.y)", "def create_enemy():\n if randint(0, 20) == 5:\n try:\n check.check_life(common.COLS-1, common.MIDS_R, \"Enemy\")\n eitem = person.Enemy(common.COLS-1, common.MIDS_R)\n config.E_LIST.append(eitem)\n except (config.EnemyHere, config.GapHere):\n pass\n\n for i in config.E_LIST:\n try:\n i.move(i.x_pos-2, i.y_pos)\n except config.WallHere:\n pass\n except config.EnemyHere:\n config.E_LIST.remove(i)", "def player_removes_tile(self, x, y):\n activePlayer = self.get_active_player()\n if activePlayer.humanControlled:\n super(RobotGame, self).player_removes_tile(x, y)", "def removeIfDead(self):\n global HP, winColor, FPS, kills\n if self.health <= 0:\n if self.rank == \"firerate\":\n if P.boosts == 1:\n P.timer = 600\n else:\n P.boosts += 1\n\n if self.rank == \"healer\":\n if P.medkits == 1:\n HP = 100\n else:\n P.medkits += 1\n\n if self.rank == \"quadshot\":\n P.quadshot = True\n P.quadshottimer = 300\n FPS = 100\n\n if self.rank == \"helper\":\n if self.firsttime:\n self.image = pygame.transform.rotate(self.image, 180)\n self.firsttime = False\n self.y -= self.vel*3\n if self.y <= 0:\n del enemies[findPlace(self, enemies)]\n if yn(Frame, 3):\n projectiles.append(projectile(self.x+self.w+2, self.y+self.h//2, 8, yvel=0, r=True, l=False))\n projectiles.append(projectile(self.x-42, self.y+self.h//2, -8, yvel=0, r=False, l=True))\n else:\n del enemies[findPlace(self, enemies)]\n kills += 1", "def remove_entity(self, ent):\n self.tiles[ent.position[x]][ent.position[y]].remove_entity(ent)", "def remove_tiles(self, num):\r\n drawntiles = [self.db.tilestring.pop(random.randrange(len(self.db.tilestring))) for _ in 
xrange(num)]\r\n return drawntiles", "def __remove_collision(self,x_pos,y_pos):\r\n random_board = random.choice(self.board_list).board\r\n collision_measurement = random_board[y_pos][x_pos]\r\n \r\n self.board_list = [board for board in self.board_list if board.board[y_pos][x_pos] == collision_measurement]", "def spawn_enemies():\n\n enemy_num = random.randint(1,5)\n spawn_box = spawn_boxes[random.randint(0, 3)]\n\n if spawn_box.y <= 0: start = [0, 128]\n elif spawn_box.y >= 640: start = [0, -128]\n elif spawn_box.x <= 0: start = [128, 0]\n elif spawn_box.x >= 640: start = [-128, 0]\n\n x = spawn_box.x\n y = spawn_box.y\n new_enemies = []\n for i in range(enemy_num):\n new_enemies.append(enemies.Wolf(x + 32, y + 32, grid, (x + 32 + start[0], y + 32 + start[1])))\n x += 64\n if not spawn_box.collidepoint(x, y):\n x = spawn_box.x\n y += 64\n\n all_enemies.add(new_enemies)\n all_sprites.add(new_enemies)", "def updatePositionAndClean(self):\n\t\tnewposition = self.position.getNewPosition(self.direction, self.speed)\n\t\twhile not self.room.isPositionInRoom(newposition):\n\t\t\tself.direction = random.randrange(0, 360)\n\t\t\tnewposition = self.position.getNewPosition(self.direction, self.speed)\n\t\tself.position = newposition\n\t\tself.direction = random.randrange(0, 360)\n\t\tself.room.cleanTileAtPosition(self.position)", "def remove_walls(level, amount):\n height, width = level.shape\n inner_level = level[1:height-1, 1:width-1]\n wall_pos = [(x+1, y+1) for (x,y) in get_positions(inner_level, WALL)]\n random.shuffle(wall_pos)\n for _ in range(min(len(wall_pos), amount)):\n pos = wall_pos.pop()\n level[pos] = EMPTY", "def flee_decrease_hp():\n\n global character\n character['HP'] -= randint(1, 4)", "def new_tile(self):\n\n if len(self._available_new_tiles) == 0:\n # Refill the _available_new_tiles after 10 moves\n self._available_new_tiles = TOTAL_AVAILABLE_MOVES[:]\n\n while True:\n # Checks for 0 in a random row and column\n row = random.randrange(self._grid_height)\n col = random.randrange(self._grid_width)\n if self._grid[row][col] == 0:\n break\n\n new_tile = random.choice(self._available_new_tiles)\n # Remove the selected tile from _available_new_tiles\n self._available_new_tiles.remove(new_tile)\n self._grid[row][col] = new_tile", "def updatePositionAndClean(self):\n\t\tnewposition = self.position.getNewPosition(self.direction, self.speed)\n\t\twhile not self.room.isPositionInRoom(newposition):\n\t\t\tself.direction = random.randrange(0, 360)\n\t\t\tnewposition = self.position.getNewPosition(self.direction, self.speed)\n\t\tself.position = newposition\n\t\tself.room.cleanTileAtPosition(self.position)", "def moveEnnemy(self):\n for i,ennemy in enumerate(self.ennemies):\n\n r = random.uniform(0,1)\n if r > 0.2 : \n if (self.ennemyText != None) and (self.PAS !=None) and (self.can!=None):\n ennemy.strategy( self, self.ennemyText[i], self.PAS, self.can)\n else:\n ennemy.strategy(self)\n if self.agent.getPosition() == ennemy.getPosition(): # if the movement of the ennemy is in the agent's position it is the end of the simulation\n self.killed = True\n\n if not(self.killed):\n if self.display:# if it's not the end we make move again the ennemies \n self.grille.after(1000, self.moveEnnemy) # Resubscribe to make move again the ennemy each second", "def cleanTileAtPosition(self, pos):\n self.tiles[pos] = 'clean'", "def troop_remove(self, pos):\n x, y = pos\n # tile_id = AIV_SIZE * y + x\n \n troop = self.tmap[y, x]\n if (troop == 0):\n return\n \n # update tmap\n self.tmap[y, x] = 0\n\n # first 
remove thing from tarr, then find something new in tmap\n\n\n # for slot in range(0, len(self.tarr)):\n # if (self.tarr[slot] == tile_id):\n # self.tmap[y, x] = slot//10\n \n # # update tarr\n # for slot in range(10*troop, 11*troop):\n # if (self.tarr[slot] == tile_id):\n # for slot_slot in range(slot, 11*troop-1):\n # self.tarr[slot_slot] = self.tarr[slot_slot+1]", "def _populate_level_with_enemies(self,\n map_layer_configuration,\n base_enemy_chance_cave: float = 0.006,\n base_enemy_chance_dungeon: float = 0.006,\n base_boss_chance: float = 0.003) -> None:\n enemy_chance_cave = self.generate_enemy_chance(base_enemy_chance_cave)\n enemy_chance_dungeon = self.generate_enemy_chance(base_enemy_chance_dungeon)\n boss_chance = self.generate_enemy_chance(base_boss_chance)\n for row in map_layer_configuration:\n for block in row:\n if block[0] == ' ':\n if np.random.rand() > (1 - enemy_chance_cave):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_enemies)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n elif block[0] == 'F':\n if np.random.rand() > (1 - enemy_chance_dungeon):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_enemies)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n elif np.random.rand() > (1 - boss_chance):\n if self.sprites.drill.center_x != block[1] or self.sprites.drill.center_y != block[2]:\n enemy_to_add = random.choice(potential_bosses)\n enemy_to_append = enemy_to_add(block[1], block[2], vision=200, speed=0.7)\n self.sprites.entity_list.append(enemy_to_append)\n self.sprites.enemy_list.append(enemy_to_append)\n self.sprites.drill_list.append(self.sprites.drill)\n\n for entity in self.sprites.entity_list:\n entity.setup_collision_engine([self.sprites.indestructible_blocks_list])", "def recall(self):\n for t in self.placed_tiles:\n row = self.placed_tiles[t][1][0]\n col = self.placed_tiles[t][1][1]\n # remove tiles from board\n self.board.board[row][col].letter = None\n # put tiles back on rack\n self.rack[t] = self.placed_tiles[t][0]", "def enemy_update(self, events):\n for event in events:\n if event.type == pygame.KEYDOWN:\n if event.key == pygame.K_k:\n self.enemies.append(Enemy(1080, randint(0,256)))\n for enemy in self.enemies:\n enemy.update()\n if enemy.pos_x < 0:\n self.enemies = self.enemies[1:]\n elif enemy.is_dead(self.bullets):\n self.enemies = self.enemies[1:]\n if len(self.enemies) == 0:\n self.enemies.append(Enemy(1280, randint(100,356)))", "def cleanTileAtPosition(self, pos):\n\t\tfor i in self.tiles:\n\t\t\tif i.positionInTile(pos):\n\t\t\t\ti.cleanTile()", "def cleanTileAtPosition(self, pos):\n posX = pos.getX()\n posY = pos.getY()\n if (int(posX), int(posY)) not in self.clean_tiles:\n self.clean_tiles.append((int(posX), int(posY)))", "def kill(self, enemys):\n for enemy in enemys:\n for bullet in self.bullets:\n if enemy.rect.colliderect(bullet.rect):\n enemy.take_damage(bullet.power)\n if(enemy.hp <= 0):\n enemys.remove(enemy)\n self.bullets.remove(bullet)", "def generatePiece(self):\n\n empty_tiles = []\n for y in range(BOARD_SIZE):\n for x in range(BOARD_SIZE):\n if self.grid[x][y].isEmpty():\n empty_tiles.append(self.grid[x][y])\n\n two_or_four = 
random.choice([2, 4])\n random.choice(empty_tiles).set(two_or_four)", "def cull(self) -> None:\n for player in self.players:\n to_remove = [creature for creature in player.battle_line if creature.damage_taken >= creature.power()]\n for creature in to_remove:\n player.battle_line.remove(creature)\n to_remove.destroyed(self, creature)", "def new_tile(self):\r\n rand_x = random.randrange(self.width)\r\n rand_y = random.randrange(self.height)\r\n while self.get_tile(rand_y, rand_x) != 0:\r\n rand_x = random.randrange(self.width)\r\n rand_y = random.randrange(self.height)\r\n value = random.choice([2,2,2,2,2,2,2,2,2,4])\r\n del self.board[rand_y][rand_x]\r\n self.board[rand_y].insert(rand_x,value)\r\n return self.board", "def death_scene_reset(groups, player, player_2):\r\n for group in groups:\r\n for sprite in group:\r\n sprite.rect.y -= 1000\r\n\r\n x = random.randrange(200, 800)\r\n player.update_pos(x, 600)\r\n player.speed_boosted = False\r\n\r\n if player_2 is not None:\r\n player_2.update_pos(x, 600)\r\n player_2.speed_boosted = False", "def move1(self):\n\n options = self.location.exits.keys()\n self.location.objects.remove(a)\n print('fred is moving..')\n self.location = self.location.exits[random.choice(list(options))]\n self.location.objects.append(a)" ]
[ "0.6756282", "0.6578938", "0.6368028", "0.6353323", "0.6288392", "0.6196521", "0.61859804", "0.6107032", "0.6084591", "0.60444194", "0.60030633", "0.597222", "0.5966598", "0.5921707", "0.589264", "0.58556813", "0.58181703", "0.5778161", "0.57766193", "0.57622844", "0.5743479", "0.57250786", "0.5714605", "0.5713231", "0.57035625", "0.5696768", "0.5696126", "0.5690717", "0.56745857", "0.56727296" ]
0.71985275
0
This function takes a level and adds walls at random placeable positions, in place, asserting that the level remains solvable.
def add_walls(level, amount):
    placeable_pos = list(get_placeable_positions_inc_path(level))
    random.shuffle(placeable_pos)
    for _ in range(min(len(placeable_pos), amount)):
        pos = placeable_pos.pop()
        level[pos] = WALL
    assert is_solvable(level), "Level isn't solvable. There's a bug in add_walls"
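A sketch for add_walls; the two helpers it calls are not shown in the source, so trivial stand-ins are assumed here (the real is_solvable presumably runs a path search between the level's start and exit):

import random
import numpy as np

EMPTY, WALL = 0, 1   # assumed constants

def get_placeable_positions_inc_path(level):
    # Hypothetical stand-in: treat every empty cell as placeable.
    return {tuple(p) for p in np.argwhere(level == EMPTY)}

def is_solvable(level):
    # Hypothetical stand-in; the real check would verify a walkable
    # path still exists after the walls are placed.
    return True

level = np.zeros((6, 6), dtype=int)
add_walls(level, amount=5)    # scatter five walls, then re-check solvability
print((level == WALL).sum())  # -> 5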
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate(self, level):\n # TODO The dungeon's instances are spawned and loaded here.\n # fill map with \"blocked\" tiles\n level.maze = [[Tile(x, y, True) for y in range(level.height)] for x in range(level.width)]\n\n for r in range(level.max_rooms):\n # random width and height\n w = random.randint(level.min_room_size, level.max_room_size)\n h = random.randint(level.min_room_size, level.max_room_size)\n\n # random position without going out of the boundaries of the map\n x = random.randint(0, level.width - w - 1)\n y = random.randint(0, level.height - h - 1)\n\n # \"DungeonRoom\" class makes rectangles easier to work with\n new_room = Room(x, y, w, h)\n level.rooms.append(new_room)\n\n # run through the other rooms and see if they intersect with this one\n failed = False\n for other_room in level.rooms:\n if other_room is not new_room and new_room.intersect(other_room):\n failed = True\n break\n\n if not failed:\n # this means there are no intersections, so this room is valid\n\n # \"paint\" it to the map's tiles\n self._create_room(level, new_room)\n\n # center coordinates of new room, will be useful later\n new_x, new_y = new_room.center()\n\n if level.num_rooms > 0:\n # connect it to the previous room with a tunnel\n # center coordinates of previous room\n (prev_x, prev_y) = level.rooms[level.num_rooms - 1].center()\n\n # draw a coin (random number that is either 0 or 1)\n if random.randint(0, 1) == 1:\n # first move horizontally, then vertically\n self._create_h_tunnel(level, prev_x, new_x, prev_y)\n self._create_v_tunnel(level, prev_y, new_y, new_x)\n else:\n # first move vertically, then horizontally\n self._create_v_tunnel(level, prev_y, new_y, prev_x)\n self._create_h_tunnel(level, prev_x, new_x, new_y)\n\n # finally, append the new room to the list\n level.rooms.append(new_room)\n level.num_rooms += 1\n\n # connect them with a tunnel\n self._create_h_tunnel(level, 25, 55, 23)", "def remove_walls(level, amount):\n height, width = level.shape\n inner_level = level[1:height-1, 1:width-1]\n wall_pos = [(x+1, y+1) for (x,y) in get_positions(inner_level, WALL)]\n random.shuffle(wall_pos)\n for _ in range(min(len(wall_pos), amount)):\n pos = wall_pos.pop()\n level[pos] = EMPTY", "def generate_level(level):\n seed = level * 69420 # multiply by 69420 to not have the seeds too close to each other\n random.seed(seed)\n dimensions = get_map_size(level)\n level_map = np.full(dimensions, -1)\n while -1 in level_map:\n choice = random.choice(np.argwhere(level_map == -1))\n next_index = (choice[0], choice[1])\n # get indices of the tiles next to the current index\n left_index, up_index, right_index, down_index = get_direction_indices(next_index)\n left = tile_needs_connection(left_index, level_map, has_connection_right)\n up = tile_needs_connection(up_index, level_map, has_connection_down)\n right = tile_needs_connection(right_index, level_map, has_connection_left)\n down = tile_needs_connection(down_index, level_map, has_connection_up)\n level_map[next_index] = get_tile(left, up, right, down)\n return un_solve(level_map)", "def random_walls(self, prop_of_walls):\n\n # Map random indices for wall placement\n self.wall_indices = np.random.choice(np.arange(self.maze.size),\n replace=False,\n size=int(self.maze.size * prop_of_walls))\n\n # Add walls to forbidden zones\n self.forbid_indices.append(list(self.wall_indices))\n\n # Create flattened array to map\n temp_arr = self.maze.flatten()\n\n # Remake original structure\n temp_arr[self.wall_indices] = self.wall_value\n\n # Remake original 
structure\n self.maze = temp_arr.reshape(self.dims)\n\n return self.maze", "def _generate_maze(self):\n grid = [[GridCell(x, y, self._treasure_prob) for x in range(self._map_size)] for y in range(self._map_size)]\n\n center_x = self._map_size // 2\n center_y = self._map_size // 2\n\n for _ in range(self._sparsity):\n current = grid[center_x][center_y]\n stack = list()\n start = True\n while len(stack) or start:\n start = False\n current.visited = True\n children = current.has_children(grid)\n\n if children:\n choice = np.random.choice(children)\n choice.visited = True\n\n stack.append(current)\n\n self._remove_walls(current, choice)\n\n current = choice\n\n elif stack:\n current = stack.pop()\n for row in grid:\n for cell in row:\n cell.visited = False\n\n # edit center area\n grid[center_x][center_y].set_treasury()\n for x in range(center_x - 1, center_x + 2):\n for y in range(center_y - 1, center_y + 2):\n grid[x][y].erase_walls()\n return grid", "def generate_random_maze_matrix(size, ambient_size=None):\n maze = np.ones((size, size))\n\n # Start from a random point and recursively open points\n closed_neighbors = [] # Closed points that are neighbors of open points\n \n def _open_point(point):\n # Open a point and add its neighbors to closed_neighbors\n for p in _get_neighbors(size, point):\n if maze[p[0], p[1]] and p not in closed_neighbors:\n closed_neighbors.append(p)\n maze[point[0], point[1]] = 0\n\n def _find_and_open_new_point():\n # Find a closed neighbor that can be opened without creating an open\n # block, open it, and return True. If no such point exists, return\n # False.\n np.random.shuffle(closed_neighbors)\n for new_point in closed_neighbors:\n if not maze[new_point[0], new_point[1]]:\n continue\n will_make_open_block = any([\n np.sum(maze[i: i + 2, j: j + 2]) <= 1\n for i, j in _get_containing_blocks(size, new_point)\n ])\n if not will_make_open_block:\n _open_point(new_point)\n return True\n return False\n\n # Seed the maze and iteratively open points\n _open_point(tuple(np.random.randint(0, size, size=(2,))))\n points_to_add = True\n while points_to_add:\n points_to_add = _find_and_open_new_point()\n\n # Remove dead ends\n _remove_dead_ends(maze)\n \n # If maze has no open points, recurse to generate a new one\n if np.sum(1 - maze) == 0:\n return generate_random_maze_matrix(size, ambient_size=ambient_size)\n\n # Add wall border if necessary\n if ambient_size is not None and ambient_size > size:\n maze_with_border = np.ones((ambient_size, ambient_size))\n start_index = (ambient_size - size) // 2\n maze_with_border[start_index: start_index + size,\n start_index: start_index + size] = maze\n maze = maze_with_border\n\n return maze", "def add_walls(self):\n for x in range(self.width + 1):\n if not self.some_things_at((x, 0), Wall):\n self.add_thing(Wall(), (x, 0))\n if not self.some_things_at((x, self.height), Wall):\n self.add_thing(Wall(), (x, self.height))\n\n for y in range(self.height + 1):\n if not self.some_things_at((0, y), Wall):\n self.add_thing(Wall(), (0, y))\n if not self.some_things_at((self.width, y), Wall):\n self.add_thing(Wall(), (self.width, y))\n #self.add_thing(Wumpus(),(1,3))\n #self.add_thing(Pit(),(3,3))\n #self.add_thing(Pit(),(3,1))\n #self.add_thing(Gold(),(2,3))\n #self.add_thing(Pit(),(4,4))", "def klyubin_world(self):\n maze = self.create_maze_world(10,10)\n # wall A\n for i in range(6):\n maze.add_wall( (1, i), \"N\" )\n # wall B & D\n for i in range(2):\n maze.add_wall( (i+2, 5), \"E\")\n maze.add_wall( (i+2, 6), \"E\")\n # wall C\n 
maze.add_wall( (3, 6), \"N\")\n # wall E\n for i in range(2):\n maze.add_wall( (1, i+7), \"N\")\n # wall F\n for i in range(3):\n maze.add_wall( (5, i+2), \"N\")\n # wall G\n for i in range(2):\n maze.add_wall( (i+6, 5), \"W\")\n # walls HIJK\n maze.add_wall( (6, 4), \"N\")\n maze.add_wall( (7, 4), \"N\")\n maze.add_wall( (8, 4), \"W\")\n maze.add_wall( (8, 3), \"N\")\n return maze", "def create(self):\n\t\t# Pick a random starting position not on the parameter\n\t\tx = random.randint(1, self.width - 2)\n\t\ty = random.randint(1, self.height - 2)\n\n\t\t# Set node as floor and adjacent nodes as walls\n\t\tself.setFloor(x, y)\n\t\tself.setWall(x - 1, y)\n\t\tself.setWall(x + 1, y)\n\t\tself.setWall(x, y - 1)\n\t\tself.setWall(x, y + 1)\n\n\t\t# Create list of wall positions\n\t\tself._walls = []\n\t\tself._walls.append((x - 1, y))\n\t\tself._walls.append((x + 1, y))\n\t\tself._walls.append((x, y - 1))\n\t\tself._walls.append((x, y + 1))\n\t\t\n\t\twhile self._walls:\n\t\t\t# Pick random wall position\n\t\t\tx, y = random.choice(self._walls)\n\n\t\t\t# Check if this node divides an empty node and a floor node\n\t\t\tif (x > 0 and x < self.width - 1) and (y > 0 and y < self.height - 1):\n\t\t\t\tif ((self._isEmpty(x - 1, y) and self.isFloor(x + 1, y))\n\t\t\t\tor (self._isEmpty(x + 1, y) and self.isFloor(x - 1, y))\n\t\t\t\tor (self._isEmpty(x, y - 1) and self.isFloor(x, y + 1))\n\t\t\t\tor (self._isEmpty(x, y + 1) and self.isFloor(x, y - 1))):\n\t\t\t\t\t# Check there are less than 2 adjacent floor nodes\n\t\t\t\t\tif self.countAdjacentFloorNodes(x, y) < 2:\n\t\t\t\t\t\t# Set current node as a floor\n\t\t\t\t\t\tself.setFloor(x, y)\n\n\t\t\t\t\t\t# Set adjacent empty tiles to walls and add to list of wall positions\n\t\t\t\t\t\tif x > 0:\n\t\t\t\t\t\t\tself._makeWall(x - 1, y)\n\t\t\t\t\t\tif x < self.width - 1:\n\t\t\t\t\t\t\tself._makeWall(x + 1, y)\n\t\t\t\t\t\tif y > 0:\n\t\t\t\t\t\t\tself._makeWall(x, y - 1)\n\t\t\t\t\t\tif y < self.height - 1:\n\t\t\t\t\t\t\tself._makeWall(x, y + 1)\n\n\t\t\t# Remove the current position from the list of wall positions\n\t\t\tfor wall in self._walls:\n\t\t\t\tif (wall[0] == x and wall[1] == y):\n\t\t\t\t\tself._walls.remove(wall)\n\t\t\n\t\t# Fill in any empty nodes as walls\n\t\tfor y in range(self.height):\n\t\t\tfor x in range(self.width):\n\t\t\t\tif self._isEmpty(x, y):\n\t\t\t\t\tself.setWall(x, y)", "def gen_maze(dim, p):\n maze = []\n for i in range(dim):\n maze.append([])\n for j in range(dim):\n if(random.uniform(0, 1) < p):\n maze[i].append(1)\n else:\n maze[i].append(0)\n\n maze[0][0] = 0\n maze[dim - 1][dim - 1] = 0\n return maze", "def setup_level_2() -> object:\n #create level object\n level = Level()\n\n #create vertical walls for level\n create_and_add_vertical_walls_to_list(4, 19, 4, level.wall_list)\n create_and_add_vertical_walls_to_list(12, 54, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(0, 5, 23, level.wall_list)\n create_and_add_vertical_walls_to_list(0, 4, 30, level.wall_list)\n create_and_add_vertical_walls_to_list(55, settings.HEIGHT, 23, level.wall_list)\n create_and_add_vertical_walls_to_list(55, settings.HEIGHT, 30, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 15, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(24, 54, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(29, 45, 47, level.wall_list)\n create_and_add_vertical_walls_to_list(24, 29, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(44, 54, 54, level.wall_list)\n 
create_and_add_vertical_walls_to_list(14, 55, 73, level.wall_list)\n\n #create horizontal walls for level\n create_and_add_horiontal_walls_to_list(4, 24, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 34, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(20, 24, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 74, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 19, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(34, 54, 24, level.wall_list)\n create_and_add_horiontal_walls_to_list(48, 60, 29, level.wall_list)\n create_and_add_horiontal_walls_to_list(68, 74, 29, level.wall_list)\n create_and_add_horiontal_walls_to_list(48, 60, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(68, 74, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 73, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(19, 24, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 35, 54, level.wall_list) \n\n #create sword item for \"outfit change\" \n create_and_add_item_to_list(\"pics\\sword_item.png\", 0.05, 75, 100, level.item_list)\n\n #create mysterious figure for level\n create_and_add_character_to_list(\"pics\\mystery_figure.png\", 0.095, 270, 350, level.character_list)\n\n #create dialogue for mysterious figure character\n find_disguise_convo = Dialogue(300, 390, 300, 50, \"Someone will notice you!\\n I've hidden something in the servant's quarters,\\n to make you fit in with the nobility.\")\n level.dialogue_list.append(find_disguise_convo)\n\n #info prompts and text for level\n balcony = RoomInfo(640, 500, \"Balcony. Along with the forest and sea, you can see that a battle is coming.\")\n level.room_info_list.append(balcony)\n kitchen = RoomInfo(270, 90, \"Kitchen. There are plentry of servants around. Your torn clothes are eye-catching, and may sabotage your escape\")\n level.room_info_list.append(kitchen)\n great_hall = RoomInfo(270, 470, \"Great hall. You could have sworn that someone recognized you, but nobody acts to capture you.\")\n level.room_info_list.append(great_hall)\n sitting_room = RoomInfo(650, 230, \"Private sitting room. You find several sketches... 
sketches that look like a richer, healthier version of you.\")\n level.room_info_list.append(sitting_room)\n\n return level", "def create_wall():\n if config.W_LIST == []:\n pos = randint(config.M.x_pos+4, common.R2)\n if common.value_arr(pos, common.MIDS_R) == \" \" and \\\n common.value_arr(pos, common.MIDS_R+1) == \"0\":\n try:\n witem = obstacle.Wall(pos)\n config.W_LIST.append(witem)\n except config.GapHere:\n pass\n\n elif len(config.W_LIST) < int((3*common.COLS)/80):\n if randint(0, 10) == 5:\n # create a obstacle\n pos = config.W_LIST[-1].x_pos + randint(10, 20)\n if pos < common.COLS - 3:\n try:\n witem = obstacle.Wall(pos)\n config.W_LIST.append(witem)\n except config.GapHere:\n pass\n\n else:\n pass", "def add_tile(matrix):\n a = random.randint(0, len(matrix)-1)\n b = random.randint(0, len(matrix)-1)\n while matrix[a][b] != 0:\n a = random.randint(0, len(matrix)-1)\n b = random.randint(0, len(matrix)-1)\n\n # setting chance of getting tile : value 2 (80% chance) or 4 (20% chance), to the given matrix\n population = [2, 4]\n weights = [0.8, 0.2]\n matrix[a][b] = random.choices(population, weights)[0]\n\n return matrix", "def generate_random_maze(w=20, h=20):\n\n if not (5 <= w <= 40):\n raise ValueError(\"width: {0} was not in the appropriate range of\"\n \" 5 - 40\".format(w))\n if not(5 <= h <= 40):\n raise ValueError(\"height: {0} was not in the appropriate range of\"\n \" 5 - 40\".format(h))\n vis = [[0] * w + [1] for _ in range(h)] + [[1] * (w + 1)]\n ver = [[\"10\"] * w + ['1'] for _ in range(h)] + [[]]\n hor = [[\"11\"] * w + ['1'] for _ in range(h + 1)]\n\n def walk(x, y):\n vis[y][x] = 1\n\n d = [(x - 1, y), (x, y + 1), (x + 1, y), (x, y - 1)]\n shuffle(d)\n for (xx, yy) in d:\n if vis[yy][xx]: continue\n if xx == x: hor[max(y, yy)][x] = \"10\"\n if yy == y: ver[y][max(x, xx)] = \"00\"\n walk(xx, yy)\n\n walk(randrange(w), randrange(h))\n maze = []\n for (a, b) in zip(hor, ver):\n if a:\n row1 = ''.join(a)\n row2 = ''.join(b)\n r1_ls = []\n r2_ls = []\n for thing in row1:\n thing = int(thing)\n r1_ls.append(thing)\n for thing in row2:\n thing = int(thing)\n r2_ls.append(thing)\n if r1_ls:\n maze.append(r1_ls)\n if r2_ls:\n maze.append(r2_ls)\n\n return maze", "def setup_level_1() -> object:\n #create level object\n level = Level()\n\n #create vertical walls for level\n create_and_add_vertical_walls_to_list(4, 39, 4, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 25, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 54, 19, level.wall_list)\n create_and_add_vertical_walls_to_list(4, 25, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 54, 34, level.wall_list)\n create_and_add_vertical_walls_to_list(14, 25, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(33, 44, 54, level.wall_list)\n create_and_add_vertical_walls_to_list(14, 45, 74, level.wall_list)\n create_and_add_vertical_walls_to_list(54, settings.HEIGHT, 23, level.wall_list)\n create_and_add_vertical_walls_to_list(54, settings.HEIGHT, 30, level.wall_list)\n\n #create horizontal walls for level\n create_and_add_horiontal_walls_to_list(4, 34, 4, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 9, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(15, 24, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 54, 19, level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 74, 14, level.wall_list)\n create_and_add_horiontal_walls_to_list(4, 24, 39, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 54, 39, 
level.wall_list)\n create_and_add_horiontal_walls_to_list(54, 74, 44, level.wall_list)\n create_and_add_horiontal_walls_to_list(19, 24, 54, level.wall_list)\n create_and_add_horiontal_walls_to_list(30, 35, 54, level.wall_list)\n\n #create knight character for level\n create_and_add_character_to_list(\"pics\\prison_guard.png\", 0.2, 270, 470, level.character_list)\n\n #knight asks for bribe\n guard_convo = Dialogue(300, 500, 150, 50, \"I know who you are...\\n if you pay me,\\n I'll turn a blind eye.\")\n level.dialogue_list.append(guard_convo)\n\n #create coin item to bribe knight character\n create_and_add_item_to_list(\"pics\\gold_1.png\", 0.5, 400, 250, level.item_list)\n\n #create prompts and info for rooms for object\n cell = RoomInfo(120, 100, \"Dungeon cell. There's a note and key. Someone's waiting for you in the garden.\")\n level.room_info_list.append(cell)\n guard_room = RoomInfo(450, 280, \"Guardroom. There's the unconconsious bodies of the guards. Your saviours must've gone to great lengths...\")\n level.room_info_list.append(guard_room)\n torture_chamber = RoomInfo(120, 280, \"Torture chamber. You've been here before. They were questioning you, but you didn't answer.\")\n level.room_info_list.append(torture_chamber)\n battle_room = RoomInfo(650, 280, \"Battle room. You see that your captors are fighting revolutionaries- those who seek to bring back a lost king.\")\n level.room_info_list.append(battle_room)\n stairwell = RoomInfo(220, 520, \"Stairwell. There's a lone guard who doesn't look surprised to see you\")\n level.room_info_list.append(stairwell)\n\n return level", "def wallsAndGates(self, rooms: List[List[int]]) -> None:\n if rooms==[]: return\n xcord=len(rooms)\n ycord=len(rooms[0])\n indexstack=[(i,j) for i in range(len(rooms)) for j in range(len(rooms[0])) if rooms[i][j] == 0]\n direction=[(0,1),(1,0),(0,-1),(-1,0)]\n gatenum=1\n while indexstack != []:\n newindex=[]\n for item in indexstack:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if 0<=xpoint <len(rooms) and 0<=ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=gatenum\n newindex.append((xpoint,ypoint))\n indexstack=newindex\n gatenum+=1\n ''''\n for item in index_0:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=1\n index_1.append((xpoint,ypoint))\n for item in index_1:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=2\n index_2.append((xpoint,ypoint))\n for item in index_2:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <len(rooms) and ypoint<len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=3\n index_3.append((xpoint,ypoint))\n for item in index_3:\n for mapdir in direction:\n xpoint=item[0]+mapdir[0]\n ypoint=item[1]+mapdir[1]\n if xpoint <=len(rooms) and ypoint<=len(rooms[0]):\n if rooms[xpoint][ypoint]==pow(2,31)-1:\n rooms[xpoint][ypoint]=4\n #index_3.append((xpoint,ypoint))'''", "def add_walls(self):\n for x in range(self.width):\n self.add_thing(Wall(), (x, 0))\n self.add_thing(Wall(), (x, self.height - 1))\n\n for y in range(self.height):\n self.add_thing(Wall(), (0, y))\n self.add_thing(Wall(), (self.width - 1, y))", "def generate_mine_map(width=30, height=16, 
num_mines=99):\n\n if num_mines > width * height:\n print(\"The number of mines exceeds the size of the board.\")\n return\n \n mine_map = [[False for i in range(width)] for j in range(height)]\n mines = 0\n while mines < num_mines:\n x = random.randint(0, width-1)\n y = random.randint(0, height-1)\n if not mine_map[y][x]:\n mine_map[y][x] = True\n mines += 1\n\n return mine_map", "def new_tile(self):\n\n # creating a random float variable that will roll a random value\n # if randomvalue > .90\n #\n\n tile_added = False\n while not tile_added:\n row = random.randint(0,self.grid_height - 1)\n col = random.randint(0,self.grid_width - 1)\n if self.board[row][col] == 0:\n tile_added = True\n random_tile = random.random()\n if random_tile < .90:\n self.board[row][col] = 2\n else:\n self.board[row][col] = 4", "def path(self):\n\n path_direction = random.randrange(1, 6)\n # print(path_direction)\n\n # if current room is on the edge of the level (column 0 or column 4 and we did not drop to that room\n # because of hitting an edge in the previous assignment, assign the current room to be of type 2 and the\n # new room above it to be of type 3 so that the rooms connect\n if self.current_room_y in (0, 4) and self.edge_row_jump is False:\n self.room_type[self.current_room_x][self.current_room_y] = 3\n self.current_room_x += 1\n # if we are at the bottom of level and attempt to go down again, we will have found our start room. In this\n # we save the parameter and exit the loop\n if self.current_room_x > 4:\n self.room_type[self.current_room_x - 1][self.current_room_y] = 4\n self.start_room['row'] = self.current_room_x - 1\n self.start_room['column'] = self.current_room_y\n return True\n self.room_type[self.current_room_x][self.current_room_y] = 2\n # this is set to true so that we don't continue jumping up the side of the level\n self.edge_row_jump = True\n self.number_of_rooms += 1\n\n # if random number is 1 or 2 we move the path left and give that new room left/right exits\n elif path_direction in (1, 2):\n\n # if we are on the left edge of level then we shouldn't move left any further\n # if cell we are moving to has already been assigned then we should not move there either\n if self.current_room_y > 0 and self.room_type[self.current_room_x][self.current_room_y - 1] is 0:\n # we now have a new direction without jumping rows because of hitting an edge\n self.edge_row_jump = False\n # move current room to the left\n self.current_room_y -= 1\n # assign that room with a left/right exit\n self.room_type[self.current_room_x][self.current_room_y] = 1\n self.number_of_rooms += 1\n\n # if random number is 3 or 4 we move right and give that new room left/right exits\n elif path_direction in (3, 4):\n # check if the room we are moving to has already been assigned or is off the screen\n if self.current_room_y < 4 and self.room_type[self.current_room_x][self.current_room_y + 1] == 0:\n # we now have a new direction without jumping rows because of hitting an edge\n self.edge_row_jump = False\n # move current room to the right\n self.current_room_y += 1\n # assign that room with a left/right exit\n self.room_type[self.current_room_x][self.current_room_y] = 1\n self.number_of_rooms += 1\n\n # if random number is 5 then we are moving down\n elif self.number_of_rooms != 0 and path_direction is 5:\n self.edge_row_jump = False\n self.room_type[self.current_room_x][self.current_room_y] = 3\n # print cell to screen\n self.current_room_x += 1\n # if we are at bottom of level and attempt to go down again, we will have 
found our start room. In this\n # we save the parameter and exit the loop\n if self.current_room_x > 4:\n self.room_type[self.current_room_x - 1][self.current_room_y] = 4\n self.start_room['row'] = self.current_room_x - 1\n self.start_room['column'] = self.current_room_y\n return True\n self.room_type[self.current_room_x][self.current_room_y] = 2\n self.number_of_rooms += 1\n\n # print array to see if movements are correct\n # for row in self.room_type:\n # print(row)\n return False", "def new_tile(self):\r\n # replace with your code\r\n empty_square_lists = []\r\n for row in range(self._grid_height):\r\n for col in range(self._grid_width):\r\n if(self.get_tile(row, col) == 0):\r\n empty_square_lists.append((row, col))\r\n \r\n if len(empty_square_lists) == 0:\r\n return \"game over!\"\r\n \r\n random_cell = random.choice(empty_square_lists)\r\n random_cell_row = random_cell[0]\r\n random_cell_col = random_cell[1]\r\n \r\n values = [2] * 90 + [4] * 10\r\n value = random.choice(values)\r\n \r\n self.set_tile(random_cell_row, random_cell_col, value)", "def regenerate(self, random_state):\n super(WallsCorridor, self).regenerate(random_state)\n wall_x = variation.evaluate(\n self._wall_gap, random_state=random_state) - _CORRIDOR_X_PADDING\n wall_side = 0\n wall_id = 0\n while wall_x < self._current_corridor_length:\n wall_width = variation.evaluate(\n self._wall_width, random_state=random_state)\n wall_height = variation.evaluate(\n self._wall_height, random_state=random_state)\n wall_rgba = variation.evaluate(self._wall_rgba, random_state=random_state)\n if variation.evaluate(self._swap_wall_side, random_state=random_state):\n wall_side = 1 - wall_side\n\n wall_pos = [\n wall_x,\n (2 * wall_side - 1) * (self._current_corridor_width - wall_width) / 2,\n wall_height / 2\n ]\n wall_size = [_WALL_THICKNESS / 2, wall_width / 2, wall_height / 2]\n self._walls_body.add(\n 'geom',\n type='box',\n name='wall_{}'.format(wall_id),\n pos=wall_pos,\n size=wall_size,\n rgba=wall_rgba)\n\n wall_id += 1\n wall_x += variation.evaluate(self._wall_gap, random_state=random_state)", "def new_tile(self):\r\n # check if is zero or not\r\n new_tile_added = False\r\n # a list to 2 90% of the time and 4 10% of the time\r\n new_tile_list = [2,2,2,2,2,2,2,2,2,4]\r\n counter = 0\r\n while not new_tile_added:\r\n row_position = random.randrange(0,self.grid_height)\r\n col_position = random.randrange(0,self.grid_width)\r\n if self.grid[row_position][col_position] == 0:\r\n self.grid[row_position][col_position] = random.choice(new_tile_list)\r\n new_tile_added = True\r\n if counter > self.grid_width * self.grid_height:\r\n print 'you failed'\r\n break\r\n\r\n counter +=1", "def add_floor_corners(mesh, tile):\n corner_directions = [[BmeshFactory.W], [BmeshFactory.W, BmeshFactory.N], [BmeshFactory.N]]\n tile_below = tile.get_tile_in_direction([], -1)\n ceiling_below = False\n if tile_below is not None:\n ceiling_below = True\n for d in Direction:\n corner_directions[1][1] = d\n corner_directions[2][0] = d\n l = len(mesh.verts)\n add_corner = False\n try:\n if tile.terrain.extend_to:\n for e in corner_directions:\n neighbor_tile = tile.get_tile_in_direction(e)\n if neighbor_tile is None or (neighbor_tile.terrain.extend_to and not tile.terrain.make_edges_to):\n add_corner = True\n neighbor_tile = tile.get_tile_in_direction([d])\n if neighbor_tile is None or neighbor_tile.terrain.make_edges_to:\n mesh.from_object(bpy.data.objects['FLOOR_Cen'], bpy.context.scene)\n # for tiles that do not get extended to but help connect 
diagonals\n if tile.terrain.connect_diag and tile.terrain.make_edges_to:\n neighbor_tile1 = tile.get_tile_in_direction(corner_directions[0])\n neighbor_tile2 = tile.get_tile_in_direction(corner_directions[2])\n if neighbor_tile1.terrain.extend_to and neighbor_tile2.terrain.extend_to and \\\n not neighbor_tile1.terrain.make_edges_to and not neighbor_tile2.terrain.make_edges_to:\n add_corner = True\n mesh.from_object(bpy.data.objects['FLOOR_OD'], bpy.context.scene)\n except AttributeError:\n pass\n if add_corner:\n num_walls = 0\n for e in corner_directions:\n neighbor_tile = tile.get_tile_in_direction(e)\n if neighbor_tile is not None and neighbor_tile.terrain.terrain_type == TerrainType.WALL:\n num_walls += 1\n if num_walls < 3:\n mesh.from_object(bpy.data.objects['FLOOR_CORNER'], bpy.context.scene)\n if ceiling_below:\n BmeshFactory.add_ceiling_single_corner(mesh, tile_below, corner_directions, True)\n try:\n neighbor_tile = tile.get_tile_in_direction(corner_directions[0])\n diag_tile = tile.get_tile_in_direction(corner_directions[1])\n if neighbor_tile is None or neighbor_tile.terrain.make_edges_to:\n if diag_tile is None or not (diag_tile.terrain.extend_to and neighbor_tile.terrain.connect_diag)\\\n or not neighbor_tile.terrain.connect_diag or not diag_tile.terrain.connect_diag:\n mesh.from_object(bpy.data.objects['FLOOR_Cor0'], bpy.context.scene)\n neighbor_tile = tile.get_tile_in_direction(corner_directions[2])\n if neighbor_tile is None or neighbor_tile.terrain.make_edges_to:\n if diag_tile is None or not (diag_tile.terrain.extend_to and neighbor_tile.terrain.connect_diag)\\\n or not neighbor_tile.terrain.connect_diag or not diag_tile.terrain.connect_diag:\n mesh.from_object(bpy.data.objects['FLOOR_Cor2'], bpy.context.scene)\n except AttributeError:\n print(\"unexpected None Type Attribute Error\")\n elif tile.terrain.extend_to:\n mesh.from_object(bpy.data.objects['FLOOR_ID'], bpy.context.scene)\n bmesh.ops.rotate(mesh, verts=mesh.verts[l:len(mesh.verts)], cent=BmeshFactory.center, matrix=BmeshFactory.rot_dict[d])\n corner_directions[0][0] = d\n corner_directions[1][0] = d", "def new_tile(self):\n col = random.choice(range(self.grid_width))\n row = random.choice(range(self.grid_height))\n if self.grid[row][col] == 0:\n if random.random() >= 0.9:\n self.grid[row][col] = 4\n else:\n self.grid[row][col] = 2\n else:\n self.new_tile()", "def make_dungeon(self):\r\n # Total number of rooms\r\n n = self.__nx * self.__ny\r\n room_stack = []\r\n current_room = self.room_at(self.__ix, self.__iy)\r\n # Total number of visited rooms during maze construction\r\n nv = 1\r\n\r\n # iterate over all rooms of dungeon\r\n while nv < n:\r\n neighbors = self.find_neighbors(current_room)\r\n\r\n if not neighbors:\r\n # We've reached a dead end: backtrack.\r\n current_room = room_stack.pop()\r\n continue\r\n\r\n # Choose a random neighboring room and move to it\r\n direction, next_room = random.choice(neighbors)\r\n current_room.connect(next_room, direction)\r\n room_stack.append(current_room)\r\n current_room = next_room\r\n nv += 1", "def new_tile(self):\n while True:\n random_row = random.randrange(self._grid_height)\n random_column = random.randrange(self._grid_width)\n if self._grid[random_row][random_column] == 0:\n self._grid[random_row][random_column] = random.choice([2] * 9 + [4])\n break", "def new_tile(self):\r\n random_row = random.randrange(0, self._grid_height)\r\n random_col = random.randrange(0, self._grid_width)\r\n random_choice = random.choice([2]*90 + [4] * 10)\r\n \r\n if 0 in [num for 
elem in self._cells for num in elem]: \r\n if self._cells[random_row][random_col] == 0:\r\n self._cells[random_row][random_col] = random_choice \r\n else:\r\n self.new_tile()\r\n else:\r\n pass", "def gen_world(num_rows, num_cols):\n world = collections.deque()\n\n # Generate top perimeter.\n world.append([eg.ROCK] * num_cols)\n\n # In between top and bottom perimeters, generate a clean world.\n # (all non-perimeter cells are clear)\n for i in xrange(num_rows - 2):\n world.append([eg.ROCK] + ([eg.NONE] * (num_cols - 2)) + [eg.ROCK])\n\n # Generate bottom perimeter.\n world.append([eg.ROCK] * num_cols)\n\n # Apply red anthill in world.\n _randomly_apply_anthill(world, eg.RED)\n\n # Apply black anthill in world.\n _randomly_apply_anthill(world, eg.BLACK)\n\n # Apply food blocks in world.\n _randomly_apply_foodblob(world)\n\n # Apply rocks in world.\n _randomly_apply_rocks(world)\n\n world.appendleft([str(num_rows)])\n world.appendleft([str(num_cols)])\n\n return world", "def pygameMazeDraw (screen, arr, x, y, mobList, walls, monstors, exit, floors, entry):\n xLower = x-11\n yLower = y-11\n xUpper = x+11\n yUpper = y+11\n if xLower <= 0:\n xLower = 0\n xUpper = 21\n if yLower <= 0:\n yLower = 0\n yUpper = 21\n if xUpper >= len(arr):\n xUpper = len(arr)\n xLower = len(arr)-21\n if yUpper >= len(arr[0]):\n yUpper = len(arr[0])\n yLower = len(arr[0])-21\n #X and Y lower and upper are the bounds for the sprites being generated. This generates only the sprites that get displayed, so its more efficient\n for i in range((yLower), (yUpper), 1):\n for j in range((xLower), (xUpper), 1):\n if arr[i][j] == 9:\n florBlock = Flor(i,j)\n florBlock.add(floors)\n elif arr[i][j] == 5:\n wallBlock = Wal(i,j)\n wallBlock.add(walls)\n elif arr[i][j] == 1:\n florBlock = Flor(i,j)\n florBlock.add(floors)\n elif arr[i][j] == 6:\n entryBlock = Entry(i,j)\n entryBlock.add(entry)\n elif arr[i][j] == 7:\n exitBlock = Exit(i,j)\n exitBlock.add(exit)\n\n mobDraw(mobList, monstors)" ]
[ "0.6876265", "0.6516054", "0.6429965", "0.63483196", "0.621235", "0.60248286", "0.6014611", "0.6001514", "0.5975719", "0.59756225", "0.59673595", "0.59558344", "0.59392995", "0.5887236", "0.5887092", "0.587867", "0.58472425", "0.5824042", "0.58144677", "0.5728708", "0.5718844", "0.57073826", "0.5706921", "0.56227285", "0.56169623", "0.5607534", "0.55859697", "0.5584191", "0.556884", "0.5564827" ]
0.70530474
0
This function removes {amount} walls in the inner part of the level.
def remove_walls(level, amount):
    height, width = level.shape
    inner_level = level[1:height-1, 1:width-1]
    wall_pos = [(x+1, y+1) for (x,y) in get_positions(inner_level, WALL)]
    random.shuffle(wall_pos)
    for _ in range(min(len(wall_pos), amount)):
        pos = wall_pos.pop()
        level[pos] = EMPTY
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _remove_walls(current: GridCell, choice: GridCell):\n if choice.x > current.x:\n current.walls[1] = False\n choice.walls[0] = False\n elif choice.x < current.x:\n current.walls[0] = False\n choice.walls[1] = False\n elif choice.y > current.y:\n current.walls[3] = False\n choice.walls[2] = False\n elif choice.y < current.y:\n current.walls[2] = False\n choice.walls[3] = False", "def remove_wall(self, direction, other):\n\t\td_map = {\n\t\t\t(-1, 0): 3,\n\t\t\t(1 , 0): 0,\n\t\t\t(0 , -1): 1,\n\t\t\t(0 , 1): 2\n\t\t}\n\t\tself.walls[d_map[direction]] = False\n\t\tother.walls[3 - d_map[direction]] = False", "def add_walls(level, amount):\n placeable_pos = list(get_placeable_positions_inc_path(level))\n random.shuffle(placeable_pos)\n for _ in range(min(len(placeable_pos), amount)):\n pos = placeable_pos.pop()\n level[pos] = WALL\n assert is_solvable(level), \"Level isn't solvable. There's a bug in add_walls\"", "def remove_enemies(level, amount):\n enemy_pos = get_positions(level, ENEMY1)\n enemy_pos += get_positions(level, ENEMY2)\n enemy_pos += get_positions(level, ENEMY3)\n\n random.shuffle(enemy_pos)\n for _ in range(min(len(enemy_pos), amount)):\n pos = enemy_pos.pop()\n level[pos] = EMPTY", "def remove_wall(self, direction):\n assert direction\n if self._walls & direction != 0:\n self.walls &= ~direction", "def cull(self) -> None:\n for player in self.players:\n to_remove = [creature for creature in player.battle_line if creature.damage_taken >= creature.power()]\n for creature in to_remove:\n player.battle_line.remove(creature)\n to_remove.destroyed(self, creature)", "def die(self):\n self.pjs.bombermen.remove(self)\n for block in self.physics.blocks[self.stype]:\n if block == self.rects[0]:\n self.physics.blocks[self.stype].remove(block)", "def remove_blocks(draft):\n for symbol in draft.Blocks:\n if symbol.Name in blocks_to_delete:\n print(\"[-] %s, \\tdeleted\" % symbol.Name)\n symbol.delete()\n\n # for ball in draft.ActiveSheet.Balloons:\n if draft.Balloons:\n for ball in draft.Balloons:\n if ball.BalloonType == 7: # type 7 filter the triangle balloons.\n print(\"[-] %s, \\tdeleted\" % ball.Name)\n ball.Delete()\n else:\n pass", "def _removeBolt(self):\n for bolt in self._bolts:\n if (bolt.y-BOLT_HEIGHT/2)>GAME_HEIGHT:\n self._bolts.remove(bolt)\n self._key = False\n elif bolt.y + BOLT_HEIGHT < 0:\n self._bolts.remove(bolt)", "def empty_diff_walls():\n\t# 4 side walls are absorptive\n\troom_materials = [pra.Material(energy_absorption=0.1, scattering=None)] * 4\n\t# floor and ceiling are reflective\n\troom_materials.extend([pra.Material(energy_absorption=0.98, scattering=None)] * 2)\n\t\n\troom_faces = make_polygon(\n\t\tcentre=[0,0,2.5],\n\t\tradius=10,\n\t\theight=5,\n\t\tN=4,\n\t\trpy=[0,0,np.pi/4]\n\t)\n\n\t# create room\n\twalls = []\n\twalls.extend(create_walls(room_faces, room_materials))\n\n\troom = pra.Room(walls, fs=fs, max_order=3, ray_tracing=False, air_absorption=False)\n\n\troom.add_source([-5, 2, 2.])\n\troom.add_microphone([1, 0, 2.])\n\n\t# compute rir\n\troom.image_source_model()\n\troom.compute_rir()\n\n\treturn room", "def deleteBolts(self):\n a = self.get_bolts()\n for i in self.get_bolts():\n if i.y>GAME_HEIGHT:\n a.remove(i)\n self.set_plyrbolts(0)\n elif i.y<=-BOLT_HEIGHT:\n a.remove(i)", "def remove_duplicate_walls(self):\n wall_map = {}\n duplicates = []\n for cnt, thing in enumerate(self.things):\n if isinstance(thing, Wall):\n if not wall_map.has_key(thing.location):\n wall_map[thing.location] = True\n else:\n duplicates.append(cnt)\n for cnt, item 
in enumerate(duplicates):\n self.things.pop(item - cnt)", "def makeTheHouse(pos, blockTypeMain= wool, blockTypeSecond= wool,\n mainColor= wMagenta, secondColor= wWhite,\n myDoor= wDoorWood):\n\n ### FRONT (& BACK )###\n for Front in range(0,22,21): #This is the trick for the back copy...\n \n mc.setBlocks(pos.x-4, pos.y,pos.z+6+Front,\n pos.x+7, pos.y+9, pos.z+6+Front, blockTypeMain, mainColor)\n mc.setBlocks(pos.x-3, pos.y+1,pos.z+6+Front,\n pos.x+6, pos.y+8, pos.z+6+Front, blockTypeSecond, secondColor)\n # FRONT - Remove blocks\n # Small trick to remove the 6 empty space by a loop\n #[[x,y],[x,y],[x,y],...]\n for i in [[-1,+1],[5,+1],[+2,0],[-1,+5],[2,+5],[5,+5]]:\n mc.setBlocks(pos.x+i[0], pos.y+i[1],pos.z+6+Front,\n pos.x+i[0]-1, pos.y+i[1]+2, pos.z+6+Front, air)\n #let's put the Glasses (that's almost the same than remove actually...)\n for i in [[-1,+1],[5,+1],[-1,+5],[2,+5],[5,+5]]:\n mc.setBlocks(pos.x+i[0], pos.y+i[1],pos.z+6+Front,\n pos.x+i[0]-1, pos.y+i[1]+2, pos.z+6+Front, wGlass_Pane)\n # The door at Entrance\n mc.setBlock(pos.x+1, pos.y, pos.z+6+Front, myDoor,4)\n mc.setBlock(pos.x+1, pos.y+1, pos.z+6+Front, myDoor,8)\n mc.setBlock(pos.x+2, pos.y, pos.z+6+Front, myDoor,1)\n mc.setBlock(pos.x+2, pos.y+1, pos.z+6+Front, myDoor,8)\n \n # ************\n \n # FRONT - Small top\n mc.setBlocks(pos.x-3, pos.y+10,pos.z+6+Front,\n pos.x+6, pos.y+14, pos.z+6+Front, blockTypeSecond, secondColor)\n mc.setBlocks(pos.x-1, pos.y+10,pos.z+6+Front,\n pos.x+4, pos.y+13, pos.z+6+Front, blockTypeMain, mainColor)\n mc.setBlocks(pos.x, pos.y+10,pos.z+6+Front,\n pos.x+3, pos.y+12, pos.z+6+Front, blockTypeSecond, secondColor)\n # FRONT-Small top Remove Blocks\n mc.setBlocks(pos.x+1, pos.y+11,pos.z+6+Front,\n pos.x+2, pos.y+12, pos.z+6+Front, air)\n # small trick to remove as \"stairs\" - funny ? 
no ?\n        for i in range(0,10,1):\n            iy = i\n            if i > 5:\n                iy=9-i\n            #print i, iy\n            mc.setBlocks(pos.x-3+i, pos.y+11+iy,pos.z+6+Front,\n                        pos.x-3+i, pos.y+15, pos.z+6+Front, air)\n        # FRONT-Small Top put Glass\n        mc.setBlocks(pos.x+1, pos.y+11,pos.z+6+Front,\n                     pos.x+2, pos.y+12, pos.z+6+Front, wGlass_Pane)\n\n\n        # FRONT-Right & Left side \n        for i in range(0,19,18):\n            #print i\n            mc.setBlocks(pos.x-4+i, pos.y,pos.z+7+Front,\n                         pos.x-11+i, pos.y+8, pos.z+7+Front, blockTypeMain, mainColor)\n            mc.setBlocks(pos.x-5+i, pos.y+1,pos.z+7+Front,\n                         pos.x-10+i, pos.y+7, pos.z+7+Front, blockTypeSecond, secondColor)\n            # blocks removal\n            mc.setBlocks(pos.x-6+i, pos.y+1,pos.z+7+Front,\n                         pos.x-9+i, pos.y+7, pos.z+7+Front, wGlass_Pane)\n            # the line\n            mc.setBlocks(pos.x-5+i, pos.y+4,pos.z+7+Front,\n                         pos.x-11+i, pos.y+4, pos.z+7+Front, blockTypeMain, mainColor)\n            \n        #remove 2 extra columns\n        mc.setBlocks(pos.x-4, pos.y, pos.z+7,\n                     pos.x-4, pos.y+8, pos.z+7, air)\n        mc.setBlocks(pos.x-4+11, pos.y, pos.z+7,\n                     pos.x-4+11, pos.y+8, pos.z+7, air)\n\n\n        ### MAIN WALLS RIGHT & LEFT SIDE ###\n        for wall in range(0,26,25):\n            mc.setBlocks(pos.x-11+wall, pos.y, pos.z+8,\n                         pos.x-11+wall, pos.y+8, pos.z+28, blockTypeMain, mainColor)\n\n            mc.setBlocks(pos.x-11+wall, pos.y+1, pos.z+8,\n                         pos.x-11+wall, pos.y+7, pos.z+27, blockTypeSecond, secondColor)\n\n            for i in range(0,15,7):\n                mc.setBlocks(pos.x-11+wall, pos.y+1,pos.z+9+i,\n                             pos.x-11+wall, pos.y+7, pos.z+12+i, wGlass_Pane)\n                \n            # the 3 lines\n            mc.setBlocks(pos.x-11+wall, pos.y, pos.z+14,\n                         pos.x-11+wall, pos.y+8, pos.z+14, blockTypeMain, mainColor)\n            mc.setBlocks(pos.x-11+wall, pos.y, pos.z+21,\n                         pos.x-11+wall, pos.y+8, pos.z+21, blockTypeMain, mainColor)\n            mc.setBlocks(pos.x-11+wall, pos.y+4, pos.z+8,\n                         pos.x-11+wall, pos.y+4, pos.z+28, blockTypeMain, mainColor)\n\n\n            \n\n        #same \n        #removeBlocks(pos.x-1, pos.y+2, pos.z+6, 2, \n        pass", "def hole_cleanup(atom_list): \n joey = atom_list.copy()\n while (len(joey) != 0):\n for atom in joey:\n takein = [atom]\n source_update = takein.copy()\n check = 1\n while (check == 1):\n source = source_update.copy()\n source_update = []\n c = len(takein)\n for element in source:\n bonds = [bond[0] for bond in identify_bonds(element, joey) if bond[0] not in takein]\n for h in bonds:\n takein.append(h)\n source_update.append(h)\n if ((len(takein) == c) and (len(takein) < 6)):\n check = 0\n for element in takein:\n atom_list.remove(element)\n elif (len(takein) == c):\n check = 0\n for element in takein:\n joey.remove(element)\n return atom_list", "def _clear_wall_times(request):\n for run in request.runs:\n for tag in run.tags:\n for point in tag.points:\n point.ClearField(\"wall_time\")", "def remove_stuck(traj,size):\n from numpy import sqrt, where\n \n r_min = traj.groupby('particle').first()\n r_max = traj.groupby('particle').last()\n\n pos_columns = ['x','y']\n dist = r_min[pos_columns] - r_max[pos_columns]\n dist_eu = sqrt(dist['x']**2+dist['y']**2)\n\n index_remove = dist_eu.index[where(dist_eu < size)]\n \n traj_new = traj\n for i in range(len(index_remove)):\n traj_new = traj_new[(traj_new['particle'] != index_remove[i])]\n \n return traj_new", "def removeFullRows(self, fullRows):\n #for each full row starting from the top down to the bottom, check all obstacle blocks in order that way it doesn't go out of range\r\n for row in fullRows:\r\n for i in reversed(range(len(self.blocks))):\r\n #Remove each block that is on that full row\r\n if self.blocks[i].row == row:\r\n self.blocks.pop(i)\n #Move down each block that isn't on the row that is 
full\r\n elif self.blocks[i].row < row:\r\n self.blocks[i].move_down()", "def remove(self, value):\n tower = [None] * self.max_levels\n node = self.head\n for level in reversed(range(self.max_levels)):\n while node.next[level].value < value:\n node = node.next[level]\n tower[level] = node\n if value != tower[0].next[0].value:\n raise KeyError('Not Found')\n d = len(tower[0].next[0].next)\n for level in range(d):\n prev = tower[level]\n prev.width[level] += prev.next[level].width[level] - 1\n prev.next[level] = prev.next[level].next[level]\n for level in range(d, self.max_levels):\n tower[level].width[level] -= 1\n self.size -= 1", "def remove(self, pieces):\n for piece in pieces:\n self.board[piece.row][piece.col] = None\n if piece.get_player() is Player.white:\n self.num_white_pieces -= 1\n if piece.is_king():\n self.num_white_kings -= 1\n\n elif piece.get_player() is Player.black:\n self.num_black_pieces -= 1\n if piece.is_king():\n self.num_black_kings -= 1", "def deleteAllNeedlesFromScene(self):\n #productive #onButton\n profprint()\n while slicer.util.getNodes('python-catch-round_'+str(self.round)+'*') != {}:\n nodes = slicer.util.getNodes('python-catch-round_'+str(self.round)+'*')\n for node in nodes.values():\n slicer.mrmlScene.RemoveNode(node)", "def clear_radius(self, radius):\n s = self\n length = self.physics.len_blocks\n for i in range(-radius, radius + 1):\n for j in range(-radius, radius + 1):\n block = Rectangle(Vector(i * length, j * length), Vector(length * (i + 1), length * (j + 1)))\n if not(block in self.physics.unavailable_blocks):\n self.physics.unavailable_blocks.append(block)", "def block(self, tree, factors):\n # first we apply strip mining to the loops given in factors\n for x in range(len(factors)):\n\n # we may want to not block a particular loop, e.g. when doing Rivera/Tseng blocking\n if factors[x] > 1:\n tree = StencilCacheBlocker.StripMineLoopByIndex(x*2, factors[x]).visit(tree)\n\n # now we move all the outer strip-mined loops to be outermost\n for x in range(1, len(factors)):\n if factors[x] > 1:\n tree = self.bubble(tree, 2*x, x)\n\n return tree", "def removeWallCrossSection(self):\n if self._wallCrossSectionOverlayHandle is not None:\n self._wallCrossSectionOverlayHandle.remove()\n self._wallCrossSectionOverlayHandle = None\n\n self.overlayWallCrossSection = False", "def remove_tiles(self, num):\r\n drawntiles = [self.db.tilestring.pop(random.randrange(len(self.db.tilestring))) for _ in xrange(num)]\r\n return drawntiles", "def remove_token(self, amount):\n self.M -= amount", "def destroy_all():\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)", "def add_walls(self):\n for x in range(self.width + 1):\n if not self.some_things_at((x, 0), Wall):\n self.add_thing(Wall(), (x, 0))\n if not self.some_things_at((x, self.height), Wall):\n self.add_thing(Wall(), (x, self.height))\n\n for y in range(self.height + 1):\n if not self.some_things_at((0, y), Wall):\n self.add_thing(Wall(), (0, y))\n if not self.some_things_at((self.width, y), Wall):\n self.add_thing(Wall(), (self.width, y))\n #self.add_thing(Wumpus(),(1,3))\n #self.add_thing(Pit(),(3,3))\n #self.add_thing(Pit(),(3,1))\n #self.add_thing(Gold(),(2,3))\n #self.add_thing(Pit(),(4,4))", "def updatePositionAndClean(self):\n #X direction (num. 
rows) wall limit is the width of rectangular room\n #Y direction (num. cols) wall limit is the height of rectangular room\n #So (0,0) is in bottom LEFT corner--since rows start at zero at BOTTOM, not top\n #direction works as you would think, with east at 0 or 360 degrees, 90 degrees at north,\n #180 degrees at west, and 270 degrees at south direction\n\n #so each time unit, getNewPosition in SAME direction if you don't hit the wall\n #if you hit the wall, then get a new RANDOM direction and then recalculate new position,\n #making sure it is a valid position on grid, has not already been cleaned (tile visited)\n\n #So it makes no difference which direction you are moving in--the getNewPosition() function\n #figures out mathematically what the next position is, based on grid, and you just have to\n #determine whether you have hit the wall in that same direction--don't have to look at the\n #number the degrees or radians in that particular direction--just moving in same direction,\n #get next position, do you hit the wall, if so get new random direction, move that way, if you\n #won't hit a wall that way.\n\n #If you don't hit a wall when you calculate a new direction, but the tile is clean already, then\n #just go through the tiles and find one that is not clean yet, and move in the same direction.\n \n robotPos = self.getRobotPosition()\n posx = robotPos.getX()\n posy = robotPos.getY()\n posx = math.floor(posx)\n posy = math.floor(posy)\n #First check if this position is clean:\n #if (self.room.isTileCleaned(posx,posy) == False):\n #then clean this tile!\n #self.room.cleanTileAtPosition(robotPos)\n #Now see where to move robot next on floor and clean that tile if it is not clean\n #So first try moving in same direction--will you hit a wall?\n newPos = self.position.getNewPosition(self.direction,self.speed)\n newPosx = newPos.getX()\n newPosy = newPos.getY()\n newPosx = math.floor(newPosx)\n newPosy = math.floor(newPosy)\n if (self.room.isPositionInRoom(newPos)) and (self.room.isTileCleaned(newPosx,newPosy) == False):\n #position is in room AND the tile has NOT been visited yet--since it's still DIRTY\n #Should NOT have to check whether you hit a wall, since new position is in room\n #so NO NEW DIRECTION needed yet--move in SAME direction\n self.setRobotPosition(newPos)\n self.room.cleanTileAtPosition(newPos)\n #print \"Moved in SAME DIRECTION I was moving in last time, direction = \" + str(self.direction)\n else: # (self.room.isPositionInRoom(newPos) == False) or (self.room.isTileCleaned(newPosx, newPosy) == True):\n # either HIT WALL -- OR -- tile already cleaned -- so calculate new RANDOM direction\n\n #NOTE: this works until you are surrounded by tiles that have no next step tile that has not already been\n #cleaned?\n #?? 
think a problem is that if all surrounding tiles are already clean, then, in that case,\n #you can get stuck in situation where you keep recalculating a new random direction, but when you take a step,\n #all the next tiles have already been cleaned, and you get stuck in a loop, so in this case, you must\n #not recalculate a new direction, but rather keep going in same direction until you find a tile not clean,\n #and jump to that tile instead, and go from there.\n #So find this case--see if that corrects this issue!\n \n keepTryingNewDirection = True\n while (keepTryingNewDirection == True):\n self.direction = random.randrange(0,359) #get new random direction\n newPos = self.position.getNewPosition(self.direction,self.speed) #get new next position step with new direc.\n newPosx = newPos.getX()\n newPosy = newPos.getY()\n newPosx = math.floor(newPosx)\n newPosy = math.floor(newPosy)\n if (self.room.isPositionInRoom(newPos)) and (self.room.isTileCleaned(newPosx,newPosy) == False):\n #new position in new direction is in room, and the tile has not been cleaned yet\n #so new direction and new tile to clean found!\n self.setRobotPosition(newPos)\n self.room.cleanTileAtPosition(newPos)\n #print \"Moved in NEW DIRECTION I was moving in last time, direction = \" + str(self.direction)\n keepTryingNewDirection = False\n elif (self.room.isPositionInRoom(newPos) == False):\n #new position in new direction NOT in room -- try again!\n #print \"new direction found a new position not in room --hit wall--try again! direction = \" + str(self.direction)\n continue\n else:\n #print \"new direction produced new position in room but tile already clean--try again?! direction = \" + str(self.direction)\n #print \"first check to see if all tiles have already been cleaned.\"\n #?? Any other checks needed here? list of tiles visited? is this really needed??\n #calculate list of cells not clean yet\n tilesCleaned = []\n allSurroundingTilesClean = False\n foundTileUnclean = False\n saveWidth = 0\n saveHeight = 0\n for i in range(0,self.room.width):\n for j in range(0,self.room.height):\n if (self.room.isTileCleaned(i,j) == False):\n saveWidth = i\n saveHeight = j\n foundTileUnclean = True\n else:\n #print \"appending to tiles cleaned: tile: i = \" + str(i) + \" j = \" + str(j)\n tilesCleaned.append((i,j)) #make list of tiles cleaned\n if (foundTileUnclean == True):\n #print \"not all tiles are clean!--start here rather than getting new direc. i = \" + str(saveWidth) + \" j = \" + str(saveHeight)\n newPos = Position(saveWidth,saveHeight)\n self.setRobotPosition(newPos)\n self.room.cleanTileAtPosition(newPos)\n #print \"Found new tile that was not clean! current direc. \" + str(self.direction)\n #print \"tile location x = \" + str(saveWidth) + \" y = \" + str(saveHeight)\n keepTryingNewDirection = False\n else:\n keepTryingNewDirection = False\n #print \"all tiles clean! stop cleaning!-- do not look for new direction! should be done.\"\n\n #for tile in tilesCleaned:\n #print tile", "def remove_numbers(self):\n for i in range(len(self.board.board[0])):\n while self.board.board[i].count(0) < 6:\n random_val = random.randint(0, 8)\n self.board.update_board((i, random_val), 0)", "def remove_wall(self, c, d):\n\n # Update the adjacency list for cell.\n self._remove_wall_from_adj_list(c, d)\n\n # Update the adjacency list for the cell on the other side\n # of the wall.\n self._remove_wall_from_adj_list(self.get_neighbor(c, d),\n opposite(d))" ]
[ "0.6669705", "0.62802994", "0.61957467", "0.60948914", "0.59552044", "0.58271617", "0.58098435", "0.5787096", "0.57141244", "0.5677796", "0.5673484", "0.54642314", "0.5412543", "0.54086787", "0.5403356", "0.5378274", "0.537228", "0.5346544", "0.5341223", "0.5317386", "0.5316431", "0.53129596", "0.5298249", "0.5297842", "0.52895546", "0.5285841", "0.5275277", "0.52741563", "0.52693254", "0.5263971" ]
0.8167338
0
Test the correctness of a GPTree against a dataset.
def eval_tree(tree: GPTree, dataset: Iterable) -> list:
    results = []
    for data in zip(*dataset):
        try:
            output = tree.compute_tree(data[0])
            results.append(
                0 if output == data[1] else 1
            )  # right or wrong, but no error.
        except Exception:
            results.append(2)  # Fails to run.
    return results
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test(self, dataset) -> None:\n raise NotImplementedError()", "def test_Tree():", "def test(tree, data, name, print_result=True):\n correctness = 0\n\n for test_dict in data:\n current_tree = tree\n while type(current_tree) is not TreeLeaf:\n try:\n current_tree = current_tree.children[test_dict['object'][current_tree.root]]\n except KeyError as e:\n # print('The tree could not pass the test due to a key error:\\n\\t'\n # 'Tried to find key %s, but available keys where: %s ' % (e, current_tree.children.keys()))\n break\n\n if current_tree.root == test_dict['class']:\n correctness += 1\n\n if print_result:\n print('Test Result for \"%s\": %i of %i (%.2f %%) correct classifications' % (name, correctness, len(data), (correctness / float(len(data))) * 100))\n return correctness / float(len(data)) * 100", "def test_check_tree_subset(self):\r\n\r\n fasta_labels = ['seq1_1', 'seq1_2', 'seq2_3', 'seq3_4']\r\n\r\n actual_subset_results = check_tree_subset(fasta_labels,\r\n self.sample_tree_3tips_fp)\r\n\r\n # Should find all and give True result\r\n\r\n self.assertEqual(actual_subset_results, True)\r\n\r\n # Should also get same results with 5 tip tree\r\n\r\n fasta_labels = ['seq1_1', 'seq1_2', 'seq2_3', 'seq3_4']\r\n\r\n actual_subset_results = check_tree_subset(fasta_labels,\r\n self.sample_tree_5tips_fp)\r\n\r\n # Should find all and give True result\r\n\r\n self.assertEqual(actual_subset_results, True)\r\n\r\n # Change two of the fasta labels to not match tree tips\r\n\r\n fasta_labels = ['seq1_1', 'seqX_2', 'seq2_3', 'seqY_4']\r\n\r\n actual_subset_results = check_tree_subset(fasta_labels,\r\n self.sample_tree_5tips_fp)\r\n\r\n # Should find seqX and seqY as not being a subset\r\n\r\n self.assertEqual(actual_subset_results, ['seqX', 'seqY'])", "def test_check_tree_exact_match(self):\r\n\r\n fasta_labels = ['seq1_1', 'seq1_2', 'seq2_3', 'seq3_4']\r\n\r\n actual_subset_results = check_tree_exact_match(fasta_labels,\r\n self.sample_tree_3tips_fp)\r\n\r\n # Should find all and give True, True result\r\n\r\n self.assertEqual(actual_subset_results, [True, True])\r\n\r\n # Should get tips not found in fasta labels with 5 tip tree\r\n\r\n fasta_labels = ['seq1_1', 'seq1_2', 'seq2_3', 'seq3_4']\r\n\r\n actual_subset_results = check_tree_exact_match(fasta_labels,\r\n self.sample_tree_5tips_fp)\r\n\r\n # Should find all and give True result\r\n\r\n self.assertEqual(actual_subset_results, [True, ['seq5', 'seq4']])\r\n\r\n # Change two of the fasta labels to not match tree tips\r\n\r\n fasta_labels = ['seq1_1', 'seqX_2', 'seq2_3', 'seqY_4']\r\n\r\n actual_subset_results = check_tree_exact_match(fasta_labels,\r\n self.sample_tree_5tips_fp)\r\n\r\n # Should find seqX and seqY as not being a subset\r\n\r\n self.assertEqual(actual_subset_results, [['seqX', 'seqY'],\r\n ['seq3', 'seq5', 'seq4']])", "def is_valid(self, dataset):\n pass", "def check_data(dataname):\n oname = data_path(dataname, \"org\")\n data = load_data(data_path(dataname, \"py\"))\n root = load(oname)\n\n for (i, (node, kwds)) in enumerate(zip(root[1:], data)):\n for key in kwds:\n val = value_from_data_key(node, key)\n eq_(kwds[key], val,\n msg=('check value of {0}-th node of key \"{1}\" from \"{2}\".'\n '\\n\\nParsed:\\n{3}\\n\\nReal:\\n{4}'\n ).format(i, key, dataname, val, kwds[key]))\n\n eq_(root.env.filename, oname)", "def test(self, dataset):\n model_path = os.path.join(self.check_point, 'model.pt')\n if not os.path.exists(model_path):\n raise Exception('Cannot find %s.' 
%model_path)\n \n self.model = torch.load(model_path)\n _, _, stats, outputs = self._check_PSNR(dataset, is_test=True)\n return stats, outputs", "def test_valid_dataset():\n train = ((\"Lorem ipsum dolor sit amet\", 3, 4.5),\n (\"Sed ut perspiciatis unde\", 5, 5.5))\n val = ((\"ipsum quia dolor sit\", 10, 3.5),)\n test = ((\"Ut enim ad minima veniam\", 100, 35),)\n\n t = TabularDataset(train, val, test)\n\n assert len(t) == 4\n assert len(t.train) == 2\n assert len(t.val) == 1\n assert len(t.test) == 1\n\n def check(d, t):\n for i, tu in enumerate(d):\n v0, v1, v2 = tu\n assert t[i][0] == v0\n assert t[i][1] == v1\n assert t[i][2] == v2\n\n check(train, t.train)\n check(val, t.val)\n check(test, t.test)", "def check_nodes_match_nptg_data(cls, gdf, named_area):\n #\n check_name = \"check_nodes_match_nptg_data\"\n # list of all geographic admin areas\n admin_areas = [\n \"Aberdeen\",\n \"Aberdeenshire\",\n \"Angus\",\n \"Argyll & Bute\",\n \"Bath & North East Somerset\",\n \"Bedford\",\n \"Blackburn with Darwen\",\n \"Blackpool\",\n \"Blaenau Gwent\",\n \"Bournemouth\",\n \"Bracknell Forest\",\n \"Bridgend\",\n \"Brighton and Hove\",\n \"Bristol\",\n \"Buckinghamshire\",\n \"Caerphilly\",\n \"Cambridgeshire\",\n \"Cardiff\",\n \"Carmarthenshire\",\n \"Central Bedfordshire\",\n \"Ceredigion\",\n \"Cheshire East\",\n \"Cheshire West & Chester\",\n \"Clackmannanshire\",\n \"Conwy\",\n \"Cornwall\",\n \"Cumbria\",\n \"Darlington\",\n \"Denbighshire\",\n \"Derby\",\n \"Derbyshire\",\n \"Devon\",\n \"Dorset\",\n \"Dumfries & Galloway\",\n \"Dundee\",\n \"Durham\",\n \"East Ayrshire\",\n \"East Dunbartonshire\",\n \"East Lothian\",\n \"East Renfrewshire\",\n \"East Riding of Yorkshire\",\n \"East Sussex\",\n \"Edinburgh\",\n \"Essex\",\n \"Falkirk\",\n \"Fife\",\n \"Flintshire\",\n \"Glasgow\",\n \"Gloucestershire\",\n \"Greater London\",\n \"Greater Manchester\",\n \"Gwynedd\",\n \"Halton\",\n \"Hampshire\",\n \"Hartlepool\",\n \"Herefordshire\",\n \"Hertfordshire\",\n \"Highland\",\n \"Inverclyde\",\n \"Isle of Anglesey\",\n \"Isle of Wight\",\n \"Kent\",\n \"Kingston upon Hull\",\n \"Lancashire\",\n \"Leicester\",\n \"Leicestershire\",\n \"Lincolnshire\",\n \"Luton\",\n \"Medway\",\n \"Merseyside\",\n \"Merthyr Tydfil\",\n \"Middlesbrough\",\n \"Midlothian\",\n \"Milton Keynes\",\n \"Monmouthshire\",\n \"Moray\",\n \"Neath Port Talbot\",\n \"Newport\",\n \"Norfolk\",\n \"North Ayrshire\",\n \"North East Lincolnshire\",\n \"North Lanarkshire\",\n \"North Lincolnshire\",\n \"North Somerset\",\n \"North Yorkshire\",\n \"Northamptonshire\",\n \"Northumberland\",\n \"Nottingham\",\n \"Nottinghamshire\",\n \"Orkney Islands\",\n \"Oxfordshire\",\n \"Pembrokeshire\",\n \"Perth & Kinross\",\n \"Peterborough\",\n \"Plymouth\",\n \"Poole\",\n \"Portsmouth\",\n \"Powys\",\n \"Reading\",\n \"Redcar & Cleveland\",\n \"Renfrewshire\",\n \"Rhondda Cynon Taff\",\n \"Rutland\",\n \"Scottish Borders\",\n \"Shetland Islands\",\n \"Shropshire\",\n \"Slough\",\n \"Somerset\",\n \"South Ayrshire\",\n \"South Gloucestershire\",\n \"South Lanarkshire\",\n \"South Yorkshire\",\n \"Southampton\",\n \"Southend-on-Sea\",\n \"Staffordshire\",\n \"Stirling\",\n \"Stockton-on-Tees\",\n \"Stoke-on-Trent\",\n \"Suffolk\",\n \"Surrey\",\n \"Swansea\",\n \"Swindon\",\n \"Telford & Wrekin\",\n \"Thurrock\",\n \"Torbay\",\n \"Torfaen\",\n \"Tyne & Wear\",\n \"Vale of Glamorgan\",\n \"Warrington\",\n \"Warwickshire\",\n \"West Berkshire\",\n \"West Dunbartonshire\",\n \"West Lothian\",\n \"West Midlands\",\n \"West Sussex\",\n \"West 
Yorkshire\",\n \"Western Isles\",\n \"Wiltshire\",\n \"Windsor & Maidenhead\",\n \"Wokingham\",\n \"Worcestershire\",\n \"Wrexham\",\n \"York\",\n ]\n\n # TODO get the admin areas from the nodes file, compare against the list of\n # area names\n # nptg values\n adjanct_locals = etl.load_gazette_adjanct_localities()\n admin_codes = etl.naptan_gazette_admin_area_codes()\n districts = etl.naptan_gazette_districts()\n localities = etl.naptan_gazette_localities()\n locality_alternate = etl.load_gazette_localities_alternative_names()\n locality_hierarch = etl.load_gazette_locality_hierarchy()\n plusbusmap = etl.load_gazette_plusbus_mapping()\n plusbuszone = etl.load_gazette_plusbus_zones()\n regions = etl.naptan_gazette_region()\n\n # node values\n node_locs = gdf[\"LocalityName\"].unique()\n # get nptg localities,\n nptg_locs = localities[\"LocalityName\"].unique()\n # TODO filter to nptg to nodes, get all the localities in nptg for\n # this area\n # get the unique area code for this admin area.\n area_admin_code = node_locs[\"AdminCode\"].unique()\n # check the area admin code in the nptg file for the corresponding\n # localities.\n missing_localities = nptg_locs[~nptg_locs.AdminCode.isin(area_admin_code)]\n # check if locality is\n df3 = gaz_locs[gaz_locs.LocalityName.isin(gdf.LocalityName)]\n # get all the localities\n # TODO list the localities in nptg but not nodes\n\n # TODO plot sample on map\n # TODO write unused localities in given area to file.\n report_failing_nodes(\n gdf,\n check_name,\n )\n return", "def testTree(self, valid):\n return testTreeF(self, valid)", "def test(self, dataset):\n\n outputs, errors = self.use(dataset)\n\n ## PUT CODE HERE ##\n # I put the code in the \"use\" function, seems better :-)\n\n return outputs, errors", "def test(self, dataset_path: str, gscale=False):\r\n\r\n self.proto_test(dataset_path, self.name, self.predict, gscale)", "def test_train_dataset(self):\n classifiers, estimates =\\\n ada_boost.train_dataset(self.larger_matrix,\n self.larger_class_labels,\n 9)\n expected = [\n {'alpha': 0.6931471805599453,\n 'dim': 0,\n 'inequal': 'lt',\n 'threshold': 1.3},\n {'alpha': 0.9729550745276565,\n 'dim': 1,\n 'inequal': 'lt',\n 'threshold': 1.0},\n {'alpha': 0.8958797346140273,\n 'dim': 0,\n 'inequal': 'lt',\n 'threshold': 0.90000000000000002}\n ]\n self.assertEqual(classifiers, expected)", "def test(self, dataset): \n predictions = np.zeros(len(dataset), int)\n \n accuracy = self.random_forest.score(dataset[:,:-1], dataset[:,-1]) # Predict and compute accuracy.\n predictions = self.predict(dataset[:,:-1]) # Predict and return list of predictions.\n \n return predictions, accuracy", "def test(self, dataset):\n model_path = os.path.join(self.check_point, 'model.pt')\n if not os.path.exists(model_path):\n raise Exception('Cannot find %s.' 
% model_path)\n\n self.model = torch.load(model_path)\n print(self.model)\n model_parameters = filter(lambda p: p.requires_grad, self.model.parameters())\n params = sum([np.prod(p.size()) for p in model_parameters])\n print(1.0 * params / (1000 * 1000))\n _, _, stats, outputs, names = self._check_PSNR(dataset, is_test=True)\n return stats, outputs, names", "def check(self, data):# ->bool:\r\n return check(self.gd, data)", "def test_decision_tree(train,test,maxnodes=None):\n tree = DecisionTree()\n tree.maxnodes = maxnodes\n errors = tree.learn(train,'label')\n print \"Decision tree makes\",errors,\"errors\"\n print \"Depth\",tree.depth(),\"nodes\",tree.numNodes()\n if tree.numNodes() < 100:\n tree.pprint()\n if errors > 0:\n print \"Training errors:\"\n for id,e in enumerate(train.entries):\n res = tree.predict(e[:-1])\n if res != e[-1]:\n if len(e[:-1]) > 10:\n print \" Error on\",id,\"prediction\",res\n else:\n print \" Error on\",e[:-1],\"prediction\",res\n print \"Testing error:\"\n tp,tn,fp,fn = 0,0,0,0\n for e in test.entries:\n res = tree.predict(e[:-1])\n if res and e[-1]:\n tp += 1\n elif res and not e[-1]:\n fp += 1\n elif not res and e[-1]:\n fn += 1\n else:\n tn += 1\n Ntest = len(test.entries)\n print \"True +: %g, True -: %g\"%(float(tp)/Ntest,float(tn)/Ntest) \n print \"False -: %g, False +: %g\"%(float(fn)/Ntest,float(fp)/Ntest)\n print \"Overall error: %g\"%(float(fn+fp)/Ntest,)", "def test_branches_and_nodes_regression(\n traces, areas, snap_threshold, allowed_loops, already_clipped, data_regression\n):\n branches, nodes = branches_and_nodes.branches_and_nodes(\n traces, areas, snap_threshold, allowed_loops, already_clipped\n )\n\n branches_value_counts = branches[general.CONNECTION_COLUMN].value_counts().to_dict()\n nodes_value_counts = nodes[general.CLASS_COLUMN].value_counts().to_dict()\n\n data_regression.check({**branches_value_counts, **nodes_value_counts})", "def mutate_compare(\n tree: GPTree, num_mutation: int, dataset: tuple\n) -> tuple[float, float]:\n corrected, wrong_answers = 0, 0\n for j in range(num_mutation):\n tree_copy = deepcopy(tree)\n tree_copy.mutation()\n eval_result = eval_tree(tree_copy, dataset)\n if list_equal(eval_result, [0] * len(dataset[1])):\n corrected += 1\n else:\n wrong_answers += not (2 in eval_result)\n\n return corrected / num_mutation, wrong_answers / num_mutation", "def test_validation_class(self):\n\n for data in ('tbldata', 'dihedraldata', 'rdcdata', 'danidata', 'tensordata', 'pcsdata'):\n v = self.web.query_nodes(key=data)\n\n if not v.empty():\n self.assertTrue(v.validate())", "def check(self, dgraph, **params):\n raise NotImplementedError", "def test_validate_valid_person(self):\r\n assert self.person_tree != 0", "def test_dataset_iter(train_dataset):\n for i, ex in enumerate(train_dataset):\n assert np.array_equal(ex, train_dataset[i])", "def test_case8(self):\n\n graph = BipartiteGraph.createRandomGraph(self.students,self.supervisors)\n\n solution = Solution(graph)\n \n result = solution.isValid(self.students,self.supervisors)\n \n self.assertTrue(result)", "def evaluate(self, dataset):\n\t\tpass", "def test(self,dataset):\n outputs = self.use(dataset)\n \n costs = np.ones((len(outputs),1))\n # Compute classification error\n for xy,pred,cost in zip(dataset,outputs,costs):\n x,y = xy\n if y == pred[0]:\n cost[0] = 0\n\n return outputs,costs", "def validate(self, sess, valid_dataset):\n return self.test(sess, valid_dataset)", "def test_compare_old_to_new_method_to_create_trees(self):\n nodes = 
util.generate_sequence_of_points(2, 2)\n tree1 = kdtree.createNewTree(nodes)\n kdtree.visualize(tree1)\n \n sel_axis = (lambda axis: axis)\n tree2 = kdtree.createNewTree([[0.5, 0.5]],axis = 0, sel_axis= sel_axis)\n tree2.split2([0.25, 0.5], axis = 1)\n tree2.split2([0.75, 0.5], axis = 1)\n \n #left\n tree2.split2([0.25, 0.25], axis = 0, sel_axis = sel_axis)\n tree2.split2([0.25, 0.75], axis = 0, sel_axis = sel_axis)\n \n #right\n tree2.split2([0.75, 0.25], axis = 0, sel_axis = sel_axis)\n tree2.split2([0.75, 0.75], axis = 0, sel_axis = sel_axis)\n \n kdtree.visualize(tree2)\n \n for n in zip(kdtree.level_order(tree1), kdtree.level_order(tree2)):\n self.assertEqual(n[0].data, n[1].data, \"elements not equal\")\n \n if n[0].data is not None and n[1].data is not None:\n self.assertEqual(n[0].axis, n[1].axis, \"elements not equal\")", "def test_validate_valid_crisis(self):\r\n assert self.crisis_tree != 0" ]
[ "0.6213084", "0.6166914", "0.615361", "0.61207813", "0.5985466", "0.58801496", "0.58662474", "0.58526355", "0.5805036", "0.57705605", "0.5722015", "0.56628764", "0.56300074", "0.5613361", "0.5591549", "0.55806154", "0.55714977", "0.5570216", "0.5565086", "0.5548838", "0.5531335", "0.5510529", "0.54999745", "0.54989415", "0.54883504", "0.54834235", "0.548213", "0.5446263", "0.54200757", "0.54032725" ]
0.6924719
0
Mutate (a copy of) the tree num_mutation times, and return the percentage of successful mutations.
def mutate_compare(
    tree: GPTree, num_mutation: int, dataset: tuple
) -> tuple[float, float]:
    corrected, wrong_answers = 0, 0
    for j in range(num_mutation):
        tree_copy = deepcopy(tree)
        tree_copy.mutation()
        eval_result = eval_tree(tree_copy, dataset)
        if list_equal(eval_result, [0] * len(dataset[1])):
            corrected += 1
        else:
            wrong_answers += not (2 in eval_result)

    return corrected / num_mutation, wrong_answers / num_mutation
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def evaluate ( self, mutation ) :\n\t\tif isinstance( mutation , Mutation ):\n\t\t\tmutation = mutation.to_int()\n\n\t\tassert type( mutation ) is int , 'mutation must work out to a int or have an internal representaiton of int'\n\n\n\t\tfor phenotype, region in self.phenotypes.items():\n\n\t\t\tif mutation > region[0] and mutation < region[1]:\n\t\t\t\t# print phenotype\n\t\t\t\tself.counts[ phenotype ] += 1", "def mutate(self):\n #inlined 'flip_coin' for speed\n if prng.random() < self.mutation_rate:\n self._value = self.mutator.evaluate(self)\n return 1\n return 0", "def mutate(self):\n if self.mutator.evaluate(self):\n self.evaluated = 0\n return 1\n return 0", "def num_mutations(self):\n return sum(len(site.mutations) for site in self.sites())", "def _get_mutation_amount(self):\n return self._get_sign() * self._get_number()", "def num_mutations(self, this_node=None, path=()):\n\n if this_node is None:\n this_node = self.root\n self.total_num_mutations = 0\n\n if isinstance(path, tuple):\n path = list(path)\n\n for edge in self.edges_from(this_node.id):\n next_node = self.nodes[edge.dst]\n self.total_num_mutations += next_node.num_mutations()\n\n if edge.src != self.root.id:\n path.append(edge)\n\n self.num_mutations(next_node, path)\n\n # finished with the last node on the path, pop it off the path stack.\n if path:\n path.pop()\n\n return self.total_num_mutations", "def mutate(self, tree):\n\n assert isinstance(tree, ast.AST)\n\n tree = copy.deepcopy(tree)\n\n if not self.source:\n self.source = all_statements(tree)\n\n for node in ast.walk(tree):\n node.mutate_me = False\n\n node = self.node_to_be_mutated(tree)\n node.mutate_me = True\n\n self.mutations = 0\n\n tree = self.visit(tree)\n\n if self.mutations == 0:\n warnings.warn(\"No mutations found\")\n\n ast.fix_missing_locations(tree)\n return tree", "def update_tree(root, executed_acts, total_rew):\n root.value = max(total_rew, root.value)\n root.visits += 1\n new_nodes = 0\n\n node = root\n for step, act in enumerate(executed_acts):\n if act not in node.children:\n node.children[act] = Node()\n new_nodes += 1\n node = node.children[act]\n node.value = max(total_rew, node.value)\n node.visits += 1\n\n return new_nodes", "def test_mutation(self):\n genotype = '0|0|2|0|0|2|0|0 1|0|0|1|1|0|0|0 0|1|0|0|0|0|2|1--1 7'\n search_space = {'dil_conv_3x3', 'dil_conv_5x5', 'dil_conv_7x7',\n 'skip_connect', 'clinc_3x3', 'clinc_7x7', 'avg_pool_3x3', 'max_pool_3x3'}\n\n mutator = Mutations(search_space, prob_mutation=0.8,\n prob_resize=0.99, prob_swap=0.99)\n mutated_g = mutator(genotype)\n mutated_g = mutator(mutated_g)\n mutated_g = mutator(mutated_g)\n a, s, d = get_conf(mutated_g)\n print('---->', mutated_g)\n self.assertGreaterEqual(10, d)\n self.assertTrue(s in (0, 1))\n a = torch.tensor(a)\n d = int((a.shape[0]*2)**.5)\n start = 0\n for i in range(d):\n end = int((i+1)*(i+2)/2)\n self.assertTrue(a[start:end, :].sum() > 0)\n start = end", "def num_mutations(self):\n return len(self.fuzz_library)", "def __mutate(self, chromosomes, mutation_probability):\n\n for chromosome in chromosomes:\n for i in range(self.chromosome_size):\n if random.randint(1, 100) <= mutation_probability:\n logging.getLogger().debug(\n \"---> Mutation in Chromosome \" + str(\n chromosome.chromosome_id) + \"in gene \" + str(i)\n + \" <---\")\n chromosome.genes[i] = random.choice(self.gene_pool)", "def mutate(self, number_of_mutations):\n self.mutated.clear()\n mutations = []\n for i in range(number_of_mutations+1):\n old_gene = random.choice(self.genes)\n while 
old_gene in mutations:\n old_gene = random.choice(self.genes)\n # print(self.max_time)\n old_gene.start_time = random.choice(range(self.max_time - old_gene.finish))\n self.mutated.append(self.genes.index(old_gene))", "def percent_passing(self) -> float:\n num_meas = Enumerable(self.mlc_meas).select_many(lambda m: m.passed).count()\n num_pass = (\n Enumerable(self.mlc_meas)\n .select_many(lambda m: m.passed)\n .count(lambda p: bool(p) is True)\n )\n return float(100 * num_pass / num_meas)", "def update_tactic_usage(self):\n usage = {e: 0 for e in self.tactics.all_tactics}\n count = 0\n for gene in self.population:\n for tactic in gene.chromosome:\n count += 1\n try:\n usage[tactic] += 1\n except KeyError:\n usage[tactic] = 1\n for tactic in usage:\n usage[tactic] = usage[tactic]/count\n self.proof.tactics.usage = usage", "def count_total_mutations(seqs, database):\n total = 0\n for seq in seqs:\n total += count_minimum_mutations(seq, database)\n return total", "def update(self, result, action_taken):\n self.visits += 1\n self.wins += result\n\n if self.parent is None:\n return\n\n for key in self.parent.children:\n p = self.parent.children[key].player\n for a in action_taken[p]:\n if a == key:\n self.parent.children[key].visits_amaf += 1\n self.parent.children[key].wins_amaf += result", "def recalculate_transition(self, i, j, corpus):\n num = sum(sum(self.p(i, j, t, O) for t in xrange(len(O)-1)) for O in corpus)\n denom = sum(sum(self.gamma(i, t, O) for t in xrange(len(O)-1)) for O in corpus)\n\n return num / denom", "def percent_updated(self):\n return self.percent_complete - self.previous_percent_complete", "def fitness(self):\r\n history = self.history\r\n return sum(history) / len(history)", "def mutations_time(self):\n return self._mutations_time", "def modularity():\n\n q = 0.0\n for idx in range(0, node_count):\n if _tot[idx] > 0.0:\n q += (_in[idx] / m - math.pow(_tot[idx] / m, 2))\n return q", "def N_genes_with_multiple_mutants(self):\n return len(self.genes_with_multiple_mutants)", "def _test_mutation(self, individual, mutation, mutation_check):\n ind_clone = self.gama._toolbox.clone(individual)\n new_ind, = mutation(ind_clone, self.gama._pset)\n\n applied, message = mutation_check(individual, new_ind)\n if not applied:\n self.fail(message)\n\n # Should be able to compile the individual, will raise an Exception if not.\n compile_individual(new_ind, self.gama._pset)", "def get_ancestral_mutation_count(tree, alphabet):\n alphabet_to_index = {a:ai for ai,a in enumerate(alphabet)}\n L = tree.seq_len\n q = len(alphabet)\n positions = np.arange(L)\n n_ija = np.zeros((q,q,L), dtype=int)\n T_ia = np.zeros((q,L),dtype=float)\n for n in tree.tree.get_nonterminals():\n parent_profile = np.zeros(L, dtype=int)\n for ai,a in enumerate(alphabet):\n parent_profile[n.ancestral_sequence==a] = ai\n\n for c in n:\n child_profile = np.zeros(L, dtype=int)\n for ai,a in enumerate(alphabet):\n child_profile[c.ancestral_sequence==a] = ai\n\n T_ia[parent_profile,positions] += 0.5*c.branch_length\n T_ia[child_profile,positions] += 0.5*c.branch_length\n\n n_ija[child_profile, parent_profile, positions] += (1-(parent_profile==child_profile))\n\n\n return n_ija, T_ia, tree.tree.root.ancestral_sequence", "def member_mutation(member, MutationPct):\n ran_spot = random.randint(0, int(len(member)) - 1)\n\n if MutationPct:\n if member[ran_spot] == 1:\n member[ran_spot] = 0\n else:\n member[ran_spot] = 1", "def fraction_completed(self):\n return sum(self._chunk_done.values()) / len(self.chunks)", "def 
test_random_valid_mutation_with_all(self):\n\n applied_mutation = defaultdict(int)\n N = self._min_trials(n_mutations=4)\n\n for i in range(N):\n ind = self.individuals[self.ind_strings[1]]\n ind_clone = self.gama._toolbox.clone(ind)\n new_ind, = mut_replace_primitive(ind_clone, self.gama._pset)\n if self._mutShrink_is_applied(ind, new_ind)[0]:\n applied_mutation['shrink'] += 1\n elif self._mutInsert_is_applied(ind, new_ind)[0]:\n applied_mutation['insert'] += 1\n elif self._mut_replace_terminal_is_applied(ind, new_ind)[0]:\n applied_mutation['terminal'] += 1\n elif self._mut_replace_primitive_is_applied(ind, new_ind)[0]:\n applied_mutation['primitive'] += 1\n else:\n self.fail(\"No mutation (or one that is unaccounted for) is applied.\")\n\n self.assertTrue(all([n > 0 for (mut, n) in applied_mutation.items()]))", "def _repetitions(webpage_tree):\n\n metadata = {\n \"runs\": len(webpage_tree),\n \"max_resources_run\": 0,\n # a huge number\n \"min_resources_run\": time() * 99999,\n \"avg_resources_run\": 0,\n \"static_resources\": 0,\n \"dynamic_resources\": 0,\n \"files\": {},\n }\n data = {}\n\n if len(webpage_tree) > 0:\n for run in webpage_tree:\n files_in_run = len(webpage_tree[run])\n if metadata[\"min_resources_run\"] > files_in_run:\n metadata[\"min_resources_run\"] = files_in_run\n if metadata[\"max_resources_run\"] < files_in_run:\n metadata[\"max_resources_run\"] = files_in_run\n metadata[\"avg_resources_run\"] = metadata[\"avg_resources_run\"] + files_in_run\n for f in webpage_tree[run]:\n filename = f.split(os.path.sep)[-1]\n if filename not in data:\n metadata[\"files\"][filename] = {\n \"reps\": 1,\n }\n data[filename] = {\n \"reps\": 1,\n \"hash\": webpage_tree[run][f],\n }\n else:\n metadata[\"files\"][filename][\"reps\"] = (\n metadata[\"files\"][filename][\"reps\"] + 1\n )\n data[filename][\"reps\"] = data[filename][\"reps\"] + 1\n\n metadata[\"avg_resources_run\"] = int(\n metadata[\"avg_resources_run\"] / metadata[\"runs\"]\n )\n\n for f in data:\n if metadata[\"files\"][f][\"reps\"] >= (metadata[\"runs\"] * _REP_TRESHOLD):\n metadata[\"static_resources\"] = (\n metadata[\"static_resources\"] + metadata[\"files\"][f][\"reps\"]\n )\n else:\n metadata[\"dynamic_resources\"] = (\n metadata[\"dynamic_resources\"] + metadata[\"files\"][f][\"reps\"]\n )\n\n return metadata, data", "def updateNode(self, result):\n self.visits += 1\n self.wins += result", "def mutation(child_weights):\n for index, _ in enumerate(child_weights):\n # Add a chance for random mutation\n has_mutation = random.uniform(0, 1)\n if has_mutation <= .1:\n child_weights[index] *= random.randint(0, 5)" ]
[ "0.5752211", "0.5665256", "0.56194353", "0.56189704", "0.55831844", "0.5554643", "0.55110043", "0.54711574", "0.546658", "0.5309369", "0.52780664", "0.5274613", "0.52594477", "0.5250121", "0.5200165", "0.51973754", "0.514616", "0.51334417", "0.51302975", "0.5127141", "0.50880015", "0.50602466", "0.505178", "0.5049011", "0.5045399", "0.50344986", "0.5024229", "0.49861524", "0.49574393", "0.4955571" ]
0.71331596
0
Returns the object associated with the selected option. If there is none selected, it will return the default. If there is none selected and no default, then it will return None.
def getSelected(self):
    selected = self.defaultChoice
    if self.tableSelected is not None:
        selected = self.tableSelected.getString(self.defaultChoice)
    return self.map.get(selected)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_item(self, option):\n selected_item = None\n items = [item for item in self.items if item.id == option]\n if len(items) > 0:\n selected_item = items[0]\n return selected_item", "def selected(self):\n return self._choices[self._selected][0]", "def selected_option(self):\n\n for child in self._options_iter:\n if child.selected:\n return child\n return None", "def selected_item(self) -> MenuItem | None:\n if self.selected_option == -1:\n return None\n else:\n return self.all_items[self.selected_option]", "def default(self):\n return self.get(name='Unknown')", "def get_default(section, option=\"\"):\n\tif not option:\n\t\tif defaults.has_key(section):\n\t\t\treturn defaults[section]\n\telse:\n\t\tif defaults.has_key(section):\n\t\t\tif defaults[section].has_key(option):\n\t\t\t\treturn defaults[section][option]\n\treturn None", "def find_selected(self):\r\n return None", "def selected_value(self):\n option = self.selected_option\n return option.value if option else None", "def default(self) -> object:\n return self._default", "def get_selected(self):\n return self.selected", "def first_selected_option(self):\n try:\n return self.all_selected_options[0]\n except IndexError:\n raise ValueError(\"No options are selected\")", "def get_default(self, obj):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def get_default_variant(variants):\n for variant in variants:\n if variant.default:\n return variant", "def get(self, option, default=None):\n\t\treturn self._get_raw(option, '', default)", "def default(self):\n # easy enough\n return self._default", "def get_default(self):\n\n\t\treturn self.__default", "def find_option(self, option_name, default=None):\n value = (\n getattr(self.pconfig.option, option_name, None) or\n self.pconfig.getini(option_name)\n )\n return value if value else default", "def get_default(cls, opt):\n try:\n return cls._OPTS[opt].default\n except KeyError:\n raise ValueError('unknown option name %r' % (opt,))", "def get_default(self) -> T | None:\n return (\n self.default # TODO: deepcopy mutable defaults?\n if self.default_factory is None\n else self.default_factory()\n )", "def getdefault(self, option, type=str, default=None):\r\n return self.get(Config.DEFAULT_SECTION, option, type, default=default)", "def default(self):\n return self.__default", "def get(self, name, default=None):\n\t\treturn self[name] if self[name] is not None else default", "def __get__(self, instance, owner):\n # Check if Model class is being called, rather than Model instance\n if instance is None:\n return self\n # Get value from Model instance if available\n value = instance._values.get(self.name)\n # If value is None or empty string then return the default value, if set\n # if value in [None, ''] and self.default is not None:\n # return self.default\n return value", "def __get_option(self, option):\n if option in Config.OPTIONS.keys():\n _default = Config.OPTIONS[option]\n elif option in Config.FILE_OPTIONS.keys():\n _default = Config.FILE_OPTIONS[option]\n elif option in Config.PATH_OPTIONS.keys():\n _default = Config.PATH_OPTIONS[option]\n else:\n _default = None # XXX ??\n \n _val = self.__get(option)\n\n if _val: \n return _val\n else:\n return _default", "def pop_default(self, option: str) -> Optional[Any]:\n index = self._get_index(option)\n assert index is not None\n value = self._options[index]\n del self._options[index]\n default = value[1] if isinstance(value, tuple) else None\n return default", "def current_choice(self):\n\t\treturn 
self.choice_data_list[self.select_index]", "def _get_default(self):\n if callable(self.default):\n return self.default()\n else:\n return self.default", "def _get_option(self, name, datatype, default):\n return config.get_option(self._options,\n name,\n type=datatype,\n default=default)", "def get_default(self):\r\n if self.has_default:\r\n if callable(self.default):\r\n return self.default()\r\n else:\r\n return self.default", "def get(self, key, default=''):\n key = self.optionxform(key)\n cached = self._cache.get(key, _use_default)\n if cached is not _use_default:\n return cached\n name_str = self.name\n key_str = to_unicode(key)\n settings = ProductSetting.select(self.env,\n where={'product': self.product,\n 'section': name_str,\n 'option': key_str})\n if len(settings) > 0:\n value = settings[0].value\n else:\n for parent in self.config.parents:\n value = parent[self.name].get(key, _use_default)\n if value is not _use_default:\n break\n else:\n if default is not _use_default:\n option = Option.registry.get((self.name, key))\n value = option.default if option else _use_default\n else:\n value = _use_default\n if value is _use_default:\n return default\n if not value:\n value = u''\n elif isinstance(value, basestring):\n value = to_unicode(value)\n self._cache[key] = value\n return value" ]
[ "0.7156055", "0.68458813", "0.675694", "0.6539312", "0.6476181", "0.64479786", "0.6434399", "0.64323425", "0.63644886", "0.6284368", "0.6248488", "0.62412125", "0.6223481", "0.61877", "0.6180245", "0.6166791", "0.6160753", "0.614588", "0.61024815", "0.60922366", "0.60900766", "0.6066577", "0.6049018", "0.6045651", "0.6039281", "0.60284567", "0.60233164", "0.60203844", "0.6019801", "0.6001419" ]
0.73067707
0
Alias for self.get_me() but lazy and with caching.
async def me(self) -> types.User: if not hasattr(self, '_me'): setattr(self, '_me', await self.get_me()) return getattr(self, '_me')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getMe(self):\n return self('getMe')", "def me(self):\r\n return Member(self, 'me')", "def me():\n return current_user.get()", "def get_me(self):\n return self._api_call('get', '/me')", "def me(self):\n return User(self, ResourcePath(\"me\", None))", "async def get_me(self) -> types.User:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.GET_ME, payload)\n\n return types.User(**result)", "def current_user(self, **kwargs):\n return self.me(**kwargs)", "def me(self):\r\n return User(self)", "def get_self(self):\n return self.request(verb=requests.get, address=\"me\")", "def me(self):\n\n return self.client._get(self._url())", "def get_me(self, *args):\n\n user_data = api.get_me(\n *args, \n api_key=self.__creds.api_key_v2)\n\n return en.User(creds=self.__creds, **user_data)", "def get_me(self) -> requests.models.Response:\n return self.get('v1/me')", "def __get__(self, instance, cls=None):\n\n if cls is None:\n cls = type(instance)\n\n try:\n return vars(cls)[self.__cache_name__]\n except KeyError:\n result = super().__get__(instance, cls)\n setattr(cls, self.__cache_name__, result)\n return result", "def me(self, **kwargs):\n return self._get(API.ME.value, **kwargs)", "def _get_cached_instance(self):\n\n try:\n identifier = self._get_identifier()\n except (ValueError, ObjectDoesNotExist) as error:\n if self._fail_silently:\n return None\n raise LazyModelObjectError(exc=error) from error\n\n # Get the cache key, basically just namespacing the identifier\n cache_key = model_cache_key(identifier)\n\n cache, timeout = self._cache\n cace: BaseCache\n if cache_key in cache:\n instance = cache.get(cache_key)\n else:\n instance = self._get_instance(identifier)\n cache.set(cache_key, instance, timeout=timeout)\n\n if instance is None and not self._fail_silently:\n raise LazyModelObjectError(f'{identifier} not found.')\n return instance", "def get_me(cls, session):\n return cls(\n '/users/me.json',\n singleton=True,\n session=session,\n )", "def me_get(): # noqa: E501\n s = base.check_session()\n return _cleanuser(s['user'])", "def lazy(func):\n\n @wraps(func)\n def wrapper(self, *args, **kwargs):\n name = \"_\" + func.__name__\n try:\n return getattr(self, name)\n except AttributeError:\n value = func(self, *args, **kwargs)\n setattr(self, name, value)\n return value\n\n return wrapper", "def me(self, data, *args, **kwargs):\n return self._me(data, *args, **kwargs)", "def get_instance(self):\n try:\n return self._instance\n except AttributeError:\n self._instance = self._decorated()\n return self._instance", "def __get__(self, obj, objtype=None):\n try:\n return getattr(obj, self.cache_attr)\n except AttributeError:\n # Call the wrapped function with the obj instance as argument\n setattr(obj, self.cache_attr, self.fget(obj))\n return getattr(obj, self.cache_attr)", "def __get__(self, cls, owner):\n return self.fget.__get__(None, owner)()", "def get_object(self, *args, **kwargs):\n cache_key = \"_cache_get_object\"\n if not hasattr(self, cache_key):\n setattr(self, cache_key, super(PageDetailsMixin, self).get_object(*args, **kwargs))\n return getattr(self, cache_key)", "def get_cache(self):\n return self._instance._cache[self.name]", "def get_cache(self, obj: Any) -> LazyPropertyCache:\n try:\n return getattr(obj, self.cache_name)\n except AttributeError: # need creation\n cache = self.LazyPropertyCache(self.requirements, {\"self\": obj})\n setattr(obj, self.cache_name, cache)\n return cache", "def get_me(sess=None):\n return send_request('getMe', sess)", "def 
_wrapper(self, *args, **kwargs):\n if self.use_cache:\n cache = load_cache(self.cache_filename)\n original_key = generate_hash(\n self.__class__.__name__, func.__name__, args, kwargs)\n cache_key = hashlib.md5(original_key.encode('utf-8')).hexdigest()\n cached_val = cache.get(cache_key)\n if cached_val:\n return cached_val\n val = func(self, *args, **kwargs)\n if self.use_cache:\n cache.set(cache_key, val)\n return val", "def __get__(self, cls, owner):\n return classmethod(self.fget).__get__(None, owner)()", "def me():\n return User.query.get(6)", "def __call__(self, alias):\n return self.get_by_alias(alias)" ]
[ "0.74499834", "0.6551658", "0.6282139", "0.6246415", "0.61712277", "0.6143197", "0.6129853", "0.6117795", "0.6093259", "0.5974101", "0.5957396", "0.59498155", "0.59403694", "0.59009767", "0.5900434", "0.58857435", "0.58728683", "0.58081734", "0.5733064", "0.5696561", "0.5694217", "0.5692779", "0.56478864", "0.5631771", "0.55985093", "0.5579654", "0.5578544", "0.5555505", "0.5545786", "0.55144763" ]
0.70557714
1
Download file by file_id to destination
async def download_file_by_id(self, file_id: base.String, destination=None, timeout: base.Integer = 30, chunk_size: base.Integer = 65536, seek: base.Boolean = True): file = await self.get_file(file_id) return await self.download_file(file_path=file.file_path, destination=destination, timeout=timeout, chunk_size=chunk_size, seek=seek)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _download_file(self, file_id, file_name, path):\n request = self.service.files().get_media(fileId=file_id)\n fh = io.FileIO(path + file_name, 'wb')\n downloader = MediaIoBaseDownload(fh, request)\n done = False\n print('Start download ' + file_name)\n while not done:\n status, done = downloader.next_chunk()\n print(\"Download %d%%.\" % int(status.progress() * 100))", "def download_file(self, file_id):\n \n file = self.service.files().get(fileId=file_id).execute()\n file_name = file['name']\n print ('Name:', file_name)\n # print ('MIME type:', file['mimeType'])\n local_fd = open(file_name, \"wb\")\n request = self.service.files().get_media(fileId=file_id)\n media_request = http.MediaIoBaseDownload(local_fd, request)\n\n while True:\n try:\n download_progress, done = media_request.next_chunk()\n except errors.HttpError as error:\n print ('An error occurred:', error)\n return\n if download_progress:\n print ('Download Progress:', int(download_progress.progress() * 100))\n if done:\n print ('Download Complete')\n local_fd.close()\n return", "def download(self, file_id: str, output_path: str) -> str:\n url = self.get_method_url('storage', 'download', file_id)\n response = self.request(\n url=url,\n method='GET'\n )\n file_name = search(\n r'\\\"(.*?)\\\"', response.headers['content-disposition']\n ).group(1)\n with open(f'{output_path}/{file_name}', 'wb') as file:\n file.write(response.content)\n\n return f'{output_path}/{file_name}'", "def download_file(service, file_id, file_path):\n request = service.files().get_media(fileId=file_id)\n with open(file_path, mode='wb') as f:\n downloader = MediaIoBaseDownload(f, request)\n done = False\n while done is False:\n status, done = downloader.next_chunk()", "def download_file():\n\n if 'POST' == request.method:\n file_id = request.form['file_id']\n else:\n file_id = request.args.get('file_id')\n\n # 1 ==> example_1.tgz\n file_path = file_manager.get_file_path_from_id(file_id)\n print \"serving file: \" + file_path\n return send_file(file_path, as_attachment=True)", "def download_file(client, file_id):\n\n file_content = client.file(file_id).content()\n print(file_content)", "def media_file_download(request, media_file_id):\n media_file = get_object_or_404(MediaFile, id=media_file_id)\n full_path = os.path.join(settings.MEDIA_ROOT, str(media_file.file))\n return sendfile(request, full_path, attachment=True, attachment_filename=media_file.filename)", "def download_file(service, file_id, file_name=None):\n # Setup request for the file\n request = service.files().get_media(fileId=file_id, supportsAllDrives=True)\n # setup the file handler to recieve the byte stream\n fh = io.BytesIO()\n\n # the download utility \n downloader = MediaIoBaseDownload(fh, request, chunksize=204800)\n # continue to download until all bytes are recieved\n done = False\n while not done:\n status, done = downloader.next_chunk()\n\n # go to beginning of stream\n fh.seek(0)\n\n # get file name meta data to store file with same name, \n # only executed if user did not provide a file name to store the file locally\n if file_name == None:\n file_name_results = service.files().get(fileId=file_id,\n fields='name', \n supportsAllDrives=True).execute()\n file_name = file_name_results['name']\n # copy stream from file handler to local storage\n with open(file_name, 'wb') as f:\n shutil.copyfileobj(fh, f)\n \n return file_name", "def download_file(file_id, filename):\n # httplib2 library is not thread-safe, need a new http for each thread\n drive_service = discovery.build('drive', 
'v3', http=get_http())\n request = drive_service.files().get_media(fileId=file_id)\n\n fh = io.FileIO(filename, 'wb')\n downloader = MediaIoBaseDownload(fh, request)\n\n done = False\n while done is False:\n try:\n status, done = downloader.next_chunk()\n except Exception as ex:\n print (\"User rate limit exceeded for %s\" % filename)\n return False\n print (\"Download %d%%.\" % int(status.progress() * 100))\n return True", "def download_file(self, id, file_name, file_location):\n if not self.check_for_file():\n gd = GDrive()\n gd.download_file(id, file_name, file_location)", "async def download_file(\n location_id: LocationID,\n file_id: StorageFileID,\n user_id: UserID,\n link_type: LinkType = LinkType.PRESIGNED,\n):", "def get_file(self, file_id, filename=''):\n method = 'getFile?' + 'file_id=' + str(file_id)\n res = requests.post(self.api_url + method, file_id)\n try:\n file_path = res.json()['result']['file_path']\n # Determine the fileName. Use modified file_path if none given.\n if not filename:\n filename = file_path[file_path.rfind('/') + 1:]\n except (KeyError, ValueError):\n return \"500 - Failed parsing the file link from API response.\"\n\n if not os.path.exists(self.dirDownloads):\n os.mkdir(self.dirDownloads)\n\n local_path = os.path.join(self.dirDownloads, filename)\n\n # Download file as stream.\n res = requests.get(self.file_url + file_path, stream=True)\n if res.status_code == 200:\n try:\n with open(local_path, 'wb') as f:\n for chunk in res:\n f.write(chunk)\n except IOError:\n pass\n return '200 - {} written.'.format(local_path)\n else:\n return '404 - Error accessing {}'.format(file_path)", "def filedownload(source, destination):\n\n # Initiate the download\n urllib.request.urlretrieve(source, destination)", "def download_file(fileid, save_path):\n authorize_google_drive()\n file_object = DRIVE.CreateFile({'id': fileid})\n file_object.GetContentFile(save_path)\n return True", "def download_redirect(id_):\n if check_expired_file(id_):\n return abort(404)\n return redirect(url_for(\"file_handler.download\", id_=id_))", "def download_result_file(run_id, file_id):\n print('download {} {}'.format(run_id, file_id))\n from robflask.service import service\n with service() as api:\n # Authentication of the user from the expected api_token in the header\n # will fail if no token is given or if the user is not logged in.\n fh = api.runs().get_result_file(run_id=run_id, file_id=file_id)\n return send_file(\n fh.open(),\n as_attachment=True,\n attachment_filename=fh.name,\n mimetype=fh.mime_type\n )", "def download_file(service, file_id, local_fd):\n request = service.files().get_media(fileId=file_id)\n media_request = http.MediaIoBaseDownload(local_fd, request)\n \n while True:\n try:\n download_progress, done = media_request.next_chunk()\n except errors.HttpError as error:\n print('An error occurred: %s' % error)\n return\n if download_progress:\n print('Download Progress: %d%%' % int(download_progress.progress() * 100))\n if done:\n print('Download Complete')\n return", "def download_file(service, file_id, local_fd):\n request = service.files().get_media(fileId=file_id)\n media_request = MediaIoBaseDownload(local_fd, request)\n\n while True:\n try:\n download_progress, done = media_request.next_chunk()\n except errors.HttpError as error:\n print('An error occurred: %s' % error)\n return\n # if download_progress:\n # print('Download Progress: %d%%' % int(download_progress.progress() * 100))\n if done:\n print('Download Complete')\n return", "def downloadImage(self, file_id):\n 
# check if using full url or partial\n url = \"https://www.sendspace.com/file/{}\".format(file_id) if len(file_id) == 6 else file_id\n\n try:\n r = self.getRequest(url, {}) # GET request for image\n except (RuntimeError) as e:\n raise RuntimeError(\"Error getting download URL for image from \" +\n \"sendspace.\") from e\n\n # the download image retrieved from the uploadImage method does not\n # return a direct download URL. This parses the request to download\n # for the direct download URL.\n dd_url = BeautifulSoup(r.text, \"lxml\").find(\"a\", {\"id\": \"download_button\"})['href']\n\n # download the actual image from the dd_url\n try:\n return BytesIO(self.getRequest(dd_url, {}).content)\n except (RuntimeError) as e:\n raise RuntimeError(\"Error downloading the image from \" +\n \"sendspace.\") from e", "def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)", "def download_file_from_google_drive(file_id, dest_path, verbose=False):\n\n destination_directory = dirname(dest_path)\n if len(destination_directory) > 0 and not exists(destination_directory):\n makedirs(destination_directory)\n\n session = requests.Session()\n\n if verbose:\n print('Downloading file with Google ID {} into {}... '.format(file_id, dest_path), end='')\n stdout.flush()\n\n response = session.get(Constant.DOWNLOAD_URL, params={'id': file_id}, stream=True)\n\n token = get_confirm_token(response)\n if token:\n params = {'id': file_id, 'confirm': token}\n response = session.get(Constant.DOWNLOAD_URL, params=params, stream=True)\n\n save_response_content(response, dest_path)\n if verbose:\n print('Download completed.')", "def download(ctx, file, stream):\n if not check_main_conf(ctx):\n return\n\n file = int(file)\n\n resp = ctx.obj['api'].client.file.file_download(id=file).result()\n\n if 'error_code' in resp:\n click.echo(resp['error_message'])\n return\n\n if stream:\n r = requests.get(resp['download_url'])\n stdout_binary = click.get_binary_stream('stdout')\n\n for chunk in r.iter_content(chunk_size=512 * 1024):\n stdout_binary.write(chunk)\n else:\n click.echo(resp['download_url'])", "def download(job_id, filename):\n client = connect()\n result = client.get_job_output(\n vaultName=VAULT_NAME,\n jobId=str(job_id))\n\n with open(filename) as f:\n f.write(result['Body'].read())\n print(\"File successfully downloaded.\")", "def dwnld_with_id(credentials, file_id, dst_pth):\n \n http = credentials.authorize(httplib2.Http())\n service = discovery.build('drive', 'v3', http=http)\n\n request = service.files().get_media(fileId=file_id)\n fh = io.FileIO( dst_pth, 'wb' )\n downloader = MediaIoBaseDownload(fh, request)\n done = False\n while done is False:\n status, done = downloader.next_chunk()\n print ( '{} Download {}%.'.format( file_id, int(status.progress() * 100) ) )", "def download_file(self, remote_file):\n remote_file.download()", "def download_file(fileId, fileName):\n creds = authenticate()\n service = build('drive', 'v3', credentials=creds)\n\n request = service.files().get_media(fileId=fileId)\n\n # print(request)\n\n # Downloads the photo to local storage.\n fh = io.FileIO(temp_dir + fileName, mode='wb')\n downloader = MediaIoBaseDownload(fd=fh, request=request)\n\n\n done = False\n while done is False:\n status,done = downloader.next_chunk()\n # 
print(\"Download %d%%.\" % int(status.progress() * 100))\n\n fh.close()\n\n return fileName", "def download_file(id, output=DATA_DIR, quiet=False):\n url = f\"https://drive.google.com/uc?id={id}\"\n gdown.download(url, output=output, quiet=quiet)", "def download(self, url, destination):\n fileDownloader = utils.HttpFileDownloader(url, destination)\n fileDownloader.download()", "def download(package_type, id, resource_id, filename=None):\n context = {\n u'model': model,\n u'session': model.Session,\n u'user': g.user,\n u'auth_user_obj': g.userobj\n }\n\n try:\n rsc = get_action(u'resource_show')(context, {u'id': resource_id})\n get_action(u'package_show')(context, {u'id': id})\n except (NotFound, NotAuthorized):\n return base.abort(404, _(u'Resource not found'))\n\n if rsc.get(u'url_type') == u'upload':\n upload = uploader.get_resource_uploader(rsc)\n filepath = upload.get_path(rsc[u'id'])\n resp = flask.send_file(filepath)\n if rsc.get(u'mimetype'):\n resp.headers[u'Content-Type'] = rsc[u'mimetype']\n return resp\n\n elif u'url' not in rsc:\n return base.abort(404, _(u'No download is available'))\n return h.redirect_to(rsc[u'url'])", "def download_file(self, source_file_name, destination_file_name, **keyword_args):\n blob = self.bucket.blob(source_file_name)\n blob.download_to_filename(destination_file_name, **keyword_args)\n print(f\"Download file {source_file_name} and save as {destination_file_name}\")" ]
[ "0.7663905", "0.751212", "0.7483206", "0.74830407", "0.7481888", "0.7381274", "0.73347574", "0.72252023", "0.71731603", "0.7148918", "0.71000427", "0.7078511", "0.70491886", "0.7038326", "0.6999377", "0.698364", "0.68519497", "0.68437755", "0.6835958", "0.6823059", "0.6821894", "0.67723656", "0.67243004", "0.6718662", "0.6667066", "0.66516614", "0.6622451", "0.6616093", "0.6611391", "0.6573351" ]
0.7592607
1
Use this method to remove webhook integration if you decide to switch back to getUpdates. Returns True on success. Requires no parameters.
async def delete_webhook(self) -> base.Boolean: payload = generate_payload(**locals()) result = await self.request(api.Methods.DELETE_WEBHOOK, payload) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def on_shutdown():\n # Remove webhook.\n await bot.delete_webhook()", "def delete (self, webhook_id: str) -> NoReturn:\r\n try:\r\n return self.api(\r\n method=\"DELETE\",\r\n endpoint=f\"all/{config('TWITTER_ENV_NAME')}/webhooks/{webhook_id}.json\",\r\n )\r\n except Exception as e:\r\n raise e", "def delete(cls, webhook_endpoint_id):\n return Requester.delete(cls.endpoint + '/' + webhook_endpoint_id)", "def delete_webhook(self, webhook_id, channel_id=None, partner_id=None):\n path = self._get_webhook_path(channel_id, partner_id)\n path += '/hooks'\n url = urlparse(self.api_url)._replace(path=path).geturl()\n\n r = requests.delete(url, headers=self.headers)\n validate_response(r)\n\n return {\n 'code': r.status_code,\n 'message': 'Web Hook has been successfully deleted'\n }", "def webhook():\n if request.headers.get('content-type') == 'application/json':\n\n json_string = request.get_data().decode('utf-8')\n update = Update.de_json(json_string)\n bot.process_new_updates([update])\n return ''\n\n else:\n abort(403)", "def graceful_exit(*args, **kwargs):\n if updater is not None:\n updater.bot.delete_webhook()\n\n sys.exit(1)", "async def delete_webhook(self, webhook: 'dt_webhook.Webhook'):\n if not self.me.guild_permissions.manage_webhooks:\n raise PermissionsError(\"manage_webhooks\")\n\n await self._bot.http.delete_webhook(webhook.id)", "def refresh(self, webhook_id: str) -> NoReturn:\r\n try:\r\n return self.api(\r\n method=\"PUT\",\r\n endpoint=f\"all/{config('TWITTER_ENV_NAME')}/webhooks/{webhook_id}.json\",\r\n )\r\n except Exception as e:\r\n raise e", "def disable_webhooks(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"disable_webhooks\")", "def disable_webhooks(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"disable_webhooks\")", "def set_status_update_waiter_webhook_deleted(self):\n self.set_state(CHANNEL_MOVE_STATE_WEBHOOK_DELETED)\n self.set_status_update_waiter()", "def removeUpdate(self, e):\n syncJSONtoUI()", "def cli_jira_webhook_delete(ctx, webhook_id):\n jira_webhook_path = \"rest/webhooks/1.0/webhook\"\n _url = f'{jira_webhook_path}/{webhook_id}'\n _res = ctx.obj['connect'].delete(_url, headers=json_headers, auth=True)\n ctx.obj['writer'].out(_res)", "def unset_response(update: Update, context: CallbackContext) -> None:\n context.bot_data.update({str(update.message.chat_id) : 'False'})\n user_data = OrderedDict(user=str(update.message.chat_id),subscribed='False') \n users_table.upsert(user_data, ['user'])\n logger.info(\"Unsubscribing user \" + str(update.message.chat_id))\n text = \"No worries, you've been unsubscribed.\\n\\n\" \\\n \"To subscribe to daily updates again, just press /daily\"\n update.message.reply_text(text)\n\n update_string = \"User \" + str(update.message.chat_id) + \" unsubscribed\"\n # Alert admin that user unsubscribed. 
\n context.bot.send_message(ADMIN_CONVERSATION_ID, parse_mode='HTML', text=update_string)", "def cb_stop(self, update, context):\n\n print(f\"Unsubscribing chat_id '{update.message.chat_id}'\")\n try:\n self.clientChatIds.remove(update.message.chat_id)\n answer = \"You sucessfully unsubscribed.\"\n self.saveToFile(self.configFile)\n except KeyError:\n answer = \"You are not subscribed.\"\n\n update.message.reply_text(answer)", "def remove_hook(self):\n for handle in self.handlers:\n handle.remove()", "def remove_hook(self):\n for handle in self.handlers:\n handle.remove()", "def remove_update_function(self):\n self.index_wid.remove_update_function()\n self._update_function = None", "async def will_remove_from_hass(self) -> None:\n if self.unsub_update:\n self.unsub_update()\n self.unsub_update = None", "async def async_unload_entry_gw(hass: HomeAssistant, entry: ConfigEntry):\n unload_ok = all(\n await asyncio.gather(\n *(\n hass.config_entries.async_forward_entry_unload(entry, component)\n for component in GATEWAY_PLATFORMS\n )\n )\n )\n\n hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()\n\n if unload_ok:\n hass.data[DOMAIN].pop(entry.entry_id)\n\n return unload_ok", "def webhook(self) -> bool:\n return self._webhook", "def del_hook(self, name: str):\n try:\n del self.__hooks[name]\n except KeyError:\n pass", "def unrequest_changes(self):\n self._check_if_open()\n return super(BitbucketCloudBase, self).delete(\"request-changes\")", "def api_delete(self, name):\n if self.api_get(name):\n return self._delete(['apis', name])\n\n return False", "def webhooks(self) -> Optional[Sequence['outputs.MutatingWebhook']]:\n return pulumi.get(self, \"webhooks\")", "def delete_webmention(self) -> bool:\n return self.send_notification()", "def unset_wrapper(bot, update, args, job_queue, chat_data):\n if len(args) == 0:\n update.message.reply_text('No parameter provided')\n return\n\n job_name = args[0]\n if len(args) == 0 or job_name not in settings.JOBS:\n update.message.reply_text(\n 'Sorry {0} is not a valid job'.format(job_name))\n return\n\n job = find_job(job_name, job_queue)\n\n if not job:\n update.message.reply_text('You have no active job')\n return\n\n job.schedule_removal()\n\n update.message.reply_text('{0} job successfully unset!'.format(job_name))", "def webhooks(self) -> Optional[Sequence['outputs.ValidatingWebhook']]:\n return pulumi.get(self, \"webhooks\")", "def telegram_web_hook():\n\n update = Update.de_json(request.stream.read().decode('utf-8'))\n bot.process_new_updates([update])\n\n return Response('ok', 200)", "async def async_will_remove_from_hass(self):\n # The opposite of async_added_to_hass. Remove any registered call backs here.\n if self._product is not None:\n self._product.remove_callback(self.async_write_ha_state)" ]
[ "0.6497652", "0.6384995", "0.6074761", "0.600638", "0.58422136", "0.5825405", "0.5794588", "0.5749265", "0.57441294", "0.56677645", "0.5614371", "0.5531468", "0.5475864", "0.5461781", "0.5430949", "0.54267085", "0.54267085", "0.5416139", "0.5374666", "0.53592205", "0.53320056", "0.5323293", "0.5281029", "0.5236727", "0.52076375", "0.51805717", "0.5178032", "0.516996", "0.5167664", "0.51665646" ]
0.71436477
0
Use this method to get current webhook status. Requires no parameters. If the bot is using getUpdates, will return an object with the url field empty.
async def get_webhook_info(self) -> types.WebhookInfo: payload = generate_payload(**locals()) result = await self.request(api.Methods.GET_WEBHOOK_INFO, payload) return types.WebhookInfo(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_status(self):\n r = requests.get(self.base_url + '/status')\n return r.json()", "def webhook(self) -> Optional[pulumi.Input['WebhookArgs']]:\n return pulumi.get(self, \"webhook\")", "def __get_status_api(self):\r\n try:\r\n return Call_shelly_api(url=self.__api_address + \"/status\")\r\n except ShellyException as err:\r\n _LOGGER.warning(err)", "def status(self):\n return self._get(path='status')", "def status(self):\n r = requests.get('/'.join([self.base_url, self.ENDPOINT_STATUS]))\n return r.json()", "def webhooks(self) -> Optional[Sequence['outputs.ValidatingWebhook']]:\n return pulumi.get(self, \"webhooks\")", "def webhooks(self) -> Optional[Sequence['outputs.MutatingWebhook']]:\n return pulumi.get(self, \"webhooks\")", "def get_status(self):\n return self._refreshed", "def update_get():\n\n status, error = update.status.get()\n if error:\n return json_response.error(error), 200\n return json_response.success({'status': str(status)})", "def GetStatus(handler, query):\n json_config = {}\n\n lock = None\n if 'Url' in query:\n url = query['Url'][0]\n status, lock = ToGo.get_status(url)\n\n if not lock:\n # no Url or no status found for url\n handler.send_json(json.dumps(json_config))\n return\n\n with lock:\n state = 'queued'\n if status['running']:\n state = 'running'\n elif status['finished']:\n if status['error'] == '':\n state = 'finished'\n else:\n state = 'error'\n json_config['error'] = status['error']\n\n json_config['state'] = state\n json_config['rate'] = status['rate']\n json_config['size'] = status['size']\n json_config['retry'] = status['retry']\n json_config['maxRetries'] = status['ts_max_retries']\n json_config['errorCount'] = status['ts_error_count']\n\n handler.send_json(json.dumps(json_config))", "def get_status(self):\n url = \"data_request?id=jobstatus&job=%d&plugin=zwave\" % self.id\n return self.vera.get(url)", "def GetStatus(self):\r\n return self.status", "def cli(context):\r\n\tclick.echo('getting webhookinfo for', context.config.API)\r\n\trp = tornado.httpclient.HTTPClient().fetch(context.config.API % 'getWebhookInfo')\r\n\tclick.echo(rp.body)", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def get_status(self):\n return self.status", "def status(self) -> Optional[pulumi.Input['GoogleRpcStatusArgs']]:\n return pulumi.get(self, \"status\")", "def get_webhooks():\n response = requests.get(f'{KAZOO_SERVER}:8000/v2/webhooks', headers=HEADERS)\n\n return response", "def get_status_callback_url(self):\n return [obj for obj in self._request_uri(\"status_callback_url\")]", "def _get_status(self):\n return self.__status", "def webhook_url(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"webhook_url\")", "def webhook():\n if request.headers.get('content-type') == 'application/json':\n\n json_string = request.get_data().decode('utf-8')\n update = Update.de_json(json_string)\n bot.process_new_updates([update])\n return ''\n\n else:\n abort(403)", "def get_status(self):\n return self._status", "def status(self):\n if hasattr(self, \"_status\"):\n return self._status\n else:\n return None", "def webhooks(self) -> json:\r\n try:\r\n return self.api(method=\"GET\", endpoint=f\"all/webhooks.json\").json()\r\n except Exception as e:\r\n raise e", "def _read_status(self):\n results = self.status_table.query_items({'api_version': self.api_version})\n if not results:\n return None\n else:\n return results[0]", "def status(self):\n return self._data['status']", "def get_default_status(self):\n return 
self.bot_data_file[\"bot_status\"][\"defaultStatus\"]", "def get_status(self):\n # TODO retrieve from db if not set\n return self.status", "def status(self):\n self._refresh_state()\n return self._data.get('status')" ]
[ "0.63750786", "0.62557083", "0.62087816", "0.6191062", "0.6176805", "0.61665905", "0.6119854", "0.61036825", "0.60876465", "0.6077727", "0.600287", "0.5995869", "0.59853315", "0.5968422", "0.5968422", "0.5968422", "0.59592545", "0.59138614", "0.5902295", "0.58840424", "0.5882718", "0.58628845", "0.58624554", "0.58473164", "0.5805567", "0.5785759", "0.57804465", "0.5780009", "0.57608277", "0.5758453" ]
0.6399687
0
Use this method to edit live location messages sent by the bot or via the bot (for inline bots). A location can be edited until its live_period expires or editing is explicitly disabled by a call to stopMessageLiveLocation.
async def edit_message_live_location(self, latitude: base.Float, longitude: base.Float, chat_id: typing.Union[base.Integer, base.String, None] = None, message_id: typing.Union[base.Integer, None] = None, inline_message_id: typing.Union[base.String, None] = None, reply_markup: typing.Union[types.InlineKeyboardMarkup, None] = None) -> types.Message or base.Boolean: reply_markup = prepare_arg(reply_markup) payload = generate_payload(**locals()) result = await self.request(api.Methods.EDIT_MESSAGE_LIVE_LOCATION, payload) if isinstance(result, bool): return result return types.Message(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def send_location(self, chat_id: typing.Union[base.Integer, base.String], latitude: base.Float,\n longitude: base.Float, live_period: typing.Union[base.Integer, None] = None,\n disable_notification: typing.Union[base.Boolean, None] = None,\n reply_to_message_id: typing.Union[base.Integer, None] = None,\n reply_markup: typing.Union[types.InlineKeyboardMarkup,\n types.ReplyKeyboardMarkup,\n types.ReplyKeyboardRemove,\n types.ForceReply, None] = None) -> types.Message:\n reply_markup = prepare_arg(reply_markup)\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SEND_LOCATION, payload)\n\n return types.Message(**result)", "def send_location(self, bot, update, lat, lon, **kwargs):\n\n reply_markup = ReplyKeyboardMarkup(self.keyboard)\n return bot.sendLocation(update.message.chat_id, lat, lon,\n reply_markup=reply_markup,\n resize_keyboard=True,\n **kwargs)", "def proc_location(bot, update, chat_data):\n\n\tchat_id = update.message.chat_id\t\n\n\tdata = chat_data[chat_id]\n\tdata['location'] = update.message.location\n\n\tbot.send_chat_action(chat_id=chat_id, action=telegram.ChatAction.TYPING)\n\n\tresult_atms = atm_manager.get_atms(data)\n\tmsg, img = format_info_atms(data,result_atms)\n\n\tif len(msg):\n\t\tbot.sendMessage(chat_id, msg)\n\t\ttry:\n\t\t\tbot.send_photo(chat_id, photo=img)\n\t\texcept Exception:\n\t\t\tbot.sendMessage(chat_id, \"No se pudo cargar imagen con ubicación de los cajeros\")\n\telse:\n\t\tbot.sendMessage(chat_id, \"No hay cajeros automáticos cercanos\")\n\n\treply_markup = telegram.ReplyKeyboardRemove()\n\tbot.send_message(chat_id,\"Adiós\", reply_markup=reply_markup)", "def location_callback(self,msg):\n self.location = msg.data", "def process_update(message):\n resident = Resident.objects.get(phone_number=message.sender)\n resident.location = message.location.location\n resident.save()\n\n # TODO - wording\n message.respond('Thank you. 
Your location has been updated.')\n \n return TropoOkResponse()", "def flash_location(self,params):\n loc = params['location']\n if self.participant:\n if self.service.groupOfParticipant(self) and self.participant.status != AVOID:\n self.participant.changeStatus(LISTEN)\n #print loc\n (x,y) = self.service.grid.conformToGrid(loc)\n self.participant.setLocation((x,y))\n self.receiveDirectCommand(\"location\",{\"x\":x,\"y\":y})\n \n else:\n self.notLoggedIn()", "def edit_msg_in_chat(client: WebClient, channel_id: str, msg_id: str, new_msg: str, blocks=None) -> dict:\n return client.chat_update(\n channel=channel_id,\n ts=msg_id,\n text=new_msg,\n blocks=blocks\n )", "def _ros_location_callback(self, msg: NavSatFix):\n self._telegram_updater.bot.send_location(self._telegram_chat_id, location=Location(msg.longitude, msg.latitude))", "def update_location(self, **kwargs):\n \n self.options.update(kwargs)\n self.options['action'] = 'locator.location.update'\n return self.call(self.options)", "def settings_convo_ask_location(update, context):\n query = update.callback_query\n bot = context.bot\n\n bot.edit_message_text(text=f'Hvilken placering vil du søge ud fra?',\n chat_id=query.message.chat_id,\n message_id=query.message.message_id)\n\n return SETTINGS_VIEW_SAVE", "async def location(self, msg, place_name=None, *args):\n if not place_name:\n return\n if args:\n place_name = f'{place_name} {\" \".join(args)}'\n if place_name.lower() in MapController.locations:\n lat, lng, size = MapController.locations[place_name.lower()]\n map_controller = MapController(lat, lng, 1, lat, lng)\n\n content = f'The location `{place_name}` is located at ({lat:.2f}, {lng:.2f})'\n if Guard.has_permission(msg, 'embed_links'):\n # If can embed link, post the URL too\n url = map_controller.generate_url()\n content = f'{content}\\nURL: <{url}>'\n\n response = {\n 'content': content,\n 'reference': msg.to_reference(),\n 'mention_author': True,\n }\n\n if Guard.has_permission(msg, 'attach_files'):\n # If can post image, post the snapshot too\n image = await map_controller.generate_snapshot(include_world=True)\n response['file'] = discord.File(image, filename=f'snapshot_{map_controller.get_id()}.png')\n await msg.channel.send(**response)\n else:\n await msg.channel.send(**{\n 'content': f'There is no location named `{place_name}`',\n 'reference': msg.to_reference(),\n 'mention_author': True,\n 'delete_after': 3,\n })", "def edit_locations(location_id):\n\n if not g.user:\n flash(\"Please login to access\", \"danger\")\n return redirect(\"/\")\n \n if g.user.is_admin == False:\n flash (\"Unauthorized\", \"danger\")\n return redirect(\"/login\")\n\n location = Location.query.get_or_404(location_id)\n form = Location_Form(obj = location)\n\n if form.validate_on_submit():\n location.site_name = form.site_name.data,\n location.city = form.city.data,\n location.state = form.state.data\n \n db.session.commit()\n flash(f\"Location {site_name} has been updated\")\n return redirect(\"/administrator\")\n else:\n return render_template(\"/admin/edit_location.html\", form = form, location = location)", "async def stop_message_live_location(self,\n chat_id: typing.Union[base.Integer, base.String, None] = None,\n message_id: typing.Union[base.Integer, None] = None,\n inline_message_id: typing.Union[base.String, None] = None,\n reply_markup: typing.Union[types.InlineKeyboardMarkup,\n None] = None) -> types.Message or base.Boolean:\n reply_markup = prepare_arg(reply_markup)\n payload = generate_payload(**locals())\n result = await 
self.request(api.Methods.STOP_MESSAGE_LIVE_LOCATION, payload)\n\n if isinstance(result, bool):\n return result\n\n return types.Message(**result)", "def location(update: Update, context: CallbackContext) -> int:\n user = update.message.from_user\n user_location = update.message.location\n logger.info(\n \"Location of %s: %f / %f\", user.first_name, user_location.latitude, user_location.longitude\n )\n update.message.reply_text(\n 'Scommetto che è un posto da visitare! Per ultima cosa , dimmi qualcosa di te stessa/o.'\n )\n\n return BIO", "async def run(self, message: discord.Message) -> None:\n await message.edit(content=self.current(), view=self)", "def update(self, msg):\n pass", "def start(bot, update):\n\n if update.message.chat.type == 'private':\n location_keyboard = telegram.KeyboardButton(text=\"Send current location\", request_location=True)\n postal_code = telegram.KeyboardButton(text=\"Input a postal code\")\n custom_keyboard = [[location_keyboard, postal_code]]\n chat_reply = \"Hello hello! You want to send me your current location or input a postal code?\"\n reply_markup = telegram.ReplyKeyboardMarkup(custom_keyboard, one_time_keyboard=True, resize_keyboard=True)\n bot.send_message(chat_id=update.message.chat_id, text=chat_reply, reply_markup=reply_markup)\n else:\n chat_reply = \"Hello hello! Please type /find@SGParkingBot and the postal code of the place you want to check (e.g. /find@SGParkingBot 098585). If you want to directly send me your location, talk to me in private ;)\"\n bot.send_message(chat_id=update.message.chat_id, text=chat_reply)", "def set_location(self, location, now):\n def work():\n member = db.get(self.key())\n member.location = location\n member.location_time = now\n member.put()\n db.run_in_transaction(work)", "async def handleMessageEdit(self, before: discord.Message, after: discord.Message):\n # Ignore on DMs.\n if not isinstance(after.channel, discord.TextChannel):\n return\n\n # ignore bot messages\n if after.author.bot:\n return\n\n if after.edited_at:\n await self.saveMessageTimestamp(after, datetime.now().timestamp())", "def __location_handler(self, update, context):\n trigger = Constructor.LOCATION_TRIGGER\n self.__handler(context, update, trigger)", "def export_locations_to_editor():\n\n categories = {}\n\n # Get all messages\n\n msgs = GPS.Message.list()\n\n # Filter them and organize them by category and file\n for m in msgs:\n if m.get_flags() & 2:\n file = m.get_file()\n category = m.get_category()\n\n if category in categories:\n if file in categories[category]:\n categories[category][file] += [m]\n else:\n categories[category][file] = [m]\n else:\n categories[category] = {file: [m]}\n\n if not categories:\n GPS.MDI.dialog(\"The Locations view is empty.\")\n return\n\n # Construct a string that we will write in the editor\n\n text = \"\"\n\n categories_list = [c for c in categories]\n categories_list.sort()\n\n for c in categories_list:\n text += c + \"\\n\"\n\n files_list = [f for f in categories[c]]\n files_list.sort()\n\n for f in files_list:\n text += \" %s\\n\" % f.path\n messages = categories[c][f]\n messages.sort(message_compare)\n\n for m in messages:\n text += \" %s:%s %s\\n\" % (\n m.get_line(),\n m.get_column(),\n m.get_text())\n\n text += \"\\n\"\n\n # Open an editor\n\n GPS.execute_action(\"new file\")\n buf = GPS.EditorBuffer.get()\n\n # Write the contents\n buf.insert(buf.at(1, 1), text)", "def waypoint_callback(self,msg):\n self.waypoint_loc = msg.data", "def edit_updated(update: str, update_key: str, bot: telegram.Bot):\n 
chat_id, message_id = update_key.split(':')[1:]\n url, likes, dislikes = update.decode('utf-8').split('^')\n edit_message_keyboard(bot, message_id, chat_id, likes, dislikes, url)", "def _telegram_location_callback(self, update: Update, _: CallbackContext):\n self._from_telegram_location_publisher.publish(\n NavSatFix(\n header=Header(stamp=rospy.Time.now()),\n latitude=update.message.location.latitude,\n longitude=update.message.location.longitude,\n position_covariance_type=NavSatFix.COVARIANCE_TYPE_UNKNOWN,\n )\n )", "def set_location(self, location):\n self.location = location", "def update_location(self, id, location):\n sql = f\"UPDATE incidences SET location = \\'{location}\\'\\\n WHERE incidences.id = {id}\"\n conn = Db().con\n curr = conn.cursor()\n curr.execute(sql)\n conn.commit()", "def update_location(request_form, location_id):\n values = {'latitude': request_form.get('latitude'), 'longitude': request_form.get('longitude'),\n 'city': request_form.get('city'), 'country': request_form.get('country')}\n db_session.query(Locations).filter_by(id=location_id).update(values)\n db_session.commit()\n return 'Updated location #%s: %s, %s.' \\\n % (location_id, values['city'].title(), values['country'].title()), 'success'", "def location(bot, update):\n\n bot.send_message(chat_id=update.message.chat_id, text=\"OK you wait ah...\")\n latitude = update.message.location.latitude\n longitude = update.message.location.longitude\n bot.send_message(chat_id=update.message.chat_id, text=\"Just let you know for fun lol - your latitude is {0}, and your longitude is {1}\".format(latitude,longitude))\n try:\n # Read carpark csv as dataframe\n df = pd.read_csv('Parking_withcoords.csv')\n \n # Calculate distance between each carpark and postal code and append it to dataframe\n distance = []\n for coord in df['Coord_rad']: \n carpark = haversine((radians(latitude),radians(longitude)), ast.literal_eval(coord)) #converts string to tuple\n distance.append(carpark)\n df['Distance_km'] = distance\n\n # Sort in ascending order and extract top 5\n top_five = df.sort_values('Distance_km').head(5)\n\n for row in top_five['Info']:\n bot.send_message(chat_id=update.message.chat_id, parse_mode='HTML', text=row.replace(\"\\$\", \"$\"))\n\n bot.send_message(chat_id=update.message.chat_id, text=\"Fast hor! If you want to check other places, type /start again ok :P\")\n except:\n bot.send_message(chat_id=update.message.chat_id, text=\"Jialat liao got error...try again with /start and then use the postal code method can? Paiseh!\")", "def set_location(self, location):\n self.location = location", "def fusion_api_edit_lsg(self, body, uri, api=None, headers=None):\n return self.lsg.update(body, uri, api, headers)" ]
[ "0.61447716", "0.60086536", "0.60046154", "0.5799428", "0.5798248", "0.57214075", "0.5505844", "0.54993886", "0.541418", "0.53947383", "0.53667855", "0.53470784", "0.53172225", "0.53044945", "0.5118792", "0.51045984", "0.5075171", "0.5071682", "0.5016867", "0.5007397", "0.49927863", "0.49784616", "0.49745274", "0.4971635", "0.49584863", "0.4953589", "0.49177647", "0.48601967", "0.4859128", "0.480771" ]
0.7712768
0
Use this method to send information about a venue.
async def send_venue(self, chat_id: typing.Union[base.Integer, base.String], latitude: base.Float, longitude: base.Float, title: base.String, address: base.String, foursquare_id: typing.Union[base.String, None] = None, disable_notification: typing.Union[base.Boolean, None] = None, reply_to_message_id: typing.Union[base.Integer, None] = None, reply_markup: typing.Union[types.InlineKeyboardMarkup, types.ReplyKeyboardMarkup, types.ReplyKeyboardRemove, types.ForceReply, None] = None) -> types.Message: reply_markup = prepare_arg(reply_markup) payload = generate_payload(**locals()) result = await self.request(api.Methods.SEND_VENUE, payload) return types.Message(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_venue(self, bot, update, lat, lon, title, address, **kwargs):\n\n return bot.sendVenue(update.message.chat_id, lat, lon, title, address,\n reply_markup=ReplyKeyboardMarkup(self.keyboard),\n resize_keyboard=True,\n **kwargs)", "def venue(request):\n # Test Comment\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'venue.html',\n context_instance=RequestContext(request, {})\n )", "def venues(self):\n response = self._request(V2_ENDPOINTS['VENUES'])\n return response", "def save_venue(data, form):\n venue = form.save()\n\n venue.city = City.objects.get(id=int(data.get('city_identifier')))\n venue.country = Country.objects.get(name='Canada')\n venue.location = Point((\n float(data.get('location_lng')),\n float(data.get('location_lat'))\n ))\n venue.save()", "def show_venue(venue_id):\n data = get_venue_by_id(venue_id).venue_details\n return render_template('pages/show_venue.html', venue=data)", "def send_event(agent_name, agent_version):\n client_name, client_version = _get_client_info()\n payload = {\n 'v': '1',\n 'tid': GA_INSTANCE,\n 'aip': '1',\n 'cid': str(uuid4()),\n 't': 'event',\n 'ec': 'Client name \"{}\", version \"{}\", interpreter \"{}\"'.format(\n client_name, client_version, _get_platform_info()\n ),\n 'ea': 'Start launch',\n 'el': 'Agent name \"{}\", version \"{}\"'.format(\n agent_name, agent_version\n )\n }\n headers = {'User-Agent': 'Universal Analytics'}\n try:\n return requests.post(url=GA_ENDPOINT, data=payload, headers=headers)\n except requests.exceptions.RequestException as err:\n logger.debug('Failed to send data to Google Analytics: %s',\n str(err))", "def __bot_info(self):\n log.debug(\"Displaying __bot_info\")\n self.bot.send_message(self.chat.id, self.loc.get(\"bot_info\"))", "def send_place(self, osm_id):\n # Send place types in categories\n self._provider.setProperty(self._plugin_name,\n \"food_and_drink\", self._amenity)\n\n place = self._places.search(osm_id)\n if place.is_complete():\n # Count distance\n try:\n place.set_distance(distance(self._location.getLatitude(),\n self._location.getLongitude(), place.get_lat(),\n place.get_lon()))\n # If place is far away\n if (float(place.get_distance()) > \\\n (self._location.getRange() * 1000)):\n return\n # Send distance\n self._provider.setProperty(self._plugin_name,\n place.get_distance_id(), place.get_distance())\n except Exception:\n pass\n\n # Send rest of data\n # Main name\n self._provider.setProperty(self._plugin_name,\n place.get_osm_id_key(), place.get_display_name())\n # Type\n self._provider.setProperty(self._plugin_name,\n place.get_type_id(), place.get_type())\n # Latitude and Longitude\n self._provider.setProperty(self._plugin_name,\n place.get_latlng_id(), place.get_latlng())\n # Url\n self._provider.setProperty(self._plugin_name,\n place.get_url_id(), place.get_url())\n # Website\n if place.get_website():\n self._provider.setProperty(self._plugin_name,\n place.get_website_id(), place.get_website())\n # Cuisine\n if place.get_cuisine():\n self._provider.setProperty(self._plugin_name,\n place.get_cuisine_id(), place.get_cuisine())\n # Opening hours\n if place.get_opening_hours():\n self._provider.setProperty(self._plugin_name,\n place.get_opening_hours_id(), place.get_opening_hours())\n\n self._provider.done(self._plugin_name)", "def send(self, event, message):\n pass", "def command_where(self, bot, update):\n\n bot.sendChatAction(update.message.chat_id, action='typing')\n\n foursquare = ext.get_foursquare_location(self.config['foursquare'])\n venue = 
foursquare['venue']\n location = venue['location']\n\n msg = 'Myles Braithwaite checked in to *{venue[name]}* {ago}.'\n self.send_message(bot, update, msg.format(**foursquare))\n\n if location.get('address', None):\n self.send_venue(bot, update, location['lat'], location['lng'],\n venue['name'], location['address'],\n foursquare_id=venue['id'])\n else:\n self.send_location(bot, update, location['lat'], location['lng'])", "async def info(ctx):\n embed = discord.Embed(title=\"Zane Bot\", description=\"All hail the hypnotoad!\", color=0x0091C5)\n\n # give info about you here\n embed.add_field(name=\"Author\", value=\"Zanexius\")\n\n # Shows the number of servers the bot is member of.\n embed.add_field(name=\"Server count\", value=f\"{len(bot.guilds)}\")\n\n # give users a link to invite thsi bot to their server\n embed.add_field(name=\"Invite\", value=\"[Invite link](<insert your OAuth invitation link here>)\")\n\n await ctx.send(embed=embed)", "def send(self, count: int):\n return self.analytics.send(self.anal_name, count)", "def dispatch_event(event):\n queue = connect_to_sqs() \n logging.info('Writing event to SQS:' + str(json.dumps(event.params)))\n\n visitor = event.params['visitors'][0]['visitor_id']\n attributes = event.params['visitors'][0]['attributes']\n snapshot = event.params['visitors'][0]['snapshots'][0]\n\n response = queue.send_message(MessageBody=json.dumps({visitor: (attributes, snapshot)}))", "def create_venue_submission():\n # TODO: insert form data as a new Venue record in the db, instead (DONE)\n # TODO: modify data to be the data object returned from db insertion\n\n try:\n name = request.form.get(\"name\")\n city = request.form.get(\"city\")\n state = request.form.get(\"state\")\n address = request.form.get(\"address\")\n phone = request.form.get(\"phone\")\n imageLink = request.form.get(\"image_link\")\n genres = request.form.getlist(\"genres\")\n facebookLink = request.form.get(\"facebook_link\")\n website = request.form.get(\"website\")\n seeking_talent = request.form.get(\"facebook_link\")\n seeking_description = request.form.get(\"facebook_link\")\n\n venue_to_add = Venue(\n name=name,\n city=city,\n state=state,\n address=address,\n phone=phone,\n image_link=imageLink,\n genres=genres,\n facebook_link=facebookLink,\n website=website,\n seeking_talent=seeking_talent,\n seeking_description=seeking_description\n )\n\n db.session.add(venue_to_add)\n db.session.commit()\n\n # on successful db insert, flash success\n flash(\"Venue \" + request.form[\"name\"] + \" was successfully listed!\")\n\n # TODO: on unsuccessful db insert, flash an error instead.\n # e.g., flash('An error occurred. Venue ' + data.name + ' could not be listed.')\n except:\n flash(\"An error occurred. 
Venue \" + name + \" could not be listed.\")\n db.session.rollback()\n finally:\n db.session.close()\n # see: http://flask.pocoo.org/docs/1.0/patterns/flashing/\n return render_template(\"pages/home.html\")", "def _send_event(self, title, text, tags, type, aggregation_key, severity='info'):\n event_dict = {\n 'timestamp': int(time.time()),\n 'source_type_name': self.SOURCE_TYPE_NAME,\n 'msg_title': title,\n 'event_type': type,\n 'alert_type': severity,\n 'msg_text': text,\n 'tags': tags,\n 'aggregation_key': aggregation_key,\n }\n self.event(event_dict)", "def _send_event(self, title, text, tags, type, aggregation_key, severity='info'):\n event_dict = {\n 'timestamp': int(time()),\n 'source_type_name': self.SOURCE_TYPE_NAME,\n 'msg_title': title,\n 'event_type': type,\n 'alert_type': severity,\n 'msg_text': text,\n 'tags': tags,\n 'aggregation_key': aggregation_key,\n }\n self.event(event_dict)", "def getVenue(lat, lon, name, radius=300, addr=''):\n # Construct the client object\n client = foursquare.Foursquare(CLIENT_ID, CLIENT_SECRET, redirect_uri='http://fondu.com/oauth/authorize')\n\n # Return all venues within radius of lat,lon\n ll = str(lat) + \",\" + str(lon)\n radius = str(radius)\n venues = client.venues.search(params={'v': VERSION, 'll': ll, 'intent': 'browse', \n 'radius': radius, 'limit': 100 })[\"venues\"]\n # Returns a list of dictionaries, each is a \"compact venue\"\n print \"Returned\", len(venues) , \"venues within\", radius ,\"meters\"\n print venues[0]\n \n # pull out just venue name and its distance from lat, lon\n venue_deets = [(ven[\"name\"], ven[\"location\"][\"distance\"], ven[\"location\"][\"address\"]) for ven in venues]\n \n # sort by distance away\n venue_deets = sorted(venue_deets, key=lambda x: x[1])\n venue_names = [x[0] for x in venue_deets]\n venue_addr = [x[2] for x in venue_deets]\n print venue_names\n \n # grab the \"foursquare\" version of the name\n if name in venue_names:\n # name supplied exactly matches foursquare name\n fs_name = name\n else:\n # look for close matches to supplied name\n \n # defaults set: returns a max of 3 matches with minimum score of 0.6 in similarity\n fs_name = difflib.get_close_matches(name, venue_names, n=3, cutoff=0.5)\n print fs_name\n \n if len(fs_name)<1:\n # hopefully this doesn't happen!\n #raise ValueError(\"ERROR: venue not found\")\n # match on address instead\n add_name = difflib.get_close_matches(addr, venue_addr, n=3, cutoff=0.5)\n print add_name\n return -1\n elif len(fs_name)>1:\n # if more than one match returned take closest venue\n dists = [venue_deets[venue_names.index(n)][1] for n in fs_name]\n fs_name = fs_name[dists.index(min(dists))] # return closest\n else:\n fs_name = fs_name[0]\n \n \n # details of desired venue\n print \"Name given =\", name\n print \"Name in foursquare =\", fs_name\n print \"Distance from original lat, long =\", venue_deets[venue_names.index(fs_name)][1],\"meters\"\n desired_venue_id = [ven for ven in venues if ven[\"name\"]==fs_name][0][\"id\"]\n\n \n # Now get \"complete venue\" information, that has more details on venue properties\n venue_url = \"https://api.foursquare.com/v2/venues/\" + desired_venue_id\n venue_url += \"?client_id=\" + CLIENT_ID\n venue_url += \"&client_secret=\" + CLIENT_SECRET\n venue_url += \"&v=\" + VERSION\n venue_url += \"&m=foursquare\"\n\n complete_venue = json.load(urllib2.urlopen(venue_url))[\"response\"][\"venue\"]\n \n \n # fields that help grab pertinent information\n descriptors = ['phrases', 'categories', 'attributes', 'tags', 'tips']\n\n 
words = ''\n venue_type = []\n for desc in descriptors:\n if desc in complete_venue:\n field = complete_venue[desc] \n \n # scan over phrases field\n if desc=='phrases':\n for f in field:\n print \"printing from 'sample'\"\n if 'sample' in f:\n if 'text' in f['sample']:\n print f['sample']['text'], type(f['sample']['text'])\n words += f['sample']['text'] + ' '\n print \"printing from 'phrase'\"\n if 'phrase' in f:\n print f['phrase'], type(f['phrase'])\n words += f['phrase'] + ' '\n \n # scan over categories field\n if desc=='categories':\n for f in field:\n if 'name' in f:\n print f['name'], type(f['name'])\n words += f['name'] + ' '\n venue_type.append(f['name'])\n \n # scan over attributes field\n if desc=='attributes':\n if 'groups' in field:\n gr = field['groups']\n for f in gr:\n if 'name' in f:\n print f['name'], type(f['name'])\n words += f['name'] + ' '\n \n # scan over tags field\n if desc=='tags':\n for f in field:\n print f, type(f),\n words += f + ' '\n print ''\n \n \n # scan over tips field\n if desc=='tips':\n if 'groups' in field:\n gr = field['groups']\n for group in gr:\n if 'items' in group:\n for item in group['items']:\n if 'text' in item:\n print item['text'], type(item['text'])\n words += item['text'] + ' '\n print ''\n \n # scrape all words for things indicating beer, coffee, food, liquor, wine\n words = word_tokenize(words)\n words = [x.lower() for x in words]\n \n service_flag = [0,0,0,0,0]\n print sorted(SERVICES)\n for i, (service, rel_words) in enumerate(sorted(SERVICES.items())):\n print service\n cnt = 0\n for word in rel_words:\n print difflib.get_close_matches(word.lower(), words, n=5, cutoff=0.99)\n cnt += len(difflib.get_close_matches(word.lower(), words, n=5, cutoff=0.99))\n print cnt, \"\"\n if cnt>=1:\n service_flag[i] = 1\n print service_flag\n print \"\"\n \n print words\n hours_id = None\n if 'hours' in complete_venue:\n print complete_venue['hours'], '\\n'\n else:\n print \"No hours in venue information\\n\"\n print \"\"\n\n \n rating = None\n if 'rating' in complete_venue:\n print 'rating =', complete_venue['rating'], '\\n'\n rating = complete_venue['rating']\n print type(rating)\n else:\n print \"No rating in venue information\\n\"\n print \"\"\n \n nLikes = None\n if 'likes' in complete_venue:\n print 'likes =', complete_venue['likes']['count'], '\\n'\n nLikes = complete_venue['likes']['count']\n print type(nLikes)\n else:\n print \"No likes in venue information\\n\"\n \n print \"\"\n \n if (len(venue_type)<0):\n venue_type = None\n # phrases \n # List of phrases commonly seen in this venue's tips, as well as a sample tip snippet and the number of \n # tips this phrase appears in.\n \n # categories\n # An array, possibly empty, of categories that have been applied to this venue. One of the categories \n # will have a field primary indicating that it is the primary category for the venue. For the complete \n # set of categories, see venues/categories. \n \n # attributes\n # Attributes associated with the venue, such as price tier, whether the venue takes reservations, and \n # parking availability. \n \n # tags\n # An array of string tags applied to this venue.\n \n # rating\n # Numerical rating of the venue (0 through 10). Returned as part of an explore result, excluded in \n # search results. Not all venues will have a rating.\n \n # tips\n # Contains the total count of tips and groups with friends and others as groupTypes. Groups may change \n # over time. 
\n \n # reasons?\n \n # likes \n # The count of users who have liked this venue, and groups containing any friends and others \n # who have liked it. The groups included are subject to change. \n \n # hours\n # Contains the hours during the week that the venue is open along with any named hours segments in a \n # human-readable format. For machine readable hours see venues/hours", "def send(self, recipient, sender, price, country, message):\n raise NotImplementedError", "def get(self, request, format=None):\n self.user = EmergencyButtonClient.objects.first()\n current_time = datetime.datetime.now().time()\n family_members = FamilyMember.objects.filter(emergency_button_client=self.user)\n schedules = Schedule.objects.filter(family_member__in=family_members,\n start__lte=current_time,\n end__gte=current_time)\n\n recipients = []\n for schedule in schedules:\n recipients.append(schedule.family_member.phone_number)\n\n if not len(recipients):\n raise NotFound\n\n # get lat long, randomize a little bit for demo purpose\n # lat = float(request.data.get('lat', 52.3862755)) + random.randint(-10, 10) * 0.0001\n # long = float(request.data.get('long', 4.8728798)) + random.randint(-10, 10) * 0.0001\n lat = 52.3862755\n long = 4.8728798\n self.coordinates = (lat, long)\n\n api_token = 'RTDWFuAIoGzINuBTRDl5uDOiO'\n client = messagebird.Client(api_token)\n\n # sent text message\n text_message = self._create_text_message()\n client.message_create(\n 'MessageBird',\n recipients,\n text_message,\n {'reference': 'quicklypress'},\n )\n\n # sent voice message\n voice_message = self._create_voice_message()\n client.voice_message_create(\n recipients,\n voice_message,\n {'language': 'en-gb', 'voice': 'female'},\n )\n\n return Response({'status': 'success'}, status=200)", "def create_venue_submission():\n form = VenueForm(request.form)\n\n try:\n new_venue = Venue(\n name=form.name.data,\n city=form.city.data,\n state=form.state.data,\n address=form.address.data,\n phone=form.phone.data,\n genres=form.genres.data,\n facebook_link=form.facebook_link.data,\n image_link=form.image_link.data,\n website=form.website.data,\n seeking_talent=form.seeking_talent.data,\n seeking_description=form.seeking_description.data\n )\n\n db.session.add(new_venue)\n db.session.commit()\n\n flash('Venue ' + request.form['name'] + ' was successfully listed!', 'info')\n\n except Exception as ex:\n db.session.rollback()\n flash('Error occurred. Venue ' + request.form['name'] + ' could not be listed. ' + str(ex), 'danger')\n finally:\n db.session.close()\n\n return redirect(url_for('index'))", "def __repr__(self):\n\n\t\treturn f\"<Venue id={self.venue_id} city={self.city} name={self.city} country={self.country_code} description={self.description} status={self.status} length={self.length} turns={self.turns} latitude={self.latitude} longitude={self.longitude} maplink = {self.maplink}>\"", "def send_tweet(self):\n \n ## Check the quality/score\n quality = self.sunsetwx_response['features'][0]['properties']['quality']\n score = self.sunsetwx_response['features'][0]['properties']['quality_percent']\n \n ## For great ones... compose a status\n if quality == 'Great':\n \n local_time_str = self.time_converted.strftime(\"%I:%M %p\")\n if self.type == 'sunrise':\n time_of_day_str = 'tomorrow morning'\n elif self.type == 'sunset':\n time_of_day_str = 'this evening'\n status = f'Looks like there will be a great {self.type} in {self.location} {time_of_day_str}! 
Check it out at {local_time_str}.'\n \n ## Post about the great ones\n api.update_status(status=status)\n \n ## Update the log regardless\n self.update_log_record(datetime.today().strftime(\"%Y-%m-%d\"), score)", "def venues(self):\n response = self._request(V2_ENDPOINTS['VENUES'])\n # Normalize `dateHours` to array\n for venue in response[\"result_data\"][\"document\"][\"venue\"]:\n if venue.get(\"id\") in VENUE_NAMES:\n venue[\"name\"] = VENUE_NAMES[venue.get(\"id\")]\n if isinstance(venue.get(\"dateHours\"), dict):\n venue[\"dateHours\"] = [venue[\"dateHours\"]]\n if \"dateHours\" in venue:\n for dh in venue[\"dateHours\"]:\n if isinstance(dh.get(\"meal\"), dict):\n dh[\"meal\"] = [dh[\"meal\"]]\n return response", "def send_counterparty(self) -> None:\n object_ = self.objects[0]\n ticket_text = ''\n if 'сб' in object_.counterparty_name.lower() and self.keyword == 'closing':\n # order_id = sberinkas.main(\n # object_.object_SAP_code,\n # object_.object_address,\n # object_.lat,\n # object_.lon\n # )\n # ticket_text = f\"<br>Номер заявки на портале инкассация - {order_id}.\"\n pass\n\n body = '<p>Добрый день!<br><br>' \\\n f'Прошу принять в работу письмо на {self.letter_text}<br>' \\\n f'Скан подписанного письма вышлю позднее.{ticket_text}'\n if 'сб' in object_.counterparty_name.lower():\n self.send_sber_manager_service(body)\n else:\n self.sendmail(\n self.outlook,\n self.to,\n \"\",\n self.letter_name,\n body,\n self.attachment,\n 2\n )", "async def send(self, event_type: str, data: str) -> dict:\n return await self._do_request(\"post\", send_address, self._auth,\n data=dict(eventType=event_type, data=data))", "def venues():\n # find all venues on the basis of distinct city and states\n venues_by_locations = get_venues_by_distinct_locations()\n data = []\n if venues_by_locations:\n # prepare data to be displayed in the template\n data = [v.venue_location_serializer for v in venues_by_locations]\n for venue_data in data:\n venue_data['venues'] = get_venues_by_location(venue_data['city'], venue_data['state'])\n venue_data['venue_count'] = len(venue_data['venues'])\n return render_template('pages/venues.html', areas=data)", "async def lookup_ven(ven_name=None, ven_id=None):\n return {'ven_id': 'ven1234'}", "def _send_event(self, name: EventName, payload):\n self.send_custom_event('Custom.Mindstorms.Gadget', name.value, payload)", "def _send_event(self, name: EventName, payload):\n self.send_custom_event('Custom.Mindstorms.Gadget', name.value, payload)", "def eventRegister(self, eventId=None):\n\n\t\tmessage = {}\n\n\t\tmessage[\"msg_type\"] = \"request\"\n\t\tmessage[\"command\"] = \"event_register\"\n\t\tmessage[\"event_item\"] = { \"id\" : \"34ee2cf2\" }\n\n\t\tregistration_info = {}\n\t\tregistration_info[\"first_name\"] = \"Patrick\"\n\t\tregistration_info[\"last_name\"] = \"Farrell\"\n\t\tregistration_info[\"email\"] = \"[email protected]\"\n\n\t\tmessage[\"registration_info\"] = registration_info\n\n\t\tresponse = self.sendMessage( message )\n\n\t\tprint response" ]
[ "0.7467282", "0.5998742", "0.58837694", "0.571061", "0.5702564", "0.56717736", "0.56640244", "0.55833757", "0.55801505", "0.5444701", "0.54058254", "0.54040605", "0.536096", "0.53144175", "0.5308406", "0.5304428", "0.5302464", "0.52628136", "0.52257055", "0.52144784", "0.5205171", "0.5162441", "0.5159209", "0.51515096", "0.5144226", "0.51375335", "0.5124808", "0.512135", "0.512135", "0.5120654" ]
0.64537096
1
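The record closed above flags venue services (beer, coffee, food, liquor, wine) by fuzzy-matching keyword lists against scraped venue text with `difflib`. A minimal, self-contained sketch of that matching step follows — illustrative only: the `SERVICES` mapping and the sample text are invented placeholders, and a plain `split()` stands in for the record's NLTK `word_tokenize`.

```python
# Illustrative sketch of the fuzzy service-flagging step from the record above.
# SERVICES and the sample text are invented placeholders, not dataset values.
import difflib

SERVICES = {
    "beer": ["beer", "brewery", "ale"],
    "coffee": ["coffee", "espresso", "latte"],
}

text = "Great espresso and a solid craft beer list"
words = [w.lower() for w in text.split()]  # the record uses nltk.word_tokenize here

service_flag = [0] * len(SERVICES)
for i, (service, rel_words) in enumerate(sorted(SERVICES.items())):
    # cutoff=0.99 effectively demands (near-)exact token matches
    hits = sum(len(difflib.get_close_matches(w, words, n=5, cutoff=0.99))
               for w in rel_words)
    if hits >= 1:
        service_flag[i] = 1

print(service_flag)  # [1, 1] for this sample text
```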
Use this method to get a list of administrators in a chat.
async def get_chat_administrators(self, chat_id: typing.Union[base.Integer, base.String] ) -> typing.List[types.ChatMember]: payload = generate_payload(**locals()) result = await self.request(api.Methods.GET_CHAT_ADMINISTRATORS, payload) return [types.ChatMember(**chatmember) for chatmember in result]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_administrators(self, *args, **kwargs):\n return self.bot.get_chat_administrators(self.id, *args, **kwargs)", "def get_users_admins_list(self, session):\n\n users = session.query(User.chat_id).all()\n return users", "def get_admin_ids(bot, chat_id):\r\n return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]", "def get_admin_ids(bot, chat_id):\n return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]", "def get_admin_ids(bot, chat_id):\n return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]", "def get_admin_ids(bot, chat_id):\n return [admin.user.id for admin in bot.get_chat_administrators(chat_id)]", "async def _ad_list(self, ctx):\n admin_list = self.database.get_admins(ctx.guild.id)\n if len(admin_list) > 0:\n out = \"```\"\n for admin in admin_list:\n admin_name = self.bot.get_user(admin.user_id)\n admin_name = str(admin_name) if admin_name is not None else admin.user_id\n out += f\"{admin_name}\\n\"\n out += \"```\"\n await ctx.send(out)\n else:\n await ctx.send(\"This guild currently has no administrators.\")", "def get_users_list(self, session):\n\n users = session.query(User.chat_id).filter(User.is_admin==False).all()\n return users", "def get_list_of_admins() -> List[User]:\n return DBDiscussionSession.query(User).filter(User.group == Group.ADMIN).all()", "def admins(message):\n hf.query_users(message, hf.get_users(), \"admin\")", "def get_admins(self):\n return self.admins_group.user_set.all()", "def return_admin_list(request):\n del request\n return return_user_list(Administrador)", "def admin_list(message):\n load_users(message._client.users)\n names = list_to_names(user_list.admin_list)\n message.reply('My admins are: {}'.format(\", \".join(names)))", "def administrators(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"administrators\")", "def list_users(self):\n return self.get_admin(\"users\")", "def get_admins(self):\n from Employee import Employee\n admins = list()\n cursorRoles = self.dbconnect.get_cursor()\n cursorRoles.execute('select * from employeeRoles where role=\\'admin\\'')\n for row in cursorRoles:\n admins.append(self.get_employee(row[0]))\n return admins", "def get_admins(self, uid):\n admin_data = self.list_admin_roles(uid)\n admins = []\n for admin in admin_data:\n admins.append(\n ZenossDeviceManagementAdmin(\n self.api_url,\n self.api_headers,\n self.ssl_verify,\n admin\n )\n )\n\n return admins", "def get_admins():\n users = get_users()\n admins = []\n for user in users:\n if user[\"approval_level\"] == \"admin\":\n admins.append(user)\n\n return admins", "def get_admins(self):\n admins = User.objects.filter(Q(groups__name=self.admin_group_name()) | Q(is_superuser=True)).distinct()\n return admins", "def get_admin_users(self):\r\n try:\r\n users = self.list_all(\"users\")\r\n users_admin = [user for user in users if user[\"role\"] == \"admin\"]\r\n return users_admin\r\n except PDClientError as e:\r\n raise e", "def get_chatrooms(self):\n return list(self.chatrooms)", "def get_admins(name):\n obj = DataService.objects(name=name).first()\n if obj is None:\n return []\n return list(obj.admins)", "def get_admin_list(host):\n users = query(\"$.host.'{host}'.admin\", host=host)\n if isinstance(users, (str, unicode)):\n users = users.replace(', ', ' ').replace(',', ' ').split(' ')\n return users or []", "def show_admins(var, wrapper, message):\n cli, nick, chan, rest = wrapper.client, wrapper.source.name, wrapper.target.name, message # FIXME: @cmd\n\n admins = 
[]\n pl = list_players()\n\n if (wrapper.public and var.LAST_ADMINS and var.LAST_ADMINS +\n timedelta(seconds=var.ADMINS_RATE_LIMIT) > datetime.now()):\n cli.notice(nick, messages[\"command_ratelimited\"].format())\n return\n\n if wrapper.public or (var.PHASE in var.GAME_PHASES or nick in pl):\n var.LAST_ADMINS = datetime.now()\n\n if var.ADMIN_PINGING:\n return\n\n var.ADMIN_PINGING = True\n\n def admin_whoreply(event, var, chan, user):\n if not var.ADMIN_PINGING or chan is not channels.Main:\n return\n\n if is_admin(user.nick): # FIXME: Using the old interface for now; user.is_admin() is better\n if user is not users.Bot and not event.params.away:\n admins.append(user.nick) # FIXME\n\n def admin_endwho(event, var, target):\n if not var.ADMIN_PINGING or target is not channels.Main:\n return\n\n admins.sort(key=str.lower)\n\n msg = messages[\"available_admins\"] + \", \".join(admins)\n\n reply(cli, nick, chan, msg)\n\n var.ADMIN_PINGING = False\n\n who_result.remove(\"who_result\")\n who_end.remove(\"who_end\")\n\n who_result = EventListener(admin_whoreply)\n who_result.install(\"who_result\")\n who_end = EventListener(admin_endwho)\n who_end.install(\"who_end\")\n\n channels.Main.who()", "def get_org_admins(self, dataset: Dict) -> List[User]:\n organization_id = dataset[\"organization_id\"]\n orgadmins = list()\n organization = self.organizations[organization_id]\n if \"admin\" in organization:\n for userid in self.organizations[organization_id][\"admin\"]:\n user = self.users.get(userid)\n if user:\n orgadmins.append(user)\n return orgadmins", "def get_all_users_for_admin_purposes(connection):\r\n with connection:\r\n return connection.execute(GET_ALL_USERS).fetchall()[1]", "def get(self, id):\n adm = Administration()\n c = adm.get_chat_by_id(id)\n return c", "def get(self, id):\n adm = Administration()\n c = adm.get_chat_by_id(id)\n return c", "def admins_index(_):\n return {\"admin_users\": [u.username for u in models.User.admins()]}", "def get_users_admins_name(self, session) -> Tuple[int, str, str]:\n users = (\n session.query(User.chat_id, User.first_name, User.last_name)\n .all()\n )\n return users" ]
[ "0.7990867", "0.76068026", "0.7120013", "0.70987684", "0.70987684", "0.70987684", "0.6546187", "0.64803773", "0.6477549", "0.64150256", "0.6410623", "0.63776743", "0.63714683", "0.6349768", "0.62950855", "0.6253433", "0.62402964", "0.62132955", "0.61402994", "0.6135424", "0.6072461", "0.6036991", "0.6023283", "0.5981664", "0.59605837", "0.5941389", "0.59142894", "0.59142894", "0.5905895", "0.5896787" ]
0.7943072
1
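A minimal usage sketch for the positive document above — listing a chat's administrators. Illustrative only: it assumes aiogram 2.x (which the signature above matches), and the token and chat id are placeholders, not real credentials.

```python
# Hypothetical usage of Bot.get_chat_administrators (aiogram 2.x style).
# The token and chat id below are placeholders.
import asyncio

from aiogram import Bot

async def main() -> None:
    bot = Bot(token="123456:PLACEHOLDER-TOKEN")
    admins = await bot.get_chat_administrators(chat_id=-1001234567890)
    for member in admins:
        # each entry is a types.ChatMember wrapping the user and their status
        print(member.user.id, member.status)
    await bot.close()  # close the underlying HTTP session

asyncio.run(main())
```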
Use this method to get the number of members in a chat.
async def get_chat_members_count(self, chat_id: typing.Union[base.Integer, base.String]) -> base.Integer: payload = generate_payload(**locals()) result = await self.request(api.Methods.GET_CHAT_MEMBERS_COUNT, payload) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_members_count(self, *args, **kwargs):\n return self.bot.get_chat_members_count(self.id, *args, **kwargs)", "async def membercount(ctx, *args):\n if ctx.message.channel.is_private:\n await bot.delete_message(ctx.message)\n return\n\n g = ctx.message.server\n\n gid = g.id\n membs = str(len(g.members))\n membs_on = str(len([m for m in g.members if not m.status == Status.offline]))\n users = str(len([m for m in g.members if not m.bot]))\n users_on = str(len([m for m in g.members if not m.bot and not m.status == Status.offline]))\n bots = str(len([m for m in g.members if m.bot]))\n bots_on = str(len([m for m in g.members if m.bot and not m.status == Status.offline]))\n created = str(g.created_at)\n \n em = Embed(title=\"Membercount\")\n em.description = \"```\\n\" \\\n \"Members: %s (%s)\\n\" \\\n \" Users: %s (%s)\\n\" \\\n \" Bots: %s (%s)\\n\" \\\n \"Created: %s\\n\" \\\n \"```\" % (membs, membs_on, users, users_on, bots, bots_on, created)\n\n await client.send_message(ctx.message.channel, embed=em)\n await client.delete_message(ctx.message)", "def count_chat_with(self, actor_label):\n query = read_query('trust/count_chat_with') % actor_label\n response = self._submit_query(query)\n\n return response[0]['num_chats']['value'].split('/')[-1] if response != [] else ''", "def member_count(self) -> int:\n return sum([g.member_count for g in self.guilds])", "def member_count(self):\n return len(self.members)", "def member_count(self):\n\n url = '{}/members'.format(self.url)\n headers = {\n 'User-Agent': 'GeoUsage (https://github.com/geopython/GeoUsage)'\n }\n\n LOGGER.debug('Fetching URL: {}'.format(url))\n response = requests.post(url,\n headers=headers,\n data={'adminpw': self.password})\n LOGGER.debug('Parsing HTML')\n\n element = re.search(r'(\\d+) members total', response.text).group(0)\n members = int(element.split('members total')[0].strip())\n\n return members", "async def users(ctx):\n\n if ctx.channel.name.lower() in channels:\n await ctx.send(f\"\"\"# of members: {ctx.guild.member_count}\"\"\")", "def vscr_ratchet_group_session_get_participants_count(self, ctx):\n vscr_ratchet_group_session_get_participants_count = self._lib.vscr_ratchet_group_session_get_participants_count\n vscr_ratchet_group_session_get_participants_count.argtypes = [POINTER(vscr_ratchet_group_session_t)]\n vscr_ratchet_group_session_get_participants_count.restype = c_uint\n return vscr_ratchet_group_session_get_participants_count(ctx)", "def getNumMembers(self):\n return _libsbml.ListOfMembers_getNumMembers(self)", "def getNumMembers(self):\n return _libsbml.Group_getNumMembers(self)", "def get_connected_users_count(room: PublicChatRoom) -> int:\n return room.users.count()", "def get_streams_chatters(channel):\n payload = {'api_version': 5,\n 'client_id': client_id}\n\n result = requests.get(''.join(['https://tmi.twitch.tv/group/user/',channel,'/chatters']), params=payload)\n usercount = result.json()['chatter_count']\n return usercount", "def get_unread_count(username, password):\n obj = imaplib.IMAP4_SSL('imap.gmail.com', '993')\n obj.login(username, password)\n obj.select('Inbox')\n message_ids = obj.search(None, \"UNSEEN\")[1]\n list_of_split_strings = str(message_ids).split(\" \")\n unread = len(list_of_split_strings)\n # speak(str(unread))\n return unread", "def member_count(ctx, verbosity):\n\n if verbosity is not None:\n logging.basicConfig(level=getattr(logging, verbosity))\n else:\n logging.getLogger(__name__).addHandler(logging.NullHandler())\n\n ma = 
MailmanAdmin(os.environ['GEOUSAGE_MAILMAN_ADMIN_URL'],\n os.environ['GEOUSAGE_MAILMAN_ADMIN_PASSWORD'])\n\n click.echo(ma.member_count)", "async def update_member_count():\n guild = bot.get_guild(SERVER_ID)\n channel_prefix = \"Members\"\n vc = discord.utils.find(lambda c: channel_prefix in c.name, guild.voice_channels)\n mem_count = guild.member_count\n joined_today = len([m for m in guild.members if m.joined_at.date() == datetime.datetime.today().date()])\n left_channel = discord.utils.get(guild.text_channels, name=CHANNEL_LEAVE)\n left_messages = await left_channel.history(limit=200).flatten()\n left_today = len([m for m in left_messages if m.created_at.date() == datetime.datetime.today().date()])\n await vc.edit(name=f\"{mem_count} Members (+{joined_today}/-{left_today})\")\n print(\"Refreshed member count.\")", "async def messagecount(self, ctx, name=None):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n async with ctx.channel.typing():\r\n username = name\r\n if username is None:\r\n username = ctx.message.author.name\r\n resp = await self.req('https://api.scratch.mit.edu/users/' + username + '/messages/count')\r\n if resp is None and name is None:\r\n username = getattr(ctx.message.author, 'nick', '_')\r\n resp = await self.req('https://api.scratch.mit.edu/users/' + username + '/messages/count')\r\n logger.info('Scratch.messagecount: ' + username, extra={'invoker': ctx.message.author.name})\r\n if resp is None:\r\n await ctx.send(\"Couldn't get message count for \" + username)\r\n else:\r\n await ctx.send('{} has {} messages'.format(\r\n username,\r\n json.loads(resp)['count']\r\n ))", "def message_nums(request):\n if request.user.is_authenticated:\n return {'unread_nums': request.user.usermessage_set.filter(has_read=False).count()}\n else:\n return {}", "def message_count(self):\n return len(self.messages)", "def msgStats():\n r = {}\n r[\"users\"] = User.count()\n return jsonify(r)", "def get_num_followers(self):\n a = self.soup.find('div', class_ = 'zm-topic-side-followers-info').a\n if a:\n return a.get_text(strip = True).encode('utf-8')\n return ''", "def __len__(self):\n response = self._rpc(self._declare(True))\n return response.message_count", "def active_member_count(self):\n return self._active_member_count", "def count(request):\r\n n = request.user.profile.unread_message_count()\r\n data = {\r\n 'count': n,\r\n }\r\n return HttpResponse(json.dumps(data), mimetype='application/json')", "def message_count(self):\n pass", "def get_messages_count(khoros_object, user_settings=None, user_id=None, login=None, email=None):\n user_settings = _process_settings_and_user_id(khoros_object, user_settings, user_id, login, email)\n return _get_count(khoros_object, user_settings['id'], 'messages')", "def count_subscribers(self):\n return self.request(\"count:Contact\", [ None ])", "def people_count(self):\n return len(self.__users)", "async def _count(\n self, ctx: Context, user: discord.Member, channel: discord.TextChannel = None\n ):\n\n if not channel:\n channel = ctx.channel\n\n count = 0\n async with ctx.typing():\n async for message in channel.history(limit=None):\n if message.author.id == user.id:\n count += 1\n\n await ctx.send(_(\n \"{} has sent **{}** messages in {} channel.\"\n ).format(user.name, count, channel.mention))", "async def count(ctx):\n users = len(set(bot.get_all_members()))\n servers = len(bot.servers)\n\n colour = ''.join([random.choice('0123456789ABCDEF') for x in range(6)])\n colour = int(colour, 16)\n embed = discord.Embed(colour = 
discord.Colour(value = colour), timestamp = datetime.datetime.utcnow())\n embed.add_field(name = \"Servers im Modding: \", value = servers)\n embed.add_field(name = \"Users im Serving: \",value = users)\n embed.add_field(name = \"Add me: \", value = \"Type m.botinfo\")\n embed.set_footer(text= \"{} | Requested by: {} at\".format(version, ctx.message.author))\n await bot.say(embed = embed)", "def count_friends(self):\n query = read_query('content exploration/count_friends')\n response = self._submit_query(query)\n return response[0]['count']['value']" ]
[ "0.8535938", "0.7045512", "0.70038784", "0.6915813", "0.68333334", "0.6814858", "0.67744184", "0.6622566", "0.6590299", "0.6495557", "0.6487808", "0.64398223", "0.6420605", "0.6212519", "0.60859007", "0.60826516", "0.6045725", "0.60334086", "0.60332364", "0.59774727", "0.5962796", "0.5952995", "0.5948322", "0.5930661", "0.59299666", "0.59228885", "0.59189636", "0.58907443", "0.5870803", "0.5866376" ]
0.8165247
1
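A matching sketch for the record above — fetching a chat's member count. Illustrative only: it assumes an already-initialized aiogram 2.x `Bot`, and the chat id is a placeholder.

```python
# Hypothetical usage of Bot.get_chat_members_count (aiogram 2.x style).
from aiogram import Bot

async def report_member_count(bot: Bot) -> None:
    count = await bot.get_chat_members_count(chat_id=-1001234567890)
    print(f"The chat has {count} members")
```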
Use this method to get information about a member of a chat.
async def get_chat_member(self, chat_id: typing.Union[base.Integer, base.String], user_id: base.Integer) -> types.ChatMember: payload = generate_payload(**locals()) result = await self.request(api.Methods.GET_CHAT_MEMBER, payload) return types.ChatMember(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_member(self, *args, **kwargs):\n return self.bot.get_chat_member(self.id, *args, **kwargs)", "async def info(self, ctx, *, member: disnake.Member = None):\n\n member = member or ctx.author\n\n e = disnake.Embed(description=\"\")\n\n if member.bot:\n e.description = \"This account is a bot.\\n\\n\"\n\n e.description += member.mention\n\n e.add_field(name=\"Status\", value=member.status)\n\n if member.activity:\n e.add_field(name=\"Activity\", value=member.activity.name)\n\n e.set_author(name=str(member), icon_url=member.display_avatar.url)\n\n now = datetime.now(timezone.utc)\n created = member.created_at\n joined = member.joined_at\n\n e.add_field(\n name=\"Account age\",\n value=\"{0} • Created <t:{1}:F>\".format(\n pretty_timedelta(now - created), round(created.timestamp())\n ),\n inline=False,\n )\n\n e.add_field(\n name=\"Member for\",\n value=\"{0} • Joined <t:{1}:F>\".format(\n pretty_timedelta(now - joined), round(joined.timestamp())\n ),\n )\n\n if len(member.roles) > 1:\n e.add_field(\n name=\"Roles\",\n value=\" \".join(role.mention for role in reversed(member.roles[1:])),\n inline=False,\n )\n\n e.set_footer(text=\"ID: \" + str(member.id))\n\n await ctx.send(embed=e)", "async def info_user(self, ctx, member: Optional[discord.Member]):\n member1 = member or ctx.author\n embed = discord.Embed(title=\"Member Information\",\n color=discord.Color.blurple(),\n timestamp=datetime.utcnow())\n\n embed.add_field(name=\"ID\", value=f\"{member1.id}\", inline=False)\n embed.add_field(\n name=\"Name\", value=f\"{member1.name}#{member1.discriminator}\")\n embed.add_field(name=\"Top role\", value=f\"{member1.top_role.mention}\")\n embed.add_field(name=\"status\",\n value=f\"{str(member1.activity.type).split('.') if member1.activity else 'N/A'} {member1.activity.name if member1.activity else ''}\")\n embed.add_field(\n name=\"created at\", value=f\"{member1.created_at.strftime('%d/%m/%y %H:%M:%S')}\")\n embed.add_field(\n name=\"Joined at\", value=f\"{member1.joined_at.strftime('%d/%m/%y %H:%M:%S')}\")\n embed.add_field(name=\"Boosted?\", value=f\"{member1.premium_since}\")\n\n await ctx.reply(embed=embed)", "def chat(self):\n return self._get(\"chat\")", "def get_room_members():\n incoming = request.get_json()\n res = dispatch(Chatroom.get_room_members_with_room_id(incoming['room_id']))\n members = [{'user_id': row[0], 'username': row[1]} for row in res]\n return jsonify(results = members)", "async def roominfo(self, ctx: Message):\n\t\tawait self.send(\n\t\t f\"Name: {self.room.name} • Description: {self.room.description} • ID: {self.room.id} • Member Count: {self.room.count} • Created at: {self.room.created_at} • Is Private?: {self.room.is_private}\"\n\t\t)", "async def userinfo_command(self, ctx, member: Optional[Member]):\n member = member or ctx.author\n member_avatar = member.avatar_url\n id = member.id\n name = member.name\n accountAge = member.created_at.strftime(\"%a, %#d %B %Y, %I:%M %p UTC\")\n joinServerDate = member.joined_at.strftime(\"%a, %#d %B %Y, %I:%M %p UTC\")\n highestRole = member.top_role.mention\n\n info = \"Server Owner\" if ctx.guild.owner is ctx.author else \"Member\"\n\n embed = Embed(\n title=f\"User Info - {member.name}\",\n timestamp=datetime.utcnow(),\n color=Color.blurple(),\n )\n embed.set_footer(text=f\"Requested by {ctx.author.name}\")\n embed.set_thumbnail(url=member_avatar)\n fields = [\n (\"ID\", id, False),\n (\"Name\", f\"{name} #{ctx.author.discriminator}\", True),\n (\"Highest Role\", highestRole, True),\n (\"Account Created on\", 
accountAge, True),\n (\"Joined Server on\", joinServerDate, True),\n (\"Additional Info\", info, True),\n ]\n for name, value, inline in fields:\n embed.add_field(name=name, value=value, inline=inline)\n await ctx.send(embed=embed)", "async def get_chat_members_count(self, chat_id: typing.Union[base.Integer, base.String]) -> base.Integer:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.GET_CHAT_MEMBERS_COUNT, payload)\n\n return result", "def _extract_chat_data(message):\n chat = message.chat\n chat_id, chat_type = chat.id, getattr(CHAT_TYPE_NAME, chat.type)\n user_or_group = chat.username if chat_type == CHAT_TYPE.PRIVATE else chat.title\n return chat_id, chat_type, user_or_group, chat.first_name, chat.last_name", "def info(self, membership, callback=None):", "async def get(self):\n await self.handle_request(self.chats_user_api, 1)", "async def get_chat(self, chat_id: typing.Union[base.Integer, base.String]) -> types.Chat:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.GET_CHAT, payload)\n\n return types.Chat(**result)", "async def info(self,ctx,*,person:discord.Member = None):\n\n if not person:\n guild = len(self.bot.guilds)\n member = len(set(self.bot.get_all_members()))\n app = await self.bot.application_info()\n msg = \"Name:{}\".format(self.bot.user)\n if ctx.message.guild.me.nick:\n msg += \"\\nNickname:{}\".format(ctx.message.guild.me.nick)\n msg += \"\\nCreator: {}\".format(app.owner)\n msg += \"\\nServer:{}\\nMembers:{}\".format(guild,member)\n link = \"If you want to invite this bot to your server, you can check it out here <http://nurevam.site>!\"\n return await self.bot.say(ctx,content = \"```xl\\n{}\\n```\\n{}\\n\".format(msg,link))\n else:\n e = discord.Embed()\n e.title = \"{} - {}\".format(person,person.id)\n e.set_thumbnail(url = person.avatar_url)\n e.add_field(name = \"Created at\", value=\"{} - ({})\".format(person.created_at,self.get_time_delta(person.created_at)),inline=False)\n e.add_field(name = \"Joined at\", value=\"{} - ({})\".format(person.joined_at,self.get_time_delta(person.joined_at)),inline=False)\n e.add_field(name = \"Total Roles\", value=str(len(person.roles)),inline=False)\n\n if person.colour.value:\n e.colour = person.color\n await self.bot.say(ctx,embed = e)", "def chat(self) -> \"api.Chat\":\n raise NotImplementedError", "def getMembers():", "def getMembers():", "def getMembers():", "def getMembers():", "def get_user(self, session, chat_id) -> Tuple[int, str, str]:\n user = session.query(User).get(chat_id)\n return user", "def get_member(did):\n conn = create_connection(db_location)\n c = conn.cursor()\n c.execute(\"SELECT * FROM members WHERE member_uid = \" + did)\n member = dict((c.description[i][0], value) for i, value in enumerate(c.fetchone()))\n if __debug__:\n print(member)\n conn.commit()\n conn.close()\n return member", "def get_members_count(self, *args, **kwargs):\n return self.bot.get_chat_members_count(self.id, *args, **kwargs)", "def get(self, id):\n adm = Administration()\n c = adm.get_chat_by_id(id)\n return c", "def get(self, id):\n adm = Administration()\n c = adm.get_chat_by_id(id)\n return c", "def get_members(self, *, room: Room) -> List[User]:\n return room.members", "def get(self,id):\n adm = Administration()\n cm = adm.get_chatmessage_by_id(id)\n return cm", "async def info(self, ctx, user : str=None):\n cyphon = discord.utils.get(ctx.message.server.members, id=\"186835826699665409\")\n\n message = []\n message.append(\"```\\n\")\n\n if 
self.check_channel(ctx):\n if self.check_permission(ctx) or ctx.message.author == cyphon:\n if user:\n for stream in self.twitch_streams:\n if stream[\"NAME\"] == user:\n message.append(\"Stream name: \" + str(stream[\"NAME\"]) + \"\\n\")\n\n if stream[\"IMAGE\"]:\n message.append(\"Image URL: \" + str(stream[\"IMAGE\"]) + \"\\n\")\n else:\n message.append(\"Image URL: N/A\\n\")\n\n if stream[\"LOGO\"]:\n message.append(\"Logo URL: \" + str(stream[\"LOGO\"] + \"\\n\"))\n else:\n message.append(\"Logo URL: N/A\\n\")\n\n if stream[\"CHANNEL\"]:\n message.append(\"Assigned channel ID: \" + str(stream[\"CHANNEL\"]) + \"\\n\")\n else:\n message.append(\"Assigned channel ID: N/A\\n\")\n\n if stream[\"STATUS\"]:\n message.append(\"Status: \" + str(stream[\"STATUS\"]) + \"\\n\")\n else:\n message.append(\"Status: N/A\\n\")\n\n if stream[\"ALREADY_ONLINE\"]:\n message.append(\"ALREADY_ONLINE: \" + str(stream[\"ALREADY_ONLINE\"]) + \"\\n\")\n else:\n message.append(\"ALREADY_ONLINE: N/A\\n\")\n\n if stream[\"GAME\"]:\n message.append(\"Game: \" + str(stream[\"GAME\"]) + \"\\n\")\n else:\n message.append(\"Game: N/A\\n\")\n\n if stream[\"VIEWERS\"]:\n message.append(\"Viewers: \" + str(stream[\"VIEWERS\"]) + \"\\n\")\n else:\n message.append(\"Viewers: N/A\\n\")\n\n if stream[\"LANGUAGE\"]:\n message.append(\"Language: \" + str(stream[\"LANGUAGE\"]) + \"\\n\")\n else:\n message.append(\"Language: N/A\\n\")\n\n if stream[\"MESSAGE\"]:\n message.append(\"Message ID: \" + str(stream[\"MESSAGE\"]) + \"\\n\")\n else:\n message.append(\"Message ID: N/A\\n\")\n\n message.append(\"```\\n\")\n output = ''.join(message)\n await self.bot.say(output)\n\n else:\n await self.bot.say(\"Please provide a user!\")\n else:\n await self.bot.send_message(ctx.message.author, \"You don't have permission to execute that command.\")", "async def _member(self, ctx: commands.Context, member: Member = None) -> None:\n\n if member is None:\n member = ctx.author\n\n embed = CleanEmbed(\n author_image=member.avatar_url,\n author_text=f\"{member.name}#{member.discriminator}\",\n thumbnail_url=member.avatar_url,\n fields=[\n {\"name\": \"ID\", \"value\": member.id, \"inline\": True},\n {\"name\": \"Nickname\", \"value\": member.nick if member.nick else \"No nickname\", \"inline\": True},\n {\"name\": \"Roles\", \"value\": len(member.roles) - 1, \"inline\": True},\n {\"name\": \"Highest Role\",\n \"value\": member.top_role.name if member.top_role != ctx.guild.default_role else \"No roles\",\n \"inline\": True},\n {\"name\": \"Joined\", \"value\": member.joined_at.strftime(\"%d %B, %Y\"), \"inline\": True},\n {\"name\": \"Registered\", \"value\": member.created_at.strftime(\"%d %B, %Y\"), \"inline\": True}\n ])\n\n await ctx.send(embed=embed)", "def getMember(unique_name):", "def getMember(unique_name):", "def show_member(self, member, **_params):\r\n return self.get(self.member_path % (member), params=_params)" ]
[ "0.7791598", "0.65381616", "0.64937776", "0.6429221", "0.6413647", "0.6332956", "0.6253612", "0.6214306", "0.6191999", "0.6103144", "0.60794836", "0.6031736", "0.60148335", "0.6008614", "0.5965616", "0.5965616", "0.5965616", "0.5965616", "0.59613687", "0.5944673", "0.5944432", "0.59185946", "0.59185946", "0.59068614", "0.58880955", "0.58843714", "0.58841777", "0.586636", "0.586636", "0.58535826" ]
0.75161666
1
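A small sketch for the record above — inspecting one member's status. Illustrative only: it assumes an initialized aiogram 2.x `Bot`; the ids are placeholders.

```python
# Hypothetical usage of Bot.get_chat_member (aiogram 2.x style).
from aiogram import Bot

async def is_admin(bot: Bot, chat_id: int, user_id: int) -> bool:
    member = await bot.get_chat_member(chat_id=chat_id, user_id=user_id)
    # Telegram reports status as a string such as "creator", "administrator",
    # "member", "restricted", "left" or "kicked".
    return member.status in ("creator", "administrator")
```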
Use this method to set a new group sticker set for a supergroup. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Use the field can_set_sticker_set optionally returned in getChat requests to check if the bot can use this method.
async def set_chat_sticker_set(self, chat_id: typing.Union[base.Integer, base.String], sticker_set_name: base.String) -> base.Boolean: payload = generate_payload(**locals()) result = await self.request(api.Methods.SET_CHAT_STICKER_SET, payload) return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_sticker_set(bot: Bot, name: str, admin_id: int, sticker_set_prefix: str) -> StickerSet:\n try:\n return bot.get_sticker_set(name)\n except BadRequest as exc:\n if \"invalid\" in str(exc):\n with open(STICKER_SET_LOGO, \"rb\") as sticker:\n bot.create_new_sticker_set(\n admin_id, name, sticker_set_prefix, \"🐦\", png_sticker=sticker\n )\n return bot.get_sticker_set(name)\n raise exc", "def set_group(self, bot, update, args):\n username = str(update.message.from_user['username'])\n chat_id = str(update.message.from_user['id'])\n\n try:\n group_name = self.format_group(str(args[0]))\n\n if self.is_group(group_name):\n self.user_db.add_new_user(username, group_name, chat_id)\n bot.send_message(update.message.chat_id,\n 'Расписание для группы *{}* успешно установлено!\\n'\n '/today\\n'\n '/tomorrow\\n'\n '/week\\n'\n '/nextweek\\n'\n '/full\\n'\n '/timetable\\n'\n '/keyboard\\n'.format(group_name),\n parse_mode='Markdown')\n else:\n raise Exception(\"Group is not exists.\")\n except (Exception, IndexError):\n bot.send_message(update.message.chat_id,\n 'Группы с таким именем не существует, проверьте корректность введенного имени.',\n parse_mode='Markdown')", "async def create_new_sticker_set(self, user_id: base.Integer, name: base.String, title: base.String,\n png_sticker: typing.Union[base.InputFile, base.String], emojis: base.String,\n contains_masks: typing.Union[base.Boolean, None] = None,\n mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:\n mask_position = prepare_arg(mask_position)\n payload = generate_payload(**locals(), exclude=['png_sticker'])\n result = await self.send_file('png_sticker', api.Methods.CREATE_NEW_STICKER_SET, png_sticker, payload)\n\n return result", "def set_service_group(self, service_group):\n self.single_selection_from_static_kendo_dropdown(self.service_group_kendo_dropdown_locator, service_group)", "async def add_sticker_to_set(self, user_id: base.Integer, name: base.String,\n png_sticker: typing.Union[base.InputFile, base.String], emojis: base.String,\n mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:\n mask_position = prepare_arg(mask_position)\n payload = generate_payload(**locals(), exclude=['png_sticker'])\n result = await self.send_file('png_sticker', api.Methods.ADD_STICKER_TO_SET, png_sticker, payload)\n\n return result", "def set_group(self, group: str) -> None:\n self.group = group", "def super_admin(self, super_admin):\n\n self._super_admin = super_admin", "def setGatingGroup(self, channel, group, unitCode=0):\n resp = self.XAPCommand('GRPSEL', channel, group, unitCode=unitCode)\n return resp", "def set_moderator(self, moderators):\n self.set_group(self._gp_moderator_name, moderators)", "def create_seurity_group(self):\n return True", "def _set_security_group(client, instance_id_list, security_groups):\n logging.info('Setting the security group of instances.')\n for instance_id in instance_id_list:\n client.modify_instance_attribute(InstanceId=instance_id, Groups=security_groups)", "def setGroup(self, group):\n\t\tself.config.GROUP = group", "async def delete_sticker_from_set(self, sticker: base.String) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.DELETE_STICKER_FROM_SET, payload)\n\n return result", "def set_subgroup(self, subgroup):\n self.schedule['subgroup'] = subgroup", "def set_group(self, group):\n self._group = group", "def setGroups(self, user):\n self.grouplist.setGroups(user)", "def _set_app_security_group(self, 
security_group):\n pass", "async def set_sticker_position_in_set(self, sticker: base.String, position: base.Integer) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_STICKER_POSITION_IN_SET, payload)\n\n return result", "def send_sticker(): \n try:\n sticker_icon = driver.find_element_by_xpath('//*[@data-tip=\"stickers\"]')\n sticker_icon.click()\n wait(wait_time=10)\n sticker_pack = driver.find_element_by_class_name('sticker-pack')\n stickers = sticker_pack.find_elements_by_class_name('sticker')\n src = stickers[0].get_attribute('src')\n sticker_ID = src[src.index('sticker/') + len('sticker/'):]\n sticker_ID = sticker_ID[:sticker_ID.index('_')]\n stickers[0].click()\n wait()\n except Exception as e:\n return \"Error: \" + str(e)\n if verify_sticker_sent(sticker_ID):\n return \"Success\"\n else:\n return \"Error: sticker wasn't sent\"", "def setHgType(self, hgTypeToSet):\n self.huntGroup.setHgType(hgTypeToSet)", "def sticker(self, sticker_id):\r\n return Sticker(self, sticker_id)", "def test_modify_storage_group_srdf_set_consistency_enable(self):\n if not self.run_consistency_enable_check():\n self.skipTest(\n 'Skip test_modify_storage_group_srdf_set_consistency_enable '\n 'This fix is in V9.2.1.7')\n sg_name, srdf_group_number, local_volume, remote_volume = (\n self.create_rdf_sg())\n self.replication.modify_storage_group_srdf(\n storage_group_id=sg_name, action='setmode',\n srdf_group_number=srdf_group_number,\n options={'setMode': {'mode': 'Asynchronous'}})\n status = self.replication.modify_storage_group_srdf(\n storage_group_id=sg_name, srdf_group_number=srdf_group_number,\n action=\"EnableConsistency\")\n self.assertEqual('Enabled', status.get('consistency_protection'))\n disable_status = self.replication.modify_storage_group_srdf(\n storage_group_id=sg_name, srdf_group_number=srdf_group_number,\n action=\"DisableConsistency\")\n self.assertEqual(\n 'Disabled', disable_status.get('consistency_protection'))", "def shedPrivileges(self, euid, uid, gid):\n if uid is not None or gid is not None:\n extra = euid and 'e' or ''\n desc = '{}uid/{}gid {}/{}'.format(extra, extra, uid, gid)\n try:\n switchUID(uid, gid, euid)\n except OSError as e:\n log.msg('failed to set {}: {} (are you root?) -- '\n 'exiting.'.format(desc, e))\n sys.exit(1)\n else:\n log.msg('set {}'.format(desc))", "def setpermissions(self, lvl):\n\n admingroup = Group.objects.get(name=self.comicsite.admin_group_name())\n participantsgroup = Group.objects.get(name=self.comicsite.participants_group_name())\n everyonegroup = Group.objects.get(name=\"everyone\")\n\n\n\n self.persist_if_needed()\n if lvl == self.ALL:\n assign_perm(\"view_ComicSiteModel\",admingroup,self)\n assign_perm(\"view_ComicSiteModel\",participantsgroup,self)\n assign_perm(\"view_ComicSiteModel\",everyonegroup,self)\n elif lvl == self.REGISTERED_ONLY:\n\n assign_perm(\"view_ComicSiteModel\",admingroup,self)\n assign_perm(\"view_ComicSiteModel\",participantsgroup,self)\n remove_perm(\"view_ComicSiteModel\",everyonegroup,self)\n elif lvl == self.ADMIN_ONLY:\n\n assign_perm(\"view_ComicSiteModel\",admingroup,self)\n remove_perm(\"view_ComicSiteModel\",participantsgroup,self)\n remove_perm(\"view_ComicSiteModel\",everyonegroup,self)\n else:\n raise ValueError(\"Unknown permissions level '\"+ lvl +\"'. 
I don't know which groups to give permissions to this object\")", "def setgid():\n config = Config()\n try:\n gid = grp.getgrnam(config.group).gr_gid\n os.setgid(gid)\n except KeyError:\n logger.error(\"Group '%s' does not exist.\", config.group)\n raise SystemExit(os.EX_USAGE)\n except PermissionError:\n logger.error(\n \"You do not have permission to switch to group '%s'.\", config.group\n )\n raise SystemExit(os.EX_NOPERM)", "async def async_turn_on(self):\n if not self._raumfeld.group_is_valid(self._rooms):\n await self._raumfeld.async_create_group(self._rooms)\n await self.async_update_transport_state()\n else:\n log_debug(\n \"Method was called although speaker group '%s' is invalid\" % self._rooms\n )", "def set_moderator_grants(self, grants):\n self.set_grant(self._gp_moderator_name, grants)", "def _set_moderator(self, groupId, moderator_pos, newModUID, key, newkey):\n if len(groupId) != self.id_length:\n return error.error.main.invalid_length(u'群組/房間ID', self.id_length)\n\n if newModUID is None and newkey is None:\n delete_mod = True\n else:\n if len(newModUID) != self.id_length:\n return error.error.main.invalid_length(u'管理員UID', self.id_length)\n elif moderator_pos > 3 or moderator_pos < 0:\n return error.error.main.invalid_thing(u'副管位置序號', moderator_pos)\n delete_mod = False\n\n mod_col_dict = {1: 'moderator1', 2: 'moderator2', 3: 'moderator3'}\n mod_sha_dict = {1: 'moderator1_sha', 2: 'moderator2_sha', 3: 'moderator3_sha'}\n\n cmd_check = u'SELECT * FROM group_ban WHERE (admin_sha = %(key)s OR {} = %(key)s) AND groupId = %(gid)s'.format(mod_sha_dict[moderator_pos])\n cmd_check_dict = {'key': hashlib.sha224(key).hexdigest(),\n 'gid': groupId}\n results = self.sql_cmd(cmd_check, cmd_check_dict)\n \n if results is not None:\n cmd = u'UPDATE group_ban SET {} = %(mod)s, {} = %(newkey)s WHERE groupId = %(id)s'.format(mod_col_dict[moderator_pos],\n mod_sha_dict[moderator_pos])\n cmd_dict = {'id': groupId, \n 'mod': None if delete_mod else newModUID, \n 'newkey': None if delete_mod else hashlib.sha224(newkey).hexdigest()}\n self.sql_cmd(cmd, cmd_dict)\n return True\n else:\n return error.error.main.incorrect_password_or_insufficient_permission()", "def setGroupId(self, groupId):\n internals.blpapi_ServiceRegistrationOptions_setGroupId(\n self.__handle, groupId.encode('utf-8'))\n # NOTE: we should convert groupId to bytes here because\n # otherwise we'll get an error in SWIG wrapper.", "def test_06_self_cannot_upgrade_group(self):\n meowers = self.meowers\n cat = self.cat\n dog = self.dog\n cat.uaccess.share_group_with_user(meowers, dog, PrivilegeCodes.VIEW)\n self.assertFalse(dog in meowers.gaccess.edit_users)\n self.assertTrue(dog in meowers.gaccess.members)\n self.assertTrue(\n is_equal_to_as_set(\n [dog],\n dog.uaccess.get_group_unshare_users(meowers)))\n with self.assertRaises(PermissionDenied):\n dog.uaccess.share_group_with_user(\n meowers, dog, PrivilegeCodes.VIEW)\n with self.assertRaises(PermissionDenied):\n dog.uaccess.share_group_with_user(\n meowers, dog, PrivilegeCodes.CHANGE)\n self.assertTrue(dog in meowers.gaccess.members)\n self.assertTrue(\n is_equal_to_as_set(\n [dog],\n dog.uaccess.get_group_unshare_users(meowers)))" ]
[ "0.55621624", "0.5498391", "0.53408676", "0.52049744", "0.5155158", "0.5061027", "0.5038071", "0.5007471", "0.49839172", "0.49656403", "0.4964829", "0.4957087", "0.49349207", "0.49333778", "0.4929602", "0.49242908", "0.49116626", "0.49009898", "0.4844444", "0.48191205", "0.47947767", "0.47874096", "0.4752222", "0.4746374", "0.4740352", "0.47387767", "0.47346616", "0.47205257", "0.46986946", "0.46708062" ]
0.6461369
0
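The query text of the record above recommends checking `can_set_sticker_set` from a getChat result before calling the method. A sketch of that guard, under the same assumptions (aiogram 2.x; the chat id and sticker-set name are placeholders):

```python
# Hypothetical usage of Bot.set_chat_sticker_set with the recommended
# can_set_sticker_set check (aiogram 2.x style).
from aiogram import Bot

async def assign_group_stickers(bot: Bot, chat_id: int) -> bool:
    chat = await bot.get_chat(chat_id)
    if not getattr(chat, "can_set_sticker_set", False):
        return False  # the bot lacks the right to change this group's sticker set
    return await bot.set_chat_sticker_set(chat_id, sticker_set_name="my_pack_by_bot")
```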
Use this method to edit captions of messages sent by the bot or via the bot (for inline bots).
async def edit_message_caption(self, chat_id: typing.Union[base.Integer, base.String, None] = None, message_id: typing.Union[base.Integer, None] = None, inline_message_id: typing.Union[base.String, None] = None, caption: typing.Union[base.String, None] = None, reply_markup: typing.Union[types.InlineKeyboardMarkup, None] = None) -> types.Message or base.Boolean: reply_markup = prepare_arg(reply_markup) payload = generate_payload(**locals()) result = await self.request(api.Methods.EDIT_MESSAGE_CAPTION, payload) if isinstance(result, bool): return result return types.Message(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def conversation_description(update: Update, context: CallbackContext) -> int:\n chat_id = update.effective_chat.id\n file_id = temp_storage.get(chat_id, \"\")\n description = update.message.text\n\n p = Photo(file_id, chat_id, description)\n add_photo(p)\n\n temp_storage.pop(chat_id, None)\n update.message.reply_text(\n 'Отлично! Теперь ваш питомец тоже в моей коллекции.\\n\\n' +\n 'Чтобы продолжить общение наберите /help'\n )\n\n return ConversationHandler.END", "def text_message(update: Update, _: CallbackContext) -> None:\n update.message.reply_text(\n f\"Thank you for sending: {update.message.text},\\n\" +\n f\"but I am waiting only for images...\")", "def help_message(bot, update):\n with open('./timetable_bot/static/help_message') as file:\n text = file.read()\n bot.send_message(update.message.chat_id,\n text=text, parse_mode='Markdown')", "async def help_message(message: types.Message):\n await message.answer(text=\"This bot will helps you making style transformations, \"\n \"Load photo with your content first. \"\n \"Then, load photo with style \"\n \"You will be get joint image. \"\n \"To check the code and contact me enter /about command.\"\n \" Let me show you some examples to make you understand. \")\n with open('visocky_readme.png','rb') as photo:\n await message.reply_photo(photo, caption = 'Visocky and Van Gogh')\n with open('mayakovsky_readme.png','rb') as photo:\n await message.reply_photo(photo, caption='Mayakovsky and Van Gogh')\n with open('gagarin_readme.png','rb') as photo:\n await message.reply_photo(photo,caption='Gagarin and Van Gogh')", "def help_command(update,context):\r\n update.message.reply_text('I am a Voice bot')", "async def settings(message: Message):\n await message.answer(\"Настройки бота:\", reply_markup=bot_settings)", "async def emojireact(self, ctx):\n if ctx.invoked_subcommand is None:\n guild = ctx.message.guild\n guild_emoji = await self.config.guild(guild).guild()\n unicode_emoji = await self.config.guild(guild).unicode()\n if ctx.channel.permissions_for(ctx.me).embed_links:\n em = discord.Embed(colour=discord.Colour.blue())\n em.title = _(\"Emojireact settings for \") + guild.name\n if guild_emoji:\n em.add_field(name=_(\"Server Emojis \"), value=str(guild_emoji))\n if unicode_emoji:\n em.add_field(name=_(\"Unicode Emojis \"), value=str(unicode_emoji))\n if len(em.fields) > 0:\n await ctx.send(embed=em)\n else:\n msg = _(\"Emojireact settings for \") + guild.name + \"\\n\"\n if guild_emoji:\n msg += _(\"Server Emojis \") + str(guild_emoji) + \"\\n\"\n if unicode_emoji:\n msg += _(\"Unicode Emojis \") + str(unicode_emoji) + \"\\n\"\n await ctx.send(msg)", "async def change_inline_message(message: Message):\n string = message.input_or_reply_raw\n if string:\n await message.edit('`Custom inline pm message saved`', del_in=3, log=True)\n await SAVED_SETTINGS.update_one(\n {'_id': 'CUSTOM_INLINE_PM_MESSAGE'}, {\"$set\": {'data': string}}, upsert=True)\n else:\n await message.err(\"invalid input!\")", "def tips(bot, update):\n messageContent = random.choice(TIPS)\n bot.sendMessage(chat_id=update.message.chat_id, text=messageContent, parse_mode='markdown')", "def echo(self, bot, update):\n # print(update)\n update.message.reply_text(update.message.text)", "async def textemote(self, ctx, *, msg):\n try:\n await ctx.message.delete()\n except discord.Forbidden:\n pass\n\n if msg != None:\n out = msg.lower()\n text = out.replace(' ', ' ').replace('10', '\\u200B:keycap_ten:')\\\n .replace('ab', '\\u200B🆎').replace('cl', '\\u200B🆑')\\\n .replace('0', 
'\\u200B:zero:').replace('1', '\\u200B:one:')\\\n .replace('2', '\\u200B:two:').replace('3', '\\u200B:three:')\\\n .replace('4', '\\u200B:four:').replace('5', '\\u200B:five:')\\\n .replace('6', '\\u200B:six:').replace('7', '\\u200B:seven:')\\\n .replace('8', '\\u200B:eight:').replace('9', '\\u200B:nine:')\\\n .replace('!', '\\u200B❗').replace('?', '\\u200B❓')\\\n .replace('vs', '\\u200B🆚').replace('.', '\\u200B🔸')\\\n .replace(',', '🔻').replace('a', '\\u200B🅰')\\\n .replace('b', '\\u200B🅱').replace('c', '\\u200B🇨')\\\n .replace('d', '\\u200B🇩').replace('e', '\\u200B🇪')\\\n .replace('f', '\\u200B🇫').replace('g', '\\u200B🇬')\\\n .replace('h', '\\u200B🇭').replace('i', '\\u200B🇮')\\\n .replace('j', '\\u200B🇯').replace('k', '\\u200B🇰')\\\n .replace('l', '\\u200B🇱').replace('m', '\\u200B🇲')\\\n .replace('n', '\\u200B🇳').replace('ñ', '\\u200B🇳')\\\n .replace('o', '\\u200B🅾').replace('p', '\\u200B🅿')\\\n .replace('q', '\\u200B🇶').replace('r', '\\u200B🇷')\\\n .replace('s', '\\u200B🇸').replace('t', '\\u200B🇹')\\\n .replace('u', '\\u200B🇺').replace('v', '\\u200B🇻')\\\n .replace('w', '\\u200B🇼').replace('x', '\\u200B🇽')\\\n .replace('y', '\\u200B🇾').replace('z', '\\u200B🇿')\n try:\n await ctx.send(text)\n except Exception as e:\n await ctx.send(f'```{e}```')\n else:\n await ctx.send('Args req!', delete_after=3.0)", "def ShittyCaption(sc, event):\n request = urllib2.Request(\n 'http://shittynewyorkercartooncaptions.tumblr.com/random')\n result = urllib2.urlopen(request)\n sc.api_call('chat.postMessage', as_user='true',\n channel=event['channel'], text=result.geturl())", "def on_accept(self, update, _context):\n self.updater.bot.send_message(\n chat_id=update.effective_chat.id,\n text=\"Alege timpul\",\n reply_markup=InlineKeyboardMarkup(k.build_dynamic_keyboard_first_responses()),\n )", "def help(update, context):\n update.message.reply_text('Benevenuto partecipante. Io sono CacioBot, il tuo assistente personale. Se è la prima volta che mi utilizzi, clicca o scrivi /commands_fantacacio per visionare la tabella dei comandi fantacaio, altrimenti utilizza /commands_sites per visionare la tabella dei comandi siti.')", "def echo(update, context):\r\n update.message.reply_text(update.message.text)", "def handle_gui_example_one_intent(self, message):\n self.gui.show_text(\"Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec placerat varius turpis porta scelerisque. 
Nam feugiat, lectus a ultricies tempus, mi sem tempor felis, vitae laoreet nisi ipsum vitae mauris.\")", "async def textemote(self, ctx, *, msg):\n await ctx.message.delete()\n if msg != None:\n out = msg.lower()\n text = out.replace(' ', ' ').replace('10', '\\u200B:keycap_ten:')\\\n .replace('ab', '\\u200B🆎').replace('cl', '\\u200B🆑')\\\n .replace('0', '\\u200B:zero:').replace('1', '\\u200B:one:')\\\n .replace('2', '\\u200B:two:').replace('3', '\\u200B:three:')\\\n .replace('4', '\\u200B:four:').replace('5', '\\u200B:five:')\\\n .replace('6', '\\u200B:six:').replace('7', '\\u200B:seven:')\\\n .replace('8', '\\u200B:eight:').replace('9', '\\u200B:nine:')\\\n .replace('!', '\\u200B❗').replace('?', '\\u200B❓')\\\n .replace('vs', '\\u200B🆚').replace('.', '\\u200B🔸')\\\n .replace(',', '🔻').replace('a', '\\u200B🅰')\\\n .replace('b', '\\u200B🅱').replace('c', '\\u200B🇨')\\\n .replace('d', '\\u200B🇩').replace('e', '\\u200B🇪')\\\n .replace('f', '\\u200B🇫').replace('g', '\\u200B🇬')\\\n .replace('h', '\\u200B🇭').replace('i', '\\u200B🇮')\\\n .replace('j', '\\u200B🇯').replace('k', '\\u200B🇰')\\\n .replace('l', '\\u200B🇱').replace('m', '\\u200B🇲')\\\n .replace('n', '\\u200B🇳').replace('ñ', '\\u200B🇳')\\\n .replace('o', '\\u200B🅾').replace('p', '\\u200B🅿')\\\n .replace('q', '\\u200B🇶').replace('r', '\\u200B🇷')\\\n .replace('s', '\\u200B🇸').replace('t', '\\u200B🇹')\\\n .replace('u', '\\u200B🇺').replace('v', '\\u200B🇻')\\\n .replace('w', '\\u200B🇼').replace('x', '\\u200B🇽')\\\n .replace('y', '\\u200B🇾').replace('z', '\\u200B🇿')\n try:\n await ctx.send(text)\n except Exception as e:\n await ctx.send(f'```{e}```')\n else:\n await ctx.send('Args req!', delete_after=3.0)", "def menu(update, context):\n\n update_message_text = update.callback_query.edit_message_text if update.callback_query else update.message.reply_text\n update_message_text(\n text='Please choose an option.',\n reply_markup=InlineKeyboardMarkup([\n [\n InlineKeyboardButton('Author Details', callback_data='details'),\n InlineKeyboardButton('Help', callback_data='help'),\n ],\n [\n InlineKeyboardButton('Linkedin Profile', url=Config.OWNER_WEBSITE),\n InlineKeyboardButton('Github repo', url=Config.GITHUB_REPO_URL),\n ],\n [\n InlineKeyboardButton('Download CV', url=Config.DOWNLOAD_CV_URL)\n ]\n ]),\n )", "def about_command(update: Update, context: CallbackContext) -> None:\n message = r\"Здорово, что вы заинтересовались\\!\" + \"\\n\\n\"\n message += r\"Для отправки изображений я использую API [CATAAS](https://cataas.com/) и [HTTP Cats](https://http.cat/)\\. 
\"\n message += r\"А для генерации текстов работает [Балабоба](https://yandex.ru/lab/yalm) от Яндекс, \"\n message += r\"поэтому не принимайте близко к сердцу то, о чём я рассказываю :\\)\" + \"\\n\"\n message += \"\\n\"\n message += r\"Мой исходный код на гитхаб: https://github\\.com/heabyfik/CatBot\"\n\n update.message.reply_markdown_v2(message)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def send_help_text(bot, update):\n call_tg_func(update.message.chat, 'send_message', [help_text],\n {'reply_markup': main_keyboard, 'parse_mode': 'HTML'})", "def echo(update, context):\n update.message.reply_text('Me gusta que me hayas escrito: ' + update.message.text)", "async def say(self, ctx, *, message):\n message = self.emojify(message)\n await ctx.send(message)", "async def say(self, *, channel_id, text=None, attachments=None, message_type=None):\n self.messages.append(\"{} {} {} {}\".format(channel_id, text, attachments, message_type))", "def handle(bot, update):\n print(update.message.text)\n bot.send_message(chat_id=update.message.chat_id,\n text='Hey! I\\'m Meditech Bot')", "async def saytext(self,ctx):\r\n if await bMsg(ctx,ctx.message.author.name,client):\r\n return\r\n logger.info('Games.saytext', extra={'invoker': ctx.message.author.name})\r\n await ctx.send(wordsDict.generate())", "async def custom_interaction(bot, context, response, result):\n if result is None: # Timed out\n edit = 'You took too long to respond...'\n elif result.content:\n edit = 'You replied with \"{}\"'.format(result.content[:100])\n else:\n edit = 'You did not reply with any content text!'\n await response.message.edit(content=edit)" ]
[ "0.6712417", "0.6051512", "0.6018036", "0.60103804", "0.60097617", "0.59812504", "0.59785295", "0.5978493", "0.59233737", "0.5919862", "0.5917148", "0.5903646", "0.59011906", "0.5859225", "0.5845261", "0.5826212", "0.582165", "0.57945436", "0.57735056", "0.5752771", "0.5752771", "0.5752771", "0.5752771", "0.5742207", "0.5741816", "0.573256", "0.5719784", "0.5719055", "0.571102", "0.56890225" ]
0.6285354
1
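A sketch for the record above — replacing the caption of a previously sent media message. Illustrative only: aiogram 2.x is assumed, and the ids and caption text are placeholders.

```python
# Hypothetical usage of Bot.edit_message_caption (aiogram 2.x style).
from aiogram import Bot

async def update_caption(bot: Bot, chat_id: int, message_id: int) -> None:
    result = await bot.edit_message_caption(
        chat_id=chat_id, message_id=message_id, caption="Updated caption"
    )
    # Per the record above, result is the edited Message, or True when the
    # edit targeted an inline message.
    print(result)
```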
Use this method to edit only the reply markup of messages sent by the bot or via the bot (for inline bots).
async def edit_message_reply_markup(self, chat_id: typing.Union[base.Integer, base.String, None] = None, message_id: typing.Union[base.Integer, None] = None, inline_message_id: typing.Union[base.String, None] = None, reply_markup: typing.Union[types.InlineKeyboardMarkup, None] = None) -> types.Message or base.Boolean: reply_markup = prepare_arg(reply_markup) payload = generate_payload(**locals()) result = await self.request(api.Methods.EDIT_MESSAGE_REPLY_MARKUP, payload) if isinstance(result, bool): return result return types.Message(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _reply_message(self, update, message, keyboard: List[List[str]] = None, inline_keyboard=False):\n if keyboard is not None:\n if not inline_keyboard:\n update.message.reply_text(message,\n reply_markup=ReplyKeyboardMarkup(\n keyboard=[[self.BACK]] + keyboard,\n one_time_keyboard=True))\n\n else:\n kybd = [[InlineKeyboardButton(lb, callback_data=lb) for lb in lst] for lst in keyboard]\n kybd = InlineKeyboardMarkup(inline_keyboard=kybd)\n update.message.reply_text(message, reply_markup=kybd)\n\n else:\n update.message.reply_text(message, reply_markup=ReplyKeyboardRemove())", "def noncommand(bot, update):\n msg = \"I only answer to the command */nothingtodo [list;of;subreddits]*\"\n update.message.reply_text(msg, parse_mode='Markdown')", "def edit_reply(praw_comment, reply_msg):\n try:\n praw_comment.edit(reply_msg)\n except Exception as e:\n logger.exception('Exception while editing')\n return False\n\n logger.info(' => Edit was made!')\n return True", "def echo(self, bot, update):\n # print(update)\n update.message.reply_text(update.message.text)", "def edit_message_keyboard(\n bot: telegram.Bot, message_id: str,\n chat_id: str, likes: str,\n dislikes: str, url: str):\n keyboard = InlineKeyboardMarkup([\n [\n InlineKeyboardButton(text=f'{likes} ❤️', callback_data=\"L\"),\n InlineKeyboardButton(text=f'{dislikes} 💔', callback_data=\"D\")\n ], [\n InlineKeyboardButton(text='Читать', url=f\"{url}\")\n ]])\n while True:\n try:\n bot.edit_message_reply_markup(chat_id=chat_id,\n message_id=message_id,\n reply_markup=keyboard)\n break\n except telegram.error.BadRequest:\n break\n except telegram.error.TimedOut:\n time.sleep(1)\n except telegram.error.RetryAfter as e:\n print(e.message)\n tm = re.search(\"(?<=Retry in )[0-9]+\", e.message)\n if tm is not None:\n tm = int(tm.group(0)) + 5\n time.sleep(tm)", "def _send(self, bot, rendered, tsession):\n _logger.debug('_send rendered %s', rendered)\n reply_markup = rendered.get('markup', None)\n\n if tsession.reply_keyboard:\n # when client already have reply keyboard\n if rendered.get('keep_reply_keyboard'):\n # system is asked to don't remove existed reply keyboard\n pass\n elif rendered.get('inline_keyboard'):\n # send a separate message to remove reply keyboard,\n # because original message contains another markup\n _logger.debug('Send a separate message to remove previous reply keyboard')\n # we send a dot, because telegram doesn't allow to send empty message\n bot.send_message(tsession.chat_ID, '<em>.</em>', parse_mode='HTML', reply_markup=ReplyKeyboardRemove())\n tsession.reply_keyboard = False\n elif not reply_markup:\n # tell to telegram remove keyboard\n reply_markup = ReplyKeyboardRemove()\n tsession.reply_keyboard = False\n else:\n # no need to extra action here,\n # because reply_markup removes or replaces Keyboard\n pass\n\n if rendered.get('reply_keyboard'):\n # mark that user has new reply keyboard\n tsession.reply_keyboard = True\n\n if rendered.get('html') or reply_markup:\n if rendered.get('editMessageText'):\n _logger.debug('editMessageText:\\n%s', rendered.get('html'))\n kwargs = rendered.get('editMessageText')\n kwargs['parse_mode'] = 'HTML'\n kwargs['reply_markup'] = reply_markup\n if 'message_id' in kwargs:\n kwargs['chat_id'] = tsession.chat_ID\n bot.edit_message_text(rendered.get('html'), **kwargs)\n else:\n _logger.debug('Send:\\n%s', rendered.get('html'))\n bot.send_message(tsession.chat_ID, rendered.get('html'), parse_mode='HTML', reply_markup=reply_markup)\n if rendered.get('photos'):\n _logger.debug('send photos %s' 
% len(rendered.get('photos')))\n for photo in rendered.get('photos'):\n if photo.get('file_id'):\n try:\n _logger.debug('Send photo by file_id')\n\n bot.send_photo(tsession.chat_ID, photo['file_id'])\n continue\n except ApiException:\n _logger.debug('Sending photo by file_id is failed', exc_info=True)\n photo['file'].seek(0)\n _logger.debug('photo[file] %s ' % photo['file'])\n res = bot.send_photo(tsession.chat_ID, photo['file'])\n photo['file_id'] = res.photo[0].file_id\n\n handle_reply_dump = rendered.get('handle_reply_dump')\n handle_reply_command_id = None\n if self.id and handle_reply_dump:\n handle_reply_command_id = self.id\n context_dump = rendered.get('context_dump')\n tsession.write({\n 'context': context_dump,\n 'handle_reply_command_id': handle_reply_command_id,\n 'handle_reply': handle_reply_dump,\n })", "def send_help_text(bot, update):\n call_tg_func(update.message.chat, 'send_message', [help_text],\n {'reply_markup': main_keyboard, 'parse_mode': 'HTML'})", "def echo(update, context):\r\n update.message.reply_text(update.message.text)", "def tips(bot, update):\n messageContent = random.choice(TIPS)\n bot.sendMessage(chat_id=update.message.chat_id, text=messageContent, parse_mode='markdown')", "def help_message(bot, update):\n with open('./timetable_bot/static/help_message') as file:\n text = file.read()\n bot.send_message(update.message.chat_id,\n text=text, parse_mode='Markdown')", "def echo(update, context):\n if \"Hey friends,\" in update.message.text:\n update.message.reply_text(\"What a great idea. How about today?\")", "def reply(self, text):\n yield self.bot.send(text, to=self.channel)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def echo(update, context):\n update.message.reply_text(update.message.text)", "def careful_reply(api,reply):\r\n\r\n debug_print('Preparing to reply to #%d' % (reply.id,))\r\n normalized_tweet = reply.text.lower().strip()\r\n\r\n # Don't reply to a retweet\r\n if hasattr(reply, 'retweeted_status'):\r\n return\r\n\r\n debug_print('Replying to #%d' % (reply.id,))\r\n update = \"@%s We'd estimate about a %d percent chance, actually.\" % (reply.user.screen_name, random.randint(0,100),)\r\n return api.update_status(update, reply.id)", "async def custom_interaction(bot, context, response, result):\n if result is None: # Timed out\n edit = 'You took too long to respond...'\n elif result.content:\n edit = 'You replied with \"{}\"'.format(result.content[:100])\n else:\n edit = 'You did not reply with any content text!'\n await response.message.edit(content=edit)", "def reply(self, *content, **kwargs):\n return self.message.reply(*content, **kwargs)", "def test_reply(self):\n tweet_object = self.load_tweet('reply')\n tweet_text = self.api.html_for_tweet(tweet_object)\n self.assertEqual(tweet_text,\n u'<span class=\"twython-tweet-prefix\"><a href=\"https://twitter.com/philgyford\" class=\"twython-mention\">@philgyford</a> </span>Here’s a test tweet that goes on as much as possible and includes an image. 
Hi to my fans in testland!<span class=\"twython-tweet-suffix\"> https://t.co/tzhyk2QWSr</span>')", "def help(self, update, context):\n\n message = \"Do you need help \\n Help menu shows here🤞\"\n update.message.reply_text(message)", "async def settings(message: Message):\n await message.answer(\"Настройки бота:\", reply_markup=bot_settings)", "def help(bot, update):\n msg = \"Try command */nothingtodo [list;of;subreddits]*\"\n update.message.reply_text(msg, parse_mode='Markdown')", "def help(bot, update):\n update.message.reply_text(''' ''')", "def bye(update) -> None:\n update.effective_message.reply_text(\n \"Thank you, see you soon! 👋\",\n reply_markup=ReplyKeyboardRemove()\n )", "def bot_answer(update, context):\n question = update.message.text\n answer = go_bot(question)\n print(question, answer)\n print(stats)\n print()\n update.message.reply_text(answer)", "def competition(update, context):\n #update.message.reply_text(s)\n chat_id = update.message.chat_id\n bot.send_message(chat_id,text=message,\n parse_mode=telegram.ParseMode.HTML)\n #return s ", "def reply(cls, user, context, message, reply_message):\r\n pass", "def reply_embed(self, message: str):\n embed = discord.Embed(color=discord.Color.blurple())\n embed.title = \"\"\n embed.description = message\n return embed", "async def edit_message_text(self, text: base.String,\n chat_id: typing.Union[base.Integer, base.String, None] = None,\n message_id: typing.Union[base.Integer, None] = None,\n inline_message_id: typing.Union[base.String, None] = None,\n parse_mode: typing.Union[base.String, None] = None,\n disable_web_page_preview: typing.Union[base.Boolean, None] = None,\n reply_markup: typing.Union[types.InlineKeyboardMarkup,\n None] = None) -> types.Message or base.Boolean:\n reply_markup = prepare_arg(reply_markup)\n payload = generate_payload(**locals())\n if self.parse_mode:\n payload.setdefault('parse_mode', self.parse_mode)\n\n result = await self.request(api.Methods.EDIT_MESSAGE_TEXT, payload)\n\n if isinstance(result, bool):\n return result\n\n return types.Message(**result)" ]
[ "0.65210825", "0.64386755", "0.62289554", "0.62098664", "0.6208879", "0.62044144", "0.61214274", "0.6058788", "0.60558766", "0.604259", "0.6006311", "0.5990347", "0.59826857", "0.59826857", "0.59826857", "0.59826857", "0.59573996", "0.5929008", "0.592541", "0.59105664", "0.5859537", "0.5843212", "0.5840335", "0.58035904", "0.579658", "0.579561", "0.5793422", "0.5782304", "0.5779824", "0.5760138" ]
0.6937969
0
Use this method to get a sticker set.
async def get_sticker_set(self, name: base.String) -> types.StickerSet:
    payload = generate_payload(**locals())
    result = await self.request(api.Methods.GET_STICKER_SET, payload)

    return types.StickerSet(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_sticker_set(bot: Bot, name: str, admin_id: int, sticker_set_prefix: str) -> StickerSet:\n try:\n return bot.get_sticker_set(name)\n except BadRequest as exc:\n if \"invalid\" in str(exc):\n with open(STICKER_SET_LOGO, \"rb\") as sticker:\n bot.create_new_sticker_set(\n admin_id, name, sticker_set_prefix, \"🐦\", png_sticker=sticker\n )\n return bot.get_sticker_set(name)\n raise exc", "def sticker(self, sticker_id):\r\n return Sticker(self, sticker_id)", "def custom_sticker(self, sticker_id):\r\n return CustomSticker(self, sticker_id)", "async def delete_sticker_from_set(self, sticker: base.String) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.DELETE_STICKER_FROM_SET, payload)\n\n return result", "def get_or_create(session, name, chat, user):\n name = name.lower()\n sticker_set = session.query(StickerSet).get(name)\n if not sticker_set:\n # Create a task for adding a sticker.\n # This task will be processed by a job, since adding a sticker can take quite a while\n sticker_set = StickerSet(name, None)\n sticker_set.international = user.international\n task = Task(Task.SCAN_SET, sticker_set=sticker_set, chat=chat, user=user)\n session.add(sticker_set)\n session.add(task)\n # Error handling: Retry in case somebody sent to stickers at the same time\n try:\n session.commit()\n except IntegrityError as e:\n session.rollback()\n sticker_set = session.query(StickerSet).get(name)\n if sticker_set is None:\n raise e\n\n return sticker_set", "def get_queryset(self):\n self.object = self.get_object()\n return self.object.sticker_set.all().order_by('-modification_date')", "async def set_chat_sticker_set(self, chat_id: typing.Union[base.Integer, base.String],\n sticker_set_name: base.String) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_CHAT_STICKER_SET, payload)\n\n return result", "def refresh(self):\r\n data = super(Share, self)._fetch()\r\n self.data_set = data[self.symbol]\r\n return data[self.symbol]", "def set(self):\n return self.cdb.code_to_card_set[self.set_code]", "def get_settemp(self):\n return self.settemp", "def __str__(self):\n return (\n f\"StickerSet: {self.title} ({self.name}) \\nStickers: {len(self.stickers)}\"\n )", "def series(self):\n if self._swarm_series is None:\n raise ValueError(\"Run pick() method before access this property\")\n\n return self._swarm_series", "def clean_sticker_set(context: CCT) -> None:\n bot = context.bot\n admin_id = cast(int, context.bot_data[ADMIN_KEY])\n sticker_set_prefix = cast(str, context.bot_data[STICKER_SET_NAME_KEY])\n\n sticker_set = get_sticker_set(\n bot, build_sticker_set_name(bot, sticker_set_prefix), admin_id, sticker_set_prefix\n )\n if len(sticker_set.stickers) > 1:\n for sticker in sticker_set.stickers[1:]:\n try:\n bot.delete_sticker_from_set(sticker.file_id)\n except BadRequest as exc:\n if \"Stickerset_not_modified\" in str(exc):\n pass\n else:\n raise exc", "def getSets():", "def retrieve_set(self, setname):\n\n return list(self._collection.find({'set': setname}))", "def get_queryset(self):\n self.object = self.get_object()\n return self.object.sticker_set.filter(sprint__isnull=True).order_by(\n '-modification_date'\n )", "def _get_seriesepisode_set(self):\n return SeriesEpisode.objects.filter(series=self)", "async def tickers(self, symbol):\n ticker = await self.client.get_public_ticker(symbol)\n return Ticker(\n bid=ticker[0],\n ask=ticker[2],\n last_price=ticker[6],\n volume=ticker[7]\n )", "def 
get_trending():\n trending = r.get('trending_store')\n return trending", "def fetch_markets_tickers(self):\r\n return self.__public_request('GET', '/api/v1/tickers')", "def getSeries(self) -> Series:\n\n return self.__series", "def get_set(self, which_set):\n return (getattr(self, 'x_' + which_set),\n getattr(self, 'y_' + which_set))", "def retrieve_trades(self):\n trades_file = self.current_trades_path()\n if not trades_file.exists():\n LOGGER.info(f\"no trades stored for league {self.league_id}\")\n return set()\n fo = trades_file.open(\"rb\")\n return pickle.load(fo)", "def get_bookset(self): # pragma: no coverage\r\n raise NotImplementedError()", "def getTrainSet(self):\r\n return self.fTrainData", "def get_ticker(self, ticker_symbol):\n return", "def getScatterers(self):\n return self.scatterers", "def ticker(self):\n return self._ticker", "def ticker(self):\n return self._ticker", "def getTicker(self):\n\t\tif self.poll_reqq is None or self.poll_rspq is None:\n\t\t\treturn\n\n\t\treq = {\"cmd\" : \"get ticker\"}\n\t\tself.poll_reqq.put(req)\n\t\tticker = self.poll_rspq.get(timeout=self.q_get_tov)\n\n\t\t#debug\n\t\t# self.logger.debug(\"ticker=%s\" % str(ticker))\n\n\t\treturn ticker" ]
[ "0.74767673", "0.730071", "0.65737146", "0.609914", "0.5899323", "0.584576", "0.5708728", "0.5630611", "0.5577954", "0.55621547", "0.5537053", "0.55129313", "0.5512399", "0.5468756", "0.54597294", "0.54515886", "0.5433279", "0.5400197", "0.53024924", "0.5262992", "0.5247267", "0.5239091", "0.5238041", "0.52067155", "0.5166485", "0.5162194", "0.5146395", "0.5132359", "0.5132359", "0.5126562" ]
0.816935
0
Use this method to upload a .png file with a sticker for later use in createNewStickerSet and addStickerToSet methods (can be used multiple times).
async def upload_sticker_file(self, user_id: base.Integer, png_sticker: base.InputFile) -> types.File:
    payload = generate_payload(**locals(), exclude=['png_sticker'])
    result = await self.send_file('png_sticker', api.Methods.UPLOAD_STICKER_FILE, png_sticker, payload)

    return types.File(**result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_upload_image_to_serivce(self):\n url = image_upload_url(self.service_id)\n with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:\n img = Image.new('RGB', (10, 10))\n img.save(ntf, format='JPEG')\n ntf.seek(0)\n res = self.client.post(url, {'image': ntf}, format='multipart')\n\n self.services.refresh_from_db()\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertEqual('image', res.data)\n self.assertTrue(os.path.exists(self.services.image.path))", "def upload_image(set_name):\n\tif request.method == 'POST':\n\t\tfile = request.files['photo']\n\t\tif file and allowed_file(file.filename):\n\t\t\tfile_type=allowed_file(file.filename)\n\t\t\tfilename=str(set_name)+file_type \n\t\t\tfile.save(os.path.join(IMAGE_PATH, filename))\n\t\t\treturn \"/static/image/\"+filename\n\t\telse:\n\t\t\treturn \"http://upload.wikimedia.org/wikipedia/commons/a/ac/No_image_available.svg\"\n\telse:\n return \"http://upload.wikimedia.org/wikipedia/commons/a/ac/No_image_available.svg\"", "def test_upload_sync(created_test_helper, setup_teardown_file_operations):\n # get current test params\n test_params = created_test_helper.get_test_data(TESTLOC)\n\n # Upload a PNG file < 1MB in size\n upload_file_response = created_test_helper.upload(\n file_name=test_params[\"test_file\"])\n\n # Validate\n # 1. id of file uploaded\n # 2. thumb nail urls and their links\n # 3. file id of uploaded file present in file list operation\n created_test_helper.validate_upload(\n file_name=test_params[\"test_file\"],\n upload_file_response=upload_file_response,\n file_type=\"images\")", "def file_upload():\n\n click.secho('*** Uploading image...', fg='green')\n uploaded = _uploaded_file('cover.jpg')\n click.secho(json.dumps(uploaded, indent=2, sort_keys=True), fg='yellow')\n\n click.secho('*** Creating a Picture document for it...', fg='green')\n picture = _make_document('picture', title='cover image', sys_filename=uploaded['path'])\n click.secho(json.dumps(picture, indent=2, sort_keys=True), fg='yellow')\n\n click.secho('*** Attaching it to a Blueray as cover...', fg='green')\n slp = _make_document('movie', title='Silver Linings Playbook')\n blueray = _make_document('blueray', movie_id=slp['_id'], cover_id=picture['_id'])\n click.secho(json.dumps(blueray, indent=2, sort_keys=True), fg='yellow')", "async def add_sticker_to_set(self, user_id: base.Integer, name: base.String,\n png_sticker: typing.Union[base.InputFile, base.String], emojis: base.String,\n mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:\n mask_position = prepare_arg(mask_position)\n payload = generate_payload(**locals(), exclude=['png_sticker'])\n result = await self.send_file('png_sticker', api.Methods.ADD_STICKER_TO_SET, png_sticker, payload)\n\n return result", "def test_upload(self):\n image = file(os.path.join(self.path, 'images', 'riker.gif'),\n 'rb')\n rv = self.post(url='/queue/',\n content={'image': (image, 'riker.gif')},\n token=self.user_token)\n self.assertStatus(rv, 200)\n\n # just check if the file is there\n fullpath = os.path.join(self.queue_dir, 'riker.gif')\n self.assertTrue(os.path.exists(fullpath))\n return", "def upload_img(self, path_img):\n if type(path_img) != str:\n path_img = path_img.toolTip()\n exif = self.extract_exif_data(path_img)\n self.listImages.upload_img(path_img, exif)", "def image_create_and_upload(self, upload=True, **kwargs):\n if 'name' not in kwargs:\n name = data_utils.rand_name(self.__name__ + \"-image\")\n kwargs['name'] = name\n\n params = dict(kwargs)\n image = 
self.create_image(**params)\n self.assertEqual('queued', image['status'])\n if not upload:\n return image\n\n file_content = data_utils.random_bytes()\n image_file = io.BytesIO(file_content)\n self.client.store_image_file(image['id'], image_file)\n\n image = self.client.show_image(image['id'])\n return image", "def upload_image(i):\n print(\"Uploading...\")\n\n url = 'https://api.imgur.com/3/image'\n payload = {'image': i}\n headers = {\n # The Client ID is bound to your application. You can create one at:\n # https://api.imgur.com/oauth2/addclient\n 'Authorization': 'Client-ID ec61be071b16841'\n }\n\n response = requests.request('POST', url, headers=headers, data=payload)\n return response.json()['data']['link']", "def upload_files(self, context, instance_ref, bless_files):\n raise Exception(\"Uploading files to the image service is not supported.\")", "def POST_upload_sr_img(self, file, header, name):\r\n\r\n # default error list (default values will reset the errors in\r\n # the response if no error is raised)\r\n errors = dict(BAD_CSS_NAME = \"\", IMAGE_ERROR = \"\")\r\n try:\r\n cleaned = cssfilter.clean_image(file,'PNG')\r\n if header:\r\n num = None # there is one and only header, and it is unnumbered\r\n elif not name:\r\n # error if the name wasn't specified or didn't satisfy\r\n # the validator\r\n errors['BAD_CSS_NAME'] = _(\"Bad image name\")\r\n else:\r\n num = c.site.add_image(name, max_num = g.max_sr_images)\r\n c.site._commit()\r\n\r\n except cssfilter.BadImage:\r\n # if the image doesn't clean up nicely, abort\r\n errors[\"IMAGE_ERROR\"] = _(\"Bad image\")\r\n except ValueError:\r\n # the add_image method will raise only on too many images\r\n errors['IMAGE_ERROR'] = (\r\n _(\"Too many images (you only get %d)\") % g.max_sr_images)\r\n\r\n if any(errors.values()):\r\n return UploadedImage(\"\", \"\", \"\", errors = errors).render()\r\n else:\r\n # with the image num, save the image an upload to s3. 
the\r\n # header image will be of the form \"${c.site._fullname}.png\"\r\n # while any other image will be ${c.site._fullname}_${num}.png\r\n new_url = cssfilter.save_sr_image(c.site, cleaned, num = num)\r\n if header:\r\n c.site.header = new_url\r\n c.site._commit()\r\n\r\n return UploadedImage(_('Saved'), new_url, name,\r\n errors = errors).render()", "async def capture_and_upload_screenshot(self) -> None:", "def start_upload(self, group_name=None):\n rmt = BossRemote(cfg_file_or_dict=self.args.config)\n\n type_to_dtype = {'image': 'uint16', 'annotation': 'uint64'}\n\n img = tf.imread(os.path.expanduser(self.args.tif_stack))\n if self.args.type == 'annotation' and img.dtype != 'uint64':\n img = np.asarray(img, dtype='uint64')\n\n coll_name = self.args.collection\n exp_name = self.args.experiment\n chan_name = self.args.channel\n source_chan = []\n\n if self.args.source_channel != None:\n source_chan = [self.args.source_channel]\n\n # upload image back to boss\n channel_rsc = self._get_channel_resource(rmt, chan_name, coll_name, exp_name, type=self.args.type, sources=source_chan, datatype=type_to_dtype[self.args.type], new_channel=self.args.new_channel)\n\n if img.dtype != 'uint64' or img.dtype != 'uint16':\n if self.args.type == 'image':\n img = img.astype('uint16')\n else:\n img = img.astype('uint64')\n\n if not self.args.chunk:\n self._upload_to_boss(rmt, img, channel_rsc)\n else:\n self._upload_chunk_to_boss(rmt, img, channel_rsc, x_range=self.args.x_range, y_range=self.args.y_range, z_range=self.args.z_range)\n\n url = 'https://ndwebtools.neurodata.io/ndviz_url/{}/{}/'.format(coll_name, exp_name)\n\n if group_name:\n self._change_permissions(group_name)\n\n return url", "def upload_to(instance, filename):\n return upload_image_path(filename, 'products')", "def upload_preset(self, filename, title, description, version, author, REQUEST=None):\r\n\r\n # TODO presets.py - upload_preset - specify how to authenticate\r\n\r\n raise NotImplementedError", "def test_upload_image(self):\n url = image_upload_url(self.recipe.id)\n with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:\n img = Image.new('RGB', (10, 10))\n img.save(ntf, format='JPEG')\n ntf.seek(0) # go back to begining of file_path\n res = self.client.post(url, {'image': ntf, format: 'multipart'})\n\n self.recipe.refresh_from_db()\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertIn('image', res.data)\n self.assertTrue(os.path.exists(self.recipe.image.path))", "def __upload(self, filename):\n # Save to local path\n save_img = self.__frame.copy()\n\n # Initialize the bucket for after usage\n image_blob = None\n\n # Make the Google Cloud Storage client\n # and set the storage path\n if self.__yaml[\"bucket\"] is not None:\n client = storage.Client()\n bucket = client.get_bucket(self.__yaml[\"bucket\"])\n image_blob = bucket.blob(filename)\n\n # Upload and save the image\n try:\n if self.__yaml[\"output_path\"] is not None:\n # Save image in local\n LOGGER.info(f\"Saved {filename} in local folder\", )\n path = os.path.sep.join((self.__yaml[\"output_path\"], filename))\n cv2.imwrite(path, save_img)\n\n # Upload to Google Cloud Storage\n # if the user set the \"bucket\" option\n if self.__yaml[\"bucket\"] is not None:\n image_blob.upload_from_filename(os.path.sep.join((self.__yaml[\"output_path\"],\n filename)),\n content_type=\"image/jpeg\")\n\n LOGGER.info(f\"Saved {filename} to google cloud storage\")\n elif self.__yaml[\"bucket\"] is not None:\n # Convert numpy array to bytes\n temp_file = 
Image.fromarray(cv2.cvtColor(save_img, cv2.COLOR_BGR2RGB))\n temp_file_bytes = io.BytesIO()\n temp_file.save(temp_file_bytes,\n format=\"JPEG\")\n\n # Read the bytes from beginning\n temp_file_bytes.seek(0)\n image_blob.upload_from_file(temp_file_bytes,\n content_type=\"image/jpeg\")\n\n LOGGER.info(f\"Saved {filename} to google cloud storage\")\n except Exception as error:\n # If errors occur, just print the error messages\n # and don't exit the program\n LOGGER.warning(error)", "def import_image(self, file: str) -> Any:\n pass", "def upload_file():\r\n # Define an image object\r\n image_path = r'F:\\Testing_Development\\Projects\\Interface_requests\\Interface_requests\\upload_files\\Napoleon Bonaparte.jpg'\r\n file = {'file': open('Napoleon Bonaparte.jpg', 'rb')}\r\n # response = requests.post(base_url + '/post', files=file, timeout=3)\r\n response = requests.post(base_url + '/post', files=file)\r\n print(response.status_code)\r\n print(response.text)", "def test_upload_image_to_recipe(self):\n url = create_upload_image_url(self.recipe.id)\n\n with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:\n image = Image.new(\"RGB\", (10, 10))\n image.save(ntf, format='JPEG')\n ntf.seek(0)\n res = self.client.post(url, {'image': ntf}, format=\"multipart\")\n\n self.recipe.refresh_from_db()\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertIn('image', res.data)\n self.assertTrue(os.path.exists(self.recipe.image.path))", "def upload_single(self, filepath, test=False):\n return self._gphotocli_image_tasks.upload_single(filepath, test)", "def upload(self, filename, file_path):\n return", "def upload_to_slowpics(self, config: SlowPicsConf = default_conf) -> None:\n # Upload to slow.pics\n all_images = [sorted((self.path / name).glob('*.png')) for name in self.clips.keys()]\n if self.path_diff:\n all_images.append(sorted(self.path_diff.glob('*.png'))) # type: ignore\n\n fields: Dict[str, Any] = {}\n\n for i, (name, images) in enumerate(\n zip(list(self.clips.keys()) + (['diff'] if self.path_diff else []),\n all_images)\n ):\n for j, (image, frame) in enumerate(zip(images, self.frames)):\n fields[f'comparisons[{j}].name'] = str(frame)\n fields[f'comparisons[{j}].images[{i}].name'] = name\n fields[f'comparisons[{j}].images[{i}].file'] = (image.name, image.read_bytes(), 'image/png')\n\n with Session() as sess:\n sess.get('https://slow.pics/api/comparison')\n # TODO: yeet this\n files = MultipartEncoder(config | fields)\n\n Status.info('Uploading images...')\n print()\n url = sess.post(\n 'https://slow.pics/api/comparison', data=files.to_string(),\n headers=_get_slowpics_header(str(files.len), files.content_type, sess)\n )\n\n slowpics_url = f'https://slow.pics/c/{url.text}'\n Status.info(f'Slowpics url: {slowpics_url}')\n\n url_file = self.path / 'slow.pics.url'\n url_file.write_text(f'[InternetShortcut]\\nURL={slowpics_url}', encoding='utf-8')\n Status.info(f'url file copied to \"{url_file.resolve().to_str()}\"')", "def send_sticker(): \n try:\n sticker_icon = driver.find_element_by_xpath('//*[@data-tip=\"stickers\"]')\n sticker_icon.click()\n wait(wait_time=10)\n sticker_pack = driver.find_element_by_class_name('sticker-pack')\n stickers = sticker_pack.find_elements_by_class_name('sticker')\n src = stickers[0].get_attribute('src')\n sticker_ID = src[src.index('sticker/') + len('sticker/'):]\n sticker_ID = sticker_ID[:sticker_ID.index('_')]\n stickers[0].click()\n wait()\n except Exception as e:\n return \"Error: \" + str(e)\n if verify_sticker_sent(sticker_ID):\n return 
\"Success\"\n else:\n return \"Error: sticker wasn't sent\"", "def push_image_to_provider(self, builder, provider, credentials, target, target_image, parameters):", "def test_uploading_image_to_recipe(self):\n url = image_upload_url(self.recipe.id)\n with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:\n img = Image.new('RGB',(10,10))\n img.save(ntf,format='JPEG')\n ntf.seek(0)\n res = self.client.post(url, {'image': ntf}, format='multipart')\n\n self.recipe.refresh_from_db()\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertIn('image', res.data)\n self.assertTrue(os.path.exists(self.recipe.image.path))", "def test_upload_image_to_reteta(self):\n url = image_upload_url(self.reteta.id)\n with tempfile.NamedTemporaryFile(suffix='.jpg') as ntf:\n img = Image.new('RGB', (10, 10))\n img.save(ntf, format='JPEG')\n ntf.seek(0)\n res = self.client.post(url, {'image': ntf}, format='multipart')\n self.reteta.refresh_from_db()\n self.assertEqual(res.status_code, status.HTTP_200_OK)\n self.assertIn('image', res.data)\n self.assertTrue(os.path.exists(self.reteta.image.path))", "def upload(self, upload_request):\n raise NotImplementedError", "def upload_image_to_minio_package(request):\n # Instanciating MinioStoreStorage creates a Minio client from settings\n # and a bucket with the name passed to it.\n x = MinioStoreStorage(\"abdelhalim\")\n image = request.FILES[\"image_to_upload\"]\n width, height, size, imageFormat, name = fetch_metadata(image)\n\n # The name might conflict with an already existing picture\n while x.exists(name):\n name = name + \"X\"\n\n # Using Pillow to fetch metadata\n width, height, size, image_format, name = fetch_metadata(image)\n\n serializer = ImageForMinioSerializer(data={\n \"name\": name,\n \"image\": image,\n \"height\": height,\n \"width\": width,\n \"size\": size, # pillow_image.size will return (width, height)\n \"path_to_image\": \"NEEDSTOBESET\", # See presigned URLs in Minio?\n \"image_format\": image_format\n }\n )\n\n if (\n serializer.is_valid(raise_exception=True) and\n is_image(image.content_type)\n ):\n # What if saving does not go well?\n # Wrap in a try/except statement\n x._save(name, image)\n serializer.save()\n return Response({\"response\": \"Picture saved\"})", "def paintSkirt(self):\n imgPath = GG.genteguada.GenteGuada.getInstance().getDataPath(os.path.join(PATH_EDITOR_IMG, self.avatarConfiguration[\"gender\"], self.avatarConfiguration[\"bodySize\"], self.avatarConfiguration[\"typeSkirt\"]+\"_skirt\", self.avatarConfiguration[\"skirt\"] + IMG_EXTENSION))\n self.newAvatarImage(imgPath, \"skirt\")" ]
[ "0.60989136", "0.60354024", "0.57841253", "0.5716377", "0.564847", "0.55834883", "0.55747676", "0.5573079", "0.5571249", "0.55512714", "0.5531908", "0.5524271", "0.55130595", "0.54837316", "0.5478993", "0.5464416", "0.54550964", "0.5447844", "0.5437544", "0.5398548", "0.53983843", "0.5395419", "0.53777474", "0.5368458", "0.536561", "0.53549415", "0.53544503", "0.53242564", "0.5319269", "0.53162116" ]
0.77327746
0
Use this method to create a new sticker set owned by a user. The bot will be able to edit the created sticker set.
async def create_new_sticker_set(self, user_id: base.Integer, name: base.String, title: base.String,
                                 png_sticker: typing.Union[base.InputFile, base.String], emojis: base.String,
                                 contains_masks: typing.Union[base.Boolean, None] = None,
                                 mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:
    mask_position = prepare_arg(mask_position)
    payload = generate_payload(**locals(), exclude=['png_sticker'])
    result = await self.send_file('png_sticker', api.Methods.CREATE_NEW_STICKER_SET, png_sticker, payload)

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_or_create(session, name, chat, user):\n name = name.lower()\n sticker_set = session.query(StickerSet).get(name)\n if not sticker_set:\n # Create a task for adding a sticker.\n # This task will be processed by a job, since adding a sticker can take quite a while\n sticker_set = StickerSet(name, None)\n sticker_set.international = user.international\n task = Task(Task.SCAN_SET, sticker_set=sticker_set, chat=chat, user=user)\n session.add(sticker_set)\n session.add(task)\n # Error handling: Retry in case somebody sent to stickers at the same time\n try:\n session.commit()\n except IntegrityError as e:\n session.rollback()\n sticker_set = session.query(StickerSet).get(name)\n if sticker_set is None:\n raise e\n\n return sticker_set", "def get_sticker_set(bot: Bot, name: str, admin_id: int, sticker_set_prefix: str) -> StickerSet:\n try:\n return bot.get_sticker_set(name)\n except BadRequest as exc:\n if \"invalid\" in str(exc):\n with open(STICKER_SET_LOGO, \"rb\") as sticker:\n bot.create_new_sticker_set(\n admin_id, name, sticker_set_prefix, \"🐦\", png_sticker=sticker\n )\n return bot.get_sticker_set(name)\n raise exc", "async def add_sticker_to_set(self, user_id: base.Integer, name: base.String,\n png_sticker: typing.Union[base.InputFile, base.String], emojis: base.String,\n mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:\n mask_position = prepare_arg(mask_position)\n payload = generate_payload(**locals(), exclude=['png_sticker'])\n result = await self.send_file('png_sticker', api.Methods.ADD_STICKER_TO_SET, png_sticker, payload)\n\n return result", "async def set_chat_sticker_set(self, chat_id: typing.Union[base.Integer, base.String],\n sticker_set_name: base.String) -> base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_CHAT_STICKER_SET, payload)\n\n return result", "def create(self, request):\n user = request.user\n serializer = self.get_serializer(data=request.data, many=True)\n serializer.is_valid(raise_exception=True)\n user.series_list = json.dumps(serializer.data)\n user.save()\n headers = self.get_success_headers(serializer.data)\n return Response(serializer.data, headers=headers)", "async def get_sticker_set(self, name: base.String) -> types.StickerSet:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.GET_STICKER_SET, payload)\n\n return types.StickerSet(**result)", "def perform_create(self, serializer):\n serializer.save(warehouse=self.request.user)", "def sticker(self, sticker_id):\r\n return Sticker(self, sticker_id)", "def build_sticker_set_name(bot: Bot, sticker_set_prefix: str) -> str:\n return f\"{sticker_set_prefix}_by_{bot.username}\"", "def create_user_shingles(args: argparse.Namespace) -> set:\n shing = set()\n add_interest_points_shingles(shing, args)\n add_length_shingle(shing, args)\n add_difficulty_shingle(shing, args)\n add_shape_shing(shing, args)\n return shing", "async def create(\n self, *, header: Optional[headers.RequestHeader] = None\n ) -> CreateResponse:\n\n request = CreateRequest()\n if header is not None:\n request.header = header\n\n return await self._unary_unary(\n \"/atomix.set.SetService/Create\", request, CreateResponse,\n )", "def _create_hotkey_set():\n message_str = 'You must use a custom hotkey profile.\\n\\n' \\\n 'To continue adding Mimic hotkeys, switch \\n' \\\n 'to a custom hotkey set in the Hotkey Editor\\n' \\\n 'or create a new profile below. 
\\n'\n\n user_input = pm.promptDialog(\n title='New Hotkey Profile',\n message=message_str,\n messageAlign='center',\n button=['Cancel', 'Create'],\n defaultButton='Create',\n text='Mimic_Hotkeys',\n style='text',\n cancelButton='Cancel',\n dismissString='Cancel')\n\n if user_input == 'Create':\n hotkey_set_name = pm.promptDialog(query=True, text=True)\n hotkey_set_name_filtered = _filter_hotkey_set_name(hotkey_set_name)\n pm.hotkeySet(hotkey_set_name_filtered, current=True)\n print('New Hotkey Set created: {}'.format(hotkey_set_name_filtered))\n return True", "def create_investor(sender, **kwargs):\n u = kwargs[\"instance\"]\n try:\n \n if not InvestorProfile.objects.filter(username=u.username):\n inv = InvestorProfile(username=u.username,user=u)\n inv.save()\n g = DjangoGroup.objects.get(name='Investors') \n g.user_set.add(u)\n except Exception as e:\n print e", "def custom_sticker(self, sticker_id):\r\n return CustomSticker(self, sticker_id)", "def perform_create(self, serializer):\n serializer.save(creator=self.request.user)", "def perform_create(self, serializer):\r\n serializer.save(user_type=\"SPEAKER\")", "def assign_user_to_shelter():\n current_user = get_jwt_identity()\n\n if not current_user:\n print('uri=/login error=\"Missing username parameter\"')\n return jsonify({\"msg\": \"Missing username parameter\"}), 400\n\n if not request.is_json:\n print('uri=/login error=\"Missing JSON in request\"')\n return jsonify({\"msg\": \"Missing JSON in request\"}), 400\n\n username = User.get_username_by_id(current_user)\n shelter_name = request.json.get('shelter_name', None)\n\n if not shelter_name:\n print('uri=/assign-user-to-shelter error=\"Missing shelter parameter\"')\n return jsonify({\"msg\": \"Missing shelter parameter\"}), 400\n\n result = ShelterWorker.assign_user_by_username(username, shelter_name)\n\n if result:\n return jsonify(message='User {} assigned to shelter {} successfully'.format(username, shelter_name)), 200\n else:\n return jsonify(message='User {} assignment to shelter {} failed'.format(username, shelter_name)), 500", "def create(ctx, amount, save_to, type):\n skale = ctx.obj['skale']\n for i in range(amount):\n schain_info = create_schain(skale, skale.wallet, type)\n save_info(i, schain_info, skale.wallet, save_to)\n logger.info(LONG_LINE)\n show_all_schains_names(skale)", "def create_default_strike_entry(self, user: discord.User):\n\t\t\n\t\tdef data_interaction(cur: Cursor):\n\t\t\tsql = f\"INSERT INTO {StrikeConsts.STRIKE_TABLE} ({StrikeConsts.ID}, {StrikeConsts.USERNAME}, {StrikeConsts.STRIKE_COUNT}) \" \\\n\t\t\t f\"VALUES (%(id)s, %(username)s, %(count)s);\"\n\t\t\t\n\t\t\tparams = {\n\t\t\t\t'id': user.id,\n\t\t\t\t'username': f'{user.display_name}#{user.discriminator}',\n\t\t\t\t'count': 0\n\t\t\t}\n\t\t\t\n\t\t\tcur.execute(sql, params)\n\t\t\t\n\t\t\treturn []\n\t\t\n\t\tif not self.user_has_entry(user):\n\t\t\tself.connect_and_execute(data_interaction)", "def _create_nsem_user():\n users = User.objects.filter(username=settings.CWWED_NSEM_USER)\n if users.exists():\n user = users[0]\n else:\n user = User.objects.create_user(settings.CWWED_NSEM_USER, password=settings.CWWED_NSEM_PASSWORD)\n group, _ = Group.objects.get_or_create(name=settings.CWWED_NSEM_GROUP)\n perm_names = [\n 'add_{}'.format(NsemPsa._meta.model_name),\n 'add_{}'.format(NamedStormCoveredDataSnapshot._meta.model_name),\n ]\n perms = Permission.objects.filter(codename__in=perm_names)\n # set permission\n user.user_permissions.set(list(perms))\n group.permissions.set(list(perms))\n # add 
user to group\n group.user_set.add(user)", "def assign_permissions(sender, instance, created, **kwargs):\n if created:\n assign_perm('view_strand', instance.owner.group, instance)\n assign_perm('change_strand', instance.saver, instance)\n assign_perm('delete_strand', instance.saver, instance)\n assign_perm('view_strand', instance.saver, instance)", "def perform_create(self, serializer):\n user = self.request.user\n serializer.save(owner=user)\n return Response({}, status=status.HTTP_201_CREATED)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def perform_create(self, serializer):\n serializer.save(owner=self.request.user)", "def new_set(*, ctx: context.ContextLevel, **kwargs) -> irast.Set:\n ir_set = irast.Set(**kwargs)\n ctx.all_sets.append(ir_set)\n return ir_set", "def create(data):\n \n return Setlist(\n list_id = data['id'],\n name = data['name'],\n items = data['num_sets'])", "def test_create_shelf(self, *_):\n form = forms.ShelfForm()\n form.data[\"user\"] = self.local_user.id\n form.data[\"name\"] = \"new shelf name\"\n form.data[\"description\"] = \"desc\"\n form.data[\"privacy\"] = \"unlisted\"\n request = self.factory.post(\"\", form.data)\n request.user = self.local_user\n\n views.create_shelf(request)\n\n shelf = models.Shelf.objects.get(name=\"new shelf name\")\n self.assertEqual(shelf.privacy, \"unlisted\")\n self.assertEqual(shelf.description, \"desc\")\n self.assertEqual(shelf.user, self.local_user)" ]
[ "0.6126132", "0.6081491", "0.6063581", "0.5805446", "0.56008524", "0.5516832", "0.53947544", "0.5390292", "0.537911", "0.5359913", "0.5275231", "0.5241288", "0.5193254", "0.5166913", "0.5152117", "0.513813", "0.5116448", "0.510349", "0.50846744", "0.5074969", "0.5071933", "0.50714886", "0.506797", "0.506797", "0.506797", "0.506797", "0.506797", "0.50664675", "0.5016981", "0.5000298" ]
0.7162943
0
Use this method to delete a sticker from a set created by the bot.
async def delete_sticker_from_set(self, sticker: base.String) -> base.Boolean:
    payload = generate_payload(**locals())
    result = await self.request(api.Methods.DELETE_STICKER_FROM_SET, payload)

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clean_sticker_set(context: CCT) -> None:\n bot = context.bot\n admin_id = cast(int, context.bot_data[ADMIN_KEY])\n sticker_set_prefix = cast(str, context.bot_data[STICKER_SET_NAME_KEY])\n\n sticker_set = get_sticker_set(\n bot, build_sticker_set_name(bot, sticker_set_prefix), admin_id, sticker_set_prefix\n )\n if len(sticker_set.stickers) > 1:\n for sticker in sticker_set.stickers[1:]:\n try:\n bot.delete_sticker_from_set(sticker.file_id)\n except BadRequest as exc:\n if \"Stickerset_not_modified\" in str(exc):\n pass\n else:\n raise exc", "def delete_set(set_name):\n\n flg = logging.getLogger(\"lettuce.xgenSetup.delete_set\")\n\n flg.info(\"Set to delete: {}\".format(set_name))\n\n if mc.objExists(set_name):\n mc.select(set_name)\n old_objects = mc.ls(selection=True)\n flg.debug(\"Old Objects:\")\n for o in old_objects:\n flg.debug(o)\n ref_objects = mc.ls(selection=True, referencedNodes=True)\n\n ref_del_queue = []\n if len(ref_objects) > 0:\n flg.debug(\"Old Reference Nodes:\")\n for o in ref_objects:\n flg.debug(o)\n for o in ref_objects:\n flg.debug(\"Queuing {} for reference removal\".format(o))\n top = mc.referenceQuery(o, referenceNode=True)\n ref_del_queue.append(top)\n if len(ref_del_queue):\n for o in ref_del_queue:\n flg.debug(\"Removing reference: {}\".format(o))\n ref_file = mc.referenceQuery(o, filename=True)\n mc.file(ref_file, removeReference=True)\n for o in old_objects:\n try:\n flg.debug(\"Deleting {}\".format(o))\n mc.delete(o)\n except ValueError as e:\n flg.debug(\"Unable to delete {0}. Error: {1}\".format(o, e))\n flg.debug(\"Deleting set: {}\".format(set_name))\n mc.delete(set_name)", "def delete_pair_set(namespace, workspace, pair_set_id):\n body = [{\"entityType\": \"pair_set\", \"entityName\": pair_set_id}]\n res = firecloud_api.delete_entities(namespace, workspace, body)\n return res", "def delete_meal():", "def delete_set(self, item): # TODO test\n tree = item.parent\n item_label = item.parent_node\n tree.remove_node(item)\n tree.remove_node(item_label)\n self.exercise.sets.remove(item.set)\n print(\"delete set\")", "def remove(self, pset):\n self._sets.remove(pset)", "def delete_leader(self):", "def delete_sample_set(namespace, workspace, sample_set_id):\n body = [{\"entityType\": \"sample_set\", \"entityName\": sample_set_id}]\n res = firecloud_api.delete_entities(namespace, workspace, body)\n return res", "def delete(log, session, args):\n log('imageset id: {highlight}{id}{reset}',\n highlight=Fore.GREEN,\n id=args.id,\n reset=Style.RESET_ALL)\n log.warn('delete imageset command coming soon.')", "def delete(self, show_id, session):\n try:\n show = db.show_by_id(show_id, session=session)\n except NoResultFound:\n raise NotFoundError('show with ID %s not found' % show_id)\n args = delete_parser.parse_args()\n forget = args.get('forget')\n for season in show.seasons:\n db.remove_series_entity(show.name, season.identifier, forget)\n return success_response('successfully removed all series %s seasons from DB' % show_id)", "def delete():", "def delete_patchset(request):\n request.patchset.nuke()\n return HttpResponseRedirect(reverse(show, args=[request.issue.key.id()]))", "def delete(self, **kwargs):\n\n rst = self.del_sngl_pair(kwargs)\n return rst", "def delete(self):\n ...", "def delete(self):\n del self.shx.atoms[self.index]", "async def delete_one(self, where):\n\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def remove(self):\n self._switch.odlclient._request(self._path, 
method=\"delete\")", "def delete_command():\n global selected_tuple\n backend.delete(selected_tuple[0])", "def delete(self, _id):", "def delete(self, *args, **kwargs):\n pass", "def delete(self, *args, **kwargs):\n pass", "def delete(self):\n raise NotImplementedError(\"Deleting not supported for servers\")", "def delete(self, obj):", "def rm(tesserae, tessera_id):\n try:\n return tesserae.remove(tessera_id)\n except TesseraError, e:\n sys.stderr.write(\"Error: %s\\n\" % str(e))\n return False", "def get_sticker_set(bot: Bot, name: str, admin_id: int, sticker_set_prefix: str) -> StickerSet:\n try:\n return bot.get_sticker_set(name)\n except BadRequest as exc:\n if \"invalid\" in str(exc):\n with open(STICKER_SET_LOGO, \"rb\") as sticker:\n bot.create_new_sticker_set(\n admin_id, name, sticker_set_prefix, \"🐦\", png_sticker=sticker\n )\n return bot.get_sticker_set(name)\n raise exc", "def remove_from_hand(self):\n pass" ]
[ "0.68418777", "0.5678615", "0.5613526", "0.5577408", "0.54829514", "0.5386825", "0.53562564", "0.5326483", "0.52805626", "0.5252555", "0.5252049", "0.5235838", "0.5230914", "0.52285314", "0.5212259", "0.519053", "0.518183", "0.518183", "0.518183", "0.518183", "0.51652455", "0.5156669", "0.5138878", "0.50822693", "0.50822693", "0.50810325", "0.5074075", "0.50734836", "0.5073269", "0.50629395" ]
0.7008481
0
If you sent an invoice requesting a shipping address and the parameter is_flexible was specified, the Bot API will send an Update with a shipping_query field to the bot.
async def answer_shipping_query(self, shipping_query_id: base.String, ok: base.Boolean,
                                shipping_options: typing.Union[typing.List[types.ShippingOption], None] = None,
                                error_message: typing.Union[base.String, None] = None) -> base.Boolean:
    if shipping_options:
        shipping_options = prepare_arg([shipping_option.to_python()
                                        if hasattr(shipping_option, 'to_python')
                                        else shipping_option
                                        for shipping_option in shipping_options])
    payload = generate_payload(**locals())
    result = await self.request(api.Methods.ANSWER_SHIPPING_QUERY, payload)

    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _assemble_and_send_request(self):\r\n # Fire off the query.\r\n response = self.client.service.processShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response", "def ship(item_id):\n kwargs = {k: parse(v) for k, v in request.args.to_dict().items()}\n dest = kwargs.pop('dest', 'US')\n code = kwargs.pop('code', None)\n details = kwargs.pop('details', None)\n quantity = kwargs.pop('quantity', None)\n options = {\n 'ItemID': item_id, 'MessageID': item_id, 'DestinationCountryCode': dest}\n\n if code:\n options['DestinationPostalCode'] = code\n\n if details:\n options['IncludeDetails'] = details\n\n if quantity:\n options['QuantitySold'] = quantity\n\n options.update(kwargs)\n shopping = Shopping(**kwargs)\n\n try:\n response = shopping.search(options)\n except ConnectionError as err:\n result = str(err)\n status = 500\n else:\n result = shopping.parse(response)\n status = 200\n\n return jsonify(status, objects=result)", "def editShipment(order_id, comment, appendComment, notify):\n generate_request = oAuth_magento()\n\n payload = {\"searchCriteria[filter_groups][0][filters][0][field]\": \"increment_id\",\n \"searchCriteria[filter_groups][0][filters][0][value]\": order_id,\n \"searchCriteria[filter_groups][0][filters][0][condition_type]\": \"eq\",\n \"fields\": \"items[entity_id]\"}\n\n response = requests.request(\"GET\", url=generate_request[0], headers=generate_request[1], params=payload)\n json_response = json.loads(response.text)\n entity_id = json_response['items'][0]['entity_id']\n\n if appendComment == \"true\": \n payload = {\"appendComment\": \"true\",\n \"notify\": notify,\n \"comment\": {\n \"extension_attributes\": {},\n \"comment\": comment,\n \"is_visible_on_front\": 1\n }\n }\n \n else:\n payload = {\"notify\": notify}\n\n shipment_response = requests.request(\"POST\", url=\"https://www.amsbio.com/index.php/rest/V1/order/\" + str(entity_id) + \"/ship\", headers=generate_request[1], data=json.dumps(payload))\n return json.loads(shipment_response.text)", "def _assemble_and_send_validation_request(self):\r\n # Fire off the query.\r\n response = self.client.service.validateShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response", "def test_update_shipping_address(self):\n self.cim.update_shipping_address(\n customer_profile_id=u\"222\",\n customer_address_id=u\"444\",\n first_name=u\"pippo\",\n phone=u\"415-415-4154\"\n )", "def preview_aws_shipment(order):\n\n # Data that will be sent to AWS GetFulfillmentPreview method\n aws_order_data = {}\n\n # The destination address for the fulfillment order.\n address = order[\"delivery_address\"]\n country_iso = address[\"delivery_country_iso\"]\n aws_order_data[\"Address\"] = {\n \"Name\": u\"{firstname} {lastname}\".format(\n firstname=address[\"delivery_firstname\"],\n lastname=address[\"delivery_lastname\"]).title(),\n \"Line1\": address[\"delivery_address\"].title(),\n \"Line2\": address[\"delivery_address_2\"].title(),\n \"Line3\": address[\"delivery_address_complement\"].title(),\n \"City\": address[\"delivery_city\"].title(),\n \"StateOrProvinceCode\": address[\"delivery_zipcode\"],\n \"PostalCode\": address[\"delivery_zipcode\"],\n \"CountryCode\": country_iso,\n 
\"PhoneNumber\": address[\"delivery_phone_mobile\"]\n }\n # As specified in AWS documentation, don\"t include city if country is JP.\n if country_iso == \"JP\":\n aws_order_data[\"Address\"].pop(\"City\")\n\n aws_order_data[\"Items\"] = []\n for product in order[\"cart\"][\"products\"]:\n item = {\n \"SellerSKU\": product[\"sku\"],\n \"SellerFulfillmentOrderItemId\": product[\"sku\"],\n \"Quantity\": product[\"quantity\"]\n }\n aws_order_data[\"Items\"].append(item)\n\n mws_shipments = mws.OutboundShipments(\n access_key=MWS_ACCESS_KEY, secret_key=MWS_SECRET_KEY,\n account_id=MWS_MERCHANT_ID, region=\"FR\")\n\n data = dict(Action=\"GetFulfillmentPreview\")\n data.update(enumerate_data(aws_order_data))\n return mws_shipments.make_request(data, \"POST\")", "def ShipOrder(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def calculate(self, cart, contact):\n from satchmo_store.shop.models import Config\n\n settings = config_get_group('shipping.modules.ups')\n self.delivery_days = _(\"3 - 4\") #Default setting for ground delivery\n shop_details = Config.objects.get_current()\n # Get the code and description for the packaging\n container = settings.SHIPPING_CONTAINER.value\n container_description = settings.SHIPPING_CONTAINER.choices[int(container)][1]\n configuration = {\n 'xml_key': settings.XML_KEY.value,\n 'account': settings.ACCOUNT.value,\n 'userid': settings.USER_ID.value,\n 'password': settings.USER_PASSWORD.value,\n 'container': container,\n 'container_description': container_description,\n 'pickup': settings.PICKUP_TYPE.value,\n 'ship_type': self.service_type_code,\n 'shop_details':shop_details,\n }\n\n shippingdata = {\n 'single_box': False,\n 'config': configuration,\n 'contact': contact,\n 'cart': cart,\n 'shipping_address' : shop_details,\n 'shipping_phone' : shop_details.phone,\n 'shipping_country_code' : shop_details.country.iso2_code\n }\n\n if settings.SINGLE_BOX.value:\n log.debug(\"Using single-box method for ups calculations.\")\n\n box_weight = Decimal(\"0.00\")\n for product in cart.get_shipment_list():\n if product.smart_attr('weight') is None:\n log.warn(\"No weight on product (skipping for ship calculations): %s\", product)\n else:\n box_weight += product.smart_attr('weight')\n if product.smart_attr('weight_units') and product.smart_attr('weight_units') != \"\":\n box_weight_units = product.smart_attr('weight_units')\n else:\n log.warn(\"No weight units for product\")\n\n if box_weight < Decimal(\"0.1\"):\n log.debug(\"Total box weight too small, defaulting to 0.1\")\n box_weight = Decimal(\"0.1\")\n\n shippingdata['single_box'] = True\n shippingdata['box_weight'] = '%.1f' % box_weight\n shippingdata['box_weight_units'] = box_weight_units.upper()\n\n total_weight = 0\n for product in cart.get_shipment_list():\n try:\n total_weight += product.smart_attr('weight')\n except TypeError:\n pass\n\n signals.shipping_data_query.send(Shipper, shipper=self, cart=cart, shippingdata=shippingdata)\n c = Context(shippingdata)\n t = loader.get_template('shipping/ups/request.xml')\n request = t.render(c)\n self.is_valid = False\n if settings.LIVE.value:\n connection = settings.CONNECTION.value\n else:\n connection = settings.CONNECTION_TEST.value\n\n cachekey = cache_key(\n 'UPS_SHIP',\n #service_type = self.service_type_code,\n weight = str(total_weight),\n country = shop_details.country.iso2_code,\n zipcode = contact.shipping_address.postal_code)\n\n 
try:\n tree = cache_get(cachekey)\n except NotCachedError:\n tree = None\n\n if tree is not None:\n self.verbose_log('Got UPS info from cache [%s]', cachekey)\n else:\n self.verbose_log(\"Requesting from UPS [%s]\\n%s\", cachekey, request)\n cache_set(cachekey, value=request, length=600)\n tree = self._process_request(connection, request)\n self.verbose_log(\"Got from UPS [%s]:\\n%s\", cachekey, self.raw)\n cache_set(cachekey, value=tree)\n\n try:\n status_code = tree.getiterator('ResponseStatusCode')\n status_val = status_code[0].text\n self.verbose_log(\"UPS Status Code for cart #%s = %s\", int(cart.id), status_val)\n except AttributeError:\n status_val = \"-1\"\n\n if status_val == '1':\n self.is_valid = False\n self._calculated = False\n all_rates = tree.getiterator('RatedShipment')\n for response in all_rates:\n if self.service_type_code == response.find('.//Service/Code/').text:\n self.charges = response.find('.//TotalCharges/MonetaryValue').text\n if response.find('.//GuaranteedDaysToDelivery').text:\n self.delivery_days = response.find('.//GuaranteedDaysToDelivery').text\n self.is_valid = True\n self._calculated = True\n\n if not self.is_valid:\n self.verbose_log(\"UPS Cannot find rate for code: %s [%s]\", self.service_type_code, self.service_type_text)\n\n else:\n self.is_valid = False\n self._calculated = False\n\n try:\n errors = tree.find('.//Error')\n log.info(\"UPS %s Error: Code %s - %s\" % (errors[0].text, errors[1].text, errors[2].text))\n except AttributeError:\n log.info(\"UPS error - cannot parse response:\\n %s\", self.raw)\n\n if self.is_valid and settings.TIME_IN_TRANSIT.value:\n self.verbose_log('Now getting time in transit for cart')\n self.time_in_transit(contact, cart)", "def shipping_address(self) -> 'outputs.ShippingAddressResponse':\n return pulumi.get(self, \"shipping_address\")", "def shipping_address(self) -> 'outputs.ShippingAddressResponse':\n return pulumi.get(self, \"shipping_address\")", "def shipping_address(self) -> 'outputs.ShippingAddressResponse':\n return pulumi.get(self, \"shipping_address\")", "def test_checkout_lines_update_only_stock_in_cc_warehouse_delivery_method_set(\n user_api_client, checkout_with_item, warehouse_for_cc, shipping_method\n):\n # given\n checkout = checkout_with_item\n line = checkout.lines.first()\n variant = line.variant\n\n variant.stocks.all().delete()\n\n # set stock for a collection point warehouse\n Stock.objects.create(\n warehouse=warehouse_for_cc, product_variant=variant, quantity=10\n )\n\n checkout.shipping_method = shipping_method\n checkout.save(update_fields=[\"shipping_method\"])\n\n variant_id = graphene.Node.to_global_id(\"ProductVariant\", variant.pk)\n\n variables = {\n \"id\": to_global_id_or_none(checkout_with_item),\n \"lines\": [{\"variantId\": variant_id, \"quantity\": 1}],\n }\n\n # when\n response = user_api_client.post_graphql(MUTATION_CHECKOUT_LINES_UPDATE, variables)\n\n # then\n content = get_graphql_content(response)\n data = content[\"data\"][\"checkoutLinesUpdate\"]\n assert data[\"errors\"]\n assert data[\"errors\"][0][\"code\"] == CheckoutErrorCode.INSUFFICIENT_STOCK.name\n assert data[\"errors\"][0][\"field\"] == \"quantity\"", "def ship_updates(ai, var, screen, ship, charges, shields, hub):\r\n\tship.update(ai)\r\n\tship.draw_ship()\r\n\tcharge_shield_graphics(ai, var, screen, ship, charges, shields, hub)", "def shipping(self, shipping):\n\n self._shipping = shipping", "def shipmentDetails(request):\n order_id = request.GET.get('order_id')\n generate_request = oAuth_magento()\n\n 
payload = {\"searchCriteria[filter_groups][0][filters][0][field]\": \"increment_id\",\n \"searchCriteria[filter_groups][0][filters][0][value]\": order_id,\n \"searchCriteria[filter_groups][0][filters][0][conditionType]\": \"eq\",\n \"fields\": \"items[status,base_currency_code,grand_total,items[name,sku],extension_attributes[shipping_assignments[shipping[address[city,company,country_id,firstname,lastname,postcode,region,telephone]]]]]\",\n }\n response = requests.request(\"GET\", url=generate_request[0], headers=generate_request[1], params=payload)\n # with open('temp_files/magento_get_order_select.json','w') as f:\n # f.write(response.text)\n json_response = json.loads(response.text)\n context = {'result': json_response['items'][0]['extension_attributes']['shipping_assignments'][0]['shipping']['address'], \n 'status': json_response['items'][0]['status'],\n 'item_name': json_response['items'][0]['items'],\n 'price': json_response['items'][0]['base_currency_code'] + ' ' + str(json_response['items'][0]['grand_total']),\n }\n return JsonResponse(context)", "def _assemble_and_send_request(self):\r\n client = self.client\r\n \r\n \r\n # We get an exception like this when specifying an IntegratorId:\r\n # suds.TypeNotFound: Type not found: 'IntegratorId'\r\n # Setting it to None does not seem to appease it.\r\n \r\n del self.ClientDetail.IntegratorId\r\n \r\n # Fire off the query.\r\n response = client.service.postalCodeInquiry(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n PostalCode = self.PostalCode,\r\n CountryCode = self.CountryCode)\r\n\r\n return response", "def _assemble_and_send_request(self):\r\n client = self.client\r\n # Fire off the query.\r\n response = client.service.deleteShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n ShipTimestamp = datetime.now(), \r\n TrackingId=self.TrackingId,\r\n DeletionControl=self.DeletionControlType)\r\n\r\n return response", "def test_get_shipment(self):\n pass", "def update_shipment_custom_fields(self, body, **kwargs):\n\n all_params = ['body']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method update_shipment_custom_fields\" % key\n )\n params[key] = val\n del params['kwargs']\n\n # verify the required parameter 'body' is set\n if ('body' not in params) or (params['body'] is None):\n raise ValueError(\"Missing the required parameter `body` when calling `update_shipment_custom_fields`\")\n\n resource_path = '/beta/shipment/customFields'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'body' in params:\n body_params = params['body']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['api_key']\n\n response = self.api_client.call_api(resource_path, 'PUT',\n path_params,\n query_params,\n header_params,\n body=body_params,\n 
post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "async def ship(self, ctx):\r\n if ctx.invoked_subcommand:\r\n return\r\n defenders = ctx.message.mentions\r\n if defenders:\r\n for defender in defenders:\r\n captain = defender.name\r\n user_ship = Ship.find_ship(captain)\r\n if not user_ship:\r\n await ctx.send(\"{} does not yet have a ship.\".format(captain))\r\n else:\r\n em = discord.Embed(colour=0xAA0000)\r\n em.set_author(name=user_ship.ship_name, icon_url=defender.avatar_url)\r\n em.add_field(name='Ship Level: {}'.format(str(user_ship.level())),\r\n value=\"Win/Loss: {}/{}\".format(user_ship.win, user_ship.loss), inline=False)\r\n em.add_field(name=\"__Part__\", value=parts_print, inline=True)\r\n em.add_field(name=\"__Level__\", value=user_ship.info(), inline=True)\r\n em.set_footer(text=\"Their ship's coffers hold {} gold\".format(user_ship.gold),\r\n icon_url=\"https://cdn.discordapp.com/emojis/554730061463289857.gif\")\r\n em_msg = await ctx.send(embed=em)\r\n return\r\n\r\n captain = ctx.message.author.name\r\n user_ship = Ship.find_ship(captain)\r\n\r\n if not user_ship:\r\n user_ship = Ship(captain)\r\n user_ship.update(is_new=True)\r\n\r\n await ctx.send('Congratulations on the new ship, Captain {}! Welcome aboard!'\r\n '\\nCannons and Crew contribute to your attack,'\r\n ' while Armor and Sails contribute to defense\\nHere\\'s what she\\'s got:'.format(captain))\r\n\r\n em = discord.Embed(colour=0xDD0000)\r\n em.set_author(name=user_ship.ship_name,\r\n icon_url=ctx.message.author.avatar_url)\r\n em.add_field(name='Ship Level: {}'.format(str(user_ship.level())),\r\n value=\"Win/Loss: {}/{}\".format(user_ship.win, user_ship.loss), inline=False)\r\n #em.add_field(name='Ship Level', value=str(user_ship.level()), inline=False)\r\n em.add_field(name=\"__Part__\", value=parts_print, inline=True)\r\n em.add_field(name=\"__Level__\", value=user_ship.info(), inline=True)\r\n em.set_footer(text=\"Your ship's coffers hold {} gold\".format(user_ship.gold),\r\n icon_url=\"https://cdn.discordapp.com/emojis/554730061463289857.gif\")\r\n em_msg = await ctx.send(embed=em)", "def update_shipping_cost(self, cr, uid, ids, context=None):\n datas = self.browse(cr, uid, ids[0], context=context)\n if context is None:\n context = {}\n if context.get('active_model', False) in ['sale.order', 'account.invoice'] and 'active_id' in context:\n model = context['active_model']\n model_obj = self.pool.get(model)\n model_id = context.get('active_id', False)\n if model_id:\n model_obj.write(cr, uid, [model_id], {\n 'shipcharge': datas.shipping_cost,\n 'ship_method': datas.rate_select.shipmethodname,\n 'sale_account_id': datas.account_id.id,\n 'ship_method_id': datas.rate_select.id,\n }, context=context)\n if model == 'sale.order':\n model_obj.button_dummy(cr, uid, [model_id], context=context)\n if model == 'account.invoice':\n model_obj.button_reset_taxes(cr, uid, [model_id], context=context)\n return {'nodestroy': False, 'type': 'ir.actions.act_window_close'}", "def get_shipment_by_filter(self, **kwargs):\n\n all_params = ['filter', 'page', 'limit', 'sort']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_shipment_by_filter\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = 
'/beta/shipment/search'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n if 'filter' in params:\n query_params['filter'] = params['filter']\n if 'page' in params:\n query_params['page'] = params['page']\n if 'limit' in params:\n query_params['limit'] = params['limit']\n if 'sort' in params:\n query_params['sort'] = params['sort']\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type([])\n\n # Authentication setting\n auth_settings = ['api_key']\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='list[Shipment]',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response", "def webhook_payment_successful(self, event):\n\n intent = event.data.object\n p_id = intent.id\n pack = intent.metadata.pack\n save_detail = intent.metadata.save_detail\n\n billing_details = intent.charges.data[0].billing_details\n shipping_details = intent.shipping\n grand_cost = round(intent.charges.data[0].amount / 100, 2)\n\n for field, value in shipping_details.address.items():\n if value == \"\":\n shipping_details.address[field] = None\n\n profile = None\n username = intent.metadata.username\n if username != 'AnonymousUser':\n profile = UserProfile.objects.get(user__username=username)\n if save_detail:\n profile.default_phone_number = shipping_details.phone,\n profile.default_home_Address = shipping_details.address.line1,\n profile.default_home_Address_continued = \\\n shipping_details.address.line2,\n profile.default_postcode = \\\n shipping_details.address.postal_code,\n profile.default_county = \\\n shipping_details.address.city,\n profile.default_country = \\\n shipping_details.address.country,\n profile.save()\n\n order_present = False\n seek = 1\n while seek <= 6:\n try:\n order = Order.objects.get(\n Name__iexact=shipping_details.name,\n user_account=profile,\n email__iexact=billing_details.email,\n phone_number__iexact=shipping_details.phone,\n home_Address__iexact=shipping_details.address.line1,\n home_Address_continued__iexact =(\n shipping_details.address.line2\n ),\n postcode__iexact=shipping_details.address.postal_code,\n county__iexact=shipping_details.address.city,\n country__iexact=shipping_details.address.country,\n grand_cost=grand_cost,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n order_present = True\n break\n except Order.DoesNotExist:\n seek += 1\n time.sleep(1)\n if order_present:\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | Good news. 
\\\n This is now in the database',\n status=200)\n else:\n order = None\n try:\n order = Order.objects.create(\n Name=shipping_details.name,\n email=billing_details.email,\n phone_number=shipping_details.phone,\n home_Address=shipping_details.address.line1,\n home_Address_continued=shipping_details.address.line2,\n postcode=shipping_details.address.postal_code,\n county=shipping_details.address.city,\n country=shipping_details.address.country,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n for item_id, item_data in json.load(pack).items():\n product = Product.objects.get(id=item_id)\n if isinstance(item_data, int):\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=item_data,\n )\n order_line_item.save()\n else:\n for size, quantity in item_data['items_by_size'].items():\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=quantity,\n product_size=size,\n )\n order_line_item.save()\n except Exception as e:\n if order:\n order.delete()\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n There is an error: {e}',\n status=500)\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n Goodnews: webhook order created',\n status=200)", "def test_new_empty_shipping_address(self):\r\n self.original = self.env[\"res.partner\"].create({\r\n \"is_company\": False,\r\n \"type\": 'delivery',\r\n \"lastname\": \"\",\r\n \"firstname\": \"\"})", "def free_shipping(self, free_shipping):\n\n self._free_shipping = free_shipping", "def test_create_shipping_address(self):\n self.cim.create_shipping_address(\n customer_profile_id=100,\n ship_phone=u'415-415-4154',\n ship_first_name=u'valentino'\n )", "def create_shipment(self, international=False):\n ret_val = self.create(\"RequestedShipment\")\n ret_val.RateRequestTypes = None\n\n if not international:\n del ret_val.EdtRequestType\n\n return ret_val", "def on_ship(self, ship):\n # TODO: add ship to game\n # The game has a set combination of ships which is created when choosing the field size\n # (by battleships.ship_combination_creator()).\n # After that you need to create the player and add every ship from the combination (\n # without position) to his fleet. Done by add_ship(size) in the player class,\n # just cycle through the fleet_config of the Game class.\n # Then you need a button to determine the facing of the ship (north, west, east, south) and\n # something that shows which ship you are placing (either go through the array yourself\n # or by choosing the size per button).\n # Then the player needs to call position_ship(size, x, y, facing). 
If the ship cannot be\n # placed there (either because it conflicts with another ship or goes over the edge of\n # the board) the function will return a False, if it works it'll return True.\n # By calling check_ship_placement() from the Player class you can check whether all\n # ships are placed or not (returning True if all are placed, returning False if one or\n # more are still missing a position).\n # Apparently this is a commuication error on both sides:\n # This is how the ship placement works via the now built GUI:\n # New Game-> field size setting -> player name entry + ship placement,\n # via click, hoover and click (lenght of placed ship 1+hoover+1)\n # a list of coords creates the ship\n # {'n': [[(0, 0), (1, 0), (2, 0), (3, 0)]],\n # 'b': [[(1, 1), (2, 1), (3, 1), (4, 1)], [(2, 2), (2, 3), (2, 4)]]}\n\n if len(ship) < 3 or len(ship) > 6:\n # set length of ship to 3 to 6\n return False\n return True", "def req_item_in_shipment( shipment_item,\r\n shipment_type,\r\n req_items,\r\n ): \r\n \r\n shipment_item_table = \"logs_%s_item\" % shipment_type\r\n try:\r\n item_id = shipment_item[shipment_item_table].item_id\r\n except:\r\n item_id = shipment_item.inventory_store_item.item_id\r\n \r\n #Check for req_items\r\n if item_id in req_items: \r\n quantity_req_type = \"quantity_%s\" % shipment_to_req_type[shipment_type]\r\n \r\n #This item has been requested by this store\r\n req_item = req_items[item_id]\r\n req_item_id = req_item.id\r\n \r\n #Update the quantity_fulfil \r\n #convert the shipment items quantity into the req_tem.quantity_fulfil (according to packet)\r\n quantity = req_item[quantity_req_type] + \\\r\n (shipment_item[shipment_item_table].packet_quantity / \\\r\n req_item.packet_quantity) * \\\r\n shipment_item[shipment_item_table].quantity \r\n quantity = min(quantity, req_item.quantity) #Cap at req. quantity \r\n db.logs_req_item[req_item_id] = {quantity_req_type: quantity} \r\n \r\n #link the shipment_item to the req_item \r\n db[shipment_item_table][shipment_item[shipment_item_table].id] = dict(logs_req_item_id = req_item_id)\r\n \r\n #Flag req record to update status_fulfil \r\n return req_item.logs_req_id\r\n else:\r\n return None", "def test_get_shipping_address(self):\n self.cim.get_shipping_address(\n customer_profile_id=u\"900\",\n customer_address_id=u\"344\"\n )" ]
[ "0.60649186", "0.60223025", "0.57076615", "0.5564417", "0.5513518", "0.54623425", "0.5378201", "0.5326703", "0.52991426", "0.52991426", "0.52991426", "0.52568734", "0.51945746", "0.5156792", "0.5143426", "0.5109757", "0.50229204", "0.4990119", "0.49679393", "0.496416", "0.49600378", "0.49571705", "0.48192286", "0.47837773", "0.47802943", "0.47737247", "0.4758608", "0.47571695", "0.47512156", "0.47382325" ]
0.632263
0
Once the user has confirmed their payment and shipping details, the Bot API sends the final confirmation in the form of an Update with the field pre_checkout_query. Use this method to respond to such pre-checkout queries.
async def answer_pre_checkout_query(self, pre_checkout_query_id: base.String, ok: base.Boolean,
                                    error_message: typing.Union[base.String, None] = None) -> base.Boolean:
    payload = generate_payload(**locals())
    result = await self.request(api.Methods.ANSWER_PRE_CHECKOUT_QUERY, payload)
    return result
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def webhook_payment_successful(self, event):\n\n intent = event.data.object\n p_id = intent.id\n pack = intent.metadata.pack\n save_detail = intent.metadata.save_detail\n\n billing_details = intent.charges.data[0].billing_details\n shipping_details = intent.shipping\n grand_cost = round(intent.charges.data[0].amount / 100, 2)\n\n for field, value in shipping_details.address.items():\n if value == \"\":\n shipping_details.address[field] = None\n\n profile = None\n username = intent.metadata.username\n if username != 'AnonymousUser':\n profile = UserProfile.objects.get(user__username=username)\n if save_detail:\n profile.default_phone_number = shipping_details.phone,\n profile.default_home_Address = shipping_details.address.line1,\n profile.default_home_Address_continued = \\\n shipping_details.address.line2,\n profile.default_postcode = \\\n shipping_details.address.postal_code,\n profile.default_county = \\\n shipping_details.address.city,\n profile.default_country = \\\n shipping_details.address.country,\n profile.save()\n\n order_present = False\n seek = 1\n while seek <= 6:\n try:\n order = Order.objects.get(\n Name__iexact=shipping_details.name,\n user_account=profile,\n email__iexact=billing_details.email,\n phone_number__iexact=shipping_details.phone,\n home_Address__iexact=shipping_details.address.line1,\n home_Address_continued__iexact =(\n shipping_details.address.line2\n ),\n postcode__iexact=shipping_details.address.postal_code,\n county__iexact=shipping_details.address.city,\n country__iexact=shipping_details.address.country,\n grand_cost=grand_cost,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n order_present = True\n break\n except Order.DoesNotExist:\n seek += 1\n time.sleep(1)\n if order_present:\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | Good news. 
\\\n This is now in the database',\n status=200)\n else:\n order = None\n try:\n order = Order.objects.create(\n Name=shipping_details.name,\n email=billing_details.email,\n phone_number=shipping_details.phone,\n home_Address=shipping_details.address.line1,\n home_Address_continued=shipping_details.address.line2,\n postcode=shipping_details.address.postal_code,\n county=shipping_details.address.city,\n country=shipping_details.address.country,\n original_pack=pack,\n stripe_p_id=p_id,\n )\n for item_id, item_data in json.load(pack).items():\n product = Product.objects.get(id=item_id)\n if isinstance(item_data, int):\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=item_data,\n )\n order_line_item.save()\n else:\n for size, quantity in item_data['items_by_size'].items():\n order_line_item = OrderLineItem(\n order=order,\n product=product,\n quantity=quantity,\n product_size=size,\n )\n order_line_item.save()\n except Exception as e:\n if order:\n order.delete()\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n There is an error: {e}',\n status=500)\n self._send_email_details(order)\n return HttpResponse(\n content=f'Webhook obtained: {event[\"type\"]} | \\\n Goodnews: webhook order created',\n status=200)", "def acknowledge_prepayment(self):\n self.acknowledge_payment()", "def __wait_for_precheckoutquery(self,\n cancellable: bool = False) -> Union[telegram.PreCheckoutQuery, CancelSignal]:\n log.debug(\"Waiting for a PreCheckoutQuery...\")\n while True:\n # Get the next update\n update = self.__receive_next_update()\n # If a CancelSignal is received...\n if isinstance(update, CancelSignal):\n # And the wait is cancellable...\n if cancellable:\n # Return the CancelSignal\n return update\n else:\n # Ignore the signal\n continue\n # Ensure the update contains a precheckoutquery\n if update.pre_checkout_query is None:\n continue\n # Return the precheckoutquery\n return update.pre_checkout_query", "def awaiting_payment(self):", "def payment_confirmation(self, **post):\n sale_order_id = view.session.get('sale_last_order_id')\n partner_id = view.env.user.partner_id\n if sale_order_id:\n sale_order_id = view.env['sale.order'].sudo().browse(int(sale_order_id))\n lines = sale_order_id.order_line\n policy_line = view.env['policies.holder.line']\n for line in lines:\n code = ''.join(random.choice('0123456789ABCDEF') for i in range(16))\n policy_line.sudo().create({'name':lines.product_id.id, \n 'premium':lines.price_unit, \n 'policy_code':code, \n 'line_id':partner_id.id,\n 'start_date':Datetime.now(), 'end_date':Datetime.to_string(timedelta(days=lines.product_id.policy_period*360)+ datetime.now())})\n s = super(InsuranceWebsiteSale, self).payment_confirmation()\n view.session['sale_last_order_id'] = False\n return s\n return", "def confirm_further(self, update, context):\n response_code = update.callback_query[\"data\"] # wouldyou_{yes|no}\n request_id = context.user_data[\"current_request\"]\n log.info(\"No further comments req:%s %s\", request_id, response_code)\n self.finalize_request(update, context, request_id)", "def onCheckout(self, controller):\n \n if askokcancel(\"Proceed\", \"Pay the order?\"):\n c = controller.customer\n package = {'customer_id':c.id, 'order_price':c.my_order.GetTotalPrice}\n msg = controller.transmit(package)\n \n if msg['order_received']:\n c.CheckOut(c.my_order.GetTotalPrice)\n c.Clear()\n controller.show_frame(PageThree)", "def google_notification(request, payment_mode='google'):\r\n # ack_xml = 
'''<notification-acknowledgment xmlns=\"http://checkout.google.com/schema/2\" serial-number=\"%s\"/>\\n\\n'''\r\n ack_html = '''_type=notification-acknowledgment&serial-number=%s\\n\\n'''\r\n processed_response = None\r\n try:\r\n data = request.POST\r\n _log.debug(\"Google Checkout IPN Headers: %s\", repr(request.META))\r\n _log.debug(\"Google Checkout IPN data: %s\", repr(data))\r\n\r\n serial_number = data[u'serial-number'].strip()\r\n ack_html = ack_html % serial_number\r\n processed_response = HttpResponse(ack_html, content_type=\"text/plain\")\r\n\r\n ipn_type = data[u'_type']\r\n if ipn_type not in [u'new-order-notification']:\r\n _log.debug(\"Ignored unsupported IPN type %s.\", ipn_type)\r\n return processed_response\r\n\r\n order_state = data[u'fulfillment-order-state']\r\n if order_state.upper() not in [u'NEW']:\r\n _log.debug(\"Ignored unsupported order state %s\", order_state)\r\n return processed_response\r\n\r\n currency = data.get(u'order-total.currency', settings.CURRENCY_DEFAULT)\r\n if currency.upper() not in settings.CURRENCIES_SUPPORTED:\r\n # We do not support any currency other than USD.\r\n _log.debug(\"Ignored unsupported currency %s\", currency)\r\n return processed_response\r\n\r\n required_keys = (\r\n 'shopping-cart.items.item-1.merchant-item-id',\r\n 'shopping-cart.items.item-1.quantity',\r\n 'shopping-cart.merchant-private-data',\r\n 'order-total',\r\n 'google-order-number',\r\n )\r\n for key in required_keys:\r\n if key not in data:\r\n # We do not support this type of a response\r\n _log.debug(\"Unsupported IPN. No %s\", key)\r\n return processed_response\r\n\r\n try:\r\n pending_contribution_id, username, invoice_num = data[u'shopping-cart.merchant-private-data'].split('~')\r\n except ValueError:\r\n # This IPN is not meant for our application\r\n _log.debug(\"Unsupported IPN. No proper shopping-cart.merchant-private-data\")\r\n return processed_response\r\n\r\n is_anon = username == 'anonymous'\r\n campaign_id = data[u'shopping-cart.items.item-1.merchant-item-id']\r\n qty = data[u'shopping-cart.items.item-1.quantity']\r\n amount = data[u'order-total']\r\n transaction_id = data[u'google-order-number']\r\n\r\n if not is_anon:\r\n try:\r\n pending_contrib = PendingContribution.objects.get(\r\n pk=pending_contribution_id,\r\n contributor__username=username,\r\n campaign=campaign_id,\r\n qty=qty,\r\n payment_mode=payment_mode)\r\n except PendingContribution.DoesNotExist:\r\n # We don't need to keep receiving this notification any more.\r\n _log.debug(\"Pending contribution not found. Locals: %s\" % locals())\r\n return processed_response\r\n\r\n campaign = Campaign.objects.get(pk=campaign_id)\r\n\r\n if is_anon:\r\n artist = campaign.artist\r\n else:\r\n artist = pending_contrib.campaign.artist\r\n\r\n google_merchant_id, google_merchant_key = artist.google_merchant_id, artist.google_merchant_key\r\n\r\n if not (google_merchant_id or google_merchant_key):\r\n # This artist does not support Google Checkout payments\r\n _log.debug(\"Artist %s does not accept Google Checkout payments\", artist)\r\n return processed_response\r\n\r\n if not google.verify_ipn_request(request, google_merchant_id, google_merchant_key):\r\n raise Http404\r\n\r\n contribs = Contribution.objects.filter(transaction_id=transaction_id, payment_mode=payment_mode).count()\r\n if contribs:\r\n # This payment was already processed. 
Just acknowledge the notification.\r\n _log.debug(\"Payment was already processed.\")\r\n return processed_response\r\n\r\n # Process it and update the ``memo`` field if it has been provided by the buyer.\r\n if is_anon:\r\n # Anonymous contribution\r\n contributor = User.objects.get(username='anonymous')\r\n contrib = campaign.contribution_set.create(\r\n contributor=contributor,\r\n amount=amount,\r\n qty=qty,\r\n payment_mode=payment_mode,\r\n transaction_id=transaction_id,\r\n memo=data.get('memo', '')\r\n )\r\n _log.info(\"Google (tx: %s) anonymous contribution recorded: %s\", transaction_id, contrib)\r\n else:\r\n # Member contribution\r\n contrib = pending_contrib.process_payment_notification(transaction_id, data.get('memo', ''))\r\n _log.info(\"Google transaction %s resolved. Contribution recorded: %s\", transaction_id, contrib)\r\n except:\r\n _log.exception(''.join(format_exception(*exc_info())))\r\n raise Http404\r\n return processed_response", "def order_success(self, request):\n order = self.order_from_request(request)\n\n if not order:\n return self.order_new(request)\n\n if not order.balance_remaining:\n self.set_order_on_request(request, order=None)\n\n\n order_data = OrderData.objects.get(order=order)\n o_data = simplejson.loads(order_data.data)\n\n paymentData = {}\n paymentData['delivery_address2'] = o_data['delivery_address2']\n paymentData['billing_address2'] = o_data['billing_address2']\n paymentData['delivery_date'] = o_data['delivery_date']\n paymentData['delivery_state'] = o_data['delivery_state']\n paymentData['billing_state'] = o_data['billing_state']\n paymentData['salutation'] = o_data['salutation']\n paymentData['contact_number'] = o_data['billing_contact_number']\n\n #try:\n oPayment = OrderPayment.objects.get(order=order)\n oPayment.payment_method = o_data['order_payment_method']\n oPayment.data = simplejson.dumps(paymentData)\n oPayment.save()\n #except:\n # pass\n\n \"\"\"\n order update note\n \"\"\"\n notes = o_data['order_notes']\n order.notes = notes\n order.save()\n\n # st_save_helper(request, order)\n\n \"\"\"\n sbid = None\n\n if 'customer_styleboard' in request.session:\n sbid = request.session.get('customer_styleboard').id\n\n if 'personalize_id' in request.session:\n print \"There's a personalize_id\"\n \"\"\"\n\n current_user = User.objects.get(id=int(request.user.id))\n\n if 'ipn_emailed' in o_data and o_data['ipn_emailed']:\n\n pass\n \n else:\n\n emailed = send_email_order(order, current_user, notes, paymentData['contact_number'], self)\n\n logr.info('emailed order confirmation to : %s from order success' % current_user.email)\n\n\n order_data.delete() # not needed after saving to order payment\\\n \n clear_styleboard_session(request)\n\n try:\n del request.session['customer_styleboard']\n del request.session['personalize_id']\n except:\n pass\n\n return self.render(request, 'plata/shop_order_success.html',\n self.get_context(request, {\n 'order': order,\n 'progress': 'success',\n }))", "def test_process_postpay_accepted(self):\r\n student1 = UserFactory()\r\n student1.save()\r\n\r\n order1 = Order.get_cart_for_user(student1)\r\n params = {\r\n 'card_accountNumber': '1234',\r\n 'card_cardType': '001',\r\n 'billTo_firstName': student1.first_name,\r\n 'orderNumber': str(order1.id),\r\n 'orderCurrency': 'usd',\r\n 'decision': 'ACCEPT',\r\n 'ccAuthReply_amount': '0.00'\r\n }\r\n result = process_postpay_callback(params)\r\n self.assertTrue(result['success'])\r\n self.assertEqual(result['order'], order1)\r\n order1 = 
Order.objects.get(id=order1.id) # reload from DB to capture side-effect of process_postpay_callback\r\n self.assertEqual(order1.status, 'purchased')\r\n self.assertFalse(result['error_html'])", "def callback(self):\n\n # Gather information from callback response\n data = json.loads(request.data)\n order = data.get(\"order\", None)\n customer = data.get(\"customer\", None)\n\n email = customer[\"email\"]\n id = order[\"id\"]\n status = order[\"status\"]\n custom = order[\"custom\"]\n button = order[\"button\"]\n button_name = button[\"name\"]\n\n ## Generate Token and store in database\n gen_uuid = str(uuid.uuid4())\n\n try:\n t = Token()\n t.uuid = gen_uuid\n t.email = email\n t.active = True\n t.package = custom\n\n db.session.add(t)\n db.session.commit()\n except:\n import traceback\n db.session.rollback()\n traceback.print_exc()\n\n ## Send email to user with unique link\n try:\n msg = Message(\n \"Guildbit - Order Confirmation\",\n sender=settings.DEFAULT_MAIL_SENDER,\n recipients=[email])\n\n # msg.html = template\n msg.html = render_template(\"emails/payment_thankyou.html\", package=button_name, uuid=gen_uuid)\n mail.send(msg)\n except:\n import traceback\n traceback.print_exc()\n\n return jsonify({\n \"status\": \"received\"\n })", "def payment(self, **post):\n cr, uid, context = request.cr, request.uid, request.context\n payment_obj = request.registry.get('payment.acquirer')\n sale_order_obj = request.registry.get('sale.order')\n\n order = request.website.sale_get_order(context=context)\n order.write({'usersess': request.session['webcalc_session_id']})\n #order.env.cr.commit()\n redirection = self.checkout_redirection(order)\n if redirection:\n return redirection\n\n shipping_partner_id = False\n if order:\n if order.partner_shipping_id.id:\n shipping_partner_id = order.partner_shipping_id.id\n else:\n shipping_partner_id = order.partner_invoice_id.id\n\n values = {\n 'order': request.registry['sale.order'].browse(cr, SUPERUSER_ID, order.id, context=context),\n 'usersess': request.session['webcalc_session_id']\n }\n values['errors'] = sale_order_obj._get_errors(cr, uid, order, context=context)\n values.update(sale_order_obj._get_website_data(cr, uid, order, context))\n\n if not values['errors']:\n acquirer_ids = payment_obj.search(cr, SUPERUSER_ID, [('website_published', '=', True), ('company_id', '=', order.company_id.id)], context=context)\n values['acquirers'] = list(payment_obj.browse(cr, uid, acquirer_ids, context=context))\n render_ctx = dict(context, submit_class='btn btn-primary', submit_txt=_('Завершить оформление'))\n for acquirer in values['acquirers']:\n acquirer.button = payment_obj.render(\n cr, SUPERUSER_ID, acquirer.id,\n '/',\n order.amount_total,\n order.pricelist_id.currency_id.id,\n partner_id=shipping_partner_id,\n tx_values={\n 'return_url': '/shop/payment/validate',\n },\n context=render_ctx)\n #vips_shop\n return request.website.render(\"vips_shop.payment\", values)", "def purchase(self, first='', last='', street1='', street2='', city='', state='', postalcode='',\r\n country='', ccnum='', cardtype='', processor_reply_dump=''):\r\n if self.status == 'purchased':\r\n return\r\n self.status = 'purchased'\r\n self.purchase_time = datetime.now(pytz.utc)\r\n self.bill_to_first = first\r\n self.bill_to_last = last\r\n self.bill_to_city = city\r\n self.bill_to_state = state\r\n self.bill_to_country = country\r\n self.bill_to_postalcode = postalcode\r\n if settings.FEATURES['STORE_BILLING_INFO']:\r\n self.bill_to_street1 = street1\r\n self.bill_to_street2 = 
street2\r\n self.bill_to_ccnum = ccnum\r\n self.bill_to_cardtype = cardtype\r\n self.processor_reply_dump = processor_reply_dump\r\n\r\n # save these changes on the order, then we can tell when we are in an\r\n # inconsistent state\r\n self.save()\r\n # this should return all of the objects with the correct types of the\r\n # subclasses\r\n orderitems = OrderItem.objects.filter(order=self).select_subclasses()\r\n for item in orderitems:\r\n item.purchase_item()\r\n\r\n # send confirmation e-mail\r\n subject = _(\"Order Payment Confirmation\")\r\n message = render_to_string(\r\n 'emails/order_confirmation_email.txt',\r\n {\r\n 'order': self,\r\n 'order_items': orderitems,\r\n 'has_billing_info': settings.FEATURES['STORE_BILLING_INFO']\r\n }\r\n )\r\n try:\r\n from_address = microsite.get_value(\r\n 'email_from_address',\r\n settings.DEFAULT_FROM_EMAIL\r\n )\r\n\r\n send_mail(subject, message,\r\n from_address, [self.user.email]) # pylint: disable=E1101\r\n except (smtplib.SMTPException, BotoServerError): # sadly need to handle diff. mail backends individually\r\n log.error('Failed sending confirmation e-mail for order %d', self.id) # pylint: disable=E1101\r", "def handle_payment_intent_succeeded(self, event):\n intent = event.data.object\n pid = intent.id\n bag = intent.metadata.bag\n\n billing_details = intent.charges.data[0].billing_details\n grand_total = round(intent.charges.data[0].amount / 100, 2)\n\n order_exists = False\n attempt = 1\n while attempt <= 5:\n try:\n order = Order.objects.get(\n full_name__iexact=billing_details.name,\n email__iexact=billing_details.email,\n phone_number__iexact=billing_details.phone,\n street_address1__iexact=(\n billing_details.address.line1),\n street_address2__iexact=(\n billing_details.address.line2),\n town_or_city__iexact=billing_details.address.city,\n county__iexact=billing_details.address.state,\n country__iexact=billing_details.address.country,\n grand_total=grand_total,\n original_bag=bag,\n stripe_pid=pid,\n )\n order_exists = True\n break\n except Order.DoesNotExist:\n attempt += 1\n time.sleep(1)\n\n if order_exists:\n return HttpResponse(\n content=f'Webhook received: ({event[\"type\"]}'\n '| SUCCESS: Verified order already in database',\n status=200)\n else:\n order = None\n try:\n order = Order.objects.create(\n full_name=billing_details.name,\n email=billing_details.email,\n phone_number=billing_details.phone,\n street_address1=billing_details.address.line1,\n street_address2=billing_details.address.line2,\n town_or_city=billing_details.address.city,\n county=billing_details.state,\n country=billing_details.country,\n original_bag=bag,\n stripe_pid=pid,\n )\n for workshop_id, quantity in json.loads(bag).items():\n workshop = Workshop.objects.get(id=workshop_id)\n if isinstance(quantity, int):\n order_line_item = OrderLineItem(\n order=order,\n workshop=workshop,\n quantity=quantity,\n )\n order_line_item.save()\n except Exception as e:\n if order:\n order.delete()\n return HttpResponse(\n content=f'Webhook received: {event[\"type\"]} | ERROR: {e}',\n status=500)\n return HttpResponse(\n content=f'Webhook received: {event[\"type\"]}'\n '| SUCCESS: Created order in webhook',\n status=200)", "def SendOrderConfirmation(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def checkout(request):\n # Redirect search requests\n if request.GET:\n if 'q' in request.GET:\n return 
redirect(get_search_request(request))\n\n # Check the basket for errors and if found, send a message, deallocate any\n # allocated stock, then redirect the user to the basket\n basket_errors = request.session.get('basket_errors', 0)\n if basket_errors > 0:\n messages.error(\n request,\n \"Please resolve errors with your basket before proceeding\",\n 'from__checkout_basket'\n )\n deallocate_stock(request)\n return redirect(reverse('view_basket'))\n\n # Get stripe keys\n stripe_public_key = settings.STRIPE_PUBLIC_KEY\n stripe_secret_key = settings.STRIPE_SECRET_KEY\n\n # Get the basket object from session\n basket = request.session.get('basket', {})\n\n # POST Request. Attempt to process the order\n if request.method == 'POST':\n # Define the form_data object from POST data, setting the delivery\n # details to the billing address by default\n form_data = {\n 'first_name': request.POST['first_name'],\n 'last_name': request.POST['last_name'],\n 'email': request.POST['email'],\n 'phone_number': request.POST['phone_number'],\n 'street_address1': request.POST['street_address1'],\n 'street_address2': request.POST['street_address2'],\n 'town_or_city': request.POST['town_or_city'],\n 'county': request.POST['county'],\n 'postcode': request.POST['postcode'],\n 'country': request.POST['country'],\n 'delivery_first_name': request.POST['first_name'],\n 'delivery_last_name': request.POST['last_name'],\n 'delivery_address1': request.POST['street_address1'],\n 'delivery_address2': request.POST['street_address2'],\n 'delivery_town_or_city': request.POST['town_or_city'],\n 'delivery_county': request.POST['county'],\n 'delivery_postcode': request.POST['postcode'],\n 'delivery_country': request.POST['country'],\n }\n\n # If the 'use billing address' for delivery option was not selected,\n # update the delivery details in the form_data\n if request.POST.get('use-billing-address') != 'on':\n form_data['delivery_first_name'] = (\n request.POST['delivery_first_name'])\n form_data['delivery_last_name'] = (\n request.POST['delivery_last_name'])\n form_data['delivery_address1'] = request.POST['delivery_address1']\n form_data['delivery_address2'] = request.POST['delivery_address2']\n form_data['delivery_town_or_city'] = (\n request.POST['delivery_town_or_city'])\n form_data['delivery_county'] = request.POST['delivery_county']\n form_data['delivery_postcode'] = request.POST['delivery_postcode']\n form_data['delivery_country'] = request.POST['delivery_country']\n\n # If either the billing or delivery postcodes fail to validate, send\n # a message, deallocate stock, and return the user to the checkout\n if (submit_postcode(form_data['postcode']) is not True or\n submit_postcode(form_data['delivery_postcode']) is not True):\n messages.error(\n request,\n 'Please provide a valid UK postcode for billing and delivery.',\n 'from__checkout_basket'\n )\n deallocate_stock(request)\n return redirect(reverse('checkout'))\n\n # If either the billing or delivery countries fail to validate, send a\n # message, deallocate stock, and return the user to the checkout\n if (form_data['country'] not in settings.COUNTRIES_ONLY or\n form_data['delivery_country'] not in settings.COUNTRIES_ONLY):\n messages.error(\n request,\n 'Your selected country is not on our list of approved \\\n shipping destinations.',\n 'from__checkout_basket'\n )\n deallocate_stock(request)\n return redirect(reverse('checkout'))\n\n # use the form_data to populate an instance of OrderForm\n order_form = OrderForm(form_data)\n # If the form is valid...\n if 
order_form.is_valid():\n # create an order object by saving the OrderForm instance\n order = order_form.save(commit=False)\n # get the strip pid and add it to the order object along with the\n # the basket contents in json format, then save the order\n pid = request.POST.get('client_secret').split('_secret')[0]\n order.stripe_pid = pid\n order.original_basket = json.dumps(basket)\n order.save()\n # Iterate over the basket, and for each item...\n for product_id, quantity in basket.items():\n try:\n # Get the product from the Products table\n product_obj = Product.objects.get(id=product_id)\n if is_product_hidden(product_obj) is True:\n raise Exception\n # If the product does not exist, send a message, delete the\n # order, deallocate stock and return the user to the basket\n except Product.DoesNotExist:\n messages.error(\n request,\n \"One of the products in your basket wasn't found in \\\n our database. Please call us for assistance!\",\n 'from__checkout_basket'\n )\n order.delete()\n deallocate_stock(request)\n return redirect(reverse('view_basket'))\n # If the product is hidden, send a message, delete the order,\n # deallocate stock and return the user to the basket\n except Exception:\n messages.error(\n request,\n \"Unfortunately, one of the products in your basket is \\\n no longer available for purchase.\",\n 'from__checkout_basket'\n )\n order.delete()\n deallocate_stock(request)\n return redirect(reverse('view_basket'))\n else:\n # Get the product name\n product = product_obj.name\n # Get the sub_product_line name\n sub_product_line = product_obj.sub_product_line.name\n # Get the product_line name\n product_line = (\n product_obj.sub_product_line.product_line.name)\n # Get the price, corrected for any offers\n item_price = product_obj.get_price()['price']\n # Create an order line item instance using these details\n # plus the quantity of the item in the basket\n order_line_item = OrderLineItem(\n order=order,\n product_id=product_id,\n product=product,\n sub_product_line=sub_product_line,\n product_line=product_line,\n item_price=item_price,\n quantity=quantity,\n )\n # save the order line item (which will update the basket\n # total via signal)\n order_line_item.save()\n\n # Write the value of the 'save info' check box to the session\n request.session['save_info'] = 'save-info' in request.POST\n # Redirect the user to checkout success\n return redirect(reverse(\n 'checkout_success', args=[order.order_number]))\n # The form is invalid, so send a message, deallocate stock and return\n # the user to the checkout\n else:\n messages.error(\n request,\n 'There was an error with your form. 
Please double check your \\\n information.',\n 'from__checkout_basket'\n )\n deallocate_stock(request)\n return redirect(reverse('checkout'))\n # GET Request\n else:\n # If the basket is empty, send a message and redirect the user to\n # all_games\n if not basket:\n messages.error(\n request,\n \"There's nothing in your basket at the moment.\",\n 'from__checkout_basket'\n )\n return redirect(reverse('all_games'))\n\n # Run the basket_contents context processor and get the basket object\n # from the results\n current_basket = basket_contents(request)['basket']\n\n # Create stripe data\n stripe_total = round(current_basket['grand_total'] * 100)\n stripe.api_key = stripe_secret_key\n intent = stripe.PaymentIntent.create(\n amount=stripe_total,\n currency=settings.STRIPE_CURRENCY,\n )\n\n # Get a country object for the default country\n selected_country = get_country(settings.DEFAULT_COUNTRY)\n\n # If the user is logged in...\n if request.user.is_authenticated:\n # Get the user profile\n try:\n profile = UserProfile.objects.get(user=request.user)\n # User profile doesn't exist, so set order_form to an empty\n # instance of OrderForm\n except UserProfile.DoesNotExist:\n order_form = OrderForm()\n # Otherwise we have the profile, so set order_form to an instance\n # of OrderForm with initial data from the user profile\n else:\n order_form = OrderForm(initial={\n 'first_name': profile.user.first_name,\n 'last_name': profile.user.last_name,\n 'email': profile.user.email,\n 'phone_number': profile.default_phone_number,\n 'street_address1': profile.default_street_address1,\n 'street_address2': profile.default_street_address2,\n 'town_or_city': profile.default_town_or_city,\n 'county': profile.default_county,\n 'postcode': profile.default_postcode,\n 'country': profile.default_country,\n 'delivery_first_name': profile.user.first_name,\n 'delivery_last_name': profile.user.last_name,\n 'delivery_address1': profile.default_street_address1,\n 'delivery_address2': profile.default_street_address2,\n 'delivery_town_or_city': profile.default_town_or_city,\n 'delivery_county': profile.default_county,\n 'delivery_postcode': profile.default_postcode,\n 'delivery_country': profile.default_country,\n })\n\n # get a country object for the country in the user profile\n selected_country = get_country(profile.default_country)\n # User is not logged in, so set order_form to an empty instance of\n # order_form\n else:\n order_form = OrderForm()\n\n # If the stripe public key is missing, send a warning message - this\n # Should never occur in prod!\n if not stripe_public_key:\n messages.warning(\n request,\n 'Stripe public key is missing. \\\n Did you forget to set it in your environment?'\n )\n\n # Define the context object, including the name of the view, the\n # order form, selected country and stripe keys\n context = {\n 'view': 'checkout',\n 'order_form': order_form,\n 'stripe_public_key': stripe_public_key,\n 'client_secret': intent.client_secret,\n 'selected_country': selected_country\n }\n\n # Render the checkout view, passing the context\n return render(request, 'checkout/checkout.html', context)", "def paynow_update(request, payment_reference):\r\n\r\n # Get saved paymend details\r\n payment = get_object_or_404(PaynowPayment, reference=payment_reference)\r\n # Init paynow object. 
The URLS can be blank\r\n paynow = Paynow(settings.PAYNOW_INTEGRATION_ID, settings.PAYNOW_INTEGRATION_KEY, '', '')\r\n # Check the status of the payment with paynow server\r\n payment_result = paynow.check_transaction_status(payment.poll_url)\r\n\r\n save_changes = False\r\n\r\n # check if status has changed\r\n if payment.status != payment_result.status:\r\n payment.status = payment_result.status\r\n save_changes = True\r\n\r\n # Check if paynow reference has changed\r\n if payment.paynow_reference != payment_result.paynow_reference:\r\n payment.paynow_reference = payment_result.paynow_reference\r\n save_changes = True\r\n\r\n # Check if payment is now paid\r\n if payment_result.paid:\r\n if not payment.paid:\r\n payment.paid = True\r\n payment.confirmed_at = timezone.now()\r\n\r\n if save_changes:\r\n payment.save()\r\n\r\n return HttpResponse('ok')", "def post(self):\n \n access_token = accessToken.gerated_access_token\n api_url = \"https://sandbox.safaricom.co.ke/mpesa/stkpush/v1/processrequest\"\n headers = { \"Authorization\": \"Bearer %s\" % access_token }\n request = {\n \"BusinessShortCode\": constants.BusinessShortCode ,\n \"Password\": generated_password,\n \"Timestamp\": generated_timestamp,\n \"TransactionType\": \"CustomerPayBillOnline\",\n \"Amount\": \"1\",\n \"PartyA\": \"254705275702\",\n \"PartyB\": constants.BusinessShortCode,\n \"PhoneNumber\": \"\", #pass in the phone number that will be prompted to enter the pin\n \"CallBackURL\": \"https://test.com\", #pass in an actual callback url if you have one\n \"AccountReference\": \"Test100\",\n \"TransactionDesc\": \"Test payment\"\n }\n \n response = requests.post(api_url, json = request, headers=headers)\n # print (response.text)\n\n return {\"response\":response.json()}", "def get_checkout_view(request):\n if request.method == 'POST':\n order_form = OrderForm(request.POST)\n payment_form = PaymentForm(request.POST)\n\n if order_form.is_valid() and payment_form.is_valid():\n order = order_form.save(commit=False)\n order.user = request.user\n order.order_date = timezone.now()\n order.save()\n\n basket = request.session.get('basket', {})\n total = 0\n\n for id, quantity in basket.items():\n ticket = get_object_or_404(Ticket, pk=id)\n total += quantity * ticket.upvote_price\n order_item = OrderItem(order=order, ticket=ticket,\n quantity=quantity)\n order_item.save()\n\n try:\n customer = stripe.Charge.create(\n amount=int(total * 100),\n currency='GBP',\n description=request.user.email,\n card=payment_form.cleaned_data['stripe_id'],\n )\n except stripe.error.CardError:\n messages.error(request,\n 'Error! Your card was declined.')\n\n if customer.paid:\n for id, quantity in basket.items():\n ticket = Ticket.objects.get(pk=id)\n ticket.upvotes += quantity\n ticket.earned += quantity * ticket.upvote_price\n if ticket.status == 'FR' and ticket.status is not 'C':\n ticket.status = 'IP'\n ticket.save()\n messages.success(request,\n 'Success! Your upvotes have been added.')\n request.session['basket'] = {}\n return redirect('main-homepage')\n else:\n messages.error(request,\n 'Error! We were unable to take payment.')\n else:\n print(payment_form.errors)\n messages.error(request,\n 'Error! 
We were unable to take payment.')\n return render(request, 'checkout/checkout.html',\n {'order_form': order_form,\n 'payment_form': payment_form,\n 'publishable': settings.STRIPE_PUBLISHABLE})\n else:\n basket = request.session.get('basket', {})\n if basket:\n payment_form = PaymentForm()\n order_form = OrderForm()\n return render(request, 'checkout/checkout.html',\n {'order_form': order_form,\n 'payment_form': payment_form,\n 'publishable': settings.STRIPE_PUBLISHABLE})\n else:\n messages.error(request,\n 'Error! You have nothing to checkout.')\n return redirect('main-homepage')", "def event_payu_com_dpn(self, **post):\n cr, uid, context = request.cr, request.uid, request.context\n payment_acquire = request.env['payment.acquirer'].sudo().search([('provider', '=', 'payu')])\n transactionDetails = {}\n transactionDetails['store'] = {}\n transactionDetails['store']['soapUsername'] = payment_acquire.payu_api_username\n transactionDetails['store']['soapPassword'] = payment_acquire.payu_api_password\n transactionDetails['store']['safekey'] = payment_acquire.payu_seller_account\n transactionDetails['store']['environment'] = payment_acquire.environment\n transactionDetails['additionalInformation'] = {}\n transactionDetails['additionalInformation']['payUReference'] = post['PayUReference']\n try:\n result = PayuController.payuMeaGetTransactionApiCall('', transactionDetails)\n payment_transation_id = request.env['payment.transaction'].sudo().search(\n [('reference', '=', result['merchantReference'])])\n payu_response = {}\n if result:\n payu_response['TRANSACTION_STATUS'] = result['transactionState']\n # payu_response['SUCCESSFUL'] = result['successful']\n payu_response['AMOUNT'] = payment_transation_id.amount * 100 if payment_transation_id else 0.00\n payu_response['CURRENCYCODE'] = result['basket']['currencyCode']\n payu_response['PAYUREFERENCE'] = result['payUReference']\n payu_response['REFERENCE'] = result['merchantReference']\n payu_response['RESULTMESSAGE'] = result['resultMessage']\n response_state = request.env['payment.transaction'].sudo().form_feedback(payu_response, 'payu')\n # response_state = PaymentTransactionCus.form_feedback('', payu_response, 'payu')\n # if response_state:\n # return werkzeug.utils.redirect('/shop/payment/validate')\n # else:\n # return werkzeug.utils.redirect('/shop/unsuccessful')\n\n sale_order_id = request.env['sale.order'].sudo().search([('name', '=', result['merchantReference'])])\n sale_order_data = sale_order_id\n request.session['sale_last_order_id'] = sale_order_id.id\n\n tx_id = request.env['payment.transaction'].sudo().search([('reference', '=', result['merchantReference'])])\n tx = tx_id\n if not sale_order_id or (sale_order_id.amount_total and not tx):\n return request.redirect('/shop')\n if (not sale_order_id.amount_total and not tx) or tx.state in ['pending']:\n if sale_order_id.state in ['draft', 'sent']:\n if (not sale_order_id.amount_total and not tx):\n sale_order_id.action_button_confirm()\n email_act = sale_order_id.action_quotation_send()\n elif tx and tx.state == 'cancel':\n sale_order_id.action_cancel()\n elif tx and (tx.state == 'draft' or tx.state == 'sent' or tx.state == 'done'):\n # if result and payu_response['successful'] and payu_response['TRANSACTION_STATUS'] in ['SUCCESSFUL', 'PARTIAL_PAYMENT', 'OVER_PAYMENT']:\n if result and payu_response['TRANSACTION_STATUS'] in ['SUCCESSFUL', 'PARTIAL_PAYMENT', 'OVER_PAYMENT']:\n transaction = tx.sudo().write(\n {'state': 'done', 'date_validate': datetime.now(),\n 'acquirer_reference': 
result['payUReference']})\n email_act = sale_order_id.action_quotation_send()\n action_confirm_res = sale_order_id.action_confirm()\n sale_order = sale_order_id.read([])\n # if sale_order_id.state == 'sale':\n # journal_ids = request.env['account.journal'].sudo().search([('name', '=', 'FNB 62085815143')], limit=1)\n # journal = journal_ids.read([])\n currency = request.env['res.currency'].sudo().search([('name', '=', 'ZAR')], limit=1)\n method = request.env['account.payment.method'].sudo().search([('name', '=', 'Manual')], limit=1)\n journal_id = request.env['account.journal'].sudo().search(\n [('name', '=', 'FNB - Cheque Account 6208585815143')], limit=1, order=\"id desc\")\n if journal_id:\n account_payment = {\n 'partner_id': sale_order[0]['partner_id'][0],\n 'partner_type': 'customer',\n 'journal_id': journal_id.id,\n # 'invoice_ids':[(4,inv_obj.id,0)],\n 'amount': sale_order[0]['amount_total'],\n 'communication': sale_order_id.name,\n 'currency_id': currency.id,\n 'payment_type': 'inbound',\n 'payment_method_id': method.id,\n 'payment_transaction_id': tx.id,\n }\n acc_payment = request.env['account.payment'].sudo().create(account_payment)\n acc_payment.sudo().post()\n sale_order_id = request.session.get('sale_last_order_id')\n print(\"\\n\\n\\n\\n\\n\\n=======================sale order sale order======\", sale_order_id)\n sale_order_data = request.env['sale.order'].sudo().browse(sale_order_id)\n # if sale_order_data.project_project_id:\n # request.session['last_project_id'] = sale_order_data.project_project_id.id\n if response_state:\n sale_order_data.message_post(subject=\"T&C's Privacy Policy\",\n body=\"%s accepted T&C's and Privacy Policy.\" % sale_order_data.partner_id.name)\n return werkzeug.utils.redirect('/pay/thankyou')\n # return werkzeug.utils.redirect('/shop/confirmation')\n else:\n return werkzeug.utils.redirect('/event/unsuccessful')\n except Exception as e:\n return werkzeug.utils.redirect('/event/unsuccessful')", "def proceed_to_checkout_and_payment(self):\r\n # 1- summary\r\n logger.info('starting wizard with summary')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.cart_navigation a.standard-checkout')))\r\n self.automation.driver.execute_script(\"document.querySelectorAll('.cart_navigation a.standard-checkout')[0]\"\r\n \".click()\")\r\n\r\n # 2-sign in & 3-address\r\n logger.info('2-sign in & 3-address')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'button[name=\"processAddress\"]')))\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=\"processAddress\"]').click()\r\n\r\n # 4- shipping\r\n logger.info('4- shipping')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#uniform-cgv span')))\r\n\r\n is_checked = self.automation.driver.find_element_by_css_selector('#uniform-cgv span').get_attribute('class')\r\n if not is_checked: # agree\r\n self.automation.driver.execute_script(\"document.querySelectorAll('#cgv')[0].click()\")\r\n\r\n self.automation.driver.find_element_by_css_selector('button[name=processCarrier]').click()\r\n logger.info('agree and confirmed')\r\n\r\n # pay by bank wire\r\n logger.info('pay by bank wire')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '.payment_module a')))\r\n\r\n self.automation.driver.find_element_by_css_selector('.payment_module a').click()\r\n\r\n # 5- payment and confirm\r\n logger.info('5- payment and confirm')\r\n self.automation.wait.until(\r\n 
EC.presence_of_element_located((By.CSS_SELECTOR, '#cart_navigation button')))\r\n self.automation.driver.find_element_by_css_selector('#cart_navigation button').click()\r\n\r\n # back to orders\r\n logger.info('back to orders')\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, 'p.cart_navigation .button-exclusive.btn')))\r\n self.automation.driver.find_element_by_css_selector('p.cart_navigation .button-exclusive.btn').click()\r\n\r\n # how many items do you have\r\n time.sleep(1.5)\r\n self.automation.wait.until(\r\n EC.presence_of_element_located((By.CSS_SELECTOR, '#order-list tbody tr')))\r\n items = self.automation.driver.find_elements_by_css_selector('#order-list tbody tr')\r\n logger.info(f'You have \"{len(items)}\" at your order')", "def _assemble_and_send_request(self):\r\n # Fire off the query.\r\n response = self.client.service.processShipment(WebAuthenticationDetail=self.WebAuthenticationDetail,\r\n ClientDetail=self.ClientDetail,\r\n TransactionDetail=self.TransactionDetail,\r\n Version=self.VersionId,\r\n RequestedShipment=self.RequestedShipment)\r\n return response", "def confirm(request, virtualpos_type):\n return djangovirtualpos_views.confirm_payment(request, virtualpos_type, CreditCardReference)", "def checkout(request):\n\n if request.method == \"POST\":\n payment_form = MakePaymentForm(request.POST)\n if payment_form.is_valid():\n cart = request.session.get('cart', {})\n total = 0\n for id, quantity in cart.items():\n total += quantity * 10\n try:\n customer = stripe.Charge.create(\n amount=int(total * 100),\n currency=\"USD\",\n description=request.user.email,\n card=payment_form.cleaned_data['stripe_id'],\n )\n except stripe.error.CardError:\n messages.error(request, \"Your card was declined!\")\n\n if customer.paid:\n messages.success(request, \"You have successfully paid\")\n\n return redirect(reverse('cart_success'))\n else:\n messages.error(request, \"Unable to take payment\")\n else:\n messages.error(\n request, \"We were unable to take a payment with that card!\")\n else:\n payment_form = MakePaymentForm()\n\n return render(request, \"checkout.html\", {\n \"publishable\": settings.STRIPE_PUBLISHABLE,\n \"payment_form\": payment_form,\n \"simple_form\": 1,\n })", "def _onSuccess(self, controller):\r\n if controller.order.paid_in_full:\r\n controller.cart.empty()\r\n for item in controller.order.orderitem_set.all():\r\n if item.product.is_subscription:\r\n item.completed = True\r\n item.save()\r\n try:\r\n curr_status = controller.order.orderstatus_set.latest() \r\n except OrderStatus.DoesNotExist:\r\n curr_status = None\r\n \r\n if (curr_status is None) or (curr_status.notes and curr_status.status == \"New\"):\r\n controller.order.add_status(status='New', notes = \"Order successfully submitted\")\r\n else:\r\n # otherwise just update and save\r\n if not curr_status.notes:\r\n curr_status.notes = _(\"Order successfully submitted\")\r\n curr_status.save() \r\n\r\n #Redirect to the success page\r\n url = controller.lookup_url('satchmo_checkout-success')\r\n return HttpResponseRedirect(url) \r\n\r\n else:\r\n log.debug('Order #%i not paid in full, sending to pay rest of balance', controller.order.id)\r\n #url = controller.order.get_balance_remaining_url()\r\n url = reverse('satchmo_balance_remaining')\r\n return HttpResponseRedirect(url)", "def paypal_gateway(self):\n\n print(request.form)\n\n # Gather information from callback response\n first_name = request.form.get(\"first_name\", None)\n last_name = 
request.form.get(\"last_name\", None)\n payer_id = request.form.get(\"payer_id\", None)\n payer_email = request.form.get(\"payer_email\", None)\n item_name = request.form.get(\"item_name\", None)\n item_number = request.form.get(\"item_number\", None)\n custom = request.form.get(\"custom\", None)\n payment_gross = request.form.get(\"payment_gross\", None)\n\n ## Generate Token and store in database\n gen_uuid = str(uuid.uuid4())\n\n try:\n t = Token()\n t.uuid = gen_uuid\n t.email = payer_email\n t.active = True\n t.package = item_name\n t.package_id = item_number\n\n db.session.add(t)\n db.session.commit()\n except:\n import traceback\n db.session.rollback()\n traceback.print_exc()\n\n ## Send email to user with unique link\n try:\n msg = Message(\n \"Guildbit - Order Confirmation\",\n sender=settings.DEFAULT_MAIL_SENDER,\n recipients=[payer_email])\n\n msg.html = render_template(\"emails/payment_thankyou.html\", package=item_name, uuid=gen_uuid)\n mail.send(msg)\n except:\n import traceback\n traceback.print_exc()\n\n return jsonify({\n \"status\": \"received\"\n })", "def process(request, order):\n # Transaction results\n APPROVED = '1'\n DECLINED = '2'\n ERROR = '3'\n HELD_FOR_REVIEW = '4'\n print \"I am processing the request\"\n\n postdata = request.POST.copy()\n amount = cart.cart_subtotal(request)\n\n print amount\n\n charge = stripe.Charge.create(\n amount=int(amount*100),\n currency=\"ngn\", # I can Change to naira if needed\n card=postdata.get('stripeToken', ''),\n description=\"Example charge\"\n )\n #\n #charge.capture()\n\n\n if charge['card']['cvc_check']:\n transaction_id = charge.id[3:22]\n order = create_order(request, order, transaction_id)\n results = {'order_number': order.id, 'message': u''}\n elif charge.balance_transaction:\n results = {'order_number': 0, 'message': charge.failure_message, 'code': charge.failure_code,\n 'text': charge.description}\n else:\n results = {'order_number': 0, 'message':charge.failure_message, 'errors': charge.errors}\n return results", "def confirm_new_card(update, context):\n query = update.callback_query\n if query.message.reply_to_message:\n user = query.message.reply_to_message.from_user\n else:\n user = query.message.chat\n bot = context.bot\n CURRENT_USER = USERS[user.username]\n CURRENT_CONTEXT = process_card_value(query.data, CURRENT_USER, False, True)\n print(\"COnfirm New data\")\n message = f'Round: {CURRENT_CONTEXT[\"round\"]} ({CURRENT_CONTEXT[\"username\"]}) \\nDealers Card: {CURRENT_CONTEXT[\"dealer_card\"]}\\nYour Cards: {CURRENT_CONTEXT[\"player_cards\"]} \\nYour total: {CURRENT_CONTEXT[\"player_total\"]} \\n\\n'\n message = message + f'You should: *{CURRENT_CONTEXT[\"strategy\"]}* \\n\\n'\n message = message + f'Choose the action you took: '\n keyboard = [\n [inline(CURRENT_USER[\"strategy\"])],\n [inline('Something Else')],\n [inline('New Round')]\n ]\n\n strategy_markup = InlineKeyboardMarkup(keyboard)\n bot.edit_message_text(\n chat_id=query.message.chat_id,\n message_id=query.message.message_id,\n text=message,\n reply_markup=strategy_markup\n )\n\n # Tell ConversationHandler that we're in state `STRATEGY` now\n return STRATEGY", "def confirm_data(update, context):\n query = update.callback_query\n if query.message.reply_to_message:\n user = query.message.reply_to_message.from_user\n else:\n user = query.message.chat\n bot = context.bot\n CURRENT_USER = USERS[user.username]\n CURRENT_CONTEXT = process_card_value(query.data, CURRENT_USER, True, True)\n message = f'Round: {CURRENT_CONTEXT[\"round\"]} 
({CURRENT_CONTEXT[\"username\"]}) \\nDealers Card: {CURRENT_CONTEXT[\"dealer_card\"]}\\nYour Cards: {CURRENT_CONTEXT[\"player_cards\"]} \\nYour total: {CURRENT_CONTEXT[\"player_total\"]} \\n\\n'\n message = message + f'You should: *{CURRENT_CONTEXT[\"strategy\"]}* \\n\\n'\n message = message + f'Choose the action you took: '\n keyboard = [\n [inline(CURRENT_USER[\"strategy\"])],\n [inline('Something Else')],\n [inline('New Round')]\n ]\n\n strategy_markup = InlineKeyboardMarkup(keyboard)\n bot.edit_message_text(\n chat_id=query.message.chat_id,\n message_id=query.message.message_id,\n text=message,\n reply_markup=strategy_markup\n )\n\n # Tell ConversationHandler that we're in state `STRATEGY` now\n return STRATEGY", "def confirm_renewal(self, renewal_id, paid_on, query_params=dict()):\n return Renewal().patch(query_params, **{\n 'partner_id': self.partner,\n 'renewal_id': renewal_id,\n 'paid_on': paid_on\n })" ]
[ "0.6260118", "0.6211977", "0.58263457", "0.58254206", "0.5741798", "0.5719997", "0.56042004", "0.56010264", "0.55716336", "0.548527", "0.54411465", "0.5427429", "0.54274267", "0.5387689", "0.5368422", "0.53410625", "0.531948", "0.53180856", "0.5281035", "0.52600944", "0.5241745", "0.515977", "0.51554704", "0.51468724", "0.51461947", "0.5135499", "0.51186675", "0.5110157", "0.51046526", "0.5101727" ]
0.6384015
0
Parses all messages found in `buffer`.
def parse_messages(buffer):
    messages = []
    message, expected = parse_message(buffer)
    while message:
        messages.append(message)
        message, expected = parse_message(buffer)
    return messages, expected
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse(self, buffer, ignore_binary = False):\n self._buffer = buffer\n self._index = 0\n self._keep_binary = not ignore_binary\n return self._parse()", "def process_messages(self):\n pass", "def parse(self, buffer, ignore_binary = False):\n if buffer == \"\":\n return False\n\n self._buffer = buffer\n self._index = 0\n return self._parse()", "def process_messages(self):\n for each_message in self.unprocessed_messages:\n if not ( 'message_type' in each_message):\n logging.error(\"(%s:%d) invalid message found...ignoring the message\",\\\n self.ip, self.port)\n else:\n if ( each_message['message_type'] is 'unchoke'):\n self.is_choking = 0\n elif ( each_message['message_type'] is 'choke'):\n self.is_choking = 1\n elif ( each_message['message_type'] is 'interested'):\n self.is_interested = 1\n elif ( each_message['message_type'] is 'not interested'):\n self.is_interested = 0\n elif ( each_message['message_type'] is 'have'):\n self.pieces.append(each_message['piece_index'])\n elif ( each_message['message_type'] is 'bitfield'):\n bitfield = each_message['bitfield']\n for index, each_bit in enumerate(bitfield):\n if ( each_bit is '1'):\n self.pieces.append(index)", "def twitch_receive_messages(self):\r\n self._push_from_buffer()\r\n result = []\r\n while True:\r\n # process the complete buffer, until no data is left no more\r\n try:\r\n time.sleep(.01)\r\n if self.s is None:\r\n raise Exception('socket is closed')\r\n msg = self.s.recv(4096).decode() # NON-BLOCKING RECEIVE!\r\n except socket.error as e:\r\n err = e.args[0]\r\n if err == errno.EAGAIN or err == errno.EWOULDBLOCK:\r\n # There is no more data available to read\r\n if len(result):\r\n self._maybe_print('returning with {}'.format(result))\r\n\r\n return result\r\n else:\r\n # a \"real\" error occurred\r\n # import traceback\r\n # import sys\r\n # print(traceback.format_exc())\r\n if not self.in_shutdown:\r\n print(\"Trying to recover...\")\r\n self.connect()\r\n return result\r\n else:\r\n self._maybe_print('twitch in: ' + msg)\r\n rec = [self._parse_message(line)\r\n for line in filter(None, msg.split('\\r\\n'))]\r\n rec = [r for r in rec if r] # remove Nones\r\n result.extend(rec)\r\n self._maybe_print(\"result length {} {}\".format(len(result), result))", "def recv_messages(self):\n while True:\n b = unwrap_read(self.sock.recv(4096))\n msgs = self.parser.feed(b)\n if msgs:\n for msg in msgs:\n self.router.incoming(msg)\n return", "def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs", "def _parse_msg(self, msg):\n try:\n self.received_msg += msg.decode()\n except:\n self.log.warning(\"invalid parse frame '%s'\" % msg)\n\n while True:\n pos = self.received_msg.find('\\r')\n if pos == -1: # no full msg\n break\n m = self.received_msg[:pos].strip()\n if not len(m):\n break\n self.platform.process_received_message(m)\n self.received_msg = self.received_msg[pos + 1:]", "def process_messages(self, messages):\n\n return 
messages", "def get_messages(self):\n data = self.socket.recv(BUF_SIZE).decode()\n return data.split('\\0')", "def recv_and_load_messages(self):\n logging.debug('receiving message from peer(%s:%d)',\\\n self.ip, self.port)\n buff = b''\n while True:\n try:\n msg = self.sock.recv(4096)\n if len(msg) == 0:\n break\n buff += msg\n except socket.error:\n logging.warning('socket.error in receiving message from peer(%s:%d)',\\\n self.ip, self.port)\n break\n try:\n logging.debug(\"(%s:%d) receiving messages...\", self.ip, self.port)\n decoded_messages = Message.decode_all_messages(buff)\n pieces = self._remove_pieces(decoded_messages)\n self.unprocessed_messages += decoded_messages\n logging.debug(\"(%s:%d) following messages successfully loaded...\", self.ip, self.port)\n logging.debug(decoded_messages)\n return pieces\n except ValueError:\n logging.error(\"invalid message. Skipping to next peer\")\n pass", "def process(self, chunk):\n self._buf += chunk\n\n # Streams are `\\r\\n`-separated JSON messages.\n raw_lines = self._buf.split(b\"\\r\\n\")\n\n # If only one element in the split, then there wasn't a CRLF.\n if len(raw_lines) > 1:\n\n # The last element may be a b'', which is perfectly fine.\n self._buf = raw_lines[-1]\n\n # Blank lines are keep-alive messages.\n self._mailbox.extend(l for l in raw_lines[:-1] if l.strip())", "def parseBuffer(self):\n idx = self.buf.find(DELIMITER)\n while idx > -1:\n packet = self.buf[0:idx]\n if len(packet) > 4:\n if packet[0:3] == 'DATA':\n self.factory.setData(packet[4:idx])\n else:\n print \"%s is a malformed packet, header %s not recognized\" % (packet, packet[0:3])\n else:\n print \"%s attempting to send a packet of invalid length %s\" % (packet, len(packet))\n self.buf = self.buf[(idx + len(DELIMITER)):]\n idx = self.buf.find(DELIMITER)", "def recv(recv_buffer):\n\n header_len = struct.calcsize(CCPMessage.HEADER_FMT)\n\n msgs = []\n while True:\n if len(recv_buffer) < header_len:\n #print \"AAAAAAAAAAAAAA %s \" % str(len(recv_buffer))\n return (msgs, recv_buffer)\n\n header = recv_buffer[:header_len]\n\n (version, msg_type, data_len, conn_id) = struct.unpack(\n CCPMessage.HEADER_FMT, header)\n\n DEBUG and log_debug(\"msg %d: version: %r; msg_type: %r; data_len: %u; conn_id: %r\"\n % (len(msgs), version, msg_type, data_len, conn_id))\n\n\n #print(\"msg %d: version: %r; msg_type: %r; data_len: %u; conn_id: %r\"\n # % (len(msgs), version, msg_type, data_len, conn_id))\n\n if DEBUG:\n top1 = len(recv_buffer)\n top2 = len(recv_buffer)\n if top1 > 64:\n top1 = 64\n if top2 > 128:\n top2 = 128\n\n log_debug(\"first %d bytes of data: %s\" %\n (top1, binascii.b2a_hex(recv_buffer[0:top1])))\n if top2 > 64:\n log_debug(\"second %d bytes of data: %s\" %\n (top2 - top1,\n binascii.b2a_hex(recv_buffer[top1:top2])))\n\n # check to make sure that the data_len is sane. 
We defer the other\n # checks until we have the entire packet, but this needs to be done\n # first in order to prevent an adversary from gumming up the\n # connection by sending an impossibly long message.\n if data_len > CCPMessage.MAX_DATA_LEN:\n #print \"BBBBBBB bad data len\"\n err_str = 'bad data_len (%u), recv_buffer len: %d' % (data_len,\n len(recv_buffer))\n CCPMessage.LOGGER.warn(err_str)\n\n raise CCPMessageUnpackError(err_str)\n\n # if we haven't received an entire message yet, then return\n #\n if len(recv_buffer) < (header_len + data_len):\n #print \"CCCCCCCCCCCCCCCCCCCCCCCC len(recv_buffer) < %s \" % str(header_len + data_len)\n #print str(len(recv_buffer))\n return (msgs, recv_buffer)\n\n if data_len == 0:\n data = None\n else:\n data = recv_buffer[header_len:header_len + data_len]\n\n recv_buffer = recv_buffer[header_len + data_len:]\n\n # Now that we have the entire message, we can decide whether it's\n # valid.\n #\n if version != CCPMessage.PROTOCOL_VERSION:\n err_str = 'bad version (%u)' % (version,)\n CCPMessage.LOGGER.warn(err_str)\n raise CCPMessageUnpackError(err_str)\n\n if not msg_type in CCPMessage.msg_types:\n err_str = 'bad msg_type (%u)' % (msg_type,)\n CCPMessage.LOGGER.warn(err_str)\n raise CCPMessageUnpackError(err_str)\n\n # The message is complete and valid. Append it to the list of\n # messages we've received.\n #\n msgs.append(CCPMessage(msg_type, conn_id=conn_id, data=data))\n\n # CCPMessage.LOGGER.debug(\"received msg_type (%u)\" %\n # (msg_type,))", "def parse(self):\n\t\tfor part in self.mail.walk():\n\t\t\tself.process_part(part)", "def loop(self):\n # dump all incoming messages into a list and empty the string\n incoming_messages = self.receiver.getDataFromCallback()\n # empty the buffer\n self.receiver.emptyDataFromCallback()\n\n parsed_messages = []\n pingacks = []\n for message in incoming_messages:\n # Deal with ping requests\n\n if message.topic == self.PINGREQ:\n self.pingack(json.loads(message.payload.decode()))\n # Deal with acknowledgements to our own ping requests\n elif message.topic == self.PINGACK:\n pingacks.append(json.loads(message.payload.decode()))\n # Parse non-encrypted messages\n elif message.topic == self.PUBLIC:\n parsed_messages.append(json.loads(message.payload.decode()))\n\n return parsed_messages, pingacks", "def _parse_cmds(self):\n lst = self.inbuffer.split('\\n')\n # leave trailing text (not terminated by \\n) in inbuffer\n self.inbuffer = lst.pop(-1)\n if lst:\n for cmd in lst:\n self.cmds.append(cmd)", "def parse_message(self, message):\n pass", "def __receive_messages(self) -> [str]:\n while True:\n try:\n data = self.__socket.recv(4096)\n if data:\n msgs = self.__json_serializer.bytes_to_jsons(data)\n if RemotePlayerProxy.DEBUG:\n for msg in msgs:\n print(f'[RPP] [RECV] <- [{self.name}]: {msg}')\n return msgs\n except Exception as e:\n if RemotePlayerProxy.DEBUG:\n print(f'Lost client {self.name} because: ', e)\n return []", "def read_cbor_message(self):\n while True:\n # 'self' is sufficiently 'file-like' to act as a load source.\n # Throws EOFError on end of stream/timeout/lost-connection etc.\n message = cbor.load(self)\n\n if isinstance(message, collections.abc.Mapping):\n # A message response (to a prior request)\n if 'id' in message:\n logger.info(\"Received msg: {}\".format(_hexlify(message)))\n return message\n\n # A log message - handle as normal\n if 'log' in message:\n response = message['log']\n log_method = device_logger.error\n try:\n response = message['log'].decode(\"utf-8\")\n log_methods = {\n 
'E': device_logger.error,\n 'W': device_logger.warn,\n 'I': device_logger.info,\n 'D': device_logger.debug,\n 'V': device_logger.debug,\n }\n if len(response) > 1 and response[1] == ' ':\n lvl = response[0]\n log_method = log_methods.get(lvl, device_logger.error)\n except Exception as e:\n logger.error('Error processing log message: {}'.format(e))\n log_method('>> {}'.format(response))\n continue\n\n # Unknown/unhandled/unexpected message\n logger.error(\"Unhandled message received\")\n device_logger.error(message)", "def parse(self, message: Message):\n\t\tpass", "def parse_payload(self):\n while len(self.buffer) >= 10:\n \"\"\" check magic word \"\"\"\n if self.buffer[0:2] != self.mw:\n #LogDebug(\"drop all buffer due to incorrect magic word\")\n self.buffer = b\"\" # drop entire buffer\n\n \"\"\" extract the value from length field \"\"\"\n length = struct.unpack(\"I\", self.buffer[2:6])[0] + 1\n #print \"packet len\", length, \"buffer len\", len(self.buffer)\n if len(self.buffer) < length:\n #LogDebug(\"imcompleted packet will be processed later\")\n break\n\n \"\"\" verify the packet CRC \"\"\"\n calculated_crc = struct.pack(\"I\", binascii.crc32(self.buffer[:length-4]) & 0xFFFFFFFF)\n if calculated_crc != self.buffer[length-4:length]:\n pass\n else:\n payload = self.buffer[6:length-4]\n self.payloads.append(payload)\n self.buffer = self.buffer[length:]", "def parse_messages(self, orig):\n data=orig[1:len(orig)-1]\n output=[]\n for i in range(0, len(data), 3):\n message_data=data[i].split(',')\n message_text=data[i+1]\n output.append({'status':message_data[1], 'number':message_data[2],'date':message_data[4],'time':message_data[5],'text':message_text})\n return output", "def read_messages(message_file):\n line = message_file.readline()\n messages = []\n \n while line != '':\n line = clean_message(line)\n line = line.strip('\\n')\n messages.append(line)\n line = message_file.readline()\n return messages\n\t\n # Function will go through each line removing occurences of '/n'", "def _read_message(self):\n if self.__eof:\n return None\n result = {}\n line = sys.stdin.readline()\n while line == '\\n':\n line = sys.stdin.readline()\n if not line:\n self.__eof = True\n return None\n s = line.split(\" \", 1)\n result['_number'] = int(s[0])\n result['_text'] = s[1].strip()\n\n while not self.__eof:\n line = sys.stdin.readline()\n if not line:\n self.__eof = True\n return result\n if line == '\\n':\n return result\n s = line.split(\":\", 1)\n result[s[0]] = s[1].strip()", "def send_and_parse(self, cmd):\n\n lines = self.__send(cmd)\n messages = self.__protocol(lines)\n return messages", "def receive_message(self):\n\n # Calculate the size of the buffer\n self.buffer.seek(0, os.SEEK_END)\n buffer_size = self.buffer.tell()\n\n # Check if a complete header is present\n if buffer_size < MessageHeaderSerializer.calcsize():\n return\n\n # Go to the beginning of the buffer\n self.buffer.reset()\n\n message_model = None\n message_header_serial = MessageHeaderSerializer()\n message_header = message_header_serial.deserialize(self.buffer)\n\n total_length = MessageHeaderSerializer.calcsize() + message_header.length\n\n # Incomplete message\n if buffer_size < total_length:\n self.buffer.seek(0, os.SEEK_END)\n return\n\n payload = self.buffer.read(message_header.length)\n self.buffer = StringIO()\n self.handle_message_header(message_header, payload)\n\n payload_checksum = \\\n MessageHeaderSerializer.calc_checksum(payload)\n\n # Check if the checksum is valid\n if payload_checksum != 
message_header.checksum:\n return (message_header, message_model)\n\n if message_header.command in MESSAGE_MAPPING:\n deserializer = MESSAGE_MAPPING[message_header.command]()\n message_model = deserializer.deserialize(StringIO(payload))\n\n return (message_header, message_model)", "def parse_message(buffer):\n _discard_until_message_start(buffer)\n\n if buffer and buffer[0] == MESSAGE_FAILURE_BYTE:\n buffer[:] = buffer[1:]\n return MessageFailure(\n 'Command send failure (probable collision). Expect a retry.',\n ), 2 - len(buffer)\n\n # It takes at least 2 bytes to move forward.\n if len(buffer) < 2:\n return None, 2 - len(buffer)\n\n try:\n command_code = CommandCode(buffer[1])\n except ValueError:\n logger.warning(\n \"Unrecognized command code (0x%02x). Ignoring invalid data.\",\n buffer[1],\n )\n buffer[:2] = []\n\n return None, 2\n\n extension = 0\n\n # If the message is an Insteon message and has the extended flag, we expect\n # 14 user-data more bytes.\n if command_code == CommandCode.send_standard_or_extended_message:\n if len(buffer) >= 6 and buffer[5] & (1 << 4):\n extension = 14\n\n body, expected = _extract_body(\n buffer,\n BODY_SIZES[command_code] + extension,\n )\n\n # Not enough bytes to process the message. Let's wait for more.\n if body is None:\n return None, expected\n\n return (\n IncomingMessage(command_code=command_code, body=body),\n max(2 - len(buffer), 1),\n )", "def _discard_until_message_start(buffer):\n discarded_bytes = bytearray()\n\n for index, c in enumerate(buffer):\n if c not in {MESSAGE_START_BYTE, MESSAGE_FAILURE_BYTE}:\n discarded_bytes.append(c)\n else:\n break\n\n if discarded_bytes:\n buffer[:len(discarded_bytes)] = []\n discarded_bytes = discarded_bytes.lstrip(b'\\x00')\n\n if discarded_bytes:\n logger.warning(\n \"Discarding %s unexpected byte(s): %s\",\n len(discarded_bytes),\n discarded_bytes.hex(),\n )", "def get_msgs(self):\n msgs = []\n while True:\n try:\n msgs.append(self.get_msg(block=False))\n except Empty:\n break\n return msgs" ]
[ "0.6308243", "0.61712545", "0.6151684", "0.6139022", "0.61054057", "0.60950094", "0.6041251", "0.5966716", "0.5920718", "0.588217", "0.58744967", "0.57379323", "0.56501997", "0.5636574", "0.56351095", "0.56131524", "0.55846614", "0.5568359", "0.55157125", "0.5508778", "0.55041057", "0.54978436", "0.54791343", "0.5475574", "0.5467432", "0.5454493", "0.54528916", "0.5449792", "0.5424526", "0.5414733" ]
0.7935604
0
Discard unexpected bytes until a message start or the buffer end is found.
def _discard_until_message_start(buffer):
    discarded_bytes = bytearray()

    for index, c in enumerate(buffer):
        if c not in {MESSAGE_START_BYTE, MESSAGE_FAILURE_BYTE}:
            discarded_bytes.append(c)
        else:
            break

    if discarded_bytes:
        buffer[:len(discarded_bytes)] = []
        discarded_bytes = discarded_bytes.lstrip(b'\x00')

        if discarded_bytes:
            logger.warning(
                "Discarding %s unexpected byte(s): %s",
                len(discarded_bytes),
                discarded_bytes.hex(),
            )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def skip(self):\r\n length = self.next_byte()\r\n while length != b\"\\x00\" and length:\r\n self.next_bytes(parse_int(length, 'big'))\r\n length = self.next_byte()", "def _discard_excess_bytes(self):\n discard_len = min(self._discard, len(self._buffer))\n del self._buffer[:discard_len]\n self._discard -= discard_len", "def _leftovers(self, fl):\n try:\n data = self.sock.recv(1024, fl)\n except socket.error as _:\n return False\n if len(data) != 0:\n tail = data\n while True:\n (head, tail) = Ctrl().split_combined(tail)\n print(\"Got message:\", Ctrl().rem_header(head))\n if len(tail) == 0:\n break\n return True\n return False", "def parseBuffer(self):\n idx = self.buf.find(DELIMITER)\n while idx > -1:\n packet = self.buf[0:idx]\n if len(packet) > 4:\n if packet[0:3] == 'DATA':\n self.factory.setData(packet[4:idx])\n else:\n print \"%s is a malformed packet, header %s not recognized\" % (packet, packet[0:3])\n else:\n print \"%s attempting to send a packet of invalid length %s\" % (packet, len(packet))\n self.buf = self.buf[(idx + len(DELIMITER)):]\n idx = self.buf.find(DELIMITER)", "def read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n \n pos = self.buff.find(data)\n rval = self.buff[:pos + len(data)]\n self.buff = self.buff[pos + len(data):]\n \n return rval", "def read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n \n pos = self.buff.find(data)\n rval = self.buff[:pos + len(data)]\n self.buff = self.buff[pos + len(data):]\n \n return rval", "def read_until(self, data):\n\n while not data in self.buff:\n self.buff += self.socket.recv(1024)\n\n pos = self.buff.find(data)\n rval = self.buff[: pos + len(data)]\n self.buff = self.buff[pos + len(data) :]\n\n return rval", "def found_terminator(self):\n self.signal_filter.send((self, \"\".join(self.buffer)))\n del(self.buffer[:])", "def emptyBuffer(self):\n msg = True\n while msg:\n msg = self.receive()", "def readuntil(self, separator=b'\\n'):\n seplen = len(separator)\n if seplen == 0:\n raise ValueError('Separator should be at least one-byte string')\n\n if self._exception is not None:\n raise self._exception\n\n # Consume whole buffer except last bytes, which length is\n # one less than seplen. Let's check corner cases with\n # separator='SEPARATOR':\n # * we have received almost complete separator (without last\n # byte). i.e buffer='some textSEPARATO'. In this case we\n # can safely consume len(separator) - 1 bytes.\n # * last byte of buffer is first byte of separator, i.e.\n # buffer='abcdefghijklmnopqrS'. We may safely consume\n # everything except that last byte, but this require to\n # analyze bytes of buffer that match partial separator.\n # This is slow and/or require FSM. For this case our\n # implementation is not optimal, since require rescanning\n # of data that is known to not belong to separator. In\n # real world, separator will not be so long to notice\n # performance problems. Even when reading MIME-encoded\n # messages :)\n\n # `offset` is the number of bytes from the beginning of the buffer\n # where there is no occurrence of `separator`.\n offset = 0\n\n # Loop until we find `separator` in the buffer, exceed the buffer size,\n # or an EOF has happened.\n while True:\n buflen = len(self._buffer)\n\n # Check if we now have enough data in the buffer for `separator` to\n # fit.\n if buflen - offset >= seplen:\n isep = self._buffer.find(separator, offset)\n\n if isep != -1:\n # `separator` is in the buffer. 
`isep` will be used later\n # to retrieve the data.\n break\n\n # see upper comment for explanation.\n offset = buflen + 1 - seplen\n if offset > self._limit:\n raise LimitOverrunError(\n 'Separator is not found, and chunk exceed the limit',\n offset)\n\n # Complete message (with full separator) may be present in buffer\n # even when EOF flag is set. This may happen when the last chunk\n # adds data which makes separator be found. That's why we check for\n # EOF *ater* inspecting the buffer.\n if self._eof:\n chunk = bytes(self._buffer)\n self._buffer.clear()\n raise IncompleteReadError(chunk, None)\n\n # _wait_for_data() will resume reading if stream was paused.\n yield from self._wait_for_data('readuntil')\n\n if isep > self._limit:\n raise LimitOverrunError(\n 'Separator is found, but chunk is longer than limit', isep)\n\n chunk = self._buffer[:isep + seplen]\n del self._buffer[:isep + seplen]\n self._maybe_resume_transport()\n return bytes(chunk)", "def recvuntil(self, needle: bytes) -> bytes:\n\n data = b\"\"\n\n # We read one byte at a time so we don't overshoot the goal\n while not data.endswith(needle):\n next_byte = self.recv(1)\n\n if next_byte is not None:\n data += next_byte\n\n return data", "def bytes_strip(buf: bytes) -> bytes:\n if b'\\x00' in buf:\n return buf[:buf.find(b'\\x00')]\n else:\n return buf", "def read_until(steg_bytes: bytes, offset: int, ending: str):\r\n # Create a variable to hold the bytes read\r\n bytes_read = b\"\"\r\n\r\n # Loop through the steg_bytes\r\n while offset < len(steg_bytes):\r\n # Check if the current byte is the ending byte sequence\r\n if steg_bytes[offset:offset + len(ending)] == ending.encode():\r\n # Return the bytes read and the offset of the ending byte sequence\r\n return bytes_read, offset\r\n # Read the next byte\r\n bytes_read += steg_bytes[offset:offset + 1]\r\n offset += 1", "def flush(self):\n while self.port.inWaiting() > 0:\n try:\n data = self.readResult()\n except ValueError:\n # CRC error, better clear the buffer and get out of here.\n self.port.reset_input_buffer()\n break\n\n if data[0] != '\\xFF' or data[1] != 'V':\n D('discarded non-version frame', data)", "def _recv(self):\n result = self._con.receive()\n if result.startswith(Parser.NOT_OK_MSG) or len(result) == 0:\n return result\n while not result.endswith(Parser.OK_MSG + '\\n') and not result.startswith(Parser.OK_MSG):\n result += self._con.receive()\n return result", "def remove_buffered_packets(self):\n seq = self.next_seq\n while True:\n p = self.buffer.pop(seq, None)\n if p is None:\n break\n else:\n seq += len(p.data)\n yield p", "def found_terminator(self):\r\n self.msg = ''.join(self.msg_buffer)\r\n self.msg_split = self.msg.split(client_api[\"delimiter\"])\r\n cmd = self.msg_split[0]\r\n try:\r\n self.msg_handler[cmd]()\r\n except KeyError as e:\r\n server_log.info('Unhandled command received from client id {}: {}'.format(self.client_id, cmd))\r\n except Exception as e:\r\n server_log.info('Exception raised in server when receiving message from client: {!r}'.format(e))\r\n raise e\r\n finally:\r\n self.msg_buffer = []\r\n self.msg = ''\r\n self.msg_split = []", "def read_until_null(self):\r\n # Check socket connection\r\n if self.connected:\r\n # Get result data from debugger engine\r\n try:\r\n while not '\\x00' in self.buffer:\r\n self.buffer += H.data_read(self.socket.recv(self.read_size))\r\n data, self.buffer = self.buffer.split('\\x00', 1)\r\n return data\r\n except:\r\n e = sys.exc_info()[1]\r\n raise ProtocolConnectionException(e)\r\n else:\r\n raise 
ProtocolConnectionException(\"Xdebug is not connected\")", "def _pop_received_packet(self):\n fragments = self._receive_heap.pop_min_and_all_fragments()\n if fragments is None:\n self._attempt_disabling_looping_receive()\n else:\n last_seqnum = fragments[-1].sequence_number\n self._update_next_expected_seqnum(last_seqnum)\n self._update_next_delivered_seqnum(last_seqnum)\n payload = b''.join(f.payload for f in fragments)\n self.handler.receive_message(payload)\n\n if self._next_delivered_seqnum not in self._receive_heap:\n self._attempt_disabling_looping_receive()", "def filter(self, data):\n filtered = None\n if not self.matcher:\n return data, None\n\n self.buffer.extend(data)\n\n # We'd like to preserve colouring in self.buffer, but allow filtering\n # for the side channel without control sequences.\n\n # Copy the buffer chunks without control sequences\n buffer_no_cs = bytearray()\n # Chunks offsets in self.buffer\n offsets_buffer = []\n # Corresponding offsets in buffer_no_cs\n offsets_buffer_no_cs = []\n # Current offset in self.buffer while iterating over CSEQ.\n offset = 0\n for m in re.finditer(self.CSEQ, self.buffer):\n if m.start() > offset:\n # Copy over the chunk until a CSEQ start and remember\n # the offsets\n offsets_buffer_no_cs.append(len(buffer_no_cs))\n offsets_buffer.append(offset)\n buffer_no_cs.extend(self.buffer[offset:m.start()])\n offset = m.end()\n # Copy the rest of the buffer into the search buffer buffer_no_cs\n if offset < len(self.buffer):\n offsets_buffer_no_cs.append(len(buffer_no_cs))\n offsets_buffer.append(offset)\n buffer_no_cs.extend(self.buffer[offset:])\n\n # Note that we are scanning over the buffer again and again\n # if this causes noticeable performance issue, consider maintaining\n # a smaller part of the buffer to scan.\n match = self.matcher.search(buffer_no_cs)\n if match:\n # We've just found the boundaries of the desired output.\n self.matcher = None\n filtered = bytes(buffer_no_cs[:match.end()])\n # Find corresponding offset in the original buffer to remove\n # whatever corresponded to the found message.\n idx = bisect.bisect_left(offsets_buffer_no_cs, match.end())\n assert idx > 0\n offset_rest = offsets_buffer[idx-1] + match.end() - offsets_buffer_no_cs[idx-1]\n output = bytes(self.buffer[offset_rest:])\n self.buffer = bytearray()\n return output, filtered\n return b'', None", "def skip_until(self, s, timeout=None):\n self.read_cond(lambda x: s in x.buf, timeout)\n start = self.buf.find(s)\n self.buf = self.buf[start:]\n return", "def recvtil(self, delim):\n buf = b''\n # TODO maybe not make this O(n**2)...\n while not delim in buf:\n buf += self.recv(1)\n return buf", "def test_chopped_message(self):\n queue = Queue()\n # Receive the message in a separate thread, because it blocks\n thread = Thread(target=lambda q: q.put(self.inverter.receive()), args=(queue,))\n thread.start()\n self.sock.send(message[0:1]) # Send some message parts\n sleep(0.01)\n self.sock.send(message[1:3])\n sleep(0.01)\n self.sock.send(message[3:7])\n sleep(0.01)\n self.sock.send(message[7:])\n thread.join()\n # Check result\n ident, payload = queue.get(timeout=1.0)\n self.assertEqual(b\"\\x00\\x01\\x02\", ident)\n self.assertEqual(b\"\", payload)", "def _readuntil(f, end=_TYPE_END):\n\tbuf = bytearray()\n\twhile True:\n\t\tbyte = f.read(1)\n\t\tif byte != end:\n\t\t\tbuf += byte\n\t\telse:\n\t\t\tbreak\n\treturn buf", "def _get_non_transparent_framed_message(self):\n for sep in constant.TERMS:\n pos = self._buffer.find(sep)\n if pos != -1:\n message = 
self._buffer[:pos]\n del self._buffer[:pos+1]\n return message\n if len(self._buffer) > constant.MAX_MESSAGE_BUFFER:\n self._close_with_error('Maximum buffer size exceeded without finding terminating trailer character')", "def clear_in_serial_buffer(self):\n if self.serial.inWaiting():\n resp = self.serial.readall()\n log.warn(\"Unknown/unparsed serial response: %s\", resp)", "def consume_endmarker(self) -> None:\n line = self.fetch(1, allow_endmarker=True)\n if self.pattern.match(line):\n self.step(1)", "def found_terminator(self):\n self.l.debug('found_terminator()')\n self.process_data()", "def data_received(self, data):\n self._log.debug(\"recv %s\", data)\n self.incomingMessageBuffer += data\n\n if not self.started:\n # Need to check the startByte to see if we can receive\n if not self.startByte in self.incomingMessageBuffer:\n # We cut the buffer to size, removing data that can't be part of start byte\n if len(self.startByte) < len(self.incomingMessageBuffer):\n self.incomingMessageBuffer = self.incomingMessageBuffer[\n -len(self.startByte) :\n ]\n self._log.debug(\"Ignoring: start byte %s not found\", self.startByte)\n return\n else:\n self._log.debug(\"startBytes %s found - starting read\", self.startByte)\n _, self.incomingMessageBuffer = self.incomingMessageBuffer.split(\n self.startByte, 1\n )\n self.started = True\n self.onReady(True)\n\n if self.readStruct is not None:\n while len(self.incomingMessageBuffer) >= self.readStruct.size:\n msg = self.readStruct.unpack(\n self.incomingMessageBuffer[: self.readStruct.size]\n )\n self.incomingMessageBuffer = self.incomingMessageBuffer[\n self.readStruct.size :\n ]\n\n if self.readKeys is not None:\n msg = dict(zip(self.readKeys, msg))\n self._log.debug(\"recvmsg: %s\", msg)\n self.putter(msg)\n elif self.readFormat is None:\n self.putter(self.incomingMessageBuffer)\n self.incomingMessageBuffer = bytes()\n else:\n # We split by line:\n outputArray = self.incomingMessageBuffer.split(b\"\\n\")\n self.incomingMessageBuffer = outputArray[-1]\n for i in range(len(outputArray) - 1):\n # This returns the bytes object of the line.\n # We don't convert to string, since people might be sending non-ascii characters.\n # When receiving, the user should use .decode('ascii') to get a a string.\n self._log.debug(\"recvmsg: %s\", outputArray[i])\n self.putter(outputArray[i])", "def _process_end_of_file(self):\n (nd_timestamp, non_data) = self._chunker.get_next_non_data()\n if non_data and len(non_data) > 0:\n message = \"Extra un-expected non-data bytes at the end of the file:%s\", non_data\n log.warn(message)\n self._exception_callback(RecoverableSampleException(message))" ]
[ "0.6518637", "0.6509923", "0.5996722", "0.5991392", "0.59583664", "0.59583664", "0.59414077", "0.58899826", "0.5845345", "0.58346933", "0.5820699", "0.5807118", "0.5782911", "0.5689225", "0.566917", "0.5615318", "0.56149256", "0.5603645", "0.5601141", "0.55972356", "0.55565846", "0.5508083", "0.55068505", "0.5492847", "0.54685503", "0.5456014", "0.5428998", "0.542044", "0.5418556", "0.5412399" ]
0.8094288
0