query (string, 9–9.05k chars) | document (string, 10–222k chars) | metadata (dict) | negatives (list of 30) | negative_scores (list of 30) | document_score (string, 4–10 chars) | document_rank (string, 2 classes) |
---|---|---|---|---|---|---|
Check that there are only two reward volumes within a session, one of which is 0. | def check_reward_volume_set(data, **_):
metric = data["rewardVolume"]
passed = 0 < len(set(metric)) <= 2 and 0. in metric
return metric, passed | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_reward_volumes(data, **_):\n metric = data['rewardVolume']\n correct = data['correct']\n passed = np.zeros_like(metric, dtype=bool)\n # Check correct trials within correct range\n passed[correct] = (1.5 <= metric[correct]) & (metric[correct] <= 3.)\n # Check incorrect trials are 0\n passed[~correct] = metric[~correct] == 0\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def acquisition_function_expected_volume_removal(\n gp_reward_model: BasicGPRewardModel,\n) -> int:\n assert gp_reward_model.use_comparisons\n\n # DL: This assumes the same observation model for each query which we might\n # want to change at some point\n query0 = gp_reward_model.candidate_queries[0]\n response = query0.response\n\n (\n candidate_queries_gp_repr,\n candidate_queries_linear_combination,\n candidate_queries_gp_repr_idx,\n ) = gp_reward_model.get_candidate_queries_gp_repr()\n # mu_diff, _ = gp_reward_model.get_candidate_queries_reward_predictions()\n mu_diff, _ = gp_reward_model.gp_model.predict_multiple(\n candidate_queries_gp_repr,\n linear_combination=candidate_queries_linear_combination,\n )\n\n if response == \"bernoulli\":\n prob = (1 + np.clip(mu_diff, -1, 1)) / 2\n elif response == \"deterministic\":\n prob = np.sign(mu_diff)\n elif response == \"probit\":\n prob = norm.cdf(mu_diff / (np.sqrt(2) * query0.sigma))\n else:\n raise NotImplementedError(f\"evr for {response}\")\n\n volume_removal = np.minimum(1 - prob, prob)\n\n argmax_volume_removal = argmax_over_index_set(\n volume_removal, range(len(candidate_queries_gp_repr_idx))\n )\n return candidate_queries_gp_repr_idx[np.random.choice(argmax_volume_removal)]",
"def regular(self):\n if all(self._volumes - self._volumes[0] == 0):\n return True\n else:\n return False",
"def test_shared_cards_are_zero(self):\n self.assertEqual(len(self.hand.sharedCards), 0)",
"def still_betting(self):\n for player in self.players:\n if player.current_bet is 0:\n return True\n return False",
"def is_zero(self):\n for action, prob in self._regrets.items():\n if prob != 0.0:\n return False\n return True",
"def is_buy(self):\n return(copysign(1, self.volume)>0)",
"def status(self):\n return self.value > self.initial_value/2 \\\n and self.bag > 0 \\\n and self.balance > 1",
"def check(self, reward_new, reward, iteration):\n return reward_new > reward",
"def reward_threshold(self) -> Optional[float]:",
"def valid(self):\n return len(self._totals_) <= 1",
"def penalty_reward(reward):\n if reward < 0:\n return True\n return False",
"def pops_agree(x):\n return len(x.all_open_closed) == 1",
"def check_dead(cart):\n id = cart_to_loc(cart)\n return voxel_data[id] == 0",
"def verify_winner(self):\r\n return self.count_pegs() == 1",
"def validate_staking_rewards_emission(self, key, asset):\n if (\n key == \"native.digg\"\n or key == \"native.uniDiggWbtc\"\n or key == \"native.sushiDiggWbtc\"\n ):\n if asset == \"digg\":\n return True\n if (\n key == \"native.badger\"\n or key == \"native.uniBadgerWbtc\"\n or key == \"native.sushiBadgerWbtc\"\n ):\n if asset == \"badger\":\n return True\n else:\n return False",
"def multishot(attacker_schema, victim_schema):\n\n multishot = attacker_schema.multishot.get(victim_schema.name, 0)\n return multishot > 0 and (multishot - 1.0) / multishot > random.random()",
"def __is_terminal(self, reward):\n\n # Initialize the terminal signals to false\n done = 0\n exit_cond = 0\n\n # Find readings that are below the set minimum. If there are multiple readings below the threshold, a crash\n # likely occurred and the episode should end\n # print(self.lidar_ranges)\n # indices = np.where(self.lidar_ranges <= self.min_dist)\n # # print(indices)\n # if len(indices[0]) >= self.crash_threshold:\n # exit_cond = 1\n # If the speed is less than 0.3, then the vehicle is pressed against a wall and not moving. Thus, it has crashed.\n # print('Speed: ' + str(self.pos[3]))\n if self.pos[3] < 0.2:\n dist_to_start = np.sqrt((self.pos[0] -self.ref_track[1088, 0])**2 + (self.pos[1] -self.ref_track[1088, 1])**2)\n # print(dist_to_start)\n if dist_to_start > 0.4:\n exit_cond = 1\n\n if reward <= -1.0:\n exit_cond = 1\n\n return done, exit_cond",
"def check_win(players: List[Player]) -> Tuple[bool, Optional[Player]]:\n total_players = len(players)\n for player in players:\n if player.influence == 0:\n total_players -= 1\n if total_players == 1:\n for player in players:\n if player.influence >0:\n return True, player\n return False, None",
"def test_reward(self):\n success = True\n old_sim = self.sim\n old_robot_num = self.robot_num\n old_agents = copy.deepcopy(self.agents)\n old_obstacles = copy.deepcopy(self.obstacles)\n old_goals = copy.deepcopy(self.goals)\n old_action_list = copy.deepcopy(self.last_actions)\n\n # Test collision penalties and overtaking penalty\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0,0)\n )\n self.obstacles = []\n self.goals = []\n self.last_actions = []\n self.robot_num = self.sim.addAgent((0, 0))\n self.agents = [self.robot_num]\n self.agents.append(self.sim.addAgent((0.1, 0.1)))\n self.agents.append(self.sim.addAgent((-0.1, 0.1)))\n self.agents.append(self.sim.addAgent((0.1, -0.1)))\n self.agents.append(self.sim.addAgent((-0.1, -0.1)))\n r = self.reward()[0].item()\n exp = -4.22\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -4 for 4 collisions, -0.2 for 4 predicted \"\n \"collisions, -0.02 for overtake penalty with top right agent\")\n\n # Test closeness penalties and overtaking penalty\n self.agents = []\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0,0)\n )\n self.robot_num = self.sim.addAgent((0, 0))\n self.agents = [self.robot_num]\n self.agents.append(self.sim.addAgent((0.35, 0.35)))\n self.agents.append(self.sim.addAgent((0.35, -0.35)))\n self.agents.append(self.sim.addAgent((-0.35, 0.35)))\n self.agents.append(self.sim.addAgent((-0.35, -0.35)))\n r = self.reward()[0].item()\n exp = -1.02\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -1 for 4 closeness violations, -0.02 for \"\n \"overtake penalty with top right agent\")\n\n # Test passing penalty\n self.agents = []\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0, 0)\n )\n self.robot_num = self.sim.addAgent((0, 0))\n self.agents = [self.robot_num]\n self.agents.append(self.sim.addAgent((0.7, -0.5), 1.0, 10, 5.0, 5.0,\n 0.2, 1.5, (-0.5, 0)))\n r = self.reward()[0].item()\n exp = -0.02\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -0.02 for passing violation\")\n\n # Test crossing penalty\n self.agents = []\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0, 0)\n )\n self.robot_num = self.sim.addAgent((0, 0))\n self.agents = [self.robot_num]\n self.agents.append(self.sim.addAgent((0.35, 0.3), 1.0, 10, 5.0, 5.0,\n 0.2, 1.5, (0, -0.5)))\n r = self.reward()[0].item()\n exp = -0.27\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -0.02 for crossing violation, -0.25 for \"\n \"closeness violation\")\n\n # Test action penalty (moving)\n self.agents = []\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0, 0)\n )\n self.robot_num = self.sim.addAgent((0, 0))\n self.last_actions = [1, 1]\n self.last_action_ind = 0\n r = self.reward()[0].item()\n exp = -0.01\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -0.01 for moving\")\n\n # Test action penalty (changing actions)\n self.agents = []\n self.sim = rvo2.PyRVOSimulator(\n 0.1, 1.0, 10, 5.0, 5.0, 0.2, 1.5, (0, 0)\n )\n self.robot_num = self.sim.addAgent((0, 0))\n self.last_actions = [1, 0]\n self.last_action_ind = 0\n r = self.reward()[0].item()\n exp = -0.01\n if r != exp:\n success = False\n print(\"Actual reward: \", r, \"Expected: \", exp)\n print(\"Explanation: -0.01 for 
changing actions\")\n\n self.sim = old_sim\n self.robot_num = old_robot_num\n self.agents = old_agents\n self.obstacles = old_obstacles\n self.goals = old_goals\n self.last_actions = old_action_list\n return success",
"def check(self):\n\n if (sum(self.game_state) == 0):\n return 1\n elif (self.game_state[-1] >=1 ):\n return -1\n else:\n return 0",
"def check_cart(cart):\n return 0 <= cart[0] < grid_size and 0 <= cart[1] < grid_size and 0 <= cart[2] < grid_size",
"def checkExternalReward(self):\r\n\r\n self.externalReward, res = self.receiver.getExternalReward()\r\n\r\n return res",
"def check(self, output = \"debug\"):\n counter = 0;\n for fragment in self.mdv:\n if fragment in self.observed_fragments:\n for number in self.mdv[fragment]:\n if self.mdv[fragment][number]['use'] == 'use':\n if self.mdv[fragment][number]['ratio'] > 1.0:\n counter = counter + 1\n if output == \"debug\":\n print(fragment, number, self.mdv[fragment][number]['ratio'],\"> 1.0\")\n if self.mdv[fragment][number]['ratio'] <= 0.0:\n counter = counter + 1\n if output == \"debug\":\n print(fragment, number, self.mdv[fragment][number]['ratio'],\"<= 0.0\")\n else:\n if output == \"debug\":\n print('The MDV object does not include', fragment)\n counter = counter + 1\n if counter == 0:\n return True\n return False",
"def bet_check(m):\n try:\n value = float(m.content)\n if 0 <= value <= player.coins:\n return True\n else:\n return False\n except:\n return False",
"def getSusceptible(self):\n\n # use a mask and sum it to see the number of healthy people, designated as having a value equal to zero\n self.susceptible = np.sum((self.getSpace()) == 0)\n\n return self.susceptible",
"def isZero(self):\n return self.count == 0",
"def assert_valid_volume(wells,exception_info='invalid volume'):\n wells = ensure_list(wells)\n \n assert all([well.volume >= get_well_dead_volume(well) for well in wells]), exception_info\n assert all([well.volume <= get_well_max_volume(well) for well in wells]), exception_info",
"def check(self):\r\n self.check_probabilities()\r\n self.check_sum()",
"def check_cap(org, amount):\n from django.db.models import Sum, Q\n\n if amount < 0:\n query = Q(favor__lt=0)\n else:\n query = Q(favor__gt=0)\n total = abs(\n org.reputations.filter(query).aggregate(sum=Sum(\"favor\"))[\"sum\"] or 0\n ) + abs(amount)\n mod = org.social_modifier * 5\n if total > mod:\n noun = \"favor\" if amount > 0 else \"disfavor\"\n raise CommandError(\n \"That would bring your total %s to %s, and you can only spend %s.\"\n % (noun, total, mod)\n )"
] | [
"0.6595489",
"0.59923935",
"0.58829993",
"0.55576754",
"0.5459114",
"0.5263144",
"0.5262954",
"0.5224466",
"0.51899666",
"0.5080002",
"0.50792956",
"0.5036989",
"0.5031256",
"0.5027713",
"0.501224",
"0.50117445",
"0.5010227",
"0.49776033",
"0.49289915",
"0.49272078",
"0.4919848",
"0.4911695",
"0.49102",
"0.48821977",
"0.48780873",
"0.4851163",
"0.48416057",
"0.48379615",
"0.4837657",
"0.48349565"
] | 0.71636873 | 0 |
Check that the difference between wheel position samples is close to the encoder resolution and that the wheel timestamps strictly increase. | def check_wheel_integrity(data, re_encoding='X1', enc_res=None, **_):
if isinstance(re_encoding, str):
re_encoding = int(re_encoding[-1])
# The expected difference between samples in the extracted units
resolution = 1 / (enc_res or ephys_fpga.WHEEL_TICKS
) * np.pi * 2 * ephys_fpga.WHEEL_RADIUS_CM / re_encoding
# We expect the difference of neighbouring positions to be close to the resolution
pos_check = np.abs(np.diff(data['wheel_position']))
# Timestamps should be strictly increasing
ts_check = np.diff(data['wheel_timestamps']) <= 0.
metric = pos_check + ts_check.astype(float) # all values should be close to zero
passed = metric < 1.5 * resolution
return metric, passed | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_wheel_freeze_during_quiescence(data, **_):\n assert np.all(np.diff(data[\"wheel_timestamps\"]) >= 0)\n assert data[\"quiescence\"].size == data[\"stimOnTrigger_times\"].size\n # Get tuple of wheel times and positions over each trial's quiescence period\n qevt_start_times = data[\"stimOnTrigger_times\"] - data[\"quiescence\"]\n traces = traces_by_trial(\n data[\"wheel_timestamps\"],\n data[\"wheel_position\"],\n start=qevt_start_times,\n end=data[\"stimOnTrigger_times\"]\n )\n\n metric = np.zeros((len(data[\"quiescence\"]), 2)) # (n_trials, n_directions)\n for i, trial in enumerate(traces):\n t, pos = trial\n # Get the last position before the period began\n if pos.size > 0:\n # Find the position of the preceding sample and subtract it\n idx = np.abs(data[\"wheel_timestamps\"] - t[0]).argmin() - 1\n origin = data[\"wheel_position\"][idx if idx != -1 else 0]\n # Find the absolute min and max relative to the last sample\n metric[i, :] = np.abs([np.min(pos - origin), np.max(pos - origin)])\n # Reduce to the largest displacement found in any direction\n metric = np.max(metric, axis=1)\n metric = 180 * metric / np.pi # convert to degrees from radians\n criterion = 2 # Position shouldn't change more than 2 in either direction\n passed = metric < criterion\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def check_wheel_move_before_feedback(data, **_):\n # Get tuple of wheel times and positions within 100ms of feedback\n traces = traces_by_trial(\n data[\"wheel_timestamps\"],\n data[\"wheel_position\"],\n start=data[\"feedback_times\"] - 0.05,\n end=data[\"feedback_times\"] + 0.05,\n )\n metric = np.zeros_like(data[\"feedback_times\"])\n # For each trial find the displacement\n for i, trial in enumerate(traces):\n pos = trial[1]\n if pos.size > 1:\n metric[i] = pos[-1] - pos[0]\n\n # except no-go trials\n metric[data[\"choice\"] == 0] = np.nan # NaN = trial ignored for this check\n nans = np.isnan(metric)\n passed = np.zeros_like(metric) * np.nan\n\n passed[~nans] = (metric[~nans] != 0).astype(float)\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def quick_check(self):\n for ang in range(self.MIDPOINT-150, self.MIDPOINT+151, 150):\n self.servo(ang)\n if self.read_distance() < self.SAFE_DIST:\n return False\n return True",
"def test_t(self):\n assert np.isclose(self.stepper.t, self.final_t)",
"def sanity_check(self):\n res = True\n res = res and self.detected\n res = res and np.sum(self.diffs) < 30000 # experimental value\n return res",
"def check_time():\n times = get_times()\n time_difference = abs((times['local'] - times['target']).total_seconds())\n return time_difference < post_time_tol_seconds",
"def check_last_update(self):\n now = self.get_clock().now()\n diff_L = (now - self.last_stamp_L).nanoseconds * 1e-9\n diff_R = (now - self.last_stamp_R).nanoseconds * 1e-9\n if diff_L > 0.1:\n self.duty_left = 0.0\n if diff_R > 0.1:\n self.duty_right = 0.0",
"def test_accurate(self):\n M = simulation.EventMonitor(self.G)\n sim = simulation.Simulation(self.G, M, dt=self.dt)\n sim.run(self.t_max)\n\n times = self.G.pattern.nonzero()[1]*self.dt\n self.assertTrue(np.allclose(sorted(times), M.t))\n for (i, t) in zip(M.i, M.t):\n self.assertTrue(self.G.pattern[i, int_r(t/self.dt)])",
"def distance_between_wheels():",
"def __straightness_correction(self):\n self.elapsed_ticks_left, self.elapsed_ticks_right = \\\n read_enc_ticks(self.initial_ticks_left, self.initial_ticks_right)\n\n print(\"L: \" + str(self.elapsed_ticks_left) + \"\\tR: \" + str(self.elapsed_ticks_right))\n\n # Handle invalid encoder readings\n if self.elapsed_ticks_left < 0 and self.elapsed_ticks_right < 0:\n print(\"Bad encoder reading\")\n return (0, 0)\n if self.elapsed_ticks_left > self.elapsed_ticks_right:\n print(\"Right slow\")\n return (-get_inc(self.speed), get_inc(self.speed))\n elif self.elapsed_ticks_left < self.elapsed_ticks_right:\n print(\"Left slow\")\n return (get_inc(self.speed), -get_inc(self.speed))\n else:\n print(\"Equal\")\n return (0, 0)",
"def validate_ts(self):\n try:\n self.get_log_file()\n\n self.parse_vibrations()\n\n self.obtain_geometries()\n\n self.percent_changes = self.obtain_percent_changes()\n\n\n center_values = np.log(\n self.percent_changes[self.percent_changes.center].percent_change.mean())\n shell_values = np.log(\n self.percent_changes[self.percent_changes.center != True].percent_change.mean())\n\n if center_values > shell_values + 1:\n logging.info(\"Vibrational analysis was successful\")\n return True\n else:\n logging.info(\n \"Cannot reasonably say that we have arrived at a TS through vibrational analysis.\")\n return False\n except AssertionError:\n logging.info(\"Something went wrong when attempting vibrational analysis...\")\n logging.info(\"Cannot verify via vibrational analysis\")\n return False",
"def test_e0_ts(self):\n self.assertAlmostEqual(self.tunneling.E0_TS.value_si * 0.001, self.E0_TS, 4)",
"def test_base_period_tolerance(delta, expected):\n result = wrap(180 - delta)\n print(result, np.isclose(result, -180))\n assert np.isclose(result, -180)[0] == expected",
"def check_peak_win(self):\n if self.peak_win[0] < 0.0:\n self.peak_win[0] = 0.0\n if self.logger is not None:\n self.logger.warning(('Start of peak window < 0 sec for cond: {}. ' +\n 'Setting to 0.').format(self.cond))\n if self.peak_win[1] > self.psc_dur:\n self.peak_win[1] = self.psc_dur\n if self.logger is not None:\n logger.warning(('End of peak window is longer than trial HRF ' +\n 'for cond: {}. Truncating.').format(self.cond))\n return",
"def test_fix_fingers_xmin_peak(self):\n\t\tself.watcher = ww.WeightWatcher(model=self.model, log_level=logging.WARNING)\n\t\t\t\n\t\t# default\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], xmax=FORCE, pl_package=POWERLAW_PACKAGE)\n\t\tactual = details.alpha.to_numpy()[0]\n\t\texpected = 7.116304\n\t\tprint(\"ACTUAL {}\".format(actual))\n\t\tself.assertAlmostEqual(actual,expected, places=2)\n\n\t\t# XMIN_PEAK xmax FORCED\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], fix_fingers='xmin_peak', xmax=FORCE, xmin_max=1.0, pl_package=POWERLAW_PACKAGE)\n\t\tactual = details.alpha[0]\n\t\tactual = details.alpha.to_numpy()[0]\n\t\texpected = 1.68\n\t\tdelta = 0.01\n\t\tself.assertAlmostEqual(actual,expected, None, '', delta)\n\t\t\n\t\t\n\t\t# XMIN_PEAK xmax None, sligltly different alphja\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], fix_fingers='xmin_peak', xmin_max=1.0, pl_package=POWERLAW_PACKAGE)\n\t\tactual = details.alpha[0]\n\t\tactual = details.alpha.to_numpy()[0]\n\t\texpected = 1.72\n\t\tdelta = 0.01\n\t\tself.assertAlmostEqual(actual,expected, None, '', delta)",
"def check_wheel_move_during_closed_loop(data, wheel_gain=None, **_):\n # Get the Bpod extracted wheel data\n timestamps = data['wheel_timestamps']\n position = data['wheel_position']\n\n return _wheel_move_during_closed_loop(timestamps, position, data, wheel_gain, tol=3)",
"def test_correctness(self):\n M_win = 1024\n N_fft = 131072\n # Set norm=False for correctness as the values obtained from the\n # scientific publication do not normalize the values. Normalizing\n # changes the sidelobe level from the desired value.\n w = windows.taylor(M_win, nbar=4, sll=35, norm=False, sym=False)\n f = fft(w, N_fft)\n spec = 20 * np.log10(np.abs(f / np.amax(f)))\n\n first_zero = np.argmax(np.diff(spec) > 0)\n\n PSLL = np.amax(spec[first_zero:-first_zero])\n\n BW_3dB = 2*np.argmax(spec <= -3.0102999566398121) / N_fft * M_win\n BW_18dB = 2*np.argmax(spec <= -18.061799739838872) / N_fft * M_win\n\n assert_allclose(PSLL, -35.1672, atol=1)\n assert_allclose(BW_3dB, 1.1822, atol=0.1)\n assert_allclose(BW_18dB, 2.6112, atol=0.1)",
"def check_convergency(self):\n if self.vars['ema_trace'][self.vars['step']] <= self.settings[\"emaSpeedTol\"]:\n return True\n else:\n return False",
"def is_equidistant(self) -> bool:\n if len(self.time) < 3:\n return True\n return len(self.time.to_series().diff().dropna().unique()) == 1",
"def test9(self):\n sig1 = np.array([0, 1, 0])\n sig2 = np.array([0, 0, 1, 0])\n d, p = EventSync.estimate_delay(sig1, sig2)\n self.assertTrue(d == -1)",
"def test8(self):\n sig1 = np.array([1, 0, 0, 0])\n sig2 = np.array([0, 1, ])\n d, p = EventSync.estimate_delay(sig1, sig2)\n self.assertTrue(d == -1)",
"def test_W_end(self):\t\t\n self.assertAlmostEqual(attempt.W[-1], 9.494852380803035)",
"def test_estimate_data_time__incorrect_doy():\n parser = IMFV283Parser()\n # BLC aka 1999 rollover gps issue\n transmission = '17274013241'\n day = 46\n minute = 78\n (data_time, transmit_time, corrected) = \\\n parser._estimate_data_time(transmission, day, minute)\n assert_equals(data_time, UTCDateTime('2017-10-01T01:18:00Z'))\n assert_equals(transmit_time, UTCDateTime('2017-10-01T01:32:41Z'))\n assert_equals(corrected, True)",
"def quick_check(self):\n #loop three times and moce the servo \n for ang in range(self.MIDPOINT - 115, self.MIDPOINT+116, 115):\n self.servo(ang)\n time.sleep(.05)\n if self.read_distance() < self.SAFE_DISTANCE:\n return False\n #if the three-part check didn't freak out\n return True",
"def test_numprops_different_sign(self):\n # Perform diff.\n df = Differ(key=\"name\", deltas={\"energy\": Delta(\"+-\")})\n d = df.diff(*self.engines)\n # Calculate expected results.\n is_different = lambda a, b: a < 0 < b or b < 0 < a\n changed = sum((int(is_different(e[0], e[1])) for e in self.energies))\n # Check results.\n self.assertEqual(len(d[Differ.CHANGED]), changed)",
"def test_check_conformer_energy(self):\n v_list = [-272.2779012225, -272.2774933703, -272.2768397635, -272.2778432059, -272.278645477, -272.2789602654,\n -272.2788749196, -272.278496709, -272.2779350675, -272.2777008843, -272.2777167286, -272.2780937643,\n -272.2784838846, -272.2788050464, -272.2787865352, -272.2785091607, -272.2779977452, -272.2777957743,\n -272.2779134906, -272.2781827547, -272.278443339, -272.2788244214, -272.2787748749]\n v_list = np.array(v_list, np.float64)\n v_diff = (v_list[0] - np.min(v_list)) * constants.E_h * constants.Na / 1000\n self.assertAlmostEqual(v_diff / 2.7805169838282797, 1, 5)",
"def _checkTimestamp(self, acquisition_time):\n\n #\n # Check for None\n #\n if acquisition_time is None:\n raise Exception('Invalid acquisition_time {acquisition_time}'.\n format(acquisition_time =acquisition_time))\n\n #\n # Do the conversion\n # \n acquisition_time_candidate = (parser.parse(acquisition_time)).timetuple()\n\n #\n # Check several values\n # \n if acquisition_time_candidate.tm_year < 2015:\n raise Exception('Invalid year {year} in acquisition time {acquisition_time}'.\n format(year = acquisition_time_candidate.tm_year, acquisition_time =acquisition_time))\n\n #\n # Return if it passed all tests\n #\n return acquisition_time_candidate",
"def check_detected_wheel_moves(data, min_qt=0, **_):\n # Depending on task version this may be a single value or an array of quiescent periods\n min_qt = np.array(min_qt)\n if min_qt.size > data[\"intervals\"].shape[0]:\n min_qt = min_qt[:data[\"intervals\"].shape[0]]\n\n metric = data['firstMovement_times']\n qevt_start = data['goCueTrigger_times'] - np.array(min_qt)\n response = data['response_times']\n # First movement time for each trial should be after the quiescent period and before feedback\n passed = np.array([a < m < b for m, a, b in zip(metric, qevt_start, response)], dtype=float)\n nogo = data['choice'] == 0\n passed[nogo] = np.nan # No go trial may have no movement times and that's fine\n return metric, passed",
"def _checkDT(self):\r\n dt = np.diff(self.tsec)\r\n \r\n dt_unique = np.unique(dt)\r\n \r\n if np.size(dt_unique) == 1:\r\n self.isequal = True\r\n else:\r\n self.isequal = False\r\n \r\n try:\r\n self.dt = dt[1]\r\n except:\r\n self.dt = 0.0",
"def test_estimate_data_time__correct_doy():\n parser = IMFV283Parser()\n # BOU aka normal\n transmission = '17274013121'\n day = 274\n minute = 72\n (data_time, transmit_time, corrected) = \\\n parser._estimate_data_time(transmission, day, minute)\n assert_equals(data_time, UTCDateTime('2017-10-01T01:12:00Z'))\n assert_equals(transmit_time, UTCDateTime('2017-10-01T01:31:21Z'))\n assert_equals(corrected, False)"
] | [
"0.6416596",
"0.6204324",
"0.5757079",
"0.5752711",
"0.5702539",
"0.5654965",
"0.56396395",
"0.5632442",
"0.5595564",
"0.5541013",
"0.5507114",
"0.54889065",
"0.54887325",
"0.5486571",
"0.54845035",
"0.5462624",
"0.54456806",
"0.5435884",
"0.5428846",
"0.53493243",
"0.5341664",
"0.5331515",
"0.5327165",
"0.5322862",
"0.53067636",
"0.5300874",
"0.5298395",
"0.5292938",
"0.52925795",
"0.5284481"
] | 0.6974392 | 0 |
Check that there are no audio outputs between the start of the trial and the go cue sound onset - 20 ms. | def check_audio_pre_trial(data, audio=None, **_):
if audio is None:
_log.warning("No BNC2 input in function call, retuning None")
return None
s = audio["times"][~np.isnan(audio["times"])] # Audio TTLs with NaNs removed
metric = np.array([], dtype=np.int8)
for i, c in zip(data["intervals"][:, 0], data["goCue_times"]):
metric = np.append(metric, sum(s[s > i] < (c - 0.02)))
passed = metric == 0
assert data["intervals"].shape[0] == len(metric) == len(passed)
return metric, passed | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def test_skipped_already_unsilenced(self):\n self.cog.scheduler.__contains__.return_value = False\n self.cog.previous_overwrites.get.return_value = None\n\n for channel in (MockVoiceChannel(), MockTextChannel()):\n with self.subTest(channel=channel):\n self.assertFalse(await self.cog._unsilence(channel))\n channel.set_permissions.assert_not_called()",
"def test_all():\n try:\n wd = WD('noaa_test.wav', 'noaa_test.png')\n return True\n except:\n return False",
"def test_match_new_aud():\n for ii in range(2):\n assert get_clip(audio['NTF'], log, ii) == get_clip(audlist, unscram_log, ii)",
"def quick_check(self):\n #loop three times and moce the servo \n for ang in range(self.MIDPOINT - 115, self.MIDPOINT+116, 115):\n self.servo(ang)\n time.sleep(.05)\n if self.read_distance() < self.SAFE_DISTANCE:\n return False\n #if the three-part check didn't freak out\n return True",
"def checkendsilence(inputgiven):\n output = getlastslice(inputgiven)\n wave_file = wave.open(output, \"r\")\n for i in range(wave_file.getnframes()):\n current_frame = wave_file.readframes(1)\n unpacked_signed_value = struct.unpack(\"<h\", current_frame)\n if abs(unpacked_signed_value[0]) > 500:\n return False\n return True",
"def check_stimOn_goCue_delays(data, **_):\n # Calculate the difference between stimOn and goCue times.\n # If either are NaN, the result will be Inf to ensure that it crosses the failure threshold.\n metric = np.nan_to_num(data[\"goCue_times\"] - data[\"stimOn_times\"], nan=np.inf)\n passed = (metric < 0.01) & (metric > 0)\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def check_goCue_delays(data, **_):\n metric = np.nan_to_num(data[\"goCue_times\"] - data[\"goCueTrigger_times\"], nan=np.inf)\n passed = (metric <= 0.0015) & (metric > 0)\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def test_no_audio_no_features():\n # This file doesn't exist\n no_audio_file_struct = FileStruct(\"fixtures/caca.mp3\")\n feat_type = FeatureTypes.framesync\n with raises(NoAudioFileError):\n CQT(no_audio_file_struct, feat_type, sr=11025).features",
"def audio_event_detection(self):\n # Test if trials already exist\n if 'TimeIntervals_speaker' not in self.model.nwb.intervals:\n # Test if file contains audio signals\n if any(name in self.model.nwb.stimulus for name in ['speaker1', 'speaker2']):\n AudioEventDetection(parent=self)\n else:\n NoAudioDialog()\n else:\n ExistIntervalsDialog()",
"def test_no_audio():\n # This file doesn't exist\n no_audio_file_struct = FileStruct(\"fixtures/chirp_noaudio.mp3\")\n no_audio_file_struct.features_file = \"features/chirp_noaudio.json\"\n feat_type = FeatureTypes.framesync\n CQT(no_audio_file_struct, feat_type, sr=22050).features\n assert (os.path.isfile(no_audio_file_struct.features_file))\n with open(no_audio_file_struct.features_file) as f:\n data = json.load(f)\n assert(CQT.get_id() in data.keys())",
"def _check_for_noise(self) -> None:\n safety_stop = 5\n while self._has_noise() and safety_stop > 0:\n self.filter(size=3)\n safety_stop -= 1",
"def test_time_supp_length_matches_no_timesteps(self):\n for no_timesteps in [5, 578, 993, 300072]:\n for dt in [0.1, 0.5, 3.0]:\n test_rec = rt.Recording(np.empty([6, no_timesteps, 1]), dt=dt)\n self.assertEqual(\n len(test_rec.time_supp),\n no_timesteps,\n 'Expected length of time_supp {} to match no_timesteps of '\n 'input {}.'.format(len(test_rec.time_supp), no_timesteps),\n )",
"async def test_silenced_voice_channel_full(self):\n self.assertTrue(await self.cog._set_silence_overwrites(self.voice_channel, kick=True))\n self.assertFalse(self.voice_overwrite.speak or self.voice_overwrite.connect)\n self.voice_channel.set_permissions.assert_awaited_once_with(\n self.cog._verified_voice_role,\n overwrite=self.voice_overwrite\n )",
"def testGenerateSamplesMeasureNotCalled(self):\n timer = timing_util.IntervalTimer()\n self.assertEqual(timer.intervals, [])\n samples = timer.GenerateSamples()\n self.assertEqual(timer.intervals, [])\n self.assertEqual(samples, [])",
"def check_errorCue_delays(data, **_):\n metric = np.nan_to_num(data[\"errorCue_times\"] - data[\"errorCueTrigger_times\"], nan=np.inf)\n passed = ((metric <= 0.0015) & (metric > 0)).astype(float)\n passed[data[\"correct\"]] = metric[data[\"correct\"]] = np.nan\n assert data[\"intervals\"].shape[0] == len(metric) == len(passed)\n return metric, passed",
"def test(self):\n winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS)\n \n pulses=1000*3\n winsound.Beep(200, 1000) # .Beep(1650Hz, (XXXXms)) #e.g 1000ms=1second\n self.run(pulses); self.run(pulses, ANTI_CLK_W)\n sleep(1)\n\n winsound.Beep(400, 1000)\n self.swing(128, count=30); self.stop() #0.9 degrees\n sleep(1)\n\n winsound.Beep(800, 1000)\n print('Testing I.....')\n self.swing(32, count=120); self.stop() #0.225 degrees \n sleep(1)\n\n winsound.Beep(1600, 1000)\n print('Testing II.....')\n self.swing(2, count=1800); self.stop() #0.05625 degrees\n \n winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS)\n print(' Testings Done! ')\n return self.stop() #set low before exist ",
"def test_wrong_ann_frame_times():\n my_file_struct = FileStruct(os.path.join(\"fixtures\", \"chirp.mp3\"))\n my_file_struct.features_file = os.path.join(\"features\", \"no_file.json\")\n cqt = CQT(my_file_struct, FeatureTypes.ann_beatsync, sr=11025)\n with raises(FeatureTypeNotFound):\n cqt.frame_times",
"def quick_check(self):\n # loop three times and move the servo\n for ang in range(self.MIDPOINT - 100, self.MIDPOINT + 101, 100):\n self.servo(ang)\n time.sleep(.01)\n if self.read_distance() < self.SAFE_DISTANCE:\n return False \n # if the three-part check didn't freak out\n return True",
"def test_measure_nondeterministic_without_sampling(self):\n shots = 2000\n circuits = ref_measure.measure_circuits_nondeterministic(allow_sampling=False)\n targets = ref_measure.measure_counts_nondeterministic(shots)\n job = execute(circuits, QasmSimulator(), shots=shots)\n result = job.result()\n self.is_completed(result)\n self.compare_counts(result, circuits, targets, delta=0.05 * shots)",
"def check_peak_win(self):\n if self.peak_win[0] < 0.0:\n self.peak_win[0] = 0.0\n if self.logger is not None:\n self.logger.warning(('Start of peak window < 0 sec for cond: {}. ' +\n 'Setting to 0.').format(self.cond))\n if self.peak_win[1] > self.psc_dur:\n self.peak_win[1] = self.psc_dur\n if self.logger is not None:\n logger.warning(('End of peak window is longer than trial HRF ' +\n 'for cond: {}. Truncating.').format(self.cond))\n return",
"def test_measure_nondeterministic_with_sampling(self):\n shots = 2000\n circuits = ref_measure.measure_circuits_nondeterministic(allow_sampling=True)\n targets = ref_measure.measure_counts_nondeterministic(shots)\n job = execute(circuits, QasmSimulator(), shots=shots)\n result = job.result()\n self.is_completed(result)\n self.compare_counts(result, circuits, targets, delta=0.05 * shots)",
"def test_measure_nondeterministic_multi_qubit_without_sampling(self):\n shots = 2000\n qobj = ref_measure.measure_circuits_qobj_nondeterministic(allow_sampling=False)\n qobj.config.shots = shots\n circuits = [experiment.header.name for experiment in qobj.experiments]\n targets = ref_measure.measure_counts_qobj_nondeterministic(shots)\n job = QasmSimulator().run(qobj)\n result = job.result()\n self.is_completed(result)\n self.compare_counts(result, circuits, targets, delta=0.05 * shots)",
"def missing_tests(session):\n print('The following samples do not have tests:')\n for sample in set(ALL_SAMPLE_DIRECTORIES) - set(ALL_TESTED_SAMPLES):\n print('* {}'.format(sample))",
"def test_three_arms_one_unsampled_arm(self):\n self._test_three_arms_one_unsampled_arm()",
"def testZeroPlaybacksIsSufficientlyPlayedBack(self):\n\t\tpolicy = MinimumPlaybackPolicy(0)\n\t\tself.failUnless(policy.hasBeenPlayedBack)",
"async def test_silence_voice(self):\n message = \"This should show up just here.\"\n await self.cog.send_message(message, self.text_channels[0], self.voice_channel, alert_target=False)\n self.text_channels[0].send.assert_awaited_once_with(message)\n self.text_channels[1].send.assert_not_called()",
"def nanny(self): \n while not self.started and not self.failed:\n eventlet.sleep(.1)\n return not self.failed",
"def stop_on_low_ais_ess(trial_id, result):\n return result[\"ais_effective_sample_size\"] < 0.1",
"def test_new_log_diff():\n assert get_clip(audlist, log, 1) != get_clip(audio['NTF'], log, 1)",
"def test_connection(self):\n try:\n if 0 <= self.get_wavelength() <= 10e-6:\n return True\n except Exception:\n print(\"Self test failed.\")\n return False"
] | [
"0.6270315",
"0.60045695",
"0.5891026",
"0.5863397",
"0.57892865",
"0.57889146",
"0.57805943",
"0.57586294",
"0.5748307",
"0.57187366",
"0.57087165",
"0.5685844",
"0.5684214",
"0.5667447",
"0.56195986",
"0.5612378",
"0.5611862",
"0.5586836",
"0.5543327",
"0.55262345",
"0.5487903",
"0.54724324",
"0.5451039",
"0.5433701",
"0.5406621",
"0.540635",
"0.5394377",
"0.5389447",
"0.5378511",
"0.5373404"
] | 0.662603 | 0 |
Check the VTK version. | def vtk_version_ok(major, minor, build):
requested_version = (100 * int(major) + int(minor)) * 100000000 + int(build)
ver = vtkVersion()
actual_version = (100 * ver.GetVTKMajorVersion() + ver.GetVTKMinorVersion()) \
* 100000000 + ver.GetVTKBuildVersion()
if actual_version >= requested_version:
return True
else:
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def vtk_version_ok(major, minor, build):\n needed_version = 10000000000 * int(major) + 100000000 * int(minor) + int(build)\n try:\n vtk_version_number = vtk.VTK_VERSION_NUMBER\n except AttributeError: # as error:\n ver = vtk.vtkVersion()\n vtk_version_number = 10000000000 * ver.GetVTKMajorVersion() + 100000000 * ver.GetVTKMinorVersion() \\\n + ver.GetVTKBuildVersion()\n if vtk_version_number >= needed_version:\n return True\n else:\n return False",
"def _is_version_uptodate(self):\n logging.info(\"Checking tesseract version\")\n cmd = '%s -v' % (self.binary)\n logging.info(cmd) \n try:\n ret_output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)\n except CalledProcessError:\n # Could not run tesseract\n error(self.msgs['TS_MISSING'])\n\n ver_str = '0.0.0'\n for line in ret_output.splitlines():\n if 'tesseract' in line:\n ver_str = line.split(' ')[1]\n if ver_str.endswith('dev'): # Fix for version strings that end in 'dev'\n ver_str = ver_str[:-3]\n\n # Iterate through the version dots\n ver = [int(x) for x in ver_str.split('.')]\n req = [int(x) for x in self.required.split('.')]\n\n # Aargh, in windows 3.02.02 is reported as version 3.02 \n # SFKM\n if str(os.name) == 'nt':\n req = req[:2]\n\n version_good = False\n for i,num in enumerate(req):\n if len(ver) < i+1:\n # This minor version number is not present in tesseract, so it must be\n # lower than required. (3.02 < 3.02.01)\n break\n if ver[i]==num and len(ver) == i+1 and len(ver)==len(req):\n # 3.02.02 == 3.02.02\n version_good = True\n continue\n if ver[i]>num:\n # 4.0 > 3.02.02\n # 3.03.02 > 3.02.02\n version_good = True\n break\n if ver[i]<num:\n # 3.01.02 < 3.02.02\n break\n \n return version_good, ver_str",
"def is_valid_version(self):\n pass",
"def check_version(ctx, _, value):\n if not value or ctx.resilient_parsing:\n return\n\n click.echo(f\"geocube v{importlib.metadata.version('geocube')}\")\n\n ctx.exit()",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_GetVersion(self)",
"def test_version():\n assert(hasattr(tekel, '__version__'))",
"def version_check(self):\n param_name = \"rethink/software_version\"\n sdk_version = settings.SDK_VERSION\n\n # get local lock for rosparam threading bug\n with self.__class__.param_lock:\n robot_version = rospy.get_param(param_name, None)\n if not robot_version:\n rospy.logwarn(\"RobotEnable: Failed to retrieve robot version \"\n \"from rosparam: %s\\n\"\n \"Verify robot state and connectivity \"\n \"(i.e. ROS_MASTER_URI)\", param_name)\n return False\n else:\n # parse out first 3 digits of robot version tag\n pattern = (\"^([0-9]+)\\.([0-9]+)\\.([0-9]+)\")\n match = re.search(pattern, robot_version)\n if not match:\n rospy.logwarn(\"RobotEnable: Invalid robot version: %s\",\n robot_version)\n return False\n robot_version = match.string[match.start(1):match.end(3)]\n if robot_version not in settings.VERSIONS_SDK2ROBOT[sdk_version]:\n errstr_version = \"\"\"RobotEnable: Software Version Mismatch.\nRobot Software version (%s) does not match local SDK version (%s). Please\nUpdate your Robot Software. \\\nSee: http://sdk.rethinkrobotics.com/wiki/Software_Update\"\"\"\n rospy.logerr(errstr_version, robot_version, sdk_version)\n return False\n return True",
"def vF3d_VTK(field,name,VTKformat): \n if VTKformat == 'vtu':\n vf3d_vtu(field,name)\n elif VTKformat == None:\n print 'Please select a VTK format'\n else:\n print 'The selected format has not been developed yet'\n return #nothing, since functions output the written VTK file",
"def _check_version () -> None:\n py_version_info: typing.Tuple = sys.version_info[:2]\n\n if py_version_info < MIN_PY_VERSION:\n error_msg = \"This version of pytextrank requires Python {} or later ({} detected)\\n\"\n raise RuntimeError(error_msg.format(_versify(MIN_PY_VERSION), _versify(py_version_info)))",
"def test_version(self):\n version_instance = get_version('kolibri', __file__)\n self.assertIn(version_instance.major_version, kolibri.__version__)",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_GetVersion(self)",
"def check_version():\n err = \"PaddlePaddle version 1.6 or higher is required, \" \\\n \"or a suitable develop version is satisfied as well. \\n\" \\\n \"Please make sure the version is good with your code.\" \\\n\n try:\n fluid.require_version('1.6.0')\n except Exception as e:\n logger.error(err)\n sys.exit(1)",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_GetVersion(self)",
"def test_versionInfo(self):\n self.assertEqual(\n nevow.__version_info__,\n (nevow.version.major, nevow.version.minor, nevow.version.micro))",
"def check_version():\n reset_flag = False\n try:\n data = du.read_yml(du.DEFAULT)\n if (\n data[\"version\"].split(\".\")[0] != __version__.split(\".\")[0]\n ): # If Version if different from \"1.x.y\" remove data:\n reset_flag = True\n except (KeyError, FileNotFoundError, TypeError):\n reset_flag = True\n\n if reset_flag:\n print(\"Your configuration file version is older than 1.0.0\")\n print(\n \"Your .Experiment file will be removed, please run daf.init to generate an up-to-date file\"\n )\n if os.path.isfile(du.DEFAULT):\n os.remove(du.DEFAULT)\n sys.exit(0)",
"def _check_tt_data_format(ttdata: dict, name: str) -> None:\n formatVersion = ttdata.get(\"formatVersion\", None)\n if not isinstance(formatVersion, str):\n raise TypeError(\n f\"Illegal type '{type(formatVersion).__name__}' instead of 'str' for \"\n f\"formatVersion for instructions in {name}.\"\n )\n if formatVersion != \"1\":\n raise NotImplementedError(\n f\"Unknown formatVersion {formatVersion} for instructions in {name}.\"\n )",
"def verify_ios_versionNumber():\r\n msg = \"\"\r\n try:\r\n 'Getting Version number for IOS '\r\n if g.platform == 'ios':\r\n text_view = ui_controls.text_view(get_obj_identifier('about_versionNumber_lbl'), label=True)\r\n\r\n 'Verifying whether Version number is matching with expected value IOS'\r\n if g.platform == 'ios' and text_view.strip() == g.version_number :\r\n print \"Version number is verified successfully. Expected : %s. Actual : %s\" % (g.version_number,text_view.strip())\r\n else:\r\n if g.platform == 'ios':\r\n print \"Version number is not verified successfully. Expected : %s. Actual : %s\" % (g.version_number, text_view.strip())\r\n return False, msg\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n return True, msg",
"def test_matplotlib_suported_version(self):\r\n min_acceptable_version = (1, 1, 0)\r\n max_acceptable_version = (1, 3, 1)\r\n try:\r\n from matplotlib import __version__ as matplotlib_lib_version\r\n version = matplotlib_lib_version.split('.')\r\n if version[-1].endswith('rc'):\r\n version[-1] = version[-1][:-2]\r\n version = tuple(map(int, version))\r\n pass_test = (version >= min_acceptable_version and\r\n version <= max_acceptable_version)\r\n version_string = str(matplotlib_lib_version)\r\n except ImportError:\r\n pass_test = False\r\n version_string = \"Not installed\"\r\n self.assertTrue(pass_test,\r\n \"Unsupported matplotlib version. Must be >= %s and <= %s , but running %s.\"\r\n % ('.'.join(map(str, min_acceptable_version)),\r\n '.'.join(map(str, max_acceptable_version)), version_string))",
"def check_version_2(dataset):\n\n if float(dataset.get('version')) >= 2.0 \\\n if dataset.get('version') else False:\n return True\n else:\n return False",
"def check_version(client):\n version_number = get_version(client)\n logger.debug('Detected Elasticsearch version %s', \".\".join(map(str, version_number)))\n if version_number >= version_max or version_number < version_min:\n vmin = \".\".join(map(str, version_min))\n vmax = \".\".join(map(str, version_max))\n vnum = \".\".join(map(str, version_number))\n print('Expected Elasticsearch version range > {} < {}'.format(vmin, vmax))\n print('ERROR: Incompatible with version {} of Elasticsearch. Exiting.'.format(vnum))\n sys.exit(1)",
"def checkVersion(self, clientName, edamVersionMajor, edamVersionMinor):\r\n pass",
"def test_version(self):\n self.assertIsInstance(nevow.version, Version)",
"def test_denoiser_supported_version(self):\r\n\r\n pass_test = True\r\n try:\r\n check_flowgram_ali_exe()\r\n except (ApplicationNotFoundError, ApplicationError):\r\n pass_test = False\r\n\r\n self.assertTrue(pass_test,\r\n \"Denoiser flowgram aligner not found or not \"\r\n \"executable. This may or may not be a problem \"\r\n \"depending on which components of QIIME you plan to \"\r\n \"use.\")",
"def check_from_version(version: str) -> str:\n version_int = [int(v) for v in version.split(\".\")]\n if version_int[0] not in PipetteModelMajorVersion:\n raise ValueError(f\"Major version {version_int[0]} is not supported.\")\n if version_int[1] not in PipetteModelMinorVersion:\n raise ValueError(f\"Minor version {version_int[1]} is not supported.\")\n return version",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_GetVersion(self)",
"def is_valid_version(self) -> bool:\n return self._is_valid_version()",
"def test_rtax_supported_version(self):\r\n acceptable_version = [(0, 984)]\r\n self.assertTrue(which('rtax'),\r\n \"rtax not found. This may or may not be a problem depending on \" +\r\n \"which components of QIIME you plan to use.\")\r\n command = \"rtax 2>&1 > %s | grep Version | awk '{print $2}'\" % devnull\r\n proc = Popen(command, shell=True, universal_newlines=True,\r\n stdout=PIPE, stderr=STDOUT)\r\n stdout = proc.stdout.read()\r\n version_string = stdout.strip()\r\n try:\r\n version = tuple(map(int, version_string.split('.')))\r\n pass_test = version in acceptable_version\r\n except ValueError:\r\n pass_test = False\r\n version_string = stdout\r\n self.assertTrue(pass_test,\r\n \"Unsupported rtax version. %s is required, but running %s.\"\r\n % ('.'.join(map(str, acceptable_version)), version_string))",
"def test_major(self):\n self.assertEqual(\"0\", self._version1.major())\n self.assertEqual(\"1.2\", self._version2.major())",
"def test_version(self):\n result = check_output([b\"flocker-reportstate\"] + [b\"--version\"])\n self.assertEqual(result, b\"%s\\n\" % (__version__,))",
"def test_version_exists():\n assert ztm.__version__"
] | [
"0.75072044",
"0.6211863",
"0.61744905",
"0.60805243",
"0.60018027",
"0.5978742",
"0.59628785",
"0.59613186",
"0.595438",
"0.5882819",
"0.5849206",
"0.58227885",
"0.5737467",
"0.5716113",
"0.56421566",
"0.56049746",
"0.55878174",
"0.5585991",
"0.5575074",
"0.5545344",
"0.5543388",
"0.55395687",
"0.5518413",
"0.55125165",
"0.549927",
"0.54973805",
"0.54967356",
"0.5491591",
"0.54907507",
"0.5488945"
] | 0.7449666 | 1 |
Try to read a file from subversion for inclusion in the wiki. | def GoogleCode_ReadSVNFile(wikifier, domain, path, start, end):
gcurl = "http://%s.googlecode.com/svn/trunk/%s" % (domain,path)
fdata = urllib.urlopen(gcurl).readlines()
return gcurl, fdata[start-1:end] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_file_with_svn_and_revision(self):\n self._test_get_file(\n tool_name='Subversion',\n revision='123',\n base_commit_id=None,\n expected_revision='123')",
"def read(fname):\n try:\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n except IOError:\n return \"Simple git management application to be used in Kapsi hosting.\"",
"def svn_fs_file_contents(*args):\r\n return _fs.svn_fs_file_contents(*args)",
"def test_get_file_with_svn_and_base_commit_id(self):\n self._test_get_file(\n tool_name='Subversion',\n revision='123',\n base_commit_id='456',\n expected_revision='123')",
"def _fs_get_file(url, working_dir):\n if not os.path.isabs(url) and working_dir:\n url = os.path.join(working_dir, url)\n\n try:\n with codecs.open(url, 'r', encoding='utf-8') as f:\n return f.read()\n except Exception as e:\n raise ScrBaseException(\"Could not load file from {0}: {1}\".format(url, e))",
"def read_from_file(self, filename: str) -> None:",
"def read(path):",
"def read(*parts):\n with codecs.open(os.path.join(PROJECT, *parts), 'rb', 'utf-8') as f:\n return f.read()",
"def get_contents(base_dir, filename):\n full_path = os.path.join(base_dir, filename)\n if not is_subdir(full_path, base_dir):\n # don't allow breaking away from base_dir\n return None\n\n if os.path.exists(full_path):\n with open(full_path, 'r') as f:\n data = f.read()\n return data\n return None",
"def read_file(path_to_file):\n 8",
"def _load_project(self, _thefile='', _lang=''):\n# messagebox.showwarning('_load_project', 'Entered')\n# def _load_project(self, thefile):\n# \"\"\"loads an existing project (.prj) file,adapting it's contents\n# to the current Simple/Advanced choice\"\"\"\n #set current project label\n if len(_lang) > 0:\n lang = _lang\n else:\n lang = self.selected_lang.get()\n if lang == '':\n lang = 'en-US'\n# messagebox.showwarning('_load_project', \\\n# '_lang=>{}<, lang=>{}<'.format(_lang, lang))\n self.lblProject['text'] = '{} {}'.\\\n format(LOCALIZED_TEXT[lang]['Current Project>'], \\\n self.current_project.get())\n self.update()\n\n linesin = list()\n #thefile comes from current project label or test\n if _thefile:\n thefile = _thefile\n else:\n thefile = ospath.normpath(\\\n self.BibTerm + '/' + self.current_project + '.prj')\n if os.path.exists(thefile):\n filein = codecs.open(thefile, mode='r', encoding='utf-8')\n for aline in filein.readlines():\n # if len(aline.strip()) > 0:\n if aline.strip():\n linesin.extend([aline.strip()])\n filein.close()\n lines = ''.join(linesin)\n self.root = etree.fromstring(lines)\n self.settings = self.root.find(\"settings\")\n self.sf0 = self.settings.find(\"f0\")\n if self.sf0.get('fallback') != self.dict_in.get():\n self.dict_in.set(self.sf0.get('fallback'))\n self.dict_in_changed.set(1)\n else:\n self.dict_in_changed.set(0)\n if self.sf0.get('terms') != self.terms_in.get():\n self.terms_in.set(self.sf0.get('terms'))\n self.terms_in_changed.set(1)\n else:\n self.terms_in_changed.set(0)\n if self.sf0.get('old') != self.terms_in.get():\n self.old_dict.set(self.sf0.get('old'))\n self.old_dict_changed.set(1)\n else:\n self.old_dict_changed.set(0)\n self.sf1 = self.settings.find(\"f1\")\n self.sf2 = self.settings.find(\"f2\")\n self.trout = self.root.find(\"tree\")\n# self.preferred.set(int(self.sf1.attrib['preferred'] == 'True'))\n self.txtPrefChar.delete(0.0, 9999.9999)\n if self.sf1.text != None:\n self.txtPrefChar.insert(9999.9999, self.sf1.text)",
"def test_get_file_exists_with_svn_and_revision(self):\n self._test_get_file_exists(\n tool_name='Subversion',\n revision='123',\n base_commit_id=None,\n expected_revision='123',\n expected_found=True)",
"def do_pull_file(dbsync, bibkey):\n pass",
"def read(self, filename):\n pass",
"def read(self, filename):\n pass",
"def read(*parts):\n here = os.path.abspath(os.path.dirname(__file__))\n with codecs.open(os.path.join(here, *parts), 'r', 'utf-8') as f:\n return f.read()",
"def read_version(setup_file, name, default_value=None, subfolder=None):\n import os\n version_str = default_value\n TOP_DIR = os.path.abspath(os.path.dirname(setup_file))\n if not os.path.exists(TOP_DIR):\n if version_str is None:\n raise FileNotFoundError(\n f\"Unable to find folder {TOP_DIR!r}.\")\n else:\n if subfolder is None:\n init = os.path.join(TOP_DIR, name, '__init__.py')\n else:\n init = os.path.join(TOP_DIR, subfolder, name, '__init__.py')\n looked = []\n with open(init, \"r\") as f:\n line = [_ for _ in [_.strip(\"\\r\\n \") for _ in f.readlines()]\n if _.startswith(\"__version__\")]\n if len(line) > 0:\n looked = line\n version_str = line[0].split('=')[1].strip('\" \\'')\n if version_str is None:\n raise RuntimeError(\n \"Unable to extract version from file %r, \"\n \"interesting lines %r.\" % (init, looked))\n if version_str is None:\n raise RuntimeError(\n \"Unable to extract version from path %r. Content is %r.\" % (\n TOP_DIR, os.listdir(TOP_DIR)))\n return version_str",
"def load_file(filename):\n with open(path.join(PATH_ROOT, filename), \"r\") as in_file:\n return in_file.readlines()",
"def load_file(self, file_path):\n if self.no_update:\n return False\n import urllib\n\n remote_base_url = self._make_git_raw_base_url()\n remote_url = \"%s%s\" % (remote_base_url, file_path)\n self.echo(\"Loading %s from github\" % (file_path))\n web_file = urllib.URLopener()\n web_file.retrieve(remote_url, file_path)\n web_file.close()\n return True",
"def read_file(path):\n # Mystery arguments:\n strictness = False\n # Read the string:\n return _iterate_bibtexsource(_bibtex.open_file(path, strictness))",
"def read(self, filename):\n with RavenFileReader(filename) as f:\n line = f.nexttag()\n while line:\n # Begin data type checks\n if self.cleantag(line) == 'SubBasins':\n self.read_subbasins(f)\n elif self.cleantag(line) == 'HRUs':\n self.read_HRUs(f)\n # Next line\n line = f.nexttag()",
"def read(*parts):\n here = os.path.abspath(os.path.dirname(__file__))\n with codecs.open(os.path.join(here, *parts), \"rb\", \"utf-8\") as f:\n return f.read()",
"def readFromFile(filename):\n raise NotImplementedError",
"def test_subversion_binary_file(host):\n assert host.file(PACKAGE_BINARY).is_file",
"def checkFile_and_return(adistro):\n try:\n if os.path.isfile(adistro.releaseFile):\n return adistro\n except IOError:\n return None",
"def test_readfile(self):\n fname = os.path.join(self.datadir, 'monol_testA_E3-50_rebin4_gti') + \\\n HEN_FILE_EXTENSION\n command = \"{0}\".format(fname)\n\n hen.io.main(command.split())",
"def test_get_infile(self):\r\n pass # not practically testable, but obvious file I/O\r",
"def read_remote_file(org, repo, filename):\n import fsspec\n fs = fsspec.filesystem('github', org=org, repo=repo)\n\n with fs.open(filename) as f:\n data = loads(f.read())\n\n return data",
"def _file_load(path):\n _, _, file_path = path.split('/', 2)\n with open(file_path, 'r', encoding='utf-8') as file_handle:\n return file_handle.read()",
"def read_file(root, file_name, file_type='t'):\r\n\r\n page_dir = handle_path(main_directory + '/' + root + '/' + file_name)\r\n\r\n try:\r\n # if type != 'b' and type != 't':\r\n # raise\r\n with open(page_dir, 'r' + file_type) as page_reader:\r\n return str(page_reader.read())\r\n except FileNotFoundError:\r\n print(\"The file the user requested doesn't exist\")\r\n raise_http_error(\"Not Found\")\r\n except OSError:\r\n print(\"The server couldn't get the page file\")\r\n raise_http_error(\"Internal Server Error\")"
] | [
"0.6046934",
"0.5708818",
"0.5687152",
"0.56721485",
"0.564001",
"0.5629846",
"0.55640376",
"0.5557791",
"0.5525044",
"0.5476475",
"0.5451403",
"0.5451343",
"0.5430152",
"0.5422481",
"0.5422481",
"0.53963757",
"0.5382174",
"0.5358687",
"0.53586805",
"0.53561574",
"0.53425014",
"0.5330644",
"0.5321996",
"0.53210557",
"0.5318453",
"0.5292515",
"0.52807605",
"0.52791995",
"0.5277815",
"0.5275896"
] | 0.60082895 | 1 |
Calculate the distance between the weights vector of the node and a given vector. | def get_distance(self, vec):
sum = 0
if len(self.weights) == len(vec):
for i in range(len(vec)):
sum += (self.weights[i] - vec[i]) * (self.weights[i] - vec[i])
return np.sqrt(sum)
else:
sys.exit("Error: dimension of nodes != input data dimension!") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def distance(v: Vector, w: Vector) -> float:\n return magnitude(subtract(v, w))",
"def distance(v, w):\n return magnitude_of_vector(vector_subtract(v, w))",
"def vector_dist(v, w):\n if isinstance(v, list):\n v = np.asarray(v)\n return vector_mag(v - w)",
"def distance(v, w):\n\treturn magnitude(vector_subtract(v, w))",
"def _vector_dist(self, vec1, vec2):\r\n return sqrt(sum([(float(v1) - float(v2)) ** 2 for v1, v2 in\r\n zip(vec1, vec2)]))",
"def node_distance(self, inputs):\n tmp = 0\n for i in len(self.inputs):\n tmp += np.power(data[i] - self.weights[i], 2)\n return np.sqrt(tmp)",
"def distance1(v: Vector, w: Vector) -> float:\n return math.sqrt(squared_distance(v, w))",
"def distance(self, other_vector):\n return self.distance_sq(other_vector) ** 0.5",
"def squared_distance(v: Vector, w: Vector) -> float:\n return sum_of_squares(subtract(v, w))",
"def test_distances_with_vector_input(self):\n input_vector = self.vectors['dog.n.01']\n distances = self.vectors.distances(input_vector, ['mammal.n.01', 'dog.n.01'])\n self.assertTrue(np.allclose(distances, [4.5278745, 0]))\n\n distances = self.vectors.distances(input_vector)\n self.assertEqual(len(distances), len(self.vectors.vocab))\n self.assertTrue(np.allclose(distances[-1], 10.04756))",
"def distance_to(self, v: Vector) -> float:\n return math.fabs(self.signed_distance_to(v))",
"def compute_distance(node1, node2):\n return np.linalg.norm(node1 - node2)",
"def squared_distance(v, w):\n return sum_of_squares(vector_subtraction(v, w))",
"def squared_distance(v, w):\n\treturn sum_squares(vector_subtract(v, w))",
"def squared_distance(v, w):\n return sum_of_squares(vector_subtract(v, w))",
"def distance(self, u, v):\n # Implement the distance function between vectors u and v]\n # Note: you can also think of this as computing a similarity measure\n # Use of cosine similarity measure, assumes u and v have equal length\n num = np.dot(u,v)\n # den_u = np.sum(u**2)\n # den_v = np.sum(v**2)\n den_u = np.linalg.norm(u)\n den_v = np.linalg.norm(v)\n if den_u == 0.0 or den_v == 0.0:\n return 0.0\n # return num / (math.sqrt(den_u) * math.sqrt(den_v))\n return num / (den_u * den_v)",
"def _distance_from_weights(self, data):\n input_data = array(data)\n weights_flat = self._weights.reshape(-1, self._weights.shape[2])\n input_data_sq = power(input_data, 2).sum(axis=1, keepdims=True)\n weights_flat_sq = power(weights_flat, 2).sum(axis=1, keepdims=True)\n cross_term = dot(input_data, weights_flat.T)\n return sqrt(-2 * cross_term + input_data_sq + weights_flat_sq.T)",
"def compute_distance (uVector, uOther):\n ## since each element can be either 0 or 1,\n ## no need for square roots and pow\n d = 0\n for i in range (len(uVector)):\n d = d + math.pow((int(uVector [i]) - int(uOther [i])), 2)\n\n return d",
"def signed_distance_to(self, v: Vector) -> float:\n return self._normal.dot(v) - self._distance_from_origin",
"def euclidean_distance(vec):\n\n x, y = vec\n distance = tf.math.sqrt(tf.math.reduce_sum(tf.math.square(x - y), axis=-1, keepdims=True))\n return distance",
"def distance_metric(u, v):\n if len(u) != len(v):\n raise Exception(\n \"Distance metric not valid for differently sized vectors\")\n sum = 0.\n for i in range(len(u)):\n sum += ((u[i] - v[i]) ** 2)\n return math.sqrt(sum)",
"def distance_to(self, target_pos):\n v = self.vector_to(target_pos)\n return norm(v)",
"def distance(self, vec1, vec2):\n regular_type = self.dict_paras['regular_type']\n if regular_type == 1: # L1-norm\n out = tf.reduce_sum(tf.abs(tf.sub(vec1, vec2)), 1)\n elif regular_type == 2: # L2-norm\n out = tf.reduce_sum(tf.square(tf.sub(vec1, vec2)), 1)\n else: # cosine similarity\n vec1_norm = tf.nn.l2_normalize(vec1, 1)\n vec2_norm = tf.nn.l2_normalize(vec2, 1)\n vec_mul = tf.mul(vec1_norm, vec2_norm)\n out = tf.reduce_sum(vec_mul, 1)\n return out",
"def wedge_distance(u, v):\n n_it = np.size(u)\n sum = 0\n for i in range(1, n_it):\n for j in range(i):\n sum += np.abs(u[i] * v[j] - u[j] * v[i]) ** 2\n return sum",
"def dist(v1, v2):\n return ( (v1[0] - v2[0])**2 + (v1[1] - v2[1])**2 )**0.5",
"def distance(self, u, v):\n numerator = np.dot(u,v)\n denominator = np.linalg.norm(u) * np.linalg.norm(v)\n similarity = numerator/(denominator +1e-7)\n return similarity",
"def dot(v: Vector, w: Vector) -> float:\n assert len(v) == len(w), \"vectors must be same length\"\n\n return sum(v_i * w_i for v_i, w_i in zip(v, w))",
"def dot(v: Vector, w: Vector) -> float:\n assert len(v) == len(w), 'vectors must be the same length'\n\n return sum(v_item * w_item for v_item, w_item in zip(v, w))",
"def __dist(u, v):\n return spatial.distance.euclidean(u, v)",
"def euclidean_distance(vector_1: Vector, vector_2: Vector) -> VectorOut:\n return np.sqrt(np.sum((np.asarray(vector_1) - np.asarray(vector_2)) ** 2))"
] | [
"0.7776724",
"0.7519333",
"0.7443472",
"0.7413635",
"0.72083104",
"0.705537",
"0.6956175",
"0.6888227",
"0.6851769",
"0.66734695",
"0.6631523",
"0.6563728",
"0.65633744",
"0.6544231",
"0.6533643",
"0.6511806",
"0.6494287",
"0.6481558",
"0.6432434",
"0.64151883",
"0.64133346",
"0.63910115",
"0.6357026",
"0.63533187",
"0.6351764",
"0.63445294",
"0.629528",
"0.6292373",
"0.6290047",
"0.62851256"
] | 0.8128836 | 0 |
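Illustrative note (not part of the record data above): the get_distance document computes the Euclidean distance between a node's weight vector and an input vector with an explicit loop. The sketch below shows the same quantity computed with NumPy vectorization; the Node class and its weights attribute here are hypothetical stand-ins, not the original source.

# Hedged sketch: vectorized equivalent of the looped Euclidean distance above.
# "Node" and its "weights" attribute are illustrative assumptions only.
import numpy as np

class Node:
    def __init__(self, weights):
        self.weights = np.asarray(weights, dtype=float)

    def get_distance(self, vec):
        vec = np.asarray(vec, dtype=float)
        if self.weights.shape != vec.shape:
            raise ValueError("dimension of node weights != input data dimension")
        # Same quantity as the explicit loop: sqrt(sum((w_i - v_i)^2))
        return float(np.sqrt(np.sum((self.weights - vec) ** 2)))

# Example usage
node = Node([0.2, 0.5, 0.9])
print(node.get_distance([0.1, 0.4, 1.0]))  # ~0.1732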
The Game Object that was added. | def added_game_object(self) -> GameObject:
return self._added_game_object | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add(self, game_obj):\r\n self.game_objects_for_adding.append(game_obj)",
"def added_game_object_id(self) -> int:\n return CommonObjectUtils.get_object_id(self.added_game_object)",
"def added_object_guid(self) -> int:\n return CommonObjectUtils.get_object_guid(self.added_game_object)",
"def obj(self):\r\n return self._obj",
"def object(self):",
"def get_object(self):\n return self._object",
"def addObject(self,object):\n object.screen = self.screen\n object.parent = self\n self.addList.append(object)",
"def obj(self) -> object:\n pass",
"def object(self):\n return self._object",
"def remove(self, game_obj):\r\n self.game_objects_for_removal.append(game_obj)",
"def obj(self):\n return self._obj",
"def get_object(self):\n if getattr(self, 'current_instance', None):\n ret = self.current_instance\n else:\n ret = super().get_object()\n return ret",
"def get_display_object(self):\n if len(self.objects) == 0:\n return self.tile.get_display_object()\n else:\n return self.objects[-1].get_display_object()",
"def get_object(self):\n return self.request.user.player",
"def add_object(self, obj): # DEFINE OBJ!\n obj.spritesheet_width = self.spritesheet.size['width']\n obj.spritesheet_height = self.spritesheet.size['height']\n \n obj._layer_added(self)\n \n\n obj.buffer_index = len(self.objects)\n self.objects.append(obj)\n\n x = obj.x\n y = obj.y\n \n self.verts.extend(((x, y, 0.0), (x+obj.width, y, 0.0), (x+obj.width, y-obj.height, 0.0), (x, y-obj.height, 0.0)))\n self.texcoords.extend(obj.uv_texture)\n self.norms.extend(((0, 0, -1), (0, 0, -1), (0, 0, -1), (0, 0, -1)))\n\n if pi3d.PLATFORM == pi3d.PLATFORM_PI:\n self.inds.append((self.a,self.b,self.c))\n self.inds.append((self.d,self.a,self.c))\n else:\n self.inds.extend((self.a,self.b,self.c))\n self.inds.extend((self.d,self.a,self.c))\n\n self.a += 4\n self.b += 4\n self.c += 4\n self.d += 4\n\n \n #~ return len(self.sprites)-1",
"def obj(self):\n if not self._obj:\n self._get()\n return self._obj",
"def register_game_object(self, game_object):\n game_object.game_engine = self\n self.game_objects.append(game_object)",
"def _vbe6_newobject(self, event):\n this = event[\"arguments\"][\"this\"]\n object_name = event[\"arguments\"][\"object_name\"]\n\n self.vbe6_ptrs[this] = object_name",
"def make_game(self):\n game = Game(self.data['gamename'])\n self.game = game\n return game",
"def add_drawable(self, gameObject):\r\n if not self.sprite_group.has(gameObject):\r\n self.sprite_group.add(gameObject)",
"def new_object(self):\r\n\t\tpass",
"def __init__(self):\n GameObject.__init__(self)\n\n # private attributes to hold the properties so they appear read only\n self._client_type = \"\"\n self._creatures = []\n self._lost = False\n self._name = \"Anonymous\"\n self._opponent = None\n self._reason_lost = \"\"\n self._reason_won = \"\"\n self._time_remaining = 0\n self._total_health = 0\n self._won = False",
"def addObject(self, item, row, column, gameGrid=None):\n if not gameGrid:\n gameGrid = self.gameGrid\n if row > self.rows-1 or row < 0 or column > self.columns-1 or column < 0:\n print \"addObject could not add %s: \\\n Location out of bounds\" % str(item)\n return None\n gameGrid.setItem(item, row, column)",
"def add_to_world(self, thing):\n\t\tthing.set_world_info(self.current_id, self)\n\t\tself.gameObjects.append(thing)\n\t\tself.current_id += 1",
"async def async_added_to_opp(self):\n self.opp.data[DOMAIN][\"entities\"][\"scene\"].append(self)",
"def get_object_to_run(self):",
"def add_object(self, obj):\n\t\tself.objects.append(obj)",
"def load_game_object(self, g):\n for gC in g.children:\n self.load_game_object(gC)\n if g.has_component(collider.Collider):\n c = g.get_component(collider.Collider)\n left = c.lowerP[0]\n top = c.upperP[1]\n width = abs(c.upperP[0] - c.lowerP[0])\n height = abs(c.upperP[1] - c.lowerP[1])\n r = pygame.Rect(left, top, width, height)\n self.rects[(r.x, r.y, r.w, r.h)] = g.name\n self.backw_rects[g.name] = r",
"def get_object ( self, object ):\n return object",
"def __current_object__(self):\n return self.__lookup()"
] | [
"0.6893105",
"0.67863286",
"0.6558083",
"0.60798115",
"0.6059226",
"0.60259646",
"0.60233736",
"0.6021916",
"0.5961065",
"0.5930729",
"0.5921287",
"0.5852365",
"0.58480656",
"0.58340657",
"0.5832652",
"0.5820273",
"0.5774459",
"0.5736312",
"0.571463",
"0.5690474",
"0.56496143",
"0.56327146",
"0.5603855",
"0.5578611",
"0.55684364",
"0.55446124",
"0.55246776",
"0.55221546",
"0.55218107",
"0.5513936"
] | 0.8658577 | 0 |
The decimal identifier of the Game Object that was added. | def added_game_object_id(self) -> int:
return CommonObjectUtils.get_object_id(self.added_game_object) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def added_object_guid(self) -> int:\n return CommonObjectUtils.get_object_guid(self.added_game_object)",
"def GetID(self):\n return hex(id(self()))",
"def id(self):\n # Might also be a first 12-characters shortcut.\n return self._id",
"def dot_id(self):\n return u\"{0}_{1}\".format(\n Concept.d_clean(self.dot_printname()), str(id(self))[-4:])",
"def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id,\n \"sensor_object_count\",\n f\"{self._cam_name}_{self._obj_name}\",\n )",
"def number(self) -> int:\n return self._id",
"def get_id(self): # real signature unknown; restored from __doc__\n return \"\"",
"def identifier(self):\r\n return self.id",
"def label_id(self):\n return int(self.instance_id // 1000)",
"def obj_id(self) -> int:\n return int(self.index.split(\"/\")[-1]) if self.index else None",
"def unique_id(self):\n return f\"octopus_energy_gas_{self._serial_number}_{self._mprn}_previous_accumulative_cost_override_tariff\"",
"def getMcnpId(self):\n return \"{0:d}000\".format(self.z)",
"def get_id(self):\n #return self.__str__().__hash__()\n object_type = self['object_type']\n shortname = self.get_description()\n object_name = self['name']\n filename = self['filename']\n id = \"%s-%s-%s-%s\" % ( object_type, shortname, object_name, filename)\n import md5\n return md5.new(id).hexdigest()\n return id",
"def getID():",
"def get_id(self):\n return self.name",
"def getId(self):\n AgentInventory.__idCounter__ += 1\n return AgentInventory.__idCounter__",
"def get_identifier(self):",
"def identifier(self):\n return self.__id",
"def getId(self):\n return '%s%08X' % (self.id,self.index)",
"def getId(self):\n return _libsbml.GraphicalObject_getId(self)",
"def get_id(self):\n if self.mlat:\n return f'm{-self.mlat}_{self.mlng}'\n else:\n return f'{-self.clat}_{self.clng}'",
"def myID() -> np.int:\r\n return 304976335",
"def myID() -> np.int:\r\n return 304976335",
"def getGraphicalObjectId(self):\n return _libsbml.TextGlyph_getGraphicalObjectId(self)",
"def getIdent (self) :\n return self.id",
"def get_id(self):\n \"\"\"Requires use of Python 3\"\"\"\n return str(self.id)",
"def object_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"object_id\")",
"def object_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"object_id\")",
"def id(self):\n return \"{model:s}--{serial:08x}\".format(model=self.model.replace('-',''), serial=self.serial_number).lower()",
"def object_id(self) -> str:\n return self._event.get('object_id')"
] | [
"0.72008175",
"0.67034495",
"0.6595657",
"0.6433382",
"0.6414539",
"0.63876456",
"0.6380759",
"0.637583",
"0.63683313",
"0.6359994",
"0.6348633",
"0.63423586",
"0.633373",
"0.6327065",
"0.6312481",
"0.6284648",
"0.62755895",
"0.6267277",
"0.62603825",
"0.6254732",
"0.6250296",
"0.6249532",
"0.6249532",
"0.6249394",
"0.62219757",
"0.62154144",
"0.6212527",
"0.6212527",
"0.6206226",
"0.62001145"
] | 0.74052256 | 0 |
The guid identifier of the Game Object that was added. | def added_object_guid(self) -> int:
return CommonObjectUtils.get_object_guid(self.added_game_object) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def added_game_object_id(self) -> int:\n return CommonObjectUtils.get_object_id(self.added_game_object)",
"def guid(self):\n return self._guid",
"def guid(self) -> str:\n return pulumi.get(self, \"guid\")",
"def guid(self) -> str:\n return pulumi.get(self, \"guid\")",
"def guid(self):\n _, _, _, guid, _ = RPR.GetSetMediaItemTakeInfo_String(\n self.id, 'GUID', 'stringNeedBig', False\n )\n return guid",
"def unique_id(self):\n return f\"{DOMAIN}_{self._cam_name}_{self._obj_name}_snapshot\"",
"def getguid(self):\n self.guidp += 1\n return self.guidp-1",
"def GetID(self):\n return hex(id(self()))",
"def object_id(self) -> str:\n return self._event.get('object_id')",
"def unique_id(self):\n return self.properties.get(\"UniqueId\", None)",
"def unique_id(self):\n return self._uuid",
"def unique_id(self):\n return self._uuid",
"def unique_id(self) -> str:\n return f\"{self.entry_id}_{self.module_id}_{self.data_id}\"",
"def unique_id(self) -> str:\n return get_frigate_entity_unique_id(\n self._config_entry.entry_id,\n \"sensor_object_count\",\n f\"{self._cam_name}_{self._obj_name}\",\n )",
"def unique_id(self):\n return self._id",
"def unique_id(self):\n return self._id",
"def get_objectID(self):\n return self.collection.uuid",
"def guid():\n return _guid64()",
"def unique_id(self):\n return self._uid",
"def unique_id(self):\n return self._uid",
"def unique_id(self):\n return self._uid",
"def get_objectID(self):\n return self.resource.uuid",
"def unique_id(self) -> str:\n return self._uid",
"def getUniqueID(self):\n return self.unique_id",
"def unique_id(self):\n return (\n \"a80f3d5b-df3d-4e38-bbb7-1025276830cd\"\n )",
"def id(self):\n # Might also be a first 12-characters shortcut.\n return self._id",
"def unique_id(self):\n return self._light.address",
"def identifier(self):\n return self.__id",
"def get_id(self):\n return self.name",
"def id(self):\n return id(self._component)"
] | [
"0.7552691",
"0.7443691",
"0.7353285",
"0.7353285",
"0.69248873",
"0.6919849",
"0.6838233",
"0.68214774",
"0.6788781",
"0.6781373",
"0.6761098",
"0.6761098",
"0.6756859",
"0.67415684",
"0.6731418",
"0.6731418",
"0.6710318",
"0.67052126",
"0.6645539",
"0.6645539",
"0.6645539",
"0.6634344",
"0.6629039",
"0.66201735",
"0.6619411",
"0.65901667",
"0.65789646",
"0.65527034",
"0.65462625",
"0.6543312"
] | 0.8549839 | 0 |
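Illustrative note (not part of the record data above): the three preceding records (added_game_object, added_game_object_id, added_object_guid) all expose read-only properties on an event-data object. The sketch below shows that pattern in a self-contained form; the GameObject stand-in and its id/guid fields are assumptions and do not reproduce the original CommonObjectUtils API.

# Hedged sketch of the read-only event-data pattern used by the three records
# above. GameObject, its id/guid fields, and the event class are illustrative.
class GameObject:
    def __init__(self, object_id, guid):
        self.id = object_id      # decimal instance identifier
        self.guid = guid         # definition guid

class ObjectAddedEvent:
    def __init__(self, added_game_object):
        self._added_game_object = added_game_object

    @property
    def added_game_object(self):
        return self._added_game_object

    @property
    def added_game_object_id(self):
        return self._added_game_object.id

    @property
    def added_object_guid(self):
        return self._added_game_object.guid

# Example usage
event = ObjectAddedEvent(GameObject(object_id=12345, guid=0x1A2B3C))
print(event.added_game_object_id, hex(event.added_object_guid))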
Get the current voltage. | def voltage(self):
return self._voltage | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_voltage(self):\n self._raise_not_implemented()",
"def voltage(self):\n return self.outputValue()",
"def get_voltage(self):\n return self.environment.get_voltage(self.neuron_id)",
"def get_voltage(self):\n print(\"voici le voltage de la batterie\")",
"def voltage(self) -> int:\n return int(self._device_info[\"CurrentVoltage\"])",
"def get_voltage(self):\n status = self.get_status_response()\n volts = status[20] + (status[21] * 0x100) + (status[22] * 0x10000) + (status[23] * 0x1000000)\n volts = float(volts)\n volts /= (1000.0 * 1000.0)\n return volts\n #end get_voltage",
"def get_voltage_and_current(self):\n return self.voltage_and_current",
"def fetch(self):\n return read_voltage()",
"def get_voltage(self):\n summary = \" \".join(self.get_summary().split())\n pattern = '\\$.... .. (.*?) .*? .*? .*? .*? . .*? .*? . . . .*?'\n voltage = float(re.findall(pattern,summary).pop())\n return voltage",
"def reference_voltage(self) -> float:\n return self._ref_voltage",
"def ac_voltage(self):\n return float(self.get_ac_voltage())",
"def _voltage_get(self):\n #print \"latest=%s\" % self.connection.latestResults\n if self.channelNumber in self.connection.latestResults:\n return self.connection.latestResults[self.channelNumber]\n else:\n return -999",
"def get_setVoltage(self):\n self.read(\":VOLT?\")",
"def read_actual_voltage(self):\n function_string = 'V' + self.output + 'O?'\n value_string = self.scpi_comm(function_string)\n LOGGER.warn(value_string)\n time.sleep(0.1) # This might only be necessary on LAN interface\n try:\n value = float(value_string.replace('V', ''))\n except ValueError:\n value = -999999\n return value",
"def raw_ldr_voltage(self) -> int:\n self._update_analog_value_cache()\n return self.analog_cache.ldr_voltage",
"def getBatteryVoltage(self):\n return self.values[BATTERY_VOLTAGE]",
"def last_voltage(self):\n return self._last_voltage",
"def request_voltage_and_current(self):\n self.voltage_and_current = self.current_sensors.get_channel_voltage_and_current(self.channel)\n return self.voltage_and_current",
"def read_set_voltage(self):\n function_string = 'V' + self.output + '?'\n value_string = self.scpi_comm(function_string)\n try:\n value = float(value_string.replace('V' + self.output, ''))\n except ValueError:\n value = -9997\n return value",
"def get_voltage(self, channel):\n self.check_validity()\n\n channel = int(channel)\n\n return self.ipcon.send_request(self, BrickletIndustrialDualAnalogInV2.FUNCTION_GET_VOLTAGE, (channel,), 'B', 12, 'i')",
"def voltage(self, voltage=None):\n if voltage is not None:\n # set output voltage on all phases\n # self.ts.log_debug('voltage: %s, type: %s' % (voltage, type(voltage)))\n if type(voltage) is not list and type(voltage) is not tuple:\n self.cmd(':AC:SETB:VOLT PERC,%0.1f,%0.1f,%0.1f\\n' % (voltage, voltage, voltage))\n v1 = voltage\n v2 = voltage\n v3 = voltage\n else:\n self.cmd(':AC:SETB:VOLT PERC,%0.1f,%0.1f,%0.1f\\n' % (voltage[0], voltage[0], voltage[0])) # use the first value in the 3 phase list\n v1 = voltage[0]\n v2 = voltage[0]\n v3 = voltage[0]",
"def get_voltage(self, c):\n if 'slot_number' in c.keys():\n slot_number = c['slot_number'] \n yield self.open_comm(c, slot_number)\n yield self.write(c, 'VOLT?')\n voltage = yield self.read(c)\n yield self.close_comm(c, slot_number)\n \n try:\n voltage = float(voltage)\n except(ValueError):\n old_voltage = voltage\n voltage = ''.join([i for i in voltage if (i.isdigit() or i == '-' or i == '.')])\n voltage = float(voltage)\n print ('The value ' + old_voltage + ' was returned by the SIM928, this value was automatically converted to ' + voltage + '.')\n \n value = voltage * units.V\n \n # try:\n # voltage = yield self.query(c,\n # slot_number, \n # \"VOLT?\")\n # try:\n # voltage = float(voltage)\n # except:\n # self.initialize_mainframe(c)\n # voltage = yield self.query(c,\n # slot_number,\n # \"VOLT?\")\n # voltage = float(voltage)\n # except:\n # self.initialize_mainframe(c)\n # voltage = yield self.query(c,\n # slot_number,\n # \"VOLT?\")\n # try:\n # voltage = float(voltage)\n # except:\n # self.initialize_mainframe(c)\n # voltage = yield self.query(c,\n # slot_number,\n # \"VOLT?\")\n # voltage = float(voltage)\n # try:\n # value = voltage * units.V\n # except:\n # self.initialize_mainframe(c)\n # voltage = yield self.query(c,\n # slot_number,\n # \"VOLT?\")\n # voltage = float(voltage)\n # value = voltage * units.V\n # else:\n # raise ValueError(self.no_selection_msg())\n returnValue(value)",
"def read_voltage(self):\n self.write(':FETC?')\n msg = self.read()\n #print ('dmm msg = ', msg)\n v = msg.split(',')[0].rstrip('NVDC').strip()\n if v[-1] == 'R':\n return float(v[:-1])\n else:\n return float(v)",
"def raw_zener_voltage(self) -> int:\n self._update_analog_value_cache()\n return self.analog_cache.zener_voltage",
"def _voltage_get(self, channelNumber):\n #print \"latest=%s\" % self.connection.latestResults\n if channelNumber in self.connection.latestResults:\n return self.connection.latestResults[channelNumber]\n else:\n return scipy.NaN",
"def present_input_voltage(self):\n return self._read(MX_PRESENT_INPUT_VOLTAGE)",
"def voltage(analog_pin):\n return \"%0.2f\" % inVolts(analogRead(analog_pin))",
"def get_voltage_no_load(self):\n return self.__voltage_no_load",
"def get_voltage(self, i_sup, t, *args, **kwargs):\r\n raise NotImplementedError",
"def hp34401a_read_voltage(hp_meter):\n hp_meter.write(\"MEAS:VOLT:DC? DEF,DEF\")\n return float(hp_meter.read())"
] | [
"0.87557864",
"0.8721714",
"0.86113745",
"0.8594014",
"0.8466012",
"0.8349264",
"0.8208592",
"0.8176201",
"0.812972",
"0.7857926",
"0.7801919",
"0.77965105",
"0.7761072",
"0.773627",
"0.76734275",
"0.76537436",
"0.7645603",
"0.7621279",
"0.7468227",
"0.7446886",
"0.73814726",
"0.72876585",
"0.72740674",
"0.7254983",
"0.7245216",
"0.72365874",
"0.7218977",
"0.70258933",
"0.69731015",
"0.6938107"
] | 0.8939401 | 0 |
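Illustrative note (not part of the record data above): the voltage document is a bare property returning a cached value, while several negatives read a voltage from hardware. The sketch below shows one common variant, converting a raw ADC count to volts against a reference voltage; the 12-bit resolution, 3.3 V reference, and read_raw stub are assumptions for illustration only.

# Hedged sketch: a voltage property backed by a raw ADC reading.
# The 12-bit resolution, 3.3 V reference, and read_raw() stub are assumptions.
class VoltageSensor:
    def __init__(self, reference_voltage=3.3, resolution_bits=12):
        self._ref = reference_voltage
        self._max_count = (1 << resolution_bits) - 1

    def read_raw(self):
        # Stand-in for a hardware read; returns a raw ADC count.
        return 2048

    @property
    def voltage(self):
        # Scale the raw count into volts against the reference voltage.
        return self.read_raw() * self._ref / self._max_count

print(f"{VoltageSensor().voltage:.3f} V")  # ~1.650 V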
Computes the thrust force for the given command. | def get_thrust_value(self, command):
return self._gain * numpy.abs(command) * command | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def applyForce(self, F, dT):",
"def thrust(self, evt=None):\n self.dbgprint(\"thrust(%r)\"%evt)",
"def calculer_force_traction(module_young, coefficient_poisson, longueur_fleche,\n longueur_bras, longueur_corde):\n return calculer_ressort(module_young, coefficient_poisson) * \\\n calculer_longueur_deplacement(longueur_fleche, longueur_bras, longueur_corde)",
"def forces(self):\n pass",
"def _compute_gravity_torque(self):\n pass",
"def update_forces(self):\n\n pass",
"def apply_force(self, force, dt):\n dv = force(self, dt)\n if dv != None:\n pos_new = vector_add(self.position, dv)\n self.set_position(pos_new[0], pos_new[1])\n return dv",
"def tare_force(crush):\n tare = hanging_force(crush)\n if abs(tare) >= 0.25:\n set_trace()\n # assert abs(tare) < 0.25, f\"Excessive hanging force detected: {tare:.3f}\"\n crush['Force (N)'] = crush['Force (N)'] - tare\n return crush",
"def findBasicSafeguardedCmd(self, cmd):\n obstacles = list(self.obstacle_map.obstacles_in_memory)\n\n curr_xspeed = abs(self.curr_vel[0])\n if curr_xspeed > 0.25:\n #the faster you are going, the more modification is performed\n front_modifier = 0.5 + 0.5*(self.max_vel - curr_xspeed)\n side_modifier = 0.5 + 0.5*(self.max_vel - curr_xspeed)\n else:\n front_modifier = 1.0\n side_modifier = 1.0\n\n for obs in obstacles:\n new_modifier = 1.0\n if (np.sign(cmd[0])*obs[0] > 0) and (abs(obs[1]) < self.robot.footprint[1][1]):\n dist = abs(obs[0])\n\n if dist < 0.7:\n new_modifier = 0.0\n elif dist > 2.0:\n new_modifier = 1.0\n else:\n new_modifier = (dist/2.0)\n\n front_modifier = min(new_modifier, front_modifier)\n\n for obs in obstacles:\n new_modifier = 1.0\n if (np.sign(cmd[1])*obs[1] > 0) and (abs(obs[0]) < self.robot.footprint[2][0]):\n dist = abs(obs[1])\n\n if dist < 0.50:\n new_modifier = 0.0\n elif dist > 2.0:\n new_modifier = 1.0\n else:\n new_modifier = (dist/2.0)\n\n side_modifier = min(new_modifier, side_modifier)\n rospy.loginfo('Basic Modifiers: ' + str(front_modifier) + ', ' + str(side_modifier))\n\n best_cmd = [front_modifier*cmd[0], side_modifier*cmd[1]]\n return best_cmd",
"def twist_to_rover_command(linear, angular):\n\n if linear > max_speed:\n linear = max_speed\n elif linear < -max_speed:\n linear = -max_speed\n\n if angular > max_angular_speed:\n angular = max_angular_speed\n elif angular < -max_angular_speed:\n angular = -max_angular_speed\n\n linear_speed = linear / max_speed # linear_speed should now be in [-1, 1]\n angular_speed = angular / max_angular_speed # angular_speed should now [-1,1]\n\n linear_val = linear_speed * max_throttle\n angular_val = angular_speed * max_steering\n\n throttle = 0\n steering = 0\n\n if linear_val == 0:\n throttle = abs(angular_val)\n if angular_val < 0:\n steering = 49\n elif angular_val > 0:\n steering = -49\n else:\n throttle = linear_val\n steering = 0\n\n return str(round(throttle)) + ':' + str(round(steering))",
"def update_forces(self):\r\n # update all the functions\r\n self.compute_gravity()\r\n self.compute_tides()\r\n self.compute_centrifugal()\r\n self.compute_coriolis()\r\n\r\n # add together the forces into the summation function\r\n self.forcing.assign(self.ftides+self.gravity +\r\n self.centrifugal+self.coriolis)",
"def force ( r ):\n \n assert r.shape == (n,3), 'Incorrect shape of r'\n\n d = np.zeros_like(r) # Create d vectors (bonds)\n d[1:n,:] = r[1:n,:] - r[0:n-1,:] # Compute d vectors (zero index not used)\n\n # Store C coefficients in a matrix\n # In the general case we would not need to calculate every pair\n # and also we would make use of the symmetry cc[a,b]=cc[b,a]\n cc = np.zeros((n,n),dtype=np.float_) # Create C array (scalar products)\n for a in range(1,n):\n for b in range(1,n):\n cc[a,b]=np.dot(d[a,:],d[b,:]) # Compute C array (zero indices not used)\n\n a = n-1 # For this test there is just one angle\n\n # Here is the potential as a function of cos(theta)\n # For testing we use the simplest form: v= -cos(theta)\n # The notation matches that used in the appendix\n\n prefac = 1.0 / np.sqrt(cc[a,a]*cc[a-1,a-1])\n fac = cc[a,a-1]\n pot = -prefac*fac # This is -cos(theta)\n\n # Here we include the derivative of the potential with respect to cos(theta) in the prefactor\n # For this simple case it is -1, so the forces are simply gradients of cos(theta) as in the text\n f = np.empty_like(r) # Create force array\n fac1 = fac / cc[a,a]\n fac2 = fac / cc[a-1,a-1]\n f[a,:] = -prefac * ( fac1*d[a,:] - d[a-1,:] )\n f[a-1,:] = prefac * ( fac1*d[a,:] - fac2*d[a-1,:] + d[a,:] - d[a-1,:] )\n f[a-2,:] = prefac * ( fac2*d[a-1,:] - d[a,:] )\n\n return pot, f",
"def friction_model():\n return TimeWeakening()",
"def force_wo_scf(self):\n self.report('INFO: run Force theorem calculations')\n\n status = self.change_fleurinp()\n if status:\n return status\n\n fleurin = self.ctx.fleurinp\n\n # Do not copy mixing_history* files from the parent\n settings = {'remove_from_remotecopy_list': ['mixing_history*']}\n\n # Retrieve remote folder from the inputs\n remote = self.inputs.remote\n\n label = 'DMI_force_theorem'\n description = 'The is the force theorem calculation for DMI energy.'\n\n code = self.inputs.fleur\n options = self.ctx.options.copy()\n\n inputs_builder = get_inputs_fleur(code,\n remote,\n fleurin,\n options,\n label,\n description,\n settings,\n add_comp_para=self.ctx.wf_dict['add_comp_para'])\n future = self.submit(FleurBaseWorkChain, **inputs_builder)\n return ToContext(f_t=future)",
"def test_force(self):\n group = hoomd.group.all()\n\n # compute forces\n f = azplugins.restrain.plane(group=group, point=(0,0,0), normal=(1,0,0), k=2.0)\n hoomd.run(1)\n np.testing.assert_array_almost_equal(f.forces[0].force, (-2.,0,0))\n np.testing.assert_array_almost_equal(f.forces[1].force, ( 2.,0,0))\n np.testing.assert_array_almost_equal(f.forces[2].force, ( 6.,0,0))\n self.assertAlmostEqual(f.forces[0].energy, 1.)\n self.assertAlmostEqual(f.forces[1].energy, 1.)\n self.assertAlmostEqual(f.forces[2].energy, 9.)\n np.testing.assert_array_almost_equal(f.forces[0].virial, (-2.,0,0,0,0,0))\n np.testing.assert_array_almost_equal(f.forces[1].virial, (-2.,0,4.,0,0,0))\n np.testing.assert_array_almost_equal(f.forces[2].virial, (12.,0,0,0,0,0))\n\n # change the spring constant\n f.set_params(k=1.0)\n hoomd.run(1)\n np.testing.assert_array_almost_equal(f.forces[0].force, (-1.,0,0))\n np.testing.assert_array_almost_equal(f.forces[1].force, ( 1.,0,0))\n np.testing.assert_array_almost_equal(f.forces[2].force, ( 3.,0,0))\n self.assertAlmostEqual(f.forces[0].energy, 0.5)\n self.assertAlmostEqual(f.forces[1].energy, 0.5)\n self.assertAlmostEqual(f.forces[2].energy, 4.5)\n\n # shift the plane down\n f.set_params(point=(-1,0,0))\n hoomd.run(1)\n np.testing.assert_array_almost_equal(f.forces[0].force, (-2.,0,0))\n np.testing.assert_array_almost_equal(f.forces[1].force, ( 0.,0,0))\n np.testing.assert_array_almost_equal(f.forces[2].force, ( 2.,0,0))\n self.assertAlmostEqual(f.forces[0].energy, 2.0)\n self.assertAlmostEqual(f.forces[1].energy, 0.0)\n self.assertAlmostEqual(f.forces[2].energy, 2.0)\n\n # rotate the plane so that only particle 1 is off the line\n f.set_params(point=(0,0,0), normal=(0,0,1))\n hoomd.run(1)\n np.testing.assert_array_almost_equal(f.forces[0].force, (0,0,0))\n np.testing.assert_array_almost_equal(f.forces[1].force, (0,0,-2))\n np.testing.assert_array_almost_equal(f.forces[2].force, (0,0,0))\n self.assertAlmostEqual(f.forces[0].energy, 0.0)\n self.assertAlmostEqual(f.forces[1].energy, 2.0)\n self.assertAlmostEqual(f.forces[2].energy, 0.0)",
"def _twist_callback(self, cmd):\n self.set_velocity(cmd.linear.x, cmd.angular.z)",
"def set_stress(self) -> None:\n\n c = self.cos()\n s = self.sin()\n transformation_matrix = np.array([-c, -s, c, s], dtype=np.float64)\n nodal_displacements = self.__get_arranged_nodal_displacements()\n self.__stress = (\n self.youngs_modulus # type: ignore\n / self.get_length()\n * (transformation_matrix @ nodal_displacements.T)\n )",
"def set_critical_option_force_command(self, command):\n if command:\n self.critical_option_force_command = command\n else:\n raise ValueError(\"Provide a non-null string\")",
"def execute_instruction(self, command: str, argument: int):\n heading_letter_to_vector_map = {\n \"N\": (-1, 0),\n \"E\": (0, 1),\n \"S\": (1, 0),\n \"W\": (0, -1),\n }\n if command == \"F\":\n self.translate(self.waypoint_vector, argument)\n elif command in heading_letter_to_vector_map.keys():\n vector = heading_letter_to_vector_map[command]\n self.translate_waypoint(vector, argument)\n else:\n self.rotate_waypoint(command, argument)",
"def _thruster_hfs(motions: Dict[str, float]) -> int:\n surge, yaw, sway = motions[\"surge\"], motions[\"yaw\"], motions[\"sway\"]\n\n if surge and yaw:\n\n # If backwards, else forwards\n if surge < CONTROL_NORM_IDLE:\n value = -surge\n else:\n value = yaw\n\n elif surge:\n value = -surge\n\n elif sway:\n value = -sway\n\n elif yaw:\n value = yaw\n\n else:\n value = CONTROL_NORM_IDLE\n\n return Converter._to_thruster_value(value)",
"def make_torque(self):\n def torque_func(m):\n heff = self.field(m)\n total_torque = torque.landau_lifshitz(m, heff, self.damping)\n if self.stt != 0:\n total_torque += torque.slonczewski(m, self.Jc, self.stt)\n return total_torque\n self.torque = torque_func",
"def _compute_aero_torque(self):\n pass",
"def runcmd(self, cmd, *parms):\n cmd = cmd.lower()\n t = time.time()\n ret = self.callbacks.get(cmd, self.unknown_command)(cmd, *parms)\n print \"\\nTime elapsed: %.3fs\" % (time.time() - t)\n return ret",
"def _compute_stabilised_speed(self, thruster_id, error, direction_to_compensate):\n if (thruster_id == \"1\" and direction_to_compensate == \"CW\") or \\\n (thruster_id == \"2\" and direction_to_compensate == \"CCW\"):\n error = -1*error\n return int(self._thrusters_actual_speed[thruster_id] + self._P * error)",
"def update_vehicle_state(self):\n #vel = self.v + self.commands['throttle']/self.m/self.simulation_rate\n\n vel = self.commands['speed']\n steer = self.commands['steering_angle']\n\n if steer > 0.5:\n steer_cmd = 25\n elif steer < -0.5:\n steer_cmd = 185\n else:\n steer_cmd = 100 - 160*steer ##linear\n #steer_cmd = 100 - 640*steer**3 ##cubic\n\n #rospy.logwarn('Velocity command is '+ str(vel))\n # 130 is the lowest vel_cmd that makes the truck move.\n if vel > 12:\n vel_cmd = 161\n elif vel < 0:\n vel_cmd = 0\n else:\n vel_cmd = 3.77*vel + 117\n # rospy.logerr('throttle: ' + str(throttle))\n hw_port.set_command(vel_cmd,steer_cmd,self.vehicle_id)",
"def GetThrustPower(self):\n return _gmat_py.Spacecraft_GetThrustPower(self)",
"def DragCoeff(h,Vc,Temp_m,Thrust,S):\n T,p,rho = isa(h)\n return Thrust/(0.5*rho*VTrue(h,Vc,p,Temp_m)**2*S)",
"def slower(self):\n self.run_command('slower')",
"def handle_func_command(cls, command):\n cmd, _, args, kwargs = command\n\n try: # will work if tensors are wrappers\n\n # Replace all TensorFlow tensor with their child attribute\n # Note that we return also args_type which helps handling case 3 in the docstring\n new_args, new_kwargs, new_type, args_type = hook_args.unwrap_args_from_function(\n cmd, args, kwargs, return_args_type=True\n )\n # This handles case 3: it redirects the command to the appropriate class depending\n # of the syft type of the arguments and returns\n if args_type not in FrameworkTensor:\n return args_type.handle_func_command(command)\n\n # build the new command\n new_command = (cmd, None, new_args, new_kwargs)\n # Send it to the appropriate class and get the response\n response = new_type.handle_func_command(new_command)\n # Put back the wrappers where needed\n response = hook_args.hook_response(cmd, response, wrap_type=args_type)\n except PureFrameworkTensorFoundError: # means that it's not a wrapper but a pure tensor\n\n # Check that the function has not been overwritten\n try:\n # Try to get recursively the attributes in cmd = \"<attr1>.<attr2>.<attr3>...\"\n command = cls.rgetattr(cls, cmd)\n return command(*args, **kwargs)\n except AttributeError:\n pass\n\n # TODO: clean this line\n cmd_split = cmd.split(\".\")\n cmd_path = cmd_split[:-1]\n cmd_name = cmd_split[-1]\n cmd = \"syft.local_worker.hook.\" + \".\".join(cmd_path) + \".native_\" + cmd_name\n\n # Run the native function with the new args\n # Note the the cmd should already be checked upon reception by the worker\n # in the execute_command function\n if isinstance(args, tuple):\n response = eval(cmd)(*args, **kwargs)\n else:\n response = eval(cmd)(args, **kwargs)\n\n return response",
"def calculateForce(self, atom1, atom2):\n # Calculate distance between two atoms\n dx = self.atoms[atom1].x - self.atoms[atom2].x\n dy = self.atoms[atom1].y - self.atoms[atom2].y\n dz = self.atoms[atom1].z - self.atoms[atom2].z\n \n # Minimum Image Convention\n dx -= self.lbox*round(dx/self.lbox)\n dy -= self.lbox*round(dy/self.lbox)\n dz -= self.lbox*round(dz/self.lbox)\n \n r2 = dx*dx + dy*dy + dz*dz\n\n if r2 < self.rcutsq:\n fr2 = (self.sigma**2)/r2\n fr6 = fr2**3\n force = fr6*(fr6 - 0.5)/r2\n pot = fr6*(fr6 - 1)\n \n # Update forces\n self.atoms[atom1].fx += force*dx\n self.atoms[atom2].fx -= force*dx\n self.atoms[atom1].fy += force*dy\n self.atoms[atom2].fy -= force*dy\n self.atoms[atom1].fz += force*dz\n self.atoms[atom2].fz -= force*dz\n \n # Update potentials\n self.atoms[atom1].potential += pot\n self.atoms[atom2].potential += pot"
] | [
"0.5629867",
"0.55524796",
"0.5294417",
"0.52144986",
"0.5205213",
"0.51810914",
"0.51384085",
"0.50823224",
"0.5055878",
"0.49796396",
"0.49717823",
"0.4953122",
"0.49304152",
"0.49190193",
"0.48769048",
"0.48733506",
"0.48713782",
"0.4851697",
"0.48084703",
"0.48044527",
"0.47652596",
"0.4730262",
"0.47094193",
"0.46820408",
"0.46759447",
"0.4628703",
"0.46211642",
"0.46177542",
"0.46046248",
"0.46017694"
] | 0.6190781 | 0 |
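Illustrative note (not part of the record data above): the get_thrust_value document implements the common quadratic thruster model T = gain * |u| * u, which scales quadratically with the command magnitude while preserving its sign. The standalone sketch below reproduces that formula; the gain of 40.0 is an arbitrary illustrative value.

# Hedged sketch of the quadratic thruster model T = gain * |u| * u.
import numpy as np

def thrust_from_command(command, gain=40.0):
    # abs(command) * command preserves the command's sign:
    # +0.5 -> +10.0, -0.5 -> -10.0 with gain = 40.
    return gain * np.abs(command) * command

for u in (-0.5, 0.0, 0.5, 1.0):
    print(u, thrust_from_command(u))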
Get a pydicom.FileDataset from the instance's Orthanc identifier | def get_pydicom(orthanc: Orthanc, instance_identifier: str) -> pydicom.FileDataset:
dicom_bytes = orthanc.get_instances_id_file(instance_identifier)
return pydicom.dcmread(BytesIO(dicom_bytes)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_pydicom(self) -> pydicom.FileDataset:\n return util.get_pydicom(self.client, self.id_)",
"def get_dataset(self, identifier):\n # Test if a subfolder for the given dataset identifier exists. If not\n # return None.\n dataset_dir = self.get_dataset_dir(identifier)\n if not os.path.isdir(dataset_dir):\n return None\n # Load the dataset handle\n return FileSystemDatasetHandle.from_file(\n descriptor_file=os.path.join(dataset_dir, DESCRIPTOR_FILE),\n data_file=os.path.join(dataset_dir, DATA_FILE),\n annotations=DatasetMetadata.from_file(\n self.get_metadata_filename(identifier)\n )\n )",
"def get_by_id(self, id: str) -> \"Dataset\":\n raise NotImplementedError",
"def get_dataset(self, cid, type=\"train\"):\n dataset = torch.load(\n os.path.join(self.path, type, \"data{}.pkl\".format(cid)))\n return dataset",
"def get_dataset(hdf5_data, path_attribute):\n path = getattr(structure, path_attribute)\n dset = hdf5_data.get(path)\n default_name = {\n \"description\": path\n }\n name = str(getattr(structure, path + \"_ATTR\", default_name)[\"description\"])\n check_dataset_type(dset, name=name, location=path)\n return dset",
"def get_dataset(self):\n return datasets.get_dataset(self.dataset_id)",
"def getDataset(filename, dsdict):\n\n dataset = \"\"\n for ds in dsdict.keys():\n if filename in dsdict[ds]:\n dataset = ds\n break\n\n if dataset == \"\":\n tolog(\"!!WARNING!!2999!! Dataset not found for file %s\" % (filename))\n else:\n tolog(\"File %s belongs to dataset/container %s\" % (filename, dataset))\n\n return dataset",
"def get(id: str) -> DataSet:\n pass",
"def get_dataset(name):\n if name == 'cityscapes':\n return Cityscapes",
"def from_pydicom(ds: pydicom.Dataset, fn: str=None, file=None):\n\n meta = {\n 'FileName': fn,\n 'TransferSyntaxUID': ds.file_meta.TransferSyntaxUID,\n 'TransferSyntax': str(ds.file_meta.TransferSyntaxUID),\n 'MediaStorage': str(ds.file_meta.MediaStorageSOPClassUID),\n }\n\n def dictify_ds(ds):\n output = dict()\n\n _ds = ExceptionHandlingIterator(ds)\n\n for elem in _ds:\n if elem.keyword == \"PixelData\":\n continue\n # Deal with that separately\n elif not elem.value or not elem.keyword:\n continue\n elif elem.VR == \"PN\":\n output[elem.keyword] = str(elem.value)\n # print(elem.value)\n elif elem.VM != 1 and elem.VR == 'SQ':\n # elif elem.keyword == \"AdmittingDiagnosesCodeSequence\":\n # print(f\"Diagnosis Code: VM {elem.VM} VR {elem.VR}\")\n output[elem.keyword] = [dictify_ds(item) for item in elem]\n elif elem.VM != 1:\n # print(f\"VM ne 1: VM {elem.VM} VR {elem.VR}\")\n output[elem.keyword] = [item for item in elem]\n elif elem.VR != 'SQ':\n output[elem.keyword] = elem.value\n else:\n output[elem.keyword] = [dictify_ds(item) for item in elem]\n\n # print(output)\n\n return output\n\n tags = dictify_ds(ds)\n # MONOCHROME, RGB etc.\n if (0x0028, 0x0004) in ds:\n tags['PhotometricInterpretation'] = ds[0x0028, 0x0004].value\n\n # logging.debug(pformat(tags))\n\n d = Dixel(meta=meta,\n tags=tags,\n level=DicomLevel.INSTANCES)\n d.simplify_tags()\n\n # TODO: If the creation times are going to be \"now\", use the file creation time instead?\n\n if not d.tags.get(\"PatientID\") and d.tags.get(\"PatientName\"):\n logging.warning(\"Imputing missing PatientID from PatientName\")\n new_id = md5(d.tags.get(\"PatientName\").encode('utf8')).hexdigest()\n d.tags[\"PatientID\"] = new_id\n\n if hasattr(ds, \"PixelData\"):\n # Don't need file, can recreate it\n logging.warning(\"Creating file with new PatientID tag, OID will be valid\")\n ds_edit = ds\n ds_edit.PatientID = new_id\n\n with NamedTemporaryFile() as f:\n ds_edit.save_as(filename=f.name, write_like_original=True)\n file = f.read()\n\n elif not hasattr(ds, \"PixelData\") and file:\n # Read pixels out of file and _then_ recreate it\n logging.warning(\"Loading pixels and creating file with new PatientID tag, OID will be valid\")\n\n ds_edit = pydicom.read_file(BytesIO(file), stop_before_pixels=False)\n ds_edit.PatientID = new_id\n\n with NamedTemporaryFile() as f:\n ds_edit.save_as(filename=f.name, write_like_original=True)\n file = f.read()\n else:\n logging.warning(\"No file to update, OID will be invalid\")\n\n if not d.tags.get('PatientID') or \\\n not d.tags.get('StudyInstanceUID') or \\\n not d.tags.get('SeriesInstanceUID') or \\\n not d.tags.get('SOPInstanceUID'):\n raise DicomFormatError(\"File is missing required tags\")\n\n if file:\n d.file = file\n\n if hasattr(ds, \"PixelData\"):\n d.pixels = ds.pixel_array\n\n return d",
"def get_dataset(self) -> datasets.OpenMLDataset:\n return datasets.get_dataset(self.dataset_id)",
"def get_dataset(id=None, name=None):\n query = db.session.query(Dataset).\\\n filter(\n or_(\n Dataset.id == id,\n Dataset.name == name\n )\n )[0]\n return query",
"def open(self):\n return xr.open_dataset(self)",
"def load(self):\r\n\r\n #Open the dataset read only using GDAL\r\n dataset = gdal.Open(self.inputds, gdal.GA_ReadOnly)\r\n \r\n return dataset\r\n \r\n\r\n #print \"Failed to open %s. Is it a GDAL supported format?\" %(self.inputds)\r",
"def get_dicom_file_content(self) -> bytes:\n return self.orthanc.get_instance_file(self.identifier)",
"def get_dataset(self, dspath):\n\n dataset_id = self.__dataset_json_values.get_dataset(dspath)\n return Dataset(dataset_id, self.__dataset_json_values, self.__facets)",
"def __get_dataset_name(self):\n d = gdal.Open(self.fname)\n # Get band metadata\n b = d.GetRasterBand(1)\n md = b.GetMetadata()\n\n if 'data_var' in md:\n return md['data_var']\n else:\n fnames = d.GetFileList()\n if len(fnames) > 2:\n d = gdal.Open(fnames[1])\n # Get band metadata\n b = d.GetRasterBand(1)\n md = b.GetMetadata()\n if 'data_var' in md:\n return md['data_var']\n else:\n return 'data'\n else:\n return 'data'",
"def get_datasetID(self):\n\t\treturn self.dsDoc['about']['datasetID']",
"def get_dataset(self, dataset_id):\n return self.session.query(self.Dataset).get(dataset_id)",
"def get_dataset(self):\n return",
"def get_dicom_file_content(self) -> bytes:\n return self.client.get_instances_id_file(self.id_)",
"def get_dataset(params):\r\n module_name, class_name = params.dataset.name.rsplit('.', 1)\r\n i = importlib.import_module(module_name)\r\n return getattr(i, class_name)",
"def get_datasetID(self):\n\t\treturn self.prDoc['inputs']['data'][0]['datasetID']",
"def GetDicomFromNode(self,node):\n storageNode=node.GetStorageNode()\n if storageNode is not None: # loaded via drag-drop\n filepath=storageNode.GetFullNameFromFileName()\n else: # loaded via DICOM browser\n instanceUIDs=node.GetAttribute('DICOM.instanceUIDs').split()\n filepath=slicer.dicomDatabase.fileForInstance(instanceUIDs[0])\n Dcm_tag=pydicom.dcmread(filepath)\n return Dcm_tag",
"def set_dataset(self, id, value, name='', attrs={}, dtype=None, compress=False):\n sgd = self.get_sgd(id, name)\n link_info = self.file.extract_link_info(value, None, Dataset)\n path = self.full_path\n ds = Dataset(self.file, sgd, name, path, attrs, self, value, dtype, compress, link_info)\n # self.mstats[id]['created'].append(ds) \n return ds",
"def get_data_by_id(data_id):\n return Data.get_by_id(data_id)",
"def make_dataset(self) -> torch.utils.data.Dataset:\n transform = cnn_utils.ToTensor()\n return cnn_utils.ArtifactDataset(self.stamps, transform)",
"def get_dataset_reference(self, dataset_name):\n\n print_debug(\"Geting dataset :\" + dataset_name)\n dataset = DatasetFactory.get(dataset_file_name=dataset_name)\n return dataset",
"def open_dataset(path: str):\n\n # remove trailing slash:\n if path.endswith(\"/\"):\n path = path[:-1]\n\n if path.endswith(\".zarr.zip\") or path.endswith(\".zarr\"):\n # we can recognise a Zarr dataset by its extension.\n dataset = ZDataset(path)\n elif path.endswith(\"tif\") or path.endswith(\"tiff\"):\n dataset = TIFDataset(path)\n elif exists(join(path, \"stacks\")):\n # we can recognise a ClearControl dataset by the presence of a 'stacks' sub folder.\n dataset = CCDataset(path)\n else:\n raise ValueError(\"Dataset type not recognised, or path incorrect!\")\n\n return dataset",
"def load():\n filepath = dirname(abspath(__file__))\n##### EDIT THE FOLLOWING TO POINT TO DatasetName.csv #####\n data = recfromtxt(open(filepath + '/spector.csv',\"rb\"), delimiter=\" \",\n names=True, dtype=float, usecols=(1,2,3,4))\n names = list(data.dtype.names)\n endog = array(data[names[3]], dtype=float)\n endog_name = names[3]\n exog = column_stack(data[i] for i in names[:3]).astype(float)\n exog_name = names[:3]\n dataset = Dataset(data=data, names=names, endog=endog, exog=exog,\n endog_name = endog_name, exog_name=exog_name)\n return dataset"
] | [
"0.76758665",
"0.6792178",
"0.6289413",
"0.628798",
"0.62494963",
"0.6189763",
"0.6181921",
"0.6159265",
"0.6122643",
"0.60905063",
"0.6083762",
"0.60353416",
"0.6018338",
"0.6012588",
"0.59544116",
"0.5922192",
"0.5917395",
"0.59086275",
"0.5863918",
"0.58565897",
"0.5843114",
"0.5836123",
"0.5786403",
"0.57821083",
"0.5762734",
"0.5727176",
"0.5721712",
"0.57205486",
"0.57201517",
"0.5697462"
] | 0.75693786 | 1 |
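Illustrative note (not part of the record data above): the document downloads a DICOM instance's raw bytes from Orthanc and parses them with pydicom.dcmread over an in-memory buffer. The sketch below shows the same parsing step against bytes read from a local file, so it runs without an Orthanc server; the file path is a placeholder.

# Hedged sketch: parsing DICOM bytes from memory with pydicom, mirroring the
# record above but without an Orthanc client. "example.dcm" is a placeholder.
from io import BytesIO
import pydicom

with open("example.dcm", "rb") as f:
    dicom_bytes = f.read()

dataset = pydicom.dcmread(BytesIO(dicom_bytes))
print(dataset.SOPInstanceUID)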
Rasterize a collection of lon,lat shapes onto a DLTile. | def rasterize_shape(
tile: Tile,
shapes: AnyShapes,
values: Sequence[int] = None,
out: np.ndarray = None,
mode="burn",
dtype=np.byte,
shape_coords="lonlat",
all_touched=False,
) -> np.ndarray:
shapes = normalize_polygons(shapes)
if values is None:
if mode == "burn":
values = range(1, len(shapes) + 1)
elif mode == "add":
values = (1 for _ in range(len(shapes)))
else:
raise ValueError("Expected mode of 'burn' or 'add', got %s" % mode)
elif len(values) != len(shapes):
raise ValueError(
"Expected parameter 'values' to have the same length as parameter "
"'shapes', got %i and %i." % (len(values), len(shapes))
)
if out is None:
out = np.zeros((tile.tile_extent, tile.tile_extent), dtype=dtype)
# Convert shapes to pixel coordinates
tilebox = geo.box(0, 0, tile.tile_extent, tile.tile_extent)
if shape_coords == "lonlat":
shapes_rowcol = [
utm_to_rowcol(lonlat_to_utm(shape, zone=tile.zone), tile=tile).intersection(
tilebox
)
for shape in shapes
]
elif shape_coords == "utm":
shapes_rowcol = [
utm_to_rowcol(shape, tile=tile).intersection(tilebox) for shape in shapes
]
elif shape_coords == "rowcol":
shapes_rowcol = [shape.intersection(tilebox) for shape in shapes]
else:
raise ValueError(
"Parameter shape_coords of function rasterize_shape() must be one "
"of 'lonlat', 'rowcol', or 'utm'."
)
# We use a quadtree algorithm to rasterize.
TreeNode = namedtuple("TreeNode", ("min_col", "min_row", "max_col", "max_row"))
for shape_i, (shape, value) in enumerate(zip(shapes_rowcol, values)):
nodes = [TreeNode(0, 0, tile.tile_extent, tile.tile_extent)]
while len(nodes) > 0:
node = nodes.pop()
# "min_nodebox" is the smallest box we need to contain to cover all
# pixels in the box. "max_nodebox" is the larger box we would need
# to miss entirely in order to cover no pixels in the box.
if all_touched:
min_nodebox = geo.box(
node.min_row + 1,
node.min_col + 1,
node.max_row - 1,
node.max_col - 1,
)
max_nodebox = geo.box(
node.min_row, node.min_col, node.max_row, node.max_col
)
else:
min_nodebox = geo.box(
node.min_row + 0.5,
node.min_col + 0.5,
node.max_row - 0.5,
node.max_col - 0.5,
)
max_nodebox = min_nodebox
node_w = node.max_col - node.min_col
node_h = node.max_row - node.min_row
if node_w <= 3 and node_h <= 3:
# Check each pixel for being within shape
for row in range(node.min_row, node.max_row):
for col in range(node.min_col, node.max_col):
if all_touched:
pixelbox = geo.box(row, col, row + 1.0, col + 1.0)
condition = shape.intersects(pixelbox)
else:
pixel = geo.Point(row + 0.5, col + 0.5)
condition = shape.intersects(pixel)
if condition:
if mode == "burn":
out[row, col] = value
elif mode == "add":
out[row, col] += value
else:
raise ValueError(
"Expected mode of 'burn' or 'add', got %s" % mode
)
elif shape.contains(min_nodebox):
# Apply to all pixels in box
if mode == "burn":
out[
node.min_row : node.max_row,
node.min_col : node.max_col,
] = value
elif mode == "add":
out[
node.min_row : node.max_row,
node.min_col : node.max_col,
] += value
else:
raise ValueError("Expected mode of 'burn' or 'add', got %s" % mode)
elif max_nodebox.disjoint(shape):
# No intersection, do nothing.
pass
else:
# There is some intersection.
# Split node into child nodes
node_mid_row = int((node.max_row + node.min_row) / 2)
node_mid_col = int((node.max_col + node.min_col) / 2)
if node_h <= 1:
left_node = TreeNode(
node.min_col, node.min_row, node_mid_col, node.max_row
)
right_node = TreeNode(
node_mid_col, node.min_row, node.max_col, node.max_row
)
nodes.extend([left_node, right_node])
elif node_w <= 1:
upper_node = TreeNode(
node.min_col, node.min_row, node.max_col, node_mid_row
)
lower_node = TreeNode(
node.min_col, node_mid_row, node.max_col, node.max_row
)
nodes.extend([upper_node, lower_node])
else:
ul_node = TreeNode(
node.min_col, node.min_row, node_mid_col, node_mid_row
)
ur_node = TreeNode(
node_mid_col, node.min_row, node.max_col, node_mid_row
)
ll_node = TreeNode(
node.min_col, node_mid_row, node_mid_col, node.max_row
)
lr_node = TreeNode(
node_mid_col, node_mid_row, node.max_col, node.max_row
)
nodes.extend([ul_node, ur_node, ll_node, lr_node])
return out | {
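Illustrative note (not part of the record data above or below): rasterize_shape fills the tile with a quadtree subdivision, filling whole quadrants that lie entirely inside a polygon, skipping quadrants that miss it, and testing individual pixel centers only near the boundary. The sketch below applies the same idea to a bare NumPy grid with shapely, independent of the Tile machinery; the circular polygon and the power-of-two grid size are arbitrary assumptions.

# Hedged sketch of the quadtree fill idea used above, on a bare NumPy grid.
# Coordinates are pixel centers; size is assumed to be a power of two.
import numpy as np
from shapely import geometry as geo

def quadtree_rasterize(polygon, size):
    out = np.zeros((size, size), dtype=np.uint8)
    stack = [(0, 0, size, size)]  # (min_row, min_col, max_row, max_col)
    while stack:
        r0, c0, r1, c1 = stack.pop()
        block = geo.box(r0, c0, r1, c1)
        if polygon.contains(block):
            out[r0:r1, c0:c1] = 1            # whole block inside: fill it
        elif polygon.disjoint(block):
            continue                          # whole block outside: skip it
        elif (r1 - r0) <= 2 and (c1 - c0) <= 2:
            for r in range(r0, r1):           # boundary block: test pixel centers
                for c in range(c0, c1):
                    if polygon.intersects(geo.Point(r + 0.5, c + 0.5)):
                        out[r, c] = 1
        else:
            rm, cm = (r0 + r1) // 2, (c0 + c1) // 2
            stack.extend([(r0, c0, rm, cm), (r0, cm, rm, c1),
                          (rm, c0, r1, cm), (rm, cm, r1, c1)])
    return out

mask = quadtree_rasterize(geo.Point(32, 32).buffer(20), 64)
print(mask.sum())  # roughly pi * 20**2 ~ 1257 pixels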
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rasterize(shapes, coords, fill=np.nan, **kwargs):\n from rasterio import features\n transform = transform_from_latlon(coords['lat'], coords['lon'])\n out_shape = (len(coords['lat']), len(coords['lon']))\n raster = features.rasterize(shapes, out_shape=out_shape,\n fill=fill, transform=transform,\n dtype=float, **kwargs)\n return xr.DataArray(raster, coords=coords, dims=('lat', 'lon'))",
"def rasterize(self):\n\n for primitive in self._scene:\n bbox = primitive[\"bounding_box\"]\n # Loop through all pixels\n # You MUST use bounding boxes in order to speed up this loop\n for w in range(bbox[0][0], bbox[1][0]):\n x = w + 0.5\n for h in range(bbox[0][1], bbox[1][1]):\n y = h + 0.5\n # First, we check if the pixel center is inside the primitive\n im_x, im_y = w, self._height - (h + 1)\n if inside(x, y, primitive):\n # apply affine xfrom if needed\n if \"xform\" in primitive.keys():\n result = np.matmul(primitive[\"xform\"],\n [[im_x], [im_y], [1]])\n im_x, im_y = int(result[0][0]), int(result[1][0])\n\n self._image[im_y, im_x] = primitive[\"color\"]\n # break\n # break\n # break",
"def create_tiles(self, zoom):\n # Compute the tile x-y-z index range for the rasterlayer for this zoomlevel\n bbox = self.rasterlayer.extent()\n indexrange = tiler.tile_index_range(bbox, zoom)\n\n # Compute scale of tiles for this zoomlevel\n tilescale = tiler.tile_scale(zoom)\n\n # Count the number of tiles that are required to cover the raster at this zoomlevel\n nr_of_tiles = (indexrange[2] - indexrange[0] + 1) * (indexrange[3] - indexrange[1] + 1)\n\n # Create destination raster file\n self.log('Snapping dataset to zoom level {0}'.format(zoom))\n\n bounds = tiler.tile_bounds(indexrange[0], indexrange[1], zoom)\n sizex = (indexrange[2] - indexrange[0] + 1) * self.tilesize\n sizey = (indexrange[3] - indexrange[1] + 1) * self.tilesize\n dest_file = os.path.join(self.tmpdir, 'djangowarpedraster' + str(zoom) + '.tif')\n\n snapped_dataset = self.dataset.warp({\n 'name': dest_file,\n 'origin': [bounds[0], bounds[3]],\n 'scale': [tilescale, -tilescale],\n 'width': sizex,\n 'height': sizey,\n })\n\n self.log('Creating {0} tiles for zoom {1}.'.format(nr_of_tiles, zoom))\n\n counter = 0\n for tilex in range(indexrange[0], indexrange[2] + 1):\n for tiley in range(indexrange[1], indexrange[3] + 1):\n # Log progress\n counter += 1\n if counter % 250 == 0:\n self.log('{0} tiles created at zoom {1}'.format(counter, zoom))\n\n # Calculate raster tile origin\n bounds = tiler.tile_bounds(tilex, tiley, zoom)\n\n # Construct band data arrays\n pixeloffset = (\n (tilex - indexrange[0]) * self.tilesize,\n (tiley - indexrange[1]) * self.tilesize\n )\n\n band_data = [\n {\n 'data': band.data(offset=pixeloffset, size=(self.tilesize, self.tilesize)),\n 'nodata_value': band.nodata_value\n } for band in snapped_dataset.bands\n ]\n\n # Add tile data to histogram\n if zoom == self.max_zoom:\n self.push_histogram(band_data)\n\n # Warp source raster into this tile (in memory)\n dest = GDALRaster({\n 'width': self.tilesize,\n 'height': self.tilesize,\n 'origin': [bounds[0], bounds[3]],\n 'scale': [tilescale, -tilescale],\n 'srid': WEB_MERCATOR_SRID,\n 'datatype': snapped_dataset.bands[0].datatype(),\n 'bands': band_data,\n })\n\n # Store tile\n RasterTile.objects.create(\n rast=dest,\n rasterlayer=self.rasterlayer,\n tilex=tilex,\n tiley=tiley,\n tilez=zoom\n )\n\n # Store histogram data\n if zoom == self.max_zoom:\n bandmetas = RasterLayerBandMetadata.objects.filter(rasterlayer=self.rasterlayer)\n for bandmeta in bandmetas:\n bandmeta.hist_values = self.hist_values[bandmeta.band].tolist()\n bandmeta.save()\n\n # Remove snapped dataset\n self.log('Removing snapped dataset.', zoom=zoom)\n snapped_dataset = None\n os.remove(dest_file)",
"def draw_tiles(self):\n db = self.double_buffer\n if db is not None:\n span_x = self.width\n span_y = self.height\n tiles_x = int(ceil(span_x/256.0))\n tiles_y = int(ceil(span_y/256.0))\n\n cc = cairo.Context(db)\n tiles = self.tile_loader.load_area(self.longitude,self.latitude,self.zoom,tiles_x,tiles_y)\n tile_number=0\n line_number=0\n\n x_center = self.width/2# - 128\n y_center = self.height/2# - 128\n offset_x,offset_y = self.tile_loader.gmap_tile_xy_from_coord(self.longitude,self.latitude,self.zoom)\n\n\n xtiles = len(tiles[0])\n ytiles = len(tiles)\n #print len(tiles),len(tiles[0])\n for line in tiles:\n for tile in line:\n x = (tile_number - int(xtiles/2)) * 256 + x_center\n y = (line_number - int(ytiles/2)) * 256 + y_center\n finalx = x - offset_x #+128\n finaly = y - offset_y #+128\n cc.set_source_surface(tile, finalx+self.dx, finaly+self.dy)\n cc.paint()\n tile_number += 1\n tile_number = 0\n line_number += 1\n\n self.draw_cross(cc,x_center,y_center)\n self.draw_points(cc)\n\n db.flush()\n\n else:\n print('Invalid double buffer')",
"def convert(self):\n self.tilewidth = int(self.tilewidth)\n self.tileheight = int(self.tileheight)\n self.width = int(self.width)\n self.height = int(self.height)\n self.pixel_width = self.width * self.tilewidth\n self.pixel_height = self.height * self.tileheight\n for layer in self.layers:\n self.named_layers[layer.name] = layer\n layer.opacity = float(layer.opacity)\n layer.x = int(layer.x)\n layer.y = int(layer.y)\n layer.width = int(layer.width)\n layer.height = int(layer.height)\n layer.pixel_width = layer.width * self.tilewidth\n layer.pixel_height = layer.height * self.tileheight\n layer.visible = bool(int(layer.visible))\n for tile_set in self.tile_sets:\n self.named_tile_sets[tile_set.name] = tile_set\n tile_set.spacing = int(tile_set.spacing)\n tile_set.margin = int(tile_set.margin)\n for img in tile_set.images:\n if img.trans:\n img.trans = (int(img.trans[:2], 16), int(img.trans[2:4], 16), int(img.trans[4:], 16))\n for obj_group in self.object_groups:\n obj_group.x = int(obj_group.x)\n obj_group.y = int(obj_group.y)\n obj_group.width = int(obj_group.width)\n obj_group.height = int(obj_group.height)\n for map_obj in obj_group.objects:\n map_obj.x = int(map_obj.x)\n map_obj.y = int(map_obj.y)\n map_obj.width = int(map_obj.width)\n map_obj.height = int(map_obj.height)",
"def convert(self):\n self.tilewidth = int(self.tilewidth)\n self.tileheight = int(self.tileheight)\n self.width = int(self.width)\n self.height = int(self.height)\n self.pixel_width = self.width * self.tilewidth\n self.pixel_height = self.height * self.tileheight\n for layer in self.layers:\n self.named_layers[layer.name] = layer\n layer.opacity = float(layer.opacity)\n layer.x = int(layer.x)\n layer.y = int(layer.y)\n layer.width = int(layer.width)\n layer.height = int(layer.height)\n layer.pixel_width = layer.width * self.tilewidth\n layer.pixel_height = layer.height * self.tileheight\n layer.visible = bool(int(layer.visible))\n for tile_set in self.tile_sets:\n self.named_tile_sets[tile_set.name] = tile_set\n tile_set.spacing = int(tile_set.spacing)\n tile_set.margin = int(tile_set.margin)\n for img in tile_set.images:\n if img.trans:\n img.trans = (int(img.trans[:2], 16), int(img.trans[2:4], 16), int(img.trans[4:], 16))\n for obj_group in self.object_groups:\n obj_group.x = int(obj_group.x)\n obj_group.y = int(obj_group.y)\n obj_group.width = int(obj_group.width)\n obj_group.height = int(obj_group.height)\n for map_obj in obj_group.objects:\n map_obj.x = int(map_obj.x)\n map_obj.y = int(map_obj.y)\n map_obj.width = int(map_obj.width)\n map_obj.height = int(map_obj.height)",
"def _rasterize_polygons(polygons, bounds = [[-100, -100], [100, 100]],\n dx = 1, dy = 1):\n try:\n from skimage import draw\n except:\n raise ImportError('The fill function requires the module '\n '\"scikit-image\" to operate. Please retry '\n 'after installing scikit-image:\\n\\n'\n '$ pip install --upgrade scikit-image')\n\n # Prepare polygon array by shifting all points into the first quadrant and\n # separating points into x and y lists\n xpts = []\n ypts = []\n for p in polygons:\n p_array = np.asarray(p)\n x = p_array[:, 0]\n y = p_array[:, 1]\n xpts.append((x-bounds[0][0])/dx - 0.5)\n ypts.append((y-bounds[0][1])/dy - 0.5)\n\n # Initialize the raster matrix we'll be writing to\n xsize = int(np.ceil((bounds[1][0]-bounds[0][0]))/dx)\n ysize = int(np.ceil((bounds[1][1]-bounds[0][1]))/dy)\n raster = np.zeros((ysize, xsize), dtype = np.bool)\n\n # TODO: Replace polygon_perimeter with the supercover version\n for n in range(len(xpts)):\n rr, cc = draw.polygon(ypts[n], xpts[n], shape = raster.shape)\n rrp, ccp = draw.polygon_perimeter(ypts[n], xpts[n],\n shape = raster.shape, clip = False)\n raster[rr, cc] = 1\n raster[rrp, ccp] = 1\n\n return raster",
"def _add_latlon(ds, n=50):\n\n nx = ncols(ds)\n ny = nrows(ds)\n src_crs = get_crs(ds)\n dst_crs = CRS(init='epsg:4326')\n idx_x = np.linspace(0, nx - 1, n, dtype=int)\n idx_y = np.linspace(0, ny - 1, n, dtype=int)\n xs = ds.x[idx_x]\n ys = ds.y[idx_y]\n xgrid, ygrid = np.meshgrid(xs, ys)\n lon, lat = rasterio.warp.transform(src_crs, dst_crs, xgrid.flatten(),\n ygrid.flatten())\n lon_sparse = np.empty((ny, nx))\n lat_sparse = np.empty((ny, nx))\n lon_sparse[:] = np.nan\n lat_sparse[:] = np.nan\n # idx_y needs to be a column vector\n lon_sparse[idx_y[:, None], idx_x] = np.array(lon).reshape((n, n))\n lat_sparse[idx_y[:, None], idx_x] = np.array(lat).reshape((n, n))\n ds.coords['lat'] = (('y', 'x'), lat_sparse)\n ds.coords['lon'] = (('y', 'x'), lon_sparse)",
"def rasterize_all(self, overwrite=True):\n\n paths = self.tiles.get_filenames_from_dir('staged')\n self.rasterize_vectors(paths, overwrite=overwrite)\n self.webtiles_from_all_geotiffs(overwrite=overwrite)",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n \n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n \n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n \n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output numpy ndarray to store the image\n # colors default to 0 (i.e. black), alphas defaults to 1 (fully opaque i.e.\n # corresponding pixel fully visible in image))\n if output_pixel_vals:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8') \n else:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype) \n\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n \n for i in range(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = np.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n \n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n \n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = np.ones(out_shape, dtype=dt)*255\n \n for tile_row in range(tile_shape[0]):\n for tile_col in range(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # function\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def generate_base_tiles(self):\n\n gdal.SetConfigOption(\"GDAL_PAM_ENABLED\", \"NO\")\n\n print \"Generating Base Tiles:\"\n if self.options.verbose:\n #mx, my = self.out_gt[0], self.out_gt[3] # OriginX, OriginY\n #px, py = self.mercator.MetersToPixels( mx, my, self.tmaxz)\n #print \"Pixel coordinates:\", px, py, (mx, my)\n print\n print \"Tiles generated from the max zoom level:\"\n print \"----------------------------------------\"\n print\n\n\n # Set the bounds\n tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz]\n querysize = self.querysize\n\n # Just the center tile\n #tminx = tminx+ (tmaxx - tminx)/2\n #tminy = tminy+ (tmaxy - tminy)/2\n #tmaxx = tminx\n #tmaxy = tminy\n\n #print tminx, tminy, tmaxx, tmaxy\n tcount = (1+abs(tmaxx-tminx)) * (1+abs(tmaxy-tminy))\n #print tcount\n ti = 0\n i_y_column_count=((tmaxy-tminy)+1)\n ds = self.out_ds\n tz = self.tmaxz\n if self.options.verbose:\n # tx in range(tminx, tmaxx+1) tminx[ 281596 ] tmaxx[ 281744 ] ; ((tmaxx-tmaxy)+1) x_tiles[ 23393 ]\n print \"\\ttz=[\",tz,\"] : tx in range(tminx, tmaxx+1) tminx[\",tminx,\"] tmaxx[\",tmaxx,\"] ; ((tmaxx-tmaxy)+1) x_tiles[\",tcount,\"]\"\n # ty_tms in range(tmaxy, tminy-1, -1) tmaxy[ 352409 ] tminy[ 352253 ] ; ((tmaxy-tminy)) y_tiles[ 157 ] 352409-(352253-1)\n print \"\\ttz=[\",tz,\"] : ty_tms in range(tmaxy, tminy-1, -1) tmaxy[\",tmaxy,\"] tminy[\",tminy,\"] ; ((tmaxy-tminy+1)) y_tiles[\",i_y_column_count,\"]\"\n if self.options.resume:\n i_count = self.tile_exists(0, 0, tz,2)\n if i_count == tcount:\n if self.options.verbose:\n print \"\\tTile generation skipped because of --resume ; x/y-tiles of z[\",tz,\"] y_tiles[\",tcount,\"]\"\n return\n for tx in range(tminx, tmaxx+1):\n tmaxy_work=tmaxy\n if self.options.resume:\n i_count = self.tile_exists(tx, 0, tz,3)\n if i_count == i_y_column_count:\n if self.options.verbose:\n print \"\\tTile generation skipped because of --resume ; z =\",tz,\" ; y-tiles of x[\",tx,\"] y_tiles[\",i_y_column_count,\"]\"\n break\n else:\n if i_count > 0:\n # this assums the rows are compleate, which may NOT be true\n tmaxy_work-=i_count\n if self.options.verbose:\n print \"\\tTile generation skipped to tmaxy[\",tmaxy_work,\"] because of --resume ; z =\",tz,\" ; y-tiles of x[\",tx,\"] y_tiles[\",i_y_column_count,\"]\"\n for ty_tms in range(tmaxy_work, tminy-1, -1): #range(tminy, tmaxy+1):\n ty_osm=self.flip_y(tz,ty_tms)\n ty=ty_tms\n if self.options.tms_osm:\n ty=ty_osm\n if self.stopped:\n if self.options.mbtiles:\n if self.mbtiles_db:\n self.mbtiles_db.close_db()\n break\n ti += 1\n\n if self.options.resume:\n exists = self.tile_exists(tx, ty, tz,0)\n if exists and self.options.verbose:\n print \"\\tTile generation skipped because of --resume ; z =\",tz,\" ; x =\",tx,\" ; y_tms =\",ty_tms, \"; y_osm =\",ty_osm\n else:\n exists = False\n\n if not exists:\n if self.options.verbose:\n print ti, '/', tcount, self.get_verbose_tile_name(tx, ty, tz)\n # Don't scale up by nearest neighbour, better change the querysize\n # to the native resolution (and return smaller query tile) for scaling\n if self.options.profile in ('mercator','geodetic'):\n if self.options.profile == 'mercator':\n # Tile bounds in EPSG:900913\n b = self.mercator.TileBounds(tx, ty_tms, tz)\n elif self.options.profile == 'geodetic':\n b = self.geodetic.TileBounds(tx, ty_tms, tz)\n\n rb, wb = self.geo_query( ds, b[0], b[3], b[2], b[1])\n nativesize = wb[0]+wb[2] # Pixel size in the raster covering query geo extent\n if self.options.verbose:\n print \"\\tNative Extent (querysize\",nativesize,\"): \", rb, 
wb\n\n querysize = self.querysize\n # Tile bounds in raster coordinates for ReadRaster query\n rb, wb = self.geo_query( ds, b[0], b[3], b[2], b[1], querysize=querysize)\n\n rx, ry, rxsize, rysize = rb\n wx, wy, wxsize, wysize = wb\n else: # 'raster' or 'gearth' or 'garmin' profile:\n tsize = int(self.tsize[tz]) # tilesize in raster coordinates for actual zoom\n xsize = self.out_ds.RasterXSize # size of the raster in pixels\n ysize = self.out_ds.RasterYSize\n if tz >= self.nativezoom:\n querysize = self.tilesize # int(2**(self.nativezoom-tz) * self.tilesize)\n\n rx = (tx) * tsize\n rxsize = 0\n if tx == tmaxx:\n rxsize = xsize % tsize\n if rxsize == 0:\n rxsize = tsize\n\n rysize = 0\n if ty_tms == tmaxy:\n rysize = ysize % tsize\n if rysize == 0:\n rysize = tsize\n ry = ysize - (ty_tms * tsize) - rysize\n\n wx, wy = 0, 0\n\n wxsize, wysize = int(rxsize/float(tsize) * querysize), int(rysize/float(tsize) * querysize)\n if wysize != querysize:\n wy = querysize - wysize\n xyzzy = Xyzzy(querysize, rx, ry, rxsize, rysize, wx, wy, wxsize, wysize)\n try:\n if self.options.verbose:\n print ti,'/',tcount,' total ; z =',tz,' ; x =',tx,' ; y_tms =',ty_tms,' ; y_osm =',ty_osm\n print \"\\tReadRaster Extent: \", (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)\n self.write_base_tile(tx, ty, tz, xyzzy)\n except ImageOutputException, e:\n self.error(\"'%d/%d/%d': %s\" % (tz, tx, ty, e.message))\n\n if not self.options.verbose or self.is_subprocess:\n self.progressbar( ti / float(tcount) )\n if self.options.mbtiles:\n if self.mbtiles_db:\n self.mbtiles_db.close_db()\n self.mbtiles_db=None",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\r\n scale_rows_to_unit_interval=True,\r\n output_pixel_vals=True):\r\n\r\n assert len(img_shape) == 2\r\n assert len(tile_shape) == 2\r\n assert len(tile_spacing) == 2\r\n\r\n # The expression below can be re-written in a more C style as\r\n # follows :\r\n #\r\n # out_shape = [0,0]\r\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\r\n # tile_spacing[0]\r\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\r\n # tile_spacing[1]\r\n out_shape = [(ishp + tsp) * tshp - tsp for ishp, tshp, tsp\r\n in zip(img_shape, tile_shape, tile_spacing)]\r\n\r\n if isinstance(X, tuple):\r\n assert len(X) == 4\r\n # Create an output numpy ndarray to store the image\r\n if output_pixel_vals:\r\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\r\n dtype='uint8')\r\n else:\r\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\r\n dtype=X.dtype)\r\n\r\n #colors default to 0, alpha defaults to 1 (opaque)\r\n if output_pixel_vals:\r\n channel_defaults = [0, 0, 0, 255]\r\n else:\r\n channel_defaults = [0., 0., 0., 1.]\r\n\r\n for i in xrange(4):\r\n if X[i] is None:\r\n # if channel is None, fill it with zeros of the correct\r\n # dtype\r\n dt = out_array.dtype\r\n if output_pixel_vals:\r\n dt = 'uint8'\r\n out_array[:, :, i] = numpy.zeros(out_shape,\r\n dtype=dt) + channel_defaults[i]\r\n else:\r\n # use a recurrent call to compute the channel and store it\r\n # in the output\r\n out_array[:, :, i] = tile_raster_images(\r\n X[i], img_shape, tile_shape, tile_spacing,\r\n scale_rows_to_unit_interval, output_pixel_vals)\r\n return out_array\r\n\r\n else:\r\n # if we are dealing with only one channel\r\n H, W = img_shape\r\n Hs, Ws = tile_spacing\r\n\r\n # generate a matrix to store the output\r\n dt = X.dtype\r\n if output_pixel_vals:\r\n dt = 'uint8'\r\n out_array = numpy.zeros(out_shape, dtype=dt)\r\n\r\n for tile_row in xrange(tile_shape[0]):\r\n for tile_col in xrange(tile_shape[1]):\r\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\r\n this_x = X[tile_row * tile_shape[1] + tile_col]\r\n if scale_rows_to_unit_interval:\r\n # if we should scale values to be between 0 and 1\r\n # do this by calling the `scale_to_unit_interval`\r\n # function\r\n this_img = scale_to_unit_interval(\r\n this_x.reshape(img_shape))\r\n else:\r\n this_img = this_x.reshape(img_shape)\r\n # add the slice to the corresponding position in the\r\n # output array\r\n c = 1\r\n if output_pixel_vals:\r\n c = 255\r\n out_array[\r\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\r\n tile_col * (W + Ws): tile_col * (W + Ws) + W\r\n ] = this_img * c\r\n return out_array",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n\n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n\n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n\n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output numpy ndarray to store the image\n if output_pixel_vals:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8')\n else:\n out_array = numpy.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype)\n\n #colors default to 0, alpha defaults to 1 (opaque)\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n\n for i in xrange(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = numpy.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n\n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n\n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = numpy.zeros(out_shape, dtype=dt)\n\n for tile_row in xrange(tile_shape[0]):\n for tile_col in xrange(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # function\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def tile_raster_images(X, img_shape, tile_shape, tile_spacing=(0, 0),\n scale_rows_to_unit_interval=True,\n output_pixel_vals=True):\n\n assert len(img_shape) == 2\n assert len(tile_shape) == 2\n assert len(tile_spacing) == 2\n\n # The expression below can be re-written in a more C style as\n # follows :\n #\n # out_shape = [0,0]\n # out_shape[0] = (img_shape[0]+tile_spacing[0])*tile_shape[0] -\n # tile_spacing[0]\n # out_shape[1] = (img_shape[1]+tile_spacing[1])*tile_shape[1] -\n # tile_spacing[1]\n out_shape = [\n (ishp + tsp) * tshp - tsp\n for ishp, tshp, tsp in zip(img_shape, tile_shape, tile_spacing)\n ]\n\n if isinstance(X, tuple):\n assert len(X) == 4\n # Create an output np ndarray to store the image\n if output_pixel_vals:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype='uint8')\n else:\n out_array = np.zeros((out_shape[0], out_shape[1], 4),\n dtype=X.dtype)\n\n # colors default to 0, alpha defaults to 1 (opaque)\n if output_pixel_vals:\n channel_defaults = [0, 0, 0, 255]\n else:\n channel_defaults = [0., 0., 0., 1.]\n\n for i in xrange(4):\n if X[i] is None:\n # if channel is None, fill it with zeros of the correct\n # dtype\n dt = out_array.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array[:, :, i] = np.zeros(\n out_shape,\n dtype=dt\n ) + channel_defaults[i]\n else:\n # use a recurrent call to compute the channel and store it\n # in the output\n out_array[:, :, i] = tile_raster_images(\n X[i], img_shape, tile_shape, tile_spacing,\n scale_rows_to_unit_interval, output_pixel_vals)\n return out_array\n\n else:\n # if we are dealing with only one channel\n H, W = img_shape\n Hs, Ws = tile_spacing\n\n # generate a matrix to store the output\n dt = X.dtype\n if output_pixel_vals:\n dt = 'uint8'\n out_array = np.zeros(out_shape, dtype=dt)\n\n for tile_row in xrange(tile_shape[0]):\n for tile_col in xrange(tile_shape[1]):\n if tile_row * tile_shape[1] + tile_col < X.shape[0]:\n this_x = X[tile_row * tile_shape[1] + tile_col]\n if scale_rows_to_unit_interval:\n # if we should scale values to be between 0 and 1\n # do this by calling the `scale_to_unit_interval`\n # functionmapping\n this_img = scale_to_unit_interval(\n this_x.reshape(img_shape))\n else:\n this_img = this_x.reshape(img_shape)\n # add the slice to the corresponding position in the\n # output array\n c = 1\n if output_pixel_vals:\n c = 255\n out_array[\n tile_row * (H + Hs): tile_row * (H + Hs) + H,\n tile_col * (W + Ws): tile_col * (W + Ws) + W\n ] = this_img * c\n return out_array",
"def __init__(self, raster_path):\n self.raster_path = raster_path\n dataset = gdal.Open(raster_path)\n self.width = dataset.RasterXSize\n self.height = dataset.RasterYSize\n # Gets the gdal geo transformation tuples\n # gdal_version = gdal.__version__\n self._txf = dataset.GetGeoTransform()\n # self._inv_txf = gdal.InvGeoTransform(self._txf)[1]\n self._inv_txf = gdal.InvGeoTransform(self._txf)\n # Gets the transformation from lat/lon to coordinates\n wgs84_ref = osr.SpatialReference()\n wgs84_ref.ImportFromEPSG(4326) # WGS84\n sref = osr.SpatialReference()\n sref.ImportFromWkt(dataset.GetProjection())\n if int(osgeo.__version__[0]) >= 3:\n # Output order has changed in osgeo v3\n wgs84_ref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n sref.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)\n\n self._transform = osr.CoordinateTransformation(wgs84_ref, sref)\n inv_transform = osr.CoordinateTransformation(sref, wgs84_ref)\n # Find a loose lat/lon bounding box for quick check without\n # having to do full coordinates transformation\n corners = []\n for x in [0, self.width]:\n for y in [0, self.height]:\n corners.append([self._txf[0] + self._txf[1] * x + self._txf[2] * y,\n self._txf[3] + self._txf[4] * x + self._txf[5] * y])\n self.max_lat = -100\n self.min_lat = 100\n self.max_lon = -500\n self.min_lon = 500\n for c in corners:\n p = inv_transform.TransformPoint(c[0], c[1])\n if p[0] > self.max_lon:\n self.max_lon = p[0]\n if p[0] < self.min_lon:\n self.min_lon = p[0]\n if p[1] > self.max_lat:\n self.max_lat = p[1]\n if p[1] < self.min_lat:\n self.min_lat = p[1]\n dataset = None",
"def find_tiles(self):\n lat1, lat2 = self.bbox.south, self.bbox.north\n lon1, lon2 = self.bbox.west, self.bbox.east\n # convert to geographic bounding box\n minlat, minlon = min(lat1, lat2), min(lon1, lon2)\n maxlat, maxlon = max(lat1, lat2), max(lon1, lon2)\n\n # convert to tile-space bounding box\n _, xmin, ymin = self.mercator(maxlat, minlon, self.zoom)\n _, xmax, ymax = self.mercator(minlat, maxlon, self.zoom)\n\n # generate a list of tiles\n xs, ys = range(xmin, xmax + 1), range(ymin, ymax + 1)\n tile_list = [(self.zoom, x, y) for (y, x) in product(ys, xs)]\n\n return tile_list",
"def geo_query(self, ds, ulx, uly, lrx, lry, querysize = 0):\n\n geotran = ds.GetGeoTransform()\n rx= int((ulx - geotran[0]) / geotran[1] + 0.001)\n ry= int((uly - geotran[3]) / geotran[5] + 0.001)\n rxsize= int((lrx - ulx) / geotran[1] + 0.5)\n rysize= int((lry - uly) / geotran[5] + 0.5)\n\n if not querysize:\n wxsize, wysize = rxsize, rysize\n else:\n wxsize, wysize = querysize, querysize\n\n # Coordinates should not go out of the bounds of the raster\n wx = 0\n if rx < 0:\n rxshift = abs(rx)\n wx = int( wxsize * (float(rxshift) / rxsize) )\n wxsize = wxsize - wx\n rxsize = rxsize - int( rxsize * (float(rxshift) / rxsize) )\n rx = 0\n if rx+rxsize > ds.RasterXSize:\n wxsize = int( wxsize * (float(ds.RasterXSize - rx) / rxsize) )\n rxsize = ds.RasterXSize - rx\n\n wy = 0\n if ry < 0:\n ryshift = abs(ry)\n wy = int( wysize * (float(ryshift) / rysize) )\n wysize = wysize - wy\n rysize = rysize - int( rysize * (float(ryshift) / rysize) )\n ry = 0\n if ry+rysize > ds.RasterYSize:\n wysize = int( wysize * (float(ds.RasterYSize - ry) / rysize) )\n rysize = ds.RasterYSize - ry\n\n return (rx, ry, rxsize, rysize), (wx, wy, wxsize, wysize)",
"def project_dem(\n self,\n dem: Raster,\n values: np.ndarray = None,\n mask: np.ndarray = None,\n tile_size: Iterable[int] = (256, 256),\n tile_overlap: Iterable[int] = (1, 1),\n scale: Number = 1,\n scale_limits: Iterable[Number] = (1, 1),\n parallel: Union[bool, int] = False,\n return_depth: bool = False,\n ) -> np.ndarray:\n has_values = False\n if values is not None:\n has_values = True\n values = np.atleast_3d(values)\n if values.shape[0:2] != dem.shape:\n raise ValueError(\"values does not have the same 2-d shape as dem\")\n elif not return_depth:\n raise ValueError(\"values cannot be missing if return_depth is False\")\n if mask is None:\n mask = ~np.isnan(dem.array)\n if mask.shape != dem.shape:\n raise ValueError(\"mask does not have the same 2-d shape as dem\")\n parallel = helpers._parse_parallel(parallel)\n # Generate DEM block indices\n tile_indices = dem.tile_indices(size=tile_size, overlap=tile_overlap)\n ntiles = len(tile_indices)\n # Initialize array\n nbands = (values.shape[2] if has_values else 0) + return_depth\n # TODO: Use something faster than full\n array = np.full((self.imgsz[1], self.imgsz[0], nbands), np.nan)\n # Define parallel process\n bar = helpers._progress_bar(max=ntiles)\n\n def process(\n ij: Tuple[slice, slice]\n ) -> Optional[Tuple[Tuple[Iterable[int], Iterable[int]], Iterable[Number]]]:\n tile_mask = mask[ij] # type: ignore\n if not np.count_nonzero(tile_mask):\n # No cells selected\n return None\n tile = dem[ij]\n if has_values:\n tile_values = values[ij] # type: ignore\n # Scale tile based on distance from camera\n mean_xyz = (\n tile.xlim.mean(),\n tile.ylim.mean(),\n np.nanmean(tile.array[tile_mask]),\n )\n if np.isnan(mean_xyz[2]):\n # No cells with elevations\n return None\n _, mean_depth = self._xyz_to_xy(np.atleast_2d(mean_xyz), return_depth=True)\n tile_scale = scale * np.abs(tile.d).mean() / (mean_depth / self.f.mean())\n tile_scale = min(max(tile_scale, min(scale_limits)), max(scale_limits))\n if tile_scale != 1:\n tile.resize(tile_scale)\n tile_mask = scipy.ndimage.zoom(\n tile_mask, zoom=float(tile_scale), order=0\n )\n tile_values = np.dstack(\n scipy.ndimage.zoom(\n tile_values[:, :, i], zoom=float(tile_scale), order=1\n )\n for i in range(tile_values.shape[2])\n )\n # Project tile\n xyz = helpers.grid_to_points(\n (tile.X[tile_mask], tile.Y[tile_mask], tile.array[tile_mask])\n )\n if return_depth:\n xy, depth = self._xyz_to_xy(xyz, return_depth=True)\n uv = self._xy_to_uv(xy)\n else:\n uv = self.xyz_to_uv(xyz)\n is_in = self.inframe(uv)\n if not np.count_nonzero(is_in):\n # No cells in image\n return None\n rc = uv[is_in, ::-1].astype(int)\n # Compile values\n if has_values:\n tile_values = tile_values[tile_mask][is_in]\n if return_depth:\n if has_values:\n tile_values = np.column_stack((tile_values, depth[is_in, None]))\n else:\n tile_values = depth[is_in, None]\n # Build DataFrame for fast groupby operation\n shape = (self.imgsz[1], self.imgsz[0])\n fidx, means = helpers.rasterize_points(\n rc[:, 0], rc[:, 1], tile_values, shape=shape\n )\n return np.unravel_index(fidx, shape), means\n\n def reduce(\n idx: Tuple[Iterable[int], Iterable[int]] = None,\n values: Iterable[Number] = None,\n ) -> None:\n bar.next()\n if idx is not None:\n array[idx] = values\n\n with config.backend(np=parallel) as pool:\n pool.map(func=process, reduce=reduce, sequence=tile_indices)\n bar.finish()\n return array",
"def render_tiles(bbox, config, tile_dir, min_zoom=DEFAULT_MIN_ZOOM, max_zoom=DEFAULT_MAX_ZOOM, process_count=DEFAULT_PROCESS_COUNT):\n if not os.path.isdir(tile_dir):\n os.mkdir(tile_dir)\n\n tile_projection = GoogleProjection(max_zoom) \n\n ll0 = (bbox[1], bbox[0])\n ll1 = (bbox[3], bbox[2])\n\n tile_queue = multiprocessing.JoinableQueue()\n\n for zoom in range(min_zoom, max_zoom + 1):\n px0 = tile_projection.fromLLtoPixel(ll0, zoom)\n px1 = tile_projection.fromLLtoPixel(ll1, zoom)\n\n tile_x1 = int(px0[0] / 256.0)\n tile_x2 = int(px1[0] / 256.0) + 1\n tile_y1 = int(px0[1] / 256.0)\n tile_y2 = int(px1[1] / 256.0) + 1\n\n zoom_dir = os.path.join(tile_dir, str(zoom))\n\n if not os.path.isdir(zoom_dir):\n os.mkdir(zoom_dir)\n\n for tile_x in range(tile_x1, tile_x2):\n # Validate x coordinate\n if (tile_x < 0) or (tile_x >= 2**zoom):\n continue\n\n x_dir = os.path.join(zoom_dir, str(tile_x))\n\n if not os.path.isdir(x_dir):\n os.mkdir(x_dir)\n\n for tile_y in range(tile_y1, tile_y2):\n # Validate y coordinate\n if (tile_y < 0) or (tile_y >= 2**zoom):\n continue\n\n filename = os.path.join(x_dir, '%s.png' % str(tile_y))\n\n # Submit tile to be rendered into the queue\n t = (filename, tile_x, tile_y, zoom)\n tile_queue.put(t)\n\n print 'Using %i processes to render %i tiles' % (process_count, tile_queue.qsize())\n\n processes = []\n\n for i in range(process_count):\n renderer = Renderer(tile_queue, config)\n renderer.start()\n\n processes.append(renderer)\n\n try:\n tile_queue.join()\n except KeyboardInterrupt:\n for p in processes:\n p.terminate()",
"def tile(sceneid, tile_x, tile_y, tile_z, bands=None, tilesize=256, **kwargs):\n if not bands:\n raise InvalidBandName(\"bands is required\")\n\n if not isinstance(bands, tuple):\n bands = tuple((bands,))\n\n for band in bands:\n if band not in SENTINEL_BANDS:\n raise InvalidBandName(\"{} is not a valid Sentinel band name\".format(band))\n\n scene_params = _sentinel_parse_scene_id(sceneid)\n sentinel_address = \"{}/{}/measurement\".format(SENTINEL_BUCKET, scene_params[\"key\"])\n\n mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z)\n tile_bounds = mercantile.xy_bounds(mercator_tile)\n\n addresses = [\n \"{}/{}-{}.tiff\".format(sentinel_address, scene_params[\"beam\"].lower(), band)\n for band in bands\n ]\n\n def _s1_tiler(src_path):\n with rasterio.open(src_path) as src_dst:\n with WarpedVRT(\n src_dst,\n src_crs=src_dst.gcps[1],\n src_transform=transform.from_gcps(src_dst.gcps[0]),\n src_nodata=0,\n ) as vrt_dst:\n if not utils.tile_exists(vrt_dst.bounds, tile_z, tile_x, tile_y):\n raise TileOutsideBounds(\n \"Tile {}/{}/{} is outside image bounds\".format(\n tile_z, tile_x, tile_y\n )\n )\n\n return utils._tile_read(vrt_dst, bounds=tile_bounds, tilesize=tilesize)\n\n with futures.ThreadPoolExecutor() as executor:\n data, masks = zip(*list(executor.map(_s1_tiler, addresses)))\n mask = numpy.all(masks, axis=0).astype(numpy.uint8) * 255\n\n return numpy.concatenate(data), mask",
"def test_rasters_and_arrays(self):\n\n # Create test data\n lon_ul = 100 # Longitude of upper left corner\n lat_ul = 10 # Latitude of upper left corner\n numlon = 8 # Number of longitudes\n numlat = 5 # Number of latitudes\n dlon = 1\n dlat = -1\n\n # Define array where latitudes are rows and longitude columns\n A1 = numpy.zeros((numlat, numlon))\n\n # Establish coordinates for lower left corner\n lat_ll = lat_ul - numlat\n lon_ll = lon_ul\n\n # Define pixel centers along each direction\n lon = numpy.linspace(lon_ll + 0.5, lon_ll + numlon - 0.5, numlon)\n lat = numpy.linspace(lat_ll + 0.5, lat_ll + numlat - 0.5, numlat)\n\n # Define raster with latitudes going bottom-up (south to north).\n # Longitudes go left-right (west to east)\n for i in range(numlat):\n for j in range(numlon):\n A1[numlat - 1 - i, j] = linear_function(lon[j], lat[i])\n\n # Throw in a nodata element\n A1[2, 6] = numpy.nan\n\n # Upper left corner\n assert A1[0, 0] == 105.25\n assert A1[0, 0] == linear_function(lon[0], lat[4])\n\n # Lower left corner\n assert A1[4, 0] == 103.25\n assert A1[4, 0] == linear_function(lon[0], lat[0])\n\n # Upper right corner\n assert A1[0, 7] == 112.25\n assert A1[0, 7] == linear_function(lon[7], lat[4])\n\n # Lower right corner\n assert A1[4, 7] == 110.25\n assert A1[4, 7] == linear_function(lon[7], lat[0])\n\n # Generate raster object and write\n projection = ('GEOGCS[\"WGS 84\",'\n 'DATUM[\"WGS_1984\",'\n 'SPHEROID[\"WGS 84\",6378137,298.2572235630016,'\n 'AUTHORITY[\"EPSG\",\"7030\"]],'\n 'AUTHORITY[\"EPSG\",\"6326\"]],'\n 'PRIMEM[\"Greenwich\",0],'\n 'UNIT[\"degree\",0.0174532925199433],'\n 'AUTHORITY[\"EPSG\",\"4326\"]]')\n geotransform = (lon_ul, dlon, 0, lat_ul, 0, dlat)\n R1 = Raster(A1, projection, geotransform,\n keywords={'testkwd': 'testval', 'size': 'small'})\n\n # Check string representation of raster class\n assert str(R1).startswith('Raster data')\n assert str(R1.rows) in str(R1)\n assert str(R1.columns) in str(R1)\n\n # Test conversion between geotransform and\n # geometry (longitudes and latitudes)\n longitudes, latitudes = R1.get_geometry()\n msg = 'Longitudes not as expected: %s' % str(longitudes)\n assert numpy.allclose(longitudes, [100.5, 101.5, 102.5, 103.5, 104.5,\n 105.5, 106.5, 107.5]), msg\n\n msg = 'Latitudes not as expected: %s' % str(latitudes)\n assert numpy.allclose(latitudes, [5.5, 6.5, 7.5, 8.5, 9.5]), msg\n\n gt = raster_geometry2geotransform(longitudes, latitudes)\n msg = ('Conversion from coordinates to geotransform failed: %s'\n % str(gt))\n assert numpy.allclose(gt, geotransform,\n rtol=1.0e-12, atol=1.0e-12), msg\n\n msg = ('Dimensions of raster array do not match those of '\n 'raster object')\n assert numlat == R1.rows, msg\n assert numlon == R1.columns, msg\n\n # Write back to new (tif) file\n out_filename = unique_filename(suffix='.tif')\n R1.write_to_file(out_filename)\n assert R1.filename == out_filename\n\n # Check nodata in original layer\n assert numpy.isnan(R1.get_nodata_value())\n\n # Read again and check consistency\n R2 = read_layer(out_filename)\n assert R2.filename == out_filename\n\n # Check nodata in read layer\n assert numpy.isnan(R2.get_nodata_value())\n\n msg = ('Dimensions of written raster array do not match those '\n 'of input raster file\\n')\n msg += (' Dimensions of input file '\n '%s: (%s, %s)\\n' % (R1.filename, numlat, numlon))\n msg += (' Dimensions of output file %s: '\n '(%s, %s)' % (R2.filename, R2.rows, R2.columns))\n\n assert numlat == R2.rows, msg\n assert numlon == R2.columns, msg\n\n A2 = R2.get_data()\n\n assert 
numpy.allclose(numpy.nanmin(A1), numpy.nanmin(A2))\n assert numpy.allclose(numpy.nanmax(A1), numpy.nanmax(A2))\n\n msg = 'Array values of written raster array were not as expected'\n assert nanallclose(A1, A2), msg\n\n msg = 'Geotransforms were different'\n assert R1.get_geotransform() == R2.get_geotransform(), msg\n\n p1 = R1.get_projection(proj4=True)\n p2 = R2.get_projection(proj4=True)\n msg = 'Projections were different: %s != %s' % (p1, p2)\n assert p1 == p1, msg\n\n # Exercise projection __eq__ method\n assert R1.projection == R2.projection\n\n # Check that equality raises exception when type is wrong\n try:\n R1.projection == 234\n except TypeError:\n pass\n else:\n msg = 'Should have raised TypeError'\n raise Exception(msg)\n\n # Check keywords\n assert R1.keywords == R2.keywords\n\n # Check override of ==\n assert R1 == R2",
"def render(self):\n\n self.baseMap.beginDraw()\n self.baseMap.background(255)\n self.baseMap.endDraw()\n\n numColumns = self.width / self.tileSize\n numRows = self.height / self.tileSize\n\n startX = floor(self.centerX - numColumns / 2.0)\n startY = floor(self.centerY - numRows / 2.0)\n\n endX = ceil(self.centerX + numColumns / 2.0)\n endY = ceil(self.centerY + numRows / 2.0)\n\n self.offsetX = -floor((self.centerX - floor(self.centerX)) * self.tileSize) + \\\n floor(self.width / 2.0) + \\\n floor(startX - floor(self.centerX)) * self.tileSize\n self.offsetY = -floor((self.centerY - floor(self.centerY)) * self.tileSize) + \\\n floor(self.height / 2.0) + \\\n floor(startY - floor(self.centerY)) * self.tileSize\n\n def onTileLoaded(tile, meta):\n self.baseMap.beginDraw()\n x = meta['destX']\n y = meta['destY']\n self.baseMap.image(tile, x, y)\n self.baseMap.endDraw()\n\n for x in xrange(startX, endX):\n for y in xrange(startY, endY):\n # Interpolate the URL for this particular tile.\n # 12/1208/1541.png\n url = self.url % (self.zoom, x, y)\n\n # Compute the x and y coordinates for where this tile will go on the map.\n destX = (x - startX) * self.tileSize + self.offsetX\n destY = (y - startY) * self.tileSize + self.offsetY\n\n # Attempts to load all the images lazily.\n meta = {\n 'url' : url,\n 'destX' : destX,\n 'destY' : destY,\n 'x' : x,\n 'y' : y,\n }\n self.lazyImageManager.addLazyImage(url, onTileLoaded, meta)\n\n # Kick off all the layer rendering.\n for layer in self.layers:\n layer.render()\n\n for marker in self.markers:\n marker.draw()",
"def tile(\n sceneid, tile_x, tile_y, tile_z, bands=(\"04\", \"03\", \"02\"), tilesize=256, **kwargs\n):\n scene_params = _sentinel_parse_scene_id(sceneid)\n\n if not isinstance(bands, tuple):\n bands = tuple((bands,))\n\n for band in bands:\n if band not in scene_params[\"valid_bands\"]:\n raise InvalidBandName(\"{} is not a valid Sentinel band name\".format(band))\n\n preview_file = os.path.join(\n scene_params[\"aws_bucket\"],\n scene_params[\"aws_prefix\"],\n scene_params[\"preview_file\"],\n )\n with rasterio.open(preview_file) as src:\n bounds = transform_bounds(src.crs, \"epsg:4326\", *src.bounds, densify_pts=21)\n\n if not utils.tile_exists(bounds, tile_z, tile_x, tile_y):\n raise TileOutsideBounds(\n \"Tile {}/{}/{} is outside image bounds\".format(tile_z, tile_x, tile_y)\n )\n\n mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z)\n tile_bounds = mercantile.xy_bounds(mercator_tile)\n\n path_prefix = os.path.join(scene_params[\"aws_bucket\"], scene_params[\"aws_prefix\"])\n if scene_params[\"processingLevel\"] == \"L2A\":\n bands = [_l2_prefixed_band(b) for b in bands]\n else:\n bands = [\"B{}\".format(b) for b in bands]\n\n def _read_tile(path):\n with rasterio.open(path) as src_dst:\n return utils.tile_read(\n src_dst, bounds=tile_bounds, tilesize=tilesize, nodata=0, **kwargs\n )\n\n addresses = [\"{}/{}.jp2\".format(path_prefix, band) for band in bands]\n with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:\n data, masks = zip(*list(executor.map(_read_tile, addresses)))\n mask = np.all(masks, axis=0).astype(np.uint8) * 255\n\n return np.concatenate(data), mask",
"def __init__(self):\n# This is the top container for all data. The gid is the global id (for a image).\n# Before calling convert most of the values are strings. Some additional\n# values are also calculated, see convert() for details. After calling\n# convert, most values are integers or floats where appropriat.\n # set through parser\n self.orientation = None\n self.tileheight = 0\n self.tilewidth = 0\n self.width = 0\n self.height = 0\n self.version = 0\n self.tile_sets = [] # TileSet\n self.layers = [] # WorldTileLayer <- what order? back to front (guessed)\n self.indexed_tiles = {} # {gid: (offsetx, offsety, image}\n self.object_groups = []\n self.properties = {} # {name: value}\n # additional info\n self.pixel_width = 0\n self.pixel_height = 0\n self.named_layers = {} # {name: layer}\n self.named_tile_sets = {} # {name: tile_set}\n self.map_file_name = \"\"\n self._image_loader = None",
"def __init__(self):\n# This is the top container for all data. The gid is the global id (for a image).\n# Before calling convert most of the values are strings. Some additional\n# values are also calculated, see convert() for details. After calling\n# convert, most values are integers or floats where appropriat.\n # set through parser\n self.orientation = None\n self.tileheight = 0\n self.tilewidth = 0\n self.width = 0\n self.height = 0\n self.version = 0\n self.tile_sets = [] # TileSet\n self.layers = [] # WorldTileLayer <- what order? back to front (guessed)\n self.indexed_tiles = {} # {gid: (offsetx, offsety, image}\n self.object_groups = []\n self.properties = {} # {name: value}\n # additional info\n self.pixel_width = 0\n self.pixel_height = 0\n self.named_layers = {} # {name: layer}\n self.named_tile_sets = {} # {name: tile_set}\n self.map_file_name = \"\"\n self._image_loader = None",
"def process_tile(tile):\n global base_kwds, resampling, src\n # Get the bounds of the tile.\n ulx, uly = mercantile.xy(\n *mercantile.ul(tile.x, tile.y, tile.z))\n lrx, lry = mercantile.xy(\n *mercantile.ul(tile.x + 1, tile.y + 1, tile.z))\n\n kwds = base_kwds.copy()\n kwds['transform'] = from_bounds(ulx, lry, lrx, uly, 256, 256)\n src_nodata = kwds.pop('src_nodata', None)\n dst_nodata = kwds.pop('dst_nodata', None)\n\n with rasterio.open('/vsimem/tileimg', 'w', **kwds) as tmp:\n reproject(rasterio.band(src, src.indexes),\n rasterio.band(tmp, tmp.indexes),\n src_nodata=src_nodata,\n dst_nodata=dst_nodata,\n num_threads=1,\n resampling=resampling)\n\n data = bytearray(virtual_file_to_buffer('/vsimem/tileimg'))\n\n # Workaround for https://bugs.python.org/issue23349.\n if sys.version_info[0] == 2 and sys.version_info[2] < 10:\n # Check for backported bug fix before re-ordering\n\tif kwds['driver'] == 'PNG' and data[0:8] == png_header:\n # Properly constructed PNG, no need to re-order bytes\n pass\n\telif kwds['driver'] == 'JPEG' and data[0:4] == jpeg_header:\n # Properly constructed JPEG, no need to re-order bytes\n pass\n\telse:\n data[:] = data[-1:] + data[:-1]\n\n return tile, data",
"def parse_raster_layer(self):\n try:\n # Clean previous parse log\n self.log(\n 'Started parsing raster file',\n reset=True,\n status=self.rasterlayer.parsestatus.DOWNLOADING_FILE\n )\n\n # Download, unzip and open raster file\n self.get_raster_file()\n self.open_raster_file()\n\n # Remove existing tiles for this layer before loading new ones\n self.rasterlayer.rastertile_set.all().delete()\n\n # Transform raster to global srid\n if self.dataset.srs.srid == WEB_MERCATOR_SRID:\n self.log('Dataset already in SRID {0}, skipping transform'.format(WEB_MERCATOR_SRID))\n else:\n self.log(\n 'Transforming raster to SRID {0}'.format(WEB_MERCATOR_SRID),\n status=self.rasterlayer.parsestatus.REPROJECTING_RASTER\n )\n self.dataset = self.dataset.transform(WEB_MERCATOR_SRID)\n\n # Compute max zoom at the web mercator projection\n self.max_zoom = tiler.closest_zoomlevel(\n abs(self.dataset.scale.x)\n )\n\n # Store max zoom level in metadata\n self.rasterlayer.metadata.max_zoom = self.max_zoom\n self.rasterlayer.metadata.save()\n\n # Reduce max zoom by one if zoomdown flag was disabled\n if not self.zoomdown:\n self.max_zoom -= 1\n\n self.log(\n 'Started creating tiles',\n status=self.rasterlayer.parsestatus.CREATING_TILES\n )\n\n # Loop through all lower zoom levels and create tiles to\n # setup TMS aligned tiles in world mercator\n for iz in range(self.max_zoom + 1):\n self.create_tiles(iz)\n\n self.drop_empty_rasters()\n\n # Send signal for end of parsing\n rasterlayers_parser_ended.send(sender=self.rasterlayer.__class__, instance=self.rasterlayer)\n\n # Log success of parsing\n self.log(\n 'Successfully finished parsing raster',\n status=self.rasterlayer.parsestatus.FINISHED\n )\n except:\n self.log(\n traceback.format_exc(),\n status=self.rasterlayer.parsestatus.FAILED\n )\n raise\n finally:\n self.close_raster_file()\n shutil.rmtree(self.tmpdir)",
"def initialize_dataset_rasters(self, i, levelsAndBBoxes, localYResolution):\n # parse the original graph data, map edge weights to nodes\n heatmap = heatmap_setup([graphFilenames[i]], [edgeHeatFilenames[i]])[0]\n gc.collect()\n\n # create the raster for each zoom level\n for index, levelAndBounds in enumerate(reversed(levelsAndBBoxes)):\n if index > 0:\n hm = hmRaster # recursively build from previous level\n else:\n hm = heatmap\n (level, minLon, minLat, maxLon, maxLat) = levelAndBounds\n print \"Creating the raster for level\", level\n # compute step size from local scope (js map at zoom level)\n bbox = minLon, minLat, maxLon, maxLat\n latFraction = (maxLat - minLat) / (localYResolution - 1.)\n lonFraction = compute_longitude_stepsize(bbox, latFraction)\n\n # transform to global scope\n lonStart, latStart, lonEnd, latEnd = heatmap.leftBottomRightTop\n lonStart = lonStart - 0.5 * lonFraction\n latStart = latStart - 0.5 * latFraction\n lonEnd = lonEnd + 0.5 * lonFraction\n latEnd = latEnd + 0.5 * latFraction\n\n globalYResolution = math.ceil((latEnd - latStart) / latFraction)\n globalXResolution = math.ceil((lonEnd - lonStart) / lonFraction)\n rasterData, latFrac = hm.rasterize(heatmap.leftBottomRightTop,\n (globalXResolution,\n globalYResolution))\n hmRaster = HeatmapFactory.construct_from_nparray(rasterData)\n hmRaster.latFraction = latFrac\n self.rasterHeatmaps[i][normalize_zoomlvl(level)] = hmRaster",
"def to_xyz_tiles(\n self, root: str, tile_size: int, zoom_levels: list, driver=\"GTiff\", **kwargs\n ):\n mName = os.path.normpath(os.path.basename(root))\n\n def create_folder(path):\n if not os.path.exists(path):\n os.makedirs(path)\n\n def tile_window(shape, px):\n \"\"\"Yield (left, upper, width, height).\"\"\"\n nr, nc = shape\n lu = product(range(0, nc, px), range(0, nr, px))\n\n ## create the window\n for l, u in lu:\n h = min(px, nr - u)\n w = min(px, nc - l)\n yield (l, u, w, h)\n\n vrt_fn = None\n prev = 0\n nodata = self.nodata\n obj = self._obj.copy()\n zls = {}\n for zl in zoom_levels:\n diff = zl - prev\n pxzl = tile_size * (2 ** (diff))\n\n # read data from previous zoomlevel\n if vrt_fn is not None:\n obj = xr.open_dataarray(vrt_fn, engine=\"rasterio\").squeeze(\n \"band\", drop=True\n )\n x_dim, y_dim = obj.raster.x_dim, obj.raster.y_dim\n obj = obj.chunk({x_dim: pxzl, y_dim: pxzl})\n dst_res = abs(obj.raster.res[-1]) * (2 ** (diff))\n\n if pxzl > min(obj.shape):\n logger.warning(\n f\"Tiles at zoomlevel {zl} smaller than tile_size {tile_size}\"\n )\n\n # Write the raster paths to a text file\n sd = join(root, f\"{zl}\")\n create_folder(sd)\n txt_path = join(sd, \"filelist.txt\")\n file = open(txt_path, \"w\")\n\n for l, u, w, h in tile_window(obj.shape, pxzl):\n col = int(np.ceil(l / pxzl))\n row = int(np.ceil(u / pxzl))\n ssd = join(sd, f\"{col}\")\n\n create_folder(ssd)\n\n # create temp tile\n temp = obj[u : u + h, l : l + w]\n if zl != 0:\n temp = temp.coarsen(\n {x_dim: 2**diff, y_dim: 2**diff}, boundary=\"pad\"\n ).mean()\n temp.raster.set_nodata(nodata)\n\n if driver == \"netcdf4\":\n path = join(ssd, f\"{row}.nc\")\n temp = temp.raster.gdal_compliant()\n temp.to_netcdf(path, engine=\"netcdf4\", **kwargs)\n elif driver in gis_utils.GDAL_EXT_CODE_MAP:\n ext = gis_utils.GDAL_EXT_CODE_MAP.get(driver)\n path = join(ssd, f\"{row}.{ext}\")\n temp.raster.to_raster(path, driver=driver, **kwargs)\n else:\n raise ValueError(f\"Unkown file driver {driver}\")\n\n file.write(f\"{path}\\n\")\n\n del temp\n\n file.close()\n # Create a vrt using GDAL\n vrt_fn = join(root, f\"{mName}_zl{zl}.vrt\")\n gis_utils.create_vrt(vrt_fn, file_list_path=txt_path)\n prev = zl\n zls.update({zl: float(dst_res)})\n del obj\n\n # Write a quick data catalog yaml\n yml = {\n \"crs\": self.crs.to_epsg(),\n \"data_type\": \"RasterDataset\",\n \"driver\": \"raster\",\n \"path\": f\"{mName}_zl{{zoom_level}}.vrt\",\n \"zoom_levels\": zls,\n }\n with open(join(root, f\"{mName}.yml\"), \"w\") as f:\n yaml.dump({mName: yml}, f, default_flow_style=False, sort_keys=False)",
"def rasterize_layer(src_vector, rows, cols, geo_transform, use_attribute, all_touched=False, no_data_value=0):\n # Open your shapefile\n assert type(src_vector) is gpd.GeoDataFrame, \"src_vector should be GeoDataFrame type.\"\n assert use_attribute in src_vector.columns, \"attribute not exists in src_vector.\"\n gdaldtype = tgp.npdtype_to_gdaldtype(src_vector[use_attribute].dtype)\n # projection = src_vector.crs.to_wkt() if src_vector.crs is not None else None\n projection = pyproj.CRS(src_vector.crs).to_wkt() if src_vector.crs is not None else None\n src_shp_ds = ogr.Open(src_vector.to_json())\n src_shp_layer = src_shp_ds.GetLayer()\n\n # Create the destination raster data source\n ds = tgp.write_gdal_ds(bands=1, cols=cols, rows=rows, geo_transform=geo_transform, gdaldtype=gdaldtype, no_data_value=no_data_value)\n\n # set it to the attribute that contains the relevant unique\n options = [\"ATTRIBUTE=\"+use_attribute]\n if all_touched:\n options.append('ALL_TOUCHED=TRUE')\n gdal.RasterizeLayer(ds, [1], src_shp_layer, options=options) # target_ds, band_list, source_layer, options = options\n\n data = ds.GetRasterBand(1).ReadAsArray()\n raster = tgp.Raster(data, geo_transform, projection, gdaldtype, no_data_value)\n return raster"
] | [
"0.6464894",
"0.60362566",
"0.57705593",
"0.57181996",
"0.5690948",
"0.5690948",
"0.56545186",
"0.5653969",
"0.5625701",
"0.55724025",
"0.5562426",
"0.55392987",
"0.54787064",
"0.5474159",
"0.546856",
"0.5447885",
"0.5437408",
"0.54057693",
"0.5390586",
"0.5384432",
"0.53807014",
"0.53670007",
"0.53472894",
"0.53463924",
"0.53463924",
"0.531059",
"0.52941",
"0.5291763",
"0.52888596",
"0.5234576"
] | 0.6133304 | 1 |
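Several of the negatives in the list above are near-duplicate variants of `tile_raster_images`; they all share the same layout arithmetic. As a quick illustration (a standalone sketch, NumPy only, not taken from any of the rows above), the canvas size follows the `(img + spacing) * tiles - spacing` rule and each tile is pasted at a multiple of `(H + Hs, W + Ws)`:

```python
import numpy as np

def tile_layout(X, img_shape, tile_shape, tile_spacing=(1, 1)):
    # Canvas shape: (H + Hs) * rows - Hs  by  (W + Ws) * cols - Ws,
    # the same out_shape rule used by the tile_raster_images variants above.
    H, W = img_shape
    Hs, Ws = tile_spacing
    out = np.zeros(((H + Hs) * tile_shape[0] - Hs,
                    (W + Ws) * tile_shape[1] - Ws), dtype=X.dtype)
    for r in range(tile_shape[0]):
        for c in range(tile_shape[1]):
            k = r * tile_shape[1] + c
            if k >= X.shape[0]:
                continue  # fewer images than grid cells: leave the slot empty
            out[r * (H + Hs): r * (H + Hs) + H,
                c * (W + Ws): c * (W + Ws) + W] = X[k].reshape(img_shape)
    return out

# Six random 8x8 tiles on a 2x3 grid -> canvas of shape (17, 26).
canvas = tile_layout(np.random.rand(6, 64), (8, 8), (2, 3))
print(canvas.shape)
```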
Retrieve a PyUSB device for the Luxafor Flag. Will lazy load the device as necessary. | def get_device(l):
if not l.device:
l.device = find_device()
setup_device(l.device)
return l.device | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_device():\n device = usb.core.find(\n idVendor=LuxaforFlag.DEVICE_VENDOR_ID,\n idProduct=LuxaforFlag.DEVICE_PRODUCT_ID\n )\n return device",
"def get_device(cls, devdesc: UsbDeviceDescriptor) -> UsbDevice:\n cls.Lock.acquire()\n try:\n if devdesc.index or devdesc.sn or devdesc.description:\n dev = None\n if not devdesc.vid:\n raise ValueError('Vendor identifier is required')\n devs = cls._find_devices(devdesc.vid, devdesc.pid)\n if devdesc.description:\n devs = [dev for dev in devs if\n UsbTools.get_string(dev, dev.iProduct) ==\n devdesc.description]\n if devdesc.sn:\n devs = [dev for dev in devs if\n UsbTools.get_string(dev, dev.iSerialNumber) ==\n devdesc.sn]\n if devdesc.bus is not None and devdesc.address is not None:\n devs = [dev for dev in devs if\n (devdesc.bus == dev.bus and\n devdesc.address == dev.address)]\n if isinstance(devs, set):\n # there is no guarantee the same index with lead to the\n # same device. Indexing should be reworked\n devs = list(devs)\n try:\n dev = devs[devdesc.index or 0]\n except IndexError as exc:\n raise IOError(\"No such device\") from exc\n else:\n devs = cls._find_devices(devdesc.vid, devdesc.pid)\n dev = list(devs)[0] if devs else None\n if not dev:\n raise IOError('Device not found')\n try:\n devkey = (dev.bus, dev.address, devdesc.vid, devdesc.pid)\n if None in devkey[0:2]:\n raise AttributeError('USB backend does not support bus '\n 'enumeration')\n except AttributeError:\n devkey = (devdesc.vid, devdesc.pid)\n if devkey not in cls.Devices:\n # only change the active configuration if the active one is\n # not the first. This allows other libusb sessions running\n # with the same device to run seamlessly.\n try:\n config = dev.get_active_configuration()\n setconf = config.bConfigurationValue != 1\n except USBError:\n setconf = True\n if setconf:\n try:\n dev.set_configuration()\n except USBError:\n pass\n cls.Devices[devkey] = [dev, 1]\n else:\n cls.Devices[devkey][1] += 1\n return cls.Devices[devkey][0]\n finally:\n cls.Lock.release()",
"def get_usb_device(self, nIndex):\n\t\treturn handle_to_object(call_sdk_function('PrlVmCfg_GetUsbDevice', self.handle, nIndex))",
"def get_dev(di_file=default_di_file, serial_num=None, bus_addr=None, VID=default_VID, PID=default_PID, timeout=10):\n\n dev=fx3.get_dev(di_file=di_file, serial_num=serial_num, bus_addr=bus_addr, VID=VID, PID=PID, timeout=timeout)\n return UXN1330(dev)",
"def _configure_device():\n vendor_id = 0x04D8 # These ids are microchip's libusb based device\n product_id = 0x0204 # ids\n dev = usb.core.find(idVendor=vendor_id, idProduct = product_id)\n try:\n dev.set_configuration()\n return dev\n except:\n return None",
"def get_usb_dev(self, nIndex):\n\t\treturn handle_to_object(call_sdk_function('PrlSrvCfg_GetUsbDev', self.handle, nIndex))",
"def find(vid, pid):\n dev = usb.core.find(idVendor=vid, idProduct=pid)\n if dev is None:\n raise NoDeviceError(\"No device with vendor %s and product %s\"%(vid, pid))\n\n if dev.is_kernel_driver_active(0):\n dev.detach_kernel_driver(0)\n\n return PyUSBDevice(dev)",
"def udev(self):\n return self._udev",
"def _lsusbv_on_device(bus_id, dev_id):\n _, raw_output = cmd_helper.GetCmdStatusAndOutputWithTimeout(\n ['lsusb', '-v', '-s', '%s:%s' % (bus_id, dev_id)], timeout=10)\n\n device = {'bus': bus_id, 'device': dev_id}\n depth_stack = [device]\n\n # TODO(jbudorick): Add documentation for parsing.\n for line in raw_output.splitlines():\n # Ignore blank lines.\n if not line:\n continue\n # Filter out error mesage about opening device.\n if _COULDNT_OPEN_ERROR_RE.match(line):\n continue\n # Find start of device information.\n m = _LSUSB_BUS_DEVICE_RE.match(line)\n if m:\n if m.group(1) != bus_id:\n logging.warning(\n 'Expected bus_id value: %r, seen %r', bus_id, m.group(1))\n if m.group(2) != dev_id:\n logging.warning(\n 'Expected dev_id value: %r, seen %r', dev_id, m.group(2))\n device['desc'] = m.group(3)\n continue\n\n indent_match = _INDENTATION_RE.match(line)\n if not indent_match:\n continue\n\n depth = 1 + len(indent_match.group(1)) / 2\n if depth > len(depth_stack):\n logging.error(\n 'lsusb parsing error: unexpected indentation: \"%s\"', line)\n continue\n\n while depth < len(depth_stack):\n depth_stack.pop()\n\n cur = depth_stack[-1]\n\n m = _LSUSB_GROUP_RE.match(line)\n if m:\n new_group = {}\n cur[m.group(1)] = new_group\n depth_stack.append(new_group)\n continue\n\n m = _LSUSB_ENTRY_RE.match(line)\n if m:\n new_entry = {\n '_value': m.group(2),\n '_desc': m.group(3),\n }\n cur[m.group(1)] = new_entry\n depth_stack.append(new_entry)\n continue\n\n logging.error('lsusb parsing error: unrecognized line: \"%s\"', line)\n\n return device",
"def fusion_api_get_power_device(self, uri=None, param='', api=None, headers=None):\n return self.pd.get(uri=uri, api=api, headers=headers, param=param)",
"def _get_backend_device(cls, device: UsbDevice) -> Any:\n try:\n #pylint: disable-msg=protected-access\n # need to access private member _ctx of PyUSB device\n # (resource manager) until PyUSB #302 is addressed\n return device._ctx.dev\n #pylint: disable-msg=protected-access\n except AttributeError:\n return None",
"def lsusb():\n _, lsusb_list_output = cmd_helper.GetCmdStatusAndOutputWithTimeout(\n ['lsusb'], timeout=10)\n devices = []\n for line in lsusb_list_output.splitlines():\n m = _LSUSB_BUS_DEVICE_RE.match(line)\n if m:\n bus_num = m.group(1)\n dev_num = m.group(2)\n try:\n devices.append(_lsusbv_on_device(bus_num, dev_num))\n except cmd_helper.TimeoutError:\n # Will be blacklisted if it is in expected device file, but times out.\n logging.info('lsusb -v %s:%s timed out.', bus_num, dev_num)\n return devices",
"def finddevice():\n\n return next((device for device in [\"xpu\"] if hasattr(torch, device) and getattr(torch, device).is_available()), None)",
"def get_device(self, field):\n return self._devices[field]",
"def _get_device(self, dev_id):\n tuya = self.hass.data[DOMAIN][TUYA_DATA]\n return tuya.get_device_by_id(dev_id)",
"def init():\n try:\n h = hid.device()\n h.open(USB_VID, USB_PID)\n h.set_nonblocking(1)\n except IOError as ex:\n print('ERROR: could not establish connection to device')\n print(ex)\n return None\n return h",
"def acquire(self):\n clf = nfc.ContactlessFrontend()\n\n if clf.open('usb:{bus}:{dev}'.format(bus = self.usb_bus,\n dev = self.usb_dev)):\n print(\"dev {0} acquired successfully\".format(self.usb_target))\n self.hw_connected = True\n return True\n\n print(\"dev {0} not found\".format(self.usb_target))\n return False",
"def setup_usb(self):\n global DEVICE\n global epBulkWriter\n global epBulkReader\n global VID\n global PID\n\n DEVICE = usb.core.find(idVendor=0x2AB9,idProduct=0xFFFF)\n if DEVICE is None:#If not a LVPM, look for an HVPM.\n DEVICE = usb.core.find(idVendor=0x04d8,idProduct=0x000b)\n VID = '0x4d8'\n PID = '0xb'\n if \"Linux\" == platform.system():\n try:\n DEVICE.detach_kernel_driver(0)\n except:\n pass # already unregistered\n DEVICE.set_configuration()\n\n cfg = DEVICE.get_active_configuration()\n intf = cfg[(0,0)]\n\n epBulkWriter = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_OUT)\n epBulkReader = usb.util.find_descriptor(\n intf,\n custom_match = \\\n lambda e: \\\n usb.util.endpoint_direction(e.bEndpointAddress) == \\\n usb.util.ENDPOINT_IN)",
"def find_device(device):\n return usb.core.find(idVendor=device['idVendor'], idProduct=device['idProduct'])",
"async def device_fixture(hass: HomeAssistant, ufp: MockUFPFixture):\n\n await init_entry(hass, ufp, [])\n\n device_registry = dr.async_get(hass)\n\n return list(device_registry.devices.values())[0]",
"def get_device():\n c_dev = ct.c_int(0)\n safe_call(backend.get().af_get_device(ct.pointer(c_dev)))\n return c_dev.value",
"def get_mixer_dev(self):\n\t\treturn call_sdk_function('PrlVmDevSound_GetMixerDev', self.handle)",
"def get_device(self):\n raise NotImplementedError()",
"def device():\n return G.DEVICE",
"def get_device(arn=None):\n pass",
"def find_stick():\n out = subprocess.check_output(\n \"gdbus introspect --system --dest org.freedesktop.UDisks \"\n \"--object-path /org/freedesktop/UDisks/devices --recurse \"\n \"--only-properties\".split())\n devs = zip(*((re.match(r\".* = '?(.*?)'?;\", x).group(1)\n for x in out.splitlines()\n if \"DriveConnectionInterface =\" in x\n or \"DeviceIsPartition =\" in x\n or \"DeviceFile = \" in x),)*3)\n try:\n return next(dev[2] for dev in devs if dev[0] == 'usb'\n and dev[1] == 'true')\n except StopIteration:\n return None",
"def test_usb_device(self):\n candidate = Ftdi.get_identifiers(self.ftdi_url)\n usbdev = UsbTools.get_device(candidate[0])\n i2c = I2cController()\n i2c.configure(usbdev, interface=candidate[1], frequency=100e3)\n eeprom = SerialEepromManager.get_from_controller(i2c, '24AA32A', 0x50)",
"def get_device(self):\n self.connect_button = 1\n self.device_name = self.deviceEntry.text()",
"def device(self):\n hw = self.hw()\n if hw: return hw.device()",
"def get_device(self, dev_id):\n return self.api_request('GET', self.url + '/device/' + str(dev_id), {})"
] | [
"0.69254965",
"0.6485634",
"0.63472474",
"0.6307298",
"0.6286261",
"0.6203117",
"0.61067283",
"0.60893977",
"0.60004014",
"0.59861434",
"0.59207916",
"0.59178483",
"0.5893291",
"0.58582234",
"0.58397084",
"0.5817381",
"0.5815132",
"0.5781035",
"0.5756109",
"0.5748684",
"0.5712419",
"0.5712206",
"0.56547666",
"0.5641166",
"0.56144345",
"0.56047535",
"0.5548086",
"0.5547943",
"0.5523503",
"0.55024916"
] | 0.65442044 | 1 |
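The record above pairs the lazy-load docstring with a `get_device` that calls `find_device()` and `setup_device()`. Below is a minimal self-contained sketch of the same lazy-open pattern with pyusb; the vendor/product IDs and the detach/configure steps are illustrative assumptions rather than values taken from the row, while `usb.core.find`, `is_kernel_driver_active`, `detach_kernel_driver`, and `set_configuration` are standard pyusb calls:

```python
import usb.core

# Assumed IDs for illustration only -- substitute the real Luxafor Flag values.
VENDOR_ID = 0x04D8
PRODUCT_ID = 0xF372

_device = None  # cached handle, mirroring the lazy-load pattern in the record above

def get_device():
    """Return a cached pyusb handle, opening and configuring it on first use."""
    global _device
    if _device is None:
        dev = usb.core.find(idVendor=VENDOR_ID, idProduct=PRODUCT_ID)
        if dev is None:
            raise IOError("Luxafor Flag not found")
        # On Linux a kernel HID driver may already own interface 0; release it first.
        if dev.is_kernel_driver_active(0):
            dev.detach_kernel_driver(0)
        dev.set_configuration()
        _device = dev
    return _device
```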
Send values to the device. Expects the values to be a List of command byte codes. Refer to the individual commands for more information on the specific command codes. | def write(l, values):
l.get_device().write(1, values)
# Sometimes the flag simply ignores the command. Unknown if this
# is an issue with PyUSB or the flag itself. But sending the
# command again works a treat.
l.get_device().write(1, values) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __send(self, cmd_val, data):\n # Proof the input\n if cmd_val not in command.values():\n raise ValueError(\"{}: the provided command value {} is not valid.\".format(self.sensor_name, cmd_val))\n if not isinstance(data, bytearray):\n raise TypeError(\"{}: command data must be of type byte array.\".format(self.sensor_name))\n\n # Initialise the command bytes array\n bytes_to_send = bytearray()\n bytes_to_send.append(self.__SerialStart)\n bytes_to_send.append(self.__SendByte)\n bytes_to_send.append(cmd_val)\n\n # Add data and set zero to the remainder\n for i in range(0, 12):\n if i < len(data):\n bytes_to_send.append(data[i])\n else:\n bytes_to_send.append(0)\n\n # Last two bytes before the checksum is the CommandTerminator\n # TODO : rename command terminator to sensor ID\n bytes_to_send.append(self.__CommandTerminator)\n bytes_to_send.append(self.__CommandTerminator)\n\n # Calculate and append the checksum\n checksum = self.__checksum_make(bytes_to_send)\n bytes_to_send.append(checksum % 256)\n\n # Append the terminator for serial message\n bytes_to_send.append(self.__SerialEnd)\n\n self.logger.info(\"{}: sending {} {} command with {} message.\".format(self.sensor_name, command_mode.keys()[command_mode.values().index(bytes_to_send[3])], command.keys()[command.values().index(cmd_val)], \":\".join(\"%02x\" % b for b in bytes_to_send)))\n\n if len(bytes_to_send) != self.__CommandLength:\n raise IOError(\"{}: sent {} bytes, expected {}.\".format(self.sensor_name, len(bytes_to_send), self.__CommandLength))\n\n # Send the command\n written_bytes = self.device.write(bytes_to_send)\n self.device.flush()\n\n if written_bytes != len(bytes_to_send):\n raise IOError(\"{}: not all bytes written.\".format(self.sensor_name))\n\n # Check the received values\n received = self.__response(cmd_val)\n\n if len(received) != self.__ResponseLength:\n raise IOError(\"{}: received {} bytes, expected {}.\".format(self.sensor_name, len(received), self.__ResponseLength))\n\n if len(received) == 0:\n raise IOError(\"{}: sensor is not responding.\".format(self.sensor_name))\n\n # When no command or command is request command,\n # second byte has to be ReceiveByte\n if (cmd_val is None or cmd_val == command[\"Request\"]) and received[1] != self.__ReceiveByte:\n raise ValueError(\"{}: expected to receive value {:#X} on a value request. Received: \\\"{}\\\".\".format(self.sensor_name, self.__ReceiveByte, received[1]))\n\n # Check, if the response is response of the command, except request command\n if cmd_val != command[\"Request\"]:\n if received[2] != cmd_val:\n raise ValueError(\"{}: sensor response does not belong to the command sent before.\".format(self.sensor_name))\n else:\n return received[3: -2]\n else:\n return received",
"def send_command(self, value):\n self._cbmif.send_dlm(value)",
"def _send_multiple(self, what, values, address, **kwargs):\n\n raise NotImplementedError('Multiple sending is not yet implemented for Modbus')",
"def _send(self, what, value, address='localhost:502', **kwargs):\n\n colon_index = address.find(':')\n IP = '-i {} '.format(address[:colon_index])\n PORT = '-p {} '.format(address[colon_index+1:])\n # NOTE: following data is validated by client script\n MODE = '-m {} '.format('w')\n TYPE = '-t {} '.format(what[0])\n OFFSET = '-o {} '.format(what[1]) # NOTE: 0-based\n\n # NOTE: value is a list of bools or ints when write multiple times\n if 'count' in kwargs and kwargs['count'] > 1:\n count = kwargs['count']\n COUNT = '--count {} '.format(count)\n else:\n count = 1\n COUNT = '--count {} '.format(count)\n\n # NOTE: value is a int when writing to a register\n if what[0] == 'HR':\n if count == 1:\n VALUE = '-r {} '.format(value)\n else:\n VALUE = '-r '\n for v in value:\n VALUE += str(v)\n VALUE += ' '\n\n # NOTE: value is a bool when writing to a coil\n elif what[0] == 'CO':\n if count == 1:\n if value == True:\n VALUE = '-c {} '.format(1)\n else:\n VALUE = '-c {} '.format(0)\n else:\n VALUE = '-c '\n for v in value:\n if v == True:\n VALUE += str(1)\n else:\n VALUE += str(0)\n VALUE += ' '\n else:\n raise ValueError('IR and DI are read only data.')\n\n\n cmd = shlex.split(\n self._client_cmd +\n IP +\n PORT +\n MODE +\n TYPE +\n OFFSET +\n COUNT +\n VALUE\n )\n # print 'DEBUG modbus_send cmd shlex list: ', cmd\n\n # TODO: pipe stdout and return the sent value\n try:\n client = subprocess.Popen(cmd, shell=False)\n client.wait()\n\n except Exception as error:\n print('ERROR modbus _send: ', error)",
"def _send_multiple(self, what, values, address, **kwargs):\n\n tag_string = ''\n tag_string = EnipProtocol._tuple_to_cpppo_tag_multiple(what, values)\n\n cmd = shlex.split(\n self._client_cmd +\n '--log ' + self._client_log +\n '--address ' + address +\n ' ' + tag_string\n )\n\n try:\n client = subprocess.Popen(cmd, shell=False)\n client.wait()\n\n except Exception as error:\n print('ERROR enip _send multiple: '), error",
"def execute(self, devices, command_bytes):",
"def sendCommand(self, data):\n #make sure data has an even number of elements\n if(len(data) % 2 == 1):\n data.append(0)\n\n #Initiate message as an empty list\n message = []\n\n #Fill message by combining two bytes in one register\n for i in range(0, len(data)/2):\n message.append((data[2*i] << 8) + data[2*i+1])\n\n #To do!: Implement try/except\n with self.lock:\n self.client.write_registers(0, message)",
"def _send_multiple(self, what, values, address):\n\n print('_send_multiple: please override me.')",
"def _serial_write(self, values_to_write):\n if self.verbose:\n self.log(\"Writing 0x{:x} to serial port...\".format(values_to_write))\n if type(values_to_write) is not list:\n self.serial.write(bytearray([values_to_write]))\n else:\n self.serial.write(bytearray(values_to_write))",
"def _send_command(self, data):\n # make sure data has an even number of elements\n if(len(data) % 2 == 1):\n data.append(0)\n\n # Initiate message as an empty list\n message = []\n\n # Fill message by combining two bytes in one register\n for i in range(int(len(data)/2)):\n message.append((data[2*i] << 8) + data[2*i+1])\n\n # To do!: Implement try/except\n self.client.write_registers(0x03E8, message, unit=0x0009)",
"def send(self, value):\n pass",
"def sendcmd(self, comd, value):\n tosend = cmd.clientcmd(comd, value)\n self.sendLine(tosend)",
"def __send__(self,val):\n assert(len(val) == 1)\n assert(type(val) == bytes)\n v = int.from_bytes(val,byteorder=\"little\")\n if(self.verbose):\n pc.color_stdout(\"GREEN\")\n print(\">> %s\\t - %s\\t - %d\"% (hex(v),bin(v),v))\n pc.color_stdout(\"RESET\")\n self.port.write(val)",
"def send(self, value, _control=False):\n if not _control:\n self.increment('out')\n for output in self.outputs:\n output.put(value)",
"def send_cmds(self, cmds):\r\n self.socket.sendall(cmds)",
"def send_command(self, commands):\n action_space = self.action_space\n commands = np.clip(commands, action_space.low, action_space.high)\n i = 0\n joint_commands = {}\n for joint in self._used_joints:\n joint_commands[joint] = commands[i]\n i += 1\n\n if self._control_mode == 'position':\n self._set_limb_joint_positions(joint_commands)\n elif self._control_mode == 'velocity':\n self._set_limb_joint_velocities(joint_commands)\n elif self._control_mode == 'effort':\n self._set_limb_joint_torques(joint_commands)\n\n self._set_gripper_position(commands[7])",
"def _spi_write(self, dc, values):\n self._gpio.output(self.cs_pin, 0)\n self._gpio.output(self.dc_pin, dc)\n\n if type(values) is str:\n values = [ord(c) for c in values]\n\n for byte_value in values:\n self._spi_bus.xfer([byte_value])\n\n self._gpio.output(self.cs_pin, 1)",
"def send_command(self, command):\n send_message = \"\"\n for i in command:\n send_message += chr(i)\n #send_message += bytes(i)\n\n for data in send_message:\n self.pymata.transport.write(data)",
"def set_all_values (self, para='V', values=0):\r\n\t\t\r\n\t\tpara = para.upper()\r\n\t\t\r\n\t\tif isinstance(values,list):\r\n\t\t\t# Check length\r\n\t\t\tif len(values) != self.n_chs:\r\n\t\t\t\traise AttributeError(\"Length of values list ({:}) must match total number of channels ({:}).\".format(len(values), self.n_chs))\r\n\t\telse:\r\n\t\t\t# If input is atomic, then set each channel to that\r\n\t\t\tvalues = [values] * self.n_chs\r\n\t\t\r\n\t\tif self.binary_mode:\r\n\t\t\tif para in ['V','VMAX']:\r\n\t\t\t\tfulls = self.v_fulls\r\n\t\t\telif para in ['I','IMAX']:\r\n\t\t\t\tfulls = self.i_fulls\r\n\t\t\telif para in ['X','XMIN','XMAX']:\r\n\t\t\t\tfulls = self.x_fulls\r\n\t\t\t\r\n\t\t\tif para in ['X','XMIN','XMAX','MODE']:\r\n\t\t\t\t# TODO: A 32-b version of issue_binary_command is not yet implemented\r\n\t\t\t\traise RuntimeError(\"Binary mode X commands not implemented yet. Use binary_mode = False to workaround.\")\r\n\t\t\t\r\n\t\t\t# Convert input to ints\r\n\t\t\tfor i in range(self.n_chs):\r\n\t\t\t\tvalues[i] = int((values[i]/fulls[i])*0xFFFF)\r\n\t\t\t\r\n\t\t\t# Map command name to code\r\n\t\t\tcmd_code = CMD_CODES[para.upper()]\r\n\t\t\t\r\n\t\t\t# Send vectorised outputs to each module\r\n\t\t\ti = 0\r\n\t\t\tfor d in self.chain:\r\n\t\t\t\tn = d['n_chs']\r\n\t\t\t\tself.issue_binary_command(cmd_code, ch=i, RW=0, DEXT=1, value_int = values[i:i+n])\r\n\t\t\t\ti += n\r\n\t\telse:\r\n\t\t\t# Send vectorised outputs to each module\r\n\t\t\ti = 0\r\n\t\t\tfor d in self.chain:\r\n\t\t\t\tn = d['n_chs']\r\n\t\t\t\tself.issue_command(para+'VEC', ch=i, operator='=', value = values[i:i+n])\r\n\t\t\t\ti += n",
"def command( self, value ): # uint8_t\n\t\tself.send(value, mode=0)",
"def send_commands(self, commands=None):\n commands = commands or []\n command_list = {}\n for command in commands:\n command_list[command.id] = {\n 'speed': command.speed, 'direction': command.direction\n }\n data = {'commands': command_list}\n state = self._post(data)\n status = state['status'].lower()\n print(\"status: {}\".format(status))\n if status == 'error':\n print(\"message: {}\".format(state['message']))\n elif status == 'finished':\n print(\"finished! Score: {} Watch result at: {}\".format(state['score'], state['visualization']))\n if 'requests' not in state:\n state['requests'] = []\n for elevator_data in state.get('elevators', []):\n if 'buttons_pressed' not in elevator_data:\n elevator_data['buttons_pressed'] = []\n\n return state",
"def send_OSC_cmd(self, cmd, value):\n self.simulator.relay_OSC_cmd(cmd, value)",
"def command(self, *cmd):\n self._gpio.set_low(self._dc)\n for t in cmd:\n self._spi.write([t])\n #assert(len(cmd) <= 32)\n #self.bus.write_i2c_block_data(self.addr, self.cmd_mode, list(cmd))",
"def write_registers(self, registeraddress, values):\n if not isinstance(values, list):\n raise TypeError('The \"values parameter\" must be a list. Given: {0!r}'.format(values))\n _checkInt(len(values), minvalue=1, description='length of input list')\n # Note: The content of the list is checked at content conversion.\n\n self._genericCommand(16, registeraddress, values, numberOfRegisters=len(values), payloadformat='registers')",
"def sendValue(self, value):\n\n print(f'Sending: {value}\\n')\n self.ser.write(bytes([value]))\n self.ser.write('\\n'.encode(\"ascii\"))\n\n self.ser.reset_input_buffer()\n ser_bytes = self.ser.read(1)\n print(f'Receiving\\nraw data: {ser_bytes}')\n\n\n #decoded_bytes = (ser_bytes.decode(\"ascii\"))\n\n #print(f'Ascii Value: {decoded_bytes}', flush=True)",
"def __send_switching_command(self, device, order, list_of_commands):\n if list_of_commands:\n if list_of_commands[0]:\n command = self.build_command(device, (order, list_of_commands))\n if (\n command\n ): # If something dont work with the building of the command, no None will be send\n self.vcw.write(device, command) # Write new switching\n else:\n command = self.build_command(device, (order, \"\"))\n if (\n command\n ): # If something dont work with the building of the command, no None will be send\n self.vcw.write(device, command) # Write new switching",
"def set_all_values (self, para='V', values=0):\r\n\t\t\r\n\t\tif isinstance(values,list):\r\n\t\t\t# Check length\r\n\t\t\tif len(values) != self.n_chs:\r\n\t\t\t\traise AttributeError(\"Length of values list ({:}) must match total number of channels ({:}).\".format(len(values), self.n_chs))\r\n\t\telse:\r\n\t\t\t# If input is atomic, then set each channel to that\r\n\t\t\tvalues = [values] * self.n_chs\r\n\t\t\r\n\t\tif self.binary_mode:\r\n\t\t\tif para in ['V','VMAX']:\r\n\t\t\t\tfulls = self.v_fulls\r\n\t\t\telif para in ['I','IMAX']:\r\n\t\t\t\tfulls = self.i_fulls\r\n\t\t\t\r\n\t\t\t# Convert input to ints\r\n\t\t\tfor i in range(self.n_chs):\r\n\t\t\t\tvalues[i] = int((values[i]/fulls[i])*0xFFFF)\r\n\t\t\t\r\n\t\t\t# Map command name to code\r\n\t\t\tcmd_code = CMD_CODES[para.upper()]\r\n\t\t\t\r\n\t\t\t# Send vectorised outputs to each module\r\n\t\t\ti = 0\r\n\t\t\tfor d in self.chain:\r\n\t\t\t\tn = d['n_chs']\r\n\t\t\t\tself.issue_binary_command(cmd_code, ch=i, RW=0, DEXT=1, value_int = values[i:i+n])\r\n\t\t\t\ti += n\r\n\t\telse:\r\n\t\t\t# Send vectorised outputs to each module\r\n\t\t\ti = 0\r\n\t\t\tfor d in self.chain:\r\n\t\t\t\tn = d['n_chs']\r\n\t\t\t\tself.issue_command(para+'VEC', ch=i, operator='=', value = values[i:i+n])\r\n\t\t\t\ti += n",
"def __send(self, value, index, min_value, max_value):\n command = int((1500 + 500 * min(max(value, min_value), max_value)) * 4)\n try:\n self._device.ctrl_transfer(0x40, 0x85, command, index)\n except Exception as e:\n return False\n return True",
"def _send(self, what, value, address='localhost:44818', **kwargs):\n\n tag_string = ''\n tag_string = EnipProtocol._tuple_to_cpppo_tag(what, value)\n # print 'DEBUG enip _send tag_string: ', tag_string\n\n cmd = shlex.split(\n self._client_cmd +\n '--log ' + self._client_log +\n '--address ' + address +\n ' ' + tag_string\n )\n # print 'DEBUG enip _send cmd shlex list: ', cmd\n\n # TODO: pipe stdout and return the sent value\n try:\n client = subprocess.Popen(cmd, shell=False)\n client.wait()\n\n except Exception as error:\n print('ERROR enip _send: ', error)",
"def write(self):\n # build up all commands into a single request to increase network perf\n connection = self.connection\n commands = self.commands\n try:\n connection.send_packed_command(connection.pack_commands([c.args for c in commands]))\n except ConnectionError as e:\n for c in commands:\n c.result = e"
] | [
"0.70586413",
"0.69986117",
"0.6902992",
"0.6806956",
"0.677012",
"0.6769976",
"0.6622003",
"0.6621487",
"0.6603113",
"0.658566",
"0.6583125",
"0.6578142",
"0.6574134",
"0.65513706",
"0.6506194",
"0.65046686",
"0.64179957",
"0.6382939",
"0.6382298",
"0.6373489",
"0.63698804",
"0.6345875",
"0.6314687",
"0.63135785",
"0.6300218",
"0.6268363",
"0.62583333",
"0.6250048",
"0.62493473",
"0.62465"
] | 0.71878153 | 0 |
Fade a single LED or multiple LEDs from their current colour to a new colour for the supplied duration. | def do_fade_colour(l, leds, r, g, b, duration):
l._do_multi_led_command(
create_fade_colour_command, leds, r, g, b, duration
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fade_out(self, duration: int = 1):\n original_brightness = self.np.brightness\n\n step_level = 0.01\n sleep_cycle = duration / (original_brightness / step_level)\n\n while self.np.brightness > 0:\n # FIXME :\n # Im not totally sure why, but...\n # self.np.brightness -= step_level\n # causes self.np.brightness of 0.1 to become 0.09000000000000001\n # and i dont feel like figuring out why right now\n self.np.brightness = round(self.np.brightness - step_level, 2)\n self.np.show()\n time.sleep(sleep_cycle)\n\n self.np.fill(OFF)\n self.np.show()\n\n # Reset brightness to original value now that pixels are OFF\n self.np.brightness = original_brightness\n\n return True",
"def flash_red(self, duration=0.2):\n self.pen_color = wx.RED\n self.Refresh(True)\n t = time.time()\n while time.time() - t < duration:\n time.sleep(0.001)\n self.pen_color = wx.WHITE\n self.Refresh(True)",
"def fade(startColor, endColor, steps, interval, strip):\r\n lastUpdate = utime.time() - interval\r\n for i in range(0, steps):\r\n print(\"range step: \", steps)\r\n red = ((startColor[0] * (steps - i)) + (endColor[0] * i)) // steps\r\n green = ((startColor[1] * (steps - i)) + (endColor[1] * i)) // steps\r\n blue = ((startColor[2] * (steps - i)) + (endColor[2] * i)) // steps\r\n \r\n while ((utime.time() - lastUpdate) < interval):\r\n pass\r\n setStrip(strip, (red, green, blue))\r\n lastUpdate = utime.time()",
"def fade(timer):\n global np, LED_FADE_SPEED\n for i in range(np.n):\n np[i] = [\n v - int(LED_FADE_SPEED) if v > int(LED_FADE_SPEED) else 0 for v in np[i]\n ]",
"def _colour_loop(self, colours, seconds=None, milliseconds=None, fade=True):\n colours = self.convert_to_colour_list(colours) #Forces a list of colours into an actual python list\n if len(colours)<2:\n colours.append(\"#000000\") #Blink between black and the specified colour if only one provided\n \n #Start with the first colour immediately:\n if fade:\n self.fade(colours[0])\n else:\n self.set(colours[0])\n step_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=50)\n \n #Do the loop\n i = 1 #We're moving to the second colour now\n total_colours = len(colours)\n while not self._sequence_stop_signal:\n #Resolve our colour\n next_colour = colours[i]\n i = (i+1) % total_colours #ensures we are never asking for more colours than provided\n if fade: #Fading is a blocking process, thus we let the fade loop use up the time\n _latest_colour = self.fade(next_colour, fade_time=step_time, check=False)\n else: #Set is instant, so we need to consume the step time\n _latest_colour = self.set(next_colour, fade=False, check=False)\n self.sleep(step_time/1000) #NB fade uses milliseconds!!\n #Return the latest colour\n return self.sync_channels()",
"def fade(self, r=None, g=None, b=None, hex_value=None, name=None, fade_time=300, check=True):\n return self.set(r, g, b, hex_value, name, fade=fade_time, check=check)",
"def fadeLED( gpio, startVal, stopVal ):\n\t#convert passed values into usable format for pi-blaster (i.e 0 - 1)\n\tRGBstartVal = startVal / 255\n\tRGBstopVal = stopVal / 255\n\t#debug\n\tprint RGBstartVal, startVal, RGBstopVal, stopVal;\n\t#set the current LED values to the start value\n\tcurrentVal = RGBstartVal\n\tif RGBstartVal < RGBstopVal:\n\t\twhile currentVal < RGBstopVal:\n\t\t\tos.system(\"echo \\\"{0}={1}\\\" > /dev/pi-blaster\" .format(gpio,currentVal))\n\t\t\tcurrentVal = currentVal + STEP;\n\t\t\ttime.sleep(FADESPEED)\n\t\t\tprint currentVal\n\telif RGBstartVal > RGBstopVal:\n\t\t while currentVal > RGBstopVal:\n\t\t\tos.system(\"echo \\\"{0}={1}\\\" > /dev/pi-blaster\" .format(gpio,currentVal))\n currentVal = currentVal - STEP;\n time.sleep(FADESPEED)\n print currentVal\n\treturn;",
"def FadeOutputs(box, color, steps=50):\n for output in box:\n output.Fade(color=color, steps=steps)\n time.sleep(steps / (float(box.frequency) / len(box)))",
"def colorEyes(self, color, fade_duration = 0.2):\n\n\t\tif color in self.colors:\n\t\t\tcolor = self.colors[color]\n\n\t\tself.leds.fadeRGB(\"FaceLeds\", color, fade_duration)",
"def colorEyes(self, color, fade_duration = 0.2):\n\n\t\tif color in self.colors:\n\t\t\tcolor = colors[color]\n\n\t\tself.leds.fadeRGB(\"FaceLeds\", color, fade_duration)",
"def fadeToRGB(self, color: tuple):\n r, g, b = color\n self._sendi2c('c', [r, g, b])",
"def FadeOut(self):\r\n\r\n while 1:\r\n self._alpha_amount -= 10\r\n if self._alpha_amount <= 0:\r\n self._alpha_amount = 255\r\n return\r\n\r\n self.SetTransparent(self._alpha_amount)\r\n wx.SafeYield()\r\n wx.MilliSleep(15)",
"def setFadeSpeed(self,speed):\n speed = clamp(speed, 1, 255)\n self._sendi2c('f', [speed])",
"async def flash(self, light: Light, num_times: int, delay=0.15) -> None:\n for _ in range(num_times):\n self.set_lights_off()\n await sleep(delay)\n self.set_lights(light)\n await sleep(delay)",
"def fade_to_rgb(self, r=0, g=0, b=0, fade=300, check=True):\n #When we're doing a fade, the pin values may have changed... check first!!\n if check:\n self.sync_channels()\n \n #Now we'll have the correct init values!!!\n init_r = self.red\n init_g = self.green\n init_b = self.blue\n gap_r = r - init_r\n gap_g = g - init_g\n gap_b = b - init_b\n n_steps = int(float(fade)/20.0) #50Hz = 20 milliseconds\n \n for step in xrange(0, n_steps):\n fractional_progress = float(step)/n_steps\n cur_r = init_r + (gap_r*fractional_progress)\n cur_g = init_g + (gap_g*fractional_progress)\n cur_b = init_b + (gap_b*fractional_progress)\n cur_col = self.set_rgb(cur_r,cur_g,cur_b)\n sleep(0.02) #20ms\n if self._sequence and self._sequence_stop_signal: #Instantly escape the fade if changing routine\n break \n \n #And fix it to the target in case float calcs put us off a bit\n return self.set_rgb(r,g,b)",
"def blink(self, duration: int=1, intensity: int=0xff):\n # Turn LED on\n self.intensity(max(0, min(intensity, 0xff)))\n # Turn LED off (after a delay)\n upyt.sched.loop.call_later_ms(duration, self.off)",
"def cycle_colors(colors=(\"red\", \"green\", \"blue\"), delay_secs=1):\n set_color('black') # Start with all LED's \"off\"\n\n for c in colors:\n print(\"LEDs are all \" + c)\n set_color(c)\n update()\n sleep(delay_secs)",
"def transition_to(self,r,g,b,duration=1,updatetime=0.001):\n steps=duration/updatetime\n start_r,start_g,start_b=self.r,self.g,self.b\n step_r=(r-start_r)/steps\n step_g=(g-start_g)/steps\n step_b=(b-start_b)/steps\n for step in range(int(steps)):\n time.sleep(updatetime)\n new_r=start_r+(step*step_r)\n new_g=start_g+(step*step_g)\n new_b=start_b+(step*step_b)\n self.set_rgb_color(new_r,new_g,new_b)\n \n self.set_rgb_color(r,g,b)",
"def ledFlash(strip, color, t = 1):\r\n utime.sleep(t)\r\n setStrip(strip, color)\r\n utime.sleep(t)\r\n setStrip(strip, LED_COLOR_OFF)",
"def fade_display():\n for col in range(5):\n for row in range(5):\n brightness = microbit.display.get_pixel(col, row)\n # reduce by one, but make sure it's still in 0 to 9\n brightness = clamp(MIN_BRIGHTNESS, brightness - 1, MAX_BRIGHTNESS)\n microbit.display.set_pixel(col, row, brightness)",
"def RedLED(firstPixel, secondPixel):\n led = LPD8806.strand() \n count1 = 250\n count2 = 0\n while count1 != 0:\n \"\"\" Fade green off \"\"\"\n led.set(firstPixel, 0, count1, 0)\n led.set(secondPixel, 0, count1, 0)\n led.update()\n count1 -= 25\n while count2 != 250:\n \"\"\" Fade red on \"\"\"\n led.set(firstPixel, count2, 0, 0)\n led.set(secondPixel, count2, 0, 0)\n led.update()\n count2 += 25\n return",
"def fadeout(time):\r\n check_mixer()\r\n sdl.Mix_FadeOutChannel(-1, time)",
"def jump(self, colours, seconds=None, milliseconds=None):\n return self.run_sequence(self._colour_loop, colours=colours, seconds=seconds, milliseconds=milliseconds, fade=False)",
"def flash(groups, colour, period):\n\n if groups == [1]:\n if period <= 2000:\n raise ValueError(\n \"The cycle period for a flash must be longer than 2 seconds\"\n )\n\n return [\n (colour, 1000),\n ('Off', period-1000)\n ]\n\n return light_sequence(groups, colour, 'Off', period, 500, 1000)",
"def set_fade(self, interval: Union[int, float]) -> None:\n\n fade_color = (\n self.theme_cls.primary_color\n if not self.fade_color\n else self.fade_color\n )\n height_segment = (\n self.fade_height if self.fade_height else dp(100)\n ) // self._height_segment\n alpha = 1.1\n\n with self.canvas:\n for i in range(self._height_segment):\n alpha -= 0.1\n\n Color(rgba=(fade_color[:-1] + [round(alpha, 1)]))\n rectangle_top = (\n Rectangle(\n pos=(self.x, self.height - (i * height_segment)),\n size=(self.width, height_segment),\n )\n if self.edge_top\n else None\n )\n rectangle_bottom = (\n Rectangle(\n pos=(self.x, i * height_segment),\n size=(self.width, height_segment),\n )\n if self.edge_bottom\n else None\n )\n # How I hate lambda functions because of their length :(\n # But I don’t want to call the arguments by short,\n # incomprehensible names 'a', 'b', 'c'.\n self.bind(\n pos=lambda instance_fadind_edge_effect, window_size, rectangle_top=rectangle_top, rectangle_bottom=rectangle_bottom, index=i: self.update_canvas(\n instance_fadind_edge_effect,\n window_size,\n rectangle_top,\n rectangle_bottom,\n index,\n ),\n size=lambda instance_fadind_edge_effect, window_size, rectangle_top=rectangle_top, rectangle_bottom=rectangle_bottom, index=i: self.update_canvas(\n instance_fadind_edge_effect,\n window_size,\n rectangle_top,\n rectangle_bottom,\n index,\n ),\n )\n self.update_canvas(\n self, self.size, rectangle_top, rectangle_bottom, i\n )",
"def fadeOut(self):\n clock = pygame.time.Clock()\n blackRect = pygame.Surface(self.screen.get_size())\n blackRect.set_alpha(100)\n blackRect.fill((0, 0, 0))\n # Continuously draw a transparent black rectangle over the screen\n # to create a fadeout effect\n for i in range(0, 5):\n clock.tick(15)\n self.screen.blit(blackRect, (0, 0))\n pygame.display.flip()\n clock.tick(15)\n screen.fill((255, 255, 255, 50))\n pygame.display.flip()",
"def lightsOn(strip, interval):\r\n clearStrip(strip)\r\n print(\"lightsOn\", strip, interval)\r\n fade(LED_COLOR_OFF, LED_COLOR_FULL, STEPS, interval, strip)",
"def crossfade(self, seconds):\n resp = yield from self.command('crossfade {}'.format(seconds))\n return True",
"def fadeout(self, time):\r\n check_mixer()\r\n sdl.Mix_FadeOutChannel(self.chan, time)",
"def transition(red, green, blue, new_red, new_green, new_blue):\n while (red != new_red) or (green != new_green) or (blue != new_blue):\n while red != new_red:\n if red > new_red:\n red = red - 1\n break\n else:\n red = red + 1\n break\n while green != new_green:\n if green > new_green:\n green = green - 1\n break\n else:\n green = green + 1\n break\n while blue != new_blue:\n if blue > new_blue:\n blue = blue - 1\n break\n else:\n blue = blue + 1\n break\n logi_led.logi_led_set_lighting(red, green, blue)\n time.sleep(0.01)"
] | [
"0.7056472",
"0.68512785",
"0.6850706",
"0.6402508",
"0.63308096",
"0.62507784",
"0.62267405",
"0.6176442",
"0.6172806",
"0.6114443",
"0.6007189",
"0.5919894",
"0.5918335",
"0.5838925",
"0.58141935",
"0.578331",
"0.5769158",
"0.567631",
"0.566404",
"0.5663928",
"0.5610591",
"0.555381",
"0.550779",
"0.54846305",
"0.54464227",
"0.54107505",
"0.5400773",
"0.53841627",
"0.5350834",
"0.53286594"
] | 0.79324836 | 0 |
Animate the flag with a wave pattern of the given type, using the specified colour, duration and number of times to repeat. | def do_wave(l, wave_type, r, g, b, duration, repeat):
command = create_wave_command(
wave_type, r, g, b, duration, repeat
)
l.write(command) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def animate_to(number, color):\n for _ in range(10):\n trellis.pixels.fill((0, 0, 0))\n display_number(random.randint(10, 99), color)\n time.sleep(0.1)\n trellis.pixels.fill((0, 0, 0))\n display_number(number, color)",
"def flash_red(self, duration=0.2):\n self.pen_color = wx.RED\n self.Refresh(True)\n t = time.time()\n while time.time() - t < duration:\n time.sleep(0.001)\n self.pen_color = wx.WHITE\n self.Refresh(True)",
"def wave_handler(args_dict: dict):\n\n color_sequence = args_dict['color_sequence']\n wave = args_dict['wave']\n color_delay = args_dict['color_delay']\n n_leds = args_dict['n_leds']\n delay_itr = args_dict['delay_itr']\n color_itr = args_dict['color_itr']\n\n step_sequence = []\n\n color_seq_len = args_dict['color_seq_len']\n wave_len = args_dict['wave_len']\n\n for l in range(n_leds):\n\n step_sequence.append(\n [int(color_sequence[color_itr][channel] * wave[l % wave_len]) for channel in range(3)])\n\n delay_itr += 1\n\n if delay_itr % color_delay == 0:\n delay_itr = 0\n color_itr += 1\n if color_itr == color_seq_len:\n color_itr = 0\n\n # Updating wave for next iteration.\n rotate_list_left(wave)\n\n args_dict['delay_itr'] = delay_itr\n args_dict['color_itr'] = color_itr\n\n return step_sequence",
"def waves(repeats = 1):\r\n for i in range(repeats):\r\n alex.up()\r\n alex.color(hueGen(i, .5*i/repeats, .5))\r\n alex.goto(-315,315 - i)\r\n alex.seth(45) # set heading\r\n x = alex.xcor()\r\n y = alex.ycor()\r\n f = i + 1\r\n for j in range(630):\r\n x = alex.xcor()\r\n alex.goto(x + 1, y + 25*sin(8*j/f + i/25)) # plot sines\r\n alex.down()\r\n x = alex.xcor()",
"def anim():\n i = 0\n while 1:\n\n for r in Reprs:\n r.draw(i)\n i = i+ 1\n i = i % len(t)\n yield",
"def static(fps, duration):\n\n frames = int(duration * fps)\n def animate(thing):\n thing = list(thing)\n yield from repeat(thing, frames)\n return animate",
"def quick(groups, colour, period):\n # The cycle period cannot be longer than 1.2s (60/50)\n # or shorter than 0.5s\n if groups == [1]:\n if period is not None:\n raise ValueError(\n \"Quick Flash cycle periods must be longer than 0.5 seconds\"\n )\n\n return [\n (colour, 250),\n ('Off', 750)\n ]\n\n return light_sequence(groups, 'Off', colour, period, 250, 500)",
"def look_around(r, num_repeats=2):\n for i in range(num_repeats):\n r.setLEDs(0, 255, 0, 0)\n time.sleep(.05)\n r.setLEDs(0, 255, 1, 0)\n time.sleep(.05)\n r.setLEDs(0, 255, 1, 1)\n time.sleep(.05)\n r.setLEDs(0, 255, 1, 0)\n time.sleep(.05)\n r.setLEDs(0, 255, 0, 0)\n time.sleep(.05)\n for i in range(num_repeats):\n r.setLEDs(12, 255, 0, 0)\n time.sleep(.15)\n r.setLEDs(0, 0, 0, 0)\n time.sleep(.05)",
"def color_chase(self, color: tuple = CYAN, wait: float = DEFAULT_SPEED):\n for i in range(self.np.n):\n self.np[i] = color\n time.sleep(wait)\n self.np.show()\n return True",
"def play_tone(freq=440, duration=0.01):\n tone(board.A0, freq, duration)",
"def _colour_loop(self, colours, seconds=None, milliseconds=None, fade=True):\n colours = self.convert_to_colour_list(colours) #Forces a list of colours into an actual python list\n if len(colours)<2:\n colours.append(\"#000000\") #Blink between black and the specified colour if only one provided\n \n #Start with the first colour immediately:\n if fade:\n self.fade(colours[0])\n else:\n self.set(colours[0])\n step_time = self.clean_time_in_milliseconds(seconds, milliseconds, default_seconds=1, minimum_milliseconds=50)\n \n #Do the loop\n i = 1 #We're moving to the second colour now\n total_colours = len(colours)\n while not self._sequence_stop_signal:\n #Resolve our colour\n next_colour = colours[i]\n i = (i+1) % total_colours #ensures we are never asking for more colours than provided\n if fade: #Fading is a blocking process, thus we let the fade loop use up the time\n _latest_colour = self.fade(next_colour, fade_time=step_time, check=False)\n else: #Set is instant, so we need to consume the step time\n _latest_colour = self.set(next_colour, fade=False, check=False)\n self.sleep(step_time/1000) #NB fade uses milliseconds!!\n #Return the latest colour\n return self.sync_channels()",
"def startColorLoop():\n b.set_group(1, 'on', True)\n b.set_group(1, 'bri', 254)\n b.set_group(1, 'hue', 255)\n b.set_group(1, 'sat', 255)\n b.set_group(1, 'effect', 'colorloop')",
"def timer(alarm):\n # Start alarm clock again.\n glutTimerFunc(DELAY, timer, 0)\n if exiting:\n global brightness\n brightness -= 0.05\n if brightness < 0.01:\n # Enough dimming - terminate!\n glutLeaveMainLoop()\n glutPostRedisplay()\n \n if animate:\n # Advance to the next frame.\n advance()\n glutPostRedisplay()\n\n if animateTan:\n # Advance to the next frame\n advanceTan()\n glutPostRedisplay()\n\n if animateSilver:\n # Advance to the next frame\n advanceSilver()\n glutPostRedisplay()\n\n if animateDice:\n # Advance to the next frame\n advanceDice()\n glutPostRedisplay()",
"def shake(r, num_repeats=1):\n for i in range(num_repeats):\n r.go(25)\n time.sleep(.1)\n r.stop()\n time.sleep(.1)\n r.go(-25)\n time.sleep(.1)\n r.stop()\n time.sleep(.1)",
"def theater_chase(strip, colors, run_time=RUN_TIME):\n timeout_start = time.time()\n while time.time() < timeout_start + run_time:\n for color in colors:\n for j in range(10):\n for q in range(3):\n for i in range(0, strip.numPixels(), 3):\n strip.setPixelColor(i+q, COLOR_CODES[color])\n strip.show()\n time.sleep(50/1000.0)\n for i in range(0, strip.numPixels(), 3):\n strip.setPixelColor(i+q, 0)",
"def jump(self, colours, seconds=None, milliseconds=None):\n return self.run_sequence(self._colour_loop, colours=colours, seconds=seconds, milliseconds=milliseconds, fade=False)",
"def change_timer_color(timez):\r\n\r\n if timez > 90:\r\n stopwatch.configure(bg='green')\r\n if 31 <= timez <= 90:\r\n stopwatch.configure(bg='yellow')\r\n elif 0 <= timez <= 30:\r\n stopwatch.configure(bg='red')",
"def cycle(effect):\n\n def animate(thing):\n frames = (list(frame) for frame in effect(thing))\n yield from cycle(frames)\n return animate",
"def theaterChase(self, color, wait_ms=50, iterations=10):\n for j in range(iterations):\n for q in range(3):\n for i in range(0, self.LEDS, 3):\n self.ring.setPixelColor(i + q, color)\n self.ring.show()\n time.sleep(wait_ms / 1000.0)\n for i in range(0, self.LEDS, 3):\n self.ring.setPixelColor(i + q, 0)",
"def generate(self, duration=0, tone='brownnoise', gain=0, fadetime=0):\n if logging.getLogger().getEffectiveLevel() != 10:\n self.t = Thread(target = self._send_command, kwargs={'duration': duration, 'tone': tone, 'gain': gain, 'fadetime': fadetime})\n self.t.start()\n logging.debug(\"Generating sound \" + tone + \" for a duration of \" + str(duration))",
"def animation (t,mode = \"cercle\",taille = 40):\n\tx,y = primitives.get_position ()\n\t\n\t# En fonction du nombre de « cycles » \n\t# on peut définir des couleurs différentes\n\t# qui sont représentatives d'une progression\n\tif t % 5 == 0:\n\t\tliste = [\"rouge\",\"carmin\",\"or\",\"vert\",\"chartreuse\"]\n\telif t % 3 == 0:\n\t\tliste = [\"carmin\",\"or\",\"chartreuse\"]\n\telif t % 2 == 0:\n\t\tliste = [\"carmin\",\"chartreuse\"]\n\telse: # Un nombre indéterminé \n\t\tliste = [\"zinzolin\",\"indigo\"]\n\n\t# speed (0) est déjà activé normalement \n\tfor i in range (t):\n\t\t# Définit la couleur de ce tour de boucle \n\t\tcurrent_color = couleurs.string_to_hexa (liste[i % len (liste)])\n\n\t\tif mode == \"cercle\":\n\t\t\t# Fait un cercle ... mouhaha\n\t\t\tprimitives.cercle (6,taille * 2 + 20,generer_couleurs (current_color,6, taille))\n\t\telif mode == \"arc\":\n\t\t\tprimitives.arc (20,taille + 10,generer_couleurs (current_color,5, taille))\n\t\telse: # mode == \"ligne\"\n\t\t\tprimitives.colonnes (1,taille + 10, taille + 10,generer_couleurs (current_color,4,taille))",
"def vibrate(self, duration):\n self.wm.rumble = 1\n sleep(duration)\n self.wm.rumble = 0",
"def flash(groups, colour, period):\n\n if groups == [1]:\n if period <= 2000:\n raise ValueError(\n \"The cycle period for a flash must be longer than 2 seconds\"\n )\n\n return [\n (colour, 1000),\n ('Off', period-1000)\n ]\n\n return light_sequence(groups, colour, 'Off', period, 500, 1000)",
"def led_theaterChase(strip, color, wait_ms=50, iterations=5):\n for j in range(iterations):\n for q in range(3):\n for i in range(0, strip.numPixels()-q, 3):\n strip.setPixelColor(i+q, color)\n strip.show()\n gevent.sleep(wait_ms/1000.0)\n for i in range(0, strip.numPixels()-q, 3):\n strip.setPixelColor(i+q, 0)",
"def kitt(r, num_repeats=2):\n for i in range(num_repeats):\n r.setLEDs(0, 255, 0, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 0, 1)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(12, 255, 0, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 0, 1)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(255, 255, 0, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(0, 0, 0, 1)\n time.sleep(.25)\n r.setLEDs(0, 0, 1, 0)\n time.sleep(.25)\n r.setLEDs(0, 255, 0, 0)",
"def run_anime(self, inval=10, rep=True, blitit=False):\n ani = animation.FuncAnimation(self.fig, self.animate,\n len(self.artists[0][0]), repeat=rep,\n interval=inval, blit=blitit,\n init_func=self.init_anime)\n plt.show()",
"def at_anim(seq, anim, d):\n at(\"ANIM\", seq, [anim, d])",
"def circular_motion(blink_number, sleep_time):\n for i in range(blink_number):\n # set the pixel\n pixel_number = i % 8\n\n # set the colors\n red = i * 10\n blue = 255 - red\n green = 0\n\n # clear the old pixel\n clear()\n\n # set the new pixel\n set_pixel(pixel_number, red, green, blue)\n show() \n\n # sleep\n time.sleep(sleep_time)\n\n # clear at the end of the loop\n clear()\n show()",
"def theaterChase(strip, color, wait_ms=50, iterations=10):\r\n for j in range(iterations):\r\n for q in range(3):\r\n for i in range(0, strip.numPixels(), 3):\r\n strip.setPixelColor(i+q, color)\r\n strip.show()\r\n time.sleep(wait_ms/1000.0)\r\n for i in range(0, strip.numPixels(), 3):\r\n strip.setPixelColor(i+q, 0)",
"def theaterChase(strip, color, wait_ms=50, iterations=10):\n for j in range(iterations):\n for q in range(3):\n for i in range(0, strip.numPixels(), 3):\n strip.setPixelColor(i+q, color)\n strip.show()\n time.sleep(wait_ms/1000.0)\n for i in range(0, strip.numPixels(), 3):\n strip.setPixelColor(i+q, 0)"
] | [
"0.5687249",
"0.5520184",
"0.5502301",
"0.549894",
"0.54490465",
"0.5440435",
"0.52621114",
"0.52320236",
"0.522888",
"0.52235174",
"0.5193794",
"0.5145144",
"0.51310945",
"0.5130986",
"0.5125205",
"0.51163036",
"0.5074429",
"0.504401",
"0.5028484",
"0.5004908",
"0.4992485",
"0.49548584",
"0.49506998",
"0.4889166",
"0.48598439",
"0.485799",
"0.48468614",
"0.4843101",
"0.48407546",
"0.48388943"
] | 0.5781703 | 0 |
Constructor for the SSH Timeout Exception class | def __init__(self, message="Remote operation timeout"):
super(SshTimeout, self).__init__(message) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, error_msg):\n super(RequestTimeoutException, self).__init__(error_msg)",
"def __init__(self, timeout_time):\n self.timeout_time = timeout_time",
"def __init__(self, hostname, username, password, timeout, optional_args):\n raise NotImplementedError",
"def __init__(self, timeout=120):\n self.m_timeout = timeout",
"def __init__(self, timeout=129600):\n self.timeout = timeout",
"def __init__(self, timeout, tries):\r\n self._timeout = timeout\r\n self._tries = tries",
"def __init__(self, seconds):\n super(RobotiqCommandTimeout, self).__init__()\n self.start_time = rospy.get_rostime()\n self.duration = rospy.Duration(seconds)",
"def test_timeout_elapsed_exception(self):\n deadline = Deadline(-MS)\n with self.assertRaises(TimeoutError):\n deadline.timeout()",
"def __init__(self, timeout=5):\n self.sock = None\n self.timeout = timeout",
"def __init__(self, host=\"137.227.224.97\", port=2061, timeout=30,\n debug=False):\n if debug:\n print(\"int __init__\" + host + \"/\" + str(port) + \" timeout=\" +\n str(timeout))\n self.host = host\n self.port = port\n self.timeout = timeout\n self.debug = debug",
"def raise_timeout(self, *args, **kwargs):\n\n self.log.error(\"Task timeout encountered.\")\n raise TimeoutError",
"def __init__(self, assigned_time, task_id, node_type):\n Exception.__init__(self, assigned_time, task_id, node_type)\n self.assigned_time = assigned_time\n self.task_id = task_id\n self.node_type = node_type",
"def _timeout(signum, frame):\n # Raise TimeoutException with system default timeout message\n raise TimeoutException()",
"def __init__(self, timeout=0.1):\n self.poller = select.epoll()\n self.timeout = timeout",
"def __init__(self, timeout=0.1):\n self.poller = select.epoll()\n self.timeout = timeout",
"def __init__( self, timeout = 60.0 ):\n\n self.timeout = timeout\n self.alive = None",
"def __init__(self, error_msg):\n super(ConnectionException, self).__init__(error_msg)",
"def __init__(self, *args, **kwargs):\n self.total_timeout = kwargs.pop('total_timeout', None)\n self.chunk_timeout = kwargs.pop('chunk_timeout', None)\n super(TimeoutProc, self).__init__(*args, **kwargs)",
"def test_timeout(self):\n # Attempt connection with short timeout\n with self.assertRaises(requests.exceptions.ReadTimeout):\n a = api.InvenTreeAPI(SERVER, username=USERNAME, password=PASSWORD, timeout=0.001) # noqa: F841",
"def __init__(self, ip, user, port=22, key=None, timeout=1800):\n self.ip = ip\n self.user = user\n self.timeout = timeout\n self.client = None\n if key:\n self.key = key\n else:\n self.key = os.path.expanduser('~/.ssh/id_rsa')",
"def raise_timeout_exception(self, _result=None, _timeout=None):\n raise RosTimeoutError(\"No service response received\")",
"def test_wait_timeout_inheritance():\n # confirm subclassed from pypyr root error\n err = WaitTimeOut()\n assert isinstance(err, PypyrAwsError)\n assert isinstance(err, PlugInError)\n assert isinstance(err, PypyrError)",
"def __init__(self, timeout=0.1):\n self._fds = []\n self.timeout = timeout",
"def __init__(self, earliest_time, latest_time):\n Exception.__init__(self, earliest_time, latest_time)\n self.earliest_time = earliest_time\n self.latest_time = latest_time",
"def __init__(self, rudp_packet, timeout, timeout_cb, retries=0):\n self.rudp_packet = rudp_packet\n self.timeout = timeout\n self.timeout_cb = timeout_cb\n self.retries = retries",
"def timeoutConnection(self):\n log.msg('Timeout reached in BackendSSHTransport')\n self.transport.loseConnection()\n self.factory.server.transport.loseConnection()",
"def __init__(self, host, port, timeout=10, timeout_limit=3):\n self._buffer = b\"\"\n self._conn = socket.create_connection((host, port), timeout=timeout)\n self.timeout = timeout\n self.timeout_limit = timeout_limit",
"def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout",
"def raise_timeout_error(api_url, headers, timeout, proxies):\n raise requests.exceptions.Timeout",
"def __init__(self, timeout_seconds=1, handle_timeouts=True, uuid=None):\n self.handle_timeouts = handle_timeouts\n self.timeout_seconds = timeout_seconds\n self.uuid = None"
] | [
"0.67599875",
"0.67163706",
"0.64918303",
"0.64363897",
"0.63409185",
"0.6284611",
"0.6284362",
"0.6282421",
"0.62392485",
"0.6182562",
"0.6173027",
"0.6143694",
"0.6105834",
"0.6094346",
"0.6094346",
"0.60341465",
"0.5954169",
"0.59241354",
"0.5864031",
"0.57930404",
"0.57638675",
"0.57325685",
"0.5711684",
"0.5679064",
"0.567616",
"0.56573886",
"0.5616766",
"0.5608401",
"0.5608401",
"0.55981964"
] | 0.8164756 | 0 |
create a shell connector from machine info object | def from_info(cls, info, user='root'):
conn = None
if not isinstance(info, MachineInfo):
raise TypeError('info must be a MachineInfo')
if user == 'cmuser':
conn = cls(info.ip, 22, info.operator_user,
info.operator_password, '')
else:
conn = cls(info.ip, 22, info.ssh_username,
info.ssh_password, info.ssh_key)
return conn | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_shell(self, shell):",
"def __init__(self, connector=pxssh.pxssh()):\n self.connector = connector\n\n # pxssh.UNIQUE_PROMPT is \"\\[PEXPECT\\][\\$\\#] \", set prompt for csh\n # should not contain slash(\\)\n if isinstance(self.connector, pxssh.pxssh):\n self.connector.PROMPT_SET_CSH = \"set prompt='[PEXPECT]$ '\"\n\n # Echo command result\n self.echo_cmd_result = \"\"",
"def __init__(self, args, shell, userns):\n super(SSHMgr, self).__init__(args, shell, userns)\n parser = MagicArgumentParser()\n parser.add_argument('--host', type=str, default='localhost',\n help='Machine to reach (default = localhost)')\n parser.add_argument('--pid', type=str,\n help='Variable to store SSH process pid')\n _args, cmd = parser.parse_known_args(args)\n self.cmd = self._wlbin + [_args.host, ] + cmd\n # SSH Cannot fork into background without a command to execute.\n # Popen instance is created in submit",
"async def _connect(self, subsystem=None, exec_command=None):\n ip, port, user, passwd = await self.dest_info()\n self._extra_info[\"peer\"] = PeerInfo(ip, port)\n\n if self._devinfo.proxy_required(ip):\n host = self.service.get_http_proxy_url(ip)\n elif self._devinfo.should_nat(ip):\n host = await self._devinfo.translate_address(ip)\n else:\n host = ip\n\n self.logger.info(\"Connecting to: %s: %d\", host, port)\n\n # known_hosts is set to None to disable the host verifications. Without\n # this the connection setup fails for some devices\n conn, _ = await asyncssh.create_connection(\n self._client_factory,\n host=host,\n port=port,\n username=user,\n password=passwd,\n client_keys=None,\n known_hosts=None,\n )\n\n chan, cmd_stream = await self._conn.create_session(\n lambda: CommandStream(self, self._loop),\n encoding=None,\n term_type=self.TERM_TYPE,\n subsystem=subsystem,\n command=exec_command,\n )\n self._chan = chan\n return cmd_stream",
"def connect():\n\n crate = get_crate()\n crate.mch_comms.ipmitool_shell_connect()",
"def createMachine():\n cd('/')\n machine = create(machineName, 'UnixMachine')\n cd('Machines/'+machineName+'/NodeManager/'+machineName)\n cmo.setName(machineName)\n cmo.setListenAddress(hostname)",
"def __init__(self, name, shell_type, proto, code, os=None, arch=None, use_handler=None, use_http_stager=None):\r\n\r\n # These are the required attributes;\r\n self.name = name\r\n self.type = shell_type\r\n self.proto = proto\r\n self.code = code\r\n\r\n # These are optional attributes;\r\n self.os = \"Unknown\" if os is None else os\r\n self.arch = \"Unknown\" if arch is None else arch\r\n self.handler = None if use_handler is None else use_handler # this is going to be the handler function.\r\n self.handler_args = None # this is going to be set during execution.\r\n\r\n self.use_http_stager = False if use_http_stager is None else use_http_stager\r\n return",
"def __init__(self, sm_params, name=None, io_connection=None, io_type=None, variant=None, io_constructor_kwargs=None,\n initial_state=None, lazy_cmds_events=False):\n initial_state = initial_state if initial_state is not None else AdbRemote.adb_shell\n super(AdbRemote, self).__init__(name=name, io_connection=io_connection,\n io_type=io_type, variant=variant,\n io_constructor_kwargs=io_constructor_kwargs,\n sm_params=sm_params, initial_state=initial_state,\n lazy_cmds_events=lazy_cmds_events)",
"def shell(console):\n return create_shell(\n MANAGE_DICT.get(\"shell\", {}).get(\"console\", console), MANAGE_DICT\n )",
"def __init__(self, host, prompt=None):\n\n self.host = host\n self.hostname = None\n self.system_version = None\n self.chassis_id = None\n self.logger = logging.getLogger('nexusswitch.NxosSwitch')\n self.logger.debug(\"Instantiating NxosSwitch object for {}\".format(self.host))\n\n self._ncc = NxosConnect(host)\n self.vlans = None\n self.interfaces = None\n self.connected = False\n self.vdcs = None\n self.current_vdc = 'default'\n self.default_vdc = 'default'\n\n #Regular expression for the prompt to look for in 7k shells\n self.prompt = prompt",
"def shell(self, cmd):\n raise NotImplementedError",
"def __init__(self, host, username= \"\", password= \"\", stdprompt=ixia_prompt_regex):\n\n Host.__init__(self, host, username, password, stdprompt)\n log.output(\"Ixia object for host %s created.\" % host)\n\t#self.cmd(\"\")",
"def attach(self):\r\n sshpass = \"sshpass -p \\\"akanksha1\\\"\"\r\n remote_Station = \"[email protected]\"\r\n base = \"ssh -t \" + options[\"username\"] + \"@\" + options[\"server\"]\r\n\r\n screen = \" screen -r \"\r\n if self.device_type == \"Wireless_access_point\":\r\n screen += \"WAP_%d\" % self.getID()\r\n elif self.device_type == \"yRouter\":\r\n yrouter = \"yrouter --interactive=1 --config=/root/script_t1_y1.conf test3\"\r\n screen_yrouter = \"%s ssh %s \\\"source /root/.profile; %s\\\"\"%(sshpass, remote_Station, yrouter)\r\n else:\r\n name = self.getName()\r\n pid = mainWidgets[\"tm\"].getPID(name)\r\n if not pid:\r\n return\r\n screen += pid + \".\" + name\r\n\r\n command = \"\"\r\n\r\n window_name = str(self.getProperty(\"Name\")) # the strcast is necessary for cloning\r\n if(self.getName() != window_name):\r\n window_name += \" (\" + self.getName() + \")\"\r\n if environ[\"os\"] == \"Windows\":\r\n\r\n startpath = environ[\"tmp\"] + self.getName() + \".start\"\r\n try:\r\n outfile = open(startpath, \"w\")\r\n outfile.write(screen)\r\n outfile.close()\r\n except:\r\n mainWidgets[\"log\"].append(\"Failed to write to start file!\")\r\n return\r\n\r\n command += \"putty -\"\r\n if options[\"session\"]:\r\n command += \"load \" + options[\"session\"] + \" -l \" + options[\"username\"] + \" -t\"\r\n else:\r\n command += base\r\n command += \" -m \\\"\" + startpath + \"\\\"\"\r\n else:\r\n if self.device_type == \"yRouter\":\r\n command += \"rxvt -T \\\"\" + window_name + \"\\\" -e \" + screen_yrouter\r\n else:\r\n command += \"rxvt -T \\\"\" + window_name + \"\\\" -e \" + base + screen\r\n\r\n self.shell = subprocess.Popen(str(command), shell=True)",
"def __init__(self,\n device_name,\n create_device_func,\n props,\n hub_name_prop,\n primary_port_prop,\n secondary_port_prop,\n ethernet_switch_prop,\n ethernet_port_prop,\n get_switchboard_if_initialized,\n power_and_data_share_cable=False,\n pre_off_func=None):\n super().__init__(device_name=device_name)\n\n self._create_device_func = create_device_func\n self._hub_name_prop = hub_name_prop\n self._primary_port_prop = primary_port_prop\n self._secondary_port_prop = secondary_port_prop\n self._props = props\n self._ethernet_switch = None\n\n # Set the properties\n self._get_switchboard_if_initialized = get_switchboard_if_initialized\n self._power_and_data_share_cable = power_and_data_share_cable\n self._pre_off_func = pre_off_func\n self._ethernet_switch_prop = ethernet_switch_prop\n self._ethernet_port_prop = ethernet_port_prop",
"def __init__(self, target):\n if isinstance(target, bash.Host):\n self.host = target\n else:\n self.host = bash.Host(target)\n self.args = ['-s']\n self.data = None\n self.child_pid = None",
"def init():\n\n @click.command()\n @click.option('--cell', required=True,\n envvar='TREADMILL_CELL',\n callback=cli.handle_context_opt,\n expose_value=False)\n @click.option('--ssh', help='SSH client to use.',\n type=click.Path(exists=True, readable=True))\n @click.argument('app')\n @click.argument('command', nargs=-1)\n def ssh(ssh, app, command):\n \"\"\"SSH into Treadmill container.\"\"\"\n if ssh is None:\n ssh = _DEFAULT_SSH\n\n if app.find('#') == -1:\n # Instance is not specified, list matching and exit.\n raise click.BadParameter('Specify full instance name: xxx#nnn')\n\n app_discovery = discovery.Discovery(context.GLOBAL.zk.conn, app, 'ssh')\n app_discovery.sync()\n\n # Restore default signal mask disabled by python spawning new thread\n # for Zk connection.\n #\n # TODO: should this be done as part of zkutils.connect?\n for sig in range(1, signal.NSIG):\n try:\n signal.signal(sig, signal.SIG_DFL)\n except OSError:\n pass\n\n # TODO: not sure how to handle mutliple instances.\n for (app, hostport) in app_discovery.items():\n _LOGGER.info('%s :: %s', app, hostport)\n if hostport:\n host, port = hostport.split(b':')\n run_ssh(host, port, ssh, list(command))\n\n return ssh",
"async def async_step_host(self, info: Optional[dict] = None):\n errors = {}\n if info is not None:\n try:\n data = await self._async_get_entry_data(\n self._device_info.serial,\n self._device_info.credential,\n self._device_info.product_type,\n self._device_info.name,\n info.get(CONF_HOST),\n )\n except CannotConnect:\n errors[\"base\"] = \"cannot_connect\"\n except CannotFind:\n errors[\"base\"] = \"cannot_find\"\n else:\n return self.async_create_entry(\n title=self._device_info.name,\n data=data,\n )\n\n info = info or {}\n return self.async_show_form(\n step_id=\"host\",\n data_schema=vol.Schema(\n {vol.Optional(CONF_HOST, default=info.get(CONF_HOST, \"\")): str}\n ),\n errors=errors,\n )",
"def __init__(\n self,\n avm_wrapper: AvmWrapper,\n device_friendly_name: str,\n switch_info: SwitchInfo,\n ) -> None:\n super().__init__(avm_wrapper, device_friendly_name)\n\n self._description = switch_info[\"description\"]\n self._friendly_name = switch_info[\"friendly_name\"]\n self._icon = switch_info[\"icon\"]\n self._type = switch_info[\"type\"]\n self._update = switch_info[\"callback_update\"]\n self._switch = switch_info[\"callback_switch\"]\n\n self._name = f\"{self._friendly_name} {self._description}\"\n self._unique_id = f\"{self._avm_wrapper.unique_id}-{slugify(self._description)}\"\n\n self._attributes: dict[str, str] = {}\n self._is_available = True",
"def __init__(__self__, *,\n connector_name: pulumi.Input[str],\n entity_type: pulumi.Input['EntityTypes'],\n entity_type_name: pulumi.Input[str],\n hub_name: pulumi.Input[str],\n mapping_properties: pulumi.Input['ConnectorMappingPropertiesArgs'],\n resource_group_name: pulumi.Input[str],\n connector_type: Optional[pulumi.Input[Union[str, 'ConnectorTypes']]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n mapping_name: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"connector_name\", connector_name)\n pulumi.set(__self__, \"entity_type\", entity_type)\n pulumi.set(__self__, \"entity_type_name\", entity_type_name)\n pulumi.set(__self__, \"hub_name\", hub_name)\n pulumi.set(__self__, \"mapping_properties\", mapping_properties)\n pulumi.set(__self__, \"resource_group_name\", resource_group_name)\n if connector_type is not None:\n pulumi.set(__self__, \"connector_type\", connector_type)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if display_name is not None:\n pulumi.set(__self__, \"display_name\", display_name)\n if mapping_name is not None:\n pulumi.set(__self__, \"mapping_name\", mapping_name)",
"def get_switch_details_from_console(self):\n ret_output = {}\n #Get the console port\n console = self.telnet_console_port()\n console.sendline('terminal length 0')\n console.expect(SWITCH_PROMPT)\n console.sendline('show inventory | xml')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['inv'] = console.before\n console.sendline('show system uptime | xml')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['uptime'] = console.before\n console.sendline('show accounting log | grep \"configure\" | last 1')\n console.expect(SWITCH_PROMPT,120)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['idletime'] = console.before\n console.sendline('terminal length 15')\n console.expect(SWITCH_PROMPT)\n console.sendline('show clock | last 1')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['clock'] = console.before\n console.close()\n return ret_output",
"def __init__(self, host, port=2345):\n self.host = host\n self.port = port\n self.set_command_list()",
"def __init__(self, hst, iface):\n self.host = hst\n self.iface = iface",
"def _get_shell_obj(self):\n return self._shell_obj()",
"def Shell(self, cmd): # pylint: disable=invalid-name\n raise NotImplementedError",
"def __init__(self, hostname, port, protocol, auth, tenant_id):\n HawkularService.__init__(self, hostname=hostname, port=port, protocol=protocol,\n auth=auth, tenant_id=tenant_id, entry=\"hawkular/inventory\")",
"def __init__(self,\n comms_address,\n args=\"\",\n auto_reopen=False,\n open_on_start=True):\n self.comms_address = comms_address\n working_directory = os.path.dirname(comms_address)\n if working_directory:\n comms_address = comms_address.replace(working_directory, \"./\")\n else:\n working_directory = None\n super(PtyTransport, self).__init__(\n command=comms_address,\n args=args,\n auto_reopen=auto_reopen,\n open_on_start=open_on_start,\n working_directory=working_directory)\n self.primary = None\n self.secondary = None",
"def test_create_connector(self):\n pass",
"def get_switch_details_from_mgmt(self, using):\n ret_output = {}\n #Get the console mgmt handle\n console = self.connect_mgmt_ip(using)\n console.sendline('terminal length 0')\n console.expect(SWITCH_PROMPT)\n console.sendline('show inventory | xml')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['inv'] = console.before\n console.sendline('show system uptime | xml')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['uptime'] = console.before\n console.sendline('show accounting log | grep \"configure\" | last 1')\n console.expect(SWITCH_PROMPT,120)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['idletime'] = console.before\n console.sendline('terminal length 15')\n console.expect(SWITCH_PROMPT)\n console.sendline('show clock | last 1')\n console.expect(SWITCH_PROMPT)\n if any(i in console.before for i in INVALID_CLI): raise InvalidCliError('show cmd failure') \n ret_output['clock'] = console.before\n console.close()\n return ret_output",
"def ssh(host_=None):\n run_command_on_selected_server(open_shell, host_=host_)",
"def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n connector_name: Optional[pulumi.Input[str]] = None,\n connector_type: Optional[pulumi.Input[Union[str, 'ConnectorTypes']]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n entity_type: Optional[pulumi.Input['EntityTypes']] = None,\n entity_type_name: Optional[pulumi.Input[str]] = None,\n hub_name: Optional[pulumi.Input[str]] = None,\n mapping_name: Optional[pulumi.Input[str]] = None,\n mapping_properties: Optional[pulumi.Input[pulumi.InputType['ConnectorMappingPropertiesArgs']]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ..."
] | [
"0.63129455",
"0.55860054",
"0.5557034",
"0.55390114",
"0.5533736",
"0.5485371",
"0.54844224",
"0.5483027",
"0.5439714",
"0.54216254",
"0.53780776",
"0.5342085",
"0.53084445",
"0.5289528",
"0.5280711",
"0.52249736",
"0.52055746",
"0.51824886",
"0.5179961",
"0.51555294",
"0.51385844",
"0.5136769",
"0.51325333",
"0.51316226",
"0.51313484",
"0.5125092",
"0.51227987",
"0.51116186",
"0.50746053",
"0.50706756"
] | 0.59186614 | 1 |
Updates the text labels that display the slider values | def updateLabels(self):
# Intensity range
self.minIntensityLabel.setText("Intensity: "+str(self.ABsettings["intensity_range"][0]).rjust(3))
self.labelMaxInt.setText(str(self.ABsettings["intensity_range"][1]).ljust(3))
# Z range
self.minZLabel.setText("Z range: "+str(self.ABsettings["zrange"][0]+1).rjust(2))
self.labelMaxZ.setText(str(self.ABsettings["zrange"][1]+1).ljust(2)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_elements(self, viewer):\n for i in range(self.num_labels):\n lbl = self.lbls[i]\n # get data coord equivalents\n x, y = self.get_data_xy(viewer, (lbl.x, lbl.y))\n # format according to user's preference\n lbl.text = self.format_value(x)",
"def UpdateLabel(self) -> _n_6_t_0:",
"def _update_moved(self):\n self._RAS_textbox.setPlainText('{:.2f}, {:.2f}, {:.2f}'.format(\n *self._ras))\n self._VOX_textbox.setPlainText('{:3d}, {:3d}, {:3d}'.format(\n *self._current_slice))\n self._intensity_label.setText('intensity = {:.2f}'.format(\n self._base_data[tuple(self._current_slice)]))",
"def update_elements(self, viewer):\n # set Y labels/grid as needed\n for i in range(self.num_labels):\n lbl = self.lbls[i]\n # get data coord equivalents\n x, y = self.get_data_xy(viewer, (lbl.x, lbl.y))\n lbl.text = self.format_value(y)",
"def update_label_text(self, event=None) -> None:\n # get and create variables\n text_var = self._label_text.get()\n scale_var = self._scale_var.get()\n text = \"{0} {1}\".format(text_var, scale_var)\n # set the text to the label\n self._label.configure(text=text)",
"def slider_update(attrname, old, new):\n tick = slider.value\n title.text = str(time.strftime(\n '%Y-%m-%d %H:%M:%S', time.localtime(tick)))\n source2.data = my_sources[tick]",
"def slider_changed(self):\n freq_index = self.ui.frequencySlider.value()\n freq = self.psd.freqs[freq_index]\n self.ui.fmin.setText(str(freq))\n self.ui.fmax.setText(str(freq))\n self.value_changed()",
"def setValues(self,values):\n for v,val in zip(self.values,map(float,values)):\n v.setText(str(val))",
"def update(self, value):\n self.bar.setValue(value)\n self.text_label.setText('{}: {}/{}'.format(self.label, value, self.num_regions))",
"def update_score(self):\n score_text = ' ' + str(self.x_score) + ' - ' + str(self.o_score) + ' '\n self.Score_Label.configure(text=score_text, foreground='#FFFFFF')",
"def updateTextScreen(self):\n if (self.count_desired):\n graph_hint = 'Count of ' + self.y_axis + ' versus ' + self.x_axis\n self.ids.yTitle.hint_text = 'Count of ' + str(self.y_axis)\n else:\n graph_hint = self.y_axis + ' versus ' + self.x_axis\n self.ids.yTitle.hint_text = str(self.y_axis)\n self.ids.gTitle.text = ''\n self.ids.gTitle.hint_text = str(graph_hint)\n self.ids.gTitle.focus = False\n self.ids.xTitle.text = ''\n self.ids.xTitle.hint_text = str(self.x_axis)\n self.ids.xTitle.focus = False\n self.ids.yTitle.text = ''\n self.ids.yTitle.focus = False",
"def labelUpdate(self, run_dict):\n self.progressBar.reset()\n self.progressBar.setMinimum(1)\n self.progressBar.setMaximum(run_dict[\"Progress\"])\n self.progressLabel.setText(run_dict[\"Text\"])",
"def update(self, *args):\n #Fetches slider information\n s1=self.s1.get()\n s2=self.s2.get()\n r1=self.r1.get()\n r2=self.r2.get()\n p=self.p.get()\n\n #Changes the number next to the bar\n self.r1_string.configure(text=\"%.2f\"% r1)\n self.r2_string.configure(text=\"%.2f\"% r2)\n self.s1_string.configure(text=\"%.2f\"% s1)\n self.s2_string.configure(text=\"%.2f\"% s2)\n self.p_string.configure(text=\"%.2f\"% self.p.get())\n\n #Creates two asset objects\n self.I1 = Instrument(r1, s1, \"Asset 1\", \"Equity\")\n self.I2 = Instrument(r2, s2, \"Asset 2\", \"Bond\")\n\n #Builds a portfolio object\n self.port = Portfolio([self.I1, self.I2])\n self.port.addcorr([[0,p]])\n\n #Displays the new graph to the graph frame\n fff =Frame(height=400, width=400, bd=10, bg='white')\n Chart(self.port, 0.02).scatter(fff)\n fff.grid(row=1, column=0)",
"def update_and_display(self, updated_data: list) -> None:\n for elem in updated_data:\n self.values.update({elem.left_window_label: elem.left_window_value})\n self.__display()",
"def text_changed(self, text):\n self.lbl.setText(text)",
"def updateText(self,new_value):\n if self.value.getText() == new_value:\n pass\n else:\n self.value.setText(new_value)\n # Visual feedback when the value changes for the user\n for i in range(2):\n time.sleep(0.1)\n\n if i % 2 == 0:\n self.value.setStyle('bold')\n else:\n self.value.setStyle('normal')",
"def update_values(self, values):\n if values is not None:\n self.settings.update(values)\n\n # External (from MCU)\n self.label_smc1.configure(text=self.smc1_template % self.settings['s1'], font=self.font)\n self.label_smc2.configure(text=self.smc2_template % self.settings['s2'], font=self.font)\n self.label_smc3.configure(text=self.smc3_template % self.settings['s3'], font=self.font)\n self.label_smc4.configure(text=self.smc4_template % self.settings['s4'], font=self.font)\n self.label_ambient_min.configure(text=self.ambient_light_template % self.settings['p'], font=self.font)\n\n # Internal (from GUI)\n self.label_overhead_level.configure(text=self.overhead_level_template % self.overhead_level.get(), font=self.font)\n self.active_changes = True # (flag) Once changes are retrieved, we assume that they will be sent to the controller",
"def _update_output_voltage_label(self):\n self.widgets['p_outputVoltage'].setValue((self._curr_output_voltage))",
"def update_steps_display(self):\r\n self.steps_display[\"text\"] = str(self.steps.get())",
"def set_labels(self):\n\n if 1 <= self.selected_data <= 2:\n self.plot_select.setLabel(\"left\", \"P (kPa)\")\n self.plot_select.setLabel(\"bottom\", \"t\", \"s\")\n self.plot_zoom.setLabel(\"left\", \"P (kPa)\")\n self.plot_zoom.setLabel(\"bottom\", \"t\", \"s\")\n\n elif self.selected_data == 3:\n self.plot_select.setLabel(\"left\", \"ext\", \"\")\n self.plot_select.setLabel(\"bottom\", \"t\", \"s\")\n self.plot_zoom.setLabel(\"left\", \"ext\", \"\")\n self.plot_zoom.setLabel(\"bottom\", \"t\", \"s\")\n\n elif self.selected_data == 4:\n self.plot_select.setLabel(\"left\", \"U\", \"V\")\n self.plot_select.setLabel(\"bottom\", \"t\", \"s\")\n self.plot_zoom.setLabel(\"left\", \"U\", \"V\")\n self.plot_zoom.setLabel(\"bottom\", \"t\", \"s\")\n\n # self.plot_simulate.setLabel(\"left\", \"ext\", \"\")\n # self.plot_simulate.setLabel(\"bottom\", \"t\", \"s\")\n\n self.plot_distribution.setLabel(\"left\", \"N ×10¹⁰ (#/m³)\")\n self.plot_distribution.setLabel(\"bottom\", \"d_p\", \"m\")\n self.plot_distribution.showGrid(y=True)\n\n self.plot_rotatometer.setLabel(\"left\", \"N ×10¹⁰ (#/m³)\")\n self.plot_rotatometer.setLabel(\"bottom\", \"laimennusvirtaus\")\n self.plot_rotatometer.showGrid(y=True)",
"def refresh_label(self):\n finalText = \"%s\" % (player.get_time())\n self.label.configure(text=finalText)",
"def update_elements(self, viewer):\n for i, plot_src in enumerate(self.aide.plots.values()):\n lbl = self.lbls[plot_src]\n self.format_label(lbl, plot_src)",
"def afficher_scores(sj, so):\n lbl_sj1.configure(text=\"{}\".format(sj))\n lbl_sj2.configure(text=\"{}\".format(so))",
"def ct_slider_value_changed(self):\n for (x, slider) in enumerate(self.sliders):\n # for x in range(0, len(self.sliders)):\n # slider = self.sliders[x]\n slider_value = float(slider.value()) / float(slider.maximum())\n # Use an square function for easier opacity adjustments\n converted_value = slider_value * slider_value * slider_value\n self.render_widget.sectionsOpacity[x] = converted_value\n\n self.render_widget.update()",
"def slider(self):\n\n if self.count >= len(self.txt):\n self.count = -1\n self.text = ''\n self.heading.config(text=self.text)\n\n else:\n self.text = self.text + self.txt[self.count]\n self.heading.config(text=self.text)\n self.count += 1\n\n self.heading.after(100, self.slider)",
"def UpdateFrame(self, sender=None, args=None):\n # Update label for sensor: s['label']\n # with the most recent measurement: s().data['data'][-1]\n for s in self.sensors:\n self.gValue[s.GetID()].SetLabel( '{num} {unit}'.format(\n num = s().data['data'][-1],\n unit = str(s['unit'])) )\n try:\n pub.sendMessage( 'Plot.%s' %self.GetLabel() )\n except:\n self.plot_deleted = True\n\n \n self.top_sizer.Layout()",
"def _update_prob(self):\n for i in range(len(self.xml_names)):\n if (self.label_list[i]!=0):\n atlas_prob = self.atlas_display(self.nii_data[i], self.label_list[i])\n self.prob[i].setText(atlas_prob)",
"def _sync_gui(self):\n self._update_buttons()\n\n self.turn_value_label.config(text=self.turn_value_text)\n self.selected_piece_value_label.config(text=self.selected_piece_value_text)\n\n self.update()",
"def updateState(self):\n QtGui.QLabel.setText(self, self._state[0])",
"def update_label(\r\n self,\r\n root,\r\n label_var,\r\n text = \"\",\r\n delay = 2 #seconds\r\n ):\r\n label_var.set(text)\r\n root.update()\r\n time.sleep(delay)"
] | [
"0.73107535",
"0.7074322",
"0.68887264",
"0.6816435",
"0.67901117",
"0.6768138",
"0.6733651",
"0.6677558",
"0.6651518",
"0.65698177",
"0.6497585",
"0.6495086",
"0.64658064",
"0.6458767",
"0.64468765",
"0.64334774",
"0.64204824",
"0.64106315",
"0.63746005",
"0.63589525",
"0.63266236",
"0.62993336",
"0.62936187",
"0.6263536",
"0.6249932",
"0.621589",
"0.6212001",
"0.6210995",
"0.619091",
"0.6128024"
] | 0.77173656 | 0 |
Encode a string into a numpy.ndarray using utf32. | def array_encode(s):
return np.frombuffer(s.encode('utf32'), dtype=np.int32, offset=4) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _unicode(arr):\n try:\n return unicode(arr)\n except UnicodeEncodeError:\n dt = arr.dtype.newbyteorder('S')\n return unicode(arr.view(dt))",
"def encoded(self):\n text, chars = self.chars()\n int2char = dict(enumerate(chars))\n char2int = {ch: ii for ii, ch in int2char.items()}\n encoded = np.array([char2int[ch] for ch in text])\n return encoded",
"def encode(self, string):\n\n encoded = np.zeros((len(string), len(self.__chars)))\n\n for i, c in enumerate(string):\n encoded[i, self.__char2idx[c]] = 1\n\n return encoded",
"def from_ascii(s):\n\treturn numpy.array(\n\t\t[\n\t\t\tord(c)\n\t\t\tfor c in s\n\t\t],\n\t\tdtype=numpy.uint8\n\t)",
"def stringToUtf8Array(input):\n if _haveTypeUnicode:\n # Assume this is Python 2.\n if type(input) is str:\n # Convert the raw string to an int array.\n return map(ord, input)\n elif type(input) is unicode:\n # In Python 2, the result of encode is a str, so convert to int array.\n return map(ord, input.encode('utf-8'))\n else:\n return input\n else:\n if type(input) is str:\n return input.encode('utf-8')\n else:\n return input",
"def array_decode(arr):\n if arr.dtype != np.int32:\n raise ValueError('Incompatible dtype: expected numpy.int32, got numpy.%s' % arr.dtype)\n return bytes(arr).decode('utf32')",
"def encode(data, code_book):\n return np.array([int(chunk, 2).to_bytes(-(-len(chunk) // 8), byteorder='big') for chunk in\n map(lambda tup: ''.join(tup), (lambda iterable: zip_longest(*[iter(iterable)] * 8, fillvalue=''))(\n ''.join(map(lambda x: code_book[x], data))))])",
"def _encodeStringForPyTables(self, string, name, where='/', complevel=1, complib='zlib', shuffle=True):\n bytestring = np.fromstring(string.encode('utf-8'), np.uint8)\n atom = self.tables.UInt8Atom()\n filters = self.tables.Filters(complevel, complib, shuffle)\n if self.tables.__version__ >= '3.0.0':\n self._handle.create_carray(where=where, name=name, obj=bytestring, atom=atom, filters=filters)\n else:\n self._handle.createCArray(where=where, name=name, obj=bytestring, atom=atom, filters=filters)",
"def decodeUtf8(self, arrayBuffer):",
"def decodeUtf8(self, arrayBuffer):",
"def get_cards_encoded_from_str(cards: List[str]) -> np.ndarray:\n cards_int = convert_str_encoded_cards_to_int_encoded(cards)\n result = np.zeros(36, np.int32)\n result[cards_int] = 1\n return result",
"def ascii_to_numpy(ascii_diagram, as_bytes=True):\n ascii_diagram = [list(i) for i in ascii_diagram]\n ascii_diagram = np.array(ascii_diagram)\n v_to_bytes = np.vectorize(to_bytes)\n return v_to_bytes(ascii_diagram) if as_bytes else ascii_diagram",
"def test_utf8_bytes_in_an_array(self):\n # Python3 doesn't support bytestrings, don't run this test\n if str is unicode:\n return\n input = \"A r\\xc3\\xa9sum\\xc3\\xa9, also spelled resum\\xc3\\xa9 or resume\"\n output = input.split(\" \")\n output[1] = output[1][0:-1]\n input = array.array('c', input)\n output = [array.array('c', w) for w in output]\n for (itmO, itmV) in zip(output, tokenize_en(array.array('c', input))):\n self.assertEqual(itmO, itmV[0])\n self.assertEqual(input[itmV[1]:itmV[1] + len(itmV[0])], itmO)",
"def byn_to_utf(str_):\n\n byte = bytearray()\n for item in str_.split(' '):\n byte.append(int(item, 2))\n return byte.decode()",
"def encode_string(word, string_to_int = True):\n\n if string_to_int: # encode\n word = word.ljust(8, ' ') if len(word) < 8 else word # padding -- 8 is max length, all tensors must have the same size during communication\n word_encoded = [letter for letter in word.encode()]\n return word_encoded\n else: #decode\n cleanup_array = [letter for letter in word if letter!= 32] # Remove padding\n word_decoded = bytes(cleanup_array).decode()\n return word_decoded",
"def _ascii_encode(inarray, out=None):\n out_dtype = np.dtype((f\"S{inarray.dtype.itemsize // 4}\", inarray.dtype.shape))\n if out is not None:\n out = out.view(out_dtype)\n\n op_dtypes = [inarray.dtype, out_dtype]\n op_flags = [[\"readonly\"], [\"writeonly\", \"allocate\"]]\n it = np.nditer(\n [inarray, out], op_dtypes=op_dtypes, op_flags=op_flags, flags=[\"zerosize_ok\"]\n )\n\n try:\n for initem, outitem in it:\n outitem[...] = initem.item().encode(\"ascii\")\n except UnicodeEncodeError as exc:\n index = np.unravel_index(it.iterindex, inarray.shape)\n raise _UnicodeArrayEncodeError(*(exc.args + (index,)))\n\n return it.operands[1]",
"def encode(self, C, num_rows):\n x = np.zeros((num_rows, len(self.chars)))\n print(C)\n for i, c in enumerate(C):\n x[i, self.char_indices[c]] = 1\n return x",
"def to_str(array, encoding='utf8'):\n\n if not isinstance(array, np.ndarray):\n raise ValueError('input should be a NumPy array.')\n\n return np.char.decode(array, encoding)",
"def decode_xarray_bytes(xdf):\n for col in list(xdf):\n if xdf[col].dtype == 'O':\n try:\n xdf[col] = xdf[col].astype(str)\n except:\n xdf[col] = xdf[col].str.decode('cp1252').str.strip()\n return xdf",
"def encode(self, C, num_rows):\n x = np.zeros((num_rows, len(self.chars)))\n for i, c in enumerate(C):\n x[i, self.char_indices[c]] = 1\n return x",
"def encode(self, C, num_rows):\n x = np.zeros((num_rows, len(self.chars)))\n for i, c in enumerate(C):\n x[i, self.char_indices[c]] = 1\n return x",
"def base64_2_data(s: str) -> np.ndarray:\n saved_bytes = io.BytesIO(zlib.decompress(base64.b64decode(s)))\n return np.load(saved_bytes)",
"def netcdf_compatible_array(arry):\n arry = strip_array_wrappers(arry)\n\n if arry.ndim > 0:\n for _ in range(3):\n if arry.dtype.char != \"O\" or arry.ndim == 0:\n break\n\n if arry.shape[0] == 1:\n arry = np.array(arry[0])\n else:\n arry = np.array(tuple(arry))\n\n if \"S\" in arry.dtype.char:\n return np.char.decode(arry, \"ascii\")\n # TODO: ensure no float16, ...\n return arry",
"def _encodeArray(self, array):\n\n # Actually, we want dtype,naxis,axNlen,base64(array)\n return base64.b64encode(array.tostring())",
"def encode(self, strs):",
"def encode(self, strs):",
"def encode(self, text):\r\n\r\n text = unicodedata.normalize(\"NFKD\", text).encode(\"ASCII\", \"ignore\").decode(\"ASCII\")\r\n text = \" \".join(text.split())\r\n\r\n groups = [\"\".join(group) for _, group in groupby(text)]\r\n text = \"\".join([self.UNK_TK.join(list(x)) if len(x) > 1 else x for x in groups])\r\n encoded = []\r\n\r\n for item in text:\r\n index = self.chars.find(item)\r\n index = self.UNK if index == -1 else index\r\n encoded.append(index)\r\n\r\n return np.asarray(encoded)",
"def load_encoded(filename):\n return np.fromfile(filename, dtype='uint8')",
"def decode_python(self, encoded_chunks: np.ndarray) -> np.ndarray:\n encoded_chunks_reshaped = encoded_chunks.reshape((-1, 1))\n encoded_chunks_tiles = np.tile(\n encoded_chunks_reshaped, [1, self._dtype_size_bytes]\n )\n encoded_chunks_bytes_shifted = np.right_shift(\n encoded_chunks_tiles, self._bit_lengths\n )\n encoded_chunks_bytes = encoded_chunks_bytes_shifted % 2 ** (\n self._utf8_size_bits\n )\n int_to_char_fn = lambda x: (dict(enumerate(self._int_to_byte_map)).get(x))\n\n # Added `otypes=(np.string_,)` as an additional arg to np.vectorize to avoid\n # numpy crashes with empty strings (not able to identify the type).\n decoded_chars = np.vectorize(int_to_char_fn, otypes=(np.string_,))(\n encoded_chunks_bytes\n )\n decoded_chars_reshaped = decoded_chars.reshape(-1, self._max_length)\n decoded_strings = np.apply_along_axis(\n lambda r: r.tobytes(), arr=decoded_chars_reshaped, axis=1\n )\n\n return decoded_strings",
"def sequence_encoding(sequence, str_to_idx):\n sequence_of_indexes = [str_to_idx[element] for element in sequence]\n return torch.tensor(sequence_of_indexes, dtype=torch.long)"
] | [
"0.6568203",
"0.6536662",
"0.6487182",
"0.6231948",
"0.6180096",
"0.6152209",
"0.5947935",
"0.59344274",
"0.59313184",
"0.59313184",
"0.5893763",
"0.5873927",
"0.5808209",
"0.56744933",
"0.56118715",
"0.5595339",
"0.55714226",
"0.55584484",
"0.5555841",
"0.55391484",
"0.55205256",
"0.5508514",
"0.55031073",
"0.55031",
"0.548618",
"0.548618",
"0.5477914",
"0.54508317",
"0.5421384",
"0.5412541"
] | 0.8272287 | 0 |
Test handling a poorly implemented locate_module method. | def test_handling_wrong_locate_module_implementation(method):
loader = WrongEnamlImporter()
with pytest.raises(ImportError):
getattr(loader, method)('module_name') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test___find_corresponding_module_for_location_exceptions(self):\r\n # pylint: disable=protected-access\r\n with self.assertRaises(ItemNotFoundError):\r\n self.peer_grading._find_corresponding_module_for_location(\r\n Location('org', 'course', 'run', 'category', 'name', 'revision')\r\n )",
"def test_find_module_py33():\n assert find_module_py33('_io') == (None, '_io', False)",
"def findModule(name):",
"def test_module(self):\n pass",
"def testFindAASpamAbs(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, 'spam')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('_fake.a.aa.spam', aaeggs))",
"def testFindsBuiltins(self):\r\n self.assertEqual('sys', modulefinder.get_module_filename('sys'))\r\n self.assertEqual('time', modulefinder.get_module_filename('time'))",
"def testRelativeImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, 'spam')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('spam', aaeggs))",
"def import_module(self, location, name):",
"def testPynocleImportsPynocle(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(THISDIR, '__init__')\r\n self.assertEqual(expected, modulefinder.get_module_filename('pynocle', __file__))",
"def test_import_string_missing_module(self):\n invalid_module = 'ttgn.nonexistent_module.foobar'\n with pytest.raises(ImportError):\n utils.import_string(invalid_module)",
"def testAbsolutePackageImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, '__init__')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('aa', aaeggs))",
"def test():\n import doctest\n from . import locate\n return doctest.testmod(locate)",
"def test_modules(self):\n for mod in self.expected_modules:\n try:\n __import__(mod)\n except ImportError:\n raise",
"def test_check_module(self) -> None:\n check_module(\"os\")",
"def find_spec(module_name):\n try:\n imp.find_module(module_name)\n return True\n except ImportError:\n return None",
"def testRelativePackageImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, '__init__')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('aa', aaeggs))",
"def find_module(cls, *args, **kwargs): # real signature unknown\n pass",
"def find_module(cls, *args, **kwargs): # real signature unknown\n pass",
"def find_module(cls, *args, **kwargs): # real signature unknown\n pass",
"def test_module_exists(self):\n project_path = os.getcwd()\n rango_app_path = os.path.join(project_path, 'rango')\n forms_module_path = os.path.join(rango_app_path, 'forms.py')\n\n self.assertTrue(os.path.exists(forms_module_path), f\"{FAILURE_HEADER}Couldn't find forms.py module.{FAILURE_FOOTER}\")",
"def test_get_imports(self):\n pass",
"def test_absent_imports():\n module, HABEMUS_MODULE = optional_import(\"not_real_module\")\n\n assert not HABEMUS_MODULE\n assert module.__name__ == \"not_real_module\"\n with pytest.raises(ModuleNotFoundError):\n _ = module.layers",
"def test_molecool_imported():\n assert \"molecool\" in sys.modules",
"def test_load_module_no_location(\n requested_model: ModuleModel, subject: ProtocolCore\n) -> None:\n with pytest.raises(InvalidModuleLocationError):\n subject.load_module(model=requested_model, deck_slot=None, configuration=None)",
"def test_relative_missing_import():\n with pytest.raises(ImportError):\n from . import _missing_import",
"def test_import_not_in_sys_path(Script):\n a = Script(path='module.py', line=5).goto_definitions()\n assert a[0].name == 'int'\n\n a = Script(path='module.py', line=6).goto_definitions()\n assert a[0].name == 'str'\n a = Script(path='module.py', line=7).goto_definitions()\n assert a[0].name == 'str'",
"def test_component_loading_generic_module_not_found_error(component_configuration):\n\n with mock.patch.object(\n Protocol,\n \"from_config\",\n side_effect=ModuleNotFoundError(\n \"Package loading error: An error occurred while loading .*: Generic error\"\n ),\n ):\n with pytest.raises(ModuleNotFoundError, match=\"Generic error\"):\n load_component_from_config(component_configuration)",
"def test_instantiate_non_existent_module(self):\n # create test configs\n test_configs = [\n {\"_target_\": \"non_existent_module.some_class\"},\n {\"_target_\": \"another_non_existent_module.some_class\", \"a\": 1, \"b\": 2}\n ]\n\n # check that instantiate raises ModuleNotFoundError for each test config\n for test_conf in test_configs:\n self.assertRaises(ModuleNotFoundError, instantiate, test_conf)",
"def test_deprecated_modules(self):\n\n deprecated_modules_present = False\n\n deprecated_modules = [\n \"game_assets\",\n \"models\",\n \"world\",\n \"modular_assets\",\n ]\n\n for path in self.application_files:\n for module in deprecated_modules:\n module_text = open(path).read()\n found_reference = False\n if \"import %s\" % module in module_text:\n found_reference = True\n if \"from %s\" % module in module_text:\n found_reference = True\n\n if found_reference:\n print(\"Found '%s' reference in %s\" % (module, path))\n deprecated_modules_present = True\n\n self.assertFalse(deprecated_modules_present)",
"def provoke_and_handle_ModuleNotFoundError():\n try:\n import arsiton324\n except ModuleNotFoundError as mnfe:\n print(f\"Sorry! {mnfe} was found!\")"
] | [
"0.704441",
"0.66467345",
"0.65472347",
"0.642813",
"0.6363407",
"0.6362937",
"0.625545",
"0.6194759",
"0.6184722",
"0.6180454",
"0.6120239",
"0.6073932",
"0.60296243",
"0.6000383",
"0.596782",
"0.5952297",
"0.5937704",
"0.5937704",
"0.5937704",
"0.59352267",
"0.5929191",
"0.59123325",
"0.5910788",
"0.59041786",
"0.58638954",
"0.5829872",
"0.5829069",
"0.582749",
"0.58228695",
"0.5794339"
] | 0.693356 | 1 |
Create an enaml module in a tempdir and add it to sys.path. | def enaml_module(tmpdir):
name = '__enaml_test_module__'
folder = str(tmpdir)
path = os.path.join(folder, name + '.enaml')
with open(path, 'w') as f:
f.write(SOURCE)
sys.path.append(folder)
yield name, folder, path
sys.path.remove(folder)
if name in sys.modules:
del sys.modules[name] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_import_and_cache_generation(enaml_module):\n name, folder, _ = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n\n # Check that the module attributes are properly populated\n mod = sys.modules[name]\n assert mod.__name__ == name\n assert mod.__file__ == os.path.join(folder, name + \".enaml\")\n assert os.path.join(folder, \"__enamlcache__\") in mod.__cached__\n assert isinstance(mod.__loader__, EnamlImporter)\n assert isinstance(mod.__spec__, ModuleSpec)\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n assert name in cache_name\n assert '.enamlc' in cache_name",
"def create_modules( package ):\n\n #we need to make the package directory.\n #we need to make the folder that this\n #parsed file will live in.\n # currentPath + package\n paths = package.split( \".\" )\n package = os.path.join( \"./\", os.path.join( *paths ) )\n os.makedirs( package )\n\n #Create the __init__.py files\n temp = \"./\"\n for p in paths:\n temp = os.path.join( temp, p )\n open( \"%s/__init__.py\" % temp, \"a\" ).close()",
"def new_module(reset_sys_argv, move_home_pypackage):\n\n TestModule.make_new()\n mod_path = TestModule.full_path()\n os.mkdir(mod_path)\n os.chdir(mod_path)\n return mod_path",
"def testRelativePackageImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, '__init__')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('aa', aaeggs))",
"def add_module(module_name: str):\n sys.path.append(module_name)",
"def generate_sample_sls_module(env_root, template_dir, module_dir=None):\n if module_dir is None:\n module_dir = os.path.join(env_root, 'sampleapp.sls')\n\n if os.path.isdir(module_dir):\n LOGGER.error(\"Error generating sample module -- directory %s \"\n \"already exists!\",\n module_dir)\n sys.exit(1)\n\n shutil.copytree(\n os.path.join(ROOT,\n 'templates',\n template_dir),\n module_dir\n )\n convert_gitignore(module_dir)\n LOGGER.info(\"Sample Serverless module created at %s\",\n module_dir)\n LOGGER.info('To finish its setup, change to the %s directory and execute '\n '\"npm install\" to generate its lockfile.', module_dir)",
"def add_modulepath_to_env(plugin_path, env_path):\n plugin_mods = os.path.join(plugin_path, \"modules\")\n open_format = 'a' if os.path.exists(env_path) else 'w'\n try:\n with open(env_path, open_format) as modfile:\n if open_format == 'a' and modfile.tell() != 0:\n modfile.seek(-1, os.SEEK_END)\n next_char = modfile.read(1)\n if next_char != '\\n':\n modfile.write('\\n')\n if os.pathsep == ';':\n modfile.write(\n \"MAYA_MODULE_PATH=%MAYA_MODULE_PATH%;{0}\".format(\n plugin_mods))\n else:\n modfile.write(\n \"MAYA_MODULE_PATH=$MAYA_MODULE_PATH:{0}\".format(\n plugin_mods))\n return AzureBatchSetup.create_modfile(plugin_mods, plugin_path)\n except Exception as exp:\n print(\"Couldn't create new maya env file: %s\" % env_path)\n return False",
"def setupModule(directory, moduleName, appendFile = None, includes = [\"*.py\"], excludes = []):\n\tmoduleHeader = '''#!/usr/bin/python\n# coding:utf-8\n# %(date)s\n\n# ____ ____ _ _ _ ____ ____ ____ _____ _ _ ___ _ ____ _____ \n# | __ \\| ___|| \\ / || | | __ \\| ___|| __ \\|_ _|| |_| | / _ \\ | | | ___||_ _|\n# | /| __| | \\/ || | | __ <| __| | / | | | _ |( |_| )| |_ | __| | | \n# |_|\\_\\|____||_|\\/|_||_| |____/|____||_|\\_\\ |_| |_| |_| \\___/ |___||____| |_| \n\nfrom sys import path\nfrom os import remove\n\n# Uudecode zipped module and write zip module\ntry:\n\tfrom base64 import decodebytes as decodestring \nexcept:\n\tfrom base64 import decodestring \nopen(\"%(moduleName)s.zip\",\"wb\").write(decodestring(b\"\"\"\n%(moduleContent)s\n\"\"\"))\n\n# Add zip archive module to PYTHONPATH\npath.insert(0, '%(moduleName)s.zip')\n\n# Add zip internal directory into PYTHONPATH to more easily import scripts between them\npath.insert(0, '%(moduleName)s.zip/%(moduleName)s_lib')\n\n# Import zip module\nfrom %(moduleName)s_lib import *\n\n# Remove zip module file : It is no longer useful\nremove (\"%(moduleName)s.zip\")\n'''\n\tfrom re import split as splitre, DOTALL\n\t\n\tmoduleFilename = moduleName + \".py\"\n\tmoduleContent = BytesIO()\n\tdate = getTimeString()\n\n\tzipDir(moduleContent, directory, includes, excludes + [moduleFilename], False, [[moduleName, moduleName+\"_lib\"]])\n\t\n\t# Uuencode zipped module \n\tmoduleContent = uuEncode(moduleContent.getvalue(), 8192)\n\t\n\t# Write python module\n\toutput = open(moduleFilename, \"w\")\n\toutput.write(moduleHeader%locals())\n\t\n\tif appendFile != None:\n\t\tif isString(appendFile):\n\t\t\tappendFile = [appendFile]\n\t\tfor file in appendFile:\n\t\t\tcontent = open(file,\"r\").read()\n\t\t\tspl = splitre(r\".*#<<<<(.*)#>>>>.*\", content, flags=DOTALL)\n\t\t\tif len(spl) > 1:\n\t\t\t\tcontent = spl[1]\n\t\t\toutput.write(content)\n\t\n\tprint (\"Module %s.py created\"%moduleName)\n\treturn moduleFilename",
"def testAbsolutePackageImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, '__init__')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('aa', aaeggs))",
"def _create_module(name):\n module = new.module(name)\n sys.modules[name] = module\n return module",
"def __ensure_testcase_module(path: Text) -> NoReturn:\n init_file = os.path.join(os.path.dirname(path), \"__init__.py\")\n if os.path.isfile(init_file):\n return\n\n with open(init_file, \"w\", encoding=\"utf-8\") as f:\n f.write(\"# NOTICE: Generated By HttpRunner. DO NOT EDIT!\\n\")",
"def setup_module():\n current_file = inspect.getfile(inspect.currentframe())\n current_dir = os.path.dirname(os.path.abspath(current_file))\n plugin_path = os.path.split(current_dir)[0] + os.sep\n AzureBatchSetup.set_environment(plugin_path)",
"def make_module(self, name, newpath=None):\r\n module = imp.new_module(name)\r\n module.__file__ = self.filename\r\n if newpath:\r\n module.__path__ = newpath\r\n sys.modules[name] = module\r\n exec self.code in vars(module)\r\n return module",
"def setup(base_path, root_module_name=\"caliper\"):\n if sys.modules.has_key(root_module_name):\n return\n _create_module_and_parents(root_module_name)\n imp.load_package(root_module_name, base_path)\n\n # allow locally installed third party packages to be found.\n sys.path.insert(0, os.path.join(base_path, \"site_packages\"))",
"def create_modfile(mod_path, plugin_path):\n try:\n modfile = os.path.join(mod_path, \"AzureBatch.mod\")\n with open(modfile, 'w') as mod:\n mod.write(\"+ AzureBatch {0} {1}\\n\".format(\n VERSION, plugin_path))\n mod.write(\"MAYA_PLUG_IN_PATH+={0}\\n\".format(\n os.path.join(plugin_path, \"plug-in\")))\n mod.write(\"MAYA_SCRIPT_PATH+:=mel\\n\")\n mod.write(\"AZUREBATCH_ICONS:=icons\\n\")\n mod.write(\"AZUREBATCH_TEMPLATES:=templates\\n\")\n mod.write(\"AZUREBATCH_SCRIPTS:=scripts\\n\")\n mod.write(\"AZUREBATCH_SCRIPTS+:=scripts/ui\\n\")\n mod.write(\"AZUREBATCH_TOOLS:=scripts/tools\\n\")\n mod.write(\"AZUREBATCH_MODULES:=modules\")\n\n print(\"Successfully created mod file at %s\" % mod_path)\n print(\"Setting environment variables for current session.\")\n os.environ[\"AZUREBATCH_SCRIPTS\"] = \"{0};{1};\".format(\n AzureBatchSetup.clean(os.path.join(plugin_path,\n \"scripts\")),\n AzureBatchSetup.clean(os.path.join(plugin_path,\n \"scripts\", \"ui\")))\n return True\n except Exception as err:\n print(str(err))\n print(\"Couldn't create mod file at %s\" % mod_path)\n return False",
"def app_module_tmp_path(tmp_path_factory: pytest.TempPathFactory) -> Path:\n tmp_dir = tmp_path_factory.mktemp(\"app\")\n shutil.copytree(Path(pre_start_module.__file__).parent, Path(f\"{tmp_dir}/tmp_app\"))\n return tmp_dir",
"def add_python_files(self):",
"def make_package(tmp_path, pyproject_toml):\n return make_package_base(tmp_path, pyproject_toml)",
"def _create_module(self, rootdir):\n name = 'module_' + rootdir.get_name()\n moduleobj = Module(name, rootdir)\n rootdir.set_module(moduleobj)\n self._modules[name] = moduleobj",
"def _make_basic_environment(self, text):\n current_plugins = len(registry.get_checkers() + registry.get_contexts())\n\n root = tempfile.mkdtemp(suffix=\"_some_python_path_root\")\n self.delete_item_later(root)\n sys.path.append(root)\n\n with tempfile.NamedTemporaryFile(suffix=\".py\", delete=True) as handler:\n pass\n\n with open(os.path.join(root, os.path.basename(handler.name)), \"w\") as handler:\n handler.write(text)\n\n _convert_to_importable_namespace(handler.name, root)\n namespace = _convert_to_importable_namespace(handler.name, root)\n\n os.environ[\"REZ_LINT_PLUGIN_PATHS\"] = namespace\n cli._register_external_plugins.has_run = ( # pylint: disable=protected-access\n False\n )\n cli._register_external_plugins() # pylint: disable=protected-access\n\n return current_plugins",
"def testRelativeImport(self):\r\n self.buildTempDirs()\r\n expected = os.path.join(self.temp_fake_aa, 'spam')\r\n aaeggs = os.path.join(self.temp_fake_aa, 'eggs.py')\r\n self.assertEqual(expected, modulefinder.get_module_filename('spam', aaeggs))",
"def make_module_extra(self):\n txt = super(EB_CPLEX, self).make_module_extra()\n\n try:\n cwd = os.getcwd()\n os.chdir(self.installdir)\n bins = glob.glob(os.path.join('*', 'bin', 'x86-64*'))\n libs = glob.glob(os.path.join('*', 'lib', 'x86-64*', '*pic'))\n os.chdir(cwd)\n except OSError as err:\n raise EasyBuildError(\"Failed to determine bin/lib subdirs: %s\", err)\n\n txt += self.module_generator.prepend_paths('PATH', [path for path in bins])\n txt += self.module_generator.prepend_paths('LD_LIBRARY_PATH', [path for path in bins+libs])\n\n txt += self.module_generator.set_environment('CPLEX_HOME', os.path.join(self.installdir, 'cplex'))\n txt += self.module_generator.set_environment('CPLEXDIR', os.path.join(self.installdir, 'cplex'))\n\n self.log.debug(\"make_module_extra added %s\" % txt)\n return txt",
"def prep_rel_package(directory: str) -> Path:\n path = _setup_dir(directory)\n # once here, the directory exists and is a directory; clean it out\n # _clean_directory(str(path))\n init = path / \"__init__.py\"\n init.touch()\n f = init.open('w')\n print(_module_docstring, file=f)\n print(_package_init_code, file=f)\n print(file=f)\n output_footer(stream=f)\n return path",
"def generate_basic_modules(template_dir=TEMPLATE_DIR, out_dir=PKG_DIR):\n print(80 * \"-\")\n print(\"Package:\", out_dir)\n\n basic_modules = [\"_init.py\",\n \"constants.py\",\n \"base_api.py\",\n \"exception.py\"]\n\n if not os.path.exists(out_dir):\n os.mkdir(out_dir)\n\n installed = []\n for module in basic_modules:\n in_file = os.path.join(template_dir, module)\n\n if module == \"_init.py\":\n module = \"__init__.py\"\n\n out_file = os.path.join(out_dir, module)\n try:\n shutil.copy(in_file, out_file)\n except (FileNotFoundError, shutil.SameFileError) as err:\n print(err)\n installed.append(\"- \" + out_file)\n\n print(\"Basic modules:\")\n print(\"\\n\".join(installed))",
"def set_environment(plugin_path):\n srcpath = os.path.join(plugin_path, \"scripts\")\n icnpath = os.path.join(plugin_path, \"icons\")\n melpath = os.path.join(plugin_path, \"mel\")\n modpath = os.path.join(plugin_path, \"modules\")\n tplpath = os.path.join(plugin_path, \"templates\")\n tolpath = os.path.join(plugin_path, \"scripts\", \"tools\")\n sys.path.append(modpath)\n sys.path.append(srcpath)\n sys.path.append(os.path.join(srcpath, \"ui\"))\n sys.path.append(tolpath)\n\n script_dirs = os.environ[\"MAYA_SCRIPT_PATH\"] + os.pathsep\n os.environ[\"AZUREBATCH_ICONS\"] = AzureBatchSetup.clean(icnpath)\n os.environ[\"AZUREBATCH_MODULES\"] = AzureBatchSetup.clean(modpath)\n os.environ[\"AZUREBATCH_TEMPLATES\"] = AzureBatchSetup.clean(tplpath)\n os.environ[\"AZUREBATCH_TOOLS\"] = AzureBatchSetup.clean(tolpath)\n os.environ[\"MAYA_SCRIPT_PATH\"] = script_dirs + \\\n AzureBatchSetup.clean(melpath)\n print(\"Attempting to create mod file under MAYA_MODULE_PATH\")\n mods = AzureBatchSetup.find_modules_locations(plugin_path)\n\n if not mods:\n print(\"Attempting to add custom module path to Maya.env\")\n mods = AzureBatchSetup.find_env_location(plugin_path)\n if not mods:\n print(\"Failed to setup AzureBatch mod file\")\n return os.environ[\"MAYA_MODULE_PATH\"] + os.pathsep",
"def test_handling_importing_a_bugged_module(enaml_module):\n name, _, path = enaml_module\n with open(path, 'a') as f:\n f.write('\\nraise RuntimeError()')\n\n assert name not in sys.modules\n with imports():\n with pytest.raises(RuntimeError):\n importlib.import_module(name)\n\n assert name not in sys.modules",
"def generate_sample_cdk_py_module(env_root, module_dir=None):\n if module_dir is None:\n module_dir = os.path.join(env_root, 'sampleapp.cdk')\n generate_sample_module(module_dir)\n os.mkdir(os.path.join(module_dir, 'hello'))\n for i in ['hello/__init__.py', 'hello/hello_construct.py',\n 'hello/hello_stack.py', '.gitignore', 'app.py', 'cdk.json',\n 'package.json', 'Pipfile', 'Pipfile.lock', 'runway.module.yml']:\n shutil.copyfile(\n os.path.join(ROOT,\n 'templates',\n 'cdk-py',\n i),\n os.path.join(module_dir, i),\n )\n with open(os.path.join(module_dir, '.gitignore'), 'w') as stream:\n stream.write('node_modules')\n LOGGER.info(\"Sample CDK module created at %s\", module_dir)\n LOGGER.info('To finish its setup, change to the %s directory and execute '\n '\"npm install\" to generate its lockfile.', module_dir)",
"def write_module(args, module_path, templates):\n now_str = datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n module_code = templates.format(author=args.author,\n email_addr=args.email,\n name=args.name,\n now=now_str,\n purpose=args.purpose)\n with open(module_path, 'w') as f_obj:\n f_obj.writelines(module_code)",
"def insert_package_path():\n sys.path.insert(0, ospdn(ospdn(ospdn(ospap(__file__)))))",
"def init_src(config):\n new_py = new_hark = None\n\n os.makedirs(str(config.project.python_src), exist_ok=True)\n\n py_init = config.project.python_src / \"__init__.py\"\n if not py_init.exists():\n with open(py_init, \"w\") as f:\n f.write(\"\")\n new_py = py_init\n\n if not config.project.hark_file.exists():\n with open(config.project.hark_file, \"w\") as f:\n main = 'fn main() {\\n print(\"Hello World!\");\\n}\\n'\n f.write(f\"// Something great begins here.\\n\\n\\n{main}\")\n new_hark = config.project.hark_file\n\n return new_py, new_hark"
] | [
"0.6082835",
"0.6066244",
"0.60439175",
"0.58154875",
"0.5798756",
"0.57679164",
"0.575023",
"0.574298",
"0.57139564",
"0.56493825",
"0.5631153",
"0.5621098",
"0.5608184",
"0.5584475",
"0.55722296",
"0.55148625",
"0.54987985",
"0.5458551",
"0.54473263",
"0.54202634",
"0.54123217",
"0.53777385",
"0.5333992",
"0.53328294",
"0.5325023",
"0.53019816",
"0.5282772",
"0.5281177",
"0.5250387",
"0.52402204"
] | 0.7460824 | 0 |
Test importing a module and checking that the cache was generated. | def test_import_and_cache_generation(enaml_module):
name, folder, _ = enaml_module
with imports():
importlib.import_module(name)
assert name in sys.modules
# Check that the module attributes are properly populated
mod = sys.modules[name]
assert mod.__name__ == name
assert mod.__file__ == os.path.join(folder, name + ".enaml")
assert os.path.join(folder, "__enamlcache__") in mod.__cached__
assert isinstance(mod.__loader__, EnamlImporter)
assert isinstance(mod.__spec__, ModuleSpec)
cache_folder = os.path.join(folder, '__enamlcache__')
assert os.path.isdir(cache_folder)
cache_name = os.listdir(cache_folder)[0]
assert name in cache_name
assert '.enamlc' in cache_name | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_import_when_cache_exists(enaml_module):\n name, folder, _ = enaml_module\n assert name not in sys.modules\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n cache_path = os.path.join(cache_folder, cache_name)\n cache_time = os.path.getmtime(cache_path)\n\n with imports():\n importlib.import_module(name)\n\n assert os.path.getmtime(cache_path) == cache_time\n assert name in sys.modules",
"def test_import_cache_only(enaml_module):\n name, _, path = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n os.remove(path)\n\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules",
"def test_module(self):\n pass",
"def test_imports():\n assert False",
"def test_module_imports(self):\n apps = [\n 'customers',\n 'customers.migrations',\n 'customers.management',\n 'customers.management.commands',\n 'customers.management.commands.load_customers_to_redis',\n 'customers.forms',\n 'customers.admin',\n 'customers.models',\n 'customers.urls',\n 'customers.views',\n ]\n for a in apps:\n self.assertTrue(module_exists(a))",
"def test_molecool_imported():\n assert \"molecool\" in sys.modules",
"def test_modules(self):\n for mod in self.expected_modules:\n try:\n __import__(mod)\n except ImportError:\n raise",
"def test_qm_project_python_testing_imported():\n assert \"qm_project_python_testing\" in sys.modules",
"def test_local_cache():",
"def test_parrot_imported():\n assert \"parrot\" in sys.modules",
"def test_xchemOT_imported():\n assert \"xchemOT\" in sys.modules",
"def test_ifPythonModuleIsInstalled():\n for name in config.toTest:\n testConfig = dynamicallyLoadModule(name)\n if \"pyModule\" in testConfig.config:\n print \"pyModule: \"+ testConfig.config[\"name\"]\n yield assertionFunctions.checkIfPythonModuleIsInstalled, testConfig.config",
"def test_import():\n import chrisbrake\n assert chrisbrake",
"def test_rlmm_imported():\n assert \"rlmm\" in sys.modules",
"def test_ensureWhenNotImportedDontPrevent(self):\n modules = {}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\")\n self.assertEqual(modules, {})",
"def test_ensureWhenNotImported(self):\n modules = {}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\", preventImports=[\"m1\", \"m2\", \"m3\"])\n self.assertEqual(modules, {\"m1\": None, \"m2\": None, \"m3\": None})",
"def test_get_imports(self):\n pass",
"def test_ensureWhenFailedToImport(self):\n modules = {\"m2\": None}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\", preventImports=[\"m1\", \"m2\"])\n self.assertEqual(modules, {\"m1\": None, \"m2\": None})",
"def test_mmelemental_imported():\n import sys\n\n assert \"mmelemental\" in sys.modules",
"def test_molssi_project_imported():\n assert \"molssi_project\" in sys.modules",
"def test_ufedmm_imported():\n assert \"ufedmm\" in sys.modules",
"def test_importable():\n root_path = os.path.dirname(MY_DIRECTORY)\n\n for version in versioning.get_all_versions():\n v = version.label.replace(\".\", \"_\")\n path = os.path.join(root_path, v)\n module_names = [m[:-3] for m in os.listdir(path) if m.endswith(\".py\")]\n for name in module_names:\n m = importlib.import_module(\".\".join([\"kuber\", v, name]))\n assert m is not None, f\"Expected kuber.{v}.{m} to be importable.\"",
"def test_reload_if_needed(self):\n cache = DummyCache()\n cache.reload_from_storage = MagicMock()\n cache.reload_if_needed()\n self.assertTrue(cache.reload_from_storage.called)",
"def test_imports():\n from tg_utils import admin\n from tg_utils import checks\n from tg_utils import compressor_filters\n from tg_utils import email\n from tg_utils import files\n from tg_utils import hashmodels\n from tg_utils import lock\n from tg_utils import managers\n from tg_utils import mixins\n from tg_utils import models\n from tg_utils import profiling\n from tg_utils import signals\n from tg_utils import uuid\n from tg_utils import decorators",
"def test_import(self):\n try:\n import gtcal\n except ImportError:\n self.fail(\"Could not import gtcal\")",
"def testmodule():\n import doctest\n import sys\n thismodule = sys.modules[__name__]\n return doctest.testmod(m=thismodule)",
"def testmodule():\n import doctest\n import sys\n thismodule = sys.modules[__name__]\n return doctest.testmod(m=thismodule)",
"def test_imports():\n import sys\n import src\n assert 'sklearn.feature_extraction' not in sys.modules.keys()",
"def test_IMOD_pass(self):\n self.assertTrue(self.mod.isset)",
"def test_ensureFailsWhenImported(self):\n module = object()\n modules = {\"m2\": module}\n self.patch(sys, \"modules\", modules)\n e = self.assertRaises(\n ImportError,\n ensureNotImported,\n [\"m1\", \"m2\"],\n \"A message.\",\n preventImports=[\"m1\", \"m2\"],\n )\n self.assertEqual(modules, {\"m2\": module})\n self.assertEqual(e.args, (\"A message.\",))"
] | [
"0.7983087",
"0.7303237",
"0.72239596",
"0.7091177",
"0.69668573",
"0.693975",
"0.67968786",
"0.67071986",
"0.6687973",
"0.66533375",
"0.6651848",
"0.6650018",
"0.6642446",
"0.66381323",
"0.6611553",
"0.6525819",
"0.6480871",
"0.6442875",
"0.6429513",
"0.6378989",
"0.63643533",
"0.63496727",
"0.6293848",
"0.6283989",
"0.62542653",
"0.6235454",
"0.6235454",
"0.62353194",
"0.6229822",
"0.6222852"
] | 0.7375834 | 1 |
Test importing a module when the cache exists. | def test_import_when_cache_exists(enaml_module):
name, folder, _ = enaml_module
assert name not in sys.modules
with imports():
importlib.import_module(name)
assert name in sys.modules
del sys.modules[name]
cache_folder = os.path.join(folder, '__enamlcache__')
assert os.path.isdir(cache_folder)
cache_name = os.listdir(cache_folder)[0]
cache_path = os.path.join(cache_folder, cache_name)
cache_time = os.path.getmtime(cache_path)
with imports():
importlib.import_module(name)
assert os.path.getmtime(cache_path) == cache_time
assert name in sys.modules | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __exist_module_in_sys_cache(module_name):\n try:\n if hasattr(sys, 'stypy_module_cache'):\n return module_name in sys.stypy_module_cache\n else:\n __preload_sys_module_cache()\n return False\n except:\n return False",
"def test_import_cache_only(enaml_module):\n name, _, path = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n os.remove(path)\n\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules",
"def _import_module(self, name):\r\n try:\r\n __import__(name)\r\n return True\r\n except ImportError:\r\n return False",
"def module_exists(module_name):\r\n\r\n try:\r\n __import__(module_name)\r\n except ImportError:\r\n return False\r\n else:\r\n return True",
"def has_module(name):\n _refresh_cache()\n return name in _modules",
"def test_import_allows_multiple_modules_failure(self):\n # Deliberately using modules that will already be imported to avoid side effects.\n feature = LazyImportTester([\"site\", \"sys\", \"_qiskit_module_does_not_exist_\"])\n with mock_availability_test(feature) as check:\n check.assert_not_called()\n self.assertFalse(feature)\n check.assert_called_once()",
"def test_module_imports(self):\n apps = [\n 'customers',\n 'customers.migrations',\n 'customers.management',\n 'customers.management.commands',\n 'customers.management.commands.load_customers_to_redis',\n 'customers.forms',\n 'customers.admin',\n 'customers.models',\n 'customers.urls',\n 'customers.views',\n ]\n for a in apps:\n self.assertTrue(module_exists(a))",
"def test_ifPythonModuleIsInstalled():\n for name in config.toTest:\n testConfig = dynamicallyLoadModule(name)\n if \"pyModule\" in testConfig.config:\n print \"pyModule: \"+ testConfig.config[\"name\"]\n yield assertionFunctions.checkIfPythonModuleIsInstalled, testConfig.config",
"def have_python_version(name, cache={}):\n if name not in cache:\n cache[name] = os.system(name + ' -c \"import test.test_support\"') == 0\n return cache[name]",
"def test_import_allows_multiple_modules_successful(self):\n # Deliberately using modules that will already be imported to avoid side effects.\n feature = LazyImportTester([\"site\", \"sys\"])\n with mock_availability_test(feature) as check:\n check.assert_not_called()\n self.assertTrue(feature)\n check.assert_called_once()",
"def test_molecool_imported():\n assert \"molecool\" in sys.modules",
"def test_import_nothandled():\n sys.meta_path.append(TaskImporter())\n with pytest.raises(ImportError):\n import_module('not.exist')",
"def test_ensureWhenFailedToImport(self):\n modules = {\"m2\": None}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\", preventImports=[\"m1\", \"m2\"])\n self.assertEqual(modules, {\"m1\": None, \"m2\": None})",
"def test_ensureWhenNotImported(self):\n modules = {}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\", preventImports=[\"m1\", \"m2\", \"m3\"])\n self.assertEqual(modules, {\"m1\": None, \"m2\": None, \"m3\": None})",
"def test_modules(self):\n for mod in self.expected_modules:\n try:\n __import__(mod)\n except ImportError:\n raise",
"def try_import(module_name):\n have_module = True\n try:\n importlib.import_module(module_name)\n except ImportError:\n logging.warn(\"Module '%s' cannot be imported, certain system information will not be available\", module_name)\n have_module = False\n return have_module",
"def test_reload_if_needed(self):\n cache = DummyCache()\n cache.reload_from_storage = MagicMock()\n cache.reload_if_needed()\n self.assertTrue(cache.reload_from_storage.called)",
"def checkIfPythonModuleIsInstalled(testConfig):\n try:\n exec(\"import \"+testConfig[\"pyModule\"])\n assert True\n except Exception as e:\n assert False, testConfig[\"name\"]+\": \"+testConfig[\"pyModule\"]+\" could not successfully be loaded in Python.\"",
"def test_ensureWhenNotImportedDontPrevent(self):\n modules = {}\n self.patch(sys, \"modules\", modules)\n ensureNotImported([\"m1\", \"m2\"], \"A message.\")\n self.assertEqual(modules, {})",
"def can_import(name):\n try:\n __import__(name)\n return True\n except ImportError:\n return False",
"def test_import_and_cache_generation(enaml_module):\n name, folder, _ = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n\n # Check that the module attributes are properly populated\n mod = sys.modules[name]\n assert mod.__name__ == name\n assert mod.__file__ == os.path.join(folder, name + \".enaml\")\n assert os.path.join(folder, \"__enamlcache__\") in mod.__cached__\n assert isinstance(mod.__loader__, EnamlImporter)\n assert isinstance(mod.__spec__, ModuleSpec)\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n assert name in cache_name\n assert '.enamlc' in cache_name",
"def testImport(self):\n success = False\n try:\n from cutlass import DiseaseMeta\n success = True\n except:\n pass\n\n self.failUnless(success)\n self.failIf(DiseaseMeta is None)",
"def module_imported(module_name):\n return sys.modules.get(module_name) is not None",
"def test_imports():\n assert False",
"def is_installed(module):\n try:\n __import__(module)\n return True\n except ImportError as e:\n return False",
"def test_local_cache():",
"def test_absent_imports():\n module, HABEMUS_MODULE = optional_import(\"not_real_module\")\n\n assert not HABEMUS_MODULE\n assert module.__name__ == \"not_real_module\"\n with pytest.raises(ModuleNotFoundError):\n _ = module.layers",
"def test_rlmm_imported():\n assert \"rlmm\" in sys.modules",
"def test_import_fails_with_no_modules(self):\n with self.assertRaises(ValueError):\n LazyImportTester([])",
"def test_compiled_import_none(monkeypatch, Script):\n monkeypatch.setattr(compiled, 'load_module', lambda *args, **kwargs: None)\n assert not Script('import sys').goto_definitions()"
] | [
"0.7172652",
"0.70065624",
"0.6809813",
"0.67054206",
"0.66869247",
"0.6645123",
"0.6615341",
"0.65780705",
"0.6531684",
"0.64752275",
"0.64740145",
"0.64509606",
"0.6437659",
"0.6436985",
"0.6392631",
"0.6389371",
"0.63749903",
"0.6354479",
"0.6347128",
"0.6333608",
"0.63293666",
"0.6322523",
"0.6265581",
"0.6260738",
"0.62498",
"0.6225009",
"0.6224707",
"0.62241936",
"0.6221277",
"0.62207013"
] | 0.80143213 | 0 |
Standard enaml importer whose state is restored after testing. | def enaml_importer():
print(imports, dir(imports))
old = imports.get_importers()
yield imports
imports._imports__importers = old | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_importer_management(enaml_importer):\n standard_importers_numbers = len(enaml_importer.get_importers())\n enaml_importer.add_importer(WrongEnamlImporter)\n assert WrongEnamlImporter in enaml_importer.get_importers()\n enaml_importer.add_importer(WrongEnamlImporter)\n assert (len(enaml_importer.get_importers()) ==\n standard_importers_numbers + 1)\n enaml_importer.remove_importer(WrongEnamlImporter)\n\n # Test removing twice\n enaml_importer.remove_importer(WrongEnamlImporter)\n\n with pytest.raises(TypeError):\n enaml_importer.add_importer(object)",
"def importer():\n pass",
"def test_import_and_cache_generation(enaml_module):\n name, folder, _ = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n\n # Check that the module attributes are properly populated\n mod = sys.modules[name]\n assert mod.__name__ == name\n assert mod.__file__ == os.path.join(folder, name + \".enaml\")\n assert os.path.join(folder, \"__enamlcache__\") in mod.__cached__\n assert isinstance(mod.__loader__, EnamlImporter)\n assert isinstance(mod.__spec__, ModuleSpec)\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n assert name in cache_name\n assert '.enamlc' in cache_name",
"def test_yaml_import_and_export(self) -> None:\n self.save_new_valid_exploration(\n self.EXPLORATION_ID, '[email protected]', end_state_name='End')\n\n collection = collection_domain.Collection.create_default_collection(\n self.COLLECTION_ID, title='A title', category='A category',\n objective='An objective')\n collection.add_node(self.EXPLORATION_ID)\n self.assertEqual(len(collection.nodes), 1)\n\n collection.validate()\n\n yaml_content = collection.to_yaml()\n self.assertEqual(yaml_content, SAMPLE_YAML_CONTENT)\n\n collection2 = collection_domain.Collection.from_yaml(\n 'collection2', yaml_content)\n self.assertEqual(len(collection2.nodes), 1)\n yaml_content_2 = collection2.to_yaml()\n self.assertEqual(yaml_content_2, yaml_content)\n\n # Should not be able to create a collection from no YAML content.\n with self.assertRaisesRegex(\n utils.InvalidInputException,\n 'Please ensure that you are uploading a YAML text file, '\n 'not a zip file. The YAML parser returned the following error: '\n ):\n collection_domain.Collection.from_yaml('collection3', '')",
"def loadseasoning(self):\n stream = open(self.fileref)\n self.config = yaml.safe_load(stream)\n stream.close()",
"def setUp(self):\n self._expected = yaml.safe_load(OUT)",
"def setUp(self):\n\n def import_hook(name, *args, **kwargs):\n if name == 'actstream':\n raise ImportError('test case module import failure')\n else:\n return self.original_imports(name, *args, **kwargs)\n\n self.original_imports = builtins.__import__\n builtins.__import__ = import_hook",
"def enaml_module(tmpdir):\n name = '__enaml_test_module__'\n folder = str(tmpdir)\n path = os.path.join(folder, name + '.enaml')\n with open(path, 'w') as f:\n f.write(SOURCE)\n sys.path.append(folder)\n\n yield name, folder, path\n\n sys.path.remove(folder)\n if name in sys.modules:\n del sys.modules[name]",
"def test_import_cache_only(enaml_module):\n name, _, path = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n os.remove(path)\n\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules",
"def restore(self):\n self._result.unparse_seq = self._unparse_seq",
"def tearDown(self):\n builtins.__import__ = self.original_imports",
"def _load_restored(self, dataset_path):\n for group in ['knowledge', 'source', 'target']:\n if getattr(self, group + '_format') != 'none':\n text_data = load_restored(dataset_path, group + '.', ignore_file='vocab')[0]\n setattr(self, group + '_text_data', text_data)\n idx2token, token2idx = load_restored(dataset_path, ignore_file='data')\n setattr(self, 'idx2token', idx2token)\n setattr(self, 'token2idx', token2idx)\n self.max_vocab_size = len(self.idx2token)\n self.logger.info(\"Restore finished!\")",
"def load(self,previous=True):\n\n\t\tincoming = pickle.load(open(self.filename,'rb'))\n\t\t#---reconstitute things that were bootstrapped\n\t\t#---we do not load spots because e.g. paths might have changed slightly in paths.yaml\n\t\tself.post = incoming.post\n\t\tself.groups = incoming.groups\n\t\tself.slices = incoming.slices\n\t\tself.vars = incoming.vars\n\t\tself.meta = incoming.meta\n\t\tself.calc = incoming.calc\n\t\tself.toc = incoming.toc\n\n\t\t#---retain the incoming workspace for comparison\n\t\tif previous: self.previous = incoming",
"def test_load_dangling(self):\n with NamedTemporaryFile(suffix=\".yaml\") as config:\n with open(config.name, \"w\") as write_stream:\n write_stream.write(\n \"\"\"\n pipeline:\n - !LinearController\n low_utilisation: 0.9\n high_utilisation: 1.1\n - !MockPool\n random_things:\n foo: bar\n \"\"\"\n )\n with pytest.raises(ConfigurationError):\n with load(config.name):\n assert False",
"def test_assessment_import_states(self):\n self.import_file(\"assessment_full_no_warnings.csv\")\n response = self.import_file(\"assessment_update_intermediate.csv\")\n expected_errors = {\n \"Assessment\": {\n \"block_errors\": set(),\n \"block_warnings\": set(),\n \"row_errors\": set(),\n \"row_warnings\": set(),\n }\n }\n self._check_csv_response(response, expected_errors)\n\n assessments = {r.slug: r for r in models.Assessment.query.all()}\n self.assertEqual(assessments[\"Assessment 60\"].status,\n models.Assessment.START_STATE)\n self.assertEqual(assessments[\"Assessment 61\"].status,\n models.Assessment.PROGRESS_STATE)\n self.assertEqual(assessments[\"Assessment 62\"].status,\n models.Assessment.DONE_STATE)\n self.assertEqual(assessments[\"Assessment 63\"].status,\n models.Assessment.FINAL_STATE)\n self.assertEqual(assessments[\"Assessment 64\"].status,\n models.Assessment.FINAL_STATE)\n self.assertEqual(assessments[\"Assessment 3\"].status,\n models.Assessment.FINAL_STATE)\n self.assertEqual(assessments[\"Assessment 4\"].status,\n models.Assessment.FINAL_STATE)\n\n # Check that there is only one attachment left\n asmt1 = assessments[\"Assessment 1\"]\n self.assertEqual({\"a.b.com\", \"c d com\"},\n {i.title for i in asmt1.document_url})\n self.assertEqual({u'evidence title 1'},\n {i.title for i in asmt1.document_evidence})",
"def ini_restore():\n raise NotImplementedError()",
"def test_load(self):\n with NamedTemporaryFile(suffix=\".yaml\") as config:\n with open(config.name, \"w\") as write_stream:\n write_stream.write(\n \"\"\"\n pipeline:\n - !LinearController\n low_utilisation: 0.9\n high_utilisation: 1.1\n - !MockPool\n \"\"\"\n )\n with load(config.name):\n assert True\n assert True",
"def _load_datas(self) -> tp.Dict[str, dict]:\n with open(self._file, \"r\") as stream:\n try:\n load: tp.Dict[str, dict] = yaml.safe_load(stream)\n logger.info(\"YAML imported\")\n return load\n except yaml.YAMLError as exc:\n logger.debug(\"YAML import error : %s\", exc)\n raise",
"def run_import(self, yaml,\n **kwargs):\n with NamedTemporaryFile('w+', dir=str(self.get_import_dir())) as temp:\n temp.write(yaml)\n temp.seek(0)\n\n kwargs.setdefault('stdout', open(os.devnull, 'w'))\n kwargs.setdefault('stderr', open(os.devnull, 'w'))\n call_command('import_pages', temp.name, **kwargs)",
"def test_import_process(self):\r\n good_file = self._get_del_file()\r\n imp = Importer(good_file, username=u\"admin\")\r\n imp.process()\r\n\r\n # now let's do some db sanity checks\r\n self._delicious_xml_data_test()",
"def __ordered_load(self, stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):\n class OrderedLoader(Loader):\n pass\n OrderedLoader.add_constructor(\n yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,\n lambda loader, node: object_pairs_hook(loader.construct_pairs(node)))\n return yaml.load(stream, OrderedLoader)",
"def load(self):\n pass",
"def load(self):\n pass",
"def load(self):\n pass",
"def load(self):\n pass",
"def test_load_from_v1(self) -> None:\n self.save_new_valid_exploration(\n 'Exp1', '[email protected]', end_state_name='End')\n collection = collection_domain.Collection.from_yaml(\n 'cid', self.YAML_CONTENT_V1)\n self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)",
"async def async_step_import(self, user_input):\n _LOGGER.error(\n \"Configuration via YAML file is no longer supported by this integration.\"\n )",
"def import_(self, node):\n yamal_name = os.path.join(self._root, self.construct_scalar(node))\n\n with open(yamal_name, 'r') as yamal_file:\n return yaml.load(yamal_file, ImportLoader)",
"def test_load_from_v4(self) -> None:\n self.save_new_valid_exploration(\n 'Exp1', '[email protected]', end_state_name='End')\n collection = collection_domain.Collection.from_yaml(\n 'cid', self.YAML_CONTENT_V4)\n self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)",
"def ignore_test_load_and_persist_without_train(self):\n test_config = \"tests/data/test_config/test_config.json\"\n config = AnnotatorConfig(test_config)\n\n trainer = Trainer(config)\n assert len(trainer.pipeline) > 0\n # create tmp train set\n tmp_path = create_tmp_test_jsonfile(\"tmp.json\")\n train_data = load_local_data(tmp_path)\n # rm tmp train set\n rm_tmp_file(\"tmp.json\")\n\n # interpreter = trainer.train(train_data)\n # test persist and load\n persisted_path = trainer.persist(config['path'],\n config['project'],\n config['fixed_model_name'])\n\n interpreter_loaded = Interpreter.load(persisted_path, config)\n assert interpreter_loaded.pipeline\n assert interpreter_loaded.parse(\"hello\") is not None\n assert interpreter_loaded.parse(\"Hello today is Monday, again!\") is not None\n # remove tmp models\n shutil.rmtree(config['path'], ignore_errors=False)"
] | [
"0.624024",
"0.58275086",
"0.5791585",
"0.57655126",
"0.55804414",
"0.55008775",
"0.5492725",
"0.53878236",
"0.5371314",
"0.5351267",
"0.5319722",
"0.52447087",
"0.5232631",
"0.5228512",
"0.5220751",
"0.52105415",
"0.5205329",
"0.5183187",
"0.5170881",
"0.5164955",
"0.5149321",
"0.5136016",
"0.5136016",
"0.5136016",
"0.5136016",
"0.51250017",
"0.5092205",
"0.5091983",
"0.50869286",
"0.508089"
] | 0.6965677 | 0 |
Test manually managing enaml importers. | def test_importer_management(enaml_importer):
standard_importers_numbers = len(enaml_importer.get_importers())
enaml_importer.add_importer(WrongEnamlImporter)
assert WrongEnamlImporter in enaml_importer.get_importers()
enaml_importer.add_importer(WrongEnamlImporter)
assert (len(enaml_importer.get_importers()) ==
standard_importers_numbers + 1)
enaml_importer.remove_importer(WrongEnamlImporter)
# Test removing twice
enaml_importer.remove_importer(WrongEnamlImporter)
with pytest.raises(TypeError):
enaml_importer.add_importer(object) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def enaml_importer():\n print(imports, dir(imports))\n old = imports.get_importers()\n\n yield imports\n\n imports._imports__importers = old",
"def test_import_and_cache_generation(enaml_module):\n name, folder, _ = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n\n # Check that the module attributes are properly populated\n mod = sys.modules[name]\n assert mod.__name__ == name\n assert mod.__file__ == os.path.join(folder, name + \".enaml\")\n assert os.path.join(folder, \"__enamlcache__\") in mod.__cached__\n assert isinstance(mod.__loader__, EnamlImporter)\n assert isinstance(mod.__spec__, ModuleSpec)\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n assert name in cache_name\n assert '.enamlc' in cache_name",
"def test_get_imports(self):\n pass",
"def setUp(self):\n\n def import_hook(name, *args, **kwargs):\n if name == 'actstream':\n raise ImportError('test case module import failure')\n else:\n return self.original_imports(name, *args, **kwargs)\n\n self.original_imports = builtins.__import__\n builtins.__import__ = import_hook",
"def test_imports():\n from tg_utils import admin\n from tg_utils import checks\n from tg_utils import compressor_filters\n from tg_utils import email\n from tg_utils import files\n from tg_utils import hashmodels\n from tg_utils import lock\n from tg_utils import managers\n from tg_utils import mixins\n from tg_utils import models\n from tg_utils import profiling\n from tg_utils import signals\n from tg_utils import uuid\n from tg_utils import decorators",
"def test_handling_wrong_locate_module_implementation(method):\n loader = WrongEnamlImporter()\n with pytest.raises(ImportError):\n getattr(loader, method)('module_name')",
"def test_imports():\n from .context import readersender # noqa: F401",
"def test_handling_importing_a_bugged_module(enaml_module):\n name, _, path = enaml_module\n with open(path, 'a') as f:\n f.write('\\nraise RuntimeError()')\n\n assert name not in sys.modules\n with imports():\n with pytest.raises(RuntimeError):\n importlib.import_module(name)\n\n assert name not in sys.modules",
"def test_importable():\n root_path = os.path.dirname(MY_DIRECTORY)\n\n for version in versioning.get_all_versions():\n v = version.label.replace(\".\", \"_\")\n path = os.path.join(root_path, v)\n module_names = [m[:-3] for m in os.listdir(path) if m.endswith(\".py\")]\n for name in module_names:\n m = importlib.import_module(\".\".join([\"kuber\", v, name]))\n assert m is not None, f\"Expected kuber.{v}.{m} to be importable.\"",
"def test_imports():\n assert False",
"def test_modules(self):\n for mod in self.expected_modules:\n try:\n __import__(mod)\n except ImportError:\n raise",
"def test_import_when_cache_exists(enaml_module):\n name, folder, _ = enaml_module\n assert name not in sys.modules\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n\n cache_folder = os.path.join(folder, '__enamlcache__')\n assert os.path.isdir(cache_folder)\n cache_name = os.listdir(cache_folder)[0]\n cache_path = os.path.join(cache_folder, cache_name)\n cache_time = os.path.getmtime(cache_path)\n\n with imports():\n importlib.import_module(name)\n\n assert os.path.getmtime(cache_path) == cache_time\n assert name in sys.modules",
"def test_NKT13_import(): \n\tdef test(): \n\t\ttry: \n\t\t\tfrom .. import NKT13 \n\t\texcept: \n\t\t\treturn False \n\t\treturn True \n\treturn [\"vice.yields.ccsne.NKT13\", test]",
"def enaml_module(tmpdir):\n name = '__enaml_test_module__'\n folder = str(tmpdir)\n path = os.path.join(folder, name + '.enaml')\n with open(path, 'w') as f:\n f.write(SOURCE)\n sys.path.append(folder)\n\n yield name, folder, path\n\n sys.path.remove(folder)\n if name in sys.modules:\n del sys.modules[name]",
"def test_module_imports(self):\n apps = [\n 'customers',\n 'customers.migrations',\n 'customers.management',\n 'customers.management.commands',\n 'customers.management.commands.load_customers_to_redis',\n 'customers.forms',\n 'customers.admin',\n 'customers.models',\n 'customers.urls',\n 'customers.views',\n ]\n for a in apps:\n self.assertTrue(module_exists(a))",
"def test_import_cache_only(enaml_module):\n name, _, path = enaml_module\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules\n del sys.modules[name]\n os.remove(path)\n\n with imports():\n importlib.import_module(name)\n\n assert name in sys.modules",
"def test_import_string(self):\n assert utils.import_string('ttgn.pokedex.utils') == utils",
"def test_import():\n import pyapp",
"def importer():\n pass",
"def test_import_string():\n tests = [\n 'virtstrap.commands',\n 'virtstrap.options'\n ]\n for string in tests:\n yield import_a_string, string",
"def test_import_error(self):\n\n mock_entry_importerror = mock.create_autospec(EntryPoint)\n mock_entry_importerror.name = \"IErr\"\n mock_entry_importerror.load = self.raiseimporterror\n\n with pytest.warns(AstropyUserWarning, match=r\".*ImportError.*\"):\n populate_entry_points([mock_entry_importerror])",
"def test_LC18_import(): \n\tdef test(): \n\t\ttry: \n\t\t\tfrom .. import LC18 \n\t\texcept: \n\t\t\treturn False \n\t\treturn True \n\treturn [\"vice.yields.ccsne.LC18\", test]",
"def test_module(self):\n pass",
"def test_ufedmm_imported():\n assert \"ufedmm\" in sys.modules",
"def test_loaders():\n\n tempdir = tempfile.mkdtemp()\n\n loader = \"\"\"\nfrom mindbender import api\n\nclass DemoLoader(api.Loader):\n def process(self, asset, subset, version, representation):\n pass\n\n\"\"\"\n\n with open(os.path.join(tempdir, \"my_loader.py\"), \"w\") as f:\n f.write(loader)\n\n try:\n pipeline.register_loaders_path(tempdir)\n loaders = pipeline.discover_loaders()\n\n assert \"DemoLoader\" in list(\n L.__name__ for L in loaders\n ), \"Loader not found in %s\" % \", \".join(\n l.__name__ for l in loaders)\n\n finally:\n shutil.rmtree(tempdir)",
"def test_package(self):\n pass",
"def test_taskfile_import(monkeypatch, modpath):\n monkeypatch.setattr(loadlimit.importhook, 'lstaskfiles', fake_lstaskfiles)\n monkeypatch.setattr(loadlimit.importhook, 'SourceFileLoader',\n FakeSourceFileLoader)\n\n taskfiles = ['a_{}.py'.format(i) for i in range(10)]\n names = [splitext(n)[0] for n in taskfiles]\n pypath = ['{}.{}'.format(modpath, n) for n in names]\n randpath = choice(pypath)\n\n assert modpath not in sys.modules\n assert all(not p.startswith(modpath) for p in sys.modules)\n\n sys.meta_path.append(TaskImporter(*taskfiles))\n taskfile = import_module(randpath)\n\n expected = set(pypath) | set([modpath])\n result = set(p for p in sys.modules if p.startswith(modpath))\n\n assert modpath in sys.modules\n assert result == expected\n assert taskfile.TEST == randpath",
"def test_import_test_asset(self):\n pass",
"def test_import(self):\n try:\n import gtcal\n except ImportError:\n self.fail(\"Could not import gtcal\")",
"def setUp(self):\n self.simplejson = sys.modules.pop('simplejson', None)\n self.json = sys.modules.pop('json', None)\n self.original_import = self.get_import()\n def block_all_jsons(name, *args, **kwargs):\n if 'json' in name:\n if name in sys.modules:\n module = sys.modules[name]\n module.name = name\n return module\n raise ImportError('Unable to find %s' % name)\n else:\n return self.original_import(name, *args, **kwargs)\n self.set_import(block_all_jsons)"
] | [
"0.7417657",
"0.6825249",
"0.6661549",
"0.6391132",
"0.63690794",
"0.6306982",
"0.6259021",
"0.6216331",
"0.6164024",
"0.6135842",
"0.60130453",
"0.59856313",
"0.59792435",
"0.5934923",
"0.5910432",
"0.59071934",
"0.58914375",
"0.5864077",
"0.58505535",
"0.58491695",
"0.5825152",
"0.58173525",
"0.5810209",
"0.57798594",
"0.57283205",
"0.56914115",
"0.5690196",
"0.5676424",
"0.567272",
"0.56667066"
] | 0.77357394 | 0 |
r""" Convert a Pico detection to a menpo.shape.PointDirectedGraph. This enforces a particular point ordering. The Pico detections are circles with a given diameter. Here we convert them to the tighest possible bounding box around the circle. No orientaton is currently provided. | def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return bounding_box((y, x), (y + diameter, x + diameter)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_puncturefinder_graph(self):\n try:\n return self._puncturefinder_graph\n except AttributeError:\n pass\n\n # g = Graph(multiedges=True, loops=True)\n g = nx.MultiGraph()\n for i in self.switches():\n for sw in {-i, i}:\n b1 = self.outgoing_branches(sw)\n b2 = self.outgoing_branches(-sw)\n # connecting branches forming a 180 degree angle\n g.add_edge(b1[0], -b2[-1], weight=0)\n # g.add_edge([b1[0], -b2[-1], 0])\n\n # The left side of branch b, when looking\n # from the switch conveniently corresponds to vertex\n # b. The right side corresponds to -b.\n\n # connecting branches at cusps\n for j in range(len(b1)-1):\n # g.add_edge([-b1[j], b1[j+1], 1])\n g.add_edge(-b1[j], b1[j+1], weight=1)\n\n self._puncturefinder_graph = g\n return self._puncturefinder_graph",
"def to_points(self, divisions=100):",
"def find_points(self):\n\n points = [\n (self.inner_radius, 0, \"straight\"),\n (self.inner_radius, self.height / 2, \"straight\"),\n (self.outer_radius, self.height / 2, \"straight\"),\n (self.outer_radius, self.arc_height / 2, \"circle\"),\n (self.mid_radius, 0, \"circle\"),\n (self.outer_radius, -self.arc_height / 2, \"straight\"),\n (self.outer_radius, -self.height / 2, \"straight\"),\n (self.inner_radius, -self.height / 2, \"straight\")\n ]\n\n self.points = points",
"def dfs(x, p, step):\n disc[x] = low[x] = step\n for xx in graph.get(x, []): \n if disc[xx] == inf: \n step += 1\n dfs(xx, x, step)\n low[x] = min(low[x], low[xx])\n if low[xx] > disc[x]: ans.append([x, xx]) # bridge\n elif xx != p: low[x] = min(low[x], disc[xx])",
"def getCoordinates(p):\n if p[0] == 'p': # minimum bounding rectangle for point\n return (int(p[1]), int(p[2]), int(p[1]), int(p[2]))\n elif p[0] == 'c': # minimum bounding rectangle for circle\n x = int(p[1])\n y = int(p[2])\n r = int(p[3])\n return (x - r, y - r, x + r, y + r)\n elif p[0] == 'l': # minimum bounding rectangle for line segment\n x1 = int(p[1])\n y1 = int(p[2])\n x2 = int(p[3])\n y2 = int(p[4])\n if y2 > y1:\n if x1 < x2:\n return (x1, y1, x2, y2)\n else:\n return (x2, y1, x1, y2)\n else:\n if x1 < x2:\n return (x1, y2, x2, y1)\n else:\n return (x2, y2, x1, y1)",
"def solve(P, X, Y):\n\n # use coordinate frame at center of pie\n x = X - 50\n y = Y - 50\n\n # inside circle?\n if x**2 + y**2 > 50**2:\n return 'white'\n\n # \"edges\" and/or special cases\n if P == 0:\n return 'white'\n if x == 0 and y == 0:\n return 'black'\n\n # theta and phi 0 from y-axis clockwise\n theta = 2*pi * (P / 100)\n if x >= 0:\n phi = pi/2 - atan(y/x)\n else:\n phi = 3*pi/2 - atan(y/x)\n\n if phi <= theta:\n return 'black'\n else:\n return 'white'",
"def get_pupil_diameter(dlc):\r\n diameters = []\r\n # Get the x,y coordinates of the four pupil points\r\n top, bottom, left, right = [np.vstack((dlc[f'pupil_{point}_r_x'], dlc[f'pupil_{point}_r_y']))\r\n for point in ['top', 'bottom', 'left', 'right']]\r\n # First compute direct diameters\r\n diameters.append(np.linalg.norm(top - bottom, axis=0))\r\n diameters.append(np.linalg.norm(left - right, axis=0))\r\n\r\n # For non-crossing edges, estimate diameter via circle assumption\r\n for pair in [(top, left), (top, right), (bottom, left), (bottom, right)]:\r\n diameters.append(np.linalg.norm(pair[0] - pair[1], axis=0) * 2 ** 0.5)\r\n\r\n # Ignore all nan runtime warning\r\n with warnings.catch_warnings():\r\n warnings.simplefilter(\"ignore\", category=RuntimeWarning)\r\n return np.nanmedian(diameters, axis=0)",
"def propanolLowest():\n coords = [\n [-1.9554949371, 0.1467391618, 0.0031595607],\n [-0.5906278346, -0.5279387138, -0.0201649611],\n [0.5440986558, 0.4958779663, 0.0283462055],\n [0.4812068385, 1.1678478833, -0.8308000219],\n [0.4590669813, 1.0993020658, 0.9450529713],\n [1.8195161785, -0.0957487212, -0.0534239359],\n [1.9103706588, -0.7338049177, 0.6631507673],\n [-0.5004127933, -1.2028008461, 0.8364936998],\n [-0.4854009629, -1.1250023438, -0.9282499098],\n [-2.7476736372, -0.5972665554, -0.0242488945],\n [-2.0700756998, 0.8040326560, -0.8554507953],\n [-2.0722381370, 0.7410005769, 0.9069567477],\n ]\n\n symbols = [\n \"C\",\n \"C\",\n \"C\",\n \"H\",\n \"H\",\n \"O\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n ]\n\n atoms = []\n for i, _ in enumerate(coords):\n atoms.append(Atom(symbols[i], position=coords[i]))\n return Molecule(symbols=atoms)",
"def getClosePoints(self, point, depth=None):\n if not depth:\n depth = self.depth\n\n point = [c if c > 0 else self.size[i]-c for i, c in enumerate(point)]\n point = [c if c < self.size[i] else c % self.size[i] for i, c in enumerate(point)]\n # testSignature = self.grid.getSignature(point, self.spacings)\n testSignature = self.grid.getSignature2(point, self.spacings)\n # print testSignature, point\n # print testSignature\n # return self.tree[tuple(testSignature)]\n\n neighbors = []\n for neighborSignature in self.grid.getNeighborNodes(testSignature):\n neighborSignature = [s if s>=0 else self.maxIndex for s in neighborSignature]\n # neighborSignature = testSignature[:-1] + [neighborSignature]\n try:\n neighbors += self.tree[tuple(neighborSignature)]\n except KeyError:\n pass\n return neighbors",
"def arrange_nodes_circular(self, radius=120):\n\n self.get()\n if self.status != \"opened\":\n self.open() # pragma: no cover\n\n _angle = (2 * pi) / len(self.nodes)\n # The Y Axis is inverted in GNS3, so the -Y is UP\n for index, n in enumerate(self.nodes):\n _x = int(radius * (sin(_angle * index)))\n _y = int(radius * (-cos(_angle * index)))\n n.update(x=_x, y=_y)",
"def generate_pilars_positions(distance, radius, x, y, width, height):\n pair_distance = (2 * radius) + (distance - 2*radius)\n \n pilars_x_axis = int(width / pair_distance)\n pilars_y_axis = int(height / pair_distance)\n\n gap_x_axis = (width - (pilars_x_axis * pair_distance))/2 + radius#gap both sides of the rectangle\n gap_y_axis = (height - (pilars_y_axis * pair_distance))/2 + radius\n\n points = [] #set of positions\n for col in range(0, pilars_x_axis):\n for row in range(0, pilars_y_axis):\n points.append(( (x + gap_x_axis) + col*pair_distance,\n (y - gap_y_axis) - row*pair_distance))\n return points",
"def correct_pore_diameter(com, *params):\n elements, coordinates = params\n return (-pore_diameter(elements, coordinates, com)[0])",
"def discretize_wire(a_topods_wire):\n if not is_wire(a_topods_wire):\n raise AssertionError(\n \"You must provide a TopoDS_Wire to the discretize_wire function.\")\n wire_explorer = WireExplorer(a_topods_wire)\n wire_pnts = []\n # loop over ordered edges\n for edg in wire_explorer.ordered_edges():\n edg_pnts = discretize_edge(edg)\n wire_pnts += edg_pnts\n return wire_pnts",
"def pc_to_binvox_for_shape_completion(points,\n patch_size):\n\n if points.shape[1] != 3:\n raise Exception(\"Invalid pointcloud size, should be nx3, but is {}\".format(points.shape))\n\n # how much of the voxel grid do we want our pointcloud to fill.\n # make this < 1 so that there is some padding on the edges\n PERCENT_PATCH_SIZE = (4.0/5.0)\n\n # Where should the center of the points be placed inside the voxel grid.\n # normally make PERCENT_Z < 0.5 so that the points are placed towards the front of the grid\n # this leaves more room for the shape completion to fill in the occluded back half of the occupancy grid.\n PERCENT_X = 0.5\n PERCENT_Y = 0.5\n PERCENT_Z = 0.45\n\n # get the center of the pointcloud in meters. Ex: center = np.array([0.2, 0.1, 2.0])\n center = get_bbox_center(points)\n\n # get the size of an individual voxel. Ex: voxel_resolution=0.01 meaning 1cm^3 voxel\n # PERCENT_PATCH_SIZE determines how much extra padding to leave on the sides\n voxel_resolution = get_voxel_resolution(points, PERCENT_PATCH_SIZE * patch_size)\n\n # this tuple is where we want to stick the center of the pointcloud in our voxel grid\n # Ex: (20, 20, 18) leaving some extra room in the back half.\n pc_center_in_voxel_grid = (patch_size*PERCENT_X, patch_size*PERCENT_Y, patch_size*PERCENT_Z)\n\n # create a voxel grid.\n vox_np = voxelize_points(\n points=points[:, 0:3],\n pc_bbox_center=center,\n voxel_resolution=voxel_resolution,\n num_voxels_per_dim=patch_size,\n pc_center_in_voxel_grid=pc_center_in_voxel_grid)\n\n # location in meters of the bottom corner of the voxel grid in world space\n offset = np.array(center) - np.array(pc_center_in_voxel_grid) * voxel_resolution\n\n # create a voxel grid object to contain the grid, shape, offset in the world, and grid resolution\n vox = binvox_rw.Voxels(vox_np, vox_np.shape, tuple(offset), voxel_resolution * patch_size, \"xyz\")\n return vox",
"def parent_to_child(p):\n\n a,b = LINES[p.side]\n if p.x < 0.5:\n return Point(a, p.side, 2*p.x)\n else:\n return Point(b, p.side, 2*p.x - 1)",
"def surround(self, p):\n res = set([])\n if p.x + 1 < self.height:\n res.add((p.x + 1, p.y))\n if p.y + 1 < self.width:\n res.add((p.x + 1, p.y + 1))\n res.add((p.x, p.y + 1))\n if p.y - 1 >= 0:\n res.add((p.x + 1, p.y - 1))\n res.add((p.x, p.y - 1))\n if p.x - 1 >= 0:\n res.add((p.x - 1, p.y))\n if p.y + 1 < self.width:\n res.add((p.x - 1, p.y + 1))\n res.add((p.x, p.y + 1))\n if p.y - 1 >= 0:\n res.add((p.x - 1, p.y - 1))\n res.add((p.x, p.y - 1))\n return res",
"def pointListForCurve(x, y, type):\n\n\tif x < 10:\n\t\txString = \"0%d\" % x\n\telse:\n\t\txString = \"%d\" % x\n\n\tif x < 11:\n\t\txMString = \"0%d\" % (x - 1)\n\telse:\n\t\txMString = \"%d\" % (x - 1)\n\n\tif x < 9:\n\t\txPString = \"0%d\" % (x + 1)\n\telse:\n\t\txPString = \"%d\" % (x + 1)\n\n\tif x < 8:\n\t\txPPString = \"0%d\" % (x + 2)\n\telse:\n\t\txPPString = \"%d\" % (x + 2)\n\n\tif y < 11:\n\t\tyMString = \"0%d\" % (y - 1)\n\telse:\n\t\tyMString = \"%d\" % (y - 1)\n\n\tif y < 9:\n\t\tyPString = \"0%d\" % (y + 1)\n\telse:\n\t\tyPString = \"%d\" % (y + 1)\n\n\tif y < 8:\n\t\tyPPString = \"0%d\" % (y + 2)\n\telse:\n\t\tyPPString = \"%d\" % (y + 2)\n\n\tif y < 10:\n\t\tyString = \"0%d\" % y\n\telse:\n\t\tyString = \"%d\" % y\n\n\tinnerRadius = 54.0 / 64.0\n\touterRadius = 87.0 / 64.0\n\n\tslices = 10\n\n\t# Dots are numbered as xxyy[IO]z\n\t# The I means it is the inside trek, the O the outside\n\t# The z is which particular dot it is (0-9)\n\t# Note that all paths are marked as being inside the top-left square\n\t# Except for entrence and exit dots.\n\t# Curves are generated from star + 10 to end - 10\n\n\tif type == 8:\t\t\t# Bottom right\n\t\tcenterX = 25.0 / 64.0\n\t\tcenterY = 25.0 / 64.0\n\t\tstart = 0.0\n\t\tend = 90.0\n\n\t\tenterIn =\t[\"%s%sTL\" % (xPString, yString), 0.25, 0.25, [\"%s%sI0\" % (xString, yString)]]\n\t\tenterOut =\t[\"%s%sBL\" % (xString, yPString), 0.25, 0.75, [\"%s%sO0\" % (xString, yString)]]\n\t\texitIn =\t[\"%s%sTL\" % (xString, yPString), 0.25, 0.25, [\"%s%sTR\" % (xMString, yPString)]]\n\t\texitOut =\t[\"%s%sTR\" % (xPString, yString), 0.75, 0.25, [\"%s%sBR\" % (xPString, yMString)]]\n\n\t\tendIn = \"%s%sTL\" % (xString, yPString)\n\t\tendOut = \"%s%sTR\" % (xPString, yString)\n\n\telif type == 9:\t\t\t# Bottom left\n\t\tcenterX = 103.0 / 64.0\n\t\tcenterY = 25.0 / 64.0\n\t\tstart = 90.0\n\t\tend = 180.0\n\n\t\tenterIn =\t[\"%s%sTR\" % (xPString, yPString), 0.75, 0.25, [\"%s%sI0\" % (xString, yString)]]\n\t\tenterOut =\t[\"%s%sTL\" % (xString, yString), 0.25, 0.25, [\"%s%sO0\" % (xString, yString)]]\n\t\texitIn =\t[\"%s%sTR\" % (xString, yString), 0.75, 0.25, [\"%s%sBR\" % (xString, yMString)]]\n\t\texitOut =\t[\"%s%sBR\" % (xPString, yPString), 0.75, 0.75, [\"%s%sBL\" % (xPPString, yPString)]]\n\n\t\tendIn = \"%s%sTR\" % (xString, yString)\n\t\tendOut = \"%s%sBR\" % (xPString, yPString)\n\n\telif type == 10:\t\t# Top left\n\t\tcenterX = 103.0 / 64.0\n\t\tcenterY = 103.0 / 64.0\n\t\tstart = 180.0\n\t\tend = 270.0\n\n\t\tenterIn =\t[\"%s%sBR\" % (xString, yPString), 0.75, 0.75, [\"%s%sI0\" % (xString, yString)]]\n\t\tenterOut =\t[\"%s%sTR\" % (xPString, yString), 0.75, 0.25, [\"%s%sO0\" % (xString, yString)]]\n\t\texitIn =\t[\"%s%sBR\" % (xPString, yString), 0.75, 0.75, [\"%s%sBL\" % (xPPString, yString)]]\n\t\texitOut =\t[\"%s%sBL\" % (xString, yPString), 0.25, 0.75, [\"%s%sTL\" % (xString, yPPString)]]\n\n\t\tendIn = \"%s%sBR\" % (xPString, yString)\n\t\tendOut = \"%s%sBL\" % (xString, yPString)\n\n\telse: # type == 11:\t\t# Top right\n\t\tcenterX = 25.0 / 64.0\n\t\tcenterY = 103.0 / 64.0\n\t\tstart = 270.0\n\t\tend = 360.0\n\n\t\tenterIn =\t[\"%s%sBL\" % (xString, yString), 0.25, 0.75, [\"%s%sI0\" % (xString, yString)]]\n\t\tenterOut =\t[\"%s%sBR\" % (xPString, yPString), 0.75, 0.75, [\"%s%sO0\" % (xString, yString)]]\n\t\texitIn =\t[\"%s%sBL\" % (xPString, yPString), 0.25, 0.75, [\"%s%sTL\" % (xPString, yPPString)]]\n\t\texitOut =\t[\"%s%sTL\" % (xString, yString), 0.25, 0.25, [\"%s%sTR\" % (xMString, yString)]]\n\n\t\tendIn = \"%s%sBL\" % 
(xPString, yPString)\n\t\tendOut = \"%s%sTL\" % (xString, yString)\n\n\tpointList = [enterIn, enterOut, exitIn, exitOut]\n\n\tstring = \"%s%s\" % (xString, yString)\n\tstep = ((end - 1) - (start + 1)) / float(slices)\n\n\tfor i in range(slices):\n\n\t\tangle = radians(start + step * i)\n\n\t\tif i < 9:\n\t\t\ttemp = [\"%sI%d\" % (string, i), centerX + cos(angle) * innerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\tcenterY + sin(angle) * innerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\t[\"%sI%d\" % (string, i + 1)]]\n\t\telse:\n\t\t\ttemp = [\"%sI%d\" % (string, i), centerX + cos(angle) * innerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\tcenterY + sin(angle) * innerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\t[endIn]]\n\n\t\tpointList.append(temp)\n\n\t\tangle = radians(start + step * (10 - i))\n\n\t\tif i < 9:\n\t\t\ttemp = [\"%sO%d\" % (string, i), centerX + cos(angle) * outerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\tcenterY + sin(angle) * outerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\t[\"%sO%d\" % (string, i + 1)]]\n\t\telse:\n\t\t\ttemp = [\"%sO%d\" % (string, i), centerX + cos(angle) * outerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\tcenterY + sin(angle) * outerRadius,\n\t\t\t\t\t\t\t\t\t\t\t\t\t[endOut]]\n\n\t\tpointList.append(temp)\n\n\treturn pointList",
"def to_points(self, dx):\n remainder = 0\n pt0 = self[0]\n vertices = [pt0.get_vertex()]\n\n for seg in self.segments:\n pos = 0\n az = seg[0].azimuth(seg[1])\n\n while pos < seg.length:\n distance_to_endpt = pt0.distance(seg[1])\n if distance_to_endpt >= dx:\n pt1 = pt0.walk(dx - remainder, az)\n pos += dx - remainder\n vertices.append(pt1.get_vertex())\n remainder = 0\n pt0 = pt1\n else:\n remainder = distance_to_endpt\n pos = seg.length\n pt0 = seg[1]\n return Multipoint(vertices, crs=self.crs)",
"def order_points(pts):\n\n\trect = np.zeros((4, 2), dtype=\"float32\")\n\ts = pts.sum(axis=1)\n\trect[0] = pts[np.argmin(s)]\n\trect[2] = pts[np.argmax(s)]\n\tdiff = np.diff(pts, axis=1)\n\trect[1] = pts[np.argmin(diff)]\n\trect[3] = pts[np.argmax(diff)]\n\n\treturn rect",
"def point_group(self) -> PermutationGroup:\n perms = []\n for p in self.point_group_:\n if isinstance(p, Identity):\n perms.append(Identity())\n else:\n # note that we need the preimages in the permutation\n perm = self.lattice.id_from_position(p.preimage(self.lattice.positions))\n perms.append(Permutation(perm, name=str(p)))\n return PermutationGroup(perms, degree=self.lattice.n_nodes)",
"def draw_di_graph(graph_object, scale_by_degree=True):\n positions = nx.spring_layout(graph_object)\n if scale_by_degree:\n d = nx.degree(graph_object)\n keys, degrees = zip(*d)\n network = nx.draw(graph_object, nodelist=keys,\n node_size=[5*degree for degree in degrees],\n pos=positions, alpha=0.5, arrows=False)\n else:\n network = nx.draw(graph_object, pos=positions, node_size=50, alpha=0.5)\n # labels = nx.draw_networkx_labels(graph, pos=positions)\n return positions, network, plt.gca()",
"def propanolIntermediate():\n coords = [\n [-1.60306996, 0.10333519, 0.50792736],\n [-0.66904416, -0.46962566, -0.55371646],\n [0.67345677, 0.26436258, -0.61179298],\n [1.26292797, -0.10585085, -1.45392921],\n [0.49744830, 1.34089332, -0.75955140],\n [1.47742183, 0.05176805, 0.52349829],\n [0.98773122, 0.34094585, 1.30125393],\n [-0.48213061, -1.52528483, -0.34815476],\n [-1.14165995, -0.39229359, -1.53423716],\n [-2.56608070, -0.40007121, 0.47312929],\n [-1.76619136, 1.16652831, 0.34003517],\n [-1.19366144, -0.03197289, 1.50775619],\n ]\n\n symbols = [\n \"C\",\n \"C\",\n \"C\",\n \"H\",\n \"H\",\n \"O\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n \"H\",\n ]\n\n atoms = []\n for i, _ in enumerate(coords):\n atoms.append(Atom(symbols[i], position=coords[i]))\n return Molecule(symbols=atoms)",
"def order_points(pts):\n pts = np.array(pts)\n sums = pts.sum(axis=1)\n topleft_id = np.argmin(sums)\n bottomright_id = np.argmax(sums)\n\n # Quite clumsy, rewrite here\n leftover_ids = [i for range(4) if i not in (topleft_id, bottomright_id)]\n topright_id = min(leftover_ids, key=lambda i: pts[i][0])\n bottomleft_id = leftover_ids[0] if leftover_ids[0] != topright_id else leftover_ids[1]\n\n return pts[[topleft_id, topright_id, bottomright_id, bottomleft_id]]",
"def parks(self):\n point_array = [0, 2, 8, 12, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14]\n park_coords = []\n parks_sorted = []\n for i in range(4):\n for j in range(4):\n if self.as_list[i][j] == 'p':\n park_coords.append(tuple([i, j]))\n while len(park_coords) > 0:\n x, y = park_coords.pop(0)\n if len(parks_sorted) == 0:\n parks_sorted.append([(x, y)])\n else:\n borders_bool = []\n for block_no, park_block in enumerate(parks_sorted):\n borders_bool.append(False)\n for i, j in park_block:\n if abs(x - i) + abs(y - j) == 1:\n borders_bool[block_no] = True\n if (num_true := borders_bool.count(True)) == 1:\n parks_sorted[borders_bool.index(True)].append((x, y))\n elif num_true > 1:\n new_parks_sorted = []\n i_mega_park = None\n for block_no, park_block in enumerate(parks_sorted):\n if borders_bool[block_no]: # If it is bordering\n if i_mega_park is None:\n i_mega_park = block_no\n new_parks_sorted.append(park_block)\n else:\n new_parks_sorted[i_mega_park] += park_block\n new_parks_sorted[i_mega_park] += [(x, y)]\n parks_sorted = new_parks_sorted\n else:\n new_parks_sorted.append(park_block)\n parks_sorted = new_parks_sorted\n else:\n parks_sorted.append([(x, y)])\n\n return sum([point_array[len(block)] for block in parks_sorted])",
"def find_diameter(self):\n try:\n #Get the contours of the gear\n contours, hierarchy = cv.findContours(self.frame, cv.RETR_TREE, cv.CHAIN_APPROX_NONE)\n \n #Get the leftmost and the rightmost point of the gear\n leftmost = (sys.maxsize, sys.maxsize)\n rightmost = (sys.maxsize*-1, sys.maxsize*-1)\n frame_area = self.frame.shape[0] * self.frame.shape[1]\n new_contours = []\n for c in contours:\n c_area = cv.contourArea(c)\n area_percentage = (c_area/frame_area) * 100\n if area_percentage > .01:\n new_contours.append(c)\n\n c_leftmost = tuple(c[c[:, :, 0].argmin()][0])\n if leftmost[0] > c_leftmost[0]:\n leftmost = c_leftmost\n\n c_rightmost = tuple(c[c[:, :, 0].argmax()][0])\n if rightmost[0] < c_rightmost[0]:\n rightmost = c_rightmost\n\n cv.drawContours(self.result, new_contours, -1, (255, 0, 0), 3)\n\n cv.circle(self.result, leftmost, 4, [0, 0, 255], -1)\n cv.circle(self.result, rightmost, 4, [0, 0, 255], -1)\n\n #Get the distance between the two points\n distance = math.sqrt((leftmost[0] - rightmost[0])**2 + (leftmost[1] - rightmost[1])**2)\n\n #Convert the distance, which is in pixels to inches by using the background width as a point of reference\n self.diameter = (distance/self.frame.shape[1]) * self.background_width\n self.diameter = round(self.diameter, 2)\n \n except:\n cv.putText(self.result, 'Error', (10, 50), cv.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 2, cv.LINE_AA)",
"def calculate_points(self, component):\n # find selection range on source plot\n x_start, x_end = self._get_selection_screencoords()\n if x_start > x_end:\n x_start, x_end = x_end, x_start\n\n y_end = self.source.y\n y_start = self.source.y2\n\n left_top = np.array([x_start, y_start])\n left_mid = np.array([x_start, y_end])\n right_top = np.array([x_end, y_start])\n right_mid = np.array([x_end, y_end])\n\n # Offset y because we want to avoid overlapping the trapezoid with the topmost\n # pixels of the destination plot.\n y = self.destination.y2 + 1\n\n left_end = np.array([self.destination.x, y])\n right_end = np.array([self.destination.x2, y])\n\n polygon = np.array((left_top, left_mid, left_end,\n right_end, right_mid, right_top))\n left_line = np.array((left_top, left_mid, left_end))\n right_line = np.array((right_end, right_mid, right_top))\n\n return left_line, right_line, polygon",
"def getCircleDiameter(self):\n segments = []\n for (i, p1) in enumerate(self.points):\n for p2 in self.points[i+1:]:\n segments.append(Segment(p1, p2))\n s = max(segments, key=lambda s: s.length)\n return Circle(*s.middle, radius=s.length/2)",
"def calculate_points(self, component):\n # find selection range on source plot\n x_start, x_end = self._get_selection_screencoords()\n if x_start > x_end:\n x_start, x_end = x_end, x_start\n\n y_end = self.source.y\n y_start = self.source.y2\n\n left_top = np.array([x_start, y_end])\n left_mid = np.array([x_start, y_start])\n right_top = np.array([x_end, y_end])\n right_mid = np.array([x_end, y_start])\n\n # Offset y because we want to avoid overlapping the trapezoid with the topmost\n # pixels of the destination plot.\n y = self.destination.y - 1\n\n left_end = np.array([self.destination.x, y])\n right_end = np.array([self.destination.x2, y])\n\n polygon = np.array((left_end, left_mid, left_top,\n right_top, right_mid, right_end))\n left_line = np.array((left_top, left_mid, left_end))\n right_line = np.array((right_end, right_mid, right_top))\n\n return left_line, right_line, polygon",
"def _adj(self, sino):\n # image coordinate grid\n p_x = torch.linspace(\n -self.n[0] / 2.0 + 0.5,\n self.n[0] / 2.0 - 0.5,\n self.n[0],\n device=self.n.device,\n ).unsqueeze(1)\n p_y = torch.linspace(\n -self.n[1] / 2.0 + 0.5,\n self.n[1] / 2.0 - 0.5,\n self.n[1],\n device=self.n.device,\n ).unsqueeze(0)\n\n # check if coordinate is within circle\n if self.flat:\n max_gamma = torch.atan(\n (self.s_detect.abs() * (self.n_detect / 2.0))\n / (self.d_source + self._d_detect())\n )\n else:\n max_gamma = (self.s_detect.abs() * (self.n_detect / 2.0)) / (\n self.d_source + self._d_detect()\n )\n radius = self.d_source * torch.sin(max_gamma)\n p_r = torch.sqrt(p_x * p_x + p_y * p_y)\n mask = p_r <= radius\n\n # use batch and channel dimensions for vectorized interpolation\n original_dim = sino.ndim\n while sino.ndim < 4:\n sino = sino.unsqueeze(0)\n assert sino.shape[-3] == 1 # we can handle only single channel data\n sino = sino.transpose(-4, -3) # switch batch and channel dim\n\n # rotated coordinate grid\n pi = torch.acos(torch.zeros(1)).item() * 2.0\n cs = torch.cos(self.angles * pi / 180.0).unsqueeze(1).unsqueeze(1)\n sn = torch.sin(self.angles * pi / 180.0).unsqueeze(1).unsqueeze(1)\n p_x_r = cs * p_x + sn * p_y\n p_y_r = -sn * p_x + cs * p_y\n\n # find angles and detector positions defining rays through coordinate\n if self.flat:\n grid_d = (\n (self.d_source + self._d_detect())\n * p_x_r\n / (self.d_source - p_y_r)\n )\n else:\n grid_d = (self.d_source + self._d_detect()) * torch.atan(\n p_x_r / (self.d_source - p_y_r)\n )\n grid_a = (\n torch.arange(self.m[0], device=sino.device)\n .unsqueeze(1)\n .unsqueeze(1)\n .expand(-1, self.n[0], self.n[1])\n - self.m[0] / 2.0\n + 0.5\n )\n\n grid_d = grid_d / (\n (self.n_detect / 2.0 - 0.5) * self.s_detect\n ) # rescale valid detector positions to [-1,1]\n grid_a = grid_a / (self.m[0] / 2.0 - 0.5) # rescale angles to [-1,1]\n grid = torch.stack([grid_d, grid_a], dim=-1)\n inter = torch.nn.functional.grid_sample(\n sino.expand(self.m[0], -1, -1, -1), grid, align_corners=True\n )\n\n # compute integral reweighting factors and integrate\n if self.flat:\n weight = (self.d_source + self._d_detect()).pow(2) / (\n self.d_source - p_y_r\n ).pow(2)\n else:\n weight = (self.d_source + self._d_detect()).pow(2) / (\n (self.d_source - p_y_r).pow(2) + p_x_r.pow(2)\n )\n x = mask * (inter * (weight).unsqueeze(1)).sum(dim=0, keepdim=True)\n\n # undo batch and channel manipulations\n x = x.transpose(-4, -3) # unswitch batch and channel dim\n while x.ndim > original_dim:\n x = x.squeeze(0)\n\n return x / self.s_detect.abs()",
"def topo_diameter(self):\n import math\n \n Temp = 0\n for i in range(self.nodenum):\n for j in range(self.nodenum):\n pathlist = []\n self.pathij(i, j, pathlist)\n distance = []\n \n for k in range(len(pathlist)):\n distance.append(len(pathlist[k]) - 1)\n \n if(len(distance) == 0):\n continue\n else:\n if(min(distance) >= Temp):\n Temp = min(distance)\n \n self.topodiameter = Temp"
] | [
"0.50767064",
"0.49477744",
"0.48864093",
"0.48731953",
"0.48656204",
"0.48196903",
"0.48136315",
"0.48119062",
"0.47768894",
"0.4766206",
"0.47514772",
"0.47462827",
"0.47427285",
"0.47004747",
"0.4687095",
"0.46782252",
"0.46657154",
"0.4664539",
"0.46427816",
"0.46333265",
"0.46289703",
"0.46254635",
"0.46234065",
"0.46121687",
"0.45971894",
"0.45951968",
"0.45950866",
"0.45908454",
"0.45797753",
"0.45718798"
] | 0.6494857 | 0 |
Makes the horizontal box with buttons | def makeButtons(self):
self.but_run = QtWidgets.QPushButton('Run')
self.but_status = QtWidgets.QPushButton('Status')
self.but_brow = QtWidgets.QPushButton('View')
self.but_remove = QtWidgets.QPushButton('Remove files')
self.hboxB = QtWidgets.QHBoxLayout()
self.hboxB.addWidget(self.but_run)
self.hboxB.addWidget(self.but_status)
self.hboxB.addWidget(self.but_brow)
self.hboxB.addStretch(1)
self.hboxB.addWidget(self.but_remove)
self.but_run.clicked.connect(self.onRun)
self.but_status.clicked.connect(self.onStatus)
self.but_brow.clicked.connect(self.onBrow)
self.but_remove.clicked.connect(self.onRemove) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def button_box(self):\r\n\r\n below_hz_frame = tkinter.Frame(self)\r\n ok_button = ttk.Button(below_hz_frame, text=\"OK\",\r\n width=10, command=self.ok,\r\n default=tkinter.ACTIVE)\r\n ok_button.grid(row=0, column=0, padx=30, pady=10)\r\n cancel_button = ttk.Button(below_hz_frame, text=\"Cancel\", width=10,\r\n command=self.cancel)\r\n cancel_button.grid(row=0, column=1, padx=30, pady=10)\r\n\r\n # bind 'ok' method to the 'enter' button of the keyboard\r\n self.bind(\"<Return>\", self.ok)\r\n\r\n # bind 'cancel' method to the 'esc' button of the keyboard\r\n self.bind(\"<Escape>\", self.cancel)\r\n below_hz_frame.pack(fill=tkinter.X)",
"def buttonbox(self):\n self.ok_button = tk.Button(\n self, text=\"OK\", width=5, command=lambda: self.destroy()\n )\n self.ok_button.pack(pady=10)",
"def add_side_buttons(self):\n # Top and bottom buttons\n for col in range(self._grid.width):\n top_button = widgets.HExitButton('^', -1, col)\n bottom_button = widgets.HExitButton('v', self._grid.height, col)\n self._graphic_grid.addWidget(top_button, 1, 2 + col)\n self._graphic_grid.addWidget(bottom_button,\n 2 + self._grid.height, 2 + col)\n top_button.clicked.connect(self.button_clicked)\n bottom_button.clicked.connect(self.button_clicked)\n # Left and right buttons\n for row in range(self._grid.height):\n left_button = widgets.VExitButton('<', row, -1)\n right_button = widgets.VExitButton('>', row, self._grid.width)\n self._graphic_grid.addWidget(left_button, 2 + row, 1)\n self._graphic_grid.addWidget(right_button,\n 2 + row, 2 + self._grid.width)\n left_button.clicked.connect(self.button_clicked)\n right_button.clicked.connect(self.button_clicked)",
"def buttonbox(self):\n\n box = Frame(self)\n b = Button(box, text=\"OK\", width=10, command=self.ok, default=ACTIVE)\n b.pack(side=LEFT, padx=5, pady=5)\n #w = Button(box, text=\"Cancel\", width=10, command=self.cancel)\n a= Button(box, text=\"Autofill\", width=10, command=self.auto_populate, default=ACTIVE)\n a.pack(side=LEFT, padx=5, pady=5)\n #w.pack(side=LEFT, padx=5, pady=5)\n #w[\"state\"] = DISABLED\n\n\n self.bind(\"<Return>\", self.ok)\n self.bind(\"<Escape>\", self.cancel)\n\n box.pack()",
"def buttonbox(self):\n if(self.box is not None):\n self.box.destroy()\n\n self.box = Frame(self)\n\n ok_btn = Button(self.box, text=\"Next Player\", width=10,\n command=self.next_cmd, default=ACTIVE)\n ok_btn.pack(side=LEFT, padx=5, pady=5)\n\n finish_btn = Button(self.box, text=\"Ready\",\n state=DISABLED, width=10, command=self.finish_command, default=ACTIVE)\n # ensures a minimum of 2 players\n if self.num_players >= self.min_players:\n finish_btn.config(state=\"normal\")\n finish_btn.pack(side=LEFT, padx=5, pady=5)\n\n cancel_btn = Button(self.box, text=\"Cancel\",\n width=10, command=self.cancel)\n cancel_btn.pack(side=LEFT, padx=5, pady=5)\n\n self.bind(\"<Escape>\", self.cancel)\n\n self.box.pack()",
"def setupButtons(self, buttons=None):\n self.button_box = qt.QHBoxLayout(self.layout)\n spacer = qt.QSpacerItem(0,0,qt.QSizePolicy.Expanding,qt.QSizePolicy.Minimum)\n self.button_box.addItem(spacer)\n\n if buttons is None:\n self._addOkButton()\n self._addCancelButton()\n else:\n if \"ok\" in buttons:\n self._addOkButton(buttons[\"ok\"])\n if \"cancel\" in buttons:\n self._addCancelButton(buttons[\"cancel\"])\n if \"quit\" in buttons:\n self._addQuitButton(buttons[\"quit\"])",
"def create_buttons(self, width=0.2):\n b_N, b_length = self.num_buttons, width\n b_sep = (1. / (b_N + 1)) * (1 - b_N * b_length)\n for b in range(b_N):\n start = (b + 1) * b_sep + b * b_length\n r = [start, 0.05, b_length, 0.075]\n self.regions.append(r)\n\n # adjust the figure\n plt.subplots_adjust(bottom=0.25)\n # populate buttons\n for b in range(b_N):\n axbutton = plt.axes(self.regions[b])\n button = Button(axbutton, self.button_labels[b])\n button.on_clicked(self.actions[self.button_actions[b]])\n self.buttons.append(button)",
"def uiNewHorizontalBox():\n\n # Set return type\n clibui.uiNewHorizontalBox.restype = ctypes.POINTER(uiBox)\n\n return clibui.uiNewHorizontalBox()",
"def create_button_block(self):\n self.btn_compare = QPushButton(self.centralWidget)\n self.btn_display = QPushButton(self.centralWidget)\n self.btn_display_hist = QPushButton(self.centralWidget)\n self.btn_display_color_hist = QPushButton(self.centralWidget)\n\n self.btn_equalize_hist = QPushButton(self.centralWidget)\n self.btn_contrast_stretch = QPushButton(self.centralWidget)\n self.btn_log_compress = QPushButton(self.centralWidget)\n self.btn_contrast_invert = QPushButton(self.centralWidget)\n\n self.btn_dload_jpeg = QPushButton(self.centralWidget)\n self.btn_dload_tiff = QPushButton(self.centralWidget)\n self.btn_dload_png = QPushButton(self.centralWidget)\n self.btn_upload = QPushButton(self.centralWidget)\n\n self.lay_button_block = QGridLayout()\n\n self.lay_button_block.addWidget(self.btn_compare, 0, 0)\n self.lay_button_block.addWidget(self.btn_display, 1, 0,)\n self.lay_button_block.addWidget(self.btn_display_hist, 2, 0)\n self.lay_button_block.addWidget(self.btn_display_color_hist, 3, 0)\n\n self.lay_button_block.addWidget(self.btn_equalize_hist, 0, 1)\n self.lay_button_block.addWidget(self.btn_contrast_stretch, 1, 1)\n self.lay_button_block.addWidget(self.btn_log_compress, 2, 1)\n self.lay_button_block.addWidget(self.btn_contrast_invert, 3, 1)\n\n self.lay_button_block.addWidget(self.btn_dload_jpeg, 0, 2)\n self.lay_button_block.addWidget(self.btn_dload_tiff, 1, 2)\n self.lay_button_block.addWidget(self.btn_dload_png, 2, 2)\n self.lay_button_block.addWidget(self.btn_upload, 3, 2)",
"def create_widgets(self):\n #create first button\n self.button1 = Button(self, text = \"Start\")\n self.button1.bind\n self.button1.grid()",
"def button_strip(parent, *buttons):\n hbox = qtw.QHBoxLayout()\n hbox.addStretch()\n if 'Cancel' in buttons:\n btn = qtw.QPushButton(\"Afbreken\", parent)\n btn.clicked.connect(parent.parent().do_detail)\n hbox.addWidget(btn)\n if 'Go' in buttons:\n btn = qtw.QPushButton(\"Uitvoeren\", parent)\n btn.clicked.connect(parent.submit)\n hbox.addWidget(btn)\n if 'GoBack' in buttons:\n btn = qtw.QPushButton(\"Uitvoeren en terug\", parent)\n btn.clicked.connect(parent.submit_and_back)\n hbox.addWidget(btn)\n if 'Edit' in buttons:\n btn = qtw.QPushButton(\"Wijzigingen doorvoeren\", parent)\n btn.clicked.connect(parent.submit)\n hbox.addWidget(btn)\n if 'New' in buttons:\n btn = qtw.QPushButton(\"Nieuwe opvoeren\", parent)\n btn.clicked.connect(parent.new)\n hbox.addWidget(btn)\n if 'Select' in buttons:\n btn = qtw.QPushButton(\"Terug naar Selectie\", parent)\n btn.clicked.connect(parent.parent().do_select)\n hbox.addWidget(btn)\n if 'Start' in buttons:\n btn = qtw.QPushButton(\"Terug naar Startscherm\", parent)\n btn.clicked.connect(parent.parent().do_start)\n hbox.addWidget(btn)\n hbox.addStretch()\n return hbox",
"def _create_button_frame(self):\n\n self.frames.append(tk.Frame(self.master))\n self.row_buttons.append([])\n\n for row in range(5):\n self.row_buttons[0].append(tk.Button(self.frames[3],\n text=\"Go\", height=2, width=4))\n self.row_buttons[0][row].grid(column=0, row=row)\n self.frames[3].rowconfigure(row, weight=1, minsize=50)\n\n self.frames[3].grid(column=1, row=1, sticky=\"ew\")\n self.frames[3].columnconfigure(0, weight=1)",
"def create_buttons(frame):\n button0 = Button(frame, height=2, width=2, text=\" \",\n command=lambda: on_click(button0))\n button0.pack(side=LEFT)\n button1 = Button(frame, height=2, width=2, text=\" \",\n command=lambda: on_click(button1))\n button1.pack(side=LEFT)\n button2 = Button(frame, height=2, width=2, text=\" \",\n command=lambda: on_click(button2))\n button2.pack(side=LEFT)",
"def create_button_strip(data):\n button_box = gtk.HBox()\n buttons = []\n for label, cb in data:\n if cb is None and not isinstance(label, str):\n b = label # masquarade as a place to put premade widgets, for battery meter\n else:\n b = gtk.Button(label.replace('_', '__'))\n if cb:\n b.connect(\"clicked\", cb)\n try:\n button_box.add(b)\n except:\n import pdb; pdb.set_trace()\n buttons.append(b)\n return button_box, buttons",
"def place_buttons(self):\n tk.Button(self.parent, text='^', command=self.up_callback).grid(row=0, column=1)\n tk.Button(self.parent, text='v', command=self.down_callback).grid(row=2, column=1)\n tk.Button(self.parent, text='>', command=self.right_callback).grid(row=1, column=2)\n tk.Button(self.parent, text='<', command=self.left_callback).grid(row=1, column=0)\n tk.Button(self.parent, text='<-', command=self.back_callback).grid(row=0, column=0)\n tk.Button(self.parent, text='OK', command=self.ok_callback).grid(row=1, column=1)\n tk.Button(self.parent, text='<<', command=self.rewind_callback).grid(row=3, column=0)\n tk.Button(self.parent, text='>||', command=self.pp_callback).grid(row=3, column=1)\n tk.Button(self.parent, text='>>', command=self.pp_callback).grid(row=3, column=2)\n\n tk.Button(self.parent, text='HOME', command=self.home_callback).grid(row=0, column=3)",
"def define_button(self):\n self.separator1 = pygame.Rect(\n 0,\n SCREEN_WIDTH,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n self.separator2 = pygame.Rect(\n 0,\n SCREEN_WIDTH + BIG_LINE_WIDTH // 2,\n SCREEN_WIDTH,\n BIG_LINE_WIDTH,\n )\n\n self.button = pygame.Rect(\n SCREEN_WIDTH // 2 - BUTTON_WIDTH // 2,\n (SCREEN_HEIGHT + SCREEN_WIDTH) // 2 - BUTTON_HEIGHT // 2,\n BUTTON_WIDTH,\n BUTTON_HEIGHT,\n )",
"def place_buttons(self):\n grid_size = 3\n button_index = 0\n for i in range(grid_size):\n for j in range(grid_size):\n self.button_list[button_index].grid(row=i, column=j)\n button_index += 1",
"def create_buttons(self):\r\n # The buttons are created in the center of the screen then offset in the x/y directions by a number of button\r\n # widths. E.g. The \"-1, 0\" for the easy button means to shift the button one button width left of center.\r\n self.easy_button = Button(self.ai_game, \"Easy\", -1, 0)\r\n self.normal_button = Button(self.ai_game, \"Normal\", 0, 0)\r\n self.hard_button = Button(self.ai_game, \"Hard\", 1, 0)\r\n self.quit_button = Button(self.ai_game, \"Quit\", 0, 1)\r\n self.buttons = (self.easy_button, self.normal_button,\r\n self.hard_button, self.quit_button)",
"def _create_button_panel(self):\n ## Buttons\n #\n # Our button order (when all buttons present):\n # [Defaults] [Refresh] [Apply] [Close]\n #\n # Our button - Windows\n # Close(yes) - OK\n # Close(no ) - Cancel\n # [X] - Cancel\n # Apply - Apply\n # Defaults -\n # Refresh - Reset\n #\n # I think Windows users will head for the window's [X]\n # when they want to close and cancel their changes,\n # because they won't know if [Close] saves their changes\n # or not (until they press it, and find that it asks).\n #\n #\n # Some links that discuss and address what order to use for buttons:\n #\n # http://java.sun.com/products/jlf/ed2/book/HIG.Dialogs2.html\n # http://developer.kde.org/documentation/books/kde-2.0-development/ch08lev1sec6.html\n # http://developer.kde.org/documentation/standards/kde/style/dialogs/index.html\n # http://doc.trolltech.com/qq/qq19-buttons.html\n\n\n # Catch click on the [X]: like clicking [Close]\n # CEBALERT: but what if this frame isn't in its own window!\n try:\n self.master.protocol(\"WM_DELETE_WINDOW\",self._close_button)\n except AttributeError:\n pass\n\n buttons_frame = T.Frame(self,borderwidth=1,relief='sunken')\n self.buttons_frame = buttons_frame\n buttons_frame.pack(side=\"bottom\",expand=\"no\")\n\n self._buttons_frame_left = T.Frame(buttons_frame)\n self._buttons_frame_left.pack(side='left',expand='yes',fill='x')\n\n self._buttons_frame_right = T.Frame(buttons_frame)\n self._buttons_frame_right.pack(side='right',expand='yes',fill='x')\n\n self.pack_param('Defaults',parent=self._buttons_frame_left,\n on_set=self._defaults_button,side='left')\n self.pack_param('Refresh',parent=self._buttons_frame_left,\n on_set=self._refresh_button,side='left')\n self.pack_param('Close',parent=self._buttons_frame_right,\n on_set=self._close_button,side='right')",
"def boxen(self, paneel):\n \n boxje = wx.BoxSizer(wx.VERTICAL)\n boxje.Add(paneel, 1, wx.EXPAND | wx.ALL)\n \n #boxje.Add(vbox, 8, wx.EXPAND | wx.ALL)\n return boxje",
"def _createButtonsBox(self):\n\n # Create OK and Cancel buttons\n buttons = QDialogButtonBox(\n QDialogButtonBox.Ok | QDialogButtonBox.Cancel | QDialogButtonBox.Reset,\n Qt.Horizontal)\n buttons.accepted.connect(self._onOk)\n buttons.rejected.connect(self._onCancel)\n buttons.button(QDialogButtonBox.Reset).clicked.connect(self._onReset)\n return buttons",
"def layout():\n wid = QtGui.QWidget()\n lo = QtGui.QVBoxLayout()\n lo.addWidget(tab)\n lo.addWidget(btn)\n wid.setLayout(lo)\n tb.addWidget(wid)\n lo.setContentsMargins(0, 0, 0, 0)\n btn.setMaximumWidth(tab.height())",
"def _createButtonsBox(self):\n\n # Create OK and Cancel buttons\n buttons = QDialogButtonBox(\n QDialogButtonBox.Ok | QDialogButtonBox.Cancel | QDialogButtonBox.Reset,\n Qt.Horizontal)\n buttons.accepted.connect(self._onOk)\n buttons.rejected.connect(self._onCancel)\n buttons.button(QDialogButtonBox.Reset).clicked.connect(self._onReset)\n\n return buttons",
"def addStdButtons (self,frame):\n \n # Create the ok and cancel buttons.\n self.ok = ok = Tk.Button(frame,text=\"Go\",width=6,command=self.go)\n self.hide = hide = Tk.Button(frame,text=\"Hide\",width=6,command=self.hide)\n \n ok.pack(side=\"left\",pady=2,padx=5)\n hide.pack(side=\"left\",pady=2,padx=5)",
"def pack_buttons(self):\n button_layout = QtGui.QHBoxLayout()\n for button in self.buttons:\n button_layout.addWidget(button)\n \n layout = QtGui.QVBoxLayout()\n layout.addWidget(self.costmap_widget)\n layout.addLayout(button_layout)\n # layout.addStretch(1)\n self.setLayout(layout)",
"def createWidgets(self):\r\n top = self.winfo_toplevel()\r\n top.rowconfigure(0, weight=1)\r\n top.columnconfigure(0, weight=1)\r\n self.rowconfigure(0, weight=1)\r\n self.columnconfigure(0, weight=1) \r\n\r\n self.button_quit = tk.Button(self, text='Quit', command=self.quit)\r\n self.button_quit.grid(row=0, column=0, sticky=tk.N+tk.S+tk.E+tk.W)",
"def create_buttonbox(self, master_fr):\n \n ttk.Button(\n master_fr,\n text=\"Cancel\",\n command=self.dlg_pbCancel\n ).pack(side=tk.RIGHT)\n \n ttk.Button(\n master_fr,\n text=\"OK\",\n command=self.dlg_pbOK,\n default=tk.ACTIVE\n ).pack(side=tk.RIGHT)\n \n # Do Cancel if closed\n self.tkWindow.protocol(\"WM_DELETE_WINDOW\", self.dlg_pbCancel)",
"def create_widgets(self):\n self.Hi = Button(self, text= \"hi\", fg=\"red\", command=self.say_hi)\n self.Hi.pack({\"side\": \"left\"})\n # quit with out () means return not call\n self.Quit = Button(self, text=\"Goodbye\", fg=\"blue\", command=self.quit)\n self.Quit.pack({\"side\": \"left\"})",
"def addComponents(self):\n\n self.mainLayout = QVBoxLayout()\n self.setLayout(self.mainLayout)\n # title\n self.lblTitle = QLabel(self.title)\n self.mainLayout.addWidget(self.lblTitle)\n styleTitle = \"\"\"\nfont-size: 20px; \nfont-style:italic; \nfont-weight: bold; \nmargin:auto;\nmargin-bottom: 1px; \n\"\"\"\n self.lblTitle.setStyleSheet(styleTitle)\n\n # controls\n self.widgetControls = QWidget()\n self.layoutControls = QGridLayout()\n # self.layoutControls.setColumnStretch(0, 4)\n # self.layoutControls.setColumnStretch(1, 4)\n # self.layoutControls.setColumnStretch(2, 4)\n\n self.widgetControls.setLayout(self.layoutControls)\n self.mainLayout.addWidget(self.widgetControls)\n\n # buttons\n styleControls = \"\"\"\n width: 60px; \n height: 50px; \n \"\"\"\n self.buttons = []\n for i in range(self.shapeRow):\n self.buttons.append(self.generateColumnButtons())\n\n for i in range(self.shapeRow):\n for j in range(self.shapeColumn):\n self.buttons[i][j].setStyleSheet(styleControls)\n self.layoutControls.addWidget(self.buttons[i][j], i, j)",
"def addButtons(self):\n profbox()\n if self.buttonsGroupBox != None:\n self.layout.removeWidget(self.buttonsGroupBox)\n self.buttonsGroupBox.deleteLater()\n self.buttonsGroupBox = None\n self.buttonsGroupBox = qt.QGroupBox()\n self.buttonsGroupBox.setTitle( 'Manage Needles' )\n self.layout.addRow( self.buttonsGroupBox )\n self.buttonsGroupBoxLayout = qt.QFormLayout( self.buttonsGroupBox )\n \n modelNodes = slicer.util.getNodes('vtkMRMLModelNode*')\n for modelNode in modelNodes.values():\n if modelNode.GetAttribute(\"segmented\") == \"1\":\n i = int(modelNode.GetAttribute(\"nth\"))\n buttonDisplay = qt.QPushButton(\"Hide \"+self.option[i])\n buttonBentDisplay = qt.QPushButton(\"Hide Bent \"+self.option[i])\n buttonDisplay.checkable = True\n buttonBentDisplay.checkable = True\n\n if modelNode.GetDisplayVisibility() ==0:\n buttonDisplay.setChecked(1)\n\n buttonDisplay.connect(\"clicked()\", lambda who=i: self.displayNeedle(who))\n buttonBentDisplay.connect(\"clicked()\", lambda who=i: self.displayBentNeedle(who))\n buttonReformat = qt.QPushButton(\"Reformat \"+self.option[i])\n buttonReformat.connect(\"clicked()\", lambda who=i: self.reformatNeedle(who))\n widgets = qt.QWidget()\n hlay = qt.QHBoxLayout(widgets)\n hlay.addWidget(buttonDisplay)\n hlay.addWidget(buttonBentDisplay)\n hlay.addWidget(buttonReformat)\n self.buttonsGroupBoxLayout.addRow(widgets)"
] | [
"0.7095853",
"0.6955391",
"0.6926687",
"0.68542206",
"0.6837923",
"0.67752045",
"0.6765243",
"0.671351",
"0.6640223",
"0.66398984",
"0.6555973",
"0.65282583",
"0.6519112",
"0.64659417",
"0.64081466",
"0.6394941",
"0.63714725",
"0.63361865",
"0.62948644",
"0.6293922",
"0.6293837",
"0.6273122",
"0.62560505",
"0.6254372",
"0.6247241",
"0.62452775",
"0.62355095",
"0.62316394",
"0.6231468",
"0.6226869"
] | 0.71940935 | 0 |
Integration test that logger will raise an exception if account does not exist. | def test_configure_no_account(self):
config = self._getConfiguration()
account = u'no-such-account'
logger = manufacture.makeLogger()
with self.assertRaises(UtilsError) as context:
logger.configure(configuration=config, account=account)
self.assertEqual(u'1026', context.exception.event_id) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_lookup_account(self):\n pass",
"def test_account_already_exists(mocker, api: API):\n mocker.patch.object(Account, \"does_exist\", return_value=True)\n login = mocker.patch.object(Account, \"login\")\n create = mocker.patch.object(Account, \"create\")\n\n Account(api, \"USERNAME\", \"PASSWORD\")\n\n login.assert_called_once()\n create.assert_not_called()",
"def test_account_view_inexistent_account(flask_server, create_account):\n import requests\n\n data = create_account\n data['name'] += '123'\n\n req = requests.post('{}/account/view'.format(API_URL), data=data)\n assert req.content == b'No such account in database'\n assert req.status_code == 400",
"def test_email_account_operation_with_missing_user(self):\n user_id = 'missinguser'\n self.assertRaises(error.NotFound, \\\n SpokeEmailAccount, self.org_name, user_id)",
"def test_does_exist(account, api: API, accounts_found: list):\n api.user.login.return_value = accounts_found\n assert account.does_exist() == bool(accounts_found)",
"def test_duo_account_get(self):\n pass",
"def test_not_found(self):\n with self.assertRaises(UserNotFoundException):\n self._storage.get_by_username(\"test\")",
"def test_login_fails(api: API, account: Account):\n api.candlepin.get_owners.side_effect = EthelError(\"msg\", raw_error=HTTPError())\n with pytest.raises(EthelError):\n account.login()",
"def test_get_account(self):\n account = Account(self.client, \"[email protected]\", {})\n\n self.assertEqual(account.email, \"[email protected]\")\n self.assertEqual(account.state, \"PA\")\n self.assertEqual(account.city, \"Philadelphia\")\n self.assertEqual(account.phone, \"123-456-7890\")\n self.assertEqual(account.tax_id, \"\")\n self.assertEqual(account.balance, 0)\n self.assertEqual(account.company, \"Linode\")\n self.assertEqual(account.address_1, \"3rd & Arch St\")\n self.assertEqual(account.address_2, \"\")\n self.assertEqual(account.zip, \"19106\")\n self.assertEqual(account.first_name, \"Test\")\n self.assertEqual(account.last_name, \"Guy\")\n self.assertEqual(account.country, \"US\")\n self.assertIsNotNone(account.capabilities)\n self.assertIsNotNone(account.active_promotions)\n self.assertEqual(account.balance_uninvoiced, 145)\n self.assertEqual(account.billing_source, \"akamai\")\n self.assertEqual(account.euuid, \"E1AF5EEC-526F-487D-B317EBEB34C87D71\")",
"def test_unique_username(self):\n AccountFactory(username=\"billy\")\n with self.assertRaises(IntegrityError):\n AccountFactory(username=\"billy\")",
"def test_nonexistent_email(self, mock_log, result):\r\n # create an InstructorTask object to pass through\r\n course_id = self.course.id\r\n entry = InstructorTask.create(course_id, \"task_type\", \"task_key\", \"task_input\", self.instructor)\r\n task_input = {\"email_id\": -1}\r\n with self.assertRaises(CourseEmail.DoesNotExist):\r\n perform_delegate_email_batches(entry.id, course_id, task_input, \"action_name\") # pylint: disable=E1101\r\n ((log_str, __, email_id), __) = mock_log.warning.call_args\r\n self.assertTrue(mock_log.warning.called)\r\n self.assertIn('Failed to get CourseEmail with id', log_str)\r\n self.assertEqual(email_id, -1)\r\n self.assertFalse(result.called)",
"def test_logging():\n assert logger.name == 'wellcomeml.logger'",
"def test_route_non_existing_account():\n\n account_repository = AccountRepositoryMock()\n get_account = GetAccountMock(account_repository)\n withdraw_account = WithdrawAccount(account_repository, get_account)\n withdraw_account_controller = WithdrawAccountController(withdraw_account)\n\n attributes = {\n \"type\": \"withdraw\",\n \"origin\": faker.random_number(digits=2),\n \"amount\": faker.random_number(digits=3),\n }\n\n response = withdraw_account_controller.route(HttpRequest(body=attributes))\n\n assert get_account.account_id_param[\"account_id\"] == int(attributes[\"origin\"])\n\n assert response.status_code == 404\n assert \"error\" in response.body",
"def test_delete_missing_email_account(self):\n email_addr = 'deletemissing@' + self.email_dom\n first = 'test'\n last = 'missing'\n user_id = first + last\n user = SpokeUser(self.org_name)\n user.create(email_addr, first, last)\n acc = SpokeEmailAccount(self.org_name, user_id)\n self.assertRaises(error.NotFound, acc.delete, email_addr)\n user.delete(first, last)",
"def test_email_account_operation_with_missing_org(self):\n org_name = 'TestMissingOrg'\n self.assertRaises(error.NotFound, \\\n SpokeEmailAccount, org_name, self.user_id)",
"def test_search_for_log_without_credentials_fail(self):\n search_for = 'log'\n res = self.client.get(\n f'http://localhost:8000/api/logs/results?search={search_for}'\n )\n\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_verify_invalid_user(self, rp_logger):\n\n test_name = sys._getframe().f_code.co_name\n\n rp_logger.info(\"###### TEST EXECUTION STARTED :: \" +\n test_name + \" ######\")\n\n first_name = data_reader.get_data(test_name, 'FirstName')\n last_name = data_reader.get_data(test_name, 'LastName')\n email = data_reader.get_data(test_name, 'Email')\n\n with allure.step(\"Verify whether user exists\"):\n result = base_api.verify_valid_user(\n email, first_name, last_name)\n exe_status.mark_final(test_step=test_name, result=result)",
"def test_create_email_account_twice(self):\n email_addr = 'testcreatetwins@' + self.email_dom\n acc = SpokeEmailAccount(self.org_name, self.user_id)\n self.assertRaises(error.AlreadyExists, acc.create, email_addr)",
"def test_error_not_logged_create(self, mock_create_request):\n\n self.azk.logout()\n\n with self.assertRaises(NotLoggedOnError):\n self.azk.create('project', 'description')\n\n mock_create_request.assert_not_called()",
"def test_level_error(self):\n self.assertEqual(DiscordReportFormatter().format(self.record(loglevel=logging.ERROR)), \":exclamation: **test**\")",
"def test_already_existing_user(self):\n self.user.registration(\n \"Githeri\", \"[email protected]\", \"iwantgitheri\", \"iwantgitheri\")\n msg = self.user.registration(\"Githeri\",\n \"[email protected]\",\n \"iwantgitheri\",\n \"iwantgitheri\")\n self.assertEqual(msg, \"Your Account Already Active. Proceed to login\")",
"def test_get_logger(self):\n logger = utils.get_logger(name='foo',\n location='/tmp/junk.txt',\n max_size=102400,\n max_count=3)\n\n self.assertTrue(isinstance(logger, logging.Logger))",
"def test_need_client(self):\n self.assertRaises(TypeError, ACMEAccount)",
"def test_non_user_login(self):\n self.user.list_of_accounts = [{'username': 'Parseen',\n 'pwd': 'mypassword',\n 'email': '[email protected]'}]\n msg = self.user.login(\"[email protected]\", \"idontevenhaveone\")\n self.assertEqual(msg, \"Account not registered, sign up\")",
"def test_aud_from_log_ignores_index():\n assert True",
"def test_missing_credentials(self):\n twine = Twine(source=self.VALID_CREDENTIALS_TWINE)\n with self.assertRaises(exceptions.CredentialNotFound):\n twine.validate_credentials()",
"def test_inactive_account(self):",
"def test_logging(self):\n self._verify_logging()",
"def test_user_does_not_create_log(self):\n user = UserFactory.create()\n self.request.user = user\n user_logged_in.send(\n sender=user.__class__,\n request=self.request,\n user=user,\n )\n\n self.assertFalse(LogEntry.objects.count())",
"def test_create_account_not_allowed(self):\n\n def _side_effect_for_get_value(value, default=None):\n \"\"\"\n returns a side_effect with given return value for a given value\n \"\"\"\n if value == 'ALLOW_PUBLIC_ACCOUNT_CREATION':\n return False\n else:\n return get_value(value, default)\n\n with mock.patch('openedx.core.djangoapps.site_configuration.helpers.get_value') as mock_get_value:\n mock_get_value.side_effect = _side_effect_for_get_value\n response = self.client.post(self.url, {\"email\": self.EMAIL, \"username\": self.USERNAME})\n assert response.status_code == 403"
] | [
"0.63863415",
"0.6266122",
"0.6089439",
"0.6070338",
"0.60358477",
"0.59792036",
"0.5952488",
"0.5939118",
"0.58980507",
"0.5886915",
"0.5841414",
"0.5836312",
"0.58110714",
"0.57805747",
"0.5735064",
"0.5731167",
"0.5697972",
"0.56951076",
"0.5693734",
"0.5681385",
"0.56789255",
"0.56670946",
"0.56480414",
"0.5647173",
"0.5627153",
"0.56139636",
"0.5602365",
"0.5600672",
"0.55958927",
"0.5591881"
] | 0.76106346 | 0 |
Extracts an archive if it matches tar, tar.gz, tar.bz, or zip formats. | def _extract_archive(file_path, path=".", archive_format="auto"):
if archive_format is None:
return False
if archive_format == "auto":
archive_format = ["tar", "zip"]
if isinstance(archive_format, six.string_types):
archive_format = [archive_format]
for archive_type in archive_format:
if archive_type == "tar":
open_fn = tarfile.open
is_match_fn = tarfile.is_tarfile
if archive_type == "zip":
open_fn = zipfile.ZipFile
is_match_fn = zipfile.is_zipfile
if is_match_fn(file_path):
with open_fn(file_path) as archive:
try:
archive.extractall(path)
except (tarfile.TarError, RuntimeError, KeyboardInterrupt):
if os.path.exists(path):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
raise
return True
return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format == 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, six.string_types):\n archive_format = [archive_format]\n\n for archive_type in archive_format:\n if archive_type == 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type == 'zip':\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n try:\n archive.extractall(path)\n except (tarfile.TarError, RuntimeError, KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False",
"def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format is 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, six.string_types):\n archive_format = [archive_format]\n\n for archive_type in archive_format:\n if archive_type is 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type is 'zip':\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n # check weather extracted or not\n extracted = True\n for fname in archive.getnames():\n if not os.path.exists(os.path.join(path, fname)):\n extracted = False\n if not extracted:\n try:\n archive.extractall(path)\n print('extracted to', path)\n except (tarfile.TarError, RuntimeError,\n KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False",
"def _extract_archive(file_path, path='.', archive_format='auto'):\n if archive_format is None:\n return False\n if archive_format == 'auto':\n archive_format = ['tar', 'zip']\n if isinstance(archive_format, str):\n archive_format = [archive_format]\n\n file_path = path_to_string(file_path)\n path = path_to_string(path)\n\n for archive_type in archive_format:\n if archive_type == 'tar':\n open_fn = tarfile.open\n is_match_fn = tarfile.is_tarfile\n if archive_type == 'zip':\n open_fn = zipfile.ZipFile\n is_match_fn = zipfile.is_zipfile\n\n if is_match_fn(file_path):\n with open_fn(file_path) as archive:\n try:\n archive.extractall(path)\n except (tarfile.TarError, RuntimeError, KeyboardInterrupt):\n if os.path.exists(path):\n if os.path.isfile(path):\n os.remove(path)\n else:\n shutil.rmtree(path)\n raise\n return True\n return False",
"def extract_all(fn,dst=\".\"):\r\n if tarfile.is_tarfile(fn): \r\n with tarfile.open(fn,'r') as tf:\r\n tf.extractall(dst)\r\n tf.close()\r\n elif zipfile.is_zipfile(fn):\r\n with zipfile.ZipFile(fn, 'r') as zf:\r\n zf.extractall(dst)\r\n zf.close()\r\n else:\r\n print( \"Please provide a tar archive file or zip file\" )",
"def extract_archive(\n fname: str, outfile: Optional[str] = None, concat: bool = False\n) -> Union[str, None]:\n if fname.endswith((\".tgz\", \".tar.gz\")):\n return extract_tarball(fname, outfile=outfile)\n elif fname.endswith(\".gz\"):\n return extract_gzip(fname, outfile=outfile)\n elif fname.endswith(\n \".zip\",\n ):\n return extract_zip(fname, outfile=outfile, concat=concat)",
"def extract_tars(file_pattern, path_in, path_out):\n for f in glob.glob(os.path.join(path_in, file_pattern)):\n shutil.unpack_archive(f, path_out)",
"def extract_tar_file(archive_path, output_dir, relative_to, extractors=None):\n if extractors is None:\n extractors = DEFAULT_EXTRACTORS\n\n current_platform = get_running_platform()\n if current_platform == PlatformEnum.WINDOWS:\n # Try to use 7-zip first\n sevenzip_cmd = extractors.get(ExtractorEnum.SEVENZIP)\n if sevenzip_cmd == USE_REGISTRY:\n sevenzip_cmd = str(_find_7z_by_registry())\n sevenzip_bin = _find_extractor_by_cmd(sevenzip_cmd)\n if sevenzip_bin is not None:\n _extract_tar_with_7z(sevenzip_bin, archive_path, output_dir, relative_to)\n return\n\n # Use WinRAR if 7-zip is not found\n winrar_cmd = extractors.get(ExtractorEnum.WINRAR)\n if winrar_cmd == USE_REGISTRY:\n winrar_cmd = str(_find_winrar_by_registry())\n winrar_bin = _find_extractor_by_cmd(winrar_cmd)\n if winrar_bin is not None:\n _extract_tar_with_winrar(winrar_bin, archive_path, output_dir, relative_to)\n return\n get_logger().warning(\n 'Neither 7-zip nor WinRAR were found. Falling back to Python extractor...')\n elif current_platform == PlatformEnum.UNIX:\n # NOTE: 7-zip isn't an option because it doesn't preserve file permissions\n tar_bin = _find_extractor_by_cmd(extractors.get(ExtractorEnum.TAR))\n if not tar_bin is None:\n _extract_tar_with_tar(tar_bin, archive_path, output_dir, relative_to)\n return\n else:\n # This is not a normal code path, so make it clear.\n raise NotImplementedError(current_platform)\n # Fallback to Python-based extractor on all platforms\n _extract_tar_with_python(archive_path, output_dir, relative_to)",
"def _unpack_archive(self, dir, filters):\n ext = os.path.splitext(self.path)[1]\n if ext in [\".zip\", \".xpi\"]:\n if filters:\n raise GbpError(\"Can only filter tar archives: %s\", (ext, self.path))\n self._unpack_zip(dir)\n else:\n self._unpack_tar(dir, filters)",
"def unzip_archive(archive):\n tmpdir = os.path.join(tempfile.gettempdir(),\n os.path.basename(archive))\n assert tmpdir != archive # That wouldn't work out\n\n if os.path.exists(tmpdir):\n # files are already extracted\n pass\n else:\n if tarfile.is_tarfile(archive):\n print 'Extracting tarfile ...'\n with tarfile.open(archive) as tf:\n tf.extractall(path=tmpdir)\n elif zipfile.is_zipfile(archive):\n print 'Extracting zipfile ...'\n with zipfile.ZipFile(archive) as zf:\n zf.extractall(path=tmpdir)\n else:\n raise ValueError('Unknown file type for %s' % os.path.basename(archive))\n return tmpdir",
"def extractTar(tar_file):\r\n\r\n tfile = tarfile.open(tar_file, 'r')\r\n tar_members = tfile.getmembers()\r\n for tar_member in tar_members:\r\n tar_member.name = os.path.basename(tar_member.name) # Strip out the path and keep just the file name\r\n tfile.extract(tar_member, path=\"tempdata/\")\r\n tfile.close()\r\n print \"Finished extracting tar file\"\r\n return",
"def handle_tar(file_path, extension, extracted_path, destination_directory):\n tar = tarfile.open(file_path, extension)\n # remove files if they already exist\n if os.path.exists(extracted_path):\n shutil.rmtree(extracted_path)\n tar.extractall(path=destination_directory)\n tar.close()",
"def unpack(tarball, dst, verbose=False, match=None):\n print(\"extracting\", tarball)\n fname = os.path.basename(tarball).replace(\".tar.gz\", \"\")\n with contextlib.closing(tarfile.open(tarball)) as tar:\n for member in tar.getnames():\n if \"/\" not in member:\n continue\n name = member.replace(fname + \"/\", \"\", 1)\n if match is not None and not name.startswith(match):\n continue\n name = name[len(match) + 1:]\n\n dst_path = os.path.join(dst, name)\n if verbose:\n print(\" extracting\", member)\n tar.extract(member, dst)\n src_path = os.path.join(dst, member)\n if os.path.isdir(src_path) and os.path.exists(dst_path):\n continue\n shutil.move(src_path, dst_path)\n shutil.rmtree(os.path.join(dst, fname))",
"def untar(archive):\n log.info('Unpacking archive \"%s\".' % archive)\n tar = module.params['tar']\n tar_extra_options = shlex.split(module.params['tar_extra_options'])\n if not tar:\n tar = module.get_bin_path('tar', required=True)\n if archive.endswith('.gz'):\n uncompress = 'z'\n elif archive.endswith('.bz2'):\n uncompress = 'j'\n else:\n raise ValueError('Unsupported compression type: %s' % archive)\n options = ''.join(['x', uncompress, 'f'])\n args = [tar, options] + tar_extra_options + [archive]\n rc, out, err = module.run_command(args)\n log.info('untar: rc=%d out=%s err=%s', rc, out, err)\n if rc != 0:\n raise ValueError('tar command failed: %d' % rc)",
"def extract_one(self, archive: Path, dest: Path):\n if dest.exists():\n shutil.rmtree(dest)\n\n dest.mkdir(parents=True)\n\n if self.should_use_libarchive_c:\n import libarchive\n\n old_cwd = os.getcwd()\n os.chdir(str(dest))\n try:\n libarchive.extract_file(str(archive))\n finally:\n os.chdir(old_cwd)\n return\n\n if archive.name.endswith(EXTENSION_ZIP):\n with zipfile.ZipFile(archive) as zf:\n zf.extractall(dest)\n elif archive.name.endswith(EXTENSION_TAR):\n mode = \"r:bz2\" if archive.name.endswith(\".bz2\") else \"r:gz\"\n with tarfile.open(archive, mode) as tf:\n self.safe_extract_all(tf, dest)\n else:\n raise ValueError(f\"Unrecognized archive format {archive.name}\")\n\n for path in [dest, *dest.rglob(\"*\")]:\n path.chmod(MOD_DIRECTORY if path.is_dir() else MOD_FILE)",
"def extract_tar(\r\n file_path: str, unpack_path: str = None, remove_if_exists: bool = False\r\n) -> Optional[str]:\r\n\r\n if not os.path.exists(file_path):\r\n log.warning(file_path + \" does not exist.\")\r\n return None\r\n\r\n if not tarfile.is_tarfile(file_path):\r\n log.warning(file_path + \" is not a tar file.\")\r\n return None\r\n\r\n if not unpack_path:\r\n unpack_path = os.path.join(\r\n os.path.dirname(file_path), os.path.splitext(os.path.basename(file_path))[0]\r\n )\r\n\r\n if os.path.isdir(unpack_path):\r\n log.info(\"Unpack directory already exists \" + unpack_path)\r\n if not os.listdir(unpack_path):\r\n log.info(\"Directory is empty. Unpacking...\")\r\n elif remove_if_exists:\r\n log.info(\"Removing existing unpacked dir: \" + unpack_path)\r\n shutil.rmtree(unpack_path)\r\n else:\r\n return unpack_path\r\n\r\n log.info(\"Unpacking file \" + os.path.basename(file_path) + \" to: \" + unpack_path)\r\n compression = identify_compression(file_path)\r\n if not compression:\r\n mode = \"r\"\r\n elif compression == \"gz\":\r\n mode = \"r:gz\"\r\n elif compression == \"bz2\":\r\n mode = \"r:bz2\"\r\n else:\r\n mode = \"r\"\r\n\r\n tar = tarfile.open(file_path, mode)\r\n tar.extractall(unpack_path)\r\n tar.close()\r\n\r\n # Tar unpacking via tar command\r\n # tar needs empty directory\r\n # if not os.path.exists(unpack_path):\r\n # os.makedirs(unpack_path)\r\n # log.info(\"Unpacking (via tar command) file \" + os.path.basename(file_path) + \" to: \" + unpack_path)\r\n # handle compression with -zvxf\r\n # cmd = \"tar -xf \" + file_path + \" -C \" + unpack_path\r\n # log.debug(\"Executing: \" + cmd)\r\n # exit_code = system_utils.bash_command(cmd)\r\n # log.info(\"Finished with exit code: \" + str(exit_code))\r\n\r\n if not os.path.exists(unpack_path):\r\n log.warning(\"Failed to extract tar file: \" + file_path)\r\n\r\n return unpack_path",
"def __extract_tgz(self):\n tar_file = tarfile.open(self.archive)\n try:\n extract_dir = tempfile.mkdtemp()\n archive_binaries_dir = self.__create_extraction_dir(\n tar_file.getnames(), extract_dir, tar_file.extract)\n finally:\n tar_file.close()\n return archive_binaries_dir, extract_dir",
"def extract_file(source_path, destination_path):\n extender_tar = ['tar.gz', 'tar', 'gz']\n extender_zip = ['zip']\n\n extend = source_path.split('.')[-1]\n if extend in extender_tar:\n extract_tar(source_path, destination_path)\n elif extend in extender_zip:\n extract_zip(source_path, destination_path)",
"def extract_tar_from_ab(path_to_ab, output_dir=None):\n if output_dir is None:\n output_dir = os.path.dirname(path_to_ab)\n\n # Check we can open the file\n try:\n ab_data = open(path_to_ab, 'rb')\n except:\n logging.critical(f\"Unable to open AB file at {path_to_ab}\")\n return False\n\n # Check the AB file header is intact\n ab_bytes_to_remove = ab_data.read(24)\n\n if ab_bytes_to_remove[:14] == AB_HEADER:\n logging.info(\"AB Header checked and intact\")\n else:\n logging.error(\"AB Header not found; is it definitely the right file?\")\n return False\n\n # Open the target tar file\n output_path = build_tar_filepath(input_path, output_dir)\n\n try:\n output_file = open(output_path, 'wb')\n except:\n logging.error(\"Unable to open file at {output_path}\")\n return False\n\n logging.info(\"Writing tar header..\")\n output_file.write(TAR_HEADER)\n\n logging.info(\"Writing rest of AB file..\")\n output_file.write(ab_data.read())\n\n logging.info(\"..done.\")\n logging.info(\"Closing files..\")\n\n output_file.close()\n ab_data.close()\n\n # quick verify\n try:\n test_val = is_tarfile(output_path)\n logging.info(\"Output verified OK\")\n return True\n except:\n logging.error(\"Verification failed; maybe it's encrypted?\")\n return False",
"def extract_tar(tar_path, target_folder):\n with tarfile.open(tar_path, 'r') as archive:\n archive.extractall(target_folder)",
"def untar(tarfile, outdir):\n tmpdir = tempfile.mkdtemp()\n try:\n untared = _open_archive(tarfile, tmpdir)\n files = [f for f in untared if os.path.isfile(os.path.join(tmpdir, f))]\n dirs = [d for d in untared if os.path.isdir(os.path.join(tmpdir, d))]\n assert len(files) + len(dirs) == len(untared), 'Only files and directories'\n if _files_same(tmpdir, outdir, files) and _dirs_same(tmpdir, outdir, dirs):\n # Nothing new or different in the tarfile.\n return False\n # Some or all of the files / directories are new.\n _move_files(tmpdir, outdir, files)\n _move_dirs(tmpdir, outdir, dirs)\n return True\n finally:\n if os.path.isdir(tmpdir):\n shutil.rmtree(tmpdir)",
"def extract_source(source_archive, target):\r\n with tarfile.open(source_archive) as tar_file:\r\n safetar_extractall(tar_file, target)",
"def untar_first(input_filename: str, extract_dir: str) -> str:\n with tarfile.open(input_filename) as tar_data:\n file_to_extract = tar_data.next()\n while file_to_extract is not None and not file_to_extract.isfile():\n file_to_extract = tar_data.next()\n\n if file_to_extract is None:\n print(f'No file found in archive {input_filename}')\n res = ''\n else:\n tar_data.extract(file_to_extract, path=extract_dir)\n res = os.path.join(extract_dir, file_to_extract.name)\n return res",
"def safetar_extractall(tarf, *args, **kwargs):\r\n return tarf.extractall(members=safemembers(tarf), *args, **kwargs)",
"def is_archive(afile):\n return file_ext(os.path.basename(afile)) in ARCHIVE_COMPRESS_FORMATS",
"def extract(apath, ffilter=[]):\n\n files = []\n\n def extract_recursive(curr_apath):\n \"\"\"Look into archive recursively to extract files considering ffilter\"\"\"\n\n handler = resolve_format(curr_apath)\n unpacker = HandlersFactory.get_handler(handler)\n _files = unpacker.files_list(curr_apath)\n\n for f in _files:\n if is_matched(f, ffilter=ffilter):\n _fpath = unpacker.extract(curr_apath, f)\n files.append(_fpath)\n if is_archive(f):\n _apath = unpacker.extract(curr_apath, f)\n extract_recursive(_apath)\n\n extract_recursive(apath)\n return files",
"def _extract_tar_file(tar_path, buildspace_tree, unpack_dir, ignore_files, relative_to):\n\n class NoAppendList(list):\n \"\"\"Hack to workaround memory issues with large tar files\"\"\"\n def append(self, obj):\n pass\n\n # Simple hack to check if symlinks are supported\n try:\n os.symlink('', '')\n except FileNotFoundError:\n # Symlinks probably supported\n symlink_supported = True\n except OSError:\n # Symlinks probably not supported\n get_logger().info('System does not support symlinks. Ignoring them.')\n symlink_supported = False\n except BaseException:\n # Unexpected exception\n get_logger().exception('Unexpected exception during symlink support check.')\n raise BuildkitAbort()\n\n resolved_tree = buildspace_tree.resolve()\n\n with tarfile.open(str(tar_path)) as tar_file_obj:\n tar_file_obj.members = NoAppendList()\n for tarinfo in tar_file_obj:\n try:\n if relative_to is None:\n tree_relative_path = unpack_dir / PurePosixPath(tarinfo.name)\n else:\n tree_relative_path = unpack_dir / PurePosixPath(tarinfo.name).relative_to(\n relative_to) # pylint: disable=redefined-variable-type\n try:\n ignore_files.remove(tree_relative_path.as_posix())\n except KeyError:\n destination = resolved_tree / tree_relative_path\n if tarinfo.issym() and not symlink_supported:\n # In this situation, TarFile.makelink() will try to create a copy of the\n # target. But this fails because TarFile.members is empty\n # But if symlinks are not supported, it's safe to assume that symlinks\n # aren't needed. The only situation where this happens is on Windows.\n continue\n if tarinfo.islnk():\n # Derived from TarFile.extract()\n new_target = resolved_tree / unpack_dir / PurePosixPath(\n tarinfo.linkname).relative_to(relative_to)\n tarinfo._link_target = new_target.as_posix() # pylint: disable=protected-access\n if destination.is_symlink():\n destination.unlink()\n tar_file_obj._extract_member(tarinfo, str(destination)) # pylint: disable=protected-access\n except BaseException:\n get_logger().exception('Exception thrown for tar member: %s', tarinfo.name)\n raise BuildkitAbort()",
"def extract_sources(tarsources=[], zipsources=[]):\n for source in tarsources:\n cmd = [\"tar\", \"-xvf\", source, \"-C\", WHEEL_BUILD_DIR]\n subprocess.check_call(cmd)\n\n for source in zipsources:\n cmd = [\"unzip\", source, \"-d\", WHEEL_BUILD_DIR]\n subprocess.check_call(cmd)",
"def extract_file(self, filename):\n unp_bin = os.path.join(self.modulebin, 'unp')\n\n filepath = os.path.dirname(filename)\n uncompressed = ['fasta', 'fa', 'fastq', 'fq', 'fna', 'h5' ]\n supported = ['tar.gz', 'tar.bz2', 'bz2', 'gz', 'lz',\n 'rar', 'tar', 'tgz','zip']\n for ext in uncompressed:\n if filename.endswith('.'+ext):\n return filename\n for ext in supported:\n if filename.endswith('.'+ext):\n extracted_file = filename[:filename.index(ext)-1]\n if os.path.exists(extracted_file): # Check extracted already\n return extracted_file\n logger.info(\"Extracting {}...\".format(filename))\n # p = subprocess.Popen([unp_bin, filename],\n # cwd=filepath, stderr=subprocess.STDOUT)\n # p.wait()\n # Hide the \"broken pipe\" message from unp\n out = subprocess.Popen([unp_bin, filename],\n cwd=filepath,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT).communicate()[0]\n if os.path.exists(extracted_file):\n return extracted_file\n else:\n logger.error(\"Extraction of {} failed: {}\".format(filename, out))\n raise Exception('Archive structure error')\n logger.error(\"Could not extract {}\".format(filename))\n return filename",
"def _is_archive(local_path: str) -> bool:\n archive_mimetypes = [\n \"application/zip\",\n \"application/x-tar\",\n \"application/x-gzip\",\n \"application/x-bzip2\",\n \"application/x-7z-compressed\",\n \"application/x-rar-compressed\",\n \"application/x-xz\",\n \"application/x-lzip\",\n \"application/x-lzma\",\n \"application/x-lzop\",\n \"application/x-bzip\",\n \"application/x-bzip2\",\n \"application/x-compress\",\n \"application/x-compressed\",\n ]\n\n return mimetypes.guess_type(local_path)[0] in archive_mimetypes",
"def extract_bz2(tar_bz_f, calc_type):\n \n # we give the gaussian calculator a label that includes the directory the\n # log file is contained in, this does not affect the parsing process but\n # generate a warning as it is not an expected form for the label. To avoid\n # the profusion of warning messages in stdout we suppress warnings here.\n warnings.filterwarnings(\"ignore\")\n\n extended_xyzs = []\n\n with tarfile.open(tar_bz_f,'r:bz2') as data_tar:\n calc_logs = (log_tar for log_tar in data_tar \n if calc_type in log_tar.name and '.log' in log_tar.name)\n \n for calc_log in calc_logs:\n try:\n # extract log file (and relevant directory)\n data_tar.extract(calc_log)\n\n # convert to ASE/Gaussian format\n mol = read(calc_log.name)\n mol.set_calculator(\n Gaussian(\n label=calc_log.name.replace('.log','')\n )\n )\n\n # parse into ML readable format\n parsed_mol = ase_mol_parse(mol)\n extended_xyzs.append(parsed_mol)\n except AttributeError:\n # if the calculation didn't complete we get an empty dictionary \n # so we we keep track of these failed calculations with a blank \n # entry other than mol_id\n blank_entry = {'mol_id':calc_log.name.replace('.log','')}\n extended_xyzs.append(blank_entry)\n finally:\n # delete extracted log file\n os.remove(calc_log.name)\n\n # solves memory problem\n # http://blogs.it.ox.ac.uk/inapickle/2011/06/20/high-memory-usage-when-using-pythons-tarfile-module/ # noqa\n data_tar.members=[]\n \n dir_name = tar_bz_f.replace('.tar.bz2','')\n\n # delete extracted directory\n os.rmdir(dir_name)\n\n # search key function\n # assumes files are named xxx_frame_{frame}_mol_{no}\n def mol_no_frame(mol):\n mol_no = int(mol['mol_id'].split('_')[-1].replace('.log',''))\n mol_frame = int(mol['mol_id'].split('_')[-3])\n return mol_no,mol_frame\n\n extended_xyzs = sorted(extended_xyzs,key=mol_no_frame)\n\n with open(dir_name + '_' + calc_type + '_data.pkl', 'w') as data_f:\n dill.dump(extended_xyzs, data_f)"
] | [
"0.77330834",
"0.76566243",
"0.7581955",
"0.6881372",
"0.6814677",
"0.66347516",
"0.64978355",
"0.6380294",
"0.6378682",
"0.6291018",
"0.62800467",
"0.6221986",
"0.6217149",
"0.619506",
"0.6140137",
"0.61342347",
"0.6103157",
"0.61027014",
"0.6078421",
"0.6045398",
"0.5988209",
"0.5934428",
"0.59085774",
"0.58919173",
"0.58721024",
"0.5846229",
"0.58381796",
"0.5823581",
"0.5795458",
"0.57654846"
] | 0.77244127 | 1 |
Minimization of a scalar function of one or more variables using parallel CMAES retry. | def minimize(fun,
bounds = None,
value_limit = math.inf,
num_retries = 1000,
logger = None,
workers = mp.cpu_count(),
popsize = 31,
max_evaluations = 50000,
capacity = 500,
stop_fittness = None,
optimizer = None,
):
if optimizer is None:
optimizer = de_cma(max_evaluations, popsize, stop_fittness)
store = Store(bounds, capacity = capacity, logger = logger)
return retry(fun, store, optimizer.minimize, num_retries, value_limit, workers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def min_scalar(objective, **kwargs):\n result = minimize_scalar(objective, **kwargs)\n return result.fun",
"def auxminf1(x):\n \n# Sum over data points\n f = 0.0\n for m_ind in range(cfg.ntrain):\n f += auxmin_f1_part_i(x,m_ind) \n \n return f",
"def fmin(func, x0, sigma0=None, args=()\r\n # the follow string arguments are evaluated, besides the verb_filenameprefix\r\n , CMA_active='False # exponential negative update, conducted after the original update'\r\n , CMA_activefac='1 # learning rate multiplier for active update'\r\n , CMA_cmean='1 # learning rate for the mean value'\r\n , CMA_const_trace='False # normalize trace, value CMA_const_trace=2 normalizes sum log eigenvalues to zero'\r\n , CMA_diagonal='0*100*N/sqrt(popsize) # nb of iterations with diagonal covariance matrix, True for always' # TODO 4/ccov_separable?\r\n , CMA_eigenmethod='np.linalg.eigh # 0=numpy-s eigh, -1=pygsl, otherwise cma.Misc.eig (slower)'\r\n , CMA_elitist='False # elitism likely impairs global search performance'\r\n , CMA_mirrors='popsize < 6 # values <0.5 are interpreted as fraction, values >1 as numbers (rounded), otherwise about 0.16 is used'\r\n , CMA_mu='None # parents selection parameter, default is popsize // 2'\r\n , CMA_on='True # False or 0 for no adaptation of the covariance matrix'\r\n , CMA_rankmu='True # False or 0 for omitting rank-mu update of covariance matrix'\r\n , CMA_rankmualpha='0.3 # factor of rank-mu update if mu=1, subject to removal, default might change to 0.0'\r\n , CMA_dampfac='1 #v positive multiplier for step-size damping, 0.3 is close to optimal on the sphere'\r\n , CMA_dampsvec_fac='np.Inf # tentative and subject to changes, 0.5 would be a \"default\" damping for sigma vector update'\r\n , CMA_dampsvec_fade='0.1 # tentative fading out parameter for sigma vector update'\r\n , CMA_teststds='None # factors for non-isotropic initial distr. mainly for test purpose, see scaling_...'\r\n , CMA_AII='False # not yet tested'\r\n , bounds='[None, None] # lower (=bounds[0]) and upper domain boundaries, each a scalar or a list/vector'\r\n , eval_parallel='False # when True, func might be called with more than one solution as first argument'\r\n , eval_initial_x='False # '\r\n , fixed_variables='None # dictionary with index-value pairs like {0:1.1, 2:0.1} that are not optimized'\r\n , ftarget='-inf #v target function value, minimization'\r\n , incpopsize='2 # in fmin(): multiplier for increasing popsize before each restart'\r\n , maxfevals='inf #v maximum number of function evaluations'\r\n , maxiter='100 + 50 * (N+3)**2 // popsize**0.5 #v maximum number of iterations'\r\n , mindx='0 #v minimal std in any direction, cave interference with tol*'\r\n , minstd='0 #v minimal std in any coordinate direction, cave interference with tol*'\r\n , noise_handling='False # maximal number of evaluations for noise treatment, only fmin'\r\n , noise_reevals=' 1.5 + popsize/20 # number of solution to be reevaluated for noise measurement, only fmin'\r\n , noise_eps='1e-7 # perturbation factor for noise handling reevaluations, only fmin'\r\n , noise_change_sigma='True # exponent to default sigma increment'\r\n , popsize='4+int(3*log(N)) # population size, AKA lambda, number of new solution per iteration'\r\n , randn='np.random.standard_normal #v randn((lam, N)) must return an np.array of shape (lam, N)'\r\n , restarts='0 # in fmin(): number of restarts'\r\n , restart_from_best='False'\r\n , scaling_of_variables='None # scale for each variable, sigma0 is interpreted w.r.t. this scale, in that effective_sigma0 = sigma0*scaling. 
Internally the variables are divided by scaling_of_variables and sigma is unchanged, default is ones(N)'\r\n , seed='None # random number seed'\r\n , termination_callback='None #v a function returning True for termination, called after each iteration step and could be abused for side effects'\r\n , tolfacupx='1e3 #v termination when step-size increases by tolfacupx (diverges). That is, the initial step-size was chosen far too small and better solutions were found far away from the initial solution x0'\r\n , tolupsigma='1e20 #v sigma/sigma0 > tolupsigma * max(sqrt(eivenvals(C))) indicates \"creeping behavior\" with usually minor improvements'\r\n , tolfun='1e-11 #v termination criterion: tolerance in function value, quite useful'\r\n , tolfunhist='1e-12 #v termination criterion: tolerance in function value history'\r\n , tolstagnation='int(100 + 100 * N**1.5 / popsize) #v termination if no improvement over tolstagnation iterations'\r\n , tolx='1e-11 #v termination criterion: tolerance in x-changes'\r\n , transformation='None # [t0, t1] are two mappings, t0 transforms solutions from CMA-representation to f-representation (tf_pheno), t1 is the (optional) back transformation, see class GenoPheno'\r\n , typical_x='None # used with scaling_of_variables'\r\n , updatecovwait='None #v number of iterations without distribution update, name is subject to future changes' # TODO: rename: iterwaitupdatedistribution?\r\n , verb_append='0 # initial evaluation counter, if append, do not overwrite output files'\r\n , verb_disp='100 #v verbosity: display console output every verb_disp iteration'\r\n , verb_filenameprefix='outcmaes # output filenames prefix'\r\n , verb_log='1 #v verbosity: write data to files every verb_log iteration, writing can be time critical on fast to evaluate functions'\r\n , verb_plot='0 #v in fmin(): plot() is called every verb_plot iteration'\r\n , verb_time='True #v output timings on console'\r\n , vv='0 #? versatile variable for hacking purposes, value found in self.opts[\\'vv\\']'\r\n ): # style guides say there should be the above empty line\r\n try: # pass on KeyboardInterrupt\r\n opts = locals() # collect all local variables (i.e. 
arguments) in a dictionary\r\n del opts['func'] # remove those without a default value\r\n del opts['args']\r\n del opts['x0'] # is not optional, no default available\r\n del opts['sigma0'] # is not optional for the constructor CMAEvolutionStrategy\r\n if not func: # return available options in a dictionary\r\n return Options(opts, True) # these opts are by definition valid\r\n\r\n # TODO: this is very ugly:\r\n incpopsize = Options({'incpopsize':incpopsize}).eval('incpopsize')\r\n restarts = Options({'restarts':restarts}).eval('restarts')\r\n del opts['restarts']\r\n noise_handling = Options({'noise_handling': noise_handling}).eval('noise_handling')\r\n del opts['noise_handling']# otherwise CMA throws an error\r\n\r\n irun = 0\r\n best = BestSolution()\r\n while 1:\r\n # recover from a CMA object\r\n if irun == 0 and isinstance(x0, CMAEvolutionStrategy):\r\n es = x0\r\n x0 = es.inputargs['x0'] # for the next restarts\r\n if sigma0 is None or not np.isscalar(array(sigma0)):\r\n sigma0 = es.inputargs['sigma0'] # for the next restarts\r\n # ignore further input args and keep original options\r\n else: # default case\r\n if irun and opts['restart_from_best']:\r\n print('CAVE: restart_from_best is typically not useful')\r\n es = CMAEvolutionStrategy(best.x, sigma0, opts)\r\n else:\r\n es = CMAEvolutionStrategy(x0, sigma0, opts)\r\n if opts['eval_initial_x']:\r\n x = es.gp.pheno(es.mean, bounds=es.gp.bounds)\r\n es.best.update([x], None, [func(x, *args)], 1)\r\n es.countevals += 1\r\n\r\n opts = es.opts # processed options, unambiguous\r\n\r\n append = opts['verb_append'] or es.countiter > 0 or irun > 0\r\n logger = CMADataLogger(opts['verb_filenameprefix'], opts['verb_log'])\r\n logger.register(es, append).add() # initial values, not fitness values\r\n\r\n # if es.countiter == 0 and es.opts['verb_log'] > 0 and not es.opts['verb_append']:\r\n # logger = CMADataLogger(es.opts['verb_filenameprefix']).register(es)\r\n # logger.add()\r\n # es.writeOutput() # initial values for sigma etc\r\n\r\n noisehandler = NoiseHandler(es.N, noise_handling, np.median, opts['noise_reevals'], opts['noise_eps'], opts['eval_parallel'])\r\n while not es.stop():\r\n X, fit = es.ask_and_eval(func, args, evaluations=noisehandler.evaluations,\r\n aggregation=np.median) # treats NaN with resampling\r\n # TODO: check args and in case use args=(noisehandler.evaluations, )\r\n\r\n if 11 < 3 and opts['vv']: # inject a solution\r\n # use option check_point = [0]\r\n if 0 * np.random.randn() >= 0:\r\n X[0] = 0 + opts['vv'] * es.sigma**0 * np.random.randn(es.N)\r\n fit[0] = func(X[0], *args)\r\n # print fit[0]\r\n es.tell(X, fit) # prepare for next iteration\r\n if noise_handling:\r\n es.sigma *= noisehandler(X, fit, func, es.ask, args)**opts['noise_change_sigma']\r\n es.countevals += noisehandler.evaluations_just_done # TODO: this is a hack, not important though\r\n\r\n es.disp()\r\n logger.add(more_data=[noisehandler.evaluations, 10**noisehandler.noiseS] if noise_handling else [],\r\n modulo=1 if es.stop() and logger.modulo else None)\r\n if opts['verb_log'] and opts['verb_plot'] and \\\r\n (es.countiter % max(opts['verb_plot'], opts['verb_log']) == 0 or es.stop()):\r\n logger.plot(324, fontsize=10)\r\n\r\n # end while not es.stop\r\n mean_pheno = es.gp.pheno(es.mean, bounds=es.gp.bounds)\r\n fmean = func(mean_pheno, *args)\r\n es.countevals += 1\r\n\r\n es.best.update([mean_pheno], None, [fmean], es.countevals)\r\n best.update(es.best) # in restarted case\r\n\r\n # final message\r\n if opts['verb_disp']:\r\n srestarts = (' 
after %i restart' + ('s' if irun > 1 else '')) % irun if irun else ''\r\n for k, v in list(es.stop().items()):\r\n print('termination on %s=%s%s (%s)' % (k, str(v), srestarts, time.asctime()))\r\n\r\n print('final/bestever f-value = %e %e' % (es.best.last.f, best.f))\r\n if es.N < 9:\r\n print('mean solution: ' + str(es.gp.pheno(es.mean)))\r\n print('std deviation: ' + str(es.sigma * sqrt(es.dC) * es.gp.scales))\r\n else:\r\n print('mean solution: %s ...]' % (str(es.gp.pheno(es.mean)[:8])[:-1]))\r\n print('std deviations: %s ...]' % (str((es.sigma * sqrt(es.dC) * es.gp.scales)[:8])[:-1]))\r\n\r\n irun += 1\r\n if irun > restarts or 'ftarget' in es.stopdict or 'maxfunevals' in es.stopdict:\r\n break\r\n opts['verb_append'] = es.countevals\r\n opts['popsize'] = incpopsize * es.sp.popsize # TODO: use rather options?\r\n opts['seed'] += 1\r\n\r\n # while irun\r\n\r\n es.out['best'] = best # TODO: this is a rather suboptimal type for inspection in the shell\r\n if 1 < 3:\r\n return es.result() + (es.stop(), es, logger)\r\n\r\n else: # previously: to be removed\r\n return (best.x.copy(), best.f, es.countevals,\r\n dict((('stopdict', CMAStopDict(es.stopdict))\r\n ,('mean', es.gp.pheno(es.mean))\r\n ,('std', es.sigma * sqrt(es.dC) * es.gp.scales)\r\n ,('out', es.out)\r\n ,('opts', es.opts) # last state of options\r\n ,('cma', es)\r\n ,('inputargs', es.inputargs)\r\n ))\r\n )\r\n # TODO refine output, can #args be flexible?\r\n # is this well usable as it is now?\r\n except KeyboardInterrupt: # Exception, e:\r\n if opts['verb_disp'] > 0:\r\n print(' in/outcomment ``raise`` in last line of cma.fmin to prevent/restore KeyboardInterrupt exception')\r\n raise # cave: swallowing this exception can silently mess up experiments, if ctrl-C is hit\r",
"def minimize(self, cost_function, initial_params):\n\n # Optimization Results Object\n history = []\n\n def wrapped_cost_function(params):\n value = cost_function.evaluate(params)\n history.append(cost_function.evaluations_history[-1])\n print(f'Function evaluation {len(history)}: {value}', flush=True)\n print(f'{params}', flush=True)\n return value\n\n strategy = cma.CMAEvolutionStrategy(initial_params, self.sigma_0, self.options)\n result = strategy.optimize(wrapped_cost_function).result\n\n optimization_results = {}\n optimization_results['opt_value'] = result.fbest\n optimization_results['opt_params'] = result.xbest\n optimization_results['history'] = history\n optimization_results['nfev'] = result.evaluations\n optimization_results['nit'] = result.iterations\n optimization_results['cma_xfavorite'] = list(result.xfavorite)\n\n return OptimizeResult(optimization_results)",
"def minimize(A, t, y0, function):\n return y0 - function(A, t)",
"def minimize_neldermead(func, x0, args=(), callback=None,\n maxiter=None, maxfev=None, disp=False,\n return_all=False, initial_simplex=None,\n xatol=1e-4, fatol=1e-4, **unknown_options):\n maxfun = maxfev\n retall = return_all\n\n rho = 1\n chi = 2\n psi = 0.5\n sigma = 0.5\n nonzdelt = 0.05\n zdelt = 0.00025\n\n if initial_simplex is None:\n N = len(x0)\n\n sim = numpy.zeros((N + 1, N), dtype=x0.dtype)\n sim[0] = x0\n for k in range(N):\n y = numpy.array(x0, copy=True)\n if y[k] != 0:\n y[k] = (1 + nonzdelt) * y[k]\n else:\n y[k] = zdelt\n sim[k + 1] = y\n\n maxiter = 10\n maxfun = 10\n\n one2np1 = list(range(1, N + 1))\n fsim = numpy.zeros((N + 1,), float)\n\n for k in range(N + 1):\n fsim[k] = func(sim[k])\n\n ind = numpy.argsort(fsim)\n fsim = numpy.take(fsim, ind, 0)\n # sort so sim[0,:] has the lowest function value\n sim = numpy.take(sim, ind, 0)\n raise Exception()\n print('aaaaffaaaaaa')\n\n iterations = 1\n\n while iterations < maxiter:\n if (numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))) <= xatol and\n numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol):\n break\n logger.debug('itr: %s' % iterations)\n print('aaaaaaaaaa')\n xbar = numpy.add.reduce(sim[:-1], 0) / N\n xr = (1 + rho) * xbar - rho * sim[-1]\n fxr = func(xr)\n doshrink = 0\n\n if fxr < fsim[0]:\n xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]\n fxe = func(xe)\n\n if fxe < fxr:\n sim[-1] = xe\n fsim[-1] = fxe\n else:\n sim[-1] = xr\n fsim[-1] = fxr\n else: # fsim[0] <= fxr\n if fxr < fsim[-2]:\n sim[-1] = xr\n fsim[-1] = fxr\n else: # fxr >= fsim[-2]\n # Perform contraction\n if fxr < fsim[-1]:\n xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]\n fxc = func(xc)\n\n if fxc <= fxr:\n sim[-1] = xc\n fsim[-1] = fxc\n else:\n doshrink = 1\n else:\n # Perform an inside contraction\n xcc = (1 - psi) * xbar + psi * sim[-1]\n fxcc = func(xcc)\n\n if fxcc < fsim[-1]:\n sim[-1] = xcc\n fsim[-1] = fxcc\n else:\n doshrink = 1\n\n if doshrink:\n for j in one2np1:\n sim[j] = sim[0] + sigma * (sim[j] - sim[0])\n fsim[j] = func(sim[j])\n\n ind = numpy.argsort(fsim)\n sim = numpy.take(sim, ind, 0)\n fsim = numpy.take(fsim, ind, 0)\n if callback is not None:\n callback(sim[0])\n iterations += 1\n\n x = sim[0]\n fval = numpy.min(fsim)\n warnflag = 0\n\n result = OptimizeResult(fun=fval, nit=iterations, nfev=0,\n status=warnflag, success=(warnflag == 0),\n message=None, x=x, final_simplex=(sim, fsim))\n return result",
"def minimize_scalar(\n func: Callable,\n bracket: Optional[Union[Sequence[float]]] = None,\n bounds: Optional[Sequence[float]] = None,\n args: Union[Tuple, Tuple[Any]] = (),\n method: str = \"brent\",\n tol: Optional[float] = None,\n options: Optional[dict] = None,\n) -> spopt.OptimizeResult:\n\n def f(x, *args):\n # Wrap jax-based function `func` to return a numpy float rather\n # than a jax array of size (1,)\n return func(x, *args).item()\n\n res = spopt.minimize_scalar(\n fun=f,\n bracket=bracket,\n bounds=bounds,\n args=args,\n method=method,\n tol=tol,\n options=options,\n )\n return res",
"def objective_function(x):\n return x * 1 # change this to our actual function",
"def solve_l1(y, A_fun, AT_fun, lambda_l1, reshape_img_fun, show_img_progress=False, alpha=0.2, max_iter=100, solver_tol=1e-6):\n\n\n obj_lss = np.zeros(max_iter)\n x_zs = np.zeros(max_iter)\n u_norms = np.zeros(max_iter)\n times = np.zeros(max_iter)\n\n ATy = AT_fun(y)\n x_shape = ATy.shape\n d = np.prod(x_shape)\n\n def A_cgs_fun(x):\n x = np.reshape(x, x_shape, order='F')\n y = AT_fun(A_fun(x)) + alpha * x\n return vec(y)\n A_cgs = LinearOperator((d,d), matvec=A_cgs_fun, dtype='float')\n\n def compute_p_inv_A(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs, vec(b), x0=vec(z0), tol=1e-3, maxiter=100)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = np.reshape(z, x_shape, order='F')\n return z\n\n\n def A_cgs_fun_init(x):\n x = np.reshape(x, x_shape, order='F')\n y = AT_fun(A_fun(x))\n return vec(y)\n A_cgs_init = LinearOperator((d,d), matvec=A_cgs_fun_init, dtype='float')\n\n def compute_init(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs_init, vec(b), x0=vec(z0), tol=1e-2)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = np.reshape(z, x_shape, order='F')\n return z\n\n # initialize z and u\n z = compute_init(ATy, ATy)\n u = np.zeros(x_shape)\n\n\n plot_normalozer = matplotlib.colors.Normalize(vmin=0.0, vmax=1.0, clip=True)\n\n\n start_time = timeit.default_timer()\n\n for iter in range(max_iter):\n\n # x-update\n net_input = z+u\n Wzu, wbook = wavelet_transform(net_input)\n q = soft_threshold(Wzu, lambda_l1/alpha)\n x = inverse_wavelet_transform(q, wbook, x_shape)\n x = np.reshape(x, x_shape)\n\n # z-update\n b = ATy + alpha * (x - u)\n z = compute_p_inv_A(b, z)\n\n # u-update\n u += z - x;\n\n if show_img_progress == True:\n\n fig = plt.figure('current_sol')\n plt.gcf().clear()\n fig.canvas.set_window_title('iter %d' % iter)\n plt.subplot(1,3,1)\n plt.imshow(reshape_img_fun(np.clip(x, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('x')\n plt.subplot(1,3,2)\n plt.imshow(reshape_img_fun(np.clip(z, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('z')\n plt.subplot(1,3,3)\n plt.imshow(reshape_img_fun(np.clip(net_input, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('netin')\n plt.pause(0.00001)\n\n\n obj_ls = 0.5 * np.sum(np.square(y - A_fun(x)))\n x_z = np.sqrt(np.mean(np.square(x-z)))\n u_norm = np.sqrt(np.mean(np.square(u)))\n\n print('iter = %d: obj_ls = %.3e |x-z| = %.3e u_norm = %.3e' % (iter, obj_ls, x_z, u_norm))\n\n\n obj_lss[iter] = obj_ls\n x_zs[iter] = x_z\n u_norms[iter] = u_norm\n times[iter] = timeit.default_timer() - start_time\n\n if x_z < solver_tol:\n break\n\n infos = {'obj_lss': obj_lss, 'x_zs': x_zs, 'u_norms': u_norms,\n 'times': times, 'alpha':alpha, 'lambda_l1':lambda_l1,\n 'max_iter':max_iter, 'solver_tol':solver_tol}\n\n\n return (x, z, u, infos)",
"def minimize_batch(cost_fn, gradient_fn, v0, tolerance=0.000001):\n\n step_sizes = [100, 10, 1, 0.1, 0.01, 0.001, 0.0001, 0.00001]\n\n v = v0 # set v to initial value\n cost_fn = safe(cost_fn) # safe version of target_fn\n cost = cost_fn(v) # value we're minimizing\n\n while True:\n gradient = gradient_fn(v)\n\n # choose next v using v := v + a * dv\n next_vs = [step(v, gradient, -step_size) for step_size in step_sizes]\n next_v = min(next_vs, key=cost_fn)\n\n # stop if we're \"converging\"\n next_cost = cost_fn(next_v)\n if abs(cost - next_cost) < tolerance:\n return v\n else:\n v, cost = next_v, next_cost",
"def run_qae_optimization(training_states, n_repetitions, exact=no_noise, noisy=gate_error):\n result_list = []\n def proxy(params, training_states, n_repetitions, exact=no_noise, noisy=gate_error):\n \"\"\"Embedded function version\n \"\"\"\n input_list = fix_list(params, all_param_array=all_param, var_param_array=var_param, fixed_vals_array=fixed_vals)\n fidelities = []\n for training_state in training_states:\n fid = cusp_stage2.compute_stage2_cost_function(*input_list, alpha=training_state, n_repetitions=n_repetitions,\n exact=exact, noisy=noisy)\n fidelities.append(fid)\n avg_fid = np.mean(fidelities)\n result_list.append(1-avg_fid)\n print(1-avg_fid)\n return 1. - avg_fid\n\n \n # Initialize parameters\n half_turn_min = 0\n half_turn_max = 2\n init_params = np.random.uniform(low=half_turn_min, high=half_turn_max,\n size=num_param)\n\n # Optimization using Nelder-Mead.\n h2_qae_wrap = lambda params: proxy(params, training_states=training_states,\n n_repetitions=n_repetitions, exact=exact, noisy=noisy)\n \n if noisy:\n maxiter = 60\n else:\n maxiter = None\n \n res = minimize(h2_qae_wrap, init_params, args=(),\n method='Nelder-Mead', tol=None, \n options={'disp': False, 'maxiter': maxiter, 'xatol': 0.001,\n 'return_all': False, 'fatol': 0.001})\n np.savetxt('stage2_data.csv',result_list, delimiter=',')\n return res.x",
"def optimize(self, acqf: MCAcquisitionFunction) -> Tuple[Tensor, Tensor]:\n initial_conditions = self.generate_restart_points(acqf)\n # shape = num_restarts x *acqf.batch_shape x 1 x dim_X\n if self.inequality_constraints is not None:\n org_shape = initial_conditions.shape\n initial_conditions = initial_conditions.reshape(\n self.num_restarts, -1, self.dim_x\n )\n options = {\"maxiter\": int(self.maxiter / 25)}\n with settings.propagate_grads(True):\n solutions, values = gen_candidates_scipy(\n initial_conditions=initial_conditions,\n acquisition_function=acqf,\n lower_bounds=self.bounds[0],\n upper_bounds=self.bounds[1],\n options=options,\n inequality_constraints=self.inequality_constraints,\n )\n self.add_solutions(solutions.view(-1, 1, self.dim_x).detach())\n best_ind = torch.argmax(values, dim=0)\n if self.inequality_constraints is not None:\n solutions = solutions.reshape(org_shape)\n solution = solutions.gather(\n dim=0,\n index=best_ind.view(1, *best_ind.shape, 1, 1).repeat(\n *[1] * (best_ind.dim() + 2), self.dim_x\n ),\n )\n if self.inequality_constraints is not None:\n org_shape = solution.shape\n solution = solution.reshape(1, -1, self.dim_x)\n options = {\"maxiter\": self.maxiter}\n with settings.propagate_grads(True):\n solution, value = gen_candidates_scipy(\n initial_conditions=solution,\n acquisition_function=acqf,\n lower_bounds=self.bounds[0],\n upper_bounds=self.bounds[1],\n options=options,\n inequality_constraints=self.inequality_constraints,\n )\n # This is needed due to nested optimization\n value = acqf(solution)\n if self.inequality_constraints is not None:\n solution = solution.reshape(org_shape)\n return solution, value.reshape(*acqf.batch_shape)",
"def minimize(\n func: Callable,\n x0: Union[Array, BlockArray],\n args: Union[Tuple, Tuple[Any]] = (),\n method: str = \"L-BFGS-B\",\n hess: Optional[Union[Callable, str]] = None,\n hessp: Optional[Callable] = None,\n bounds: Optional[Union[Sequence, spopt.Bounds]] = None,\n constraints: Union[spopt.LinearConstraint, spopt.NonlinearConstraint, dict] = (),\n tol: Optional[float] = None,\n callback: Optional[Callable] = None,\n options: Optional[dict] = None,\n) -> spopt.OptimizeResult:\n\n if snp.util.is_complex_dtype(x0.dtype):\n # scipy minimize function requires real-valued arrays, so\n # we split x0 into a vector with real/imaginary parts stacked\n # and compose `func` with a `_join_real_imag`\n iscomplex = True\n func_ = lambda x: func(_join_real_imag(x))\n x0 = _split_real_imag(x0)\n else:\n iscomplex = False\n func_ = func\n\n x0_shape = x0.shape\n x0_dtype = x0.dtype\n x0 = x0.ravel() # if x0 is a BlockArray it will become a jax array here\n\n # Run the SciPy minimizer\n if method in (\n \"CG, BFGS, Newton-CG, L-BFGS-B, TNC, SLSQP, dogleg, trust-ncg, trust-krylov, \"\n \"trust-exact, trust-constr\"\n ).split(\n \", \"\n ): # uses gradient info\n min_func = _wrap_func_and_grad(func_, x0_shape, x0_dtype)\n jac = True # see scipy.minimize docs\n else: # does not use gradient info\n min_func = _wrap_func(func_, x0_shape, x0_dtype)\n jac = False\n\n res = spopt.OptimizeResult({\"x\": None})\n\n def fun(x0):\n nonlocal res # To use the external res and update side effect\n res = spopt.minimize(\n min_func,\n x0=x0,\n args=args,\n jac=jac,\n method=method,\n options=options,\n ) # Returns OptimizeResult with x0 as ndarray\n return res.x.astype(x0_dtype)\n\n # HCB call with side effects to get the OptimizeResult on the same device it was called\n res.x = hcb.call(\n fun,\n arg=x0,\n result_shape=x0, # From Jax-docs: This can be an object that has .shape and .dtype attributes\n )\n\n # un-vectorize the output array from spopt.minimize\n res.x = snp.reshape(\n res.x, x0_shape\n ) # if x0 was originally a BlockArray then res.x is converted back to one here\n\n if iscomplex:\n res.x = _join_real_imag(res.x)\n\n return res",
"def minimize(self, func, grad, x0, args=()):\n learning_rate = self._learning_rate\n best_x = x = x0\n best_value = func(x, *args)\n iters_without_improve = 0\n\n for iteration in range(self._max_iterations):\n gradient = grad(x, *args)\n\n # If absolute values of all partial derivatives are equal to 0 with specified accuracy, then parameters are\n # close enough to the minimum and there is no need to continue gradient descent.\n if np.abs(gradient).max() <= self._accuracy:\n break\n\n x = x - learning_rate * gradient\n\n # If new values of x haven't lead to decrease of the function value for the specified number of iteration,\n # the x is reverted to its previous best value and the learning rate is reduced\n value = func(x, *args)\n if value > best_value:\n iters_without_improve += 1\n if iters_without_improve >= self._lr_reduce_patience:\n x = best_x\n learning_rate *= self._lr_reduce_factor\n else:\n iters_without_improve = 0\n best_value = value\n best_x = x\n\n return best_x",
"def minimize_cost_function(m = no_of_training_exammples, x, y, theta_0, theta_1):\r\n var_x, var_y = np.array(x), np.array(y)\r\n compute_1, compute_2, compute_3 = (0,0,0)\r\n minimized_solution = []\r\n for i in range(m):\r\n compute_1 = var_y[i] - cost_function(var_x[i],theta_0, theta_1) # finds the computed Euclidean distance between\r\n # x and its corresponding value y\r\n compute_2 = pow(compute_1,2) # square of the distance\r\n minimized_solution.append(compute_2) # appends the corresponding solution to a list minimized_solution\r\n compute_3 = sum(minimized_solution)/ (2 * m) # compute the minimized cost function of x and y by dividing by\r\n # twice the number of training examples\r\n return compute_3",
"def continuation(func, x0, par0, vary_par, par_max, discretisation=lambda x: x, solver=find_root):\n\n # Calculate v0 and v1 to start the process\n v = []\n v.append(np.array((*solver(discretisation(func), x0, args=({vary_par: par0[vary_par]})), par0[vary_par])))\n v.append(np.array((*solver(discretisation(func), v[0][:-1], args=({vary_par: v[0][-1] + 0.1})), v[0][-1] + 0.1)))\n\n while v[-1][-1] < par_max:\n # Calculate the secant\n secant = v[-1] - v[-2]\n\n # Make a prediction \n predict = v[-1] + secant\n\n # Construct g\n g = lambda Vn: [\n *discretisation(func)(Vn[:-1], {vary_par: Vn[-1]}),\n np.dot(Vn - predict, secant),\n ]\n\n # Solve and append to v\n v.append(solver(g, predict))\n\n return v",
"def minimize_scalar(func, *args, **kwargs):\n bounds = kwargs.get('bounds', None)\n\n if bounds is None or len(bounds) != 2:\n msg = (\"To run maximize_scalar or minimize_scalar, \"\n \"you have to provide a `bounds` \"\n \"keyword argument with a sequence \"\n \"of length 2.\")\n raise ValueError(msg)\n\n try:\n func(bounds[0], *args)\n except Exception as e:\n msg = (\"Before running scipy.integrate.minimize_scalar, \"\n \"I tried running the function you provided \"\n \"with the lower bound, \"\n \"and I got the following error:\")\n logger.error(msg)\n raise (e)\n\n underride(kwargs, method='bounded')\n\n res = spo.minimize_scalar(func, args=args, **kwargs)\n\n if not res.success:\n msg = (\"minimize_scalar did not succeed.\"\n \"The message it returned is: \\n\" +\n res.message)\n raise Exception(msg)\n\n return res",
"def minimize(self,x0=None):\n import time\n start_time = time.time()\n tmp,total_par,lik_grad = self.minimize_both_vers(numerical=False,x0=x0)\n if tmp['success']==False:\n print(\"Probably a problem with gradient, do numerical\")\n tmp,total_par,lik_grad = self.minimize_both_vers(x0=tmp['x'],numerical=True)\n print(\"--- %s seconds ---\" % (time.time() - start_time))\n self.lengthscale = total_par[0]\n self.variance = total_par[1]\n self.gstds = total_par[2]\n tmp['fx']=np.array([total_par[0],total_par[1],total_par[2]])\n return tmp,total_par,lik_grad",
"def minimize(fun: Callable[..., float],\n x0: np.ndarray,\n args: Tuple = (),\n method: Optional[str] = None,\n **kwargs) -> scipy.optimize.OptimizeResult:\n if method.lower() in OPTIMIZERS:\n optimizer = OPTIMIZERS[method.lower()]\n return optimizer(fun, x0, args=args, **kwargs)\n return scipy.optimize.minimize(fun, x0, args=args, method=method, **kwargs)",
"def optimise_fn(self, x):\n\n success = self._set_material_parameters(x)\n if not success:\n return self._bad_metric()\n\n # some iterations are repeated so cache the results to avoid unnecessary iterations\n cached_result_key = tuple(x)\n metric_value = self.cached_results.get(cached_result_key)\n\n if metric_value is None:\n print('--> Optimiser: {}'.format(self.material_model))\n\n sim_result = fs.run_simulation(stoma_cfg=self.stoma_cfg,\n from_optimiser=True)\n\n # when the simulation fails we want a non-constant measure for the optimiser to use\n metric_value = sim_result.metric_value if sim_result.success else self._bad_metric()\n\n self.cached_results[cached_result_key] = metric_value\n\n print('--> Optimiser: {} - metric={}'.format(self.material_model, metric_value))\n else:\n print('--> Optimiser: {} - metric={} (cached result)'.format(self.material_model, metric_value))\n\n return metric_value",
"def minimize(self, fun, x_0, bounds=None):\n x = np.copy(x_0).reshape(-1)\n opt = climin.Adadelta(wrt=x, fprime=fun, step_rate=self.step_rate, momentum=self.momentum,\n decay=self.decay, offset=self.offset)\n\n x_list = [x.copy()]\n time_list = [0.]\n start = time.time()\n\n for info in opt:\n i = info['n_iter']\n if i > self.maxiter:\n break\n \n if self.disp and not (i % self.print_freq):\n grad = info['gradient']\n print('Epoch', int(i / self.iter_per_epoch), ':')\n print('\\tx', x.reshape(-1)[:5])\n print(\"\\tGradient norm\", np.linalg.norm(grad))\n \n if not i % int(self.iter_per_epoch):\n x_list.append(x.copy())\n time_list.append(time.time() - start)\n\n stat_dict = {'time_lst': time_list, 'x_lst': x_list, 'fun': None, 'time': time_list[-1], \n 'info': info}\n\n return x.copy(), stat_dict",
"def optimize(self, x0):\n (result,f,d) = fmin_l_bfgs_b(lambda x:self.costFun(x), np.ravel(x0),lambda x: self.gradFun(x))\n print(\"optimization completed with cost: \" + str(f))\n return result.reshape(self.inp_shape)",
"def optimization_step(self):\n \n if \"CSS\" in self.algorithm:\n \n input_dict = {self.x: self.train_inputs[self.minibatch_set,:]}\n \n var_list = [self.x_tilda, self.minibatch_set]\n \n if (self.num_samples > 0) and (not self.mixture):\n \n if ((self.mf_steps > 0) and self.alpha >0) or\\\n self.gibbs_steps > 0: \n \n var_list.append(self.sampler_theta)\n \n elif \"CD\" in self.algorithm:\n \n input_dict = {self.x : self.train_inputs[self.minibatch_set,:]} \n \n var_list = [self.minibatch_set]\n \n var_list.append(self.learning_rate)\n \n if self.use_momentum:\n \n var_list.append(self.momentum)\n \n output_vars = [self.pseudo_cost]\n \n if self.report_p_tilda:\n \n output_vars.append(self.p_tilda)\n \n else:\n \n output_vars.append(theano.shared(0))\n \n opt_step = theano.function(inputs = var_list,\n outputs = output_vars,\n updates = self.updates,\n givens = input_dict,\n on_unused_input='warn')\n \n return opt_step",
"def minimize(self):\n raise NotImplementedError",
"def solve_pcaw(y, A_fun, AT_fun, lambda_l1, reshape_img_fun, head, invhead, mean, show_img_progress=False, alpha=0.2, max_iter=100, solver_tol=1e-6):\n\n\n obj_lss = np.zeros(max_iter)\n x_zs = np.zeros(max_iter)\n u_norms = np.zeros(max_iter)\n times = np.zeros(max_iter)\n\n ATy = AT_fun(y)\n x_shape = ATy.shape\n d = np.prod(x_shape)\n \n def vec(x):\n return tf.reshape(x, [-1])\n\n def A_cgs_fun(x):\n x = tf.reshape(x,x_shape)\n y = AT_fun(A_fun(x)) + alpha * x\n return vec(y)\n A_cgs = LinearOperator((d,d), matvec=A_cgs_fun, dtype='float')\n\n def compute_p_inv_A(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs, vec(b), x0=vec(z0), tol=1e-3, maxiter=100)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = tf.reshape(z, x_shape)\n return z\n\n\n def A_cgs_fun_init(x):\n x = tf.reshape(x, x_shape)\n y = AT_fun(A_fun(x))\n return vec(y)\n A_cgs_init = LinearOperator((d,d), matvec=A_cgs_fun_init, dtype='float')\n\n def compute_init(b, z0):\n (z,info) = sp.sparse.linalg.cgs(A_cgs_init, vec(b), x0=vec(z0), tol=1e-2)\n if info > 0:\n print('cgs convergence to tolerance not achieved')\n elif info <0:\n print('cgs gets illegal input or breakdown')\n z = tf.reshape(z,x_shape)\n return z\n\n # initialize z and u\n z = tf.reshape(mean,x_shape)\n u = np.zeros(x_shape)\n\n plot_normalozer = matplotlib.colors.Normalize(vmin=0.0, vmax=1.0, clip=True)\n\n\n start_time = timeit.default_timer()\n\n for iter in range(max_iter):\n\n # x-update\n net_input = z+u\n \n Wzu = head([net_input])\n q = tfp.math.soft_threshold(Wzu, lambda_l1/alpha)\n x = invhead(q)[0]\n\n # z-update\n b = ATy + alpha * (x - u)\n z = compute_p_inv_A(b, z)\n\n # u-update\n u += z - x;\n\n if show_img_progress:\n\n fig = plt.figure('current_sol')\n plt.gcf().clear()\n fig.canvas.set_window_title('iter %d' % iter)\n plt.subplot(1,3,1)\n plt.imshow(reshape_img_fun(np.clip(x, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('x')\n plt.subplot(1,3,2)\n plt.imshow(reshape_img_fun(np.clip(z, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('z')\n plt.subplot(1,3,3)\n plt.imshow(reshape_img_fun(np.clip(net_input, 0.0, 1.0)), interpolation='nearest', norm=plot_normalozer)\n plt.title('netin')\n plt.pause(0.00001)\n\n\n obj_ls = 0.5 * np.sum(np.square(y - A_fun(x)))\n x_z = np.sqrt(np.mean(np.square(x-z)))\n u_norm = np.sqrt(np.mean(np.square(u)))\n\n obj_lss[iter] = obj_ls\n x_zs[iter] = x_z\n u_norms[iter] = u_norm\n times[iter] = timeit.default_timer() - start_time\n\n if x_z < solver_tol:\n break\n\n infos = {'obj_lss': obj_lss, 'x_zs': x_zs, 'u_norms': u_norms,\n 'times': times, 'alpha':alpha, 'lambda_l1':lambda_l1,\n 'max_iter':max_iter, 'solver_tol':solver_tol}\n\n\n return (x, z, u, infos)",
"def _optimise_acquisition(acq_fn, acq_optimiser, anc_data):\n return acq_optimiser(acq_fn, anc_data.max_evals)",
"def run(self, C, p0 = None):\n global algorithm \n algorithm = AdaptiveMM(self.g, C, p0 = p0, lambda0 = 2000)\n solve()",
"def objective_function(params):\n cirq_circuit = variational_state_evolve(params)\n mean_value = self.expectation(\n all_qubits_in_circuit, cirq_circuit, hamiltonian)\n self._current_expectation = mean_value\n return mean_value",
"def function_to_minimize(x):\n return math.sin(x[0]) * math.cos(x[1]) + math.cos(x[0] + x[1]) + random.uniform(-0.02, 0.02)",
"def cem_optimize(init_mean, cost_func, init_variance=1., samples=20, precision=1.0e-3,\n steps=20, nelite=5, contraint_mean=None, constraint_variance=(-999999, 999999), device=\"cpu\"):\n mean = init_mean\n variance = torch.tensor([init_variance], device=device).repeat(mean.shape).float()\n # print(mean.type(), variance.type())\n step = 1\n diff = 9999999\n while diff > precision and step < steps:\n dists = [distributions.MultivariateNormal(m, torch.diagflat(v+precision/10), device=device) for m, v in zip(mean, variance)]\n candidates = [d.sample_n(samples) for d in dists]\n candidates = torch.stack(candidates, dim=1)\n costs = cost_func(candidates)\n # we sort descending because we want a maximum reward\n sorted_idx = torch.argsort(costs, dim=0, descending=True)\n candidates = candidates[sorted_idx]\n elite = candidates[:nelite]\n new_mean = torch.mean(elite, dim=0)\n variance = torch.var(elite, dim=0)\n diff = torch.mean(torch.abs(mean - new_mean))\n mean = new_mean\n # print(mean, variance)\n if not contraint_mean is None:\n mean = clip(mean, contraint_mean[0], contraint_mean[1])\n step += 1\n\n return mean"
] | [
"0.58401585",
"0.5788632",
"0.57425696",
"0.57303965",
"0.56360745",
"0.56013083",
"0.55810285",
"0.556376",
"0.55437535",
"0.5528637",
"0.55246705",
"0.5506914",
"0.550672",
"0.550141",
"0.54960895",
"0.54928195",
"0.54925305",
"0.5491091",
"0.5490144",
"0.548972",
"0.5489115",
"0.5477563",
"0.5469459",
"0.5455893",
"0.54365116",
"0.54273653",
"0.5393373",
"0.53857017",
"0.5364743",
"0.53292185"
] | 0.6479807 | 0 |
Sorts all store entries, keeping only the 90% best to make room for new ones. | def sort(self): # sort all entries to make room for new ones, determine best and worst
ns = self.num_stored.value
ys = np.asarray(self.ys[:ns])
yi = ys.argsort()
sortRuns = []
for i in range(len(yi)):
y = ys[yi[i]]
xs = self.get_x(yi[i])
sortRuns.append((y, xs))
numStored = min(len(sortRuns),int(0.9*self.capacity)) # keep 90% best
for i in range(numStored):
self.replace(i, sortRuns[i][0], sortRuns[i][1])
self.num_sorted.value = numStored
self.num_stored.value = numStored
return numStored | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sort_and_reduce(self):\n self.data = sorted(self.data, key=lambda item: item.pubDate)\n if len(self.data) > MAX_SIZE:\n self.data = self.data[-MAX_SIZE:]",
"def volume_sort(self):\n self.jobs_sorted = sorted(\n self.jobs,\n key=lambda job: (job['height'], job['width'] * job['height']),\n # key=lambda job: job['width'] * job['height'],\n reverse=True)",
"def sortby(self):\n ...",
"def sort_by_default(self):\n self.data.sort()",
"def sort(self, quant=None):\n if quant is None: # sort bei weight\n self.__sortlist = [key for key, values in sorted(self.__quantile.items(), key=lambda items: sum((10^quantille * count for quantille, count in enumerate(items[1].values()))))]\n elif isinstance(quant, int):\n self.__sortlist = [key for key, values in sorted(self.__quantile.items(), key=lambda items: items[1][quant])]",
"def sort_animals(all_animals):\n def get_key(a):\n return a.row + 0.001 * a.col\n\n all_animals.sort(key=get_key)",
"def _sort_compounds(self):\n self.sorted_molecules = sorted(self.values(), key=operator.attrgetter('criterion'))",
"def sort_results(self):\n pass",
"def sort(self):\r\n self.candidates.sort(key=self.sortFitness)\r\n return",
"def _sort_phot(self, verbose=False):\n if hasattr(self, \"data\") and hasattr(self, \"data_filters\"):\n ## This looks fugly.\n newkeys = np.array([i for i in self.data_filters.keys()])[np.argsort([self.data_filters[i].lambda_effective.value for i in self.data_filters])]\n\n sorted_data = OrderedDict()\n sorted_data_filters = OrderedDict()\n\n for newkey in newkeys:\n\n if verbose: print(newkey)\n\n sorted_data[newkey] = self.data[newkey]\n sorted_data_filters[newkey] = self.data_filters[newkey]\n\n self.data = sorted_data\n self.data_filters = sorted_data_filters\n\n else:\n warnings.warn(\"Doesn't seem to be any data here (empty self.data)\")\n pass",
"def sort_album(self):\n self.sort('album')",
"def sort_entries(self):\n if not len(self.student_list):\n print('There is no contents to sort')\n return\n\n opt = self.input_options(['n', 'a', 'g'], 1, 'Sort by name(n) or average(a) or grade(g)')\n if opt.upper() == 'N':\n self.print_dataframe(self.student_list.sort_values(by=['name', 'average'], ascending=[True,False]))\n elif opt.upper() == 'A' or opt.upper() == 'G':\n self.print_dataframe(self.student_list.sort_values(by=['average', 'name'], ascending=[False,True]))",
"def sort_chunks(self):\n\n first = True\n\n all_chunks = dict(self.dirty_chunk_cache)\n all_chunks.update(self.chunk_cache)\n self.chunk_cache.clear()\n self.dirty_chunk_cache.clear()\n for coords, chunk in all_chunks.iteritems():\n if chunk.dirty:\n if first:\n first = False\n self.save_chunk(chunk)\n self.chunk_cache[coords] = chunk\n else:\n self.dirty_chunk_cache[coords] = chunk\n else:\n self.chunk_cache[coords] = chunk",
"def organizeM():\n scores = []\n today_listM = strainer('name', 'sort', 'event')\n today_listM.extend(strainer('name', 'sort', 'todo'))\n data = list(today_listM)\n while len(data) != 0:\n number = lowest_number(data)\n scores.append(number)\n data.remove(number)\n return scores",
"def sorted_data():\n stock_data = scrape_data()\n filtered_data = list(filter(sort_func, stock_data))\n return filtered_data",
"def get_primary_buckets(store):\n ss = get_buckets_keys_count(store)\n bucket_list = ss.items()\n bucket_list = [x for x in bucket_list if x[1] > 0]\n if not bucket_list:\n return None\n bucket_list.sort(lambda a, b: cmp(a[1], b[1]), reverse=True)\n result = [bucket_list[0]]\n for i in bucket_list[1:]:\n if result[-1][1] / i[1] >= 2:\n break\n result.append(i)\n return [x[0] for x in result]",
"def sort_key(self):\n ...",
"def sort(self):\n def get_fval(res):\n return res.fval if not np.isnan(res.fval) else np.inf\n\n self.list = sorted(self.list, key=get_fval)",
"def sort(self):\n self.cards.sort()",
"def sort(self):\n self.cards.sort()",
"def sort(self):\r\n\t\treturn sorted(self.sample)",
"def sort(self):\n\t\tself.servers = sorted(self.servers, key=lambda s: s.load)\n\t\tself.servers = sorted(self.servers, key=lambda s: s.distance_class)\n\t\tself.servers = sorted(self.servers, key=lambda s: s.country == self.locale_info.country, reverse=True)",
"def sort(self):\n self.fragment_list.sort()",
"def _sorted_moves_per_poketype(self):\n with pd.HDFStore(settings.store_filepath, mode='r') as store:\n poketypes = store['poketypes']\n # move_categories = store['move_categories']\n # poketype_chart = store['poketype_chart']\n # pokedex = store['pokedex']\n attackdex = store['attackdex']\n # learnsets = store['learnsets']\n\n # compute and set the effective power\n effective_power = attackdex['power'] * attackdex['accuracy'] / 100 \\\n * attackdex['repeat'] / attackdex['turns_used']\n\n attackdex['effective_power'] = effective_power\n\n sorted_moves = {}\n\n for poketype in poketypes['poketype']:\n subdex = attackdex[attackdex['poketype'] == poketype]\n\n subdex = subdex.sort_values(by=['effective_power'], ascending=False)\n\n sorted_moves[poketype] = subdex\n\n return sorted_moves",
"def find_and_sort_saves(self, number_of_saves=1000):\n\n cursor = self._conn.cursor()\n sql = \"SELECT * FROM saves ORDER BY progress DESC LIMIT ?\"\n cursor.execute(sql, (number_of_saves,))\n rows = cursor.fetchall()\n save_list = []\n for row in rows:\n save = Save(row[0], row[1], row[2], row[3])\n save_list.append(save)\n return save_list",
"def oldsortslice(self):\n ...",
"def get_all_sorted(self):\n self.sort_and_reduce()\n return self.data",
"def _sort(self):\n self.population.sort()\n self.population.reverse()",
"def resort(self):\n self.items.sort(key=lambda node: node.path_weight, reverse=True)",
"def bucket_sort(data):\r\n\r\n # initialize 10 buckets\r\n buckets = []\r\n for i in range(0, 10):\r\n buckets.append([])\r\n\r\n start_time = time.time()\r\n\r\n # put elements into their proper buckets\r\n for d in data:\r\n buckets[math.floor(d * 10)].append(d)\r\n\r\n # sort each bucket using insertion sort\r\n for i in range(0, 10):\r\n insertion_sort(buckets[i])\r\n\r\n # concatenate the buckets into one list\r\n result = []\r\n for b in buckets:\r\n for bb in b:\r\n result.append(bb)\r\n \r\n return time.time() - start_time"
] | [
"0.65042984",
"0.6291601",
"0.5924032",
"0.58100486",
"0.56878287",
"0.56756747",
"0.5670386",
"0.5621025",
"0.55958736",
"0.55391216",
"0.5532553",
"0.54889405",
"0.5487602",
"0.5483785",
"0.5481501",
"0.5474903",
"0.54568",
"0.54499155",
"0.5449535",
"0.5449535",
"0.5441329",
"0.5433494",
"0.54264754",
"0.5426299",
"0.5420908",
"0.5419809",
"0.54057074",
"0.5399989",
"0.53957546",
"0.53846323"
] | 0.690427 | 0 |
Calculates hash value of the state and its children. | def hash(self, hashed_states=None):
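# hashed_states records states already visited in this traversal, so shared or cyclic children reuse their stored hash_value instead of being re-hashed recursively.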
if hashed_states is None:
hashed_states = []
hashed_states.append(self)
result = '1' if self.final else '0'
result += str(len(self.children))
for symbol in self.children:
child = self.children[symbol]
if child in hashed_states:
result += str(symbol) + child.hash_value
else:
result += str(symbol) + self.children[symbol].hash(hashed_states)
self.hash_value = result
return result | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _hash(self, value, get_val, get_child):\n hasher = getattr(hashlib, self.hash_func)\n children = get_child(value)\n\n # If leaf node\n if len(children) < 1:\n return hasher(get_val(value)).hexdigest()\n\n h = hasher()\n for child in children:\n # Tree is created recursively\n n = Node(child, get_val, get_child,\n self.hash_func)\n self.c.append(n)\n h.update(n.h.encode(\"utf-8\"))\n return h.hexdigest()",
"def __hash__(self) -> int:\n return hash(\n (self._final_states, self._recoil_state, self._parent_recoil_state)\n )",
"def __hash__(self, reinit=False):\n if not self.hash_value is None and not reinit:\n return self.hash_value\n elif isinstance(self, Leaf):\n self.hash_value = Hash.leaf_hash(self)\n return self.hash_value\n else:\n self.hash_value = Hash.node_hash(self)\n return self.hash_value",
"def __hash__(self):\n return hash((self.name, self.state))",
"def HashValue(self) -> _n_0_t_3[_n_0_t_9]:",
"def internal_hash(self): \n return hash(tuple(sorted(self.hashtriples())))",
"def hash_state(self):\n return hash(self.board.tostring())",
"def compute_hash(self):\n block_string = json.dumps(self.__dict__, sort_keys=True)\n return sha256(block_string.encode()).hexdigest()",
"def compute_hash(self):\n block_string = json.dumps(self.__dict__, sort_keys=True)\n return sha256(block_string.encode()).hexdigest()",
"def get_hash(self) -> str:\n if self.call_hash:\n # Derived state from a call_node.\n return hash_struct([\"Handle\", self.fullname, \"call_hash\", self.key, self.call_hash])\n else:\n # Initial state.\n return hash_struct([\"Handle\", self.fullname, \"init\", self.key, self.args, self.kwargs])",
"def __hash__(self):\n hash_value = hash(self.tag)\n for attribute in self.attributes:\n hash_value += hash(attribute)\n return hash_value",
"def IsomorphicHash(self) -> int:\n # The hash is based on the nodes and edges, not their attributes.\n return hash((tuple(self.nodes), tuple(self.edges)))",
"def current_hash(self):",
"def __hash__(self) -> int:\n # Turn the steps into a set first, so that permuting the steps doesn't change the equality.\n self.simplify()\n return hash(tuple(sorted(self.steps_set)))",
"def hash(self):\n return self.hash_by_id(self.id)",
"def get_hash(self):\r\n return",
"def __hash__(self):\n # These entities are not cached, so we wont use their `id` if applicable.\n hash_value = 0\n \n # bot\n hash_value ^= hash(self.bot)\n \n # description\n description = self.description\n if (description is not None):\n hash_value ^= hash(description)\n \n # icon\n hash_value ^= hash(self.icon)\n \n # id\n hash_value ^= self.id\n \n # name\n name = self.name\n if (description is None) or (description != name):\n hash_value ^= hash(name)\n \n return hash_value",
"def __hash__(self):\n return hash(self.hash)",
"def __hash__(self):\n hash_value = 0\n \n # avatar\n hash_value ^= hash(self.avatar)\n \n # boosts_since\n boosts_since = self.boosts_since\n if (boosts_since is not None):\n hash_value ^= hash(boosts_since)\n \n # flags\n hash_value ^= self.flags\n \n # joined_at\n joined_at = self.joined_at\n if (joined_at is not None):\n hash_value ^= hash(joined_at)\n \n # nick\n nick = self.nick\n if (nick is not None):\n hash_value ^= hash(nick)\n \n # pending\n hash_value ^= self.pending\n \n # role_ids\n role_ids = self.role_ids\n if (role_ids is not None):\n hash_value ^= len(role_ids) << 4\n for role_id in role_ids:\n hash_value ^= role_id\n \n # timed_out_until\n timed_out_until = self.timed_out_until\n if (timed_out_until is not None):\n hash_value ^= hash(timed_out_until)\n \n return hash_value",
"def get_hash(self):\n if self.contributes:\n return hash_from_values(self.iter_values())",
"def __hash__(self) -> int:\n # The hash is based on the graph topology and node and edge attributes.\n return hash(\n (\n tuple(self.nodes),\n tuple(self.edges),\n tuple([str(self.nodes[n]) for n in self.nodes]),\n tuple([str(self.edges[i, j]) for i, j in self.edges]),\n )\n )",
"def __Hash(self):\n return self._Hash()",
"def tree_hash(hashes):\n while len(hashes) > 1:\n hashes = [hashlib.sha256(\"\".join(h[i:i+1])).digest() for i in range(i,2)]\n return hashes[0]",
"def __hash__(self):\n hash_value = 0\n \n # approximate_online_count\n hash_value ^= self.approximate_online_count\n \n # approximate_user_count\n hash_value ^= self.approximate_user_count << 12\n \n # description\n description = self.description\n if (description is not None):\n hash_value ^= hash(description)\n \n # discovery_splash\n hash_value ^= hash(self.discovery_splash)\n \n # emojis\n emojis = self.emojis\n hash_value ^= len(emojis) << 1\n for emoji in emojis.values():\n hash_value ^= hash(emoji)\n \n # features\n features = self.features\n hash_value ^= len(features) << 5\n for feature in features:\n hash_value ^= hash(feature)\n \n # icon\n hash_value ^= hash(self.icon)\n \n # id\n hash_value ^= self.id\n \n # invite_splash\n hash_value ^= hash(self.invite_splash)\n \n # stickers\n stickers = self.stickers\n hash_value ^= len(stickers) << 9\n for sticker in stickers.values():\n hash_value ^= hash(sticker)\n \n # name\n name = self.name\n if (description is None) or (description != name):\n hash_value ^= hash(name)\n \n return hash_value",
"def __hash__(self):\n if getattr(self, \"_immutable\", False):\n return hash((tuple(self.states()), tuple(self.transitions())))\n raise TypeError(\"Finite state machines are mutable, \" \\\n \"and thus not hashable.\")",
"def __hash__(self):\n hash_value = 0\n \n # required\n hash_value ^= self.required << 14\n \n # title\n hash_value ^= hash(self.title)\n \n # type\n hash_value ^= hash(self.type)\n \n # values\n values = self.values\n if (values is not None):\n hash_value ^= len(values)\n \n for value in values:\n hash_value ^= hash(value)\n \n return hash_value",
"def get_hash(self):\n return self.__hash",
"def __hash__(self):\n return hash(self.base_location) ^ hash(self.fold_path) ^ hash(self.field)",
"def __hash__(self):\n return hash(self.value)",
"def __hash__(self):\n return hash(tuple(self._sub_effects))"
] | [
"0.7342968",
"0.72909665",
"0.7124429",
"0.70509243",
"0.6966833",
"0.6820188",
"0.6742874",
"0.67155385",
"0.67155385",
"0.6707483",
"0.66744095",
"0.6658651",
"0.65652084",
"0.6561096",
"0.65602064",
"0.65579015",
"0.6549498",
"0.65474933",
"0.65199417",
"0.65194243",
"0.6502622",
"0.64912695",
"0.6461444",
"0.64578855",
"0.645696",
"0.6443671",
"0.64415956",
"0.6436461",
"0.6433601",
"0.64316785"
] | 0.7933615 | 0 |
Copies state and its children. Ignores parents. | def copy(self):
new_state = State(self.final)
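# Rebuild the subtree: each child is copied recursively and re-attached under the same transition symbol.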
for symbol in self.children:
child = self.children[symbol]
new_state.add_child(child.copy(), symbol)
return new_state | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def copy(self):\n copy = Node(self.node_data, self.tree_type, self.parent)\n if self.children:\n copy.children = [c.copy() for c in self.children]\n for c in copy.children:\n c.parent = copy\n return copy",
"def clone_state(self):\n return self.strategy['state_handler'].clone(self.state)",
"def _save_state_as_orig(self):\n self._orig = None\n self._orig = deepcopy(self)",
"def __copy__(self) -> 'Tree':\n return non_recursive_tree_copy(self)",
"def clone_full_state(self):\n state_ref = self.ale.cloneSystemState()\n state = self.ale.encodeState(state_ref)\n self.ale.deleteState(state_ref)\n return state",
"def copy_children(self):\n\n # Create a group\n self.fileh.create_group('/', 'agroup')\n # Create several objects there\n for i in range(10):\n # Create a new array\n self.fileh.create_array('/agroup', 'array' + str(i), self.a1)\n # Excercise copy_children\n for i in range(self.nobjects):\n # Create another group for destination\n self.fileh.create_group('/', 'anothergroup' + str(i))\n # Copy children from /agroup to /anothergroup+i\n self.fileh.copy_children('/agroup', '/anothergroup' + str(i))\n # Put a mark\n self.fileh.mark()\n # Unwind all marks sequentially\n for i in range(self.niter):\n t1 = clock()\n for i in range(self.nobjects):\n self.fileh.undo()\n if verbose:\n print(\"u\", end=' ')\n if verbose:\n print()\n undo = clock() - t1\n # Rewind all marks sequentially\n t1 = clock()\n for i in range(self.nobjects):\n self.fileh.redo()\n if verbose:\n print(\"r\", end=' ')\n if verbose:\n print()\n redo = clock() - t1\n\n print((\"Time for Undo, Redo (copy_children):\", undo, \"s, \",\n redo, \"s\"))",
"def __setstate__(self, state: Dict[str, Any]) -> None:\n self.__dict__ = state.copy()\n # Once state is ingested - repopulate, NOT recursing.\n # Child segments will do it for themselves on unpickling.\n self.set_as_parent(recurse=False)",
"def clone_state(self):\n state_ref = self.ale.cloneState()\n state = self.ale.encodeState(state_ref)\n self.ale.deleteState(state_ref)\n return state",
"def copy(self):\n return State([r[:] for r in self.values], empty_loc=self.empty_loc)",
"def deep_copy(self):\n return self.__class__(self.inputs, self.outputs, self.middle)",
"def _copy_states_(self, fromstate, index=None, deep=False):\n # Bad check... doesn't generalize well...\n # if not issubclass(fromstate.__class__, self.__class__):\n # raise ValueError, \\\n # \"Class %s is not subclass of %s, \" % \\\n # (fromstate.__class__, self.__class__) + \\\n # \"thus not eligible for _copy_states_\"\n # TODO: FOR NOW NO TEST! But this beast needs to be fixed...\n operation = { True: copy.deepcopy,\n False: copy.copy }[deep]\n\n if isinstance(fromstate, ClassWithCollections):\n fromstate = fromstate.states\n\n #self.enabled = fromstate.enabled\n _items, from_items = self._items, fromstate._items\n if index is None:\n # copy all set ones\n for name in fromstate.whichSet():#self.names:\n #if fromstate.isKnown(name):\n _items[name] = operation(from_items[name])\n else:\n isKnown = fromstate.isKnown\n for name in index:\n if isKnown(name):\n _items[name] = operation(from_items[name])",
"def clone_as_root(self) :\n clone = deepcopy(self)\n clone.parent = None\n clone.path_length = 0\n clone.previous_action = None\n return clone",
"def copyState(self, that : 'AbstractRecursiveFilter') -> None:\n self.n = that.n;\n self.t0 = that.t0;\n self.t = that.t;\n self.tau = that.tau;\n self.D = that.D;\n self.Z = that.Z;",
"def copy_from(self, other):\n assert not self.is_final\n if self.parent is not None:\n assert other.parent is not None\n self.parent.copy_from(other.parent)\n self.isolated_names = copy.copy(other.isolated_names)\n self.modified = copy.copy(other.modified)\n self.read = copy.copy(other.read)\n self.deleted = copy.copy(other.deleted)\n self.bound = copy.copy(other.bound)\n self.annotations = copy.copy(other.annotations)\n self.params = copy.copy(other.params)",
"def copy(self):\n node_new = Node(self.state.copy(), self.parent, self.children.copy(), self.RRT, self.path_length)\n node_new.vs = self.vs.copy()\n node_new.RRT = self.RRT\n node_new.observed = self.observed\n node_new.observation_node = self.observation_node\n node_new.observation_area = self.observation_area\n\n return node_new",
"def copy_from(self, source):\n\t\tNamedObject.copy_from(self, copy_from)\n\n\t\tself._parent = copy_from.parent\n\t\tself._next = copy_from.next\n\t\tself._prev = copy_from.prev\n\t\tself._first = copy_from.first\n\t\tself._last = copy_from.last\n\t\tself._children = copy_from.children",
"def copy(self):\n state = State(self.state_object, self.compute_dag)\n state.stage_id_map = self.stage_id_map.copy()\n return state",
"def copy(self):\n pass",
"def copy(self):\n pass",
"def copy(self):\n pass",
"def repackage_state(self, state):\n state['hxs'] = state['hxs'].detach()\n state['cxs'] = state['cxs'].detach()\n return state",
"def copy(self):",
"def copy(self):\n new_tree = Tree(support_label=self._support_label, remove_name_quotes=self._remove_name_quotes)\n new_tree.name = self.name\n new_tree._is_cladogram = self._is_cladogram\n new_tree._cladogram_branch = self._cladogram_branch\n new_tree._node_id_template = self._node_id_template\n new_tree._node_ids = self._node_ids.copy()\n new_tree._node_id_index = self._node_id_index\n new_tree.root = self.root.copy(new_tree)\n self.copy_nodes(self.root, new_tree.root, new_tree)\n new_tree.process_tree_nodes()\n return new_tree",
"def __getstate__(self) -> Dict[str, Any]:\n s = self.__dict__.copy()\n # Kill the parent ref. It won't pickle well.\n s[\"_parent\"] = None\n return s",
"def clone(self):",
"def test_after_creation_copy():\n builder = TreeBuilder()\n builder.create_root(0)\n builder.add_child(2, move=True)\n builder.add_child(13)\n builder.move_to_parent()\n builder.add_child(7)\n\n t1 = builder.build()\n\n builder.move_to_root()\n builder.set_data(4)\n builder.add_child(3, move=True)\n builder.add_child(15)\n\n t2 = builder.build()\n\n assert t2 is not t1\n assert t2[0] is not t1[0]\n assert t2[0][0] is not t1[0][0]\n assert t2[1] is not t1[1]\n\n assert t2.data == 4\n assert t2[0].data == 2\n assert t2[0][0].data == 13\n assert t2[1].data == 7\n assert t2[2].data == 3\n assert t2[2][0].data == 15\n\n assert len(t2) == 3\n assert len(t2[0]) == 1\n assert len(t2[1]) == 0\n assert len(t2[2]) == 1",
"def act(state: Tree) -> None:\n cur_state = state.value\n state.children = [Tree(cur_state.make_move(m)) for m in\n cur_state.get_possible_moves()]",
"def __copy__(self):\n #new = MCTS(copy=True) # don't run _predict() twice\n new = MCTS(self.env, copy=True) # don't set pi and Q twice\n new.env = self.env.__copy__()\n # can't use __dict__.update() without effecting env __copy__()\n # in theory, you don't need to copy the env. just use one copy for simulating, and restore it to root\n # since _Q() evaluates the env.done() of children, you need self.done = env.done() in __init__()\n # same for env.winner\n new.pi = []\n new. Q = 0\n new.net = self.net\n new.t = self.t\n new.expl = self.expl\n new.children = []\n new.parent = None\n return new",
"def copy(self):\n return super().copy()",
"def clone(self):\n\n clone = self.__class__.__new__(self.__class__)\n clone._graph_state = self._graph_state\n clone._molecule_state = self._molecule_state\n return clone"
] | [
"0.66461754",
"0.66092765",
"0.64861435",
"0.64813745",
"0.64767754",
"0.64714724",
"0.6470946",
"0.6437448",
"0.64342207",
"0.6415057",
"0.64008224",
"0.6400438",
"0.6381998",
"0.63714826",
"0.6197291",
"0.61630076",
"0.61239284",
"0.61002094",
"0.61002094",
"0.61002094",
"0.6087869",
"0.60732865",
"0.60604906",
"0.6048482",
"0.6041445",
"0.5972078",
"0.59655374",
"0.59391785",
"0.5915868",
"0.5909403"
] | 0.7628509 | 0 |
Login to APICEM northbound APIs in shell. | def login():
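# Open a northbound API session; on an authentication, HTTP, or connection failure, report the error and exit.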
try:
client = NbClientManager(
server=APIC,
username=APIC_USER,
password=APIC_PASSWORD,
connect=True)
return client
except requests.exceptions.HTTPError as exc_info:
if exc_info.response.status_code == 401:
print('Authentication Failed. Please provide valid username/password.')
else:
print('HTTP Status Code {code}. Reason: {reason}'.format(
code=exc_info.response.status_code,
reason=exc_info.response.reason))
exit(1)
except requests.exceptions.ConnectionError:
print('Connection aborted. Please check if the host {host} is available.'.format(host=APIC))
exit(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def login(self):\n return self.client.login(username='Georgie', password='12345678')",
"def _login(self):\n data = self._send(self.nc_request(action=\"login\", parameters={\"apipassword\": self._api_password}))\n\n self._session_id = data[\"apisessionid\"]\n\n logging.info(f\"logged in successfully with session id {self._session_id}\")",
"def connect(api, username, password):\n\treturn api.login(username, password)",
"def login_to_apic(self):\n session = Session(URL, LOGIN, PASSWORD)\n resp = session.login()\n self.assertTrue(resp.ok)\n return session",
"def login(self):\n self._client.clear_credentials()\n self._client.get('/v1/whoami')",
"def loginAsManager(self):\n self.browser.open('http://nohost/plone/')\n self.browser.getLink('Log in').click()\n self.browser.getControl('Login Name').value = 'root'\n self.browser.getControl('Password').value = 'secret'\n self.browser.getControl('Log in').click()",
"def _login_vapi(self):\n session = requests.Session()\n session.verify = self.validate_certs\n if not self.validate_certs:\n # Disable warning shown at stdout\n requests.packages.urllib3.disable_warnings()\n\n print(\"logging in\")\n client = create_vsphere_client(server=self.hostname,\n username=self.username,\n password=self.password,\n session=session)\n if client is None:\n raise Exception(\"Failed to login to %s using %s\" %\n (self.hostname, self.username))\n return client",
"def do_login(self):\n self.content = self._login()\n if self.with_tags:\n self.rest_content = self._login_vapi()",
"def login(self):\n r = self._login_token()",
"def __login(self):\n loginResult = self.sfdc.login(username=self.__username, \n password=self.__password)\n self.userId = loginResult.get('userId')\n \n \n self.userInfo = loginResult.get('userInfo', {}) \n # Switch the binding to the returned endpoint\n for method in self.sfdc.methods.itervalues():\n method.location = loginResult.get('serverUrl')\n continue\n\n # set the session ID in the SOAP header\n self.buildSoapHdr('SessionHeader', 'sessionId', \n loginResult.get('sessionId'))\n self.setSoapHdr()\n\n return",
"def login(self):\n\t\treturn",
"def login():",
"def login():",
"def login(self):",
"def authenticate(self):\n self.login(closet.app.config['USERNAME'],\n closet.app.config['PASSWORD'])",
"def login(self):\n login_form = {\"kid\": \"\",\n \"uni\": self.server,\n \"login\": self.username,\n \"pass\": self.password}\n url = \"https://%s.ogame.gameforge.com/main/login\" % self.country_code\n result = self.session.post(url, data=login_form)",
"def login(self):\n req_url = 'http://{}:{}/api/'.format(self.server, self.port)\n return requests.head(\n req_url,\n auth=requests.auth.HTTPBasicAuth(self.username, self.password)\n )",
"def login(api_key, secret_key):\n update_session(\"X-GEMINI-APIKEY\", api_key)\n set_secret_key(secret_key.encode())\n set_login_state(True)",
"def NavigateGuestLogin(self):\n self._ExecuteOobeApi('Oobe.guestLoginForTesting')",
"def login(self):\n try:\n self._service_instance = connect.SmartConnect(host=self.address,\n user=self.username,\n pwd=self.password,\n port=self.port,\n sslContext=self.sslContext)\n #connectionPoolTimeout=self.timeout)\n except Exception as err:\n raise err",
"def do_login(self, backend, user):",
"def login(self) -> None:\n\n sObj = Splitwise(self.consumer_key, self.consumer_secret)\n self.url, self.login_secret = sObj.getAuthorizeURL()\n print(self.url)\n self.oauth_token = input('token: ')\n self.oauth_verifier = input('verifier: ')",
"def fusion_api_login_appliance(self, host, creds, headers=None):\n # logger._log_to_console_and_log_file(\"Logging into appliance\")\n return self.loginsession.login(host, creds, headers)",
"def login(self):\n # Login to Azure\n login_cmd = [\"az\", \"login\"]\n\n if self.identity:\n login_cmd.append(\"--identity\")\n logging.info(\"Login to Azure with Managed System Identity\")\n else:\n logging.info(\"Login to Azure\")\n\n result = run_cmd(login_cmd)\n\n if result[\"returncode\"] != 0:\n logging.error(result[\"err_msg\"])\n raise AzureError(result[\"err_msg\"])\n\n logging.info(\"Successfully logged into Azure\")\n\n # Login to ACR\n logging.info(\"Login to ACR: %s\" % self.name)\n acr_cmd = [\"az\", \"acr\", \"login\", \"-n\", self.name]\n\n result = run_cmd(acr_cmd)\n\n if \"login succeeded\" in result[\"output\"].lower():\n logging.info(\"Successfully logged into ACR\")\n else:\n logging.error(result[\"err_msg\"])\n raise AzureError(result[\"err_msg\"])",
"def login(self):\n self.client.login(username=self.user.username, password='test')",
"def login(self):\n req = BFGlobalFactory.create(\"ns1:LoginReq\")\n req.username = self.username\n req.password = self.password\n req.productId = self.product_id\n req.vendorSoftwareId = self.vendor_id\n req.ipAddress = 0\n req.locationId = 0\n self._heartbeat = HeartBeat(self.keep_alive)\n rsp = self._soapcall(BFGlobalService.login, req)\n try:\n if rsp.errorCode != APIErrorEnum.OK:\n error_code = rsp.errorCode\n if error_code == LoginErrorEnum.API_ERROR:\n error_code = rsp.header.errorCode\n logger.error(\"{login} failed with error {%s}\", error_code)\n raise ServiceError(error_code)\n except:\n self._heartbeat = None\n raise\n self._heartbeat.start()",
"def login_bot(self):\n pass",
"def login(self):\n # the login url is just api, not api2\n url = 'https://simple-note.appspot.com/api/login'\n query = {'email': self.email, 'password': self.password}\n data = base64.b64encode(urllib.urlencode(query))\n try:\n fh = urllib2.urlopen(url, data)\n self.authtok = fh.read()\n except urllib2.HTTPError, e:\n # Received a non 2xx status code\n raise SimplenoteError('http error: {}'.format(e.code))\n except urllib2.URLError, e:\n # Non http error, like network issue\n raise SimplenoteError('url error: {}'.format(e.reason))\n fh.close()\n return True",
"def einloggen(self):\n \n self.c.login(self.username.text(), self.password.text(), \"1\")",
"def initialize(self):\n self.login()"
] | [
"0.6875937",
"0.6725718",
"0.67193294",
"0.6714746",
"0.657623",
"0.6535312",
"0.65296215",
"0.64994276",
"0.6449236",
"0.64444697",
"0.642967",
"0.6409585",
"0.6409585",
"0.63871634",
"0.63714683",
"0.6365348",
"0.6360069",
"0.63199073",
"0.6291092",
"0.62905693",
"0.6288457",
"0.62468624",
"0.6237381",
"0.62331325",
"0.6197332",
"0.6193854",
"0.6192812",
"0.61740124",
"0.6172292",
"0.6169374"
] | 0.6854403 | 1 |
This function fetches data from the database ordered by reverse id (N_post = normal profile posts; my_post_lists refers to fetching all posts from the database). | def get(self, request, *args, **kwargs):
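# Newest first: the current user's posts are ordered by descending id before being passed to the template.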
my_normal_post_lists = NormalPosts.objects.filter(uploded_by=request.user.normalprofile).order_by("-id")
return render(request, self.template_name, {
'my_normal_post_lists': my_normal_post_lists,
}) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fetch_posts():\n get_chain_address = F\"{CONNECTED_NODE_ADDRESS}/chain\"\n response = requests.get(get_chain_address)\n if response.status_code == 200:\n content = []\n chain = json.loads(response.content)\n for block in chain[\"chain\"]:\n for tx in block[\"transactions\"]:\n tx[\"index\"] = block[\"index\"]\n tx[\"hash\"] = block[\"previous_hash\"]\n content.append(tx)\n \n global posts \n posts = sorted(content,\n key=lambda k: k['timestamp'],\n reverse=True)",
"def get_queryset(self):\n user: User = self.request.user\n following_users = user.profile.following.all()\n return Post.objects.filter(author__in=following_users).order_by('created')",
"def getPosts():\n\n cur, user_id = initialise(3)\n cur.execute(\"SELECT username FROM users WHERE id = ?\", [user_id])\n name = cur.fetchall()[0][0]\n cur.execute(\"SELECT * FROM posts WHERE name IN (SELECT following FROM followers WHERE user = ?) OR name = ?\", (name, name))\n posts = cur.fetchall()\n return posts",
"def load_posts(post_ids, current_user_id=None):\r\n logging.warn(\"Ids===={}\".format(post_ids))\r\n\r\n # If list is not used, or any call that trigger __iter__ will end up with the query syntax\r\n # rather than the data itself.\r\n #posts_query = Post.objects.filter(id__in=post_ids).limit(100).allow_filtering()\r\n #post_counters = list(PostCounter.objects.filter(id__in=post_ids).limit(100).allow_filtering())\r\n\r\n post_objects = []\r\n # ok ,\r\n for post_id in post_ids:\r\n p = Post.objects.get(id=post_id)\r\n\r\n try:\r\n pc = PostCounter.objects.get(id=post_id) #filter(lambda x: x.id == post.id, post_counters)\r\n stats = pc._as_dict()\r\n del stats['id']\r\n p.__dict__['statistics'] = stats\r\n except DoesNotExist, dne:\r\n pass\r\n\r\n if current_user_id is not None:\r\n try:\r\n pv = PostVote.objects.get(post_id=post_id, user_id=current_user_id)\r\n p.__dict__['upvoted'] = True\r\n except DoesNotExist, dne:\r\n pass\r\n post_objects.append(p)\r\n\r\n return post_objects",
"def fetch_posts():\n get_chain_address = \"{}/chain\".format(CONNECTED_NODE_ADDRESS)\n response = requests.get(get_chain_address)\n if response.status_code == 200:\n content = []\n chain = json.loads(response.content)\n for pos, block in enumerate(chain[\"chain\"]):\n if pos ==0:\n pass\n else:\n for tx in list(block[\"transactions\"].values()):\n tx[\"index\"] = block[\"index\"]\n tx[\"hash\"] = block[\"previous_hash\"]\n content.append(tx)\n\n global posts\n posts = sorted(content, key=lambda k: k['timestamp'],\n reverse=True)",
"def get_posts_for_user(account_pk):\n where = \"WHERE account_pk = ?\"\n values = (account_pk, )\n orders = \"ORDER BY time DESC\"\n return Post.select_many(where, orders, values)",
"def list(self, request):\n # Get all Post records from the database\n posts = Post.objects.all()\n \n \n\n # Support filtering Posts by type\n # http://localhost:8000/Posts?type=1\n #\n # That URL will retrieve all tabletop Posts\n \n\n category = self.request.query_params.get('category', None)\n if category is not None:\n posts = posts.filter(category__id=category)\n \n user = RareUser.objects.get(user=request.auth.user)\n active = self.request.query_params.get('active', None)\n my_subscriptions=Subscription.objects.filter(follower_id=user.id)\n # print(my_subscriptions)\n \n if active is not None:\n print(\"my post navbar is being clicked\")\n # 1)get the posts where the user on the post equals the id on the user\n\n # 2)get the subscriptions where the follower on the subscription equals the id on the user\n # 3)get the posts where the user on the post equals the author in the subscription\n\n home_page_posts=[]\n\n followed_users=RareUser.objects.filter(rareusers_author__follower=user)\n for author in followed_users:\n subscribed_post=list(posts.filter(user=author))\n home_page_posts=home_page_posts+subscribed_post\n\n only_my_posts = list(posts.filter(user__id=user.id))\n home_page_posts=home_page_posts+only_my_posts\n\n # for subscription in my_subscriptions:\n \n # subscribed_post=posts.filter(user__id=subscription.author_id)\n # # my_list.append(subscribed_post)\n # # print(subscribed_post)\n # # my_list.append(only_my_posts)\n \n posts=home_page_posts\n \n \n users = self.request.query_params.get('user', None)\n if users is not None:\n \n posts = posts.filter(user__id=user)\n \n\n title = self.request.query_params.get('title', None)\n if title is not None:\n posts = posts.filter(title__contains=title)\n\n # subscribers=Subscription.objects.filter(follower=user.id)\n # for subscriber in subscribers:\n # subscriptionPosts=posts.filter(user=subscriber.author)\n # posts.append(subscriptionPosts)\n\n for post in posts:\n if post.user == user:\n post.my_post =True\n else:\n post.my_post =False\n\n \n\n serializer = PostSerializer(\n posts, many=True, context={'request': request})\n\n return Response(serializer.data)",
"def by_post_id(cls, post_id):\n return cls.all().filter('post_id =', post_id).order('-created').fetch(limit=20)",
"def get_posts(self): #return list of posts that are associated with this blog_id\n return Post.find_posts_for_blog_id(self.blog_id) #this will return a list of posts objects",
"def get_posts(db_cursor, page_num):\n posts = []\n db_cursor.execute(\"SELECT * FROM posts ORDER BY time_posted DESC LIMIT ?, ?\", (POSTS_PER_PAGE*(page_num-1), POSTS_PER_PAGE))\n for post in db_cursor.fetchall():\n posts.append({\n 'url_id': post[1],\n 'title': post[2],\n 'time_posted': post[4],\n 'category': post[5],\n 'visibility': post[6]\n })\n return posts",
"def get_queryset(self):\n return Post.objects.order_by('-posted')[:5]",
"def getMyPosts():\n \n cur, user_id = initialise(3)\n cur.execute(\"SELECT username FROM users WHERE id = ?\", [user_id])\n name = cur.fetchall()[0][0]\n cur.execute(\"SELECT * FROM posts WHERE name = ?\", [name])\n posts = cur.fetchall()\n return posts",
"def postList(posts):\n post_list = list()\n for post in posts:\n visible_to = list()\n visible = post.visibleTo.all()\n if visible:\n for author in visible:\n auth = \"{}/api/author/{}\".format(DOMAIN, author.id)\n visible_to.append(auth)\n\n comments = commentList(post)\n comment_url = \"{}/api/posts/{}/comments\".format(DOMAIN, post.id)\n post_dict = {'author': addAuthor(post.author), 'title': post.title, 'description': post.description,\n 'contentType': post.contentType, 'content': post.content, 'published': post.published,\n 'visibility': post.visibility, 'visibleTo': visible_to, 'unlisted': post.unlisted, 'id': post.id,\n 'comments': comments[:5], 'next': comment_url, 'count': len(comments),\n 'origin': \"{}/api/posts/{}\".format(DOMAIN, post.id),\n 'source': \"{}/api/posts/{}\".format(DOMAIN, post.id)}\n post_list.append(post_dict)\n return post_list",
"def list_posts(params, db_conn):\n\n skip = params.get('skip') or 0\n limit = params.get('limit') or 10\n params = omit(params, ('skip', 'limit',))\n query = (r.table(post_schema['tablename'])\n .filter(params)\n .order_by(r.asc('created'))\n .skip(skip)\n .limit(limit))\n return list(query.run(db_conn))",
"def get_user_posts(request):\n if request.method == \"POST\":\n token = request.data.get('token')\n post_id = request.data.get('post_id')\n type_ = request.data.get('type')\n\n if Token.objects.filter(key=token).exists():\n token = get_object_or_404(Token, key=token)\n if post_id == -1:\n posts = Post.objects.all().order_by(\"-date\")[:PAGE_OFFSET]\n elif type_ == 'old':\n posts = Post.objects.filter(pk__lt=post_id).order_by(\"-date\")[:PAGE_OFFSET]\n else: # 'new'\n posts = reversed(Post.objects.filter(pk__gt=post_id).order_by(\"date\")[:PAGE_OFFSET])\n\n serializer = PostSerializer(posts, many=True, context={'user_id': token.user_id})\n return Response({\"success\": 29,\n \"post\": serializer.data})\n else:\n return Response({\"error\": 17})",
"def commentList(post):\n comments = Comment.objects.all().filter(post=post).order_by('-published')\n remote_comments = RemoteComment.objects.all().filter(post=post).order_by('published')\n comment_list = list()\n\n if comments:\n for comment in comments:\n comment_dict = dict()\n comment_dict['author'] = addAuthor(comment.author)\n comment_dict['comment'] = comment.comment\n comment_dict['contentType'] = comment.contentType\n comment_dict['published'] = comment.published\n comment_dict['id'] = comment.id\n comment_list.append(comment_dict)\n if remote_comments:\n for remote in remote_comments:\n remote_dict = dict()\n server = remote.server\n r = requests.get(remote.author, auth=(server.username, server.password))\n if r.status_code == 200:\n author = remoteAddAuthor(r.json())\n remote_dict['author'] = author\n remote_dict['comment'] = remote.comment\n remote_dict['contentType'] = remote.contentType\n remote_dict['published'] = remote.published\n remote_dict['id'] = remote.id\n comment_list.append(remote_dict)\n else:\n continue\n\n comment_list = sorted(comment_list, key=lambda k: k['published'], reverse=True)\n\n return comment_list",
"def get_posts(self):\r\n\r\n sub_dict = {\r\n 'selftext': [], 'title': [], 'id': [], 'sorted_by': [],\r\n 'num_comments': [], 'score': [], 'ups': [], 'downs': []}\r\n csv = f'{self.sub}_posts.csv'\r\n\r\n # Attempt to specify a sorting method.\r\n sort, subreddit = self.set_sort()\r\n\r\n # Set csv_loaded to True if csv exists since you can't\r\n # evaluate the truth value of a DataFrame.\r\n df, csv_loaded = (pd.read_csv(csv), 1) if isfile(csv) else ('', 0)\r\n\r\n print(f'csv = {csv}')\r\n print(f'After set_sort(), sort = {sort} and sub = {self.sub}')\r\n print(f'csv_loaded = {csv_loaded}')\r\n\r\n print(f'Collecting information from r/{self.sub}.')\r\n\r\n for post in subreddit:\r\n\r\n # Check if post.id is in df and set to True if df is empty.\r\n # This way new posts are still added to dictionary when df = ''\r\n unique_id = post.id not in tuple(df.id) if csv_loaded else True\r\n\r\n # Save any unique posts to sub_dict.\r\n if unique_id:\r\n sub_dict['selftext'].append(post.selftext)\r\n sub_dict['title'].append(post.title)\r\n sub_dict['id'].append(post.id)\r\n sub_dict['sorted_by'].append(sort)\r\n sub_dict['num_comments'].append(post.num_comments)\r\n sub_dict['score'].append(post.score)\r\n sub_dict['ups'].append(post.ups)\r\n sub_dict['downs'].append(post.downs)\r\n sleep(0.1)\r\n\r\n new_df = pd.DataFrame(sub_dict)\r\n\r\n # Add new_df to df if df exists then save it to a csv.\r\n if 'DataFrame' in str(type(df)) and self.mode == 'w':\r\n pd.concat([df, new_df], axis=0, sort=0).to_csv(csv, index=False)\r\n print(\r\n f'{len(new_df)} new posts collected and added to {csv}')\r\n elif self.mode == 'w':\r\n new_df.to_csv(csv, index=False)\r\n print(f'{len(new_df)} posts collected and saved to {csv}')\r\n else:\r\n print(\r\n f'{len(new_df)} posts were collected but they were not '\r\n f'added to {csv} because mode was set to \"{self.mode}\"')",
"def get_data_fb(user_id, access_token):\n\n my_user = storage.get(User, user_id)\n my_user.update_attr(\"fb_access_token\", access_token)\n\n r = requests.get('https://graph.facebook.com/me/feed?access_token=' + access_token)\n result = r.json()\n post_dict = {}\n post_list = []\n index = 0\n for posts in result[\"data\"]:\n if index == 10:\n break\n new_post = {}\n\n new_post[\"CrossMe_user_id\"] = user_id\n new_post[\"Post_id_CrossMe\"] = str(uuid.uuid4())\n\n if \"message\" in posts.keys():\n new_post[\"message\"] = posts[\"message\"]\n else:\n new_post[\"message\"] = \"NULL\"\n\n new_post[\"created_time\"] = datetime.strptime(posts[\"created_time\"], '%Y-%m-%dT%H:%M:%S+%f')\n\n new_post[\"source\"] = \"FACEBOOK\"\n\n new_post[\"fb_post_id\"] = posts[\"id\"]\n\n\n URLPOST = 'https://graph.facebook.com/' + posts[\"id\"] + '?fields=object_id&access_token=' + access_token\n post_data = requests.get(URLPOST).json()\n if \"object_id\" in post_data.keys():\n URLIMAGE = 'https://graph.facebook.com/' + post_data[\"object_id\"] + '?fields=images&access_token=' + access_token\n image_data = requests.get(URLIMAGE).json()\n if \"images\" not in image_data.keys():\n continue\n all_images = image_data[\"images\"]\n new_post[\"image_url\"] = all_images[1][\"source\"]\n posts[\"media_type\"] = \"IMAGE\"\n else:\n continue\n posts[\"media_type\"] = \"STATUS\"\n new_post[\"image_url\"] = \"NULL\"\n\n post_list.append(new_post)\n index = index + 1\n\n my_post = Post()\n\n my_post.user_id = new_post[\"CrossMe_user_id\"]\n my_post.creation_date = new_post[\"created_time\"]\n my_post.post_source = new_post[\"source\"]\n my_post.post_type = posts[\"media_type\"]\n my_post.post_text = new_post[\"message\"]\n my_post.media_url = new_post[\"image_url\"]\n my_post.save()\n\n\n post_dict[\"fb_last_post\"] = post_list\n\n return make_response(jsonify(post_dict), 200)",
"def get_queryset(self):\n return Post.objects.order_by('-post_date')[:5]",
"def OLD_get_my_posts(access_obj, wall=None):\n from apps.users.models import Dept, Subdept, Page\n\n if isinstance(access_obj, User):\n erp_profile = access_obj.erp_profile\n erp_coords = erp_profile.coord_relations.all()\n erp_supercoords = erp_profile.supercoord_relations.all()\n erp_cores = erp_profile.core_relations.all()\n erp_pages = erp_profile.page_relations.all()\n # directly given access to post\n # + have access to that wall\n my_query = ( \\\n Q(access_users__id__exact=access_obj.id) | \\\n Q(access_subdepts__in=erp_coords) | \\\n Q(access_depts__in=erp_supercoords) | \\\n Q(access_depts__in=erp_cores) | \\\n Q(access_pages__in=erp_pages) | \\\n Q(wall__access_users__id__exact=access_obj.id) | \\\n Q(wall__access_subdepts__in=erp_coords) | \\\n Q(wall__access_depts__in=erp_supercoords) | \\\n Q(wall__access_depts__in=erp_cores) | \\\n Q(wall__access_pages__in=erp_pages)\n )\n if wall:\n my_query = Q(wall=wall) & my_query\n if access_obj.is_superuser:\n my_query = Q(wall=wall)\n\n return Post.objects.filter(my_query).distinct().order_by('-time_created')\n elif isinstance(access_obj, Subdept) or isinstance(access_obj, Dept) or isinstance(access_obj, Page):\n temp = access_obj.access_post\n if wall:\n return temp.filter(wall=wall).distinct().order_by('-time_created')\n else:\n return temp.all().order_by('-time_created')",
"def LoadPosts(self):\n result = {}\n for userid in self.User_list:\n file_list = set(os.listdir(f\"{self.home_dir}/{userid}\")) - set(['student.txt','img.jpg'])\n file_list = sorted(list(file_list))\n file_des_dict = {}\n for entirepostname in file_list:\n postname = entirepostname.replace(\".txt\",\"\")\n sp = postname.split(\"-\")\n if sp[0] not in file_des_dict.keys():\n file_des_dict[sp[0]] = {'post':None,'comments':[],'reply':{}}\n if len(sp) == 1:\n file_des_dict[sp[0]]['post'] = f\"{self.home_dir}/{userid}/{entirepostname}\"\n elif len(sp) == 2:\n file_des_dict[sp[0]]['comments'].append(sp[1])\n else:\n if sp[1] not in file_des_dict[sp[0]]['reply'].keys():\n file_des_dict[sp[0]]['reply'][sp[1]] = []\n file_des_dict[sp[0]]['reply'][sp[1]].append(sp[2])\n result[userid] = file_des_dict\n return result",
"def get_queryset(self):\r\n\r\n user = get_object_or_404(User, username=self.kwargs.get('username'))\r\n return Post.objects.filter(author=user).order_by('-date_posted')",
"def related_posts(self, number_items=5, include_ancestors=True) -> Type[QuerySet]:\n if include_ancestors:\n categories = Category.objects.get_queryset_ancestors(self.categories.get_queryset())\n else:\n categories = self.categories.get_queryset()\n posts = Post.objects.published_posts().exclude(pk=self.pk).filter(categories__in=categories).order_by('?')[:number_items]\n return posts",
"def list_posts(request):\n if request.method == 'POST':\n category = request.POST.get('category', False)\n posts = Post.objects.select_related('author')\\\n .filter(category=category)\\\n .order_by('-modified')\n # import pdb; pdb.set_trace()\n return render(request, 'posts/index.html',\n {'posts': posts})\n\n posts = Post.objects.select_related('author').order_by('-modified')\n likes = Likes.objects.select_related('post')\n\n return render(request, 'posts/index.html',\n {'posts': posts})",
"def get_queryset(self):\n\t\treturn Post.objects.order_by('-pub_date')[:5]",
"def getNewsFeed(self, userId: int) -> 'List[int]':\n news = [v for v in self.posts[userId]]\n for id in self.users[userId]:\n news += self.posts[id]\n return [id for _, id in sorted(news, reverse=True)[:10]]",
"def get_last_posts(self):\n last_posts = []\n r = requests.get(self.target_url)\n html = BeautifulSoup(r.content, 'html.parser')\n raw_posts = html.findAll(\"div\", {\"class\": \"item\"})\n\n for post in raw_posts:\n title_element = post.find(\"a\", {\"class\": \"item-link\"})\n title = self.text(title_element)\n href = title_element['href']\n description = self.text(post.find(\"div\", {\"class\": \"item-info-container\"}))\n id_post = str(post['data-adid'])\n price = self.text(post.find(\"span\", {\"class\": \"item-price\"}))\n image_element = post.find_all(\"img\")\n image_src = image_element[0]['data-ondemand-img'] if image_element else None\n complete_href = self.crawler_url + href\n description = '\\n'.join([title, description, price, complete_href])\n last_posts.append(Post(id=id_post, href=complete_href, description=description, image=image_src))\n return last_posts",
"def get_posts():\n db = psycopg2.connect(\"dbname=forum\")\n c = db.cursor()\n query = \"SELECT content, time FROM posts ORDER BY time DESC\"\n c.execute(query)\n rows = c.fetchall()\n POSTS = rows\n db.close()\n return POSTS",
"def get_post_ids(self, category_names: List[str]) -> List[str]:\n url = \"http://blog.naver.com/PostTitleListAsync.nhn\"\n post_ids = set()\n\n for category_name in category_names:\n params = {\n \"blogId\": self.naver_id,\n \"currentPage\": 1,\n \"categoryNo\": self.categories[category_name][0],\n \"parentCategoryNo\": self.categories[category_name][1],\n \"countPerPage\": 30,\n \"viewdate\": \"\",\n }\n while True:\n response = self.session.get(url, params=params)\n data = json.loads(response.text.replace(\"\\\\\", \"\\\\\\\\\"))\n\n try:\n lists = data[\"postList\"]\n except KeyError:\n params[\"parentCategoryNo\"] = params[\"categoryNo\"]\n print(\"API Error occured restart...\", file=sys.stderr)\n continue\n\n ids = [d[\"logNo\"] for d in lists]\n\n if ids[0] not in post_ids:\n post_ids.update(ids)\n params[\"currentPage\"] += 1\n else:\n print(f\"Get post ids: {len(post_ids)} posts found.\")\n break\n return sorted(list(post_ids))",
"def iterateAnswers(db, postIds):\n c=db.cursor()\n strPostId = \",\".join([str(postId) for postId in postIds])\n #logging.debug(\"Loading answers...\")\n c.execute(\"\"\"SELECT * FROM posts WHERE type_id=2 AND parent_id IN (%s) ORDER BY FIELD(parent_id, %s)\"\"\" % (strPostId, strPostId))\n for answer in c.fetchall():\n yield Post(answer)\n c.close()"
] | [
"0.6402055",
"0.63352996",
"0.62798464",
"0.62748545",
"0.6224536",
"0.6171342",
"0.6130636",
"0.6092321",
"0.60728544",
"0.59778965",
"0.5957494",
"0.5913101",
"0.58570284",
"0.58428276",
"0.579514",
"0.57481414",
"0.5719974",
"0.5713792",
"0.5709445",
"0.5702672",
"0.56802404",
"0.56420565",
"0.5631714",
"0.5591221",
"0.5590091",
"0.55895513",
"0.55550086",
"0.5554554",
"0.55319226",
"0.5522835"
] | 0.66532075 | 0 |
Check a given line to see if a move is valid. Return the squares that will change if the move is valid, otherwise an empty list. | def valid_line(board, x, y, dx, dy):
if not 0 <= x + dx + dx < 8:
return []
if not 0 <= y + dy + dy < 8:
return []
coords_1 = board.columns[x+dx] + board.rows[y+dy]
coords_2 = board.columns[x+dx+dx] + board.rows[y+dy+dy]
if board[coords_1] != -self.color:
# If the neighbour square is not of the opposite color
return []
# We don't need to make this test as it will be tested in the next
# recursive iteration if needed
# elif board[coords_2] == 0:
# # If the following square is empty, the line is not valid
# return False
elif board[coords_2] == self.color:
# If the following square is of the right colour, then the line
# is matching, and the move is valid
return [coords_1]
else:
# Otherwise, we need to test one step further
modified_squares = valid_line(board, x+dx, y+dy, dx, dy)
if modified_squares:
# If the line is valid
return modified_squares + [coords_1]
else:
return [] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def legal_moves(player, board):\n return [sq for sq in Othello.squares() if Othello.is_legal(sq, player, board)]",
"def is_valid(move):\n return isinstance(move, int) and move in Othello.squares()",
"def is_valid_move(state, move):\n row, col = move\n if row not in [1, 2, 3] or col not in [1, 2, 3]:\n print(\"Invalid move! Specify correct game square!\")\n return False\n if state[row-1][col-1] != '_':\n print('Invalid move! Place your marker on a free square!')\n return False\n return True",
"def legal_moves(self, player, board):\r\n #go through the whole board and check whether the piece is on the board or not\r\n #num/row size - num%col == num2/row size - num@%col\r\n #num/row size + num%col\r\n moves = list()\r\n opp = self.opponent(player)\r\n #print(board)\r\n for i in self.squares():\r\n if board[i] == core.EMPTY:\r\n for d in core.DIRECTIONS:\r\n endPt = self.find_bracket(i, player, board, d)\r\n if endPt!= None:\r\n moves.append(i)\r\n break\r\n\r\n return moves",
"def is_move_valid(move: Move, board: Board, whites_turn: bool) -> bool:\n if out_of_bounds(move[0]) == True or out_of_bounds(move[1]) == True:\n return False\n \n if move[0] == move[1]:\n return False\n\n if is_current_players_piece(piece_at_position(move[0], board), False) and whites_turn == True:\n return False\n elif is_current_players_piece(piece_at_position(move[0], board), True) and whites_turn == False:\n return False\n\n\n if piece_at_position(move[1], board) in WHITE_PIECES and whites_turn == True:\n return False\n elif piece_at_position(move[1], board) in BLACK_PIECES and whites_turn == False:\n return False\n\n\n if move[1] not in get_possible_moves(move[0], board):\n return False\n\n\n test_board = board\n test_board = update_board(test_board, move)\n if is_in_check(test_board, True) and whites_turn == True:\n return False\n elif is_in_check(test_board, False) and whites_turn == False:\n return False\n\n return True",
"def get_valid_moves(self):\r\n validMoves = []\r\n\r\n for x in range(BOARD_SIZE):\r\n for y in range(BOARD_SIZE):\r\n pos = np.array([x,y])\r\n if self.board[pos[0],pos[1]] == 0:\r\n if(self.update_board(pos,_testing=True)):\r\n validMoves.append(pos)\r\n\r\n return validMoves",
"def checkio(lines_list):\n row = [[0]*3,[0]*3,[0]*3,[0]*3]\n colume = [[0]*4,[0]*4,[0]*4]\n square = 0\n # save line in matrix\n for i in lines_list:\n if i[0]-i[1] in [-1,1]:\n row[int((i[0]-1)/4)][min(i[0],i[1])%4-1] = 1\n else:\n colume[int(((min(i[0],i[1])-1)/4))][min(i[0],i[1])%4-1] = 1\n\n for r in [0, 1, 2]:\n # r is the start point of row\n for c in [0, 1, 2]:\n # c is the start point of colume\n for line in range(1, 4-max(r,c)):\n # line is the length of square\n check = 0\n print(line)\n for i in range(0, line):\n check = row[r][c+i] + colume[r+i][c] + row[r+line][c+i] + colume[r+i][c+line] + check\n if check == line * 4:\n square += 1\n return square",
"def get_valid_moves(self):\n if self.king:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1],\n [self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n else:\n if self.player == 1:\n valid_moves = [[self.row + 1, self.col + 1],\n [self.row + 1, self.col - 1]]\n else:\n valid_moves = [[self.row - 1, self.col - 1],\n [self.row - 1, self.col + 1]]\n return valid_moves",
"def valid_move(self, row, col):\n if not self._game_over:\n i_row, i_col = row-1, col-1\n #i_row and i_col wil be used to index the board (hence the i)\n (valid, flip_lst) = self._valid_placement(i_row, i_col)\n #print(\"FOR TESTING. Tiles Flipped: \", flip_lst)\n \n if valid:\n #Big Change: You decided to make determining validity\n # and flipping separate operations\n self._flip(i_row, i_col, flip_lst)\n else:\n print(\"\\nPlease enter a valid move!\")\n return False\n\n if self._board_is_full():\n self._game_over = True\n self._set_winner() \n \n self._switch_turn(self._turn)\n if not self._valid_move_exists(): #Check if the other player has any valid moves\n print(\"\\nNo valid moves exist for {0}. {0}'s turn has been skipped\".format(self._turn))\n self._switch_turn(self._turn) #Switch turn back to player before skip was determined\n if not self._valid_move_exists(): #Check if the other player has any valid moves\n print(\"No valid moves exist for {0}. {0}'s turn has been skipped\".format(self._turn))\n print(\"No moves exist for either player. GAME OVER\")\n self._game_over = True\n self._set_winner()\n return False\n\n return True\n elif self._game_over:\n print(\"The game is over. No more moves can be made!\")\n #TODO: Replace this^ with an exception later?\n return False",
"def piece_moves(self, pos, line_movemant, diagonal_movemant, distance):\r\n enemy = (\"white\" if self.color == \"black\" else \"black\")\r\n board = self.board\r\n moves = []\r\n unchecked_moves = ()\r\n line = ((0, 1), (1, 0), (-1, 0), (0, -1))\r\n diag = ((1, 1), (-1, 1), (1, -1), (-1, -1))\r\n starting_pos = board.num_notation(pos.upper())\r\n if line_movemant and diagonal_movemant:\r\n unchecked_moves = line + diag\r\n elif diagonal_movemant:\r\n unchecked_moves = diag\r\n elif line_movemant:\r\n unchecked_moves = line\r\n for (x, y) in unchecked_moves:\r\n for single_move in range(1, distance + 1):\r\n \"\"\"iterating over all possible moves in a specific line/diagonal \"\"\"\r\n destination = starting_pos[0] + single_move * x, starting_pos[1] + single_move * y\r\n if board.alpha_notation(destination) not in board.occupied(self.color) and board.in_board(destination):\r\n moves.append(destination)\r\n if board.alpha_notation(destination) in board.occupied(enemy):\r\n break\r\n else:\r\n break\r\n if self.name == \"K\":\r\n moves = moves + self.castle(pos)\r\n return list(map(board.alpha_notation, moves))\r\n # instead of iterating over moves and using alpha_notation on its values we can use map\r",
"def checkMoves(self,board):\n possibleMoves = []\n\n for c in xrange(0,8):\n for r in xrange(0,8):\n if board.isValidMove(self.tile,c,r):\n possibleMoves.append(c+r*8)\n\n return possibleMoves",
"def get_legal_moves(self, color):\n moves = [] # stores the legal moves.\n # Get all the squares with pieces of the given color.\n for x in range(self.n):\n for y in range(self.n):\n if self[x][y]==0:\n moves.append((x,y))\n return moves",
"def validate_line(self, line):\n splitline = line.split('\\t')\n if len(splitline) is not 9:\n return []\n if not \"ID\" in splitline[8]:\n return []\n if not int(splitline[3]) <= int(splitline[4]):\n return []\n # Everything except genes must have parent id\n if not \"Parent\" in splitline[8] and not splitline[2] == \"gene\":\n return []\n return splitline",
"def get_all_valid_moves(self, player):\n moves = [] # Stores the possible moves\n capture_move_exists = False # Indicates if a capturing move is possible\n\n for piece in self.get_all_pieces(player):\n valid_moves = self._get_valid_moves(piece)\n\n for move, skip in valid_moves.items():\n moves.append([(piece.row, piece.col), move, skip])\n\n if len(skip) > 0:\n # Checks if there is a move that can capture a piece\n capture_move_exists = True\n\n if capture_move_exists:\n # Only gets the capturing moves if there is one\n eating_moves = []\n for move in moves:\n if len(move[2]) != 0:\n eating_moves.append(move)\n\n moves = eating_moves\n\n return moves",
"def valid_moves(board):\n return [i for i, x in enumerate(board) if x == ' ']",
"def get_legal_moves(self, color):\n moves = set() # stores the legal moves.\n color = max(0, color)\n\n # Get all the squares with pieces of the given color.\n for y in range(self.n):\n for x in range(self.n):\n if self[x][y]==color:\n newmoves = self.get_moves_for_square((x,y))\n moves.update(newmoves)\n return list(moves)",
"def test_check_move_with_valid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] + [\" \"] * 5,\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 3)\n self.assertTrue(valid)",
"def _valid_move_exists(self):\n lst = []\n for i_row in range(self._num_rows):\n for i_col in range(self._num_cols):\n if self._valid_placement(i_row, i_col)[0]:\n lst.append((i_row, i_col))\n\n return lst != [] #If lst != [], then the list has elements -> valid move(s) exist",
"def test_check_move_with_invalid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] * 6,\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 4)\n self.assertFalse(valid)",
"def is_legal_move(player, row_from, col_from, row_to, col_to):\r\n illegal_moves = [(0, 0), (2, 0), (0, 4), (2, 4)]\r\n\r\n \"\"\"special moves that are move available according to diagram\r\n List of tuples to and from values that are not possible\"\"\"\r\n moves_not_permitted = [[(0, 2), (1, 1)], [(0, 2), (1, 3)], [(1, 1), (2, 2)], [(1, 3), (2, 2)]]\r\n row_diff = abs(row_from - row_to)\r\n col_diff = abs(col_from - col_to)\r\n\r\n if player == 'hounds':\r\n\r\n if (row_to >= 0 and row_to < 3 and col_to >= 0 and col_to < 5):\r\n \"\"\"Check if the move is not out of bounds for the board with max col range 4 and row range 3\r\n and then check if it is a legal move\"\"\"\r\n\r\n if board[row_to][col_to] == 0 and (row_to, col_to) not in illegal_moves and row_diff <= 1 and col_diff <= 1:\r\n \"\"\" Check if the position is blank.\r\n Then check if the move is not one of the blank places\r\n Then check if the row difference and column difference isn't more than 1\r\n \"\"\"\r\n if (col_to - col_from) < 0: # no moves to the left of the board\r\n return False\r\n\r\n for item in moves_not_permitted:\r\n if len(set([(row_from, col_from), (row_to, col_to)]).intersection(set(item))) == 2:\r\n \"\"\" If to and from co-ordinates are present in the moves_not_permitted list then return False\"\"\"\r\n return False\r\n else:\r\n pass\r\n return True\r\n else:\r\n return False\r\n\r\n else:\r\n \"\"\"When player is a hare\"\"\"\r\n\r\n if (row_to >= 0 and row_to < 3 and col_to >= 0 and col_to < 5):\r\n \"\"\"Check if the move is not out of bounds for the board with max col range 4 and row range 3\r\n and then check if it is a legal move\"\"\"\r\n\r\n if board[row_to][col_to] == 0 and (row_to, col_to) not in illegal_moves and row_diff <= 1 and col_diff <= 1:\r\n \"\"\" Check if the position is blank.\r\n Then check if the move is not one of the blank places\r\n Then check if the row difference and column difference isn't more than 1\"\"\"\r\n\r\n for item in moves_not_permitted:\r\n if len(set([(row_from, col_from), (row_to, col_to)]).intersection(set(item))) == 2:\r\n \"\"\" If to and from co-ordinates are present in the moves_not_permitted list then return False\"\"\"\r\n return False\r\n else:\r\n pass\r\n return True\r\n\r\n else:\r\n return False",
"def has_valid_move(self, cur_square, board):\n coords = cur_square.coords\n neighbor_list = [tuple(map(sum, zip(coords, offset))) for offset in self._offsets]\n return self.has_valid_move_in_list(coords, neighbor_list, board)",
"def get_move(self, board):\n\n valid_moves = [move for move in board.legal_moves]\n is_valid_move = False\n while not is_valid_move:\n move = input(\"Enter a valid move in uci format: \").lower()\n if len(move) == 4 or len(move) == 5:\n try:\n player_move = chess.Move.from_uci(move)\n\n if board.is_legal(player_move):\n try:\n board.push(player_move)\n return player_move\n except:\n print(\"invalid move...\")\n else:\n print(\"invalid move...\")\n except:\n print(\"invalid move...\")\n else:\n print(\"invalid move...\")",
"def _get_valid_moves(self, piece):\n moves = {}\n left = piece.col - 1 # Left position\n right = piece.col + 1 # Right position\n row = piece.row # Current row\n\n if piece.get_player() == Player.white or piece.is_king():\n # Checks the movements from the bottom to the top\n moves.update(self._traverse_left(row - 1, max(row - 3, -1), -1, piece.get_player(), left))\n moves.update(self._traverse_right(row - 1, max(row - 3, -1), -1, piece.get_player(), right))\n\n if piece.get_player() == Player.black or piece.is_king():\n # Checks the movements from the top to the bottom\n moves.update(self._traverse_left(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), left))\n moves.update(self._traverse_right(row + 1, min(row + 3, constant.BOARD_DIMENSION),\n 1, piece.get_player(), right))\n\n return moves",
"def valid_move(x, y):\r\n if [x, y] in empty_cells(board):\r\n return True\r\n else:\r\n return False",
"def check_word(self, line, sp_ch='*'):\n valid = []\n le = cross.get_length()\n for n in range(le):\n t1 = \"\".join(line)\n t2 = cross.get_word(n)\n if len(t1) == len(t2):\n z = zip(t1, t2)\n for x in z:\n if (x[0] != x[1]) and (x[0] != sp_ch):\n break\n else:\n word_as_list = list(t2)\n valid.append(word_as_list)\n return valid",
"def test_check_move_with_barely_valid(self):\n board = [\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\" \"] * 6,\n [\"\\u25cb\"] * 5 + [\" \"],\n [\" \"] * 6,\n [\" \"] * 6\n ]\n valid = self.game.check_move(board, 4)\n self.assertTrue(valid)",
"def validate_moves(moves):\n valid_coords = []\n for move in moves:\n if move[0] in cc.VALID_RANKS and move[1] in cc.VALID_RANKS:\n valid_coords.append(True)\n else:\n valid_coords.append(False)\n return tuple(valid_coords)",
"def validate_move(move, player_board):\n select_row = move.select_row\n select_col = move.select_col\n \n player_board_rows = player_board.shape[0]\n player_board_cols = player_board.shape[1]\n \n if select_row >= player_board_rows or select_row < 0 or \\\n select_col >= player_board_cols or select_col < 0 or \\\n player_board[select_row][select_col] != -1:\n return False\n \n return True",
"def find_list_for_old_line(self, line):\n target = hash_graphics_line(line)\n for i, markers in enumerate(self._lines):\n hashes = [hash_graphics_line(x) for x in markers]\n if target in hashes:\n return i\n\n return None",
"def get_legal_moves(self):\n moves = []\n if self.player_locations[self.whose_turn] is None:\n return self.get_blank_locations()\n matrix = [(1,0), (-1,0), (0,1), (0,-1), (1,1), (1,-1), (-1, 1), (-1,-1)]\n\n for dx, dy in matrix:\n x,y = self.player_locations[self.whose_turn]\n while x+dx <= xdim and x+dx >= 0 and y+dy <= ydim and y+dy >= 0:\n x = x+dx\n y = y+dx\n if self.board[x][y] : break\n moves.append((x,y))\n return moves"
] | [
"0.6370445",
"0.62957835",
"0.6113358",
"0.61125934",
"0.59849495",
"0.5977295",
"0.5965192",
"0.5934297",
"0.59330297",
"0.59321463",
"0.5929608",
"0.5929004",
"0.5915414",
"0.5898067",
"0.58975315",
"0.5873103",
"0.5867404",
"0.586026",
"0.5846924",
"0.58331126",
"0.58301663",
"0.58262575",
"0.58225304",
"0.57990503",
"0.5772037",
"0.5766031",
"0.5761845",
"0.5756341",
"0.5727597",
"0.5718173"
] | 0.7884284 | 0 |
Check if the given key is valid, with the format XY. X is the column, between A and H, and Y is the row, between 1 and 8. | def _is_valid_key(self, key):
# If the key is not a string
if not isinstance(key, str):
return False
else:
key = str.upper(key)
# If the given key does not match the standard notation XY
if len(key) != 2:
return False
# If the key is out of the board
if key[0] not in self.columns or key[1] not in self.rows:
return False
# Otherwise the key is valid
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __check_key_validity(self, key):\n if not isinstance(key, tuple):\n raise TypeError(\"key must be a tuple\")\n if len(key) != 2:\n raise ValueError(\"key must be of length two\")\n if not (isinstance(key[0], int) and isinstance(key[1], int)):\n raise TypeError(\"elements of key must be integers\")\n if not ((0 <= key[0] < self.m) and (0 <= key[1] < self.n)):\n raise exc.OutOfBoundsError(\"key is out of bounds\")",
"def isValidKey(key):\n return True",
"def _checkKey(self, key):\n x, y = self._convertNegativeTupleKeyToPositiveTupleKey(key)\n return x, y",
"def check_keypoint(kp: Sequence, rows: int, cols: int) -> None:\n for name, value, size in zip([\"x\", \"y\"], kp[:2], [cols, rows]):\n if not 0 <= value < size:\n raise ValueError(\n \"Expected {name} for keypoint {kp} \"\n \"to be in the range [0.0, {size}], got {value}.\".format(kp=kp, name=name, value=value, size=size)\n )\n\n angle = kp[2]\n if not (0 <= angle < 2 * math.pi):\n raise ValueError(\"Keypoint angle must be in range [0, 2 * PI). Got: {angle}\".format(angle=angle))",
"def test_valid_key(self):\n f = lws.valid_data_key\n assert f('string', int, r'string') is False\n assert f('string', str, r'test') is False\n assert f(123, int, '123') is False\n assert f(123.00, float, '123') is False\n assert f('123', str, r'[0-9]*') is True",
"def is_key_valid(self,key):\n if not key or any(map(lambda s: s in key,space_chars))\\\n or any(map(lambda s: s in key,bad_chars)):\n return False \n return True",
"def is_valid(key):\n return key[0:2] == \"MR\" and key[2:].isdigit() and len(key) in [9, 10]",
"def test_is_valid_annotation_key_invalid_input():\n # test length violations\n assert not is_valid_annotation_key(key=None) # Too short\n assert not is_valid_annotation_key(key=\"\") # Too short\n assert not is_valid_annotation_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_annotation_key(key=\"/n\") # prefix too short\n assert not is_valid_annotation_key(key=\"p/\") # name too short\n assert not is_valid_annotation_key(key=\"a\" * 254) # name too long\n assert not is_valid_annotation_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"-a\")\n assert not is_valid_annotation_key(key=\".b\")\n assert not is_valid_annotation_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"a-\")\n assert not is_valid_annotation_key(key=\"b.\")\n assert not is_valid_annotation_key(key=\"c \")\n assert not is_valid_annotation_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_annotation_key(key=\"a$$a\")\n assert not is_valid_annotation_key(key=\"b b\")",
"def _is_valid_key(self, key):\r\n\r\n # Check the length\r\n if len(key) > 250:\r\n return False\r\n\r\n # Check that there are no spaces or control characters\r\n for char in key:\r\n if ord(char) < 33 or ord(char) == 127:\r\n return False\r\n\r\n return True",
"def test_is_valid_annotation_key_valid_input():\n # test valid label keys\n assert is_valid_annotation_key(key=\"l0l\")\n assert is_valid_annotation_key(key=\"l0L\")\n assert is_valid_annotation_key(key=\"L-l\")\n assert is_valid_annotation_key(key=\"L.L\")\n assert is_valid_annotation_key(key=\"4-you\")\n assert is_valid_annotation_key(key=\"you.2\")\n assert is_valid_annotation_key(key=\"p/n\")\n assert is_valid_annotation_key(key=\"prefix/you.2\")\n assert is_valid_annotation_key(key=\"how.sad/to-see\")\n assert is_valid_annotation_key(key=f\"{'d'*253}/{'n'*63}\")",
"def keyIsValid(key):\n\n isValid = 1\n \n try:\n temp = getParam(key)\n\n except ValueError:\n isValid = 0\n warning(\" WARNING: %s not set\" % (key))\n\n return isValid",
"def _check_key(self, key):\n raise NotImplementedError",
"def test_validate_yubikey(self):\n from_key = self.yk_rnd.from_key(self.yk_public_id, self.yk_key)\n self.assertTrue(pyhsm.yubikey.validate_yubikey_with_aead( \\\n self.hsm, from_key, self.aead.data, self.kh_validate))",
"def test_is_valid_label_key_valid_input():\n # test valid label keys\n assert is_valid_label_key(key=\"l0l\")\n assert is_valid_label_key(key=\"l0L\")\n assert is_valid_label_key(key=\"L-l\")\n assert is_valid_label_key(key=\"L.L\")\n assert is_valid_label_key(key=\"4-you\")\n assert is_valid_label_key(key=\"you.2\")\n assert is_valid_label_key(key=\"p/n\")\n assert is_valid_label_key(key=\"prefix/you.2\")\n assert is_valid_label_key(key=\"how.sad/to-see\")\n assert is_valid_label_key(key=f\"{'d'*253}/{'n'*63}\")",
"def validate_coords(coords, delta):\n global KEYPAD\n coord_x, coord_y = coords\n delta_x, delta_y = delta\n if any([(coord_x + delta_x) > 2,\n (coord_y + delta_y) > 2]):\n return False\n return True",
"def test_is_valid_label_key_invalid_input():\n # test length violations\n assert not is_valid_label_key(key=None) # Too short\n assert not is_valid_label_key(key=\"\") # Too short\n assert not is_valid_label_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_label_key(key=\"/n\") # prefix too short\n assert not is_valid_label_key(key=\"p/\") # name too short\n assert not is_valid_label_key(key=\"a\" * 254) # name too long\n assert not is_valid_label_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not is_valid_label_key(key=\"-a\")\n assert not is_valid_label_key(key=\".b\")\n assert not is_valid_label_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_label_key(key=\"a-\")\n assert not is_valid_label_key(key=\"b.\")\n assert not is_valid_label_key(key=\"c \")\n assert not is_valid_label_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_label_key(key=\"a$$a\")\n assert not is_valid_label_key(key=\"b b\")",
"def validate_in(self, xcoord, ycoord):\r\n x = int(xcoord/(self.tr.bd.TILE_WIDTH + self.tr.bd.LINE_WIDTH))\r\n y = int(ycoord/(self.tr.bd.TILE_WIDTH + self.tr.bd.LINE_WIDTH))\r\n if not self.tr.turn_tracker and self.tr.bd.disks[x][y].halo_tag:\r\n return True, x, y\r\n else:\r\n return False, x, y",
"def _assert_valid(self, y: int, x: int) -> None:\n if not (0 <= y < self.size[0] and 0 <= x < self.size[1]):\n raise ValueError('Coordinates out of image boundary, {}'.format(self.size))",
"def validated(x, y, playing_field):\n # user_input_cell = (x, y)\n if playing_field[x][y] == '*':\n True\n else:\n return False",
"def _checkForSlicesInKey(self, key):\n if isinstance(key, tuple):\n for i, v in enumerate(key):\n if isinstance(v, slice):\n raise PyTextCanvasException('Use parentheses when specifying slices, i.e. spam[(0, 0):(9, 9)] not spam[0, 0:9, 9].')",
"def validate(self, key, val):\n return True",
"def validate(self, key, val):\n return True",
"def valid_coordinate(self,row,column):\r\n if row >= 0 and row < len(self.wordsearch):\r\n if column >= 0 and column < len(self.wordsearch[0]):\r\n return True\r\n return False",
"def valid_coordinates(self, x, y):\n return ((x >= 0) and (x < self.width) and\n (y >= 0) and (y < self.height))",
"def _check(self, x, y):\n n = self.n\n # x direction\n xline = self.arr[y]\n if not self.x_regexes[y].match(xline):\n return False\n\n # y direction\n ypos = x + max(0, y + 1 - n)\n yline = []\n x1, y1 = ypos, 0\n while x1 >= 0 and y1 < 2 * n - 1:\n if x1 < len(self.arr[y1]):\n yline.append(self.arr[y1][x1])\n if y1 >= n - 1:\n x1 -= 1\n y1 += 1\n\n if not self.y_regexes[ypos].match(yline):\n return False\n\n # z direction\n zpos = x + max(0, n - 1 - y)\n zline = []\n x1, y1 = zpos, 2 * n - 2\n while x1 >= 0 and y1 >= 0:\n if x1 < len(self.arr[y1]):\n zline.append(self.arr[y1][x1])\n if y1 <= n - 1:\n x1 -= 1\n y1 -= 1\n\n if not self.z_regexes[zpos].match(zline):\n return False\n\n return True",
"def _cleankey(self, key, is_view=None):\n m, n = self.shape\n if is_view == None:\n if isinstance(key, int) and any([x == 1 for x in self.shape]):\n is_view = False\n elif isinstance(key, tuple) and all(\n [isinstance(x, int) for x in key]):\n is_view = False\n else:\n is_view = True\n\n if isinstance(key, tuple) and len(key) == 2 and all(\n [isinstance(x, list) for x in key]):\n return (*key, is_view)\n\n if isinstance(key, MPView):\n key = (key.p_rows, key.p_cols)\n return (*key, is_view)\n\n if isinstance(key, int) or isinstance(key, slice) or isinstance(\n key, list):\n # One index given, check if vector shaped.\n if m == 1: # row vector case\n key = ([0], key)\n elif n == 1: # col vector case\n key = (key, [0])\n else: # if not vector-shaped, all columns are implicitly indexed\n key = (key, list(range(n)))\n row_key, col_key = key\n\n if isinstance(row_key, slice):\n row_key = list(range(m))[row_key]\n elif isinstance(row_key, int):\n row_key = [row_key]\n\n if isinstance(col_key, slice):\n col_key = list(range(n))[col_key]\n elif isinstance(col_key, int):\n col_key = [col_key]\n\n return (row_key, col_key, is_view)",
"def valid_input(self, row, col):\n return ((row, col) not in self.marks and\n row <= WIDTH and row > 0 and\n col in COL_MAP)",
"def validate_key(self, key: keyType) -> bool:\n if isinstance(key, (dict,bool)):\n raise Exception\n if key is None:\n raise Exception\n # Numerical key object has no len(),\n # so explicitly specify which types are not allowed to use empty value as keys\n if isinstance(key, (str, tuple, set, list)) and (len(key) == 0):\n raise Exception\n return True",
"def is_valid_cord(x, y, w, h):\n return x >=0 and x < w and y >= 0 and y < h;",
"def is_valid_position(self, x, y):\n if (x > self.n_cols-1 or y > self.n_rows-1) or (x < 0 or y < 0):\n return False\n\n elif self.grid[x][y] == 3:\n return False\n\n return True"
] | [
"0.7048035",
"0.69368386",
"0.692533",
"0.6781486",
"0.64831704",
"0.6406309",
"0.6364915",
"0.6322703",
"0.63189375",
"0.63171273",
"0.6212421",
"0.61821645",
"0.6121905",
"0.6106289",
"0.60453564",
"0.60074675",
"0.6005349",
"0.5961546",
"0.5960941",
"0.5926862",
"0.5837048",
"0.5837048",
"0.5837031",
"0.5817769",
"0.5799098",
"0.57947886",
"0.57783204",
"0.57515806",
"0.57225996",
"0.5715044"
] | 0.79342586 | 0 |
wrapper function for replace dialog, launches dialog over the passed curses window and returns a tuple of (pattern, replace) or (None,None) if canceled | def replace( scr ):
d = ReplaceDialog(scr)
value = d.main()
if not "pattern" in value:
return (None,None)
else:
return (value["pattern"],value["replace"]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self,scr):\n (max_y,max_x) = scr.getmaxyx()\n dialog.Dialog.__init__(self, scr, \"ReplaceDialog\", 5, max_x-4, [ dialog.Frame(\"Search and Replace\"),\n dialog.Prompt(\"pattern\",1,2,1,\"Pattern: \",-1),\n dialog.Prompt(\"replace\",2,2,2,\"Replace: \",-1),\n dialog.Button(\"ok\",3,2,3,\"OK\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"cancel\",4,29,3,\"CANCEL\",dialog.Component.CMP_KEY_CANCEL)\n ])",
"def find_replace(self, event=None):\n if self.app.children:\n #find string\n findStr = self.app.childActive.source.GetSelectedText()\n if findStr and self.findDialog:\n self.findDialog.Destroy()\n self.findDialog = None\n #dialog already open, if yes give focus\n if self.findDialog:\n self.findDialog.Show(1)\n self.findDialog.Raise()\n return\n if not findStr:\n findStr = self.findStr\n self.numberMessages=0\n #find data\n data = wx.FindReplaceData(self.findFlags)\n data.SetFindString(findStr)\n data.SetReplaceString(self.replaceStr)\n #dialog\n self.findDialog = wx.FindReplaceDialog(self, data, \"Find & Replace\",\n wx.FR_REPLACEDIALOG|wx.FR_NOUPDOWN)\n x, y = self.frame.GetPosition()\n self.findDialog.SetPosition((x+5,y+200))\n self.findDialog.Show(1)\n self.findDialog.Raise()\n self.findDialog.data = data # save a reference to it...",
"def findReplaceTextClicked(self):\n if not self.graphicsView.hasImage():\n self.showImageSelectionMessageBox()\n return\n\n from TextItemEditDialog import QTextItemEditDialog\n\n self.dlgTextItemEdit = QTextItemEditDialog(self)\n self.dlgTextItemEdit.show()\n self.dlgTextItemEdit.exec_()",
"def find_replace_events(self):\n self.find_replace_dialog = dialog.FindReplaceEvents()\n self.find_replace_dialog.currentIdx = -1\n self.find_replace_dialog.currentIdx_idx = -1\n # list of rows to find/replace\n self.find_replace_dialog.rowsToFind = set([item.row() for item in self.twEvents.selectedIndexes()])\n self.find_replace_dialog.clickSignal.connect(self.click_signal_find_replace_in_events)\n self.find_replace_dialog.setWindowFlags(Qt.WindowStaysOnTopHint)\n self.find_replace_dialog.show()",
"def comdlg32_ReplaceText(jitter, get_str, set_str):\n ret_ad, args = jitter.func_args_stdcall([\"lpfr\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def findAndReplace(self):\n\n # Prompts user for find regex and replace text\n findText, replaceText = Model.FindAndReplaceDialogBox.getResults(self)\n\n # Gets the current selection from the current tab\n selectionModel = self.getCurrentView().selectionModel()\n\n # Pass to panda\n self.getCurrentPanda().findAndReplace(findText, replaceText, selectionModel)",
"def __init__(self,scr):\n dialog.Dialog.__init__(self, scr, \"ConfirmReplaceDialog\", 5, 40, [ dialog.Frame(\"Do replace?\"),\n dialog.Button(\"yes\",1,2,3,\"YES\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"no\",2,10,3,\"NO\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"all\",3,15,3,\"ALL\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"cancel\",4,21,3,\"CANCEL\",dialog.Component.CMP_KEY_OK)\n ])",
"def confirm_replace( scr ):\n d = ConfirmReplaceDialog(scr)\n value = d.main()\n if \"yes\" in value:\n if value[\"yes\"]:\n return 1\n elif value[\"no\"]:\n return 2\n elif value[\"all\"]:\n return 3\n elif value[\"cancel\"]:\n return 4\n else:\n return 4",
"def __replaceOpenFiles(self):\n self.ui.showReplaceFilesDialog(self.textForFind(), openFiles=True)",
"def __replaceFiles(self):\n self.ui.showReplaceFilesDialog(self.textForFind())",
"def click_signal_find_replace_in_events(self, msg):\n\n if msg == \"CANCEL\":\n self.find_replace_dialog.close()\n return\n if not self.find_replace_dialog.findText.text():\n dialog.MessageDialog(programName, \"There is nothing to find.\", [\"OK\"])\n return\n\n if self.find_replace_dialog.cbFindInSelectedEvents.isChecked() and not len(self.find_replace_dialog.rowsToFind):\n dialog.MessageDialog(programName, \"There are no selected events\", [OK])\n return\n\n fields_list = []\n if self.find_replace_dialog.cbSubject.isChecked():\n fields_list.append(EVENT_SUBJECT_FIELD_IDX)\n if self.find_replace_dialog.cbBehavior.isChecked():\n fields_list.append(EVENT_BEHAVIOR_FIELD_IDX)\n if self.find_replace_dialog.cbModifier.isChecked():\n fields_list.append(EVENT_MODIFIER_FIELD_IDX)\n if self.find_replace_dialog.cbComment.isChecked():\n fields_list.append(EVENT_COMMENT_FIELD_IDX)\n\n number_replacement = 0\n for event_idx, event in enumerate(self.pj[OBSERVATIONS][self.observationId][EVENTS]):\n\n if event_idx < self.find_replace_dialog.currentIdx:\n continue\n\n if (not self.find_replace_dialog.cbFindInSelectedEvents.isChecked()) or (\n self.find_replace_dialog.cbFindInSelectedEvents.isChecked() and event_idx in self.find_replace_dialog.rowsToFind):\n for idx1 in fields_list:\n if idx1 <= self.find_replace_dialog.currentIdx_idx:\n continue\n if self.find_replace_dialog.findText.text() in event[idx1]:\n number_replacement += 1\n self.find_replace_dialog.currentIdx = event_idx\n self.find_replace_dialog.currentIdx_idx = idx1\n event[idx1] = event[idx1].replace(self.find_replace_dialog.findText.text(),\n self.find_replace_dialog.replaceText.text())\n self.pj[OBSERVATIONS][self.observationId][EVENTS][event_idx] = event\n self.loadEventsInTW(self.observationId)\n self.twEvents.scrollToItem(self.twEvents.item(event_idx, 0))\n self.twEvents.selectRow(event_idx)\n self.projectChanged = True\n\n if msg == \"FIND_REPLACE\":\n return\n\n self.find_replace_dialog.currentIdx_idx = -1\n\n if msg == \"FIND_REPLACE\":\n if dialog.MessageDialog(programName, \"{} not found.\\nRestart find/replace from the beginning?\".format(\n self.find_replace_dialog.findText.text()),\n [YES, NO]) == YES:\n self.find_replace_dialog.currentIdx = -1\n else:\n self.find_replace_dialog.close()\n if msg == \"FIND_REPLACE_ALL\":\n dialog.MessageDialog(programName, \"{} substitution(s).\".format(number_replacement), [OK])\n self.find_replace_dialog.close()",
"def update_launcher(self):\n if not self.misc.bufwinnr(self.name):\n self.open_launcher()\n\n self.mapper.clear()\n self.clear_highlighting()\n self.misc.go_to_win(self.misc.bufwinnr(self.name))\n self.misc.set_buffer(None)\n\n buffer_list = sorted(self.buffers_with_matches())\n if not self.view_buffer:\n self.view_buffer = self.curr_buf.number\n\n i = buffer_list.index(self.view_buffer)\n buf_prev = buffer_list[-1 if not i else i - 1]\n buf_next = buffer_list[0 if i == len(buffer_list) - 1 else i + 1]\n\n vim.command(\"setlocal stl=\\ \\ <-\\ {0}\\ \\ [{1}]\\ \\ {2}\\ ->\\ \\ \".format(\n os.path.split(self.misc.bufname(buf_prev))[1].replace(' ', '\\\\'),\n os.path.split(self.misc.bufname(self.view_buffer))[1].replace(' ', '\\\\'),\n os.path.split(self.misc.bufname(buf_next))[1].replace(' ', '\\\\')))\n\n # self.matches = {'bufname': [(linenr, col, line), ...], ...}\n if self.find_new_matches:\n if not self.cache:\n self.search(self.input_so_far)\n self.cache = list(self.matches)\n\n _matches = self.matches[self.view_buffer]\n if _matches:\n if self.view_buffer == self.curr_buf.number:\n pos = bisect.bisect_left(_matches, self.curr_buf_pos)\n _matches.insert(pos, self.curr_buf_pos)\n else:\n _matches = self.matches[self.view_buffer]\n\n if _matches:\n self.misc.set_buffer(\n [self.render_line(m, j) for j, m in enumerate(_matches)])\n\n # set the position to the current line\n if self.find_new_matches:\n if self.view_buffer == self.curr_buf.number:\n self.launcher_curr_pos = pos\n else:\n self.launcher_curr_pos = 0\n\n if self.launcher_curr_pos is not None:\n length = len(vim.current.buffer)\n if self.launcher_curr_pos >= length:\n self.launcher_curr_pos = length - 1\n vim.current.window.cursor = (self.launcher_curr_pos + 1, 1)\n\n self.render_curr_line()\n self.highlight()\n\n # adjust the window height according to the total\n # number of matches\n n = len(_matches)\n if n > self.max_height:\n vim.current.window.height = self.max_height\n else:\n vim.current.window.height = n\n\n vim.command(\"normal! zz\")\n\n else:\n vim.command('syntax clear')\n self.misc.set_buffer([' nothing found...'])\n vim.current.window.height = 1\n self.launcher_curr_pos = 0",
"def ReplaceInsertSymbolClicked(self):\n if not self.graphicsView.hasImage():\n self.showImageSelectionMessageBox()\n return\n\n from ReplaceSymbolDialog import QReplaceSymbolDialog\n\n self.dlgReplace = QReplaceSymbolDialog(self)\n self.dlgReplace.show()\n self.dlgReplace.exec_()",
"def _replace_dialog(self, player_id):\n if self.users[player_id].IsAI:\n self._on_replacement_selected(None, player_id, replace_list=self.users[player_id].agent.get_replace_card())\n return\n\n DW, DH = 0.9, 0.6\n game = self.ctrl.game\n\n layer_ = DialogLayer(Colors['black'], *map(int, pos(DW, DH)),\n position=pos((1 - DW) / 2, (1 - DH) / 2), stop_event=True, border=True)\n layer_.add(hs_style_label('请选择要替换的卡牌(玩家{})'.format(player_id),\n pos(DW * 0.5, DH * 0.98), anchor_y='top'))\n layer_.add_ok(lambda: self._on_replacement_selected(layer_, player_id))\n layer_.card_sprites = []\n\n num_cards = len(game.players[player_id].hand)\n for i, card in enumerate(game.players[player_id].hand):\n card_sprite = HandSprite(\n card, pos((2 * i + 1) / (2 * num_cards + 1), DH / 2),\n is_front=True, scale=0.6,\n callback=lambda self_: bool(self_.toggle_side()) or True,\n self_in_callback=True,\n sel_mgr_kwargs={'set_default': False},\n )\n layer_.card_sprites.append(card_sprite)\n layer_.add(card_sprite)\n layer_.add_to_scene(self.parent)",
"def comdlg32_FindText(jitter, get_str, set_str):\n ret_ad, args = jitter.func_args_stdcall([\"lpfr\"])\n raise RuntimeError('API not implemented')\n jitter.func_ret_stdcall(ret_ad, ret_value)",
"def __init__(self, parent):\n FindReplaceDialog.__init__(self, parent)\n self.ui.findReplaceForm.hide_replace_widgets()\n self.setWindowTitle(self.tr(\"Find\"))",
"def replace_all(self):\n i = 0\n while self.textedit.textCursor().hasSelection():\n self.textedit.textCursor().insertText(self.ui.textToReplace.text())\n self.find()\n i += 1\n self.show_message(unicode(self.tr(\"Replaced {0} occurrence(s)\")).format(i))",
"def __find(self):\n txt = self.textCursor().selectedText()\n self.__mainWindow.showFind(txt)",
"def menu_find__replace(self, event=None):\n self.parentPanel.find_replace(event)",
"def createFindDialog(self, c: Cmdr) -> Widget:\n if c:\n g.app.globalFindTabManager = c.findCommands.ftm\n top = c and c.frame.top # top is the DynamicWindow class.\n w = top.findTab # type:ignore\n dialog = QtWidgets.QDialog()\n # Fix #516: Hide the dialog. Never delete it.\n\n def closeEvent(event: Event) -> None:\n event.ignore()\n dialog.hide()\n\n dialog.closeEvent = closeEvent\n layout = QtWidgets.QVBoxLayout(dialog)\n layout.addWidget(w)\n self.attachLeoIcon(dialog)\n dialog.setLayout(layout)\n if c:\n # c.styleSheetManager.set_style_sheets(w=dialog)\n g.app.gui.setFilter(c, dialog, dialog, 'find-dialog')\n # This makes most standard bindings available.\n dialog.setModal(False)\n return dialog",
"def findText(self):\n\n # Prompts user for find regex\n findText,_ = Model.FindAndReplaceDialogBox.getResults(self)\n model = self.getCurrentPanda()\n start = model.index(0, 0)\n matches = model.match(\n start, QtCore.Qt.DisplayRole,\n findText, -1, QtCore.Qt.MatchContains)\n if matches:\n index = matches[0]\n self.getCurrentView().clearSelection()\n self.getCurrentView().selectionModel().select(\n index, QtCore.QItemSelectionModel.Select)\n self.getCurrentView().scrollTo(index)\n else:\n self.notifyUser(\"No matches found.\")",
"def showReplaceWidget(self):\n self.__searchWidget.hide()\n self.__replaceWidget.show(self.textForFind())",
"def _on_replacement_selected(self, dialog, player_id, replace_list=None):\n\n if replace_list is None:\n replace_list = [i for i, c in enumerate(dialog.card_sprites) if not c.is_front]\n\n game = self.ctrl.game\n game.run_player_action(pa.ReplaceStartCard(\n game, player_id, replace_list))\n\n if dialog is not None:\n dialog.remove_from_scene()\n\n # If replace done, start running main program (if current user is an AI, run it).\n if game.state == game.GameState.Main:\n self.update_content_after_animations(dt=1.0, scheduled=False) # Initial update.\n self.maybe_run_ai()\n return True",
"def replace_user_handles(text, replace_with=\"_USER_\"):\n return RE_USER_HANDLE.sub(replace_with, text)",
"def __call__(self):\n\n (width_offset, height_offset)=self.get_offset(self.dialog)\n self.dialog.geometry(f\"+{width_offset}+{height_offset}\")\n self.dialog.update_idletasks()\n self.dialog.deiconify()\n\n self.dialog.wait_window()\n\n for i, path in enumerate(self.dialog_selection):\n if self.unix_delimiter:\n self.dialog_selection[i] = sub(\"\\\\\\\\\", \"/\", path)\n else:\n self.dialog_selection[i] = sub(\"/\", \"\\\\\\\\\", path)\n\n\n if self.stdout:\n [print(item) for item in self.dialog_selection]\n\n return list(self.dialog_selection)",
"def replace_text_in_selections(view, edit, text):\n for region in view.sel():\n view.replace(edit, region, text)",
"def find_replace_next(self):\n old_term = self.text_find.get()\n new_term = self.text_replace.get()\n idx = '1.0'\n idx = self.text.search(old_term, idx, nocase=1,\n stopindex=tk.END)\n lastidx = '% s+% dc' % (idx, len(old_term))\n try:\n self.text.delete(idx, lastidx)\n self.text.insert(idx, new_term)\n self.highlight_next_match()\n except tk.TclError:\n return",
"def run(self):\n\n self.window.run_command(\"show_panel\", {\"panel\": \"output.reg_replace\"})",
"def on_replacetextCombo_editTextChanged(self, text):\n self.__enableFindButton()",
"def find_replace_all(self):\n old_term = self.text_find.get()\n new_term = self.text_replace.get()\n while True:\n idx = '1.0'\n idx = self.text.search(old_term, idx, nocase=1,\n stopindex=tk.END)\n lastidx = '% s+% dc' % (idx, len(old_term))\n if not idx:\n break\n self.text.delete(idx, lastidx)\n self.text.insert(idx, new_term)"
] | [
"0.66686773",
"0.66447234",
"0.62689984",
"0.6130497",
"0.60572594",
"0.6004771",
"0.5970039",
"0.5969815",
"0.55415326",
"0.55407614",
"0.5508379",
"0.5372172",
"0.53694844",
"0.53567016",
"0.52937233",
"0.5293361",
"0.5287301",
"0.5183273",
"0.5166747",
"0.5142576",
"0.5140756",
"0.51198924",
"0.5117318",
"0.50144744",
"0.50100577",
"0.49991384",
"0.4995551",
"0.4947921",
"0.4939014",
"0.48668116"
] | 0.69241583 | 0 |
wrapper function for the confirm replace dialog, returns 1 == yes, 2 == no, 3 == all, 4 == cancel returns 4 == cancel if canceled | def confirm_replace( scr ):
d = ConfirmReplaceDialog(scr)
value = d.main()
if "yes" in value:
if value["yes"]:
return 1
elif value["no"]:
return 2
elif value["all"]:
return 3
elif value["cancel"]:
return 4
else:
return 4 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def confirm(text, window=None):\n return message(text, u'Confirma', M_QUESTION, B_YES_NO, window) == R_YES",
"def confirm(force):\n if not force:\n ans = input(que(bold(\"Are you sure? [y/N]: \")))\n else:\n ans = 'y'\n\n return ans.lower()",
"def ask_ok_cancel(message=\"\", title=None):\n return dialog(\"ask_ok_cancel\", message=message, title=title)",
"def __window_confirm(self, text):\n return True",
"def confirm(msg: str = \"Do you want it:\", default: bool = True) -> bool:\n\n question = [\n {\n 'type': 'confirm',\n 'name': 'confirm',\n 'message': msg,\n 'default': default\n }\n ]\n try:\n answer = prompt(question)\n return answer['confirm']\n except KeyError:\n exit = confirm(msg=\"Do you want cancel script\")\n if exit:\n raise SystemExit\n else:\n return confirm(msg, default)",
"def PresentDialog_Confirm_Call( message ):\n return call( message, [ 'Ok', 'Cancel' ] )",
"def runAskYesNoCancelDialog(\n self,\n c: Cmdr,\n title: str,\n message: str=None,\n yesMessage: str=\"&Yes\",\n noMessage: str=\"&No\",\n yesToAllMessage: str=None,\n defaultButton: str=\"Yes\",\n cancelMessage: str=None,\n ) -> str:\n if g.unitTesting:\n return None\n dialog = QtWidgets.QMessageBox(c and c.frame.top)\n if message:\n dialog.setText(message)\n dialog.setIcon(Information.Warning)\n dialog.setWindowTitle(title)\n # Creation order determines returned value.\n yes = dialog.addButton(yesMessage, ButtonRole.YesRole)\n no = dialog.addButton(noMessage, ButtonRole.NoRole)\n cancel = dialog.addButton(cancelMessage or 'Cancel', ButtonRole.RejectRole)\n if yesToAllMessage:\n dialog.addButton(yesToAllMessage, ButtonRole.YesRole)\n if defaultButton == \"Yes\":\n dialog.setDefaultButton(yes)\n elif defaultButton == \"No\":\n dialog.setDefaultButton(no)\n else:\n dialog.setDefaultButton(cancel)\n try:\n c.in_qt_dialog = True\n dialog.raise_() # #2246.\n val = dialog.exec() if isQt6 else dialog.exec_()\n finally:\n c.in_qt_dialog = False\n # val is the same as the creation order.\n # Tested with both Qt6 and Qt5.\n return {\n 0: 'yes', 1: 'no', 2: 'cancel', 3: 'yes-to-all',\n }.get(val, 'cancel')",
"def yes_no_cancel_popup(title=None,\n text=None):\n d = gtk.Dialog(title=title,\n parent=None,\n flags=gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,\n buttons=( gtk.STOCK_YES, gtk.RESPONSE_YES,\n gtk.STOCK_NO, gtk.RESPONSE_NO,\n gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL ))\n hb=gtk.HBox()\n hb.show()\n d.vbox.add(hb)\n\n i=gtk.Image()\n i.set_from_stock(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_DIALOG)\n i.show()\n hb.pack_start(i, expand=False)\n\n if text is not None:\n l=gtk.Label(text)\n l.show()\n hb.add(l)\n d.connect('key-press-event', dialog_keypressed_cb)\n\n d.show()\n center_on_mouse(d)\n retval=d.run()\n d.destroy()\n return retval",
"def askOk(parent,message,title=''):\r\n return askStyled(parent,message,title,wx.OK|wx.CANCEL)",
"def confirm(self, action):\n title = \"%s : P L E A S E C O N F I R M\" % action\n question_text = \"<html><b>%s - PLEASE CONFIRM.</b><br/>\"\\\n \"<br/>Do you want to %s %s recordings for the following project?\"\\\n \"<br/><br/>PROJECT : %s\"\\\n \"<br/>CLIENT : %s\"\\\n \"<br/>DATE : %s<br/></html>\" % (\n action.upper(),\n action,\n \" & \".join(self.selected_formats),\n self.recordings_table.project_details()[2],\n self.recordings_table.project_details()[3],\n self.recordings_table.project_details()[0]\n )\n\n self.hide()\n if action == 'upload':\n self.confirmation_dialog.setText(title, question_text)\n self.confirmation_dialog.exec_()\n self.show()\n\n if self.confirmation_dialog.cancelled:\n return (False, False)\n\n return (True, self.confirmation_dialog.immediate_upload)\n else:\n self.confirmation_dialog.showQuestion(title, question_text)\n self.show()\n return self.confirmation_dialog.copy_confirmed",
"def action_confirm(self):\n self.check_txt_ids()\n self.write({'state': 'confirmed'})\n return True",
"def decision(question):\n return click.confirm(question, show_default=True)",
"def confirm(\n\t\ttext: str,\n\t\tdefault: bool = False,\n\t\tabort: bool = False,\n\t\tprompt_suffix: str = \": \",\n\t\tshow_default: bool = True,\n\t\terr: bool = False,\n\t\t):\n\n\tprompt = _build_prompt(text, prompt_suffix, show_default, \"Y/n\" if default else \"y/N\")\n\n\twhile True:\n\t\ttry:\n\t\t\tvalue = _prompt(prompt, err=err, hide_input=False).lower().strip()\n\t\texcept (KeyboardInterrupt, EOFError):\n\t\t\traise click.Abort()\n\n\t\tif value in ('y', \"yes\"):\n\t\t\trv = True\n\t\telif value in ('n', \"no\"):\n\t\t\trv = False\n\t\telif value == '':\n\t\t\trv = default\n\t\telse:\n\t\t\tclick.echo(\"Error: invalid input\", err=err)\n\t\t\tcontinue\n\t\tbreak\n\n\tif abort and not rv:\n\t\traise click.Abort()\n\n\treturn rv",
"def messageConfirm(self,message):\n answer=self.message(message,style=wx.YES_NO|wx.ICON_QUESTION)\n return self.messageIsOk(answer)",
"def Confirm(self):\r\n \r\n global references\r\n self.from_ed = self.ed_result.get(\"1.0\",'end-1c')\r\n references.append(self.from_ed)\r\n self.confirm_b.configure(state = 'disabled')\r\n self.discard_b.configure(state = 'disabled')\r\n self.finalresult.configure(state = 'normal')\r\n self.finalresult.delete('1.0', END)\r\n \r\n self.final()",
"def runAskYesNoDialog(self,\n c: Cmdr, title: str, message: str=None, yes_all: bool=False, no_all: bool=False,\n ) -> str:\n if g.unitTesting:\n return None\n dialog = QtWidgets.QMessageBox(c and c.frame.top)\n # Creation order determines returned value.\n yes = dialog.addButton('Yes', ButtonRole.YesRole)\n dialog.addButton('No', ButtonRole.NoRole)\n # dialog.addButton('Cancel', ButtonRole.RejectRole)\n if yes_all:\n dialog.addButton('Yes To All', ButtonRole.YesRole)\n if no_all:\n dialog.addButton('No To All', ButtonRole.NoRole)\n dialog.setWindowTitle(title)\n if message:\n dialog.setText(message)\n dialog.setIcon(Information.Warning)\n dialog.setDefaultButton(yes)\n if c:\n try:\n c.in_qt_dialog = True\n dialog.raise_()\n val = dialog.exec() if isQt6 else dialog.exec_()\n finally:\n c.in_qt_dialog = False\n else:\n dialog.raise_()\n val = dialog.exec() if isQt6 else dialog.exec_()\n # val is the same as the creation order.\n # Tested with both Qt6 and Qt5.\n return_d = {0: 'yes', 1: 'no'}\n if yes_all and no_all:\n return_d [2] = 'yes-all'\n return_d [3] = 'no-all'\n elif yes_all:\n return_d [2] = 'yes-all'\n elif no_all:\n return_d [2] = 'no-all'\n return return_d.get(val, 'cancel')",
"def confirm():\n\t\traise NotImplementedError",
"def __init__(self,scr):\n dialog.Dialog.__init__(self, scr, \"ConfirmReplaceDialog\", 5, 40, [ dialog.Frame(\"Do replace?\"),\n dialog.Button(\"yes\",1,2,3,\"YES\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"no\",2,10,3,\"NO\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"all\",3,15,3,\"ALL\",dialog.Component.CMP_KEY_OK),\n dialog.Button(\"cancel\",4,21,3,\"CANCEL\",dialog.Component.CMP_KEY_OK)\n ])",
"def confirm(self, prompt, default):\n raise NotImplementedError(NotImplementedMessage)",
"def confirm(msg: str) -> bool:\n res = input(msg + \" (Y/n) > \")\n if res == 'Y' or res == 'y' or res == 'yes' or res == 'Yes' or res == \"\":\n return True\n return False",
"def confirm(text, app, version, modules=None, default_yes=False):\n print(text)\n print(' Directory: %s' % os.path.basename(app.app_dir))\n print(' App ID: %s' % app.app_id)\n print(' Version: %s' % version)\n print(' Modules: %s' % ', '.join(modules or app.modules))\n if default_yes:\n return raw_input('Continue? [Y/n] ') not in ('n', 'N')\n else:\n return raw_input('Continue? [y/N] ') in ('y', 'Y')",
"def confirmDialog(*args, annotation: Union[AnyStr, List[AnyStr]]=\"\", backgroundColor:\n List[float, float, float]=None, button: Union[AnyStr, List[AnyStr]]=\"\",\n cancelButton: AnyStr=\"\", defaultButton: AnyStr=\"\", dismissString: AnyStr=\"\",\n icon: AnyStr=\"\", message: AnyStr=\"\", messageAlign: AnyStr=\"\", parent:\n AnyStr=\"\", title: AnyStr=\"\", **kwargs)->AnyStr:\n pass",
"def confirm(msg=\"\"):\n answer = \"\"\n if not msg: msg = \"OK to continue\"\n while answer not in [\"y\", \"n\"]:\n answer = input(msg+\" [Y/N]? \").lower()\n return answer == \"y\"",
"def confirm(prompt_str, default=False):\r\n if default:\r\n prompt = '%s [Y/n]: ' % prompt_str\r\n else:\r\n prompt = '%s [y/N]: ' % prompt_str\r\n\r\n response = valid_response(prompt, 'y', 'yes', 'yeah', 'yup', 'yolo')\r\n\r\n if response is None:\r\n return default\r\n\r\n return response",
"def YesNoCancelDialog( message, caption, style=wx.ICON_QUESTION ):\n return MessageDialog( message, caption, style | wx.YES_NO | wx.CANCEL )",
"def get_confirm(self):\n self.warning('Would you like to execute[y/N]: ')\n _choice = input()\n choice = _choice.lower() if _choice else 'n'\n err_msg = \"must input yes(y)/no(n), not \" + _choice\n if not choice.startswith(('y', 'n')):\n self.error(err_msg)\n return\n if choice == 'y' or choice == 'yes':\n confirm = True\n elif choice == 'n' or choice == 'no':\n self.info(\"Nothing to do.\")\n confirm = False\n else:\n self.error(err_msg)\n confirm = None\n\n return confirm",
"def _confirm(message):\n result = ''\n while result not in ('y', 'n'):\n try:\n result = raw_input('%s Continue (y/n)? ' % message)\n except EOFError:\n result = 'n'\n return result == 'y'",
"def yesno_cancel(\n question, title=None, bitmap=None, yes=None, no=None, cancel=None, checkbox=None, checked=None\n):\n\n if title is None:\n title = _('Yes or no?')\n if yes is None:\n yes = _(\"Yes\")\n if no is None:\n no = _(\"No\")\n if cancel is None:\n cancel = _(\"Cancel\")\n if checkbox is None:\n checkbox = _(\"Apply to all\")\n\n return msg_dialogs.prompt3msg(question, title, bitmap, yes, no, cancel, checkbox, checked)",
"def ask_yes_no(message=\"\", title=None):\n return dialog(\"ask_yes_no\", message=message, title=title)",
"def confirm(message):\n if not sys.stdout.isatty():\n return False\n reply = BaseCommand.input(\"\\n{message} [Y/N]:\".format(message=message))\n return reply and reply[0].lower() == 'y'"
] | [
"0.6553291",
"0.6408498",
"0.63460416",
"0.633145",
"0.6314579",
"0.63029253",
"0.6250293",
"0.6190345",
"0.6184483",
"0.6169528",
"0.6107208",
"0.60912764",
"0.6072582",
"0.607161",
"0.60662067",
"0.6043061",
"0.6029693",
"0.6001996",
"0.5931839",
"0.59036756",
"0.59018284",
"0.5901682",
"0.58718824",
"0.5870372",
"0.5838642",
"0.58259517",
"0.57849574",
"0.5771144",
"0.5757551",
"0.57547545"
] | 0.85182613 | 0 |
Sets default value for order prefix | def default_order_prefix():
return 'mag_' | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def default_prefix(self) -> str:",
"def default_prefix(self) -> str:\n return \"\"",
"def default_prefix(self) -> str:\n return \"\"",
"def setDefaultTopicPrefix(self, prefix):\n\n internals.blpapi_SessionOptions_setDefaultTopicPrefix(\n self.__handle,\n prefix)",
"def _setordering_customer_50K(self, val):\n self.swift_obj.OrderingCustomer_K = val\n self.swift_obj.OrderingCustomer_K.swiftTag = '50K'",
"def set_prefix_arg(self, name, value):\n self._prefix_kwargs[name] = value",
"def prefix(self, prefix):\n\n self._prefix = prefix",
"def prefix(self, prefix):\n\n self._prefix = prefix",
"def set_Prefix(self, value):\n super(DescribeEvaluationsInputSet, self)._set_input('Prefix', value)",
"def _fillServiceDefaults(self, args):\n\t\tif self.service.core.hasProperty(\"defaultSortKey\"):\n\t\t\tif \"_DBOPTIONS_ORDER\" not in args:\n\t\t\t\targs[\"_DBOPTIONS_ORDER\"] = self.service.core.getProperty(\n\t\t\t\t\t\"defaultSortKey\").split(\",\")",
"def set_prefix(self, prefix):\n self._prefix = prefix\n self._update_layout()",
"def _init_prefix(self):\n self._.prefix = \"v%x\" % (hash(self) % Integer(2)**32)",
"def get_ordering_customer_option(self):\n return 'A'",
"def set_prefix_kwargs(self, **kwargs):\n self._prefix_kwargs = kwargs",
"def setPrefix(self, *args):\n return _libsbml.ASTBasePlugin_setPrefix(self, *args)",
"def get_default_ordering(self):\n return self.default_ordering",
"def _get_prefix(obj):\n return obj._prefix if obj._prefix is not PREFIX_NOT_SET else DEFAULT_PREFIX",
"def set_target_buy_policy_abbreviation(self, prefix):\n self.set_value_into_input_field(self.target_buy_policy_abbreviation_input_field_locator, value=(prefix + self.random_string_generator(size=4)))",
"def _set_OPTION_ordering_customer(self):\n getter_name = ''\n if self.use_operations_xml:\n ordering_customer_option = FSwiftWriterUtils.get_value_from_xml_tag(self.swift_metadata_xml_dom,\n ['SWIFT', 'ORDERING_CUSTOMER_OPTION'])\n else:\n ordering_customer_option = self.get_ordering_customer_option()\n if ordering_customer_option == \"A\":\n getter_name = 'ordering_customer_50A'\n elif ordering_customer_option == \"F\":\n getter_name = 'ordering_customer_50F'\n elif ordering_customer_option == \"K\":\n getter_name = 'ordering_customer_50K'\n else:\n notifier.WARN(\"%s Option %s is not supported for tag %s. Mapping default option.\" % (\n self.swift_message_type, str(ordering_customer_option), 'OrderingCustomer_50a'))\n getter_name = 'ordering_customer_50A' # default\n return getter_name",
"def _setordering_customer_50A(self, val):\n self.swift_obj.OrderingCustomer_A = val\n self.swift_obj.OrderingCustomer_A.swiftTag = '50A'",
"def prefix(self, prefix):\n self._path_prefix = prefix",
"def prefix(self, group):\n return",
"def get_default_prefix(path):\n if path in prefixes_dict.keys():\n return prefixes_dict[path]\n else:\n return ''",
"def on_init(self, prefix='space', **kwargs):\n assert prefix not in (None, '')\n self.prefix = prefix",
"def prefix(self) -> typing.Optional[str]:\n return self._values.get('prefix')",
"def prefix(self) -> typing.Optional[str]:\n return self._values.get('prefix')",
"def prefix(self) -> typing.Optional[str]:\n return self._values.get('prefix')",
"def test_calc_sort_prefix_reset(self):\n test_object = self.test.datum_type3\n sort_parts = [test_object.datum_group.sort,\n ]\n actual = test_object._calc_sort_value(sort_base_length=4,\n increment=1,\n sort_prefix_parts=sort_parts\n )\n expected = 201000\n self.assertEqual(expected, actual)",
"def set_prefix(prefix):\n PLUGINS.set_prefix(prefix)",
"def empty_prefix(self):\r\n raise NotImplementedError()"
] | [
"0.7458118",
"0.69559276",
"0.69559276",
"0.6246707",
"0.6121237",
"0.6042731",
"0.6010271",
"0.6010271",
"0.59950566",
"0.5959341",
"0.5876922",
"0.58622044",
"0.58620113",
"0.58606994",
"0.5838946",
"0.5803903",
"0.57959056",
"0.57950515",
"0.57381994",
"0.57282114",
"0.56767374",
"0.567627",
"0.56500274",
"0.56120175",
"0.5590592",
"0.5590592",
"0.5590592",
"0.55874294",
"0.5581316",
"0.5576537"
] | 0.7345233 | 1 |
Import order states for instances | def import_order_states(cls, instances):
OrderState = Pool().get('magento.order_state')
for instance in instances:
Transaction().context.update({
'magento_instance': instance.id
})
# Import order states
with OrderConfig(
instance.url, instance.api_user, instance.api_key
) as order_config_api:
OrderState.create_all_using_magento_data(
order_config_api.get_states()
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_owned_instances():\n\n global owned_instances\n\n owned_instances = []\n try:\n with open(state_file, 'r') as f:\n for line in f:\n # Strip spaces and skip empty lines\n inst = line.strip()\n if inst != '':\n owned_instances.append(inst)\n logging.info(\"Loaded list of owned instances: %s\" % ','.join(owned_instances))\n except IOError:\n logging.warning(\"Cannot read initial state from %s\" % state_file)",
"def initordering(cls):\n for i in range(len(clslist)):\n stages = cls.getConfigStages()\n for j in range(len(stages)):\n for k in range(len(slotlist)):\n cls.initorderingclsslot(clslist[i], stages[j], slotlist[k])\n # print(ordering)\n cls.log(1, ordering)",
"def bulkbuildstates():\n import sys\n state_codes = (x.lower() for x in sys.argv[1:])\n for state_code in state_codes:\n State = importlib.import_module('states.%s' % state_code).State\n #_temp = __import__('spam.ham', globals(), locals(), ['eggs', 'sausage'], -1)\n some_state = State()\n some_state.bulkbuild()",
"def reorder_incremental_state_scripting(\n self,\n incremental_state: Dict[str, Dict[str, Optional[Tensor]]],\n new_order: Tensor,\n ):\n for module in self.modules():\n if hasattr(module, \"reorder_incremental_state\"):\n result = module.reorder_incremental_state(incremental_state, new_order)\n if result is not None:\n incremental_state = result",
"def ordered_real_state_space(a2_data, py_order, a2_order):\n aux_dic = OrderedDict()\n for py_index, key in enumerate(py_order):\n if key in a2_data:\n try:\n a2_index = a2_order.index(key)\n except ValueError:\n a2_index = None\n aux_dic[StateParPickable(key, py_index, a2_index)] = a2_data[key]\n\n return aux_dic",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def __init__(self):\n self._order_list = []",
"def load_states(self, checkpoint):\n raise NotImplementedError()",
"def _instantiate_input_states(self, context=None):\n from PsyNeuLink.Components.States.InputState import _instantiate_input_states\n _instantiate_input_states(owner=self, context=context)",
"def eom(self, state, order):",
"def createOrders(self):\n self.ordersDict = {}\n for pstep in self.processingSteps:\n if pstep.orderid not in self.ordersDict:\n self.ordersDict[pstep.orderid] = Order()\n self.ordersDict[pstep.orderid].addProcessingStep(pstep)",
"def _load_state_dict(self, state: dict):\n for o, dct in zip(self.optimizers, state.get('optimizers', [])):\n o.load_state_dict(dct)\n for s, dct in zip(self.schedulers, state.get('schedulers', [])):\n s.load_state_dict(dct)",
"def _load_state_dict(self, state: dict):\n for o, dct in zip(self.optimizers, state.get('optimizers', [])):\n o.load_state_dict(dct)\n for s, dct in zip(self.schedulers, state.get('schedulers', [])):\n s.load_state_dict(dct)",
"def load_custom_states(self, states, *args, **kwargs):\n pass",
"def import_ops(self):\n if self.is_training:\n self.lr = tf.get_collection_ref(\"lr\")[0]\n self.new_lr = tf.get_collection_ref(\"new_lr\")[0]\n self.lr_update = tf.get_collection_ref(\"lr_update\")[0]\n\n self.cost = tf.get_collection_ref(util.with_prefix(self.name, \"cost\"))[0]\n self.initial_state = util.import_state_tuples(\n self.initial_state, self.initial_state_name, self.name)\n self.final_state = util.import_state_tuples(\n self.final_state, self.final_state_name, self.name)",
"def import_forward(self):\n self.import_property('OG')\n self.import_property('IBU')\n self.import_property('ABV')\n self.import_property('SRM')",
"def buildstates():\n import sys\n state_codes = (x.lower() for x in sys.argv[1:])\n for state_code in state_codes:\n State = importlib.import_module('states.%s' % state_code).State\n #_temp = __import__('spam.ham', globals(), locals(), ['eggs', 'sausage'], -1)\n some_state = State()\n some_state.build()",
"def state_processing_enter(cfg, app, win):",
"def _get_state(self):",
"def state_list(self) -> Sequence[TState]:\n pass",
"def get_order(self):\n\n destination = self.factory_map[self.order.location]\n move_behaviour = self.move(destination)\n self.add_after_behaviour(move_behaviour, self.load_order)",
"def create_order():",
"def import_orders_button(cls, store_views):\n pass",
"def prepare(self):\n # Create a purchase order from a supplier\n Company = self.old_state.apps.get_model('company', 'company')\n PurchaseOrder = self.old_state.apps.get_model('order', 'purchaseorder')\n Part = self.old_state.apps.get_model('part', 'part')\n Supplierpart = self.old_state.apps.get_model('company', 'supplierpart')\n # TODO @matmair fix this test!!!\n # SalesOrder = self.old_state.apps.get_model('order', 'salesorder')\n\n supplier = Company.objects.create(\n name='Supplier A',\n description='A great supplier!',\n is_supplier=True,\n is_customer=True,\n )\n\n part = Part.objects.create(\n name='Bob',\n description='Can we build it?',\n assembly=True,\n salable=True,\n purchaseable=False,\n tree_id=0,\n level=0,\n lft=0,\n rght=0,\n )\n supplierpart = Supplierpart.objects.create(\n part=part,\n supplier=supplier\n )\n\n # Create some orders\n for ii in range(10):\n\n order = PurchaseOrder.objects.create(\n supplier=supplier,\n reference=f\"{ii}-abcde\",\n description=\"Just a test order\"\n )\n order.lines.create(\n part=supplierpart,\n quantity=12,\n received=1\n )\n order.lines.create(\n quantity=12,\n received=1\n )\n\n # TODO @matmair fix this test!!!\n # sales_order = SalesOrder.objects.create(\n # customer=supplier,\n # reference=f\"{ii}-xyz\",\n # description=\"A test sales order\",\n # )\n # sales_order.lines.create(\n # part=part,\n # quantity=12,\n # received=1\n # )",
"def order(self):\n raise NotImplementedError()",
"def reorder_incremental_state(self, incremental_state, new_order):\n\n def apply_reorder_incremental_state(module):\n if module != self and hasattr(module, \"reorder_incremental_state\"):\n module.reorder_incremental_state(\n incremental_state,\n new_order,\n )\n\n self.apply(apply_reorder_incremental_state)",
"def make_initial_state(self):\n pass",
"def load_state_dict(self, state_dict):\n own_state = self.state_dict()\n new_state = OrderedDict()\n for name, param in state_dict.items():\n if name in own_state:\n new_state[name] = param\n\n super(EncoderImagePrecomp, self).load_state_dict(new_state)",
"def test_twice_dependent_object_import(self):\n pass",
"def distribute_model(self):\n state_dict = self.model.state_dict()\n for client in self.clients:\n new_state_dict = copy.deepcopy(state_dict)\n client.model.load_state_dict(new_state_dict)"
] | [
"0.611863",
"0.5710451",
"0.55662745",
"0.55035776",
"0.5480113",
"0.54768515",
"0.54705364",
"0.54545605",
"0.5401249",
"0.5392934",
"0.5383165",
"0.53240365",
"0.53240365",
"0.5321685",
"0.5317529",
"0.5316439",
"0.5280355",
"0.52445763",
"0.51902944",
"0.51692367",
"0.5133493",
"0.513129",
"0.5130226",
"0.5126876",
"0.5125237",
"0.5120268",
"0.5087227",
"0.50777155",
"0.50731266",
"0.50644433"
] | 0.7806463 | 0 |
Sets default for active | def default_active():
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_active(self, active):\n self.active = active",
"def set_active(self):\n self.active = True",
"def set_active(self, active):\n self._active = active",
"def active(self, active):\n\n self._active = active",
"def active(self, active):\n\n self._active = active",
"def active(self, active):\n\n self._active = active",
"def active(self, active):\n\n self._active = active",
"def set_is_default(self):\n self.is_default = True",
"def set_is_default(self):\n self.is_default = True",
"def default(self, default):\n\n self._default = default",
"def setActive(self, active):\n\n self._active = active",
"def activate(self):\n self.active = True",
"def activate(self):\n self.active = True",
"def _activate(self):\n self.active = True",
"def active(self, activate):\n self.is_active = activate",
"def SetActive(self, b):\r\n\r\n self.active = b",
"def set_active(cls, name=None):\r\n if name is None:\r\n cls.active = True\r\n cls.non_actives = {} # Clear not actives\r\n else:\r\n if name in cls.non_actives:\r\n del cls.non_actives[name]",
"def default(self, default):\n self._default = default\n return self",
"def set_value_to_default(self):\n self.setValue(self.default_value)",
"def update_active(self):\n self.set_active(0)\n self.state = INACTIVE",
"def update_active(self):\n self.set_active(0)\n self.state = INACTIVE",
"def update_active(self):\n self.set_active(0)\n self.state = INACTIVE",
"def update_active(self):\n self.set_active(0)\n self.state = INACTIVE",
"def update_active(self):\n self.set_active(0)\n self.state = INACTIVE",
"def default(self, value):\n # save {value} as the default\n self._default = value\n # all done\n return",
"def activate(self):\n self._is_active = True",
"def default(self, value):\n # also easy\n self._default = value\n # all done\n return",
"def set_default_value(self, default_value):\n self._chb_bool.setChecked(bool(default_value))",
"def setactive(self, scriptname):\n code, data = self.__send_command(\n \"SETACTIVE\", [scriptname.encode(\"utf-8\")])\n if code == \"OK\":\n return True\n return False",
"def setActive(self, i, a=1):\n self.active[i] = a\n return"
] | [
"0.75453544",
"0.75211966",
"0.7276021",
"0.70126075",
"0.70126075",
"0.70126075",
"0.70126075",
"0.6970733",
"0.6970733",
"0.6938084",
"0.69131684",
"0.68543226",
"0.68543226",
"0.6832889",
"0.6776642",
"0.67710924",
"0.6755308",
"0.6708381",
"0.6620978",
"0.65976095",
"0.65976095",
"0.65976095",
"0.65976095",
"0.65976095",
"0.65659404",
"0.65374863",
"0.64948833",
"0.6410405",
"0.6388413",
"0.6368801"
] | 0.80122274 | 0 |
Sets current company as default | def default_company():
return Transaction().context.get('company') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def company(self, company):\n self._company = company",
"def company(self, company):\n\n self._company = company",
"def company(self, company):\n\n self._company = company",
"def set_as_default (self):\n\t\ttry:\n\t\t\tself.config.set('Global', 'Default', self.currentAccount.data['name'])\n\t\texcept ConfigParser.NoSectionError:\n\t\t\tself.setup_config()\n\t\t\tself.config.set('Global', 'Default', self.currentAccount.data['name'])\n\t\tself.config.write(open(self.configFile, 'w'))",
"def set_default_org(self, name):\n org = self.get_org(name)\n self.unset_default_org()\n org.config[\"default\"] = True\n org.save()\n if org.created:\n sfdx(\n sarge.shell_format(\n \"force:config:set defaultusername={}\", org.sfdx_alias\n )\n )",
"def company_name(self, company_name):\n\n self._company_name = company_name",
"def set_is_default_org(self, is_default_org):\n self.is_default_org = is_default_org",
"def design_company(self, design_company):\n\n self._design_company = design_company",
"def default_billing(self, default_billing):\n\n self._default_billing = default_billing",
"def default(self, default):\n\n self._default = default",
"def set_default_account(web3):\n web3.eth.defaultAccount = web3.eth.accounts[0]",
"def test_default_pricelist_with_company(self):\n company1_pricelist = self.env[\"product.pricelist\"].create({\n \"name\": \"company 1 pricelist\",\n \"currency_id\": self.currency.id,\n \"company_id\": self.company1.id,\n \"sequence\": 2,\n })\n\n # make sure this doesn't pick the company2 pricelist\n new_config = self.env[\"pos.config\"].create({\n \"name\": \"usd config\"\n })\n\n self.assertEqual(new_config.pricelist_id, company1_pricelist,\n \"POS config incorrectly has pricelist %s\" % new_config.pricelist_id.display_name)",
"def set_CompanyName(self, value):\n super(AddressValidationInputSet, self)._set_input('CompanyName', value)",
"def company(self):\n return self._company",
"def company(self):\n return self._company",
"def unset_default_org(self):\n for org in self.list_orgs():\n org_config = self.get_org(org)\n if org_config.default:\n del org_config.config[\"default\"]\n org_config.save()\n sfdx(\"force:config:set defaultusername=\")",
"def set_value_to_default(self):\n self.setValue(self.default_value)",
"def set_company_founded(self, value):\n self.company_founded = value",
"def company_name(self) -> Optional[str]:\n return pulumi.get(self, \"company_name\")",
"def set_company_id_value(self, company_id_value):\n self.company_id_value = company_id_value",
"def setcompany(func):\n if (not request.session.has_key('activecompany_id') or not request.session['activecompany_id']):\n return HttpResponseRedirect('/setcompany')\n else:\n return None",
"def default(self, default):\n self._default = default\n return self",
"def setup_dd(self, company):\n self.company = company\n self.apr = .03\n self.minimum_balance = 1000.0",
"def set_company_id_label(self, company_id_label):\n self.company_id_label = company_id_label",
"def default(self, value):\n # save {value} as the default\n self._default = value\n # all done\n return",
"def set_studio_default(self):\n raise NotImplementedError(\n \"{} Method `set_studio_default` not implemented!\".format(\n repr(self)\n )\n )",
"def set_is_default(self):\n self.is_default = True",
"def set_is_default(self):\n self.is_default = True",
"def get_company(self, name):\n return self.website.company.id",
"def set_site_default_page(site):\n site.setDefaultPage('front-page')\n logger.info(u'Visão padrão do site estabelecida')"
] | [
"0.70627534",
"0.6857343",
"0.6857343",
"0.6305711",
"0.62525135",
"0.6229821",
"0.6217805",
"0.60344994",
"0.59844",
"0.5886969",
"0.5837648",
"0.5830834",
"0.58194566",
"0.5790694",
"0.5790694",
"0.57838154",
"0.5768062",
"0.56869924",
"0.5668195",
"0.5659434",
"0.5657504",
"0.5655081",
"0.5633201",
"0.56204385",
"0.56152695",
"0.55890304",
"0.55744535",
"0.55744535",
"0.5530264",
"0.5498909"
] | 0.7678898 | 0 |
Import the websites and their stores/views from Magento | def import_websites(cls, instances):
Website = Pool().get('magento.instance.website')
Store = Pool().get('magento.website.store')
StoreView = Pool().get('magento.store.store_view')
MagentoOrderState = Pool().get('magento.order_state')
try:
instance, = instances
except ValueError:
cls.raise_user_error('multiple_instances')
with Transaction().set_context(magento_instance=instance.id):
# Import order states
with OrderConfig(
instance.url, instance.api_user, instance.api_key
) as order_config_api:
MagentoOrderState.create_all_using_magento_data(
order_config_api.get_states()
)
# Import websites
with Core(
instance.url, instance.api_user, instance.api_key
) as core_api:
websites = []
stores = []
mag_websites = core_api.websites()
# Create websites
for mag_website in mag_websites:
websites.append(Website.find_or_create(
instance, mag_website
))
for website in websites:
mag_stores = core_api.stores(
{'website_id': {'=': website.magento_id}}
)
# Create stores
for mag_store in mag_stores:
stores.append(Store.find_or_create(website, mag_store))
for store in stores:
mag_store_views = core_api.store_views(
{'group_id': {'=': store.magento_id}}
)
# Create store views
for mag_store_view in mag_store_views:
store_view = StoreView.find_or_create(
store, mag_store_view
)
# AR refactoring
store_view.save() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()",
"def importSites(self,sites_list):\n \"\"\" Append these sites objects to a sample \"\"\" \n self.sites = []\n for s in sites_list:\n mySite = Site(s)\n self._addSite(mySite)",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def default_start(self, data):\n return {\n 'message': \"This wizard has imported all the websites for this \" +\n \"magento instance. It has also imported all the stores and \" +\n \"store views related to the websites imported. If any of \" +\n \"the records existed already, it wont be imported.\"\n }",
"def fetch_website_list(self):\r\n # Clear list\r\n self.website_list = []\r\n\r\n # Open websites overview\r\n self.browser.open(self.config[\"base_url\"] + \"websites\")\r\n\r\n # Find table and iterate over rows\r\n for table_row in self.browser.get_current_page().select(\"table tr\"):\r\n\r\n # Fetch cells\r\n cells = table_row.findAll('td')\r\n\r\n # Iterate over cells\r\n if(len(cells) > 0):\r\n\r\n # Get website ID\r\n website_id = table_row['data-id']\r\n # Get website name\r\n name = cells[1].text.strip()\r\n # Get website domain name\r\n domain = cells[2].text.strip()\r\n\r\n # Build website object\r\n website = {'id': website_id,\r\n 'name': name, 'domain': domain}\r\n\r\n # Add website object to list\r\n self.website_list.append(website)",
"def list_websites(self):\r\n\r\n # Fetch websites\r\n self.fetch_website_list()\r\n\r\n # Print website data\r\n for website in self.website_list:\r\n print(\"ID: {0} | Domain: {1} | Name: {2}\".format(\r\n website['id'], website['domain'], website['name']))",
"def websites(self, websites):\n\n self._websites = websites",
"def import_sites(input_csv=\"../2012_ROOMS_site_info_sample.csv\"):\n reader = csv.DictReader(open(input_csv))\n for s in reader:\n number = s[\"Site ID\"]\n site = models.NewSite.all().filter('number =', number).get()\n if site:\n logging.info('site %s exists, skipping', number)\n continue\n else:\n site = models.NewSite(number=number)\n site.program = PROGRAM\n site.budget = int(s[\"Budgeted Cost in Campaign\"]) if s[\"Budgeted Cost in Campaign\"] else 0\n\n # Because Python 2.x csv module only reads ascii.\n def clean_s(k):\n return s[k].replace('\\n', ' ').replace('\\xe2', \"'\").replace('\\x80', \"'\").replace('\\x99', '').replace('\\xc3', '').replace('\\x95', '').replace('\\xb1', '').encode('ascii', 'replace')\n\n site.name = clean_s(\"Repair Application: Applicant's Name\")\n site.street_number = clean_s(\"Street Address\")\n site.city_state_zip = \"%s CA, %s\" % (\n clean_s(\"Repair Application: Recipient's City\"), \n clean_s(\"Repair Application: Recipient's Zip Code\"))\n site.applicant = clean_s(\"Repair Application: Applicant's Name\")\n site.applicant_home_phone = clean_s(\"Repair Application: Applicant Home Phone\")\n site.applicant_work_phone = clean_s(\"Repair Application: Applicant Work Phone\")\n site.applicant_mobile_phone = clean_s(\"Repair Application: Applicant Mobile Phone\")\n site.sponsor = clean_s(\"(Sponsor) Campaign Description\")\n site.rrp_test = clean_s(\"Repair Application: RRP Test Results\")\n site.rrp_level = clean_s(\"Repair Application: RRP Result Notes\")\n # site.roof = clean_s(\"Roof?\")\n site.jurisdiction = clean_s(\"Jurisdiction\")\n site.announcement_subject = clean_s(\"Announcement Subject\")\n site.announcement_body = clean_s(\"Announcement Body\")\n site.put()\n logging.info('put site %s', number)",
"def import_sitefinder_data(path):\n asset_data = []\n\n site_id = 0\n\n with open(os.path.join(path), 'r') as system_file:\n reader = csv.DictReader(system_file)\n next(reader, None)\n for line in reader:\n if line['Operator'] != 'Airwave' and line['Operator'] != 'Network Rail':\n # if line['Operator'] == 'O2' or line['Operator'] == 'Vodafone':\n # if line['Anttype'] == 'MACRO' or \\\n # line['Anttype'] == 'SECTOR' or \\\n # line['Anttype'] == 'Sectored' or \\\n # line['Anttype'] == 'Directional':\n asset_data.append({\n 'type': \"Feature\",\n 'geometry': {\n \"type\": \"Point\",\n \"coordinates\": [float(line['X']), float(line['Y'])]\n },\n 'properties':{\n 'name': 'site_' + str(site_id),\n 'Operator': line['Operator'],\n 'Opref': line['Opref'],\n 'Sitengr': line['Sitengr'],\n 'Antennaht': line['Antennaht'],\n 'Transtype': line['Transtype'],\n 'Freqband': line['Freqband'],\n 'Anttype': line['Anttype'],\n 'Powerdbw': line['Powerdbw'],\n 'Maxpwrdbw': line['Maxpwrdbw'],\n 'Maxpwrdbm': line['Maxpwrdbm'],\n 'Sitelat': float(line['Sitelat']),\n 'Sitelng': float(line['Sitelng']),\n }\n })\n\n site_id += 1\n\n else:\n pass\n\n return asset_data",
"def import_orders(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n store_view.import_order_from_store_view()",
"def _get_sites(web_crawl):\n site_list = \"\"\n if 'primarySite' in web_crawl:\n site_list = web_crawl['primarySite'] \n else:\n # TODO: throw exception, must have primary site\n pass\n if 'additionalSites' in web_crawl:\n for site in web_crawl['additionalSites']:\n site_list += \",\" + site\n return site_list",
"def __setup__(cls):\n super(WebsiteStoreView, cls).__setup__()\n cls._sql_constraints += [\n (\n 'magento_id_store_unique', 'UNIQUE(magento_id, store)',\n 'A store view must be unique in a store'\n )\n ]\n cls._error_messages.update({\n \"states_not_found\": 'No order states found for importing orders! '\n 'Please configure the order states on magento instance',\n })\n cls._buttons.update({\n 'import_orders_button': {},\n 'export_order_status_button': {}\n })",
"def test_store(self):\n self.selenium.get('{}/store'.format(self.live_server_url))",
"def get_list_of_sites(self) -> list:\n ah_write = self.get_iis_object()\n section = ah_write.GetAdminSection(\"system.applicationHost/sites\", \"MACHINE/WEBROOT/APPHOST\")\n collection = section.Collection\n result = []\n\n for i in range(collection.Count):\n\n site = collection[i]\n prop = site.Properties\n # site_id = prop[\"id\"].Value\n name = prop[\"name\"].Value\n default_app = self.get_default_app(site)\n bindings = self.get_site_bindings(site.ChildElements)\n applications = self.get_applications(site)\n if default_app and not os.path.exists(self.core.expandvars(default_app[\"physicalPath\"])):\n # не показывать сайты для которых нет физ. директории для иис экспреса\n continue\n site = Site(name, bindings, default_app, applications)\n if hasattr(site, 'port') and site.port != 0:\n result.append(site)\n\n return result",
"def scrape(site=''):\n scraper.scrape(get_site_config(site))",
"def __setup__(cls):\n super(WebsiteStore, cls).__setup__()\n cls._sql_constraints += [\n (\n 'magento_id_website_unique', 'UNIQUE(magento_id, website)',\n 'A store must be unique in a website'\n )\n ]",
"def get_stations(base_url, hts, mtype):\n stns1 = ws.site_list(base_url, hts, location='LatLong') # There's a problem with Hilltop that requires running the site list without a measurement first...\n stns1 = ws.site_list(base_url, hts, location='LatLong', measurement=mtype)\n stns2 = stns1[(stns1.lat > -47.5) & (stns1.lat < -34) & (stns1.lon > 166) & (stns1.lon < 179)].dropna().copy()\n stns2.rename(columns={'SiteName': 'ref'}, inplace=True)\n\n return stns2",
"def import_and_update_products(self, cursor, user, ids=None, context=None):\n if context is None:\n context = {}\n import_and_update_catalog_obj = self.pool.get('magento.instance.import_catalog')\n if not ids:\n ids = self.search(cursor, user, [], context)\n \n for website in self.browse(cursor, user, ids, context):\n import_and_update_catalog_id = import_and_update_catalog_obj.create(cursor, user, {'import_images': True}, context)\n context['active_id'] = website.id\n import_and_update_catalog_obj.import_and_update_products(cursor, user, [import_and_update_catalog_id], context)",
"def find_all(client):\n return list(map(lambda s: Site(s), client.get_api_resource(\"self/sites\")))",
"def __site_create_import(self, name, source_archive):\n try:\n debug_run(\"bin/pinit\")\n # yes, we are using the same label as the site name. label is actually\n # a mandatory parameter.\n # Full path to drush to avoid running a drush wrapper:\n debug_run(\"/usr/bin/drush psite-import --nopoll --label=%s %s %s\" % (name, name, source_archive))\n debug_run(\"drush paliases\")\n\n self.__wait_for_job_success(name, 'import_site_dev', delay=30, tries=36, loop_sleep=5)\n except SystemExit as e:\n raise Exception(\"Site import failed. Err: %s\" % str(e))",
"def insert_stores(self):\n logic = StoreLogic()\n \n # We create the list of store objects\n stores = self.objects_factory.create_store_object_list()\n stores = set(stores)\n \n for store in stores:\n logic.insert(store)",
"def setup_local_site(self):\n raise NotImplementedError",
"def download_all(self):\r\n # Fetch website list\r\n self.fetch_website_list()\r\n\r\n for website in self.website_list:\r\n self.download(website['id'])",
"def load_sites(self, site_list: list = None):\n try:\n sites = self.api.get(host=self.host, endpoint=f\"/api/v1/orgs/{self.oid}/sites\")\n except Exception as e:\n logger.error(f\"{TextColors.FAIL}Error getting org sites:{TextColors.ENDC} {e}\")\n raise e\n if site_list:\n sites = [s for s in sites if s['name'] in site_list]\n self.sites = sites",
"def runSiteAutomation(self, webretrievedelay, proxy, targetlist, sourcelist,\n useragent, botoutputrequested, refreshremotexml, versionlocation):\n if refreshremotexml:\n SitesFile.updateTekDefenseXMLTree(proxy, self._verbose)\n\n remotesitetree = SitesFile.getXMLTree(__TEKDEFENSEXML__, self._verbose)\n localsitetree = SitesFile.getXMLTree(__SITESXML__, self._verbose)\n\n if not localsitetree and not remotesitetree:\n print ('Unfortunately there is neither a {tekd} file nor a {sites} file that can be utilized for proper' \\\n ' parsing.\\nAt least one configuration XML file must be available for Automater to work properly.\\n' \\\n 'Please see {url} for further instructions.'\\\n .format(tekd=__TEKDEFENSEXML__, sites=__SITESXML__, url=versionlocation))\n else:\n if localsitetree:\n for siteelement in localsitetree.iter(tag=\"site\"):\n if self.siteEntryIsValid(siteelement):\n for targ in targetlist:\n for source in sourcelist:\n sitetypematch, targettype, target = self.getSiteInfoIfSiteTypesMatch(source, targ,\n siteelement)\n if sitetypematch:\n self.buildSiteList(siteelement, webretrievedelay, proxy, targettype, target,\n useragent, botoutputrequested)\n else:\n print ('A problem was found in the {sites} file. There appears to be a site entry with ' \\\n 'unequal numbers of regexs and reporting requirements'.format(sites=__SITESXML__))\n if remotesitetree:\n for siteelement in remotesitetree.iter(tag=\"site\"):\n if self.siteEntryIsValid(siteelement):\n for targ in targetlist:\n for source in sourcelist:\n sitetypematch, targettype, target = self.getSiteInfoIfSiteTypesMatch(source, targ,\n siteelement)\n if sitetypematch:\n self.buildSiteList(siteelement, webretrievedelay, proxy, targettype, target,\n useragent, botoutputrequested)\n else:\n print ('A problem was found in the {sites} file. There appears to be a site entry with ' \\\n 'unequal numbers of regexs and reporting requirements'.format(sites=__SITESXML__))",
"def models():\n if flask.request.args.get('refresh') == '1':\n app.config['store_cache'].reset()\n cached_data = app.config['store_cache'].read()\n if cached_data is not None:\n return json.dumps(cached_data)\n\n store_urls = app.config['store_url_list']\n aggregated_dict = dict()\n for i, store_url in enumerate(store_urls):\n if len(store_url) == 0:\n continue\n model_list = list()\n if store_url[-1] != '/':\n store_base_url = store_url + '/'\n else:\n store_base_url = store_url\n try:\n page = requests.get(store_base_url)\n except requests.exceptions.RequestException as e:\n logger.warning('Skip %s due to error %s' % (store_base_url, e))\n continue\n parser = StoreParser()\n parser.feed(page.content)\n msg = 'Thanks for visiting {}'.format(store_base_url)\n if len(parser.get_child_dirs()) > 0:\n dirs = [d[:-1] for d in parser.get_child_dirs()]\n else:\n response = requests.get(os.path.join(store_base_url,'master.json'))\n if response.status_code == 200:\n json_response = json.loads(response.content)\n dirs = json_response['children']\n msg = json_response['msg']\n else:\n continue\n for subdir in dirs:\n tmp_dict = {'dir_name': subdir}\n response = requests.get(os.path.join(store_base_url,subdir,'info.json'))\n if response.status_code == 200:\n tmp_dict['info'] = json.loads(response.content)\n tmp_dict['id'] = str(uuid.uuid4())\n response = requests.get(os.path.join(store_base_url,subdir,'aux.json'))\n if response.status_code == 200:\n tmp_dict['aux'] = json.loads(response.content)\n model_list.append(tmp_dict)\n store_info = {'base_url': store_base_url, 'welcome_msg': msg,\n 'model_list': model_list}\n aggregated_dict[store_base_url] = store_info\n app.config['store_cache'].write(aggregated_dict)\n return json.dumps(aggregated_dict)",
"def collect_web_sites(self):\n min_distance = None\n max_sequence_match = None\n index_string_match = index_distance = None\n self.collection = collections.OrderedDict()\n for i_web, web_row in self.company_urls_df.iterrows():\n # get the url first from the websites table which list all the urls belonging to\n # one kvk search\n url = web_row[URL_KEY]\n\n # skip all none uls and also the filtered urls\n if url is None or url == \"\":\n logger.debug(\"Skipping url because it is None or empty\")\n continue\n if self.filter_urls and url not in self.filter_urls:\n logger.debug(f\"filter urls is given so skip {url}\")\n continue\n\n # store a list of UrlInfo object with a minimum info the url which was tested\n url_info = UrlInfo(index=i_web, url=url)\n self.collection[url] = url_info\n\n print_banner(f\"Processing {url}\")\n\n # quick check if we can processes this url based on the country code\n suffix = url_info.url_extract.suffix\n if suffix in self.exclude_extensions.index:\n url_info.outside_nl = True\n logger.info(f\"Web site {url} has suffix '.{suffix}' Continue \")\n\n # get the processing time of the last time you did this url from the table\n try:\n processing_time = self.urls_df.loc[url, DATETIME_KEY]\n except KeyError:\n processing_time = None\n\n if self.force_process or self.rescan_missing_urls:\n url_info.needs_update = True\n else:\n url_info.needs_update = check_if_url_needs_update(processing_time=processing_time,\n current_time=self.current_time,\n older_time=self.older_time)\n if url_info.needs_update:\n # if the url needs update, store the current time\n url_info.processing_time = self.current_time\n else:\n url_info.processing_time = processing_time\n\n url_analyse = self.scrape_url_and_store_in_dataframes(url, url_info)\n\n url_info.url_analyse = url_analyse\n\n if url_analyse and not url_analyse.exists:\n self.logger.debug(f\"url '{url}'' does not exist\")\n continue\n\n # based on the company postcodes and kvknummer and web contents, make a ranking how\n # good the web sides matches the company\n match = UrlCompanyRanking(url, self.company_name_small,\n url_extract=url_info.url_extract,\n url_analyse=url_analyse,\n company_kvk_nummer=self.kvk_nr,\n company_postcodes=self.postcodes,\n threshold_string_match=self.threshold_string_match,\n threshold_distance=self.threshold_distance,\n logger=self.logger)\n\n url_info.match = match\n\n # update the min max\n if min_distance is None or match.distance < min_distance:\n index_distance = i_web\n min_distance = match.distance\n\n if max_sequence_match is None or match.string_match > max_sequence_match:\n index_string_match = i_web\n max_sequence_match = match.string_match\n\n self.logger.debug(\" * {} - {} - {}\".format(url, match.ext.domain,\n match.distance))\n\n if min_distance is None:\n self.company_urls_df = None\n elif index_string_match != index_distance:\n self.logger.warning(\n \"Found minimal distance for {}: {}\\nwhich differs from \"\n \"best string match {}: {}\".format(index_distance,\n self.collection[url].url,\n index_string_match,\n self.collection[url].url))",
"def sites(self):\n return self.data.sites.values",
"def create_site_retriever_api(client):\n response = client.create_rest_api(\n name='siteretriever',\n description='get site data from a list of sites',\n version='1'\n )\n file_name = \"api_info.pickle\"\n pickle_dictionary_to_file(response, file_name)"
] | [
"0.6846587",
"0.6164038",
"0.61030966",
"0.58853394",
"0.56397796",
"0.5540255",
"0.5497646",
"0.5465208",
"0.5342944",
"0.52653944",
"0.52565813",
"0.5253915",
"0.5238693",
"0.51912624",
"0.5140717",
"0.5108908",
"0.50549793",
"0.50509304",
"0.5028507",
"0.5003324",
"0.49764404",
"0.49707267",
"0.49632442",
"0.49484682",
"0.4938894",
"0.4936526",
"0.4930474",
"0.49261424",
"0.49136505",
"0.49099168"
] | 0.7438709 | 0 |
Import carriers/shipping methods from Magento for instances | def import_carriers(cls, instances):
InstanceCarrier = Pool().get('magento.instance.carrier')
for instance in instances:
with Transaction().set_context({
'magento_instance': instance.id
}):
with OrderConfig(
instance.url, instance.api_user, instance.api_key
) as order_config_api:
mag_carriers = order_config_api.get_shipping_methods()
InstanceCarrier.create_all_using_magento_data(mag_carriers) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def separate_methods(self):\r\n methods = {obj['method'] for obj in self.cf_data}\r\n metadata = {obj['name']: obj for obj in self.csv_data}\r\n self.data = {}\r\n missing = set()\r\n for line in self.cf_data:\r\n if line['method'] not in self.units:\r\n missing.add(line['method'])\r\n\r\n if missing:\r\n _ = lambda x: sorted([str(y) for y in x])\r\n warnings.warn(\"Missing units for following:\" +\r\n \" | \".join(_(missing)))\r\n\r\n for line in self.cf_data:\r\n assert isinstance(line['amount'], Number)\r\n\r\n if line['method'] not in self.data:\r\n self.data[line['method']] = {\r\n 'filename': self.file,\r\n 'unit': self.units.get(line['method'], ''),\r\n 'name': line['method'],\r\n 'description': '',\r\n 'exchanges': []}\r\n self.data[line['method']]['exchanges'].append({\r\n 'name': line['name'],\r\n 'categories': line['categories'],\r\n 'amount': line['amount']})\r\n\r\n self.data = list(self.data.values())\r\n\r\n for obj in self.data:\r\n obj.update(metadata.get(obj['name'], {}))",
"def default_start(self, data):\n return {\n 'message': \"This wizard has imported all the carriers / \" +\n \"shipping methods for this magento instance. You should now \" +\n \"configure the imported carriers / shipping methods to \" +\n \"match the shipment carriers in Tryton to allow seamless \" +\n \"synchronisation of tracking information.\"\n }",
"def setup_method(self,method):\n from importlib import reload\n reload(bt_temp)\n self.transport_obj = bt_temp.ns.modelList[0].levelModelList[0]\n self.bdm2_obj = self.transport_obj.velocityPostProcessor.vpp_algorithms[0]\n self._setRelativePath()",
"def populate_processing_methods(self):\n self.FIELDS_PRE_PROCESSING_METHODS = {\n 'publisher': self.join_all,\n 'description': self.join_all,\n 'format': self.join_all,\n 'language': self.join_all,\n 'type': self.get_alignment,\n 'rights': self.join_all,\n 'date': self.get_alignment,\n 'relation': self.join_all,\n 'source': self.join_all,\n 'coverage': self.get_alignment,\n 'contributor': self.join_all,\n 'title': self.join_all,\n 'identifier': self.join_all,\n 'subject': self.get_alignment,\n 'creator': self.get_alignment\n }",
"def shipping_lines(self):\n raise NotImplemented",
"def import_order_states(cls, instances):\n OrderState = Pool().get('magento.order_state')\n\n for instance in instances:\n\n Transaction().context.update({\n 'magento_instance': instance.id\n })\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n OrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )",
"def get_available_shipping_methods(self):\n return [\n m for m\n in ShippingMethod.objects.available(shop=self.shop, products=self.product_ids)\n if m.is_available_for(self)\n ]",
"def update_shipping_cost(self, cr, uid, ids, context=None):\n datas = self.browse(cr, uid, ids[0], context=context)\n if context is None:\n context = {}\n if context.get('active_model', False) in ['sale.order', 'account.invoice'] and 'active_id' in context:\n model = context['active_model']\n model_obj = self.pool.get(model)\n model_id = context.get('active_id', False)\n if model_id:\n model_obj.write(cr, uid, [model_id], {\n 'shipcharge': datas.shipping_cost,\n 'ship_method': datas.rate_select.shipmethodname,\n 'sale_account_id': datas.account_id.id,\n 'ship_method_id': datas.rate_select.id,\n }, context=context)\n if model == 'sale.order':\n model_obj.button_dummy(cr, uid, [model_id], context=context)\n if model == 'account.invoice':\n model_obj.button_reset_taxes(cr, uid, [model_id], context=context)\n return {'nodestroy': False, 'type': 'ir.actions.act_window_close'}",
"def test_get_shipment(self):\n pass",
"def _create_methods(self):\n\n logger.debug('call %s presets._create_methods()', self._device.name)\n for preset_type in self._paths.keys():\n add, add_here = self._make_add(preset_type)\n self._register_method(self, 'add_' + preset_type, add)\n self._register_method(self, 'add_here_' + preset_type, add_here)\n for preset_type, data in self._cache.items():\n for name, info in data.items():\n if info['active']:\n mv, umv = self._make_mv_pre(preset_type, name)\n wm = self._make_wm_pre(preset_type, name)\n self._register_method(self._device, 'mv_' + name, mv)\n self._register_method(self._device, 'umv_' + name, umv)\n self._register_method(self._device, 'wm_' + name, wm)\n setattr(self.positions, name,\n PresetPosition(self, preset_type, name))",
"def extract(self, picking_out):\n picking = picking_out.pool['stock.picking'].browse(picking_out._cr, 1, picking_out.id)\n shipping_partner = picking_out.sale_id.partner_shipping_id\n invoice_partner = picking_out.sale_id.partner_invoice_id\n carrier_name = picking_out.sale_id.carrier_id and picking_out.sale_id.carrier_id.ads_ref or ''\n\n # Delivery method can also be added as a move line, so find all move lines whose products\n # are the delivery products of a delivery method and save IDS and ads ref for later\n carrier_move_ids = []\n if not carrier_name:\n carrier_obj = picking_out.pool['delivery.carrier']\n product_obj = picking_out.pool['product.product']\n\n product_ids = [move.product_id.id for move in picking_out.move_lines if move.product_id]\n carrier_map = product_obj.is_delivery_method(picking_out._cr, 1, product_ids)\n\n carrier_product_ids = [k for k, v in carrier_map.iteritems() if v]\n carrier_move_ids = [move.id for move in picking.move_lines if move.product_id and move.product_id.id in carrier_product_ids]\n\n for move in picking_out.move_lines:\n if move.id in carrier_move_ids:\n carrier = carrier_obj.browse(picking_out._cr, 1, carrier_map[move.product_id.id][0])\n carrier_name = carrier.ads_ref or ''\n\n so_data = {\n # general\n 'NUM_CMDE': picking.ads_send_number and picking_out.sale_id.name + '-' + str(picking.ads_send_number) or picking_out.sale_id.name,\n 'NUM_FACTURE_BL': picking_out.name,\n 'DATE_EDITION': convert_date(picking_out.date),\n 'MONTANT_TOTAL_TTC': picking_out.sale_id.amount_total,\n 'DATE_ECHEANCE': convert_date(picking_out.min_date),\n 'TYPE_ENVOI': carrier_name,\n\n # invoice_partner address and contact\n 'SOCIETE_FAC': invoice_partner.is_company and invoice_partner.name or '',\n 'NOM_CLIENT_FAC': invoice_partner.name or '',\n 'ADR1_FAC': invoice_partner.street or '',\n 'ADR2_FAC': invoice_partner.street2 or '',\n 'CP_FAC': invoice_partner.zip or '',\n 'VILLE_FAC': invoice_partner.city or '',\n 'ETAT_FAC': invoice_partner.state_id and invoice_partner.state_id.name or '',\n 'PAYS_FAC': invoice_partner.country_id and invoice_partner.country_id.name or '',\n 'CODE_ISO_FAC': invoice_partner.country_id and invoice_partner.country_id.code or '',\n\n # delivery address and contact\n 'SOCIETE_LIV': shipping_partner.is_company and shipping_partner.name or '',\n 'NOM_CLIENT_LIV': shipping_partner.name or '',\n 'ADR1_LIV': shipping_partner.street or '',\n 'ADR2_LIV': shipping_partner.street2 or '',\n 'CP_LIV': shipping_partner.zip or '',\n 'VILLE_LIV': shipping_partner.city or '',\n 'ETAT_LIV': shipping_partner.state_id and shipping_partner.state_id.name or '',\n 'PAYS_LIV': shipping_partner.country_id and shipping_partner.country_id.name or '',\n 'CODE_ISO_LIV': shipping_partner.country_id and shipping_partner.country_id.code or '',\n 'TELEPHONE_LIV': shipping_partner.phone or u'no_phone',\n 'EMAIL_LIV': shipping_partner.email or u'[email protected]',\n }\n\n # asserts for required data\n required_data = {\n 'NUM_CMDE': 'The picking was not created by a sales order',\n 'NUM_FACTURE_BL': 'This should never happen - please contact OpenERP',\n 'NOM_CLIENT_FAC': 'Invoice partner name',\n 'ADR1_FAC': 'Invoice partner address line 1',\n 'CP_FAC': 'Invoice partner zip',\n 'VILLE_FAC': 'Invoice partner city',\n 'CODE_ISO_FAC': 'Invoice partner country',\n 'NOM_CLIENT_LIV': 'Shipping partner name',\n 'ADR1_LIV': 'Shipping partner address line 1',\n 'CP_LIV': 'Shipping partner zip',\n 'VILLE_LIV': 'Shipping partner city',\n 'CODE_ISO_LIV': 'Shipping partner 
country',\n 'TELEPHONE_LIV': 'Shipping partner phone',\n 'MONTANT_TOTAL_TTC': 'This should never happen - please contact OpenERP',\n }\n\n missing_data = {}\n for field in required_data:\n if not so_data[field]:\n missing_data[field] = required_data[field]\n\n if missing_data:\n message = _('While processing sales order %s and picking_out %s there was some data missing for the following required fields:' \\\n % (so_data['NUM_CMDE'], so_data['NUM_FACTURE_BL'])) + '\\n\\n' \\\n + \"\\n\".join(sorted(['- ' + _(missing_data[data]) for data in missing_data]))\\\n + '\\n\\n' + _('These fields must be filled before we can continue')\n raise osv.except_osv(_('Missing Required Data'), message)\n\n self.insert_data('order', so_data)\n\n line_seq = 1\n for move in picking_out.move_lines:\n\n # skip lines that are cancelled, or don't have a product, or have a discount, delivery method or service product\n if move.state == 'cancel' \\\n or not move.product_id \\\n or move.id in carrier_move_ids \\\n or move.product_id.discount \\\n or move.product_id.type == 'service':\n continue\n\n # Raise error if missing x_new_ref\n if not move.product_id.x_new_ref:\n raise osv.except_osv(_('Missing Reference'), _('Product \"%s\" on picking_out \"%s\" is missing an IP Reference. One must be entered before we can continue.') % (move.product_id.name, picking_out.name) )\n\n line = {\n 'NUM_FACTURE_BL': picking_out.name,\n 'CODE_ART': move.product_id.x_new_ref,\n 'LIBELLE_ART': move.product_id.name or '',\n 'QTE': move.product_qty,\n 'OBLIGATOIRE': '1',\n }\n self.insert_data('order.articles.line', line)\n line_seq += 1\n\n return self",
"def testGetShippingMethod_1(self):\n sm = IShippingMethodManagement(self.shop)\n result = sm.getShippingMethod(\"standard\") \n\n self.assertEqual(result.getId(), \"standard\")",
"def import_orders_button(cls, store_views):\n pass",
"def get_all_gateways():\n\n gateways: List[Gateway] = []\n with open(\"/home/agora/Documents/Popular_paths/Data/saint_foy/saint_foy/cells_abcd.csv\", \"r\") as f:\n csv_reader = csv.reader(f, delimiter = ';')\n for line in csv_reader:\n if line[0] == 'ci': # Title line\n pass\n else:\n gateways.append(Gateway(\n line[0], # ID\n line[1], # LAC\n line[2], # NIDT\n line[3], # Azimuth\n line[5], # Longitude\n line[6] # Latitude\n ))\n return gateways",
"def test_get_eligible_shipment_services(self):\n pass",
"def main():\n my_taxi = SilverServiceTaxi(\"Stretch limo\", 100, 2)\n my_taxi.drive(18)\n print_fare_details(my_taxi)\n my_taxi.start_fare()\n my_taxi.add_fuel(40)\n my_taxi.drive(100)\n print_fare_details(my_taxi)",
"def refresh(self):\n self.__dict__ = self._api.get_payment_gateways(id=self.id).__dict__",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def _register_api(app):\n\n app.add_url_rule('/shipping/',\n \"put_shipping_method\", put_shipping_method, methods=['PUT'])",
"def hydrate_from_rolodex(sender, instance, **kwargs):\n instance.sync_with_rolodex()",
"def GetOpsRates():\n return GetDataFromCsvFile('ops_rates.csv')",
"def testGetShippingMethod_2(self):\n sm = IShippingMethodManagement(self.shop)\n result = sm.getShippingMethod(\"dummy\")\n\n self.failUnless(result is None)",
"def test_get_eligible_shipment_services_old(self):\n pass",
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def pricing_import(request, simulation):\n try:\n # Get all pricing policies for this usertype.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # Get all links of the network.\n links = get_query('link', simulation)\n # Get all LinkSelection of the network.\n locations = LinkSelection.objects.filter(\n network=simulation.scenario.supply.network\n )\n # Get all usertypes.\n usertypes = get_query('usertype', simulation)\n # Get an empty Vector or create one if there is none.\n if Vector.objects.filter(data='').exists():\n empty_vector = Vector.objects.filter(data='')[0]\n else:\n empty_vector = Vector(data='')\n empty_vector.save()\n # Convert the imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n if encoded_file.name.split(\".\")[-1] == 'tsv':\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n else:\n reader = csv.DictReader(tsv_file, delimiter=',')\n # For each imported OD pair, if the pair already exists in t\n if 'traveler_type' in reader.fieldnames:\n has_type = True\n else:\n has_type = False\n if 'times' in reader.fieldnames:\n has_times = True\n else:\n has_times = False\n # For each imported link, if a Policy exists for the link, baseValue is\n # updated, else a new Policy is created.\n for row in reader:\n # Get link of current row.\n link = links.get(user_id=row['link'])\n # Get or create a LinkSelection associated with the link.\n if locations.filter(link=link).exists():\n # Take first matching LinkSelection.\n location = locations.filter(link=link)[0]\n else:\n # Create a LinkSelection for the current link.\n # Name and user_id of the Link Selection are set to the name\n # and user_id of the link.\n location = LinkSelection(\n network=simulation.scenario.supply.network,\n name=link.name,\n user_id=link.user_id,\n )\n location.save()\n location.link.add(link)\n # Get or create a pricing Policy with the corret LinkSelection\n # object.\n try:\n toll = tolls.get(location=location)\n except Policy.DoesNotExist:\n # Create a new toll with default values.\n toll = Policy(location=location, type='PRICING', usertype=None,\n valueVector=empty_vector,\n timeVector=empty_vector)\n toll.save()\n toll.scenario.add(simulation.scenario)\n # Update affected traveler type.\n toll.usertype = None\n if has_type:\n try:\n toll.usertype = usertypes.get(user_id=row['traveler_type'])\n except (UserType.DoesNotExist, ValueError):\n pass\n # Update values.\n values = row['values'].split(',')\n # First value is baseValue.\n toll.baseValue = float(values[0])\n if len(values) > 1:\n # Remaining values are stored in valueVector (as a string of\n # comma separated values).\n values = [str(float(x)) for x in values]\n v = Vector(data=','.join(values[1:]))\n v.save()\n toll.valueVector = v\n else:\n toll.valueVector = empty_vector\n # Update times.\n toll.timeVector = empty_vector\n if has_times:\n times = row['times'].split(',')\n if times[0] != ' ' and times[0]:\n # There is at least one value, store it in timeVector.\n times = [str(int(x)) for x in times]\n v = Vector(data=','.join(times))\n v.save()\n toll.timeVector = v\n toll.save()\n return HttpResponseRedirect(reverse(\n 'metro:pricing_main', args=(simulation.id,)\n ))\n except Exception as e:\n # Catch any exception while importing the file and return an error page\n # if there is any.\n print(e)\n context = {\n 'simulation': simulation,\n 'object': 'pricing',\n }\n return render(request, 'metro_app/import_error.html', 
context)",
"def _prepare_wsdl_objects(self):\r\n\r\n\t# Default behavior is to not request transit information\r\n\tself.ReturnTransitAndCommit = False\r\n\r\n # This is the primary data structure for processShipment requests.\r\n self.RequestedShipment = self.client.factory.create('RequestedShipment')\r\n self.RequestedShipment.ShipTimestamp = datetime.now()\r\n \r\n TotalWeight = self.client.factory.create('Weight')\r\n # Start at nothing.\r\n TotalWeight.Value = 0.0\r\n # Default to pounds.\r\n TotalWeight.Units = 'LB'\r\n # This is the total weight of the entire shipment. Shipments may\r\n # contain more than one package.\r\n self.RequestedShipment.TotalWeight = TotalWeight\r\n \r\n # This is the top level data structure for Shipper information.\r\n ShipperParty = self.client.factory.create('Party')\r\n ShipperParty.Address = self.client.factory.create('Address')\r\n ShipperParty.Contact = self.client.factory.create('Contact')\r\n \r\n # Link the ShipperParty to our master data structure.\r\n self.RequestedShipment.Shipper = ShipperParty\r\n\r\n # This is the top level data structure for Recipient information.\r\n RecipientParty = self.client.factory.create('Party')\r\n RecipientParty.Contact = self.client.factory.create('Contact')\r\n RecipientParty.Address = self.client.factory.create('Address')\r\n \r\n # Link the RecipientParty object to our master data structure.\r\n self.RequestedShipment.Recipient = RecipientParty\r\n \r\n Payor = self.client.factory.create('Payor')\r\n # Grab the account number from the FedexConfig object by default.\r\n Payor.AccountNumber = self._config_obj.account_number\r\n # Assume US.\r\n Payor.CountryCode = 'US'\r\n \r\n ShippingChargesPayment = self.client.factory.create('Payment')\r\n ShippingChargesPayment.Payor = Payor\r\n\r\n self.RequestedShipment.ShippingChargesPayment = ShippingChargesPayment\r\n \r\n # ACCOUNT or LIST\r\n self.RequestedShipment.RateRequestTypes = ['ACCOUNT'] \r\n \r\n # Start with no packages, user must add them.\r\n self.RequestedShipment.PackageCount = 0\r\n self.RequestedShipment.RequestedPackageLineItems = []\r\n \r\n # This is good to review if you'd like to see what the data structure\r\n # looks like.\r\n self.logger.debug(self.RequestedShipment)",
"def default_export_(self, fields):\n Store = Pool().get('magento.website.store')\n\n store = Store(Transaction().context.get('active_id'))\n\n return {\n 'products_count': store.export_tier_prices_to_magento()\n }",
"def test_create_shipment(self):\n pass",
"def __init__(self, id=None, customer_id=None, region=None, region_id=None, country_id=None, street=None, company=None, telephone=None, fax=None, postcode=None, city=None, firstname=None, lastname=None, middlename=None, prefix=None, suffix=None, vat_id=None, default_shipping=None, default_billing=None, extension_attributes=None, custom_attributes=None):\n\n self._id = None\n self._customer_id = None\n self._region = None\n self._region_id = None\n self._country_id = None\n self._street = None\n self._company = None\n self._telephone = None\n self._fax = None\n self._postcode = None\n self._city = None\n self._firstname = None\n self._lastname = None\n self._middlename = None\n self._prefix = None\n self._suffix = None\n self._vat_id = None\n self._default_shipping = None\n self._default_billing = None\n self._extension_attributes = None\n self._custom_attributes = None\n\n if id is not None:\n self.id = id\n if customer_id is not None:\n self.customer_id = customer_id\n if region is not None:\n self.region = region\n if region_id is not None:\n self.region_id = region_id\n if country_id is not None:\n self.country_id = country_id\n if street is not None:\n self.street = street\n if company is not None:\n self.company = company\n if telephone is not None:\n self.telephone = telephone\n if fax is not None:\n self.fax = fax\n if postcode is not None:\n self.postcode = postcode\n if city is not None:\n self.city = city\n if firstname is not None:\n self.firstname = firstname\n if lastname is not None:\n self.lastname = lastname\n if middlename is not None:\n self.middlename = middlename\n if prefix is not None:\n self.prefix = prefix\n if suffix is not None:\n self.suffix = suffix\n if vat_id is not None:\n self.vat_id = vat_id\n if default_shipping is not None:\n self.default_shipping = default_shipping\n if default_billing is not None:\n self.default_billing = default_billing\n if extension_attributes is not None:\n self.extension_attributes = extension_attributes\n if custom_attributes is not None:\n self.custom_attributes = custom_attributes",
"def export_tier_prices_to_magento(self):\n instance = self.website.instance\n\n for mag_product_template in self.website.magento_product_templates:\n product_template = mag_product_template.template\n product = product_template.products[0]\n\n # Get the price tiers from the product if the product has a price\n # tier table else get the default price tiers from current store\n price_tiers = product_template.price_tiers or self.price_tiers\n\n price_data = []\n for tier in price_tiers:\n if hasattr(tier, 'product'):\n # The price tier comes from a product, then it has a\n # function field for price, we use it directly\n price = tier.price\n else:\n # The price tier comes from the default tiers on store,\n # we dont have a product on tier, so we use the current\n # product in loop for computing the price for this tier\n price = self.price_list.compute(\n None, product, product.list_price, tier.quantity,\n self.website.default_uom\n )\n\n price_data.append({\n 'qty': tier.quantity,\n 'price': float(price),\n })\n\n # Update stock information to magento\n with magento.ProductTierPrice(\n instance.url, instance.api_user, instance.api_key\n ) as tier_price_api:\n tier_price_api.update(\n mag_product_template.magento_id, price_data\n )\n\n return len(self.website.magento_product_templates)"
] | [
"0.5449542",
"0.5294955",
"0.5138079",
"0.48752856",
"0.48261753",
"0.4798459",
"0.47784653",
"0.47548106",
"0.47265235",
"0.4684022",
"0.46793112",
"0.46229893",
"0.4609243",
"0.46083152",
"0.45978007",
"0.45886204",
"0.45643952",
"0.45614693",
"0.45093706",
"0.4497847",
"0.44845107",
"0.44802022",
"0.44735134",
"0.44669178",
"0.4449618",
"0.44490018",
"0.44435835",
"0.44379145",
"0.44297564",
"0.44200203"
] | 0.728472 | 0 |
Sets default root category id. Is set to 1, because the default root category is 1 | def default_magento_root_category_id():
return 1 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def init_id(root: TreeNode):\n current_id = [0]\n init_id_helper(root, current_id)\n return current_id[0]",
"def PrimaryCategory(self, default=None):\n return self.data.get('categories', [default])[0]",
"def root(self) -> 'Category':\n if self.parent:\n return self.parent.root()\n else:\n return self",
"def _set_msti_root_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"msti-root-id\", rest_name=\"msti-root-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='bridge-id-type', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"msti_root_id must be of a type compatible with bridge-id-type\"\"\",\n 'defined-type': \"brocade-xstp-ext:bridge-id-type\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"msti-root-id\", rest_name=\"msti-root-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-xstp-ext', defining_module='brocade-xstp-ext', yang_type='bridge-id-type', is_config=True)\"\"\",\n })\n\n self.__msti_root_id = t\n if hasattr(self, '_set'):\n self._set()",
"def save(self, setroot = False, force_insert = False, force_update = False, *a, **kw):\n inserted = False\n if self.parent is None:\n self.is_root = True\n else:\n self.is_root = False\n if not setroot:\n if self.pk and self.parent and self.root \\\n and self.root.pk != self.parent.root.pk:\n change_subtree_root(self, self.parent.root)\n try:\n self.root = self.parent.root\n except Category.DoesNotExist:\n self.root = None\n\n if not self.slug or self.slug == '':\n self.slug = slugify(self.name)\n\n if self.is_root and self.root is None:\n if not self.pk:\n inserted = True\n super(Category, self).save(force_insert = force_insert, *a, **kw)\n self.root = self\n\n if inserted:\n super(Category, self).save(force_update = True, *a, **kw)\n else:\n super(Category, self).save(force_insert = force_insert, force_update = force_update, *a, **kw)",
"def get_default_category():\n return Category.objects.get_or_create(name='Unknown')[0]",
"def rootid(self):\n candidates = [nid for nid, attrs\n in self.graph.nodes.items()\n if attrs['type'] == 'root']\n \n if len(candidates) > 1:\n errmsg = self.name + ' has more than one root'\n raise ValueError(errmsg)\n\n if len(candidates) == 0:\n errmsg = self.name + ' has no root'\n raise ValueError(errmsg) \n \n return candidates[0]",
"def category_id(self, category_id):\n\t\treturn CategoryId(self.connection, self.base_uri, category_id)",
"def category_id(self, category_id):\n\n self._category_id = category_id",
"def set_root(self, root):\n self.root_path = root",
"def test_set_category_parent(self):\n pass",
"def _setRoot(self):\n if self.state == BaseEditMode.ES_None:\n sheet = self._logic._getSheet()\n layout = sheet.getLayoutGroup()\n \n import suit.core.layout.LayoutGroupRadial as radialLayout\n if not isinstance(layout, radialLayout.LayoutGroupRadialSimple): \n return\n \n objs = sheet.getSelected()\n # get object for root changing \n if len(objs) == 1: \n layout.setRoot(objs[0])",
"def set_root(self, root):\n self.root = root\n self.sites = [root]",
"def set_CategoryParent(self, value):\n super(GetCategoriesInputSet, self)._set_input('CategoryParent', value)",
"def set_root(self):\n try:\n _check_call(_LIB.TreeliteTreeBuilderSetRootNode(\n self.tree.handle,\n ctypes.c_int(self.node_key)))\n except AttributeError:\n raise TreeliteError('This node has never been inserted into a tree; '\\\n + 'a node must be inserted before it can be a root')",
"def set_category(self, frontmatter):\n gcates = self._global_categories\n cate_name = ''\n segments = self.path.split(os.path.sep)\n if len(segments) > 2:\n cate_name = segments[1].lower()\n else:\n cate_name = 'uncategorized'\n if cate_name not in gcates:\n gcates[cate_name] = Category(name=cate_name, config=self._config)\n this_cate = gcates[cate_name]\n this_cate.notes.append(self)\n this_cate.count += 1\n self.category = this_cate\n\n # for key in frontmatter:\n # if key.strip().lower().startswith('cate'):\n # # public\n # self.category = frontmatter[key]\n # return\n # self.category = 'general'",
"def init_id_helper(node: TreeNode, current_id: List[int]) -> None:\n node.id = current_id[0]\n current_id[0] += 1\n if not isinstance(node, TreeNode):\n return\n init_id_helper(node.left, current_id)\n init_id_helper(node.right, current_id)",
"def set_rootname(self, root):\n self.rootname = root",
"def _get_msti_root_id(self):\n return self.__msti_root_id",
"def set_root(self, root):\n self.root = root\n if self.root is not None:\n correct_type(root, Tag)",
"def _default_parent_id(self):\n active_id = self.env.context.get('active_id')\n if not active_id:\n return False\n ticket = self.browse(active_id)\n if not ticket.parent_id:\n return active_id\n elif not ticket.type.has_children:\n return ticket.parent_id.id\n else:\n return active_id",
"def _getNewCatId(self):\n\n newCatId = COCO_PLUS.CAT_ID\n COCO_PLUS.CAT_ID += 1\n\n return newCatId",
"def tree_id(self):\n if self.is_root:\n return 0\n elif self._link is not None:\n return self._link.tree_id\n else:\n return self._tree_id",
"def set_id(self, uid):\n self.nccl_id = uid\n return self.nccl_id",
"def root_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"root_id\")",
"def assign_root_categories(\n graph: nx.Graph,\n wiki_data: Dict[str, List],\n mapping: Dict[str, List[str]],\n name: str,\n):\n inverse_mapping = {}\n for category, subcategories in mapping.items():\n for subcategory in subcategories:\n inverse_mapping[subcategory.lower()] = category.lower()\n\n names_to_categories = dict(zip(wiki_data[\"name\"], wiki_data[\"categories\"]))\n for node in graph.nodes:\n graph.nodes[node][name] = []\n\n for category in names_to_categories[node]:\n if category in inverse_mapping:\n graph.nodes[node][name].append(inverse_mapping[category])",
"def _root(self, ind):\n while (ind != self._id[ind]):\n #make every other node in path to point to its grandparent\n self._id[ind] = self._id[self._id[ind]]\n ind = self._id[ind]\n return ind",
"def _set_default_node(self, key):\n if key not in self._key_to_node_index:\n self._key_to_node_index[key] = self._graph.add_node(NodeData(key=key, equivs=[]))\n return self._key_to_node_index[key]",
"def tree_id(self, value):\n self._tree_id = value",
"def categories_id(self, categories_id):\n\n self._categories_id = categories_id"
] | [
"0.65238833",
"0.60481906",
"0.5950954",
"0.5714606",
"0.5712186",
"0.5594999",
"0.55752224",
"0.55726135",
"0.5566532",
"0.5562183",
"0.55386394",
"0.55160403",
"0.5515742",
"0.5502178",
"0.5425077",
"0.5389408",
"0.5376485",
"0.53576773",
"0.5263281",
"0.52428555",
"0.5228024",
"0.5214175",
"0.5205183",
"0.5159024",
"0.51530343",
"0.5151055",
"0.5140587",
"0.51287645",
"0.51241446",
"0.51135266"
] | 0.82208395 | 0 |
Sets default product uom for website | def default_default_uom():
ProductUom = Pool().get('product.uom')
return ProductUom.search([('name', '=', 'Unit')])[0].id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_default_product():\n return Product.objects.get_or_create(name='Unknown', category=get_default_category())",
"def setUp(self) -> None:\n self.default = Product('Test Product')\n self.tester = Product('Tester', price=15, weight=2)",
"def set_product(self, product):\n self.single_selection_from_static_kendo_dropdown(self.product_kendo_dropdown_locator, product)",
"def set_sms_product(self, product):\n self.single_selection_from_static_kendo_dropdown(self.sms_product_kendo_dropdown_locator, product)",
"def product(self) -> str:\n return pulumi.get(self, \"product\")",
"def product_name(self) -> Optional[str]:\n return pulumi.get(self, \"product_name\")",
"def uom(self, uom):\n\n self._uom = uom",
"def user_set_default_license(request):\n dbsession = DBSession()\n\n user_id = request.matchdict['user_id']\n user = User.get_by_user_id(user_id)\n\n form = Form(request, schema=UserDefaultLicenseSchema, obj=user)\n\n if 'form.submitted' in request.POST and form.validate():\n request.session.flash('form validated!')\n\n return {\n 'the_user_id': user_id,\n 'the_username': user.username,\n 'form': FormRenderer(form),\n }",
"def setup_prod():\n setup_general()",
"def __init__(self, hass, config, product, tipo, uid):\n super().__init__(config)\n self.hass = hass\n self._name = config.get(\"name\")\n self._id = uid\n self._product = product\n self._type = tipo",
"def test_default_product_price(self):\n prod = product('Test Product')\n self.assertEqual(prod.price, 10)\n self.assertEqual(prod.weight, 20)",
"def __init__(self, driver, log, allure):\n super().__init__(driver, log, allure)\n self.__product_sort = BaseSelect(driver, HomeLocator.product_sort, log)\n self.__products = self.get_elements(\n driver, HomeLocator.product_list, log)\n self.__products_add_cart = self.get_elements(\n driver, HomeLocator.products_add_cart, log)\n msg = \"Enter to Home page\"\n with self.allure.step(msg):\n self.allure.attach_image(self.driver, msg)",
"def _set_product_type(self) -> None:\n # Get MTD XML file\n prod_type = self.split_name[2][:3]\n self.product_type = getattr(Sv1ProductType, prod_type)\n\n # Manage not orthorectified product\n if self.product_type == Sv1ProductType.L1B:\n self.is_ortho = False",
"def setUA(self, useragent):\n\t\tpass",
"def productactivate():\n pass",
"def on_product(self, model, fqname, event_name, key):\r\n self.product.SetLabel(\"%s %s\" % (self.model.article, self.model.sn))",
"def _onchange_product_id(self):\n if not self.product_id:\n return\n else :\n thisid = self.search([\n ('product_id', '=', self.product_id.id),\n\n ], order='id', limit=1)\n # return {\n # 'type': 'ir.actions.act_window',\n # 'res_model': 'rental.shopify_product',\n # 'views': [[False, 'form']],\n # 'res_id': thisid.id,\n # }\n\n\n self.update({\n 'is_Edit' : True,\n 'edit_id' : thisid.id,\n 'shopify_product_title': self.product_id.title,\n 'rental_pricing_ids' : thisid.rental_pricing_ids\n\n })",
"def set_default_unit(unit):\n self.default_unit = unit",
"def emulate_off_api_manager_products(cls):\n cls.products = OFF_API_FILTERED_PRODUCTS",
"def product(self):\n return self.appName",
"def __defaultSuit(self):\n self.type = 's'\n self.name = 'ds'\n self.dept = getSuitDept(self.name)\n self.body = getSuitBodyType(self.name)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)\n self.assertEqual(prod.weight, 20)\n self.assertEqual(prod.flammability, 0.5)",
"def test_default_product_price(self):\r\n prod = Product('Test Product')\r\n self.assertEqual(prod.price, 10)",
"def get_product_id(self, field_name='PRODUCT_ID'):\n return self.get_default(field_name)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)",
"def test_default_product_price(self):\n prod = Product('Test Product')\n self.assertEqual(prod.price, 10)"
] | [
"0.56517774",
"0.56510484",
"0.563105",
"0.54381317",
"0.5406732",
"0.5265254",
"0.52629864",
"0.52540123",
"0.52490556",
"0.5216789",
"0.52122056",
"0.519658",
"0.51876277",
"0.5182377",
"0.5166348",
"0.5162013",
"0.51297545",
"0.50542104",
"0.5047263",
"0.5036484",
"0.50230443",
"0.5019622",
"0.5010306",
"0.50102836",
"0.50046134",
"0.50046134",
"0.50046134",
"0.50046134",
"0.50046134",
"0.50046134"
] | 0.64440674 | 0 |
Looks for the website whose `values` are sent by magento against the instance with `instance` in tryton. If a record exists for this, return that else create a new one and return | def find_or_create(cls, instance, values):
websites = cls.search([
('instance', '=', instance.id),
('magento_id', '=', int(values['website_id']))
])
if websites:
return websites[0]
return cls.create([{
'name': values['name'],
'code': values['code'],
'instance': instance.id,
'magento_id': int(values['website_id']),
}])[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_or_create(cls, website, values):\n stores = cls.search([\n ('website', '=', website.id),\n ('magento_id', '=', int(values['group_id']))\n ])\n\n if stores:\n return stores[0]\n\n return cls.create([{\n 'name': values['name'],\n 'magento_id': int(values['group_id']),\n 'website': website.id,\n }])[0]",
"def GetWebSiteInfo():\n if len(AppSettings.objects.filter(name='WebSiteName')) > 0:\n WebSiteInfo.WebSiteName = AppSettings.objects.filter(name='WebSiteName')[0].value\n if len(AppSettings.objects.filter(name='ICP')) > 0:\n WebSiteInfo.ICP = AppSettings.objects.filter(name='ICP')[0].value\n if len(AppSettings.objects.filter(name='ICP_address')) > 0:\n WebSiteInfo.ICP_address = AppSettings.objects.filter(name='ICP_address')[0].value\n if len(AppSettings.objects.filter(name='Copyright')) > 0:\n WebSiteInfo.Copyright = AppSettings.objects.filter(name='Copyright')[0].value\n if len(AppSettings.objects.filter(name='Address')) > 0:\n WebSiteInfo.Address = AppSettings.objects.filter(name='Address')[0].value\n if len(AppSettings.objects.filter(name='Phone')) > 0:\n WebSiteInfo.Phone = AppSettings.objects.filter(name='Phone')[0].value",
"def get_instance(self, name):\n return self.website.instance.id",
"def find_or_create(cls, store, values):\n store_views = cls.search([\n ('store', '=', store.id),\n ('magento_id', '=', int(values['store_id']))\n ])\n\n if store_views:\n return store_views[0]\n\n return cls(**{\n 'name': values['name'],\n 'code': values['code'],\n 'store': store.id,\n 'magento_id': int(values['store_id']),\n })",
"def update_instance_url(setting):\n site_obj = Site.objects.all().order_by('id').first()\n site_obj.domain = setting.value\n site_obj.save()",
"def get_best_matching_web_site(self):\n\n # first fill the web_df columns we need for ranking\n for i_web, (url_key, url_info) in enumerate(self.collection.items()):\n index = url_info.index\n if url_info.url_analyse is None:\n logger.warning(\"url {url_key} yielded None analyse. Skip to next\")\n continue\n exists = url_info.url_analyse.exists\n self.company_urls_df.loc[index, URL_KEY] = url_info.url\n self.company_urls_df.loc[index, EXISTS_KEY] = exists\n if exists:\n self.company_urls_df.loc[index, DISTANCE_KEY] = url_info.match.distance\n self.company_urls_df.loc[index, STRING_MATCH_KEY] = url_info.match.string_match\n self.company_urls_df.loc[index, HAS_POSTCODE_KEY] = url_info.match.has_postcode\n self.company_urls_df.loc[index, HAS_KVK_NR] = url_info.match.has_kvk_nummer\n self.company_urls_df.loc[index, RANKING_KEY] = url_info.match.ranking\n self.company_urls_df.loc[index, DISTANCE_STRING_MATCH_KEY] = \\\n url_info.match.url_match\n\n # only select the web site which exist\n mask = self.company_urls_df[EXISTS_KEY]\n\n # create mask for web name distance\n if self.threshold_distance is not None:\n # select all the web sites with a minimum distance or one higher\n m1 = (self.company_urls_df[DISTANCE_KEY] - self.company_urls_df[\n DISTANCE_KEY].min()) <= self.threshold_distance\n else:\n m1 = mask\n\n # create mask for web string match\n if self.threshold_string_match is not None:\n m2 = self.company_urls_df[STRING_MATCH_KEY] >= self.threshold_string_match\n else:\n m2 = mask\n\n m3 = self.company_urls_df[HAS_POSTCODE_KEY]\n m4 = self.company_urls_df[HAS_KVK_NR]\n\n # we mask al the existing web page and keep all pages which are either with\n # a certain string distance (m1) or in case it has either the post code or kvk\n # number we also keep it\n mask = mask & (m1 | m2 | m3 | m4)\n\n # make a copy of the valid web sides\n self.company_urls_df = self.company_urls_df[mask].copy()\n\n self.company_urls_df.sort_values([RANKING_KEY, DISTANCE_STRING_MATCH_KEY], inplace=True,\n ascending=[False, True])\n self.logger.debug(\"Sorted list {}\".format(self.company_urls_df[[URL_KEY, RANKING_KEY]]))",
"def get_or_create(cls, url, title=None):\n try:\n return super().get(Source.url == url)\n except peewee.DoesNotExist:\n return cls.create(url=url, title=title)",
"def test_find_by_site_name(self):\n\n self.new_details.save_details()\n twitter = Details('Dennis', 'Facebook', 'Kiplangat', 'kiplangat18')\n twitter.save_details()\n details_exists = Details.find_by_site_name('Facebook')\n self.assertTrue(details_exists, twitter)",
"def get_instance(self, instance):\n\n title = list(instance.keys())[0]\n instance = instance.get(title)\n return instance",
"def get_site(self, name) -> Site:\n ah_write = self.get_iis_object()\n section = ah_write.GetAdminSection(\"system.applicationHost/sites\", \"MACHINE/WEBROOT/APPHOST\")\n collection = section.Collection\n\n for i in range(collection.Count):\n site = collection[i]\n prop = site.Properties\n site_name = prop[\"name\"].Value\n if site_name == name:\n #site_id = prop[\"id\"].Value\n default_app = self.get_default_app(site)\n bindings = self.get_site_bindings(site.ChildElements)\n apps = self.get_applications(site)\n\n return Site(name, bindings, default_app, apps)\n\n return None",
"def import_websites(cls, instances):\n Website = Pool().get('magento.instance.website')\n Store = Pool().get('magento.website.store')\n StoreView = Pool().get('magento.store.store_view')\n MagentoOrderState = Pool().get('magento.order_state')\n\n try:\n instance, = instances\n except ValueError:\n cls.raise_user_error('multiple_instances')\n\n with Transaction().set_context(magento_instance=instance.id):\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n MagentoOrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )\n\n # Import websites\n with Core(\n instance.url, instance.api_user, instance.api_key\n ) as core_api:\n websites = []\n stores = []\n\n mag_websites = core_api.websites()\n\n # Create websites\n for mag_website in mag_websites:\n websites.append(Website.find_or_create(\n instance, mag_website\n ))\n\n for website in websites:\n mag_stores = core_api.stores(\n {'website_id': {'=': website.magento_id}}\n )\n\n # Create stores\n for mag_store in mag_stores:\n stores.append(Store.find_or_create(website, mag_store))\n\n for store in stores:\n mag_store_views = core_api.store_views(\n {'group_id': {'=': store.magento_id}}\n )\n\n # Create store views\n for mag_store_view in mag_store_views:\n store_view = StoreView.find_or_create(\n store, mag_store_view\n )\n # AR refactoring\n store_view.save()",
"def _create_site_list(self):\n\n connector = Connectors(baseLocation = \"../data/sqlite3/\", dbName = \"radars.sqlite\",\n isInMem = False, isAutoSave = False)\n #command = '{:s}and tval>=? and tval<=? ORDER BY tval ASC'.format(command)\n #connector.cursor.execute(command, (self.rad_id, self.stm, self.etm))\n #rows = connector.cursor.fetchall()\n command = \"SELECT tval FROM hdw WHERE id=? \"\n command = '{:s}and tval>=? ORDER BY tval ASC'.format(command)\n connector.cursor.execute(command, (self.rad_id, self.stm))\n tvals_stm = connector.cursor.fetchall()\n tvals_stm = [x[0] for x in tvals_stm]\n\n command = \"SELECT tval FROM hdw WHERE id=? \"\n command = '{:s}and tval>=? ORDER BY tval ASC'.format(command)\n connector.cursor.execute(command, (self.rad_id, self.etm))\n tval_etm = connector.cursor.fetchone()[0]\n indx_etm = tvals_stm.index(tval_etm)\n\n # select the tvals of interest\n tvals = tvals_stm[:indx_etm+1]\n\n site_list = []\n for tval in tvals:\n site_list.append(site(code=self.rad, dt=tval))\n return site_list",
"def test_with_localsite_in_data_and_instance(self):\n config = IntegrationConfig.objects.create(\n integration_id=self.integration.integration_id)\n\n form = MyConfigForm(\n integration=self.integration,\n request=self.request,\n data={\n 'name': 'Test',\n 'my_conditions_last_id': '0',\n 'my_conditions_mode': 'all',\n 'my_conditions_choice[0]': 'review-groups',\n 'my_conditions_operator[0]': 'contains-any',\n 'my_conditions_value[0]': [self.local_site_1_group.pk],\n 'group': self.local_site_1_group.pk,\n 'local_site': self.local_site_1.pk,\n },\n instance=config)\n self.assertTrue(form.is_valid())\n\n new_config = form.save()\n self.assertEqual(config.pk, new_config.pk)\n self.assertEqual(new_config.local_site, self.local_site_1)",
"def get_site(self, sitename):\n return self.cache.get(sitename)",
"def get_site(self):\n raise NotImplementedError",
"def _get_existing_instance(self, related_model, value):\n if self.columns:\n result = (\n self.session.query(related_model)\n .filter_by(\n **{prop.key: value.get(prop.key) for prop in self.related_keys}\n )\n .one()\n )\n else:\n # Use a faster path if the related key is the primary key.\n lookup_values = [value.get(prop.key) for prop in self.related_keys]\n try:\n result = self.session.get(related_model, lookup_values)\n except TypeError:\n keys = [prop.key for prop in self.related_keys]\n raise self.make_error(\"invalid\", value=value, keys=keys)\n if result is None:\n raise NoResultFound\n return result",
"def get_site(name):\n return sites[name]",
"def mock_single_site_api() -> Generator:\n instance = Mock()\n site = Site(\"01FG0AGP818PXK0DWHXJRRT2DH\", \"11111111111\", [])\n instance.get_sites.return_value = [site]\n\n with patch(\"amberelectric.api.AmberApi.create\", return_value=instance):\n yield instance",
"def oi_get_current(self):\n from django.conf import settings\n try:\n sid = settings.SITE_ID\n except AttributeError:\n sid = None\n try:\n sname = settings.SITE_NAME\n except AttributeError:\n from django.core.exceptions import ImproperlyConfigured\n raise ImproperlyConfigured(\"You're using the Django \\\"sites framework\\\" without having set the SITE_ID setting. Create a site in your database and set either the SITE_ID or the SITE_NAME setting to fix this error.\")\n if sid:\n try:\n current_site = SITE_CACHE[sid]\n except KeyError:\n current_site = self.get(pk=sid)\n SITE_CACHE[current_site.name] = current_site\n SITE_CACHE[current_site.id] = current_site\n else:\n try:\n current_site = SITE_CACHE[sname]\n except KeyError:\n current_site = self.get(name=sname)\n SITE_CACHE[current_site.name] = current_site\n SITE_CACHE[current_site.id] = current_site\n\n return current_site",
"def create_website(self):\n url = self.get_user_url()\n check_interval = self.get_user_check_interval()\n website = self.get_website(url, check_interval)\n\n if website:\n return website",
"def get_website(self, name):\n return self.store.website.id",
"def get_site_to_check(self, filename):\n csv_file = csv.reader(open(filename, 'r'), delimiter=\";\")\n \n sites={}\n for row in csv_file:\n if len(row) == 0:\n continue\n site_url = row[0]\n sites[site_url] = {\"url\":site_url}\n \n sites[site_url][\"checks\"] = []\n for check in row[1:]:\n sites[site_url][\"checks\"].append(check)\n return sites",
"def item_duplicate():\n return {'name':'chair',\n 'value':300}",
"def clean_site(self, obj):\n\n # Site doesn't have an id to itself, so if obj is a Site, use it.\n # Otherwise get the value of the `.site`\n return obj if isinstance(obj, Site) else getattr(obj, 'site')",
"def collect_web_sites(self):\n min_distance = None\n max_sequence_match = None\n index_string_match = index_distance = None\n self.collection = collections.OrderedDict()\n for i_web, web_row in self.company_urls_df.iterrows():\n # get the url first from the websites table which list all the urls belonging to\n # one kvk search\n url = web_row[URL_KEY]\n\n # skip all none uls and also the filtered urls\n if url is None or url == \"\":\n logger.debug(\"Skipping url because it is None or empty\")\n continue\n if self.filter_urls and url not in self.filter_urls:\n logger.debug(f\"filter urls is given so skip {url}\")\n continue\n\n # store a list of UrlInfo object with a minimum info the url which was tested\n url_info = UrlInfo(index=i_web, url=url)\n self.collection[url] = url_info\n\n print_banner(f\"Processing {url}\")\n\n # quick check if we can processes this url based on the country code\n suffix = url_info.url_extract.suffix\n if suffix in self.exclude_extensions.index:\n url_info.outside_nl = True\n logger.info(f\"Web site {url} has suffix '.{suffix}' Continue \")\n\n # get the processing time of the last time you did this url from the table\n try:\n processing_time = self.urls_df.loc[url, DATETIME_KEY]\n except KeyError:\n processing_time = None\n\n if self.force_process or self.rescan_missing_urls:\n url_info.needs_update = True\n else:\n url_info.needs_update = check_if_url_needs_update(processing_time=processing_time,\n current_time=self.current_time,\n older_time=self.older_time)\n if url_info.needs_update:\n # if the url needs update, store the current time\n url_info.processing_time = self.current_time\n else:\n url_info.processing_time = processing_time\n\n url_analyse = self.scrape_url_and_store_in_dataframes(url, url_info)\n\n url_info.url_analyse = url_analyse\n\n if url_analyse and not url_analyse.exists:\n self.logger.debug(f\"url '{url}'' does not exist\")\n continue\n\n # based on the company postcodes and kvknummer and web contents, make a ranking how\n # good the web sides matches the company\n match = UrlCompanyRanking(url, self.company_name_small,\n url_extract=url_info.url_extract,\n url_analyse=url_analyse,\n company_kvk_nummer=self.kvk_nr,\n company_postcodes=self.postcodes,\n threshold_string_match=self.threshold_string_match,\n threshold_distance=self.threshold_distance,\n logger=self.logger)\n\n url_info.match = match\n\n # update the min max\n if min_distance is None or match.distance < min_distance:\n index_distance = i_web\n min_distance = match.distance\n\n if max_sequence_match is None or match.string_match > max_sequence_match:\n index_string_match = i_web\n max_sequence_match = match.string_match\n\n self.logger.debug(\" * {} - {} - {}\".format(url, match.ext.domain,\n match.distance))\n\n if min_distance is None:\n self.company_urls_df = None\n elif index_string_match != index_distance:\n self.logger.warning(\n \"Found minimal distance for {}: {}\\nwhich differs from \"\n \"best string match {}: {}\".format(index_distance,\n self.collection[url].url,\n index_string_match,\n self.collection[url].url))",
"def get_or_create_url(url, session=session, model=URL):\n\n instance = session.query(model).filter_by(**{'text': url}).first()\n if instance:\n return instance\n else:\n instance = create_url(url)\n return instance",
"def process_item(self, item, spider):\n sqlinsert = self.siteInsert.format(\n \tsite_name = pymysql.escape_string(item['site_name']),\n \tsite_url = pymysql.escape_string(item['site_url'])\n \t)\n\n self.cursor.execute(sqlinsert)\n return item\n # print(\"this is item in pipeline process_item\")\n # session = self.Session()\n # sitesdb = SitesDB()\n # print(\"this is sitesdb in pipeline process_item\")\n # sitesdb.site_name = item[\"site_name\"]\n # sitesdb.site_url = item[\"site_url\"]\n\n # try:\n # session.add(sitesdb)\n # session.commit()\n # except:\n # session.rollback()\n # raise\n # finally:\n # session.close()\n\n # return item",
"def soap_GetSiteInfoObject(self, ps):\n try:\n rsp = WaterOneFlow.soap_GetSiteInfoObject(self,ps)\n request = self.request\n #construct/renew the siteInfo_site_dictionary.\n #currently, renew it every 14 days (subject to change)?\n if(len(siteInfo_site_dictionary.keys())==1 or \n datetime.now() >= siteInfo_site_dictionary[\"__modTime\"] + timedelta(days=14)):\n #here has a possible race condition, so a lock is placed\n semaphore = open('/home/txhis/CBIService/semaphore/semaphore.file', \"w\")\n lock(semaphore, LOCK_EX)\n treeIter = getIterator(centralRegUrl)\n #save the srs information in dictionary\n buildSiteInfo_siteDictionary(treeIter,siteInfo_site_dictionary)\n siteInfo_site_dictionary[\"__modTime\"]=datetime.now()\n #close semaphore, release the lock file. (otherwise deadlock will be possible)\n semaphore.close()\n #get input parameter, save it in a string siteCode.\n siteCodeArray = map(str, request.get_element_site().split(\":\"))\n #print len(siteInfo_site_dictionary[\"001\"][1])\n if len(siteCodeArray) < 2 or\\\n not siteInfo_site_dictionary.has_key(siteCodeArray[1]) or \\\n not siteCodeArray[0].upper() == \"CBI\":\n fault = Fault(Fault.Client, \"Illegal SiteCode\", actor=\"SiteCode\", detail=\"site code \\\"%s\\\" is illegal/not found\" % \":\".join(siteCodeArray))\n raise fault\n else:\n #construct siteResponseNode XML node \n siteResponseNode = rsp.new_sitesResponse()\n #construct the queryInfo XML node part. It is ofen used, so make it a function\n genQueryInfo(siteResponseNode,\"location\",','.join(siteCodeArray)) \n #site List array (if multiple sites)\n siteList = []\n try:\n genSite_forGetSite(siteResponseNode,siteInfo_site_dictionary[siteCodeArray[1]][1][0],\n siteCode,siteInfo_site_dictionary[siteCodeArray[1]][0],siteInfo_site_dictionary['srs'],siteList)\n except:\n fault = Fault(Fault.Client, \"Illegal SiteCode\", actor=\"SiteCode\", detail=\"site code \\\"%s\\\" is illegal/not found\" % \":\".join(siteCodeArray)) \n raise fault\n #seriesCatalog node\n seriesCatalogNode = siteList[0].new_seriesCatalog()\n seriesCatalogNode._attrs = dict(menuGroupName=\"CBI Observation Data\", serviceWsdl = WSDLAddress) \n #here, to be completed\n seriresList = []\n for xmlNode in siteInfo_site_dictionary[siteCodeArray[1]][1]:\n seriresList.append(generateSeriesNodeObj(xmlNode,seriesCatalogNode))\n seriesCatalogNode.set_element_series(seriresList)\n siteList[0].set_element_seriesCatalog([seriesCatalogNode])\n siteResponseNode.set_element_site(siteList) \n rsp.set_element_sitesResponse(siteResponseNode) \n #here, how to generate a fault!!! \n except Exception, e:\n import traceback\n traceback.print_exc(file=sys.stdout) \n if isinstance(e,Fault):\n detail = None\n if e.detail is not None: \n detail = Detail()\n detail.any = e.detail \n rsp = FaultType(e.code, e.string, e.actor, detail)\n return rsp",
"def get_website(self):\n if self.website:\n return self.website\n else:\n try:\n return self.parent.get_website\n except AttributeError: # I think this is right \n return None",
"def find_vcs_instance_ept(self, country, amazon_seller, instance_dict):\n instance_obj = self.env['amazon.instance.ept']\n instance = instance_obj.browse( \\\n instance_dict.get((country.id, amazon_seller.id), False))\n if not instance:\n instance = amazon_seller.instance_ids.filtered( \\\n lambda x: x.country_id.id == country.id)\n if instance:\n instance_dict.update( \\\n {(country.id, amazon_seller.id): instance.id})\n return instance"
] | [
"0.66654605",
"0.5773132",
"0.5575794",
"0.52986217",
"0.5219435",
"0.5175775",
"0.5101473",
"0.50378966",
"0.50342923",
"0.50271636",
"0.497034",
"0.49112558",
"0.48881826",
"0.48387477",
"0.4782479",
"0.47679812",
"0.47377402",
"0.47104242",
"0.46826082",
"0.46774152",
"0.4675909",
"0.46691802",
"0.4639047",
"0.46321997",
"0.45945436",
"0.4594295",
"0.4587325",
"0.45872104",
"0.45841068",
"0.45733017"
] | 0.78474057 | 0 |
Exports inventory stock information to magento | def export_inventory(self, websites):
for website in websites:
website.export_inventory_to_magento() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_inventory_to_magento(self):\n Location = Pool().get('stock.location')\n\n product_templates = []\n instance = self.instance\n\n locations = Location.search([('type', '=', 'storage')])\n\n for magento_product_template in self.magento_product_templates:\n product_template = magento_product_template.template\n product_templates.append(product_template)\n\n with Transaction().set_context({'locations': map(int, locations)}):\n product_data = {\n 'qty': product_template.quantity,\n 'is_in_stock': '1' if product_template.quantity > 0\n else '0',\n }\n\n # Update stock information to magento\n with magento.Inventory(\n instance.url, instance.api_user, instance.api_key\n ) as inventory_api:\n inventory_api.update(\n magento_product_template.magento_id, product_data\n )\n\n return product_templates",
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def export_inventory(self, cursor, user, ids, context):\n website_obj = self.pool.get('magento.instance.website')\n\n website_id = context.get('active_id')\n t = threading.Thread(target=website_obj.export_inventory_to_magento,\n args=(cursor, user, website_id, context, True))\n t.daemon = True\n t.start()\n\n return True#self.open_products(cursor, user, map(int, products), context)",
"def export(self):\r\n self.prices[\"returns\"] = self.returns\r\n self.prices.columns = ['prices', 'returns']\r\n self.prices = self.prices.dropna()\r\n \r\n name = QFileDialog.getSaveFileName(None, 'Save File', filter='*.xlsx')\r\n if(name[0] == ''):\r\n # if name empty\r\n pass\r\n else:\r\n self.prices.to_excel(name[0])",
"def save_catalog(self):\n self.catalog.to_csv(self.catalog_path, index_label='dateTime')",
"def create_backup():\n with open('inventory_backup.csv', 'a') as csvfile:\n fieldnames = ['product_name', 'product_price', 'product_quantity', 'date_updated']\n backupwriter = csv.DictWriter(csvfile, fieldnames=fieldnames)\n\n backupwriter.writeheader()\n for item in Product.select():\n backupwriter.writerow({\n 'product_name': item.product_name,\n 'product_price': round(Decimal(item.product_price / 100), 2),\n 'product_quantity': item.product_quantity,\n 'date_updated': item.date_updated.strftime(\"%m/%d/%Y\")\n })\n print(\"\\nBackup created!\\n\")\n input(\"\\nPress ENTER to continue\")\n clear()",
"def readStockFromOG(self):\n # TODO If not in stock, do not display item\n self.og_stock = pd.read_csv('data/stock.csv')\n self.og_stock.to_csv('data/menu.csv', index = False)",
"def save_inventory(file_name, lst_Inventory):\r\n \r\n objFile = open(file_name, 'w')\r\n for row in lst_Inventory:\r\n lstValues = [cd_instance.cd_id, cd_instance.cd_title, cd_instance.cd_artist]\r\n lstValues[0] = str(lstValues[0])\r\n objFile.write(','.join(lstValues) + '\\n')\r\n objFile.close()",
"def getStockData():\n pass",
"def save_inventory(junos_module, inventory):\n if junos_module.conn_type == \"local\" :\n dev = junos_module.dev\n file_name = '%s-inventory.xml' % (dev.facts['hostname'])\n else:\n facts = junos_module._pyez_conn.get_facts()\n file_name = '%s-inventory.xml' % (facts['hostname'])\n if junos_module.params.get('savedir') is not None:\n save_dir = junos_module.params.get('savedir')\n file_path = os.path.normpath(os.path.join(save_dir, file_name))\n junos_module.logger.debug(\"Saving inventory to: %s.\", file_path)\n try:\n with open(file_path, 'wb') as fact_file:\n fact_file.write(to_bytes(inventory, encoding='utf-8'))\n junos_module.logger.debug(\"Inventory saved to: %s.\", file_path)\n except IOError:\n junos_module.fail_json(msg=\"Unable to save inventory. Failed to \"\n \"open the %s file.\" % (file_path))",
"def save_inventory(self, data, batch):\n logger.info('AddStockInventory inventory save initiated')\n with Transaction().start(DBNAME, 1) as transaction:\n transaction.context = config.get_config().context\n batch = batch\n location = self.Location.search(['name', '=', 'MyInventory'])[-1]\n inventory = self.Inventory()\n inventory.location = location\n inventory.batch_number = batch\n inventory.save()\n for i in data:\n product = self.Product.search([('code', '=', i['code']),\n ('description', '=', 'Stock'),\n ('type', '=', 'goods')])[-1]\n units = self.Uom.search(['name', '=', i['units']])[-1]\n supplier = self.Party.search(['name', '=', i['supplier']])[-1]\n inventory_line = self.InventoryLine()\n inventory_line.product = product\n inventory_line.quantity = float(i['quantity'])\n inventory_line.uom = units\n inventory_line.supplier = supplier\n inventory_line.expiry_date = i['expiry_date']\n inventory_line.inventory = inventory\n inventory_line.save()\n # transaction.cursor.commit()\n inventory.state = 'done'\n inventory.save()\n transaction.cursor.commit()\n return True",
"def default_export_(self, fields):\n Store = Pool().get('magento.website.store')\n\n store = Store(Transaction().context.get('active_id'))\n\n return {\n 'products_count': store.export_tier_prices_to_magento()\n }",
"def export_data(self):\r\n stocks = {}\r\n headings = ['Security', 'Price', 'Change', 'Change %', '52 Week', 'Market Cap']\r\n\r\n for data in range(6):\r\n for items in self.root.main.treeview.get_children():\r\n values = self.root.main.treeview.item(items, 'values')\r\n if headings[data] not in stocks:\r\n stocks[headings[data]] = []\r\n stocks.get(headings[data]).append(values[data])\r\n\r\n df = pd.DataFrame(stocks, columns=headings)\r\n path = tk.filedialog.asksaveasfilename(title='Save File As...',\r\n filetypes=((\"CComma-separated values (.csv)\", \"*.csv\"), (\"Text Document(.txt)\", \"*.txt\")))\r\n\r\n if not path:\r\n return\r\n else:\r\n df.to_excel(path, index=False, header=True)",
"def store_stock_data(stock_name = 'TSLA'):\n stonk = yf.Ticker(stock_name) # gets stock data from yahoo\n hist = stonk.history(period=\"max\") # historical stock prices\n hist.reset_index(inplace=True) # takes the date stamp out of the index column\n hist.rename(columns = {'Date':\"DateTime\"},inplace=True) # Changes the name of the date column\n hist['DateTime'] = pd.to_datetime(hist['DateTime'],utc=True) # Changes the timestamps to UTC\n hist.to_csv('../data/raw/'+stock_name+'_stock_price.csv')\n return",
"def update_stock(self, instance, export_stock_from_date):\n product_obj = self.env['product.product']\n woo_product_product_obj = self.env['woo.product.product.ept']\n\n if not export_stock_from_date:\n export_stock_from_date = datetime.now() - timedelta(30)\n odoo_products = product_obj.get_products_based_on_movement_date_ept(export_stock_from_date,\n instance.company_id)\n instance.last_inventory_update_time = datetime.now()\n woo_templates = woo_product_product_obj.search([('product_id', 'in', odoo_products), (\n 'woo_is_manage_stock', '=', True)]).woo_template_id.filtered(\n lambda x:x.woo_instance_id == instance and x.exported_in_woo == True)\n if woo_templates:\n self.with_context(updated_products_in_inventory=odoo_products).woo_update_stock(\n instance, woo_templates)\n else:\n _logger.info(\n \"==There is no product movement found between date time from: '%s' to '%s' for export stock.\" % (\n export_stock_from_date, datetime.now()))\n return True",
"def export_as_json(self):\n f = open(\"{}_historical_prices.json\".format(self.symbol), \"a\")\n f.write(self.stocks.json())\n f.close()",
"def store_inventory(self, batch, location, quantity, inventory_stock):\n # no transaction needed\n logger.info('ReleaseDiscard store inventory initiated')\n to_inventory = self.Inventory.search([('location', '=', location.id), ('batch_number', '=', batch)])\n if to_inventory:\n return self.update_store(to_inventory[0], quantity, inventory_stock)\n inventory = self.Inventory()\n inventory.location = location\n inventory.batch_number = batch\n inventory.save()\n inventory_line = self.InventoryLine()\n inventory_line.product = inventory_stock.product\n inventory_line.quantity = float(quantity)\n inventory_line.uom = inventory_stock.uom\n inventory_line.supplier = inventory_stock.supplier\n inventory_line.expiry_date = inventory_stock.expiry_date\n inventory_line.inventory = inventory\n inventory_line.save()\n # transaction.cursor.commit()\n inventory.state = 'done'\n inventory.save()\n return True",
"def SaveToCSV(self):\n import csv \n csvfile = open(f\"Cache/{self.symbol}.csv\", \"w\", newline='')\n writer = csv.writer(csvfile, delimiter=',')\n writer.writerow([self.symbol, self.name, self.market])\n writer.writerow(['Latest P/E Ratio:', self.pe_ratio])\n writer.writerow(['Short Percent of Float:', self.short_percent_of_float])\n writer.writerow(['Date', 'Price', 'Dividend', 'Annualized Dividend'])\n for snapshot in self._history:\n writer.writerow([snapshot.date.strftime(\"%m/%d/%Y\"), snapshot.price, snapshot.dividend, snapshot.annualDividend])\n csvfile.close()\n print(f\"{self.name} saved to /Cache/{self.symbol}.csv\")",
"def woo_update_stock(self, instance, woo_templates):\n common_log_obj = self.env[\"common.log.book.ept\"]\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n model = \"woo.product.product.ept\"\n model_id = common_log_line_obj.get_model_id(model)\n log_lines = []\n\n product_ids = woo_templates.mapped('woo_product_ids').mapped('product_id')\n product_stock = self.check_stock_type(instance, product_ids)\n variable_products = woo_templates.filtered(lambda x:x.woo_product_type == 'variable')\n simple_products = woo_templates.filtered(lambda x:x.woo_product_type == 'simple')\n if variable_products:\n log_lines += self.export_stock_variable_products(variable_products, product_stock,\n instance, model_id)\n if simple_products:\n log_lines += self.export_stock_simple_products(simple_products, product_stock, instance,\n model_id)\n\n instance.write({'last_inventory_update_time':datetime.now()})\n if log_lines:\n common_log_obj.create({\n 'type':'export',\n 'module':'woocommerce_ept',\n 'woo_instance_id':instance.id,\n 'active':True,\n 'log_lines':[(6, 0, log_lines)],\n })\n return True",
"async def inv(self, ctx):\r\n author = ctx.author\r\n with DB() as db:\r\n company = await self.get_active_company(ctx, db, author)\r\n stock = self.iex.get_held_stocks(db, company.id)\r\n inventory = []\r\n for s in stock:\r\n close = await self.get_latest_close(ctx, db, s.symbol)\r\n inventory.append([s.symbol, s.quantity, close.close * s.quantity])\r\n inv_df = pd.DataFrame(inventory, columns=['Symbol', 'Quantity', 'Value'])\r\n aggregated = tabulate(inv_df.groupby(['Symbol']).sum().reset_index(), headers=['Symbol', 'Quantity', 'Value'])\r\n await ctx.send(f'```{aggregated}```')",
"def run(self):\n page = self.fetch_data(self.url)\n stock_list = self.pop_stock_list(page)\n self.write_csv(stock_list)",
"def load_stock(self):\n lines = []\n with Transaction().start(DBNAME, 1):\n stock_lines = self.Inventory.search([('state', '=', 'done'), ('location', '=', self.location.id)])\n if stock_lines:\n for i in stock_lines:\n batch = i.batch_number\n for j in i.lines:\n if j.quantity <= 0:\n continue\n dictionary = {}\n dictionary['code'] = j.product.code\n dictionary['item'] = j.product.template.name\n dictionary[\n 'category'] = j.product.template.category.name if j.product.template.category else None\n dictionary['quantity'] = Decimal(j.quantity).quantize(Decimal('0.11')).to_eng()\n dictionary['batch_number'] = batch\n dictionary['supplier'] = j.supplier.name if j.supplier else None\n dictionary['expiry_date'] = j.expiry_date.strftime('%d-%m-%Y') if j.expiry_date else None\n lines.append(dictionary)\n return lines",
"def get_stock_data():\n if not os.path.exists('./catalog/stock_data'):\n os.mkdir('./catalog/stock_data')\n \n inventory_data = {}\n inventory_file = './catalog/stock_data/inventory-bro.txt'\n \n download_data = True\n if os.path.exists(inventory_file):\n # Check that inventory file is no more than 1 day old\n filestat = os.stat(inventory_file)\n tm = datetime.datetime.fromtimestamp(filestat.st_mtime)\n today = datetime.datetime.now()\n dt = today - tm\n if dt.days < 1:\n download_data = False\n \n if download_data:\n # Get inventory data from ftp site\n from ftplib import FTP_TLS\n print 'Downloading inventory-bro.txt ....'\n ftps = FTP_TLS('ftp.appareldownload.com')\n ftps.login('Br0d3r', 'Br0d3r2oll')\n ftps.prot_p()\n #ftps.retrlines('LIST')\n ftps.retrbinary('RETR inventory-bro.txt', open(inventory_file, 'wb').write)\n ftps.quit()\n \n print \"Parse inventory-bro.txt ... \"\n first_row = None\n for row in csv.reader(open(inventory_file, 'rb')):\n itemRef = row[4].lower()\n if itemRef == 'style number':\n # save first row to be used as column header\n first_row = row\n continue\n \n source_attribs = [{'attribute_type': 'source', 'attribute_value': 'broderbros'}]\n \n inventory_data.setdefault(itemRef, [])\n \n color = row[8].lower()\n size = row[10].lower()\n \n # Warehouses starts at column 13\n for i in range(13, len(first_row)):\n wh_name = first_row[i]\n options = [\n {'option_type': 'color', 'option_value': color, 'attributes': []},\n {'option_type': 'size', 'option_value': size, 'attributes': []},\n {'option_type': 'warehouse', 'option_value': wh_name, 'attributes': source_attribs, 'shared': True},\n {'option_type': 'vendor', 'option_value': 'broderbros', 'attributes': source_attribs, 'shared': True},\n ]\n inventory_data[itemRef].append({'options': options, 'inventory': row[i]})\n \n # Pricing data\n pricing_tarfile = \"./catalog/stock_data/bro-AllStyles_R06.tar.gz\"\n download_data = True\n if os.path.exists(pricing_tarfile):\n # Check that file is no more than 1 day old\n filestat = os.stat(pricing_tarfile)\n tm = datetime.datetime.fromtimestamp(filestat.st_mtime)\n today = datetime.datetime.now()\n dt = today - tm\n if dt.days < 1:\n download_data = False\n \n if download_data:\n print 'Downloading items.csv for price data ....'\n br = utils.create_browser(1, 2)\n br.open(\"https://www.broderbros.com/cgi-bin/online/webbro/bro-index.w\")\n try:\n # Fill login form\n br.select_form(name = 'frmLogin')\n frm = br.form\n \n ctrl = frm.find_control('userName')\n ctrl.value = USERNAME\n ctrl = frm.find_control('password')\n ctrl.value = PASSWORD\n \n # Submit login form\n if TESTRUN: print 'Submit Login Form'\n \n br.select_form(name = 'frmLogin')\n br.submit()\n except:\n print \"Login form does not exist, please check URL, downloaded html or site is down\"\n return None\n try:\n tar_url = \"https://www.broderbros.com/cgi-bin/download/webshr/prod-info-view.w?f=bro-AllStyles_R06.tar.gz\"\n br.retrieve(tar_url, pricing_tarfile)\n except:\n print \"Error when downloading pricing file\"\n return None\n \n try:\n tar = tarfile.open(pricing_tarfile)\n for member in tar.getmembers():\n member.name = member.name.split('/')[-1] # strip directory from filename\n tar.extractall('catalog/stock_data/bro-AllStyles_R06')\n tar.close()\n except:\n print \"Error when extracting items.csv\"\n return None\n \n f_object = open('./catalog/stock_data/bro-AllStyles_R06/items_R06.csv', 'rb')\n #~ f_object = open('items_R06.csv', 'rb')\n \n print \"Parse items_R06.csv ... 
\"\n for row in csv.reader(f_object):\n itemRef = row[7].lower()\n if itemRef == 'style code':\n continue\n \n size = row[8].lower()\n color = row[11].lower()\n price = row[18]\n \n item_data = inventory_data.get(itemRef)\n if not item_data:\n continue\n # Find data with same size and color\n for var_dict in item_data:\n options = var_dict['options']\n opt_dict = {}\n for opt in options:\n opt_type = opt['option_type']\n opt_value = opt['option_value']\n if opt_type == 'size':\n opt_dict['size'] = opt_value\n elif opt_type == 'color':\n opt_dict['color'] = opt_value\n if opt_dict['size'] == size and opt_dict['color'] == color:\n var_dict['price'] = [{'price_type': 'retail_price', 'price': price}]\n \n f_object.close()\n \n try:\n shutil.rmtree(\"./catalog/stock_data/bro-AllStyles_R06\")\n #~ os.remove(\"./catalog/stock_data/bro-AllStyles_R06.tar.gz\")\n except:\n pass\n \n return inventory_data",
"def write_inventory_file(inventory_item):\n try:\n with open('inventory', 'w') as file:\n file.write(inventory_item)\n except OSError:\n pass",
"def save_data(self, inventory):\n ## Save partitions\n LOG.info('Saving partitions.')\n\n num = self._save_partitions(inventory.partitions.itervalues())\n\n LOG.info('Saved %d partitions.', num)\n\n ## Save groups\n LOG.info('Saving groups.')\n\n num = self._save_groups(inventory.groups.itervalues())\n\n LOG.info('Saved %d groups.', num)\n\n ## Save sites\n LOG.info('Saving sites.')\n\n num = self._save_sites(inventory.sites.itervalues())\n\n LOG.info('Saved %d sites.', num)\n\n ## Save sitepartitions\n LOG.info('Saving sitepartitions.')\n\n def all_sitepartitions():\n for site in inventory.sites.itervalues():\n for partition in inventory.partitions.itervalues():\n yield site.partitions[partition]\n\n num = self._save_sitepartitions(all_sitepartitions())\n\n LOG.info('Saved %d sitepartitions.', num)\n\n ## Save datasets\n LOG.info('Saving datasets.')\n\n num = self._save_datasets(inventory.datasets.itervalues())\n\n LOG.info('Saved %d datasets.', num)\n\n ## Save blocks\n LOG.info('Saving blocks.')\n\n def all_blocks():\n for dataset in inventory.datasets.itervalues():\n for block in dataset.blocks:\n yield block\n \n num = self._save_blocks(all_blocks())\n\n LOG.info('Saved %d blocks.', num)\n\n ## Save files\n LOG.info('Saving files.')\n\n def all_files():\n for dataset in inventory.datasets.itervalues():\n for block in dataset.blocks:\n for lfile in block.files:\n yield lfile\n\n num = self._save_files(all_files())\n\n LOG.info('Saved %d files.', num)\n\n ## Save dataset replicas\n LOG.info('Saving dataset replicas.')\n\n def all_replicas():\n for site in inventory.sites.itervalues():\n for replica in site.dataset_replicas():\n yield replica\n\n num = self._save_dataset_replicas(all_replicas())\n\n LOG.info('Saved %d dataset replicas.', num)\n\n ## Save block replicas\n LOG.info('Saving block replicas.')\n\n def all_replicas():\n for site in inventory.sites.itervalues():\n for dataset_replica in site.dataset_replicas():\n for block_replica in dataset_replica.block_replicas:\n yield block_replica\n\n num = self._save_block_replicas(all_replicas())\n\n LOG.info('Saved %d block replicas.', num)",
"def print_stock_rotation_report(self):\n warehouses = False\n locations = False\n from_date = False\n to_date = False\n active_id = self.ids[0]\n today=datetime.now().strftime(\"%Y-%m-%d\")\n f_name = 'Stock Rotation Report' + ' ' + today\n stock_warehouse_obj = self.env['stock.warehouse']\n stock_locations_obj = self.env['stock.location']\n product_obj = self.env['product.product']\n \n if self.filtaration == 'warehouse':\n if not self.include_all_warehouse:\n if not self.warehouse_ids:\n raise ValidationError(\"please select the Warehouse.\")\n warehouses = self.warehouse_ids\n else:\n warehouses = stock_warehouse_obj.search([])\n else:\n if not self.include_all_location:\n if not self.location_ids:\n raise ValidationError(\"please select the Locations.\")\n locations = self.location_ids\n else:\n locations = stock_locations_obj.search([('usage','=','internal')])\n\n\n if not self.from_date:\n raise ValidationError(\"please select the From Date.\")\n \n if not self.to_date:\n raise ValidationError(\"please select the To Date.\")\n\n all_products = product_obj.with_context(active_test=True).search([('type','=','product')])\n from_date = self.from_date\n to_date = self.to_date\n \n date_1 = time.strptime(from_date, \"%Y-%m-%d\")\n date_2 = time.strptime(to_date, \"%Y-%m-%d\")\n if not (date_1 <= date_2):\n raise ValidationError(\"Fromdate is not previous then Todate\")\n self.get_stock_rotation_report(from_date,to_date,warehouses,locations,all_products)\n if self.datas:\n return {\n 'type' : 'ir.actions.act_url',\n 'url':'web/content/?model=stock.rotation.report&download=true&field=datas&id=%s&filename=%s.xls'%(active_id,f_name),\n 'target': 'new',\n }",
"def storeCombinedInventory(df, file_name, category=''):\n meta = set_stewicombo_meta(file_name, category)\n method_path = paths.local_path / meta.category\n try:\n log.info(f'saving {meta.name_data} to {method_path}')\n write_df_to_file(df, paths, meta)\n except OSError:\n log.error('Failed to save inventory')",
"def StockUpdate(list,SaveOnly=False):\n if list: # Checks to see if list has items. [] = false, while [1,2] = true!\n if not SaveOnly:\n for component in StockLibrary:\n for item in list:\n if component.get(item):\n component.update({item:component.get(item)-1})\n \n # Saving to CSV file. \n with open(\"shopStock.csv\",\"w+\",newline='') as document:\n Writer = csv.writer(document,delimiter=',')\n for part in StockLibrary:\n listAp = []\n for key in part.keys():\n line = \"{} : {}\".format(key,part[key])\n listAp.append(line)\n Writer.writerow(listAp)\n document.close()\n else:\n print(\"[STOCK MODULE ERROR] No items listed to update!\")",
"def export_stock_simple_products(self, woo_simple_products, product_stock, instance,\n model_id):\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n wcapi = instance.woo_connect()\n _logger.info('==Start process of simple product for export stock')\n batches = self.prepare_batches(woo_simple_products)\n log_lines = []\n for woo_products in batches:\n batch_update = {'update':[]}\n batch_update_data = []\n for template in woo_products:\n info = {'id':template.woo_tmpl_id, 'variations':[]}\n variant = template.woo_product_ids[0]\n if variant.woo_is_manage_stock:\n quantity = product_stock.get(variant.product_id.id)\n if variant.fix_stock_type == 'fix' :\n if variant.fix_stock_value < quantity :\n quantity = variant.fix_stock_value\n if variant.fix_stock_type == 'percentage':\n percentage_stock = int((quantity * variant.fix_stock_value) / 100.0)\n if percentage_stock < quantity:\n quantity= percentage_stock\n info.update({'manage_stock':True, 'stock_quantity':int(quantity)})\n batch_update_data.append(info)\n if batch_update_data:\n batch_update.update({'update':batch_update_data})\n _logger.info('products batch processing')\n res = wcapi.post('products/batch', batch_update)\n _logger.info('products batch completed [status: %s]', res.status_code)\n if not isinstance(res, requests.models.Response):\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':\"Update Product Stock \\nResponse is not in proper format :: %s\" % (\n res),\n })\n log_lines.append(log_id.id)\n if res.status_code not in [200, 201]:\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':res.content,\n })\n log_lines.append(log_id.id)\n try:\n response = res.json()\n except Exception as e:\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':\"Json Error : While update product stock to WooCommerce for instance %s. \\n%s\" % (\n instance.name, e),\n })\n log_lines.append(log_id.id)\n if response.get('data', {}) and response.get('data', {}).get('status') != 200:\n message = response.get('message')\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':message\n })\n log_lines.append(log_id.id)\n _logger.info('==End process of simple product for export stock')\n return log_lines",
"def test_add_stock_item(self):\n pass"
] | [
"0.6697952",
"0.6533873",
"0.63715094",
"0.6170064",
"0.6122549",
"0.6118368",
"0.60934365",
"0.6089652",
"0.60319245",
"0.5859282",
"0.5852072",
"0.58282447",
"0.5828063",
"0.5802748",
"0.5768997",
"0.57482827",
"0.57467115",
"0.57139105",
"0.5640052",
"0.5583418",
"0.55793357",
"0.5577675",
"0.5572316",
"0.5571613",
"0.55401295",
"0.54960763",
"0.54783386",
"0.54473567",
"0.5432731",
"0.5423064"
] | 0.6653359 | 1 |
Exports stock data of products from tryton to magento for this website | def export_inventory_to_magento(self):
Location = Pool().get('stock.location')
product_templates = []
instance = self.instance
locations = Location.search([('type', '=', 'storage')])
for magento_product_template in self.magento_product_templates:
product_template = magento_product_template.template
product_templates.append(product_template)
with Transaction().set_context({'locations': map(int, locations)}):
product_data = {
'qty': product_template.quantity,
'is_in_stock': '1' if product_template.quantity > 0
else '0',
}
# Update stock information to magento
with magento.Inventory(
instance.url, instance.api_user, instance.api_key
) as inventory_api:
inventory_api.update(
magento_product_template.magento_id, product_data
)
return product_templates | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getStockData():\n pass",
"def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()",
"def export_inventory(self, cursor, user, ids, context):\n website_obj = self.pool.get('magento.instance.website')\n\n website_id = context.get('active_id')\n t = threading.Thread(target=website_obj.export_inventory_to_magento,\n args=(cursor, user, website_id, context, True))\n t.daemon = True\n t.start()\n\n return True#self.open_products(cursor, user, map(int, products), context)",
"def default_export_(self, fields):\n Store = Pool().get('magento.website.store')\n\n store = Store(Transaction().context.get('active_id'))\n\n return {\n 'products_count': store.export_tier_prices_to_magento()\n }",
"def readStockFromOG(self):\n # TODO If not in stock, do not display item\n self.og_stock = pd.read_csv('data/stock.csv')\n self.og_stock.to_csv('data/menu.csv', index = False)",
"def write_bestbuy_product_data(engine, api = 'http://api.remix.bestbuy.com/v1/products?format=json&pageSize=100&apiKey=q3yfbu6smh6bzydeqbjv9kas'):\n # delete the unnecesssary records\n products_data = preproc.getData_API(api, 'products')\n del products_data['videoChapters']\n del products_data['videoLanguages']\n products_data = products_data.where(pd.notnull(products_data), None)\n print products_data\n print products_data.columns\n # products_data.to_sql('bestbuy_products_data', con = engine, index = False, if_exists = 'replace')",
"def run(self):\n page = self.fetch_data(self.url)\n stock_list = self.pop_stock_list(page)\n self.write_csv(stock_list)",
"def update_stock(self, instance, export_stock_from_date):\n product_obj = self.env['product.product']\n woo_product_product_obj = self.env['woo.product.product.ept']\n\n if not export_stock_from_date:\n export_stock_from_date = datetime.now() - timedelta(30)\n odoo_products = product_obj.get_products_based_on_movement_date_ept(export_stock_from_date,\n instance.company_id)\n instance.last_inventory_update_time = datetime.now()\n woo_templates = woo_product_product_obj.search([('product_id', 'in', odoo_products), (\n 'woo_is_manage_stock', '=', True)]).woo_template_id.filtered(\n lambda x:x.woo_instance_id == instance and x.exported_in_woo == True)\n if woo_templates:\n self.with_context(updated_products_in_inventory=odoo_products).woo_update_stock(\n instance, woo_templates)\n else:\n _logger.info(\n \"==There is no product movement found between date time from: '%s' to '%s' for export stock.\" % (\n export_stock_from_date, datetime.now()))\n return True",
"def export_stock_simple_products(self, woo_simple_products, product_stock, instance,\n model_id):\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n wcapi = instance.woo_connect()\n _logger.info('==Start process of simple product for export stock')\n batches = self.prepare_batches(woo_simple_products)\n log_lines = []\n for woo_products in batches:\n batch_update = {'update':[]}\n batch_update_data = []\n for template in woo_products:\n info = {'id':template.woo_tmpl_id, 'variations':[]}\n variant = template.woo_product_ids[0]\n if variant.woo_is_manage_stock:\n quantity = product_stock.get(variant.product_id.id)\n if variant.fix_stock_type == 'fix' :\n if variant.fix_stock_value < quantity :\n quantity = variant.fix_stock_value\n if variant.fix_stock_type == 'percentage':\n percentage_stock = int((quantity * variant.fix_stock_value) / 100.0)\n if percentage_stock < quantity:\n quantity= percentage_stock\n info.update({'manage_stock':True, 'stock_quantity':int(quantity)})\n batch_update_data.append(info)\n if batch_update_data:\n batch_update.update({'update':batch_update_data})\n _logger.info('products batch processing')\n res = wcapi.post('products/batch', batch_update)\n _logger.info('products batch completed [status: %s]', res.status_code)\n if not isinstance(res, requests.models.Response):\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':\"Update Product Stock \\nResponse is not in proper format :: %s\" % (\n res),\n })\n log_lines.append(log_id.id)\n if res.status_code not in [200, 201]:\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':res.content,\n })\n log_lines.append(log_id.id)\n try:\n response = res.json()\n except Exception as e:\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':\"Json Error : While update product stock to WooCommerce for instance %s. \\n%s\" % (\n instance.name, e),\n })\n log_lines.append(log_id.id)\n if response.get('data', {}) and response.get('data', {}).get('status') != 200:\n message = response.get('message')\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':message\n })\n log_lines.append(log_id.id)\n _logger.info('==End process of simple product for export stock')\n return log_lines",
"def store_stock_data(stock_name = 'TSLA'):\n stonk = yf.Ticker(stock_name) # gets stock data from yahoo\n hist = stonk.history(period=\"max\") # historical stock prices\n hist.reset_index(inplace=True) # takes the date stamp out of the index column\n hist.rename(columns = {'Date':\"DateTime\"},inplace=True) # Changes the name of the date column\n hist['DateTime'] = pd.to_datetime(hist['DateTime'],utc=True) # Changes the timestamps to UTC\n hist.to_csv('../data/raw/'+stock_name+'_stock_price.csv')\n return",
"def export_data(self):\r\n stocks = {}\r\n headings = ['Security', 'Price', 'Change', 'Change %', '52 Week', 'Market Cap']\r\n\r\n for data in range(6):\r\n for items in self.root.main.treeview.get_children():\r\n values = self.root.main.treeview.item(items, 'values')\r\n if headings[data] not in stocks:\r\n stocks[headings[data]] = []\r\n stocks.get(headings[data]).append(values[data])\r\n\r\n df = pd.DataFrame(stocks, columns=headings)\r\n path = tk.filedialog.asksaveasfilename(title='Save File As...',\r\n filetypes=((\"CComma-separated values (.csv)\", \"*.csv\"), (\"Text Document(.txt)\", \"*.txt\")))\r\n\r\n if not path:\r\n return\r\n else:\r\n df.to_excel(path, index=False, header=True)",
"def export_tier_prices_to_magento(self):\n instance = self.website.instance\n\n for mag_product_template in self.website.magento_product_templates:\n product_template = mag_product_template.template\n product = product_template.products[0]\n\n # Get the price tiers from the product if the product has a price\n # tier table else get the default price tiers from current store\n price_tiers = product_template.price_tiers or self.price_tiers\n\n price_data = []\n for tier in price_tiers:\n if hasattr(tier, 'product'):\n # The price tier comes from a product, then it has a\n # function field for price, we use it directly\n price = tier.price\n else:\n # The price tier comes from the default tiers on store,\n # we dont have a product on tier, so we use the current\n # product in loop for computing the price for this tier\n price = self.price_list.compute(\n None, product, product.list_price, tier.quantity,\n self.website.default_uom\n )\n\n price_data.append({\n 'qty': tier.quantity,\n 'price': float(price),\n })\n\n # Update stock information to magento\n with magento.ProductTierPrice(\n instance.url, instance.api_user, instance.api_key\n ) as tier_price_api:\n tier_price_api.update(\n mag_product_template.magento_id, price_data\n )\n\n return len(self.website.magento_product_templates)",
"def woo_update_stock(self, instance, woo_templates):\n common_log_obj = self.env[\"common.log.book.ept\"]\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n model = \"woo.product.product.ept\"\n model_id = common_log_line_obj.get_model_id(model)\n log_lines = []\n\n product_ids = woo_templates.mapped('woo_product_ids').mapped('product_id')\n product_stock = self.check_stock_type(instance, product_ids)\n variable_products = woo_templates.filtered(lambda x:x.woo_product_type == 'variable')\n simple_products = woo_templates.filtered(lambda x:x.woo_product_type == 'simple')\n if variable_products:\n log_lines += self.export_stock_variable_products(variable_products, product_stock,\n instance, model_id)\n if simple_products:\n log_lines += self.export_stock_simple_products(simple_products, product_stock, instance,\n model_id)\n\n instance.write({'last_inventory_update_time':datetime.now()})\n if log_lines:\n common_log_obj.create({\n 'type':'export',\n 'module':'woocommerce_ept',\n 'woo_instance_id':instance.id,\n 'active':True,\n 'log_lines':[(6, 0, log_lines)],\n })\n return True",
"def print_stock_rotation_report(self):\n warehouses = False\n locations = False\n from_date = False\n to_date = False\n active_id = self.ids[0]\n today=datetime.now().strftime(\"%Y-%m-%d\")\n f_name = 'Stock Rotation Report' + ' ' + today\n stock_warehouse_obj = self.env['stock.warehouse']\n stock_locations_obj = self.env['stock.location']\n product_obj = self.env['product.product']\n \n if self.filtaration == 'warehouse':\n if not self.include_all_warehouse:\n if not self.warehouse_ids:\n raise ValidationError(\"please select the Warehouse.\")\n warehouses = self.warehouse_ids\n else:\n warehouses = stock_warehouse_obj.search([])\n else:\n if not self.include_all_location:\n if not self.location_ids:\n raise ValidationError(\"please select the Locations.\")\n locations = self.location_ids\n else:\n locations = stock_locations_obj.search([('usage','=','internal')])\n\n\n if not self.from_date:\n raise ValidationError(\"please select the From Date.\")\n \n if not self.to_date:\n raise ValidationError(\"please select the To Date.\")\n\n all_products = product_obj.with_context(active_test=True).search([('type','=','product')])\n from_date = self.from_date\n to_date = self.to_date\n \n date_1 = time.strptime(from_date, \"%Y-%m-%d\")\n date_2 = time.strptime(to_date, \"%Y-%m-%d\")\n if not (date_1 <= date_2):\n raise ValidationError(\"Fromdate is not previous then Todate\")\n self.get_stock_rotation_report(from_date,to_date,warehouses,locations,all_products)\n if self.datas:\n return {\n 'type' : 'ir.actions.act_url',\n 'url':'web/content/?model=stock.rotation.report&download=true&field=datas&id=%s&filename=%s.xls'%(active_id,f_name),\n 'target': 'new',\n }",
"def test_manufacturing_scrap(self):\n\n # Update demo products\n (self.product_4 | self.product_2).write({\n 'tracking': 'lot',\n })\n\n # Update Bill Of Material to remove product with phantom bom.\n self.bom_3.bom_line_ids.filtered(lambda x: x.product_id == self.product_5).unlink()\n\n # Create Inventory Adjustment For Stick and Stone Tools with lot.\n lot_product_4 = self.env['stock.production.lot'].create({\n 'name': '0000000000001',\n 'product_id': self.product_4.id,\n 'company_id': self.env.company.id,\n })\n lot_product_2 = self.env['stock.production.lot'].create({\n 'name': '0000000000002',\n 'product_id': self.product_2.id,\n 'company_id': self.env.company.id,\n })\n\n stock_inv_product_4 = self.env['stock.inventory'].create({\n 'name': 'Stock Inventory for Stick',\n 'product_ids': [(4, self.product_4.id)],\n 'line_ids': [\n (0, 0, {'product_id': self.product_4.id, 'product_uom_id': self.product_4.uom_id.id, 'product_qty': 8, 'prod_lot_id': lot_product_4.id, 'location_id': self.stock_location_14.id}),\n ]})\n\n stock_inv_product_2 = self.env['stock.inventory'].create({\n 'name': 'Stock Inventory for Stone Tools',\n 'product_ids': [(4, self.product_2.id)],\n 'line_ids': [\n (0, 0, {'product_id': self.product_2.id, 'product_uom_id': self.product_2.uom_id.id, 'product_qty': 12, 'prod_lot_id': lot_product_2.id, 'location_id': self.stock_location_14.id})\n ]})\n (stock_inv_product_4 | stock_inv_product_2)._action_start()\n stock_inv_product_2.action_validate()\n stock_inv_product_4.action_validate()\n\n #Create Manufacturing order.\n production_form = Form(self.env['mrp.production'])\n production_form.product_id = self.product_6\n production_form.bom_id = self.bom_3\n production_form.product_qty = 12\n production_form.product_uom_id = self.product_6.uom_id\n production_3 = production_form.save()\n production_3.action_confirm()\n production_3.action_assign()\n\n # Check Manufacturing order's availability.\n self.assertEqual(production_3.reservation_state, 'assigned', \"Production order's availability should be Available.\")\n\n location_id = production_3.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) and production_3.location_src_id.id or production_3.location_dest_id.id,\n\n # Scrap Product Wood without lot to check assert raise ?.\n scrap_id = self.env['stock.scrap'].with_context(active_model='mrp.production', active_id=production_3.id).create({'product_id': self.product_2.id, 'scrap_qty': 1.0, 'product_uom_id': self.product_2.uom_id.id, 'location_id': location_id, 'production_id': production_3.id})\n with self.assertRaises(UserError):\n scrap_id.do_scrap()\n\n # Scrap Product Wood with lot.\n self.env['stock.scrap'].with_context(active_model='mrp.production', active_id=production_3.id).create({'product_id': self.product_2.id, 'scrap_qty': 1.0, 'product_uom_id': self.product_2.uom_id.id, 'location_id': location_id, 'lot_id': lot_product_2.id, 'production_id': production_3.id})\n\n #Check scrap move is created for this production order.\n #TODO: should check with scrap objects link in between",
"def prepare_data_with_warehouse(self,from_date,to_date,warehouses,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for warehouse in warehouses:\n all_locations = self.get_all_locations(warehouse)\n if not all_locations:\n continue\n \n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,warehouse,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = self.find_sale_qty_in_duration(from_date,to_date,warehouse,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n \n dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n if any(all_locations) and any(dest_location_lst):\n #fidning warehouse in qty \n warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n #fidning warehouse out qty for specific product.\n warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n if warehouse_out_qty:\n warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n if warehouse_in_qty:\n warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration[0][0] or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(warehouse.id):\n data_lst=data_dict.get(warehouse.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 
0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({warehouse.id:data_lst})\n continue\n data_dict.update({warehouse.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict",
"def export_and_update_products(self, cursor, user, ids=None, context=None):\n if context is None:\n context = {}\n export_and_update_catalog_obj = self.pool.get('magento.instance.website.export_catalog')\n if not ids:\n ids = self.search(cursor, user, [], context)\n \n for website in self.browse(cursor, user, ids, context):\n context['active_id'] = website.id\n attribute_set = export_and_update_catalog_obj._get_default_attribute_set(cursor, user, context=context)\n export_and_update_catalog_id = export_and_update_catalog_obj.create(cursor, user, {'export_images': True, 'attribute_set': attribute_set}, context)\n export_and_update_catalog_obj.update_and_export_products_openerp_to_magento(cursor, user, [export_and_update_catalog_id], context)",
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def export_as_json(self):\n f = open(\"{}_historical_prices.json\".format(self.symbol), \"a\")\n f.write(self.stocks.json())\n f.close()",
"def export(self):\r\n self.prices[\"returns\"] = self.returns\r\n self.prices.columns = ['prices', 'returns']\r\n self.prices = self.prices.dropna()\r\n \r\n name = QFileDialog.getSaveFileName(None, 'Save File', filter='*.xlsx')\r\n if(name[0] == ''):\r\n # if name empty\r\n pass\r\n else:\r\n self.prices.to_excel(name[0])",
"def prepare_data_with_location(self,from_date,to_date,locations,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for loc in locations:\n all_locations = self.get_all_locations(warehouse=False, location=loc)\n if not all_locations:\n continue\n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,False,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = self.find_sale_qty_in_duration(from_date,to_date,False,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n\n # dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n # if any(all_locations) and any(dest_location_lst):\n # #fidning warehouse in qty \n # warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n # #fidning warehouse out qty for specific product.\n # warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n # if warehouse_out_qty:\n # warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n # if warehouse_in_qty:\n # warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(loc.id):\n data_lst=data_dict.get(loc.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 
0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({loc.id:data_lst})\n continue\n data_dict.update({loc.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict",
"def stock():\n stock=stock_data('AAPL',start(2019,12,1))\n return stock",
"def export_cart(self, products, user_email = '[email protected]', password = '2asefthukom,3'):\n\n\t\tfeedback = []\n\t\t\t\n\t\t# Clearing cookies\n\t\tself.crawler.empty_cookie_jar()\n\n\t\t# log user\n\t\tis_logued, code = self.login_user(user_email, password)\n\n\t\tif code == 200:\n\t\t\tif is_logued:\n\n\t\t\t\t# Empty cart\n\t\t\t\thtml, code = self.crawler.empty_cart()\n\n\t\t\t\tif code == 200:\n\t\t\t\t\t# Cycle throug every product and put it in cart\n\t\t\t\t\tfor product in products:\n\t\t\t\t\t\turl_product = product['url']\n\t\t\t\t\t\thtml, code = self.crawler.get(url_product)\n\t\t\t\t\t\tif code == 200:\n\t\t\t\t\t\t\tself.parser.set_html(html)\n\t\t\t\t\t\t\t# Getting options\n\t\t\t\t\t\t\tdata = self.parser.get_form_add_product(product['quantity'])\n\t\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\t\thtml, code = self.crawler.add_product( data)\n\t\t\t\t\t\t\t\tif code == 200:\n\t\t\t\t\t\t\t\t\tfeedback.append({\n\t\t\t\t\t\t\t\t\t\t'reference': product['reference'],\n\t\t\t\t\t\t\t\t\t\t'msg': 'Exportation OK',\n\t\t\t\t\t\t\t\t\t\t'code': code\n\t\t\t\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t\telse:\n\t\t\t\t\t\t\t\t\tfeedback.append({\n\t\t\t\t\t\t\t\t\t\t'reference': product['reference'],\n\t\t\t\t\t\t\t\t\t\t'msg': 'Failed exportation',\n\t\t\t\t\t\t\t\t\t\t'code': code\n\t\t\t\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\texcept Exception, e:\n\t\t\t\t\t\t\t\tfeedback.append({\n\t\t\t\t\t\t\t\t\t'reference': product['reference'],\n\t\t\t\t\t\t\t\t\t'msg': 'Failed exportation',\n\t\t\t\t\t\t\t\t\t'code': code,\n\t\t\t\t\t\t\t\t\t'error': e\n\t\t\t\t\t\t\t\t\t})\n\t\t\t\t\t\telse:\n\t\t\t\t\t\t\tfeedback.append({\n\t\t\t\t\t\t\t\t'reference': product['reference'],\n\t\t\t\t\t\t\t\t'msg': 'Failed exportation',\n\t\t\t\t\t\t\t\t'code': code,\n\t\t\t\t\t\t\t\t})\n\n\t\treturn {\n\t\t\t'feedback': feedback,\n\t\t\t'code': code,\n\t\t\t'is_logued': is_logued\n\t\t}",
"async def stocks(self, ctx):\n\t\tpass",
"def download_all_stocks():\n stocks = get_stocklist()\n dfs = {}\n for i, r in stocks.iterrows():\n start = time.time()\n s = r['Ticker']\n stockfile = '../stockdata/' + s + '.csv.gz'\n print('downloading', s)\n stock = quandl.get('EOD/' + s)\n stock.to_csv(stockfile, compression='gzip')\n dfs[s] = stock\n print('took', time.time() - start, 's')\n\n return dfs",
"def export_stock_variable_products(self, woo_variable_products, product_stock, instance,\n model_id):\n log_lines = []\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n wcapi = instance.woo_connect()\n _logger.info('==Start process of variable product for export stock')\n for template in woo_variable_products:\n info = {'id':template.woo_tmpl_id, 'variations':[]}\n for variant in template.woo_product_ids.filtered(lambda\n x:x.product_id.type == 'product' and x.woo_is_manage_stock and x.variant_id):\n if variant.product_id.id in self._context.get('updated_products_in_inventory'):\n quantity = product_stock.get(variant.product_id.id)\n if variant.fix_stock_type == 'fix' :\n if variant.fix_stock_value < quantity :\n quantity = variant.fix_stock_value\n if variant.fix_stock_type == 'percentage':\n percentage_stock = int((quantity * variant.fix_stock_value) / 100.0)\n if percentage_stock < quantity:\n quantity= percentage_stock\n\n info.get('variations').append({\n 'id':variant.variant_id,\n 'manage_stock':True,\n 'stock_quantity':int(quantity)\n })\n if info.get('variations'):\n variant_batches = self.prepare_batches(info.get('variations'))\n for woo_variants in variant_batches:\n _logger.info(\n 'Export Stock||Variations batch processing for woo template name: %s' % (\n template.name))\n res = wcapi.post('products/%s/variations/batch' % (info.get('id')),\n {'update':woo_variants})\n _logger.info(\n 'Export Stock||Variations batch process completed [status: %s] for Woo template name: %s' % (\n res.status_code, template.name))\n if res.status_code not in [200, 201]:\n log_id = common_log_line_obj.create({\n 'model_id':model_id,\n 'message':\"Update woo template: %s Stock\\n%s\" % (\n template.name, res.content),\n })\n log_lines.append(log_id.id)\n _logger.info('==End process of variable product for export stock')\n return log_lines",
"def save_catalog(self):\n self.catalog.to_csv(self.catalog_path, index_label='dateTime')",
"def export_csv_file(self, product_templates):\n buffer = StringIO()\n\n delimiter = \",\"\n field_names = [\"template_name\", \"product_name\", \"product_default_code\",\n \"shopify_product_default_code\", \"product_description\",\n \"PRODUCT_TEMPLATE_ID\", \"PRODUCT_ID\", \"CATEGORY_ID\"]\n csvwriter = DictWriter(buffer, field_names, delimiter=delimiter)\n csvwriter.writer.writerow(field_names)\n\n rows = []\n for template in product_templates:\n if len(template.attribute_line_ids) > 3:\n continue\n if len(template.product_variant_ids.ids) == 1 and not template.default_code:\n continue\n for product in template.product_variant_ids.filtered(lambda variant: variant.default_code):\n row = {\n \"PRODUCT_TEMPLATE_ID\": template.id,\n \"template_name\": template.name,\n \"CATEGORY_ID\": template.categ_id.id,\n \"product_default_code\": product.default_code,\n \"shopify_product_default_code\":product.default_code,\n \"PRODUCT_ID\": product.id,\n \"product_name\": product.name,\n \"product_description\": product.description or None,\n }\n rows.append(row)\n\n if not rows:\n raise Warning(\"No data found to be exported.\\n\\nPossible Reasons:\\n - Number of \"\n \"attributes are more than 3.\\n - SKU(s) are not set properly.\")\n csvwriter.writerows(rows)\n buffer.seek(0)\n file_data = buffer.read().encode()\n self.write({\n \"datas\": base64.encodestring(file_data),\n \"file_name\": \"Shopify_export_product\"\n })\n\n return {\n \"type\": \"ir.actions.act_url\",\n \"url\": \"web/content/?model=shopify.prepare.product.for.export.ept&id=%s&field=datas&field=datas&download=true&filename=%s.csv\" % (\n self.id, self.file_name + str(datetime.now().strftime(\"%d/%m/%Y:%H:%M:%S\"))),\n \"target\": self\n }",
"def load_stock(self):\n lines = []\n with Transaction().start(DBNAME, 1):\n stock_lines = self.Inventory.search([('state', '=', 'done'), ('location', '=', self.location.id)])\n if stock_lines:\n for i in stock_lines:\n batch = i.batch_number\n for j in i.lines:\n if j.quantity <= 0:\n continue\n dictionary = {}\n dictionary['code'] = j.product.code\n dictionary['item'] = j.product.template.name\n dictionary[\n 'category'] = j.product.template.category.name if j.product.template.category else None\n dictionary['quantity'] = Decimal(j.quantity).quantize(Decimal('0.11')).to_eng()\n dictionary['batch_number'] = batch\n dictionary['supplier'] = j.supplier.name if j.supplier else None\n dictionary['expiry_date'] = j.expiry_date.strftime('%d-%m-%Y') if j.expiry_date else None\n lines.append(dictionary)\n return lines",
"def prepare_product_for_export(self):\n _logger.info(\"Starting product exporting via %s method...\" % self.export_method)\n\n active_template_ids = self._context.get(\"active_ids\", [])\n templates = self.env[\"product.template\"].browse(active_template_ids)\n product_templates = templates.filtered(lambda template: template.type == \"product\")\n if not product_templates:\n raise Warning(\"It seems like selected products are not Storable products.\")\n\n if self.export_method == \"direct\":\n return self.export_direct_in_shopify(product_templates)\n elif self.export_method == \"csv\":\n return self.export_csv_file(product_templates)"
] | [
"0.64082617",
"0.6399653",
"0.63895637",
"0.62795657",
"0.6145077",
"0.60885113",
"0.6076985",
"0.6049843",
"0.6021914",
"0.6008381",
"0.5943344",
"0.59400165",
"0.59058166",
"0.5860487",
"0.58527863",
"0.5842598",
"0.58248246",
"0.5812929",
"0.57357395",
"0.57285345",
"0.5700817",
"0.569193",
"0.5672761",
"0.563931",
"0.5606456",
"0.56031686",
"0.55891514",
"0.5570369",
"0.5544262",
"0.5529758"
] | 0.656638 | 0 |
Returns company related to website | def get_company(self, name):
return self.website.company.id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_website_companies_get_details(self):\n pass",
"def get_company(self, company_referece):\n url = 'companies/{0}'.format(company_referece)\n result = self.get(url)\n return result.get('company', result)",
"def run_whoxy_company_search(self,company):\n if self.whoxy_api_key:\n try:\n results = requests.get(self.reverse_whoxy_api_endpoint.format(self.whoxy_api_key,company),timeout=self.requests_timeout).json()\n if results['status'] == 1 and results['total_results'] > 0:\n whois_results = {}\n total_results = results['total_results']\n for domain in results['search_result']:\n domain_name = domain['domain_name']\n temp = self.parse_whoxy_results(domain,True)\n whois_results[domain_name] = temp\n return whois_results,total_results\n else:\n click.secho(\"[*] WhoXY returned status code 0, error/no results, for reverse company search.\",fg=\"yellow\")\n except requests.exceptions.Timeout:\n click.secho(\"\\n[!] The connection to WhoXY timed out!\",fg=\"red\")\n except requests.exceptions.TooManyRedirects:\n click.secho(\"\\n[!] The connection to WhoXY encountered too many redirects!\",fg=\"red\")\n except requests.exceptions.RequestException as error:\n click.secho(\"[!] Error connecting to WhoXY for reverse company search!\",fg=\"yellow\")\n click.secho(\"L.. Details: {}\".format(error),fg=\"yellow\")",
"def company(self):\n return self._company",
"def company(self):\n return self._company",
"def get_all_companies_and_people():",
"def pull_companies(cls, soup):\n companies = []\n parents = soup.findAll('div', 'row job-information')\n for parent in parents:\n try:\n temp = parent.find('div', 'columns large-2 medium-3 small-12').find('h4')\n except AttributeError:\n companies.append(None)\n else:\n if temp.a:\n # Company name is sometimes wrapped in anchor tag\n companies.append(temp.find('a').contents[0].strip())\n else:\n companies.append(temp.contents[0].strip())\n return companies",
"def get_companies_and_people(team):",
"def get_companies(self):\n url = 'companies'\n result = self.get(url)\n return result['companies']",
"def get_data_from_individual_company_pages(soup):\n individual_company_data = []\n usd_roe = get_usd_roe()\n company_code = (\n soup.find(\"meta\", {\"name\": \"description\"}).get(\"content\").split(\":\")[0]\n )\n current_price_usd = float(\n soup.find(\"span\", {\"class\": \"price-section__current-value\"}).text.replace(\n \",\", \"\"\n )\n )\n current_price = round(current_price_usd * usd_roe)\n try:\n p_e_ratio = float(\n soup.find(\n \"div\", {\"class\": \"snapshot__header\"}, string=\"P/E Ratio\"\n ).previous_sibling.replace(\",\", \"\")\n )\n except AttributeError:\n p_e_ratio = 0\n\n try:\n week_52_low = float(\n soup.find(\"div\", {\"class\": \"snapshot__header\"}, string=\"52 Week Low\")\n .previous_sibling.strip()\n .replace(\",\", \"\")\n )\n except AttributeError:\n week_52_low = 1\n\n try:\n week_52_high = float(\n soup.find(\"div\", {\"class\": \"snapshot__header\"}, string=\"52 Week High\")\n .previous_sibling.strip()\n .replace(\",\", \"\")\n )\n except AttributeError:\n week_52_high = 0\n\n unreal_profit_per_year_percent = round((week_52_high / week_52_low - 1) * 100, 2)\n\n individual_company_data.append(\n [company_code, current_price, p_e_ratio, unreal_profit_per_year_percent]\n )\n\n company_df = pd.DataFrame(\n columns=[\"company_code\", \"current_price\", \"P_E\", \"potential_profit_percent\"]\n )\n company_df = company_df.append(\n {\n \"company_code\": company_code,\n \"current_price\": current_price,\n \"P_E\": p_e_ratio,\n \"potential_profit_percent\": unreal_profit_per_year_percent,\n },\n ignore_index=True,\n )\n\n return company_df",
"def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()",
"def companies():\n res = requests.get('http://0.0.0.0:5002/companies')\n return jsonify(res.json())",
"def company(self):\n\n x = 0\n my_company = self.data[\"Company Name\"]\n my_account = self.data[\"Account\"]\n result = []\n for i in my_company:\n my_string = i + \" -- \" + my_account[x]\n x += 1\n result.append(my_string)\n\n return result",
"def get_link_from_main_table(soup):\n # soup = asyncio.run(get_one_page_soup_object(url))\n\n paths_to_companies_names = soup.find_all(\n \"td\", {\"class\": \"table__td table__td--big\"}\n )\n companies_dict = {}\n\n for item in paths_to_companies_names:\n company_name = item.text.strip(\"\\n\")\n href_name = \"https://markets.businessinsider.com/\" + item.find(\"a\").get(\"href\")\n companies_dict[company_name] = href_name\n\n return companies_dict",
"def get_best_matching_web_site(self):\n\n # first fill the web_df columns we need for ranking\n for i_web, (url_key, url_info) in enumerate(self.collection.items()):\n index = url_info.index\n if url_info.url_analyse is None:\n logger.warning(\"url {url_key} yielded None analyse. Skip to next\")\n continue\n exists = url_info.url_analyse.exists\n self.company_urls_df.loc[index, URL_KEY] = url_info.url\n self.company_urls_df.loc[index, EXISTS_KEY] = exists\n if exists:\n self.company_urls_df.loc[index, DISTANCE_KEY] = url_info.match.distance\n self.company_urls_df.loc[index, STRING_MATCH_KEY] = url_info.match.string_match\n self.company_urls_df.loc[index, HAS_POSTCODE_KEY] = url_info.match.has_postcode\n self.company_urls_df.loc[index, HAS_KVK_NR] = url_info.match.has_kvk_nummer\n self.company_urls_df.loc[index, RANKING_KEY] = url_info.match.ranking\n self.company_urls_df.loc[index, DISTANCE_STRING_MATCH_KEY] = \\\n url_info.match.url_match\n\n # only select the web site which exist\n mask = self.company_urls_df[EXISTS_KEY]\n\n # create mask for web name distance\n if self.threshold_distance is not None:\n # select all the web sites with a minimum distance or one higher\n m1 = (self.company_urls_df[DISTANCE_KEY] - self.company_urls_df[\n DISTANCE_KEY].min()) <= self.threshold_distance\n else:\n m1 = mask\n\n # create mask for web string match\n if self.threshold_string_match is not None:\n m2 = self.company_urls_df[STRING_MATCH_KEY] >= self.threshold_string_match\n else:\n m2 = mask\n\n m3 = self.company_urls_df[HAS_POSTCODE_KEY]\n m4 = self.company_urls_df[HAS_KVK_NR]\n\n # we mask al the existing web page and keep all pages which are either with\n # a certain string distance (m1) or in case it has either the post code or kvk\n # number we also keep it\n mask = mask & (m1 | m2 | m3 | m4)\n\n # make a copy of the valid web sides\n self.company_urls_df = self.company_urls_df[mask].copy()\n\n self.company_urls_df.sort_values([RANKING_KEY, DISTANCE_STRING_MATCH_KEY], inplace=True,\n ascending=[False, True])\n self.logger.debug(\"Sorted list {}\".format(self.company_urls_df[[URL_KEY, RANKING_KEY]]))",
"def API_companyPO(request):\n company = request.GET.get(\"po\")\n search = request.GET.get(\"po_search\")\n company = models.Company.objects.get(pk = company)\n if not company: return django.http.HttpResponseBadRequest(\"Invalid company ID\")\n pos = doors.models.Order.objects.filter(customer_po__icontains = company)\n results = [po.customer_po for po in pos]\n return django.http.JsonResponse({\"success\":True,\"results\":results})",
"def company(request):\n domain = request.GET.get(\"domain\")\n version = get_version_or_leave(request, \"company\", domain)\n\n if version == '1':\n\n return company_v1(request)\n\n else:\n\n api_access_logging(\n request,\n \"company\",\n domain,\n \"400\",\n \"4\",\n None\n )\n return Response(\n {\n \"error_code\": \"4\",\n \"detail\": errors_for_customers[\"4\"]\n },\n status=status.HTTP_400_BAD_REQUEST\n )",
"def get_sp_list():\n bs = get_soup('https://en.wikipedia.org/wiki/List_of_S%26P_500_companies')\n sp_companies = bs.find_all('a', class_=\"external text\")\n return sp_companies",
"def get_companies(self, **kwargs):\n return self.get('companies.json', **kwargs)",
"def _go_company_site(self, linkedin_company_profile_url):\n self.driver.get(linkedin_company_profile_url)",
"def get_company_info(company_no):\n in_ = 'curl -s -X GET -u yLwgnyHvwlYxkbOBAoLEwsaEfVQ_a7kAuCUTNtSt: https://api.companieshouse.gov.uk/company/{}/officers?q=Officers&items_per_page=100&start_index=0'.format(company_no).split()\n\n out = subprocess.check_output(in_)\n res = json.loads(out.decode('utf8'))\n ret = res['items']\n \n return ret",
"def website(self):\n\n if \"website\" in self: return self.list(\"website\")[0]\n for cont in self.list(\"contact\") + self.list(\"comment\"):\n c = cont.lower()\n if (c.startswith(\"http://\") or c.startswith(\"https://\") or\n c.startswith(\"www.\")): return cont\n elif c.startswith(\"//www.\"): return \"http:\" + cont\n else:\n text = \"http://www.google.com/search?q=\"\n esc = lambda c: ord(c) > 127 and '%%%x'%ord(c) or c\n if \"labelid\" in self: text += ''.join(map(esc, self[\"labelid\"]))\n else:\n artist = util.escape(\"+\".join(self(\"artist\").split()))\n album = util.escape(\"+\".join(self(\"album\").split()))\n artist = util.encode(artist)\n album = util.encode(album)\n artist = \"%22\" + ''.join(map(esc, artist)) + \"%22\"\n album = \"%22\" + ''.join(map(esc, album)) + \"%22\"\n text += artist + \"+\" + album\n text += \"&ie=UTF8\"\n return text",
"def guess_company_name(self, response):\n # TODO here guess the name of the company\n # if og:title or title or smth else\n # if domain in the title then its the name\n # if not\n # take domain\n\n parts = urllib.parse.urlparse(response.url)\n name_parts = parts.netloc.split(\".\")\n if len(name_parts) > 2:\n name = name_parts[1]\n else:\n name = name_parts[0]\n\n site_name = response.xpath('//*/meta[@property=\"description\"]/@content').extract_first()\n if site_name:\n return site_name\n else:\n return name.title()",
"def get_company(self, name):\n return self.instance.company.id",
"def get_company_url(ticker_symbol: str):\n\n response = Ticker(ticker_symbol, asynchronous=True)\n data = response.asset_profile\n url = data[ticker_symbol][\"website\"]\n\n return url",
"def _commercial_fields(self):\n return ['website']",
"def __getCompaniesData(self, schema):\n try:\n self.cursor.execute(\"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\".format(schema=schema))\n data = self.cursor.fetchall()\n\n companies = []\n for entry in data:\n self.cursor.execute('SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'.format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n\n if cities is None:\n continue\n\n city = ''\n\n for cityId in cities:\n self.cursor.execute('SELECT city FROM {schema}.locations_location WHERE id = {city}'.format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n\n if cityName is not None:\n city += cityName[0]\n\n self.cursor.execute('SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'.format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n\n if catId is not None:\n self.cursor.execute('SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'.format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n\n companies.append(DBItemCompany(\n _id = entry[0],\n tweeter = entry[1],\n category = catData[0] if catData is not None else None,\n categoryUrl = self.__buildCategoryUrl(catId, schema) if catId is not None else None,\n provenScore = entry[2],\n ranking = rank,\n location = city,\n url = self.__buildProfileUrl(catData[1], entry[3], schema) if catData is not None else self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId = catId\n ))\n\n self.__companies[schema] = companies\n\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])",
"def get_company(self, name):\n return self.store.company.id",
"def website(self):\n return self._website",
"def fetch_website_list(self):\r\n # Clear list\r\n self.website_list = []\r\n\r\n # Open websites overview\r\n self.browser.open(self.config[\"base_url\"] + \"websites\")\r\n\r\n # Find table and iterate over rows\r\n for table_row in self.browser.get_current_page().select(\"table tr\"):\r\n\r\n # Fetch cells\r\n cells = table_row.findAll('td')\r\n\r\n # Iterate over cells\r\n if(len(cells) > 0):\r\n\r\n # Get website ID\r\n website_id = table_row['data-id']\r\n # Get website name\r\n name = cells[1].text.strip()\r\n # Get website domain name\r\n domain = cells[2].text.strip()\r\n\r\n # Build website object\r\n website = {'id': website_id,\r\n 'name': name, 'domain': domain}\r\n\r\n # Add website object to list\r\n self.website_list.append(website)"
] | [
"0.67787915",
"0.65286976",
"0.64474934",
"0.6438675",
"0.6438675",
"0.6394601",
"0.62661326",
"0.6226491",
"0.6183185",
"0.611617",
"0.6092316",
"0.60650074",
"0.60194445",
"0.5988",
"0.59472513",
"0.5944278",
"0.590658",
"0.58898836",
"0.5884298",
"0.5875273",
"0.5849969",
"0.5806832",
"0.5800494",
"0.57932115",
"0.5791586",
"0.5781013",
"0.57759887",
"0.5774084",
"0.5757034",
"0.5742362"
] | 0.67437315 | 1 |
Returns instance related to website | def get_instance(self, name):
return self.website.instance.id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_site(self):\n raise NotImplementedError",
"def website(self):\n return self._website",
"def get_website(self):\n if self.website:\n return self.website\n else:\n try:\n return self.parent.get_website\n except AttributeError: # I think this is right \n return None",
"def _get_instance(self):",
"def _get_instance(self):",
"def get_instance(self, instance):\n return self._get(_instance.Instance, instance)",
"def get_website(self, name):\n return self.store.website.id",
"def site(self, domain):\r\n return resource.Site(self, domain)",
"def instance(self):\n return self.__instance",
"def me(self):\n\n return self.client._get(self._url())",
"def instance(self):\n return self._instance",
"def _get_instance(self):\n #return '_earth_instance_' + rospy.get_name().strip('/')\n return self.instance",
"def site(self):\n if not self.__site:\n self.__site = Site(self)\n return self.__site",
"def get_object(selenium, obj):\n return _get_ui_service(selenium, obj).get_obj_from_info_page(obj)",
"def instance(self) -> str:\n return pulumi.get(self, \"instance\")",
"def instance(self) -> str:\n return pulumi.get(self, \"instance\")",
"def get_instance(self, instance_id):\n return self.instances.get(instance_id)",
"def get_instance(self, instance):\n\n title = list(instance.keys())[0]\n instance = instance.get(title)\n return instance",
"def get(self, id):\n if id == 'body':\n return window.document.body\n else:\n return self.instances[id]",
"def site(obj):\n return \"%s\" % (obj.site.name)",
"def GetWebSiteInfo():\n if len(AppSettings.objects.filter(name='WebSiteName')) > 0:\n WebSiteInfo.WebSiteName = AppSettings.objects.filter(name='WebSiteName')[0].value\n if len(AppSettings.objects.filter(name='ICP')) > 0:\n WebSiteInfo.ICP = AppSettings.objects.filter(name='ICP')[0].value\n if len(AppSettings.objects.filter(name='ICP_address')) > 0:\n WebSiteInfo.ICP_address = AppSettings.objects.filter(name='ICP_address')[0].value\n if len(AppSettings.objects.filter(name='Copyright')) > 0:\n WebSiteInfo.Copyright = AppSettings.objects.filter(name='Copyright')[0].value\n if len(AppSettings.objects.filter(name='Address')) > 0:\n WebSiteInfo.Address = AppSettings.objects.filter(name='Address')[0].value\n if len(AppSettings.objects.filter(name='Phone')) > 0:\n WebSiteInfo.Phone = AppSettings.objects.filter(name='Phone')[0].value",
"def get_short(self, cls, long):\n if cls == \"Website\":\n result = self.__session.query(Website).filter(Website.name == long).first()\n return result",
"def get_from_host(cls, host, silent=False):\n if cls.search([], count=True) == 1:\n return cls.search([])[0]\n try:\n website, = cls.search([('name', '=', host)])\n except ValueError:\n if not silent:\n raise WebsiteNotFound()\n else:\n return website",
"def get_site(name):\n return sites[name]",
"def GetInstance():\n pass",
"def get(self, id):\n if id == 'body':\n return document.body\n else:\n return self.instances[id]",
"def get_site(self, name) -> Site:\n ah_write = self.get_iis_object()\n section = ah_write.GetAdminSection(\"system.applicationHost/sites\", \"MACHINE/WEBROOT/APPHOST\")\n collection = section.Collection\n\n for i in range(collection.Count):\n site = collection[i]\n prop = site.Properties\n site_name = prop[\"name\"].Value\n if site_name == name:\n #site_id = prop[\"id\"].Value\n default_app = self.get_default_app(site)\n bindings = self.get_site_bindings(site.ChildElements)\n apps = self.get_applications(site)\n\n return Site(name, bindings, default_app, apps)\n\n return None",
"def get_site(self, sitename):\n return self.cache.get(sitename)",
"def get_current_site(cls):\n return cls.get_by_key_name(cls.the_key_name)",
"def get(cls):\n return cls.instance"
] | [
"0.64772785",
"0.636314",
"0.6204019",
"0.6150029",
"0.6150029",
"0.6122803",
"0.61147714",
"0.61060196",
"0.5981487",
"0.5979728",
"0.597262",
"0.5944322",
"0.5904715",
"0.5840119",
"0.57975864",
"0.57975864",
"0.5797228",
"0.5790714",
"0.5753903",
"0.57280713",
"0.56908727",
"0.5673728",
"0.5662016",
"0.5645956",
"0.5598724",
"0.55882174",
"0.5588167",
"0.55865574",
"0.55778486",
"0.5572141"
] | 0.7607952 | 0 |
Looks for the store whose `values` are sent by magento against the website with `website` in tryton. If a record exists for this, return that else create a new one and return | def find_or_create(cls, website, values):
stores = cls.search([
('website', '=', website.id),
('magento_id', '=', int(values['group_id']))
])
if stores:
return stores[0]
return cls.create([{
'name': values['name'],
'magento_id': int(values['group_id']),
'website': website.id,
}])[0] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_or_create(cls, instance, values):\n websites = cls.search([\n ('instance', '=', instance.id),\n ('magento_id', '=', int(values['website_id']))\n ])\n\n if websites:\n return websites[0]\n\n return cls.create([{\n 'name': values['name'],\n 'code': values['code'],\n 'instance': instance.id,\n 'magento_id': int(values['website_id']),\n }])[0]",
"def find_or_create(cls, store, values):\n store_views = cls.search([\n ('store', '=', store.id),\n ('magento_id', '=', int(values['store_id']))\n ])\n\n if store_views:\n return store_views[0]\n\n return cls(**{\n 'name': values['name'],\n 'code': values['code'],\n 'store': store.id,\n 'magento_id': int(values['store_id']),\n })",
"def find_store(request):\n r = {'result':'-1'}\n \n import httplib, urllib\n\n h = httplib.HTTPConnection(\"api.remix.bestbuy.com\")\n lat = request.POST['lat']\n lon = request.POST['lon']\n distance = request.POST['distance']\n\n h.request('GET', '/v1/stores(area(%s,%s,%s))?format=json&apiKey=%s'%(lat, lon, distance, api_key))\n\n result = h.getresponse()\n logger.info( \"BestBuy Location HTTP output: %s, reason: %s\"%(result.status, result.reason) )\n response = json.loads(result.read())\n\n stores = response.get(\"stores\", [])\n if len(stores) > 0: \n r['result'] = stores[0]\n\n return JSONHttpResponse(r)",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def GetWebSiteInfo():\n if len(AppSettings.objects.filter(name='WebSiteName')) > 0:\n WebSiteInfo.WebSiteName = AppSettings.objects.filter(name='WebSiteName')[0].value\n if len(AppSettings.objects.filter(name='ICP')) > 0:\n WebSiteInfo.ICP = AppSettings.objects.filter(name='ICP')[0].value\n if len(AppSettings.objects.filter(name='ICP_address')) > 0:\n WebSiteInfo.ICP_address = AppSettings.objects.filter(name='ICP_address')[0].value\n if len(AppSettings.objects.filter(name='Copyright')) > 0:\n WebSiteInfo.Copyright = AppSettings.objects.filter(name='Copyright')[0].value\n if len(AppSettings.objects.filter(name='Address')) > 0:\n WebSiteInfo.Address = AppSettings.objects.filter(name='Address')[0].value\n if len(AppSettings.objects.filter(name='Phone')) > 0:\n WebSiteInfo.Phone = AppSettings.objects.filter(name='Phone')[0].value",
"def get_website(self, name):\n return self.store.website.id",
"def test_find_by_site_name(self):\n\n self.new_details.save_details()\n twitter = Details('Dennis', 'Facebook', 'Kiplangat', 'kiplangat18')\n twitter.save_details()\n details_exists = Details.find_by_site_name('Facebook')\n self.assertTrue(details_exists, twitter)",
"def create_stores(self):\n (instances,) = self\n shop_obj = self.env['sale.shop']\n shop_ids = shop_obj.search([('instance_id', '=', self[0].id)])\n payment_ids = self.env['account.payment.term'].search([])\n\n if not shop_ids:\n shop_data = {\n 'sale_channel_shop': True,\n 'name': instances.name + ' Shop',\n 'payment_default_id': payment_ids[0].id,\n 'warehouse_id': 1,\n 'instance_id': self[0].id,\n 'marketplace_image': instances.image,\n 'order_policy': 'prepaid'\n }\n shop_id = shop_obj.create(shop_data)\n else:\n shop_id = shop_ids[0]\n return shop_id",
"def save(self, store):\n self.db.query(f\"\"\"\n INSERT INTO {self.table} (id, name)\n VALUES (:id, :name)\n ON DUPLICATE KEY UPDATE name = :name\n \"\"\", **vars(store))\n\n if not store.id:\n store.id = self.get(name=store.name).id\n return store",
"def import_websites(cls, instances):\n Website = Pool().get('magento.instance.website')\n Store = Pool().get('magento.website.store')\n StoreView = Pool().get('magento.store.store_view')\n MagentoOrderState = Pool().get('magento.order_state')\n\n try:\n instance, = instances\n except ValueError:\n cls.raise_user_error('multiple_instances')\n\n with Transaction().set_context(magento_instance=instance.id):\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n MagentoOrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )\n\n # Import websites\n with Core(\n instance.url, instance.api_user, instance.api_key\n ) as core_api:\n websites = []\n stores = []\n\n mag_websites = core_api.websites()\n\n # Create websites\n for mag_website in mag_websites:\n websites.append(Website.find_or_create(\n instance, mag_website\n ))\n\n for website in websites:\n mag_stores = core_api.stores(\n {'website_id': {'=': website.magento_id}}\n )\n\n # Create stores\n for mag_store in mag_stores:\n stores.append(Store.find_or_create(website, mag_store))\n\n for store in stores:\n mag_store_views = core_api.store_views(\n {'group_id': {'=': store.magento_id}}\n )\n\n # Create store views\n for mag_store_view in mag_store_views:\n store_view = StoreView.find_or_create(\n store, mag_store_view\n )\n # AR refactoring\n store_view.save()",
"def get_store(self, store_id):\n for store in self.get_stores():\n if store[\"code\"] == store_id:\n return store",
"def search_store_relationships(storename, exp_stor_db, budg_db, stor_exp_data_path, stor_db, stor_data_path):\n exp_stor_dbKeys = exp_stor_db.keys()\n\n if storename not in exp_stor_dbKeys:\n storename, stor_db, exp_stor_db = select_store_for_purchase(\n storename, stor_data_path, stor_db, exp_stor_db, stor_exp_data_path)\n\n exps_fr_store = exp_stor_db[storename]\n\n if len(exps_fr_store) == 0:\n selected_exps = util.select_indices_of_list(f\"No expenses for '{storename}'. Please select one or multiple to go with this store from now on.\",\n list(budg_db.keys()),\n return_matches=True)\n if len(selected_exps) == 1:\n selected_exp = selected_exps[0]\n else:\n selected_exp = util.select_from_list(\n selected_exps, f\"Please select which expense you want for this transaction at '{storename}': \", ret_match=True)\n exp_stor_db[storename] = selected_exps\n data_help.write_to_jsonFile(stor_exp_data_path, exp_stor_db)\n\n elif len(exps_fr_store) == 1:\n selected_exp = exps_fr_store[0]\n else:\n selected_exp = exps_fr_store[util.select_from_list(\n exps_fr_store, f\"Please select an expense for this transaction at '{storename}': \")]\n\n return selected_exp, dict(exp_stor_db), stor_db, storename",
"def get_site_to_check(self, filename):\n csv_file = csv.reader(open(filename, 'r'), delimiter=\";\")\n \n sites={}\n for row in csv_file:\n if len(row) == 0:\n continue\n site_url = row[0]\n sites[site_url] = {\"url\":site_url}\n \n sites[site_url][\"checks\"] = []\n for check in row[1:]:\n sites[site_url][\"checks\"].append(check)\n return sites",
"def getSitesForSE( storageElement, gridName = '' ):\n\n result = getSiteSEMapping( gridName )\n if not result['OK']:\n return result\n\n mapping = result['Value']\n\n finalSites = []\n\n for site in mapping:\n if storageElement in mapping[site]:\n finalSites.append( site )\n return S_OK( finalSites )",
"def get_or_create(cls, url, title=None):\n try:\n return super().get(Source.url == url)\n except peewee.DoesNotExist:\n return cls.create(url=url, title=title)",
"def search_by_product_store(self, product_id, store_id):\n\n return self.fetch_one(\"\"\"\n SELECT\n *\n FROM\n product_stores\n WHERE\n product_id=%s\n AND\n store_id=%s\n \"\"\", (product_id, store_id))",
"def get_site(self, name) -> Site:\n ah_write = self.get_iis_object()\n section = ah_write.GetAdminSection(\"system.applicationHost/sites\", \"MACHINE/WEBROOT/APPHOST\")\n collection = section.Collection\n\n for i in range(collection.Count):\n site = collection[i]\n prop = site.Properties\n site_name = prop[\"name\"].Value\n if site_name == name:\n #site_id = prop[\"id\"].Value\n default_app = self.get_default_app(site)\n bindings = self.get_site_bindings(site.ChildElements)\n apps = self.get_applications(site)\n\n return Site(name, bindings, default_app, apps)\n\n return None",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n else:\n return store",
"def get_best_matching_web_site(self):\n\n # first fill the web_df columns we need for ranking\n for i_web, (url_key, url_info) in enumerate(self.collection.items()):\n index = url_info.index\n if url_info.url_analyse is None:\n logger.warning(\"url {url_key} yielded None analyse. Skip to next\")\n continue\n exists = url_info.url_analyse.exists\n self.company_urls_df.loc[index, URL_KEY] = url_info.url\n self.company_urls_df.loc[index, EXISTS_KEY] = exists\n if exists:\n self.company_urls_df.loc[index, DISTANCE_KEY] = url_info.match.distance\n self.company_urls_df.loc[index, STRING_MATCH_KEY] = url_info.match.string_match\n self.company_urls_df.loc[index, HAS_POSTCODE_KEY] = url_info.match.has_postcode\n self.company_urls_df.loc[index, HAS_KVK_NR] = url_info.match.has_kvk_nummer\n self.company_urls_df.loc[index, RANKING_KEY] = url_info.match.ranking\n self.company_urls_df.loc[index, DISTANCE_STRING_MATCH_KEY] = \\\n url_info.match.url_match\n\n # only select the web site which exist\n mask = self.company_urls_df[EXISTS_KEY]\n\n # create mask for web name distance\n if self.threshold_distance is not None:\n # select all the web sites with a minimum distance or one higher\n m1 = (self.company_urls_df[DISTANCE_KEY] - self.company_urls_df[\n DISTANCE_KEY].min()) <= self.threshold_distance\n else:\n m1 = mask\n\n # create mask for web string match\n if self.threshold_string_match is not None:\n m2 = self.company_urls_df[STRING_MATCH_KEY] >= self.threshold_string_match\n else:\n m2 = mask\n\n m3 = self.company_urls_df[HAS_POSTCODE_KEY]\n m4 = self.company_urls_df[HAS_KVK_NR]\n\n # we mask al the existing web page and keep all pages which are either with\n # a certain string distance (m1) or in case it has either the post code or kvk\n # number we also keep it\n mask = mask & (m1 | m2 | m3 | m4)\n\n # make a copy of the valid web sides\n self.company_urls_df = self.company_urls_df[mask].copy()\n\n self.company_urls_df.sort_values([RANKING_KEY, DISTANCE_STRING_MATCH_KEY], inplace=True,\n ascending=[False, True])\n self.logger.debug(\"Sorted list {}\".format(self.company_urls_df[[URL_KEY, RANKING_KEY]]))",
"def linkSearch(self):\n self.identificationParams = []\n try:\n url = 'https://shopee.sg/api/v2/search_items/?by=relevancy&keyword=' + self.searchParameters + '&limit=' + str(\n self.itemQuantity) + '&newest=' + str(\n self.items_per_page) + '&order=desc&page_type=search' # Base URL\n print(url)\n r = requests.get(url, headers=self.HEADERS).json()\n for item in r['items']: # Store name, price, stocks left and amount sold in respective lists\n self.identificationParams.append((item['shopid'], item['itemid']))\n except AttributeError:\n self.identificationParams = []",
"def get_site(self, sitename):\n return self.cache.get(sitename)",
"def test_getSiblingExistsBackend(self):\n self.store.powerUp(self.contentStore1, IBackendStore)\n self._retrievalTest()",
"def find_product_by_id(product_id, store_name): # TODO SEARCH PRODUCT BY ID IF DECIDED THAT EVERY PRODUCT HAS\n # DIFFERENT ID IS THE SAME NEED TO CHECK\n\n return store_handler.find_product_by_id(product_id, store_name)",
"def get_site(name):\n return sites[name]",
"def extract_stores_from_results(stores: Dict, filter_town: Optional[str] = 'Budapest'):\n all_stores = stores.get('results', [])\n target_stores = []\n for store in all_stores:\n store_data = store.get('location', {})\n coords = store_data.get('geo', {}).get('coordinates', {})\n address = store_data.get('contact', {}).get('address')\n if filter_town and filter_town not in address.get('town'):\n continue\n street = address.get('lines', [{}])[0].get('text')\n target_stores.append({'_id': store_data.get('id'),\n 'address': street,\n 'lon': coords.get('longitude'),\n 'lat': coords.get('latitude'),\n '_type': store_data.get('classification', {}).get('type'),\n 'name': store_data.get('name')})\n\n orm = ORM()\n for store in target_stores:\n orm.add_shop(**store)",
"def get_store(self, store_name: str) -> Any:\n pass",
"def test_creation_of_duplicate_service_in_store(self):\n create_store = self.client.post(create_store_url, data=json.dumps(self.shop_zero), headers=self.my_header)\n store_id = json.loads(create_store.data)\n store_id = json.loads(store_id['store_id'])\n store_id = store_id['$oid']\n response2 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.service_zero),\n headers=self.my_header)\n response3 = self.client.post(store_url + store_id + '/service/',\n data=json.dumps(self.service_zero),\n headers=self.my_header)\n self.assertEqual(response3.status, \"409 CONFLICT\")\n self.assertIn(\"Sorry. Live at the yard already exists in this store.\", str(response3.data))",
"def get_mixed_stores(mixed_setting):\n return mixed_setting[\"default\"][\"OPTIONS\"][\"stores\"]",
"def get_matched_sites(clowder_url: str, clowder_key: str, plot_name: str, lat_lon: tuple, filter_date: str) -> dict:\n # SENSOR is the plot\n matched_sites = {}\n if plot_name:\n # If provided a plot name, see if the sensor exists before going into more expensive logic\n sensor_data = __internal__.get_sensor_by_name(plot_name, clowder_url, clowder_key)\n if sensor_data:\n matched_sites[sensor_data['id']] = {\n \"name\": plot_name,\n \"geom\": sensor_data['geometry']\n }\n\n if not matched_sites:\n # If we don't have existing sensor to use quickly, we must query geographically\n site_list = get_sites_by_latlon(lat_lon, filter_date)\n for one_site in site_list:\n plot_name = one_site['sitename']\n plot_geom = json.loads(wkt_to_geojson(one_site['geometry']))\n\n # Get existing sensor with this plot name from geostreams, or create if it doesn't exist\n sensor_data = __internal__.get_sensor_by_name(plot_name, clowder_url, clowder_key)\n if not sensor_data:\n sensor_id = __internal__.create_sensor(plot_name, clowder_url, clowder_key, plot_geom,\n {\n \"id\": \"MAC Field Scanner\",\n \"title\": \"MAC Field Scanner\",\n \"sensorType\": GEOSTREAMS_CSV_SENSOR_TYPE\n },\n \"Maricopa\")\n matched_sites[sensor_id] = {\"name\": plot_name, \"geom\": plot_geom}\n else:\n sensor_id = sensor_data['id']\n matched_sites[sensor_id] = {\"name\": plot_name, \"geom\": plot_geom}\n\n return matched_sites",
"def CheckProductsToSave(self, request=\"\", default=None):\n listedParts = []\n retValues = {}\n \n for part in unpackDictionary(request):\n part=getCleanBytesDictionary(part)\n hasSaved = True\n existingID=False\n order = None\n if not('engineering_code' in part):\n continue\n if part['engineering_code'] in listedParts:\n continue\n\n if ('engineering_code' in part) and ('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code']),\n ('engineering_revision', '=', part['engineering_revision'])\n ]\n elif ('engineering_code' in part) and not('engineering_revision' in part):\n criteria = [\n ('engineering_code', '=', part['engineering_code'])\n ]\n order='engineering_revision'\n existingIDs = self.search( criteria, order=order )\n if existingIDs:\n ids=sorted(existingIDs.ids)\n existingID = ids[len(ids) - 1]\n if existingID:\n hasSaved = False\n objPart = self.browse(existingID)\n part['engineering_revision']=objPart.engineering_revision\n if ('_lastupdate' in part) and part['_lastupdate']:\n if (getUpdTime(objPart) < datetime.strptime(part['_lastupdate'], '%Y-%m-%d %H:%M:%S')):\n if objPart._iswritable():\n hasSaved = True\n\n retValues[part['engineering_code']]={\n 'componentID':existingID,\n 'hasSaved':hasSaved} \n listedParts.append(part['engineering_code'])\n return packDictionary(retValues)"
] | [
"0.7311195",
"0.6705495",
"0.5444057",
"0.53808737",
"0.5374841",
"0.5243312",
"0.5237537",
"0.52265453",
"0.51574576",
"0.51567435",
"0.51019216",
"0.49754298",
"0.4973136",
"0.49432912",
"0.48770675",
"0.48759344",
"0.4862138",
"0.48415354",
"0.47996405",
"0.47916886",
"0.47872615",
"0.47731438",
"0.47700453",
"0.47255322",
"0.47177675",
"0.47162965",
"0.46981448",
"0.46809304",
"0.46642375",
"0.46620202"
] | 0.7607294 | 0 |
Exports tier prices of products from tryton to magento for this store | def export_tier_prices_to_magento(self):
    instance = self.website.instance
    for mag_product_template in self.website.magento_product_templates:
        product_template = mag_product_template.template
        product = product_template.products[0]
        # Get the price tiers from the product if the product has a price
        # tier table else get the default price tiers from current store
        price_tiers = product_template.price_tiers or self.price_tiers
        price_data = []
        for tier in price_tiers:
            if hasattr(tier, 'product'):
                # The price tier comes from a product, then it has a
                # function field for price, we use it directly
                price = tier.price
            else:
                # The price tier comes from the default tiers on store,
                # we dont have a product on tier, so we use the current
                # product in loop for computing the price for this tier
                price = self.price_list.compute(
                    None, product, product.list_price, tier.quantity,
                    self.website.default_uom
                )
            price_data.append({
                'qty': tier.quantity,
                'price': float(price),
            })
        # Update stock information to magento
        with magento.ProductTierPrice(
            instance.url, instance.api_user, instance.api_key
        ) as tier_price_api:
            tier_price_api.update(
                mag_product_template.magento_id, price_data
            )
    return len(self.website.magento_product_templates) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def default_export_(self, fields):\n Store = Pool().get('magento.website.store')\n\n store = Store(Transaction().context.get('active_id'))\n\n return {\n 'products_count': store.export_tier_prices_to_magento()\n }",
"def get_prices(self):\n pass",
"def import_prices(self):\n temp = dict(self.currencies_and_regions)\n for index, row in self.df.iterrows():\n self.set_journal_name(row[\"Journal Name \"])\n self.set_issn(row[\"ISSN\"])\n self.set_journal()\n self.set_currency(row[\"Currency\"])\n if not self.currency:\n continue\n cur = self.get_raw_currency(row[\"Currency\"])\n region = temp[cur]\n self.set_region(region)\n self.set_country(region)\n self.process_fte(row[\"Price Group\"])\n self.set_price(row[\"2021 rate\"])\n self.add_price_to_db()\n\n db.session.commit()",
"def get_transaction_prices(self):\n cleaned_data = self.cleaned_data()\n supplier_cleaned_data = cleaned_data.get('cleaned_supplier_data')\n transaction_cleaned_data = cleaned_data.get('cleaned_transaction_data')\n merged_data = self.merge_supplier_transaction(supplier_cleaned_data, transaction_cleaned_data)\n calculated_data = self.calculate_prices(merged_data)\n self.export_calculated_prices(calculated_data)\n return calculated_data",
"def _compute_amount(self):\n for line in self:\n price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)\n new_price = price\n if line.lot_id and line.product_id.tracking in ['lot','serial']:\n lot_id = self.env['stock.production.lot'].search([('name', '=', line.lot_id), ('product_id', '=', line.product_id.id)])\n if lot_id.tax_ids.filtered(lambda tax: tax.amount_type == 'based_on_margin'):\n if lot_id.cost_price:\n new_price -= lot_id.cost_price\n sh_tax = line.tax_id.filtered(lambda tax: tax.amount_type =='based_on_margin').compute_all(new_price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n taxes = line.tax_id.filtered(lambda tax: tax.amount_type !='based_on_margin').compute_all(price, line.order_id.currency_id, line.product_uom_qty, product=line.product_id, partner=line.order_id.partner_shipping_id)\n print(taxes)\n line.update({\n 'price_tax': sum(t.get('amount', 0.0) for t in taxes.get('taxes', [])) + sum(t.get('amount', 0.0) for t in sh_tax.get('taxes', [])),\n 'price_total': taxes['total_included'],\n 'price_subtotal': taxes['total_excluded'],\n })\n if self.env.context.get('import_file', False) and not self.env.user.user_has_groups('account.group_account_manager'):\n line.tax_id.invalidate_cache(['invoice_repartition_line_ids'], [line.tax_id.id])",
"def pricing_export(request, simulation):\n # Get all tolls.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n seed = np.random.randint(10000)\n filename = '{0}/website_files/exports/{1}.tsv'.format(settings.BASE_DIR,\n seed)\n with codecs.open(filename, 'w', encoding='utf8') as f:\n writer = csv.writer(f, delimiter='\\t')\n # Get a dictionary with all the values to export.\n values = list()\n for toll in tolls:\n if toll.usertype:\n usertype_id = toll.usertype.user_id\n else:\n usertype_id = ''\n values.append([toll.location.user_id, toll.get_value_vector(),\n toll.get_time_vector(), usertype_id])\n # Write a custom header.\n writer.writerow(['link', 'values', 'times', 'traveler_type'])\n writer.writerows(values)\n\n with codecs.open(filename, 'r', encoding='utf8') as f:\n # Build a response to send a file.\n response = HttpResponse(f.read())\n response['content_type'] = 'text/tab-separated-values'\n response['Content-Disposition'] = 'attachement; filename=tolls.tsv'\n # We delete the export file to save disk space.\n os.remove(filename)\n return response",
"def import_prices(self):\n temp = dict(self.currencies_and_regions)\n for index, row in self.df.iterrows():\n self.set_mini_bundle_name(row[\"Journal Name \"])\n self.set_issns(row[\"ISSN\"])\n self.set_currency(row[\"Currency\"])\n if not self.currency:\n continue\n cur = self.get_raw_currency(row[\"Currency\"])\n region = temp[cur]\n self.set_region(region)\n self.set_country(region)\n self.set_price(row[\"2021 rate\"])\n self.add_prices()\n\n # reset for next loop\n self.issns = []\n db.session.commit()",
"def getPrices(self, nodePair):\n\n core = self.core\n path = core.load_instances(self.META[\"UnitPrices\"])[0][\"nodePath\"]\n node = core.load_by_path(self.root_node,path)\n children = core.load_children(node)\n if children:\n for child in children:\n if core.is_connection(child) and core.get_pointer_path(child,\"src\") == nodePair.get_bundleGroupNode()[\"nodePath\"] and core.get_pointer_path(child,\"dst\") == nodePair.get_countryGroupNode()[\"nodePath\"]:\n nodePair.set_installCost(core.get_attribute(child, \"installCost\"))\n nodePair.set_additionalInstallCost(core.get_attribute(child, \"additionalInstallCost\"))\n nodePair.set_bronzeCost(core.get_attribute(child, \"bronzeCost\"))\n nodePair.set_additionalBronzeCost(core.get_attribute(child, \"additionalBronzeCost\"))\n nodePair.set_silverCost(core.get_attribute(child,\"silverCost\"))\n nodePair.set_additionalSilverCost(core.get_attribute(child,\"additionalSilverCost\"))\n nodePair.set_goldCost(core.get_attribute(child,\"goldCost\"))\n nodePair.set_additionalGoldCost(core.get_attribute(child,\"additionalGoldCost\"))\n return\n else:\n print(\"There are no UnitPrices in the database\")",
"def print_analysis_prices(pv, demand,retail,export, param, E,isCommunity=False,hh=None):\n RemainingSOC=E['LevelOfCharge'][-1]\n timestep = param['timestep']\n SelfConsumption = np.sum(E['inv2load']) * timestep # AC\n TotalFromGrid = np.sum(E['grid2load']) * timestep # AC\n TotalToGrid = np.sum(E['inv2grid']) * timestep # AC\n TotalLoad = demand.sum() * timestep # AC\n #TotalBattToLoad = np.sum(E['store2load']) * timestep # AC\n TotalBattToGrid = np.sum(E['store2grid']) * timestep # AC\n TotalPV = pv.sum() * timestep # DC\n TotalBatteryGeneration = np.sum(E['store2inv']) * timestep # DC\n TotalBatteryConsumption = np.sum(E['pv2store']) * timestep # DC\n if 'inv_losses' in E.keys():\n BatteryLosses=E['batt_losses'].sum()*timestep\n InverterLosses=E['inv_losses'].sum()*timestep\n else:\n BatteryLosses = TotalBatteryConsumption * (1 - param['BatteryEfficiency'])\n InverterLosses = (TotalPV - BatteryLosses-RemainingSOC) * (1 - param['InverterEfficiency'])\n SelfConsumptionRate = SelfConsumption / TotalPV * 100 # in %\n SelfSufficiencyRate = SelfConsumption / TotalLoad * 100\n Bill=((E['grid2load'] * timestep) * retail - (E['inv2grid'] * timestep ) * export).sum()\n Batt_revenue=((E['store2load']*param['InverterEfficiency']*timestep*retail-\n E['pv2store']*param['InverterEfficiency']*timestep*export)).sum()\n \n print ('Total yearly consumption: {:1g} kWh'.format(TotalLoad))\n print ('Total PV production: {:1g} kWh'.format(TotalPV))\n print ('Self Consumption: {:1g} kWh'.format(SelfConsumption))\n print ('Total fed to the grid: {:1g} kWh'.format(TotalToGrid))\n print ('Total bought from the grid: {:1g} kWh'.format(TotalFromGrid))\n print ('Self consumption rate (SCR): {:.3g}%'.format(SelfConsumptionRate))\n print ('Self sufficiency rate (SSR): {:.3g}%'.format(SelfSufficiencyRate))\n print ('Amount of energy provided by the battery: {:1g} kWh'.format(TotalBatteryGeneration))\n print ('Total battery losses: {:1g} kWh, i.e., {:1g}% of the total PV'.format(BatteryLosses,BatteryLosses/TotalPV*100))\n #print('Total energy from battery to the load {:1g} kWh'.format(TotalBattToLoad))\n print('Total energy from battery to the grid {:1g} kWh'.format(TotalBattToGrid))\n #print ('Total inverter losses: {:1g} kWh'.format(InverterLosses))\n #print ('Total inverter losses: {:1g} kWh'.format(InverterLosses))\n print ('Total inverter losses: {:1g} kWh, i.e., {:1g}% of the total PV'.format(InverterLosses,InverterLosses/TotalPV*100))\n \n \n TotalCurtailment=np.sum(E['inv2curt'])*timestep # DC\n print ('Total curtailment : {:1g} kWh'.format(TotalCurtailment)) \n residue = TotalPV + TotalFromGrid - TotalToGrid - BatteryLosses - InverterLosses - TotalLoad - TotalCurtailment - RemainingSOC\n print ('Residue (check): {:1g} kWh'.format(residue))\n PV_check = TotalPV - SelfConsumption - TotalToGrid - BatteryLosses - InverterLosses - TotalCurtailment - RemainingSOC\n print ('PV Residue (check): {:1g} kWh'.format(PV_check))\n \n print(bcolors.WARNING + 'Maximum power injected into the grid is {:1g} kW'.format(E['inv2grid'].max())+bcolors.ENDC)\n print(bcolors.WARNING + 'Maximum power drained from the grid is {:1g} kW'.format(E['grid2load'].max())+bcolors.ENDC)\n print (bcolors.WARNING + 'Total bill: {:1g}\\n\\n'.format(Bill)+bcolors.ENDC)\n print (bcolors.WARNING + 'Total Batt_revenue: {:1g}\\n\\n'.format(Batt_revenue)+bcolors.ENDC)\n \n if isCommunity==False:\n AverageDepth = TotalBatteryGeneration / (365 * param['BatteryCapacity'])\n Nfullcycles = 365 * AverageDepth \n print ('Number of equivalent full cycles per 
year: {:1g} '.format(Nfullcycles))\n print ('Average Charging/Discharging depth: {:1g}\\n\\n'.format(AverageDepth))\n \n out = { 'SCR': SelfConsumptionRate, # \n 'SSR':SelfSufficiencyRate, # \n 'EFC': Nfullcycles, # \n 'Demand_peak': E['grid2load'].max(), # \n 'Inj_peak': E['inv2grid'].max(), #\n 'avg_dod': AverageDepth, #\n 'bill': Bill,\n 'Batt_revenue':Batt_revenue,\n 'Batt_penetration':param['batt_penetration'],\n 'PV_penetration':param['pv_penetration'],\n 'seed':param['seed'],\n 'hh':hh\n }\n else:\n out = { 'SCR': SelfConsumptionRate, # \n 'SSR':SelfSufficiencyRate, # \n 'EFC': None, # \n 'Demand_peak': E['grid2load'].max(), # \n 'Inj_peak': E['inv2grid'].max(), #\n 'avg_dod': None, #\n 'bill': Bill,\n 'Batt_revenue':Batt_revenue,\n 'Batt_penetration':param['batt_penetration'],\n 'PV_penetration':param['pv_penetration'],\n 'seed':param['seed'],\n 'hh':hh\n }\n return out",
"def export_inventory_to_magento(self):\n Location = Pool().get('stock.location')\n\n product_templates = []\n instance = self.instance\n\n locations = Location.search([('type', '=', 'storage')])\n\n for magento_product_template in self.magento_product_templates:\n product_template = magento_product_template.template\n product_templates.append(product_template)\n\n with Transaction().set_context({'locations': map(int, locations)}):\n product_data = {\n 'qty': product_template.quantity,\n 'is_in_stock': '1' if product_template.quantity > 0\n else '0',\n }\n\n # Update stock information to magento\n with magento.Inventory(\n instance.url, instance.api_user, instance.api_key\n ) as inventory_api:\n inventory_api.update(\n magento_product_template.magento_id, product_data\n )\n\n return product_templates",
"def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()",
"def _compute_taxed_lst_price2(self):\n company_id = self._context.get(\n 'company_id', self.env.user.company_id.id)\n for product in self:\n product.taxed_lst_price2 = product.taxes_id.filtered(\n lambda x: x.company_id.id == company_id).compute_all(\n product.list_price2,\n self.env.user.company_id.currency_id,\n product=product)['total_included']",
"def export(self):\r\n self.prices[\"returns\"] = self.returns\r\n self.prices.columns = ['prices', 'returns']\r\n self.prices = self.prices.dropna()\r\n \r\n name = QFileDialog.getSaveFileName(None, 'Save File', filter='*.xlsx')\r\n if(name[0] == ''):\r\n # if name empty\r\n pass\r\n else:\r\n self.prices.to_excel(name[0])",
"def _compute_taxed_lst_price2(self):\n company_id = self._context.get(\n 'company_id', self.env.user.company_id.id)\n for product in self:\n product.taxed_lst_price = product.taxes_id.filtered(\n lambda x: x.company_id.id == company_id).compute_all(\n product.lst_price,\n self.env.user.company_id.currency_id,\n product=product)['total_included']",
"def click_vendor_price_list_detail_rates_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_rates_grid_div_id)",
"def prepare_product_for_export(self):\n _logger.info(\"Starting product exporting via %s method...\" % self.export_method)\n\n active_template_ids = self._context.get(\"active_ids\", [])\n templates = self.env[\"product.template\"].browse(active_template_ids)\n product_templates = templates.filtered(lambda template: template.type == \"product\")\n if not product_templates:\n raise Warning(\"It seems like selected products are not Storable products.\")\n\n if self.export_method == \"direct\":\n return self.export_direct_in_shopify(product_templates)\n elif self.export_method == \"csv\":\n return self.export_csv_file(product_templates)",
"def getPrice(self):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36\"}\n response = requests.get(self.__product_URL, headers=headers)\n #print(response.status_code)\n soup = BeautifulSoup(response.content, \"html.parser\")\n file = open(\"testproduct.html\", \"wb\")\n file.write(soup.prettify(\"utf-8\"))\n file.close()\n title = soup.find(\"span\", attrs={\"id\": \"productTitle\", \"class\": \"a-size-large\"}).string.strip()\n self.__product_title = title\n temp = soup.find_all(\"a\", attrs={\"class\": \"a-accordion-row a-declarative accordion-header\"})[1]\n price = temp.find(\"span\", attrs={\"class\": \"a-color-price\"}).text.strip()\n lst = list(price)\n lst.remove(\",\")\n price = int(float(\"\".join(lst)))\n self.__product_price = price\n #print(self.__product_price)",
"def get_install_uninstall_price(sites,i):\n if sites[i][\"Series\"] != \"Velocloud\":\n if sites[i][\"Site setup\"] == \"Standby\":\n price = 600\n else:\n price = 600 + 150 * (sites[i][\"Device quantity\"] - 1)\n elif sites[i][\"Series\"] == \"Velocloud\":\n excel_df = pandas.read_excel(\"./imports/velocloudInstallPrices.xlsx\",engine='openpyxl',dtype=object)\n result = excel_df.to_json(orient=\"records\")\n installprices = json.loads(result)\n for prices in installprices:\n if sites[i][\"Country code\"] == prices[\"code\"]:\n if sites[i][\"Site setup\"] == \"Standby\":\n price = prices[\"base\"]\n else:\n price= prices[\"base\"] + prices[\"additional\"] * (sites[i][\"Device quantity\"] - 1)\n return price",
"def export_inventory(self, cursor, user, ids, context):\n website_obj = self.pool.get('magento.instance.website')\n\n website_id = context.get('active_id')\n t = threading.Thread(target=website_obj.export_inventory_to_magento,\n args=(cursor, user, website_id, context, True))\n t.daemon = True\n t.start()\n\n return True#self.open_products(cursor, user, map(int, products), context)",
"def test_visualize_price_breakdown(self):\n pass",
"def pricing_export_save(simulation, dir):\n # Get all tolls.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n filename = dir + '/pricings.tsv'\n\n with codecs.open(filename, 'w', encoding='utf8') as f:\n writer = csv.writer(f, delimiter='\\t')\n # Get a dictionary with all the values to export.\n values = list()\n for toll in tolls:\n if toll.usertype:\n usertype_id = toll.usertype.user_id\n else:\n usertype_id = ''\n values.append([toll.location.user_id, toll.get_value_vector(),\n toll.get_time_vector(), usertype_id])\n # Write a custom header.\n writer.writerow(['link', 'values', 'times', 'traveler_type'])\n writer.writerows(values)\n\n return filename",
"def export_and_update_products(self, cursor, user, ids=None, context=None):\n if context is None:\n context = {}\n export_and_update_catalog_obj = self.pool.get('magento.instance.website.export_catalog')\n if not ids:\n ids = self.search(cursor, user, [], context)\n \n for website in self.browse(cursor, user, ids, context):\n context['active_id'] = website.id\n attribute_set = export_and_update_catalog_obj._get_default_attribute_set(cursor, user, context=context)\n export_and_update_catalog_id = export_and_update_catalog_obj.create(cursor, user, {'export_images': True, 'attribute_set': attribute_set}, context)\n export_and_update_catalog_obj.update_and_export_products_openerp_to_magento(cursor, user, [export_and_update_catalog_id], context)",
"def price_tier(self):\n return self._safe_value(VAR_PRICETIER, str)",
"def set_final_settle_prices(pr_trades, exer_date, mode, TestMode):\n\n if not pr_trades:\n msg = ('No Exercise/Assign trades made on date {0}'.format(exer_date))\n Logme()(msg, 'WARNING')\n return\n\n for t in pr_trades:\n ins = t.insaddr\n settle_price = getSettlePriceFromMarket(ins, exer_date, \"SETTLEMENT\")\n if not settle_price:\n msg = ('Will skip trade {0} since there is no price for this '\n 'instrument {1}.'.format(t.trdnbr, ins.insid))\n Logme()(msg)\n continue\n\n strike_price = convert_price_to_und_or_strike_quotation(ins,\n ins.strike_price, 1)\n\n if ins.settlement == 'Cash':\n if ins.call_option:\n p_der = FBDPCommon.create_quotetype_price(ins,\n settle_price - strike_price)\n elif ins.instype == 'Future/Forward':\n p_der = settle_price\n else:\n p_der = FBDPCommon.create_quotetype_price(ins,\n strike_price - settle_price)\n\n else: # Physical settlement\n p_phys = 0.0 # price to be set in the physical trade\n t_phys = get_physical_trade(t)\n if not t_phys:\n Logme()('Physical settlement trade does not exist for trade '\n '{0}.'.format(t.trdnbr))\n continue\n if mode == 'Market':\n p_phys = settle_price\n if ins.instype == 'Option':\n p_phys = settle_price\n if ins.call_option:\n p_der = FBDPCommon.create_quotetype_price(ins,\n settle_price - strike_price)\n else:\n p_der = FBDPCommon.create_quotetype_price(ins,\n strike_price - settle_price)\n else: # Future\n p_der = settle_price\n\n else: # Physical is done to the strike price (Strike mode)\n p_der = 0.0\n if ins.instype == 'Option':\n p_phys = ins.strike_price\n else: # Future\n p_phys = settle_price\n\n if (abs(ins.phys_contr_size) > 0.000001 and\n abs(ins.phys_contr_size - ins.contr_size) > 0.000001):\n update_exercise_payment(t, settle_price, mode, TestMode)\n\n phys_clone = t_phys.clone()\n phys_clone.price = p_phys\n if (ins.instype in ['Option', 'Warrant'] and\n ins.und_instype == 'Curr'):\n phys_clone.fx_update_non_dealt_amount(p_phys)\n else:\n phys_clone.premium = trade_premium_from_quote(\n phys_clone.trdnbr, p_phys, phys_clone.acquire_day)\n if not TestMode:\n phys_clone.commit()\n\n der_clone = t.clone()\n der_clone.price = p_der\n der_clone.premium = trade_premium_from_quote(der_clone.trdnbr, p_der,\n t.acquire_day)\n\n if not TestMode:\n der_clone.commit()\n ael.poll",
"def compute_all(self, price_unit, currency=None, quantity=1.0, product=None, partner=None, margin=0.0):\n\n non_margin_taxes = self.filtered(lambda t: not t.on_margin)\n\n result = super(AccountTax, non_margin_taxes).compute_all(\n price_unit,\n currency=currency,\n quantity=quantity,\n product=product,\n partner=partner\n )\n\n margin_taxes = self - non_margin_taxes\n if not margin_taxes:\n return result\n\n if len(self) == 0:\n company_id = self.env.user.company_id\n else:\n company_id = self[0].company_id\n\n if not currency:\n currency = company_id.currency_id\n\n prec = currency.decimal_places\n\n round_tax = False if company_id.tax_calculation_rounding_method == 'round_globally' else True\n if 'round' in self.env.context:\n round_tax = bool(self.env.context['round'])\n\n if not round_tax:\n prec += 5\n\n for tax in margin_taxes:\n\n tax_amount = tax._compute_amount(\n margin,\n price_unit,\n quantity=quantity,\n product=product,\n partner=partner\n )\n if not round_tax:\n tax_amount = round(tax_amount, prec)\n else:\n tax_amount = currency.round(tax_amount)\n\n result['taxes'].append({\n 'id': tax.id,\n 'name': tax.with_context(**{'lang': partner.lang} if partner else {}).name,\n 'amount': tax_amount,\n 'base': margin,\n 'on_margin': True,\n 'sequence': tax.sequence,\n 'account_id': tax.account_id.id,\n 'refund_account_id': tax.refund_account_id.id,\n 'analytic': tax.analytic,\n 'price_include': tax.price_include,\n 'tax_exigibility': tax.tax_exigibility\n })\n\n return result",
"def click_vendor_price_list_detail_dial_digits_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_dial_digits_grid_div_id)",
"def woo_update_stock(self, instance, woo_templates):\n common_log_obj = self.env[\"common.log.book.ept\"]\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n model = \"woo.product.product.ept\"\n model_id = common_log_line_obj.get_model_id(model)\n log_lines = []\n\n product_ids = woo_templates.mapped('woo_product_ids').mapped('product_id')\n product_stock = self.check_stock_type(instance, product_ids)\n variable_products = woo_templates.filtered(lambda x:x.woo_product_type == 'variable')\n simple_products = woo_templates.filtered(lambda x:x.woo_product_type == 'simple')\n if variable_products:\n log_lines += self.export_stock_variable_products(variable_products, product_stock,\n instance, model_id)\n if simple_products:\n log_lines += self.export_stock_simple_products(simple_products, product_stock, instance,\n model_id)\n\n instance.write({'last_inventory_update_time':datetime.now()})\n if log_lines:\n common_log_obj.create({\n 'type':'export',\n 'module':'woocommerce_ept',\n 'woo_instance_id':instance.id,\n 'active':True,\n 'log_lines':[(6, 0, log_lines)],\n })\n return True",
"def precio(self):\n price = self.html.xpath(self.xpath_sale_price)\n precio = map(self.limpieza_precio, price)\n return precio",
"def do_change_standard_price(self, cr, uid, ids, new_price, context=None):\n location_obj = self.pool.get('stock.location')\n move_obj = self.pool.get('account.move')\n move_line_obj = self.pool.get('account.move.line')\n if context is None:\n context = {}\n user_company_id = self.pool.get('res.users').browse(cr,\n uid, uid,\n context=context).\\\n company_id.id\n loc_ids = location_obj.search(cr, uid,\n [('usage', '=', 'internal'),\n ('company_id', '=', user_company_id)])\n for rec_id in ids:\n datas = self.get_product_accounts(cr, uid, rec_id, context=context)\n for location in location_obj.browse(cr, uid, loc_ids,\n context=context):\n contextc = context.copy()\n contextc.update({'location': location.id,\n 'compute_child': False})\n product = self.browse(cr, uid, rec_id, context=contextc)\n\n diff = product.standard_price - new_price\n if not diff:\n continue\n for prod_variant in product.product_variant_ids:\n qty = prod_variant.qty_available\n if qty:\n # Accounting Entries\n ref = '[{code}] {name}'.\\\n format(code=prod_variant.default_code,\n name=prod_variant.name)\n move_vals = {\n 'journal_id': datas['stock_journal'],\n 'company_id': location.company_id.id,\n 'ref': ref,\n }\n move_id = move_obj.create(cr, uid, move_vals,\n context=context)\n\n if diff * qty > 0:\n amount_diff = qty * diff\n debit_account_id = \\\n datas['property_difference_price_account_id']\n credit_account_id = \\\n datas['property_stock_valuation_account_id']\n\n else:\n amount_diff = qty * -diff\n debit_account_id = \\\n datas['property_stock_valuation_account_id']\n credit_account_id = \\\n datas['property_difference_price_account_id']\n\n move_line_obj.create(cr, uid, {\n 'name': _('Standard Price changed'),\n 'account_id': debit_account_id,\n 'debit': amount_diff,\n 'ref': ref,\n 'credit': 0,\n 'move_id': move_id, }, context=context)\n move_line_obj.create(cr, uid, {\n 'name': _('Standard Price changed'),\n 'account_id': credit_account_id,\n 'debit': 0,\n 'ref': ref,\n 'credit': amount_diff,\n 'move_id': move_id\n }, context=context)\n self.write(cr, uid, rec_id, {'standard_price': new_price})\n return True",
"def dl_tier(self, tier):\n\n tier_df = pd.DataFrame()\n\n for t in self.tier_tables[tier]:\n\n for y in self.years:\n\n df = get_GHGRP_records(y, t)\n\n tier_df = tier_df.append(df, sort=True, ignore_index=True)\n\n tier_df.columns = [x.lower() for x in tier_df.columns]\n\n # Fix issues with natural gas HHV reporting\n # Other fuel HHVs were exammined manually. There's a wide range for\n # wood and wood residuals, but not other fuels.\n if tier == 't2_hhv':\n\n tier_df['high_heat_value'] = \\\n tier_df.high_heat_value.astype('float32')\n\n natgas_st_index = tier_df[\n (tier_df.fuel_type == 'Natural Gas (Weighted U.S. Average)') &\n (tier_df.high_heat_value_uom == 'mmBtu/short ton')\n ].index\n\n tier_df.loc[natgas_st_index, 'high_heat_value_uom'] = 'mmBtu/scf'\n\n m_index = tier_df[\n (tier_df.fuel_type == 'Natural Gas (Weighted U.S. Average)') &\n (tier_df.high_heat_value.between(1, 1.2))\n ].index\n\n tier_df.high_heat_value.update(\n tier_df.loc[m_index, 'high_heat_value'].divide(1000)\n )\n\n drop_index = tier_df[\n (tier_df.fuel_type == 'Natural Gas (Weighted U.S. Average)') &\n (tier_df.high_heat_value.between(0.0012, 0.0014))\n ].index\n\n tier_df = tier_df[~tier_df.index.isin(drop_index)]\n\n return tier_df"
] | [
"0.75007206",
"0.57632595",
"0.5730589",
"0.5632844",
"0.55756336",
"0.5540464",
"0.55365473",
"0.5528423",
"0.54979783",
"0.5493264",
"0.5401977",
"0.5359194",
"0.5324393",
"0.52958417",
"0.52836686",
"0.5243634",
"0.5186369",
"0.51700795",
"0.5145529",
"0.5140887",
"0.5122772",
"0.51166797",
"0.50770926",
"0.50523823",
"0.50444853",
"0.503266",
"0.5020073",
"0.5004716",
"0.4972078",
"0.49602646"
] | 0.83025616 | 0 |
Returns instance related to store | def get_instance(self, name):
    return self.store.instance.id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_store(self):\n return self._store",
"def get_resource(self):\n return self._stores",
"def get_store(self, store_name: str) -> Any:\n pass",
"def get(cls):\n return cls.instance",
"def instance(self):\n return self.__instance",
"def get_store(store_name: str):\n return store_handler.get_store(store_name)",
"def get(cls, _id):\n return DataStore.get_instance(cls, _id)",
"def instance(self):\n return self._instance",
"def get_instance(self, instance):\n return self._get(_instance.Instance, instance)",
"def _get_instance(self):",
"def _get_instance(self):",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n else:\n return store",
"def get_store(self, store_id):\n for store in self.get_stores():\n if store[\"code\"] == store_id:\n return store",
"def get_store(self, subset: str) -> AbstractStore:\n return self.get_source(subset).get_driver().store",
"def Get(self):\n\n if not hasattr(self, \"_instance\"):\n self._instance = PersistenceManager()\n\n return self._instance",
"def store(self) -> AbstractStore:\n if self._store is None:\n self._store = SquirrelStore(url=self.url, serializer=self.serializer, **self.storage_options)\n return self._store",
"def store(self):\n return self.proto.store",
"def get_instance(self, instance):\n\n title = list(instance.keys())[0]\n instance = instance.get(title)\n return instance",
"def shop_product(self):\n return self.product.get_shop_instance(self.shop)",
"def get_store(cls):\n store_path = cls.get_store_path()\n store = JsonStore(store_path)\n return store",
"def get_current_store_view(cls):\n return cls(Transaction().context.get('magento_store_view'))",
"def find_by_instance_id(self, instance_id: str) -> Optional[StorageObject]:\n return self._store.get(instance_id, None)",
"def retrieve(self, store, uuid):\n\n stored_file = self._retrieve(store.object_type, uuid)\n return self._to_storage_model(store, stored_file)",
"def get(self, name):\n # see if its in the store or return none\n if name in self.store:\n return self.store[name]\n else:\n return None",
"def _get_instance(self):\n #return '_earth_instance_' + rospy.get_name().strip('/')\n return self.instance",
"def dict(self):\n return self.store",
"def get_object(self):\n if getattr(self, 'current_instance', None):\n ret = self.current_instance\n else:\n ret = super().get_object()\n return ret",
"def save(self, instance):\n return instance",
"def store_get_obj(request, store_name, obj_id):\n storedb = redis.Redis(host=HOST, db = STOREDB)\n \n if store_name not in get_store(request):\n return json_response(status=\"ERROR\", status_code=404, error=\"Store does not exist.\") \n \n return storedb.get(store_name + \":\" + obj_id)",
"def get_instance(self, name):\n return self.website.instance.id"
] | [
"0.7551979",
"0.70366335",
"0.67878246",
"0.6653498",
"0.66288173",
"0.6600048",
"0.6577926",
"0.6564163",
"0.6541299",
"0.6418138",
"0.6418138",
"0.63910156",
"0.639095",
"0.63711053",
"0.63395643",
"0.6328525",
"0.6310312",
"0.6309283",
"0.62250817",
"0.62165284",
"0.6184102",
"0.61830276",
"0.61821234",
"0.6113025",
"0.6099723",
"0.6052548",
"0.60523",
"0.60376656",
"0.6022749",
"0.5994555"
] | 0.7497158 | 1 |
Returns website related to store | def get_website(self, name):
    return self.store.website.id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def website(self):\n return self._website",
"def get_store(self, store_name: str) -> Any:\n pass",
"def get_store(self, store_id):\n for store in self.get_stores():\n if store[\"code\"] == store_id:\n return store",
"def storelocator():\n\n\treturn render_template(\"storelocator.html\")",
"def get_store_info(store_name: str):\n return store_handler.get_store_info(store_name)",
"def get_resource(self):\n return self._stores",
"def get_site(self):\n raise NotImplementedError",
"def find_store(request):\n r = {'result':'-1'}\n \n import httplib, urllib\n\n h = httplib.HTTPConnection(\"api.remix.bestbuy.com\")\n lat = request.POST['lat']\n lon = request.POST['lon']\n distance = request.POST['distance']\n\n h.request('GET', '/v1/stores(area(%s,%s,%s))?format=json&apiKey=%s'%(lat, lon, distance, api_key))\n\n result = h.getresponse()\n logger.info( \"BestBuy Location HTTP output: %s, reason: %s\"%(result.status, result.reason) )\n response = json.loads(result.read())\n\n stores = response.get(\"stores\", [])\n if len(stores) > 0: \n r['result'] = stores[0]\n\n return JSONHttpResponse(r)",
"def Site(self) -> str:",
"def get_product_info_for_store(cls, product_id, store_id):\n product_store_info = ProductStoreLink.query.filter_by(\n store_id=store_id, product_id=product_id).first()\n return product_store_info",
"def get_store(store_name: str):\n return store_handler.get_store(store_name)",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store or not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n return store.products # returns stock models",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n else:\n return store",
"def website(self):\n\n if \"website\" in self: return self.list(\"website\")[0]\n for cont in self.list(\"contact\") + self.list(\"comment\"):\n c = cont.lower()\n if (c.startswith(\"http://\") or c.startswith(\"https://\") or\n c.startswith(\"www.\")): return cont\n elif c.startswith(\"//www.\"): return \"http:\" + cont\n else:\n text = \"http://www.google.com/search?q=\"\n esc = lambda c: ord(c) > 127 and '%%%x'%ord(c) or c\n if \"labelid\" in self: text += ''.join(map(esc, self[\"labelid\"]))\n else:\n artist = util.escape(\"+\".join(self(\"artist\").split()))\n album = util.escape(\"+\".join(self(\"album\").split()))\n artist = util.encode(artist)\n album = util.encode(album)\n artist = \"%22\" + ''.join(map(esc, artist)) + \"%22\"\n album = \"%22\" + ''.join(map(esc, album)) + \"%22\"\n text += artist + \"+\" + album\n text += \"&ie=UTF8\"\n return text",
"def get_mixed_stores(mixed_setting):\n return mixed_setting[\"default\"][\"OPTIONS\"][\"stores\"]",
"def site(obj):\n return \"%s\" % (obj.site.name)",
"def linkSearch(self):\n self.identificationParams = []\n try:\n url = 'https://shopee.sg/api/v2/search_items/?by=relevancy&keyword=' + self.searchParameters + '&limit=' + str(\n self.itemQuantity) + '&newest=' + str(\n self.items_per_page) + '&order=desc&page_type=search' # Base URL\n print(url)\n r = requests.get(url, headers=self.HEADERS).json()\n for item in r['items']: # Store name, price, stocks left and amount sold in respective lists\n self.identificationParams.append((item['shopid'], item['itemid']))\n except AttributeError:\n self.identificationParams = []",
"def get_information_about_products(store_name: str):\n\n return store_handler.get_information_about_products(store_name)",
"def get_website(self):\n if self.website:\n return self.website\n else:\n try:\n return self.parent.get_website\n except AttributeError: # I think this is right \n return None",
"def index():\n # Retrieve and do a join to retrieve relevant name values for store FKs.\n # Use labels because columns can have same names in multiple tables.\n stores = db.session.query(\n Store.country_code,\n DistributionCenter.tag.label('dc_tag'),\n Store.number,\n Store.name,\n StoreStatus.name.label('store_status'),\n Store.street_name,\n Store.street_number,\n Store.postal_code,\n Store.city).join(\n DistributionCenter, Store.dc_id == DistributionCenter.id).join(\n StoreStatus, Store.status_id == StoreStatus.id).order_by(\n Store.country_code, Store.number).all()\n\n return render_template(\n 'stores/stores.html',\n stores=stores\n )",
"def get_site(name):\n return sites[name]",
"def my_site(self):\n if \"mySite\" in self._prop_dict:\n return self._prop_dict[\"mySite\"]\n else:\n return None",
"def GetWebSiteInfo():\n if len(AppSettings.objects.filter(name='WebSiteName')) > 0:\n WebSiteInfo.WebSiteName = AppSettings.objects.filter(name='WebSiteName')[0].value\n if len(AppSettings.objects.filter(name='ICP')) > 0:\n WebSiteInfo.ICP = AppSettings.objects.filter(name='ICP')[0].value\n if len(AppSettings.objects.filter(name='ICP_address')) > 0:\n WebSiteInfo.ICP_address = AppSettings.objects.filter(name='ICP_address')[0].value\n if len(AppSettings.objects.filter(name='Copyright')) > 0:\n WebSiteInfo.Copyright = AppSettings.objects.filter(name='Copyright')[0].value\n if len(AppSettings.objects.filter(name='Address')) > 0:\n WebSiteInfo.Address = AppSettings.objects.filter(name='Address')[0].value\n if len(AppSettings.objects.filter(name='Phone')) > 0:\n WebSiteInfo.Phone = AppSettings.objects.filter(name='Phone')[0].value",
"def get_store(hass: HomeAssistant) -> dict[str, Any] | None:\n return hass.data.get(DATA_STORE)",
"def test_store(self):\n self.selenium.get('{}/store'.format(self.live_server_url))",
"def search_by_product_store(self, product_id, store_id):\n\n return self.fetch_one(\"\"\"\n SELECT\n *\n FROM\n product_stores\n WHERE\n product_id=%s\n AND\n store_id=%s\n \"\"\", (product_id, store_id))",
"def get(self):\n\n return self.api.query(None, None, \"\"\"\n select s.name site_name, r.fqdn ce, pr.release, pr.arch\n from site s\n join resource_element r on r.site = s.id\n join pinned_releases pr on pr.ce_id = r.id\n where r.type = 'CE'\n \"\"\")",
"def _commercial_fields(self):\n return ['website']",
"def get_site():\n try:\n from leaves.middleware import request_context\n return request_context.site\n except:\n return Site.objects.select_related('preferences').get(pk=settings.SITE_ID)",
"def homepage():\n prods = db(db.product.prod_starred == True).select()\n return dict(\n prods = prods\n )"
] | [
"0.60933346",
"0.6046867",
"0.6016062",
"0.5945412",
"0.5943508",
"0.594223",
"0.59373456",
"0.59351546",
"0.5915155",
"0.59022623",
"0.58764714",
"0.58564603",
"0.58500767",
"0.58093214",
"0.57855004",
"0.57465243",
"0.57146066",
"0.57071215",
"0.5671937",
"0.5665069",
"0.5619936",
"0.5612323",
"0.56035495",
"0.55831164",
"0.5576146",
"0.55720776",
"0.5554832",
"0.5553815",
"0.55235344",
"0.5518897"
] | 0.68372726 | 0 |
Returns company related to store | def get_company(self, name):
    return self.store.company.id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_company(self, company_referece):\n url = 'companies/{0}'.format(company_referece)\n result = self.get(url)\n return result.get('company', result)",
"def company(self):\n return self._company",
"def company(self):\n return self._company",
"def get_company(self, name):\n return self.website.company.id",
"def get_company(self, name):\n return self.instance.company.id",
"def _company(self, uid=1):\r\n company = self.env['res.company'].browse(uid)\r\n return {\r\n 'journal': company.pledge_journal.id,\r\n 'product': company.pledge_product,\r\n 'account': company.pledge_receipt_account.id,\r\n 'property_account_income': company.pledge_product.property_account_income_id,\r\n }",
"def get_all_companies_and_people():",
"def test_get_company_props_by_company_id_using_get(self):\n pass",
"def get_company(self, cmp):\n if cmp in self.cnames:\n return self.cnames[cmp]\n else:\n return None",
"def get_companies(self, **kwargs):\n return self.get('companies.json', **kwargs)",
"def get_companies(self):\n url = 'companies'\n result = self.get(url)\n return result['companies']",
"def test_get_all_company_props_using_get(self):\n pass",
"def get_queryset(self):\n return self.request.user.setting_set.get().companies",
"def get_queryset(self):\n return self.request.user.setting_set.get().companies",
"def get_queryset(self):\n return self.request.user.setting_set.get().companies",
"def get_queryset(self):\n return self.request.user.setting_set.get().companies",
"def get_resource(self):\n return self._stores",
"def default_company():\n return Transaction().context.get('company')",
"def get_company(company_id):\n company = storage.get(Company, company_id)\n if not company:\n abort(404)\n\n return jsonify(company.to_dict())",
"def get_companies(self):\n response = self.do_request('/undertaking/list')\n if response:\n return response.json()",
"def for_company(cls, company_id):\n return cls.objects.filter(vacancy__company__id=company_id)",
"def get_companies_and_people(team):",
"def search_store_relationships(storename, exp_stor_db, budg_db, stor_exp_data_path, stor_db, stor_data_path):\n exp_stor_dbKeys = exp_stor_db.keys()\n\n if storename not in exp_stor_dbKeys:\n storename, stor_db, exp_stor_db = select_store_for_purchase(\n storename, stor_data_path, stor_db, exp_stor_db, stor_exp_data_path)\n\n exps_fr_store = exp_stor_db[storename]\n\n if len(exps_fr_store) == 0:\n selected_exps = util.select_indices_of_list(f\"No expenses for '{storename}'. Please select one or multiple to go with this store from now on.\",\n list(budg_db.keys()),\n return_matches=True)\n if len(selected_exps) == 1:\n selected_exp = selected_exps[0]\n else:\n selected_exp = util.select_from_list(\n selected_exps, f\"Please select which expense you want for this transaction at '{storename}': \", ret_match=True)\n exp_stor_db[storename] = selected_exps\n data_help.write_to_jsonFile(stor_exp_data_path, exp_stor_db)\n\n elif len(exps_fr_store) == 1:\n selected_exp = exps_fr_store[0]\n else:\n selected_exp = exps_fr_store[util.select_from_list(\n exps_fr_store, f\"Please select an expense for this transaction at '{storename}': \")]\n\n return selected_exp, dict(exp_stor_db), stor_db, storename",
"def test_website_companies_get_details(self):\n pass",
"def __getCompaniesData(self, schema):\n try:\n self.cursor.execute(\"\"\"SELECT id, twitter, proven_score, slug FROM {schema}.vendors_vendor WHERE\n twitter <> ''\"\"\".format(schema=schema))\n data = self.cursor.fetchall()\n\n companies = []\n for entry in data:\n self.cursor.execute('SELECT location_id FROM {schema}.vendors_vendorlocation WHERE vendor_id = {vendor}'.format(schema=schema, vendor=entry[0]))\n cities = self.cursor.fetchall()\n\n if cities is None:\n continue\n\n city = ''\n\n for cityId in cities:\n self.cursor.execute('SELECT city FROM {schema}.locations_location WHERE id = {city}'.format(schema=schema, city=cityId[0]))\n cityName = self.cursor.fetchone()\n\n if cityName is not None:\n city += cityName[0]\n\n self.cursor.execute('SELECT category_id, rank FROM {schema}.vendors_vendorcustomkind WHERE vendor_id = {vendor} AND \"primary\" is true'.format(schema=schema, vendor=entry[0]))\n customKind = self.cursor.fetchone()\n\n if customKind is None:\n catId = rank = None\n else:\n catId, rank = customKind\n\n if catId is not None:\n self.cursor.execute('SELECT name, slug FROM {schema}.categories_category WHERE id = {cat}'.format(schema=schema, cat=catId))\n catData = self.cursor.fetchone()\n else:\n catData = None\n\n companies.append(DBItemCompany(\n _id = entry[0],\n tweeter = entry[1],\n category = catData[0] if catData is not None else None,\n categoryUrl = self.__buildCategoryUrl(catId, schema) if catId is not None else None,\n provenScore = entry[2],\n ranking = rank,\n location = city,\n url = self.__buildProfileUrl(catData[1], entry[3], schema) if catData is not None else self.__buildProfileUrlWOCategory(entry[3], schema),\n categoryId = catId\n ))\n\n self.__companies[schema] = companies\n\n except psycopg2.DatabaseError as err:\n raise DBException(err.args[0])",
"def search_company(cls, name, clause):\n return [('sale.company', ) + tuple(clause[1:])]",
"def retrieve_company_data(self):\n self.set_stock_sym_append_str('')\n self.set_stock_retrieval_type('all') #'all', watcher\n self.load_stock_symbol_fr_file()",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store or not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n return store.products # returns stock models",
"def get_store(self, store_id):\n for store in self.get_stores():\n if store[\"code\"] == store_id:\n return store",
"def get_company(self, from_email, to_email):\n to_email = self._extract_email_address(to_email)\n from_email = self._extract_email_address(from_email)\n # use from and to email addresses combination as a primary key\n _id = base64.b64encode(bytes(from_email+\"-\"+to_email, encoding='utf-8'))\n res = self._client.get_item(\n TableName='Company',\n Key={\n 'id':{\n 'S':_id.decode('utf-8')\n }\n }\n )\n if 'Item' in res:\n return res['Item']['company']['S']\n else:\n return 'unknown'"
] | [
"0.64457345",
"0.6345455",
"0.6345455",
"0.6151367",
"0.6144638",
"0.6015758",
"0.5981194",
"0.588651",
"0.58488876",
"0.58255076",
"0.5811751",
"0.5811187",
"0.5736758",
"0.5736758",
"0.5736758",
"0.5736758",
"0.56247306",
"0.5568528",
"0.5560751",
"0.555124",
"0.5547191",
"0.5545074",
"0.5522697",
"0.5497697",
"0.5492581",
"0.5478063",
"0.546462",
"0.5448931",
"0.54207194",
"0.54112196"
] | 0.6659226 | 0 |
Looks for the store view whose `values` are sent by magento against the store with `store` in tryton. If a record exists for this, return that else create a new one and return | def find_or_create(cls, store, values):
    store_views = cls.search([
        ('store', '=', store.id),
        ('magento_id', '=', int(values['store_id']))
    ])
    if store_views:
        return store_views[0]
    return cls(**{
        'name': values['name'],
        'code': values['code'],
        'store': store.id,
        'magento_id': int(values['store_id']),
    }) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def find_or_create(cls, website, values):\n stores = cls.search([\n ('website', '=', website.id),\n ('magento_id', '=', int(values['group_id']))\n ])\n\n if stores:\n return stores[0]\n\n return cls.create([{\n 'name': values['name'],\n 'magento_id': int(values['group_id']),\n 'website': website.id,\n }])[0]",
"def find_or_create(cls, instance, values):\n websites = cls.search([\n ('instance', '=', instance.id),\n ('magento_id', '=', int(values['website_id']))\n ])\n\n if websites:\n return websites[0]\n\n return cls.create([{\n 'name': values['name'],\n 'code': values['code'],\n 'instance': instance.id,\n 'magento_id': int(values['website_id']),\n }])[0]",
"def storelocator():\n\n\treturn render_template(\"storelocator.html\")",
"def save(self, store):\n self.db.query(f\"\"\"\n INSERT INTO {self.table} (id, name)\n VALUES (:id, :name)\n ON DUPLICATE KEY UPDATE name = :name\n \"\"\", **vars(store))\n\n if not store.id:\n store.id = self.get(name=store.name).id\n return store",
"def get_store(self, store_name: str) -> Any:\n pass",
"def _store(request):\n return _config(request)['store_factory'](request)",
"def get_current_store_view(cls):\n return cls(Transaction().context.get('magento_store_view'))",
"def find_store(request):\n r = {'result':'-1'}\n \n import httplib, urllib\n\n h = httplib.HTTPConnection(\"api.remix.bestbuy.com\")\n lat = request.POST['lat']\n lon = request.POST['lon']\n distance = request.POST['distance']\n\n h.request('GET', '/v1/stores(area(%s,%s,%s))?format=json&apiKey=%s'%(lat, lon, distance, api_key))\n\n result = h.getresponse()\n logger.info( \"BestBuy Location HTTP output: %s, reason: %s\"%(result.status, result.reason) )\n response = json.loads(result.read())\n\n stores = response.get(\"stores\", [])\n if len(stores) > 0: \n r['result'] = stores[0]\n\n return JSONHttpResponse(r)",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n else:\n return store",
"def get_store(self, store_id):\n for store in self.get_stores():\n if store[\"code\"] == store_id:\n return store",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def stores(self, request, pk):\n user = User.objects.get(id=pk)\n user_store_ids = UserStore.objects.filter(user=user).values('store__id')\n stores = Store.objects.filter(id__in=user_store_ids)\n return Response(StoreSerializer(stores, many=True).data)",
"def create_stores(self):\n (instances,) = self\n shop_obj = self.env['sale.shop']\n shop_ids = shop_obj.search([('instance_id', '=', self[0].id)])\n payment_ids = self.env['account.payment.term'].search([])\n\n if not shop_ids:\n shop_data = {\n 'sale_channel_shop': True,\n 'name': instances.name + ' Shop',\n 'payment_default_id': payment_ids[0].id,\n 'warehouse_id': 1,\n 'instance_id': self[0].id,\n 'marketplace_image': instances.image,\n 'order_policy': 'prepaid'\n }\n shop_id = shop_obj.create(shop_data)\n else:\n shop_id = shop_ids[0]\n return shop_id",
"def select_store_for_purchase(storename, stor_data_path, stor_db, exp_stor_db, stor_exp_data_path):\n if storename not in stor_db.keys():\n prompt = f\"I cannot filter for the store '{storename}'. Please select the storename for this store and I will remember it for next time. If it is a new store, type 'n': \"\n matched_storename = util.select_dict_key_using_integer(\n exp_stor_db, prompt, quit_str='n', print_children=False, print_aborting=False)\n\n if matched_storename == None:\n matched_storename = util.process_input(\n f\"Could you enter a storename so I remember this store in the future? \")\n # if new store is added, add it to the exp_stor_db, with empty expenses to be added later by user\n exp_stor_db.update({matched_storename: []})\n data_help.write_to_jsonFile(stor_exp_data_path, exp_stor_db)\n stor_db.update({storename: matched_storename})\n\n data_help.write_to_jsonFile(stor_data_path, stor_db)\n\n else:\n matched_storename = stor_db[storename]\n\n return matched_storename, stor_db, exp_stor_db",
"def _get_store(self):\n return self._store",
"def __get_store(self) -> Optional[Store]:\n\n if self.lvp == LVP.COBOL_TO_CSHARP_9:\n return COBOLToCSharp9Store(self.template_processor, self.veil)\n\n return None",
"def get_store(store_name: str):\n return store_handler.get_store(store_name)",
"def get_store(request):\n storedb = redis.Redis(host=HOST, db=STOREDB)\n return storedb.lrange(\"store\",0,-1)",
"def newstore(self):\n sw = NewStoreWidget(self, self.town)\n self.connect(sw, QtCore.SIGNAL('store_created'), \n self.new_store_done)",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def search_by_product_store(self, product_id, store_id):\n\n return self.fetch_one(\"\"\"\n SELECT\n *\n FROM\n product_stores\n WHERE\n product_id=%s\n AND\n store_id=%s\n \"\"\", (product_id, store_id))",
"def get(self, store_id):\n store = StoreModel.query.filter_by(id=store_id).first()\n if not store or not store:\n store_api.abort(404, \"Store {} doesn't exist\".format(store_id))\n return store.products # returns stock models",
"def search_store_relationships(storename, exp_stor_db, budg_db, stor_exp_data_path, stor_db, stor_data_path):\n exp_stor_dbKeys = exp_stor_db.keys()\n\n if storename not in exp_stor_dbKeys:\n storename, stor_db, exp_stor_db = select_store_for_purchase(\n storename, stor_data_path, stor_db, exp_stor_db, stor_exp_data_path)\n\n exps_fr_store = exp_stor_db[storename]\n\n if len(exps_fr_store) == 0:\n selected_exps = util.select_indices_of_list(f\"No expenses for '{storename}'. Please select one or multiple to go with this store from now on.\",\n list(budg_db.keys()),\n return_matches=True)\n if len(selected_exps) == 1:\n selected_exp = selected_exps[0]\n else:\n selected_exp = util.select_from_list(\n selected_exps, f\"Please select which expense you want for this transaction at '{storename}': \", ret_match=True)\n exp_stor_db[storename] = selected_exps\n data_help.write_to_jsonFile(stor_exp_data_path, exp_stor_db)\n\n elif len(exps_fr_store) == 1:\n selected_exp = exps_fr_store[0]\n else:\n selected_exp = exps_fr_store[util.select_from_list(\n exps_fr_store, f\"Please select an expense for this transaction at '{storename}': \")]\n\n return selected_exp, dict(exp_stor_db), stor_db, storename",
"def add_store():\n\n form = AddStoreForm()\n\n # HTTP POST\n # validate_on_submit returns True if the form has been both submitted\n # (HTTP POST or PUT) and validated.\n if form.validate_on_submit():\n # Save the new store\n # Required fields are user_id, country_code, dc, number, name\n # Conversion of user_id is required because current_user.get_id()\n # returns unicode\n new_store = Store(\n user_id=int(current_user.get_id()),\n country_code=form.country_code.data.country_code,\n dc_id=form.dc.data.id,\n number=form.number.data,\n name=form.name.data,\n status_id=form.status.data.id,\n street_number=form.street_number.data,\n street_name=form.street_name.data,\n postal_code=form.postal_code.data,\n city=form.city.data\n )\n # Keep an informative store_name for feedback\n store_name = ('{} - {} {}'.format(\n form.country_code.data.country_code,\n form.number.data,\n form.name.data)\n )\n try:\n db.session.add(new_store)\n db.session.commit()\n print('Created store {}'.format(store_name))\n flash('Thank you for adding store {}'.format(store_name), 'info')\n except IntegrityError:\n db.session.rollback()\n print('Country {} already has a store with number {}.'.format(\n form.country_code.data.country_code, form.number.data))\n flash('Country {} already has a store with number {}.'.format(\n form.country_code.data.country_code, form.number.data),\n 'error')\n # Return back to Stores Index\n return redirect(url_for('.index'))\n\n # HTTP GET\n return render_template('stores/add-store.html', form=form)",
"def get_store(self, key, silent=False):\n try:\n value = self.model.objects.get(key=key.name).value\n except (self.model.DoesNotExist, ProgrammingError, OperationalError):\n value = None\n except Exception:\n if not silent:\n logger.exception('option.failed-lookup', extra={\n 'key': key.name,\n })\n value = None\n else:\n # we only attempt to populate the cache if we were previously\n # able to successfully talk to the backend\n # NOTE: There is definitely a race condition here between updating\n # the store and the cache\n try:\n self.set_cache(key, value)\n except Exception:\n if not silent:\n logger.warn(CACHE_UPDATE_ERR, key.name, extra={\n 'key': key.name,\n }, exc_info=True)\n return value",
"def get_mixed_stores(mixed_setting):\n return mixed_setting[\"default\"][\"OPTIONS\"][\"stores\"]",
"def set_store_details(self):\n query = db.select([self.tables.columns.ProductName,\n self.tables.columns.QuantityPerUnit,\n self.tables.columns.UnitPrice,\n self.tables.columns.UnitsInStock])\n print(query)\n ResultProxy = self.connection.execute(query)\n ResultSet = ResultProxy.fetchall()\n return ResultSet",
"def form_valid(self, form):\n site_no = form.cleaned_data['site_no']\n overwrite = form.cleaned_data['overwrite']\n agency = AgencyLookup.objects.get(agency_cd='USGS')\n\n context = self.get_context_data()\n\n site_exists = MonitoringLocation.objects.filter(site_no=site_no, agency=agency).exists()\n if site_exists and not overwrite:\n context['show_overwrite'] = True\n\n elif site_exists and overwrite == 'n':\n return redirect(\n reverse('admin:registry_monitoringlocation_change',\n args=(MonitoringLocation.objects.get(site_no=site_no, agency=agency).id,))\n )\n else:\n resp = requests.get(settings.NWIS_SITE_SERVICE_ENDPOINT, params={\n 'format': 'rdb',\n 'siteOutput': 'expanded',\n 'sites': site_no,\n 'siteStatus': 'all'\n })\n if resp.status_code == 200:\n sites = parse_rdb(resp.iter_lines(decode_unicode=True))\n try:\n site = next(sites)\n except StopIteration:\n context['request_response'] = f'No site exists for {site_no}'\n else:\n valid, message = self._validate_site(site)\n if valid:\n monitoring_location = self._get_monitoring_location(site, self.request.user)\n\n monitoring_location.save()\n return redirect(reverse('admin:registry_monitoringlocation_change',\n args=(monitoring_location.id,)))\n\n context['request_response'] = message\n\n elif resp.status_code == 404:\n context['request_response'] = f'No site exists for {site_no}'\n else:\n context['request_response'] = f'Service request to NWIS failed with status {resp.status_code}'\n\n return render(self.request, self.template_name, context=context)",
"def store_get_obj(request, store_name, obj_id):\n storedb = redis.Redis(host=HOST, db = STOREDB)\n \n if store_name not in get_store(request):\n return json_response(status=\"ERROR\", status_code=404, error=\"Store does not exist.\") \n \n return storedb.get(store_name + \":\" + obj_id)",
"def p_store(self, index):\n if index == 1:\n return self.p1_store()\n else:\n return self.p2_store()"
] | [
"0.60469294",
"0.55298656",
"0.54436827",
"0.5388281",
"0.5386525",
"0.5251227",
"0.5241936",
"0.52329993",
"0.51904476",
"0.514361",
"0.50282407",
"0.49991724",
"0.49459726",
"0.49037516",
"0.4829308",
"0.48178092",
"0.48170227",
"0.47974437",
"0.47709832",
"0.47291148",
"0.47248504",
"0.47033417",
"0.46899912",
"0.46890315",
"0.46465883",
"0.45938626",
"0.45607498",
"0.45210037",
"0.45181772",
"0.4502932"
] | 0.79743385 | 0 |
Calls wizard to import orders for store view | def import_orders_button(cls, store_views):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def import_orders(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n store_view.import_order_from_store_view()",
"def purchase_import(self, cr, uid, ids, context=None):\n wizard_row = self.browse(cr, uid, ids)[0]\n if wizard_row.add_import == 'add':\n self.pool.get('purchase.import').write(cr, uid, [wizard_row.import_id.id], {'purchase_related_ids': [(4,wizard_row.purchase_id.id)]})\n import_id = wizard_row.import_id.id\n elif wizard_row.add_import == 'create':\n import_id = self.pool.get('purchase.import').create(cr, uid, self.prepare_purchase_import(cr, uid, wizard_row, context), context)\n return import_id",
"def test_service_order_wizard(self):\n\n\t\t# Create objects\n\t\tmachine = Machine.objects.create(**data.machine_data)\n\t\tcustomer = Customer.objects.create(**data.customer_data)\n\t\tengineer = ServiceEngineer.objects.create(**data.engineer_name)\n\n\t\t# Create InfoForm\n\t\tinfo_form_data = {'engineer': engineer.id,\n\t\t\t\t\t\t'rma_number': data.service_order_data['rma_number'],\n\t\t \t\t\t\t'date': data.service_order_data['date'], 'customer': customer.id,\n\t\t\t\t\t\t'machine': machine.serial_number,\n\t\t\t\t\t\t'condition': data.service_order_data['condition']}\n\t\tinfo_form = InfoForm(info_form_data)\n\n\t\t# Create AssessmentForm\n\t\tassessment_form_data = {'correction': data.service_order_data['correction'],\n\t\t\t\t\t\t\t\t'notes': data.service_order_data['notes']}\n\t\tassessment_form = AssessmentForm(assessment_form_data)\n\n\t\t# Create InvoiceForm\n\t\tinvoice_form_data = {'purchase_order': data.service_order_data['purchase_order'],\n\t\t\t\t\t\t\t'zone_charge': data.service_order_data['zone_charge'],\n\t\t\t\t\t\t\t'parts_charge': data.service_order_data['parts_charge'],\n\t\t\t\t\t\t\t'payment_category': data.service_order_data['payment_category'],\n\t\t\t\t\t\t\t'service_category': data.service_order_data['service_category']}\n\t\tinvoice_form = InvoiceForm(invoice_form_data)\n\n\t\tform_list = ValuesView(OrderedDict([('info', info_form),\n\t\t\t\t\t('assessment', assessment_form), ('invoice', invoice_form)]))\n\n\t\tsow = ServiceOrderWizard()\n\t\tresponse = sow.done(form_list)\n\n\t\tself.assertEqual(response.status_code, 200)",
"def action_stagger_purchase_delivery(self):\n for wizard in self:\n #On vérifie que la quantité entrée est inférieure à la quantité de la ligne \n #d'achat \n purchase_line = wizard.purchase_line_id\n price_unit = purchase_line.price_unit\n if wizard.quantity <= 0:\n raise except_orm(_(\"Error\"), _('You must enter a quantity superior to 0'))\n \n if wizard.quantity >= purchase_line.sec_uom_qty:\n raise except_orm(_(\"Error\"), _('You must enter a quantity inferior to the initial purchase '\n 'line quantity'))\n \n #On récupère les valeurs entrées dans le wizard\n values = {'sec_uom_qty': wizard.quantity,\n 'expected_date': wizard.new_date}\n new_line = purchase_line.copy(values)\n new_line._onchange_sec_uom_qty(with_warning=False)\n new_line._onchange_uom_qty()\n new_line._onchange_uoi_qty()\n new_line.write({'price_unit': price_unit})\n #On décrémente la ligne initiale de la quantité de la nouvelle ligne (car celle-ci respecte forcément\n #le multiple et le minimum\n purchase_line.write({'sec_uom_qty': purchase_line.sec_uom_qty - wizard.quantity})\n purchase_line._onchange_sec_uom_qty(with_warning=False)\n purchase_line._onchange_uom_qty()\n purchase_line._onchange_uoi_qty()\n purchase_line.write({'price_unit': price_unit})\n #On retourne l'achat\n if wizard.purchase_id:\n action_dict = get_form_view(self, 'purchase.purchase_order_see_form')\n if action_dict and action_dict.get('id') and action_dict.get('type'):\n action = self.env[action_dict['type']].browse(action_dict['id'])\n action_struc = action.read()\n action_struc[0]['res_id'] = wizard.purchase_id.id\n action_struc = action_struc[0]\n \n return action_struc\n else:\n return {'type': 'ir.actions.act_window_close'}",
"def __setup__(cls):\n super(WebsiteStoreView, cls).__setup__()\n cls._sql_constraints += [\n (\n 'magento_id_store_unique', 'UNIQUE(magento_id, store)',\n 'A store view must be unique in a store'\n )\n ]\n cls._error_messages.update({\n \"states_not_found\": 'No order states found for importing orders! '\n 'Please configure the order states on magento instance',\n })\n cls._buttons.update({\n 'import_orders_button': {},\n 'export_order_status_button': {}\n })",
"def _get_import_step(self, cr, uid, external_session, context=None):\n return 100",
"def prepare(self):\n # Create a purchase order from a supplier\n Company = self.old_state.apps.get_model('company', 'company')\n PurchaseOrder = self.old_state.apps.get_model('order', 'purchaseorder')\n Part = self.old_state.apps.get_model('part', 'part')\n Supplierpart = self.old_state.apps.get_model('company', 'supplierpart')\n # TODO @matmair fix this test!!!\n # SalesOrder = self.old_state.apps.get_model('order', 'salesorder')\n\n supplier = Company.objects.create(\n name='Supplier A',\n description='A great supplier!',\n is_supplier=True,\n is_customer=True,\n )\n\n part = Part.objects.create(\n name='Bob',\n description='Can we build it?',\n assembly=True,\n salable=True,\n purchaseable=False,\n tree_id=0,\n level=0,\n lft=0,\n rght=0,\n )\n supplierpart = Supplierpart.objects.create(\n part=part,\n supplier=supplier\n )\n\n # Create some orders\n for ii in range(10):\n\n order = PurchaseOrder.objects.create(\n supplier=supplier,\n reference=f\"{ii}-abcde\",\n description=\"Just a test order\"\n )\n order.lines.create(\n part=supplierpart,\n quantity=12,\n received=1\n )\n order.lines.create(\n quantity=12,\n received=1\n )\n\n # TODO @matmair fix this test!!!\n # sales_order = SalesOrder.objects.create(\n # customer=supplier,\n # reference=f\"{ii}-xyz\",\n # description=\"A test sales order\",\n # )\n # sales_order.lines.create(\n # part=part,\n # quantity=12,\n # received=1\n # )",
"def create_purchase_order(self, cr, uid, ids, context=None):\n sale_obj = self.pool.get('sale.order')\n act_window = self.pool.get('ir.actions.act_window')\n wizard = self.browse(cr, uid, ids[0], context)\n sale_ids = context.get('active_ids', [])\n if wizard.advance_purchase_order == 'all':\n # create the final invoices of the active sales orders\n res = sale_obj.manual_purchase_order(cr, uid, sale_ids, context)\n \n return {'type': 'ir.actions.act_window_close'}\n\n if wizard.advance_purchase_order == 'lines':\n # open the list view of sales order lines to invoice\n res = act_window.for_xml_id(cr, uid, 'sale', 'action_order_line_tree2', context)\n res['context'] = {\n \n 'search_default_order_id': sale_ids and sale_ids[0] or False,\n }\n return res \n\n inv_ids = []\n for sale_id, inv_values in self._prepare_advance_po_vals(cr, uid, ids, context=context):\n inv_ids.append(self._create_purchase_order(cr, uid, inv_values, sale_id, context=context))\n\n \n return {'type': 'ir.actions.act_window_close'}",
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def default_start(self, data):\n return {\n 'message': \"This wizard has imported all the websites for this \" +\n \"magento instance. It has also imported all the stores and \" +\n \"store views related to the websites imported. If any of \" +\n \"the records existed already, it wont be imported.\"\n }",
"def handle(self, *args, **options):\n run_tximport()",
"def jobs_import_view():\n import_job()\n return response.redirect(request.app.url_for('jobs'))",
"def workorderwizard_load(request):\n # Manually checking if user is authenticated rather than using @login_required\n # in order to return a 401 status that the workorder wizard understands so it can redirect the user to log in\n # instead of returning a 302 redirect to the login page, which wouldn't work because this view is called via AJAX\n if not request.user.is_authenticated:\n return HttpResponse('Unauthorized', status=401)\n\n response = {'locations': [], 'orgs': []}\n response['user'] = {'name': request.user.get_full_name(),\n 'email': request.user.email,\n 'phone': request.user.phone,\n 'address': request.user.addr}\n for loc in events_models.Location.objects.filter(show_in_wo_form=True):\n response['locations'].append({'id': loc.pk, 'name': loc.name, 'building': loc.building.name})\n for org in events_models.Organization.objects.filter(archived=False):\n data = {'id': org.pk,\n 'name': org.name,\n 'shortname': org.shortname,\n 'owner': org.user_in_charge == request.user,\n 'member': request.user in org.associated_users.all(),\n 'delinquent': org.delinquent}\n if request.user.has_perm('events.view_org', org):\n data['email'] = org.exec_email\n data['phone'] = org.phone\n data['address'] = org.address\n response['orgs'].append(data)\n return HttpResponse(json.dumps(response))",
"def export_order_status_button(cls, store_views):\n pass",
"def abc_transfer_wizard(self, lines, packages, data, params, res):\n # TODO: Add support for packages.\n res['results']['transfer'] = 'failure'\n action = self.do_enter_transfer_details()\n wizard = self.env['stock.transfer_details'].browse(action['res_id'])\n # Keep track of matched transfer items\n matched_ids = []\n for line in lines:\n if line['id'] > 0:\n # Original line. Match against item in wizard.\n if line['packop_id']:\n item = wizard.item_ids.filtered(lambda i: i.packop_id.id == line['packop_id']['id'])\n item.quantity = line['qty_done']\n matched_ids.append(item.id)\n else:\n # What if we don't have packop_id. Will this ever occur?\n _logger.warn(_(\"Couldn't match line (id %s) against existing transfer item!\\nlines:%s\\ntransfer items:%s\") % (line['id'], lines, wizard.item_ids.read()))\n else:\n # New line. Create a new item.\n # TODO: Split item based on original line from another package.\n item = wizard.item_ids.create({\n 'transfer_id': wizard.id,\n 'product_id': line['product_id']['id'],\n 'product_uom_id': line['product_uom_id']['id'],\n 'quantity': line['qty_done'],\n 'sourceloc_id': line['sourceloc_id']['id'],\n 'destinationloc_id': line['destinationloc_id']['id'],\n # 'result_package_id': line['result_package_id']['id'],\n # 'destinationloc_id': line['destinationloc_id']['id'],\n })\n matched_ids.append(item.id)\n extra_items = wizard.item_ids.filtered(lambda i: i.id not in matched_ids)\n if extra_items:\n _logger.warn(_(\"Found and deleted extra transfer items! %s\" % extra_items.read()))\n extra_items.unlink()\n wizard.do_detailed_transfer()\n res['results']['transfer'] = 'success'\n params['wizard'] = wizard",
"def import_order_states(cls, instances):\n OrderState = Pool().get('magento.order_state')\n\n for instance in instances:\n\n Transaction().context.update({\n 'magento_instance': instance.id\n })\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n OrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )",
"def delivery_page(cls, logger=None):\n if logger is None:\n logger = cls._logger\n\n database_connection = DatabaseConnection(f\"orders.csv\")\n view = database_connection.get_view()\n logger.log(view)\n\n while True:\n\n choice = input(\n \"Please choose: \"\n \"(1) refresh orders view, \"\n \"(2) next page, \"\n \"(3) previous page, \"\n \"(4) examine order, \"\n \"Enter empty to go back \"\n )\n if choice not in ('1', '2', '3', '4'):\n break\n\n if choice=='1':\n view = database_connection.get_view()\n logger.log(view)\n\n # next page\n elif choice=='2': \n database_connection.next_page()\n view = database_connection.get_view()\n logger.log(view)\n\n # previous page\n elif choice=='3':\n database_connection.prev_page()\n view = database_connection.get_view()\n logger.log(view)\n\n elif choice=='4':\n\n # get product_id\n while True:\n order_id = input(\"Enter the order id: \")\n try:\n order_id = int(order_id)\n except:\n logger.log(\"order id should be an integer\")\n break\n\n table = database_connection.table\n order = table.loc[(table['order_id']==order_id), \"order\"][0] # order_id should be unique\n logger.log(json.dumps(json.loads(order), indent=1)) # pretty logger.log the json\n\n\n else:\n break",
"def step_1(browser):\n browser.click_on(\"Import depuis eComptes\".decode('utf8'))",
"def order_numbers_tab(request, shop, form, template_name=\"manage/order_numbers/order_numbers_tab.html\"):\n return render_to_string(template_name, RequestContext(request, {\n \"shop\": shop,\n \"form\": form,\n }))",
"def _get_export_step(self, cr, uid, external_session, context=None):\n return 10",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def default_start(self, data):\n return {\n 'message': \"This wizard will export shipment status for all the \" +\n \"shipments related to this store view. To export tracking \" +\n \"information also for these shipments please check the \" +\n \"checkbox for Export Tracking Information on Store View.\"\n }",
"def start_import(request):\n if request.POST:\n form = ImportForm(request.POST, request.FILES)\n if form.is_valid():\n import_log = form.save(commit=False)\n import_log.user = request.user\n import_log.import_setting, created = ImportSetting.objects.get_or_create(\n user=request.user,\n content_type=ContentType.objects.get(id=form.data['model']),\n )\n import_log.save()\n return HttpResponseRedirect(reverse(match_columns, kwargs={'import_log_id': import_log.id}))\n else:\n form = ImportForm()\n if not request.user.is_superuser:\n form.fields[\"model\"].queryset = ContentType.objects.filter(\n Q(permission__group__user=request.user, permission__codename__startswith=\"change_\") |\n Q(permission__user=request.user, permission__codename__startswith=\"change_\")).distinct()\n \n return render_to_response('simple_import/import.html', {'form':form,}, RequestContext(request, {}),)",
"def post(self, request, *args, **kwargs):\n resource = self.get_import_resource_class()(**self.get_import_resource_kwargs(request, *args, **kwargs))\n\n confirm_form = ConfirmImportForm(request.POST)\n if confirm_form.is_valid():\n import_formats = self.get_import_formats()\n input_format = import_formats[\n int(confirm_form.cleaned_data['input_format'])\n ]()\n tmp_storage = self.get_tmp_storage_class()(name=confirm_form.cleaned_data['import_file_name'])\n data = tmp_storage.read(input_format.get_read_mode())\n if not input_format.is_binary() and self.from_encoding:\n data = force_text(data, self.from_encoding)\n dataset = input_format.create_dataset(data)\n\n result = resource.import_data(dataset, dry_run=False,\n raise_errors=True,\n file_name=confirm_form.cleaned_data['original_file_name'],\n user=request.user)\n\n if not self.get_skip_admin_log():\n # Add imported objects to LogEntry\n logentry_map = {\n RowResult.IMPORT_TYPE_NEW: ADDITION,\n RowResult.IMPORT_TYPE_UPDATE: CHANGE,\n RowResult.IMPORT_TYPE_DELETE: DELETION,\n }\n content_type_id = ContentType.objects.get_for_model(self.model).pk\n for row in result:\n if row.import_type != row.IMPORT_TYPE_ERROR and row.import_type != row.IMPORT_TYPE_SKIP:\n LogEntry.objects.log_action(\n user_id=request.user.pk,\n content_type_id=content_type_id,\n object_id=row.object_id,\n object_repr=row.object_repr,\n action_flag=logentry_map[row.import_type],\n change_message=\"%s through import_export\" % row.import_type,\n )\n success_message = str(_(u'Import finished')) + ' , ' + str(_(u'Add')) + ' : %d' % result.totals[\n RowResult.IMPORT_TYPE_NEW] + ' , ' + str(_(u'Update')) + ' : %d' % result.totals[\n RowResult.IMPORT_TYPE_UPDATE]\n\n messages.success(request, success_message)\n tmp_storage.remove()\n\n post_import.send(sender=None, model=self.model)\n model_info = (self.opts.app_label, self.opts.model_name)\n url = reverse('xadmin:%s_%s_changelist' % model_info,\n current_app=self.admin_site.name)\n return HttpResponseRedirect(url)",
"def abc_transfer_steps(self):\n return [\n (20, 'abc_transfer_wizard'),\n (40, 'abc_create_invoice'),\n (60, 'abc_confirm_invoice')]",
"def workorderwizard_submit(request):\n # Manually checking if user is authenticated rather than using @login_required\n # in order to return a 401 status that the workorder wizard understands so it can display a specific error message\n # instead of returning a 302 redirect to the login page, which wouldn't work because this view is called via AJAX\n if not request.user.is_authenticated:\n return HttpResponse('Unauthorized', status=401)\n\n # load JSON\n data = json.loads(request.body.decode('utf-8'))\n\n # check that all required fields are present\n mandatory_fields = ('org', 'event_name', 'location', 'start', 'end', 'setup_complete', 'services')\n if not all(key in data for key in mandatory_fields):\n return HttpResponse('Unprocessable Entity', status=422)\n\n reversion.set_comment('Event submitted using work order wizard')\n\n # create event object and populate fields\n event = events_models.Event2019()\n event.submitted_by = request.user\n event.submitted_ip = request.META.get('REMOTE_ADDR')\n event.contact = request.user\n event.event_name = data['event_name']\n if 'description' in data:\n event.description = data['description']\n try:\n event.location = events_models.Location.objects.filter(show_in_wo_form=True).get(pk=data['location'])\n except events_models.Location.DoesNotExist:\n return HttpResponse('Unprocessable Entity', status=422)\n event.datetime_setup_complete = parse_datetime(data['setup_complete'])\n event.datetime_start = parse_datetime(data['start'])\n event.datetime_end = parse_datetime(data['end'])\n try:\n org = events_models.Organization.objects.get(pk=data['org'])\n except events_models.Organization.DoesNotExist:\n return HttpResponse('Unprocessable Entity', status=422)\n event.billing_org = org\n\n # populate many-to-many fields\n event.save()\n event.org.add(org)\n \n # add services\n for service_data in data['services']:\n if 'id' not in service_data:\n return HttpResponse('Unprocessable Entity', status=422)\n try:\n service = events_models.Service.objects.filter(enabled_event2019=True).get(shortname=service_data['id'])\n except events_models.Service.DoesNotExist:\n return HttpResponse('Unprocessable Entity', status=422)\n service_instance = events_models.ServiceInstance()\n service_instance.service = service\n service_instance.event = event\n if 'detail' in service_data:\n service_instance.detail = service_data['detail']\n service_instance.save()\n\n # add extras\n for extra_data in data['extras']:\n if not all(key in extra_data for key in ('id', 'quantity')):\n return HttpResponse('Unprocessable Entity', status=422)\n try:\n extra = events_models.Extra.objects \\\n .filter(disappear=False, services__in=event.serviceinstance_set.values_list('service', flat=True)) \\\n .distinct().get(name=extra_data['id'])\n except events_models.Extra.DoesNotExist:\n return HttpResponse('Unprocessable Entity', status=422)\n extra_instance = events_models.ExtraInstance()\n extra_instance.extra = extra\n extra_instance.event = event\n extra_instance.quant = extra_data['quantity']\n extra_instance.save()\n\n # send confirmation email\n email_body = 'You have successfully submitted the following event.'\n bcc = [settings.EMAIL_TARGET_VP, settings.EMAIL_TARGET_HP] if event.has_projection else [settings.EMAIL_TARGET_VP]\n email = EventEmailGenerator(event=event, subject='New Event Submitted', to_emails=[request.user.email],\n body=email_body, bcc=bcc)\n email.send()\n\n # If the user does not have permission to submit events on behalf of the selected organization,\n # send an email 
to the organization to alert them that the event was submitted\n # if not request.user.has_perm('events.create_org_event', org):\n # email_body = ('The following event was submitted. You are receiving this email because the user who submitted '\n # 'this event is not expressly authorized to submit events on behalf of {}. The organization owner '\n # 'can update authorized users at {}.'.format(org.name,\n # request.scheme + '://' + request.get_host() + reverse('my:org-edit', args=(org.pk,))))\n # email = EventEmailGenerator(event=event, subject='Event Submitted on behalf of {}'.format(org.name),\n # to_emails=[org.exec_email], body=email_body, bcc=[settings.EMAIL_TARGET_W])\n # email.send()\n\n # return response with the URL to the event detail page\n return HttpResponse(json.dumps({'event_url': reverse('events:detail', args=[event.pk])}))",
"def custom_actions(self, form_wizard_entry, request=None):",
"def run_import(self, expanded, unexpanded) : \n\t\tif not unexpanded :\n\t\t\treturn self.errormessage(\"Needs some filenames to import\")\n\t\tif not self.HasPerms(self.__context, 'Import/Export objects') :\n\t\t\treturn -1\n\t\tfor filename in unexpanded :\n\t\t\tself.__context.manage_importObject(filename)\n\t\t\tself.htmlmessage('%s imported successfully' % filename)",
"def checkout(request):\n \n try:\n order=Order.objects.get(user=request.user,status='N')\n if request.method==\"POST\":\n\n # bind order object so that new object is not created when save method of form is called\n order_form=OrderPlacingForm(request.POST,instance=order)\n if order_form.is_valid():\n\n # update values in existing order by values entered by user in form\n saved_order=order_form.save()\n saved_order.token=get_random_string(10,'0123456789')\n\n # set order status as Preparing\n saved_order.status='P'\n saved_order.save()\n\n # delete entries from orderline table\n OrderLine.objects.filter(order_id=saved_order.id).delete()\n\n # when order is placed successfully,redirect to order details page\n return render(request,'HotelMgmt/order_details.html',{'order_details':saved_order})\n\n else:\n if order is not None:\n \n if OrderLine.objects.filter(order_id=order.id):\n order_form=OrderPlacingForm()\n\n # if order exists but there are no items\n else:\n return render(request,'HotelMgmt/checkout.html',{'msg':False}) \n except Order.DoesNotExist:\n return render(request,'HotelMgmt/checkout.html',{'msg':False})\n\n return render(request,'HotelMgmt/checkout.html',{'order_form':order_form,'msg':True})"
] | [
"0.72466475",
"0.7039832",
"0.6421888",
"0.5852894",
"0.58005047",
"0.56411994",
"0.5609187",
"0.5599366",
"0.5552095",
"0.55054647",
"0.5487412",
"0.54771256",
"0.5407994",
"0.5373156",
"0.53110003",
"0.5274243",
"0.52071506",
"0.52046293",
"0.5194662",
"0.5182003",
"0.5181731",
"0.51519567",
"0.5139982",
"0.5073672",
"0.5068087",
"0.5056989",
"0.5050096",
"0.5046869",
"0.50404704",
"0.5020442"
] | 0.7385691 | 0 |
Calls wizard to export order status for store view | def export_order_status_button(cls, store_views):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def export_order_status_for_store_view(self):\n Sale = Pool().get('sale.sale')\n\n exported_sales = []\n domain = [('magento_store_view', '=', self.id)]\n\n if self.last_order_export_time:\n domain = [('write_date', '>=', self.last_order_export_time)]\n\n sales = Sale.search(domain)\n\n self.last_order_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n exported_sales.append(sale.export_order_status_to_magento())\n\n return exported_sales",
"def default_start(self, data):\n return {\n 'message': \"This wizard will export shipment status for all the \" +\n \"shipments related to this store view. To export tracking \" +\n \"information also for these shipments please check the \" +\n \"checkbox for Export Tracking Information on Store View.\"\n }",
"def export_shipment_status(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n # Set the instance in context\n with Transaction().set_context(\n magento_instance=store_view.instance.id\n ):\n store_view.export_shipment_status_to_magento()",
"def _get_export_step(self, cr, uid, external_session, context=None):\n return 10",
"def import_orders_button(cls, store_views):\n pass",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def get(self) :\n self.generate('export.html', {\n 'xml' : export(),\n 'title' : \"Admin Export\"})",
"def order_report():",
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def on_action_2_triggered(self):\n # TODO: not implemented yet\n model = self.model\n self.doExport(model)",
"def on_action_4_triggered(self):\n # TODO: not implemented yet\n model = self.model2\n self.doExport(model)\n #raise NotImplementedError",
"def woo_sale_report(self):\n version_info = odoo.service.common.exp_version()\n if version_info.get('server_version') == '14.0':\n action = self.env.ref('woo_commerce_ept.woo_action_order_report_all').read()[0]\n else:\n action = self.env.ref('woo_commerce_ept.woo_sale_report_action_dashboard').read()[0]\n\n return action",
"def _after_export(self, *args, **kwargs):\n return",
"def export_everything(self):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n previoustext = self.tabs.window.statuslabel['text']\n res = tkinter.messagebox.askyesno(\n 'Export Everything',\n 'Exporting data on all AIS stations, this may take some time.')\n if res:\n outpath = tkinter.filedialog.askdirectory()\n if outpath:\n self.tabs.window.statuslabel.config(\n text='Exporting all AIS station data to - {}'.format(\n outpath),\n fg='black', bg='gold')\n self.update_idletasks()\n export.export_overview(\n self.tabs.window.aistracker,\n self.tabs.window.nmeatracker,\n self.tabs.window.messagelog,\n outpath, orderby=orderby, region=currentregion)\n export.export_everything(\n self.tabs.window.aistracker,\n self.tabs.window.messagelog,\n outpath, orderby=orderby, region=currentregion)\n self.tabs.window.statuslabel.config(\n text=previoustext, bg='light grey')\n else:\n raise ExportAborted(\n 'Export of all AIS data cancelled by user.')\n else:\n raise ExportAborted('Export of all AIS data cancelled by user.')",
"def print_xlsx(self):\n if self.date_from and self.date_to:\n if self.date_from > self.date_to:\n raise ValidationError(\"Date From must be less than Date To\")\n\n # active_record = self._context['id']\n # record = self.env['room.accommodation'].browse(active_record)\n data = {\n 'date_from': self.date_from,\n 'date_to': self.date_to,\n 'guest_id': self.guest_id.id,\n 'model_id': self.id,\n 'check_out': self.check_out,\n 'date_today': fields.Datetime.now()\n }\n\n print(\"XLSX Wizard data : \", data)\n\n return {\n 'type': 'ir.actions.report',\n 'data': {\n 'model': 'accommodation.reporting',\n 'options': json.dumps(data, default=date_utils.json_default),\n 'output_format': 'xlsx',\n 'report_name': 'Accommodation Report'\n },\n 'report_type': 'xlsx'\n }",
"def print_stock_rotation_report(self):\n warehouses = False\n locations = False\n from_date = False\n to_date = False\n active_id = self.ids[0]\n today=datetime.now().strftime(\"%Y-%m-%d\")\n f_name = 'Stock Rotation Report' + ' ' + today\n stock_warehouse_obj = self.env['stock.warehouse']\n stock_locations_obj = self.env['stock.location']\n product_obj = self.env['product.product']\n \n if self.filtaration == 'warehouse':\n if not self.include_all_warehouse:\n if not self.warehouse_ids:\n raise ValidationError(\"please select the Warehouse.\")\n warehouses = self.warehouse_ids\n else:\n warehouses = stock_warehouse_obj.search([])\n else:\n if not self.include_all_location:\n if not self.location_ids:\n raise ValidationError(\"please select the Locations.\")\n locations = self.location_ids\n else:\n locations = stock_locations_obj.search([('usage','=','internal')])\n\n\n if not self.from_date:\n raise ValidationError(\"please select the From Date.\")\n \n if not self.to_date:\n raise ValidationError(\"please select the To Date.\")\n\n all_products = product_obj.with_context(active_test=True).search([('type','=','product')])\n from_date = self.from_date\n to_date = self.to_date\n \n date_1 = time.strptime(from_date, \"%Y-%m-%d\")\n date_2 = time.strptime(to_date, \"%Y-%m-%d\")\n if not (date_1 <= date_2):\n raise ValidationError(\"Fromdate is not previous then Todate\")\n self.get_stock_rotation_report(from_date,to_date,warehouses,locations,all_products)\n if self.datas:\n return {\n 'type' : 'ir.actions.act_url',\n 'url':'web/content/?model=stock.rotation.report&download=true&field=datas&id=%s&filename=%s.xls'%(active_id,f_name),\n 'target': 'new',\n }",
"def test_service_order_wizard(self):\n\n\t\t# Create objects\n\t\tmachine = Machine.objects.create(**data.machine_data)\n\t\tcustomer = Customer.objects.create(**data.customer_data)\n\t\tengineer = ServiceEngineer.objects.create(**data.engineer_name)\n\n\t\t# Create InfoForm\n\t\tinfo_form_data = {'engineer': engineer.id,\n\t\t\t\t\t\t'rma_number': data.service_order_data['rma_number'],\n\t\t \t\t\t\t'date': data.service_order_data['date'], 'customer': customer.id,\n\t\t\t\t\t\t'machine': machine.serial_number,\n\t\t\t\t\t\t'condition': data.service_order_data['condition']}\n\t\tinfo_form = InfoForm(info_form_data)\n\n\t\t# Create AssessmentForm\n\t\tassessment_form_data = {'correction': data.service_order_data['correction'],\n\t\t\t\t\t\t\t\t'notes': data.service_order_data['notes']}\n\t\tassessment_form = AssessmentForm(assessment_form_data)\n\n\t\t# Create InvoiceForm\n\t\tinvoice_form_data = {'purchase_order': data.service_order_data['purchase_order'],\n\t\t\t\t\t\t\t'zone_charge': data.service_order_data['zone_charge'],\n\t\t\t\t\t\t\t'parts_charge': data.service_order_data['parts_charge'],\n\t\t\t\t\t\t\t'payment_category': data.service_order_data['payment_category'],\n\t\t\t\t\t\t\t'service_category': data.service_order_data['service_category']}\n\t\tinvoice_form = InvoiceForm(invoice_form_data)\n\n\t\tform_list = ValuesView(OrderedDict([('info', info_form),\n\t\t\t\t\t('assessment', assessment_form), ('invoice', invoice_form)]))\n\n\t\tsow = ServiceOrderWizard()\n\t\tresponse = sow.done(form_list)\n\n\t\tself.assertEqual(response.status_code, 200)",
"def do(self):\r\n self.dlCsvReport()\r\n self.dlXlsReport()",
"def action_stagger_purchase_delivery(self):\n for wizard in self:\n #On vérifie que la quantité entrée est inférieure à la quantité de la ligne \n #d'achat \n purchase_line = wizard.purchase_line_id\n price_unit = purchase_line.price_unit\n if wizard.quantity <= 0:\n raise except_orm(_(\"Error\"), _('You must enter a quantity superior to 0'))\n \n if wizard.quantity >= purchase_line.sec_uom_qty:\n raise except_orm(_(\"Error\"), _('You must enter a quantity inferior to the initial purchase '\n 'line quantity'))\n \n #On récupère les valeurs entrées dans le wizard\n values = {'sec_uom_qty': wizard.quantity,\n 'expected_date': wizard.new_date}\n new_line = purchase_line.copy(values)\n new_line._onchange_sec_uom_qty(with_warning=False)\n new_line._onchange_uom_qty()\n new_line._onchange_uoi_qty()\n new_line.write({'price_unit': price_unit})\n #On décrémente la ligne initiale de la quantité de la nouvelle ligne (car celle-ci respecte forcément\n #le multiple et le minimum\n purchase_line.write({'sec_uom_qty': purchase_line.sec_uom_qty - wizard.quantity})\n purchase_line._onchange_sec_uom_qty(with_warning=False)\n purchase_line._onchange_uom_qty()\n purchase_line._onchange_uoi_qty()\n purchase_line.write({'price_unit': price_unit})\n #On retourne l'achat\n if wizard.purchase_id:\n action_dict = get_form_view(self, 'purchase.purchase_order_see_form')\n if action_dict and action_dict.get('id') and action_dict.get('type'):\n action = self.env[action_dict['type']].browse(action_dict['id'])\n action_struc = action.read()\n action_struc[0]['res_id'] = wizard.purchase_id.id\n action_struc = action_struc[0]\n \n return action_struc\n else:\n return {'type': 'ir.actions.act_window_close'}",
"def order_update_status():\n result = order_obj.order_update_status(request.forms) \n return result",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_indicator_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def export_overview(self, outpath=None):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if not outpath:\n outpath = tkinter.filedialog.askdirectory()\n if outpath:\n export.export_overview(\n self.tabs.window.aistracker,\n self.tabs.window.nmeatracker,\n self.tabs.window.messagelog,\n outpath, orderby=orderby, region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def click_buy_and_sell_management_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.buy_and_sell_management_grid_div_id)",
"def export(self):\n rpt_date = datetime.now()\n filename = 'bushfire_regionbytenure_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def create_purchase_order(self, cr, uid, ids, context=None):\n sale_obj = self.pool.get('sale.order')\n act_window = self.pool.get('ir.actions.act_window')\n wizard = self.browse(cr, uid, ids[0], context)\n sale_ids = context.get('active_ids', [])\n if wizard.advance_purchase_order == 'all':\n # create the final invoices of the active sales orders\n res = sale_obj.manual_purchase_order(cr, uid, sale_ids, context)\n \n return {'type': 'ir.actions.act_window_close'}\n\n if wizard.advance_purchase_order == 'lines':\n # open the list view of sales order lines to invoice\n res = act_window.for_xml_id(cr, uid, 'sale', 'action_order_line_tree2', context)\n res['context'] = {\n \n 'search_default_order_id': sale_ids and sale_ids[0] or False,\n }\n return res \n\n inv_ids = []\n for sale_id, inv_values in self._prepare_advance_po_vals(cr, uid, ids, context=context):\n inv_ids.append(self._create_purchase_order(cr, uid, inv_values, sale_id, context=context))\n\n \n return {'type': 'ir.actions.act_window_close'}",
"def custom_actions(self, form_wizard_entry, request=None):",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_by_tenure_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'quarterly_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def abc_transfer_steps(self):\n return [\n (20, 'abc_transfer_wizard'),\n (40, 'abc_create_invoice'),\n (60, 'abc_confirm_invoice')]"
] | [
"0.7288336",
"0.72106403",
"0.66292167",
"0.64519197",
"0.6352034",
"0.61387265",
"0.5909034",
"0.5826499",
"0.5795876",
"0.5792858",
"0.5707474",
"0.56563604",
"0.56291175",
"0.5613185",
"0.55068296",
"0.5367808",
"0.5351224",
"0.53355646",
"0.5288537",
"0.52830464",
"0.5270267",
"0.52664584",
"0.52519417",
"0.5235663",
"0.5175915",
"0.51736957",
"0.516653",
"0.51637053",
"0.5163181",
"0.5145741"
] | 0.7584346 | 0 |
Imports sale from store view | def import_order_from_store_view(self):
Sale = Pool().get('sale.sale')
MagentoOrderState = Pool().get('magento.order_state')
new_sales = []
instance = self.instance
with Transaction().set_context({
'magento_instance': instance.id,
'magento_website': self.website.id,
'magento_store_view': self.id,
}):
order_states = MagentoOrderState.search([
('instance', '=', instance.id),
('use_for_import', '=', True)
])
order_states_to_import_in = map(
lambda state: state.code, order_states
)
if not order_states_to_import_in:
self.raise_user_error("states_not_found")
with magento.Order(
instance.url, instance.api_user, instance.api_key
) as order_api:
# Filter orders with date and store_id using list()
# then get info of each order using info()
# and call find_or_create_using_magento_data on sale
filter = {
'store_id': {'=': self.magento_id},
'state': {'in': order_states_to_import_in},
}
if self.last_order_import_time:
last_order_import_time = \
self.last_order_import_time.replace(microsecond=0)
filter.update({
'updated_at': {
'gteq': last_order_import_time.isoformat(' ')
},
})
self.write([self], {
'last_order_import_time': datetime.utcnow()
})
orders = order_api.list(filter)
for order in orders:
new_sales.append(
Sale.find_or_create_using_magento_data(
order_api.info(order['increment_id'])
)
)
return new_sales | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_orders_button(cls, store_views):\n pass",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def sale(chain: BaseChain, token: Contract, beneficiary) -> Contract:\n args = [token.address, beneficiary]\n seed_sale = deploy_contract(chain, 'ViewlySeedSale', args=args)\n token.transact().setOwner(seed_sale.address)\n return seed_sale",
"def item_sale(request):\n\n items = Product.objects.all()\n on_sale = Product.objects.filter(on_sale=True)\n sort = None\n direction = None\n\n if request.GET:\n if 'sort' in request.GET:\n sortkey = request.GET['sort']\n sort = sortkey\n if sortkey == 'name':\n sortkey = 'lower_name'\n items = items.annotate(lower_name=Lower('name'))\n if sortkey == 'category':\n sortkey = 'category__name'\n if 'direction' in request.GET:\n direction = request.GET['direction']\n if direction == 'desc':\n sortkey = f'-{sortkey}'\n items = items.order_by(sortkey)\n\n current_sorting = f'{sort}_{direction}'\n\n context = {\n 'on_sale': on_sale,\n 'items': items,\n 'current_sorting': current_sorting,\n }\n\n return render(request, 'products/sale.html', context)",
"def test_get_sale_record(self):\n reply = self.admin_add_product()\n\n resp = self.admin_create_user()\n reply = self.attendant_login()\n token = reply['token']\n sale = dict(products = [\n {\n \"prod_name\":\"NY_denims\", \n \"quantity\":10\n }\n\t ])\n resp = self.client.post(\n '/api/v1/sales',\n content_type='application/json',\n data=json.dumps(sale),\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'Sale record created')\n self.assertEqual(resp.status_code, 200)\n\n resp = self.client.get(\n '/api/v1/sales/1',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'Sale fetched sucessfully!')\n self.assertEqual(resp.status_code, 200)",
"def import_orders(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n store_view.import_order_from_store_view()",
"def get_data_sales(self):\n return {\n 'search_type': SearchForm.SEARCH_TYPE_SALE,\n 'min_price': '40000',\n 'max_price': '50000',\n 'location':'Test, Test',\n 'min_bedrooms': '5',\n 'property_type': str(PropertyTypeFactory().slug)\n }",
"def put_on_sale():\n\n item = {\n \"status\": 'for_sale',\n \"category\": request.form['item-type'],\n \"name\": request.form['item-name'],\n \"price\": request.form['item-price'],\n \"description\": request.form['item-description'],\n \"mail\": request.form['seller-email']\n }\n\n put_item(item)\n\n return redirect('/')",
"def create_product_sale(self, order, product_row):\n cols = ProcessedOrdersExport\n sku = product_row[cols.SKU]\n product = BaseProduct.objects.get(sku=sku)\n product_sale = ProductSale(\n order=order,\n sku=product_row[cols.SKU],\n name=product_row[cols.ITEM_TITLE],\n weight=product.weight_grams,\n quantity=product_row[cols.QUANTITY],\n supplier=product.supplier,\n purchase_price=self.convert_integer_price(product.purchase_price),\n tax=self.convert_integer_price(product_row[cols.LINE_TAX]),\n unit_price=self.convert_integer_price(product_row[cols.UNIT_COST]),\n item_price=self.convert_integer_price(product_row[cols.LINE_TOTAL]),\n item_total_before_tax=self.convert_integer_price(\n product_row[cols.LINE_TOTAL_EXCLUDING_TAX]\n ),\n )\n return product_sale",
"def test_get_one_sale_record(self):\n\t\tself.register_user()\n\t\tresult = self.login_user()\n\t\taccess_token = json.loads(result.data.decode())['token']\n\n\t\tresponse = self.client.post('/api/v1/sales',\n\t\t\tdata=self.sales_data,\n\t\t\theaders=dict(Authorization=\"Bearer \" + access_token))\n\t\t\n\t\tself.assertEqual(response.status_code, 200)",
"def index(request):\n\n products = Top_selling_product.objects.all()\n context = {'products':products}\n\n return render(request, 'home/index.html',context)",
"def test_get_specific_sale_record(self):\n \n self.register_admin_test_account()\n token = self.login_admin_test()\n\n response = self.app_test_client.get(\n '{}/saleorder'.format(self.base_url), json={\n 'sale_id': 1,\n 'name': \"Sample Bags\",\n 'price': 20,\n 'quantity': 1,\n 'totalamt': 20\n },\n headers=dict(Authorization=token),\n content_type='application/json')\n\n response = self.app_test_client.get(\n '{}/saleorder/1'.format(self.base_url),\n headers=dict(Authorization=token),\n content_type='application/json'\n )\n \n self.assertEqual(response.status_code, 200)",
"def test_get_all_sale_records(self):\n reply = self.admin_add_product()\n\n resp = self.admin_create_user()\n reply = self.attendant_login()\n token = reply['token']\n sale = dict(products = [\n {\n \"prod_name\":\"NY_denims\", \n \"quantity\":10\n }\n\t ])\n resp = self.client.post(\n '/api/v1/sales',\n content_type='application/json',\n data=json.dumps(sale),\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'Sale record created')\n self.assertEqual(resp.status_code, 200)\n\n reply = self.admin_login()\n token = reply['token']\n\n resp = self.client.get(\n '/api/v1/sales',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n \n self.assertEqual(reply['message'], 'All Sale records fetched sucessfully!')\n self.assertEqual(resp.status_code, 200)",
"def index(request):\n\n # Generate counts of the main objects\n\n num_sales=Prodsale.objects.all().count()\n\n # Render the HTML template index.html with the data in the context variable\n\n return render(\n request, 'index.html',\n context={'num_sales':num_sales},\n )",
"def shop(request):\n return render(request, 'shop/shop.html')",
"def purchase_import(self, cr, uid, ids, context=None):\n wizard_row = self.browse(cr, uid, ids)[0]\n if wizard_row.add_import == 'add':\n self.pool.get('purchase.import').write(cr, uid, [wizard_row.import_id.id], {'purchase_related_ids': [(4,wizard_row.purchase_id.id)]})\n import_id = wizard_row.import_id.id\n elif wizard_row.add_import == 'create':\n import_id = self.pool.get('purchase.import').create(cr, uid, self.prepare_purchase_import(cr, uid, wizard_row, context), context)\n return import_id",
"def get_sales_data():\n print(\"Retrieving all the sales information...\")\n data = SHEET.worksheet('sales')\n print(\"Compilation complete!\\n\")\n return data",
"def sales(self, sales):\n\n self._sales = sales",
"def get_single_sale(self, sale_id):\n sale_record = self.dbconn.get_single_sale(sale_id=sale_id)\n return sale_record",
"def all_sales(self, username):\n con = dbcon()\n cur = con.cursor()\n cur.execute(\"SELECT * FROM sales;\")\n res = cur.fetchall()\n sales_records=[]\n for a_sale in res:\n record = {\n 'sales_id':a_sale[0],\n 'attendant':a_sale[1],\n 'product_name':a_sale[2],\n 'price':a_sale[3],\n 'quantity':a_sale[4]\n }\n sales_records.append(record)\n return jsonify({\"Records\": sales_records}), 200",
"def storelocator():\n\n\treturn render_template(\"storelocator.html\")",
"def view_total_sales():\n # Later will add the ability to sort by date and Category\n try:\n with session_scope() as db_session:\n orders = db_session.query(Order).all()\n\n if len(orders) < 1:\n return {\n 'code': 404,\n 'message': 'There are no sales'\n }, 404\n\n nmbr_itm = 0\n for order in orders:\n for items in order.order_lines:\n nmbr_itm = nmbr_itm + items.quantity\n\n except DBAPIError as db_error:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': re.search('DETAIL: (.*)', db_error.args[0]).group(1)\n }, 400\n except NoResultFound:\n # Returns an error in case of a integrity constraint not being followed.\n return {\n 'code': 400,\n 'message': \"No sales have been registered\"\n }, 400\n return {\n 'numberItems': nmbr_itm\n }, 200",
"def products(request):\n\n if not request.user.is_superuser:\n messages.error(request, 'Sorry, only store owners can do that.')\n return redirect(reverse('home'))\n\n products = Product.objects.all()\n template = \"auctionsmng/products.html\"\n\n context = {\n 'products': products\n }\n\n return render(request, template, context)",
"def get_sale(id, res=None, user_role=None, user_id=None, user_email=None):\n sale = sale_insatnce.get_sale(id)\n if sale:\n data = {\n \"cost\": sale[3],\n \"sale_id\": sale[0],\n \"user_id\": sale[1],\n \"description\": sale[4]\n }\n return jsonify({\n \"sales\": data\n })\n else:\n return jsonify(\n {\n \"message\": \"sale not found\"\n }\n ), 404",
"def get_sales(res=None, user_email=None, user_role=None, user_id=None):\n sales = sale_insatnce.get_sales()\n sales_data = []\n for i in sales:\n data = {\n \"email\": i[2],\n \"sale_id\": i[0],\n \"user_id\": i[1],\n \"cost\": i[3],\n \"description\": i[4]\n }\n sales_data.append(data)\n\n return (jsonify({\n \"sales\": sales_data\n }))",
"def init(self, cr):\n\t\ttools.drop_view_if_exists(cr, 'purchase_order_line_summary')\n\n\t cr.execute(\"\"\" CREATE VIEW purchase_order_line_summary AS (\n\t SELECT max(id) as id,order_id,product_id,name,product_uom,sum(product_qty) as product_qty, \n\t\t\tsum(price_subtotal) as price_subtotal,avg(discount) as discount \n\t\t\tfrom purchase_order_line\n\t\t\tgroup by order_id,product_id,name,product_uom)\n\t\t\t\"\"\")",
"def get(self, sale_id):\n sales_record = Sales().get_all_sales()\n single_sale = [\n sale for sale in sales_record if sale['sale_id'] == sale_id]\n if single_sale:\n return {\"Sale\": single_sale}, 200 # ok\n return {\"Message\": \"Sale Not Found\"}, 400 #Bad Request",
"def get_all_sales():\n admin = \"admin\"\n if [\"role\"] != admin:\n return jsonify({\"message\": \"Only an admin can view all sales records\"}), 401\n response = jsonify(sale_object.get_all_sales())\n response.status_code = 200\n return response",
"def insert_sales(self, *args):\n sale_id = args[0]\n product_id = args[1]\n quantity_sold = args[2]\n total_cost = self.select_one('products', 'product_id', product_id)[4] * quantity_sold\n payment_mode = args[3]\n insert_sales = \"INSERT INTO sales(sale_id, product_id, quantity_sold, total_cost, payment_mode) \" \\\n \"VALUES('{}', '{}', '{}', '{}', '{}');\"\\\n .format(sale_id, product_id, quantity_sold, total_cost, payment_mode)\n self.cursor.execute(insert_sales, (sale_id, product_id, quantity_sold, payment_mode))\n self.connection.commit()",
"def find_or_create(cls, store, values):\n store_views = cls.search([\n ('store', '=', store.id),\n ('magento_id', '=', int(values['store_id']))\n ])\n\n if store_views:\n return store_views[0]\n\n return cls(**{\n 'name': values['name'],\n 'code': values['code'],\n 'store': store.id,\n 'magento_id': int(values['store_id']),\n })"
] | [
"0.5974712",
"0.58691406",
"0.5794356",
"0.578453",
"0.5557279",
"0.55490655",
"0.54773957",
"0.5474218",
"0.5408638",
"0.54054606",
"0.53851986",
"0.537367",
"0.5366755",
"0.5356549",
"0.5353029",
"0.53460276",
"0.52735",
"0.5259739",
"0.52387893",
"0.51985174",
"0.519066",
"0.51755726",
"0.51677036",
"0.51604354",
"0.514637",
"0.5143668",
"0.5138211",
"0.5137855",
"0.51174045",
"0.5112427"
] | 0.71493983 | 0 |
Export sale orders to magento for the current store view. If the last export time is defined, export only those orders which were updated after the last export time. | def export_order_status_for_store_view(self):
Sale = Pool().get('sale.sale')
exported_sales = []
domain = [('magento_store_view', '=', self.id)]
if self.last_order_export_time:
            domain.append(('write_date', '>=', self.last_order_export_time))
sales = Sale.search(domain)
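        # Remember when this export ran; the next run only exports sales
        # written after this timestamp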
self.last_order_export_time = datetime.utcnow()
self.save()
for sale in sales:
exported_sales.append(sale.export_order_status_to_magento())
return exported_sales | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def export_shipment_status(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n # Set the instance in context\n with Transaction().set_context(\n magento_instance=store_view.instance.id\n ):\n store_view.export_shipment_status_to_magento()",
"def export(self, queryset=None):\n self.queryset = queryset or self.queryset\n exported_datetime = get_utcnow()\n filename = self.get_filename(exported_datetime)\n path = os.path.join(self.export_folder, filename)\n with open(path, 'w') as f:\n csv_writer = csv.DictWriter(\n f, fieldnames=self.field_names, delimiter=self.delimiter)\n csv_writer.writeheader()\n for model_obj in self.queryset:\n object_helper = self.object_history_helper_cls(\n model_obj=model_obj, create=True)\n objects = object_helper.get_not_exported()\n for obj in objects:\n row = self.prepare_row(\n model_obj=model_obj,\n exported_datetime=exported_datetime,\n export_change_type=obj.export_change_type)\n csv_writer.writerow(row)\n object_helper.update_as_exported(\n objects=objects, exported_datetime=exported_datetime)\n file_history_updater = self.file_history_updater_cls(\n path=path,\n delimiter=self.delimiter,\n model=self.model_cls._meta.label_lower,\n filename=filename)\n file_history_updater.update()\n return path",
"def export_player_sales():\n # get players belonging to a team\n players = Player.objects.filter(team__isnull=False)\n\n with open('/home/dan/Documents/ffooty_data/csv_exports/player_sales_auction.csv', 'wb') as f:\n writer = csv.writer(f)\n\n for p in players:\n writer.writerow([p.code, p.team.id, p.sale])",
"def export_completed_stock_orders(self, file_path):\n all_orders = self.get_all_stock_orders()\n with open(f\"{file_path}stock_orders_{dt.date.today().strftime('%b-%d-%Y')}.csv\", 'w', newline='') as f:\n writer = csv.writer(f)\n writer.writerow([\n 'symbol',\n 'date',\n 'order_type',\n 'side',\n 'fees',\n 'quantity',\n 'average_price'\n ])\n for order in all_orders:\n if order['state'] == 'filled' and order['cancel'] is None:\n writer.writerow([\n self.get_symbol_by_url(order['instrument']),\n order['last_transaction_at'],\n order['type'],\n order['side'],\n order['fees'],\n order['quantity'],\n order['average_price']\n ])\n f.close()",
"def woo_sale_report(self):\n version_info = odoo.service.common.exp_version()\n if version_info.get('server_version') == '14.0':\n action = self.env.ref('woo_commerce_ept.woo_action_order_report_all').read()[0]\n else:\n action = self.env.ref('woo_commerce_ept.woo_sale_report_action_dashboard').read()[0]\n\n return action",
"def update_orders(self, processed_orders_export=None):\n existing_order_ids = set(Order.objects.values_list(\"order_id\", flat=True))\n processed_orders_export = processed_orders_export or ProcessedOrdersExport()\n processed_orders = processed_orders_export.orders\n for order_id, order_rows in processed_orders.items():\n if order_id in existing_order_ids:\n continue\n row = order_rows[0]\n order = self.create_order(order_id, row)\n order.save()\n order_skus = []\n for product_row in order_rows:\n if product_row[ProcessedOrdersExport.COMPOSITE_PARENT_SKU]:\n continue\n if product_row[ProcessedOrdersExport.SKU] in order_skus:\n product_sale = ProductSale.objects.get(\n sku=product_row[ProcessedOrdersExport.SKU],\n order__order_id=order_id,\n )\n product_sale.quantity += int(\n product_row[ProcessedOrdersExport.QUANTITY]\n )\n product_sale.save()\n else:\n product_sale = self.create_product_sale(order, product_row)\n product_sale.save()\n order_skus.append(product_sale.sku)\n try:\n order._set_calculated_shipping_price()\n except Exception:\n pass",
"def save_catalog(self):\n self.catalog.to_csv(self.catalog_path, index_label='dateTime')",
"def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()",
"def export_order_status_button(cls, store_views):\n pass",
"def export_and_update_products(self, cursor, user, ids=None, context=None):\n if context is None:\n context = {}\n export_and_update_catalog_obj = self.pool.get('magento.instance.website.export_catalog')\n if not ids:\n ids = self.search(cursor, user, [], context)\n \n for website in self.browse(cursor, user, ids, context):\n context['active_id'] = website.id\n attribute_set = export_and_update_catalog_obj._get_default_attribute_set(cursor, user, context=context)\n export_and_update_catalog_id = export_and_update_catalog_obj.create(cursor, user, {'export_images': True, 'attribute_set': attribute_set}, context)\n export_and_update_catalog_obj.update_and_export_products_openerp_to_magento(cursor, user, [export_and_update_catalog_id], context)",
"def click_buy_and_sell_management_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.buy_and_sell_management_grid_div_id)",
"def download_queryset(self, queryset, export_format):\n dataset = StockItemResource().export(queryset=queryset)\n\n filedata = dataset.export(export_format)\n\n filename = 'InvenTree_StockItems_{date}.{fmt}'.format(\n date=datetime.now().strftime(\"%d-%b-%Y\"),\n fmt=export_format\n )\n\n return DownloadFile(filedata, filename)",
"def save_orders_as_csv(folder_path: str, orders, fieldnames, timestamp_offset: int = 1):\n\n import os\n from datetime import datetime, timedelta\n\n timestamp = datetime.now() + timedelta(minutes=timestamp_offset)\n file_name = f\"S-{timestamp.strftime(FILE_TIMESTAMP_PATTERN)}.csv\"\n file_path = os.path.join(folder_path, file_name)\n\n log.debug(f\"Creating file {file_path} with processed orders\")\n with open(file_path, \"w\") as csvfile:\n writer = csv.DictWriter(csvfile, fieldnames=fieldnames, dialect=CSV_DIALECT_NAME)\n writer.writeheader()\n writer.writerows(orders)\n\n csvfile.close()\n\n return file_path",
"def export(self,**kwargs):\n \n # import pdb;pdb.set_trace()\n \n # provide for case where recs are set extenally\n if not self.recs:\n self.select_recs(**kwargs)\n if self.recs:\n if self.export_file_name:\n filename = self.export_file_name\n else:\n filename = \"{table_name}_report_{datetime}.csv\".format(\n table_name = self.table.display_name,\n datetime = date_to_string(local_datetime_now(),'iso_datetime'),\n ).replace(' ','_').lower()\n \n if not self.export_fields:\n # include all fields by default\n self.export_fields = self._set_default_list_fields(include_all=True).copy()\n\n self.set_list_fields(self.export_fields)\n \n \n if self.export_template:\n result = render_template(self.export_template, data=self)\n else:\n # add a descriptive title row\n if self.export_title:\n result = self.export_title.strip() + '\\n'\n else:\n result = \"Export of table {} as of {}\\n\".format(self.table.table_name,excel_date_and_time_string(local_datetime_now()))\n \n result += ','.join([x['label'] for x in self.export_fields]) + '\\n'\n for rec in self.recs:\n rec_row = []\n for field in self.export_fields:\n data = rec.__getattribute__(field['name'])\n if field['type'].upper() == \"DATE\":\n data = local_date_string(data)\n elif field['type'].upper() == \"DATETIME\":\n data = excel_date_and_time_string(data)\n else:\n # just text\n data = str(data).strip()\n \n # replace double quotes with double-double quotes\n data = data.replace('\"','\"\"') #double up on double quotes\n \n if \",\" in data:\n # if any commas, wrap in quotes\n data = '\"' + data + '\"'\n \n #replace returns\n data = data.replace('\\r\\n',' -crnl- ')\n data = data.replace('\\n',' -nl- ')\n data = data.replace('\\r',' -rtn- ')\n\n rec_row.append(data)\n \n result += ','.join([str(x) for x in rec_row]) + '\\n'\n \n return DataStreamer(result,filename,'text/csv').send()\n \n self.result_text = \"No records selected\"\n self.success = False\n \n flash(self.result_text)\n return self.list(**kwargs)",
"def click_vendor_price_list_detail_rates_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_rates_grid_div_id)",
"def export_inventory(self, cursor, user, ids, context):\n website_obj = self.pool.get('magento.instance.website')\n\n website_id = context.get('active_id')\n t = threading.Thread(target=website_obj.export_inventory_to_magento,\n args=(cursor, user, website_id, context, True))\n t.daemon = True\n t.start()\n\n return True#self.open_products(cursor, user, map(int, products), context)",
"def export_csv_action(description=\"Export as CSV\",\n fields=None,\n exclude=None,\n header=True,\n manyToManySep=';'):\n def export_as_csv(modeladmin, request, queryset):\n \"\"\" Generic csv export admin action.\n Based on http://djangosnippets.org/snippets/2712/\n \"\"\"\n opts = modeladmin.model._meta\n field_names = [field.name for field in opts.fields]\n labels = []\n\n if exclude:\n field_names = [f for f in field_names if f not in exclude]\n\n elif fields:\n field_names = [field for field, _ in fields]\n labels = [label for _, label in fields]\n\n response = HttpResponse(content_type='text/csv')\n response['Content-Disposition'] = 'attachment; filename=%s.csv' % (\n str(opts).replace('.', '_')\n )\n\n writer = csv.writer(response)\n\n if header:\n writer.writerow(labels if labels else field_names)\n\n for obj in queryset:\n writer.writerow([prep_field(request, obj, field, manyToManySep) for field in field_names])\n return response\n export_as_csv.short_description = description\n export_as_csv.acts_on_all = True\n return export_as_csv",
"def export_everything(self):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n previoustext = self.tabs.window.statuslabel['text']\n res = tkinter.messagebox.askyesno(\n 'Export Everything',\n 'Exporting data on all AIS stations, this may take some time.')\n if res:\n outpath = tkinter.filedialog.askdirectory()\n if outpath:\n self.tabs.window.statuslabel.config(\n text='Exporting all AIS station data to - {}'.format(\n outpath),\n fg='black', bg='gold')\n self.update_idletasks()\n export.export_overview(\n self.tabs.window.aistracker,\n self.tabs.window.nmeatracker,\n self.tabs.window.messagelog,\n outpath, orderby=orderby, region=currentregion)\n export.export_everything(\n self.tabs.window.aistracker,\n self.tabs.window.messagelog,\n outpath, orderby=orderby, region=currentregion)\n self.tabs.window.statuslabel.config(\n text=previoustext, bg='light grey')\n else:\n raise ExportAborted(\n 'Export of all AIS data cancelled by user.')\n else:\n raise ExportAborted('Export of all AIS data cancelled by user.')",
"def export(request):\n\n if not request.user.is_authenticated():\n return HttpResponseRedirect('/login/?next=%s' % request.path)\n\n filename = 'export-inscripcions-tallers-%s.csv' % date.today().strftime(\"%y-%m-%d\")\n\n regtaller_list = TallerRegistration.objects.all()\n\n table = ExportTallerRegistrationTable(regtaller_list)\n table.order_by = request.GET.get(\"sort\",'last_name')\n\n response = HttpResponse(mimetype='text/csv')\n response['Content-Disposition'] = 'attachment; filename=%s' % filename\n writer = csv.writer(response)\n # Write headers to CSV file\n headers = []\n for column in table.columns:\n headers.append(column.header.encode('utf8'))\n writer.writerow(headers)\n\n # Write data to CSV file\n for obj in table.rows:\n row = []\n for value in obj:\n if isinstance(value, basestring):\n row.append(value.encode('utf8'))\n else:\n row.append(value)\n writer.writerow(row)\n\n # Return CSV file to browser as download\n return response",
"def saveTrackerCSV(self, market='', save_file='tracker.csv'):\n\n # validate market is syntactically correct\n self._checkMarketSyntax(market)\n\n if self.mode == 'live':\n if self.app.getExchange() == 'coinbasepro':\n # retrieve orders from live Coinbase Pro account portfolio\n df = self.getOrders(market, '', 'done')\n elif self.app.getExchange() == 'binance':\n # retrieve orders from live Binance account portfolio\n df = self.getOrders(market, '', 'done')\n else:\n df = pd.DataFrame()\n else:\n # return dummy orders\n if market == '':\n df = self.orders\n else:\n if 'market' in self.orders:\n df = self.orders[self.orders['market'] == market]\n else:\n df = pd.DataFrame()\n\n if list(df.keys()) != [ 'created_at', 'market', 'action', 'type', 'size', 'value', 'fees', 'price', 'status' ]:\n # no data, return early\n return False\n\n df_tracker = pd.DataFrame()\n\n last_action = ''\n for market in df['market'].sort_values().unique():\n df_market = df[df['market'] == market]\n\n df_buy = pd.DataFrame()\n df_sell = pd.DataFrame()\n\n pair = 0\n # pylint: disable=unused-variable\n for index, row in df_market.iterrows():\n if row['action'] == 'buy':\n pair = 1\n\n if pair == 1 and (row['action'] != last_action):\n if row['action'] == 'buy':\n df_buy = row\n elif row['action'] == 'sell':\n df_sell = row\n \n if row['action'] == 'sell' and len(df_buy) != 0:\n df_pair = pd.DataFrame([\n [\n df_sell['status'], \n df_buy['market'], \n df_buy['created_at'], \n df_buy['type'], \n df_buy['size'],\n df_buy['value'],\n df_buy['fees'], \n df_buy['price'],\n df_sell['created_at'],\n df_sell['type'], \n df_sell['size'], \n df_sell['value'],\n df_sell['fees'], \n df_sell['price'] \n ]], columns=[ 'status', 'market', \n 'buy_at', 'buy_type', 'buy_size', 'buy_value', 'buy_fees', 'buy_price',\n 'sell_at', 'sell_type', 'sell_size', 'sell_value', 'sell_fees', 'sell_price' \n ])\n df_tracker = df_tracker.append(df_pair, ignore_index=True)\n pair = 0\n \n last_action = row['action']\n\n if list(df_tracker.keys()) != [ 'status', 'market', \n 'buy_at', 'buy_type', 'buy_size', 'buy_value', 'buy_fees', 'buy_price',\n 'sell_at', 'sell_type', 'sell_size', 'sell_value', 'sell_fees', 'sell_price' ]:\n # no data, return early\n return False\n\n df_tracker['profit'] = np.subtract(np.subtract(df_tracker['sell_value'], df_tracker['buy_value']), np.add(df_tracker['buy_fees'], df_tracker['sell_fees']))\n df_tracker['margin'] = np.multiply(np.true_divide(df_tracker['profit'], df_tracker['buy_value']), 100)\n df_sincebot = df_tracker[df_tracker['buy_at'] > '2021-02-1']\n\n try:\n df_sincebot.to_csv(save_file, index=False)\n except OSError:\n raise SystemExit('Unable to save: ', save_file)",
"def export(self):\r\n self.prices[\"returns\"] = self.returns\r\n self.prices.columns = ['prices', 'returns']\r\n self.prices = self.prices.dropna()\r\n \r\n name = QFileDialog.getSaveFileName(None, 'Save File', filter='*.xlsx')\r\n if(name[0] == ''):\r\n # if name empty\r\n pass\r\n else:\r\n self.prices.to_excel(name[0])",
"def import_orders(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n store_view.import_order_from_store_view()",
"def print_stock_rotation_report(self):\n warehouses = False\n locations = False\n from_date = False\n to_date = False\n active_id = self.ids[0]\n today=datetime.now().strftime(\"%Y-%m-%d\")\n f_name = 'Stock Rotation Report' + ' ' + today\n stock_warehouse_obj = self.env['stock.warehouse']\n stock_locations_obj = self.env['stock.location']\n product_obj = self.env['product.product']\n \n if self.filtaration == 'warehouse':\n if not self.include_all_warehouse:\n if not self.warehouse_ids:\n raise ValidationError(\"please select the Warehouse.\")\n warehouses = self.warehouse_ids\n else:\n warehouses = stock_warehouse_obj.search([])\n else:\n if not self.include_all_location:\n if not self.location_ids:\n raise ValidationError(\"please select the Locations.\")\n locations = self.location_ids\n else:\n locations = stock_locations_obj.search([('usage','=','internal')])\n\n\n if not self.from_date:\n raise ValidationError(\"please select the From Date.\")\n \n if not self.to_date:\n raise ValidationError(\"please select the To Date.\")\n\n all_products = product_obj.with_context(active_test=True).search([('type','=','product')])\n from_date = self.from_date\n to_date = self.to_date\n \n date_1 = time.strptime(from_date, \"%Y-%m-%d\")\n date_2 = time.strptime(to_date, \"%Y-%m-%d\")\n if not (date_1 <= date_2):\n raise ValidationError(\"Fromdate is not previous then Todate\")\n self.get_stock_rotation_report(from_date,to_date,warehouses,locations,all_products)\n if self.datas:\n return {\n 'type' : 'ir.actions.act_url',\n 'url':'web/content/?model=stock.rotation.report&download=true&field=datas&id=%s&filename=%s.xls'%(active_id,f_name),\n 'target': 'new',\n }",
"def export_any_queryset(request, queryset, filename, excluded_fields=[], included_fields=[], csv_field_delimiter = \";\"):\n\n name, extension = os.path.splitext(filename)\n file_format = extension[1:]\n\n output = None\n if file_format == 'csv':\n content_type = 'text/csv'\n output = io.StringIO()\n writer = csv.writer(output, delimiter=csv_field_delimiter, quoting=csv.QUOTE_MINIMAL)\n exporter = SpreadsheetQuerysetExporter(writer, file_format=file_format)\n exporter.export_queryset(queryset)\n elif file_format == 'xlsx':\n content_type = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'\n #content_type = 'application/vnd.ms-excel'\n output = io.BytesIO()\n with open_xlsx_file(output) as writer:\n # # Write Spreadsheet\n # writer.write_headers_from_strings(\n # ['Cliente', 'Commessa', 'Progetto', 'Attività', ] +\n # ['Totale', ],\n # )\n # writer.apply_autofit()\n exporter = SpreadsheetQuerysetExporter(writer, file_format=file_format)\n exporter.export_queryset(queryset, excluded_fields=excluded_fields, included_fields=included_fields)\n writer.apply_autofit()\n assert writer.is_closed()\n else:\n raise Exception('Wrong export file format \"%s\"' % file_format)\n\n # send \"output\" object to stream with mimetype and filename\n assert output is not None\n output.seek(0)\n # response = HttpResponse(\n # output.read(),\n response = StreamingHttpResponse(\n output,\n content_type=content_type,\n )\n #response['Content-Disposition'] = 'inline; filename=\"%s\"' % filename\n response['Content-Disposition'] = 'attachment; filename=\"%s\"' % filename\n\n return response",
"def update_stock(self, instance, export_stock_from_date):\n product_obj = self.env['product.product']\n woo_product_product_obj = self.env['woo.product.product.ept']\n\n if not export_stock_from_date:\n export_stock_from_date = datetime.now() - timedelta(30)\n odoo_products = product_obj.get_products_based_on_movement_date_ept(export_stock_from_date,\n instance.company_id)\n instance.last_inventory_update_time = datetime.now()\n woo_templates = woo_product_product_obj.search([('product_id', 'in', odoo_products), (\n 'woo_is_manage_stock', '=', True)]).woo_template_id.filtered(\n lambda x:x.woo_instance_id == instance and x.exported_in_woo == True)\n if woo_templates:\n self.with_context(updated_products_in_inventory=odoo_products).woo_update_stock(\n instance, woo_templates)\n else:\n _logger.info(\n \"==There is no product movement found between date time from: '%s' to '%s' for export stock.\" % (\n export_stock_from_date, datetime.now()))\n return True",
"def default_export_(self, fields):\n Store = Pool().get('magento.website.store')\n\n store = Store(Transaction().context.get('active_id'))\n\n return {\n 'products_count': store.export_tier_prices_to_magento()\n }",
"def _set_last_exported_date(self, cr, uid, external_session, date, context=None):\n return True"
] | [
"0.67229575",
"0.6432975",
"0.6346789",
"0.6296307",
"0.5768794",
"0.5759411",
"0.5717263",
"0.5693412",
"0.564108",
"0.556255",
"0.5428441",
"0.53630245",
"0.52866554",
"0.52807623",
"0.5272195",
"0.5250655",
"0.5202378",
"0.51813996",
"0.51757",
"0.5057805",
"0.5045959",
"0.50339127",
"0.5007785",
"0.5002706",
"0.49784365",
"0.49704278",
"0.4962905",
"0.49604145",
"0.4944869",
"0.49397835"
] | 0.77887744 | 0 |
Import orders from magento for store views | def import_orders(cls, store_views=None):
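        # Default to every store view when none are passed in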
if store_views is None:
store_views = cls.search([])
for store_view in store_views:
store_view.import_order_from_store_view() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_order_from_store_view(self):\n Sale = Pool().get('sale.sale')\n MagentoOrderState = Pool().get('magento.order_state')\n\n new_sales = []\n instance = self.instance\n with Transaction().set_context({\n 'magento_instance': instance.id,\n 'magento_website': self.website.id,\n 'magento_store_view': self.id,\n }):\n\n order_states = MagentoOrderState.search([\n ('instance', '=', instance.id),\n ('use_for_import', '=', True)\n ])\n order_states_to_import_in = map(\n lambda state: state.code, order_states\n )\n\n if not order_states_to_import_in:\n self.raise_user_error(\"states_not_found\")\n\n with magento.Order(\n instance.url, instance.api_user, instance.api_key\n ) as order_api:\n # Filter orders with date and store_id using list()\n # then get info of each order using info()\n # and call find_or_create_using_magento_data on sale\n filter = {\n 'store_id': {'=': self.magento_id},\n 'state': {'in': order_states_to_import_in},\n }\n if self.last_order_import_time:\n last_order_import_time = \\\n self.last_order_import_time.replace(microsecond=0)\n filter.update({\n 'updated_at': {\n 'gteq': last_order_import_time.isoformat(' ')\n },\n })\n self.write([self], {\n 'last_order_import_time': datetime.utcnow()\n })\n orders = order_api.list(filter)\n for order in orders:\n new_sales.append(\n Sale.find_or_create_using_magento_data(\n order_api.info(order['increment_id'])\n )\n )\n\n return new_sales",
"def import_orders_button(cls, store_views):\n pass",
"def import_order_states(cls, instances):\n OrderState = Pool().get('magento.order_state')\n\n for instance in instances:\n\n Transaction().context.update({\n 'magento_instance': instance.id\n })\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n OrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )",
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def export_order_status_for_store_view(self):\n Sale = Pool().get('sale.sale')\n\n exported_sales = []\n domain = [('magento_store_view', '=', self.id)]\n\n if self.last_order_export_time:\n domain = [('write_date', '>=', self.last_order_export_time)]\n\n sales = Sale.search(domain)\n\n self.last_order_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n exported_sales.append(sale.export_order_status_to_magento())\n\n return exported_sales",
"def import_customers(ctx):\n load_csv(ctx, 'data/sample/customers.csv', 'res.partner')",
"def purchase_import(self, cr, uid, ids, context=None):\n wizard_row = self.browse(cr, uid, ids)[0]\n if wizard_row.add_import == 'add':\n self.pool.get('purchase.import').write(cr, uid, [wizard_row.import_id.id], {'purchase_related_ids': [(4,wizard_row.purchase_id.id)]})\n import_id = wizard_row.import_id.id\n elif wizard_row.add_import == 'create':\n import_id = self.pool.get('purchase.import').create(cr, uid, self.prepare_purchase_import(cr, uid, wizard_row, context), context)\n return import_id",
"def order_report():",
"def parse_orders(self):\n #save the information from the firebase for this cycle\n self.get_order()\n #Loop through all the stores\n for store_name,store_orders in self.orders.items():\n #Loop through all the orders\n for order_id,order_details in store_orders.items():\n #store order\n self.store_order(store_name,store_orders,order_id,order_details)\n pass",
"def test_get_orders(self):\n pass",
"def prepare(self):\n # Create a purchase order from a supplier\n Company = self.old_state.apps.get_model('company', 'company')\n PurchaseOrder = self.old_state.apps.get_model('order', 'purchaseorder')\n Part = self.old_state.apps.get_model('part', 'part')\n Supplierpart = self.old_state.apps.get_model('company', 'supplierpart')\n # TODO @matmair fix this test!!!\n # SalesOrder = self.old_state.apps.get_model('order', 'salesorder')\n\n supplier = Company.objects.create(\n name='Supplier A',\n description='A great supplier!',\n is_supplier=True,\n is_customer=True,\n )\n\n part = Part.objects.create(\n name='Bob',\n description='Can we build it?',\n assembly=True,\n salable=True,\n purchaseable=False,\n tree_id=0,\n level=0,\n lft=0,\n rght=0,\n )\n supplierpart = Supplierpart.objects.create(\n part=part,\n supplier=supplier\n )\n\n # Create some orders\n for ii in range(10):\n\n order = PurchaseOrder.objects.create(\n supplier=supplier,\n reference=f\"{ii}-abcde\",\n description=\"Just a test order\"\n )\n order.lines.create(\n part=supplierpart,\n quantity=12,\n received=1\n )\n order.lines.create(\n quantity=12,\n received=1\n )\n\n # TODO @matmair fix this test!!!\n # sales_order = SalesOrder.objects.create(\n # customer=supplier,\n # reference=f\"{ii}-xyz\",\n # description=\"A test sales order\",\n # )\n # sales_order.lines.create(\n # part=part,\n # quantity=12,\n # received=1\n # )",
"def __init__(self, temboo_session):\n super(ListOrders, self).__init__(temboo_session, '/Library/Amazon/Marketplace/Orders/ListOrders')",
"def get_all_orders():",
"def import_stores(self):\n\n stores = self.product_infos['stores']\n\n for product_store in stores:\n try:\n store = Stores.objects.get(\n name=product_store\n )\n except Stores.DoesNotExist:\n super().new_entry()\n store = Stores.objects.create(\n name=product_store\n )\n except:\n pass\n try:\n ProdStore.objects.get(\n product=self.product_object,\n store=store\n )\n except ProdStore.DoesNotExist:\n super().new_entry()\n ProdStore.objects.create(\n product=self.product_object,\n store=store\n )\n except:\n pass\n\n return stores",
"def add_imported(products):\n \n for product in products:\n add_product(product[\"product_name\"], product[\"product_quantity\"], product[\"product_price\"], product[\"date_updated\"])",
"def update_orders(self, processed_orders_export=None):\n existing_order_ids = set(Order.objects.values_list(\"order_id\", flat=True))\n processed_orders_export = processed_orders_export or ProcessedOrdersExport()\n processed_orders = processed_orders_export.orders\n for order_id, order_rows in processed_orders.items():\n if order_id in existing_order_ids:\n continue\n row = order_rows[0]\n order = self.create_order(order_id, row)\n order.save()\n order_skus = []\n for product_row in order_rows:\n if product_row[ProcessedOrdersExport.COMPOSITE_PARENT_SKU]:\n continue\n if product_row[ProcessedOrdersExport.SKU] in order_skus:\n product_sale = ProductSale.objects.get(\n sku=product_row[ProcessedOrdersExport.SKU],\n order__order_id=order_id,\n )\n product_sale.quantity += int(\n product_row[ProcessedOrdersExport.QUANTITY]\n )\n product_sale.save()\n else:\n product_sale = self.create_product_sale(order, product_row)\n product_sale.save()\n order_skus.append(product_sale.sku)\n try:\n order._set_calculated_shipping_price()\n except Exception:\n pass",
"def test_get_order_list(self):\n\n user = self.set_auth_token_header()\n\n # Order list API\n # User has no order\n url = reverse('orders-list')\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data, [])\n\n # User has orders\n data = [\n {\n 'stock': Stock.objects.get(code='AAPL'),\n 'order_type': OrderType.objects.get(code='BUY'),\n 'total_value': 18.75,\n 'status': OrderStatus.objects.get(code='FILLED'),\n 'quantity': 15.0,\n 'price': 1.25,\n 'account': user.account\n },\n ]\n data_obj = [Order(**item) for item in data]\n _ = Order.objects.bulk_create(data_obj)\n\n url = reverse('orders-list')\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(len(response.data), len(data))",
"def create_order(self, order_id, row):\n cols = ProcessedOrdersExport\n currency = self.currencies[row[cols.CURRENCY]]\n recieved_at = self.parse_date_time(row[cols.RECEIVED_DATE])\n exchange_rate = currency.exchange_rate(date=recieved_at.date())\n shipping_service = self.get_shipping_service(row)\n order = Order(\n order_id=order_id,\n recieved_at=recieved_at,\n dispatched_at=self.parse_date_time(row[cols.PROCESSED_DATE]),\n channel=self.get_channel(row[cols.SOURCE], row[cols.SUBSOURCE]),\n external_reference=row[cols.EXTERNAL_REFERENCE],\n country=self.countries[row[cols.SHIPPING_COUNTRY_CODE]],\n shipping_service=shipping_service,\n tracking_number=row[cols.TRACKING_NUMBER],\n priority=shipping_service.priority if shipping_service else False,\n displayed_shipping_price=self.convert_integer_price(\n row[cols.SHIPPING_COST]\n ),\n currency=currency,\n exchange_rate=exchange_rate,\n tax=self.convert_integer_price(row[cols.ORDER_TAX]),\n tax_GBP=self.convert_integer_price(\n Decimal(row[cols.ORDER_TAX]) * exchange_rate\n ),\n total_paid=self.convert_integer_price(row[cols.ORDER_TOTAL]),\n total_paid_GBP=self.convert_integer_price(\n Decimal(row[cols.ORDER_TOTAL]) * exchange_rate\n ),\n )\n return order",
"def setUp(self):\n super(Orders, self).setUp()",
"def import_websites(cls, instances):\n Website = Pool().get('magento.instance.website')\n Store = Pool().get('magento.website.store')\n StoreView = Pool().get('magento.store.store_view')\n MagentoOrderState = Pool().get('magento.order_state')\n\n try:\n instance, = instances\n except ValueError:\n cls.raise_user_error('multiple_instances')\n\n with Transaction().set_context(magento_instance=instance.id):\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n MagentoOrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )\n\n # Import websites\n with Core(\n instance.url, instance.api_user, instance.api_key\n ) as core_api:\n websites = []\n stores = []\n\n mag_websites = core_api.websites()\n\n # Create websites\n for mag_website in mag_websites:\n websites.append(Website.find_or_create(\n instance, mag_website\n ))\n\n for website in websites:\n mag_stores = core_api.stores(\n {'website_id': {'=': website.magento_id}}\n )\n\n # Create stores\n for mag_store in mag_stores:\n stores.append(Store.find_or_create(website, mag_store))\n\n for store in stores:\n mag_store_views = core_api.store_views(\n {'group_id': {'=': store.magento_id}}\n )\n\n # Create store views\n for mag_store_view in mag_store_views:\n store_view = StoreView.find_or_create(\n store, mag_store_view\n )\n # AR refactoring\n store_view.save()",
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def test_po_migration(self):\n PurchaseOrder = self.new_state.apps.get_model('order', 'purchaseorder')\n for ii in range(10):\n\n po = PurchaseOrder.objects.get(reference=f\"{ii}-abcde\")\n self.assertEqual(po.extra_lines.count(), 1)\n self.assertEqual(po.lines.count(), 1)\n\n # TODO @matmair fix this test!!!\n # SalesOrder = self.new_state.apps.get_model('order', 'salesorder')\n # for ii in range(10):\n # so = SalesOrder.objects.get(reference=f\"{ii}-xyz\")\n # self.assertEqual(so.extra_lines, 1)\n # self.assertEqual(so.lines.count(), 1)",
"def action_import(self):\n ctx = self._context\n account_obj = self.env[\"account.account\"]\n import_obj = self.env['import.journal.entries.advanced']\n import_line_obj = self.env[\"journal.entries.csv.import\"]\n if 'active_id' in ctx:\n import_id = import_obj.browse(ctx['active_id'])\n if not self.data:\n raise exceptions.Warning(_(\"Necesitas seleccionar un archivo!\"))\n # Decode the file data\n data = base64.b64decode(self.data).decode('utf-8')\n file_input = StringIO(data)\n file_input.seek(0)\n reader_info = []\n if self.delimeter:\n delimeter = str(self.delimeter)\n else:\n delimeter = ','\n reader = csv.reader(file_input, delimiter=delimeter,\n lineterminator='\\r\\n')\n try:\n reader_info.extend(reader)\n except Exception:\n raise exceptions.Warning(_(\"Archivo no valido\"))\n keys = reader_info[0]\n # check if keys exist\n if not isinstance(keys, list) or ('cuenta' not in keys):\n raise exceptions.Warning(_(\"No se encuentran 'cuentas' contable en el archivo\"))\n del reader_info[0]\n values = {}\n actual_date = fields.Date.today()\n for i in range(len(reader_info)):\n val = {}\n field = reader_info[i]\n values = dict(zip(keys, field))\n account = False\n if 'cuenta' in values and values['cuenta']:\n account_id = account_obj.search([('code', '=', values['cuenta'])]) \n if account_id:\n account = account_id[0]\n else:\n account = account_id\n\n val[\"ref\"] = values[\"descripcion\"]\n val[\"document_number\"] = values[\"num_documento\"]\n val[\"document_date\"] = datetime.strptime(values[\"fecha\"] , \"%d-%m-%Y\")\n val['account_id'] = account.id\n val['parent_id'] = import_id.id\n val['debit'] = values['debito']\n val['credit'] = values['credito']\n val['processed'] = False\n validate = import_line_obj.create(val)\n if validate:\n if validate.account_id:\n validate.is_ok = True",
"def orders(self):\n big = BigCommerceAPI()\n response = big.get('orders')\n return response.text",
"def import_products_from_csv(self):\n shopify_product_template = self.env[\"shopify.product.template.ept\"]\n shopify_product_obj = self.env[\"shopify.product.product.ept\"]\n common_log_obj = self.env[\"common.log.book.ept\"]\n shopify_product_image_obj = self.env[\"shopify.product.image.ept\"]\n common_log_line_obj = self.env[\"common.log.lines.ept\"]\n model_id = common_log_line_obj.get_model_id(\"shopify.process.import.export\")\n\n if not self.choose_file:\n raise ValidationError(\"File Not Found To Import\")\n if not self.file_name.endswith(\".csv\"):\n raise ValidationError(\"Please Provide Only .csv File To Import Product !!!\")\n file_data = self.read_file()\n log_book_id = common_log_obj.create({\"type\": \"export\",\n \"module\": \"shopify_ept\",\n \"shopify_instance_id\": self.shopify_instance_id.id,\n \"active\": True})\n required_field = [\"template_name\", \"product_name\", \"product_default_code\",\n \"shopify_product_default_code\", \"product_description\",\n \"PRODUCT_TEMPLATE_ID\", \"PRODUCT_ID\", \"CATEGORY_ID\"]\n for required_field in required_field:\n if not required_field in file_data.fieldnames:\n raise Warning(\"Required Column Is Not Available In File\")\n sequence = 0\n row_no = 1\n shopify_template_id = False\n for record in file_data:\n message = \"\"\n if not record[\"PRODUCT_TEMPLATE_ID\"] or not record[\"PRODUCT_ID\"] or not record[\n \"CATEGORY_ID\"]:\n message += \"PRODUCT_TEMPLATE_ID Or PRODUCT_ID Or CATEGORY_ID Not As Per Odoo Product %s\" % (\n row_no)\n vals = {\"message\": message,\n \"model_id\": model_id,\n \"log_line_id\": log_book_id.id,\n }\n common_log_line_obj.create(vals)\n continue\n shopify_template = shopify_product_template.search(\n [(\"shopify_instance_id\", \"=\", self.shopify_instance_id.id),\n (\"product_tmpl_id\", \"=\", int(record[\"PRODUCT_TEMPLATE_ID\"]))])\n\n if not shopify_template:\n shopify_product_template_vals = (\n {\"product_tmpl_id\": int(record[\"PRODUCT_TEMPLATE_ID\"]),\n \"shopify_instance_id\": self.shopify_instance_id.id,\n \"shopify_product_category\": int(record[\"CATEGORY_ID\"]),\n \"name\": record[\"template_name\"],\n \"description\": record[\"product_description\"],\n \"exported_in_shopify\":False,\n \"website_published\":False\n })\n shopify_template = shopify_product_template.create(shopify_product_template_vals)\n sequence = 1\n shopify_template_id = shopify_template.id\n\n else:\n if shopify_template_id != shopify_template.id:\n shopify_product_template_vals = (\n {\"product_tmpl_id\": int(record[\"PRODUCT_TEMPLATE_ID\"]),\n \"shopify_instance_id\": self.shopify_instance_id.id,\n \"shopify_product_category\": int(record[\"CATEGORY_ID\"]),\n \"name\": record[\"template_name\"],\n \"description\": record[\"product_description\"]\n })\n shopify_template.write(shopify_product_template_vals)\n shopify_template_id = shopify_template.id\n\n # For adding all odoo images into shopify layer.\n # if shopify_template_id != shopify_template.id:\n shoify_product_image_list = []\n product_template = shopify_template.product_tmpl_id\n for odoo_image in product_template.ept_image_ids.filtered(lambda x: not x.product_id):\n shopify_product_image = shopify_product_image_obj.search_read(\n [(\"shopify_template_id\", \"=\", shopify_template_id),\n (\"odoo_image_id\", \"=\", odoo_image.id)], [\"id\"])\n if not shopify_product_image:\n shoify_product_image_list.append({\n \"odoo_image_id\": odoo_image.id,\n \"shopify_template_id\": shopify_template_id\n })\n if shoify_product_image_list:\n 
shopify_product_image_obj.create(shoify_product_image_list)\n\n if shopify_template and shopify_template.shopify_product_ids and \\\n shopify_template.shopify_product_ids[\n 0].sequence:\n sequence += 1\n shopify_variant = shopify_product_obj.search(\n [(\"shopify_instance_id\", \"=\", self.shopify_instance_id.id), (\n \"product_id\", \"=\", int(record[\"PRODUCT_ID\"])),\n (\"shopify_template_id\", \"=\", shopify_template.id)])\n if not shopify_variant:\n shopify_variant_vals = ({\"shopify_instance_id\": self.shopify_instance_id.id,\n \"product_id\": int(record[\"PRODUCT_ID\"]),\n \"shopify_template_id\": shopify_template.id,\n \"default_code\": record[\"shopify_product_default_code\"],\n \"name\": record[\"product_name\"],\n \"sequence\": sequence\n })\n shopify_variant = shopify_product_obj.create(shopify_variant_vals)\n else:\n shopify_variant_vals = ({\"shopify_instance_id\": self.shopify_instance_id.id,\n \"product_id\": int(record[\"PRODUCT_ID\"]),\n \"shopify_template_id\": shopify_template.id,\n \"default_code\": record[\"shopify_product_default_code\"],\n \"name\": record[\"product_name\"],\n \"sequence\": sequence\n })\n shopify_variant.write(shopify_variant_vals)\n row_no = +1\n # For adding all odoo images into shopify layer.\n product_id = shopify_variant.product_id\n odoo_image = product_id.ept_image_ids\n if odoo_image:\n shopify_product_image = shopify_product_image_obj.search_read(\n [(\"shopify_template_id\", \"=\", shopify_template_id),\n (\"shopify_variant_id\", \"=\", shopify_variant.id),\n (\"odoo_image_id\", \"=\", odoo_image[0].id)], [\"id\"])\n if not shopify_product_image:\n shopify_product_image_obj.create({\n \"odoo_image_id\": odoo_image[0].id,\n \"shopify_variant_id\": shopify_variant.id,\n \"shopify_template_id\": shopify_template_id,\n })\n if not log_book_id.log_lines:\n log_book_id.unlink()\n return {\n \"type\": \"ir.actions.client\",\n \"tag\": \"reload\",\n }",
"def test_case_customer_part_orders(self):\n pass",
"def test_get_order_items(self):\n pass",
"def orders(self, orders):\n\n self._orders = orders",
"def orders(self, orders):\n\n self._orders = orders",
"def export_order_status_button(cls, store_views):\n pass"
] | [
"0.8316281",
"0.7346102",
"0.6070537",
"0.5948057",
"0.5888158",
"0.58869237",
"0.5863658",
"0.5711296",
"0.56941754",
"0.5657132",
"0.55940354",
"0.55788964",
"0.55533653",
"0.5447958",
"0.5446187",
"0.54387885",
"0.53899795",
"0.53408515",
"0.53375185",
"0.5320202",
"0.52955246",
"0.5292636",
"0.5288003",
"0.52530926",
"0.52332926",
"0.52188206",
"0.5205434",
"0.5192606",
"0.5192606",
"0.51832175"
] | 0.7725665 | 1 |
Export shipment status for shipments related to the current store view. This method is called by cron. | def export_shipment_status(cls, store_views=None):
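        # Cron calls this without arguments, so default to every store view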
if store_views is None:
store_views = cls.search([])
for store_view in store_views:
# Set the instance in context
with Transaction().set_context(
magento_instance=store_view.instance.id
):
store_view.export_shipment_status_to_magento() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_shipment_status_to_magento(self):\n Shipment = Pool().get('stock.shipment.out')\n Sale = Pool().get('sale.sale')\n\n instance = self.instance\n\n sale_domain = [\n ('magento_store_view', '=', self.id),\n ('shipment_state', '=', 'sent'),\n ('magento_id', '!=', None),\n ('shipments', '!=', None),\n ]\n\n if self.last_shipment_export_time:\n sale_domain.append(\n ('write_date', '>=', self.last_shipment_export_time)\n )\n\n sales = Sale.search(sale_domain)\n\n self.last_shipment_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n # Get the increment id from the sale reference\n increment_id = sale.reference[\n len(instance.order_prefix): len(sale.reference)\n ]\n\n for shipment in sale.shipments:\n try:\n # Some checks to make sure that only valid shipments are\n # being exported\n if shipment.is_tracking_exported_to_magento or \\\n shipment.state not in ('packed', 'done') or \\\n shipment.magento_increment_id:\n sales.pop(sale)\n continue\n with magento.Shipment(\n instance.url, instance.api_user, instance.api_key\n ) as shipment_api:\n item_qty_map = {}\n for move in shipment.outgoing_moves:\n if isinstance(move.origin, SaleLine) \\\n and move.origin.magento_id:\n # This is done because there can be multiple\n # lines with the same product and they need\n # to be send as a sum of quanitities\n item_qty_map.setdefault(\n str(move.origin.magento_id), 0\n )\n item_qty_map[str(move.origin.magento_id)] += \\\n move.quantity\n shipment_increment_id = shipment_api.create(\n order_increment_id=increment_id,\n items_qty=item_qty_map\n )\n Shipment.write(list(sale.shipments), {\n 'magento_increment_id': shipment_increment_id,\n })\n\n if self.export_tracking_information and (\n shipment.tracking_number and shipment.carrier\n ):\n shipment.export_tracking_info_to_magento()\n except xmlrpclib.Fault, fault:\n if fault.faultCode == 102:\n # A shipment already exists for this order,\n # we cannot do anything about it.\n # Maybe it was already exported earlier or was created\n # separately on magento\n # Hence, just continue\n continue\n\n return sales",
"def export_order_status_for_store_view(self):\n Sale = Pool().get('sale.sale')\n\n exported_sales = []\n domain = [('magento_store_view', '=', self.id)]\n\n if self.last_order_export_time:\n domain = [('write_date', '>=', self.last_order_export_time)]\n\n sales = Sale.search(domain)\n\n self.last_order_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n exported_sales.append(sale.export_order_status_to_magento())\n\n return exported_sales",
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def export_order_status_button(cls, store_views):\n pass",
"def default_start(self, data):\n return {\n 'message': \"This wizard will export shipment status for all the \" +\n \"shipments related to this store view. To export tracking \" +\n \"information also for these shipments please check the \" +\n \"checkbox for Export Tracking Information on Store View.\"\n }",
"def shipmentDetails(request):\n order_id = request.GET.get('order_id')\n generate_request = oAuth_magento()\n\n payload = {\"searchCriteria[filter_groups][0][filters][0][field]\": \"increment_id\",\n \"searchCriteria[filter_groups][0][filters][0][value]\": order_id,\n \"searchCriteria[filter_groups][0][filters][0][conditionType]\": \"eq\",\n \"fields\": \"items[status,base_currency_code,grand_total,items[name,sku],extension_attributes[shipping_assignments[shipping[address[city,company,country_id,firstname,lastname,postcode,region,telephone]]]]]\",\n }\n response = requests.request(\"GET\", url=generate_request[0], headers=generate_request[1], params=payload)\n # with open('temp_files/magento_get_order_select.json','w') as f:\n # f.write(response.text)\n json_response = json.loads(response.text)\n context = {'result': json_response['items'][0]['extension_attributes']['shipping_assignments'][0]['shipping']['address'], \n 'status': json_response['items'][0]['status'],\n 'item_name': json_response['items'][0]['items'],\n 'price': json_response['items'][0]['base_currency_code'] + ' ' + str(json_response['items'][0]['grand_total']),\n }\n return JsonResponse(context)",
"def shipping_status(self):\n\n # As safeguard against identical timestamps, also sort by the primary\n # key. It's not recommended to rely on this behaviour, but in practice\n # reasonably safe if PKs are not manually set.\n events = self.shipping_events.order_by(\"-date_created\", \"-pk\").all()\n if not len(events):\n return \"\"\n\n # Collect all events by event-type\n event_map = {}\n for event in events:\n event_name = event.event_type.name\n if event_name not in event_map:\n event_map[event_name] = []\n event_map[event_name].extend(list(event.line_quantities.all()))\n\n # Determine last complete event\n status = _(\"In progress\")\n for event_name, event_line_quantities in event_map.items():\n if self._is_event_complete(event_line_quantities):\n return event_name\n return status",
"def mark_shipped(self):\n\n self.shipped = True\n # moved from DomesticMelonOrder",
"def test_get_shipment(self):\n pass",
"def ship():\n latest_log = get_latest_log(PATH_LOG_FILES)\n ship_status = {\n 'time': (datetime.now() - datetime.fromtimestamp(getmtime(latest_log))).seconds,\n 'status': None,\n 'type': None,\n 'location': None,\n 'star_class': None,\n 'target': None,\n 'fuel_capacity': None,\n 'fuel_level': None,\n 'fuel_percent': None,\n 'is_scooping': False,\n 'sys_fully_scanned': False\n }\n # Read log line by line and parse data\n with open(latest_log, encoding=\"utf-8\") as f:\n for line in f:\n log = loads(line)\n\n # parse data\n try:\n # parse ship status\n log_event = log['event']\n\n if log_event == 'StartJump':\n ship_status['status'] = str('starting_' + log['JumpType']).lower()\n ship_status['sys_fully_scanned'] = False\n if 'StarClass' in log:\n ship_status['star_class'] = log['StarClass']\n\n elif log_event == 'SupercruiseEntry' or log_event == 'FSDJump':\n ship_status['status'] = 'in_supercruise'\n\n elif log_event == 'SupercruiseExit' or log_event == 'DockingCancelled' or (\n log_event == 'Music' and ship_status['status'] == 'in_undocking') or (\n log_event == 'Location' and log['Docked'] is False):\n ship_status['status'] = 'in_space'\n\n elif log_event == 'Undocked':\n ship_status['status'] = 'in_space'\n\n elif log_event == 'DockingRequested':\n ship_status['status'] = 'starting_docking'\n\n elif log_event == \"Music\" and log['MusicTrack'] == \"DockingComputer\":\n if ship_status['status'] == 'starting_undocking':\n ship_status['status'] = 'in_undocking'\n elif ship_status['status'] == 'starting_docking':\n ship_status['status'] = 'in_docking'\n\n elif log_event == 'Docked':\n ship_status['status'] = 'in_station'\n\n elif log_event == 'FSSAllBodiesFound':\n ship_status['sys_fully_scanned'] = True\n\n # parse ship type\n if log_event == 'LoadGame' or log_event == 'Loadout':\n ship_status['type'] = log['Ship']\n\n # parse fuel\n if 'FuelLevel' in log and ship_status['type'] != 'TestBuggy':\n ship_status['fuel_level'] = log['FuelLevel']\n if 'FuelCapacity' in log and ship_status['type'] != 'TestBuggy':\n if type(log['FuelCapacity']) == float:\n ship_status['fuel_capacity'] = log['FuelCapacity']\n else:\n ship_status['fuel_capacity'] = log['FuelCapacity']['Main']\n\n if log_event == 'FuelScoop' and 'Total' in log:\n ship_status['fuel_level'] = log['Total']\n if ship_status['fuel_level'] and ship_status['fuel_capacity']:\n ship_status['fuel_percent'] = round(\n (ship_status['fuel_level'] / ship_status['fuel_capacity']) * 100)\n else:\n ship_status['fuel_percent'] = 10\n\n # parse scoop\n if log_event == 'FuelScoop' and ship_status['time'] < 10 and ship_status['fuel_percent'] < 100:\n ship_status['is_scooping'] = True\n else:\n ship_status['is_scooping'] = False\n\n # parse location\n if (log_event == 'Location' or log_event == 'FSDJump') and 'StarSystem' in log:\n ship_status['location'] = log['StarSystem']\n\n # parse target\n if log_event == 'FSDTarget':\n if log['Name'] == ship_status['location']:\n ship_status['target'] = None\n else:\n ship_status['target'] = log['Name']\n elif log_event == 'FSDJump':\n if ship_status['location'] == ship_status['target']:\n ship_status['target'] = None\n\n # exceptions\n except Exception as trace:\n logger.exception(\"Exception occurred: {}\".format(trace))\n # logger.debug('ship='+str(ship))\n return ship_status",
"def export(self):\n rpt_date = datetime.now()\n filename = 'bushfire_regionbytenure_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def action_ship_create(self):\n res = super(SaleOrder, self).action_ship_create()\n for sale_order in self:\n if sale_order.invoiced:\n sale_order.picking_ids.write({'x_is_paid': True})\n return res",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_indicator_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_by_tenure_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def woo_sale_report(self):\n version_info = odoo.service.common.exp_version()\n if version_info.get('server_version') == '14.0':\n action = self.env.ref('woo_commerce_ept.woo_action_order_report_all').read()[0]\n else:\n action = self.env.ref('woo_commerce_ept.woo_sale_report_action_dashboard').read()[0]\n\n return action",
"def export_outstanding_fires(request, region_id, queryset):\n #regions = Region.objects.filter(id=region_id) if region_id else Region.objects.all()\n regions = Region.objects.filter(id=region_id) if region_id else Region.objects.filter(dbca=True)\n region_name = regions[0].name if region_id else 'All-Regions'\n\n rpt_date = datetime.now()\n filename = 'outstanding_fires_{}_{}.xls'.format(region_name, rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n for region in regions:\n outstanding_fires(book, region, queryset, rpt_date)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def ship_mstone(request):\n if (request.method == \"POST\" and\n 'ms' in request.POST and\n request.user.has_perm('shipping.can_ship')):\n try:\n mstone = Milestone.objects.get(code=request.POST['ms'])\n # get current signoffs\n cs = _signoffs(mstone).values_list('id', flat=True)\n mstone.signoffs.add(*list(cs)) # add them\n mstone.status = 2\n # XXX create event\n mstone.save()\n except:\n pass\n return HttpResponseRedirect(reverse('shipping.views.milestones'))",
"def shipping_status(self):\n status_map = self.shipping_event_breakdown\n if not status_map:\n return \"\"\n\n events = []\n last_complete_event_name = None\n for event_dict in reversed(list(status_map.values())):\n if event_dict[\"quantity\"] == self.quantity:\n events.append(event_dict[\"name\"])\n last_complete_event_name = event_dict[\"name\"]\n else:\n events.append(\n \"%s (%d/%d items)\"\n % (event_dict[\"name\"], event_dict[\"quantity\"], self.quantity)\n )\n\n if last_complete_event_name == list(status_map.values())[0][\"name\"]:\n return last_complete_event_name\n\n return \", \".join(events)",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.ministerial.get_excel_sheet(rpt_date, book)\n self.ministerial_auth.get_excel_sheet(rpt_date, book)\n self.ministerial_268.get_excel_sheet(rpt_date, book)\n self.quarterly.get_excel_sheet(rpt_date, book)\n self.by_tenure.get_excel_sheet(rpt_date, book)\n self.by_cause.get_excel_sheet(rpt_date, book)\n self.region_by_tenure.get_excel_sheet(rpt_date, book)\n self.indicator.get_excel_sheet(rpt_date, book)\n self.by_cause_10YrAverage.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 1')\n book.save(response)\n\n return response",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'ministerial_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def export(ctx):\n LOG.info(\"Running scout export\")",
"def test_create_shipment(self):\n pass",
"def export(self, exporter: model.Exporter, translations: model.Exporter) -> None:\n\n for station in filter(attrgetter(\"used\"), self.by_id.values()):\n station.export(exporter, translations)",
"def save_shipment_detail(shipment_detail):\n shipment = Shipment.objects.filter(shipment_id=shipment_detail['shipmentId']).first()\n\n # Update shipment\n shipment.pickup_point = shipment_detail.get('pickupPoint')\n shipment.shipment_reference = shipment_detail.get('shipmentReference')\n shipment.billing_details = json.dumps(shipment_detail.get('billingDetails'))\n shipment.customer_details = json.dumps(shipment_detail.get('customerDetails'))\n shipment.detail_fetched = True\n shipment.save()\n\n # Create Transport if not created\n transport_obj, create = Transport.objects.get_or_create(\n shipment=shipment, transport_id=shipment_detail['transport']['transportId']\n )\n transport = shipment_detail['transport']\n\n transport_obj.shipping_label_id = transport.get('shippingLabelId')\n transport_obj.shipping_label_code = transport.get('shippingLabelCode')\n transport_obj.transport_id = transport.get('transportId')\n transport_obj.transporter_code = transport.get('transporterCode')\n transport_obj.track_and_trace = transport.get('trackAndTrace')\n transport_obj.shipment = shipment\n transport_obj.save()\n\n for item in shipment_detail['shipmentItems']:\n # Create order if not created\n order, created = Order.objects.get_or_create(order_id=item.get('orderId'), order_date=item.get('orderDate'))\n\n # Create OrderItem\n order_item_obj = {\n 'order': order,\n 'order_item_id': item.get('orderItemId'),\n 'ean': item.get('ean'),\n 'title': item.get('title'),\n 'quantity': item.get('quantity'),\n 'offer_price': item.get('offerPrice'),\n 'offer_condition': item.get('offerCondition'),\n 'offer_reference': item.get('offerReference')\n }\n order_item = OrderItem.objects.create(**order_item_obj)\n\n # Create ShipmentItem\n shipment_item_obj = {\n 'shipment': shipment,\n 'order': order,\n 'order_item': order_item,\n 'latest_delivery_date': item.get('latestDeliveryDate'),\n 'fulfilment_method': item.get('fulfilmentMethod')\n }\n ShipmentItem.objects.create(**shipment_item_obj)",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'ministerial_268_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def save_shipment_list(seller_id, shipment_list):\n shipment_ids = [obj['shipmentId'] for obj in shipment_list]\n saved_shipments = Shipment.objects.filter(\n seller_id=seller_id, shipment_id__in=shipment_ids\n ).values_list('shipment_id', flat=True)\n\n new_shipment_ids = set(shipment_ids) - set(saved_shipments)\n new_shipments = filter(lambda x: x['shipmentId'] in new_shipment_ids, shipment_list)\n\n new_shipment_obj_list = []\n for shipment in new_shipments:\n shipment_dict = {\n 'seller_id': seller_id,\n 'shipment_id': shipment.get('shipmentId'),\n 'shipment_date': shipment.get('shipmentDate')\n }\n new_shipment_obj_list.append(Shipment(**shipment_dict))\n \n try:\n Shipment.objects.bulk_create(new_shipment_obj_list)\n except Exception as e:\n print(e)",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_by_cause_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response",
"def export(self, queryset=None):\n self.queryset = queryset or self.queryset\n exported_datetime = get_utcnow()\n filename = self.get_filename(exported_datetime)\n path = os.path.join(self.export_folder, filename)\n with open(path, 'w') as f:\n csv_writer = csv.DictWriter(\n f, fieldnames=self.field_names, delimiter=self.delimiter)\n csv_writer.writeheader()\n for model_obj in self.queryset:\n object_helper = self.object_history_helper_cls(\n model_obj=model_obj, create=True)\n objects = object_helper.get_not_exported()\n for obj in objects:\n row = self.prepare_row(\n model_obj=model_obj,\n exported_datetime=exported_datetime,\n export_change_type=obj.export_change_type)\n csv_writer.writerow(row)\n object_helper.update_as_exported(\n objects=objects, exported_datetime=exported_datetime)\n file_history_updater = self.file_history_updater_cls(\n path=path,\n delimiter=self.delimiter,\n model=self.model_cls._meta.label_lower,\n filename=filename)\n file_history_updater.update()\n return path",
"def send_shipments(self):\n return SendIntensity(self)",
"def export(self):\n\n rpt_date = datetime.now()\n filename = 'bushfire_by_cause_10yr_average_report_{}.xls'.format(rpt_date.strftime('%d%b%Y'))\n response = HttpResponse(content_type='application/vnd.ms-excel')\n response['Content-Disposition'] = 'attachment; filename=' + filename\n\n book = Workbook()\n self.get_excel_sheet(rpt_date, book)\n\n book.add_sheet('Sheet 2')\n book.save(response)\n\n return response"
] | [
"0.8125076",
"0.7221847",
"0.66516876",
"0.62769276",
"0.60418105",
"0.5811156",
"0.5546291",
"0.55305487",
"0.55091906",
"0.5489711",
"0.54192644",
"0.54172236",
"0.53858155",
"0.53763556",
"0.53559977",
"0.53334343",
"0.53119606",
"0.5288046",
"0.52613235",
"0.5238616",
"0.52087057",
"0.5203891",
"0.5203069",
"0.520233",
"0.5192584",
"0.5177681",
"0.51715094",
"0.5154892",
"0.51236093",
"0.50670713"
] | 0.8081001 | 1 |
Exports shipment status to Magento for shipments that have been shipped | def export_shipment_status_to_magento(self):
Shipment = Pool().get('stock.shipment.out')
Sale = Pool().get('sale.sale')
instance = self.instance
sale_domain = [
('magento_store_view', '=', self.id),
('shipment_state', '=', 'sent'),
('magento_id', '!=', None),
('shipments', '!=', None),
]
if self.last_shipment_export_time:
sale_domain.append(
('write_date', '>=', self.last_shipment_export_time)
)
sales = Sale.search(sale_domain)
self.last_shipment_export_time = datetime.utcnow()
self.save()
for sale in sales:
# Get the increment id from the sale reference
increment_id = sale.reference[
len(instance.order_prefix): len(sale.reference)
]
for shipment in sale.shipments:
try:
# Some checks to make sure that only valid shipments are
# being exported
if shipment.is_tracking_exported_to_magento or \
shipment.state not in ('packed', 'done') or \
shipment.magento_increment_id:
sales.pop(sale)
continue
with magento.Shipment(
instance.url, instance.api_user, instance.api_key
) as shipment_api:
item_qty_map = {}
for move in shipment.outgoing_moves:
if isinstance(move.origin, SaleLine) \
and move.origin.magento_id:
# This is done because there can be multiple
# lines with the same product and they need
                                # to be sent as a sum of quantities
item_qty_map.setdefault(
str(move.origin.magento_id), 0
)
item_qty_map[str(move.origin.magento_id)] += \
move.quantity
shipment_increment_id = shipment_api.create(
order_increment_id=increment_id,
items_qty=item_qty_map
)
Shipment.write(list(sale.shipments), {
'magento_increment_id': shipment_increment_id,
})
if self.export_tracking_information and (
shipment.tracking_number and shipment.carrier
):
shipment.export_tracking_info_to_magento()
except xmlrpclib.Fault, fault:
if fault.faultCode == 102:
# A shipment already exists for this order,
# we cannot do anything about it.
# Maybe it was already exported earlier or was created
# separately on magento
# Hence, just continue
continue
return sales | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_shipment_status(cls, store_views=None):\n if store_views is None:\n store_views = cls.search([])\n\n for store_view in store_views:\n # Set the instance in context\n with Transaction().set_context(\n magento_instance=store_view.instance.id\n ):\n store_view.export_shipment_status_to_magento()",
"def export_order_status_for_store_view(self):\n Sale = Pool().get('sale.sale')\n\n exported_sales = []\n domain = [('magento_store_view', '=', self.id)]\n\n if self.last_order_export_time:\n domain = [('write_date', '>=', self.last_order_export_time)]\n\n sales = Sale.search(domain)\n\n self.last_order_export_time = datetime.utcnow()\n self.save()\n\n for sale in sales:\n exported_sales.append(sale.export_order_status_to_magento())\n\n return exported_sales",
"def action_ship_create(self):\n res = super(SaleOrder, self).action_ship_create()\n for sale_order in self:\n if sale_order.invoiced:\n sale_order.picking_ids.write({'x_is_paid': True})\n return res",
"def mark_shipped(self):\n\n self.shipped = True\n # moved from DomesticMelonOrder",
"def shipmentDetails(request):\n order_id = request.GET.get('order_id')\n generate_request = oAuth_magento()\n\n payload = {\"searchCriteria[filter_groups][0][filters][0][field]\": \"increment_id\",\n \"searchCriteria[filter_groups][0][filters][0][value]\": order_id,\n \"searchCriteria[filter_groups][0][filters][0][conditionType]\": \"eq\",\n \"fields\": \"items[status,base_currency_code,grand_total,items[name,sku],extension_attributes[shipping_assignments[shipping[address[city,company,country_id,firstname,lastname,postcode,region,telephone]]]]]\",\n }\n response = requests.request(\"GET\", url=generate_request[0], headers=generate_request[1], params=payload)\n # with open('temp_files/magento_get_order_select.json','w') as f:\n # f.write(response.text)\n json_response = json.loads(response.text)\n context = {'result': json_response['items'][0]['extension_attributes']['shipping_assignments'][0]['shipping']['address'], \n 'status': json_response['items'][0]['status'],\n 'item_name': json_response['items'][0]['items'],\n 'price': json_response['items'][0]['base_currency_code'] + ' ' + str(json_response['items'][0]['grand_total']),\n }\n return JsonResponse(context)",
"def export_order_status(self, store_views=None):\n if store_views is None:\n store_views = self.search([])\n\n for store_view in store_views:\n store_view.export_order_status_for_store_view()",
"def test_get_shipment(self):\n pass",
"def default_start(self, data):\n return {\n 'message': \"This wizard will export shipment status for all the \" +\n \"shipments related to this store view. To export tracking \" +\n \"information also for these shipments please check the \" +\n \"checkbox for Export Tracking Information on Store View.\"\n }",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def mark_shipped(self):\n\n self.shipped = True",
"def export_order_status_button(cls, store_views):\n pass",
"def test_create_shipment(self):\n pass",
"def shipping_status(self):\n\n # As safeguard against identical timestamps, also sort by the primary\n # key. It's not recommended to rely on this behaviour, but in practice\n # reasonably safe if PKs are not manually set.\n events = self.shipping_events.order_by(\"-date_created\", \"-pk\").all()\n if not len(events):\n return \"\"\n\n # Collect all events by event-type\n event_map = {}\n for event in events:\n event_name = event.event_type.name\n if event_name not in event_map:\n event_map[event_name] = []\n event_map[event_name].extend(list(event.line_quantities.all()))\n\n # Determine last complete event\n status = _(\"In progress\")\n for event_name, event_line_quantities in event_map.items():\n if self._is_event_complete(event_line_quantities):\n return event_name\n return status",
"def save_shipment_list(seller_id, shipment_list):\n shipment_ids = [obj['shipmentId'] for obj in shipment_list]\n saved_shipments = Shipment.objects.filter(\n seller_id=seller_id, shipment_id__in=shipment_ids\n ).values_list('shipment_id', flat=True)\n\n new_shipment_ids = set(shipment_ids) - set(saved_shipments)\n new_shipments = filter(lambda x: x['shipmentId'] in new_shipment_ids, shipment_list)\n\n new_shipment_obj_list = []\n for shipment in new_shipments:\n shipment_dict = {\n 'seller_id': seller_id,\n 'shipment_id': shipment.get('shipmentId'),\n 'shipment_date': shipment.get('shipmentDate')\n }\n new_shipment_obj_list.append(Shipment(**shipment_dict))\n \n try:\n Shipment.objects.bulk_create(new_shipment_obj_list)\n except Exception as e:\n print(e)",
"def test_get_eligible_shipment_services(self):\n pass",
"def shipping_status(self):\n status_map = self.shipping_event_breakdown\n if not status_map:\n return \"\"\n\n events = []\n last_complete_event_name = None\n for event_dict in reversed(list(status_map.values())):\n if event_dict[\"quantity\"] == self.quantity:\n events.append(event_dict[\"name\"])\n last_complete_event_name = event_dict[\"name\"]\n else:\n events.append(\n \"%s (%d/%d items)\"\n % (event_dict[\"name\"], event_dict[\"quantity\"], self.quantity)\n )\n\n if last_complete_event_name == list(status_map.values())[0][\"name\"]:\n return last_complete_event_name\n\n return \", \".join(events)",
"def test_get_eligible_shipment_services_old(self):\n pass",
"def ship():\n latest_log = get_latest_log(PATH_LOG_FILES)\n ship_status = {\n 'time': (datetime.now() - datetime.fromtimestamp(getmtime(latest_log))).seconds,\n 'status': None,\n 'type': None,\n 'location': None,\n 'star_class': None,\n 'target': None,\n 'fuel_capacity': None,\n 'fuel_level': None,\n 'fuel_percent': None,\n 'is_scooping': False,\n 'sys_fully_scanned': False\n }\n # Read log line by line and parse data\n with open(latest_log, encoding=\"utf-8\") as f:\n for line in f:\n log = loads(line)\n\n # parse data\n try:\n # parse ship status\n log_event = log['event']\n\n if log_event == 'StartJump':\n ship_status['status'] = str('starting_' + log['JumpType']).lower()\n ship_status['sys_fully_scanned'] = False\n if 'StarClass' in log:\n ship_status['star_class'] = log['StarClass']\n\n elif log_event == 'SupercruiseEntry' or log_event == 'FSDJump':\n ship_status['status'] = 'in_supercruise'\n\n elif log_event == 'SupercruiseExit' or log_event == 'DockingCancelled' or (\n log_event == 'Music' and ship_status['status'] == 'in_undocking') or (\n log_event == 'Location' and log['Docked'] is False):\n ship_status['status'] = 'in_space'\n\n elif log_event == 'Undocked':\n ship_status['status'] = 'in_space'\n\n elif log_event == 'DockingRequested':\n ship_status['status'] = 'starting_docking'\n\n elif log_event == \"Music\" and log['MusicTrack'] == \"DockingComputer\":\n if ship_status['status'] == 'starting_undocking':\n ship_status['status'] = 'in_undocking'\n elif ship_status['status'] == 'starting_docking':\n ship_status['status'] = 'in_docking'\n\n elif log_event == 'Docked':\n ship_status['status'] = 'in_station'\n\n elif log_event == 'FSSAllBodiesFound':\n ship_status['sys_fully_scanned'] = True\n\n # parse ship type\n if log_event == 'LoadGame' or log_event == 'Loadout':\n ship_status['type'] = log['Ship']\n\n # parse fuel\n if 'FuelLevel' in log and ship_status['type'] != 'TestBuggy':\n ship_status['fuel_level'] = log['FuelLevel']\n if 'FuelCapacity' in log and ship_status['type'] != 'TestBuggy':\n if type(log['FuelCapacity']) == float:\n ship_status['fuel_capacity'] = log['FuelCapacity']\n else:\n ship_status['fuel_capacity'] = log['FuelCapacity']['Main']\n\n if log_event == 'FuelScoop' and 'Total' in log:\n ship_status['fuel_level'] = log['Total']\n if ship_status['fuel_level'] and ship_status['fuel_capacity']:\n ship_status['fuel_percent'] = round(\n (ship_status['fuel_level'] / ship_status['fuel_capacity']) * 100)\n else:\n ship_status['fuel_percent'] = 10\n\n # parse scoop\n if log_event == 'FuelScoop' and ship_status['time'] < 10 and ship_status['fuel_percent'] < 100:\n ship_status['is_scooping'] = True\n else:\n ship_status['is_scooping'] = False\n\n # parse location\n if (log_event == 'Location' or log_event == 'FSDJump') and 'StarSystem' in log:\n ship_status['location'] = log['StarSystem']\n\n # parse target\n if log_event == 'FSDTarget':\n if log['Name'] == ship_status['location']:\n ship_status['target'] = None\n else:\n ship_status['target'] = log['Name']\n elif log_event == 'FSDJump':\n if ship_status['location'] == ship_status['target']:\n ship_status['target'] = None\n\n # exceptions\n except Exception as trace:\n logger.exception(\"Exception occurred: {}\".format(trace))\n # logger.debug('ship='+str(ship))\n return ship_status",
"def send_shipments(self):\n return SendIntensity(self)",
"def ArchiveStatus(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def check_if_dropship(doc,method):\n\n\tmr_list = []\n\tconditions = \"\"\n\tdairy = frappe.db.get_value(\"Company\",{\"is_dairy\":1},\"name\")\n\tuser_doc = frappe.db.get_value(\"User\",{\"name\":frappe.session.user},['operator_type','company'], as_dict =1)\n\tco = frappe.db.get_value(\"Village Level Collection Centre\",{\"name\":user_doc.get('company')},\"camp_office\")\n\n\tif user_doc.get(\"operator_type\") == 'Chilling Centre' and not doc.flags.is_api:\n\t\tfor item in doc.items:\n\t\t\tif item.material_request:\n\t\t\t\tmr_list.append(str(item.material_request))\n\n\t\tif mr_list:\n\t\t\tconditions = \"and pi.material_request = '{0}'\".format(mr_list[0]) if len(mr_list) == 1 else \"and pi.material_request in {0}\".format(tuple(mr_list))\n\n\t\t#check PO with dropship\n\t\tif conditions:\n\t\t\tpo = frappe.db.sql(\"\"\"select p.name,pi.material_request from `tabPurchase Order` p,`tabPurchase Order Item` pi where p.company = '{0}' \n\t\t\t\t\t\t\t{1} and p.docstatus = 1 and p.name = pi.parent and p.is_dropship = 1 group by pi.material_request\"\"\".format(dairy,conditions),as_dict=1)\n\t\t\tif po:\n\t\t\t\tpo_data = [data.get('name') for data in po]\n\n\t\t\t\tfor data in set(po_data):\n\t\t\t\t\tpo_doc = frappe.get_doc(\"Purchase Order\",data)\n\n\t\t\t\t\tpi = make_pi_against_localsupp(po_doc,doc)\n\t\t\t\t\tpr = make_pr_against_localsupp(po_doc,doc)\t\t\n\t\t\t\t\n\t\t\t\tif pi:\n\t\t\t\t\tpi.flags.ignore_permissions = True \t\t\n\t\t\t\t\tpi.save()\n\t\t\t\t\tpi.submit()\n\n\t\t\t\t# mi_status_update(doc)",
"def ship_mstone(request):\n if (request.method == \"POST\" and\n 'ms' in request.POST and\n request.user.has_perm('shipping.can_ship')):\n try:\n mstone = Milestone.objects.get(code=request.POST['ms'])\n # get current signoffs\n cs = _signoffs(mstone).values_list('id', flat=True)\n mstone.signoffs.add(*list(cs)) # add them\n mstone.status = 2\n # XXX create event\n mstone.save()\n except:\n pass\n return HttpResponseRedirect(reverse('shipping.views.milestones'))",
"def test_livereport_saving(shipping, shipping_group, tmpdir):\n\n outfile = str(tmpdir.join(\"out.hdf5\"))\n\n domain, _cloud = shipping\n slug = 'b--0001-0000-0000-04e7'\n\n CloudSession(user='[email protected]', password='test', domain=domain, verify=False)\n retval = main(['-t', 'save_hdf5', slug, '-d', domain, '-o', outfile, '--no-verify', '-c'])\n assert retval == 0\n\n assert os.path.isfile(outfile)\n\n ingroup = AnalysisGroup.FromSaved(outfile, 'hdf5')\n assert ingroup.stream_counts == shipping_group.stream_counts"
] | [
"0.72959864",
"0.65770334",
"0.61950433",
"0.61917406",
"0.5866233",
"0.5862453",
"0.5841632",
"0.57592666",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5728388",
"0.5726259",
"0.5599219",
"0.5533492",
"0.5525183",
"0.54825884",
"0.541027",
"0.53540915",
"0.53399855",
"0.53335786",
"0.5316825",
"0.5225702",
"0.52226394",
"0.5222265"
] | 0.83704627 | 0 |
Helper method to get the current store view. | def get_current_store_view(cls):
return cls(Transaction().context.get('magento_store_view')) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_view(self):\n return self.view",
"def view(self) -> str:\n return pulumi.get(self, \"view\")",
"def _get_store(self):\n return self._store",
"def GetView(self):\r\n return self.model.GetView()",
"def View(self):\n return self._view",
"def getCurrentView(self):\n if self.viewFrame._expanded == -1: #four view\n self.currentView = None\n if self.viewFrame._expanded == 0: #top view\n self.currentView = self.topView\n if self.viewFrame._expanded == 1: #front view\n self.currentView = self.frontView\n if self.viewFrame._expanded == 2: #left view\n self.currentView = self.leftView\n if self.viewFrame._expanded == 3: #perspect view\n self.currentView = self.perspView\n\n return self.currentView",
"def view(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"view\")",
"def get_current():\n return getattr(_request_store, 'context', None)",
"def view(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"view\")",
"def get_view ( self, object ):\n return self.view",
"def getCurrentView(self):\n\n\n currentTab = self.tabWidget.currentWidget()\n\n View = currentTab.findChild(QtWidgets.QTableView)\n\n return View",
"def view_type(self):\n return self.container['view_type']",
"def view(self):\n return self._view_class(self)",
"def get_store(store_name: str):\n return store_handler.get_store(store_name)",
"def view_name(self):\n return self._view_name",
"def get_store(self, store_name: str) -> Any:\n pass",
"def view(self) -> 'outputs.ViewDefinitionResponse':\n return pulumi.get(self, \"view\")",
"def storelocator():\n\n\treturn render_template(\"storelocator.html\")",
"def get_view(self):\n from .views import default_page_view\n return (self.get_custom_view() or\n self.get_urlpattern_view() or\n default_page_view)",
"def log_store(self) -> Optional[str]:\n return pulumi.get(self, \"log_store\")",
"def log_store(self) -> str:\n return pulumi.get(self, \"log_store\")",
"def view( self, REQUEST=None ):\n REQUEST = REQUEST or self.REQUEST\n return self.storage(REQUEST)",
"def __get_store(self) -> Optional[Store]:\n\n if self.lvp == LVP.COBOL_TO_CSHARP_9:\n return COBOLToCSharp9Store(self.template_processor, self.veil)\n\n return None",
"def get_store(request):\n storedb = redis.Redis(host=HOST, db=STOREDB)\n return storedb.lrange(\"store\",0,-1)",
"def _viewer_by_reference(self, reference):\n viewer_item = self._viewer_item_by_reference(reference)\n\n return self._viewer_store[viewer_item['id']]",
"def my_view(cls):\n return cls.__my_view",
"def getCurrentPanda(self):\n\n\n view = self.getCurrentView()\n\n if view != None:\n panda = view.model()\n return panda\n else:\n return None",
"def get_store_info(store_name: str):\n return store_handler.get_store_info(store_name)",
"def get_current_site(cls):\n return cls.get_by_key_name(cls.the_key_name)",
"def viewer(self):\n return self.parent"
] | [
"0.69337434",
"0.6776546",
"0.6570207",
"0.644002",
"0.636768",
"0.6339466",
"0.63136166",
"0.63065296",
"0.6254641",
"0.6231597",
"0.622977",
"0.6139501",
"0.60650826",
"0.5972074",
"0.59618914",
"0.59374434",
"0.5819738",
"0.57517654",
"0.5725532",
"0.5725355",
"0.56592387",
"0.5641603",
"0.562698",
"0.5623491",
"0.55970657",
"0.5591167",
"0.5590058",
"0.55500835",
"0.5494114",
"0.5486414"
] | 0.8440489 | 0 |
Import the websites, stores and store views and show the user a confirmation message | def default_start(self, data):
return {
'message': "This wizard has imported all the websites for this " +
"magento instance. It has also imported all the stores and " +
"store views related to the websites imported. If any of " +
"the records existed already, it wont be imported."
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def home(request):\n rel_sup_csv_form = RelationSupportCSVDatasetForm()\n queries_csv_form = QueriesCSVDatasetForm()\n text_file_form = TextDatasetForm()\n article_url_form = NewsArticleURLForm()\n ind_sent_form = IndividualSentenceForm()\n html_files_form = HTMLFilesForm()\n data = []\n for i in results:\n data.append({'sentence':i['sentence'], 'head':i['head'], 'tail':i['tail'], 'pred_relation':i['pred_relation'], 'pred_sentiment':i['sent'], 'conf':i['conf'], 'dataset':i['rel_sup_ind']})\n proj_path = os.path.abspath(os.path.dirname(__file__)).split(\"FYP_Web_App\")[0]\n ckpts = [f for f in os.listdir(proj_path + \"FewRel/checkpoint\") if '.pth.tar' in f][::-1]\n _get_sup_relations(request.user)\n rel_sup_datasets = []\n for i in range(len(nk_stat)):\n rel_sup_datasets.append({'i':i+1, 'sup_relations': sup_relations[i], 'nk_stat': nk_stat[i]})\n \n analysis_status = \"Not Running\"\n if currently_analyzing:\n analysis_status = \"Running\"\n analysis_user_text = \"\"\n if analysis_user is not None:\n analysis_user_text = analysis_user\n return render(request, 'home.html', {'rel_sup_csv_form': rel_sup_csv_form, 'queries_csv_form': queries_csv_form, 'text_file_form': text_file_form, 'article_url_form': article_url_form, 'ind_sent_form':ind_sent_form, 'data': data, 'ckpts': ckpts, 'rel_sup_datasets': rel_sup_datasets, 'html_files_form':html_files_form, 'num_results':len(data), 'analysis_status':analysis_status, 'analysis_user':analysis_user_text})",
"def web_index():\n\n try:\n auth_check()\n except Exception as e:\n return flask.redirect(str(e))\n\n db_update_archives()\n\n return flask.redirect('videos')",
"def dev_view(request):\n if request.user.is_authenticated() and request.user.username == \"tola\" and request.user.is_staff:\n from tola.tables_sync import update_level1, update_level2\n # update TolaTables with WorkflowLevel1 and WorkflowLevel2 data\n message = {\"attempt\": \"Running Tables Loader\"}\n\n print \"Running Script...\"\n\n try:\n update_level1()\n message['level1'] = \"Level1 Success\"\n except Exception as e:\n print '%s (%s)' % (e.message, type(e))\n message['level1'] = '%s (%s)' % (e.message, type(e))\n\n try:\n update_level2()\n message['level2'] = \"Level2 Success\"\n except Exception as e:\n print '%s (%s)' % (e.message, type(e))\n message['level2'] = '%s (%s)' % (e.message, type(e))\n\n return render(request, \"dev.html\", {'message': message})\n else:\n # log person\n print request.user.is_authenticated()\n print request.user.username\n print request.user.is_staff\n redirect_url = '/'\n return HttpResponseRedirect(redirect_url)",
"def home():\n # Upload file button selected\n if request.method == 'POST':\n # check if the post request has the file part\n if 'file' not in request.files:\n flash('No file part')\n return redirect(request.url)\n file = request.files['file']\n # if user does not select file, browser also\n # submit an empty part without filename\n if file.filename == '':\n flash('No selected file')\n return redirect(request.url)\n if file and allowed_file(file.filename): # if file's type is allowed, then secure filename and save it to the project res folder.\n filename = secure_filename(file.filename)\n videoFilePath = os.path.join(\"static\", filename) # make videos filePath for saving it and then sending it to ETL\n file.save(videoFilePath) # save uploaded video to the project's res folder for ETL to extract\n #ETL(videoFromUI=videoFilePath) # send uploaded video's file path to ETL to begin processing.\n \n #os.system('cd ../../bin/ && ./launch.sh | tee launch_output.txt &') # run launch command and save results to text file\n \n \n messages = json.dumps({\"filename\":file.filename}) #messages is string of json\n session['messages'] = messages #store messages in session\n\n return redirect(url_for('displayAnalysisResults'))\n return render_template(\"home.html\")",
"def import_websites(cls, instances):\n Website = Pool().get('magento.instance.website')\n Store = Pool().get('magento.website.store')\n StoreView = Pool().get('magento.store.store_view')\n MagentoOrderState = Pool().get('magento.order_state')\n\n try:\n instance, = instances\n except ValueError:\n cls.raise_user_error('multiple_instances')\n\n with Transaction().set_context(magento_instance=instance.id):\n\n # Import order states\n with OrderConfig(\n instance.url, instance.api_user, instance.api_key\n ) as order_config_api:\n MagentoOrderState.create_all_using_magento_data(\n order_config_api.get_states()\n )\n\n # Import websites\n with Core(\n instance.url, instance.api_user, instance.api_key\n ) as core_api:\n websites = []\n stores = []\n\n mag_websites = core_api.websites()\n\n # Create websites\n for mag_website in mag_websites:\n websites.append(Website.find_or_create(\n instance, mag_website\n ))\n\n for website in websites:\n mag_stores = core_api.stores(\n {'website_id': {'=': website.magento_id}}\n )\n\n # Create stores\n for mag_store in mag_stores:\n stores.append(Store.find_or_create(website, mag_store))\n\n for store in stores:\n mag_store_views = core_api.store_views(\n {'group_id': {'=': store.magento_id}}\n )\n\n # Create store views\n for mag_store_view in mag_store_views:\n store_view = StoreView.find_or_create(\n store, mag_store_view\n )\n # AR refactoring\n store_view.save()",
"def jobs_import_view():\n import_job()\n return response.redirect(request.app.url_for('jobs'))",
"def db_update_files():\n _populate_table_files(File)\n _populate_table_files(Software)\n return redirect(url_for('view_index'))",
"def update_views():\n # replace Supervisor main entry\n here = path.abspath(path.dirname(__file__))\n # set main page\n VIEWS['index.html'] = {'template': path.join(here, 'ui/index.html'), 'view': SupvisorsView}\n # set address /processpage\n VIEWS['procaddress.html'] = {'template': path.join(here, 'ui/procaddress.html'), 'view': ProcAddressView}\n # set address/host page\n VIEWS['hostaddress.html'] = {'template': path.join(here, 'ui/hostaddress.html'), 'view': HostAddressView}\n # set application page\n VIEWS['application.html'] = {'template': path.join(here, 'ui/application.html'), 'view': ApplicationView}\n # set fake page to export images\n VIEWS['process_cpu.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': ProcessCpuImageView}\n VIEWS['process_mem.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': ProcessMemoryImageView}\n VIEWS['address_cpu.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressCpuImageView}\n VIEWS['address_mem.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressMemoryImageView}\n VIEWS['address_io.png'] = {'template': path.join(here, 'ui/empty.html'), 'view': AddressNetworkImageView}",
"def on_new_site(self, files):\n init_index()",
"def import_click(self):\n path = self.path.get()\n if not path:\n self.error_label.config(text='Alege baza de date.')\n return\n\n password = self.pass_entry.get()\n if not password:\n self.error_label.config(text='Introdu parola.')\n return\n\n try:\n sharing.import_database(self.us, path, password)\n self.error_label.config(text='Am importat baza de date cu succes.')\n except InvalidToken:\n self.error_label.config(text='Parolă incorectă.')",
"def runSiteAutomation(self, webretrievedelay, proxy, targetlist, sourcelist,\n useragent, botoutputrequested, refreshremotexml, versionlocation):\n if refreshremotexml:\n SitesFile.updateTekDefenseXMLTree(proxy, self._verbose)\n\n remotesitetree = SitesFile.getXMLTree(__TEKDEFENSEXML__, self._verbose)\n localsitetree = SitesFile.getXMLTree(__SITESXML__, self._verbose)\n\n if not localsitetree and not remotesitetree:\n print ('Unfortunately there is neither a {tekd} file nor a {sites} file that can be utilized for proper' \\\n ' parsing.\\nAt least one configuration XML file must be available for Automater to work properly.\\n' \\\n 'Please see {url} for further instructions.'\\\n .format(tekd=__TEKDEFENSEXML__, sites=__SITESXML__, url=versionlocation))\n else:\n if localsitetree:\n for siteelement in localsitetree.iter(tag=\"site\"):\n if self.siteEntryIsValid(siteelement):\n for targ in targetlist:\n for source in sourcelist:\n sitetypematch, targettype, target = self.getSiteInfoIfSiteTypesMatch(source, targ,\n siteelement)\n if sitetypematch:\n self.buildSiteList(siteelement, webretrievedelay, proxy, targettype, target,\n useragent, botoutputrequested)\n else:\n print ('A problem was found in the {sites} file. There appears to be a site entry with ' \\\n 'unequal numbers of regexs and reporting requirements'.format(sites=__SITESXML__))\n if remotesitetree:\n for siteelement in remotesitetree.iter(tag=\"site\"):\n if self.siteEntryIsValid(siteelement):\n for targ in targetlist:\n for source in sourcelist:\n sitetypematch, targettype, target = self.getSiteInfoIfSiteTypesMatch(source, targ,\n siteelement)\n if sitetypematch:\n self.buildSiteList(siteelement, webretrievedelay, proxy, targettype, target,\n useragent, botoutputrequested)\n else:\n print ('A problem was found in the {sites} file. There appears to be a site entry with ' \\\n 'unequal numbers of regexs and reporting requirements'.format(sites=__SITESXML__))",
"def update_views():\n # replace Supervisor main entry\n here = os.path.abspath(os.path.dirname(__file__))\n # set main page\n VIEWS['index.html'] = {'template': os.path.join(here, 'ui/index.html'),\n 'view': SupvisorsView}\n # set address /processpage\n VIEWS['procaddress.html'] = {'template': os.path.join(\n here, 'ui/procaddress.html'),\n 'view': ProcAddressView}\n # set address/host page\n VIEWS['hostaddress.html'] = {'template': os.path.join(\n here, 'ui/hostaddress.html'),\n 'view': HostAddressView}\n # set application page\n VIEWS['application.html'] = {'template': os.path.join(\n here, 'ui/application.html'),\n 'view': ApplicationView}\n # set fake page to export images\n VIEWS['process_cpu.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': ProcessCpuImageView}\n VIEWS['process_mem.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': ProcessMemoryImageView}\n VIEWS['address_cpu.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressCpuImageView}\n VIEWS['address_mem.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressMemoryImageView}\n VIEWS['address_io.png'] = {'template': os.path.join(\n here, 'ui/empty.html'),\n 'view': AddressNetworkImageView}",
"def step_1(browser):\n browser.click_on(\"Import depuis eComptes\".decode('utf8'))",
"def index():\n print(\"web page loading\")\n\n # Upload deployment file and run deployment routine\n if request.method == 'POST':\n # Check if the post request has the file part\n if 'file' not in request.files:\n print('No file part')\n return redirect(request.url)\n file = request.files['file']\n # \n if file.filename == '':\n print('No file selected for uploading')\n return redirect(request.url)\n # If proper file uploaded, save file to RasPi, emit event that triggers the run deployment process \n if file and allowed_file(file.filename):\n filename = secure_filename(file.filename)\n file.save(os.path.join('uploads', filename))\n # SOCKETIO.emit('run deployment', filename, broadcast=False)\n file_location = 'uploads/' + str(filename)\n run_deployment(file_location)\n print('File successfully uploaded')\n # Returns empty url to prevent page reload upon submission\n return '', 204\n\n template_data = {\n 'incoming':INCOMING,\n 'outgoing':OUTGOING\n }\n\n return render_template('robotWebapp.jinja2', **template_data)",
"def index(request):\n try:\n login(request)\n if 'username' not in request.session or 'password' not in request.session:\n err = \"\"\n login(request)\n return render_to_response('login.html', context={'error': err})\n else:\n if request.POST.get('inv') is not None:\n investigation = request.POST.get('inv')\n else:\n investigation = \"\"\n folders = []\n investigations = []\n if investigation is not None and investigation != \"\":\n username = request.POST.get('username')\n password = request.POST.get('password')\n storage = request.POST.get('storage')\n server = request.POST.get('server')\n oc_folders = subprocess.Popen(\n [\"curl -s -X PROPFIND -u \" + username + \":\" + password + \" '\" + storage + \"/\" + investigation + \"' | grep -oPm250 '(?<=<d:href>)[^<]+'\"],\n stdout=subprocess.PIPE, shell=True).communicate()[0].split(\"\\n\")\n inv_folders = subprocess.Popen(\n [\"curl -s -X PROPFIND -u \" + username + \":\" + password + \"'\" + storage + \"/' | grep -oPm250 '(?<=<d:href>)[^<]+'\"],\n stdout=subprocess.PIPE, shell=True).communicate()[0].split(\"\\n\")\n else:\n username = request.session.get('username')\n password = request.session.get('password')\n storage = request.session.get('storage')\n server = request.session.get('server')\n if not os.path.exists(username):\n call([\"mkdir\", username])\n else:\n pass\n oc_folders = \"\"\n inv_folders = subprocess.Popen(\n [\"curl -s -X PROPFIND -u\" + username + \":\" + password + \" '\" + storage +\n \"/' | grep -oPm250 '(?<=<d:href>)[^<]+'\"], stdout=subprocess.PIPE, shell=True).communicate()[0].split(\"\\n\")\n for inv in inv_folders:\n if \"/owncloud/\" in request.session.get('storage'):\n investigation_name = inv.replace('/owncloud/remote.php/webdav/', '').replace('/', '')\n if \".\" not in investigation_name:\n new = investigation_name\n investigations.append(new)\n else:\n investigation_name = inv.replace('/remote.php/webdav/', '').replace('/', '')\n if \".\" not in investigation_name:\n new = investigation_name\n investigations.append(new)\n for oc in oc_folders:\n if \"/owncloud/\" in request.session.get('storage'):\n study = oc.replace('/owncloud/remote.php/webdav/', '').replace('/', '').replace(investigation, '')\n if \".\" not in study:\n new = study\n folders.append(new)\n else:\n study = oc.replace('/remote.php/webdav/', '').replace('/', '').replace(investigation, '')\n if \".\" not in study:\n new = study\n folders.append(new)\n folders = filter(None, folders)\n investigations = filter(None, investigations)\n try:\n gi = GalaxyInstance(url=request.session.get('server'), email=request.session.get('galaxyemail'), password=request.session.get(\"galaxypass\"))\n except Exception:\n request.session.flush()\n return render_to_response('login.html', context={'error': 'Failed to log in'})\n user = gi.users.get_current_user()\n gusername = user['username']\n workflows = gi.workflows.get_workflows\n history = gi.histories.get_histories()\n hist = json.dumps(history)\n his = json.loads(hist)\n genomes = gi.genomes.get_genomes()\n dbkeys = []\n for gene in genomes:\n for g in gene:\n if \"(\" not in g:\n dbkeys.append(g)\n return render(request, 'home.html',\n context={'workflows': workflows, 'histories': his, 'user': gusername,\n 'username': username, 'password': password, 'server': server,\n 'storage': storage, 'investigations': investigations, 'studies': folders,\n 'inv': investigation, 'dbkeys': dbkeys})\n\n except ConnectionError as err:\n err = \"Invalid user\"\n request.session.flush()\n return 
render_to_response('login.html', context={'error': err})",
"def publish_info_in_pagebrowser():\n env.run('bin/django create_pagebrowser_books')",
"def export_inventory(self, websites):\n for website in websites:\n website.export_inventory_to_magento()",
"def web_archive():\n\n try:\n auth_check()\n except Exception as e:\n return flask.redirect(str(e))\n\n return flask.render_template('archive.html', user = flask.session['user'],\n archives = db_get_archives())",
"def do_admin_browser(request):\n from MoinMoin.util.dataset import TupleDataset, Column\n _ = request.getText\n\n data = TupleDataset()\n data.columns = [\n Column('page', label=('Page')),\n Column('file', label=('Filename')),\n Column('size', label=_('Size'), align='right'),\n ]\n\n # iterate over pages that might have attachments\n pages = request.rootpage.getPageList()\n for pagename in pages:\n # check for attachments directory\n page_dir = getAttachDir(request, pagename)\n if os.path.isdir(page_dir):\n # iterate over files of the page\n files = os.listdir(page_dir)\n for filename in files:\n filepath = os.path.join(page_dir, filename)\n data.addRow((\n (Page(request, pagename).link_to(request,\n querystr=\"action=AttachFile\"), wikiutil.escape(pagename, 1)),\n wikiutil.escape(filename.decode(config.charset)),\n os.path.getsize(filepath),\n ))\n\n if data:\n from MoinMoin.widget.browser import DataBrowserWidget\n\n browser = DataBrowserWidget(request)\n browser.setData(data, sort_columns=[0, 1])\n return browser.render(method=\"GET\")\n\n return ''",
"def home( request ):\n if not \"email\" in request.session:\n return redirect('/')\n\n user_email = request.session[\"email\"];\n box_data = box.dir_info( user_email, '/' )\n dropbox_data = dropbox.dir_info( user_email, '/' )\n google_data = retrieve_all_files( request.session[\"email\"] )\n pending_share_dump = pending_shares( request )\n\n user_data = {\n \"name\" : user_email,\n \"email\" : user_email\n }\n drive_stack = {\n \"box_data\" : json.dumps( box_data ),\n \"dropbox_data\" : json.dumps( dropbox_data ),\n \"google_data\": json.dumps( google_data )\n }\n sharing_info = {\n \"pending_shares\" : json.dumps( pending_share_dump ),\n }\n params = dict( user_data.items( ) + drive_stack.items( ) + sharing_info.items( ) )\n\n return render_to_response( 'home.html', params )",
"def step_3a(browser):\n browser.find_button_by_label('Importer').click()\n # write browser contents\n # with open('browser_contents', 'w') as f:\n # f.write(browser.contents)",
"def browserfiles(request,cloudItem):\n\n\tbrowser = None\n\thistoryForm = None\n\terror = None\n\n\ttry:\n\t\tci = checkCloudItem(cloudItem,request.user.id)\n\t\treport = openReport(ci)\n\t\tprofileForm = ProfileSelectorForm()\n\t\tprofileForm.setChoices(report)\n\t\thistoryForm = HistoryTimeLineForm()\n\texcept Exception as e:\n\t\terror = formatException(e)\n\n\treturn render_to_response(\"clouditem/browserHome.html\", {'browser':browser,'form':historyForm,'profileForm': profileForm,'objID':ci.id,'error':error}, context_instance=RequestContext(request))",
"def index(request):\r\n badRequest(\"Url not found\")",
"def handle_website_python(self, dot_cookietemple: Optional[dict]) -> None:\n self.web_struct.web_framework = cookietemple_questionary_or_dot_cookietemple(function='select',\n question='Choose between the following frameworks',\n choices=['flask'],\n default='flask',\n dot_cookietemple=dot_cookietemple,\n to_get_property='web_framework')\n self.web_struct.setup_type = cookietemple_questionary_or_dot_cookietemple(function='select',\n question='Choose between the basic and advanced'\n ' (database, translations, deployment scripts) template',\n choices=['basic', 'advanced'],\n default='basic',\n dot_cookietemple=dot_cookietemple,\n to_get_property='setup_type')\n\n self.web_struct.use_frontend = cookietemple_questionary_or_dot_cookietemple(function='confirm',\n question='Do you want to initialize your project'\n ' with a advanced frontend template?',\n default='Yes',\n dot_cookietemple=dot_cookietemple,\n to_get_property='use_frontend')\n\n # prompt the user for its frontend template, if he wants so\n if self.web_struct.use_frontend:\n print('[bold blue]The following templates are available:\\n')\n\n # strings that start with https: are recognized by most terminal (emulators) as links\n print('[bold blue]https://html5up.net/solid-state')\n\n self.web_struct.frontend = cookietemple_questionary_or_dot_cookietemple(function='select', # type: ignore\n question='Choose between the following predefined frontend templates',\n choices=['SolidState', 'None'],\n dot_cookietemple=dot_cookietemple,\n to_get_property='frontend').lower()\n\n self.web_struct.url = cookietemple_questionary_or_dot_cookietemple(function='text',\n question='Project URL (if already existing)',\n default='dummy.com',\n dot_cookietemple=dot_cookietemple,\n to_get_property='url')",
"def index():\n print 'Loading Index'\n\n user_upload_folder = os.path.join(app.config['UPLOAD_FOLDER'], str(session['user_id']))\n\n upload_list = ['No Uploads']\n if os.path.isdir(user_upload_folder):\n upload_list = [f for f in os.listdir(user_upload_folder) if f.endswith('.dvw')]\n\n return render_template('index.html', result_dict={'uploads': upload_list})",
"def home():\r\n list1 = ''\r\n list2 = ''\r\n whitesite = ''\r\n cache2=''\r\n con = sqlite3.connect(base)\r\n cur = con.cursor()\r\n cur.execute(\"select * from white\")\r\n for row in cur.fetchall():\r\n whitesite = whitesite + row[0]+'\\n'\r\n cur.execute(\"select * from ban\")\r\n for row in cur.fetchall():\r\n list1 = list1 + row[0]+'\\n'\r\n cur.execute(\"select * from log\")\r\n for row in cur.fetchall():\r\n list2 = list2 + row[0]+': ['+row[2]+'->'+row[1]+'] from:'+row[3]+' info:'+row[4]+'\\n'\r\n cur.execute(\"select * from cache\")\r\n for row in cur.fetchall():\r\n cache2 = cache2 + row[0]+'->'+row[1]+'\\n'\r\n\r\n con.close()\r\n return render_template(\r\n 'index.html',\r\n title='ATMSDNS',\r\n year=datetime.now().year,\r\n bans=list1,\r\n logs=list2,\r\n white1=whitesite,\r\n cache1=cache2,\r\n )",
"def clearViews(request):\n return redirect(\"home\")",
"def test_views_appear(self):\n\t\t\n\t\t\"\"\"\t\n\n\t\tlogging.basicConfig(filename=\"Views.log\", level=logging.INFO, filemode='w')\n\t\t\n\t\tdriver = self.driver\n\t\tself.login()\n\t\tdetect_and_pass_all_wizards(driver)\n\t\t\n\t\tclick_menu_element(driver,\"Tactical view\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Status report\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Group view\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Summary of the status groups\" in driver.page_source,True)\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Tree view\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Tree search\" in driver.page_source,True)\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Agent detail\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Description\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Monitor detail\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Monitor status\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Alert details\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Alert control filter\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Agent/Alert view\")\t\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Agents / Alert templates\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Agent/Module view\")\n\t\tclick_menu_element(driver,\"Module groups\")\n\t\tclick_menu_element(driver,\"Real-time graphs\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Clear graph\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Inventory\")\n\t\tclick_menu_element(driver,\"Log viewer\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Export to CSV\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"SNMP console\")\n\t\tclick_menu_element(driver,\"SNMP browser\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Starting OID\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"SNMP trap editor\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"MIB uploader\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Index of attachment/mibs\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"SNMP filters\")\n\t\tclick_menu_element(driver,\"SNMP trap generator\")\t\t\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Host address\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Network map\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"There are no network maps defined yet\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Network console\")\n\t\tclick_menu_element(driver,\"Services\")\n\t\tclick_menu_element(driver,\"Visual console\")\n\t\tclick_menu_element(driver,\"Custom reports\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create report\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Custom graphs\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Total items\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Main dashboard\")\n\t\tclick_menu_element(driver,\"Copy dashboard\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Replicate Dashboard\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Custom SQL\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create custom SQL\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"View events\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Event control filter\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Statistics\")\n\t\tclick_menu_element(driver,\"Edit my user\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Password 
confirmation\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"WebChat\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Send message\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"List of Incidents\")\n\t\tclick_menu_element(driver,\"Statistics\") \n\t\tclick_menu_element(driver,\"Message list\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create message\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"New message\")\n\t\tclick_menu_element(driver,\"Connected users\")\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Export data\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Source agent\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Scheduled downtime\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Execution type\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Recon view\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Task name\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"File repository\")\n\t\tclick_menu_element(driver,\"IPAM\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"IPAM\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Manage agents\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create agent\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Custom fields\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create field\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Component groups\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Module categories\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create category\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Module types\")\n\t\tclick_menu_element(driver,\"Module groups\")\n\t\tclick_menu_element(driver,\"Insert Data\")\n\t\tclick_menu_element(driver,\"Resource exporting\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Export\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Resource registration\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Upload\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Manage agent groups\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create group\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Module tags\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create tag\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Enterprise ACL Setup\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Add\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Manage users\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create user\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Profile management\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Connected users\")\n\t\ttime.sleep(2)\t\n\t\tclick_menu_element(driver,\"Network components\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Free Search\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Local components\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Search\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Module templates\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Inventory modules\")\n\t\tclick_menu_element(driver,\"Manage policies\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Collections\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in 
driver.page_source,True)\n\t\tclick_menu_element(driver,\"Duplicate config\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Replicate configuration\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Agent operations\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"In order to perform massive operations\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Module operations\")\n\t\tclick_menu_element(driver,\"Plugin operations\")\n\t\tclick_menu_element(driver,\"User operations\")\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Alert operations\")\n\t\tclick_menu_element(driver,\"Policies operations\")\n\t\tclick_menu_element(driver,\"SNMP operations\")\n\t\tclick_menu_element(driver,\"Satellite Operations\")\n\t\tclick_menu_element(driver,\"List of Alerts\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Alert control filter\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Templates\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Actions\")\n\t\tclick_menu_element(driver,\"Commands\")\n\t\tclick_menu_element(driver,\"List of special days\")\n\t\tclick_menu_element(driver,\"Event alerts\")\t\n\t\tclick_menu_element(driver,\"SNMP alerts\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Maintenance\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Event filters\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create new filter\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Custom events\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Update\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Event responses\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create response\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Manage servers\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Saga\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Recon task\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Plugins\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Name\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Recon script\")\n\t\tclick_menu_element(driver,\"Export targets\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Register Plugin\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Upload\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Cron jobs\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"General Setup\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Pandora FMS Language settings\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Password policy\")\n\t\tclick_menu_element(driver,\"Enterprise\")\n\t\tclick_menu_element(driver,\"Historical database\")\n\t\tclick_menu_element(driver,\"Log Collector\")\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Authentication\")\n\t\tclick_menu_element(driver,\"Performance\")\n\t\tclick_menu_element(driver,\"Visual styles\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Behaviour configuration\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"eHorus\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Enable eHorus\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Edit OS\")\n\t\tclick_menu_element(driver,\"Licence\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Request new licence\" in 
driver.page_source,True)\n\t\tclick_menu_element(driver,\"Skins\")\n\t\tclick_menu_element(driver,\"Translate string\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Search\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"System audit log\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"User\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Links\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Link name\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Diagnostic info\")\n\t\tclick_menu_element(driver,\"Site news\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Subject\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"File manager\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Index of images\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"DB information\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Module data received\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Database purge\")\n\t\tclick_menu_element(driver,\"Database debug\")\n\t\ttime.sleep(2)\n\t\tclick_menu_element(driver,\"Database audit\")\n\t\tclick_menu_element(driver,\"Database events\")\n\t\tclick_menu_element(driver,\"DB Status\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"DB settings\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"DB interface\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Run SQL query\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"API checker\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"IP\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"System Info\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Generate file\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Extension uploader\")\n\t\tclick_menu_element(driver,\"File repository manager\")\t\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Groups\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"System logfiles\")\n\t\tclick_menu_element(driver,\"Backup\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Description\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"CSV import\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Upload file\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"CSV import group\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Upload file\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"IPAM\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"Create\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Update Manager offline\")\n\t\tclick_menu_element(driver,\"Update Manager online\")\n\t\ttime.sleep(2)\n\t\tself.assertEqual(\"The last version of package installed is:\" in driver.page_source,True)\n\t\tclick_menu_element(driver,\"Update Manager options\")\n\t\t\n\t\tlogging.info(\"test_views_appear is correct\")\n\n\t\t\"\"\"",
"def page_import_prompt(self):\n\n nonce = self.create_new_nonce()\n history_back_id = self.helper_retrieve_last_request_get_dict_key_val_index_zero_or_return_none(\"history_back_id\")\n\n page_body = html_page_templates.page_import_prompt_template(\n nonce=nonce,\n history_back_id=history_back_id,\n self_sess_action_auth=self.sess_action_auth,\n )\n return page_body",
"def import_wiki(export_filenames, password):\r\n\r\n setup_opener()\r\n\r\n login(password)\r\n\r\n token = get_edit_token()\r\n\r\n for export_filename in export_filenames:\r\n do_import(export_filename, token)"
] | [
"0.5696574",
"0.55614066",
"0.55519164",
"0.5478478",
"0.54566526",
"0.5309509",
"0.52523816",
"0.5249747",
"0.5243258",
"0.5227952",
"0.5217405",
"0.521123",
"0.5184404",
"0.5179009",
"0.51621497",
"0.51396",
"0.51369673",
"0.51349753",
"0.5115782",
"0.51126957",
"0.5110532",
"0.50951725",
"0.5090815",
"0.5088539",
"0.5087621",
"0.507856",
"0.50657624",
"0.5063479",
"0.5043478",
"0.5040875"
] | 0.61303335 | 0 |
Import carriers and show the user appropriate message | def default_start(self, data):
return {
'message': "This wizard has imported all the carriers / " +
"shipping methods for this magento instance. You should now " +
"configure the imported carriers / shipping methods to " +
"match the shipment carriers in Tryton to allow seamless " +
"synchronisation of tracking information."
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_click(self):\n path = self.path.get()\n if not path:\n self.error_label.config(text='Alege baza de date.')\n return\n\n password = self.pass_entry.get()\n if not password:\n self.error_label.config(text='Introdu parola.')\n return\n\n try:\n sharing.import_database(self.us, path, password)\n self.error_label.config(text='Am importat baza de date cu succes.')\n except InvalidToken:\n self.error_label.config(text='Parolă incorectă.')",
"def pricing_import(request, simulation):\n try:\n # Get all pricing policies for this usertype.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # Get all links of the network.\n links = get_query('link', simulation)\n # Get all LinkSelection of the network.\n locations = LinkSelection.objects.filter(\n network=simulation.scenario.supply.network\n )\n # Get all usertypes.\n usertypes = get_query('usertype', simulation)\n # Get an empty Vector or create one if there is none.\n if Vector.objects.filter(data='').exists():\n empty_vector = Vector.objects.filter(data='')[0]\n else:\n empty_vector = Vector(data='')\n empty_vector.save()\n # Convert the imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n if encoded_file.name.split(\".\")[-1] == 'tsv':\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n else:\n reader = csv.DictReader(tsv_file, delimiter=',')\n # For each imported OD pair, if the pair already exists in t\n if 'traveler_type' in reader.fieldnames:\n has_type = True\n else:\n has_type = False\n if 'times' in reader.fieldnames:\n has_times = True\n else:\n has_times = False\n # For each imported link, if a Policy exists for the link, baseValue is\n # updated, else a new Policy is created.\n for row in reader:\n # Get link of current row.\n link = links.get(user_id=row['link'])\n # Get or create a LinkSelection associated with the link.\n if locations.filter(link=link).exists():\n # Take first matching LinkSelection.\n location = locations.filter(link=link)[0]\n else:\n # Create a LinkSelection for the current link.\n # Name and user_id of the Link Selection are set to the name\n # and user_id of the link.\n location = LinkSelection(\n network=simulation.scenario.supply.network,\n name=link.name,\n user_id=link.user_id,\n )\n location.save()\n location.link.add(link)\n # Get or create a pricing Policy with the corret LinkSelection\n # object.\n try:\n toll = tolls.get(location=location)\n except Policy.DoesNotExist:\n # Create a new toll with default values.\n toll = Policy(location=location, type='PRICING', usertype=None,\n valueVector=empty_vector,\n timeVector=empty_vector)\n toll.save()\n toll.scenario.add(simulation.scenario)\n # Update affected traveler type.\n toll.usertype = None\n if has_type:\n try:\n toll.usertype = usertypes.get(user_id=row['traveler_type'])\n except (UserType.DoesNotExist, ValueError):\n pass\n # Update values.\n values = row['values'].split(',')\n # First value is baseValue.\n toll.baseValue = float(values[0])\n if len(values) > 1:\n # Remaining values are stored in valueVector (as a string of\n # comma separated values).\n values = [str(float(x)) for x in values]\n v = Vector(data=','.join(values[1:]))\n v.save()\n toll.valueVector = v\n else:\n toll.valueVector = empty_vector\n # Update times.\n toll.timeVector = empty_vector\n if has_times:\n times = row['times'].split(',')\n if times[0] != ' ' and times[0]:\n # There is at least one value, store it in timeVector.\n times = [str(int(x)) for x in times]\n v = Vector(data=','.join(times))\n v.save()\n toll.timeVector = v\n toll.save()\n return HttpResponseRedirect(reverse(\n 'metro:pricing_main', args=(simulation.id,)\n ))\n except Exception as e:\n # Catch any exception while importing the file and return an error page\n # if there is any.\n print(e)\n context = {\n 'simulation': simulation,\n 'object': 'pricing',\n }\n return render(request, 'metro_app/import_error.html', 
context)",
"def onImportTextFromCAD(self):\n try:\n self.onCommandRejected()\n dialog = QImportTextFromCADDialog(self)\n dialog.show()\n dialog.exec_()\n except Exception as ex:\n message = 'error occurred({}) in {}:{}'.format(ex, sys.exc_info()[-1].tb_frame.f_code.co_filename,\n sys.exc_info()[-1].tb_lineno)\n self.addMessage.emit(MessageType.Error, message)",
"def doImport(self,textFile):\n self.loadText(textFile)\n self.getBooks()\n #self.copyBooks()\n self.genLibData()\n self.genLibCells()\n self.sortRecords()",
"def _import(self, __button):\r\n# WARNING: Refactor _import; current McCabe Complexity metric = 18.\r\n Widgets.set_cursor(self.modulebook.mdcRTK, gtk.gdk.WATCH)\r\n\r\n _import_errors = 0\r\n self._import_log.info('The following records could not be imported to '\r\n 'the open RTK database:\\n')\r\n\r\n # Find the number of existing incidents.\r\n if Configuration.BACKEND == 'mysql':\r\n _query = \"SELECT COUNT(*) FROM rtk_incident\"\r\n elif Configuration.BACKEND == 'sqlite3':\r\n _query = \"SELECT COALESCE(MAX(fld_incident_id)+1, 0) \\\r\n FROM rtk_incident\"\r\n (_num_incidents, _error_code, __) = self._dao.execute(_query,\r\n commit=False)\r\n for i in range(len(self._file_contents) - 1):\r\n _contents = []\r\n\r\n for j in range(len(self._file_index)):\r\n if self._file_index[j] == -1:\r\n _contents.append('')\r\n else:\r\n try:\r\n _contents.append(\r\n self._file_contents[i][self._file_index[j]])\r\n except IndexError:\r\n _contents.append('')\r\n\r\n _contents[14] = _contents[14].replace('$', '')\r\n\r\n # Remove any single and double quotes from the description and\r\n # remarks fields.\r\n for j in[4, 5, 8]:\r\n _contents[j] = _contents[j].replace('\\'', '')\r\n _contents[j] = _contents[j].replace('\\\"', '')\r\n\r\n # Remove any commas that may be in numerical fields.\r\n for j in [12, 14, 15]:\r\n _contents[j] = _contents[j].replace(',', '')\r\n\r\n # Convert all the date fields to ordinal dates.\r\n for j in [19, 22, 25, 28]:\r\n _contents[j] = Utilities.date_to_ordinal(_contents[j])\r\n\r\n # Convert missing values to correct default value.\r\n for j in [0, 1, 2, 3, 6, 7, 13, 15, 18, 20, 21, 23, 24, 26, 27,\r\n 29, 31, 32, 35, 36, 37, 38, 39]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), 0)\r\n except ValueError:\r\n _contents[j] = 0\r\n\r\n for j in [16, 17]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), -1)\r\n except ValueError:\r\n _contents[j] = -1\r\n\r\n for j in [12, 14, 33]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n float(_contents[j]), 0.0)\r\n except ValueError:\r\n _contents[j] = 0.0\r\n\r\n for j in [9, 34]:\r\n try:\r\n _contents[j] = Utilities.missing_to_default(\r\n int(_contents[j]), 1)\r\n except ValueError:\r\n _contents[j] = 1\r\n\r\n if _contents[1] == 0 or _contents[1] is None or _contents[1] == '':\r\n _contents[1] = _num_incidents[0][0] + i + 1\r\n\r\n _query = \"INSERT INTO rtk_incident \\\r\n (fld_revision_id, fld_incident_id, \\\r\n fld_incident_category, fld_incident_type, \\\r\n fld_short_description, fld_long_description, \\\r\n fld_criticality, fld_detection_method, fld_remarks, \\\r\n fld_status, fld_test_found, fld_test_case, \\\r\n fld_execution_time, fld_unit, fld_cost, \\\r\n fld_incident_age, fld_hardware_id, fld_sftwr_id, \\\r\n fld_request_by, fld_request_date, fld_reviewed, \\\r\n fld_reviewed_by, fld_reviewed_date, fld_approved, \\\r\n fld_approved_by, fld_approved_date, fld_complete, \\\r\n fld_complete_by, fld_complete_date, fld_life_cycle, \\\r\n fld_analysis, fld_accepted) \\\r\n VALUES ({0:d}, {1:d}, {2:d}, {3:d}, '{4:s}', '{5:s}', \\\r\n {6:d}, {7:d}, '{8:s}', {9:d}, '{10:s}', \\\r\n '{11:s}', {12:f}, {13:d}, {14:f}, {15:d}, \\\r\n {16:d}, {17:d}, {18:d}, {19:d}, {20:d}, \\\r\n {21:d}, {22:d}, {23:d}, {24:d}, {25:d}, \\\r\n {26:d}, {27:d}, {28:d}, {29:d}, '{30:s}', \\\r\n {31:d})\".format(_contents[0], _contents[1],\r\n _contents[2], _contents[3],\r\n _contents[4], _contents[5],\r\n _contents[6], _contents[7],\r\n _contents[8], 
_contents[9],\r\n _contents[10], _contents[11],\r\n _contents[12], _contents[13],\r\n _contents[14], _contents[15],\r\n _contents[16], _contents[17],\r\n _contents[18], _contents[19],\r\n _contents[20], _contents[21],\r\n _contents[22], _contents[23],\r\n _contents[24], _contents[25],\r\n _contents[26], _contents[27],\r\n _contents[28], _contents[29],\r\n _contents[30], _contents[31])\r\n (_results,\r\n _error_code, __) = self._dao.execute(_query, commit=True)\r\n\r\n if _error_code == 0:\r\n _query = \"INSERT INTO rtk_incident_detail \\\r\n (fld_incident_id, fld_component_id, \\\r\n fld_age_at_incident, fld_failure, fld_suspension, \\\r\n fld_cnd_nff, fld_occ_fault, \\\r\n fld_initial_installation, fld_interval_censored) \\\r\n VALUES ({0:d}, {1:d}, {2:f}, {3:d}, \\\r\n {4:d}, {5:d}, {6:d}, {7:d}, \\\r\n {8:d})\".format(_contents[1], _contents[32],\r\n _contents[33], _contents[34],\r\n _contents[35], _contents[36],\r\n _contents[37], _contents[38],\r\n _contents[39])\r\n (_results,\r\n _error_code, __) = self._dao.execute(_query, commit=True)\r\n else:\r\n self._import_log.info('{0:d} - {1:s}'.format(_contents[1],\r\n _contents[4]))\r\n _import_errors += 1\r\n\r\n if _import_errors > 0:\r\n Widgets.rtk_information(_(u\"Error importing {0:d} program \"\r\n u\"incidents. Refer to the import log \"\r\n u\"{1:s} for more details.\").format(\r\n _import_errors, self._import_log))\r\n\r\n Widgets.set_cursor(self.modulebook.mdcRTK, gtk.gdk.LEFT_PTR)\r\n\r\n # Reload the Incident class gtk.TreeView().\r\n self._modulebook.request_load_data(self._dao, self._revision_id)\r\n\r\n return False",
"def post(self, request, *args, **kwargs):\n resource = self.get_import_resource_class()(**self.get_import_resource_kwargs(request, *args, **kwargs))\n\n confirm_form = ConfirmImportForm(request.POST)\n if confirm_form.is_valid():\n import_formats = self.get_import_formats()\n input_format = import_formats[\n int(confirm_form.cleaned_data['input_format'])\n ]()\n tmp_storage = self.get_tmp_storage_class()(name=confirm_form.cleaned_data['import_file_name'])\n data = tmp_storage.read(input_format.get_read_mode())\n if not input_format.is_binary() and self.from_encoding:\n data = force_text(data, self.from_encoding)\n dataset = input_format.create_dataset(data)\n\n result = resource.import_data(dataset, dry_run=False,\n raise_errors=True,\n file_name=confirm_form.cleaned_data['original_file_name'],\n user=request.user)\n\n if not self.get_skip_admin_log():\n # Add imported objects to LogEntry\n logentry_map = {\n RowResult.IMPORT_TYPE_NEW: ADDITION,\n RowResult.IMPORT_TYPE_UPDATE: CHANGE,\n RowResult.IMPORT_TYPE_DELETE: DELETION,\n }\n content_type_id = ContentType.objects.get_for_model(self.model).pk\n for row in result:\n if row.import_type != row.IMPORT_TYPE_ERROR and row.import_type != row.IMPORT_TYPE_SKIP:\n LogEntry.objects.log_action(\n user_id=request.user.pk,\n content_type_id=content_type_id,\n object_id=row.object_id,\n object_repr=row.object_repr,\n action_flag=logentry_map[row.import_type],\n change_message=\"%s through import_export\" % row.import_type,\n )\n success_message = str(_(u'Import finished')) + ' , ' + str(_(u'Add')) + ' : %d' % result.totals[\n RowResult.IMPORT_TYPE_NEW] + ' , ' + str(_(u'Update')) + ' : %d' % result.totals[\n RowResult.IMPORT_TYPE_UPDATE]\n\n messages.success(request, success_message)\n tmp_storage.remove()\n\n post_import.send(sender=None, model=self.model)\n model_info = (self.opts.app_label, self.opts.model_name)\n url = reverse('xadmin:%s_%s_changelist' % model_info,\n current_app=self.admin_site.name)\n return HttpResponseRedirect(url)",
"def file_import(self):\r\n\r\n try:\r\n self.process_file_import()\r\n except InputError as ex:\r\n print(ex)\r\n self.file_import()",
"def error_output_import(self):\n im_methods_string = ''\n for i in range(len(methods_of_import)):\n if i == 0:\n continue\n elif i != len(methods_of_import) - 1:\n im_methods_string += ('from_' + methods_of_import[i] + '() or ')\n else: # i == len(methods_of_import)\n im_methods_string += ('from_' + methods_of_import[i] + '()')\n print(\"please import by \" + im_methods_string)\n sys.exit(0)",
"def action_import(self):\n ctx = self._context\n account_obj = self.env[\"account.account\"]\n import_obj = self.env['import.journal.entries.advanced']\n import_line_obj = self.env[\"journal.entries.csv.import\"]\n if 'active_id' in ctx:\n import_id = import_obj.browse(ctx['active_id'])\n if not self.data:\n raise exceptions.Warning(_(\"Necesitas seleccionar un archivo!\"))\n # Decode the file data\n data = base64.b64decode(self.data).decode('utf-8')\n file_input = StringIO(data)\n file_input.seek(0)\n reader_info = []\n if self.delimeter:\n delimeter = str(self.delimeter)\n else:\n delimeter = ','\n reader = csv.reader(file_input, delimiter=delimeter,\n lineterminator='\\r\\n')\n try:\n reader_info.extend(reader)\n except Exception:\n raise exceptions.Warning(_(\"Archivo no valido\"))\n keys = reader_info[0]\n # check if keys exist\n if not isinstance(keys, list) or ('cuenta' not in keys):\n raise exceptions.Warning(_(\"No se encuentran 'cuentas' contable en el archivo\"))\n del reader_info[0]\n values = {}\n actual_date = fields.Date.today()\n for i in range(len(reader_info)):\n val = {}\n field = reader_info[i]\n values = dict(zip(keys, field))\n account = False\n if 'cuenta' in values and values['cuenta']:\n account_id = account_obj.search([('code', '=', values['cuenta'])]) \n if account_id:\n account = account_id[0]\n else:\n account = account_id\n\n val[\"ref\"] = values[\"descripcion\"]\n val[\"document_number\"] = values[\"num_documento\"]\n val[\"document_date\"] = datetime.strptime(values[\"fecha\"] , \"%d-%m-%Y\")\n val['account_id'] = account.id\n val['parent_id'] = import_id.id\n val['debit'] = values['debito']\n val['credit'] = values['credito']\n val['processed'] = False\n validate = import_line_obj.create(val)\n if validate:\n if validate.account_id:\n validate.is_ok = True",
"def view(request):\n data = {'title': 'Chargebacks - Import 844 Files'}\n addGlobalData(request, data)\n\n if not data['company'] or not data['has_access_to_company']:\n return HttpResponseRedirect(reverse('companies'))\n\n try:\n files_844 = request.POST.get('844_files', '')\n if not files_844:\n return bad_json(message='Not 844 files to process')\n\n company = data['company']\n for file_844 in files_844.split(\"|\"):\n company_directory = os.path.join(CLIENTS_DIRECTORY, company.get_id_str())\n src_path = os.path.join(company_directory, DIR_NAME_844_ERM_INTAKE, file_844)\n parent_directory_name = os.path.basename(os.path.normpath(os.path.dirname(src_path)))\n\n # Is Valid 844 File\n if not validate_844_header(src_path):\n move_file_to_bad_folder(src_path)\n return bad_json(message=f\"{file_844} is not a valid 844 file\")\n\n # Wrong parent directory\n if parent_directory_name != DIR_NAME_844_ERM_INTAKE:\n return bad_json(message=f\"{file_844} is not in a valid 844 directory\")\n\n # IMPORT 844 Process\n import844_bulk_id = import_844_process(company, src_path)\n\n # IMPORT CHARGEBACKS and LINES Process\n import_chargebacks_process(company, import844_bulk_id)\n\n # Move it to \"DIR_NAME_844_PROCESSED\" folder\n shutil.move(src_path, os.path.join(Path(''.join(os.path.join(company_directory, DIR_NAME_844_PROCESSED))), os.path.basename(src_path)))\n\n print('FINISH!')\n\n return ok_json(data={\"message\": \"844 Files have been succesfully processed\"})\n\n except Exception as ex:\n return bad_json(message=ex.__str__())",
"def usertype_import(request, simulation_id):\n\n simulation = Simulation.objects.get(pk=simulation_id)\n\n try:\n\n # Get an empty Vector or create one if there is none.\n if Vector.objects.filter(data='').exists():\n empty_vector = Vector.objects.filter(data='')[0]\n else:\n empty_vector = Vector(data='')\n empty_vector.save()\n # Convert the imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n if encoded_file.name.split(\".\")[-1] == 'tsv':\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n else:\n reader = csv.DictReader(tsv_file, delimiter=',')\n\n # For each imported link, if a Policy exists for the link, baseValue is\n # updated, else a new Policy is created.\n for row in reader:\n\n name = row['name']\n comment = row['comment']\n\n alphaTI_mean = row['alphaTI_mean']\n alphaTI_std = row['alphaTI_std']\n alphaTI_type = row['alphaTI_type']\n alphaTI = Distribution(mean=alphaTI_mean,\n std=alphaTI_std,\n type=alphaTI_type)\n alphaTI.save()\n\n alphaTP_mean = row['alphaTP_mean']\n alphaTP_std = row['alphaTP_std']\n alphaTP_type = row['alphaTP_type']\n alphaTP = Distribution(mean=alphaTP_mean, std=alphaTP_std, type=alphaTP_type)\n alphaTP.save()\n\n beta_mean = row['beta_mean']\n beta_std = row['beta_std']\n beta_type = row['beta_type']\n beta = Distribution(mean=beta_mean,\n std=beta_std,\n type=beta_type)\n beta.save()\n\n delta_mean = row['delta_mean']\n delta_std = row['delta_std']\n delta_type = row['delta_type']\n delta = Distribution(mean=delta_mean,\n std=delta_std,\n type=delta_type)\n delta.save()\n\n departureMu_mean = row['departureMu_mean']\n departureMu_std = row['departureMu_std']\n departureMu_type = row['departureMu_type']\n departureMu = Distribution(mean=departureMu_mean,\n std=departureMu_std,\n type=departureMu_type)\n departureMu.save()\n\n gamma_mean = row['gamma_mean']\n gamma_std = row['gamma_std']\n gamma_type = row['gamma_type']\n gamma = Distribution(mean=gamma_mean,\n std=gamma_std,\n type=gamma_type)\n gamma.save()\n\n modeMu_mean = row['modeMu_mean']\n modeMu_std = row['modeMu_std']\n modeMu_type = row['modeMu_type']\n modeMu = Distribution(mean=modeMu_mean,\n std=modeMu_std,\n type=modeMu_type)\n modeMu.save()\n\n penaltyTP_mean = row['penaltyTP_mean']\n penaltyTP_std = row['penaltyTP_std']\n penaltyTP_type = row['penaltyTP_type']\n penaltyTP = Distribution(mean=penaltyTP_mean,\n std=penaltyTP_std,\n type=penaltyTP_type)\n penaltyTP.save()\n\n routeMu_mean = row['routeMu_mean']\n routeMu_std = row['routeMu_std']\n routeMu_type = row['routeMu_type']\n routeMu = Distribution(mean=routeMu_mean,\n std=routeMu_std,\n type=routeMu_type)\n routeMu.save()\n\n tstar_mean = row['tstar_mean']\n tstar_std = row['tstar_std']\n tstar_type = row['tstar_type']\n tstar = Distribution(mean=tstar_mean,\n std=tstar_std,\n type=tstar_type)\n tstar.save()\n\n typeOfRouteChoice = row['typeOfRouteChoice']\n typeOfDepartureMu = row['typeOfDepartureMu']\n typeOfRouteMu = row['typeOfRouteMu']\n typeOfModeMu = row['typeOfModeMu']\n localATIS = row['localATIS']\n modeChoice = row['modeChoice']\n modeShortRun = row['modeShortRun']\n commuteType = row['commuteType']\n\n usertypes = get_query('usertype', simulation)\n if usertypes.exists():\n user_id = usertypes.last().user_id + 1\n else:\n user_id = 1\n\n #usertype = UserType(name=name, comment=comment, alphaTI=alphaTI, alphaTP=alphaTP, beta=beta, delta=delta, departureMu=departureMu, gamma=gamma, modeMu=modeMu, penaltyTP=penaltyTP, routeMu=routeMu, 
tstar=tstar, typeOfRouteChoice=typeOfRouteChoice, localATIS=localATIS, modeChoice=modeChoice, modeShortRun=modeShortRun, commuteType=commuteType, user_id=user_id)\n usertype = UserType()\n usertype.alphaTI = alphaTI\n usertype.alphaTP = alphaTP\n usertype.beta = beta\n usertype.delta = delta\n usertype.departureMu = departureMu\n usertype.gamma = gamma\n usertype.modeMu = modeMu\n usertype.penaltyTP = penaltyTP\n usertype.routeMu = routeMu\n usertype.tstar = tstar\n usertype.user_id = user_id\n usertype.typeOfRouteChoice = typeOfRouteChoice\n usertype.typeOfDepartureMu = typeOfDepartureMu\n usertype.typeOfRouteMu = typeOfRouteMu\n usertype.typeOfModeMu = typeOfModeMu\n usertype.localATIS = localATIS\n usertype.modeChoice = modeChoice\n usertype.modeShortRun = modeShortRun\n usertype.commuteType = commuteType\n usertype.save()\n\n matrix = Matrices()\n matrix.save()\n demandsegment = DemandSegment()\n demandsegment.usertype = usertype\n demandsegment.matrix = matrix\n demandsegment.save()\n demandsegment.demand.add(simulation.scenario.demand)\n print(UserType.modeMu)\n print(UserType.alphaTI)\n\n return HttpResponseRedirect(reverse(\n 'metro:demand_view', args=(simulation_id,)\n ))\n except Exception as e:\n # Catch any exception while importing the file and return an error page\n # if there is any.\n print(e)\n context = {\n 'simulation': simulation,\n 'object': 'pricing',\n }\n return render(request, 'metro_app/import_error.html', context)",
"def test_regressions_imports(self):\n issue = {\n \"number\": \"main/main\",\n \"contract\": \"C\",\n \"txlimit\": 1,\n \"in_directory\": \"imports_issue\",\n }\n self._simple_cli_run(\n f'{issue[\"number\"]}.sol',\n contract=issue[\"contract\"],\n tx_limit=issue[\"txlimit\"],\n in_directory=issue.get(\"in_directory\"),\n )",
"def csv_import():\n activities = current_user.get_supervised_activities()\n if activities == []:\n flash(\"Fonction non autorisée.\", \"error\")\n return redirect(url_for(\"event.index\"))\n\n choices = [(str(a.id), a.name) for a in activities]\n form = CSVForm(choices)\n\n if not form.is_submitted():\n form.description.data = current_app.config[\"DESCRIPTION_TEMPLATE\"]\n\n failed = []\n if form.validate_on_submit():\n activity_type = ActivityType.query.get(form.type.data)\n\n file = form.csv_file.data\n processed, failed = process_stream(\n file.stream, activity_type, form.description.data\n )\n\n flash(\n f\"Importation de {processed-len(failed)} éléments sur {processed}\",\n \"message\",\n )\n\n return render_template(\n \"import_csv.html\",\n form=form,\n failed=failed,\n title=\"Création d'event par CSV\",\n )",
"def test_csv_import_hotel_fail(self):\n from django.contrib.messages import get_messages\n path = reverse(\"import-csv\")\n user = mixer.blend(User, is_staff=True, is_superuser=True)\n client = Client()\n client.force_login(user)\n file = open(\"hotel.csv\")\n r = client.post(path, {\"title\": \"hotel\", \"csv_file\": file})\n messages = list(get_messages(r.wsgi_request))\n assert r.status_code == 200\n assert len(messages) >= 1\n for message in messages:\n assert \"can not import\" in str(message)",
"def importCSV(request, template='contacts/person/import.html'):\n\n if not request.user.is_authenticated():\n return HttpResponseRedirect('/login/?next=%s' % request.path)\n\n registres = 0\n\n if request.method == 'POST':\n form = ImportCSVForm(request.POST, request.FILES)\n if form.is_valid():\n uploaded_file = request.FILES['fitxer']\n uploaded_file.read()\n reader = csv.reader(uploaded_file, delimiter=',', quotechar='\"')\n\n for row in reader:\n person = Person()\n person.first_name = row[0]\n person.last_name = row[1]\n person.contact_type = row[3]\n person.id_card = row[5]\n\n base_slug = slugify(\"%s %s %s\" % (p.first_name, p.last_name, p.secondlast_name))\n # hem de comprovar que no existeix cap persona amb aquest nom. Si no, hem d'afegir -1\n tmp_slug = base_slug\n trobat = True\n counter = 0\n\n while trobat:\n try:\n Person.objects.get(slug__iexact=tmp_slug)\n counter = counter + 1\n tmp_slug = \"%s-%s\" % (base_slug, str(counter))\n\n except Person.DoesNotExist:\n trobat = False\n\n person.slug = tmp_slug\n person.save()\n\n registres = registres + 1\n\n else:\n form = ImportCSVForm()\n\n return render_to_response(template, {'registres': registres, 'form': form}, RequestContext(request))",
"def importFile(self):\n\n ## Backing up old CSV and JSON files before beginning import operations\n if os.path.isfile(\"text_files/customers.csv\") and os.path.isfile(\"text_files/customers.json\"):\n print(\"\\nCreating a backup of the existing customer .csv and .json files before overwriting\")\n shutil.copy2(\"text_files/customers.csv\", \"text_files/customers.csv.backup\" + str(time.time()))\n shutil.copy2(\"text_files/customers.json\", \"text_files/customers.json.backup\" + str(time.time()))\n\n ## Importing the text file for cleaning then converting to CSV\n input_file = open(\"text_files/customer_export.txt\", \"r\")\n output_file = open(\"text_files/customers.csv\", \"w\")\n\n ## A loop to clean and write the customer_export txt file to a CSV\n for line in input_file:\n clean_text = \"\"\n check_line = line.replace(\"#\", \"\").replace(\",,\",\"\").split(\"|\")\n for line in check_line:\n if line != check_line[10]:\n clean_text += line + \",\"\n elif line == check_line[10]:\n clean_text += line + \"\\n\"\n output_file.write(clean_text)\n\n ## Closing TXT file and CSV file after formatting\n input_file.close()\n output_file.close()\n\n ## Opening the cleaned CSV file for conversion to Json\n with open('text_files/customers.csv') as clean_csv:\n ## Converting CSV file to Json\n converted = csv.DictReader(clean_csv)\n rows = list(converted)\n\n ## Writing converted CSV to Json file\n with open('text_files/customers.json', 'w') as convert:\n json.dump(rows, convert)\n\n ## Deleting all data currently in database before importing new file\n db_connection.executeQuery(\"DELETE FROM CRM;DBCC CHECKIDENT ('CRM', RESEED, 0) DELETE FROM Mailings; DBCC CHECKIDENT ('Mailings', RESEED, 0) COMMIT\") \n\n ## Loading the newly created Json file\n with open(\"text_files/customers.json\") as customers_json:\n customers = json.load(customers_json)\n\n ## A loop to add the contents of the Json file to the database \n print(\"Writing imported file to database please wait...\")\n for key in customers:\n db_connection.executeQuery(\"INSERT INTO dbo.CRM (f_name, l_name, company, address, city, county, state, zip, primary_phone, secondary_phone, email_address) VALUES ('\" + key[\"first_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"last_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"company_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"address\"] + \"', '\" + key[\"city\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"county\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"state\"] + \"', '\" + str(key[\"zip\"]) + \"', '\" + key[\"phone1\"] + \"', '\" + key[\"phone2\"] + \"' , '\" + key[\"email\"] + \"'); COMMIT\")\n db_connection.executeQuery(\"INSERT INTO dbo.Mailings (name, company, address) VALUES ('\" + key[\"first_name\"].replace(\"\\'\", \"\\'\\'\") + \" \" + key[\"last_name\"].replace(\"\\'\", \"\\'\\'\") + \"', '\" + key[\"company_name\"].replace(\"\\'\", \"\\'\\'\") + \"','\" + key[\"address\"] + \" \" + key[\"city\"] + \" \" + key[\"county\"] + \" \" + key[\"state\"] + \" \" + str(key[\"zip\"]) + \"'); COMMIT\") \n\n print(\"\\nFinished writing to file. Returning to main menu...\")",
"def import_bmarks(self):\r\n username = self.matchdict.get('username')\r\n\r\n # if auth fails, it'll raise an HTTPForbidden exception\r\n with ReqAuthorize(self.request):\r\n data = {}\r\n post = self.POST\r\n\r\n # We can't let them submit multiple times, check if this user has\r\n # an import in process.\r\n if ImportQueueMgr.get(username=username, status=NEW):\r\n # They have an import, get the information about it and shoot\r\n # to the template.\r\n return {\r\n 'existing': True,\r\n 'import_stats': ImportQueueMgr.get_details(\r\n username=username)\r\n }\r\n\r\n if post:\r\n # we have some posted values\r\n files = post.get('import_file', None)\r\n\r\n if hasattr(files, 'filename'):\r\n storage_dir_tpl = self.settings.get('import_files',\r\n '/tmp/bookie')\r\n storage_dir = storage_dir_tpl.format(\r\n here=self.settings.get('app_root'))\r\n\r\n out_fname = store_import_file(storage_dir, username, files)\r\n\r\n # Mark the system that there's a pending import that needs\r\n # to be completed\r\n q = ImportQueue(username, unicode(out_fname))\r\n DBSession.add(q)\r\n DBSession.flush()\r\n # Schedule a task to start this import job.\r\n tasks.importer_process.delay(q.id)\r\n\r\n return HTTPFound(\r\n location=self.request.route_url('user_import',\r\n username=username))\r\n else:\r\n data['error'] = [\"Please provide a file to import\"]\r\n\r\n return data\r\n else:\r\n # we need to see if they've got\r\n # just display the form\r\n return {\r\n 'existing': False\r\n }",
"async def importIncident(self, incident: Incident) -> None:",
"def object_import(request, simulation, object_name):\n try:\n if object_name == 'function':\n parent = simulation.scenario.supply.functionset\n else:\n parent = simulation.scenario.supply.network\n query = get_query(object_name, simulation)\n user_id_set = set(query.values_list('user_id', flat=True))\n if object_name == 'link':\n # To import links, we retrieve the user ids of all centroids, crossings\n # and functions and we build mappings between ids and objects.\n centroids = get_query('centroid', simulation)\n centroid_ids = set(centroids.values_list('user_id', flat=True))\n crossings = get_query('crossing', simulation)\n crossing_ids = set(crossings.values_list('user_id', flat=True))\n node_ids = centroid_ids.union(crossing_ids)\n # Mapping between the user id and the id of the nodes.\n node_mapping = dict()\n for centroid in centroids:\n node_mapping[centroid.user_id] = centroid.id\n for crossing in crossings:\n node_mapping[crossing.user_id] = crossing.id\n functions = get_query('function', simulation)\n function_ids = set(functions.values_list('user_id', flat=True))\n # Mapping between the user id and the id of the functions.\n function_id_mapping = dict()\n # Mapping between the user id and the instance of the functions\n function_mapping = dict()\n for function in functions:\n function_id_mapping[function.user_id] = function.id\n function_mapping[function.user_id] = function\n # Convert imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n to_be_updated = set()\n to_be_created = list()\n # Store the user_id of the imported instance to avoid two instances\n # with the same id.\n imported_ids = set()\n if object_name == 'centroid':\n # Do not import centroid with same id as a crossing.\n crossings = get_query('crossing', simulation)\n imported_ids = set(crossings.values_list('user_id', flat=True))\n for row in reader:\n id = int(row['id'])\n if not id in imported_ids:\n imported_ids.add(id)\n if id in user_id_set:\n to_be_updated.add(\n (id, row['name'], float(row['x']),\n float(row['y']))\n )\n else:\n to_be_created.append(\n Centroid(user_id=id, name=row['name'],\n x=float(row['x']), y=float(row['y']))\n )\n elif object_name == 'crossing':\n # Do not import crossing with same id as a centroid.\n centroids = get_query('centroid', simulation)\n imported_ids = set(centroids.values_list('user_id', flat=True))\n for row in reader:\n id = int(row['id'])\n if not id in imported_ids:\n imported_ids.add(id)\n if id in user_id_set:\n to_be_updated.add(\n (id, row['name'], float(row['x']),\n float(row['y']))\n )\n else:\n to_be_created.append(\n Crossing(user_id=id, name=row['name'],\n x=float(row['x']), y=float(row['y']))\n )\n elif object_name == 'function':\n for row in reader:\n id = int(row['id'])\n if not id in imported_ids:\n imported_ids.add(id)\n if id in user_id_set:\n to_be_updated.add(\n (id, row['name'], row['expression'])\n )\n else:\n to_be_created.append(\n Function(user_id=id, name=row['name'],\n expression=row['expression'])\n )\n elif object_name == 'link':\n for row in reader:\n id = int(row['id'])\n if not id in imported_ids:\n imported_ids.add(id)\n if id in user_id_set:\n to_be_updated.add(\n (id, row['name'],\n node_mapping[int(row['origin'])],\n node_mapping[int(row['destination'])],\n function_id_mapping[int(row['function'])],\n float(row['lanes']), float(row['length']),\n float(row['speed']), float(row['capacity']))\n )\n else:\n if 
int(row['origin']) in node_ids \\\n and int(row['destination']) in node_ids \\\n and int(row['function']) in function_ids:\n # Ignore the links with unidentified origin,\n # destination or function.\n to_be_created.append(\n Link(user_id=id, name=row['name'],\n origin=node_mapping[int(row['origin'])],\n destination=node_mapping[int(row['destination'])],\n vdf=function_mapping[int(row['function'])],\n lanes=float(row['lanes']),\n length=float(row['length']),\n speed=float(row['speed']),\n capacity=float(row['capacity']))\n )\n if to_be_updated:\n if object_name in ('centroid', 'crossing'):\n values = set(query.values_list('user_id', 'name', 'x', 'y'))\n elif object_name == 'function':\n values = set(query.values_list('user_id', 'name', 'expression'))\n elif object_name == 'link':\n values = set(query.values_list('user_id', 'name', 'origin',\n 'destination', 'vdf_id', 'lanes',\n 'length', 'speed', 'capacity'))\n # Find the instances that really need to be updated (the values have\n # changed).\n to_be_updated = to_be_updated.difference(values)\n if object_name in ('centroid', 'crossing', 'function'):\n # Update the objects (it would be faster to delete and re-create\n # them but this would require to also change the foreign keys of\n # the links).\n for values in to_be_updated:\n # Index 0 of values is the id column i.e. the user_id.\n instance = query.filter(user_id=values[0])\n if object_name in ('centroid', 'crossing'):\n instance.update(name=values[1], x=values[2], y=values[3])\n else: # Function\n instance.update(name=values[1], expression=values[2])\n elif object_name == 'link':\n # Delete the links and re-create them.\n ids = list(query.values_list('id', 'user_id'))\n # Create a mapping between the user ids and the ids.\n id_mapping = dict()\n for i in range(len(values)):\n id_mapping[ids[i][1]] = ids[i][0]\n # Retrieve the ids of the links to be updated with the mapping and\n # delete them.\n to_be_updated_ids = [id_mapping[values[0]]\n for values in to_be_updated]\n with connection.cursor() as cursor:\n chunk_size = 20000\n chunks = [\n to_be_updated_ids[x:x + chunk_size]\n for x in range(0, len(to_be_updated_ids), chunk_size)\n ]\n for chunk in chunks:\n # Delete the relations first.\n cursor.execute(\n \"DELETE FROM Network_Link \"\n \"WHERE link_id IN %s;\",\n [chunk]\n )\n cursor.execute(\n \"DELETE FROM Link \"\n \"WHERE id IN %s;\",\n [chunk]\n )\n # Create a mapping between the id and the instance of the\n # functions.\n function_mapping = dict()\n for function in functions:\n function_mapping[function.id] = function\n # Now, create the updated instances with the new values.\n to_be_created += [\n Link(user_id=values[0], name=values[1], origin=values[2],\n destination=values[3], vdf=function_mapping[values[4]],\n lanes=values[5], length=values[6], speed=values[7],\n capacity=values[8])\n for values in to_be_updated\n ]\n # Create the new objects in bulk.\n # The chunk size is limited by the MySQL engine (timeout if it is too big).\n chunk_size = 10000\n chunks = [to_be_created[x:x + chunk_size]\n for x in range(0, len(to_be_created), chunk_size)]\n # Remove the orphan instances.\n if object_name == 'function':\n query.model.objects \\\n .exclude(functionset__in=FunctionSet.objects.all()) \\\n .delete()\n else:\n query.model.objects.exclude(network__in=Network.objects.all()).delete()\n for chunk in chunks:\n # Create the new instances.\n query.model.objects.bulk_create(chunk, chunk_size)\n # Retrieve the newly created instances and add the many-to-many\n # relation.\n # Add 
the many-to-many relation.\n if object_name == 'function':\n new_instances = query.model.objects \\\n .exclude(functionset__in=FunctionSet.objects.all())\n for instance in new_instances:\n instance.functionset.add(parent)\n else:\n new_instances = query.model.objects \\\n .exclude(network__in=Network.objects.all())\n for instance in new_instances:\n instance.network.add(parent)\n simulation.has_changed = True\n simulation.save()\n return HttpResponseRedirect(\n reverse('metro:object_list', args=(simulation.id, object_name,))\n )\n except Exception as e:\n print(e)\n context = {\n 'simulation': simulation,\n 'object': object_name,\n }\n return render(request, 'metro_app/import_error.html', context)",
"def test_manufacturer_bulk_import(self):\n form = ManufacturerBulkImportForm(data={\"pk\": [ManufacturerImport.objects.first().pk]})\n\n self.assertTrue(form.is_valid())",
"def import_customers(ctx):\n load_csv(ctx, 'data/sample/customers.csv', 'res.partner')",
"def import_route_csv(cf, filename):\n try:\n with open(filename, 'r') as routedata:\n print(\"Reading import file...\")\n routes_to_add = []\n routereader = csv.DictReader(routedata)\n for route in routereader:\n other_fields = {}\n orig = route[\"ORIGIN\"]\n dest = route[\"DEST\"]\n other_fields[\"airline\"] = route[\"OP_UNIQUE_CARRIER\"]\n other_fields[\"no\"] = route[\"OP_CARRIER_FL_NUM\"]\n other_fields[\"duration\"] = route[\"CRS_ELAPSED_TIME\"]\n other_fields[\"distance\"] = route[\"DISTANCE\"]\n\n try:\n other_fields[\"price\"] = route[\"PRICE\"]\n\n except KeyError:\n pass\n routes_to_add.append([orig, dest, other_fields])\n\n cf.add_many_flights(routes_to_add)\n\n print(\"Successfully imported {} flights\"\n .format(len(routes_to_add)))\n routedata.close()\n\n except FileNotFoundError:\n print(\"ERROR: File Not found.\")\n\n except KeyError as e:\n print(\"ERROR: Field not found in CSV: {}\".format(e))",
"def action_import(self):\n ctx = self._context\n \n data = base64.b64decode(self.data)\n file_input = cStringIO.StringIO(data)\n file_input.seek(0)\n reader_info = []\n if self.delimeter:\n delimeter = str(self.delimeter)\n else:\n delimeter = ','\n reader = csv.reader(file_input, delimiter=delimeter,\n lineterminator='\\r\\n')\n try:\n reader_info.extend(reader)\n except Exception:\n raise exceptions.Warning(_(\"Not a valid file!\"))\n keys = reader_info[0]",
"def handle(self, *args, **options):\n run_tximport()",
"def purchase_import(self, cr, uid, ids, context=None):\n wizard_row = self.browse(cr, uid, ids)[0]\n if wizard_row.add_import == 'add':\n self.pool.get('purchase.import').write(cr, uid, [wizard_row.import_id.id], {'purchase_related_ids': [(4,wizard_row.purchase_id.id)]})\n import_id = wizard_row.import_id.id\n elif wizard_row.add_import == 'create':\n import_id = self.pool.get('purchase.import').create(cr, uid, self.prepare_purchase_import(cr, uid, wizard_row, context), context)\n return import_id",
"def public_transit_import(request, simulation):\n try:\n # Create a set with all existing OD pairs in the OD matrix.\n matrix = simulation.scenario.supply.pttimes\n pairs = get_query('public_transit', simulation)\n existing_pairs = set(pairs.values_list('p_id', 'q_id'))\n # Create a dictionary to map the centroid user ids with the centroid\n # objects.\n centroids = get_query('centroid', simulation)\n centroid_mapping = dict()\n centroid_id_mapping = dict()\n for centroid in centroids:\n centroid_mapping[centroid.user_id] = centroid\n centroid_id_mapping[centroid.user_id] = centroid.id\n # Convert the imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n if encoded_file.name.split(\".\")[-1] == 'tsv':\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n else:\n reader = csv.DictReader(tsv_file, delimiter=',')\n # For each imported OD pair, if the pair already exists in the OD Matrix,\n # it is stored to be updated, else it is stored to be created.\n to_be_updated = set()\n to_be_created = list()\n for row in reader:\n pair = (\n centroid_id_mapping[int(row['origin'])],\n centroid_id_mapping[int(row['destination'])]\n )\n if pair in existing_pairs:\n to_be_updated.add((*pair, float(row['travel time'])))\n else:\n to_be_created.append(\n Matrix(p=centroid_mapping[int(row['origin'])],\n q=centroid_mapping[int(row['destination'])],\n r=float(row['travel time']),\n matrices=matrix)\n )\n if to_be_updated:\n # Create a mapping between the values (p, q, r) and the ids.\n pair_values = set(pairs.values_list('id', 'p_id', 'q_id'))\n pair_mapping = dict()\n for pair in pair_values:\n pair_mapping[pair[1:]] = pair[0]\n # Find the pairs that really need to be updated (i.e. r is also\n # different).\n pair_values = set(pairs.values_list('p_id', 'q_id', 'r'))\n to_be_updated = to_be_updated.difference(pair_values)\n # Retrieve the ids of the pairs to be updated with the mapping and\n # delete them.\n to_be_updated_ids = [pair_mapping[pair[:2]] for pair in to_be_updated]\n with connection.cursor() as cursor:\n chunk_size = 20000\n chunks = [to_be_updated_ids[x:x + chunk_size]\n for x in range(0, len(to_be_updated_ids), chunk_size)]\n for chunk in chunks:\n cursor.execute(\n \"DELETE FROM Matrix \"\n \"WHERE id IN %s;\",\n [chunk]\n )\n # Create a mapping between the centroids ids and the centroid objects.\n centroid_id_mapping = dict()\n for centroid in centroids:\n centroid_id_mapping[centroid.id] = centroid\n # Now, create the updated pairs with the new values.\n to_be_created += [\n Matrix(p=centroid_id_mapping[pair[0]],\n q=centroid_id_mapping[pair[1]],\n r=pair[2],\n matrices=matrix)\n for pair in to_be_updated\n ]\n # Create the new OD pairs in bulk.\n # The chunk size is limited by the MySQL engine (timeout if it is too big).\n chunk_size = 20000\n chunks = [to_be_created[x:x + chunk_size]\n for x in range(0, len(to_be_created), chunk_size)]\n for chunk in chunks:\n Matrix.objects.bulk_create(chunk, chunk_size)\n return HttpResponseRedirect(reverse(\n 'metro:public_transit_view', args=(simulation.id,)\n ))\n except Exception as e:\n print(e)\n context = {\n 'simulation': simulation,\n 'object': 'public_transit',\n }\n return render(request, 'metro_app/import_error.html', context)",
"def test_csv_import_hotel_success(self):\n from django.contrib.messages import get_messages\n path = reverse(\"import-csv\")\n user = mixer.blend(User, is_staff=True, is_superuser=True)\n file = open(\"city.csv\")\n client = Client()\n client.force_login(user)\n client.post(path, {\"title\": \"city\", \"csv_file\": file})\n file = open(\"hotel.csv\")\n r = client.post(path, {\"title\": \"hotel\", \"csv_file\": file})\n messages = list(get_messages(r.wsgi_request))\n assert r.status_code == 200\n assert len(messages) == 1\n assert str(messages[0]) == \"Successfully Uploaded!\"",
"def test_validate_form_import_contacts(self):\n data_contacts = open(self.test_dir + 'contacts.csv',\n encoding='utf-8')\n with data_contacts as contacts:\n response = self.client.post(reverse(\"import_contacts\"), {\n 'contacts': contacts})\n data_contacts.close()\n self.assertEqual(response.status_code, 200)",
"def start_import(data_import):\n\tdata_import = frappe.get_doc(\"Data Import Beta\", data_import)\n\ti = Importer(data_import.reference_doctype, data_import=data_import)\n\treturn i.import_data()",
"def step_1(browser):\n browser.click_on(\"Import depuis eComptes\".decode('utf8'))"
] | [
"0.5680752",
"0.5669067",
"0.54800856",
"0.5428995",
"0.5351158",
"0.5288161",
"0.528",
"0.52469295",
"0.5222503",
"0.5153588",
"0.5078875",
"0.5049231",
"0.5039973",
"0.503053",
"0.5001831",
"0.49440625",
"0.49394995",
"0.4930786",
"0.49300444",
"0.49074003",
"0.48968795",
"0.48927018",
"0.48897788",
"0.48896685",
"0.48515993",
"0.48470333",
"0.48418307",
"0.48387858",
"0.48365855",
"0.48305926"
] | 0.57213557 | 0 |
Export price tiers and return count of products | def default_export_(self, fields):
Store = Pool().get('magento.website.store')
store = Store(Transaction().context.get('active_id'))
return {
'products_count': store.export_tier_prices_to_magento()
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def export_tier_prices_to_magento(self):\n instance = self.website.instance\n\n for mag_product_template in self.website.magento_product_templates:\n product_template = mag_product_template.template\n product = product_template.products[0]\n\n # Get the price tiers from the product if the product has a price\n # tier table else get the default price tiers from current store\n price_tiers = product_template.price_tiers or self.price_tiers\n\n price_data = []\n for tier in price_tiers:\n if hasattr(tier, 'product'):\n # The price tier comes from a product, then it has a\n # function field for price, we use it directly\n price = tier.price\n else:\n # The price tier comes from the default tiers on store,\n # we dont have a product on tier, so we use the current\n # product in loop for computing the price for this tier\n price = self.price_list.compute(\n None, product, product.list_price, tier.quantity,\n self.website.default_uom\n )\n\n price_data.append({\n 'qty': tier.quantity,\n 'price': float(price),\n })\n\n # Update stock information to magento\n with magento.ProductTierPrice(\n instance.url, instance.api_user, instance.api_key\n ) as tier_price_api:\n tier_price_api.update(\n mag_product_template.magento_id, price_data\n )\n\n return len(self.website.magento_product_templates)",
"def price_count(self):\n return self.price_set.count()",
"def execQ2():\n # Put columns together\n frame = pan.DataFrame(data, columns=['Product', 'Amount'] )\n amount = frame.groupby(['Product']).count()\n return amount",
"def count_products(list_products):\n for each_item in ADD_PRODUCTS: #This iterates in the dictionary\n num_of_products = list_products.count(each_item) #This count each product\n if num_of_products > 0:\n price = ADD_PRODUCTS[each_item]\n print num_of_products, each_item + \"(s)\", \"a\", (\"Q%.2f c/u\") % price",
"def _get_as_dict_count(self):\n counter = Counter()\n for product in self.products:\n counter[product.id] += 1\n return counter",
"def test_default_num_products(self):\n products = acme_report.generate_products()\n self.assertEqual(len(products), 30)",
"def pricing_export(request, simulation):\n # Get all tolls.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # To avoid conflict if two users export a file at the same time, we\n # generate a random name for the export file.\n seed = np.random.randint(10000)\n filename = '{0}/website_files/exports/{1}.tsv'.format(settings.BASE_DIR,\n seed)\n with codecs.open(filename, 'w', encoding='utf8') as f:\n writer = csv.writer(f, delimiter='\\t')\n # Get a dictionary with all the values to export.\n values = list()\n for toll in tolls:\n if toll.usertype:\n usertype_id = toll.usertype.user_id\n else:\n usertype_id = ''\n values.append([toll.location.user_id, toll.get_value_vector(),\n toll.get_time_vector(), usertype_id])\n # Write a custom header.\n writer.writerow(['link', 'values', 'times', 'traveler_type'])\n writer.writerows(values)\n\n with codecs.open(filename, 'r', encoding='utf8') as f:\n # Build a response to send a file.\n response = HttpResponse(f.read())\n response['content_type'] = 'text/tab-separated-values'\n response['Content-Disposition'] = 'attachement; filename=tolls.tsv'\n # We delete the export file to save disk space.\n os.remove(filename)\n return response",
"def write_quantities(quant_filename, products):\n\n with open(quant_filename, \"w\") as quant_file:\n quant_file.write(\"Quantity,SKU,Description(ignored)\\n\")\n for product in products:\n sku = product[\"SKU\"]\n description = \"{}: {}\".format(\n product[\"Product Name\"],\n cctools.html_to_plain_text(product[\"Teaser\"])\n )\n quant_file.write(\",\".join([\"0\", sku, '\"%s\"' % description]) + \"\\n\")",
"def import_countOrFPKMTable(\n self,filename_I):\n #import and format the data\n io = base_importData();\n io.read_tab(filename_I);\n countOrFPKMTable = self.format_countOrFPKMTable(io.data);\n return countOrFPKMTable;",
"def get_product_count(self):\n return self.products.count()",
"def quantities():\n # publish the modules\n return (\n SI,\n angle, area, energy, force, length, mass, power, pressure,\n speed, substance, temperature, time, volume\n )",
"def ad_number_ts(self):\n piv = self.filtered_data.pivot_table(values='price',index='date',aggfunc='count')\n piv.plot()",
"def count_at(self, price):\n return Library.functions.count_at(self._book, price)",
"def _grand_total(self):\n count = 0\n for product in self.products:\n count += product.price\n return count",
"def plotprice(self):\n plt.figure()\n plt.hist( self.pricetree[-1,:] )\n plt.title(\"price Distribution\") \n plt.show()",
"def get_product_kpis(self):\n kpi_1 = self.products[['products', 'client']].reset_index().groupby([\"client\", \"products\"]).agg(\n {\"index\": \"count\"}).reset_index().groupby(\"products\").agg(\n {\"index\": \"mean\"}).reset_index().rename(columns={\"index\": \"average_product_sold_per_user_kpi\"})\n kpi_2 = self.products[['products', 'payment_amount']].groupby(\"products\").agg(\n {\"payment_amount\": \"sum\"}).reset_index().rename(columns={\"payment_amount\": \"total_product_revenue_kpi\"})\n try:\n kpi_3 = self.products[['products', 'discount_amount']].query(\"discount_amount == discount_amount\")\n kpi_3['discount_amount'] = kpi_3['discount_amount'].apply(lambda x: float(x))\n kpi_3 = kpi_3.groupby(\"products\").agg({\"discount_amount\": \"sum\"}).reset_index().rename(\n columns={\"discount_amount\": \"total_product_discount_kpi\"})\n except:\n kpi_3 = self.products[['products']]\n kpi_3['total_product_discount_kpi'] = 0\n\n kpi_4 = self.products[['products', 'client']].groupby(\"products\").agg(\n {\"client\": lambda x: len(np.unique(x))}).reset_index().rename(columns={\"client\": \"total_product_cust_kpi\"})\n\n self.product_kpis = pd.DataFrame(self.products.groupby('products').count().reset_index()[['products']]).merge(\n kpi_1, on='products', how='left').merge(\n kpi_2, on='products', how='left').merge(\n kpi_3, on='products', how='left').merge(kpi_4, on='products', how='left').fillna(0)\n\n return self.product_kpis",
"def tally(self):\n return self.count",
"def output_data(df, store_name):\r\n \r\n # keep liquid and selected store\r\n df_store = df.loc[(df['Store Name']==store_name) & (df['Product Name'].str.contains('Liquid')),\r\n df.columns]\r\n \r\n # split the Product Name field into Product Type and Size\r\n df_store[['Product Type', 'Size']] = df_store['Product Name'].str.extract('(.*) - (.*)')\r\n \r\n # rank based on sales and keep the top 10\r\n df_store['Rank of Product & Scent by Store'] = \\\r\n df_store['Sale Value'].rank(method='first', ascending=False)\r\n df_out = df_store.loc[df_store['Rank of Product & Scent by Store'] <= 10, df_store.columns]\r\n \r\n # round the Sales Values to the nearest 10 value (ie 1913 becomes 1910)\r\n df_out['Sale Value'] = df_out['Sale Value'].round(-1)\r\n\r\n # output the file \r\n df_out.to_csv(f'.\\\\outputs\\\\output-2022-31-{store_name}.csv', index=False,\r\n columns=['Store Name', 'Rank of Product & Scent by Store', 'Scent Name', \r\n 'Size', 'Sale Value'])\r\n \r\n print(f'\\n*** SUCCESS: the file for {store_name} has been created.\\n')",
"def get_num_of_sales_per_customer_ids():\n\n # your code",
"def get_prices(self):\n pass",
"def count_buy(self):\n return Library.functions.count_buy(self._book)",
"def test_table_counts():\n number_of_test_run = 2 # Run the pipeline twice\n for i in range(number_of_test_run):\n dp = DataPipeline()\n dp.run()\n\n dp = DataPipeline()\n assert dp.get_product_count() == (500000,)\n assert dp.get_duplicate_count(from_table=\"products\") == (0,)\n assert dp.get_aggregate_table_result_count() == (222024, )\n 222024\n dp.close()",
"def product_count(self) -> int:\n return self._product_count",
"def inventory_report(products):\n unique_names = []\n total_price = 0\n total_weight = 0\n total_flammability = 0\n num_products = len(products)\n for i in range(num_products):\n if products[i].name not in unique_names:\n unique_names.append(products[i].name) \n total_price += products[i].price\n total_weight += products[i].weight\n total_flammability += products[i].flammability\n mean_price = total_price / num_products\n mean_weight = total_weight / num_products\n mean_flammability = total_flammability / num_products\n print('ACME CORPORATION OFFICIAL INVENTORY REPORT')\n print(f'Unique product names: {len(unique_names)}')\n print(f'Average price: {mean_price}')\n print(f'Average weight {mean_weight}')\n print(f'Average flammabilitiy {mean_flammability}')\n return unique_names, mean_price, mean_weight, mean_flammability",
"def get_count(self, search_items, location_type, csv=False, output_dir=None, extra_param=None):\n\n if not location_type:\n raise InvalidArgument(location_type)\n elif not isinstance(location_type, str):\n raise TypeError(\"location is not a string\")\n\n # Get data from api and create objects\n api_datas = self.call_api(search_items, \"probability\", \"count\", location_type, extra_param=extra_param)\n product = [ProbabilityCount(api_data) for api_data in api_datas]\n\n if csv:\n csv_format.to_csv(product, \"probability\", \"count\", location_type, output_dir=output_dir)\n\n logging.info(\"Probability Count Data Ready.\")\n\n return product",
"def getPrice(self):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.113 Safari/537.36\"}\n response = requests.get(self.__product_URL, headers=headers)\n #print(response.status_code)\n soup = BeautifulSoup(response.content, \"html.parser\")\n file = open(\"testproduct.html\", \"wb\")\n file.write(soup.prettify(\"utf-8\"))\n file.close()\n title = soup.find(\"span\", attrs={\"id\": \"productTitle\", \"class\": \"a-size-large\"}).string.strip()\n self.__product_title = title\n temp = soup.find_all(\"a\", attrs={\"class\": \"a-accordion-row a-declarative accordion-header\"})[1]\n price = temp.find(\"span\", attrs={\"class\": \"a-color-price\"}).text.strip()\n lst = list(price)\n lst.remove(\",\")\n price = int(float(\"\".join(lst)))\n self.__product_price = price\n #print(self.__product_price)",
"def import_thousand_data(dir_name, product_file, customer_file, rentals_file):\n counter = Counter()\n client = MongoDBConnection()\n with client:\n hp_norton_db = client.connection.rental\n products = hp_norton_db['products']\n customers = hp_norton_db['customers']\n rentals = hp_norton_db['rentals']\n exist_products = products.count()\n exist_customers = customers.count()\n exist_rentals = rentals.count()\n\n # 1. load the products collection\n start = time.time()\n LOGGER.info('Load the products collection')\n processed_records = read_thousand_csv_file(dir_name, product_file,\n products, counter)\n product_tuple = (processed_records, exist_products, products.count(),\n time.time() - start)\n # 2. load the customers collection\n start = time.time()\n LOGGER.info('Load the customers collection')\n processed_records = read_thousand_csv_file(dir_name, customer_file,\n customers, counter)\n customer_tuple = (processed_records, exist_customers, customers.count(),\n time.time() - start)\n # 3. load the rentals collection\n start = time.time()\n LOGGER.info('Load the rentals collection')\n processed_records = read_thousand_csv_file(dir_name, rentals_file,\n rentals, counter)\n rental_tuple = (processed_records, exist_rentals, rentals.count(),\n time.time() - start)\n LOGGER.info(f'Return product tuple {product_tuple}')\n LOGGER.info(f'Return customer tuple {customer_tuple}')\n LOGGER.info(f'Return rental tuple {rental_tuple}')\n LOGGER.info('Total record prcessed for all three files:'\n f'{counter.value}')\n return [product_tuple, customer_tuple]",
"def count_sell(self):\n return Library.functions.count_sell(self._book)",
"def amount_total(path, file_type):\n final_frame = clean_kdr_data(path, file_type)\n amount_work = final_frame.groupby(\"Date\")[\"Place\"].count()\n amount_work = amount_work.to_frame()\n amount_work.columns = [\"Freq\"]\n\n # Dropping outlier data\n amount_work = amount_work.drop([\"2019-01-04\"])\n amount_work = amount_work.drop([\"2019-01-07\"])\n\n return amount_work",
"def _calc_and_report(self) -> str:\n self.amount = self.amount + (len(self.servers) * TICK_COST)\n return \",\".join([str(len(s.users)) for s in self.servers]) or \"0\""
] | [
"0.668143",
"0.5804564",
"0.5580061",
"0.5539434",
"0.5399712",
"0.5346444",
"0.53069025",
"0.52848166",
"0.52742374",
"0.5270996",
"0.5269696",
"0.5262819",
"0.52455866",
"0.5227559",
"0.51672083",
"0.5146561",
"0.5138672",
"0.5123716",
"0.5082605",
"0.5078199",
"0.50692075",
"0.50540423",
"0.5046628",
"0.5045798",
"0.5034128",
"0.5030165",
"0.50223327",
"0.5021981",
"0.50089544",
"0.50075054"
] | 0.7045785 | 0 |
Type a PWIF amount then submit the form | def enter_pwif_amount(self, pwif_amount):
raise NotImplementedError | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def choose_pwif_amount(self, pwif_amount):\n raise NotImplementedError",
"def test_send_inaccurate_data(self):\n self.driver.get('http://psl-outbreak.herokuapp.com/report')\n self.driver.find_element_by_xpath(\"//select[@name='state_id']/option[text()='Casanare']\").click()\n self.driver.find_element_by_name('number_infections').send_keys(\"100\")\n self.driver.find_element_by_id('submit').click()",
"def send_mfa(\n self,\n form: object = None, # noqa: ARG002\n code: str = \"\",\n trusted_device: bool = True,\n ) -> None:\n el_otp = self._driver.find_element(By.CSS_SELECTOR, \"input[name=otc]\", timeout=5)\n el_otp.clear()\n el_otp.send_keys(code)\n\n el_verify = self._driver.find_element(By.CSS_SELECTOR, \"input[type=submit]\", timeout=5)\n if el_verify.accessible_name != \"Verify\":\n msg = f'{self.__class__.__name__}: Cannot find \"Verify\" button'\n raise IdpError(msg)\n\n if trusted_device:\n el_verify.click()\n\n self._stay_signed_in()",
"def Withdrawal(self):\n self.amount = (int)(raw_input (\" Enter your withdrawal amount \"))\n return self.amount",
"def get_user_input():\n return float(input('Your transaction amount please: '))",
"def test_send_correct_data(self):\n self.driver.get('http://psl-outbreak.herokuapp.com/report')\n self.driver.find_element_by_xpath(\"//select[@name='state_id']/option[text()='Casanare']\").click()\n self.driver.find_element_by_name('number_infections').send_keys(\"100\")\n self.driver.find_element_by_id('submit').click()",
"def pay_for_item(self, item):\n while self.amount < item.price:\n paid_amount = float(input(f\"Pay €{round((item.price - self.amount), 2)} : \"))\n if paid_amount <= 0:\n custom_log(\"Invalid amount entered.\", MSG_ERROR)\n continue\n self.amount = self.amount + paid_amount",
"def get_tx_amount():\n return float(input(\"Enter Transaction Amount: \"))",
"def _request_donation():\n\n amt = _get_input(\"Enter a donation amount:\\n> \", _quit_responses, allow_new=True)\n if amt in _quit_responses:\n return\n else:\n return float(amt)",
"def test_submit_iso20022_payment_instruction(self):\n pass",
"def sendOTP(code):\n # Modify the code here to change from print to any output \n print(\"Your OTP is \" + code + \". Kindly do not share it with anyone\")",
"def test_number(self):\n form_data = self.form_data('CDr=cpz&Z&a!cuP-nAQe')\n form = self.form(data=form_data, user=self.u)\n self.assertFalse(form.is_valid())",
"def input_postal_code(self, postal_code):\n self.send_keys_to_element(self.postalcode_textbox_selector, postal_code)",
"def submit(self):\n data = self.getFSNDataDict()\n if data != []:\n MOSES.addToPiggyBank(data, self.user_id, self.password)",
"def post_amount_input(message, bot):\n # print(message.text)\n try:\n chat_id = message.chat.id\n amount_entered = message.text\n if amount_entered=='Cancel':\n raise Exception(\"Cancelling record!!\")\n amount_value = validate_entered_amount(amount_entered) # validate\n if amount_value == 0: # cannot be $0 spending\n raise Exception(\"Spent amount has to be a non-zero number.\")\n\n user_bills['cost'] = float(amount_value)\n # print(user_bills)\n # print(user_bills['cost'])\n\n user_bills['timestamp'] = datetime.now()\n # print(user_bills['timestamp'])\n # print(count)\n # print(user_çcbills['number'])\n\n user_history = db.user_bills.find({'user_telegram_id' : message.chat.id})\n maximum = 0\n for rec in user_history:\n maximum = max(maximum, rec['number'])\n # print(maximum)\n # print('done')\n\n # global count_\n user_bills['number'] = maximum+1\n # count_ += 1\n\n get_sharing_details(message, bot)\n\n except Exception as e:\n bot.reply_to(message,str(e))\n display_text = \"\"\n for c in commands: # generate help text out of the commands dictionary defined at the top\n display_text += \"/\" + c + \": \"\n display_text += commands[c] + \"\\n\"\n bot.send_message(chat_id, 'Please select a menu option from below:')\n bot.send_message(chat_id, display_text)",
"def type_amount(self, amount):\n\n\t\twith allure.step(\"Type amount\"):\n\t\t\telement = Element(driver=self.driver,\n\t\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t\t locator=BillPayPageLocator.AMOUNT_INPUT)\n\t\t\telement.write(amount)\n\t\t\treturn None",
"def i_submit_the_form_with_valid_data():\n #driver.find_element_by_name(\"name_field\").sendKeys(data.get(1).get(1))\n #driver.find_element_by_name(\"address_field\").sendKeys(data.get(2).get(1))\n #driver.find_element_by_name(\"postcode_field\").sendKeys(data.get(3).get(1))\n #driver.find_element_by_name(\"email_field\").sendKeys(data.get(4).get(1))\n\n driver.find_element_by_name(\"name_field\").send_keys(\"Chris\")\n driver.find_element_by_name(\"address_field\").send_keys(\"Galaxy\")\n driver.find_element_by_name(\"postcode_field\").send_keys(\"P2D F3F\")\n driver.find_element_by_name(\"email_field\").send_keys(\"[email protected]\")",
"def do_submit(self, price_float, volume_float):\r\n raise NotImplementedError()",
"def withdraw_money():\n print(\"\\n\")\n print(messages.account_credentials)\n u_id = pyip.inputInt(\"Your Id: \", greaterThan=0)\n password = pyip.inputPassword(\"Your Password: \")\n\n credentials = {\"id\":u_id, \"password\":password}\n result = BankOperationsBackend.withdraw_money(credentials)\n start_again() if result else BankOperationsUi.withdraw_money()",
"def form(update, context):\n update.message.reply_text(\"\"\"Fill out the form 👇 👇 👇\n https://forms.gle/VREhdtCNqJ6rZNfQ7\"\"\")",
"def fill_out_bill_payment_form(self, payee, account, amount):\n\n\t\t# 1 Payee Name:\n\t\tpayee_full_name = payee.FIRST_NAME + \" \" + payee.LAST_NAME\n\t\tself.type_payee_name(payee_full_name)\n\t\twith allure.step(\"Verify payee name\"):\n\t\t\tprint('payee name: {}'.format(payee_full_name))\n\t\t\tassert payee_full_name == self.payee_name()\n\n\t\t# 2 Address:\n\t\tself.type_address(payee.ADDRESS)\n\t\twith allure.step(\"Verify payee address\"):\n\t\t\tprint('payee address: {}'.format(payee.ADDRESS))\n\t\t\tassert payee.ADDRESS == self.address()\n\n\t\t# 3 City:\n\t\tself.type_city(payee.CITY)\n\t\twith allure.step(\"Verify payee city\"):\n\t\t\tprint('payee city: {}'.format(payee.CITY))\n\t\t\tassert payee.CITY == self.city()\n\n\t\t# 4 State:\n\t\tself.type_state(payee.STATE)\n\t\twith allure.step(\"Verify payee state\"):\n\t\t\tprint('payee state: {}'.format(payee.STATE))\n\t\t\tassert payee.STATE == self.state()\n\n\t\t# 5 Zip Code:\n\t\tself.type_zip_code(payee.ZIP_CODE)\n\t\twith allure.step(\"Verify payee zip code\"):\n\t\t\tprint('payee zip code: {}'.format(payee.ZIP_CODE))\n\t\t\tassert payee.ZIP_CODE == self.zip_code()\n\n\t\t# 6 Phone:\n\t\tself.type_phone(payee.PHONE)\n\t\twith allure.step(\"Verify payee phone\"):\n\t\t\tprint('payee phone: {}'.format(payee.PHONE))\n\t\t\tassert payee.PHONE == self.phone()\n\n\t\t# 7 Account:\n\t\tself.type_account(account)\n\t\twith allure.step(\"Verify payee account\"):\n\t\t\tprint('payee account: {}'.format(account))\n\t\t\tassert account == self.account()\n\n\t\t# 8 Verify Account:\n\t\tself.type_verify_account(account)\n\t\twith allure.step(\"Verify 'Verify Account'\"):\n\t\t\tassert account == self.verify_account()\n\n\t\t# 9 Amount\n\t\tself.type_amount(amount)\n\t\twith allure.step(\"Verify amount\"):\n\t\t\tprint('payment amount: {}'.format(amount))\n\t\t\tassert amount == self.amount()\n\n\t\treturn None",
"def deposit(account):\r\n print(\"Your account balance is $\", format(account, \"0.2f\"), sep='')\r\n while True:\r\n try:\r\n deposit_amount = int(input(\"Enter deposit amount. $\"))\r\n break\r\n except ValueError:\r\n print(\"Error. Must be a whole number.\")\r\n account += deposit_amount\r\n print(\"Your new account balance is $\", format(account, \"0.2f\"), sep='')",
"def input_user_pass(self, user_pass):\n self.locate_element_by_css_selector(PASSWORD_SELECTOR).send_keys(user_pass)",
"def click_submit_payment_button(self):\n self.click(self.submit_payment_locator)\n time.sleep(2)",
"def signin():\n usernamefield = driver.find_element_by_xpath('//input[@aria-label=\"Phone number, username, or email\"]')\n usernamefield.send_keys(username)\n pwordfield = driver.find_element_by_xpath('//input[@aria-label=\"Password\"]')\n pwordfield.send_keys(password)\n sleep(2)\n pwordfield.submit()",
"def prompt_user_money_to_withdrawl():\n print('What amount of money do you want to withdrawl?:')\n return input()",
"def prompt_user_money_to_deposit():\n print('What amount of money do you want to deposit?:')\n return input()",
"def thank_you():\n donor = get_donor()\n if donor != 'q':\n donation = get_donation()\n add_donation(donor, donation, DONORS)\n display_email(donor, donation)\n handle_user_input(get_user_input())",
"def payment_transaction():\n print(\"Please insert coins.\")\n payment = dict(quarters=int(input(\"How many quarters?:\")) * 0.25,\n dime=int(input(\"How many dimes?: \")) * 0.10,\n nickles=int(input(\"How many nickles?: \")) * 0.05,\n pennies=int(input(\"How many pennies?: \")) * 0.01\n )\n\n return round(sum(payment.values()), 2)",
"def Ingresar(frm):\r\n #se asigna el valor de la variable seguir\r\n \r\n #se le pide al usuario que ingresa la altura\r\n posinicial=float(frm.txtposinicial.GetValue())\r\n #posinicial=raw_input(\"Ingrese la altura desde donde se va dejar caer (en metros): \")\r\n #se condiona para que solo permite ingresar numero positivos \r\n \r\n posinicial=float(posinicial)\r\n if posinicial>0:\r\n posinicial=posinicial+2\r\n return posinicial"
] | [
"0.63574934",
"0.5679091",
"0.5629604",
"0.5536104",
"0.54938644",
"0.5485624",
"0.5392728",
"0.538577",
"0.5369903",
"0.53328675",
"0.52951163",
"0.52555466",
"0.52504057",
"0.5243308",
"0.5236295",
"0.52079064",
"0.5151744",
"0.51280564",
"0.50812286",
"0.50660056",
"0.5040184",
"0.5035966",
"0.503202",
"0.50114006",
"0.49882412",
"0.49863794",
"0.498075",
"0.49682167",
"0.49621215",
"0.49584287"
] | 0.6748572 | 0 |
For every argument in kwargs sets a WITH_FOO if FOO=True or a WITHOUT_FOO if FOO=False | def set_with_options(self, **kwargs) -> None:
for k, v in kwargs.items():
assert not k.startswith("WITH_"), "Invalid WITH/WITHOUT options name " + k
assert not k.startswith("WITHOUT_"), "Invalid WITH/WITHOUT options name " + k
assert isinstance(v, bool)
self._with_options[k] = v | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def kwargs(kwargs):\n run_kwargs(kwargs)",
"def _UpdateWithKwargs(base, **kwargs):\n conflicts = set(kwargs.keys()) & set(base.keys())\n if conflicts:\n raise GanetiApiError(\"Required fields can not be specified as\"\n \" keywords: %s\" % \", \".join(conflicts))\n\n base.update((key, value) for key, value in kwargs.items()\n if key != \"dry_run\")",
"def set_kwargs(self, kwargs):\n accept = {\"command\" : None,\n \"clicked\" : False,\n \"unclick\" : False,\n \"active\" : True,\n \"key_bindings\" : []}\n for kwarg in kwargs:\n if kwarg in accept:\n accept[kwarg] = kwargs[kwarg]\n self.__dict__.update(accept)",
"def kwargsdec(f):\n def wrapper(**kwargs):\n args = inspect.getargspec(f).args\n return f(**{ k: kwargs[k] for k in args})\n return wrapper",
"def _merge_kwargs(**kwargs):\n overlap = set(kwargs) & set(partial_kwargs)\n if overlap:\n raise ValueError(\n f\"Cannot override the following kwargs: {overlap}.\\n\"\n f\"This is because they were already set at the time this \"\n f\"partial class was defined.\"\n )\n merged_kwargs = {**partial_kwargs, **kwargs}\n return merged_kwargs",
"def _get_argparse_kwargs(self, group, action='store_true', **kwargs):\n\n kwargs = super(BoolOpt, self)._get_argparse_kwargs(group, **kwargs)\n # type has no effect for BoolOpt, it only matters for\n # values that came from config files\n if 'type' in kwargs:\n del kwargs['type']\n\n # metavar has no effect for BoolOpt\n if 'metavar' in kwargs:\n del kwargs['metavar']\n\n kwargs['action'] = action\n\n return kwargs",
"def _post_processing(\n kwargs, skip_translate, invalid\n): # pylint: disable=unused-argument\n # If any defaults were not expicitly passed, add them\n for item in DEFAULTS:\n if item not in kwargs:\n kwargs[item] = DEFAULTS[item]",
"def permitted_kwargs(permitted):\n def _wraps(func):\n @functools.wraps(func)\n def _inner(name, description, kwargs):\n bad = [a for a in kwargs.keys() if a not in permitted]\n if bad:\n raise OptionException('Invalid kwargs for option \"{}\": \"{}\"'.format(\n name, ' '.join(bad)))\n return func(description, kwargs)\n return _inner\n return _wraps",
"def filter_extra_accepted_kwargs(fun, kwargs, skip_positional=0):\n sig = inspect.signature(fun)\n # the params from signature with up to skip_positional filtered out\n # (less only if there is not enough of positional args)\n params = [(name, param) for i, (name, param) in enumerate(sig.parameters.items())\n if i >= skip_positional or param.kind not in\n [inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.POSITIONAL_ONLY]]\n extra = [\n name for (name, param) in params\n if param.kind in [inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY]\n ]\n return {name: value for name, value in kwargs.items() if name in extra}",
"def _filter_kwargs(names, dict_):\n return {k: v for k, v in dict_.items() if k in names and v is not None}",
"def add_kwargs():\n pass",
"def update_kwargs(self, module: dict, flat: dict):\n if \"kwargs\" in module:\n module[\"kwargs\"] = self.replace_with_comp(module[\"kwargs\"], flat)\n else:\n module[\"kwargs\"] = {}",
"def include_extra_kwargs(self, kwargs, extra_kwargs):\n if extra_kwargs.get(\"read_only\", False):\n for attr in [\n \"required\",\n \"default\",\n \"allow_blank\",\n \"allow_null\",\n \"min_length\",\n \"max_length\",\n \"min_value\",\n \"max_value\",\n \"validators\",\n \"queryset\",\n ]:\n kwargs.pop(attr, None)\n\n if extra_kwargs.get(\"default\") and kwargs.get(\"required\") is False:\n kwargs.pop(\"required\")\n\n if extra_kwargs.get(\"read_only\", kwargs.get(\"read_only\", False)):\n extra_kwargs.pop(\n \"required\", None\n ) # Read only fields should always omit the 'required' argument.\n\n kwargs.update(extra_kwargs)\n\n return kwargs",
"def _helper_parameters(func, args=(), kwargs=None, onlykeys=False, onlyused=False):\n if kwargs is None:\n kwargs = {}\n # params = list(inspect.signature(self.__init__).parameters.keys())\n params = inspect.getargspec(func).args[1:] # TODO replace deprecated getargspec to work with py2 and py3, perhaps by getfullargspec\n\n if onlykeys and not onlyused: # only add to keywords\n covered = 0 # simulate no args\n else:\n covered = len(args)\n\n if onlyused and onlykeys: # only add modified by user\n adds = [(True if i < covered or key in kwargs else False) for i, key in\n enumerate(params)]\n # add keys from args\n for i, val in enumerate(args):\n kwargs[params[i]] = val\n elif onlyused:\n adds = [(True if i >= covered and key in kwargs else False) for i, key\n in\n enumerate(params)]\n else:\n adds = [(True if i >= covered else False) for i, key in\n enumerate(params)]\n return adds, params, kwargs",
"def token_kwargs(bits, parser, support_legacy=False):\n if not bits:\n return {}\n match = kwarg_re.match(bits[0])\n kwarg_format = match and match.group(1)\n if not kwarg_format:\n if not support_legacy:\n return {}\n if len(bits) < 3 or bits[1] != 'as':\n return {}\n\n kwargs = {}\n while bits:\n if kwarg_format:\n match = kwarg_re.match(bits[0])\n if not match or not match.group(1):\n return kwargs\n key, value = match.groups()\n del bits[:1]\n else:\n if len(bits) < 3 or bits[1] != 'as':\n return kwargs\n key, value = bits[2], bits[0]\n del bits[:3]\n kwargs[key] = parser.compile_filter(value)\n if bits and not kwarg_format:\n if bits[0] != 'and':\n return kwargs\n del bits[:1]\n return kwargs",
"def _clean_kwargs(keep_name=False, **kwargs):\n if \"name\" in kwargs and not keep_name:\n kwargs[\"name_or_id\"] = kwargs.pop(\"name\")\n\n return __utils__[\"args.clean_kwargs\"](**kwargs)",
"def _clean_kwargs(keep_name=False, **kwargs):\n if \"name\" in kwargs and not keep_name:\n kwargs[\"name_or_id\"] = kwargs.pop(\"name\")\n\n return __utils__[\"args.clean_kwargs\"](**kwargs)",
"def _copy_kwargs(self, **kwargs):\n ns = self.__dict__\n for attr, kw in {'_engine': 'engine', '_format': 'format'}.items():\n assert kw not in kwargs\n if attr in ns:\n kwargs[kw] = ns[attr]\n return super()._copy_kwargs(**kwargs)",
"def set(self, **kwargs):\n for key in kwargs:\n if key in self.bool_params:\n self.bool_params[key] = kwargs[key]\n elif key in self.int_params:\n self.int_params[key] = kwargs[key]\n elif key in self.str_params:\n self.str_params[key] = kwargs[key]\n elif key in self.float_params:\n self.float_params[key] = kwargs[key]\n else:\n raise RuntimeError('MOPAC calculator: unknown keyword: ' + key)",
"def test_020_kwargs(self):\n caller = self.get_caller([KwargsTaskOverride])\n self.assertEqual([\"A\", \"B\"], caller(\"A\", \"B\"))",
"def to_kwargs(f, *args, **kwargs):\n\n s = inspect.getargspec(f)\n defaults = s.defaults or []\n default_args = s.args[-len(defaults):]\n\n kw = {}\n kw.update(zip(default_args, defaults))\n kw.update(kwargs)\n kw.update(zip(s.args, args))\n return kw",
"def _folium_kwargs(self):",
"def appropriate_kwargs(kwargs, func):\n sig = inspect.signature(func)\n filter_keys = [\n param.name\n for param in sig.parameters.values()\n if param.kind == param.POSITIONAL_OR_KEYWORD and param.name in kwargs.keys()\n ]\n appropriate_dict = {filter_key: kwargs[filter_key] for filter_key in filter_keys}\n return appropriate_dict",
"def get_filter_kwargs(self, *_, **__) -> Dict[str, Any]:",
"def _clean_kwargs(self, kwargs, fn):\n # Do not do the cleaning if server config\n # doesnt ask to ignore\n if not self.server.IGNORE_UNEXPECTED_KWARGS:\n return kwargs\n\n expected_kwargs = set(inspect.getargspec(fn).args)\n got_kwargs = set(kwargs.keys())\n unexpected_kwargs = got_kwargs - expected_kwargs\n for k in unexpected_kwargs:\n del kwargs[k]\n\n return kwargs",
"def inflate_kwargs(items, kwargs):\n\n return {k: inflate(items, v) for k, v in kwargs.items()}",
"def apply_generic_arg_defaults(t_args, t_kwargs):\n\n # don't worry about broken settings, validate_generic_args() will\n # take care of them\n\n if 'where_str' not in t_kwargs:\n t_kwargs['where_str'] = None\n\n if 'where_args' not in t_kwargs:\n t_kwargs['where_args'] = []\n\n if 'more_str' not in t_kwargs:\n t_kwargs['more_str'] = None\n\n if 'more_args' not in t_kwargs:\n t_kwargs['more_args'] = []",
"def _build_kwargs(element, plugin):\n lookup_table = PrefixLookupDict(plugin['args'])\n kwargs = {}\n for attr in element.attributes:\n if attr.name in lookup_table:\n kwargs[lookup_table[attr.name]] = attr.value\n element.removeAttribute(attr.name)\n return kwargs",
"def filter_args(prefix=None, **kwargs):\n if prefix is None:\n p = ''\n else:\n p = prefix + '_'\n valid_args = ('no_norm_mean', 'norm_mean', 'norm_var', 'left_context', 'right_context')\n\n d = dict((k, kwargs[p+k])\n for k in valid_args if p+k in kwargs)\n\n neg_args1 = ('no_norm_mean',)\n neg_args2 = ('norm_mean',)\n\n for a,b in zip(neg_args1, neg_args2):\n d[b] = not d[a]\n del d[a]\n\n return d",
"def test_onearg_and_keyword(self):\n varargs = (12,)\n kwargs = {'default' : 13}\n method = getattr(self.foo,'f_onearg_and_default')\n var_dict = reassign_function_arguments(method, varargs, kwargs)\n self.assert_(var_dict['arg1'] == 12)\n self.assert_(var_dict['default'] == 13)\n self.assert_(len(var_dict) == 2)"
] | [
"0.64105",
"0.6144896",
"0.6131709",
"0.60270417",
"0.5965363",
"0.59641284",
"0.59275097",
"0.59046793",
"0.58989435",
"0.58380234",
"0.58345735",
"0.5807486",
"0.5805582",
"0.57605976",
"0.5753322",
"0.57189995",
"0.57189995",
"0.5716972",
"0.5685885",
"0.5642669",
"0.5620802",
"0.5600136",
"0.5582606",
"0.5573927",
"0.5567426",
"0.5560789",
"0.5556179",
"0.5550514",
"0.5546132",
"0.5543144"
] | 0.7388897 | 0 |
Strip all ELF binaries to reduce the size of the benchmark directory | def strip_elf_files(self, benchmark_dir) -> None:
self.info("Stripping all ELF files in", benchmark_dir)
self.run_cmd("du", "-sh", benchmark_dir)
for root, dirnames, filenames in os.walk(str(benchmark_dir)):
for filename in filenames:
file = Path(root, filename)
if file.suffix == ".dump":
# TODO: make this an error since we should have deleted them
self.warning("Will copy a .dump file to the FPGA:", file)
# Try to reduce the amount of copied data
self.maybe_strip_elf_file(file)
self.run_cmd("du", "-sh", benchmark_dir) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clean_flatbuffer_binaries():\n for element in FLATBUFFERS_CONVERSION_DATA:\n for json in element.input_files:\n path = processed_json_path(json)\n if os.path.isfile(path):\n os.remove(path)",
"def clean():\n clean_flatbuffer_binaries()\n clean_webp_textures()",
"def scrub():\n\n\tlocal(\"rm -fr dist build\")\n\tlocal(\"find . -name \\\"*.pyc\\\" -exec rm '{}' ';'\")",
"def _clean_bins():\n rmtree(LIBS_DIR)\n rmtree(BINS_DIR)\n rmtree(HEADERS_DIR)",
"def _strip_skull_ROBEX(paths, i_process, out_dir=None):\n robex = os.path.join(DATAROOT, 'ROBEX/runROBEX.sh')\n\n # Start timer\n t_start = time()\n\n # Iterate over all files\n for i, path in enumerate(paths):\n p_split = path.split('/')\n # Select directory to save the result\n d = ('/').join(p_split[:-1]) if out_dir is None else out_dir\n # Build the new file name\n f_name = p_split[-1].split('.nii')[0] + \"_stripped.nii\"\n if path.endswith('.gz'):\n f_name += '.gz'\n # Set together to build the path where the stripped file is saved\n save_path = os.path.join(d, f_name)\n if not os.path.exists(save_path):\n # Run ROBEX\n result = subprocess.run(\n [robex, path, save_path], capture_output=True, text=True\n )\n result.check_returncode()\n print(f\"Process {i_process} finished {i + 1} of\"\n f\" {len(paths)} in {time() - t_start:.2f}s\")",
"def cleanup_intermediate_files():\n\n dirs = (DIR_PAGE, DIR_SRGB, DIR_VTI, DIR_TIFF, DIR_BACK, DIR_TEXT)\n map(lambda dir: shutil.rmtree(os.path.join(cwd, dir)) , dirs)",
"def clean(self):\n actual_output_file = path.splitext(self.source_name)[0] + \".actual\"\n if path.exists(self.binary_name):\n os.unlink(self.binary_name)\n if path.exists(actual_output_file):\n os.unlink(actual_output_file)",
"def _clean_up_optimization():\n for (root, dirs, files) in walk(TEMP_MODULES_DIR_PATH, topdown=False):\n for file in files:\n if file.startswith(\"__temp_\"):\n remove(f\"{root}/{file}\")\n try:\n rmdir(root)\n except OSError:\n G.warn_(f\"Unidentified file found in temporary directory: {root}\")",
"def RemoveBuildArtifacts():\n bin_dir = shell_interfaces.GetStdout(\n 'bazel info --config lcov bazel-bin').strip()\n if os.path.exists(bin_dir):\n shutil.rmtree(os.path.dirname(bin_dir))",
"def strip_dsym(self, platfiles):\n\n #\n # .dSYM directories are contain detached debugging information and\n # should be completely removed when the \"strip\" option is specified.\n #\n if self.dry_run:\n return platfiles\n for dirpath, dnames, fnames in os.walk(self.appdir):\n for nm in list(dnames):\n if nm.endswith('.dSYM'):\n print(\"removing debug info: %s/%s\"%(dirpath, nm))\n shutil.rmtree(os.path.join(dirpath, nm))\n dnames.remove(nm)\n return [file for file in platfiles if '.dSYM' not in file]",
"def removeRedundantFiles(workdir, outputfiles=[]):\n\n logger.info(\"Removing redundant files prior to log creation\")\n\n workdir = os.path.abspath(workdir)\n\n dir_list = [\"AtlasProduction*\",\n \"AtlasPoint1\",\n \"AtlasTier0\",\n \"buildJob*\",\n \"CDRelease*\",\n \"csc*.log\",\n \"DBRelease*\",\n \"EvgenJobOptions\",\n \"external\",\n \"fort.*\",\n \"geant4\",\n \"geomDB\",\n \"geomDB_sqlite\",\n \"home\",\n \"o..pacman..o\",\n \"pacman-*\",\n \"python\",\n \"runAthena*\",\n \"share\",\n \"sources.*\",\n \"sqlite*\",\n \"sw\",\n \"tcf_*\",\n \"triggerDB\",\n \"trusted.caches\",\n \"workdir\",\n \"*.data*\",\n \"*.events\",\n \"*.py\",\n \"*.pyc\",\n \"*.root*\",\n \"JEM\",\n \"tmp*\",\n \"*.tmp\",\n \"*.TMP\",\n \"MC11JobOptions\",\n \"scratch\",\n \"jobState-*-test.pickle\",\n \"*.writing\",\n \"pwg*\",\n \"pwhg*\",\n \"*PROC*\",\n \"madevent\",\n \"HPC\",\n \"objectstore*.json\",\n \"saga\",\n \"radical\",\n \"ckpt*\"]\n\n # remove core and pool.root files from AthenaMP sub directories\n try:\n cleanupAthenaMP(workdir, outputfiles)\n except Exception, e:\n print(\"Failed to execute cleanupAthenaMP(): %s\" % (e))\n\n # explicitly remove any soft linked archives (.a files) since they will be dereferenced by the tar command (--dereference option)\n matches = []\n import fnmatch\n for root, dirnames, filenames in os.walk(workdir):\n for filename in fnmatch.filter(filenames, '*.a'):\n matches.append(os.path.join(root, filename))\n for root, dirnames, filenames in os.walk(os.path.dirname(workdir)):\n for filename in fnmatch.filter(filenames, 'EventService_premerge_*.tar'):\n matches.append(os.path.join(root, filename))\n if matches != []:\n for f in matches:\n remove(f)\n # else:\n # print(\"Found no archive files\")\n\n # note: these should be partitial file/dir names, not containing any wildcards\n exceptions_list = [\"runargs\", \"runwrapper\", \"jobReport\", \"log.\"]\n\n to_delete = []\n for _dir in dir_list:\n files = glob(os.path.join(workdir, _dir))\n exclude = []\n\n if files:\n for exc in exceptions_list:\n for f in files:\n if exc in f:\n exclude.append(os.path.abspath(f))\n\n _files = []\n for f in files:\n if not f in exclude:\n _files.append(os.path.abspath(f))\n to_delete += _files\n\n exclude_files = []\n for of in outputfiles:\n exclude_files.append(os.path.join(workdir, of))\n for f in to_delete:\n if not f in exclude_files:\n remove(f)\n\n # run a second pass to clean up any broken links\n broken = []\n for root, dirs, files in os.walk(workdir):\n for filename in files:\n path = os.path.join(root, filename)\n if os.path.islink(path):\n target_path = os.readlink(path)\n # Resolve relative symlinks\n if not os.path.isabs(target_path):\n target_path = os.path.join(os.path.dirname(path), target_path)\n if not os.path.exists(target_path):\n broken.append(path)\n else:\n # If it's not a symlink we're not interested.\n continue\n\n if broken:\n for p in broken:\n remove(p)\n\n return 0",
"def cleanup():\r\n compiledir = theano.config.compiledir\r\n for directory in os.listdir(compiledir):\r\n file = None\r\n try:\r\n try:\r\n filename = os.path.join(compiledir, directory, \"key.pkl\")\r\n file = open(filename, 'rb')\r\n #print file\r\n try:\r\n keydata = cPickle.load(file)\r\n for key in list(keydata.keys):\r\n have_npy_abi_version = False\r\n have_c_compiler = False\r\n for obj in flatten(key):\r\n if isinstance(obj, numpy.ndarray):\r\n #Reuse have_npy_abi_version to\r\n #force the removing of key\r\n have_npy_abi_version = False\r\n break\r\n elif isinstance(obj, basestring):\r\n if obj.startswith('NPY_ABI_VERSION=0x'):\r\n have_npy_abi_version = True\r\n elif obj.startswith('c_compiler_str='):\r\n have_c_compiler = True\r\n elif (isinstance(obj, (theano.gof.Op, theano.gof.Type)) and\r\n hasattr(obj, 'c_code_cache_version')):\r\n v = obj.c_code_cache_version()\r\n if v not in [(), None] and v not in key[0]:\r\n #Reuse have_npy_abi_version to\r\n #force the removing of key\r\n have_npy_abi_version = False\r\n break\r\n\r\n if not have_npy_abi_version or not have_c_compiler:\r\n try:\r\n #This can happen when we move the compiledir.\r\n if keydata.key_pkl != filename:\r\n keydata.key_pkl = filename\r\n keydata.remove_key(key)\r\n except IOError, e:\r\n _logger.error(\r\n \"Could not remove file '%s'. To complete \"\r\n \"the clean-up, please remove manually \"\r\n \"the directory containing it.\",\r\n filename)\r\n if len(keydata.keys) == 0:\r\n shutil.rmtree(os.path.join(compiledir, directory))\r\n\r\n except EOFError:\r\n _logger.error(\r\n \"Could not read key file '%s'. To complete \"\r\n \"the clean-up, please remove manually \"\r\n \"the directory containing it.\",\r\n filename)\r\n except IOError:\r\n _logger.error(\r\n \"Could not clean up this directory: '%s'. To complete \"\r\n \"the clean-up, please remove it manually.\",\r\n directory)\r\n finally:\r\n if file is not None:\r\n file.close()",
"def _clean_native_build():\n rmtree(BUILD_DIR)",
"def hxlclean():\n run_script(hxlclean_main)",
"def strip_skull_ROBEX(paths, out_dir=None, num_cpus=None):\n\n # Check if robex is installed\n if not os.path.exists(os.path.join(DATAROOT, 'ROBEX/runROBEX.sh')):\n raise RuntimeError(f\"ROBEX not found at {os.path.join(DATAROOT, 'ROBEX/runROBEX.sh')}, \"\n \"download and install it from \"\n \"https://www.nitrc.org/projects/robex\")\n\n if isinstance(paths, str):\n paths = [paths]\n\n # Set number of cpus used\n num_cpus = min(os.cpu_count(), 24) if num_cpus is None else num_cpus\n\n # Split list into batches\n batches = [list(p) for p in np.array_split(paths, num_cpus) if len(p) > 0]\n print(f\"Skull stripping is using {len(batches)} cpu cores\")\n\n # Start multiprocessing\n for i, batch in enumerate(batches):\n p = multiprocessing.Process(\n target=_strip_skull_ROBEX, args=(batch, i, out_dir,))\n p.start()",
"def space_cleaning():\n for file in os.listdir(\".\"):\n if file.endswith(\".png\"):\n os.remove(file)",
"def clean_webp_textures():\n for webp in PNG_TEXTURES['output_files']:\n if os.path.isfile(webp):\n os.remove(webp)",
"def cleanUpTemporaryFiles(options):\n os.system(\"rm \"+options.output_directory_per_run+\"/*.abundance\")\n os.system(\"rm \"+options.output_directory_per_run+\"/*.phasing_score\")\n os.system(\"rm \"+options.output_directory_per_run+\"/*regionsOfInterest*\")\n os.system(\"mv \"+options.output_directory_per_run+\"/* \"+options.output_directory_per_run+\"/../\")\n os.system(\"rm -rf \"+options.output_directory_per_run)",
"def clean_filesystem(files=[]):\n remove_files(files + find_cache_files())",
"def clean(self):\n print(\"Cleaning outputs in %s\" % self.args.output)\n files = glob.glob(self.args.output + \"*.pkl\")\n for f in files:\n if os.path.exists(f):\n os.remove(f)",
"def Cleanup(benchmark_spec):\n del benchmark_spec # unused",
"def Cleanup(benchmark_spec):\n del benchmark_spec # unused",
"def clean_chunk_files(dirpath):\n workdir = os.getcwd()\n os.chdir(dirpath)\n for filename in glob.glob(\"[0-9]*_[0-9]*_[0-9]*.hdf5\"):\n os.remove(filename)\n os.chdir(workdir)",
"def clean():\n for dirpath, dirnames, filenames in os.walk('.'):\n for filename in filenames:\n if filename.endswith('.pyc') or filename.endswith('.pyo'):\n full_pathname = os.path.join(dirpath, filename)\n click.echo('Removing {}'.format(full_pathname))\n os.remove(full_pathname)",
"def crunch(self):\n while True:\n lst = self.want_line(r'\\s*\\.file\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.globl\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.ident\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.section\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.type\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.size\\s+(.*)')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.(bss)\\s+')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.(data)\\s+')\n if lst:\n self.erase(lst[0])\n continue\n lst = self.want_line(r'\\s*\\.(text)\\s+')\n if lst:\n self.erase(lst[0])\n continue\n break\n if osarch_is_amd64():\n self.crunch_amd64(lst)\n elif osarch_is_ia32():\n self.crunch_ia32(lst)\n self.__tag = None",
"def cleanup_intermediate_files(self):\n self.cmd(\"rm -f {local_temp_dir}/*rg_dict* \\\n {local_temp_dir}/*aln* \\\n {local_temp_dir}/snappy*\".\n format(\n local_temp_dir=self.local_temp_dir\n ),\n shell=True)",
"def remove_outputs_zip():\n os.remove(\"outputs.zip\")",
"def clean(session):\n clean_dirs = (\n get_path(\".cache\"),\n get_path(\".coverage\"),\n get_path(\".pytest_cache\"),\n get_path(\"__pycache__\"),\n get_path(\"build\"),\n get_path(\"dist\"),\n get_path(\"docs\", \"__pycache__\"),\n get_path(\"docs\", \"build\"),\n get_path(\"scripts\", \"macos\", \"__pycache__\"),\n get_path(\"src\", \"python\", \"bezier.egg-info\"),\n get_path(\"src\", \"python\", \"bezier\", \"__pycache__\"),\n get_path(\"tests\", \"__pycache__\"),\n get_path(\"tests\", \"functional\", \"__pycache__\"),\n get_path(\"tests\", \"unit\", \"__pycache__\"),\n get_path(\"tests\", \"unit\", \"hazmat\", \"__pycache__\"),\n get_path(\"wheelhouse\"),\n )\n clean_globs = (\n get_path(\".coverage\"),\n get_path(\"*.mod\"),\n get_path(\"*.pyc\"),\n get_path(\"docs\", \"abi\", \"example\"),\n get_path(\"src\", \"python\", \"bezier\", \"*.pyc\"),\n get_path(\"src\", \"python\", \"bezier\", \"*.pyd\"),\n get_path(\"src\", \"python\", \"bezier\", \"*.so\"),\n get_path(\"src\", \"fortran\", \"*.o\"),\n get_path(\"tests\", \"*.pyc\"),\n get_path(\"tests\", \"functional\", \"*.pyc\"),\n get_path(\"tests\", \"unit\", \"*.pyc\"),\n )\n for dir_path in clean_dirs:\n session.run(shutil.rmtree, dir_path, ignore_errors=True)\n for glob_path in clean_globs:\n for filename in glob.glob(glob_path):\n session.run(os.remove, filename)",
"def devclean():\n click.echo(\"start clean your output folder...\")\n rm(OUTPUTDIR, recursive=True)",
"def clean(cline):\n print(\"Deleting __pycache__ directories.\")\n cline.run(\"find . -iname '__pycache__' | xargs rm -rf\")"
] | [
"0.6843979",
"0.6425804",
"0.6238822",
"0.6171667",
"0.58739084",
"0.57915837",
"0.5787829",
"0.57830065",
"0.5750248",
"0.57371056",
"0.56529194",
"0.56070024",
"0.55917656",
"0.5587348",
"0.5559113",
"0.5551564",
"0.55382293",
"0.551886",
"0.5486453",
"0.54620624",
"0.5435801",
"0.5435801",
"0.54246706",
"0.54239494",
"0.5388997",
"0.5371108",
"0.53644204",
"0.53637296",
"0.5347888",
"0.5334125"
] | 0.8146737 | 0 |
Fail immediately, with the given message. | def fail(self, msg=None):
raise Exception, msg | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def fail(self, msg=None):\r\n raise self.failureException(msg)",
"def fail(self, message, *args, **kwargs):\n self.counters[\"failure\"] += 1\n self._write(message.format(*args, **kwargs), FAILURE)",
"def fail(self, message):\n logger.warning(message)\n g.failed = True",
"def tc_fail(self, msg):\n self.recover()\n tc_fail(msg)",
"def fail(msg):\n log('FAIL', msg)",
"def fail(msg):\n\n # Not sure if simply raising the exception is clearer.\n raise CommandFailed(msg)",
"def _failed(self, msg):\n self.log(msg)\n self.result.passed = False\n self.result.add_error(msg)\n self.log(u\"Failed\")",
"def fail(title='Error', message=''):\n if not isinstance(title, string_types):\n raise TypeError('fail() title must be a string.')\n if not isinstance(message, string_types):\n raise TypeError('fail() message must be a string.')\n _get_app().fail(title, message)",
"def Fail(message):\n print \"Content-type: text/plain\\n\"\n print message\n raise Exception(message)",
"def fail(self, text: str = \"FAIL\") -> None:\n _text = text if text else \"FAIL\"\n self._freeze(_text)",
"def fail(msg, exit_code=1):\n sys.stderr.write(\"{}\\n\".format(msg))\n sys.exit(exit_code)",
"def test_fail(self) -> defer.Deferred[None]:\n return deferLater(reactor, 0, self.fail, \"I fail later\") # type: ignore[arg-type]",
"def fail_with(s):\n print \"[FAILURE] %s\" % s\n sys.exit(1)",
"def failure(self, message=''):\n print(colored(message, 'red'))",
"def _fail(self, msg, err=None):\n if self.session:\n self.session.cleanup()\n\n if err:\n self.module.fail_json(msg=msg + \"\\n\" + str(err), **self.result)\n else:\n self.module.fail_json(msg=msg, **self.result)",
"def failed(self, message=None):\n doc = {self.STATE: self.STATE_FAILED}\n\n if message:\n doc.update({self.ERROR_MESSAGE: message})\n\n self.update(doc)",
"def fail(self, msg, *args):\n self.log.error(msg, *args)\n sys.exit(1)",
"def die(self, msg=None):\r\n raise Exception(msg)",
"def raise_fail(*args, **kwargs):\n raise Exception(\"oops\")",
"def fail(message):\n utils.logit(\"critical\", \"Failed dict.get routine. Error --> {}\".\n format(message), 0)",
"def fail(self, msg, shutit_pexpect_child=None, throw_exception=False):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\t# Note: we must not default to a child here\n\t\tif shutit_pexpect_child is not None:\n\t\t\tshutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child)\n\t\t\tshutit_util.print_debug(sys.exc_info())\n\t\t\tshutit_pexpect_session.pause_point('Pause point on fail: ' + msg, color='31')\n\t\tif throw_exception:\n\t\t\tsys.stderr.write('Error caught: ' + msg + '\\n')\n\t\t\tsys.stderr.write('\\n')\n\t\t\tshutit_util.print_debug(sys.exc_info())\n\t\t\traise ShutItFailException(msg)\n\t\telse:\n\t\t\t# This is an \"OK\" failure, ie we don't need to throw an exception.\n\t\t\t# However, it's still a \"failure\", so return 1\n\t\t\tshutit_global.shutit_global_object.handle_exit(exit_code=1,msg=msg)\n\t\tshutit_global.shutit_global_object.yield_to_draw()",
"def failure(self, error, rc, msg):\n self.module.fail_json(msg=msg, rc=rc, err=error)",
"def fail(self, text=u\"FAIL\", err=False):\n # type: (str, bool) -> None\n # Do not display spin text for fail state\n self._text = None\n\n _text = text if text else u\"FAIL\"\n err = err or not self.write_to_stdout\n self._freeze(_text, err=err)",
"def test_04_fail(self):\n if y == 2:\n self.fail('This is a custom fail message')",
"def fail(self, cause = None, annotations = {}):\n self.set_outcome(Result.FAIL, cause, annotations)",
"def print_failure(msg):\n\n tf.print(BColors.FAIL + msg + BColors.ENDC, output_stream=sys.stderr)\n sys.exit(1)",
"def test_fails(self):\n raise FoolishError(\"I am a broken test\")",
"def indicate_failure(self):\n pass",
"def error(message):\n if DEBUG:\n with print_lock:\n print((Colours.FAIL + 'ERROR: ' + Colours.END_COLOUR + message).strip())",
"def fail_job( self, job_state ):\n self.stop_job( self.sa_session.query( self.app.model.Job ).get( job_state.job_wrapper.job_id ) )\n job_state.job_wrapper.fail( getattr( job_state, \"fail_message\", GENERIC_REMOTE_ERROR ) )"
] | [
"0.8109415",
"0.761201",
"0.7590433",
"0.75791526",
"0.7566061",
"0.7528459",
"0.7067831",
"0.6895462",
"0.6790964",
"0.6782338",
"0.67756104",
"0.6773557",
"0.6760443",
"0.6701905",
"0.6625162",
"0.6528707",
"0.6483209",
"0.64499813",
"0.64096963",
"0.64075947",
"0.63567835",
"0.63434833",
"0.6272806",
"0.6237657",
"0.6235071",
"0.62029785",
"0.61373484",
"0.61187637",
"0.6102058",
"0.6084198"
] | 0.7741948 | 1 |
Validate (previous) KMALLOC/KFREE calls of a set of tasks (pids) | def validate(self, tracked_pids, test_case=stubTestcase, debug=False):
out = os.popen('dmesg -c -s %d' % LOG_BUF_LEN)
dmesg_lines = out.readlines()
out.close()
allocations = []
memory_allocated = False
if debug:
f = open('mm_debug.txt', 'w+')
f.write('All KMALLOC/KFREE messages:\n\n')
f.write(''.join(dmesg_lines))
f.write('\nTracked pids: %s\nOnly relevant KMALLOC/KFREE messages:\n' % repr(tracked_pids))
for line in dmesg_lines:
re_result = re.search(r'.*?(KMALLOC|KFREE) (\d*) (\w*)', line)
if not re_result:
continue
action = re_result.group(1)
pid = int(re_result.group(2))
address = re_result.group(3)
if pid not in tracked_pids:
continue
f.write(line)
f.write('\nProcessing KMALLOC/KFREE messages:\n')
try:
for line in dmesg_lines:
re_result = re.search(r'.*?(KMALLOC|KFREE) (\d*) (\w*)', line)
if not re_result:
continue
action = re_result.group(1)
pid = int(re_result.group(2))
address = re_result.group(3)
if pid not in tracked_pids:
continue
if debug:
f.write(line)
if action == 'KMALLOC':
memory_allocated = True
if address in allocations:
test_case.fail('Same address, %s, allocated twice without release.' % address)
break
allocations.append(address)
if action == 'KFREE':
if address not in allocations:
test_case.fail('Freeing a non allocated address, %s.' % address)
break
allocations.remove(address)
else:
test_case.assert_(memory_allocated, 'No memory allocated during execution.')
test_case.assert_(not allocations, 'Failed to free some of the allocated memory, left %d:\n%s' % (len(allocations), '\n'.join(allocations)))
finally:
if debug:
f.close() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bad_cgroup_processes_check():\n return CGCheck([], bad_cgroup_processes)",
"def check_kpts(self):\n if 'fleurinp' in self.ctx.inputs:\n fleurinp = self.ctx.inputs.fleurinp\n else:\n fleurinp = get_fleurinp_from_remote_data(self.ctx.inputs.parent_folder)\n\n only_even_MPI = self.inputs.add_comp_para['only_even_MPI']\n forbid_single_mpi = self.inputs.add_comp_para['forbid_single_mpi']\n try:\n machines, mpi_tasks, omp_threads, message = optimize_calc_options(self.ctx.num_machines,\n self.ctx.num_mpiprocs_per_machine,\n self.ctx.num_cores_per_mpiproc,\n self.ctx.use_omp,\n self.ctx.suggest_mpi_omp_ratio,\n fleurinp,\n only_even_MPI=only_even_MPI,\n forbid_single_mpi=forbid_single_mpi)\n except ValueError as exc:\n self.report(exc)\n return self.exit_codes.ERROR_NOT_OPTIMAL_RESOURCES\n\n self.report(message)\n\n self.ctx.inputs.metadata.options['resources']['num_machines'] = machines\n self.ctx.inputs.metadata.options['resources']['num_mpiprocs_per_machine'] = mpi_tasks\n if self.ctx.use_omp:\n self.ctx.inputs.metadata.options['resources']['num_cores_per_mpiproc'] = omp_threads\n if 'environment_variables' not in self.ctx.inputs.metadata.options:\n self.ctx.inputs.metadata.options['environment_variables'] = {}\n self.ctx.inputs.metadata.options['environment_variables']['OMP_NUM_THREADS'] = str(omp_threads)",
"def _constraints_task_valid(self):\n def rule(model):\n \"\"\"\n Bind the tail entries to zero\n \"\"\"\n num = self.num_timeslots\n ind_j = model.tasks\n total = sum(model.A2[num-1, j] for j in ind_j)\n total += sum(model.A3[num-1, j] for j in ind_j)\n total += sum(model.A4[num-1, j] for j in ind_j)\n total += sum(model.A3[num-2, j] for j in ind_j)\n total += sum(model.A4[num-2, j] for j in ind_j)\n total += sum(model.A4[num-3, j] for j in ind_j)\n return None, total, 0\n\n self.model.constrain_tail = Constraint(rule=rule)\n\n def rule(model):\n \"\"\"\n Only permit \"valid\" allocation on A, A2, A3, etc.\n \"\"\"\n ind_i = model.timeslots\n ind_j = model.tasks\n total = sum(model.A[i, j] * (1-self.valid[i, j]) for i in ind_i\n for j in ind_j)\n total += sum(model.A2[i, j] * (1 - self.valid[i, j]) for i in\n ind_i for j in ind_j)\n total += sum(model.A3[i, j] * (1 - self.valid[i, j]) for i in\n ind_i for j in ind_j)\n\n return None, total, 0\n\n self.model.constrain_valid0 = Constraint(rule=rule)\n\n def rule(model):\n \"\"\"\n Only permit \"valid\" allocation on A, A2, A3, etc.\n \"\"\"\n ind_i = model.timeslots2\n ind_j = model.tasks\n inv = 1-self.valid\n total = sum(\n model.A2[i, j] * inv[i + 1, j] for i in ind_i for j in ind_j)\n total += sum(\n model.A3[i, j] * inv[i + 1, j] for i in ind_i for j in ind_j)\n total += sum(\n model.A4[i, j] * inv[i + 1, j] for i in ind_i for j in ind_j)\n\n ind_i = model.timeslots3\n ind_j = model.tasks\n total += sum(\n model.A3[i, j] * inv[i + 2, j] for i in ind_i for j in ind_j)\n total += sum(\n model.A4[i, j] * inv[i + 2, j] for i in ind_i for j in ind_j)\n\n ind_i = model.timeslots4\n ind_j = model.tasks\n total += sum(\n model.A4[i, j] * inv[i + 3, j] for i in ind_i for j in ind_j)\n\n return None, total, 0\n\n self.model.constrain_valid1 = Constraint(rule=rule)",
"def bad_cgroup_processes():\n errors = []\n config = sppidutil.sp_cg_config()\n for pid, service, cpuset, memory in sppidutil.sp_pids_info():\n if service not in config:\n errors.append('no cgroups configuration found for {0}'.format(service))\n continue\n if not cpuset.startswith(config[service]['cpuset']):\n errors.append('{0}({1}) is not in the right cpuset group'.format(pid, service))\n if not memory.startswith(config[service]['memory']):\n errors.append('{0}({1}) is not in the right memory group'.format(pid, service))\n return errors, []",
"def check_resources(reqs):\n logger.info(\"REQUIREMENTS: \" + str(reqs))\n free_cpu, free_mem = get_resources()\n return check_if_free_resources(free_mem, free_cpu, reqs)",
"def race_condition():\n if len(allocated_pids) != len(set(allocated_pids)):\n return True\n else:\n return False",
"def check_subprocesses(self) : \n for sp_ident in self.active_subprocesses :\n if not os.path.exists(\"%s/%s.rc\" % (self.spool_dir, sp_ident ) ) : continue\n self.finished_subprocesses[sp_ident] = self.get_subprocess_result(sp_ident)\n self.active_subprocesses.pop(sp_ident)",
"def collect_garbage(self, required_inputs):",
"def _clean_up_when_fail(self):\n if self.user_pool.cache_lock.locked():\n self.user_pool.cache_lock.release()\n abnormal_interrupt = False\n if \"abnormal_interrupt\" in self.case_info_dict:\n abnormal_interrupt = self.case_info_dict[\"abnormal_interrupt\"]\n\n if abnormal_interrupt:\n self.p_stop_signal.set()\n # False,设置终止整个进程信号\n else:\n self.user_pool.cache_lock.acquire()\n\n if self.conf_group_id not in self.user_pool.group_id_list:\n self.user_pool.group_id_list.appendleft(self.conf_group_id)\n\n if self.conf_id is not None:\n self.user_pool.conf_id_list.remove(self.conf_id)\n\n if self.conf_id in self.user_pool.conf_id_obj_dict:\n del self.user_pool.conf_id_obj_dict[self.conf_id]\n\n for in_conf_mem_phone_num in self.in_conf_mem_phone_num_list:\n if in_conf_mem_phone_num in self.user_pool.in_conf_mem_phone_num_list:\n self.user_pool.in_conf_mem_phone_num_list.remove(in_conf_mem_phone_num)\n for out_conf_mem_phone_num in self.out_conf_mem_phone_num_list:\n if out_conf_mem_phone_num in self.user_pool.out_conf_mem_phone_num_list:\n self.user_pool.out_conf_mem_phone_num_list.remove(out_conf_mem_phone_num)\n\n for conf_mem_phone_num in self.conf_mem_phone_num_list:\n if conf_mem_phone_num in self.user_pool.conf_mem_phone_num_list:\n self.user_pool.conf_mem_phone_num_list.remove(conf_mem_phone_num)\n if conf_mem_phone_num in self.user_pool.conf_all_mem_phone_num_list:\n self.user_pool.conf_all_mem_phone_num_list.remove(conf_mem_phone_num)\n if self.chairman_phone_num in self.user_pool.conf_chair_phone_num_list:\n self.user_pool.conf_chair_phone_num_list.remove(self.chairman_phone_num)\n if self.chairman_phone_num in self.user_pool.conf_all_mem_phone_num_list:\n self.user_pool.conf_all_mem_phone_num_list.remove(self.chairman_phone_num)\n\n self.group_obj.is_in_conf = True\n self.group_obj.conf_id = None\n self.user_pool.cache_lock.release()",
"def check_vms(st):\n\n logging.info(\"Checking batch system's VMs...\")\n check_time = time.time()\n\n # Retrieve *all* running instances (also the non-owned ones) and filter out\n # statuses of workers which are not valid VMs: we are not interested in them\n rvms = running_instances()\n rvms2 = []\n\n rips = []\n if rvms is not None:\n for inst in rvms:\n ipv4 = inst.network_ip(network_name=cf[\"api\"][\"network_name\"])\n if ipv4 is not None:\n rips.append(ipv4)\n rvms2.append(inst)\n if len(rips) == 0:\n rips = None\n new_workers_status = BatchPlugin.poll_status( st['workers_status'], rips )\n\n rvms=rvms2\n\n if new_workers_status is not None:\n #logging.debug(new_workers_status)\n st['workers_status'] = new_workers_status\n new_workers_status = None\n\n hosts_shutdown = []\n for host,info in st['workers_status'].iteritems():\n if info['jobs'] != 0: continue\n if (check_time-info['unchangedsince']) > cf['elastiq']['idle_for_time_s']:\n logging.info(\"Host %s is idle for more than %ds: requesting shutdown\" % \\\n (host,cf['elastiq']['idle_for_time_s']))\n st['workers_status'][host]['unchangedsince'] = check_time # reset timer\n hosts_shutdown.append(host)\n\n if len(hosts_shutdown) > 0:\n inst_ok = scale_down(hosts_shutdown, valid_hostnames=st['workers_status'].keys())\n change_vms_allegedly_running(st, -len(inst_ok))\n\n # Scale up to reach the minimum quota, if any\n min_vms = cf['quota']['min_vms']\n if min_vms >= 1:\n rvms = running_instances(st['workers_status'].keys())\n if rvms is None:\n logging.warning(\"Cannot get list of running instances for honoring min quota of %d\" % min_vms)\n else:\n n_run = len(rvms)\n n_consider_run = n_run + st['vms_allegedly_running']\n logging.info(\"VMs: running=%d | allegedly running=%d | considering=%d\" % \\\n (n_run, st['vms_allegedly_running'], n_consider_run))\n n_vms = min_vms-n_consider_run\n if n_vms > 0:\n logging.info(\"Below minimum quota (%d VMs): requesting %d more VMs\" % \\\n (min_vms,n_vms))\n inst_ok = scale_up(n_vms, valid_hostnames=st['workers_status'].keys(), vms_allegedly_running=st['vms_allegedly_running'] )\n for inst in inst_ok:\n change_vms_allegedly_running(st, 1, inst)\n st['event_queue'].append({\n 'action': 'check_owned_instance',\n 'when': time.time() + cf['elastiq']['estimated_vm_deploy_time_s'],\n 'params': [ inst ]\n })\n\n # OK: schedule when configured\n sched_when = time.time() + cf['elastiq']['check_vms_every_s']\n\n else:\n # Not OK: reschedule ASAP\n sched_when = 0\n\n return {\n 'action': 'check_vms',\n 'when': sched_when\n }",
"def _validate_jobs(\n self,\n check_nlst_warn: bool = False\n ):\n counter = 0\n for job in self.jobs:\n counter += 1\n print(job.job_id)\n if counter == 0:\n ignore_restarts = False\n else:\n ignore_restarts = True\n\n check_input_files(\n hrldas_namelist=job.hrldas_namelist,\n hydro_namelist=job.hydro_namelist,\n sim_dir=os.getcwd(),\n ignore_restarts=ignore_restarts,\n check_nlst_warn=check_nlst_warn\n )",
"def test_free_space_rejects_file_arguments():\n result = _run_metric('free_space', '/etc/hosts')\n # 2 is the exit code for a UsageError, which includes bad parameters.\n assert result.exit_code == 2\n # Is this too fragile?\n assert 'Invalid value' in result.output",
"def check_processes(process_list):\n running = 1 # 0 when the subprocesses are all done\n while running:\n for proc in process_list:\n proc.poll()\n if proc.returncode == 1:\n raise RuntimeError(\"Program \" +\n \"number \" +\n \"{}\".format(process_list.index(proc)) +\n \" failed.\")\n running = bool(sum([int(proc.returncode) for proc in process_list]))\n return True",
"def test_free_inodes_rejects_file_arguments():\n result = _run_metric('free_inodes', '/etc/hosts')\n # 2 is the exit code for a UsageError, which includes bad parameters.\n assert result.exit_code == 2\n # Is this too fragile?\n assert 'Invalid value' in result.output",
"def check_launcher():\n\n # Storage in memory which holds info about currently running checks\n storage = {}\n\n # Storage in memory which holds process info: process id and project objects\n processes = {}\n\n # Close previously opened connections (if the exist)\n django.db.connections.close_all()\n\n while True:\n # Making Copy in order to compare updates in data base\n new_storage = copy.deepcopy(storage)\n\n # Fetch data from database\n check_sync(new_storage)\n\n # Get storage keys in order to compare storages for changes\n old_keys = set(storage.keys())\n new_keys = set(new_storage.keys())\n\n # Get keys of elements in init storage and updated storage\n added_checks = new_keys.difference(old_keys)\n deleted_checks = old_keys.difference(new_keys)\n common_checks = new_keys.intersection(old_keys)\n\n # Launch new processes\n for check_id in added_checks:\n # Spawn new process with name Process#id, where id = check_id\n start_process(check_id, new_storage, processes)\n\n # Stop (kill) deleted check's prorcesses\n for check_id in deleted_checks:\n stop_process(check_id, storage, processes)\n\n for check_id in common_checks:\n if storage[check_id] != new_storage[check_id]:\n stop_process(check_id, storage, processes)\n # Spawn new process with name Process#id, where id = check_id\n start_process(check_id, new_storage, processes)\n\n storage = copy.deepcopy(new_storage)\n time.sleep(30)",
"def clean_task_list():\n\n for _key in autorx.task_list.keys():\n # Attempt to get the state of the task\n try:\n _running = autorx.task_list[_key]['task'].running()\n _task_sdr = autorx.task_list[_key]['device_idx']\n except Exception as e:\n logging.error(\"Task Manager - Error getting task %s state - %s\" % (str(_key),str(e)))\n continue\n\n if _running == False:\n # This task has stopped. Release it's associated SDR.\n autorx.sdr_list[_task_sdr]['in_use'] = False\n autorx.sdr_list[_task_sdr]['task'] = None\n # Pop the task from the task list.\n autorx.task_list.pop(_key)\n # Indicate to the web client that the task list has been updated.\n flask_emit_event('task_event')\n\n # Check if there is a scanner thread still running. If not, and if there is a SDR free, start one up again.\n if ('SCAN' not in autorx.task_list) and (allocate_sdr(check_only=True) is not None):\n # We have a SDR free, and we are not running a scan thread. Start one.\n start_scanner()",
"def delcomperrsessallocfail(self) :\n\t\ttry :\n\t\t\treturn self._delcomperrsessallocfail\n\t\texcept Exception as e:\n\t\t\traise e",
"def refresh_pids(active_pids, resources):\n still_active_pids = []\n no_change = True\n for info in active_pids:\n pid, gpu, title, cmd, lock_path = info\n if still_active(pid, cmd):\n still_active_pids.append(info)\n else:\n print(f\"[{time.strftime(time.ctime())}] {title} seems to be over.\")\n os.remove(lock_path)\n resources.free(gpu=gpu)\n no_change = False\n return still_active_pids, no_change",
"def test_free_space_rejects_nonexistent_paths():\n totally_made_up_path = \"/cwmon/{0}\".format(uuid.uuid4())\n result = _run_metric('free_space', totally_made_up_path)\n # 2 is the exit code for a UsageError, which includes bad parameters.\n assert result.exit_code == 2\n # Is this too fragile?\n assert 'Invalid value' in result.output",
"def validate(self, keypoints):\n for k in keypoints:\n self.validate_keypoints(k)",
"def validate(self):\n errors = []\n app = errors.append\n\n if not self.hint_cores >= self.mpi_procs * self.omp_threads >= self.min_cores:\n app(\"self.hint_cores >= mpi_procs * omp_threads >= self.min_cores not satisfied\")\n\n if self.omp_threads > self.hw.cores_per_node:\n app(\"omp_threads > hw.cores_per_node\")\n\n if self.mem_per_proc > self.hw.mem_per_node:\n app(\"mem_mb >= self.hw.mem_per_node\")\n\n if not self.max_mem_per_proc >= self.mem_per_proc >= self.min_mem_per_proc:\n app(\"self.max_mem_per_proc >= mem_mb >= self.min_mem_per_proc not satisfied\")\n\n if self.priority <= 0:\n app(\"priority must be > 0\")\n\n if not (1 <= self.min_cores <= self.hw.num_cores >= self.hint_cores):\n app(\"1 <= min_cores <= hardware num_cores >= hint_cores not satisfied\")\n\n if errors:\n raise self.Error(str(self) + \"\\n\".join(errors))",
"def validate_invalids(invalids: Sequence[utils.TraceTuple]) -> List[utils.UpdateTuple]:\n # Collect validated\n update_tuples: List[utils.UpdateTuple] = []\n\n # multiprocessing!\n with ProcessPoolExecutor() as executor:\n # Iterate over invalids. submit as tasks\n futures = {\n executor.submit(validate_invalid, invalid): invalid for invalid in invalids\n }\n\n # Collect all tasks as they complete\n # Will not be in same order submitted!?\n for future in as_completed(futures):\n # If validation critically fails for a dataset\n # we can still proceed with other validations\n try:\n # Get result from Future\n # This will throw an error (if it happened in process)\n update_tuple = future.result()\n except Exception:\n # Catch and log critical failures\n logging.error(\n f\"Validation exception with {futures[future]}.\",\n exc_info=True,\n )\n update_values = {\n rules.ColumnNames.VALIDITY: rules.ValidationResults.CRITICAL.value\n }\n update_tuple = utils.UpdateTuple(\n area_name=futures[future].area_path.stem,\n update_values=update_values,\n error=True,\n traces_path=futures[future].traces_path,\n )\n # Collect result\n update_tuples.append(update_tuple)\n return sort_update_tuples_to_match_invalids(\n update_tuples=update_tuples, invalids=invalids\n )",
"def check_sp_cgroup(grp):\n pids = ctrl_info[grp]['cgroup.procs']\n for pid, status in sppidutil.recognise(pids).items():\n if status == 'unknown':\n errors.append('{0} pid {1} found in {2}:{3}'\n .format(status, pid, ctrl_name, grp))\n if status in ('stat', 'controller'):\n warnings.append('{0} pid {1} found in {2}:{3}'\n .format(status, pid, ctrl_name, grp))",
"def check_process_full(self) -> None:\n if len(self.process_queue) >= self.max_processes:\n task_name, sp = self.process_queue.pop()\n sp.wait()",
"def _sanityCheckKeySizes(other):\n if other.minKeySize < 512:\n raise ValueError(\"minKeySize too small\")\n if other.minKeySize > 16384:\n raise ValueError(\"minKeySize too large\")\n if other.maxKeySize < 512:\n raise ValueError(\"maxKeySize too small\")\n if other.maxKeySize > 16384:\n raise ValueError(\"maxKeySize too large\")\n if other.maxKeySize < other.minKeySize:\n raise ValueError(\"maxKeySize smaller than minKeySize\")\n # check also keys of virtual hosts\n for i in other.virtual_hosts:\n i.validate()",
"def delcomperrreqinfoallocfail(self) :\n\t\ttry :\n\t\t\treturn self._delcomperrreqinfoallocfail\n\t\texcept Exception as e:\n\t\t\traise e",
"def checkmem(self,file_,line_): # 3\n res = self.__obj.checkmemtask(file_,line_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)",
"def is_free(self) -> tuple:\n if self.running_procs >= self.procs_no:\n return (False, None)\n if self.gpus:\n for gpu in self.gpus:\n if self.gpu_running_procs[gpu] < self.per_gpu[gpu]:\n return (True, gpu)\n return (False, None)\n return (True, None)",
"def test_free_inodes_rejects_nonexistent_paths():\n totally_made_up_path = \"/cwmon/{0}\".format(uuid.uuid4())\n result = _run_metric('free_inodes', totally_made_up_path)\n # 2 is the exit code for a UsageError, which includes bad parameters.\n assert result.exit_code == 2\n # Is this too fragile?\n assert 'Invalid value' in result.output",
"def failed_task(self):\n self.report_total_usage()\n #print 'failure killed task %s from sim %s' % (self.name, self.sim.name)\n self.num_faults += 1\n self.retry = True\n self.state = \"ready\"\n if self.using.nodes > 0:\n self.RM.release_allocation(self, self.using.nodes - 1, failed=True)\n self.using.clear()\n self.curr_exec_time = 0\n self.fwk.logEvent(self.sim.name, self.name, \"failed_task\", \"task failed due to node failure\")"
] | [
"0.5835271",
"0.54331756",
"0.54150486",
"0.5370245",
"0.53074366",
"0.52768683",
"0.52522767",
"0.5199778",
"0.51450527",
"0.5127863",
"0.5077406",
"0.5031161",
"0.5010177",
"0.5008991",
"0.49914876",
"0.49872923",
"0.49616095",
"0.49612164",
"0.4950241",
"0.49074998",
"0.48958448",
"0.48910558",
"0.48745045",
"0.48525536",
"0.48454416",
"0.48247308",
"0.48223358",
"0.48117468",
"0.48018357",
"0.47926113"
] | 0.65229285 | 0 |
Release for the other side of the fork that is syncing this side | def release(self):
if self._inchild:
os.write(self._pw_child, self.RELEASE_MSG)
else:
os.write(self._pw_parent, self.RELEASE_MSG) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _release(self):\n\n os.write(self.job_pipe[1], b'+')",
"async def release(self) -> None:\n ...",
"async def release(self) -> None:\n ...",
"async def release(self) -> None:\n ...",
"def release(self):",
"def release(self):\r\n pass",
"def release(self):\n self.acquired = False",
"def release(self):\n self.filelock.set()\n self.locked = False\n self.exclusive = False",
"def release(self):\n self.filelock.set()\n self.locked = False\n self.exclusive = False",
"def release(local):\n local.__release__()",
"def release(self):\n self.is_locked = False\n os.unlink(self.lockfile)",
"def release(self):\n # type: () -> None\n for part in self.parts:\n part.release()",
"def sync(self):\n\n if self._inchild:\n os.read(self._pr_child, len(self.RELEASE_MSG))\n else:\n os.read(self._pr_parent, len(self.RELEASE_MSG))",
"def release(self):\n if not self._released:\n logging.debug('Release {}'.format(self.username))\n self.ref.done(self.username)\n\n self._released = True",
"def release(self, o):\n if not self.available(o):\n raise ValueError('you do not own this lock')\n self._owner = None",
"def release(self):\n if self.is_locked:\n os.close(self.fd)\n os.unlink(self.lockfile)\n self.is_locked = False",
"def release(self):\r\n if self.is_locked:\r\n os.close(self.fd)\r\n os.unlink(self.lockfile)\r\n self.is_locked = False",
"def _release(self):\n try:\n os.unlink(self.lockfile)\n\n # Log success.\n logging.info(\"Released lock at \" + self.lockfile + \"...\")\n except:\n # Ignore all errors.\n pass",
"def release_node(self, node):\n # use the lua script to release the lock in a safe way\n try:\n node._release_script(keys=[self.resource], args=[self.lock_key])\n except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError):\n pass",
"def release(self):\n self._needs_release = False\n send_message(self, \"release\", restype=objc_id, argtypes=[])",
"def release_control(self):\n pass",
"async def _maybe_release_last_part(self) -> None:\n ...",
"def run(self): \n\n #acquire the semaphore\n global wheel\n while True:\n time.sleep(2) \n self.s2.acquire() \n self.empty.acquire() \n #remove a table from the list\n\n wheel += 1 \n print \"Producer2(%s):deliver wheels, now wheels:%s\\n\" %(self.name, wheel) \n self.wheel.release() \n #self.threadSemaphore.release() ",
"def release_lock(self):\n self._multistore._unlock()",
"def __del__(self):\n if self.is_locked:\n self.release()",
"def write_release(self):\n self.is_locked = False\n self.rwlock = RWLock().write_release()",
"def release(self):\n if self._ctx is None:\n return\n self.atomicfile.delete()\n try:\n self._ctx.__exit__(None, None, None)\n finally:\n self._ctx = None",
"def RELEASE(self):\n return get_release()",
"def api_release(self):\n\n self._api_release_lock_with_timer()",
"def release():\n lockfile = path.user('.%s.lock' % application.NAME)\n if isfile(lockfile):\n unlink(lockfile)\n return True\n return False"
] | [
"0.703684",
"0.6900427",
"0.6900427",
"0.6900427",
"0.6875046",
"0.6745146",
"0.6730091",
"0.6527831",
"0.6527831",
"0.6390934",
"0.63661313",
"0.6281708",
"0.62689257",
"0.6262654",
"0.6258315",
"0.61609346",
"0.61515474",
"0.6143002",
"0.6137582",
"0.60939044",
"0.60918385",
"0.6026993",
"0.60175544",
"0.5979858",
"0.595569",
"0.59541404",
"0.59467196",
"0.59458345",
"0.5897824",
"0.5856754"
] | 0.6965916 | 1 |
Return the correct write side of the general pipe | def _wpipe(self):
if self._inchild:
return self._general_pw_child
else:
return self._general_pw_parent | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def write(self, s):\n\t\tif self._input: raise PlumberExceptions.PipeTypeException(self)\n\t\treturn pservlet.pipe_write(self._pipe_desc, s)",
"def write():\n pass",
"def _handle_write(self):\n pass",
"def write(self, out):",
"def make_send_write(pipe_to_child, uid, params_and_values):\n def helper():\n \"\"\"\n Helper function.\n \"\"\"\n pipe_to_child.send([\"write_params\", [uid, params_and_values]])\n return helper",
"def write( data ):",
"def handle_write(self):\n pass",
"def write_end(self) -> str:\n assert self._read_pipe_name\n return self._read_pipe_name",
"def _poll_advance_write(self, flush):\n if flush == 0:\n self._async_status = consts.ASYNC_READ\n return consts.POLL_READ\n\n if flush == 1:\n return consts.POLL_WRITE\n\n if flush == -1:\n raise self._create_exception()\n\n return consts.POLL_ERROR",
"def read_end(self) -> str:\n assert self._write_pipe_name\n return self._write_pipe_name",
"def socket_pipe():\n\n # Create read0end acceptor.\n read_acceptor = socket.socket()\n read_acceptor.bind(('localhost', 0))\n read_acceptor.listen(10)\n read_acceptor.setblocking(False)\n\n # Create writer and connect it\n writer = socket.socket()\n writer.setblocking(True)\n writer.connect(read_acceptor.getsockname())\n\n # Wait for connection from the right socket\n for _ in xrange(10):\n reader, writer_address = read_acceptor.accept()\n reader.setblocking(True)\n if writer_address != writer.getsockname():\n sys.stderr.write(__name__ + \".socket_pipe: Waring: port \"\n \"scanning detected.\\n\")\n reader.close()\n continue\n break\n else:\n raise RuntimeError(\"socket_pipe: did not receive writer connection.\")\n\n read_acceptor.close()\n\n # Verify, that the connected socket is really the right one.\n test_message = str(random.random())\n writer.sendall(test_message)\n while test_message:\n test_chunk = reader.recv(len(test_message))\n if not test_chunk or not test_message.startswith(test_chunk):\n raise RuntimeError(\"socket_pipe: invalid test data received.\")\n test_message = test_message[len(test_chunk):]\n\n return reader, writer",
"def is_raw_write(command): \n if command.startswith('<WRITE') and command.endswith('>'):\n return True\n else: \n return False\n # end if",
"def write(data):",
"def io_pipe():\n r_fd, w_fd = os.pipe()\n with io.open(r_fd, 'rb', 0) as r, \\\n \t io.open(w_fd, 'wb', 0) as w:\n \tyield r, w",
"def piped(self):\n\t\tpass",
"def write(self, buf: AnyReadableBuf, /) -> int:",
"def write_message_to_pipe(writehandle, channel, data):\n # Construct the dictionary\n mesg_dict = {\"ch\":channel,\"d\":data}\n\n # Convert to a string\n mesg_dict_str = marshal.dumps(mesg_dict)\n\n # Make a full string\n mesg = str(len(mesg_dict_str)) + \":\" + mesg_dict_str\n\n # Send this\n index = 0\n while index < len(mesg):\n bytes = os.write(writehandle, mesg[index:])\n if bytes == 0:\n raise EnvironmentError, \"Write send 0 bytes! Pipe broken!\"\n index += bytes",
"def write_ops(self):\n return self._write_ops",
"def packet_write(self):\n bytes_written = 0\n \n if self.sock == NC.INVALID_SOCKET:\n return NC.ERR_NO_CONN, bytes_written\n \n while len(self.out_packet) > 0:\n pkt = self.out_packet[0]\n write_length, status = nyamuk_net.write(self.sock, pkt.payload)\n if write_length > 0:\n pkt.to_process -= write_length\n pkt.pos += write_length\n \n bytes_written += write_length\n \n if pkt.to_process > 0:\n return NC.ERR_SUCCESS, bytes_written\n else:\n if status == errno.EAGAIN or status == errno.EWOULDBLOCK:\n return NC.ERR_SUCCESS, bytes_written\n elif status == errno.ECONNRESET:\n return NC.ERR_CONN_LOST, bytes_written\n else:\n return NC.ERR_UNKNOWN, bytes_written\n \n \"\"\"\n if pkt.command & 0xF6 == NC.CMD_PUBLISH and self.on_publish is not None:\n self.in_callback = True\n self.on_publish(pkt.mid)\n self.in_callback = False\n \"\"\"\n \n #next\n del self.out_packet[0]\n \n #free data (unnecessary)\n \n self.last_msg_out = time.time()\n \n \n return NC.ERR_SUCCESS, bytes_written",
"def _write_v2(self, data):\n return self.usb_dev.write(self.ep_out, data, self.usb_wr_timeout)",
"def write( shell, data ):\n #print 'cmd: ' + data\n global waiting\n os.write( shell.stdin.fileno(), data )\n waiting = True",
"def write(self):",
"def write(self):",
"def handle_write(self):\n # without overriding this we would get an \"unhandled write event\"\n # message from asyncore once connection occurs.",
"def write(fd, name, *args, version=None, **kwargs):\n\treturn access('write', fd, name, *args, version=version, **kwargs)",
"def pipemeter(cmd1, cmd2):\n\n proc1 = subprocess.Popen(cmd1, bufsize=0, shell=True, stdout=subprocess.PIPE)\n proc2 = subprocess.Popen(cmd2, bufsize=0, shell=True, stdin=subprocess.PIPE)\n bytes_piped = 0\n\n while True:\n data = proc1.stdout.read(CHUNKSIZE)\n length = len(data)\n if length == 0:\n break\n\n written = proc2.stdin.write(data)\n if written != length:\n raise RuntimeError(\"Write failed, wanted to write: {}, written={}\".format(length, written))\n\n bytes_piped += length\n\n proc1.stdout.close()\n proc2.stdin.close()\n\n return proc1.wait(), proc2.wait(), bytes_piped",
"def writable(self):\n ...",
"def _handle_writing(self, soc):\n self._log(\"writing %r\" % self._writing[soc])\n sent = soc.send(self._writing[soc])\n if not sent:\n self._handle_error(soc)\n # Offsets would be more efficient, but this is python so it's not worth it.\n self._writing[soc] = self._writing[soc][sent:]\n if not self._writing[soc]:\n # Finished writing the whole thing.\n self._cleanup(soc)",
"def mode(self):\n return self._vdma.writechannel.mode",
"def write(self, buf: bytes, /) -> Optional[int]:"
] | [
"0.6027396",
"0.5985349",
"0.58283186",
"0.57623416",
"0.5752828",
"0.56974834",
"0.5655385",
"0.5602302",
"0.5599069",
"0.55921566",
"0.5586974",
"0.55652577",
"0.555992",
"0.5538428",
"0.55314046",
"0.54355025",
"0.54124594",
"0.5402488",
"0.5365683",
"0.53546226",
"0.5337956",
"0.53350925",
"0.53350925",
"0.5317445",
"0.5305705",
"0.52762526",
"0.5268209",
"0.52464604",
"0.5244918",
"0.52433896"
] | 0.60171205 | 1 |
Dump data through the pipe. The data is sent using pickle binary format. | def pickle_dump(self, data):
os.write(self.wpipe, pickle.dumps(data, bin=True)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def dump(self, data: dict, file: IO):",
"def pickle(self,data,filename):\n pickle.dump(data, open(filename, 'wb'))",
"def dump(filename, data):\n _savez(filename, [], data, True, allow_pickle=False)",
"def pickle_dump(data, file):\n with open(file, 'wb') as f:\n pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)",
"def dump(): # pragma: no cover\n dods = sys.stdin.read()\n dds, xdrdata = dods.split(b'\\nData:\\n', 1)\n dataset = dds_to_dataset(dds)\n xdr_stream = io.BytesIO(xdrdata)\n data = unpack_dap2_data(xdr_stream, dataset)\n pprint.pprint(data)",
"def dump_data(ser, meta, args):\n ser.reset_input_buffer()\n ser.reset_output_buffer()\n\n command = b\"TEXT.DUMP\\r\"\n rx = \"\"\n ntry = 0\n while not rx or (rx.split()[-1] != \"data?\"):\n rx = send_cmd(ser, command, args.debug)\n # sys.stderr.write(rx)\n ntry += 1\n if ntry > 3:\n LOGGER.warning(\"Wrong response to dump command ({})\".format(command))\n return 0\n\n command = b\"Y\"\n rx = \"\"\n ntry = 0\n while not rx or (rx.split()[-1] != \"ready\"):\n rx = send_cmd(ser, command, args.debug)\n # sys.stderr.write(rx)\n ntry += 1\n if ntry > 3:\n LOGGER.warning(\"Wrong response to dump command ({})\".format(command))\n return 0\n\n b = b\"\\r\"\n n = ser.write(b)\n if args.debug:\n LOGGER.debug(\"{} byte ({}) written to port\\n\".format(n, repr(b)))\n time.sleep(0.05)\n\n dumpst = time.time()\n suff = \"\"\n if meta.badclock:\n suff = \"-badclock\"\n\n fname = \"{}/{}sb.{}{}\".format(args.path, meta.modserial, args.calday, suff)\n fh = open(fname, \"w\")\n\n fraw = \"\"\n rxline = 1\n try:\n while rxline:\n rxline = ser.readline()\n if rxline:\n sys.stdout.write(rxline.decode(errors=\"replace\"))\n fout = crlfpat.sub(linend, rxline.decode(errors=\"replace\"))\n if cafepat.search(fout):\n meta.cafe = True\n fh.write(fout)\n except KeyboardInterrupt:\n interrupt = b\"\\x03\"\n send_cmd(ser, interrupt, args.debug)\n # time.sleep(1.0)\n # rxline = 1\n # while rxline:\n # rxline = ser.readline()\n # if rxline:\n # sys.stdout.write(rxline.decode(errors='replace'))\n # fout = crlfpat.sub(linend, rxline.decode(errors='replace'))\n # fh.write(fout)\n ser.reset_input_buffer()\n fh.close()\n fsize = os.stat(fname).st_size\n frename = fname + \"-abort\"\n os.rename(fname, frename)\n sys.stderr.write(\"\\n\\n\")\n LOGGER.warning(\"Download aborted: wrote {} bytes to {}\".format(fsize, frename))\n return 0\n\n fh.close()\n\n if meta.cafe:\n frename = fname + \"-cafe\"\n os.rename(fname, frename)\n fname = frename\n\n dumpend = time.time()\n etsec = dumpend - dumpst\n dtet = datetime(1900, 1, 1, 0, 0, 0) + timedelta(seconds=etsec)\n\n fsize = os.stat(fname).st_size\n sys.stderr.write(\"\\n\\n\")\n if meta.badclock or meta.cafe:\n LOGGER.warning(\"Wrote {} bytes to {}\".format(fsize, fname))\n else:\n LOGGER.info(\"Wrote {} bytes to {}\".format(fsize, fname))\n LOGGER.info(\n \"Dumped {} records in {} (hh:mm:ss)\".format(meta.ndumprec, dtet.strftime(etfmt))\n )\n\n FLOGGER.info(\"Wrote {} bytes to {}\".format(fsize, fname))\n FLOGGER.info(\n \"Dumped {} records in {} (hh:mm:ss)\".format(meta.ndumprec, dtet.strftime(etfmt))\n )\n\n return fsize",
"def pickle_to_stream(data):\n pickle_stream = PickleStream()\n pickle.dump(data, pickle_stream)\n return pickle_stream",
"def dump(self,out):\n if self.changed: raise StateError(_('Data changed: ')+ self.name)\n if not self.data: raise StateError(_('Data undefined: ')+self.name)\n out.write(struct.pack('4s3i',self.name,self.size,self.delFlag,self.recFlag))\n out.write(self.data)",
"def dumpData(self,out):\n raise AbstractError",
"def dumpData(self,out):\n raise AbstractError",
"def dump(self):\n# self.partial_in=\"\"\n# for line in sys.stdin: \n# self.partial_in+=sys.stdin.read(1)\n sys.stdout = sys.__stdout__\n os.system('cls')\n for cb in self.buffers.values():\n cb.dump(sys.stdout)\n sys.stdout = self",
"def dump_pickle_data(obj, filename):\n path = \"../tmp/{}.pckl\".format(filename)\n f = open(path, 'wb')\n pickle.dump(obj, f)\n f.close()",
"def pickle_dump(file_name: str, data: Any):\n with open(file_name, 'wb') as file:\n pickle.dump(data, file, protocol=pickle.HIGHEST_PROTOCOL)",
"def dumpme(self) :\n fileName = \"./data/oP4_ModelBuilder.dump\"\n with open(fileName,\"wb\") as dumpedFile:\n oPickler = pickle.Pickler(dumpedFile)\n oPickler.dump(self)",
"def dump_pickle(path, data):\n with open(path, 'wb') as f:\n pickle.dump(data, f)",
"def pickle_load(self):\n \n #\n # I am wrapping the file descriptor because this way pickle\n # returns on each data send separately (allowing for sending\n # multiple data before reading).\n # I close the file descriptor or else for some reason just\n # closing the write side of the pipe doesn't raise an EOF\n # in the read side.\n #\n if not hasattr(self, '_rf'):\n self._rf = os.fdopen(os.dup(self.rpipe), 'r')\n \n data = pickle.load(self._rf)\n \n return data",
"def writePipe(self, data):\n if not '[Press a key]' in data:\n subprocess.Popen(\"echo \" + str(data) + \" > /tmp/g13-0\", shell=True)",
"def dump(self, data: Union['BaseFlow', 'BaseExecutor', 'BaseDriver']) -> Dict:\n raise NotImplementedError",
"def dump(data):\n if Utils.memdmp is None:\n Utils.memdmp = list()\n if NS.LOG_DISABLED is True:\n Utils.memdmp.append(data)\n if Utils.dmpfile is None and NS.LOG_DISABLED is False:\n Utils.dmpfile = open(r\"C:\\\\ProgramData\\\\Dmp.txt\", \"w\") \n if NS.LOG_DISABLED is False:\n Utils.dmpfile.write(str(data))",
"def dumps(self, data: Any) -> bytes:\n out = BytesIO()\n self._write(out, data)\n return out.getvalue()",
"def dump(self):\n return self._data.dump()",
"def send(connection, data):\n connection.send(pickle.dumps(data))",
"def _save_data(self, data):\n path = os.path.join(self._cache_path, '%s.data' % self._name)\n\n f = bz2.BZ2File(path, 'w')\n f.write(pickle.dumps(data))\n f.close()",
"def dump_data(self,filename,dump_id):\n # get pure data copy\n data = [ d.get_pure_data_copy() for d in self.plotter.data ]\n # full file name of the file with manipulator dump\n filename=tdc_Filenames.get_full_vis_filename(dump_id, filename+'.pickle')\n pickle.dump( data, open(filename,'w') )\n print '\\nContent dumped in \"%s\" \\n' % filename",
"def pickle_data(filename, data):\n f = open(filename, \"wb\")\n pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)\n f.close()",
"def dump(self):\n # dump self.data\n pickle.dump(self.data, open(self.data_dir + DATA_PATH, 'wb+'))\n # dump self.code2desc\n pickle.dump(self.code2desc, open(self.data_dir + CODE2DESC_PATH, 'wb+'))\n # dump self.family2tf\n pickle.dump(self.family2tf, open(self.data_dir + FAMILY2TF_PATH, 'wb+'))\n # dump self.word2tf\n pickle.dump(self.word2tf, open(self.data_dir + WORD2TF_PATH, 'wb+'))\n # dump self.word2df\n pickle.dump(self.word2df, open(self.data_dir + WORD2DF_PATH, 'wb+'))\n return None",
"def dump(self, outfile):\n with open(outfile, 'wb') as picklefile:\n pickle.dump(\n {str(key): value for key, value in self.data.items()},\n picklefile)",
"def dump(self, name, data):\n file_path = self.l2.dump(name, data)\n self.l3.dump(name, file_path)",
"def dumpData(self,out):\n #--Get sizes and dump into dataIO\n self.hedr.getSize()\n self.hedr.dump(out)\n for (name,size) in self.masters:\n out.packSub0('MAST',name)\n out.packSub('DATA','Q',size)\n if self.gmdt: \n self.gmdt.getSize()\n self.gmdt.dump(out)\n for other in self.others:\n other.getSize()\n other.dump(out)",
"def serialize(self, fout: t.BinaryIO) -> None:\n fout.write(pickle.dumps(self))"
] | [
"0.6462049",
"0.62800205",
"0.6105975",
"0.6078913",
"0.6073406",
"0.60545284",
"0.6024091",
"0.60216373",
"0.6004561",
"0.6004561",
"0.59766096",
"0.5972074",
"0.59595984",
"0.59463143",
"0.59452784",
"0.5923174",
"0.5905648",
"0.58634555",
"0.5854407",
"0.58264023",
"0.5821184",
"0.58189625",
"0.57737195",
"0.57543737",
"0.5751683",
"0.57017785",
"0.5651567",
"0.56449026",
"0.56249994",
"0.55916977"
] | 0.8239927 | 0 |
Load data from the pipe. The data is sent using pickle binary format. | def pickle_load(self):
#
# I am wrapping the file descriptor because this way pickle
# returns on each data send separately (allowing for sending
# multiple data before reading).
# I close the file descriptor or else for some reason just
# closing the write side of the pipe doesn't raise an EOF
# in the read side.
#
if not hasattr(self, '_rf'):
self._rf = os.fdopen(os.dup(self.rpipe), 'r')
data = pickle.load(self._rf)
return data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def load_data():\n with open('data.pickle', 'rb') as f:\n data = pickle.load(f)\n return data",
"def pickle_dump(self, data):\n \n os.write(self.wpipe, pickle.dumps(data, bin=True))",
"def load_data(self, data):\n self._load_raw_data = data",
"def run(self, data: PipeLineDataObject) -> PipeLineDataObject:\n raise NotImplementedError",
"def read_data(self, workfile='workfile_tmp.p'):\n self.data = pickle.load(open(workfile, 'rb'))",
"def read_data(self, workfile='workfile_tmp.p'):\n self.data = pickle.load(open(workfile, 'rb'))",
"def load_data(self) -> None:",
"def _recv_protocol(self):\n if not self._protocol_recv:\n try:\n data = self._read_bytes(1, timeout=1.0)\n if len(data) == 0:\n self.close()\n raise PipeClosed()\n peer_protocol = struct.unpack('>B', data)[0]\n self._protocol = min(self._protocol or pickle.HIGHEST_PROTOCOL, peer_protocol)\n self._protocol_recv = True\n self._serializer = _PickleSerializer(self._protocol)\n except (OSError, socket.error):\n self.close()\n raise PipeClosed()",
"def load(self):\n # Check whether Unpickler was initialized correctly. This is\n # only needed to mimic the behavior of _pickle.Unpickler.dump().\n if not hasattr(self, \"_file_read\"):\n raise UnpicklingError(\n \"Unpickler.__init__() was not called by \"\n \"%s.__init__()\" % (self.__class__.__name__,)\n )\n self._unframer = _Unframer(self._file_read, self._file_readline)\n self.read = self._unframer.read\n self.readinto = self._unframer.readinto\n self.readline = self._unframer.readline\n self.metastack = []\n self.stack = []\n self.append = self.stack.append\n self.proto = 0\n read = self.read\n dispatch = self.dispatch\n try:\n while True:\n key = read(1)\n if not key:\n raise EOFError\n assert isinstance(key, (bytes, bytearray))\n dispatch[key[0]](self)\n print(\"STK\", bytes([key[0]]), self.stack)\n except _Stop as stopinst:\n return stopinst.value",
"def _load(self, load_dict):\n try:\n self.v_protocol = load_dict.pop(PickleParameter.PROTOCOL)\n except KeyError:\n # For backwards compatibility\n dump = next(load_dict.values())\n self.v_protocol = PickleParameter._get_protocol(dump)\n for key in load_dict:\n val = load_dict[key]\n self._data[key] = pickle.loads(val)",
"def post_load(self, data):\n return data",
"def loads(data):\n return cPickle.loads(data)",
"def recv_pickle(self, flags=0):\n pobj = self.recv(flags)\n return pickle.loads(pobj)",
"def load_synthetic_data():\n\n pickle_object = FM().data_file \n\n with pickle_object.open('rb') as data_file: \n return pickle.load(data_file)",
"def _loadData(self, data):\n Clip._loadData(self, data)\n PlexSession._loadData(self, data)",
"def pipeline(self):\n\n self._get_data()\n self._upload_to_raw()",
"def process_pickle(self, data):\n if self._handler is None:\n raise NotImplementedError\n\n self.notify_started()\n self.send_pickle(self._handler(data))",
"def _read_data(self) -> None:\n raw_data = self.__mmap[:].decode('ascii').rstrip('\\0')\n self.__data = json.loads(raw_data)",
"def load_object(self, filename):\n with open(filename, 'rb') as inp: # Overwrites any existing file.\n data = pickle.load(inp)\n return data",
"def load_pipeline():\n\n try:\n logging.info(\"Loading the fitted pipeline...\")\n with open(base.SAVED_MODEL_PATH, \"rb\") as model_file:\n pipeline = pickle.load(model_file)\n logging.info(\"Loading completed successfully...\")\n except FileNotFoundError:\n logging.error(\"Model file has not been found.\")\n raise\n return pipeline",
"def load(datastream):",
"def getData(self):\n return pickle.loads(self._data)",
"def load_data(self):\n raise NotImplementedError()",
"def load(self):\n logger.debug('Loading state from file %s', self.file_path)\n\n with open(self.file_path, 'rb') as f:\n self.data = pickle.load(f)",
"def setData(self, data):\n self._data = pickle.dumps(data)",
"def load_data(self):",
"def load_data_pickle(PATH, dataset, filename):\n with open(PATH + '/' + dataset + \"_\" + filename + \".pkl\",\"rb\") as f:\n new_data = pickle.load(f)\n\n # print(filename, \"opened\")\n return new_data",
"def _load(self, load_dict):\n if self.v_locked:\n raise pex.ParameterLockedException(\n \"Parameter `%s` is locked!\" % self.v_full_name\n )\n\n if \"data\" in load_dict:\n dump = load_dict[\"data\"]\n self._data = pickle.loads(dump)\n else:\n self._logger.warning(\n \"Your parameter `%s` is empty, \"\n \"I did not find any data on disk.\" % self.v_full_name\n )\n\n try:\n self.v_protocol = load_dict[PickleParameter.PROTOCOL]\n except KeyError:\n # For backwards compatibility\n self.v_protocol = PickleParameter._get_protocol(dump)\n\n if \"explored_data\" in load_dict:\n explore_table = load_dict[\"explored_data\"]\n\n name_col = explore_table[\"idx\"]\n\n explore_list = []\n for name_id in name_col:\n arrayname = self._build_name(name_id)\n loaded = pickle.loads(load_dict[arrayname])\n explore_list.append(loaded)\n\n self._explored_range = explore_list\n self._explored = True\n\n self._default = self._data\n self._locked = True",
"def _loadData(self, data):\n Episode._loadData(self, data)\n PlexSession._loadData(self, data)",
"def load(self):\n f = self.open(\"rb\")\n try:\n import pickle\n\n return error.checked_call(pickle.load, f)\n finally:\n f.close()"
] | [
"0.6285068",
"0.6276552",
"0.62092227",
"0.61791235",
"0.61763227",
"0.61763227",
"0.60887617",
"0.60785884",
"0.6005809",
"0.5972852",
"0.5966116",
"0.59555256",
"0.5851574",
"0.57980245",
"0.57809585",
"0.574515",
"0.5736422",
"0.5733839",
"0.5733341",
"0.5703455",
"0.56713384",
"0.56664497",
"0.565457",
"0.5645606",
"0.5633293",
"0.56224865",
"0.5622458",
"0.56119704",
"0.55918366",
"0.5588477"
] | 0.7654427 | 0 |
A function to force Tensorflow to use CPU even Nvidia GPU present | def cpu_fallback(flag=True):
gpu_phy_devices = tf.config.list_physical_devices("GPU")
cpu_phy_devices = tf.config.list_physical_devices("CPU")
general_warning_msg = (
f"Tensorflow has already been initialized, {inspect.currentframe().f_code.co_name}() needs "
f"to be called before any Tensorflow operation, as a result this function will have no effect"
)
if flag is True:
try:
tf.config.set_visible_devices([], "GPU")
except RuntimeError:
warnings.warn(general_warning_msg)
elif flag is False:
try:
tf.config.set_visible_devices(gpu_phy_devices, "GPU")
except RuntimeError:
warnings.warn(general_warning_msg)
else:
raise ValueError("Unknown flag, can only be True of False!") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def configure_gpu_tf():\n\n try:\n # locate available devices & set required environment variables\n available_device_ids = GPUtil.getFirstAvailable(order='first', maxLoad=0.7, maxMemory=0.7, attempts=1, interval=10)\n available_device_id = available_device_ids[0]\n os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n os.environ['CUDA_VISIBLE_DEVICES'] = str(available_device_id)\n print(f\"\\n GPU Found! running on GPU:{available_device_id}\\n\")\n\n # set GPU configuration (use all GPU memory if device 0, else use <50% of memory)\n tf.debugging.set_log_device_placement(False)\n physical_gpu = tf.config.experimental.list_physical_devices('GPU')[0]\n\n if available_device_id == 0:\n tf.config.experimental.set_memory_growth(physical_gpu, True)\n else:\n tf.config.experimental.set_virtual_device_configuration(\n physical_gpu,\n [tf.config.experimental.VirtualDeviceConfiguration(memory_limit=4500)]\n )\n logical_gpus = tf.config.experimental.list_logical_devices('GPU')\n assert len(logical_gpus) == 1, \"error creating virtual GPU to fractionally use memory\"\n\n # if we can't find a GPU, or they are all busy, default to using CPU\n except RuntimeError:\n print(\"\\n No GPUs available... running on CPU\\n\")\n os.environ['CUDA_VISIBLE_DEVICES'] = '-1'",
"def try_gpu(i=0): #@save\n if len(tf.config.experimental.list_physical_devices('GPU')) >= i + 1:\n return tf.device(f'/GPU:{i}')\n return tf.device('/CPU:0')",
"def handle_gpu_compatibility():\n try:\n gpus = tf.config.experimental.list_physical_devices(\"GPU\")\n for gpu in gpus:\n tf.config.experimental.set_memory_growth(gpu, True)\n except Exception as e:\n print(e)",
"def cuda_if_gpu(T):\n\n return T.cuda() if use_cuda else T",
"def create_gpu_device_if_present():\n d = dpctl.SyclDevice(\"gpu,cpu\")\n print(\"Selected \" + (\"GPU\" if d.is_gpu else \"CPU\") + \" device\")",
"def variables_on_gpu0():\n old_fn = tf.get_variable\n\n def new_fn(*args, **kwargs):\n with tf.device('/gpu:0'):\n return old_fn(*args, **kwargs)\n\n tf.get_variable = new_fn\n yield\n tf.get_variable = old_fn",
"def setup_device(n_gpus: int) -> object:\n if n_gpus >= 1 and torch.cuda.is_available():\n LOG.info('\\n CUDA is available! using GPU...')\n return torch.device('cuda')\n else:\n LOG.info('\\n Using CPU...')\n return torch.device('cpu')",
"def try_gpu():\n try:\n ctx = mx.gpu()\n _ = nd.array([0], ctx=ctx)\n except:\n ctx = mx.cpu()\n return ctx",
"def try_gpu():\n try:\n ctx = mx.gpu()\n _ = nd.array([0], ctx=ctx)\n except:\n ctx = mx.cpu()\n return ctx",
"def try_gpu():\r\n try:\r\n ctx = mx.gpu()\r\n _ = nd.array([0], ctx=ctx)\r\n except mx.base.MXNetError:\r\n ctx = mx.cpu()\r\n return ctx",
"def try_tensorflow_import(verbose=False):\n import os\n\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"1\"\n\n import tensorflow as tf\n\n tf.compat.v1.disable_eager_execution()\n\n if verbose:\n tf.debugging.set_log_device_placement(True) # logs what device is being used\n gpus = tf.config.experimental.list_physical_devices(\"GPU\")\n if not gpus:\n return\n\n for gpu in gpus:\n tf.config.experimental.set_memory_growth(gpu, True)\n\n if verbose:\n logical_gpus = tf.config.experimental.list_logical_devices(\"GPU\")\n print(len(gpus), \"Physical GPUs,\", len(logical_gpus), \"Logical GPUs\")",
"def setGPU(state):\n\n\timport tensorflow as tf\n\tfrom keras import backend as K\n\n\tcheckGPU()\n\n\tnum_cores = 1\n\tnum_CPU = 1\n\tnum_GPU = 0\n\tif state:\n\t\tnum_GPU = 1\n\n\tconfig = tf.ConfigProto(intra_op_parallelism_threads=num_cores,\\\n\t inter_op_parallelism_threads=num_cores, allow_soft_placement=True,\\\n\t device_count = {'CPU' : num_CPU, 'GPU' : num_GPU})\n\tsession = tf.Session(config=config)\n\tK.set_session(session)",
"def check_gpus(_: None, gpus_optional: bool = False) -> None:\n gpu_devices = tf.config.list_physical_devices(\"GPU\")\n if gpu_devices:\n logging.info(f\"Using GPU: {gpu_devices}\")\n elif gpus_optional:\n logging.warning(\"No GPUs found, defaulting to CPU.\")\n else:\n raise RuntimeError(\"No GPUs found.\")",
"def test_gpu_cuda_code() -> None:\n if get_from_environ(\"DISABLE_GPU_FOR_TESTING\") is not None:\n print(\"GPU payload disabled for testing\")\n return\n\n # if the command exists it can run on the hardware below\n proc = subprocess.Popen([\"nvidia-smi\"], stdout=subprocess.PIPE)\n stdout, _ = proc.communicate()\n str_stdout = stdout.decode()\n assert \"NVIDIA-SMI\" in str_stdout, str_stdout\n assert proc.returncode == 0\n # search the history for the CUDA implementation",
"def detect_gpus():\n def worker(q):\n # `device_lib` will not release the memory it took,\n # so we run it in a sub-process.\n try:\n from tensorflow.python.client import device_lib\n\n if is_tensorflow_version_higher_or_equal('1.8.0'):\n config = tf.ConfigProto()\n config.gpu_options.allow_growth = True\n devices = list(device_lib.list_local_devices(config))\n else:\n devices = list(device_lib.list_local_devices())\n gpus = [\n (device.name, device)\n for device in devices\n if device.device_type == 'GPU'\n ]\n union_set = {i: i for i in range(len(gpus))}\n\n for i, (name, device) in enumerate(gpus):\n assert (device.name == '/device:GPU:{}'.format(i))\n for link in device.locality.links.link:\n if link.device_id != i:\n union_set[i] = union_set[link.device_id]\n\n for i in six.iterkeys(union_set):\n while union_set[i] != union_set[union_set[i]]:\n union_set[i] = union_set[union_set[i]]\n\n root_devices = sorted(set(union_set.values()))\n gpu_groups = [[] for _ in range(len(root_devices))]\n dev_to_group = {j: i for i, j in enumerate(root_devices)}\n for i, (name, device) in enumerate(gpus):\n gpu_groups[dev_to_group[union_set[i]]].append(name)\n\n q.put((1, gpu_groups))\n except Exception:\n q.put((0, traceback.format_exc()))\n\n q = mp.Queue()\n p = mp.Process(target=worker, args=(q,))\n\n try:\n p.start()\n result = q.get()\n if result[0] == 1:\n return result[1]\n else:\n raise RuntimeError(\n 'Failed to retrieve GPU information, the traceback of '\n 'sub-process is:\\n {}'.\n format('\\n '.join(result[1].split('\\n')))\n )\n finally:\n p.terminate()\n p.join()",
"def return_free_GPU():\r\n if torch.cuda.is_available():\r\n gpu_num = torch.cuda.device_count()\r\n device = torch.device('cuda:{}'.format(gpu_num-1))\r\n print('Using GPU:[{}]/[{}] for training...'.format(gpu_num-1,gpu_num-1))\r\n return device\r\n \r\n raise ValueError('GPU not available for training. Check CUDA env with function \"check_cuda_env\"')",
"def try_gpu(i=0):\n if torch.cuda.device_count() >= i + 1:\n return torch.device(f'cuda:{i}')\n return torch.device('cpu')",
"def try_gpu(i=0):\n if torch.cuda.device_count() >= i + 1:\n return torch.device(f'cuda:{i}')\n return torch.device('cpu')",
"def try_gpu(i=0):\n if torch.cuda.device_count() >= i + 1:\n return torch.device(f'cuda:{i}')\n return torch.device('cpu')",
"def setup_gpu(use_gpu: int, silent=None) -> None:\n if silent is None:\n local_msg = Printer()\n else:\n local_msg = Printer(no_print=silent, pretty=not silent)\n if use_gpu >= 0:\n local_msg.info(f\"Using GPU: {use_gpu}\")\n require_gpu(use_gpu)\n else:\n local_msg.info(\"Using CPU\")\n if gpu_is_available():\n local_msg.info(\"To switch to GPU 0, use the option: --gpu-id 0\")",
"def with_cpu(ops, model):\n ...",
"def is_gpu_available() -> bool:\n return torch.cuda.is_available()",
"def maybe_cuda(t):\n if torch.cuda.is_available():\n return t\n return t",
"def try_all_gpus(): #@save\n num_gpus = len(tf.config.experimental.list_physical_devices('GPU'))\n devices = [tf.device(f'/GPU:{i}') for i in range(num_gpus)]\n return devices if devices else [tf.device('/CPU:0')]",
"def try_gpu(x):\n global _GPUS_EXIST\n if _GPUS_EXIST:\n try:\n return x.cuda()\n except (AssertionError, RuntimeError):\n print('No GPUs detected. Sticking with CPUs.')\n _GPUS_EXIST = False\n return x",
"def _on_gpu(self) -> bool:\n return self._current_device_index != CPU_INDEX",
"def check_cuda():\n if OS_VERSION[0] == \"Linux\":\n check_cuda_linux()\n elif OS_VERSION[0] == \"Windows\":\n check_cuda_windows()",
"def test_nvidia_driver2():\r\n a = numpy.random.rand(10000).astype(\"float32\")\r\n cuda.shared_constructor(a)\r\n assert theano.sandbox.cuda.use.device_number is not None",
"def only_gpu(request):\n if request.node.get_closest_marker('gpu'):\n if 'device' in request.fixturenames:\n if not isinstance(request.getfixturevalue('device'),\n hoomd.device.GPU):\n pytest.skip('Test is run only on GPU(s).')\n else:\n raise ValueError('only_gpu requires the *device* fixture')",
"def only_cpu(request):\n if request.node.get_closest_marker('cpu'):\n if 'device' in request.fixturenames:\n if not isinstance(request.getfixturevalue('device'),\n hoomd.device.CPU):\n pytest.skip('Test is run only on CPU(s).')\n else:\n raise ValueError('only_cpu requires the *device* fixture')"
] | [
"0.7213077",
"0.7027226",
"0.6988259",
"0.6981788",
"0.6814441",
"0.6727671",
"0.67254525",
"0.6702479",
"0.6702479",
"0.66789967",
"0.6660818",
"0.66421443",
"0.65990984",
"0.65961355",
"0.65846235",
"0.6550118",
"0.6528544",
"0.6528544",
"0.6528544",
"0.65123516",
"0.65119696",
"0.65039396",
"0.6499269",
"0.6475337",
"0.64024717",
"0.63990587",
"0.6377722",
"0.6369385",
"0.6261901",
"0.62541944"
] | 0.7158892 | 1 |
Return results from detector. This function prepares the environment loading the plugins, getting the response and passing it to the detector. In case of errors, it raises exceptions to be handled externally. | def get_detection_results(
url,
timeout,
metadata=False,
save_har=False,
splash_url="",
):
plugins = load_plugins()
if not plugins:
raise NoPluginsError("No plugins found")
logger.debug("[+] Starting detection with %(n)d plugins", {"n": len(plugins)})
response = get_response(url, plugins, timeout, splash_url)
# Save HAR
if save_har:
fd, path = tempfile.mkstemp(suffix=".har")
logger.info(f"Saving HAR file to {path}")
with open(fd, "w") as f:
json.dump(response["har"], f)
det = Detector(response, plugins, url)
softwares = det.get_results(metadata=metadata)
output = {"url": url, "softwares": softwares}
return output | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def request_plugins(self):",
"def setup(self):\n rc = self.rc\n try:\n for plugin in self.plugins:\n plugin.setup(rc)\n except Exception as e:\n self.exit(e)",
"def load_results(self):\n\n scan_results = engine_pb2.EnrichedLaunchToolResponse()\n collected_results = load_files(scan_results, self.pb_location)\n\n return collected_results",
"def _get_results(self, res):\n self.async_res = res\n self.full_res = res.wait() # pragma: no cover\n self.trained = True # pragma: no cover\n self.mod_id = self.full_res['model_id'] # pragma: no cover\n self.data_id = self.full_res['data_id'] # pragma: no cover\n self.params_dump = self.full_res['params_dump'] # pragma: no cover\n if self.verbose > 0: # pragma: no cover\n print(\"Result {} | {} ready\".format(\n self.mod_id, self.data_id)) # pragma: no cover",
"def dispatch(cls, request, reply):\n try:\n plugin = cls(\n request=request,\n reply=reply\n )\n plugin.load_config()\n\n try:\n plugin.execute()\n except Exception as e:\n raise RPCAborted(str(e))\n\n except ChoriaExternalException as e:\n reply.fail(str(e))",
"def _setup_results(self):\n\n self._ping()\n\n self._create_remote_dir(self._remote_results_path)\n\n self._setup_has_ran = True",
"def setup(self,context,result):\n pass",
"def get_plugins(request):\r\n res = requests.get(DaemonServer._mock_url + '/plugins')\r\n return res",
"def execute(self):\n rc = self.rc\n try:\n for plugin in self.plugins:\n plugin.execute(rc)\n except Exception as e:\n self.exit(e)",
"def setup(self, options, results):",
"def tesseract_recognize(): # pylint: disable=too-many-return-statements\n try:\n data = request.json\n\n ### Check options ###\n if 'options' in data:\n if not isinstance(data['options'], list) or \\\n not all(isinstance(i, string_types) for i in data['options']):\n return resp400('expected options as an array of strings')\n else:\n data['options'] = []\n\n info_option = any([op == \"-h\" or op == \"--help\" or \\\n op == \"-v\" or op == \"--version\" \\\n for op in data['options']])\n\n if not info_option:\n # @todo Accept input xml and images from HTTP and output xml in response\n\n ### Check input file ###\n if 'input_file' not in data or \\\n not isinstance(data['input_file'], string_types):\n return resp400('expected input_file as a string')\n if not os.path.isfile(data['input_file']):\n return resp400('input file not found: '+data['input_file'])\n\n ### Check output file ###\n if 'output_file' not in data or \\\n not isinstance(data['output_file'], string_types):\n return resp400('expected output_file as a string')\n\n ### Generate command list with additional options if present ###\n cmd = ['/usr/local/bin/tesseract-recognize']\n cmd.extend(data['options'])\n if not info_option:\n cmd.extend([data['input_file'], data['output_file']])\n\n ### Execute tesseract-recognize command ###\n proc = Popen(cmd, shell=False, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)\n cmd_out = str(proc.stdout.read())\n proc.communicate()\n cmd_rc = proc.returncode\n\n ### Response depending on the case ###\n msg = 'command='+(' '.join(cmd))+' output='+cmd_out\n if cmd_rc != 0:\n return resp400('execution failed: '+msg+' return_code='+str(cmd_rc))\n return resp200('execution successful: '+msg)\n\n ### Catch any problem and respond a accordingly ###\n except Exception as ex: # pylint: disable=broad-except\n return resp500(str(ex)+\"\\n\"+traceback.format_exc())",
"def setup_plugins(self) -> None:\n load_success = 0\n load_error = 0\n load_disabled = 0\n\n LOGGER.info(\"Loading plugins...\")\n usable_plugins = plugins.get_usable_plugins(self.settings)\n for name, info in usable_plugins.items():\n plugin_handler, is_enabled = info\n if not is_enabled:\n load_disabled = load_disabled + 1\n continue\n\n try:\n plugin_handler.load()\n except Exception as e:\n load_error = load_error + 1\n LOGGER.exception(\"Error loading %s: %s\", name, e)\n except SystemExit:\n load_error = load_error + 1\n LOGGER.exception(\n \"Error loading %s (plugin tried to exit)\", name)\n else:\n try:\n if plugin_handler.has_setup():\n plugin_handler.setup(self)\n plugin_handler.register(self)\n except Exception as e:\n load_error = load_error + 1\n LOGGER.exception(\"Error in %s setup: %s\", name, e)\n else:\n load_success = load_success + 1\n LOGGER.info(\"Plugin loaded: %s\", name)\n\n total = sum([load_success, load_error, load_disabled])\n if total and load_success:\n LOGGER.info(\n \"Registered %d plugins, %d failed, %d disabled\",\n (load_success - 1),\n load_error,\n load_disabled)\n else:\n LOGGER.warning(\"Warning: Couldn't load any plugins\")",
"def _introspection_complete(self):\r\n self.busy = False\r\n result = self.request.result\r\n info = self.request.info\r\n current = self._get_code_info('current')\r\n\r\n if result and current.filename == info.filename:\r\n func = getattr(self, '_handle_%s_response' % info.name)\r\n try:\r\n func(result, current, info)\r\n except Exception as e:\r\n debug_print(e)\r\n elif current.filename == info.filename and info.name == 'definition':\r\n result = self.plugins['fallback'].get_definition(info)\r\n\r\n if info == self.pending:\r\n self.pending = None\r\n\r\n self._handle_pending()",
"def execute(self, response):\n if not has_request_context:\n return\n\n self._fallback_fixture_names()\n\n try:\n app = self.auto_fixture.app\n\n # Create response fixture\n fixture = Fixture.from_response(response, app, self.response_name)\n self.auto_fixture.add_fixture(fixture)\n\n # Create request fixture\n if request.data:\n fixture = Fixture.from_request(request, app, self.request_name)\n self.auto_fixture.add_fixture(fixture)\n except TypeError: # pragma: no cover\n warnings.warn(\"Could not create fixture for unsupported mime type\")\n\n return response",
"def _init_results(self) -> None:\n pt_bond_dimensions = {}\n for site, pt in enumerate(self._process_tensors):\n if pt is not None:\n pt_bond_dimensions[site] = pt.get_bond_dimensions()\n\n self._results = {\n 'time':[],\n 'norm': [],\n 'bond_dimensions': [],\n 'dynamics': {},\n 'pt_bond_dimensions': pt_bond_dimensions,\n }\n for sites in self._dynamics_sites:\n self._results['dynamics'][sites] = Dynamics(name=f\"site{sites}\")",
"def _backend_run(self):\n results = None\n return results",
"def get_launch_response():\n \n return get_init_response()",
"def ready(self):\n if settings.PLUGINS_ENABLED:\n if not canAppAccessDatabase(allow_test=True):\n logger.info(\"Skipping plugin loading sequence\") # pragma: no cover\n else:\n logger.info('Loading InvenTree plugins')\n\n if not registry.is_loading:\n # this is the first startup\n try:\n from common.models import InvenTreeSetting\n if InvenTreeSetting.get_setting('PLUGIN_ON_STARTUP', create=False, cache=False):\n # make sure all plugins are installed\n registry.install_plugin_file()\n except Exception: # pragma: no cover\n pass\n\n # get plugins and init them\n registry.plugin_modules = registry.collect_plugins()\n registry.load_plugins()\n\n # drop out of maintenance\n # makes sure we did not have an error in reloading and maintenance is still active\n set_maintenance_mode(False)\n\n # check git version\n registry.git_is_modern = check_git_version()\n if not registry.git_is_modern: # pragma: no cover # simulating old git seems not worth it for coverage\n log_error(_('Your enviroment has an outdated git version. This prevents InvenTree from loading plugin details.'), 'load')\n\n else:\n logger.info(\"Plugins not enabled - skipping loading sequence\") # pragma: no cover",
"def collectPlugins(self):\n\t\tself.locatePlugins()\n\t\tself.loadPlugins()",
"def _handle_results(self, res, delay):\n if delay:\n thread = self._get_results(res)\n else:\n self.mod_id = res['model_id']\n self.data_id = res['data_id']\n self.params_dump = res['params_dump']\n\n self.trained = True\n self.full_res = res\n thread = None\n return res, thread",
"def prepare(self):\n\t\treturn self.api.prepare_request(self)",
"def read_callback():\n\n # Walk through the existing environments\n for name in ENVIRONMENT:\n env = ENVIRONMENT[name]\n collectd.info(\"read_callback: entering environment: \" + name)\n\n # Connects to vCenter Server\n service_instance = SmartConnect(\n host=env[\"host\"], user=env[\"username\"], pwd=env[\"password\"]\n )\n performance_manager = service_instance \\\n .RetrieveServiceContent() \\\n .perfManager\n\n # Walk through all Clusters of Datacenter\n for datacenter in service_instance \\\n .RetrieveServiceContent() \\\n .rootFolder.childEntity:\n if datacenter._wsdlName == \"Datacenter\":\n for compute_resource in datacenter.hostFolder.childEntity:\n if compute_resource._wsdlName == \\\n \"ComputeResource\" \\\n or compute_resource._wsdlName == \\\n \"ClusterComputeResource\":\n cluster_name = \\\n compute_resource.name if env['use_friendly_name'] \\\n else compute_resource._moId\n # Walk throug all hosts in cluster, collect its metrics\n # and dispatch them\n collectd.info(\n \"read_callback: found %d hosts in cluster %s\" % (\n len(compute_resource.host),\n compute_resource.name\n )\n )\n if len(env['host_counter_ids']) > 0:\n collet_metrics_for_entities(\n service_instance,\n performance_manager,\n env['host_counter_ids'],\n compute_resource.host,\n cluster_name,\n env\n )\n\n # Walk throug all vms in host, collect its metrics and\n # dispatch them\n for host in compute_resource.host:\n if host._wsdlName == \"HostSystem\":\n collectd.info(\n \"read_callback: found %d vms in host %s\" % (\n len(host.vm), host.name\n )\n )\n if len(env['vm_counter_ids']) > 0:\n collet_metrics_for_entities(\n service_instance,\n performance_manager,\n env['vm_counter_ids'],\n host.vm,\n cluster_name,\n env\n )\n Disconnect(service_instance)",
"def execute(self, directory, available_resources):\n\n return self._get_output_dictionary()",
"def run(self):\n\n try:\n # Get the content from this page\n if self.verbose:\n print \"Getting page content for '%s'\" % self.url.strip()\n \n content = getPageContent(self.url)\n\n # Verify that this is not binary data\n if content is not None and isHTML(content):\n\n\n # Extract basic data about this result\n content = content.lower()\n title, keywords, description = parseMetaDataFromContent(content)\n headers = parseHeaderInformationFromContent(content)\n\n # Add this result data\n self.resultDictionary['title'] = title\n self.resultDictionary['keywords'] = keywords\n self.resultDictionary['description'] = description\n self.resultDictionary['content'] = content\n self.resultDictionary['headers'] = headers\n\n # Run the extensions\n for extension in self.extensions:\n extension.run(self.resultDictionary)\n\n\n except URLError:\n\n # Skip this URL, and register it as an error on the cache\n if self.verbose:\n print(\"Error accessing '%s', %s\" % (self.url.strip(), str(sys.exc_info()[1]).strip()))",
"def doTask(self):\n d = self.plugin.collect(self.config)\n d.addBoth(self.plugin.onResult, self.config)\n d.addCallback(self.plugin.onSuccess, self.config)\n d.addErrback(self.plugin.onError, self.config)\n d.addBoth(self.plugin.onComplete, self.config)\n d.addCallback(self.processResults)\n d.addErrback(self.handleError)\n return d",
"def init_detector(config):\n\n crf_list = config[\"detection\"][\"crf_ner_list\"].split(\",\")\n crf_model_list = [load(crf) for crf in crf_list]\n\n crf_ner_classic = None\n if \"crf_ner_classic\" in config[\"detection\"]:\n crf_ner_classic_list = config[\"detection\"][\n \"crf_ner_classic\"].split(\",\")\n crf_ner_classic = [load(crf) for crf in crf_ner_classic_list]\n\n # search for mail list\n corp_mail_list = []\n if config[\"detection\"][\"corp_mail_list\"]:\n with open(config[\"detection\"][\"corp_mail_list\"], \"r\") as f_in:\n for line in f_in:\n line = line.rstrip(\"\\n\")\n corp_mail_list.append(line)\n\n # build the system here\n nlp = None\n if \"nlp_model\" in config[\"detection\"]:\n nlp = spacy.load(config[\"detection\"][\"nlp_model\"])\n\n custom_word_list = []\n\n if \"custom_word_list\" in config:\n with open(config[\"custom_word_list\"], \"r\") as f_in:\n custom_word_list = [line.rstrip(\"\\n\") for line in f_in]\n\n # configuration of the proximity regexp\n regexp_config_dict = OrderedDict()\n if \"proximity_regexp_config\" in config:\n for key in config[\"proximity_regexp_config\"]:\n regexp_config_dict[key] = OrderedDict()\n regexp_config_dict[key][\"left_span_len\"] = int(\n config[\"proximity_regexp_config\"][key][\"left_span_len\"])\n\n regexp_config_dict[key][\"right_span_len\"] = int(\n config[\"proximity_regexp_config\"][key][\"right_span_len\"])\n\n with open(config[\n \"proximity_regexp_config\"][key][\"word_file\"], \"r\") as f_in:\n word_list = [normalize_text_proximity(\n line.rstrip(\"\\n\").strip()) for line in f_in]\n\n regexp_config_dict[key][\"word_list\"] = word_list\n\n low_priority_list = None\n if \"low_priority_list\" in config:\n low_priority_list = config[\"low_priority_list\"]\n\n my_detector = Detector(nlp,\n crf_model_list,\n load(config[\n \"detection\"][\"personal_email_detection\"]),\n crf_ner_classic,\n corp_mail_list=corp_mail_list,\n custom_word_list=custom_word_list,\n regexp_config_dict=regexp_config_dict,\n signature_max_distance=config[\"signature_max_distance\"],\n low_priority_list=low_priority_list)\n\n return my_detector",
"def getResults():",
"def _load_results(self, filename):\n cr = CaseReader(filename)\n case = cr.system_cases.get_case(-1)\n loaded_outputs = cr.list_outputs(case=case, explicit=True, implicit=True, values=True,\n units=True, shape=True, out_stream=None)\n\n self.outputs = {'indep': {}, 'states': {}, 'controls': {}, 'control_rates': {},\n 'design_parameters': {}, 'input_parameters': {}, 'ode': {}}\n\n for output_name, options in loaded_outputs:\n\n if output_name.startswith('inputs.'):\n output_name = output_name.replace('inputs.', '')\n\n if output_name == 'time':\n var_type = 'indep'\n var_name = 'time'\n if output_name.startswith('states:'):\n var_type = 'states'\n var_name = output_name.replace('states:', '', 1)\n elif output_name.startswith('controls:'):\n var_type = 'controls'\n var_name = output_name.replace('controls:', '', 1)\n elif output_name.startswith('control_rates:'):\n var_type = 'control_rates'\n var_name = output_name.replace('control_rates:', '', 1)\n elif output_name.startswith('design_parameters:'):\n var_type = 'design_parameters'\n var_name = output_name.replace('design_parameters:', '', 1)\n # elif output_name.startswith('traj_design_parameters:'):\n # var_type = 'traj_design_parameters'\n # var_name = output_name.replace('traj_design_parameters:', '', 1)\n\n val = options['value']\n\n elif output_name.startswith('ode.'):\n var_type = 'ode'\n var_name = output_name.replace('ode.', '')\n\n if len(options['value'].shape) == 1:\n val = options['value'][:, np.newaxis]\n else:\n val = options['value']\n else:\n raise RuntimeError('unexpected output in file {1}: {0}'.format(output_name,\n filename))\n\n self.outputs[var_type][var_name] = {}\n self.outputs[var_type][var_name]['value'] = val\n self.outputs[var_type][var_name]['units'] = convert_to_ascii(options['units'])\n self.outputs[var_type][var_name]['shape'] = tuple(val.shape[1:])",
"def __call__(self, results):\n results = super().__call__(results)\n if self.with_bbox_3d:\n results = self._load_bboxes_3d(results)\n if results is None:\n return None\n if self.with_bbox_depth:\n results = self._load_bboxes_depth(results)\n if results is None:\n return None\n\n if self.with_corners_2d:\n results = self._load_corners_2d(results)\n if self.with_label_3d:\n results = self._load_labels_3d(results)\n if self.with_attr_label:\n results = self._load_attr_labels(results)\n if self.with_mask_3d:\n results = self._load_masks_3d(results)\n if self.with_seg_3d:\n results = self._load_semantic_seg_3d(results)\n if self.with_tokens:\n results = self._load_tokens(results)\n\n return results",
"def get_initial_response():\n # Message to the user\n message = {\n 'apiVersion': 'v1.1',\n 'status': 'Online',\n 'message': 'Welcome to the Space Object Registry API. Refer to the documentation on https://github.com/wdelenclos/messier-registry.',\n 'sources' : sources\n }\n # Making the message looks good\n resp = jsonify(message)\n # Returning the object\n return resp"
] | [
"0.57042605",
"0.55662245",
"0.54125327",
"0.5357799",
"0.5216095",
"0.519449",
"0.5178228",
"0.5124306",
"0.5099311",
"0.5020611",
"0.5015244",
"0.4988996",
"0.49813417",
"0.49721754",
"0.49609536",
"0.49307635",
"0.49148032",
"0.49138626",
"0.48853156",
"0.48720548",
"0.48627022",
"0.48571116",
"0.48166186",
"0.48120514",
"0.48014557",
"0.4789435",
"0.47757536",
"0.47542647",
"0.472399",
"0.47124293"
] | 0.58674943 | 0 |
Returns encoding of HTTP response. | def encoding(response: tornado.httpclient.HTTPResponse) -> str:
if 'Content-Encoding' in response.headers:
        return response.headers['Content-Encoding']
elif 'Content-Type' in response.headers:
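        # Parse the Content-Type value as a MIME header so get_param() can extract its charset parameter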
headers = email.message_from_string('Content-Type: ' +
response.headers['Content-Type'])
return headers.get_param('charset', 'utf-8')
else:
return 'utf-8' | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def return_response_string(self):\n response = \"{} {}\\r\\n\".format(self.protocol, self.code)\n str_headers = \"\"\n if self.headers:\n for k, v in self.headers.items():\n str_headers += \"{}: {}\\r\\n\".format(k, v)\n\n encoded_response = \"{}{}\\r\\n\".format(response, str_headers)\n encoded_response = encoded_response.encode(\"utf-8\")\n if self.body:\n if type(self.body) is not bytes:\n self.body = self.body.encode(\"utf-8\")\n encoded_response = encoded_response + self.body\n return encoded_response",
"def encode(self) -> bytes:\n\n encoded_message = struct.pack(Protocol.Formats.RESPONSE_FORMAT, self.response)\n return encoded_message",
"def encodeResponse(self, response):\n return self.encoder.encode(response)",
"def encode(self, response):\n encode_as = response.whichEncoding()\n if encode_as == ENCODE_KVFORM:\n wr = self.responseFactory(body=response.encodeToKVForm())\n if isinstance(response, Exception):\n wr.code = HTTP_ERROR\n elif encode_as == ENCODE_URL:\n location = response.encodeToURL()\n wr = self.responseFactory(\n code=HTTP_REDIRECT, headers={'location': location})\n elif encode_as == ENCODE_HTML_FORM:\n wr = self.responseFactory(code=HTTP_OK, body=response.toHTML())\n else:\n # Can't encode this to a protocol message. You should probably\n # render it to HTML and show it to the user.\n raise EncodingError(response)\n return wr",
"def declared_encoding(self) -> Optional[str]:\n content_type = self.get(\"Content-Type\", \"\")\n return http_content_type_encoding(content_type)",
"def readEncodingFromResponse(self, response):\n if not self.serverEncoding:\n try:\n ct = response.getheader('Content-Type')\n charsetR = re.compile('charset=(.+)')\n charset = charsetR.search(ct).group(1)\n self.serverEncoding = charset\n except:\n pass",
"def GetEncoding(self): \n return self.file.GetEncoding()",
"def encoding(self):\n\n return self._encoding",
"def get_response_byte(self):\n raise NotImplementedError",
"def test_response_autodetect_encoding():\n content = \"おはようございます。\".encode(\"EUC-JP\")\n response = httpcore.Response(200, content=content)\n assert response.text == \"おはようございます。\"\n assert response.encoding == \"EUC-JP\"",
"def decode_response(response):\n return response.read().decode('utf-8')",
"def encode(self):\n\n return self.get_content()",
"def encoding(self):\n return self.get_encoding()",
"def encoding(self):\n return self._encoding",
"def encoding(self):\n return self._encoding",
"def encoding(self):\n return self._encoding",
"def encoding(self):\n return self._encoding",
"async def text(self, encoding=\"utf-8\", errors=\"strict\"):\n return self.response.decode(encoding, errors=errors)",
"def getresponse(self):\n self.resp.status = self.resp.status_code\n old_getheader = self.resp.raw.getheader\n\n def _decode_header(string):\n if string is None:\n return string\n return string.encode('iso-8859-1').decode('utf-8')\n\n def _encode_header(string):\n if string is None:\n return string\n return string.encode('utf-8').decode('iso-8859-1')\n\n def getheaders():\n return [(_decode_header(k), _decode_header(v))\n for k, v in self.resp.headers.items()]\n\n def getheader(k, v=None):\n return _decode_header(old_getheader(\n _encode_header(k.lower()), _encode_header(v)))\n\n def releasing_read(*args, **kwargs):\n chunk = self.resp.raw.read(*args, **kwargs)\n if not chunk:\n # NOTE(sigmavirus24): Release the connection back to the\n # urllib3's connection pool. This will reduce the number of\n # log messages seen in bug #1341777. This does not actually\n # close a socket. It will also prevent people from being\n # misled as to the cause of a bug as in bug #1424732.\n self.resp.close()\n return chunk\n\n self.resp.getheaders = getheaders\n self.resp.getheader = getheader\n self.resp.read = releasing_read\n\n return self.resp",
"def encoding(self) -> str:\n return self._encoding",
"def encoding(self):\n return self._enc",
"def get_charset(response): # 根据请求返回的响应获取数据()\n _charset = requests.utils.get_encoding_from_headers(response.headers)\n if _charset == 'ISO-8859-1':\n __charset = requests.utils.get_encodings_from_content(response.text)\n if __charset:\n _charset = __charset[0]\n else:\n _charset = response.apparent_encoding\n\n return _charset",
"def test_response_default_encoding():\n response = httpcore.Response(200, content=b\"\")\n assert response.text == \"\"\n assert response.encoding == \"utf-8\"",
"def unparsed_response(self) -> bytes:\n return self._unparsed_response",
"def test_response_default_to_utf8_encoding():\n content = \"おはようございます。\".encode(\"utf-8\")\n response = httpx.Response(\n 200,\n content=content,\n )\n assert response.text == \"おはようございます。\"\n assert response.encoding == \"utf-8\"",
"def encoding(self):\n return self.original.encoding",
"def get_body_encoded(self):\r\n return self.encode(self.get_body())",
"def test_response_content_type_encoding():\n headers = {\"Content-Type\": \"text-plain; charset=latin-1\"}\n content = \"Latin 1: ÿ\".encode(\"latin-1\")\n response = httpcore.Response(200, content=content, headers=headers)\n assert response.text == \"Latin 1: ÿ\"\n assert response.encoding == \"latin-1\"",
"def get_encoded(self):\n pass",
"def encoding(self):\n if self._encoding:\n return self._encoding\n\n # Scan meta tags for charset.\n if self._html:\n self._encoding = html_to_unicode(self.default_encoding, self._html)[0]\n # Fall back to requests' detected encoding if decode fails.\n try:\n self.raw_html.decode(self.encoding, errors='replace')\n except UnicodeDecodeError:\n self._encoding = self.default_encoding\n\n return self._encoding if self._encoding else self.default_encoding"
] | [
"0.71907353",
"0.706094",
"0.6990889",
"0.6629994",
"0.6604779",
"0.65353036",
"0.64132226",
"0.6401666",
"0.6394215",
"0.6381188",
"0.6364501",
"0.633328",
"0.6321899",
"0.62905544",
"0.62905544",
"0.62905544",
"0.62905544",
"0.6269559",
"0.6250087",
"0.6205546",
"0.618269",
"0.6143813",
"0.6138578",
"0.61066777",
"0.60962266",
"0.6084444",
"0.6062734",
"0.6036731",
"0.60316205",
"0.6024251"
] | 0.7276046 | 0 |
Get HTTP response body as text. | def text_body(response: tornado.httpclient.HTTPResponse) -> str:
return response.body.decode(encoding(response)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def make_request_txt(self):\n #print (self.url)\n try:\n with closing(get(self.url, stream=True)) as resp: #returns b`txt`\n if self.is_txt(resp):\n return resp.content.decode(\"utf-8\")\n else:\n return None\n except RequestException as e:\n print('Error during requests to {0} : {1}'.format(url, str(e)))\n return None",
"def body(self):\n return self._response and self._response.body or \"\"",
"def get_body_text(self):\n if self.body_type != 'HTML':\n return self.body\n\n try:\n soup = bs(self.body, 'html.parser')\n except RuntimeError:\n return self.body\n else:\n return soup.body.text",
"async def text(self, encoding=\"utf-8\", errors=\"strict\"):\n return self.response.decode(encoding, errors=errors)",
"def get_text(self):\n return self.res.text",
"async def text(self, encoding=None, errors=\"strict\"):\n if self._body is None:\n await self.read()\n\n return self._body",
"def plain_text_body(self):\n return self._plain_text_body",
"def get_response(self):\n return self._get_multiline()",
"async def get_body(self, url):\n print(f'{30*\"-\"} > Get Body: {url}')\n try:\n async with aiohttp.ClientSession() as session:\n try:\n with async_timeout.timeout(30):\n async with session.get(url, headers=self.headers) as response:\n html = await response.read()\n return html\n except (asyncio.TimeoutError, ValueError):\n global_logger.write_log('error', f\"error: {ValueError}\")\n except (ServerDisconnectedError, ClientResponseError, ClientConnectorError) as s:\n global_logger.write_log('error', f\"error: {s}\")\n except (Exception, ValueError):\n global_logger.write_log('error', f\"error: {ValueError}\")\n return ''",
"def read_text(self, url: str) -> str:\n response = self._session().get(url)\n if not response.ok:\n response.raise_for_status()\n return response.text",
"def get_body(self):\n fp = self._environ['wsgi.input']\n return fp.read()",
"def get_body(self):\r\n fp = self._environ['wsgi.input']\r\n return fp.read()",
"def get_response(self):\n result = self.get_response_impl()\n if self.log_dest is not None:\n is_error, response = result\n if is_error:\n response = \"? \" + response\n else:\n response = \"= \" + response\n self._log(\"<< \", response.rstrip())\n return result",
"def body(self) -> str:\n return pulumi.get(self, \"body\")",
"def body(self) -> str:\n return self.data['body']",
"def get_content(self):\n return self.__response.content",
"def printable_reponse(self):\n resp = self.response\n msg = \"-- Reponse : {} -- \\r\\n\".format(resp.status_code)\n msg += \"Headers: {} \\r\\n\".format(str(resp.headers))\n msg += \"Body: {} \\r\\n\\r\\n\".format(str(resp.content))\n return msg",
"def _text_command(self, request):\n response = self._send(request)\n self._check_response(response)\n return response.text",
"def get_text(self):\n return self.output.getvalue()",
"def _single_body(part):\n content_type = part.get_content_type()\n try:\n body = part.get_payload(decode=True)\n except Exception:\n return ''\n\n if content_type == 'text/html':\n return BeautifulSoup(body, 'html.parser').text\n elif content_type == 'text/plain':\n return body\n return ''",
"def content(self):\n return(self.__response.content)",
"def __get_post_body(self):\n content_len = int(self.headers.getheader('content-length', 0))\n return self.rfile.read(content_len)",
"def _get_body(self, response):\n # TODO: Not yet implemented\n if response.status_code == 403:\n pass\n if response.status_code == 404:\n # Not Found: outside the geography?\n pass\n if response.status_code != 200:\n raise HTTPError(response.status_code)\n\n body = response.json()\n\n api_status = body[\"status\"]\n\n # Handle different types of 200 OK response types here\n if api_status == \"OK\":\n return body",
"def return_response_string(self):\n response = \"{} {}\\r\\n\".format(self.protocol, self.code)\n str_headers = \"\"\n if self.headers:\n for k, v in self.headers.items():\n str_headers += \"{}: {}\\r\\n\".format(k, v)\n\n encoded_response = \"{}{}\\r\\n\".format(response, str_headers)\n encoded_response = encoded_response.encode(\"utf-8\")\n if self.body:\n if type(self.body) is not bytes:\n self.body = self.body.encode(\"utf-8\")\n encoded_response = encoded_response + self.body\n return encoded_response",
"def plain_text_(self):\n return self.content.decode(self.encoding)",
"def body(self):\n\n return self._body",
"def body(self):\n return self.message.get_data()",
"def request_message_txt(self):\r\n headers, body = self.create_request()\r\n\r\n header_txt = \"\\n\".join(\r\n \"{}: {}\".format(h, v) for h, v in sorted(headers.items())\r\n )\r\n body_txt = json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8')\r\n\r\n return header_txt + \"\\n\\n\" + body_txt",
"def get_body(message):\n\n if message.is_multipart():\n # get the html text version only\n text_parts = [part\n for part in typed_subpart_iterator(message,\n 'text',\n 'html')]\n body = []\n for part in text_parts:\n charset = get_charset(part, get_charset(message))\n body.append(unicode(part.get_payload(decode=True),\n charset,\n \"replace\"))\n\n return u\"\\n\".join(body).strip()\n else:\n # if it is not multipart, the payload will be a string\n # representing the message body\n body = unicode(message.get_payload(decode=True),\n get_charset(message),\n \"replace\")\n return body.strip()",
"def ingest_plain_body(request):\n try:\n content = str(request.body, encoding='utf-8')\n except Exception as e:\n log.error(log.exc(e))\n return None\n return content"
] | [
"0.72369504",
"0.71414113",
"0.7134768",
"0.70143604",
"0.68329924",
"0.67339194",
"0.6597049",
"0.6577305",
"0.6570372",
"0.64789695",
"0.64780384",
"0.6470492",
"0.64699656",
"0.6439313",
"0.6419446",
"0.64055383",
"0.63803506",
"0.6376543",
"0.63081264",
"0.62654847",
"0.6176288",
"0.6172106",
"0.612597",
"0.6084788",
"0.6075294",
"0.60704005",
"0.6069545",
"0.60581213",
"0.60456836",
"0.6032383"
] | 0.8240622 | 0 |
Get HTTP response body as JSON | def json_body(response: tornado.httpclient.HTTPResponse):
return json.loads(text_body(response)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_json(response):\n\tif requests.__version__ >= \"1.0.0\":\n\t\treturn response.json()\n\telif requests.__version__ == \"0.14.2\":\n\t\treturn response.json\n\telse:\n\t\treturn json.loads(response.content)",
"def generate_http_response(body):\n body = json.dumps(body, ensure_ascii=False).encode('utf8')\n return body",
"def get_response(request_url):\n response = requests.get(request_url)\n return json.loads(response.text)",
"def _get_json_response(self, url, data, headers):\n if data:\n data = json.dumps(data)\n req = urllib2.Request(url, data, headers)\n response = urllib2.urlopen(req)\n raw_response = response.read()\n return raw_response",
"def get_body(self, environ=None):\n body = {\n 'success': False,\n 'data': {\n 'code': self.code,\n 'msg': self.msg,\n 'path': request.full_path,\n }\n }\n return json.dumps(body)",
"def get_json(response):\n return json.loads(response.data.decode('utf8'))",
"def get_json_body(response):\n # type: (AnyResponseType) -> JSON\n if isinstance(response, TestResponse):\n return response.json\n return response.json()",
"def _get_request_body(_request):\n return _request.json",
"def json_of_response(response):\n return json.loads(response.text)",
"def json(self) -> Any:\n return self.body.json()",
"def getResponse( self, url ):\n\n try:\n res = urllib2.urlopen( url ).read()\n except urllib2.HTTPError, e:\n print(e.code)\n except urllib2.URLError, e:\n print(e.args)\n return json.loads(res)",
"def _get_body(self, response):\n # TODO: Not yet implemented\n if response.status_code == 403:\n pass\n if response.status_code == 404:\n # Not Found: outside the geography?\n pass\n if response.status_code != 200:\n raise HTTPError(response.status_code)\n\n body = response.json()\n\n api_status = body[\"status\"]\n\n # Handle different types of 200 OK response types here\n if api_status == \"OK\":\n return body",
"def _get_json(self, url: str) -> dict:\n r = self._req_get(url)\n return r.json() if r else None",
"def jsonresp(value):\n body = (json.dumps(value),)\n cherrypy.response.headers['Content-Type'] = 'application/json'\n return body",
"def get_json(self, *args, **kwargs):\r\n resp = self.request_with_auth(\"get\", *args, **kwargs)\r\n self.assertHttpOK(resp)\r\n self.assertTrue(resp[\"Content-Type\"].startswith(\"application/json\"))\r\n return json.loads(resp.content)",
"def get_json(url):\n f = urlopen(url)\n response_text = f.read()\n response_data = json.loads(str(response_text, \"utf-8\"))\n #pprint(response_data)\n return response_data",
"async def json(self, encoding=\"utf-8\", content_type=None, loads=json_loads):\n return loads(self.response.decode(encoding))",
"def json(self):\n try:\n return self._json_provider.load(self._body())\n except ValueError:\n raise HTTPError(400, 'Misformated JSON object')",
"def getJson(self,url):\n r = req.get(str(url),\"GET\")\n jsonResponse = json.loads(r.text)\n return jsonResponse",
"def get_json(url):\n f = urllib.request.urlopen(url)\n response_text = f.read().decode('utf-8')\n response_data = json.loads(response_text)\n # pprint(response_data)\n return response_data",
"def get_json(response):\n import json\n try:\n import flask\n except ImportError:\n flask = None\n if flask and isinstance(response, flask.Response):\n # flask testing\n return json.loads(response.data.decode('utf-8'))\n else:\n # requests\n if hasattr(response.json, '__call__'):\n return response.json()\n else:\n return response.json",
"def json_of_response(response):\n return json.loads(response.data.decode('utf8'))",
"def json_of_response(response):\n return json.loads(response.data.decode('utf8'))",
"def get_whole_response_as_json(url, session=None):\n req = session or requests\n response = req.get(url, headers=get_headers())\n response.raise_for_status()\n if response.status_code == requests.codes.no_content:\n raise NoContent(\"204 No Content\")\n elif response.status_code == requests.codes.accepted:\n raise Accepted(\"202 Accepted. No cached data. Retry.\")\n return response.json()",
"def response_as_json(resp):\n resp_json = json.loads(resp.data.decode('utf-8'))\n return resp_json",
"def get_json():\n try:\n return request.get_json(force=True)\n except Exception:\n raise AXApiInvalidParam(\"Invalid json supplied\")",
"def get_json(url):\n f = urllib.request.urlopen(url)\n response_text = f.read().decode('utf-8')\n response_data = json.loads(response_text)\n\n return response_data",
"def get_json(url):\n f = urlopen(url, 1)\n response_text = f.read()\n response_data = json.loads(str(response_text, \"utf-8\"))\n return response_data",
"def _json(self, response):\n try:\n return response.json()\n except Exception:\n raise DatabricksApiException(\n 403, 3, \"Invalid json message: %s\" % self._remove_tags(response.text)\n )",
"def get_json(self, url):\n json_response = self.testapp.get(url)\n self.assertEqual(json_response.status_int, 200)\n return self._parse_json_response(json_response, expect_errors=False)"
] | [
"0.729981",
"0.7209948",
"0.7168147",
"0.706024",
"0.7018765",
"0.69994396",
"0.69508433",
"0.6911265",
"0.6746027",
"0.6719141",
"0.6719075",
"0.67182344",
"0.67025316",
"0.669225",
"0.66248244",
"0.6580149",
"0.65752757",
"0.6573697",
"0.65716434",
"0.6570659",
"0.65155184",
"0.65042484",
"0.65042484",
"0.6455057",
"0.6444034",
"0.6433841",
"0.64232594",
"0.6405351",
"0.64010996",
"0.63970226"
] | 0.7616408 | 0 |
Create a default list of bounds for a given signal description | def _default_bounds(signal):
# there's just the name
if isinstance(signal, str):
return (signal, 0, 0, 0, 0)
else:
# there's just the name in a list
if len(signal) == 1:
return signal + [0, 0, 0, 0]
# there's the name and bounds
if len(signal) == 3:
return signal + [signal[1], signal[2]]
return signal | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_bounds():\n return [0.00], [1.00]",
"def bounds(self):\n return [(2, None)]",
"def bounds(self): # -> tuple[()]:\n ...",
"def bounds(self, pos):",
"def _init_optimizer_bounds(self):\n bounds = []\n for filt in self.filters:\n if filt.optimize_fc:\n bounds.append((np.log10(filt.min_fc), np.log10(filt.max_fc)))\n if filt.optimize_q:\n bounds.append((filt.min_q, filt.max_q))\n if filt.optimize_gain:\n bounds.append((filt.min_gain, filt.max_gain))\n return bounds",
"def get_bounds():\n bounds = [\n (0.1, 0.5), # Omega_m\n (0.05, 0.15) # beta\n ]\n return np.array(bounds)",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def GetBounds(self):\n ...",
"def create_bound_for_scipy(lb, ub):\n lb = tuple(map(convert_inf_to_none, lb))\n ub = tuple(map(convert_inf_to_none, ub))\n return list((lb[i], ub[i]) for i in range(len(ub)))",
"def define_range():\n\n def_range = {'lt': [0.0, 24.0],\n 'lon': [0.0, 360.0],\n 'angle': [0.0, 2.0 * np.pi]}\n\n return def_range",
"def get_bounds(self):\n return ([self.t_min] * self.dim,[self.t_max] * self.dim)",
"def initBoundedParams(bounds, sn=[]):\n hypinit = {\n 'cov': np.zeros(len(bounds)),\n 'lik': np.atleast_1d(np.log(sn)),\n 'mean': np.array([])\n }\n # Sample from a uniform distribution\n for idx, pair in enumerate(bounds):\n # Randomize only if bounds are specified\n if isinstance(pair, collections.Iterable):\n hypinit['cov'][idx] = np.random.uniform(pair[0], pair[1])\n # If no bounds, then keep default value always\n else:\n hypinit['cov'][idx] = pair\n return hypinit",
"def _initialize_bounds(problem, bounds, get_bound, set_bound):\n for constraint in problem.constraints:\n root_expr = constraint.root_expr\n expr_bounds = Interval(constraint.lower_bound, constraint.upper_bound)\n if root_expr not in bounds:\n set_bound(root_expr, expr_bounds)\n else:\n existing_bounds = get_bound(root_expr)\n new_bounds = existing_bounds.intersect(expr_bounds)\n set_bound(root_expr, new_bounds)",
"def bounds(self) -> typing.List[float]:\n raise NotImplementedError()",
"def extend_bounds(problem):\n\n num_vars = problem[\"num_vars\"]\n num_ff_vars = 2 ** find_smallest(num_vars)\n num_dummy_variables = num_ff_vars - num_vars\n\n bounds = list(problem[\"bounds\"])\n names = problem[\"names\"]\n if num_dummy_variables > 0:\n bounds.extend([[0, 1] for x in range(num_dummy_variables)])\n names.extend([\"dummy_\" + str(var) for var in range(num_dummy_variables)])\n problem[\"bounds\"] = bounds\n problem[\"names\"] = names\n problem[\"num_vars\"] = num_ff_vars\n\n return problem",
"def get_bounds_parameters(self):\n bounds = []\n bounds += self.var_noise.bounds\n bounds += self.mean.bounds\n bounds += self.kernel.get_bounds_parameters()\n\n return bounds",
"def getSampleBounds(args, matrix):\n bounds = matrix.parameters['sample_boundaries']\n if args.samples is None:\n return np.arange(0, matrix.matrix.matrix.shape[1])\n else:\n o = list()\n for sample in args.samples:\n if sample not in matrix.matrix.sample_labels:\n sys.exit(\"Error: '{0}' is not a valid sample\\n\".format(sample))\n idx = matrix.matrix.sample_labels.index(sample)\n o.extend(range(bounds[idx], bounds[idx + 1]))\n return o",
"def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:\n return [(-1.0, 1.0)] * len(list(self.params()))",
"def add_default_bounds_to_params(params):\n defaults = pd.DataFrame(\n {\"lower_bound\": -np.inf, \"upper_bound\": np.inf},\n index=params.index,\n )\n params = params.combine_first(defaults)\n\n return params",
"def _parse_bounds(self, bounds):\n try:\n if bounds == None:\n return None\n elif not isinstance(bounds[0], tuple):\n if len(bounds)==2:\n return [tuple(bounds) for i in range(self.params.size)]\n else:\n raise ValueError\n elif len(bounds) == self.params.size:\n if all([len(b)==2 for b in bounds]):\n return bounds\n else:\n raise ValueError\n else:\n raise ValueError\n except:\n raise ValueError(\"'bounds' should be a list of two elements \"\n \"[lb, ub], or a list of the same length as the number of \"\n \"parameters where each element is a tuple (lb, ub)\")",
"def simple_bounds(child, lb, ub):\n assert len(lb) == len(ub), 'Lower and upper bounds have different #s of design variables in simple_bounds function.'\n assert len(lb) == len(child), 'Bounds and child have different #s of design variables in simple_bounds function.'\n for i in range(0, len(child), 1):\n if child[i] < lb[i]:\n child[i] = lb[i]\n\n for i in range(0, len(child), 1):\n if child[i] > ub[i]:\n child[i] = ub[i]\n\n return child",
"def bounds(self):\n return self.substrates.bounds",
"def from_data_bounds():\n return 'DATA_BOUNDS'",
"def cb_bounds(self, variable, results_dict, keys, fixed_bounds):\n tas_bound, pr_bound = fixed_bounds\n if variable == \"tas\":\n if tas_bound:\n bound_limit = tas_bound\n else:\n bound_limit = self.find_abs_bound_range(results_dict, keys)\n cmap = plt.cm.RdBu_r\n else:\n if pr_bound:\n bound_limit = pr_bound\n else:\n bound_limit = self.find_abs_bound_range(results_dict,\n keys,\n avg_over=25)\n cmap = plt.cm.BrBG\n bounds = np.linspace(-1 * bound_limit, bound_limit, 11)\n return [bounds, cmap]"
] | [
"0.7021897",
"0.67702514",
"0.6683141",
"0.63438934",
"0.63145226",
"0.62476254",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6207278",
"0.6167822",
"0.61064106",
"0.60504097",
"0.6016998",
"0.5977074",
"0.5952667",
"0.589701",
"0.58884776",
"0.58879614",
"0.5810363",
"0.5763766",
"0.5746786",
"0.57291853",
"0.5709055",
"0.5708554",
"0.569415"
] | 0.7777678 | 0 |
Parse interfaces described in YAML files, bundled with the package | def parse_interface_definitions(dir_name=DIR):
try:
filenames = listdir(dir_name)
except OSError:
raise OSError(f"Directory '{dir_name}' "
"doesn't exist or cannot be listed")
defs = []
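    # Parse every bundled YAML file in the directory and collect the loaded definitions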
for filename in filenames:
with open(join(dir_name, filename)) as f:
defs.append(load(f, Loader=Loader))
return defs | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse(self, infile):\r\n raise NotImplementedError()",
"def _parse(self, infile):\n raise NotImplementedError()",
"def __parse(self, to_parse):\n path = Path(to_parse)\n if not path.exists():\n raise FileNotFoundError(f\"Configuration file {path.absolute()} not found.\")\n else:\n with path.open() as file:\n data = json.load(file)\n\n if \"name\" not in data:\n raise AssertionError(f\"Missing fundamental parameter: name\")\n Validator(\n [\n (data[\"name\"], str),\n (data[\"include\"] if \"include\" in data else {}, dict),\n (data[\"modules\"] if \"modules\" in data else [], list),\n ]\n )\n # Input parsing\n if \"include\" in data:\n name = data[\"name\"]\n data = self.validate_include(data[\"include\"])\n data[\"name\"] = name\n elif \"modules\" not in data:\n raise AssertionError(\"No modules neither includes are defined.\")\n\n if \"args\" in data:\n Validator().dict(data[\"args\"])\n self.__get_modules(data)",
"def process_yamls(folder):\n for item in iglob(folder + \"/*.yaml\"):\n data_file = os.path.join(folder, item)\n data = yaml.load(open(data_file))\n load_data(data)",
"def vidl(*args, **kwargs):\n loadstring = ''\n #now read the file definitions\n for file in args:\n ext = os.path.splitext(file)[1][1:] # get the extension without the dot\n if ext == 'ddf':\n loadstring += open(file).read()\n continue\n elif ext == 'rdf':\n continue\n elif ext == 'udf':\n continue\n else:\n # load the internal definitions for the file type\n loadstring += open(kwargs[ext]).read()\n # now load the file\n loadstring += open(file).read()\n return yaml.load(loadstring)",
"def load_yaml_file(i):\n\n import yaml\n\n fn = i['yaml_file']\n\n try:\n if sys.version_info[0] > 2:\n f = open(fn, 'r', encoding='utf8')\n else:\n f = open(fn, 'r')\n except Exception as e:\n return {'return': 16, 'error': 'problem opening YAML file='+fn+' ('+format(e)+')'}\n\n try:\n s = f.read()\n except Exception as e:\n f.close()\n return {'return': 1, 'error': 'problem reading YAML file='+fn+' ('+format(e)+')'}\n\n f.close()\n\n try:\n d = yaml.load(s, Loader=yaml.FullLoader)\n except Exception as e:\n return {'return': 1, 'error': 'problem parsing YAML from file='+fn+' ('+format(e)+')'}\n\n return {'return': 0, 'dict': d}",
"def parse(self):\n\t\tself.maincfg_values = self._load_static_file(self.cfg_file)\n\t\t\n\t\tself.cfg_files = self.get_cfg_files()\n\t\t\n\t\tself.resource_values = self.get_resources()\n\t\t\n\t\tself.timestamps = self.get_timestamps()\n\t\t\n\t\t## This loads everything into\n\t\tfor cfg_file in self.cfg_files:\n\t\t\tself._load_file(cfg_file)\n\n\t\tself._post_parse()",
"def get_parser():\r\n parser = yamlargparse.ArgumentParser(\r\n prog='train_forcast',\r\n description='configurations realted to training process of forcasting mechanism'\r\n )\r\n parser.add_argument('--info.run_id', default='',\r\n help='the unique identifier for logging and metadata creation')\r\n parser.add_argument('--info.m', default=10,\r\n help='use past m values for prediction')\r\n parser.add_argument('--info.n', default=5,\r\n help='predict next n values')\r\n parser.add_argument('--info.operation_type',\r\n choices=[const.TRAIN_OP, const.DEPLOY_OP],\r\n help='choosing whether to perform training or deployment')\r\n parser.add_argument('--info.model_type',\r\n choices=[const.LIN_REG, const.RAN_FOR_REG, const.DEC_TREE_REG, const.MULT_OP_REG],\r\n help='choosing model type in case of training operation')\r\n parser.add_argument('--info.model_file', default='',\r\n help='the relative path to the stored model file')\r\n parser.add_argument('--info.output_dir', default='output',\r\n help='the relative path to the directory for storing results')\r\n parser.add_argument('--train_test_split.type',\r\n choices=[const.SPLIT_BY_DATE, const.SPLIT_BY_FILES],\r\n help='determines the way in which train-test split should be done')\r\n parser.add_argument('--train_test_split.date', default='',\r\n help='the date string in \\'YYYY-mm-dd\\' format, indicating the date at which split should be made')\r\n parser.add_argument('--train_test_split.train', default='',\r\n help='the relative path to the .tsv file containing train data')\r\n parser.add_argument('--train_test_split.test', default='',\r\n help='the relative path to the .tsv file containing test data')\r\n parser.add_argument('--visualize.train_data', action=yamlargparse.ActionYesNo, default=False,\r\n help='determines if the training visualizations are to be stored')\r\n parser.add_argument('--visualize.train_fname', default='',\r\n help='the relative path to the .pdf file storing train data visualizations')\r\n parser.add_argument('--random_forest_regression.max_depth', default=20,\r\n help='choosing hyperparams for random forest')\r\n parser.add_argument('--random_forest_regression.random_state', default=7,\r\n help='choosing hyperparams for random forest')\r\n parser.add_argument('--decison_tree_regression.max_depth', default=20,\r\n help='choosing hyperparams for decision tree')\r\n parser.add_argument('--multi_output_regression.n_estimators', default=100,\r\n help='choosing hyperparams for multioutput regression')\r\n\r\n parser.add_argument('--cfg', action=yamlargparse.ActionConfigFile, required=True)\r\n return parser",
"def yamlConfigForParsingPlugins():\n parameters = \"\"\"\njoinPaths: !joinPaths\n - a\n - b\n - \"c\"\nrunPageTemplates: !findRunPageTemplates\n - \"templates\"\nbcrypt: !bcrypt\n bcryptLogRounds: 12\n user: \"pass\"\nbcryptNoUser: !bcrypt\n bcryptLogRounds: 12\n null: null\nsecretKey: !secretKey 12345\nsecretKeyGen: !secretKey null\n \"\"\"\n # Load parameters\n parameters = yaml.load(parameters, Loader = yaml.SafeLoader)\n return parameters",
"def ParseInterfaceDescription(xml_string, keys=None):\n\n gm_dict = xmltodict.parse(xml_string)['task']\n\n module_id = gm_dict['@name']\n description = gm_dict['description']\n categories = gm_dict['keywords'].replace(' ', '').split(',')\n categories.append('grass-module')\n parameters = {}\n returns = {}\n extrakwargs = dict()\n\n try:\n grass_params = gm_dict['parameter']\n except KeyError:\n logstring(module_id, \"\", \"has no parameter\")\n grass_params = []\n\n try:\n flags = gm_dict['flag']\n except KeyError:\n logstring(module_id, \"\", \"has no flags\")\n flags = []\n\n for parameter in grass_params:\n\n kwargs = dict()\n schema_kwargs = dict()\n\n if keys:\n # case for actinia modules\n key = setVirtualParameterKey(module_id, parameter)\n if key not in keys:\n continue\n else:\n # case for GRASS modules\n key = setParameterKey(module_id, parameter)\n\n schema_kwargs = setParamType(module_id, key, parameter, schema_kwargs)\n kwargs = setParameterDescription(module_id, key, parameter, kwargs)\n kwargs = setParameterRequired(parameter, kwargs)\n schema_kwargs = setParameterDefault(parameter, schema_kwargs)\n schema_kwargs = setParameterEnum(parameter, schema_kwargs)\n\n param_object = ModuleParameter(\n **kwargs,\n schema=ModuleParameterSchema(**schema_kwargs)\n )\n if isOutput(parameter):\n returns[key] = param_object\n else:\n parameters[key] = param_object\n del kwargs\n del schema_kwargs\n\n for parameter in flags:\n # not possible to specify flag values via template at the moment\n if keys:\n continue\n\n kwargs = dict()\n schema_kwargs = dict()\n schema_kwargs['type'] = 'boolean'\n schema_kwargs['default'] = 'False'\n\n key = setParameterKey(module_id, parameter)\n\n kwargs = setParameterDescription(module_id, key, parameter, kwargs)\n kwargs = setParameterRequired(parameter, kwargs)\n\n param_object = ModuleParameter(\n **kwargs,\n schema=ModuleParameterSchema(**schema_kwargs)\n )\n parameters[key] = param_object\n del kwargs\n del schema_kwargs\n\n # custom extention for importer + exporter from actinia_core\n try:\n tpl = tplEnv.get_template('gmodules/' + module_id + '.json')\n pc_template = json.loads(tpl.render().replace('\\n', ''))\n for key in [*pc_template]:\n extrakwargs[key] = {}\n for param in pc_template[key]:\n extrakwargs[key][param] = ModuleParameter(**pc_template[key][param])\n except Exception as e:\n # if no template for module exist, use as is (default)\n log.debug('template %s does not exist.', e)\n\n grass_module = Module(\n id=module_id,\n description=description,\n categories=sorted(categories),\n parameters=parameters,\n returns=returns,\n **extrakwargs\n )\n\n return grass_module",
"def main(cls, **kwargs):\n try:\n import file_transformer\n except Exception as e:\n sys.exit(\"{}\\nSee https://github.com/benkehoe/file-transformer\".format(e))\n \n def loader(input_stream, args):\n return yaml.load(input_stream)\n \n def processor(input, args):\n transform = cls(input, vars(args))\n transform.apply()\n return transform.template\n \n def dumper(output, output_stream, args):\n yaml.dump(output, output_stream)\n \n return file_transformer.main(processor, loader, dumper, **kwargs)",
"def __parsePackages__(self, f):\n\n\t\tp = apt_pkg.ParseTagFile(f)\n\n\t\t# Just load into memory the fields that are going to be useful\n\t\twhile p.Step() == 1:\n\t\t\tpkg = p.Section['Package']\n\n\t\t\tself.packages[pkg] = {}\n\n\t\t\tfor field in p.Section.keys():\n\t\t\t\tif field == 'Package':\n\t\t\t\t\tpass \n\t\t\t\telif ['Depends', 'Recommends', 'Suggests', 'Enhances', 'Pre-Depends', 'Conflicts', 'Provides'].count(field):\n\t\t\t\t\tvalue = p.Section.get(field, \"\")\n\t\t\t\t\tself.packages[pkg][field] = apt_pkg.ParseDepends(value)\n\t\t\t\telif ['Size', 'Installed-Size'].count(field):\n\t\t\t \t\tvalue = p.Section.get(field, \"0\")\n\t\t\t \t\tself.packages[pkg][field] = int(value)\n\t\t\t\telif field == 'Source':\n\t\t\t\t\tsrc = p.Section.get(field, pkg)\n\t\t\t\t\tidx = src.find('(')\n\t\t\t\t\tif idx != -1:\n\t\t\t\t\t\tsrc = src[:idx].strip()\n\t\t\t\t\tself.packages[pkg][field] = src\n\t\t\t\telif field == 'Provides':\n\t\t\t\t\tself.packages[pkg][\"Provides\"] = apt_pkg.ParseDepends(p.Section.get(\"Provides\", \"\"))\n\t\t\t\telse:\n\t\t\t\t\tself.packages[pkg][field] = p.Section.get(field, '')\n\n\t f.close()",
"def _initialize_protocols(self):\n with open(str(pathlib.Path(__file__).parent / 'protocol.yml'), encoding='UTF-8') as handle:\n self._protocols = yaml.safe_load(handle)",
"def _parse_model_yaml(filename: str, cache_path: str = \"./\", only_models: list = []):\n model_classes = {\n \"ModelPointResult\": ModelPointResult,\n \"E3SMResult\": E3SMResult,\n \"ModelResult\": ModelResult,\n }\n models = []\n with open(filename, encoding=\"utf-8\") as fin:\n yml = yaml.safe_load(fin)\n for name, opts in yml.items():\n # optionally filter models\n if len(only_models) > 0 and name not in only_models:\n continue\n\n if \"name\" not in opts:\n opts[\"name\"] = name\n\n # if the model_year option is given, convert to lits of floats\n if \"model_year\" in opts:\n opts[\"model_year\"] = [\n float(y.strip()) for y in opts[\"model_year\"].split(\",\")\n ]\n\n # select the class type\n cls = model_classes[opts[\"type\"]] if \"type\" in opts else ModelResult\n if cls is None:\n typ = opts[\"type\"]\n raise ValueError(f\"The model type '{typ}' is not available\")\n fcns = dir(cls)\n\n # if the pickle file exists, just load it\n cache = os.path.join(cache_path, f\"{name}.pkl\")\n if os.path.exists(cache):\n if \"read_pickle\" in fcns:\n model = cls().read_pickle(cache)\n else:\n with open(cache, mode=\"rb\") as fin:\n model = pickle.load(fin)\n models.append(model)\n continue\n\n # call the constructor using keywords defined in the YAML file\n cls = model_classes[opts[\"type\"]] if \"type\" in opts else ModelResult\n model = cls(\n **{\n key: opts[key]\n for key in inspect.getfullargspec(cls).args\n if key in opts\n }\n )\n\n # some model types have a find_files() method, call if present loading\n # proper keywords from the YAML file\n if \"find_files\" in fcns:\n model.find_files(\n **{\n key: opts[key]\n for key in inspect.getfullargspec(model.find_files).args\n if key in opts\n }\n )\n\n # some model types allow you to specify snynonms\n if \"add_synonym\" in fcns and \"synonyms\" in opts:\n for mvar, syn in opts[\"synonyms\"].items():\n model.add_synonym(mvar, syn)\n\n # cache the model result\n if rank == 0:\n if \"read_pickle\" in fcns:\n model.to_pickle(cache)\n else:\n with open(cache, mode=\"wb\") as fin:\n pickle.dump(model, fin)\n\n models.append(model)\n\n for model in models:\n if isinstance(model.color, str) and model.color.startswith(\"#\"):\n model.color = clr.hex2color(model.color)\n return models",
"def load_yaml(self):\n env = self.state.document.settings.env\n relpath, abspath = env.relfn2path(directives.path(self.arguments[0]))\n\n env.note_dependency(relpath)\n\n encoding = self.options.get('encoding', env.config.source_encoding)\n with io.open(abspath, 'rt', encoding=encoding) as stream:\n spec = yaml.load(stream, _YamlOrderedLoader) # nosec\n self.spec = spec\n self.paths = spec[self.path_path]\n self.definitions = spec[self.models_path]\n self.openapi_version = spec.get('swagger', None) or spec['openapi']\n self.options.setdefault('uri', 'file://%s' % abspath)",
"def parse(self, config_file):\n\t\tself.options = yaml.load(open(config_file))",
"def __init__(self, path, input_type='f'):\n if input_type == 'f':\n file = open(path, 'r')\n elif input_type == 's':\n file = path\n else:\n raise exceptions.BadInputError(f\"invalid input type {input_type}\")\n\n pdl = yaml.safe_load(file)\n\n self.type_checks = {\n 'typedef': self.validate_typedef,\n 'component': self.validate_component,\n 'graph': self.validate_graph,\n }\n\n self.imports = []\n if 'import' in pdl:\n self.imports = pdl['import']\n\n self.namespace = pdl['name']\n self.body = pdl['body']\n self.typedefs = {}\n self.components = []\n self.graphs = []\n self.validate()",
"def semantic_capability_interface_from_file(file_handle):\n return semantic_capability_interface_from_dict(yaml.load(file_handle.read()), file_handle.name)",
"def semantic_capability_interface_from_file_path(file_path):\n with open(os.path.abspath(file_path), 'r') as f:\n return semantic_capability_interface_from_dict(yaml.load(f.read()), file_path)",
"def main():\n parse_file(sys.argv[1])",
"def register(cls):\n yaml.add_constructor(cls.label(), cls.parse_yaml)\n yaml.add_representer(cls, cls.dump_yaml)",
"def parse(fname):\n with open(fname) as f:\n all_configs = yaml.load(f, Loader=yaml.Loader)\n configs = {}\n for config_class, content in all_configs.items():\n if config_class in _configs:\n configs[config_class] = _configs[config_class](**content)\n return configs",
"def protocolParseFile( f ):\n\n\tpath = list( os.path.split( f.name )[:1] )\n\tparsed = ProtocolDescription.parseFile( f )[0]\n\n\timports = parsed[\"imports\"][:]\n\tparsed[\"imports\"] = []\n\n\tfor i in imports:\n\t\tp = os.path.join( path[0], i )\n\t\tparsed[\"imports\"].append( protocolParseFile( open( p, \"r\" ) ) )\n\n\treturn parsed",
"def source_interfaces(self):",
"def source_interfaces(self):",
"def run_step(context):\n logger.debug(\"started\")\n context.assert_keys_have_values(__name__,\n 'fileFormatYamlIn',\n 'fileFormatYamlOut')\n\n in_path = context.get_formatted('fileFormatYamlIn')\n out_path = context.get_formatted('fileFormatYamlOut')\n\n logger.debug(f\"opening yaml source file: {in_path}\")\n with open(in_path) as infile:\n payload = yaml.load(infile, Loader=yaml.RoundTripLoader)\n\n logger.debug(f\"opening destination file for writing: {out_path}\")\n os.makedirs(os.path.abspath(os.path.dirname(out_path)), exist_ok=True)\n with open(out_path, 'w') as outfile:\n formatted_iterable = context.get_formatted_iterable(payload)\n yaml.dump(formatted_iterable,\n outfile,\n Dumper=yaml.RoundTripDumper,\n allow_unicode=True,\n width=50)\n\n logger.info(\n f\"Read {in_path} yaml, formatted contents and wrote to {out_path}\")\n logger.debug(\"done\")",
"def parse_config(parser):\n\n all_inputs = {}\n all_outputs = {}\n input_container = {}\n output_container = {}\n\n #Parse config file into dictionaries\n for section_name in parser.sections():\n\n #Input pins\n if section_name.startswith('Input'):\n\n this_input = {}\n\n for name, value in parser.items(section_name):\n this_input[name] = value\n\n all_inputs[section_name] = this_input\n\n #Output pins\n if section_name.startswith('Output'):\n\n this_output = {}\n\n for name, value in parser.items(section_name):\n this_output[name] = value\n\n all_outputs[section_name] = this_output\n\n\n #Convert inputs into dict of named tuples\n for key in all_inputs:\n inp = all_inputs[key]\n input_container[key] = PifaceInput(name=inp['name'],\n type=inp['type'],\n description=inp['description'],\n pin=int(inp['pin']),\n rising=bool(inp['rising']),\n falling=bool(inp['falling']),\n disablepullup=bool(inp['disablepullup']))\n\n #Convert outputs into dict of named tuples\n for key in all_outputs:\n inp = all_outputs[key]\n output_container[key] = PifaceOutput(name=inp['name'],\n description=inp['description'],\n pin=int(inp['pin']))\n\n\n return input_container, output_container",
"def _parse_interface(self, interface_data):\n # Look for inputs\n for mo in re.finditer(self._INPUT_RE, interface_data):\n self._inputs.append(Input(mo.group(\"name\"), self.parse_width(mo.group(\"width\"))))\n # Look for outputs\n for mo in re.finditer(self._OUTPUT_RE, interface_data):\n self._outputs.append(Output(mo.group(\"name\"), self.parse_width(mo.group(\"width\"))))",
"async def parse_files(file):\n data = yaml.full_load(file)\n try:\n new_data = {\n \"task_name\": data[\"metadata\"][\"name\"],\n \"task_type\": data[\"kind\"],\n \"scheduled_at\": data[\"spec\"].get(\"schedule\"),\n }\n\n except KeyError as e:\n raise KeyError(f\"Invalid yaml file uploded \\n {e}\")\n model = TaskModel(**new_data)\n return model",
"def _load (cls, *files):\n config = ConfigParser.ConfigParser()\n config.read(files)\n \n metadata = {}\n if config.has_section(\"metadata\"):\n for key in config.options(\"metadata\"):\n metadata[key] = config.get(\"metadata\", key)\n\n processes = {}\n datasources = {}\n for section in config.sections():\n if section == \"metadata\": continue\n if section.startswith(\"process_\"):\n try:\n processes[section[8:]] = FeatureServer.Processing.loadFromSection(config, section)\n except Exception, E:\n pass \n else: \n datasources[section] = cls.loadFromSection(\n config, section, 'DataSource')\n\n return cls(datasources, metadata, processes)"
] | [
"0.5856186",
"0.5764196",
"0.5734122",
"0.5686092",
"0.56764734",
"0.56446433",
"0.5535054",
"0.55263555",
"0.5507395",
"0.54634154",
"0.5458527",
"0.5440412",
"0.53991145",
"0.53776085",
"0.536266",
"0.53351825",
"0.5331788",
"0.53305066",
"0.532903",
"0.5318388",
"0.5302064",
"0.528339",
"0.5282493",
"0.52751",
"0.52751",
"0.5263513",
"0.52545595",
"0.52468747",
"0.52394974",
"0.5227961"
] | 0.57860667 | 1 |
Given filenames of Verilog source and JSON target, use the 'write_json' function of yosys | def verilog_to_json(verilog_filename, json_filename):
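    # Shell out to yosys: read the Verilog source, then write the design out as JSON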
system(f'yosys -p "read_verilog {verilog_filename}" '
'-p "write_json {json_filename}"') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_write_source(self):\n req = Request()\n for name in sample_data.keys():\n orig_fn = self._filepath(name)\n temp_fn = self._filepath(name + '-write-source')\n\n # Read the message\n resp = req.get(fromfile=orig_fn)\n\n # Write to a temporary JSON file\n resp.write_source(temp_fn)\n\n # Read the two files and compare JSON (ignores ordering)\n with open(orig_fn) as orig, open(temp_fn) as temp:\n assert json.load(orig) == json.load(temp)\n\n # Delete the temporary file\n os.remove(temp_fn)",
"def process_files_json():\n # chdir into beep root\n pwd = os.getcwd()\n os.chdir(os.environ.get(\"BEEP_ROOT\", \"/\"))\n\n meta_list = list(filter(lambda x: '_Metadata.csv' in x, os.listdir(SRC_DIR)))\n file_list = list(filter(lambda x: '.csv' in x if x not in meta_list else None, os.listdir(SRC_DIR)))\n all_list = list(filter(lambda x: '.csv' in x, os.listdir(SRC_DIR)))\n\n all_list = sorted(all_list)\n dumpfn(all_list, \"all_files.json\")\n\n [file_id, mapdf] = init_map(PROJECT_NAME, DEST_DIR)\n\n new_file_index = file_id\n\n for filename in tqdm(sorted(file_list)):\n # If the file has already been renamed another entry should not be made\n if mapdf['filename'].str.contains(filename).sum() > 0:\n continue\n old_file = os.path.join(SRC_DIR, filename)\n new_path = os.path.join(DEST_DIR, PROJECT_NAME)\n shutil.copy(old_file, new_path) # copy main data file\n shutil.copy(old_file.replace(\".csv\", '_Metadata.csv'), new_path) # copy meta data file\n\n if PROJECT_NAME == 'FastCharge':\n [date, channel_no, strname, protocol] = get_parameters_fastcharge(filename, SRC_DIR)\n elif PROJECT_NAME == 'ClosedLoopOED':\n [date, channel_no, strname, protocol] = get_parameters_oed(filename, SRC_DIR)\n else:\n raise ValueError(\"Unsupported PROJECT_NAME: {}\".format(PROJECT_NAME))\n\n df_dup = mapdf.set_index(['protocol', 'date'])\n if (protocol, date) in df_dup.index:\n row = mapdf[(mapdf['protocol'] == protocol) & (mapdf['date'] == date)]\n file_id = row['fid'].iloc[0]\n protocol = row['protocol'].iloc[0]\n date = row['date'].iloc[0]\n strname = row['strname'].iloc[0]\n else:\n file_id = new_file_index\n new_file_index = new_file_index + 1\n\n new_name = \"{}_{}_{}\".format(PROJECT_NAME, f'{file_id:06}', channel_no)\n new_file = os.path.join(DEST_DIR, PROJECT_NAME, \"{}.csv\".format(new_name))\n\n new_row = pd.DataFrame([[file_id, protocol, channel_no, date, strname,\n os.path.abspath(old_file),\n os.path.abspath(new_file)]],\n columns=METADATA_COLUMN_NAMES)\n mapdf = mapdf.append(new_row)\n\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename), new_file)\n os.rename(os.path.join(DEST_DIR, PROJECT_NAME, filename).replace(\".csv\", \"_Metadata.csv\"),\n new_file.replace(\".csv\", \"_Metadata.csv\"))\n\n mapdf.to_csv(os.path.join(DEST_DIR, PROJECT_NAME, PROJECT_NAME + \"map.csv\"), index=False)\n mapdf = mapdf.reset_index(drop=True)\n os.chdir(pwd)\n return json.dumps(mapdf.to_dict(\"list\"))",
"def json_writer():\n with open(\"{}.json\".format(sys.argv[3]), \"w+\") as new_json:\n print(\"uploading the jason file... \")\n json.dump(json_file, new_json)\n print(\"file is done\")",
"def ref_resp2files(output_file, output_json):\n with open(output_file, \"w\") as reference_text:\n reference_text.write(output_json)",
"def _write_json(\n output_path, records\n):\n output_path.write_text(json.dumps(records))",
"def fix_jsons_in(bids_dir: Path):\n\n print(\"Finalizing task json files.\")\n\n for path in bids_dir.rglob(\"func/*_task-*.json\"):\n append_to_json_file(key=\"TaskName\", value=task_name_of(path), path_to_json=path)\n\n print(\"Appending echo times to phase difference json files.\")\n\n for path in bids_dir.rglob(\"fmap/*_phasediff.json\"):\n magnitude1_path = the_path_that_matches(pattern=\"sub-*_magnitude1.json\", in_directory=path.parent)\n magnitude2_path = the_path_that_matches(pattern=\"sub-*_magnitude2.json\", in_directory=path.parent)\n echo_time1 = value_of_key_in_json_file(\"EchoTime\", magnitude1_path)\n echo_time2 = value_of_key_in_json_file(\"EchoTime\", magnitude2_path)\n append_to_json_file(key=\"EchoTime1\", value=echo_time1, path_to_json=path)\n append_to_json_file(key=\"EchoTime2\", value=echo_time2, path_to_json=path)\n\n print(\"Setting targets of fieldmap json files.\")\n\n for path in bids_dir.rglob(\"fmap/*.json\"):\n func_dir = path.parent.parent / \"func\"\n trimmed_func_paths = [\"func/\" + func_path.name for func_path in func_dir.glob(\"*.nii\")]\n append_to_json_file(key=\"IntendedFor\", value=trimmed_func_paths, path_to_json=path)",
"def SaveJSON(self, filename):\n data = {\n 'files': self._files,\n 'ebuilds': self._ebuilds,\n }\n json.dump(data, open(filename, 'w'))",
"def save_json(file_name, file_content):\n with open(generate_file_path(\"output\", file_name), 'w', encoding='utf-8') as f:\n json.dump(file_content, f, ensure_ascii=False, indent=4)",
"def main(filename):\n with open(filename) as json_file:\n data = json.load(json_file)\n\n course_dict = {}\n course_dict['course_id'] = str(os.path.split(filename.strip('/'))[-1])\n course_dict['blocks'] = build_course_map(data)\n\n filename = '%s' % course_dict['course_id']\n filepath = os.path.join('../input/', filename)\n\n with open(filepath, 'w') as outfile:\n json.dump(course_dict, outfile, indent=4)",
"def cat_json(output_filename, input_filenames):\n\twith open(output_filename, \"w\") as outfile:\n\t\tfirst = True\n\t\tcounter = -1\n\t\tfor infile_name in input_filenames:\n\t\t\twith open(infile_name) as infile:\n\t\t\t\tif first:\n\t\t\t\t\toutfile.write('{')\n\t\t\t\t\tfirst = False\n\t\t\t\telse:\n\t\t\t\t\toutfile.write(',')\n\t\t\t\toutfile.write(mangle(infile.read(), counter))\n\t\t\t\tcounter -= 1\n\t\toutfile.write('}')",
"def write_json_file(self, fname, content):\n pass",
"def write_json_file(self, fname, content):\n pass",
"def save_json_to_destination_file(self):\n if self.source_file != self.output_file:\n click.secho(f'Saving output JSON file to {self.output_file}', fg='white')\n with open(self.output_file, 'w') as file:\n ujson.dump(self.data, file, indent=4, encode_html_chars=True, escape_forward_slashes=False,\n ensure_ascii=False)",
"def write_to_json(dicts, filename: str):\n\n with open(filename, 'w', encoding='utf-8') as f:\n mmcv.dump(dicts, f, file_format='json')",
"def dict_2_json(obj, filename):\n\twith open('data/output/' + filename, 'w') as fp:\n\t\tjson.dump(obj, fp, indent=4)",
"def copy_json():\n sourcePath = 'contents/external/'\n targetPath = 'build/external/'\n for base,subdirs,files in os.walk(sourcePath):\n for file in files:\n orig = os.path.join(base, file)\n if os.path.isfile(orig) and file[-5:] == '.json':\n targetBase = os.path.join(targetPath, base[len(sourcePath):])\n dest = os.path.join(targetBase, file)\n puts(\"Checking diretory %s\" % targetBase)\n if not os.path.exists(targetBase):\n puts(yellow(\"Not found! Creating...\"))\n os.makedirs(targetBase)\n puts(\"Copying from %s to %s\" % (orig, dest))\n copyfile(orig, dest)",
"def write_json_file(file_name: str, content: list):\n with open(file_name, 'w+') as file_object:\n json.dump(content, file_object)",
"def write_json_to_file(json_object, filename):\n try:\n # Try to serialize it before writing\n json_object = json.dumps(json_object)\n except TypeError:\n print(\"Failed to serialize the object\")\n try:\n json_object = json.loads(json_object)\n json_object = json.dumps(json_object)\n except TypeError:\n print(\"Failed secondary serialization of json object\")\n\n json_file = robot_dir + \"/output/original/{}_orig.json\".format(filename.replace(' ', ''))\n with open(json_file, 'w') as json_orig_file:\n json_orig_file.writelines(json_object)",
"def update_json(shell=False):\n\n surahs_modified = 0\n last_update = update_timestamp()\n if shell: tell.info(\"Updating JSON AST files.\")\n\n for surah in range(1, 115):\n\n source_path = \"source/surah{}.text\".format(surah)\n\n if last_modified(source_path) > last_update:\n\n if shell:\n tell.info(\"Rebuilding Surah `{}`.\".format(surah))\n surahs_modified += 1\n\n with open(source_path) as source_file:\n with open(\"json/surah{}.json\".format(surah), \"w\") as json_file:\n data = parse(source_file.read())\n json_file.write(json.dumps(data))\n\n if shell:\n if surahs_modified: tell.done(\"The JSON files are now up to date.\")\n else: tell.info(\"No edits were found in the source files.\")\n\n return surahs_modified",
"def write_tojson(data, filename) -> None:\r\n with open(\"static/json/\" + filename, \"w\") as out:\r\n out.write(\r\n json.dumps(\r\n [data[datum].__dict__() for datum in data]\r\n )\r\n )",
"def generate_code_files(code_list: List[str], base: str) -> None:\n for code in code_list:\n parts = code.split(' ')\n status = parts[0]\n name = \" \".join(parts[1:])\n path = os.path.join('codes', base, f'{status[0]}XX', f'{status}.json')\n data = {\n 'code': int(status),\n 'name': name,\n 'messages': []\n }\n with open(path, 'w') as jsonfile:\n json.dump(data, jsonfile, indent=4)",
"def batch_run_cfg2json():\n cfg_path = os.environ.get(\"CFG_FILE_PATH\")\n cfg_list = ['any_n1.cfg',\n 'ir_grism_n2.cfg',\n 'ir_grism_n4.cfg',\n 'ir_any_n2.cfg',\n 'ir_any_n4.cfg',\n 'uvis_any_n2.cfg',\n 'uvis_any_n4.cfg',\n 'uvis_any_n6.cfg',\n 'uvis_any_pre2012_n2.cfg',\n 'uvis_any_pre2012_n4.cfg',\n 'uvis_any_pre2012_n6.cfg',\n 'wfc_any_n2.cfg',\n 'wfc_any_n4.cfg',\n 'wfc_any_n6.cfg',\n 'sbc_blue_n2.cfg',\n 'sbc_blue_n6.cfg',\n 'sbc_any_n2.cfg',\n 'sbc_any_n6.cfg',\n 'hrc_any_n2.cfg',\n 'hrc_any_n4.cfg',\n 'hrc_any_n6.cfg']\n for cfgfile in cfg_list:\n cfgfile = os.path.join(cfg_path, cfgfile)\n cfg2json(cfgfile)\n\n cfg_path = os.path.realpath(__file__).replace(\"devutils/pars_utils.py\", \"pars/\")\n out_path = os.path.realpath(__file__).replace(\"devutils/pars_utils.py\", \"pars/hap_pars/any/\")\n cfg_list = [\"astrodrizzle_filter_hap.cfg\", \"astrodrizzle_single_hap.cfg\", \"astrodrizzle_total_hap.cfg\"]\n for cfgfile in cfg_list:\n cfgfile = os.path.join(cfg_path, cfgfile)\n cfg2json(cfgfile, outpath=out_path)",
"def test_write_to_json():\r\n tmp_dir = os.getcwd()\r\n json_content = '{ \"name\":\"John\", \"age\":30}'\r\n directory = os.path.join(tmp_dir, 'inputspec.json')\r\n write_to_json(directory, json_content) \r\n with open(directory) as json_file:\r\n data = json.load(json_file)\r\n json_string = json.dumps(data)\r\n if os.path.exists(directory):\r\n os.remove(directory)\r\n assert json_string.replace(' ', '') == json_content.replace(' ' , '')",
"def WriteJson(filename, data, keys, calculate_sha1=True):\n try:\n file = open(filename, 'wb')\n except IOError, e:\n print >> sys.stderr, ('I/O Error writing file %s(%s): %s' %\n (filename, e.errno, e.strerror))\n return False\n jsondata = []\n for key in keys:\n rowdata = GetRowData(data, key)\n if calculate_sha1:\n # Include an updated checksum.\n rowdata.append('\"sha1\": \"%s\"' % GetRowDigest(rowdata, key))\n else:\n if 'sha1' in data[key]:\n rowdata.append('\"sha1\": \"%s\"' % (data[key]['sha1']))\n jsondata.append('\"%s\": {%s}' % (key, ', '.join(rowdata)))\n jsondata.append('\"load\": true')\n jsontext = '{%s\\n}' % ',\\n '.join(jsondata)\n file.write(jsontext + '\\n')\n file.close()\n return True",
"def writejsonsol(self,filename_): # 3\n res = self.__obj.writejsonsol(filename_)\n if res != 0:\n result,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)",
"def write_to_json(config: dict, filename: str):\n\n with open(filename, 'w', encoding='utf-8') as f:\n mmengine.dump(config, f, file_format='json')",
"def write_as_json(filename, data):\n if not os.path.exists(os.path.dirname(OUT_DIR + filename)):\n print('creating ...')\n os.makedirs(os.path.dirname(OUT_DIR + filename))\n\n with open(OUT_DIR + filename, \"w\") as f:\n json.dump(data, f)",
"def _json_write(filename, res):\n with open(filename, 'w+') as file:\n return json.dump(res, file)",
"def update_json(change_dict: Dict[str, Any], files: LocalPath):\n for file in files:\n with open(file, \"r\") as f:\n t = json.load(f)\n for path, val in change_dict.items():\n merge_dict(path_to_dict(path, val), t)\n with open(file, \"w\") as f:\n json.dump(t, f, indent=2)",
"def writejsonsol(self,filename_):\n if isinstance(filename_,unicode):\n filename_ = filename_.encode(\"utf-8\",errors=\"replace\")\n res = __library__.MSK_XX_writejsonsol(self.__nativep,filename_)\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)"
] | [
"0.654977",
"0.6347858",
"0.6209616",
"0.6194429",
"0.6187571",
"0.6149994",
"0.6075553",
"0.6070677",
"0.60704535",
"0.60384846",
"0.60266185",
"0.60266185",
"0.600497",
"0.5989924",
"0.59744734",
"0.59439313",
"0.59291714",
"0.5905016",
"0.5904322",
"0.58849543",
"0.5879473",
"0.58788997",
"0.5857566",
"0.58313966",
"0.58266926",
"0.58243805",
"0.5800583",
"0.5798756",
"0.5784118",
"0.57689416"
] | 0.69165206 | 0 |
get channel metadata from index | def get_ch_metadata(self, index):
tag = self.get_ch_tag(index)
return getattr(self, f"{tag.lower()}_metadata") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_meta(self, *, index=None):\n\n return self.metadata(index=index, exclude_applied=False)",
"def showmeta(self,\r\n index):\r\n\r\n return self.get_metadata_from_note(index)",
"def getMetadata(self):\n result = super().getMetadata()\n if len(self._frames) > 1:\n result['frames'] = [\n {k: v for k, v in frame.items() if k.startswith('Index')}\n for frame in self._frames]\n self._addMetadataFrameInformation(result, self._channels)\n if hasattr(self, '_bands'):\n result['bands'] = self._bands.copy()\n return result",
"async def channel_info(bot, message):\n if isinstance(CHANNELS, (int, str)):\n channels = [CHANNELS]\n elif isinstance(CHANNELS, list):\n channels = CHANNELS\n else:\n raise ValueError(\"Unexpected type of CHANNELS\")\n\n text = '📑 **Indexed channels/groups**\\n'\n for channel in channels:\n chat = await bot.get_chat(channel)\n if chat.username:\n text += '\\n@' + chat.username\n else:\n text += '\\n' + chat.title or chat.first_name\n\n text += f'\\n\\n**Total:** {len(CHANNELS)}'\n\n if len(text) < 4096:\n await message.reply(text)\n else:\n file = 'Indexed channels.txt'\n with open(file, 'w') as f:\n f.write(text)\n await message.reply_document(file)\n os.remove(file)",
"def extract_channels(self, index: int) -> ListLike:\n cmd_pieces = self[index].split()\n channels = []\n for i, piece in enumerate(cmd_pieces):\n if piece in [\"--channel\", \"-c\"]:\n channels.append(cmd_pieces[i + 1])\n return channels",
"def extract_channelindex(self):\n if len(self.widgets['lbChannel'].curselection()) == 0 or len(self.widgets['lbIntChannel'].curselection()) == 0:\n print(\"Select a channel\")\n return\n self.parameters['channel'] = int(self.widgets['lbChannel'].curselection()[0])\n self.parameters['intensity_channel'] = int(self.widgets['lbIntChannel'].curselection()[0])\n self.stack.default_coords['c'] = self.parameters['channel']\n self.destroy_all()\n if self.has_multiple_series:\n self.open_seriesselector()\n else:\n self.parameters['selected_series'] = [0] # dummy index for looping\n self.launch_GUV_GUI()",
"def GetChannelDescription(vDataSet,aIndexC):\r\n\r\n s = \"\"\r\n if aIndexC >= 0 and aIndexC < vDataSet.GetSizeC():\r\n s = vDataSet.GetChannelDescription(aIndexC)\r\n\r\n return s",
"def channelinfo(self):\n\n return ChannelInfo(\n self._filetextbox.text(),\n self._idtextbox.text(),\n self._datafilebox.text()\n )",
"def get(self, index):\n raise NotImplementedError() # pragma: no cover",
"async def channel_stats(self, ctx, channel: discord.TextChannel = None):\n channel = channel or ctx.channel\n embed = discord.Embed(\n title=f\"Stats for **{channel.name}**\",\n description=f\"{'Category: {}'.format(channel.category.name) if channel.category else 'This channel is not in a category'}\",\n color=discord.Color.blurple(),\n )\n embed.add_field(name=\"Channel Guild\",\n value=ctx.guild.name, inline=False)\n embed.add_field(name=\"Channel Id\", value=channel.id, inline=False)\n embed.add_field(\n name=\"Channel Topic\",\n value=f\"{channel.topic if channel.topic else 'No topic.'}\",\n inline=False,\n )\n embed.add_field(name=\"Channel Position\",\n value=channel.position, inline=False)\n embed.add_field(\n name=\"Channel Slowmode Delay\", value=channel.slowmode_delay, inline=False\n )\n embed.add_field(name=\"Channel is nsfw?\",\n value=channel.is_nsfw(), inline=False)\n embed.add_field(name=\"Channel is news?\",\n value=channel.is_news(), inline=False)\n embed.add_field(\n name=\"Channel Creation Time\", value=channel.created_at, inline=False\n )\n embed.add_field(\n name=\"Channel Permissions Synced\",\n value=channel.permissions_synced,\n inline=False,\n )\n embed.add_field(name=\"Channel Hash\", value=hash(channel), inline=False)\n\n await ctx.message.delete()\n await ctx.send(embed=embed)",
"def get_metadata_from_note (self,\r\n index):\r\n\r\n if self.using_database:\r\n aprint('GET METADATA')\r\n value_tuple = (notebookname, str(index),)\r\n db_cursor.execute(\"SELECT user \"+\r\n \"FROM notes WHERE notebook=? \"+\r\n \"AND note_index=?;\",\r\n value_tuple)\r\n try:\r\n user = db_cursor.fetchone()[0]\r\n except:\r\n user = \"USER\"\r\n db_cursor.execute(\"SELECT size \"\r\n +\" FROM notes WHERE notebook=?\"\r\n +\" AND note_index=?;\",\r\n value_tuple)\r\n try:\r\n size = db_cursor.fetchone()[0]\r\n except:\r\n size = 60\r\n db_cursor.execute(\"SELECT timestamp\"\r\n +\" FROM timestamps WHERE notebook=? \"\r\n +\" AND note_index=?\"\r\n +\" ORDER BY timestamp\",\r\n value_tuple)\r\n dates = db_cursor.fetchall()\r\n try:\r\n date_list = [str(date[0]) for date in dates]\r\n except:\r\n date_list = [str(datetime.datetime.now())]\r\n\r\n metadata = {'user':user,\r\n 'date':date_list,\r\n 'size':size}\r\n\r\n return metadata\r\n\r\n if str(index) in self.note_dict:\r\n\r\n return self.note_dict[str(index)].meta\r\n return {}",
"def get_index_data():\n indexTickers = ['^DJI', '^RUA', '^GSPC', '^IXIC', '^SZSA', '^XCI', '^MSH']",
"def index(self):\n return dict(data='index')",
"def __getitem__(self, index: int) -> FaceDescriptor:\n _coreDescriptor = self._faceEngine.createDescriptor(self._coreIndex.getDescriptorVersion())\n error, descriptor = self._coreIndex.descriptorByIndex(index, _coreDescriptor)\n assertError(error)\n\n return FaceDescriptor(descriptor)",
"def get_channel_info(self):\n items = [('channel_number', int),\n ('range', float),\n ('sampling_rate', float),\n ('digitisation', float),\n ('offset', float),\n ]\n\n attrs = self['/UniqueGlobalKey/channel_id'].attrs\n info = {key: converter(attrs[key]) for key, converter in items}\n new_names = [('range','channel_range'),\n ('sampling_rate', 'channel_sampling_rate'),\n ('digitisation', 'channel_digitisation'),\n ('offset', 'channel_offset'),\n ]\n for old, new in new_names:\n info[new] = info[old]\n del info[old]\n return info",
"def get_metadata_for(layer_index):\n try:\n layer = CatalogLayer.objects.get(id=layer_index)\n meta = layer.metadata\n except CatalogLayer.DoesNotExist:\n return {'success': 'false', 'message':\n '{0} is not a valid index for CatalogLayer'.format(layer_index)}\n except LayerMeta.DoesNotExist:\n return {'success': 'false', 'message':\n 'No metadata found for CatalogLayer {0}'.format(layer_index)}\n # fixme: is 'requested' actually useful?\n return {'success': 'true', 'requested': layer.serialize(),\n 'data': meta.serialize()}",
"def extract_medialive_channel_info(ml_client, ml_channel_id):\n mediapackage_channel_list = []\n channel_name = None\n try:\n response = ml_client.describe_channel(\n ChannelId=ml_channel_id\n )\n channel_name = str(response[\"Name\"])\n destinations = response[\"Destinations\"]\n for destination in destinations:\n for output in destination[\"Settings\"]:\n url = str(output[\"Url\"])\n if \"mediapackage\" in url:\n mediapackage_channel_list.append(url)\n except Exception, e:\n print \"Error:\", e.message\n return channel_name, mediapackage_channel_list",
"def index(self):\n return self.container['index']",
"def read_channel(self, channel: int, /) -> int:",
"def get_channel_details(self, chan_ids_list, part='statistics'):\n\n chnl_details = {}\n key = self.keylist[self.keyindex]\n url_c = \"https://www.googleapis.com/youtube/v3/channels\"\n\n for ind, chan in enumerate(chan_ids_list):\n try:\n querystring = {\"id\": chan, \"part\": part,\n \"key\": key}\n response = request_handler(self, url_c, params=querystring, wait=100)\n #print(response)\n # Error-handling\n if response.get('error'):\n print(response.get('error'))\n while response['error']['errors'][0]:\n key = keychange(self)\n \n querystring = {\"id\": chan, \"part\": part,\n \"key\": key}\n response = request_handler(self, url_c, params=querystring, wait=100)\n\n if response.get('error'):\n #chnl_details.update({chan:[str(response), response.text]})\n #\n if response['error']['errors'][0]['reason'] == 'keyInvalid':\n return [{chan:[str(response), response.text]}]\n break\n\n if response.get('Interneterror'):\n chnl_details.update({chan: str(response)})\n continue\n\n chnl_details[chan] = response['items']\n\n except Exception as e:\n print(e, traceback.format_exc())\n\n if ind % 100 == 0:\n print(ind)\n \n return chnl_details",
"def metadata(self, rel_index) -> Metadata:\n\n row = self.image_df.iloc[rel_index]\n\n (\n site,\n well,\n replicate,\n plate,\n compound,\n concentration,\n moa,\n image_idx,\n ) = row[\n [\n \"site\",\n \"well\",\n \"replicate\",\n \"plate\",\n \"compound\",\n \"concentration\",\n \"moa\",\n \"image_idx\",\n ]\n ]\n\n metadata = Metadata(\n Plate(site, str(well), replicate, plate),\n Compound(compound, concentration, moa),\n image_idx,\n )\n\n return metadata",
"def get(self, channel):\n try:\n return self[channel.lower()]\n except KeyError:\n return None",
"def index(self):\n return self._data.get('index')",
"def __getitem__(self, index):\n clef = self.clef.clef # pandas dataframe\n vocab_concepts = self.vocab_concept\n vocab_word = self.vocab_word\n ann_id = self.ids[index]\n concepts_whole = clef.loc[ann_id]['concepts']\n concepts_whole = concepts_whole.split(';')\n caption = clef.loc[ann_id]['caption']\n img_id = clef.loc[ann_id]['image_id']\n path = clef.loc[ann_id]['file_name'] + \".jpg\"\n image = Image.open(os.path.join(self.root, path)).convert('RGB')\n if self.transform is not None:\n image = self.transform(image)\n tokens = nltk.tokenize.word_tokenize(str(caption).lower())\n caption = []\n caption.append(vocab_word('<start>'))\n caption.extend([vocab_word(token) for token in tokens])\n caption.append(vocab_word('<end>'))\n target = torch.Tensor(caption)\n concepts_idx = [[0,vocab_concepts(concept)] for concept in concepts_whole]\n return image, concepts_idx, target",
"def getChannelInfo( self, channel ):\n d = self.dcDict\n for dev in d:\n for devChannel in d[dev]['devChannels']:\n if d[dev]['devChannels'][devChannel]['channel'] == channel: return ( dev, devChannel )",
"def rtt_read_channel_info(self, channel_index, direction):\n if not self._is_u32(channel_index):\n raise ValueError('The channel_index parameter must be an unsigned 32-bit value.')\n\n if not self._is_enum(direction, RTTChannelDirection):\n raise ValueError('Parameter direction must be of type int, str or RTTChannelDirection enumeration.')\n\n direction = self._decode_enum(direction, RTTChannelDirection)\n if direction is None:\n raise ValueError('Parameter direction must be of type int, str or RTTChannelDirection enumeration.')\n\n channel_index = ctypes.c_uint32(channel_index)\n direction = ctypes.c_int(direction.value)\n name_len = ctypes.c_uint32(32)\n name = (ctypes.c_uint8 * 32)()\n size = ctypes.c_uint32()\n\n result = self._lib.NRFJPROG_rtt_read_channel_info(channel_index, direction, ctypes.byref(name), name_len, ctypes.byref(size))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return ''.join(chr(i) for i in name if i != 0), size.value",
"def __getitem__(self, index):\n item_info = {\n \"ID\": self.ID[index], \n \"turn_id\": self.turn_id[index], \n \"turn_belief\": self.turn_belief[index], \n \"gating_label\": self.gating_label[index], \n \"context_plain\":self.dialog_history[index].split(), \n \"turn_uttr_plain\": self.turn_uttr[index], \n \"turn_domain\": self.turn_domain[index], \n \"generate_y\": [v.split() for v in self.generate_y[index]],\n \"slot_temp\": self.slot_temp\n }\n return item_info",
"def index_stats(self):\r\n request = http.Request('GET', '/metadata/index_stats')\r\n return request, parsers.parse_json",
"def get_info_format(self):\n return self.session.api.get_index(self)",
"def channel_details(token, channel_id):\n authorised_u_id = get_id_from_token(token)\n channel = channels.get(channel_id)\n if channel is None:\n raise ValueError(\"channel_id does not exist.\")\n if authorised_u_id not in channel[\"all_members\"]:\n raise AccessError(\"The authorised user is not a member of the channel.\")\n name = channel[\"name\"]\n all_members = []\n owner_members = []\n for member_id in channel[\"all_members\"]:\n member = users.get(member_id)\n all_members.append(\n {\n \"u_id\": member[\"u_id\"],\n \"name_first\": member[\"first_name\"],\n \"name_last\": member[\"last_name\"],\n \"profile_img_url \": member[\"img_url\"],\n }\n )\n for owner_id in channel[\"owners\"]:\n owner = users.get(owner_id)\n owner_members.append(\n {\n \"u_id\": owner[\"u_id\"],\n \"name_first\": owner[\"first_name\"],\n \"name_last\": owner[\"last_name\"],\n \"profile_img_url \": owner[\"img_url\"],\n }\n )\n return {\"name\": name, \"all_members\": all_members, \"owner_members\": owner_members}"
] | [
"0.6677486",
"0.64471376",
"0.63067454",
"0.6167251",
"0.60129225",
"0.5979515",
"0.59767544",
"0.5872275",
"0.5782871",
"0.57287145",
"0.5725852",
"0.5725031",
"0.5657962",
"0.56345564",
"0.5610363",
"0.56080425",
"0.5587384",
"0.55509144",
"0.5509374",
"0.5505031",
"0.5495094",
"0.5483648",
"0.54724544",
"0.54590017",
"0.54508305",
"0.5443261",
"0.54406554",
"0.5439668",
"0.5430563",
"0.54275054"
] | 0.749579 | 0 |
deletes and returns minimum value (in this case the root of the heap) | def delete_min(self):
self.switch(0, -1)
min = self.heap.pop(-1)
self.bubble_down(0)
return min | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def delete_min(self):\n #The length is 1 because the heap list was initialized with 0\n if len(self.heap_list) == 1:\n return \"Empty heap.\"\n\n #Store the min value of the heap\n top = self.heap_list[1]\n\n #Move the last value of the heap to the top\n self.heap_list[1] = self.heap_list[self.current_size]\n\n #Pop the last value from the heap (that was moved to the top)\n *self.heap_list, _ = self.heap_list\n\n # Decrease the size of the heap\n self.current_size -= 1\n\n #Move down the top value to the appropriate position (following the definition of a min heap)\n #The value is at index 1 since the heap list was initialized with 0) \n self.sift_down(1)\n\n #Return the min value of the heap\n return top",
"def remove_min(self) -> object:\n if self.is_empty() == True:\n raise MinHeapException\n\n # minimum value to be returned\n min_val = self.get_min()\n\n # get last index\n end = self.heap.length() - 1\n\n # root index\n root = 0\n\n # swap first and last nodes and remove last value\n self.heap.swap(root, end)\n self.heap.pop()\n\n # length\n length = self.heap.length()\n\n # left index and right index\n left_i = (2 * root) + 1\n right_i = (2 * root) + 2\n\n # if heap has only one value\n if left_i > length - 1:\n return min_val\n\n # if heap has only left child\n if right_i > length - 1:\n if self.heap.get_at_index(left_i) < self.heap.get_at_index(root):\n self.heap.swap(left_i, root)\n return min_val\n else:\n return min_val\n\n # percolate down heap\n while left_i < length and right_i < length:\n replace_val = self.heap.get_at_index(root)\n left_child = self.heap.get_at_index(left_i)\n right_child = self.heap.get_at_index(right_i)\n\n # find index to swap nodes and check that a node exists\n if self.find_replacement(left_i, right_i, left_child, right_child, replace_val):\n node = self.find_replacement(\n left_i, right_i, left_child, right_child, replace_val)\n\n # swap nodes, set new root and child indices\n self.heap.swap(root, node)\n root = node\n left_i = (node * 2) + 1\n right_i = (node * 2) + 2\n\n return min_val",
"def remove_min(self):\r\n # Should raise an exception of size is 0...\r\n if self._size == 0: raise KeyError # Can't remove from an empty heap\r\n result = self._data[0] # remember the smallest\r\n self._data[0] = None # None is so we don't have a reference.\r\n self._size -= 1 # don't forget we have one less\r\n # bring the last to the front and stick the None at the end\r\n self.swap(0, self._size)\r\n # and let the item inserted at the front \"drift down\"\r\n self.down_heap(0)\r\n return result # finally return what was the minimum\r",
"def remove_min(self):\n if self._size == 1: # Only root node in heap\n return self._delete_node(self.root())\n min_node = self._array[0] # Root node has min value\n last = self._array[self._size-1] # Bottom-right-most node\n self._swap(min_node, last) # Move last node to root\n element = self._delete_node(min_node) # Delete root\n self._downheap(last) # Down-heap bubble last node\n if self._size == self._N//4 and self._N > BinaryTree.DEFAULT_CAPACITY:\n self._resize_array(self._N // 2) # Halve size of array\n return element",
"def deleteMin(self):\n heap = self._heap\n position = self._position\n\n try:\n end = heap.pop(-1)\n except IndexError:\n raise KeyError('pqdict is empty')\n\n if heap:\n node = heap[0]\n # grab last node in PQ to root and sink it down appropriately\n heap[0] = end\n position[end.key] = 0\n self._sink(0)\n else:\n node = end\n del position[node.key] # delete index from position dict\n return node.key, node.value",
"def remove_min(self) -> object:\n if self.is_empty():\n raise MinHeapException\n return\n parent_index=0\n parent=self.get_min()\n #parent=5\n #print(parent)\n #print(self)\n self.heap.swap(parent_index,self.heap.length()-1)\n self.heap.pop()\n if self.is_empty():\n return parent\n min_child=self.find_min_child(1,2)\n while min_child!=None:\n if self.heap.get_at_index(min_child)>self.heap.get_at_index(parent_index):\n break\n self.heap.swap(min_child,parent_index)\n parent_index=min_child\n if parent_index==None:\n break\n min_child=self.find_min_child((parent_index * 2)+1,(parent_index * 2) + 2)\n return parent",
"def delMin(self):\n retval = self.heapList[1]\n self.heapList[1] = self.heapList[self.currentSize]\n self.currentSize = self.currentSize - 1\n self.heapList.pop()\n self.percDown(1)\n return retval",
"def delete_min(self):\n min_val = self.peek_min()\n self.remove(min_val)\n return min_val",
"def remove_min(self) -> Optional[T]:\n if self._array == []:\n return None\n else:\n # Remove top node\n value = self._array[0]\n self._array = self._array[1:]\n # If nodes remaing in the min heap...\n if self._array:\n # Move end node to the top\n end_node = self._array.pop()\n self._array = [end_node] + self._array\n # Rebuild the heap (heapify)\n self.__build()\n # Return the top node\n return value",
"def remove_min(self):\n if self.is_empty():\n raise Empty('Priority queue is empty.')\n self._swap(0, len(self._data) - 1) # put minimum item at the end\n item = self._data.pop() # and remove it from the list;\n self._downheap(0) # then fix new root\n return (item._key, item._value)",
"def delete_min(self):\n node = self.root \n if node is None:\n return \n # move to the leftmost \n while node.left is not None:\n node = node.left\n # promote the node's right subtree \n if node.parent is not None:\n node.parent.left = node.right \n # if node's parent is None, the root is the smallest element \n else: \n self.root = node.right \n if node.right is not None:\n node.right.parent = node.parent \n parent = node.parent \n node.parent = None \n node.left = None \n node.right = None \n return node, parent",
"def pop_smallest(self):\n smallest = heapq.heappop(self.heap)\n del self.set[smallest]\n return smallest",
"def remove_min(self):\n if self.is_empty():\n raise Empty('Priority queue is empty.')\n self._swap(0, len(self._data) - 1) # put minimum item at the end\n item = self._data.pop() # and remove it from the list;\n self._downheap(0) # then fix new root\n return (item._key, item._value)",
"def pop(self):\n if self.heap == [0]:\n raise EmptyHeapException('Heap is empty.')\n self.heap[1], self.heap[-1] = self.heap[-1], self.heap[1]\n minimum = self.heap[-1] # Store min val to return later\n self.heap = self.heap[:-1] # Remove final element\n self._percolate_down(1)\n return minimum",
"def extractMin(self):\n if not self.heap:\n raise IndexError(\"there is no root\")\n elif len(self.heap) < 2:\n return self.heap.pop()\n else:\n self.heap[0], oldMin = self.heap.pop(), self.heap[0]\n self._shiftDown()\n return oldMin",
"def delete_top_from_max_heap(x):\n last = x[-1]\n x = x.at[0].set(last)[:-1]\n return heapify_subtree(x, 0)",
"def pop_smallest(self):\n values = [item[0] for item in self.items] #list of the values\n #values = L[:]\n heapq.heapify(values)\n smallest = heapq.heappop(values)#not forgetting heapq.heapify(values)\n #directly writing t = heapq.heappop([4,2,4]) would result in t = 4\n i = self.getItemByValue(smallest)\n self.items.remove(i)\n return i[1]",
"def remove_min(self):\r\n try:\r\n if self.is_empty():\r\n raise \"List is Empty\"\r\n \r\n self.swap(0,len(self._data)-1) \r\n element = self._data.pop() # remove the value from list.\r\n self._heapify_after_remove(0) # heapify the list\r\n return element._key, element._value\r\n \r\n except Exception, e:\r\n print \"Error occurred in HeapDistance: remove_min\", e\r\n print traceback.print_exc(e)",
"def remove_min(self):\r\n if self.is_empty():\r\n raise Exception('Priority queue is empty.')\r\n self._swap(0, len(self._data) - 1) # put minimum item at the end\r\n item = self._data.pop() # and remove it from the list;\r\n self._downheap(0) # then fix new root\r\n return (item._key, item._value)",
"def delete_max(self):\n retval = self.heap_list[1]\n self.heap_list[1] = self.heap_list[self.size]\n self.size = self.size - 1\n pop_val = self.heap_list.pop()\n self.percolate_down(1)\n return retval",
"def extract_min(self):\r\n if self.is_empty():\r\n return None\r\n min_elem = self.heap_array[0]\r\n aux_elem = self.heap_array.pop()\r\n\r\n if self.is_empty() == False:\r\n self.heap_array[0] = aux_elem\r\n\r\n current_index = 0\r\n left_child_index = (2 * current_index) + 1\r\n current_value = self.heap_array[current_index]\r\n\r\n while left_child_index < len(self.heap_array): # loop that will repeat until no violation of the minheap properties exist\r\n current_min = current_value\r\n\r\n for i in range(2): # this loop is in place so that both children are compared and the smaller of the two is chosen \r\n if (left_child_index + i) > len(self.heap_array)-1: # condition to avoid out of bounds\r\n continue\r\n else:\r\n if int(self.heap_array[left_child_index + i]) < int(current_min): # if child is smaller than parent\r\n current_min = self.heap_array[left_child_index + i ] # set current minimum value\r\n current_min_index = left_child_index + i # and cureent minimim index( index where current minimum value is found )\r\n if current_min == current_value: # if no property is broken (in this case, the parent is actually less than its' children)\r\n break\r\n else: # if propert is broken\r\n self.heap_array[current_index], self.heap_array[current_min_index] = self.heap_array[current_min_index], self.heap_array[current_index] # swap the elements \r\n current_index = current_min_index\r\n left_child_index = int((2 * current_index) + 1)\r\n return min_elem",
"def min(self):\r\n if self._size == 0: raise KeyError # Nothing to return if heap empty\r\n return self._data[0] # so simple!\r",
"def min(self):\n return self.heap[1]",
"def extractmin(self):\n if len(self.heap) == 0: \n return None\n i = self.heap[0]\n last = self.heap[-1]\n del self.heap[-1]\n if len(self.heap) > 0:\n self.siftdown(last, 0)\n return i",
"def pop_min(self):\n if self.get_size() == 0:\n return None\n\n # put minimum item at the end\n self.swap(0, len(self.table) - 1)\n\n # and remove it from the list;\n item = self.table.pop()\n\n # then fix new root\n self.percolate_down(0)\n return item",
"def get_min(h: Heap) -> Node:\n prev, curr = _min(h)\n return curr",
"def findmin(self):\n return self.heap[0] if len(self.heap) > 0 else None",
"def removeMinimum(self, i = 1):\n\n # print(\"I\", i, self.heap[i], self.noOfRemovedElements)\n\n # Base cases\n if self.heap[i] == 'NaN' :\n self.noOfRemovedElements += 1\n # Restructures heap to be a continuous list otherwise a lot of \"Nan\" noOfElements\n # due to removal of minimums a lot of times interfere with the logic of the program\n if self.noOfRemovedElements == self.limitOfRestructuring:\n self.restructureHeap()\n self.noOfRemovedElements = 0\n return\n if 2 * i + 1 > self.noOfElements or 2 * i > self.noOfElements:\n self.heap[i] == \"NaN\"\n self.noOfRemovedElements += 1\n # Restructures heap to be a continuous list otherwise a lot of \"Nan\" noOfElements\n # due to removal of minimums a lot of times interfere with the logic of the program\n if self.noOfRemovedElements == self.limitOfRestructuring:\n self.restructureHeap()\n self.noOfRemovedElements = 0\n return\n\n # Initializing children element positions\n child1 = 2 * i\n child2 = ( 2 * i ) + 1\n # print(\"child 1\", child1, self.heap[child1])\n # print(\"child 2\", child2, self.heap[child2])\n\n # Case when there are no children\n if self.heap[child1] == 'NaN' and self.heap[child2] == 'NaN':\n self.heap[i] = 'NaN'\n self.noOfRemovedElements += 1\n # Restructures heap to be a continuous list otherwise a lot of \"Nan\" noOfElements\n # due to removal of minimums a lot of times interfere with the logic of the program\n if self.noOfRemovedElements == self.limitOfRestructuring:\n self.restructureHeap()\n self.noOfRemovedElements = 0\n return\n\n # Case when there is only one child\n elif self.heap[child2] == 'NaN':\n self.heap[i], self.heap[child1] = self.heap[child1], \"NaN\"\n self.noOfRemovedElements += 1\n # Restructures heap to be a continuous list otherwise a lot of \"Nan\" noOfElements\n # due to removal of minimums a lot of times interfere with the logic of the program\n if self.noOfRemovedElements == self.limitOfRestructuring:\n self.restructureHeap()\n self.noOfRemovedElements = 0\n return\n\n # Case when there is only one child, same as above\n elif self.heap[child1] == 'NaN':\n self.heap[i], self.heap[child2] = self.heap[child2], \"NaN\"\n self.noOfRemovedElements += 1\n # Restructures heap to be a continuous list otherwise a lot of \"Nan\" noOfElements\n # due to removal of minimums a lot of times interfere with the logic of the program\n if self.noOfRemovedElements == self.limitOfRestructuring:\n self.restructureHeap()\n self.noOfRemovedElements = 0\n return\n\n # Swapping parent with the smaller child\n # Bubbling down\n if self.heap[child1].dijkstraCriterion <= self.heap[child2].dijkstraCriterion:\n self.heap[i], self.heap[child1] = self.heap[child1], self.heap[i]\n self.removeMinimum( child1 )\n else:\n self.heap[i], self.heap[child2] = self.heap[child2], self.heap[i]\n self.removeMinimum( child2 )",
"def del_min(self):\n min_idx = self.__pq[1]\n self.__swap(1, self.__n)\n self.__n -= 1\n self.__sink(1)\n self.__keys[self.__pq[self.__n + 1]] = None\n self.__qp[self.__pq[self.__n + 1]] = -1\n return min_idx",
"def get_min(self) -> object:\n if self.is_empty()==True:\n return None\n return self.heap.get_at_index(0)"
] | [
"0.8399035",
"0.8222343",
"0.81165886",
"0.8064376",
"0.7997324",
"0.7903924",
"0.7903202",
"0.7774015",
"0.7720592",
"0.76391006",
"0.76284105",
"0.76253104",
"0.75730777",
"0.75428915",
"0.752482",
"0.7467781",
"0.7435114",
"0.7430907",
"0.739603",
"0.73590386",
"0.73556286",
"0.73268384",
"0.71378875",
"0.70962656",
"0.7056211",
"0.6948398",
"0.6926277",
"0.692462",
"0.6866999",
"0.68485963"
] | 0.8246364 | 1 |
Inserts key into heap and heapifies the heap | def insert(self, key):
self.heap.append(key)
self.bubble_up(len(self.heap) - 1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def insert(self, key, value):\n self.heap.append(None)\n hi = HeapItem(key,value)\n self.siftup(hi, len(self.heap)-1)\n return hi",
"def insert(self, k): \r\n self.heap_array.append(k)\r\n\r\n current_index = len(self.heap_array) - 1\r\n while (current_index > 0):\r\n parent_index = ((current_index-1)//2)\r\n\r\n if int(self.heap_array[current_index]) > int(self.heap_array[parent_index]): # if no vialation of the min heap property \r\n return\r\n else: # if heap property is broken then swap the parent and child that are breaking the prop \r\n self.heap_array[parent_index], self.heap_array[current_index] = self.heap_array[current_index], self.heap_array[parent_index]\r\n current_index = parent_index",
"def insert(self, key):\n\n badIndex = self._append(key)\n # heap invariant might be violated at the new index\n while badIndex > 0 and not self.invariant(self.queue[self._parent(badIndex)], self.queue[badIndex]):\n self._swap(badIndex, self._parent(badIndex))\n badIndex = self._parent(badIndex)",
"def insert(self, key: K, value: V) -> None:\n if key in self.__key_map__:\n self.remove(key)\n\n entry = (value, next(self.counter), key)\n self.__key_map__[key] = entry\n\n heapq.heappush(self.queue, entry)",
"def insert(self, key, value):\r\n self._data.append(self._Item(key, value))\r\n self._upheap(len(self._data) - 1) # upheap newly added position\r",
"def insert(self,key, value):\n if key in self._position:\n # reset value for this node\n node_pos = self._position[key]\n node = self._heap[node_pos]\n node.value = value\n self._sink(node_pos)\n self._swim(node_pos)\n else:\n # insert a new node\n new_node = _Node(key,value)\n node_pos = len(self._heap)\n self._heap.append(new_node)\n self._position[key] = node_pos\n\n # repair priority\n self._swim(node_pos)",
"def push(self, priority, key):\n index = len(self.__heap)\n self.__position[key] = index\n self.__heap.append([priority, key])\n self.__bubble_up(index)",
"def push(self, key, value):\r\n if len(self.heap)<self.depth:\r\n heapq.heappush(self.heap, key)\r\n self.elements[key] = value\r\n else:\r\n oldkey = heapq.heappushpop(self.heap, key)\r\n self.elements[key] = value\r\n del self.elements[oldkey]",
"def heap_increase_key(self, i, key):\n if key < self.heap[i].priority_key:\n print(\"The new key should be higher than the current priority_key \")\n else:\n self.heap[i].priority_key = key\n while i > 0 and self.heap[(i-1)//2].priority_key < self.heap[i].priority_key:\n self.heap[(i-1)//2], self.heap[i] = self.heap[i], self.heap[(i-1)//2]\n i = (i-1)//2",
"def max_heap_insert(self, A, key):\n A.append(-9999999)\n self.heap_increase_key(A, len(A)-1, key)",
"def max_heap_insert(self, A, key):\n A.append(-9999999)\n self.heap_increase_key(A, len(A)-1, key)",
"def append(self, key):\n if key is None:\n raise ValueError('Cannot insert None in the queue')\n\n i = len(self.heap)\n self.heap.append(key)\n while i > 1:\n parent = i // 2\n if key < self.heap[parent]:\n self.heap[i], self.heap[parent] = self.heap[parent], key\n i = parent\n else:\n break",
"def insert(self, k):\n #Append the element to the min heap\n self.heap_list.append(k)\n #Increase the size of the min heap\n self.current_size += 1\n #Move the value to its appropriate position in the min heap (following the definition of a min heap)\n self.sift_up(self.current_size)",
"def heap_increase_key(self, A, i, key):\n if key < A[i]:\n raise Exception(\"New key must be greater than current key\")\n A[i] = key\n while i > 0 and A[self.parent(A, i)] < A[i]:\n A[i], A[self.parent(A, i)] = A[self.parent(A, i)], A[i]\n i = self.parent(A, i)",
"def heap_increase_key(self, A, i, key):\n if key < A[i]:\n raise Exception(\"New key must be greater than current key\")\n A[i] = key\n while i > 0 and A[self.parent(A, i)] < A[i]:\n A[i], A[self.parent(A, i)] = A[self.parent(A, i)], A[i]\n i = self.parent(A, i)",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def heappush(heap, item):\n pass",
"def insert(self, k):\n self.heapList.append(k)\n self.currentSize = self.currentSize + 1\n self.percUp(self.currentSize)",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def test_insert(self):\n self.minheap.heap = [0, 1, 4, 6, 9]\n self.minheap.insert(2)\n assert self.minheap.heap == [0, 1, 2, 6, 9, 4]",
"def insert(self, key):\r\n if self.root.num_keys() == self.max_num_keys:\r\n self.root = Node([], [self.root])\r\n self.root.split_child(0)\r\n\r\n node = self.root \r\n while not node.is_leaf():\r\n index = node.search(key)\r\n\r\n child = node.children[index]\r\n if child.num_keys() == self.max_num_keys:\r\n node.split_child(index)\r\n\r\n if node.keys[index] < key:\r\n index += 1\r\n\r\n node = node.children[index] \r\n\r\n node.insert(key)",
"def add(self, key):\n node, parent = Treap._find_node(key, self.root)\n if node:\n node.n += 1\n else:\n heap_id = self.rand.randrange(self.max_heap_id)\n node = Node(key, heap_id)\n if parent:\n node.parent = parent\n parent.child[node.key > parent.key] = node\n else:\n self.root = node\n\n self._prioritize(node)",
"def insert(self, key, value):\r\n hash_val = Hash(key, value)\r\n self.hash_table[self.horner_hash(key)] = hash_val\r\n self.num_items += 1\r\n\r\n if self.get_load_factor() > 0.5:\r\n prev = HashTable(self.table_size)\r\n prev.num_items = self.num_items\r\n prev.hash_table = self.hash_table\r\n prev.table_size = self.table_size\r\n\r\n self.table_size = self.table_size * 2 + 1\r\n self.num_items = 0\r\n self.hash_table = [None] * self.table_size\r\n\r\n for i in range(prev.table_size):\r\n if prev.hash_table[i] is not None:\r\n self.insert(prev.hash_table[i].key, prev.hash_table[i].value)",
"def insert(self, key, value): #hidden\n # return the bin number of table\n index = self.hash_function(key)\n # do not insert empty string\n if index != -1:\n # insert item in empty bucket\n if self.table[index] is None:\n self.table[index] = HashNode(key, value)\n self.size += 1\n # if the key is present, update value\n elif self.table[index].key == key:\n self.table[index].value = value\n # resolve conflicts\n else:\n index = self.quadratic_probe(key)\n if self.table[index] is None:\n self.table[index] = HashNode(key, value)\n self.size += 1\n # if the key is present, update value\n elif self.table[index].key == key:\n self.table[index].value = value\n # grow size\n load_factor = self.size / self.capacity\n if load_factor > 0.75:\n self.grow()",
"def add(self, key, value=None):\n if value is None:\n value = key\n node = HeapNode(key, value)\n self.store.append(node)\n self.heap_up(len(self.store)-1)\n return None",
"def add_to_heap(self, key, count=0):\n entry = [1 + count, next(self.counter), key, HeapItemStatus.ACTIVE]\n self.map[key] = entry\n heappush(self.heap, entry)",
"def build_heap(self, items):\n for key in items:\n self.insert(key)",
"def heap_push(self, value):\n if self.find(value) is None:\n self.table.append(value)\n self.percolate_up(self.get_size() - 1)",
"def test_insert(self):\n data = [4, 4, 8, 9, 4, 12, 9, 11, 13]\n h = Heap(data)\n\n h.insert(7)\n self.assertTrue(Heap.is_heap(h.data), 'should still be a heap')\n\n h.insert(10)\n self.assertTrue(Heap.is_heap(h.data), 'should still be a heap')\n\n h.insert(5)\n self.assertTrue(Heap.is_heap(h.data), 'should still be a heap')",
"def insert(self, key, value):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n for idx, key_val_pair in enumerate(bucket):\n k, v = key_val_pair\n if k == key:\n bucket[idx] = [key, value]\n return\n bucket.append([key, value])"
] | [
"0.7821339",
"0.7730161",
"0.7576347",
"0.7494207",
"0.7476214",
"0.74364775",
"0.7433599",
"0.730302",
"0.7184035",
"0.7181078",
"0.7181078",
"0.7160404",
"0.715419",
"0.7034118",
"0.7034118",
"0.69545007",
"0.6943963",
"0.6931309",
"0.692168",
"0.68648547",
"0.6861515",
"0.6759144",
"0.6744374",
"0.6743277",
"0.66912824",
"0.66678166",
"0.6666609",
"0.6626871",
"0.6623966",
"0.66013926"
] | 0.796826 | 0 |
Returns parent of node at n; None if there isn't a parent | def parent(self, n):
return None if n == 0 else (n - 1) // 2 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _parent(node):\n if node == _root():\n return _root()\n return (node + 1) // 2 - 1",
"def parent(self, n):\n return n._parent",
"def parent(self, node):\n self._validate_node(node)\n idx = node._index\n if idx == 0:\n return None # Root node has no parent\n if idx % 2 == 0:\n return self._array[(idx-2)//2] # Right child (even number)\n return self._array[(idx-1)//2] # left child (odd number)",
"def parent(self, index):\n if index == 0:\n print(\"index 0 has no parent\")\n return None\n return (index - 1) // 2",
"def parent(self, node):\r\n return self.find_node(node).parent.content",
"def parent(self, p):\n node = self._validate(p)\n return self._make_position(node._parent)",
"def get_parent(self, index):\n return self.heap[self.get_parent_index(index)]",
"def get_parent(self):\n return BinaryNode.or_none(self.parent)",
"def get_parent(root_node: ast.AST, node: ast.AST):\n try:\n return node.parent\n except AttributeError:\n add_parent_info(root_node)\n return node.parent",
"def get_top_parent(node):\n\n\ttop_node = cmds.listRelatives(node, p=True)\n\twhile top_node:\n\t\tnode = top_node[0]\n\t\ttop_node = cmds.listRelatives(node, p=True)\n\treturn node",
"def get_parent(self):\n if self.parent:\n return self.parent()\n else:\n return None",
"def parent(self, p):\n node = self._validate(p)\n return self._make_position(node.parent)",
"def parent(self, p):\n node = self._validate(p)\n return self._make_position(node._parent)",
"def parent(self, p):\n node = self._validate(p)\n return self._make_position(node._parent)",
"def parent(self, p):\n node = self._validate(p)\n return self._make_position(node._parent)",
"def fm_get_parent(self, idx):\n return self._relation_lst[self.PARENT][idx]",
"def parent(self,p):\n node = self._validate(p)\n return self._make_position(node._parent)",
"def parent(self, p):\n node = self._validate_position(p)\n return self._make_position(node)",
"def parent(self, index):\n if index == 0:\n return -1\n return self.adjacency_list[index][0]",
"def _get_parent(self, child_ix):\n if child_ix == 0:\n return None\n t = 1 if child_ix & 1 else 2\n return (child_ix - t) / 2",
"def _get_next_parent_node(self, parent):\n grandparent = parent.parent\n if grandparent is None:\n # here, we work at yearly/linear level\n return None\n\n parent_siblings = list(grandparent.children)\n sorted_keys = sorted(parent_siblings)\n index = sorted_keys.index(parent.timeperiod)\n if index + 1 >= len(sorted_keys):\n return None\n else:\n return grandparent.children[sorted_keys[index + 1]]",
"def return_parent(self):\n # Return parent if completed\n if self.completed:\n return self.father\n return -1",
"def _find_parent_node(self, item):\n # Start with the root node and keep track of its parent\n node = self.root\n parent = None\n # Loop until we descend past the closest leaf node\n while node is not None:\n # TODO: Check if the given item matches the node's data\n if ...:\n # Return the parent of the found node\n return parent\n # TODO: Check if the given item is less than the node's data\n elif ...:\n # TODO: Update the parent and descend to the node's left child\n parent = node\n node = ...\n # TODO: Check if the given item is greater than the node's data\n elif ...:\n # TODO: Update the parent and descend to the node's right child\n parent = node\n node = ...\n # Not found\n return parent",
"def parent(self, u):\n return self._ll_tree.get_parent(u)",
"def _determine_parent(self, caller):\n self.msgin(4, \"determine_parent\", caller)\n\n parent = None\n if caller:\n pname = caller.identifier\n\n if isinstance(caller, Package):\n parent = caller\n\n elif '.' in pname:\n pname = pname[:pname.rfind('.')]\n parent = self.findNode(pname)\n\n elif caller.packagepath:\n # XXX: I have no idea why this line\n # is necessary.\n parent = self.findNode(pname)\n\n self.msgout(4, \"determine_parent ->\", parent)\n return parent",
"def _parent(self, index):\r\n # Declaring the \"root\" its own parent, otherwise usual math\r\n return (index - 1) // 2 if index else 0",
"def find_parent_of(self, *args):\n return _ida_hexrays.citem_t_find_parent_of(self, *args)",
"def find(node):\n if node.parent != node:\n node.parent = find(node.parent)\n return node.parent",
"def get_parent(self, index):\n return (index - 1) // (2)",
"def parent(self):\n if not self._parents:\n return None\n elif len(self._parents) == 1:\n return tuple(self._parents)[0]\n else:\n raise RuntimeError('Ambiguous parent: there are multiple parents.')"
] | [
"0.7602089",
"0.7594332",
"0.7559299",
"0.7262482",
"0.72294337",
"0.71347463",
"0.70621294",
"0.7060756",
"0.70123017",
"0.6988264",
"0.6957921",
"0.69313556",
"0.68682724",
"0.68682724",
"0.68682724",
"0.6822674",
"0.6808915",
"0.6743967",
"0.672329",
"0.6721794",
"0.6720354",
"0.6702175",
"0.6671794",
"0.66457915",
"0.66145474",
"0.65666616",
"0.6509958",
"0.65094346",
"0.64686763",
"0.6466385"
] | 0.7795919 | 0 |
Returns right child of node at n; None if there isn't a right_child | def right_child(self, n):
if 2 * n + 2 >= len(self.heap):
return None
else:
return 2 * n + 2 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_node_right(self, n: MazeCell) -> MazeCell:\n if n.x == self._ncols - 1:\n return None\n else:\n return self.get_node(n.x + 1, n.y)",
"def right_child(self, position):\n child = 2 * position + 2\n if child > len(self.table) - 1:\n return None\n return child",
"def get_rightmost_child(self):\n\t\tif self.right_child == None:\n\t\t\treturn self\n\t\telse:\n\t\t\treturn self.right_child.get_rightmost_child()",
"def get_right_child(self, index):\n return self.heap[self.get_right_child_index(index)]",
"def get_rightchild(self):\n return self._rightchild",
"def right(self, n):\n return n._right",
"def get_right_node(self):\n\t\tif self.right_child == None:\n\t\t\t# if we are at the end of a branch\n\t\t\tlowest_left_parent = self.get_lowest_left_parent()\n\t\t\tif lowest_left_parent.parent == None:\n\t\t\t\t# if this was called from right edge of the tree\n\t\t\t\t# the lowest left parent is the 1/1 node\n\t\t\t\t# return the 1/0 (infinity) node on the right edge of the tree\n\t\t\t\treturn SBNode(frac=(1,0))\n\t\t\telse:\n\t\t\t\t# if we had a lower left parent\n\t\t\t\treturn lowest_left_parent.parent\n\t\telse:\n\t\t\treturn self.right_child.get_leftmost_child()",
"def right_child(self, index):\n return 2 * index + 1",
"def rightChild(self, pos):\n return (2 * pos) + 1",
"def rightChild(self, pos):\n return (2 * pos) + 1",
"def right(self, node):\r\n if self._col(node.count) < self.width - 1:\r\n return self.nodes[node.count + 1]\r\n else:\r\n return None",
"def right_child(self, index):\n return 2 * index + 2",
"def right_child(self, pos): \n return (2 * pos) + 1",
"def right_child(self, u):\n return self._ll_tree.get_right_child(u)",
"def add_right(self, n, e):\n if self.right(n) is not None:\n raise ValueError('Right child exists')\n newest = self._Node(e, n)\n n._right = newest\n self._size += 1\n return newest",
"def getRightChild(self):\n return _libsbml.ASTNode_getRightChild(self)",
"def get_child(self, n):\n child, _ = self.recursive_get_child(n)\n return child",
"def left_most_child(n):\n if n == None:\n return None\n while n.left:\n n = n.left\n return n",
"def recursive_get_child(self, n):\n if n == 0:\n return self, n\n elif not self.children:\n return False, n-1\n else:\n n = n - 1\n for child in self.children:\n target, new_n = child.recursive_get_child(n)\n n = new_n\n if target:\n return target, n\n return False, n",
"def get_right(self):\n return BinaryNode.or_none(self.right)",
"def _right(node):\n return 2 * node + 2",
"def right(self, node):\n self._validate_node(node)\n idx = node._index\n right_idx = 2*idx + 2\n if right_idx >= self._N:\n return None # Exceeds length of array\n return self._array[right_idx]",
"def get_right_child_index(self):\n return (2 * self.index) + 2",
"def get_node_down(self, n: MazeCell) -> MazeCell:\n if n.y == self._nrows - 1:\n return None\n else:\n return self.get_node(n.x, n.y + 1)",
"def sibling(self, n):\n parent = self.parent(n)\n if parent is None: # n is root\n return None\n if n == self.left(parent): # n is left child\n return self.right(parent)\n else: # n is right child\n return self.left(parent)",
"def right(self) -> Optional[\"ExpressionNode\"]:\n return self.__right",
"def right_left_most_has_right_child():\n from bbst import Bst\n return Bst([1, 5, 3, 10, 8, 6, 20, 7])",
"def find_successor(node):\n # If right child exists, find right child's left-most descendant.\n if node.right is not None:\n node = node.right\n while node.left is not None:\n node = node.left\n return node\n # If no right child, continue up the tree until we are coming\n # from a node's left child, then visit that node.\n last_node = node.right\n while last_node is node.right:\n if node.parent is None:\n return None\n last_node, node = node, node.parent\n # if we get here, we've come up from a left child\n return node",
"def get_next_node(node):\n\n if node.right_child is not None:\n # First case: ``node`` has right child, the next node is in the subtree\n # rooted at the right child, and it is leftmost node in this subtree.\n current_node = node.right_child\n while current_node.left_child is not None:\n current_node = current_node.left_child\n return current_node\n else:\n # Second case: The next node is (grand)parent, which is the first node\n # to the right of `node`\n current_node = node\n while current_node.parent is not None:\n if current_node.parent.left_child == current_node:\n return current_node.parent\n current_node = current_node.parent",
"def remove_right(self):\n temp = self._rightchild\n self._rightchild.set_parent(None)\n self.set_rightchild(None)\n return temp"
] | [
"0.76178944",
"0.7611006",
"0.725117",
"0.71613806",
"0.71096224",
"0.7073449",
"0.69539064",
"0.6897677",
"0.6885739",
"0.6885739",
"0.68463993",
"0.68296987",
"0.6825695",
"0.67875445",
"0.6721572",
"0.67042595",
"0.6691901",
"0.6611986",
"0.65743893",
"0.6552465",
"0.6468489",
"0.6385313",
"0.62724525",
"0.622769",
"0.6206727",
"0.6202663",
"0.6170341",
"0.61395246",
"0.6125109",
"0.6078727"
] | 0.7729437 | 0 |
Test case for aws_service_api_availability_zones_get | def test_aws_service_api_availability_zones_get(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_aws_service_api_regions_get(self):\n pass",
"def get_availability_zones(self, context, filters=None, fields=None,\n sorts=None, limit=None, marker=None,\n page_reverse=False):",
"def compute_zones(self):\n path = '/os-availability-zone/detail'\n res = self.compute.call(path, 'GET', data='', \n token=self.manager.identity.token)\n self.logger.debug('Get openstack availability zone: %s' % truncate(res))\n return res[0]['availabilityZoneInfo']",
"def validate_availability_zones(self, context, resource_type,\n availability_zones):",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"availability_zones\")",
"def availability_zones(self) -> Optional[Sequence[str]]:\n return pulumi.get(self, \"availability_zones\")",
"def test_list_zones_success(list_zone_context, shared_zone_test_context):\n result = shared_zone_test_context.list_zones_client.list_zones(name_filter=f\"*{shared_zone_test_context.partition_id}\", status=200)\n retrieved = result[\"zones\"]\n\n assert_that(retrieved, has_length(5))\n assert_that(retrieved, has_item(has_entry(\"name\", list_zone_context.search_zone1[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"adminGroupName\", list_zone_context.list_zones_group[\"name\"])))\n assert_that(retrieved, has_item(has_entry(\"backendId\", \"func-test-backend\")))\n\n assert_that(result[\"nameFilter\"], is_(f\"*{shared_zone_test_context.partition_id}\"))",
"def describe_availability_zones(\n self,\n request: dds_20151201_models.DescribeAvailabilityZonesRequest,\n ) -> dds_20151201_models.DescribeAvailabilityZonesResponse:\n runtime = util_models.RuntimeOptions()\n return self.describe_availability_zones_with_options(request, runtime)",
"def get_zones_output(project: Optional[pulumi.Input[Optional[str]]] = None,\n region: Optional[pulumi.Input[Optional[str]]] = None,\n status: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetZonesResult]:\n ...",
"def get_zones(self, context):\n # handling zones method in RPC\n response = self.dns_manager.get_zones(context)\n return response",
"def get(self, request):\n conn = get_sdk_connection(request)\n availability_zone_list = _sdk_object_to_list(\n conn.load_balancer.availability_zones()\n )\n\n return {'items': availability_zone_list}",
"def get_athlete_zones(self):\n pass",
"def test_list_zones_no_authorization(shared_zone_test_context):\n shared_zone_test_context.list_zones_client.list_zones(sign_request=False, status=401)",
"async def describe_availability_zones_async(\n self,\n request: dds_20151201_models.DescribeAvailabilityZonesRequest,\n ) -> dds_20151201_models.DescribeAvailabilityZonesResponse:\n runtime = util_models.RuntimeOptions()\n return await self.describe_availability_zones_with_options_async(request, runtime)",
"def test_list_zones_with_no_results(shared_zone_test_context):\n result = shared_zone_test_context.list_zones_client.list_zones(name_filter=\"this-wont-be-found\", max_items=2, status=200)\n zones = result[\"zones\"]\n\n assert_that(zones, has_length(0))\n\n assert_that(result[\"maxItems\"], is_(2))\n assert_that(result[\"nameFilter\"], is_(\"this-wont-be-found\"))\n assert_that(result, is_not(has_key(\"startFrom\")))\n assert_that(result, is_not(has_key(\"nextId\")))",
"def ex_list_availability_zones(self, only_available=True):\n params = {'Action': 'DescribeAvailabilityZones'}\n\n if only_available:\n params.update({'Filter.0.Name': 'state'})\n params.update({'Filter.0.Value.0': 'available'})\n\n params.update({'Filter.1.Name': 'region-name'})\n params.update({'Filter.1.Value.0': self.region_name})\n\n result = self.connection.request(self.path,\n params=params.copy()).object\n\n availability_zones = []\n for element in self._findall(result, 'availabilityZoneInfo/item'):\n name = self._findtext(element, 'zoneName')\n zone_state = self._findtext(element, 'zoneState')\n region_name = self._findtext(element, 'regionName')\n\n availability_zone = ExEC2AvailabilityZone(\n name=name,\n zone_state=zone_state,\n region_name=region_name\n )\n availability_zones.append(availability_zone)\n\n return availability_zones",
"def azs_lookup(session, lambda_compatible_only=False):\n if session is None:\n return []\n\n client = session.client('ec2')\n response = client.describe_availability_zones()\n # SH Removing Hack as subnet A is already in Production and causes issues trying to delete\n # We will strip out subnets A and C when creating the lambdas.\n #rtn = [(z[\"ZoneName\"], z[\"ZoneName\"][-1]) for z in response[\"AvailabilityZones\"] if z['ZoneName'] != 'us-east-1a']\n rtn = [(z[\"ZoneName\"], z[\"ZoneName\"][-1]) for z in response[\"AvailabilityZones\"]]\n\n if lambda_compatible_only:\n current_account = get_account_id_from_session(session)\n for az in rtn.copy():\n if az[1] == 'c' and current_account == hosts.PROD_ACCOUNT:\n rtn.remove(az)\n if az[1] == 'a' and current_account == hosts.DEV_ACCOUNT:\n rtn.remove(az)\n return rtn",
"def _get_available_regions():\n session = boto3.session.Session()\n\n return session.get_available_regions(service_name='s3')",
"def availability_zones(self, details=False):\n if details:\n az = _availability_zone.AvailabilityZoneDetail\n else:\n az = _availability_zone.AvailabilityZone\n return list(self._list(az, paginated=False))",
"def _extract_availability_zones(\n self,\n availability_zone: Optional[str],\n snapshot,\n source_volume,\n group: Optional[dict],\n volume_type: Optional[dict[str, Any]] = None) -> tuple[list[str],\n bool]:\n refresh_az = False\n type_azs = volume_utils.extract_availability_zones_from_volume_type(\n volume_type)\n type_az_configured = type_azs is not None\n if type_az_configured:\n assert type_azs is not None\n safe_azs = list(\n set(type_azs).intersection(self.availability_zones))\n if not safe_azs:\n raise exception.InvalidTypeAvailabilityZones(az=type_azs)\n else:\n safe_azs = self.availability_zones\n\n # If the volume will be created in a group, it should be placed in\n # in same availability zone as the group.\n if group:\n try:\n availability_zone = group['availability_zone']\n except (TypeError, KeyError):\n pass\n\n # Try to extract the availability zone from the corresponding snapshot\n # or source volume if either is valid so that we can be in the same\n # availability zone as the source.\n if availability_zone is None:\n if snapshot:\n try:\n availability_zone = snapshot['volume']['availability_zone']\n except (TypeError, KeyError):\n pass\n if source_volume:\n try:\n availability_zone = source_volume['availability_zone']\n except (TypeError, KeyError):\n pass\n\n if availability_zone is None and not type_az_configured:\n if CONF.default_availability_zone:\n availability_zone = CONF.default_availability_zone\n else:\n # For backwards compatibility use the storage_availability_zone\n availability_zone = CONF.storage_availability_zone\n\n if availability_zone and availability_zone not in safe_azs:\n refresh_az = True\n if CONF.allow_availability_zone_fallback:\n original_az = availability_zone\n availability_zone = (\n CONF.default_availability_zone or\n CONF.storage_availability_zone)\n LOG.warning(\"Availability zone '%(s_az)s' \"\n \"not found, falling back to \"\n \"'%(s_fallback_az)s'.\",\n {'s_az': original_az,\n 's_fallback_az': availability_zone})\n else:\n raise exception.InvalidAvailabilityZone(az=availability_zone)\n\n # If the configuration only allows cloning to the same availability\n # zone then we need to enforce that.\n if availability_zone and CONF.cloned_volume_same_az:\n snap_az = None\n try:\n snap_az = snapshot['volume']['availability_zone']\n except (TypeError, KeyError):\n pass\n if snap_az and snap_az != availability_zone:\n msg = _(\"Volume must be in the same \"\n \"availability zone as the snapshot\")\n raise exception.InvalidInput(reason=msg)\n source_vol_az = None\n try:\n source_vol_az = source_volume['availability_zone']\n except (TypeError, KeyError):\n pass\n if source_vol_az and source_vol_az != availability_zone:\n msg = _(\"Volume must be in the same \"\n \"availability zone as the source volume\")\n raise exception.InvalidInput(reason=msg)\n\n if availability_zone:\n return [availability_zone], refresh_az\n else:\n return safe_azs, refresh_az",
"def test_list_zones_ignore_access_success(shared_zone_test_context):\n result = shared_zone_test_context.list_zones_client.list_zones(ignore_access=True, status=200)\n retrieved = result[\"zones\"]\n\n assert_that(result[\"ignoreAccess\"], is_(True))\n assert_that(len(retrieved), greater_than(5))",
"def test_account_returns_list_of_zones(self):\n account = Account('test-account')\n a_zone = Zone('azone.com')\n b_zone = Zone('bzone.com')\n c_zone = Zone('czone.com')\n account.add_zone(a_zone)\n account.add_zone(b_zone)\n account.add_zone(c_zone)\n\n self.assertDictEqual(account.zones, {\n 'azone.com': a_zone,\n 'bzone.com': b_zone,\n 'czone.com': c_zone,\n })",
"def test_api_regions(self):\n # load api base\n r = requests.get('{server}/api/0.1/'.format(\n server=self.get_server_url())).json()\n # load regions from url specified in api base\n r = requests.get(r['regions']).json()\n self.assertIn('count', r)\n self.assertIn('next', r)\n self.assertIn('prev', r)\n self.assertIn('regions', r)",
"def availability_zone(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"availability_zone\")",
"async def public_get_time_zones_async(\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = PublicGetTimeZones.create(\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )"
] | [
"0.7338367",
"0.7293569",
"0.717585",
"0.7083272",
"0.68036807",
"0.68036807",
"0.68036807",
"0.68036807",
"0.68036807",
"0.68036807",
"0.6736183",
"0.67031825",
"0.66727173",
"0.66036886",
"0.65988815",
"0.6523843",
"0.6429594",
"0.6417555",
"0.63666177",
"0.6355343",
"0.62968653",
"0.6288459",
"0.62346643",
"0.6223419",
"0.6194103",
"0.6183128",
"0.6181463",
"0.6178133",
"0.6172322",
"0.609242"
] | 0.95381105 | 0 |
Test case for aws_service_api_flavor_get | def test_aws_service_api_flavor_get(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_aws_service_api_flavors_get(self):\n pass",
"def test_get_flavor(self):\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n self.assertEqual(self.flavor_ref, flavor.id)",
"def get_flavor(name):\r\n return nova.flavors.find(name=name)",
"def get_flavor(self, request, tenant_id, flavor_id):\n response_data = get_flavor(flavor_id)\n request.setResponseCode(response_data[1])\n return json.dumps(response_data[0])",
"def get_flavor(self, flavor_id):\n url = '%s/flavors/%s' % (self.catalog['compute'], flavor_id)\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['flavor']\n else:\n LOG.error('Get flavor failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)",
"def test_get_non_existent_flavor(self):\n try:\n self.flavors_client.get_flavor_details(999)\n self.fail('No exception thrown for a non-existent flavor id')\n except ItemNotFound:\n pass",
"def test_list_flavors_with_detail(self):\n response = self.flavors_client.list_flavors_with_detail()\n flavors = response.entity\n self.assertTrue(len(flavors) > 0)\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n self.assertIn(flavor, flavors)",
"def test_get_flavor_list_with_details_is_consistent(self):\n get_flavor_list_response_body = self.get_server_flavor('/flavors/detail')\n get_flavor_list2_response_body = self.get_server_flavor('/flavors/detail')\n self.assertEqual(get_flavor_list_response_body, get_flavor_list2_response_body)",
"def get_flavors(self):\n url = '%s/flavors/detail' % self.catalog['compute']\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['flavors']\n else:\n LOG.error('Get flavors failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)",
"def get_flavors() -> dict:\n flavor_rq = request(\n method=\"GET\", url=app.config[\"FLAVORS_REF\"], headers=build_header(),\n )\n\n if not flavor_rq.ok:\n HTTPError(f\"Can not get flavor id for virtual machine: {flavor_rq.status_code}\")\n\n return flavor_rq.json()",
"def get_flavor(self, flavor):\n return self._get(_flavor.Flavor, flavor)",
"def get_flavor(self, flavor):\n return self._get(_flavor.Flavor, flavor)",
"def get_azure_flavor(flavor):\n if flavor not in CONF.ec2_flavor_to_size_map:\n raise Exception(\"Could not find mapping for the EC2 image size \"\n \"'%s', please edit 'ec2_flavor_to_size_map' in the \"\n \"configuration.\" % (flavor))\n\n return CONF.ec2_flavor_to_size_map[flavor]",
"def test_list_flavors(self):\n response = self.flavors_client.list_flavors()\n flavors = response.entity\n self.assertTrue(len(flavors) > 0)\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n flavor_ids = [x.id for x in flavors]\n self.assertIn(flavor.id, flavor_ids,\n \"The expected flavor: %s was not found in \"\n \"the flavor list\" % flavor.id)",
"def values_from_flavor(flavor):\n try:\n flavor = Network.FLAVORS[flavor]\n except KeyError:\n raise faults.BadRequest(\"Unknown network flavor\")\n\n mode = flavor.get(\"mode\")\n\n link = flavor.get(\"link\")\n if link == \"pool\":\n link = allocate_resource(\"bridge\")\n\n mac_prefix = flavor.get(\"mac_prefix\")\n if mac_prefix == \"pool\":\n mac_prefix = allocate_resource(\"mac_prefix\")\n\n tags = flavor.get(\"tags\")\n\n return mode, link, mac_prefix, tags",
"def flavors(self, **kwargs):\n if kwargs is None:\n result = self.get_list(self.cloudman.compute.flavors(),\n kind=\"flavor\")\n if \"name\" in kwargs:\n result = self.flavor(name=kwargs['name'])\n\n else:\n result = self.get_list(self.cloudman.compute.flavors(**kwargs),\n kind=\"flavor\")\n\n return result",
"def flavor(self, name=None):\n raise NotImplementedError",
"def get_flavor(flavor_id, include_deleted=False):\n\n try:\n flavor_id = int(flavor_id)\n if include_deleted:\n return Flavor.objects.get(id=flavor_id)\n else:\n return Flavor.objects.get(id=flavor_id, deleted=include_deleted)\n except (ValueError, TypeError):\n raise faults.BadRequest(\"Invalid flavor ID '%s'\" % flavor_id)\n except Flavor.DoesNotExist:\n raise faults.ItemNotFound('Flavor not found.')",
"def test_check_add_flavor(self):\n for flavor_id, flavor in OPENSTACK_FLAVOR.items():\n self.cmd._add_flavor(flavor, flavor_id)\n ralph_flavor = CloudFlavor.objects.get(flavor_id=flavor_id)\n self.assertEqual(ralph_flavor.name, flavor['name'])\n self.assertEqual(ralph_flavor.cloudprovider, self.cloud_provider)\n self.assertIn(flavor['tag'], ralph_flavor.tags.names())\n self.assertEqual(flavor['cores'], ralph_flavor.cores)\n self.assertEqual(flavor['memory'], ralph_flavor.memory)\n self.assertEqual(flavor['disk'], ralph_flavor.disk)",
"def test_aws_service_api_vm_get(self):\n pass",
"def flavors(request): # pylint: disable=unused-argument\n # We call our method\n response = BACKEND.flavors()\n return JsonResponse(response)",
"def flavor(self, name=None):\n return self.find(self.flavors(), name=name)",
"def get(self, request, flavor_id):\n conn = get_sdk_connection(request)\n flavor = conn.load_balancer.find_flavor(flavor_id)\n return _get_sdk_object_dict(flavor)",
"def test_aws_service_api_image_get(self):\n pass",
"def test_will_not_get_instance_by_unknown_flavor_id(self):\n self.assertRaises(exception.FlavorNotFound,\n instance_types.get_instance_type_by_flavor_id,\n 'unknown_flavor')",
"def test_create_flavor(self):\n # Create Flavor\n flavor_settings = FlavorConfig(\n name=self.flavor_name, ram=1, disk=1, vcpus=1)\n self.flavor_creator = OpenStackFlavor(self.os_creds, flavor_settings)\n flavor = self.flavor_creator.create()\n self.assertTrue(validate_flavor(self.nova, flavor_settings, flavor))",
"def _create_flavor(self, context, flavor):\n flavor_dict = flavor.__dict__\n name = self.prefix + flavor.name\n flavorid = self.prefix + flavor.id\n memory = flavor.ram\n vcpus = flavor.vcpus\n root_gb = flavor.disk\n ephemeral_gb = flavor_dict.get('OS-FLV-EXT-DATA:ephemeral', 0)\n u_swap = flavor_dict.get('swap', 0)\n rxtx_factor = flavor_dict.get('rxtx_factor', 1.0)\n is_public = flavor_dict.get('os-flavor-access:is_public', True)\n if u_swap == \"\":\n swap = 0\n else:\n swap = int(u_swap)\n\n try:\n return flavors.create(name, memory, vcpus, root_gb,\n ephemeral_gb=ephemeral_gb,\n flavorid=flavorid, swap=swap,\n rxtx_factor=rxtx_factor,\n is_public=is_public)\n except exception.InstanceExists as err:\n raise err",
"def get_flavor(self, flavor_id):\n return self._flavor_manager.get(flavor_id)",
"def test_aws_service_api_vm_details_get(self):\n pass",
"def test_aws_service_api_vms_get(self):\n pass"
] | [
"0.86479574",
"0.7678447",
"0.71027285",
"0.70904493",
"0.7076974",
"0.6954673",
"0.68324184",
"0.6791326",
"0.6621454",
"0.657417",
"0.65623415",
"0.65623415",
"0.6551517",
"0.652183",
"0.637626",
"0.63374686",
"0.63270867",
"0.6284752",
"0.62383753",
"0.62356555",
"0.6222352",
"0.6212525",
"0.6197924",
"0.617048",
"0.6153198",
"0.6136496",
"0.61255306",
"0.6123151",
"0.6122831",
"0.60986197"
] | 0.9599665 | 0 |
Test case for aws_service_api_flavors_get | def test_aws_service_api_flavors_get(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_aws_service_api_flavor_get(self):\n pass",
"def get_flavors(self):\n url = '%s/flavors/detail' % self.catalog['compute']\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['flavors']\n else:\n LOG.error('Get flavors failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)",
"def get_flavors() -> dict:\n flavor_rq = request(\n method=\"GET\", url=app.config[\"FLAVORS_REF\"], headers=build_header(),\n )\n\n if not flavor_rq.ok:\n HTTPError(f\"Can not get flavor id for virtual machine: {flavor_rq.status_code}\")\n\n return flavor_rq.json()",
"def test_list_flavors(self):\n response = self.flavors_client.list_flavors()\n flavors = response.entity\n self.assertTrue(len(flavors) > 0)\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n flavor_ids = [x.id for x in flavors]\n self.assertIn(flavor.id, flavor_ids,\n \"The expected flavor: %s was not found in \"\n \"the flavor list\" % flavor.id)",
"def flavors(request): # pylint: disable=unused-argument\n # We call our method\n response = BACKEND.flavors()\n return JsonResponse(response)",
"def show_flavors():\n return get_flavors()",
"def test_list_flavors_limit_results(self):\n response = self.flavors_client.list_flavors(limit=1)\n flavors = response.entity\n self.assertEqual(1, len(flavors))",
"def test_list_flavors_with_detail(self):\n response = self.flavors_client.list_flavors_with_detail()\n flavors = response.entity\n self.assertTrue(len(flavors) > 0)\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n self.assertIn(flavor, flavors)",
"def test_get_flavor(self):\n response = self.flavors_client.get_flavor_details(self.flavor_ref)\n flavor = response.entity\n self.assertEqual(self.flavor_ref, flavor.id)",
"def test_list_flavors_detailed_limit_results(self):\n response = self.flavors_client.list_flavors_with_detail(limit=1)\n flavors = response.entity\n self.assertEqual(1, len(flavors))",
"def test_aws_service_api_vms_get(self):\n pass",
"def flavors(self, **kwargs):\n if kwargs is None:\n result = self.get_list(self.cloudman.compute.flavors(),\n kind=\"flavor\")\n if \"name\" in kwargs:\n result = self.flavor(name=kwargs['name'])\n\n else:\n result = self.get_list(self.cloudman.compute.flavors(**kwargs),\n kind=\"flavor\")\n\n return result",
"def test_list_flavors_min_disk_greater_than_max_flavor_disk(self):\n response = self.flavors_client.list_flavors(min_disk=self.max_disk+1)\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def list_flavors(cls):\n return cls.dbdriver.list_flavors()",
"def get_flavor(name):\r\n return nova.flavors.find(name=name)",
"def test_list_flavors_using_marker(self):\n response = self.flavors_client.list_flavors()\n flavors = response.entity\n self.assertGreater(len(flavors), 0, 'Flavors list is empty')\n flavor_marker = flavors[0]\n\n response = self.flavors_client.list_flavors(marker=flavor_marker.id)\n filtered_flavors = response.entity\n self.assertNotIn(flavor_marker, filtered_flavors,\n msg='Filtered flavor was incorrectly '\n 'included in the list of returned flavors')",
"def get(self, request):\n conn = get_sdk_connection(request)\n flavor_list = _sdk_object_to_list(\n conn.load_balancer.flavors()\n )\n\n return {'items': flavor_list}",
"def test_list_flavors_min_disk_greater_than_max_flavor_ram(self):\n response = self.flavors_client.list_flavors(min_ram=self.max_ram+1)\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def test_get_flavor_list_with_details_is_consistent(self):\n get_flavor_list_response_body = self.get_server_flavor('/flavors/detail')\n get_flavor_list2_response_body = self.get_server_flavor('/flavors/detail')\n self.assertEqual(get_flavor_list_response_body, get_flavor_list2_response_body)",
"def test_aws_service_api_vm_get(self):\n pass",
"def test_list_flavors_detailed_min_ram_larger_than_max_flavor_ram(self):\n response = self.flavors_client.list_flavors_with_detail(\n min_ram=self.max_ram+1)\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def test_list_flavors_detailed_min_disk_larger_than_max_flavor_disk(self):\n response = self.flavors_client.list_flavors_with_detail(\n min_disk='99999')\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def flavors(self, **kwargs):\n raise NotImplementedError",
"def test_aws_service_api_image_get(self):\n pass",
"def test_list_flavors_detailed_using_marker(self):\n response = self.flavors_client.list_flavors_with_detail()\n flavors = response.entity\n self.assertGreater(len(flavors), 0, 'Flavors list is empty')\n flavor_marker = flavors[0]\n\n response = self.flavors_client.list_flavors_with_detail(\n marker=flavor_marker.id)\n filtered_flavors = response.entity\n self.assertNotIn(flavor_marker, filtered_flavors,\n msg='Filtered flavor was incorrectly '\n 'included in the list of returned flavors')",
"def list_flavors(self, limit=None, marker=None):\n return self._flavor_manager.list(limit=limit, marker=marker)",
"def test_aws_service_api_volumes_get(self):\n pass",
"def test_list_flavors_filter_by_invalid_min_disk(self):\n with self.assertRaises(BadRequest):\n response = self.flavors_client.list_flavors(\n min_disk='invalid_disk')\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def test_list_flavors_filter_by_invalid_min_ram(self):\n with self.assertRaises(BadRequest):\n response = self.flavors_client.list_flavors(min_ram='invalid_ram')\n flavors = response.entity\n self.assertEqual(len(flavors), 0)",
"def test_get_cloud_resources(self):\n pass"
] | [
"0.8574573",
"0.7565045",
"0.714905",
"0.71012104",
"0.70156753",
"0.7013291",
"0.68955284",
"0.6883247",
"0.6825781",
"0.6584661",
"0.65827054",
"0.6564486",
"0.64388686",
"0.64029205",
"0.6390174",
"0.6388977",
"0.63272315",
"0.6317298",
"0.6264503",
"0.62626445",
"0.62079275",
"0.6201991",
"0.61698425",
"0.61174667",
"0.6102673",
"0.60981935",
"0.605647",
"0.6052927",
"0.6032569",
"0.60300946"
] | 0.95660037 | 0 |
Test case for aws_service_api_image_get | def test_aws_service_api_image_get(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_aws_service_api_public_image_get(self):\n pass",
"def test_aws_service_api_private_image_get(self):\n pass",
"def test_aws_service_api_public_images_get(self):\n pass",
"def test_aws_service_api_private_images_get(self):\n pass",
"def test_get_ao_image(self):\n response = self.client.open(\n '/rui-support/ao-image',\n method='GET',\n content_type='application/ld+json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def test_get_image_url(self, mock_get):\n\n image_page_url = self.test_data[\"image_page_url\"]\n image_page_response = self.test_data[\"image_page_response\"]\n image_tag_src = self.test_data[\"image_tag_src\"]\n \n mock_get.return_value = self._build_mock_response(text = image_page_response)\n\n self.assertEqual(self.retriever._get_image_url(image_page_url), \\\n image_tag_src, msg = \"Unable to extract image tag from the image page\")",
"def get_image(event, context):\n try:\n bucket_key = event['pathParameters']['bucket_key']\n except KeyError:\n return _bad_request_error('No bucket key')\n try:\n session_token= event['headers']['s3-session-token']\n access_key_id = event['headers']['s3-access-key-id']\n secret_access_key = event['headers']['s3-access-key']\n except KeyError:\n return _non_authorized_error(\n 'Not enough credentials to authorize')\n try:\n image = SafeImage(\n session_token=session_token,\n access_key_id=access_key_id,\n secret_access_key=secret_access_key).retrieve(bucket_key)\n except ClientError as e:\n # TODO: log here\n print(e)\n return _non_authorized_error(\n 'Invalid credentials')\n return _success_image(image)",
"def test_read_image(self):\n pass",
"def test_images(self):\n\n message = {\"method\": \"images\", \"params\": {\"elem\": None}}\n response = yield self._get_response(message)\n\n self.assertIsInstance(response, dict)\n self.assertEqual(response[\"method\"], \"images\")\n self.assertIsInstance(response[\"result\"], list)\n\n images = [i[\"tag\"] for i in response[\"result\"]]\n\n self.assertIn(self.tag_image, images)",
"async def test_get_image(opp, utcnow):\n helper = await setup_test_component(opp, create_camera)\n image = await camera.async_get_image(opp, helper.entity_id)\n assert image.content == base64.b64decode(FAKE_CAMERA_IMAGE)",
"def image_get(auth=None, **kwargs):\n cloud = get_operator_cloud(auth)\n kwargs = _clean_kwargs(**kwargs)\n return cloud.get_image(**kwargs)",
"def test_image_display(self):\n\n result = self.client.get(\"/select_image\")\n\n self.assertIn(b\"/static/uploads/girl-glowing-skin-blue-eyes.jpg\", result.data)",
"def get_image_output(id: Optional[pulumi.Input[Optional[int]]] = None,\n name: Optional[pulumi.Input[Optional[str]]] = None,\n slug: Optional[pulumi.Input[Optional[str]]] = None,\n source: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetImageResult]:\n ...",
"def testimage_handler(self):\n\t\t\n\t\tthings = Thing.objects.all()\n\t\tif len( things ):\n\t\t\tthing = things[0]\n\t\telse:\n\t\t\tc = Client()\n\t\t\tdata = parse_qs( 'title=&tags=&lattitude=32.82248&longitude=-96.762986&duration=&parent=&privacy=U&lifespan=&format=txt' )\n\t\t\tdata[ 'media' ] = open( MEDIA_ROOT + 'unittest_image.jpg' )\n\t\t\tc.post( '/api/place/', data )\n\t\t\t\n\t\t\tthing = Thing.objects.all()[0]\n\n\t\t\n\t\turi = thing.media.replace( 'http://' + DOMAIN, '' )\n\t\t\n\t\tc = Client()\n\t\tresponse = c.get( uri )\n\t\tself.failUnlessEqual(response.status_code, 200)",
"def test_aws_service_api_snapshots_get(self):\n pass",
"async def image(self, tag: Tag, get_bytes: bool=False):\r\n if not isinstance(tag, (str, enumeration.SFWImageTags, enumeration.NSFWImageTags)):\r\n raise TypeError(f\"{Tag} expected in `tag`\")\r\n\r\n if type(get_bytes) is not bool:\r\n raise TypeError(\"bool expected in `get_bytes`\")\r\n\r\n str_tag = tag if type(tag) is str else tag.value\r\n\r\n data_response = await self.http.endpoint(\"img/\" + str_tag)\r\n\r\n data_response[\"tag\"] = tag\r\n\r\n if get_bytes:\r\n image_url = data_response[\"url\"]\r\n image_bytes = await self.http.get_image_bytes(image_url)\r\n data_response[\"bytes\"] = image_bytes\r\n\r\n return result.ImageResult(data_response)",
"def get_image(self, request, tenant_id, image_id):\n response_data = get_image(image_id)\n request.setResponseCode(response_data[1])\n return json.dumps(response_data[0])",
"def test_list_image_metadata(self):\n pass",
"def test_create_image(self):\n pass",
"def get_image(id: Optional[int] = None,\n name: Optional[str] = None,\n slug: Optional[str] = None,\n source: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetImageResult:\n __args__ = dict()\n __args__['id'] = id\n __args__['name'] = name\n __args__['slug'] = slug\n __args__['source'] = source\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('digitalocean:index/getImage:getImage', __args__, opts=opts, typ=GetImageResult).value\n\n return AwaitableGetImageResult(\n created=pulumi.get(__ret__, 'created'),\n description=pulumi.get(__ret__, 'description'),\n distribution=pulumi.get(__ret__, 'distribution'),\n error_message=pulumi.get(__ret__, 'error_message'),\n id=pulumi.get(__ret__, 'id'),\n image=pulumi.get(__ret__, 'image'),\n min_disk_size=pulumi.get(__ret__, 'min_disk_size'),\n name=pulumi.get(__ret__, 'name'),\n private=pulumi.get(__ret__, 'private'),\n regions=pulumi.get(__ret__, 'regions'),\n size_gigabytes=pulumi.get(__ret__, 'size_gigabytes'),\n slug=pulumi.get(__ret__, 'slug'),\n source=pulumi.get(__ret__, 'source'),\n status=pulumi.get(__ret__, 'status'),\n tags=pulumi.get(__ret__, 'tags'),\n type=pulumi.get(__ret__, 'type'))",
"def test_get_image_task(self):\n resp = self.app.get('/api/2/inf/esrs/image',\n headers={'X-Auth': self.token})\n\n task_id = resp.json['content']['task-id']\n expected = 'asdf-asdf-asdf'\n\n self.assertEqual(task_id, expected)",
"def test_get_image_exists_not(self):\n with self.assertRaises(errors.NotFound):\n self.docker.images.get(\"image_does_not_exists\")",
"def test_read_namespaced_image_stream_image(self):\n pass",
"def test_list_image(self):\n pass",
"def test_get_image(self):\n with open(self.subject, \"rb\") as f:\n content = f.read()\n\n image = image_helper.get_image(content)\n\n self.assertEqual(image.size, (800, 450))",
"def get_image(self, image_id):\n url = '%s/v1/images/%s' % (self.catalog['image'], image_id)\n res = self.get(url)\n if res['status'] == 200:\n return json.loads(res['body'])['image']\n else:\n LOG.error('Get image failed: %s %s %s' %\n (res['status'], res['reason'], res['body']))\n raise InvalidResponse(res)",
"def test_get_file_image(self):\n image = image_helper.get_file_image(self.subject)\n\n self.assertEqual(image.size, (800, 450))",
"def test_one_image(self, img):\n return self.__image_pipeline(img)",
"def test_search_bogus_image(self):\n with self.assertRaises(errors.APIError):\n self.docker.images.search(\"bogus/bogus\")",
"def test_get_image_task(self):\n resp = self.app.get('/api/2/inf/esrs/image',\n headers={'X-Auth': self.token})\n\n task_id = resp.headers['Link']\n expected = '<https://localhost/api/2/inf/esrs/task/asdf-asdf-asdf>; rel=status'\n\n self.assertEqual(task_id, expected)"
] | [
"0.8832171",
"0.8454593",
"0.84126693",
"0.80433905",
"0.7256785",
"0.7198876",
"0.7104483",
"0.71016484",
"0.69397616",
"0.6722128",
"0.66869414",
"0.66866714",
"0.6682441",
"0.665069",
"0.6630389",
"0.6626029",
"0.6619967",
"0.6611323",
"0.66050965",
"0.6582941",
"0.6579301",
"0.6559919",
"0.6519843",
"0.64998126",
"0.647262",
"0.64610064",
"0.6447131",
"0.6446612",
"0.6396583",
"0.6387447"
] | 0.94997585 | 0 |
Test case for aws_service_api_interfaces_get | def test_aws_service_api_interfaces_get(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getInterface(self):\n\t\tquery = ''\n\t\tconn = self.get_connection()\n\t\theaders = { 'Content-type' : 'application/json', 'Authorization' : 'A10 %s' %self.sessionid}\n\t\tconn.request('GET', self.get_path() + '/' + query, headers=headers)\n\t\tresponse = conn.getresponse()\n\t\texpected_status = 200\n\t\terrors = {500: 'An unexpected runtime exception', 404: 'Specified interface does not exist'}\n\t\tpayload = self.get_output(response, expected_status, errors)\n\t\tconn.close()\n\t\tif self.debug:\n\t\t\tprint 'payload:', payload\n\t\tif payload == '':\n\t\t\tpayload = None\n\t\tif payload is not None:\n\t\t\tdata = json.loads(payload)\n\t\t\tpayload= data.get('interface')\n\t\treturn deserialize_Interface_json(payload)",
"def _get_interfaces(self):\n return self.__interfaces",
"def _get_interfaces(self):\n return self.__interfaces",
"def _get_interfaces(self):\n return self.__interfaces",
"def get_interfaces(self):\n raise NotImplementedError",
"def get_interface_info(self): # real signature unknown; restored from __doc__\n pass",
"def test_ipam_services_read(self):\n pass",
"def fusion_api_get_appliance_interfaces(self, api=None, headers=None):\n return self.interfaces.get(api, headers)",
"def test_Interfaces(self):\n self.assertTrue(\n self.ospf.parse_state(\n pattern='ospf_ints',\n cmd_key='sh_ospf_ints') == ['Ethernet0'], 'OSPF: interfaces not found')",
"def get_interfaces(switch):\n\n log(\"Entering {0}.\".format(sys._getframe().f_code.co_name), level='DEBUG')\n conn_error = False\n commands = [\"show interfaces status\"]\n\n try:\n response = switch.runCmds(1, commands)\n except ProtocolError, err:\n (errno, msg) = err[0]\n # 1002: invalid command\n if errno == 1002:\n log(\"Invalid EOS interface name ({0})\".format(commands), error=True)\n else:\n conn_error = True\n log(\"ProtocolError while retrieving {0} ([{1}] {2})\".\n format(commands, errno, msg),\n error=True)\n except Exception, err:\n conn_error = True\n # 60: Operation timed out\n # 61: Connection refused (http vs https?)\n # 401: Unauthorized\n # 405: Method Not Allowed (bad URL)\n if hasattr(err, 'errno'):\n if err.errno == 60:\n log(\"Connection timed out: Incorrect hostname/IP or eAPI\"\n \" not configured on the switch.\", error=True)\n elif err.errno == 61:\n log(\"Connection refused: http instead of https selected or\"\n \" eAPI not configured on the switch.\", error=True)\n else:\n log(\"General Error retrieving {0} ({1})\".format(commands,\n err),\n error=True)\n else:\n # Parse the string manually\n msg = str(err)\n msg = msg.strip('<>')\n err = msg.split(': ')[-1]\n\n if \"401 Unauthorized\" in err:\n log(\"ERROR: Bad username or password\")\n elif \"405 Method\" in err:\n log(\"ERROR: Incorrect URL\")\n else:\n log(\"HTTP Error retrieving {0} ({1})\".format(commands,\n err),\n error=True)\n\n if conn_error:\n raise EapiException(\"Connection error with eAPI\")\n\n # Filter out non-Ethernet interfaces\n for interface in response[0][u'interfaceStatuses'].keys():\n if str(interface)[:8] != 'Ethernet':\n response[0][u'interfaceStatuses'].pop(interface, None)\n\n return response[0][u'interfaceStatuses']",
"def interface(self, **kwargs):\n\n return self.api_request(self._get_method_fullname(\"interface\"), kwargs)",
"def test_get_client_and_interface_and_valid_interfaces(self):\n kwargs = {\n 'project_name': 'PROJECT_NAME',\n 'username': 'USERNAME',\n 'password': 'PASSWORD',\n 'auth_url': 'http://localhost:35357/v2.0',\n 'service_type': '',\n 'interface': ['ignored'],\n 'valid_interfaces': ['internal', 'public']\n }\n self._test_get_client(expected_interface=['internal', 'public'],\n **kwargs)",
"def test_exposeInterfaces(self):\n if self.plugin is None:\n return\n\n cs = settings.Settings()\n results = self.plugin.exposeInterfaces(cs)\n if results is None or not results:\n return\n\n # each plugin should return a list\n self.assertIsInstance(results, list)\n for result in results:\n # Make sure that all elements in the list satisfy the constraints of the\n # hookspec\n self.assertIsInstance(result, tuple)\n self.assertEqual(len(result), 3)\n\n order, interface, kwargs = result\n\n self.assertIsInstance(order, (int, float))\n self.assertTrue(issubclass(interface, interfaces.Interface))\n self.assertIsInstance(kwargs, dict)",
"def test_interfaces():\n with patch.object(\n salt.utils.network, \"win_interfaces\", MagicMock(return_value=True)\n ):\n assert win_network.interfaces()",
"def interfaces(self):",
"def interfaces(self):",
"def test_aws_service_api_image_get(self):\n pass",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface",
"def _get_interface(self):\n return self.__interface"
] | [
"0.6886365",
"0.68692887",
"0.68692887",
"0.68692887",
"0.6744842",
"0.64471203",
"0.6425522",
"0.64148664",
"0.63893855",
"0.63771516",
"0.6328008",
"0.6327504",
"0.6318172",
"0.61683834",
"0.6158",
"0.6158",
"0.6126433",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205",
"0.61211205"
] | 0.9369289 | 0 |
Test case for aws_service_api_keypair_delete | def test_aws_service_api_keypair_delete(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_delete_api_key(self):\n pass",
"def test_aws_service_api_keypair_get(self):\n pass",
"def test_delete_api_key_from_org(self):\n pass",
"def test_delete():\n test_key = 'qmk_compiler_test_unique_key_name'\n\n # Make sure our test key doesn't exist\n try:\n qmk_storage.get(test_key)\n raise RuntimeError('%s exists on S3 when it should not!' % test_key)\n except Exception as e:\n if e.__class__.__name__ != 'NoSuchKey':\n raise\n\n # Store a test key we can delete\n qmk_storage.put(test_key, 'hello')\n assert qmk_storage.get(test_key) == 'hello'\n qmk_storage.delete(test_key)\n\n # Make sure it actually deleted\n try:\n qmk_storage.get(test_key)\n raise RuntimeError('%s exists on S3 when it should not!' % test_key)\n except Exception as e:\n if e.__class__.__name__ != 'NoSuchKey':\n raise",
"def delete(ctx: CLIContext, access_key):\n with Session() as session:\n try:\n data = session.KeyPair.delete(access_key)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='deletion',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='deletion',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n extra_info={\n 'access_key': access_key,\n },\n )",
"def test_aws_service_api_keypair_generate_post(self):\n pass",
"def delete_keypair(self, username, access_key):\n msg = \"delete_keypair not implemented\"\n raise NotImplementedError(msg)",
"def deleteKey(self):\n\n self.key_del_response = self.ec2.delete_key_pair(KeyName=self.key)",
"def test_delete(self):\n fake_key_name = 'fake_key_name'\n\n with patch('iceit.backends.Key', spec=True) as mock_key:\n mock_key.return_value = mock_key\n backend = self.test_init_valid()\n backend.delete(fake_key_name)\n\n mock_key.assert_called_once_with(backend.bucket, fake_key_name)\n backend.bucket.delete_key.assert_called_once_with(mock_key)",
"def delete_key(stub, key, version):\n try:\n response = stub.Delete(keyval_pb2.DeleteRequest(key=key, current_version=version))\n print(\"Delete result:\")\n print_response(response)\n except grpc.RpcError as exception:\n print_response(exception)",
"def test_aws_service_api_keypairs_get(self):\n pass",
"def DeleteKey(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def test_create_delete_key(self):\n client = IPythonClient()\n dac = Context(client)\n # Create and push a key/value.\n key, value = dac._generate_key(), 'test'\n dac._push({key: value})\n # Delete the key.\n dac.delete_key(key)\n dac.close()\n client.close()",
"def delete_key(iam_username):\n try:\n previous_secret_value = secretmanager.get_secret_value(\n SecretId=iam_username, VersionStage=\"AWSPREVIOUS\"\n )\n previous_secret_string = json.loads(previous_secret_value[\"SecretString\"])\n previous_access_key_id = previous_secret_string[\"AccessKey\"]\n print(f\"previous_access_key_id: {previous_access_key_id}\")\n keylist = iam.list_access_keys(UserName=iam_username)[\"AccessKeyMetadata\"]\n\n for key in keylist:\n key_status = key[\"Status\"]\n key_id = key[\"AccessKeyId\"]\n\n print(f\"key id: {key_id}\")\n print(f\"key status: {key_status}\")\n\n if key_status == \"Inactive\":\n if previous_access_key_id == key_id:\n print(\"Deleting previous access key from IAM user\")\n iam.delete_access_key(UserName=iam_username, AccessKeyId=key_id)\n print(\n f\"Previous access key: \"\n f\"{key_id} has been deleted for user \"\n f\" {iam_username}.\"\n )\n return {\"status\": 200}\n else:\n print(\n \"secret manager previous value doesn't match with \"\n \"inactive IAM key value\"\n )\n return {\"status\": 400}\n else:\n print(\"previous key is still active\")\n return {\"status\": 200}\n except ClientError as e:\n print(e)\n return {\"status\": 500}",
"def _delete_external_keypair():\n\n if not utils.use_external_resource(ctx.node.properties):\n return False\n\n ctx.logger.info('External resource. Not deleting keypair.')\n\n utils.unassign_runtime_properties_from_resource(RUNTIME_PROPERTIES,\n ctx.instance)\n return True",
"async def delete(self, key: str):",
"def delete(self, key):",
"def test_aws_service_api_keypair_import_post(self):\n pass",
"def key_delete(self, name=None):\n\n cloud = self.cloud\n Console.msg(f\"delete the key: {name} -> {cloud}\")\n r = self.cloudman.delete_keypair(name)\n\n return r",
"def delete_api_key(api_key):\n api.delete(api_key)",
"def delete_key_pair(self, key_name):\r\n params = {'KeyName':key_name}\r\n return self.get_status('DeleteKeyPair', params, verb='POST')",
"def test_delete_bucket(self):\n pass",
"def _delete_key(self):\n return self.connection.delete(self.key)",
"def delete(id):\n repo = KeyRepository(getDb())\n try:\n if repo.delete(id):\n return '', 204\n else:\n return {'message': 'Key has not been found'}, 404\n except DBException:\n return {'message': 'Key id is invalid'}, 400",
"def key_delete(self, name=None):\n raise NotImplementedError",
"def delete(self,key):\n\n pass",
"def test_delete_empty(empty_bucket): # pylint: disable=redefined-outer-name\n with pytest.raises(KeyError):\n empty_bucket.delete(\"key 1\")",
"def delete_key(self, api_key):\n\t\ttry:\n\t\t\tvalidation.required(api_key, 'api_key')\n\t\texcept errors.ValidationError, ex:\n\t\t\tself.log.warning(\"Validation failure: %s\" % str(ex))\n\t\t\traise errors.APIError, str(ex)\n\n\t\treturn self.app.db.query(\n\t\t\t\"\"\"\n\t\t\tdelete from\n\t\t\t\tapi_keys\n\t\t\twhere\n\t\t\t\tapi_key = %s\n\t\t\t\"\"\", (api_key, ))",
"def s3_delete_data(self):\n\n self.k.delete()",
"def delete(self, key):\n pass"
] | [
"0.79903865",
"0.76005006",
"0.74329925",
"0.73833454",
"0.7266609",
"0.72653955",
"0.7207944",
"0.71949244",
"0.7136774",
"0.7025226",
"0.7004992",
"0.69196093",
"0.67822355",
"0.67449534",
"0.66366833",
"0.6636205",
"0.6635616",
"0.6596003",
"0.65895045",
"0.6583519",
"0.6565412",
"0.6563189",
"0.65067595",
"0.6492081",
"0.6489334",
"0.6473586",
"0.64730394",
"0.6470672",
"0.6470079",
"0.64628154"
] | 0.9563869 | 0 |
Test case for aws_service_api_keypair_generate_post | def test_aws_service_api_keypair_generate_post(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_aws_service_api_keypair_import_post(self):\n pass",
"def test_aws_service_api_keypair_get(self):\n pass",
"def test_create_api_key(self):\n pass",
"def test_aws_service_api_keypair_delete(self):\n pass",
"def create_key ():",
"def test_aws_service_api_keypairs_get(self):\n pass",
"def temp_api_key(cloud):\n payload = {'name': 'pelion_e2e_dynamic_api_key'}\n r = cloud.account.create_api_key(payload, expected_status_code=201)\n resp = r.json()\n\n log.info('Created new developer api key for test case, id: {}'.format(resp['id']))\n\n yield resp\n\n log.info('Cleaning out the generated test case developer api key, id: {}'.format(resp['id']))\n cloud.account.delete_api_key(resp['id'], expected_status_code=204)",
"def test_generate_api_key():\n\n key = auth.generate_api_key() # returns a NamedTuple with api_key and hashed_key\n hashed_api_key = sha256(key.api_key.encode('utf-8')).hexdigest()\n assert hashed_api_key == key.hashed_key",
"def create_keypair(address_type, addresses_path, address_prefix, name):\n vkey_file = get_vkey_file(addresses_path, address_prefix, name)\n skey_file = get_skey_file(addresses_path, address_prefix, name)\n\n if(path.exists(vkey_file)) :\n print(address_prefix, \"key pair already exists for\", name)\n return\n \n makedirs(path.dirname(vkey_file), mode=0o777, exist_ok=True)\n\n run_params = ['cardano-cli', address_type, 'key-gen', '--verification-key-file', vkey_file, '--signing-key-file', skey_file]\n subprocess_run(run_params, capture_output=False, text=True)\n return",
"def test_create_digital_access_key(self):\n pass",
"def test_create_key():\n\n assert symmetric.create_key() != \"\"",
"def create_key(iam_username):\n\n try:\n response = iam.create_access_key(UserName=iam_username)\n access_key = response[\"AccessKey\"][\"AccessKeyId\"]\n secret_key = response[\"AccessKey\"][\"SecretAccessKey\"]\n json_data = json.dumps({\"AccessKey\": access_key, \"SecretKey\": secret_key})\n secretmanager.put_secret_value(SecretId=iam_username, SecretString=json_data)\n\n \n emailmsg = (\n \"Hello,\\n\\n\"\n \"A new access key has been created for key rotation. \\n\\n\"\n f\"Access Key Id: {access_key}\\n\"\n f\"Secrets Manager Secret Id: {iam_username}\"\n )\n\n emailmsg = (\n f\"{emailmsg}\\n\\n\"\n f\"Please obtain the new access key information from \"\n \"secrets manager using the secret Id provided above in \"\n f\"{AWS_REGION_NAME} and update your application within 14 days \"\n \"to avoid interruption.\\n\"\n )\n\n sns.publish(\n TopicArn=SNS_TOPIC_ARN,\n Message=emailmsg,\n Subject=f\"AWS Access Key Rotation: New key is available for \"\n f\"{iam_username}\",\n )\n print(f\"New access key has been created for {iam_username}\")\n return {\"status\": 200}\n except ClientError as e:\n print(e)\n return {\"status\": 500}",
"def test_generate_key_pair(self):\n with patch('iceit.crypto.gnupg.GPG') as mock_gpg:\n mock_key = Mock()\n mock_key.fingerprint = 'fake-fingerprint'\n mock_gpg.gen_key.return_value = mock_key\n\n mock_gpg.return_value = mock_gpg\n encryptor = self.test_init()\n fake_key = encryptor.generate_key_pair(key_type=\"RSA\", length=4096, options={\n 'name_real': 'Fake Name', 'name_email': '[email protected]', 'name_comment': 'Fake comment'})\n\n self.assertEqual(mock_gpg.gen_key_input.call_count, 1)\n self.assertEqual(fake_key, mock_key.fingerprint)",
"def test_create_keypair_only(self):\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)",
"def generate_api_key(self, **kwargs):\n\n all_params = []\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method generate_api_key\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys/_generate'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='ApiKey',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def create_key_pair(self, keypair, **kwargs):\n\n if not isinstance(keypair, models.CreateKeyPairReq):\n raise HuaweiCloudSDKException(\n message=\"The datatype of parameter(keypair) \"\n \"is not CreateKeyPairReq\")\n body_params = keypair.serialize()\n\n header_params = {}\n header_params['Accept'] = util.select_header_accept(\n ['application/xml', 'application/json'])\n\n header_params['Content-Type'] = util.select_header_content_type(\n ['application/json', 'application/xml'])\n\n return_code, return_data, _ = self.api_client.handle_raw_request(\n 'compute', 'POST',\n '/os-keypairs',\n headers=header_params,\n body=body_params,\n timeout=kwargs.get('_request_timeout', None),\n _preload_content=kwargs.get('_preload_content', True))\n\n if return_data is not None:\n return_data = json.loads(return_data)\n else:\n return_data = {}\n if return_code not in [200, 201]:\n raise HuaweiCloudSDKException(\n return_code,\n \"Run create_key_pair failed, \"\n \"message=%s\" % return_data.get(\"message\"))\n return models.CreateKeyPairResp().deserialize(return_data)",
"def newKeyGenerate():\n generate()\n return '', 204",
"def test_generate_key(self): \n k = Key().generate()\n self.assertRegex(k, \"[a-zA-Z0-9+\\/]+={0,2}\")",
"def create_key(self, owner, app_name, email, url):\n\t\ttry:\n\t\t\tvalidation.required(owner, 'owner')\n\t\t\tvalidation.required(app_name, 'app_name')\n\t\t\tvalidation.email(email)\n\t\texcept errors.ValidationError, ex:\n\t\t\tself.log.warning(\"Validation failure: %s\" % str(ex))\n\t\t\traise errors.APIError, str(ex)\n\n\t\t@stack\n\t\tdef act(count, api_key):\n\t\t\t@stack\n\t\t\tdef handle_count_result(result):\n\t\t\t\treturn result['count']\n\n\t\t\tif count:\n\t\t\t\t## self.log.debug(\"api_key => [%s] is a duplicate\" % api_key)\n\t\t\t\tt = datetime.datetime.now()\n\t\t\t\tapi_key = md5.md5(\"%s%s%s%s%s%d\" % (owner, app_name, email, url, t.strftime(\"%Y%m%d%H%M%S\"), t.microsecond)).hexdigest()\n\t\t\t\tself.log.warning(\"Checking api key: %s\" % api_key)\n\t\t\t\td2 = self.app.db.query(\n\t\t\t\t\t\"\"\"\n\t\t\t\t\tSELECT\n\t\t\t\t\t\tcount(*) AS count\n\t\t\t\t\tFROM\n\t\t\t\t\t\tapi_keys\n\t\t\t\t\tWHERE\n\t\t\t\t\t\tapi_key = %s\n\t\t\t\t\t\"\"\", (api_key, ), single_row=True)\n\t\t\t\td2.addCallback(handle_count_result)\n\t\t\t\td2.addCallback(act, api_key)\n\t\t\t\treturn d2\n\t\t\telse:\n\t\t\t\t## self.log.debug(\"api_key => [%s] is NOT a duplicate\" % api_key)\n\t\t\t\td2 = self.app.db.runOperation(\n\t\t\t\t\t\"\"\"\n\t\t\t\t\tINSERT INTO api_keys (\n\t\t\t\t\t\tapi_key,\n\t\t\t\t\t\towner,\n\t\t\t\t\t\tapp_name,\n\t\t\t\t\t\temail,\n\t\t\t\t\t\turl\n\t\t\t\t\t) VALUES (\n\t\t\t\t\t\t%(api_key)s,\n\t\t\t\t\t\t%(owner)s,\n\t\t\t\t\t\t%(app_name)s,\n\t\t\t\t\t\t%(email)s,\n\t\t\t\t\t\t%(url)s\n\t\t\t\t\t)\n\t\t\t\t\t\"\"\", {'api_key': api_key, 'owner': owner, 'app_name': app_name, 'email': email, 'url': url})\n\n\t\t\t\td2.addCallback(lambda _: api_key)\n\t\t\t\treturn d2\n\n\t\td = Deferred()\n\t\td.addCallback(act, \"\")\n\t\td.callback(1)\n\t\treturn d",
"def create_keypair(econfig_file=None, region=None, keyname=\"bcbio\"):\n import boto\n import boto.ec2\n if econfig_file:\n keypair_dir = os.path.dirname(econfig_file).replace(\"elasticluster\", \"aws_keypairs\")\n else:\n keypair_dir = os.path.join(os.getcwd(), \"aws_keypairs\")\n if not os.path.exists(keypair_dir):\n os.makedirs(keypair_dir)\n private_key = os.path.join(os.path.join(keypair_dir, keyname))\n new_key = not os.path.exists(private_key)\n if new_key:\n cmd = [\"ssh-keygen\", \"-t\", \"rsa\", \"-N\", \"\", \"-f\", private_key, \"-C\", \"bcbio_aws_keypair\"]\n subprocess.check_call(cmd)\n public_key = private_key + \".pub\"\n if region:\n ec2 = boto.ec2.connect_to_region(region)\n else:\n ec2 = boto.connect_ec2()\n key = ec2.get_key_pair(keyname)\n if key and new_key:\n print(\"Non matching key %s found in AWS, removing.\" % keyname)\n ec2.delete_key_pair(keyname)\n key = None\n if not key:\n print(\"Key %s not found in AWS, importing created key\" % keyname)\n with open(public_key) as in_handle:\n body = in_handle.read()\n try:\n ec2.import_key_pair(keyname, body)\n except TypeError as e:\n body = body.encode('utf-8')\n ec2.import_key_pair(keyname, body)\n return {\"user_key_name\": keyname, \"user_key_private\": private_key,\n \"user_key_public\": public_key}",
"def testCreateSshKeyPairKeyAreCreated(self):\n public_key = \"/fake/public_key\"\n private_key = \"/fake/private_key\"\n self.Patch(os.path, \"exists\", return_value=False)\n self.Patch(os, \"makedirs\", return_value=True)\n self.Patch(subprocess, \"check_call\")\n self.Patch(os, \"rename\")\n utils.CreateSshKeyPairIfNotExist(private_key, public_key)\n self.assertEqual(subprocess.check_call.call_count, 1) #pylint: disable=no-member\n subprocess.check_call.assert_called_with( #pylint: disable=no-member\n utils.SSH_KEYGEN_CMD +\n [\"-C\", getpass.getuser(), \"-f\", private_key],\n stdout=mock.ANY,\n stderr=mock.ANY)",
"def create_key_pair(self, key_name):\r\n params = {'KeyName':key_name}\r\n return self.get_object('CreateKeyPair', params, KeyPair, verb='POST')",
"def test_create_keypair_save_both(self):\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path,\n private_filepath=priv_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)\n\n self.assertTrue(os.path.isfile(priv_file_path))",
"def create_api_keys(self, **kwargs):\n\n all_params = ['api_key']\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method create_api_keys\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/apikeys'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'api_key' in params:\n body_params = params['api_key']\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json'])\n\n # Authentication setting\n auth_settings = ['privileges', 'apikey']\n\n response = self.api_client.call_api(resource_path, 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='ApiKeyWithPrivileges',\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def create_keypair(self, username):\n msg = \"create_keypair not implemented\"\n raise NotImplementedError(msg)",
"def create_key_pair(self) -> Keypair:\n res = self.context.post(\n \"/dsum/create_key_pair\", None, None, \"DSum: failed creating a Curve 25519 Keypair\")\n return Keypair(res['private_key_id'], res['public_key_id'])",
"def create_key_pair(self, key_name):\n response = key_pair.create_key_pair(self.url, self.verb, self.headers,\n self.version, key_name)\n if response is not None :\n res = CreateKeyPairResponse.CreateKeyPairResponse()\n parseString(str(response.text), res)\n return res\n else :\n return None",
"def create(self, name, public_key=None):\n data = {\n \"keypair\": {\n \"name\": name\n }\n }\n if public_key is not None:\n data['keypair']['public_key'] = public_key\n \n path = '/os-keypairs'\n res = self.client.call(path, 'POST', data=json.dumps(data), \n token=self.manager.identity.token)\n self.logger.debug('Create/import openstack keypair: %s' % truncate(res))\n return res[0]['keypair']",
"def test_add_public_key(self):\n body = PublicKey()\n response = self.client.open(\n '/v0_9_1/PublicKeys',\n method='POST',\n data=json.dumps(body),\n content_type='application/json')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))",
"def test_create_keypair_save_pub_only(self):\n self.keypair_creator = create_keypairs.OpenStackKeypair(self.os_creds,\n create_keypairs.KeypairSettings(name=keypair_name,\n public_filepath=pub_file_path))\n self.keypair_creator.create()\n\n keypair = nova_utils.keypair_exists(self.keypair_creator.nova, self.keypair_creator.keypair)\n self.assertEquals(self.keypair_creator.keypair, keypair)\n\n file_key = open(os.path.expanduser(pub_file_path)).read()\n self.assertEquals(self.keypair_creator.keypair.public_key, file_key)"
] | [
"0.7824777",
"0.7448113",
"0.73979497",
"0.7172839",
"0.71695393",
"0.70038456",
"0.6849004",
"0.6799752",
"0.67478347",
"0.6668105",
"0.66087174",
"0.65747166",
"0.6556049",
"0.6553861",
"0.64367193",
"0.6361898",
"0.6323369",
"0.63042235",
"0.626602",
"0.6251426",
"0.6234759",
"0.60945827",
"0.6056475",
"0.6053107",
"0.60269576",
"0.600771",
"0.6004871",
"0.59694594",
"0.595761",
"0.5908714"
] | 0.9603776 | 0 |