Dataset columns:
  function    string (lengths 11 to 56k)
  repo_name   string (lengths 5 to 60)
  features    list
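Each row below is a (function, repo_name, features) triple following this schema. A minimal sketch of iterating such rows, assuming the dump is stored as JSON Lines (the file name is hypothetical):

import json

# Walk the dump row by row; each record carries one source function,
# its repository of origin, and a small numeric feature vector.
with open('code_dataset.jsonl') as fh:  # hypothetical file name
    for line in fh:
        row = json.loads(line)
        print(row['repo_name'], len(row['function']), row['features'])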
def current(self, request, *args, **kwargs):
    changeset = ChangeSet.get_for_request(request)
    return Response({
        'direct_editing': changeset.direct_editing,
        'changeset': changeset.serialize() if changeset.pk else None,
    })
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def direct_editing(self, request, *args, **kwargs):
    # django-rest-framework doesn't automatically do this for logged out requests
    SessionAuthentication().enforce_csrf(request)
    if not ChangeSet.can_direct_edit(request):
        raise PermissionDenied(_('You don\'t have the permission to activate direct editing.'))
    changeset = ChangeSet.get_for_request(request)
    if changeset.pk is not None:
        raise PermissionDenied(_('You cannot activate direct editing if you have an active changeset.'))
    request.session['direct_editing'] = True
    return Response({
        'success': True,
    })
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def deactivate(self, request, *args, **kwargs):
    # django-rest-framework doesn't automatically do this for logged out requests
    SessionAuthentication().enforce_csrf(request)
    request.session.pop('changeset', None)
    request.session['direct_editing'] = False
    return Response({
        'success': True,
    })
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
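The direct_editing and deactivate actions above share one non-obvious pattern: calling DRF's SessionAuthentication().enforce_csrf by hand, because DRF only runs its CSRF check for requests it authenticates via a logged-in session. A minimal sketch of that pattern in isolation (the viewset name and session key are hypothetical):

from rest_framework.authentication import SessionAuthentication
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet

class SessionFlagViewSet(ViewSet):
    """Hypothetical viewset isolating the manual CSRF check used above."""

    @action(detail=False, methods=['post'])
    def set_flag(self, request):
        # Endpoints that write session state for anonymous users must invoke
        # the check themselves; this raises PermissionDenied on a missing or
        # invalid CSRF token.
        SessionAuthentication().enforce_csrf(request)
        request.session['flag'] = True
        return Response({'success': True})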
def changes(self, request, *args, **kwargs):
    changeset = self.get_object()
    changeset.fill_changes_cache()
    return Response([obj.serialize() for obj in changeset.iter_changed_objects()])
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def activate(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_activate(request):
            raise PermissionDenied(_('You cannot activate this change set.'))
        changeset.activate(request)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def edit(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_edit(request):
            raise PermissionDenied(_('You cannot edit this change set.'))
        form = ChangeSetForm(instance=changeset, data=get_api_post_data(request))
        if not form.is_valid():
            raise ParseError(form.errors)
        changeset = form.instance
        update = changeset.updates.create(user=request.user, title=changeset.title,
                                          description=changeset.description)
        changeset.last_update = update
        changeset.save()
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def restore_object(self, request, *args, **kwargs):
    data = get_api_post_data(request)
    if 'id' not in data:
        raise ParseError('Missing id.')
    restore_id = data['id']
    if isinstance(restore_id, str) and restore_id.isdigit():
        restore_id = int(restore_id)
    if not isinstance(restore_id, int):
        raise ParseError('id needs to be an integer.')
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_edit(request):
            raise PermissionDenied(_('You cannot edit changes on this change set.'))
        try:
            changed_object = changeset.changed_objects_set.get(pk=restore_id)
        except Exception:
            raise NotFound('could not find object.')
        try:
            changed_object.restore()
        except PermissionError:
            raise PermissionDenied(_('You cannot restore this object, because it depends on '
                                     'a deleted object or it would violate a unique constraint.'))
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def propose(self, request, *args, **kwargs):
    if not request.user.is_authenticated:
        raise PermissionDenied(_('You need to log in to propose changes.'))
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.title or not changeset.description:
            raise PermissionDenied(_('You need to add a title and a description to propose this change set.'))
        if not changeset.can_propose(request):
            raise PermissionDenied(_('You cannot propose this change set.'))
        changeset.propose(request.user)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def unpropose(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_unpropose(request):
            raise PermissionDenied(_('You cannot unpropose this change set.'))
        changeset.unpropose(request.user)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def review(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_start_review(request):
            raise PermissionDenied(_('You cannot review these changes.'))
        changeset.start_review(request.user)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def reject(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_end_review(request):
            raise PermissionDenied(_('You cannot reject these changes.'))
        form = RejectForm(get_api_post_data(request))
        if not form.is_valid():
            raise ParseError(form.errors)
        changeset.reject(request.user, form.cleaned_data['comment'], form.cleaned_data['final'])
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def unreject(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_unreject(request):
            raise PermissionDenied(_('You cannot unreject these changes.'))
        changeset.unreject(request.user)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def apply(self, request, *args, **kwargs):
    changeset = self.get_object()
    with changeset.lock_to_edit(request) as changeset:
        if not changeset.can_end_review(request):
            raise PermissionDenied(_('You cannot accept and apply these changes.'))
        changeset.apply(request.user)
    return Response({'success': True})
c3nav/c3nav
[ 137, 31, 137, 17, 1461327231 ]
def __init__(self, *, detectors, rate, fmin, fknee, alpha, NET):
    self._rate = rate
    self._fmin = fmin
    self._fknee = fknee
    self._alpha = alpha
    self._NET = NET

    for d in detectors:
        if self._alpha[d] < 0.0:
            raise RuntimeError(
                "alpha exponents should be positive in this formalism")

    freqs = {}
    psds = {}

    last_nyquist = None
    for d in detectors:
        if (self._fknee[d] > 0.0) and (self._fknee[d] < self._fmin[d]):
            raise RuntimeError("If knee frequency is non-zero, it must "
                               "be greater than f_min")

        nyquist = self._rate[d] / 2.0
        if nyquist != last_nyquist:
            tempfreq = []

            # this starting point corresponds to a high-pass of
            # 30 years, so should be low enough for any interpolation!
            cur = 1.0e-9

            # this value seems to provide a good density of points
            # in log space.
            while cur < nyquist:
                tempfreq.append(cur)
                cur *= 1.4

            # put a final point at Nyquist
            tempfreq.append(nyquist)
            tempfreq = np.array(tempfreq, dtype=np.float64)
            last_nyquist = nyquist

        freqs[d] = tempfreq

        if self._fknee[d] > 0.0:
            ktemp = np.power(self._fknee[d], self._alpha[d])
            mtemp = np.power(self._fmin[d], self._alpha[d])
            temp = np.power(freqs[d], self._alpha[d])
            psds[d] = (temp + ktemp) / (temp + mtemp)
            psds[d] *= (self._NET[d] * self._NET[d])
        else:
            psds[d] = np.ones_like(freqs[d])
            psds[d] *= (self._NET[d] * self._NET[d])

    # call the parent class constructor to store the psds
    super().__init__(detectors=detectors, freqs=freqs, psds=psds)
tskisner/pytoast
[ 29, 32, 29, 55, 1428597089 ]
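The constructor above tabulates the analytic noise model PSD(f) = NET**2 * (f**alpha + fknee**alpha) / (f**alpha + fmin**alpha) on a log-spaced frequency grid up to Nyquist. A standalone sketch with hypothetical single-detector parameter values:

import numpy as np

# Hypothetical parameters for the 1/f-plus-white-noise model above.
rate, fmin, fknee, alpha, NET = 100.0, 1e-5, 0.1, 1.0, 50e-6

# Log-spaced grid up to Nyquist, mirroring the "cur *= 1.4" loop.
freqs = []
cur = 1.0e-9
while cur < rate / 2.0:
    freqs.append(cur)
    cur *= 1.4
freqs.append(rate / 2.0)
freqs = np.array(freqs)

# PSD(f) = NET^2 * (f^alpha + fknee^alpha) / (f^alpha + fmin^alpha)
psd = NET**2 * (freqs**alpha + fknee**alpha) / (freqs**alpha + fmin**alpha)
print(psd[0], psd[-1])  # large at low f, approaching NET^2 near Nyquist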
def fmin(self, det):
    """(float): the minimum frequency in Hz, used as a high pass.
    """
    return self._fmin[det]
tskisner/pytoast
[ 29, 32, 29, 55, 1428597089 ]
def alpha(self, det):
    """(float): the (positive!) slope exponent.
    """
    return self._alpha[det]
tskisner/pytoast
[ 29, 32, 29, 55, 1428597089 ]
def setUp(self):
  story_set = story.StorySet(base_dir=os.path.dirname(__file__))
  story_set.AddStory(
      page_module.Page('http://www.bar.com/', story_set, story_set.base_dir,
                       name='http://www.bar.com/'))
  story_set.AddStory(
      page_module.Page('http://www.baz.com/', story_set, story_set.base_dir,
                       name='http://www.baz.com/'))
  story_set.AddStory(
      page_module.Page('http://www.foo.com/', story_set, story_set.base_dir,
                       name='http://www.foo.com/'))
  self.story_set = story_set
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def pages(self):
  return self.story_set.stories
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testRepr(self):
  page0 = self.pages[0]
  v = scalar.ScalarValue(page0, 'x', 'unit', 3, important=True,
                         description='desc', tir_label='my_ir',
                         improvement_direction=improvement_direction.DOWN)

  expected = ('ScalarValue(http://www.bar.com/, x, unit, 3, important=True, '
              'description=desc, tir_label=my_ir, '
              'improvement_direction=down, grouping_keys={})')

  self.assertEquals(expected, str(v))
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testScalarSamePageMerging(self):
  page0 = self.pages[0]
  v0 = scalar.ScalarValue(page0, 'x', 'unit', 1,
                          description='important metric',
                          improvement_direction=improvement_direction.UP)
  v1 = scalar.ScalarValue(page0, 'x', 'unit', 2,
                          description='important metric',
                          improvement_direction=improvement_direction.UP)
  self.assertTrue(v1.IsMergableWith(v0))

  vM = scalar.ScalarValue.MergeLikeValuesFromSamePage([v0, v1])
  self.assertEquals(page0, vM.page)
  self.assertEquals('x', vM.name)
  self.assertEquals('unit', vM.units)
  self.assertEquals('important metric', vM.description)
  self.assertEquals(True, vM.important)
  self.assertEquals([1, 2], vM.values)
  self.assertEquals(improvement_direction.UP, vM.improvement_direction)
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testScalarWithNoneValueMerging(self):
  page0 = self.pages[0]
  v0 = scalar.ScalarValue(
      page0, 'x', 'unit', 1,
      improvement_direction=improvement_direction.DOWN)
  v1 = scalar.ScalarValue(page0, 'x', 'unit', None, none_value_reason='n',
                          improvement_direction=improvement_direction.DOWN)
  self.assertTrue(v1.IsMergableWith(v0))

  vM = scalar.ScalarValue.MergeLikeValuesFromSamePage([v0, v1])
  self.assertEquals(None, vM.values)
  expected_none_value_reason = (
      'Merging values containing a None value results in a None value. '
      'None values: [ScalarValue(http://www.bar.com/, x, unit, None, '
      'important=True, description=None, tir_label=None, '
      'improvement_direction=down, grouping_keys={})]')
  self.assertEquals(expected_none_value_reason, vM.none_value_reason)
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testScalarWithNoneReasonMustHaveNoneValue(self):
  page0 = self.pages[0]
  self.assertRaises(none_values.ValueMustHaveNoneValue,
                    lambda: scalar.ScalarValue(
                        page0, 'x', 'unit', 1, none_value_reason='n',
                        improvement_direction=improvement_direction.UP))
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testNoneValueAsDict(self):
  v = scalar.ScalarValue(None, 'x', 'unit', None, important=False,
                         none_value_reason='n',
                         improvement_direction=improvement_direction.DOWN)
  d = v.AsDictWithoutBaseClassEntries()

  self.assertEquals(d, {'value': None, 'none_value_reason': 'n'})
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def testFromDictFloat(self):
  d = {
      'type': 'scalar',
      'name': 'x',
      'units': 'unit',
      'value': 42.4,
      'improvement_direction': improvement_direction.UP,
  }
  v = value.Value.FromDict(d, {})

  self.assertTrue(isinstance(v, scalar.ScalarValue))
  self.assertEquals(v.value, 42.4)
catapult-project/catapult-csm
[ 1, 7, 1, 3, 1494852048 ]
def log(message: str, color: str = "blue") -> None:
    _log(message, color)
ufal/neuralmonkey
[ 407, 105, 407, 119, 1466074003 ]
def __init__(self, **kwargs):
    """ Initializes a VNFThresholdPolicy instance

        Notes:
            You can specify all parameters while calling this method.
            A special argument named `data` will enable you to load the
            object from a Python dictionary

        Examples:
            >>> vnfthresholdpolicy = NUVNFThresholdPolicy(id=u'xxxx-xxx-xxx-xxx', name=u'VNFThresholdPolicy')
            >>> vnfthresholdpolicy = NUVNFThresholdPolicy(data=my_dict)
    """

    super(NUVNFThresholdPolicy, self).__init__()

    # Read/Write Attributes
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def cpu_threshold(self):
    """ Get cpu_threshold value.

        Notes:
            Threshold for CPU usage

    """
    return self._cpu_threshold
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def cpu_threshold(self, value):
    """ Set cpu_threshold value.

        Notes:
            Threshold for CPU usage

    """
    self._cpu_threshold = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def name(self):
    """ Get name value.

        Notes:
            Name of VNF agent policy

    """
    return self._name
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def name(self, value):
    """ Set name value.

        Notes:
            Name of VNF agent policy

    """
    self._name = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def last_updated_by(self):
    """ Get last_updated_by value.

        Notes:
            ID of the user who last updated the object.

    """
    return self._last_updated_by
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def last_updated_by(self, value):
    """ Set last_updated_by value.

        Notes:
            ID of the user who last updated the object.

    """
    self._last_updated_by = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def last_updated_date(self):
    """ Get last_updated_date value.

        Notes:
            Time stamp when this object was last updated.

    """
    return self._last_updated_date
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def last_updated_date(self, value):
    """ Set last_updated_date value.

        Notes:
            Time stamp when this object was last updated.

    """
    self._last_updated_date = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def action(self):
    """ Get action value.

        Notes:
            Action to be taken on threshold crossover

    """
    return self._action
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def action(self, value):
    """ Set action value.

        Notes:
            Action to be taken on threshold crossover

    """
    self._action = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def memory_threshold(self):
    """ Get memory_threshold value.

        Notes:
            Threshold for memory usage

    """
    return self._memory_threshold
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def memory_threshold(self, value):
    """ Set memory_threshold value.

        Notes:
            Threshold for memory usage

    """
    self._memory_threshold = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def description(self):
    """ Get description value.

        Notes:
            Description of VNF agent policy

    """
    return self._description
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def description(self, value):
    """ Set description value.

        Notes:
            Description of VNF agent policy

    """
    self._description = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def min_occurrence(self):
    """ Get min_occurrence value.

        Notes:
            Minimum number of threshold crossover occurrences during the
            monitoring interval before taking the specified action

    """
    return self._min_occurrence
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def min_occurrence(self, value):
    """ Set min_occurrence value.

        Notes:
            Minimum number of threshold crossover occurrences during the
            monitoring interval before taking the specified action

    """
    self._min_occurrence = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def embedded_metadata(self):
    """ Get embedded_metadata value.

        Notes:
            Metadata objects associated with this entity. This will contain
            a list of Metadata objects if the API request is made using the
            special flag to enable the embedded Metadata feature. Only a
            maximum number of Metadata objects is returned, based on the
            value set in the system configuration.

    """
    return self._embedded_metadata
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def embedded_metadata(self, value):
    """ Set embedded_metadata value.

        Notes:
            Metadata objects associated with this entity. This will contain
            a list of Metadata objects if the API request is made using the
            special flag to enable the embedded Metadata feature. Only a
            maximum number of Metadata objects is returned, based on the
            value set in the system configuration.

    """
    self._embedded_metadata = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def entity_scope(self):
    """ Get entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

    """
    return self._entity_scope
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def entity_scope(self, value):
    """ Set entity_scope value.

        Notes:
            Specify if scope of entity is Data center or Enterprise level

    """
    self._entity_scope = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def monit_interval(self):
    """ Get monit_interval value.

        Notes:
            Monitoring interval (minutes) for threshold crossover
            occurrences to be considered

    """
    return self._monit_interval
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def monit_interval(self, value):
    """ Set monit_interval value.

        Notes:
            Monitoring interval (minutes) for threshold crossover
            occurrences to be considered

    """
    self._monit_interval = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def creation_date(self):
    """ Get creation_date value.

        Notes:
            Time stamp when this object was created.

    """
    return self._creation_date
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def creation_date(self, value):
    """ Set creation_date value.

        Notes:
            Time stamp when this object was created.

    """
    self._creation_date = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def assoc_entity_type(self):
    """ Get assoc_entity_type value.

        Notes:
            Type of the entity to which the Metadata is associated.

    """
    return self._assoc_entity_type
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def assoc_entity_type(self, value):
    """ Set assoc_entity_type value.

        Notes:
            Type of the entity to which the Metadata is associated.

    """
    self._assoc_entity_type = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def storage_threshold(self):
    """ Get storage_threshold value.

        Notes:
            Threshold for storage usage

    """
    return self._storage_threshold
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def storage_threshold(self, value):
    """ Set storage_threshold value.

        Notes:
            Threshold for storage usage

    """
    self._storage_threshold = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def owner(self):
    """ Get owner value.

        Notes:
            Identifies the user that has created this object.

    """
    return self._owner
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def owner(self, value):
    """ Set owner value.

        Notes:
            Identifies the user that has created this object.

    """
    self._owner = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def external_id(self):
    """ Get external_id value.

        Notes:
            External object ID. Used for integration with third party systems

    """
    return self._external_id
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def external_id(self, value):
    """ Set external_id value.

        Notes:
            External object ID. Used for integration with third party systems

    """
    self._external_id = value
nuagenetworks/vspk-python
[ 19, 18, 19, 7, 1457133058 ]
def __init__ ( self                  ,
               chain                 ,   ## input TChain/TTree
               selection  = {}       ,   ## selection/cuts
               save_vars  = ()       ,   ## list of variables to save
               new_vars   = {}       ,   ## new variables
               no_vars    = ()       ,   ## exclude these variables
               ##
               output     = ''       ,   ## output file name
               name       = ''       ,   ## the name
               addselvars = False    ,   ## add variables from selections?
               tmp_keep   = False    ,   ## keep the temporary file
               silent     = False    ) : ## silent processing
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def __str__ ( self ) :
    return self.__report
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def output ( self ) :
    """``output'' : the output file name"""
    return self.__output
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def chain ( self ) :
    """``chain'' : the reduced chain/tree (same as tree)"""
    return self.__chain
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def name ( self ) :
    """``name'' : the output chain name"""
    return self.__name
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def tree ( self ) :
    """``tree'' : the reduced chain/tree (same as chain)"""
    return self.__chain
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def table ( self ) :
    """``table'' : get the statistics as a table"""
    return self.__table
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def report ( self ) :
    """``report'' : get the statistics report"""
    return self.__report
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
def reduce ( tree               ,
             selection          ,
             save_vars  = ()    ,
             new_vars   = {}    ,
             no_vars    = ()    ,
             output     = ''    ,
             name       = ''    ,
             addselvars = False ,
             silent     = False ) :
OstapHEP/ostap
[ 15, 10, 15, 9, 1486653996 ]
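Given the reduce signature above, a minimal usage sketch; the file, tree, branch, and cut names are all hypothetical, and it assumes reduce is imported from the ostap module that defines it:

import ROOT

chain = ROOT.TChain ( 'MyTree' )   ## hypothetical tree name
chain.Add ( 'data.root' )          ## hypothetical input file

## keep events passing the cut, save two branches, add one derived variable
reduced = reduce ( chain ,
                   selection = '0<pt && pt<100'     ,
                   save_vars = ( 'pt' , 'eta' )     ,
                   new_vars  = { 'pt2' : 'pt*pt' }  ,
                   output    = 'reduced.root'       ,
                   silent    = True                 )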
def test_loc_setitem_multiindex_columns(self, consolidate):
    # GH#18415 Setting values in a single column preserves dtype,
    # while setting them in multiple columns did unwanted cast.

    # Note that A here has 2 blocks, below we do the same thing
    # with a consolidated frame.
    A = DataFrame(np.zeros((6, 5), dtype=np.float32))
    A = pd.concat([A, A], axis=1, keys=[1, 2])
    if consolidate:
        A = A._consolidate()

    A.loc[2:3, (1, slice(2, 3))] = np.ones((2, 2), dtype=np.float32)
    assert (A.dtypes == np.float32).all()

    A.loc[0:5, (1, slice(2, 3))] = np.ones((6, 2), dtype=np.float32)
    assert (A.dtypes == np.float32).all()

    A.loc[:, (1, slice(2, 3))] = np.ones((6, 2), dtype=np.float32)
    assert (A.dtypes == np.float32).all()

    # TODO: i think this isn't about MultiIndex and could be done with iloc?
pandas-dev/pandas
[ 37157, 15883, 37157, 3678, 1282613853 ]
def test_6942(indexer_al):
    # check that the .at __setitem__ after setting "Live" actually sets the data
    start = Timestamp("2014-04-01")
    t1 = Timestamp("2014-04-23 12:42:38.883082")
    t2 = Timestamp("2014-04-24 01:33:30.040039")

    dti = date_range(start, periods=1)
    orig = DataFrame(index=dti, columns=["timenow", "Live"])

    df = orig.copy()
    indexer_al(df)[start, "timenow"] = t1

    df["Live"] = True

    df.at[start, "timenow"] = t2
    assert df.iloc[0, 0] == t2
pandas-dev/pandas
[ 37157, 15883, 37157, 3678, 1282613853 ]
def test_15231():
    df = DataFrame([[1, 2], [3, 4]], columns=["a", "b"])
    df.loc[2] = Series({"a": 5, "b": 6})
    assert (df.dtypes == np.int64).all()

    df.loc[3] = Series({"a": 7})

    # df["a"] doesn't have any NaNs, should not have been cast
    exp_dtypes = Series([np.int64, np.float64], dtype=object, index=["a", "b"])
    tm.assert_series_equal(df.dtypes, exp_dtypes)
pandas-dev/pandas
[ 37157, 15883, 37157, 3678, 1282613853 ]
def test_iloc_setitem_unnecesssary_float_upcasting():
    # GH#12255
    df = DataFrame(
        {
            0: np.array([1, 3], dtype=np.float32),
            1: np.array([2, 4], dtype=np.float32),
            2: ["a", "b"],
        }
    )
    orig = df.copy()

    values = df[0].values.reshape(2, 1)
    df.iloc[:, 0:1] = values

    tm.assert_frame_equal(df, orig)
pandas-dev/pandas
[ 37157, 15883, 37157, 3678, 1282613853 ]
def test_12499():
    # TODO: OP in GH#12499 used np.datetime64("NaT") instead of pd.NaT,
    # which has consequences for the expected df["two"] (though i think at
    # the time it might not have because of a separate bug). See if it makes
    # a difference which one we use here.
    ts = Timestamp("2016-03-01 03:13:22.98986", tz="UTC")

    data = [{"one": 0, "two": ts}]
    orig = DataFrame(data)
    df = orig.copy()
    df.loc[1] = [np.nan, NaT]

    expected = DataFrame(
        {"one": [0, np.nan], "two": Series([ts, NaT], dtype="datetime64[ns, UTC]")}
    )
    tm.assert_frame_equal(df, expected)

    data = [{"one": 0, "two": ts}]
    df = orig.copy()
    df.loc[1, :] = [np.nan, NaT]
    tm.assert_frame_equal(df, expected)
pandas-dev/pandas
[ 37157, 15883, 37157, 3678, 1282613853 ]
def valid_user(filter_trips, trips):
    # A user is valid if they have at least 10 filtered trips and the
    # filtered trips account for at least half of all recorded trips.
    valid = False
    if len(filter_trips) >= 10 and len(filter_trips) / len(trips) >= 0.5:
        valid = True
    return valid
e-mission/e-mission-server
[ 20, 103, 20, 11, 1415342342 ]
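A quick illustration of the rule encoded above (the trip lists are hypothetical stand-ins for real trip records):

trips = list(range(20))       # 20 recorded trips
filter_trips = trips[:12]     # 12 of them survive filtering

print(valid_user(filter_trips, trips))  # True: 12 >= 10 and 12/20 = 0.6 >= 0.5
print(valid_user(trips[:8], trips))     # False: fewer than 10 filtered trips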
def get_queryset(self):
    return Article.objects.published()
ilendl2/chrisdev-cookiecutter
[ 1, 1, 1, 2, 1392818821 ]
def get_queryset(self):
    return Article.objects.published()
ilendl2/chrisdev-cookiecutter
[ 1, 1, 1, 2, 1392818821 ]
def get_context_data(self, **kwargs):
    context = super(ArticleDetailView, self).get_context_data(**kwargs)
    context['section_list'] = Section.objects.all()
    return context
ilendl2/chrisdev-cookiecutter
[ 1, 1, 1, 2, 1392818821 ]
def maketopo_hilo():
    x = loadtxt('x.txt')
    y = loadtxt('y.txt')
    z = loadtxt('z.txt')
rjleveque/tsunami_benchmarks
[ 2, 3, 2, 1, 1417293488 ]
def maketopo_flat():
    """
    Output topography file for the entire domain
    """
    nxpoints = 201
    nypoints = 301
    xlower = 204.812
    xupper = 205.012
    ylower = 19.7
    yupper = 20.0
    outfile = "flat.tt2"
    topotools.topo2writer(outfile, topo_flat, xlower, xupper, ylower, yupper,
                          nxpoints, nypoints)
rjleveque/tsunami_benchmarks
[ 2, 3, 2, 1, 1417293488 ]
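As a sanity check on the grid maketopo_flat requests (assuming topo2writer follows GeoClaw's usual convention that nxpoints/nypoints count grid points, not cells):

# 201 points span 0.2 degrees in x and 301 points span 0.3 degrees in y,
# so both directions come out at a uniform 0.001-degree spacing.
dx = (205.012 - 204.812) / (201 - 1)
dy = (20.0 - 19.7) / (301 - 1)
print(round(dx, 6), round(dy, 6))  # 0.001 0.001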
def plot_topo_big():
    figure(figsize=(8, 12))
    topo1 = topotools.Topography()
    topo1.read('flat.tt2', 2)
    contourf(topo1.x, topo1.y, topo1.Z, linspace(-30, 20, 51), extend='both')
    topo2 = topotools.Topography()
    topo2.read('hilo_flattened.tt2', 2)
    contourf(topo2.x, topo2.y, topo2.Z, linspace(-30, 20, 51), extend='both')
    x1 = 204.90028
    x2 = 204.96509
    y1 = 19.71
    y2 = 19.95
    plot([x1, x2, x2, x1, x1], [y1, y1, y2, y2, y1], 'w')
    axis('scaled')
    colorbar()
rjleveque/tsunami_benchmarks
[ 2, 3, 2, 1, 1417293488 ]
def __init__(self):
    self._registry = {}  # gadget hash -> gadget object.
praekelt/django-analytics
[ 33, 6, 33, 2, 1305634483 ]
def get_gadgets(self):
    return self._registry.values()
praekelt/django-analytics
[ 33, 6, 33, 2, 1305634483 ]
def _Colorize(color, text):
  # |color| as a string to avoid pylint's no-member warning :(.
  # pylint: disable=no-member
  return getattr(colorama.Fore, color) + text + colorama.Fore.RESET
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def install(device):
  if install_dict:
    installer.Install(device, install_dict, apk=apk)
  else:
    device.Install(apk)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _UninstallApk(devices, install_dict, package_name):
  def uninstall(device):
    if install_dict:
      installer.Uninstall(device, package_name)
    else:
      device.Uninstall(package_name)
  device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def launch(device):
  # The flags are first updated with input args.
  changer = flag_changer.FlagChanger(device, device_args_file)
  flags = []
  if input_args:
    flags = shlex.split(input_args)
  changer.ReplaceFlags(flags)
  # Then launch the apk.
  if url is None:
    # Simulate app icon click if no url is present.
    cmd = ['monkey', '-p', apk.GetPackageName(),
           '-c', 'android.intent.category.LAUNCHER', '1']
    device.RunShellCommand(cmd, check_return=True)
  else:
    launch_intent = intent.Intent(action='android.intent.action.VIEW',
                                  activity=view_activity, data=url,
                                  package=apk.GetPackageName())
    device.StartActivity(launch_intent)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _ChangeFlags(devices, input_args, device_args_file):
  if input_args is None:
    _DisplayArgs(devices, device_args_file)
  else:
    flags = shlex.split(input_args)
    def update(device):
      flag_changer.FlagChanger(device, device_args_file).ReplaceFlags(flags)
    device_utils.DeviceUtils.parallel(devices).pMap(update)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _RunGdb(device, package_name, output_directory, target_cpu, extra_args,
            verbose):
  gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
  cmd = [
      gdb_script_path,
      '--package-name=%s' % package_name,
      '--output-directory=%s' % output_directory,
      '--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
      '--device=%s' % device.serial,
      # Use one lib dir per device so that changing between devices does not
      # require refetching the device libs.
      '--pull-libs-dir=/tmp/adb-gdb-libs-%s' % device.serial,
  ]
  # Enable verbose output of adb_gdb if it's set for this script.
  if verbose:
    cmd.append('--verbose')
  if target_cpu:
    cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
  cmd.extend(extra_args)
  logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
  print _Colorize('YELLOW', 'All subsequent output is from adb_gdb script.')
  os.execv(gdb_script_path, cmd)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _RunMemUsage(devices, package_name):
  def mem_usage_helper(d):
    ret = []
    proc_map = d.GetPids(package_name)
    for name, pids in proc_map.iteritems():
      for pid in pids:
        ret.append((name, pid, d.GetMemoryUsageForPid(pid)))
    return ret

  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
  for result in _PrintPerDeviceOutput(devices, all_results):
    if not result:
      print 'No processes found.'
    else:
      for name, pid, usage in sorted(result):
        print '%s(%s):' % (name, pid)
        for k, v in sorted(usage.iteritems()):
          print '  %s=%d' % (k, v)
        print
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _RunDiskUsage(devices, package_name, verbose):
  # Measuring dex size is a bit complicated:
  # https://source.android.com/devices/tech/dalvik/jit-compiler
  #
  # For KitKat and below:
  #   dumpsys package contains:
  #     dataDir=/data/data/org.chromium.chrome
  #     codePath=/data/app/org.chromium.chrome-1.apk
  #     resourcePath=/data/app/org.chromium.chrome-1.apk
  #     nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
  #   To measure odex:
  #     ls -l /data/dalvik-cache/data@[email protected]@classes.dex
  #
  # For Android L and M (and maybe for N+ system apps):
  #   dumpsys package contains:
  #     codePath=/data/app/org.chromium.chrome-1
  #     resourcePath=/data/app/org.chromium.chrome-1
  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
  #   To measure odex:
  #     # Option 1:
  #     /data/dalvik-cache/arm/data@[email protected]@[email protected]
  #     /data/dalvik-cache/arm/data@[email protected]@[email protected]
  #     ls -l /data/dalvik-cache/profiles/org.chromium.chrome
  #     (these profiles all appear to be 0 bytes)
  #     # Option 2:
  #     ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
  #
  # For Android N+:
  #   dumpsys package contains:
  #     dataDir=/data/user/0/org.chromium.chrome
  #     codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
  #     resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
  #     legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
  #     Instruction Set: arm
  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
  #       status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
  #       ilter=quicken]
  #     Instruction Set: arm64
  #       path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
  #       status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
  #       uicken]
  #   To measure odex:
  #     ls -l /data/app/.../oat/arm/base.odex
  #     ls -l /data/app/.../oat/arm/base.vdex (optional)
  #   To measure the correct odex size:
  #     cmd package compile -m speed org.chromium.chrome  # For webview
  #     cmd package compile -m speed-profile org.chromium.chrome  # For others
  def disk_usage_helper(d):
    package_output = '\n'.join(d.RunShellCommand(
        ['dumpsys', 'package', package_name], check_return=True))
    # Prints a message but does not return error when apk is not installed.
    if 'Unable to find package:' in package_output:
      return None
    # Ignore system apks.
    idx = package_output.find('Hidden system packages:')
    if idx != -1:
      package_output = package_output[:idx]

    try:
      data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
      code_path = re.search(r'codePath=(.*)', package_output).group(1)
      lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
                           package_output).group(1)
    except AttributeError:
      raise Exception('Error parsing dumpsys output: ' + package_output)

    compilation_filters = set()
    # Match "compilation_filter=value", where a line break can occur at any
    # spot (refer to examples above).
    awful_wrapping = r'\s*'.join('compilation_filter=')
    for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
      compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
    compilation_filter = ','.join(sorted(compilation_filters))

    data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
    # Measure code_cache separately since it can be large.
    code_cache_sizes = {}
    code_cache_dir = next(
        (k for k in data_dir_sizes if k.endswith('/code_cache')), None)
    if code_cache_dir:
      data_dir_sizes.pop(code_cache_dir)
      code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
                                   run_as=package_name)

    apk_path_spec = code_path
    if not apk_path_spec.endswith('.apk'):
      apk_path_spec += '/*.apk'
    apk_sizes = _DuHelper(d, apk_path_spec)
    if lib_path.endswith('/lib'):
      # Shows architecture subdirectory.
      lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
    else:
      lib_sizes = _DuHelper(d, lib_path)

    # Look at all possible locations for odex files.
    odex_paths = []
    for apk_path in apk_sizes:
      mangled_apk_path = apk_path[1:].replace('/', '@')
      apk_basename = posixpath.basename(apk_path)[:-4]
      for ext in ('dex', 'odex', 'vdex', 'art'):
        # Easier to check all architectures than to determine active ones.
        for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
          odex_paths.append(
              '%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
          # No app could possibly have more than 6 dex files.
          for suffix in ('', '2', '3', '4', '5'):
            odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
                arch, mangled_apk_path, suffix, ext))
            # This path does not have |arch|, so don't repeat it for every arch.
            if arch == 'arm':
              odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
                  mangled_apk_path, suffix))
    odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))

    return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
            compilation_filter)

  def print_sizes(desc, sizes):
    print '%s: %dkb' % (desc, sum(sizes.itervalues()))
    if verbose:
      for path, size in sorted(sizes.iteritems()):
        print '    %s: %skb' % (path, size)

  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
  for result in _PrintPerDeviceOutput(devices, all_results):
    if not result:
      print 'APK is not installed.'
      continue

    (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
     compilation_filter) = result
    total = sum(sum(sizes.itervalues()) for sizes in result[:-1])

    print_sizes('Apk', apk_sizes)
    print_sizes('App Data (non-code cache)', data_dir_sizes)
    print_sizes('App Data (code cache)', code_cache_sizes)
    print_sizes('Native Libs', lib_sizes)
    show_warning = compilation_filter and 'speed' not in compilation_filter
    compilation_filter = compilation_filter or 'n/a'
    print_sizes('odex (compilation_filter=%s)' % compilation_filter,
                odex_sizes)
    if show_warning:
      logging.warning('For a more realistic odex size, run:')
      logging.warning('    %s compile-dex [speed|speed-profile]', sys.argv[0])
    print 'Total: %skb (%.1fmb)' % (total, total / 1024.0)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def get_my_pids():
  my_pids = []
  for pids in device.GetPids(package_name).values():
    my_pids.extend(pids)
  return [int(pid) for pid in my_pids]
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _RunPs(devices, package_name):
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  all_pids = parallel_devices.GetPids(package_name).pGet(None)
  for proc_map in _PrintPerDeviceOutput(devices, all_pids):
    if not proc_map:
      print 'No processes found.'
    else:
      for name, pids in sorted(proc_map.items()):
        print name, ','.join(pids)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _RunCompileDex(devices, package_name, compilation_filter):
  cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
         package_name]
  parallel_devices = device_utils.DeviceUtils.parallel(devices)
  outputs = parallel_devices.RunShellCommand(cmd).pGet(None)
  for output in _PrintPerDeviceOutput(devices, outputs):
    for line in output:
      print line
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _GenerateMissingAllFlagMessage(devices):
  return ('More than one device available. Use --all to select all devices, '
          'or use --device to select a device by serial.\n\n' +
          _GenerateAvailableDevicesMessage(devices))
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def flags_helper(d):
  changer = flag_changer.FlagChanger(d, device_args_file)
  return changer.GetCurrentFlags()
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _DeviceCachePath(device, output_directory):
  file_name = 'device_cache_%s.json' % device.serial
  return os.path.join(output_directory, file_name)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def _SaveDeviceCaches(devices, output_directory):
  if not output_directory:
    return
  for d in devices:
    cache_path = _DeviceCachePath(d, output_directory)
    with open(cache_path, 'w') as f:
      f.write(d.DumpCacheData())
      logging.info('Wrote device cache: %s', cache_path)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def __init__(self, from_wrapper_script):
  self._parser = None
  self._from_wrapper_script = from_wrapper_script
  self.args = None
  self.apk_helper = None
  self.install_dict = None
  self.devices = None
  # Do not support incremental install outside the context of wrapper scripts.
  if not from_wrapper_script:
    self.supports_incremental = False
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def RegisterArgs(self, parser):
  subp = parser.add_parser(self.name, help=self.description)
  self._parser = subp
  subp.set_defaults(command=self)
  subp.add_argument('--all',
                    action='store_true',
                    default=self.all_devices_by_default,
                    help='Operate on all connected devices.',)
  subp.add_argument('-d',
                    '--device',
                    action='append',
                    default=[],
                    dest='devices',
                    help='Target device for script to work on. Enter '
                         'multiple times for multiple devices.')
  subp.add_argument('-v',
                    '--verbose',
                    action='count',
                    default=0,
                    dest='verbose_count',
                    help='Verbose level (multiple times for more)')
  group = subp.add_argument_group('%s arguments' % self.name)

  if self.needs_package_name:
    # Always gleaned from apk when using wrapper scripts.
    group.add_argument('--package-name',
                       help=argparse.SUPPRESS if self._from_wrapper_script
                       else ("App's package name."))

  if self.needs_apk_path or self.needs_package_name:
    # Adding this argument to the subparser would override the set_defaults()
    # value set on the parent parser (even if None).
    if not self._from_wrapper_script:
      group.add_argument('--apk-path',
                         required=self.needs_apk_path,
                         help='Path to .apk')

  if self.supports_incremental:
    group.add_argument('--incremental',
                       action='store_true',
                       default=False,
                       help='Always install an incremental apk.')
    group.add_argument('--non-incremental',
                       action='store_true',
                       default=False,
                       help='Always install a non-incremental apk.')

  # accepts_command_line_flags and accepts_args are mutually exclusive.
  # argparse will throw if they are both set.
  if self.accepts_command_line_flags:
    group.add_argument('--args', help='Command-line flags.')

  if self.accepts_args:
    group.add_argument('--args', help='Extra arguments.')

  if self.accepts_url:
    group.add_argument('url', nargs='?', help='A URL to launch with.')

  if not self._from_wrapper_script and self.accepts_command_line_flags:
    # Provided by wrapper scripts.
    group.add_argument('--command-line-flags-file-name',
                       help='Name of the command-line flags file')

  self._RegisterExtraArgs(group)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def Run(self):
  print _GenerateAvailableDevicesMessage(self.devices)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]
def Run(self):
  _InstallApk(self.devices, self.apk_helper, self.install_dict)
chrisdickinson/nojs
[ 72, 3, 72, 5, 1464475027 ]