repo
stringlengths 2
99
| file
stringlengths 13
225
| code
stringlengths 0
18.3M
| file_length
int64 0
18.3M
| avg_line_length
float64 0
1.36M
| max_line_length
int64 0
4.26M
| extension_type
stringclasses 1
value |
---|---|---|---|---|---|---|
mmda
|
mmda-main/tests/test_types/test_span.py
|
import unittest
from mmda.types import box as mmda_box
from mmda.types import span as mmda_span
class TestSpan(unittest.TestCase):
    """Unit tests for the Span type: JSON round-tripping, overlap queries,
    merging small spans into one big span, and disjointness checks."""

    def setUp(self) -> None:
        # Fix: the parameter was named `cls`, but unittest invokes setUp on
        # the *instance*, so the conventional `self` name is used instead.
        self.span = mmda_span.Span(start=0, end=0)
        self.span_dict = {
            "start": 0,
            "end": 8,
            "box": {
                "left": 0.2,
                "top": 0.09,
                "width": 0.095,
                "height": 0.017,
                "page": 0,
            },
        }

    def test_from_json(self):
        # from_json should rebuild both the offsets and the nested box
        self.assertEqual(
            self.span.from_json(self.span_dict),
            mmda_span.Span(
                start=0,
                end=8,
                box=mmda_box.Box(l=0.2, t=0.09, w=0.095, h=0.017, page=0),
            ),
        )

    def test_to_json(self):
        # to_json should be the exact inverse of from_json on the dict form
        self.assertEqual(self.span.from_json(self.span_dict).to_json(), self.span_dict)

    def test_is_overlap(self):
        # spans behave as half-open intervals here: [0, 2) overlaps [0, 1)
        # and [1, 2), but merely touches [2, 3) and is far from [4, 5)
        span = mmda_span.Span(start=0, end=2)
        self.assertTrue(span.is_overlap(mmda_span.Span(start=0, end=1)))
        self.assertTrue(span.is_overlap(mmda_span.Span(start=1, end=2)))
        self.assertFalse(span.is_overlap(mmda_span.Span(start=2, end=3)))
        self.assertFalse(span.is_overlap(mmda_span.Span(start=4, end=5)))

    def test_small_spans_to_big_span(self):
        spans = [
            mmda_span.Span(start=0, end=8),
            mmda_span.Span(start=8, end=16),
            mmda_span.Span(start=16, end=24),
        ]
        self.assertEqual(
            mmda_span.Span.small_spans_to_big_span(spans=spans, merge_boxes=False),
            mmda_span.Span(start=0, end=24),
        )
        # if no boxes, should still work
        self.assertEqual(
            mmda_span.Span.small_spans_to_big_span(spans=spans, merge_boxes=True),
            mmda_span.Span(start=0, end=24),
        )

    def test_small_spans_to_big_span_unsorted(self):
        # merging must not depend on the input ordering of the spans
        spans = [
            mmda_span.Span(start=8, end=16),
            mmda_span.Span(start=0, end=8),
            mmda_span.Span(start=16, end=24),
        ]
        self.assertEqual(
            mmda_span.Span.small_spans_to_big_span(spans=spans),
            mmda_span.Span(start=0, end=24),
        )
        spans = [
            mmda_span.Span(start=16, end=24),
            mmda_span.Span(start=8, end=16),
            mmda_span.Span(start=0, end=8),
        ]
        self.assertEqual(
            mmda_span.Span.small_spans_to_big_span(spans=spans),
            mmda_span.Span(start=0, end=24),
        )

    def test_are_disjoint(self):
        # adjacent half-open spans should be disjoint, in either order
        span1 = mmda_span.Span(start=0, end=1)
        span2 = mmda_span.Span(start=1, end=2)
        self.assertTrue(mmda_span.Span.are_disjoint(spans=[span1, span2]))
        self.assertTrue(mmda_span.Span.are_disjoint(spans=[span2, span1]))
        # should overlap
        span3 = mmda_span.Span(start=0, end=2)
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span1, span3]))
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span3, span1]))
        # should handle strict containment
        span4 = mmda_span.Span(start=0, end=3)
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span1, span4]))
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span4, span1]))
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span2, span4]))
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span4, span2]))
        # should handle exact equality
        span5 = mmda_span.Span(start=0, end=1)
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span1, span5]))
        self.assertFalse(mmda_span.Span.are_disjoint(spans=[span5, span1]))
| 3,660 | 34.892157 | 87 |
py
|
mmda
|
mmda-main/tests/test_types/test_metadata.py
|
"""
Tests for Metadata
@soldni
"""
from copy import deepcopy
import unittest
from mmda.types import Metadata
class TestSpanGroup(unittest.TestCase):
    """Tests for the Metadata mapping type.

    NOTE(review): the class name says SpanGroup, but every test here
    exercises Metadata — likely a copy-paste name. Kept unchanged so test
    discovery is unaffected.
    """

    def test_add_keys(self):
        # all three write styles — item, attribute, and set() — must land
        md = Metadata()
        md['foo'] = 1
        self.assertEqual(md.foo, 1)
        md.bar = 2
        self.assertEqual(md.bar, 2)
        md.set('baz', 3)
        self.assertEqual(md.baz, 3)

    def test_access_keys(self):
        md = Metadata()
        md.foo = "bar"
        # attribute, get(), and item access all see the same value
        self.assertEqual(md.foo, "bar")
        self.assertEqual(md.get("foo"), "bar")
        self.assertTrue(md["foo"])
        # get() on a missing key yields None rather than raising
        self.assertIsNone(md.get("bar"))

    def test_json_transform(self):
        # to_json/from_json must be inverses of each other
        md = Metadata.from_json({'foo': 'bar'})
        self.assertEqual(md.to_json(), {'foo': 'bar'})
        self.assertEqual(Metadata.from_json(md.to_json()), md)

    def test_len(self):
        md = Metadata.from_json({f'k{i}': i for i in range(10)})
        self.assertEqual(len(md), 10)
        # both pop() and attribute deletion shrink the mapping
        md.pop('k0')
        self.assertEqual(len(md), 9)
        del md.k1
        self.assertEqual(len(md), 8)

    def test_valid_names(self):
        md = Metadata()
        # this should work fine
        md.set('foo', 'bar')
        self.assertEqual(md.foo, 'bar')
        # this should fail because `1foo` is not a valid python variable name
        with self.assertRaises(ValueError):
            md.set('1foo', 'bar')

    def test_deep_copy(self):
        original = Metadata.from_json({'foo': 1, 'bar': 2, 'baz': 3})
        duplicate = deepcopy(original)
        self.assertEqual(original, duplicate)

    def test_get_unknown_key(self):
        # attribute access on an unset key yields None instead of raising
        md = Metadata()
        self.assertIsNone(md.text)
| 1,847 | 25.028169 | 77 |
py
|
mmda
|
mmda-main/tests/test_types/test_span_group.py
|
"""
Tests for SpanGroup
@rauthur
"""
import json
import unittest
from mmda.types import SpanGroup, Document, Span
class TestSpanGroup(unittest.TestCase):
    """Check that annotating a document wires the document into its span groups."""

    doc: Document

    def setUp(self) -> None:
        self.doc = Document("This is a test document!")

    def test_annotation_attaches_document(self):
        group = SpanGroup(id=1, spans=[Span(0, 4), Span(5, 7)])
        self.doc.annotate(tokens=[group])
        # once attached, the group can resolve its symbols via the document
        attached = self.doc.tokens[0]
        self.assertEqual(["This", "is"], attached.symbols)
| 534 | 18.107143 | 68 |
py
|
mmda
|
mmda-main/tests/test_types/test_json_conversion.py
|
'''
Description: Test whether all properties for an mmda doc are preserved when
converting to json and back.
Author: @soldni
'''
import json
from pathlib import Path
from mmda.types import BoxGroup, SpanGroup, Document, Metadata
from mmda.parsers import PDFPlumberParser
PDFFILEPATH = Path(__file__).parent / "../fixtures/1903.10676.pdf"
def test_span_group_conversion():
    """SpanGroup and BoxGroup must survive a to_json/from_json round trip."""
    sg = SpanGroup(spans=[], id=3, metadata=Metadata.from_json({"text": "test"}))
    sg_roundtrip = SpanGroup.from_json(sg.to_json())
    # both the serialized form and the full instance state must match
    assert sg_roundtrip.to_json() == sg.to_json()
    assert sg_roundtrip.__dict__ == sg.__dict__

    bg = BoxGroup(boxes=[], metadata=Metadata.from_json({"text": "test", "id": 1}))
    bg_roundtrip = BoxGroup.from_json(bg.to_json())
    assert bg_roundtrip.to_json() == bg.to_json()
    assert bg_roundtrip.__dict__ == bg.__dict__
def test_doc_conversion():
    """A parsed Document must survive a JSON-string round trip: symbols,
    field names, and per-span-group metadata/spans are all preserved."""
    parser = PDFPlumberParser()
    orig_doc = parser.parse(input_pdf_path=str(PDFFILEPATH))
    new_doc = Document.from_json(json.loads(json.dumps(orig_doc.to_json())))

    # A plain `new_doc == orig_doc` would trip over internal back-references
    # (e.g. `doc.tokens[0].doc`), so compare specific pieces instead.
    assert orig_doc.symbols == new_doc.symbols
    assert orig_doc.fields == new_doc.fields

    # type annotations to keep mypy quiet
    orig_sg: SpanGroup
    new_sg: SpanGroup
    for field_name in orig_doc.fields:
        # walk this field's span groups in both documents in lockstep
        for orig_sg, new_sg in zip(
            getattr(orig_doc, field_name), getattr(new_doc, field_name)
        ):
            # each pair must agree on metadata (type, id, and optionally
            # text) and on the underlying spans
            assert orig_sg.metadata == new_sg.metadata
            assert orig_sg.spans == new_sg.spans
| 2,013 | 31.483871 | 83 |
py
|
mmda
|
mmda-main/tests/test_types/test_annotation.py
|
from mmda.types.annotation import BoxGroup
from mmda.types.box import Box
import unittest
class TestBoxGroup(unittest.TestCase):
    """Unit tests for BoxGroup JSON (de)serialization."""

    def setUp(self) -> None:
        # Fix: the parameter was named `cls`, but unittest invokes setUp on
        # the *instance*, so the conventional `self` name is used instead.
        self.box_group_json = {'boxes': [{'left': 0.1,
                                          'top': 0.6,
                                          'width': 0.36,
                                          'height': 0.221,
                                          'page': 0}],
                               'id': None,
                               'type': 'Text'}

    def test_from_json(self):
        # Fix: parse once instead of re-parsing the same dict for each
        # assertion — same checks, one construction.
        box_group = BoxGroup.from_json(self.box_group_json)
        self.assertIsInstance(box_group, BoxGroup)
        self.assertEqual(box_group.boxes,
                         [Box(l=0.1, t=0.6, w=0.36, h=0.221, page=0)])
        self.assertEqual(box_group.id, None)
        self.assertEqual(box_group.type, 'Text')

    def test_to_json(self):
        boxgroup = BoxGroup.from_json(self.box_group_json)
        self.assertIsInstance(boxgroup.to_json(), dict)
        # boxes must serialize back to their original dict form
        self.assertEqual(boxgroup.to_json()['boxes'],
                         [{'left': 0.1,
                           'top': 0.6,
                           'width': 0.36,
                           'height': 0.221,
                           'page': 0}])
        # serialized payload carries both the boxes and the metadata keys
        assert 'boxes' in boxgroup.to_json()
        assert 'metadata' in boxgroup.to_json()
| 1,430 | 37.675676 | 80 |
py
|
mmda
|
mmda-main/tests/test_recipes/test_core_recipe.py
|
"""
@kylel
"""
import os
import unittest
from mmda.recipes import CoreRecipe
from mmda.types import BoxGroup, Document, PILImage, SpanGroup
from tests.test_recipes.core_recipe_fixtures import (
BASE64_PAGE_IMAGE,
FIRST_3_BLOCKS_JSON,
FIRST_5_ROWS_JSON,
FIRST_10_TOKENS_JSON,
FIRST_10_VILA_JSONS,
FIRST_1000_SYMBOLS,
PAGE_JSON,
SEGMENT_OF_WORD_JSONS,
)
def round_all_floats(d: dict):
    """Recursively replace every number in *d* with a 4-significant-digit
    string (via ``"%.4g"``), descending into nested dicts and lists.

    Non-numeric leaves (and container types other than dict/list) are
    returned unchanged. Used to compare float-bearing JSON fixtures without
    spurious precision mismatches.
    """
    import numbers

    def _walk(node):
        # recurse into containers; format numeric leaves; pass through the rest
        if isinstance(node, dict):
            return {key: _walk(value) for key, value in node.items()}
        if isinstance(node, list):
            return [_walk(item) for item in node]
        if isinstance(node, numbers.Number):
            return "%.4g" % float(node)
        return node

    return _walk(d)
class TestCoreRecipe(unittest.TestCase):
    """End-to-end checks of CoreRecipe output against recorded fixtures."""

    def setUp(self):
        self.pdfpath = os.path.join(
            os.path.dirname(__file__), "../fixtures/1903.10676.pdf"
        )
        self.recipe = CoreRecipe()
        self.doc = self.recipe.from_path(pdfpath=self.pdfpath)

    def test_correct_output(self):
        doc = self.doc
        # exact comparisons for the float-free pieces
        self.assertEqual(doc.symbols[:1000], FIRST_1000_SYMBOLS)
        self.assertDictEqual(doc.pages[0].to_json(), PAGE_JSON)
        self.assertEqual(doc.images[0].to_json(), BASE64_PAGE_IMAGE)
        # float-bearing fixtures: round both sides to 4 significant digits
        fixture_checks = [
            (doc.tokens[:10], FIRST_10_TOKENS_JSON),
            (doc.rows[:5], FIRST_5_ROWS_JSON),
            (doc.blocks[:3], FIRST_3_BLOCKS_JSON),
            (doc.vila_span_groups[:10], FIRST_10_VILA_JSONS),
            (doc.words[895:900], SEGMENT_OF_WORD_JSONS),
        ]
        for groups, expected in fixture_checks:
            self.assertListEqual(
                [round_all_floats(g.to_json()) for g in groups],
                round_all_floats(expected),
            )

    def test_to_from_json(self):
        # serializing, deserializing, and re-serializing must be lossless
        serialized = self.doc.to_json(with_images=True)
        rehydrated = Document.from_json(doc_dict=serialized)
        self.assertDictEqual(serialized, rehydrated.to_json(with_images=True))

    def test_manual_create_using_annotate(self):
        """
        This tests whether one can manually reconstruct a Document without using from_json().
        Annotations on a Document are order-invariant once created, so you can see this since the
        fields are being annotated in a different order than they were computed.
        """
        doc_json = self.doc.to_json(with_images=True)
        doc2 = Document(symbols=doc_json["symbols"], metadata=doc_json["metadata"])
        assert doc2.symbols == doc_json["symbols"] == self.doc.symbols
        assert (
            doc2.metadata.to_json()
            == doc_json["metadata"]
            == self.doc.metadata.to_json()
        )

        # images go in first, via the dedicated image-annotation API
        doc2.annotate_images(
            [PILImage.frombase64(img) for img in doc_json["images"]]
        )
        assert (
            doc2.images[0].to_json()
            == doc_json["images"][0]
            == self.doc.images[0].to_json()
        )

        # annotate the span-group fields in a deliberately scrambled order
        # (rows before tokens, etc.) to exercise order invariance; after
        # each step the field must match both the JSON and the original doc
        for field in ("rows", "vila_span_groups", "words", "tokens", "blocks"):
            groups = [
                SpanGroup.from_json(span_group_dict=d) for d in doc_json[field]
            ]
            doc2.annotate(**{field: groups})
            assert (
                [g.to_json() for g in getattr(doc2, field)]
                == doc_json[field]
                == [g.to_json() for g in getattr(self.doc, field)]
            )
| 4,970 | 33.282759 | 97 |
py
|
mmda
|
mmda-main/tests/test_recipes/__init__.py
| 0 | 0 | 0 |
py
|
|
mmda
|
mmda-main/tests/test_recipes/core_recipe_fixtures.py
|
FIRST_1000_SYMBOLS = """Field\nTask\nDataset\nSOTA\nB ERT -Base\nS CI B ERT\nFrozen\nFinetune\nFrozen\nFinetune\nBio\nNER\nBC5CDR (Li et al., 2016)\n88.85 7\n85.08\n86.72\n88.73\n90.01\nJNLPBA (Collier and Kim, 2004)\n78.58\n74.05\n76.09\n75.77\n77.28\nNCBI-disease (Dogan et al., 2014)\n89.36\n84.06\n86.88\n86.39\n88.57\nPICO\nEBM-NLP (Nye et al., 2018)\n66.30\n61.44\n71.53\n68.30\n72.28\nDEP\nGENIA (Kim et al., 2003) - LAS\n91.92\n90.22\n90.33\n90.36\n90.43\nGENIA (Kim et al., 2003) - UAS\n92.84\n91.84\n91.89\n92.00\n91.99\nREL\nChemProt (Kringelum et al., 2016)\n76.68\n68.21\n79.14\n75.03\n83.64\nCS\nNER\nSciERC (Luan et al., 2018)\n64.20\n63.58\n65.24\n65.77\n67.57\nREL\nSciERC (Luan et al., 2018)\nn/a\n72.74\n78.71\n75.25\n79.97\nCLS\nACL-ARC (Jurgens et al., 2018)\n67.9\n62.04\n63.91\n60.74\n70.98\nMulti\nCLS\nPaper Field\nn/a\n63.64\n65.37\n64.38\n65.71\nSciCite (Cohan et al., 2019)\n84.0\n84.31\n84.85\n85.42\n85.49\nAverage\n73.58\n77.16\n76.01\n79.27\nTable 1: Test performances of all B ERT variants on all tasks and datasets. Bold indicates the SOTA result (multiple\nresults bolded if difference wi"""
BASE64_PAGE_IMAGE = "iVBORw0KGgoAAAANSUhEUgAAAlMAAANKCAIAAAAhjVxYAAEAAElEQVR4nOydeVxN+f/HP7dVm1KRLVJUshQlS9YRphQSytdYRonJlmXsZF9HWTIxtrRYKqKkEJWQEqJ931fdtruvn98fH3N+Z+5NqBvN3M/zjx7nfM77nPf7LJ33Ped8Pq83BUIIMBgMBoORGmR+dAAYDAaDwXxXcObDYDAYjHSBMx8Gg8FgpAuc+TAYDAYjXeDMh8FgMBjpAmc+DAaDwUgXOPNhMBgMRrrAmQ+DwWAw0gXOfBgMBoORLnDmw2AwGIx0gTMfBoPBYKQLnPkwGAwGI13gzIfBYDAY6QJnPgwGg8FIFzjzYTAYDEa6wJkPg8FgMNIFznwYDAaDkS5w5sNgMBiMdIEzHwaDwWCkC5z5MBgMBiNd4MyHwWAwGOkCZz4MBoPBSBc482EwGAxGusCZD4PBYDDSBc58GAwGg5EucObDYDAYjHSBMx8Gg8FgpAuc+TAYDAYjXeDMh8FgMBjpAmc+DAaDwUgXOPNhMBgMRrrAmQ+DwWAw0gXOfBgMBoORLnDmw2AwGIx0gTMfBoPBYKQLnPkwGAwGI13gzIfBYDAY6QJnPgwGg8FIFzjzYTAYDEa6wJkPg8FgMNIFznwYDAaDkS5w5sNgMBiMdIEzHwaDwWCkC5z5MBgMBiNd4MyHwWAwGOkCZz4MBoPBSBc482EwGAxGusCZD4PBYDDSBc58GAwGg5EucObDYDAYjHSBMx8Gg8FgpAuc+TAYDAYjXeDMh8FgMBjpAmc+DAaDwUgXOPNhMBgMRrrAmQ+DwWAw0gXOfBgMBoORLnDmw2AwGIx0gTMfBoPBYKQLnPkwGAwGI13gzIfBYDAY6ULuRwfw46FQKD86BAwGg/kBQAh/dAg/BvzMh8FgMBjpgiK1OR+DwWAw0gl+5sNgMBiMdIEzHwaDwWCkC5z5WoDP54s3stlsYhpCyOVyv2NEn40EIz0IhUIIoUAgILcIhcLWV+n4uDCiiJ8pTGcDZz5QW1v7008/rV+//ubNmydOnLh+/fq1a9dErtqwsLAVK1agaaFQ6Obm9vjxY8mG0dzcPHPmzPPnz/ft2/fy5cuzZ8/mcDjiZidPnjx27Fj73eXk5BgYGJw+fTowMHDr1q0lJSXiNjwe72s29QN/BEgP79+/37dv35kzZxYuXAgAYLFYO3fuvH379sWLF3fu3FlTU9PixbNu3bqGhobvE2FeXt6wYcOOHDly5MiRtWvXtvm+HxMTo6OjExAQsGHDhgsXLkg2yO+AyJn6HPX19U5OTsuXLz979uzSpUurqqq+W4QYgEc1AAB69Oihr6//008/zZ49m8ViNTY29ujRAy0SCoUsFktFRWXkyJF37txBjUwm09jYWOJhUCiUP//8s3///rt373ZxcZkyZYpQKKTRaEpKSnJycgAAOp2uqqpqZmb2/PlzPp8vEAjk5eVlZNr428XIyEhNTW3evHl9+vTJzs6eMWNGcnJy165dCY9HjhyZPXu2iYkJhLCpqUlDQwMAwOVyIYSKiooAABaLJS8vDyFct27dmTNnFBQUJHcwMKKEhIQMGzbMycnJ0NAQALBv376BAwfOnz8fAODr63vixAnxi4dKpebl5V28eHHLli3fIcJBgwbp6OjY2tqamppaWVl9+PDBxMRkx44dJiYmOjo6NTU1S5Ys2b1794ABA5qbm21tbaurq/Pz89++fTt58uTKyspNmzah7VhbWwsEgsWLFzs6Ovbv33/lypUAAC6Xy+fzlZWVAQBMJrNLly7oyqfRaGpqat9h774ekTMFALh+/bqsrCyDwairq9u4ceOxY8c0NTV/++23YcOGqamprV27dufOnTdv3tywYcOlS5dKS0vHjx//5s2b7du3BwQE5OXl9e3bV0dHZ/z48d7e3oqKimpqahUVFSdOnPixu/lvBz/zAQAAhUJ5/fr13bt3d+3apaqqam1
tXV1dnZSU9OTJE3d3d/Lj3aFDh5KSkpKTkyUeg5qaWv/+/YlZfX39M2fO5Obm/vrrrwCAM2fOFBUVXbx4ES198OBBaGhoOx+2iIGMxsbGgwYNio2NPXbsGPLI5/OfPXuWmppKpVJ37doVGhoaFhZWWVnp5+d35swZJpMZFhaWlJRka2ubl5f38uXLjIyM9kSC+SILFizYunXr2bNnbWxsAAB3796dNGkSWjR+/Pjo6GiRi0dJSSk8PDwoKMjPz6/Ft/cdAYVCefHixZ9//tmrV6/hw4crKipSKBQLCws7O7spU6bIy8vLyclZWFhYWVlduHBh4MCBK1eurK+vnzVrloODA3k7fD4/Kipq//79a9euBQDExcXduHFj27Zt5eXl4eHh7969++OPPwAA165di4+PRwek8yBypp49exYbG4se78aMGSMnJ6eurq6rqwsAoFAoWVlZQUFBGRkZc+bMAQD07NlTS0tr+vTpTk5OAIBevXqpq6v/+uuvR48elZeX37Rpk6am5qBBg9zd3X/oLv4XwJnvE0ZGRmPGjDE0NFRTU+vTpw8AwNfXVygUTp8+nUgwRUVFpaWlU6dOHTVq1HcIydXVlcfjlZSUsFis4uLisLAw9P/w9OlTCoWyaNGiLl26SMqXkpIShUIhPPJ4vJ49e44ePbpbt25Lly4FALx69YrP5/v7+0+aNKlLly5+fn4cDgf9u6qrq48YMUJSkWBaZPjw4UlJSTExMYsWLQJin6LFc5tQKExJSUlKSurRo0doaOh3i9PIyMjCwqKmpiY7OxsAQKFQnj175u3tXVZWhmYjIiICAwNXr14tkqrfvXsXGRmZlZUFAJCVlTU1NR09enR0dDSXyx0xYoSFhQWEMC0tTSAQnDp1ysnJKS0tLSkpSV5efuLEiSwW67vt4BcROVOxsbGTJ09GiyZOnAgAoFAoxI/OPn36mJubKykpJSQkoEUZGRk3b96MiopCs+np6Rs3bty1a1fXrl27deuG1howYMB3363/GjjzAQAAhLBr1649e/ZEr1bQdcnj8YYNG7Zo0SLitg4hrKmpQQYdNw6S2PKBAwf69++vqakpFAp37typpaXl6uoKADAzM/Px8WlubpaUx48fP+bl5U2aNInsEUWSl5cXGBhoYWEhFApVVVUDAwM3btz4/v17Op0+bdo0d3d3DQ0NPCT0O3Dnzh0dHZ2wsLCUlJS6ujobG5tXr16hRcnJycRzD3EuIiIifvvtt/Hjxx87dszb2/u7xamtrW1paWllZRUSEoJaJk6cuGHDhn79+qHZqVOnFhUVqaioiKzYpUsXVVVV9CJdRkamd+/eDg4OQqHw5cuX9+/fLygoMDAwEAqFkyZNWrt27axZs2RkZAQCwYwZMzw8PFr8Iv6jEDlT/fr1q6ys/Jxx165djY2Nly5deuXKFdQyZMgQZ2fn+fPn0+l0AMDQoUOFQiH63oGRIPiAgtra2rS0tK5du9ra2srLyzc0NOTn57979+7XX3+1t7e3t7d3cnLKzs4uKSnp3r07hULZsmVLZWUlhNDOzk7iwcTExDQ0NLx8+XLcuHFZWVmBgYF0Ov3Ro0epqamzZ8/mcrlpaWnNzc2urq4ODg5+fn7otUkbyMnJKSsrCw4O7tGjR2pqqr+/v7q6OtmjiYnJX3/9NWXKlBcvXmhra79+/To+Pp7H4y1atEhLS2vKlCk2NjZTpkxZvXq1trb29evX//e//0n2UGDI3L17t66urnv37pMmTdLW1j506NCePXuEQiGXy01PTz906BAgXTwGBga+vr7onOrp6eXn5/v7+y9ZsqRDI8zPzy8oKLh7925SUlJOTo63tzeHw8nPz4+Pj8/Pz3/69OmpU6dQv6o///xz2bJlly5d0tDQKCkp+fDhw9ixYwcPHjx48GAAwJMnT5qbmwMDAysrKw0NDceNG3f8+PHi4mJlZeWioqLi4mIzM7P58+ebmJgUFxe7urqOHj2a6H3WGRA5U4sWLVq
/fv3FixcNDQ01NDSGDBmSmZnZ2Ng4duzY169fKyoqduvWLTo6eseOHQCAjIyMnJyc+/fvR0ZG7t27Nz09PSsr6/Tp046OjkpKSlZWVunp6agr3I/ey389WMOlNTgcjoKCgoiwJ5PJVFRUlJWV7WjvQqEQdZCTlZWlUCgsFgt94f8+HmVkZLhcroKCAofDUVRURIcCvVWTl5cHALDZbPS6VSAQUCiUNve1wXwNHA5HXl6ewWCQO3QwGAxZWVkJvvTutJAvQvI/AnERdh5aPFMsFktRURH/j3QecObDYDAYjHSBf4NgMBgMRrrA3/kwGKkGddciv9JHsy02tj5LNLayhc/ZfL3T9gTWhuD/jYF9ffBS+84PP/NhMBgMRrrA3/kwGEwL1NbWOjs7Dxs2bOzYsWVlZX369Plu3XczMjKmTZu2c+dOTU3N2NjYXbt2RUdHu7m5tbIK6o3VEcFwudzNmzfn5eUtW7aMwWAUFhYePHiwIxyJI34KWCzWsmXLWule13HH4T8GftuJwWBaQFzVj8vlUigU1O+3sbFRTU1NVlYWCekBAGRlZeXk5AjBPwAAm80WCATiQ/e+yJAhQ5SVlX/55Rd1dfWffvpJRUVl2bJl4O++x1wul9hmc3Nz165dq6urT58+vX//fqFQqKCgwOVy5eTkZGVl2Ww2hUJB0+Bvvb1vHRunoKBgaWkpKyvr5OTE4XDq6uoghHw+XygUKioqMplMCoWipKQkFAqRzi2FQkG5B8XWnuPQirCiyDaRihuPx0NSghBCdC4AAPLy8mw2W1FRkc1mKykptfk4/MeQ6p3HYDCtgFT9IIQJCQlbtmwxMzNzc3MbNWpUVFTUtGnTbt265ebmlpWV1bNnzxMnTixbtqx///5oKN4vv/ySlZX1/v17fX39pqam48ePt8F1dHQ0hLCiosLR0dHJyenFixf/+9//LCwsMjMzHR0dx48ff//+/crKSg6H89NPP71586a4uHjdunU+Pj5RUVFNTU22tra//fabm5tbaGjogwcPwsLCunXrdvjwYX9//549e35rMIWFhffv379///7hw4cdHBxGjhyJlJ40NDSKioq0tbXRnr569aqhoeHQoUNEbBoaGu08DsQp2Lt3r7W1dWBg4O3bt8nbvHbtmpaW1rlz5wICApCUYGRkZK9evUaPHr1hw4bg4OAJEybs3r374sWLgYGBiYmJbT4O/yVw5sNgMJ8FqfrV1NTo6Oh0797dw8MjNze3uLh40qRJfD7/5MmT/v7+ycnJKioqCxYsWLZs2cKFC5Hgn5GREYvF2rp1K6Hd9a2MGjVKVlb2+fPnenp6SEXdwMBg0qRJU6ZMCQkJKS4ulpeXHzJkSHFxsa6uro6OzqBBg/r27QsAGDx48KtXr0xMTLp167Z8+fLbt28zmUw/Pz93d/c5c+bU19e34Y7fq1evMWPGVFVVKSoq6unp2dnZjRw5ctSoUa9fv2axWEOHDs3Pz29ubj569Gh0dLSfnx8R26BBg9p5HIhTQAgrko8tUnGbPXv2xIkTVVRUkJRgZmYmm81GygDdunXr3r27k5NTaWnp27dv23kc/jPgzIfBYFpGRNUPDcSWlZXNzc0FAPTo0UNZWZnH4/3+++8hISEMBgMJ/vXu3buysjItLQ1tREQI4utda2lpqaurI3lP8kZkZGRQwRA1NTU7O7uqqipUQoRYUaRAErJHenszZsxoRUuslWCUlJS0tbWRWAx65QsA+PjxI3rN2LNnTwqFsnnz5r1798rIyJBj+/DhQzuPg7iwIgGFQlFUVEQqbhMnTuRwOB16HP5L4MyHwWBaQETVr7q6uqioKCMjY8iQIRMmTLh3715aWtqWLVsOHjw4ZMiQ1NTUvLw8suBfVlZWfn5+Xl5eSUlJQ0MDobb8NWRkZFRVVcXExDg6OgIACgsLc3NzS0pK8vLycnJyhEJhQUHBhg0bbGxs3r59O23atEmTJqGaIRMnTjx8+HC/fv1KS0szMzO
rq6srKipKSkpycnLIenvfdBy4XG5CQkJubm5dXZ22tjaLxcrLy0tKSjI1NT1y5AgqTLFt27anT59mZmYCAE6cOPHbb79Nnz4dxdae4/A5YcX8/Hxim927dyeruCEpwVGjRq1evRqtUlJSUl5eXlhYmJ6eTqFQ2nwc/mPgvp0YDOabYTKZLWrptSj410EIhUI+n4+6kxB9Gnk8HuqDI27fEVJnXC5XRkZGvLcIObbvgLiUII/Hk5eXFwqF4oeiE0q+fX9w5sNgMBiMdIFHsmMwGAxGusCZD4PBYDDSBc58GAwGg5EucN/ONvY2xmAwmH87UtvPAz/zYTAYDEa6wH07MRgMBiNd4Gc+DAaDwUgXOPNhREGS82TpI4FAgBrB33r5IgbthM8XfusqbDa/laUCwT/eZJC3L/6Sg0bjfqv3H4LIGWndAMFms69ceXf6dNKJEy8l6B3885JA8Pn8zxlLCpHNIi2urzTuiBi+6AJVS+hokIgamhYIBHV1dSIGdDqdwWAQszwej8VifYfAOjO4h4uUkpeXN3fuXFRxrbKy8tSpU7Kyso8fP75///7o0aOrq6sTExNDQkKCg4MDAgKUlJSuX7/OZDJ37NhhYGDQo0ePmzdvrl69OiYm5s2bN66uriUlJT169Fi6dGlubu6MGTPWr19PpVKVlJR27txJeIQQ3r1719x84rJl9y0seh88+JOCgiwAoLaW8fJl2Zw5xuJBcrkCZCNCYWHDnDk3P3z4rcVd8/JKjI8vkZWl3L69oLmZe/z4Cysr3Zoaxq+/muXmUpctu/vypQuyDA/P+fiRaWdnKCvLf/78+fTp09t/YDsCKpUaEhIyYMCAysrKX3/91cfHZ+jQoa9evdq2bVuLBrm5uR4eHrKysn/88cfy5SOKihorK2mS8k6+JJABqhhgZWVVU1Pz66+/BgQEDBgwICYmZsuWLS3qvLQ/BgCAj49Pnz59ZsyYgQwuXLjw+PFjeXn5oqKixMREb2/v+Ph4WVnZ27dvS6oLm0gM4ruZkpKyd+9eNTW1goICf3//tLS0Dx8+lJSU+Pv7SySAFvHy8mpsbMzPz58/f76VlZWrq+uECRMUFBTWr1+PDJ4/fx4UFMRisVauXGlqahoUFLR3797g4GArK6uOi+pfAMRIK1OnTk1NTYUQjhs37u3btyUlJcOHD+fxeGipn5+fUChcvnz527dvUcu6desuX76MpisqKh4/fhwWFubq6opaXF1d//jjDwjhkCFDqqurBQJBjx49GhoaCHd+fn7l5eUQQheXe2FhWeRIOBw+eZbJ5PF4Ai5XsHJlBHmRUChsaGCh6fHjr7S4Ux8/MrKz6yCEy5ffS06u2Ls37vnzUgihnd11Op0LIZw48SqyfPKkcMuWx8SKRUVFwcHBX3fkvjdBQUEXL14UCoUrV66sqKhYtGgRhNDNzU0oFIobQAi3bt368OFDPv/Tobt2LVUgEErEu8glgdi7d+/z588hhHZ2dnQ6HdVqOHv27IcPH9rstJUYIIT79++/d+8e2aCsrAxCyOPxtm7d+vHjx+zsbAjh8uXLk5OTOygG8d1EMUAIN2zYIBAIamtrIYSOjo4CgUBSMYijoaFx7dq1U6dOmZiYbNu2zcnJ6cOHD6qqqjQaDRmMGTPG19f3zJkzkydPhhBWVVUBAND5kmbw207phUKhvHjxAknuDh8+/NGjR5aWloQC4dKlS1ks1qhRo1xcXLZv3w4AuH37NvETu3fv3tbW1uRf06tWrQoMDESbraysvHHjxujRozU0NAiDqKgoVGOFQqGQV7x3L2ft2ihiNiwsOymp3NY2KC+P+vJlWUbGR9ReX8/atSs2NDQzLCy7lZ3S1lY2MtICACgqyhoaar1+XdG/vzoAQFNTKSenDgAgI/PJtZdXora28qJFd2JiCgEAenp6V65cadOB7HDs7e0DAgJWrVrl6enZu3dvNTU1BweHJUuWEIeRbAAAMDc3/+uvv3766Sf0QpLPFxJ
73U7vIpcE4vXr16iigqamZk5OztKlS62trRUUFIYNG9au3f5MDEKh8K+//qqtrV2wYEFpaSkyQPWJnj59OnnyZG1tbSMjIwCAoqKioaFhR8QAABDfTRQDjUbr2rWrjIyMpqZmQEBAv379WhQRlRR9+/atqamBEFZVVaWkpMjLyysoKNDp9OzsbAAAhPDt27eoMSUlBQAgLy/fccH8i8CZT6oxMjKysLCoqanJzs5mMBjkTzUAAGVl5VWrVr1+/frx48ccDkfcgIySkhJxI46Pjz9+/HhwcDCxlMfjNTU1tbjikCHdhcL///bm55fK4QjQy0919S4jRnwqIaah0WXpUlMAwKtX5V/cr6ysupkzDdXVFYVCiG76MjIUkbdeZWXNv/8+7swZm3374lELUV68s8Hlcu3t7alUKnrm1tfXNzMz27dvX4sGAID58+eHhoaampo+e/YMALBkiamkvItcEsiAkEWWQUeZQnFycjpz5gx6vJAI5BgKCwstLS1dXV2dnZ0vXLhANouJiZk6dSqazsrKmjlzprq6ekfEAAD43G5GRETMnDkTACAjIzN16tT09HSiUFFHcPny5YKCgvDwcHNzc7JwNrrakXp1x3n/94IPilSjra1taWlpZWUVEhJibW394sULLvdTdw/iG7isrKyFhYW8vPyMGTMeP35MrCvykTw0NHThwoUAAAjhwoULlyxZ4ubmRiyVl5dns9niAaSn14okJDqdO22avrv7KA2NLpDUGyUvrz4w8IOFRW9ymmyRujpmeXnzzJmDaDTuyJG90CeuxkY2ehYk6NpVsaGBraWlRHSWUVFRQXXXOhvBwcEWFhbBwcGJiYnoN4qnp6eRkVF+fr64AbHWkCFDUE0cObl2/ZuLb5y4JNDsyJEjUbG3xsZGIyOjsLAwd3f3ffv2RUZGtsfv52Lo1atXeXk5AMDQ0JBcZE4gEAgEAhRVXV1deXn5zJkzia4fko0BAPC53UxKSho1ahQAgEKh9O7de/HixTU1NZKKQRxLS8sdO3akpqYePnx41KhRPB6PzWarqakZGxtHR0czmUyi0dLSEvzdwwtK/WA23MNFSsnPzy8oKLh7925SUlJOTo63t7eent7OnTvd3NxmzZolEAh69eolJycXGRk5atSouXPnysjInD592sPDg8FgDBgwgM1mT548OT4+PjMz89atW9XV1UpKSps2bcrJyamoqHjy5MmmTZvmz5+/fft2T09PVBJl4MCBEMKPH5lpaTUyMhQmk5eXR+VyBUOH9sjPr6fTuaqqCgCAKVP0bGyCpkzRW73aUltb+fr1tP/9bxgAoLKS9uJFmba28uvXFenptUVFDWVlzffv55qa6pSVvUxLSzt48CAAgEbjLlgQoq7exccn2d7ecM0aS1/fFIEA2tkZKivLFxQ05OTUZWXVDR6s7ek56dKlt3p6Gu7uowAAEMJ+/fr90HPyWWxsbC5evKiiomJra2tkZNTU1IQSgIGBga+vr6mpKdmARqP9/vvvtra2ysrKI0eOlKz3V69ekS8J5H3NmjW+vr4CgcDOzk5ZWdnU1DQ2NraystLBwaH93sVjUFFRcXR0vH//fmpq6vr161EM48aNi4uLmzhxIgCARqMtWLBAXV3dx8fH3t7e1dVV4jEAAMi7mZqaiq55Op2uoqICABAIBNu2bZs8eTKfz7e2tpZIAC1y9+7dhw8fopMyYMAAFxeXyMjI48ePy8jIODg4REREHD9+/OLFiwCAo0ePstlsPz8/AEBYWJipqamamlrHBdbJwSPZMaJQqVQtLS1iWkNDg/wkxGAwZGVl21Dfq7KyMiMjY9q0aV+0ZLP5XbrIAQAEAkih/P+XOQ5HoKgoi/6iFoEAyspSuFxubGws8Q1SBIEAcjh8ZeUWPm9wOAI+X6iiIg8AiIyMHDx4sL6+/rfu1/cBQshkMtFdFQBAp9NVVVUBAAKBAJ0dsgGNRpOXl5dgDTbyxsmXBOFdIBBwOByiJyeDwSB
C7YgYAADNzc3KyspycnJEDGw2W0FBoUNf7onEQN5NPp+PguHz+YqKigAAGo0mJyenpKTUcfEAUmFCAqJ0IofDQZGgz734Cx8ZnPkw34/y8nJNTU1J9XQnINJAm2EwGDU1NZ027WEwGMmCMx8Gg8FgpAvcwwWDwWAw0gXOfBgMBoORLnDm60SEh4cPGjQI9YFOTEycN2/ey5cve/fu7ePjc+PGDTc3t9LS0tDQUAMDg8uXL7u7u9+/f/9HhyzK1wgV8nhCICatCQDg84WoEQ0zaIOYpwgQQi63tfF54jF8vTE5PKEQksdaQAjbH7w4DAaj6W9QS3V1tcjXiubm5tYNJAUx+oVAfKxni+NYJAgU0+0kx9DK2FPJ0tG72X7ENVd/VCSdiO8pGINpHT6fP3369PHjx3O5XAihj48PhNDAwKCxsRFCWF1dTaPRPn78aGxsDCH88OHDiBEj2uPu9OnTw4cPz87O3rVr18yZM5HYlbe39+7duz98+NCrV6+QkBBkyeVyN2zYMGPGDC8vL1dX1+Li4oMHD44bN+7y5cvLly9PSEggtrl69er6+noRR+np6e/evYMQenklenrG3ryZvnnzowMH4sk2TU3siROv5ufX37mT9csvdyCEly695fPbrrklEAhdXcPv389Fs0wmb+XKiDlzbtbVMS9derts2d3CwobHjwsKCkSjbZH793MXLbo9fvyVkpJGCOGePbFPnxbt2xcHIUxKKj97NunPP1+/fl2BjI8cSbh7NxtCGBMT09zc3OZdEMHU9NOAdBMTk7KyMmtr67dv344YMQKpZEEIa2pq0ODIcePGtWjQNs6fP+/o6Ojs7Dx69Gikl/bixYuNGzcSBo2NjTt27IiMjLxy5QqEMCcnx8bGxs7ODkmISQTxGM6ePXvnzh0Gg9FiDMePH3/06NH27dsJES+JxyC+m69fv545c6azs/OoUaNSU1N//vlnZ2fnGTNmXLx4UVIxiOPu7u7k5OTk5LRs2TLxowQhvH///qJFi8aPH19SUgIhPHv2bGxs7JEjRzoupH8FOPN1Ivh8vp+f34YNG1avXg0hPH/+PIRw4MCBN2/eDAsLO3r0KISwrq5u4MCBhYWFmzdvRi1tJi0tberUqRDCmJiY6dOnb926FUKYmZn59OlTCKGBgQFZdfPGjRseHh4QQk9Pz927dyckJMyZMwdC+OjRo1mzZiGburq66dOnHzt2jOyFw+GcOHECQnjnTtbixXdQo0AgDAh4DyHk8QSEFOeyZXfz8+uLixtR5uPzhSjzCQRCJLkJIeRw+Gx2y0qeLBaPMIMQ0micP/54SWQ+COHFi2+2bn1cWtp09Ohz4r6AdEFZLB6EsLmZAyEkb4QAiX/6+78/fvxFXFzxwYPPIISenrHx8cW2tkFcroDN5v/8cyCEMC2tZtOmhyjzCQSCkydPfsWp+DJcLtfb2zstLe3cuXOHDh0KCQnp0qVLdnb22LFj37x5g2xu3rz5/PnzmpoaOp3eokHbIEtiQgibmprOnTuHLgaEiG6niGqoRBCJQVy3kxxDenr64sWLIYRBQUGnT5/uoBjEd5Os21ldXY1+v3p5eZWWlkoqBhGEQiHSwn3z5k1gYKBIhAh0WPz9/Y8fP96i6Kt0gt92djpOnDiRnZ199epVomXUqFEWFhY9e37S8eJwOOfPn6dSqVu3bm2PI7J+5tmzZ589exYSEiIjI4NGRImoa6JpKpWanJw8atQoCoVSU1Nz7969gICAFStWIJvw8PCgoCA/Pz/yi6bbt28PGjQIAHD7duaMGQNRo4wM5Zdfhr95U3XixMuXL8vIup0IGo1rbe1fXU1PSqp48qTI3T3y8ePCo0efr1x5//DhBGRDVvI8cyZp9eoHp0692rLlMQDg0KGEpKSK5OQKkf3NzaXu3v10y5ZxFApFKIS///44JCSzqYljaXkxKChtyZIwH5/kS5febt78SCQeKytdAACXK7Cw6E1ogfbvr5GSUllR0Sw
vL6OoKFta2sRk8oqKGgcP7v73bsq8ffuWSqW27QSRkZeX9/DwGDp0aFJSEnqSGDFihLm5ub29PTFcPTw8fPz48VOnTi0qKmrRoG2QJTEBAA8ePBApaiGi2ymiGioRyDG0qNtJjqGysrK4uBgAoKOjU1RU1BExADFxVPBP3U4dHR00fq60tFRXV1dSMYhAoVCQFm54eLidnZ1IhAhUk4HL5VpYWLQo+iqd4MzXiUDyS7Kysrdu3Tpx4kRDQwNq19LS6tu379KlS5FGvoqKytGjR6lUqgQVlhUVFW/fvr1z585WNAazs7Pv3bt38OBBe3t7AICGhoaZmZmxsXFERAQAQCgUpqSkJCUl9ejRIzQ0lFgrMzOze/fuAAAGgyfy9evkyZfz55vY2g5KTa0uKGggL1JTU+jTRw0A4Ov7WiiE06cbcLkCY2PtYcN67Ns3GdmQlTyNjLQNDbV27pyQnFxRVNRYWto0deqAUaN6i+yCgoJsWlrt3bs5AAAZGQrSM1NXV+zTp+u8eSYLFw6lUCjr149OTa0W330mk8fh8KdM0RPRAiXuIPLyMjdupI8e3YdG4zQ3c9DOGhgYZGZmfvH4fyUMBqOiokJfX5/L5Q4YMGDOnDm7du16/fo1Wurr65uUlKSvr3/48OEWDdoDksSMjo4eNmxYQ0MDi8Ui5OtEdDtFVEMlCIqhRd1Ocgza2tpz5sw5evSon5/fiBEjOiIGICaOSkDodgIAysvLUWbqaBobGwmFUrJ4KYLJZHI4nClTpsCWRF+lE5z5OhGhoaGPHj1iMplaWlo3b96Uk5PLyMioqqoKCgq6efPmqlWrCgoKnjx5UllZmZOT4+/vf/LkyVu3brXHI/pvQRm3V69e/v7+a9asETdLT0+HEBobGy9fvpx4gFBUVOzfv/+GDRuuXr3K4/EiIiJ+++238ePHHzt2zNvbm1hXW1u7trYWADBjhsHjxwVEO4vFl5WVyc2lAgB69lRVUhIV0kMZhccTDhvWY9GiYSNG9KRQKLKy/3/FtqjkSaFQIIQ1NXRimlgEIRwwoNvDh794esai+gwiP3uJjYv3fBEK4YMHeStXWpC1QCsqmkeO7NWrl6pQCPl8YZ8+XZubOb6+KfHxJQ8e5FVV0ckHWSLcvn179uzZAIDg4GAZGZnAwMB+/folJycjhcbq6mpLS8sbN27IycmJGLTTLyGJyWKxQkNDr1+//v79e6T9D8R0O1EjoRoqKYgYWtTtFIlh48aNGzZsqKqqQr/SJB4D0SK+m4RuJwDg9u3bEpRw+xzv378fPnz45yIUCoUPHjxYuXIljUZrUfRVOsG6nZ2I//3vf6hULABg+PDhJiYmcnJyRO81Z2dnNOHk5IQmMjIy2uzr1KlTTU1N//vf/zgcTkREhJ6e3qZNm8aMGXP48GG05aqqKj8/P01NzZcvX06dOjUhISEjI6O2trZHjx58Pv/Jkyf5+fnXr19/9erVoUOH6uvrfX19/f391dXV9fT08vPz/f39lyxZAgCYNm3aw4cPAQArVpjn5lJ37nxqadmHw+GbmfXcutXq3LnXFArFykpXVVWhoKA+La0GQlBS0lhW1pyfX//uXdWvv5rZ29+wtzd0chr6/n11cXEjoV5GVvLU0lIqKGjIy6svKWns1k2JQqFs2fK4spIGIbSzMwQAsFj8589Lq6vpMjIUD48xc+bcPH/e7t27KiUleVvbQeXlzXl51MzMj+XlzSUlTWVlTW/fZm7dup5Q6F658n5lJS0oKK1fP/XTp39+9KggNbWaQqFMmtSfzxf6+79XVpbfsWMCeil67dp7DY0uurpdAQB0Ot3YuIWiu20jKCgIlTmdNGlSREREdHS0mZmZnZ2dsbFxeHj4hg0bunTpsnTp0gMHDjAYDMJg3rx57fRLSGI6ODg4ODiUlJScOnVqwoQJ4rqdAoFg1apVElQNFY+hRd1OEe3QvLy8W7dunTt3TrLZlywNShZHFdftROTn5xsYGEgwgBY
JCwtbu3atSIQAADc3t9OnT69bt66ysjIoKKhfv37e3t5k0deODqwzgzVcpBQIIYvFalFIDCkQStDXzZs3HRwckIQgny9kMHjq6opEGGy2QPyBjwyHI1BQkGnxs4S4kicBk8lTVJSTlW37x4zw8PBZs2Z9bimNxlVT+6SXyGLx5eVlxEsiFBQU5OTkIIHjjoDFYiFZSEKhkc1mk+U6CYN28jlJzBZ1OyWuGtpiDOK6neQYKisre/cWfdct2RhEdlNctxN0jH6pOGQv5Ag/94/cfrW//wA482G+B7m5uRKsEfodEAqFbDa7nRKjaWlpEqzOisFgJAXOfBgMBoORLnAPl5b5bgIQnRChUIhUHr5V6wGJWfyXDh2H09oRIPesEem2KvKDsnUpma+HzWYTPX75fD4av0U24HA4hMHfQQqbm5vb71rkYviihgsUE1iReAziLsgGHaRU8sUYyDJG3+d/AQ1QI2Y/t+OdX2vme4IzH6ivr3dyclq+fPnZs2eXLl1aVVUFALh27dp/W+MnKyvLxMQkLi4OAFBaWmpnZ5efn89kMj08PM6dOxcaGurg4BAXF3fo0CErK6srV664uLg8f/6cz+cfOXLE3Ny8sLBQZIP+/v67d+8GAEhwrAUAICMjIzU1NSwse8CA0zdupF+8+PbAgWcMhsQGioHPpCWBAG7a9CgwsOUxHpmZH93dI62srkRF5QMALl9+FxWVf+DAs/p6FgAgLCx7xYoIwphKZTk6BgMA0tLSCgoKWtzg13Dw4ME///zTx8dn3759Hz9+HDt2bG5u7oQJE4gklJiYuG7dOiS+AwBgsVibNm06dOgQh8Nps1MAAJVKPX/+fExMDDHG9OXLl9u3bycMmpqadu7c+ejRI8LAx8fn7t27EiyVJx6DiAsRg4CAgMTExL179zKZzO8WA5vNvnDhwqpVq1Df5hMnTsTGxu7YsUPivwDIxMfHR0VFXbhw4c2bN+IRAgD4fP78+fPt7e3v37+fkpJiZ2e3cOFCS0vL7Ozsjouq84MzH9DU1Bw2bJipqenatWv79u178+ZNAMCyZcvQUj6f39jY+APD6yAGDx5sZ2fn4uJSWlrar18/GxubgQMHbt++ffjw4WvXrnVycjp37hyEcNKkST169Fi+fLmzs/OJEyfk5OSmTJnSq1cvkVJ2DAZj8ODBaJo4dOR/eOKxg8vlEjcjJpNJ/ECm0WjiQXK53KioKDMzsylT9OTkZBYuHLpixUh9/W7Ozp/GCzY2sokRCBBCGu3/H0QaGj79wkWaLyzWP359C4UQpc9nz0oCAj6Qn95oNC6fL5SVpQwfrvO5o9fQwP7zz5m3bs07dy6ZweDdv59rb284YUI/H59kAMD48f2amv4/2Tx9+mkw9bBhw8LDw9v8HHDz5s2MjAxzc/N3794FBQVRqdSpU6dmZGTcu3cPGRgZGV24cGHw4MEVFRXofkehUHbv3o3GU7aZhw8fysnJTZ8+PSkpCQDQ3NycmppKfrI5deqUra2tra0tkhM7cOBAv379HBwcJFiIUSQGcRciBoGBgaNHj9bW1m7PT41vjYFGo7m6uvr4+Lx69aqoqCgtLW3atGlDhw6V7G9BEa5fvz5o0KDx48e/fPlSJEJEcHDw1KlTg4KC5s2b17Nnz/v379+4cWP8+PES7HL8bwRnPgAAoFAoWVlZQUFBGRkZc+bModFo1tbW1dXVb968OXHixMuXL4lOw/8ljIyMvLy8HB0d2Ww26gN2+/ZtorJ57969ra2txbVaRLRdAADXr1+PjIx8+vQpAKC4uHjChAkAgDNnzhQVFV28eLGhoSEgIMDX13f//v1xcXE3btzYtm1beXl5eHj4u3fv/vjjDwDAtWvX4uPjbWxsRCIk9F/IHhcuHPrsWUljI3vXrqfv39esXfsgNbW6sZF9/PjL48dfLF16t66OuXnzo4KC+s2bHzU0sC0s/goOzrS3v15d/SkTk3Vhnj0rycmpIxLVsWMvcnOpv/5
6r/VDR0i6mJv3zsr6qKWlBP6WdAHg/4vIAwAePiywtv7/XwkqKipt1hnfsWNHYGCgh4fH5cuXdXV1qVSqQCCAEKK3FAAATU3NP/74Iysra9CgQS9evHjw4IFQKNy3b187n/ns7e0DAgJWrVrl6ekJvqThkpWVJS6w0n7IMbSo4SIS5NKlS62trRUUFCTYw+iLMXTv3p3BYGzfvn3BggVKSkodoSMjzrp165ydnYODg1euXClyEBADBgzIzc01NTUtLi4mq8x0XEj/CnDm+0SfPn3Mzc2VlJQSEhLU1NSQ8sLJkyfnz59va2ubmpoqwR+PnYfZs2fb2dmtXLkSfSdgMBjiTyQiWi0EMTExkZGRtbW1V69eXbBgAZKN0NPTQ93oi4uLw8LCnJycAgMDGQzGkCFDNDU1R4wYYWFhASFMS0sTCASnTp1ycnJKS0tLSkqSl5efOHEiIQuCIPRfyMjIULp0kUMKn5Mm9Xd0NDl5MvHlyzIuV7BixUhd3a7a2sobN45lMHipqdXdunXp3l3FyWnIjBkD3779lCHIujC6uupGRtrdun3qm+7qOpLHE5SUNIo8I7bIgwd5O3ZMEJF0IRsUFDTweAKhEHK5guZmDgBg0KBBaWlpX9xyizCZzDVr1tTX1y9btszOzm7r1q1btmxpamoyNzcnbFavXu3o6Lh169aUlBRNTc2TJ096eXmFh4e3zSOCy+Xa29tTqdTLly9/UcOlsrJSXGCl/ZBjaFHDhWwAAKBQKE5OTmfOnCF+FnyHGAAASkpKS5YsOXLkSIfqyJBhsVguLi6RkZEJCQkiBwExduxYLy+vkydPEo1klRmpBWe+T3Tt2tXY2Hjp0qXo1QS6hcnKyubm5gIAevbsKZFxUZ0KpDe4Z8+epqam6OhoAMCMGTOIsdsAAHRrE9FqIZaqqKioqqrKycl9/PhRKBQSgino0O3cuVNLS8vV1VVRUVFdXd3Ozg6NPi4oKDAwMBAKhZMmTVq7du2sWbNkZGQEAsGMGTM8PDxEnk4I/Rcyz56VWFj07tZNCem/9OihoqwsP22aQX09Kzu7bv36MXy+cMeOJ+PG/UMsUUaGQnQCIOvCgH/2RjlwIL5/fw1NTSXy+88WiYkpXLhwmIKCjK6uemMjG/wt6UK2aW7mvHlT5eOTXFradO9eDmpss6TLzp073d3d//jjj1evXikqKu7YsYPJZM6bN2/cuHFIw6W0tFRJSWnbtm0aGhq6urpcLpdCoWhqaoofw28iODjYwsIiODg4MTHxixouY8eOFRdYaT/kGFrUcCEbAADCwsLc3d337dsXGRn53WIAAMjLy5uamlpYWHC53A7SkRHB29vbzc0tKirK399f5CCQIWvNkFVmpBas4QLq6+tfv36tqKjYrVu36OjoHTt2NDQ05Ofnv3v3buvWrefOnaNQKFZWVh0xMPYHkpaWdv369ZkzZ+rq6gYEBGzbtg0AcPr0aQ8PDwaDMWDAADabPWXKFBGtFgqF8vjx49zc3MLCwrFjx6JNLVy4cNGiRUZGRkVFRTk5Obm5ueXl5RcvXpw9ezaXy509e/b06dPfvn07bdq0oqKi4uJiZWVlNGFmZjZ//nwTE5Pi4mJXV9fRo0cT4tcIQv8lJqaQSmXeuJHOYvHy8ur9/R20tJQmTOh/715OWlrNli1WOTl1ublUeXmZpibOjBkG795VX7uWWlfHTE6uKC9vLixsSE+vpVDAzJmDAABkXRgjI62jR5//9NMAff1uAICsrLrAwA90Ovfhw/z376sBAByOwN098vLlWZMnTw4MDETviwICPvj6vtbRUeXxBCEhC6ys+j17VpKUVLFu3WgAQHx8cVlZU3U1fcSInii5pqZWL148HACQk5Mjoqn49SxZsiQ4OJjH4+3du7eysvLq1as6Ojre3t5sNtvBwSEsLMzFxWXixIljx469ePHiwIEDL126FBYW1q9fv19++aWNVwkAAAAbG5uLFy+qqKjY2tp+UcOle/f
uZIGV9vj9XAwtariQDQAApqamsbGxlZWVEhQPayUGpOEye/bsgICAcePG2djYdJyOjAjz588PCAjo06fPvHnzhg0bRj4ISMNl37595ubmVCp15cqVAAARlRmpBY/n+wIQQjab/d974BOBLPfAYDBkZWW/SYODyWQqKioiKQ0EWSNGKBTy+XwFBQXwt9oIh8NRUFAgi8iIiI8QkPVfWvLLU1aWBwBcv542aZIelyuIispbtGi4kpKcgkLL2i4Isi4MlytQUPhkJhRC1GVGVpZCfK7j84VycjLPnj0bPny4hoZGixtsbuZ07dpykARsNtvf39/Nza11s1bgcDjy8vIyMjKEaAvRrqioKBQKGxsbNTU1SVE1S+SLDoSQyWSK3zFb1E8BJIGV9rv+XAziGi4iBh2hn/K5GMDf/0FUKlVDQwPF00E6MuKgh3uk1UmOEIXE5XJZLBbxpkFEZUZqwZkP09n5Gv2Xv/568/Ztlb5+t5Eje5F7lEgQGo2mpqbWni1kZGQMHjxYgn39MRhM28CZD4PBYDDSBf75icFgMBjpAme+HwAxJI4ihkijyCyPx6NQKAKBgLxUZIOtb+HrbT63Shu8tN9pW+OUa3UVGdKs7OecCgTw63ft+5wCGo32lavw+XxJOf2aVZC773PeIYQtrsJmszti11o80cR/omR3rXWnIjYMBoM8iw6LyCromBBUV1eTXUgnOPP9AAidPfhPbt68aWBg8Oeff7q5uSUkJDx58kRTUzMhIQFC6OXl5enpeefOnc2bNx85cgRC6OPjc/78+evXr2/YsKGkpITYoMhmW5wFAKSnp/fq1evs2bPXr193c3MrKCg4ePDguHHjLl++7OLigsYG7d69e8SIEcSKR48eHTlyZGlpKZrlcDi7du2aOXMmGgLo7e29e/fuDx8+9OrVKzQ0FNlwudwNGzb8/PPPXl5eK1asKC8vJyKJjY0tLy+/dy974MAzaIz5y5dljo63qFTmN+0Lh8MX2TUIIZ8v3LTp0aVLr1tcJSOj9rff7o8ZczEqKh9CeOnS2/DwrP374+vrWRDCsLBsF5d7yP7s2aSEhJIjRxIghB8+fEDFPMmRtD7b+in4oo34Kvfu3bt06RKTyYQQ0mi0gwcPJiQkoPHsAIBnz5798ccf1tbWSHVoz549CQkJ+/bta6dTCGFGRsaxY8eysrIAADk5OWPHjhVf5ciRI5GRkaixrq7Ozs6unU7JjefPn3d0dHR2dh49ejSEMCcnx8rKSmSVXbt22dnZof+OkJCQXbt2LV68+Ju8fHE2MzOTOA7+/v6JiYmenp4MBgPZFBQU+Pr62tvbP3v2DEJ4+fLl8PDw/fv3U6nU9jht/eqysrJCdxUTE5OCgoLRo0fLycmdPHmSWOXevXsmJibq6urx8fFlZWXW1tZVVVUjRoyora2F0vypC2I6DVVVVWZmZhDC2NjYCRMmQAi1tLQghHfu3CH+hwUCQUBAQFRU1KJFi1BLZmYmcSf6JgwMDBobGyGE1dXVNBotISFhzpw5EMJHjx7NmjULQvj48WNzc/N79+5BCDkczvLly2fOnEneQkxMzPTp07du3YrCePr0KdpsQ0MDYXPjxg0PDw8Ioaen5+7du1FjdnY2yo58vnD69IDx469wuQIIoY9PMo8nYLP5QqGQyxXweAIIIZPJQxMEAoGQTudCCOPjiy9deisQCIlFzc0cZOznl3rp0tsWd/z581IIYUlJ48yZQXQ6d86cmxDC2NiiffviIIS1tYx584IhhBUVzYsW3YYQurlFCIVCCKGXlxePx2vDoZYIT5482bJlC5rm8/mOjo7kXxIQwqqqKgjh/v373759GxcXd/DgQQihp6dnfHx8e/yisQF8Pp9omThxoohNWlrapk2b7t69i2aDg4Pt7Oza41QEpM3N4/HQxSYeQ3Fx8erVq9FPQIFAgG7rjo6O6GeBRBA5DtOnT+dyuWfPnv3w4QNqQcc/Pj7e29ubTqej/6bY2Fj
046Mj4HK53t7eaWlp586dO3To0PPnz4VC4ZYtW4YOHUrYJCQkMJnMiRMnrlmzJiQkpEuXLtnZ2WPHjn3z5k0HRfWvAD/zdSLQywc2m/3kyZPRo0cTLWRRMRkZmV9++eXu3buTJk1CLYMHD87Jyamrq2uDu+jo6Lt37/r5+amqqlJaEirbvHnziRMnAAB3796dO3eu+OuRs2fPPnv2LCQkREZGBvVaFHmLgqapVGpycjIxftbLy8va2hpN/+9/w0aN6r1hQzQAQE5O5v37mtGjL3K5wr/+epOVVRcWlp2UVG5rG/T9tcd691ZTU1N0cLi1ZIkp2ov2aI+1Hy8vL21t7UWLFsXExLx7966qqiogIMDV1ZWQ3enZs+eLFy/S09ONjIwIRbH+/fsTQ87bRlBQkKam5ubNm728vFCLSPdUJpNZVFRECLc+fPiQOLmSAg2jfPr06eTJk1uMAULYt2/fCRMmhIeHy8jIaGpqBgQE9OvXT4I9aUWOg7hAWs+ePYuLi8+ePTtr1qysrCwtLS0giePfCvLy8h4eHkOHDk1KSnJ2draysqJQKKqqquQxo+PHj1dSUpKTk5s6deqMGTNGjBhhbm5ub28/cuTIDorqXwHOfJ2L+vp6pCd77NgxolFcVExklkKhtE0HedSoURYWFj179kSz4kJlaLj6q1ev6urqCDNCtwwAoKioePv27Z07d3740HJZAwBAdnb2vXv3Dh48SIhZ5Ofnk6VMTpyYnp1dd/VqKgDA3LzX0KE9Cgsb1NQUhg3r4eeXyuEI5swxRmUQwHfUHoMQ6ut3MzPruW9fHGppj/ZY+ykrK/v999/PnDmzb9++1NRUZ2fnbdu2KSsrx8bGEjZDhw7V19e/dOkSWVGsnZ9z0GBtb2/vW7dutSgsfuPGjdGjR9NotObm5tzcXB6PJxQKuVyuRKojkYmJifmcDoCent62bduSk5NPnToFAJCRkZk6dWp6enorl+W3InIcKC0JpOno6CxatGjLli0SPP5fhMFgVFRUIBF5Op1eUFBw9OhRkcjHjBkzZ84cLpc7YMCAOXPm7Nq16/Xr1x0aVScHZ75OBIRQU1Pz119/nTFjBvm3qriomI2NzatXr9BsQUGBrq4ukZa+CS0trb59+y5duvTt27dATKhMIBAIBILNmzcvX74cvVxCaxG6ZcigV69e/v7+a9asEd9+eno6hNDY2Hj58uXk35hdu3ZF36IEAqFAIJSVpdy6Nf/EiRcNDSwAwJYtVnv3xvXooQIAoNO506bpu7uP0tD4lN6+m/ZYdjaVweB6ek4yMtLOz69HjW3WHms/Xbt2bWho0NLSYrPZenp6SEhWRD1LXV3d3d29pqaGUBSrqKho5697wpeurm59fb24QXNzs6+vb3x8/IMHDwoLC9+8eePj41NaWkoUkZAI6GJD47U/h46ODhr6SaFQevfuvXjx4pqaGkkFIHIcWhRIU1JSmjNnjpKSkrGxMbrC23/8v8jt27dnz54NAIAQHj16dPny5bdv387KykKadvX19devX581a9aJEyeCg4NlZGQCAwP79euXnJzcoVF1crB6WSfiyZMnpaWlWVlZ6MVRbGxsfX19QkLCihUrcnNzd+7caWlpyeFwzMzMHB0ds7KyvLy8+vXr9+TJE1RZ6ZvIyMioqqpCL3Di4uImT56cm5tLFioTCoURERF6enobN258+vTp0KFDDx06lJubW1ZWhh4EORwOMti0adOYMWMOHz5MbNbPz09TU/Ply5dTp05NSEjIyMiora3t0aMH4d3GxiY3N9fS0jI0NPPRowJn56FaWko3b8579KgAAIDKA02dqg8AmDJFz8YmaMoUvdWrLdG63017zMhIq6mJk5hY3quXqoFBN9A+7bH24+npeenSJT09PXd396lTp96+fTsxMTE/P//gwYNIp+r8+fN9+/al0+keHh7du3d/9OhRamoqhUIhXoy3DXd396NHj2pqahobG/fv37+goCAnJwddpUg
5bMOGDQCAa9euaWho/Pzzzz///DMAIDU1dfHixZLZcwAAAHFxcRMnTkTT4jFkZWXxeDw1NbV169YJBIJt27ZNnjyZz+dL8L2ryHEgC6ShGOrq6ioqKnR1dd3d3bt27WplZfXs2bOkpKR169ZJKoYWCQoK8vf3BwCgN7GHDh1SUlJCv37CwsL279+fmJh44sSJadOmnTp1KiIiIjo62szMbN68eR0aVScHj2T/18Dn8xkMBvmZQ7zlx0KWQGsdoVAYFBQkfmdEOmECAbx5M33Rok+fT9hsfpcu/9js99Qeo9O5qqoKQBLaY+2Hw+Hw+XxCPYtKpaKPSYROFZPJJO9j+3VnEEKhsKmpSVx/klAO+w6w2WwFBQXx73YoBgghlUrV1tZGjTQaTU5OTuKigyLHgRBII2Koq6sjVxeRlHpc2xCRuCNgsVj/eTnGL4IzH+bHwGAwGhsbUTUoMo8fF547l/zXX/bobaekwNpjGAyGAGc+DAaDwUgX+AcsBoPBYKQL3MPlB4A+UMG/dYbIiDS2OEs0tj77NVtov9NvX4UCAPxWpy3asFg8JSX5z62CPu+RVpEBQPj3rAwAwr9XkUHxSGLXPs0SPdp/+CkgPgS232mHxtkGpzwej6jL86P+C4gYOtvxbH0V8tdZqX3nhzPfD4C42lq87EQaW5wV2cLnZlvfwosXL27dumVpaXnw4MFp06bdv39/8+bNubm5t27dunz5cmFh4aJFixwdHePi4iCEpaWlM2fOPHny5JEjR0aOHFlYWIi2c+3aNVSEjOylsbHx/v37lZW08eOvTJmix+UKXF3Djx9/0dDASkmp1Nfvhrpifk2cHA6f6MBCtiksbLC0vPi5XfPySpw795aDw00IYWMj+/jxF1ZWujU1DADAnj2xkyfrJSSU7NkzKSGh9I8/XkRH54eGLpCXFzx69OhzB5DL5W7evDkvL+/jx48iNqtXr0aaNQCA9+/f79mz58yZM/Pnzw8ODv7Kk9iKDXnWy8tr9+7d+fn5YWFhDAbj4sWLvXv3Dg4OJvSrTp482djYmJ+fP3/+fC6X6+zs3L179zlz5oSFhbXZKQAgIyPj4cOHxLCZPXv2TJ48OSEhYc+ePeBvTS8fH593795FRUWhOGfNmoV6fLTZqUijSAyXLl2Kiorav3//6tWrkTFSDnvw4AFS+0PqZSUlJajT41d6aX02IyPD29v7+vXr4jGgmohsNvv8+fNv3rwZPHgw6u969uzZPn36zJgxA1UubP/REN9CcHBwaGhoXl6enZ2drq7uzp07IyMjr169Sl4FQmhjY3Pjxg1FRcVr166tWrXKy8sLRSi9QIxUUlNTM3jwYKQACSFEWmIBAQFIaYzH4yEhqN9//11fXx9N+/j4QAgTExPJGmYbN260sbER2fjhw4eRatSiRbejovJiY4tu3UonliKlTRaLx+MJmEwehBBJkZFpbuZACLlcwcqVEYQyJ4RQKBQ2NLDQ9PjxV1rctY8fGdnZdRDC5cvvJSdX7N0bh+TK7OyuR0fnHzz4DELo6RkbH19cVUWDEO7fH//2bRWEsKioKDg4+HNHLCAgYP369SKNdXV106dPP3bsGJrduXPnzZs3IYQPHjz43HbajIaGxrVr106dOmViYgIhRAOonz9/3qLBhw8fmpqa0FCzurq69vhdsmRJQUEBhHDMmDGxsbHiomgohpKSkpkzZ378+DE7OxtCuHz58uTk5Pb4/VwMNBpNXBiMrBzWQeplX4yhtraWz+dzOJwFCxZACPfv34+U/zoUpF9aUVHh4OCwd+9edC7s7OzodDphExkZOWfOnMbGRpEIpRn8nU9KefTo0ciRI1Hn5rKyMmNj48rKSgqFUlhYeP/+/f3790MIAQBGRkZeXl6Ojo5sNhuNWCCLkyUlJS1evJjJZGZnZxNbrqys/PjxI6FkdvdudlJSxYIFQwAAQiH8/ffHISGZubnUceMuBwZ+mD494NatjK1bH9+7l0Ns4dq19/H
xxTY2QU1N7JcvyzIyPqL2+nrWrl2xoaGZYWH/704cbW1lIyMtAICioqyhodbr1xX9+6sDADQ1laKj89E00irr2VP1xYuy9PRaZK+np3flypXPbVZElQ0RHh4eFBTk5+eHNHQWLFiwdevWs2fP2tjYfN15+Ab69u1bU1MD/8554mO6yQbDhg3r2rWrmpqaqakpeuHZZlRVVQsLCwEAcnJyz58/FxdFQw+dXC7X3NxcW1vbyMgIAKCoqPjFesJtiyElJUVcGIysHNZB6mVfjKF79+4MBmP79u0LFiwQCoV//fVXbW3tggULSktLJRWDOHQ6nclk6ujoFBUVEZJ1mpqaOTmf/qFycnJ69+6tqqoqEmHHhfSvAGc+KYXBYBC3Ti0trTFjxnTp0gUA0KtXrzFjxgwfPpy4ZcyePdvOzm7lypVQ7N3svXv3Kioqhg0bhiSjEFlZWcSwKgCAunqXq1ffIQ0UGRkKyjGGhlrq6l2WLTMbNqzH0KE93NzMnz0rQfZpabVJSeXy8rITJ/ZXUVFQV++CtFoAABoaXZYuNQUAvHpV/sUdzMqqmznTUF1dkaxPBqGoVtnQoT309btduvQWrcXn8wUCATo+kZGRkZGRaLZFhEJhSkpKUlJSjx49QkNDAQDDhw9PSkqKiYlZtGjRFyP8Vi5fvlxQUBAeHm5ubv6VBiEhIbdv326n3/Xr1yNxVxqNRoyoExflevDgwY4dO9B0VlbWzJkzJTjSlByDsrJyizEQymGgY9TLviYGJSWlJUuWHDlyJC8vz9LS0tXV1dnZ+cKFC5KKQZwjR47s3bv32LFjI0aMEJdM43K5z58/19XVZbPZVCqVHGHb9A7/M+DMJ6VYW1snJiZyuVwAgLKysra2NvpWoaSkpK2tPW/evKamJh6Px+PxAAB79uxpamqKjo4mb6G6utrY2Hj8+PF79+59/Pgx+r8CAGhrayNJTwAAhPCnnwb4+c2ZNetGeXkz+Lt3D4GsrAz6y+V+SjCKirICAZwxw8DDYwx6z0kY5+XVBwZ+sLDo/UVxsro6Znl588yZg2g07siRvSoraQCAxkb2tGkGaJrQKlNXV3R3H4U+AQIAVFRU0Md/GRkZVVVVJOQtvn2kThkREfHbb78hkVVvb28AwJ07d3R0dMLCwlJSUtqgId46lpaWO3bsSE1NRXI5kPTJB+lUiRj4+fmNHz++uLj47t277fFraGjo4+PD4XBcXFw+J4oWExOzcOFCBQUFOp1eV1dXXl4+c+bMFkU+2x/D54TBCOUw0DHqZV8Tg7y8vKmpqYWFhaamZnl5ORCTl5M4NjY2x48fz8vLW7VqFXF2Ghsb0ZM3g8EoLy/38fHJz8+/fPkyOUL0vy+14B4uUoqBgYGXl9fatWttbW0ZDIa5uTmXy01ISMjNzQ0JCWloaLh3797Ro0evX78+c+ZMXV3dgICAbdu28fn8x48f5+bm5ubm7t+/f+XKlerq6iwWS19ff/v27b6+vrKysoMHD0b/VJWVtIyMj3FxxQcP/mRh0XvqVH9/f4d376qUlORHjepdVUWrqqIXFja8e1fVpYtcaWkThyNA7yeLixtdXcNHj+67YsVIbW3l69fT/ve/YWiDL16UaWsrv35dkZ5eW1TUUFbWfP9+rqmpTlnZy7S0tIMHDwIAaDTuggUh6updfHyS7e0N16yx9PVNEQignZ3hzJmD4uOLU1OrKRTKpEn9vb1f9e3blU7neniMAQBACPv164eOj5KSEln0Cx2czMzMGzdu1NXVxcXF/fnnn76+vv7+/urq6np6evn5+f7+/jExMUjFY9KkSeQHX4lw9+7dhw8fRkZGjho1is1m+/n5AQDCwsIMDQ0dHBwiIiLodDphcO/ePRcXF6FQCAB48eJFe/yy2ewHDx5wudy1a9cCAMiiaEi1C/Uu0dHR4fF4V65ccXZ2VldX9/Hxsbe3d3V1lcSui8ZAFgYTVw7rIPWyVmJITU198uTJ7NmzAwICxo0bZ2N
j0717d0dHx/v37yOda0nFIE5dXd3t27cdHR0tLS379evn6+srEAjs7OyUlZWRpp2npycAoLi4eMuWLfn5+USEqNON9PKjPjBixLlz546ent7169f/+uuv/fv30+l0cv3YFStWlJSU3Lx5U19fn6he236nVCqVyxXtYNIiX1+aLjY2tri4uB1BQRbrky8+X0guv8dm84m/hAGEkMPhREdHf25rfL6Qwfj/fUTdZyCEHA6f6C8DIbx//z7qwtBm2Gy2QCBobm5uz0Y+B4fDacVv6wbtoa6uTmTLxA6Si/Z1KOIxNDU1kWMQCoWoVwsRIdF76zvEAP/+76irqyMfE/TiRLJhiFBVVYWKRyKQoiE5JBFEIpRasIZLJ6KxsXHUqFF5eXkAgKCgoJs3b0ZERAwcOPDNmzfq6uo1NTUqKip0Ot3Gxubdu3dxcXF79ux59uzZj466ZXJzcyXYweGL0Ol09A2/zTAYjJqaGlTqBYPB/LfB3/k6EeRPSgsXLnz27FljYyNFrH4s+Gf12s7J90x7AIB2pj0AgIqKCk57GIyUgL/zdVJkZGRQZ0sAwKhRoxQUFJqamtAsUb122rRpPy5ADAaD+beCn/k6Kc+ePbOwsEAVZ8j1Y+Fnqte2GdR7s5W++63zrW/LURUhicBmt9YtW8QRny9scfqL3US/BtSRhORaANpxSFvh48eP5A6T4nVi2Wx2Q0MDuQUNQWs/5N1p8aSL7G9HdB0UcSEeBtmgg3rtk120eIrJfr/PyIFvOizge0XVycGZrxMRExNDpVJv3Lhx5cqVqKgof39/on7szZs3V61aVVBQQFSvbb+7Fy9erFu37tatWwcPHrSxseFyuevWrfv5559v3bp15cqVXbt2ZWVlmZiYxMXFAQBKS0vt7OxycnKOHDlibm5O3E/9/f3J9eIRjY2NkZGRlZW0CROu7tkTy+UKVqyIOH78RWMjOza2qLCwAXw1xGgHEZB62efWQuplc+feghA2NXF27nz66FHB1aupAABPz7iEhNL9++MBAAkJpd7er6ZNC2hq4jCZzEePHn0+DO62bdumTp2KeskfPHhw+/btKOEVFRWh7nOIa9euXb58ed++feRGieDl5XX27NmVK1eGhYVxudyFCxd2797dwcGBMDh48OCff/7p4+Ozb98+1BIREWFhYdFOv5mZmd7e3sHBwTdu3AAA5ObmEnppCCqVev78+ZiYmKtXrxItjo6O7fTbSgx1dXWOjo6ZmZmfi+HEiROxsbE7duyg0+kdFIOXl9fcuXPnzp1LzjSenp4JCQn79+8HAFy+fDkqKurAgQMtFrKXFMHBwXfv3j169Gh6ejoAICwsbMWKFZ8zYDKZe/fuffr0KYpQqvlxnWswPxKsXvat6mVhYWEuLi5oOj09ndDuunDhwtChQ4kjOXny5PLyctgBAmZfFCcbMmTI8uXLIyMjZ8+eDSEsKytzcXHR0NBop1+yahfqRjhx4kSyQVBQ0MWLF4VCIZI7gBAGBwfb2dm102/rMbi7u6en//9FRY6hsLBw8eLFqPH06dMdEUNtba24SFtcXBwh7fbw4UNxebOOgKxeBiGsra2dN2/e5wyuXLly+fJlCOGKFSvev3/fcVF1fvAzn5SC1cvao14mJyeHBryzWKwuXbrMnTuXUEZ2cXGZMGFCbGysxAXMvihOtmPHjsDAQA8Pj8uXLwuFwqioKIl8CSardjU3NwMARF6z29vbBwQErFq1Cj3mPnz4UIKj6NoQg5KSUnFxMQAAaXp1RAwKCgriIm2EeFj//v2joqLE5c06ArJ6GRA7LCIGxC5I9sj8G8GZT0rB6mVtUC+D/5TMBwDcuHFDRkamT58+p06dQo2//PJLaGjounXrJK5Z9UVxMiaTuWbNmvr6+mXLll27dk1NTS01NZXH46FxMm2GrNrVoiAZl8u1t7enUqkoQh6PJxQKuVwuSlES4Zti6Nmz55w5c44ePern5zdixIiOi0FEpI0sHkb+26I
MkKQgq5d90cDOzo7JZP7555/R0dESPDL/RnDmk1Kwetm3qpfp6OgQMlR1dXU9evQAADQ2Ntrb2zs5ORkbG0dHRzc3N8fExIwcOTIqKurs2bNffTa+ilbEyZB62c6dO93d3f/4449Xr15xOJygoKDo6GgOh5OUlNQev2TVrhYNgoODLSwsgoODExMTm5ub37x54+PjU1paeu/evfb4bXMMAICNGzdu2LChqqrK3t6+g2IQF2kjS7tNmzatRXkziUNWL/uigZKSkpeX1+TJkwcMGEDIFUkneFSDlILVy75JvQwAMHr06K5du54+fbpPnz4NDQ3jx4/39fVlMBjoJ/+QIUP27NkTHR194MCBhoaGioqKlStXSvaUkdXLyOJkT58+tbW1jYiIWLJkSXBwMI/H27t376pVq1atWhUUFLRmzZpffvmlPX5FVLsKCgpycnKysrIGDx6MlMNsbGwuXryooqJia2s7YsQI9DCRmpq6ePFiiey4eAxUKjUjIyMlJcXExOT8+fMiMQAA8vLybt26de7cuW7dunVEDDQabcGCBYRIm4WFxZMnTzZt2kRIu9na2ubl5RHyZpKKQRyyehkAID4+vqysrLq6umfPnki9jMFgEAYQwhcvXsTExCDpO2kGa7hIO/X19WpqauIlb8Th8/noU98XiYuLGzBgAPrm0TbYbH6XLnIAAIEAUigAvaIEAKDsiP6iFoEAyspSuFxubGzsjBkzWtyaQAA5HL6y8qd9pNG4amoKAAAuV8Bk8jQ0Po2bjIyMHDx4cOvj2RsbG7t06UIMtRQHCVxRKBRFRcVv2+cvweVyFRQUPucUueNwOPLy8hIszQMAoFKpampqLbomqntDCJlMpoqKigT9tieGyspKVDD5+8QASP8dNBpNTU0NNTY3N3ft2lWyYYhQXV2to6PT4gtVFBLZgMlk8ng8CdbQ+PeCMx+mQ8DqZRgMptOCMx8Gg8FgpAvcwwWDwWAw0gXu4fIDQO/c0dcgkUUijS3OEo2tz37NFtrvVLKr/D0nA6HgW1aRBUDQbqciq8gAAJHSw2dsZIilPJ5AXl6WWEr0bkdwOHxFRUUABN//FBAfn9rvtEPjbINTHo+HPk538v+C7+wUXXtfv4rUvvPDz3w/ACQiQEwQhISEGBgYXLp06bfffouIiIAQBgQE6OvrX79+PTAwcMmSJbW1tVOmTNmyZQvqRgFIhbnFZ1v0QrZ59OjRunXrgoKCvLy85s2b9/Tp04ULF7a+CjFRV1dnbW3t7u5++PDh9evXi1T8Iq/C4XDIs0+fPi0vL09Nre7R40RkZG5zM2fKFL/g4AxSiTF44ULyV+4Lny/cuPHhpUuv23w0uFyBeNhNTeyJEy/n51NbXCUjo9bdPXLMmIsPHuQBAPbsiU1IKN23Lw5CyGbzDxyIv3DhDTJubuYcPPgsOblCIODFxMSgsnZff5pEZlevXu3k5OTk5PTrr79CCGk02sGDBxMSEpBcDrLJyMg4duwYErc7e/ZsQkLCkSNH2uO0xc3GxsaSN5uRkfHbb7+NGTMmKioKQlhXVzd37lwkptUep+TG8+fPOzo6Ojs7jx49GkJ4/Pjx2NjY7du3oxEFyObOnTtovAGaraurI3RkvtJL67Pnz5+fN28eEUNOTo6tra2dnR0Sc0E2e/bsefr0KRJtCQkJ2bVrF1KTabNTcmOLWzh48ODJkyf379+/d+/ekpKSXbt2aWtrr1+/nrzKkydPXFxcCFGhiIiIbt26EQZSCsR0Gj5+/GhsbAwh/PDhw4gRIyCEZWVlZmZmaGl+fj6E0MXFJSwsrP2+SkpKhg8fTtSu9PPzq66udnZ2ZrFYRBpDPcEghEKhkMPhoDqcHA4HVbJduXLl3bt3IYSTJk168uQJGryMSqQ2NDSgjcTHx1+6dAndmiGE2dnZSCYNQtinz0kWi/fXX2+QchiEkMPho5KzaBifQCDkcgVkYbPGRjYxzWTyeDwBhNDPL/XSpbdcroDLFfD
5QrQFoVDI4fCbmlC9Vj45vaEto80ePpyQkVFLXtTczEGbXbbsbn5+fYuHDmmhlZQ0zpwZFBdXTJZDgxAGB2f4+CRDCPl8oaPjrfLy5r+dCk6ePPk1p6ZFhEIhEkV78+ZNYGAgn893dHRELQSVlZUODg7oyFdUVCxatAhC6ObmRq5c2ga+uNnnz59DCEtKSghZOxFpsfZTVlYGIeTxeFu3bv2cOJmIcJfEFdTIMUAIt27d+vDhQ/JvPrJ6WWxsLKqU6+joSFz/HQFZso5KpUIIPTw8kHwdIjY2VlNTs6qqitgLiWja/dvBz3ydCPR6qqioyN/f38nJCbUgecagoCA0HpmsodUeHj16ZGlpSYxSWLp0KQCgvLz82rVrM2fOBACEhYUlJSXZ2tpWVFQsX7788OHD+/fv/+23327dujVv3jzw9zvbwsLC6upqPT09BweHI0eOBAYG7tq16/3792vXrk1NTX327FlOTg5RX8nLy4vQtaJQKFu3xhgaaiGJlufPS3/66ZqnZ+yHDzUTJlzl84XOzqHe3omrV0dGROQCAA4dSnjypHDs2MsZGR/DwrKTksptbYOqqz/pEScllS9ffq+xkT1y5AWBAC5fHn74cML+/fG//RZ561bGvHnBxI4nJVU8eVLk7h4ZHZ3/7FlJamo1/PuX77FjL3Jzqb/++oXx11ZWugAALldgbt6bkEZDcmgA/P8AjHfvqqqq6AEB711dw/l8oYyMzNu3b4nx/t8KhULp06cPACA8PNzOzu7du3dVVVUBAQGurq6E9H5QUJCmpubmzZu9vLx69+6tpqbm4OCwZMmSdl4wX9wsErDmcrmEuIxkh1UAAPr27QsAePr06eTJkz8nTkZ22hEKauQYAADm5uZ//fXXTz/9hNQewD/Vy96+faupqRkQENCvXz+JHw0yZMk6TU3Nx48fBwYGmpiYEAb79u0bOXLkzp07X7x4IUFNu387OPN1Ljgczvnz56lU6tatW1GLiorKmDFjzM3N2198lQyDwRAvVtK3b9+VK1cqKSk1Njb6+flxOJw5c+Y0NTUZGxuPHTt2y5YtdXV1ixcvlpWVRW+Z4uLiEhISHj9+rK+vr6enZ2dnN2HChOLi4kmTJjk6Op48eVJXV9fIyIgYTZyfn08eS9Sjh8qePbEsFh8AMHRoD01NpaNHrYcP11FSkpOTkzEw0Jw0SW/1asuEhBI6nRsbWzR37uCBAzWHDOnu55fK4QjmzDGur2ehTQ0Y0A0AoKWl1KOHiqwsxdhYe+xY3S1brOrqmIsXD5eVpdBon4rm+Pq+Fgrh9OkGAgHs2VN19Oi+xB3c1XUkjycoKWlEIbXOgwd5O3ZMIEujiSSY1NRqZ+eh27aNV1aWj40tBgAYGBiQywu0jcbGRnV19dTUVGdn523btikrK8fGxv7tMXX9+vXe3t63bt1qbm7W19c3MzMj6ja0ma/c7IMHD3bs2NFOX60TExMzderUL4qTdZCCGjkGAMD8+fNDQ0NNTU2fPXuGFpHVyygUioyMzNSpU9PT0z98+CDZGMiQJesAANbW1nv27Dl27BgqKgIASElJmT17tomJiZubmwQ17f7t4MzXiYAQqqioHD16lEqlEtLJcnJyWlpaxsbGkydPzs/PJ9ujTyltw9ra+sWLF0QRNRaLRSySk5MTCAR0On3atGnu7u6oRiAAAI0XRhPohefkyZOXLl2qq6sLAKBQKLKysrKysrm5uQCAHj16KCsrg39+Qu/atSuSdELtmzePc3QcPHfuLS5XQKFQkJIZ+KfCmYwMBUKgqqowdar+gwd5W7daAQDodO60afru7qOIQeiEI4Hg/0vlERskq6PxeMJhw3osWjQMPWuSwztwIL5/fw1NTaUvqqPFxBQuXDhMQUFm8ODuInJoBHp6GgUF9QAAQ0MtZAMAaOcg4vfv3w8fPhwAoKenV1BQAAAwNDQkNNWIRl1d3devXzMYDE9PTyMjI5HL5lv5ms3GxMQsXLhQQUF
BglWBRBAIBAKBAPVqaV2crIMU1ERiQAwZMoT4YUdWLxs5ciSFQundu/fixYuJJNQRkCXrqqqq+Hz+mjVr+vTpo6ysjDTtdHV1uVwu0hSUoKbdvx2c+ToRT548qayszMnJ8ff3P3ny5K1bt2JiYmpqam7evBkQEODg4MDhcNLS0h48eHDjxo39+/ffvHmzzb6GDBmyc+dONze3O3fuhISEvHnzJiMjo6CgoLy8vLCwMDMzc8qUKTY2NkePHlVSUsrOzs7Ly8vMzCwqKqqoqCgrK3v9+vWHDx9iY2NR7mSxWHl5eUlJSQYGBhMmTLh37969e/e2bNliZGR0//59opifjY0Nyovv39d8/Mh88qTQxWVkSUnj/PkhMTGFRUUNHz8yCwsbcnOpJSVNeXnUnJy6tLSagoJ6DkcQH1/8+HHh06dFLBZ/yhQ9G5ugo0efq6govH9fnZFR27OnakUFzdc3hcsVpKfXZmfX5eVRMzM/FhU1VFTQysqa8vI+vWb89Vcze/sbe/fGNTVxTEy6//XXGwbj09uqrKy6wMAPdDr37t3sgoL6tLQaLlfg4hIOAEC1h5BZQMCHPXti3dwi7O1vWFvr19YyCDk0Dkfw6lV5ZuZHBoM3deoADkeQmFien1/v6DgYAECn042Njdt8ygAAYWFhs2fPBgBMnTqVw+EkJibm5+c7Ojq6ubmxWCx3d/enT58+e/bM2Nh4ypQpTU1NiYmJvXr1MjAwaI/TVjbr6+v78uXLgICAPXv2uLm52dvby8rKEtJiUKIdKOLi4iZOnIim8/LyTpw4gcTJUAyAJNw1YsQIT09PT09PY2NjCSqokWOg0WirVq0KDw9XVlYeOXJkamrqyZMnra2ta2trkXrZ+PHjf//998jISD6fL/H3rmSQZF1paenevXvXrVs3duzYq1evnjlzRk5OzsHB4eXLlydOnEhOTs7IyDhy5MiqVasiIiI2b96sqqraTk27fzt4JLu0Q6VSiTI3IrDZ7FZkulqByWSiBz7wT80toVAYFBTUhpvR27dVdDpXX79baWlTczPn558HEvJmBEjnDJC+tH0ODkegoPBJQZ/LFSgoyP4dHkSV3GVlKcRG+HyhnJzMs2fPhg8fTjz+ikDIoYlDpbK0tJTA33KXSFWyzTAYDLJCGHHuiNELQqGwqamJeAppv7QN4nObJZTDvgNsNltBQQG9TiSLk/2oGGg0mry8PPEPIq5eRqPR5OTkUCGwDoUsWUelUjU1NdG1TWja8Xg8Pp//HSL5F4EzH+a7wmAwGhsbUWeNryc5ueLMmSRDQ62BAzXnzx8iL/+931WQxRjbRlpa2rBhwyQVDwaDaQ8482EwGAxGusDf+TAYDAYjXeDMh8FgMBjpAut2ilYJx2AwGClBar924Wc+DAaDwUgXuIcLBoPBYKQL/MyHwWAwGOkCZz4MBoPBSBc480k1AoGA+EtGXMy6dZBcvfh2vhscTmuuyTqcfL6QvIj8th8JuHR+kFw4glBeRYicgm89j1/pVygUtmjAZrNbDKMjYhDx2GJgIgfn+8Two2hoaEClxBDkU9/c3IwaBQIBYdBJwv5R4MzXiQgPDx80aBDSt01MTJw3b97Lly979+7t4+Nz48YNNze30tLS0NBQAwODy5cvu7u7379/vz3u9u7dGxAQsGLFisjISJFFV65c4XK569at+/nnn2/dunXlypVdu3bl5OTo6el5e3vv2rXr0KFDyPLx48fr168PCQnx8vJydnYmtgAhDAsLKy2l/vTTtS1bHhOC0e2hxY0IBHDTpkeBgS3L4WdmfnR3j7SyuhIVlQ8AuHz5XVRU/oEDz1CRh7Cw7BUrIpClj09yQkLJ0aPPAQBpaWlIo7mzER4efvnyZSaTiWZfvny5fft2YmlkZOTSpUsnTJhQWloKAPD09ExISNi/f3/7/WZmZh4/fryiooLNZl+4cGHVqlXe3t5kg927d9vb2x85cgQAEBAQkJiYuHfvXiJOiUDEAAD
Izc21tbW1t7cvKSkhlrq7u1tZWUVFRaEWKpXq6OgowQC+GENKSoqdnd3ChQstLS3fv39vY2OzcOHCn3/++dKlS5INQ4QnT564urq+evVq69atGhoaGhoaWlpadXV1aGltbS1qtLW1jYiI0Pibdt49/vX8gJqAmM/A5/OnT58+fvx4VAnBx8cHQmhgYNDY2AghrK6uptFo4tVr2waLxTIzM+Pz+Twe7/Hjx6iRRqOhCVTzPSAgwMPDA0LI4/FKSkoghEOGDKmurhYIBD169GhoaBAvb0ts38/PD9VNdXG5FxaWRbSz2Xw+X4gKyaKW+nrW3yHx0FJynEQV2fj44kuX3goE/7+UqCKLitO2uJvkKrJ0OnfOnJsQwtjYIlRCvbaWMW9eMISwoqJ50aLbEEI3twhUcNXLy4vYr07CkydPtmzZQsw2NTWdO3cOnSAEqhDr7+9//PhxcpXU+Pj49vglV6atra3l8/kcDmfBggWEQXFx8erVq9EVAiGcPn06l8s9e/bshw8f2uP3czHAlqrCilfHlXhl2i/GgErXQgg3bNhQXV2N/ou9vLxKS0slGIYI5MKzvr6+79+/f/nypY2NDWFw8+bN58+f19TU0On0W7duJScnp6enjx07lslkdlxUnR/8zNe5+N///jdq1KgNGzYAAJAALoVCiY6Ovnv3rp+fn6qqqnj12rbRpUuXsWPHTpw4sby83Nramk6nnzp1KiEhYcuWLcXFxRMmTECuCwsL79+/v3//fgghaqmsrLxx48bo0aM1NDRaLG+LiIqKQuKcIqV0T5x44eeXmpNTZ2sbBADYvPlRQUH95s2P3r2rnjzZLyjog739dcKYqCL7+HHhs2clOTl1TU0ctKgNVWSzsj4i8WjxKrK9e6upqSk6ONxassQURauiotLZfhR7eXlpa2svWrQoJiYGAPDgwYPp06eTDYgKsRYWFuQqqSkpKe3xS65M2717dwaDsX379gULFhAGEMK+fftOmDAhPDwcALB06VJra2sFBQUJ6pSSYwAtVYUVqY7bEZVpvxgDKl1Lo9G6du2qo6ODihmVlpaiMl4dBLnw7KpVq4YPH56dnT1//nzCIDw8fPz48VOnTi0qKlqwYMGoUaOUlJQMDQ2lXMAaZ75Ox4kTJ7Kzs69evUq0jBo1ysLComfPnmhWvHpt2/jzzz/Xrl07ZcqU169f+/v7Dx8+3MbGZt++fXp6esR/Ra9evcaMGTN8+HCirnR8fPzx48eDg4PBZ8rbAgB4PB5Rh10EVEJ28ODuaHbjxrEMBi81tdrEpHu3bkrLl4+gUChM5qdbCVFFlssV6OqqGxlpd+v2SRpfslVkIYT6+t3MzHru2xeHWgYNGpSWlvbFLX9PysrKfv/99zNnzuzbty86OnrYsGENDQ0sFotcW5HJZHI4nClTpohUSW2PX3JlWhqNpqSktGTJkiNHjhCnXk9Pb9u2bcnJyadOnQIAUCgUJyenM2fOVFVVtcdvKzGIV4VFoOq4HVSZ9itjiIiImDlzJpouLy//VnH2b4VceBa13Lp1a+7cuYSBr69vUlKSvr7+4cOHUYu/v//ChQs7NKrOD858nQhU91JWVvbWrVsnTpxoaGhA7VpaWn379l26dOnbt29hS9Vr28CLFy9qa2udnZ0PHDhw7do1RUXFrKwsAAC6UxD3SiUlJW1t7Xnz5jU1NaG3fwsXLlyyZAn6N/tceVt5efkWv5+np9cCACCEqH4sny/csePJuHH/+EWMStEiJFtFVldXvbGRDVqqIpudTWUwuJ6ek4yMtPPz61FjO6vISpyuXbs2NDRoaWmx2WwWixUaGnr9+vX3798Tj3RCofDBgwcrV66k0WgiVVLb45dcmba+vl5eXt7U1NTCwkKk/4iOjo6hoSEAICwszN3dfd++feLfjyUVA2okV4UFpOq41dXVHVGZ9mtiAAAkJSWNGjUKTd++fdvBwUFSAbQIufAsAKCkpERVVRVduqgybXV1taWl5Y0
bN4h3Mw8fPkRl5aUZrF7WiQgNDX306JGzs7OWltbNmzcfPXqUkZFRVVWFXrPExcVNnTo1Ly+PqF47btw4FRWVtr3zVFZWXr16NcqmTk5OQ4YMsbKy+vDhg4ODg6GhYW5ublFRUUJCQm5ubkhISENDw71797y8vCoqKp48ebJp06b58+dv377d09MTlbedNWuWQCDo1avX+PHj0fYHDhwIIfz4kZmWViMjQ2EyeXl5VC5XsGSJ6erVD+TlZRsaWNXV9Hfvqq9dS62rY0ZF5VVX0ysqaCUljTk5dSgzoSqy9vaGTk5DjYy0jh59/tNPA/T1uwFSFdmHD/Pfv68GAHA4Anf3yMuXZ02ePDkwMBC9egoI+ODr+1pHR5XHE4SELLCy6vfsWUlSUsW6daMBAPHxxWVlTdXVdCMjraYmTmJiea9eqgYG3QAAOTk5ne3u4OnpeenSJT09PXd3dwcHBwcHh5KSklOnTk2YMMHNze306dPr1q2rrKwMCgrq16/f6dOnHz16hKqkTpo0qT1+3d3djx49qqmpaWxszOPxPD09x40bZ2Njo6ys7Ovra2pqmpWVxePx1NTU1q1bBwAwNTWNjY1FX8UktOv/iEFTU3PVqlW2traoKiyKoaCgwNfXV0dHh8fjhYSEoJefqampEqxM+8UYxo0bR6fTyQUU8/Pz21kW+IucOHEiICCgX79+RPci1NGMxWI5ODiEh4dv2LChS5cuS5cuPXDgAADg+fPn5ubmRBaUWrCGS+eFqHXZEaB+5wwGQ01NDT3hQQh5PB5RRfabEC9vW1lZmZGRMW3aNHFjHk8oLy+D3j2iwrAcjkBRseXiohKvItvczOnaVbHFRXQ6V1VVAQDAZrP9/f2J10edBw6Hw+fzyfdWxOculfaXFUSQK9NSqVQNDQ1UDBa9ooAQUqlUbW1twl6kgq5EIMdArgr7PSvTfjEGgUDA5/NRPVjQMcdBnM8VniUq07a5xPR/GJz5MB1FeXm5pqYmUZz9u9H+231GRsbgwYOJT5sYDOY/Bs58GAwGg5EupP1tLxkejycvL09+cSErK9sR71KIt4vine5EGlucJRpbn/2aLbTfqWRX+SFxtsEpPgU/3Om/5RT8EKdfv4rUPvngzAcAAAUFBcePHx87diyE8PTp0+/evfP39+dwOFVVVTwe7+DBg5J1R1xtIpcd0me5dOnSmzdvbG1t7ezsAgIC9PX1Dx48KBQKHz16VFtbO2XKlC1btnA4HPRBDm1BZIMim21xFv0lsruIDY/HI383+qIX1O2T/CuhzYF9zmnrqyD1ss85JcYzAAB4PIGc3P+/xhQKhcRdQCCA33oA27On7Tka4ltAogTfdNbaHKf4iRZfBfX87LjzTr5oP7enrdziJXgKyF/RIITEeBLChjDoiPPOYDAaGxvRtLq6OoPBqK6uFlnl48ePzc3NampqAoGATqc3NTU1NjYqKipK9cc/KPUIhUJzc/Pc3Fw0GxISwmAwJk+ejCRIHjx48N0iEddnKSsrMzMzQ0vz8/MhhC4uLmFhYRJx5+npefXqVVdX13v37oksunDhAofDWbt27YwZM27evHn58uWdO3dmZ2f379/fy8tr586dSB8EQvjo0aN169YFBQWdPHly3rx5xBaEQuGdO3dKSuqmTPH7/fdHHA4ftpsWN8LnCzdufPg5DZeMjNrffrs/ZsylBw/yIISXLr0ND8/Zvz+eSmVCCO/cyXJx+bTvZ88mxcYWHTmSACH88OEDOtr/Iv7444+ioqKO2PL58+cdHR2dnZ1Hjx5No9E2btw4ZcoU4gIQMUAKOHV1dZLVTxFxcfbs2djY2CNHjnzO4OPHj3Pnzk1PT++4GHJycmxsbOzs7LKzs5FBRkbGb7/9NmbMGHTTEDfoCExNTdGd3MTEJD4+ftasWSdOnPj9998Jg5MnT+7evXvhwoV37twJCwsj7vwhISEdF1XnB2c+mJeXh7rgkwkICBgwYMDTp0+/ZyR1dXUDBw4
sLCzcvHnz0aNHIYTl5eUDBgy4f/9+YGBgUFAQhNDV1fXu3bvt94XVy+C/Tb1MBBaLxWazCfUsd3d3CKFQKGxoaJCsIyTKxePxtm7dmpyczGazIYRmZmboQIkYoBaJK4eRXVRUVCxatAhC6Obm1koM7u7uks18Ii6+qKAmbiBxuFyut7d3WlrauXPnDh06NHv27F9//TU1NVVGRqa6uhrZaGhoXLt27dSpUyYmJli9jAD3XmtZiOSXX34JDQ1dt27dhQsXvmcw4vosKioqY8aMMTc3V1VVlaAjrF4G/m3qZWTevXs3efLkoKAge3t7AEBJSUm/fv3q6+t37doVGhpK/mnfftDIyKdPn06ePHnUqFGoo/yQIUOI00o2AB2jHEZ20bt3bzU1NQcHhyVLlnwuBgCAxLvmirj4ooKauIHEkZeX9/DwGDp0aFJSkrOzs66ubk1NDYRQKBQi4XsUNmqsqqrC6mUEOPOBwYMHQwhzc3PRLJ/Pr6+vj4mJGTlyZFRU1NmzZ79bJLAlfRY5OTktLS1jY+PJkyfn5+eT7dPT09vjDquXEcB/g3oZGRMTk27dui1fvpxCoTCZzJCQEEdHRw0NDfTj49WrVxL3GBMTQwztDwsL27dvX4sGHaQcRnYBIdTX1zczM/tcDJJ1+jkXrSuotWIgcRgMRkVFhb6+/rZt2wwMDE6dOqWkpDR48GC09PLlywUFBeHh4SgfA6xeBgDAPVwAAAoKCiEhIdu3b3dwcFBRUaFQKLNmzTpw4EBDQ0NFRcXKlSu/WyRPnjwR0Wdhs9k1NTU3b97k8Xh+fn5nzpxJS0uTkZFhMpl5eXlcLrfNvW9evHgxaNAgZ2dnPp9/7dq1ESNGZGVl/fTTT83NzUpKSuLqZenp6T179oQQLly4UCAQuLm5+fv7W1tbnzt3jsvlou42LBYLpcxvUi+7eHEW2UZcvax3b7XKStrjx4Xwn+plW7eOl6x62d69k9eujcrPrx84UBN0PvWyFpGRkYF/v7HPyckJCgqaO3duTk6OZL0gXT0kwZyenm5gYGBgYEAeN0kYNDc3v3nz5s2bN0g5TIISKoSLrKwsBoOxd+/etWvX5ufnDxw4UDzIDkLcxecU1Oh0OnpJIy5vJnFu3749e/ZsAECfPn1OnTo1fPjwEydOyMvLR0dHT5w40dLSsmfPnmZmZg8fPkT2Dx8+3LVrV4eG1PnBmQ8AAEaNGnX79u2GhgZ1dXX0cPPo0SMIIYVCIeQYvgNOTk6EFFlGRgaaIF4hopuIpH7OY/Uy8G9TLyOTlZVVXV1dUVFRUlISFxfXr18/AEBlZeWLFy+0tbVfv37d1NQkwcwdFxc3ceJEAEBaWpqLi0ufPn04HM6BAweSk5ORahdhMGLEiBEjRgBJK4eRYzAyMmpqakpMTOzVq5eBgQGhHEYYAACoVGpGRkZKSoqJiYn42KH2x0Cj0X7//fdWFNSuXLmyefNmwkBSAbRIUFCQv78/AODNmzdBQUFbtmxZsmQJUi+LiIig0+kPHz6MjIxEaqJYvQyBR7JLKVi9TJzOr17WIkgoHD1tI8EqQrZKUrDZbAUFBfEvZ8Twhs8ZdFwMxEPVj4rhiwpqZIPvA/H2BYEuA5FGDAJnPkxHgdXLMBhM5wRnPgwGg8FIF9L+tveHgNXLvtVph8bZBqf4FPxwp/+WU/BDnH79KlL75IMz3w8A/lOLSHwpoSAqbgP/1m0iL4WfFzf63CzRKBQKRWy+XoqslX8h8irEBr8msM/F2YZVvmYLbDaf/IlR7OD8/yz6iEjMErJnEEJC9uybwm7nriHZgc+tQhztNgT2Oac0Gu3v/W1BvQyNbyFW6SD1MiIGNIZdZBWRf4qvvD6/NTAiBjRKvfVVyJ/ZOuKSFlEvEwgEHz9+FFmFUC9DLdXV1WTRPukEf8noXHh7e+/du/fOnTu///77kSNHkJQMqpaOuHbt2uXLl/ft2+fp6dl+d0KhcNmyZcTs48eP169fHxIS4uX
l5ezsnJGR0bt379DQ0M+t7u/v//jxYzSdk5Ojp6fn7e29a9euQ4cOtbhBYkUIYVhYGIvFev26UkvreFxccfv3pRW4XIF4o0AAN216FBj4ocVVMjM/urtHWlldiYrKBwB4esYlJJTu3x8PAGCz+QcPPrtw4Q2y9PFJTkgoOXr0OQDgyZMn6LbYoWRmZh4/fryioiIlJcXOzm7hwoWWlpbZ2dnEUm9v7+Dg4Bs3bkjcI4PB2LRp07Rp04hTDABoamrauXPno0ePrl69ilqoVKqjo6OkvIvEwGazL1y4sGrVKm9vb/JS8l7X1dU5OjpmZmZ2UAwAgMuXL0dFRR04cICoz37hwoV58+YtXLhwzJgxKPF0xHEQwcrKSkNDQ0NDY9y4cbW1tQ4ODlevXj19+jRh4OXldfbs2ZUrV4aFhZWXl0+bNq2qqsrc3BwlSOkFYjoNd+7cWbx4MZoWCAQBAQFlZWWmpqZkG8kKikZFRVlZWSUlJUEIW5QiMzAwaEUNa+PGjTY2NsRsG7TNIIQ6OiegmKoZh8PncgVIq6yxkU2syGTykGKZQCDkcgVI2AzZs9n/kImSoOxZXFzxwYPPIISenrHx8cUQwuDgDB+fZCgmeyYQCE6ePPm5wyURUK1z9LSB9LQghBs2bCAMlixZUlBQACEcM2YMIe4lKY8tqpft3bsXCXfZ2dnR6XTYAepl5Bhqa2v5fD6Hw1mwYAFhIL7XElcvI8dAp9PnzJkDIYyNjd23bx8y+A4qbiKIqJdt27bNycnpw4cPqqqqhBIhWb0sJCSkS5cu2dnZY8eOffPmTccF1vnBz3ydiNu3b8+YMQNNy8jI/PLLLyLSXwAAFxeXCRMmxMbG2tjYtN9jcXHxgQMH0G/nFqXIxAMgSEpKWrx4MZPJJJ42KN+ubQb+/upJVjWrqWGYmZ0/dy45MbH80KGEJ08Kx469nJHxMSwsOymp3NY2qLy82dk51Ns7cfXqyIiI3MpKmp9f6pkzSYT4i2Rlz16/rujfXx18heyZjIzM27dvqVTq15+CbyUoKEhTU3Pz5s1eXl5ozCKNRuvatSthoKqqWlhYCACQk5OTiIoK2WOL6mWvX7/u378/AEBTUzMnJ6cj1MvIMXTv3p3BYGzfvn3BggWEgfheS7xrLjmGrKwsNIynf//+KSkpyOA7qLiJIKJelpKSIi8vj8bRE/+VZPWyGTNmjBgxwtzc3N7evqNHGXZycObrRHxODIyMBAVF8/LySktLGQxGbGxsWVnZ13iPiYmJjIysra0FANy7d6+iomLYsGGnTp0iDNqsbUZWNdPRUeneXcXDY4ypqU5sbNHcuYMHDtQcMqS7n18qhyOYM8e4uZljYKA5aZLe6tWWCQklfL7Q3//9pEn9u3T5lGV/oOyZgYGBxF+ykUlNTV2/fr23t/etW7fQm9WIiIiZM2cSBuvXr797966fnx+NRpPIYHZxjyLqZURdHhkZmYqKio5QLxOJQUlJacmSJUeOHCEuMInvdesx0Ol0YpdFLonvoOImAqFeRh6fTkRFVi/jcrkDBgyYM2fOrl27Xr9+3aFRdXJw5utEzJgxg/hsBv4eoUzA4XBSU1MlKCgaFRW1ZcuWCRMm/P777z4+PtbW1i9evEAdE8S9AwDS09NVVFRUVVXl5OSqq6uNjY3Hjx+/d+/ex48fo6ccCOHChQuXLFmCxoB/boMi2mbq6p/GXMO/Vc3A3w9VqqoKU6fqP3iQt3WrFQCATudOm6bv7j5KQ6MLYQYhUFVVCAycu3Hjw/fvP6n0ItmzRYuGjRjRE/yzR8CBA/H9+2t8k+zZ4MHdKytp4POyZ56ek4yMtPPz6//eow6UPdPT0ysoKAAA6Orqoi9MSUlJSJ4DYWho6OPjw+FwXFxcOsIjWb0MGYwcObKyshIA0NjYqK2t/ebNGx8fH6ReJpEAxGOQl5c3NTW1sLAgri6J73XrMXTv3h31K6moqCA/PImouEn8OLQIoV42atQoHo/HZrP
V1NSMjY2jo6OZTKalpeWOHTtSU1MPHz4cHBwsIyMTGBjYr1+/5OTkDo2qk4P7dnYiVqxYkZubu3PnTktLSw6HY2ZmlpiYWFVVFRgYyGazQ0NDAwMD169fLxFB0ZSUlOfPn7u5uXXp0sXU1HTu3LmLFy8WkSLr1q1bVVWVn5+fpqbmy5cvp0+f7uDgAADg8Xi//vrrypUr1dXVWSyWvr7+9u3bPTw8vlXbLCmp4u7dbCenoQCAUaN6E6pmubnUoqKGjIyPRkZa8fHFtbWMAQM0DAw0p0zRs7EJmjJFb9Uqi7w8ak5OnVAICwrqk5Mrmps5ixYNR6UYQAfIniUklKSmVlMolEmT+nM4glevyplMHoPBE5c9o9PpxsbG7Tk1rePu7n706FFNTU1jY+P+/fvT6XQVFRW0CGlojRw58sGDB1wud+3atRL32NzcLK5etmbNGl9fX4FAYGdnN3bs2LFjxwJJq5eRY+DxeJ6enuPGjbOxsVFWVm5xrztCvYwcA1L7e/bsWVJS0rp168QV1DpOxU0cQr1s3bp1Li4ukZGRx48fl5GREVcvU1FRiYiIiI6ONjMzmzdvXodG1cnBI9k7HXw+n8FgfO65AVXOo3SkoKi4FJnEN0homzEYPBWVT/q/ZFUz1PL2bRWdztXX71Za2tTczPn554FsNp94pUkAIeTzIQBAXv7/32FIXPaMRuOqqbWsAkXInhUUFOTk5Nja2n71sWkLQqGwqakJ6SALBAI+n48uBjTYgEqlqqmpSVawiuyRDDG8QSAQcDicDtXrIcdApVI1NDQI1x20163HAABobm5GX1i/p4LaF2EymehEfE69jFCWl2Zw5sP8GL5G2yw5ueLMmSRDQ62BAzXnzx9CTmwdQftlz9LS0oYNGyapeDAYTAeBMx8Gg8FgpAvcwwWDwWAw0gXOfBgMBoORLnDfTiDl+nUYDEZqkdqvXfg7HwaDwWCkC/y2E4PBYDDSBc58GAwGg5EucObDSAxUHU0EslCZ+CxGUvD5/C/KrrYTQq4M1XRsxQAAQOiKdVAMCJHL6XOBdVwM4rtJjgGVBfgOIQEAGhoaAABMJrOpqampqYn4Z2QwGE1/g1p4PJ64NqG0gTOflJKdna2rq3vixIlNmzZdvHixDVvgcrnr1q2zsbG5devWlStXdu3aFRsbi/TyCQoLCy0tLYlZf3//3bt3tzd0TEucPn26vLy8gzbeem088M/CdaCD6/MBAHJzc21tbe3t7UtKStBScmBsNtvGxmbhwoU///zzpUuXOigGILabIgcnPj4+KirqwoULb968kWAM4jx58sTV1fXVq1cAAEtLSw0NjZ49ezIYDLSUXMCPyWRevHhRT0/v7du3HRrSv4DvXhcJ01kwMjJqbGxkMBgaGhpMJlMoFBKl+NhsdlNTE1GArbm5GU1wOBwul4vqfUMIAwICPDw8kD2qt4fE1SCETU1NaGL8+PFogk6nJycnb968+TvsmjTAYrHYbDYqFwchdHd3hxCST6Kk+GJtPLIBokPr80EIt27d+vDhQ7JHcmDV1dVcLhdC6OXlVVpa2kExQLHdFDk4bm5uubm5aWlpZ86ckVQM4sTGxmpqalZVVUEIS0tLfXx8ysrKiGtApIAfhLCqqgoAgOopSjP4mU96oVAoQqEwPj5+4MCBLBZr165doaGhYWFhBw4ccHFx2bNnD9KfvHbtWnx8vI2NTU1NjZmZ2blz5xITE4ktFBYW3r9/f/369V27dv39999DQkIaGhoCAgJ8fX33799P+Lp+/XpkZOTTp09/zK7+53j37t3kyZODgoLs7e0BACUlJf369auvrydOogR9fbE2HtkAdExdOhEX5ubmf/31108//cTjfarISA5MR0dHXl4eAFBaWqqrq9tBMYjvpsjBWbdunbOzc3BwcDvF5Vtn3759I0eO3Llz54sXL6Kjo9esWWNmZvbkyRO0VKSAH2rpuGD+ReDMJ9XcuHGDTqfHxsZqaGigyrGvXr0
yMTGxsLBAVfcePXqUlJQkLy8/ceLErl27du/e3cPDw8rKithCr169xowZY25uLiMjY2RkBAAIDAxkMBhDhgzR1NQkzK5evbpgwYKpU6d+7z38j2JiYtKtW7fly5dTKBQmkxkSEuLo6Eg+iRL09cXaeGSDDx8+fIf6fPPnzw8NDTU1NX327BlhIxJYeXk5Uf1Y4jF8bjfJMbBYLFQ5ISEhQYJhiJCSkjJ79mwTExM3N7fFixdnZmauWLECvYkhbIgCfh0Xxr8OnPm+AQ6HQ0y32JtDUqBfsp9zgdrbHwCEcNGiRfPnz1dVVc3LywsMDLSwsEDf59Ho/m7duqmqqgoEghkzZnh4eHA4HBERegihkpKStrb2ihUrVFRU0FqKiorq6up2dnbkTyAfP34UCoUUCh4/KmFkZGQghHl5eQMHDhQ5iZLii7XxyAbl5eXfoT4fahwyZAi5fIRIYLdv30ZFtToihs/tJjkGb29vNze3qKgoVEKog9DV1eVyudra2rW1tdXV1QYGBkeOHBk9ejSFQkH1+QCpgB/4e+g6/jfEGi6grq5uxYoVffr08fHxQS329vYaGhq+vr6qqqqoRSAQbNmyxcTExMXFBVX9iI2N1dfXl/jPKG9v76ampsGDB6ekpKirq/fv33/Pnj0PHjwYPHgwMrh27RqHw6mqquLxeAcPHmyzo6ysrMrKyocPH6I3M5WVlS9evNDW1n79+vXw4cPT0tJCQkJGjhw5bty4ffv2ubq6jh492t7evqioKCMjY8iQIQAALpebkJCQm5tbV1enra3N5/PfvXunpKS0efPm6dOnv337dtq0aVwut6ioqKysbOHChYsWLTIyMioqKhIvm4L5VrKysqqrqysqKkpKSuLi4vr16wf+eRKbmpokVSD3i7XxyAa2trboJXnH1efT1NRctWqVra2tsrLyyJEjUQw9evQICAggAgMA5OfnGxgYSCoAkRhEdjM1NfXJkyezZ88mxzB//vyAgIA+ffp0aCW8EydOBAQE9OvX78iRI8ePHw8PD1+zZo2HhweLxUL1+aytrYkCfmw228/PDwAQFhZmamrazsok/25+4DfGzsO5c+cGDBhQXV0NIXz//r2tre2JEye4XC6Xy+Xz+Ww2G0Lo5+d36dKlqqqqbdu28Xg8SOrNISnu3LmzePFiNC0QCAICAsrKykxNTck2kydPRn1JHjx4IFnvaDdRCdxTp06hWQSLxfqmTaFqbSKNDAaD3DsAIymYTCaTyUTTxEmUrAuBQFBfX4+m6+rqiPNITJANOgiyi+bmZuKaJGIgBwYhpNPpHRqDCOieIBID6hEm8TBE4HK5xAVA/leV+GXwXwK/7QQAAEVFRTc3N/TMFxcXN2XKFAqFkpSUtHz58sbGxpEjRxKW+fn5b968KSkpQb05JBvG7du3Z8yYgaZlZGR++eUXCoUiIivq4uIyYcKE2NhYGxsbyXpH1U0VFRUzMjKysrLIb8y6dOnyTZuSkZERf6pTVlZG1TsxkkVJSYkoNEqcRMm6kJGRId4ramlpEeeRmCAbdBBkF2pqasQ1ScRADgwAQJSq76AYRJCTkxOPQUFB4Tv0KJGXlycuAPK/ascVr/4PgN92fmLVqlVmZmYuLi69evUqLS0FAAwYMAAAoKWl1aNHD8JMV1dXR0fHwMAA9eaQLOipqHWbX375xcTEZOnSpWvWrOmgPmN79uzpiM1iMBhMJwE/8wEAAI/H09DQcHBwcHFxmTNnDvz78y+aEOlLgho7osLDjBkzHj9+TMyK6CxwOJzU1NSYmJiRI0dGRUWdPXtW4gFgMBiMNIAzH6ipqbl161ZycvKGDRtsbGxYLFZiYuKrV6/U1NQqKip8fX25XG5GRsb79+8zMjJ0dHTy8vISEhLevXuXmpoq2UhWrFjRo0ePnTt33rt3Lzg4uKysLCYmpqqqKjAw8NKlS7Nnz+7bt++BAwdCQkJCQ0Ml8sD3uW6i5EdPoVBImLWhQymPJwQACAS
ifck4HMl3jmWzW3tiFomBz/+HytTnzDoh6EMFuUVcQ0viSmYip7511S7QMb0HvxiD+PUpWbU8CGF1dTWaFgqFSBKMUEshIJSMGhsbO+I4iMBms5F0GWhJvQzR1NREpVIBAHw+v6ys7DtE1dn5MZ8X/yXw+Xx0uyc3Srxjiwg8Hg/9w7QIm81G4h3td+Tp6Xn16lVXV9d79+6JLLpw4QKEkMFgrF+//syZMzdv3pwzZ05sbOzjx48LCgpgqwchPT393bt3EEIvr0RPz9ibN9M3b3504EA82ebp06KFC0PbFjaH03I3mYKC+mHD/vzcWidPvpw164aDw02hUNjYyN6x40lkZO6VK+8ghDk5dWPHXkJmt26lh4ZmHjmSkJZWw2AwHj582LYgO464uLjIyEhfX9+UlBTU8uLFi40bNxIGjY2NO3bsiIyMvHLlikQ81tXV+fr6RkdHExsU8Zifn+/r62tvb//s2TMI4cePH+fOnZueni4R718Zg4hBTk6OjY2NnZ1ddna2pGKIiopavnz5gwcPfvrpJwgh8Zl/x44dZLPw8PBu3bpBCA8cOKCurq6np9fKv3P7OXDgwMmTJ/fv3793714IIep33aVLF0JECULI4/HGjBlz6tSp2tpaCwuLmJgYKyurjr6PdXLwM19ryMrKysjIiAxi6+ge+XJycq30R1dUVOzSpUv7v12z2ex79+4tXrzY19cX9QIHANDpdDSxbNkyAMD27duHDx++du1aJyenc+fO8fl8a2vrvn37VldXe3p6oqcKFotFfrzgcrlRUVFmZmZhYdnv3lXt3TvZyWnIsWPWenoaAAA+X9jYyAYAmJh0hxCw2XziAYtG46KHMKEQ8nhCBoMnEjCNxgUA8HjCdeuiuNz//z0LIUTb1Nfvpq7eck+cujrmzJmG9+45d+umlJJSderUK1vbQba2g+7cyWIweIaGWvLyn3olnDmT5Og4eMkS0z17YpWVlQ0NDSXej6mdXL9+fdCgQePHj3/58iUAoLm5OTU1lfy8derUKdTn/s6dO+KPI23g4cOHcnJy06dPT0pKatGjiorKqlWrNm/ejAQqtbW1e/bs2X6/3xSDiMGVK1c8PDzu3r0rwe/x9+/ff/PmzYgRIzIyMrhcbnV19fv37z9+/Ojp6UnYlJeXo9+RNBpt1qxZmZmZDQ0NMTExkopBnJs3b2ZkZJibm797966srOy3334rKyurqqrq2rUrYRMcHFxTUwMACAoKolKpU6dOzcjIkOBoy38jOPNJKV26dBk7duzEiRPLy8utra3pdPqpU6cSEhK2bNlSXFw8YcIE8M++pr179/7pp59Qj1bUwbWsrCwsLCwpKcnW1pZ4BXT79u1BgwYBAG7fzpwxYyBqlJGh/PLL8Ddvqk6cePnyZdnatVEAgPLy5mvX3s+cGQQAOHbsRW4u9ddf7/H5QmfnUG/vxNWrIyMicolor117Hx9fbGMT1NTEfvmyLCPjI2qvr2ft2hUbGpoZFpbdys5qaysbGWkBABQVZQ0NtV6/rujfXx0AoKmplJNThyJElnQ6l8nk6eioFhU1AgD09PSuXLkikQMuKUQ0sR48eDB9+nSywevXr/v37w8A0NTUzMnJab9He3v7gICAVatWoVu8uMeePXsWFxefPXt21qxZqEXkx+J3iEHEQFzerP2sXLmytrbWyMgoNDRUQUHhyJEjpqamrq6uxPd4oVAYFRU1bdo0AICamtrw4cO7deumpKREFm2XODt27AgMDPTw8Lh8+bK4ehkAICEhwdTUFP1W1tXVpVKp6CUWEvCUWnDmk17+/PPPtWvXTpky5fXr1/7+/sOHD7exsdm3b5+enh7qJC3S15TQJ0MdXAcMGODn58fhcObMmUPIamRmZnbv3h0AwGDwyB/SAAAnT76cP9/E1nZQamp1cXFj375dV640V1KSb2xku7qO5PEEJSWNPJ7QwEBz0iS91astExI+yfCnpdUmJZXLy8tOnNhfRUVBXb3
LiBGfHik0NLosXWoKAHj16stlCrKy6mbONFRXVxQKIUp1MjKig0aOHLHeuzfu2LHnhAv0xrsNh7eDIGtiRUdHDxs2rKGhgcVike+/KPHIyMhIpB8Wl8u1t7enUqno3iruEQCgo6OzaNGiLVu2tN9d22IgGwAAWpQ3aydMJnP27Nn6+voODg4cDufdu3f379/Pz8+/desWMrh27ZqamlpqaiqPx8vLywMA+Pv7X7hwQYLaoS1GtWbNmvr6+mXLlomrl3E4nLCwsPLycjqdnpWVNXPmzK1bt27ZsqWpqcnc3Lzjour84Mwnpbx48aK2ttbZ2fnAgQPXrl1TVFTMysoCACARQnTHFO9rStxJ0f8VnU6fNm2au7u7hoYGakcqSgCAGTMMHj8uIK3Ll5WVyc2lAgB69lTt0uXTcBo5ORmBAB44EN+/v4amppJQ+Onlp4wMhfgGr6goKxDAGTMMPDzGoI98xGbz8uoDAz9YWPQmVvwcdXXM8vLmmTMH0WjckSN7VVbSAACNjWz0LEhgYzPw+PFpeXn1q1ZZoBYVFZVONQyRrInFYrFCQ0OvX7/+/v37lJQUZDBy5MjKykoAQGNjo0Te9QUHB1tYWAQHBycmJrboEQCgpKQ0Z84cYlSZxPliDGQDYi0RebN24uXlZWFhERYWVldXl52dzWAwZs6ceerUKQjh69evy8rKOBxOUFBQdHQ0h8NJSkpKSUmh0WhaWlqSrZQkws6dO93d3f/4449Xr16Jq5c1Njbm5eX5+PjU19cnJCQoKCjs2LGDyWTOmzdv3LhxHRdV5weP55NSlJWVV69evXTp0rdv3zo5OQ0ZMsTKyurDhw8ODg6Ghoa5ubnl5eWnT5/28PBgMBgDBgxgs9lTpkxB+mTz5s3Ly8t7+fLllClTbGxspkyZsnr1arTZadOmPXz4EACwYoV5bi51586nlpZ9OBy+mVnPrVutzp17TaFQrKx0qVRmQUF9eXlzYWFDZubHrKy6wMAPdDo3Ojo/L4+ak1MnFMKCgnqBAMrKUgwNtYqLG11dw0eP7rtixUhtbeXr19P+979hAIDKStqLF2Xa2sqvX1ekp9cWFTWUlTXfv59raqpTVvYyLS0NCbzRaNwFC0LU1bv4+CTb2xuuWWPp65siEEA7O0NlZfmCgoacnLqsrLrBg7Xr6pi3b2c5Og62tOwDAIAQImGwzgNZE8ve3t7BwaGkpOTUqVMTJkxAOl5r1qzx9fUVCAR2dnbEF9z2YGNjc/HiRRUVFVtbWwcHB3GPdXV1FRUVurq67u7uAAAqlZqRkZGSkmJiYiKpwT9fjIFsQKPRfv/9d0LeTCIBAAAcHBwePXqkpKTk6uqKtjx16tSpU6e6uLhMmzZt4sSJBw4cWLVqVVBQ0Jo1a8aNGzdmzJiPHz8CAA4dOiSpGMRZsmRJcHAwj8fbu3dvi+plERERAIBBgwa5ublVV1dfvXpVR0fH29u7I8Zl/YvACsJSCnqDx2Aw1NTU0P8AhJDH44n332EwGLKysiIyLoT2JpvNFll08+ZNBwcH9F2BzxcyGDx19U/9cSCEbLZASUn095ZQCFFXF1lZCvHJTQQ2m4+eFAUCSKH8/5c5DkegqCiL/v69a1BWlsLlcmNjY4nvlGK7DzkcvrKyqL5GdTVdR0eFuClERkYOHjy4s4ncc7lcCoUiLg4iEAjQ4ylSj5NI2kNACJlMprgqCvIIIayrq0NvuTuO1mMQMaDRaPLy8t+qPfQ1MXC5XHRtCwQCoVCIzgKXy5WXl/9RuYTD4cjLy6NX3OT/Rw6HI9IVTrxFasGZDyN5cnNzDQ0Nf2wMdDqdEBxvGwwGo6amprOlPQwG035w5sNgMBiMdIF7uGAwGAxGusCZD4PBYDDSBe7b2SHa0xgMBtP5kdqvXfiZD4PBYDDSBe7hgsFgMBjpAj/zYTAYDEa6wJnv/xEpMIbBYFrkc/8pRDG8H6V0KvIGC0JIlB/pOI8iLkQqI37/l2riB79TCc9
2EnDm+3/WrVtHFHjEYDCICxcuzJs3b+HChWPGjGGxWBcuXFi1apW3tzfZZvfu3fb29keOHAEABAQEJCYm7t27l8lkdlAMNjY2Cxcu/Pnnn8mSmGFhYStWrCBmfXx87t69K8GqEeQYUD4Td+Hp6ZmQkLB//34AQF1dnaOjY2ZmpqQCaJHVq1c7Ozs7Ozv/+uuvmZmZ3t7ewcHBN27cIAwiIyOXLl06YcKE0tLSlJQUOzu7hQsXWlpaZme3Vt7kv893qwTYyamrq5s+ffqxY8cghDwej81mC4VCLpfL4/EghM3NzciMw+FwuVxU5qO+vp5Yvbm5WSgUomkmk4nWwmD+A6AS3jweb+vWrbW1tXw+n8PhLFiwgDAoLi5evXp1SUkJmp0+fTqXyz179uyHDx86Iobq6moulwsh9PLyKi0tJWxqa2vnzZuHpvfv3y9eb1mCMbToIi4u7uDBgxBCT0/P+Ph4CKG7u7tkK/SKIBQKy8vLIYRv3rwJDAxcsmQJKhw9ZswY4nb0/PlzCKG/v//x48fRLkAIN2zY0HFR/SvAz3yfCA8PDwoK8vPz4/P5+fn5pqamfD7//PnzhYWF165di4+Pt7GxqampMTMzO3fuXGJi4ubNmwsKCjZv3gwAOHfu3Nu3b0eMGPH48WPxknUYzL+avn37AgCePn06efLk7t27MxiM7du3L1iwgDCAEPbt23fChAnh4eEAgKVLl1pbWysoKAwbNqwjYtDR0UFqmaWlpeQCQMSzl1Ao/Ouvv2praxcsWFBaWtoRMbTogqiM2L9/f1REQuJ1CkWgUCh9+vQBAISHh9vZ2amqqhYWFgIA5OTkUNEVAICVlRUAgMvlWlhYoF2g0WjkurXSCc58AAAgFApTUlKSkpJ69OgRGhpqbGxsZWWVnZ3do0cPVHBEXl5+4sSJXbt27d69u4eHh5WV1caNGxkMRmpqKgDg1q1bY8eORers4iXrMJj/ADExMVOnTgUAKCkpLVmy5MiRI8QHLT09vW3btiUnJ586dQoAQKFQnJyczpw5I/Hap0QMAIDy8nJ00xensLDQ0tLS1dXV2dn5woULHRFDiy4kXhnx62lsbFRXV1+/fv3du3f9/PxoNJq6ujqxlMlkcjicKVOmoNmIiIiZM2d+z/A6ITjzAQBARETEb7/9Nn78+GPHjqEPGGvWrDl8+HDv3r0VFRUFAsGMGTM8PDw4HA66svl8/o4dO4gCVxs3brx27dovv/yipqYmXrIOg/m3IxAIBAIBetKSl5c3NTW1sLDgcrlkGx0dHSRTHhYW5u7uvm/fvsjIyA6KAQBw+/ZtBweHFi179epVXl4OADA0NESlCiUeQ4suiMqIFRUVEqyO9EXev38/fPhwFIyPjw+Hw3FxcSGWCoXCBw8erFy5kkajoZakpKRRo0Z9t/A6J1jDBdTU1Pj6+vr7+6urq+vp6eXn5/v7+y9ZskRBQWH8+PEUCqW4uNjV1XX06NH29vZFRUUZGRm6urrv3r27du0aqlH5+PFjGRmZmpoaXV1d8ZJ1GMy/nbi4uIkTJwIA8vPzAwICxo0bZ2Njo6ysjGrjZWVl8Xg8NTW1devWAQBMTU1jY2MrKys/l5naGQMiPz/fwMAAAIBiGDduXHx8fFlZWXV1dc+ePR0dHe/fv5+amrp+/fqOiEFFRYXsIjU19cmTJ5s2bXr06FFqaiqFQpk0aVJH1ClskbCwsLVr1wIA2Gz2gwcPuFwumnVzczt9+vS6desqKyuDgoL69et3+vRpOp0uXulJCsEj2T8LUfcLtFSFDhWo43A4HA4nLCzs559/rqure/369bJly8SNMZh/NWw2W0FBAb3woFKpGhoaRCFAVJ+PSqVqa2sT9gwGQ+K3V3IMZBfk/1Myzc3NysrKcnKS/HEvEgPZBZ/PRxM0Gk1NTU2CTr8IcSioVKqamhpRYpMIiYxAIODz+bhKH8587aWxsXH16tUDBgwYMGCAjY1N7969f3REGAwGg2kNnPk
wGAwGI13gHi4YDAaDkS5w5sNgMBiMdIEzHwaDwWCkCzyqAVemxWAwUorU9vPAz3wYDAaDkS5w304MBoPBSBf4mQ+DwWAw0gXOfBgMBoORLnDmw2AwGIx0gTMfBoPBYKQLnPkwGAwGI13gzIfBYDAY6QJnPgwGg8FIFzjzYTAYDEa6wJkPg8FgMNLFf1a38/jx4xQKhcVi8fn8bt261dfXHzhwAC2qqKjYvn37pUuXUPFioVDo5+fH5XJXrVrV4qaoVOqZM2f27dsnvojP52/YsMHQ0LCsrExbW5vL5fbr12/JkiUS3JHq6uqnT58mJCT8+eefnURiNDIy8sOHD+b/x95Zx0WxNQz4LN2dgiIKUoogioIIdiKIBTYoKhZe64rXunYLJrYoJgKKEqKEII000o3AEksvu2zN98d53/nmnQ0x8Hp1nj/4DbNnTs+cyedYWEyfPh27Hq3JpUuXnj17dvr06TY2NgAANpvt7++fmppqYWFRX18/c+ZMS0vL/s7k+fPn7e3thw0b1t8JQd6/fx8UFHT+/HnBfQlHb2/vpUuXDAwM5s6d+9VJIwji6+urpKTk4uKCXR8VFfX+/Xue/fY70tDQcPjw4bKystmzZ1dWVlpbW+OyAQA4f/58RETEixcv4NTh1dXV+/fv19DQOH36NAwQGxvb3Ny8ePFinkmgBfnubfr06dOOjo5169YJCNPb23v79m0hIaGWlhZHR8cRI0ZwOJx79+6JiIh8+vTJxsZmwoQJ/I42r169mjhx4jdO0V5dXX3w4MHLly/LyMhw/xoTEzN58uS+xAO7qI+PD7qGZ312dnZid95flV/2mk9CQmLXrl3i4uI0Gm3btm0qKiroT1paWtnZ2ai2TUhISEtLq7i4mF9UbW1t165d4/lTT0/P6NGjt2zZkpeXZ2Jisnfv3vb2dn7xvH79+isKcuzYsfHjx586deonGfaampqeP3/u5eVlbW2N+wmtSTk5OSaTSaFQ4HphYWErK6tXr165u7tv3rx5zpw5hYWFvb29b9++jfwvTCaztLQ0MjIyKiqquroaJhSJoaur64vy6e7uPnTo0M+WJSsr64ui5YelpWVcXBx3XxKchLi4OIlEqq+v/5akSSSSjIwMrDQsw4YNS0tLw63kcDhv377te+Sf7bSampq6urpRUVFOTk6GhoZLlixJSUnBhRk0aFBUVBSLxYL/6ujoMBgMbLVMmDBh5syZ/JJAC9KXNu3jXgaDKSoqFhYWCg557do1Dofj4eFhbW1dUlICAHB1dZWWll6xYoWXl9f169cfPXrE72hz8uTJO3fu9CU/AtDR0SkuLmaz2dxt5+fnZ2xs3Md4YBcFmD7Jsz5xO++vyi97zbdhwwbcv8nJye3t7YqKiuPGjRMWFk5KSoqNjV2xYoW+vr6IyH/qoba29s2bN0JCQq6uruhIo6enJycnB5epVOqDBw/Wr18P/5WVlV22bBmaColE8vDwCA8PLyoqmjt3rr6+fl5eXlNTE4lEEhMT27p1682bN21tbQEAHR0dz549MzAwePPmzcKFC0eOHIlNOjMzk0wml5SUjBgxIiUlJSoqatasWQwG482bN3Q63dHRUUFBITIyUkJCIikpacqUKfX19RwOp7KycvXq1bdv37aysrK1tW1pacnPz6+pqXFxcamtrY2JidHV1c3Jydm4caOkpGReXl5RUZGEhMTcuXN7enpevnxZV1e3bt06WVnZxMRECoViYGBgYGAAAGCxWIGBgWw2e+TIkUZGRv7+/tXV1bGxseiZJrZi0ZpEFyASEhKwPhUVFRUUFAAA4uLiSUlJqqqqrq6ur1+/bm9vHzJkyOzZs5OSkg4cOGBnZycuLq6pqfn69evBgwfLyckVFhbCK8Xw8PDq6uo1a9bk5+dTKBRzc3O0mI2NjTExMfLy8tLS0j09PQYGBoaGhmj2xowZ8+LFCxUVlcTExEmTJllaWrq5uZmbm2tpaTU2NsJmQgvV0tKC1raQkFB
AQIC5uXlkZKSrq+vAgQNhmIKCgpaWFgqF4uTkxLPgbDYbTaKlpQUNTKPRUlNT6+rqHB0dYfiGhobg4OCFCxcyGIzKysqGhgZnZ2c0JwLaMT8/v6KiIjMzU1NTE02XQqHExcW1tLRw5/PUqVOpqalaWlpqampotGw2G82PjIwM2oEbGxthpx0zZgw2AG5fExYWhp2fw+HIyMgMHjwYQZDr16/Lysp2dnZ6eHiguxKLxTp69Gh7e3tLSwv2TO7Dhw9dXV2jR4/G1TO2IAwGIzY2FrYptvdi6yctLQ3dy7C7Ia5LJyQkwGDCwsIsFuvKlStycnIrVqyAvQvdCuZNSUnJx8fHyclp8uTJtbW1+fn5b9++vXfvHizy1q1bFy9eDEdE7NEGAJCUlOTp6blv374tW7YICf3nGqOrqyswMFBfX7+4uHjNmjU8M0mn0zMyMpYuXfro0SMzM7NRo0bBToK2HRztGhsbCwsLXV1dq6qqIiIiYL2tW7cuPDxcSEjIzc3t5cuX0tLSw4cPf/jw4apVq+B+h/ZJNTW1Dx8+GBgYaGpq4o5FaB9OT09PS0szMjLq42Xlv4hf9ppPVFQU+y+TyQwLC5s5c+Zff/0F17BYrAkTJkybNg09FWUymRcuXJg3b15YWFhAQADPaEkkkoSEBPZf3CH+0aNHcnJyNjY2Dg4OAIAjR45MmTKFRqOZm5vLycnBYQ8AICYmdvXq1Y6Ojvnz57u4uOCSLisr8/HxGT9+vJWVlYqKytSpU9XV1ZcuXbp48eJJkyZNmjSJzWa/fv06Ojp62rRpEhISZ86cmTBhQmlp6Z07dzw9PXfv3g0A8PPz09fXb29vDwsL09LS+vvvv/X19dva2l68eFFVVfXo0aNFixalpKR0dnYeOnRo4sSJQkJCf/75Z3V1dUhIyIwZM2pra2FWt2zZYmpqunjx4mXLltXW1trY2Ghra6N7ApVKxVUsP7q7uy9fvjxp0qTJkycbGRkBAGRkZOTl5aWkpAYMGKCqqiosLCwjI6Oqqqqjo9PY2Dh37txx48ZJSkrKysrOnz9/1KhRMJ7Ro0f7+vqKiYnV19cbGhpiiykqKurl5TVo0KBBgwY9f/68vLwcmz1hYeHc3Nzw8HAPD4/NmzcLCwsbGxtbWVmpqamhzQSTYLPZ2NqWk5N7/Phxc3OzlZXV0aNH0RLduHHD1tb2wYMHra2tPIuMTQIbOCAggEqlDh8+vKOjAyYXFRW1YMECdXX148ePm5iYYEcXAe1YX1/v4+Pj4OCgoaGBhkcQZM2aNQ4ODqamptz5NDU11dfXNzY2xkaLzY+fnx/agdFOi8swT/z9/U+dOrV37141NbVr165dv3592bJlcAENc+HChVu3bh07dgw77CEIkpOT8+bNGwUFBWw94wqCIAhsU1zvxRYEzTC2FNxdGrszVlVVrVq16v79+zU1Ndit0OwtWbJk8uTJpqamfn5+AwcOzMnJ0dXVRfM/ZMiQqqoqBoOBrQr4GCU1NdXZ2dnIyOjly5fYLnHlypX29vZRo0bxy+TgwYN9fX0lJCTa2trS09PRbdG2g/9GRUXp6uoCALS0tI4fPy4nJ6enp7dhw4bly5eHhoZWVVUhCPL69Wt1dfXY2Fj0NAjtkyoqKrA+ccciNLni4uLExMTFixe7ubnV1dXxa/d/Kb/syIdDWlra2dn53bt3ZDIZrrG1tYXDxqdPn+CawsLC5ubm/Pz8LVu2jB07lmc8UlJSq1atEpDQq1evGAwGjUaDN0gNDAymTZtmYmKCCyYpKSkvLz9mzBhzc3MFBYWYmBhs0tra2np6epaWlvC5CACgvLy8t7dXREREV1eXyWR++vRJXV3d1NR07NixcnJyqqqqGhoaenp6WlpakpKSLBaLwWB4enrm5ubW1dXV19dLSEjIycnp6OgYGRnV1NS8evXKzMwMAHDs2DE5ObnU1NTi4uJRo0atX79eS0srKytr+/bt6M3MsLAwPT09UVFRa2vrmJi
Yz1YsP2RkZDZv3hwZGVlcXPzo0SO4Mi0t7f79+2/evIH/9vT0/PHHHy0tLRs3bsSdUqD/qqmpWVhYxMfHt7S0DBw4EFtMDQ0NFRWVUaNGGRgYqKqqcmdPTk5u2LBhSkpKuEMVrplwtV1dXS0rK2toaAhrD91qz549kZGR3d3dDQ0NgsuOC2xjY7N3796EhAQtLS0AwI0bN4SFheHoNXv27ClTpkhJSaEbCmjHly9fDh8+HACAXoYCAOC9VhEREXQlz3xio8XmB9eBIbgM82TFihV3797ds2fPmTNnIiIi4B0/NTW18PBwNMy7d+/09PSkpaWxTx9IJJK2tjZcwNYzriDi4uKwTXG9F1sQNE5sKbi7NBY9PT0ZGRkDA4PKykqeZRcRETlz5syLFy+8vLxevnypqqqKPdFpaGiQlpbGthe6Pisr6/r16xoaGt7e3uh6KSkpePvB3NycXyb53TjBUVNTA58gioqKysjIGBsb6+npqaqqiouL6+rqlpSUCI4HrU/csQi9bR4ZGYkgSEFBwf3799ED0S/DLz7ycTgc+DyvqKjowoULkyZNgndmUBQUFAYMGACXtbW1a2pqbG1t7ezs0NP/L01o0KBBHA7Hzs5OV1eXSqWuWbNm9+7d8NE9m83muS2JRBoxYgS/pBEEQRBERUUlNzcXxiAlJaWuri44PwiCrF692sjIaMiQIdwTUUlKSkZHRwMAuru74fFi+PDhdnZ2YmJiDQ0N4eHhurq627dvh4FVVVXhUwESiQRvZGEj5Fex3PmBC+Li4mZmZu/fv4f/Wlparly58s8//4Tn41JSUgcPHkxOTm5qahIQ26ZNm44ePQoPl/yKKbjdUWCVYpsJANCX2maz2S4uLtOmTVNSUhI81RebzcYFFhYWjo+Pf//+/ZMnTwAArq6uvr6+8IHTiBEjAgMDt2/fjp6QCWhHeBUL/ttJeK7kzicsFzZabH5wHRgGxmWYH4MHDwYANDQ0aGlpwVspDAYDNhNEXl6+ubkZYPYXCM8K5C4d/Ivrvbj6gRnGlqK8vBzXpdFguHRxZYcrT5w4AQCYMGHCnj17oqOjJ0yYwGQyKysr4a/v379fsmQJvATEFurhw4fXrl1bv379tWvXampqMjMzuQvIL5MkEglmj8Fg4HKIPYYMHjy4s7OTZ0OQSCQEQT4bD3e1k0gkNTU1uKyjo0OhUOzs7CwtLQW8vvAv5Zd9zgcAIJPJcXFx3d3d1dXVdDo9JSXl2rVrLBYrOjra2Nj48ePHDAZjz549oqKi79+/LywshOf4tra2tra22J0kJyfn06dPiYmJ48ePh2+gxcbGYhPKy8srKCgIDw+3s7NzdXVdsGDBlClTpk+fvmDBAg8PD09Pz8WLF0tLSwsJCd25c2f16tXohk+fPlVVVfXw8BgwYAA26dDQ0Ly8vMbGRjqdXlZW9vr16/Xr1x8+fPjcuXNqamqenp5CQkLZ2dlkMtnR0TEtLa2uro5MJufm5n769MnW1rahoSE3N5fD4Zw4cUJBQaGgoMDGxoZMJldXV6ekpLS3t1++fPnmzZtOTk7jx4/fsWPHqlWrbGxsZs+evXTp0ra2tocPH1pZWaFn9z4+Pvfu3SOTyerq6pMmTTpx4kR+fn5tbS08E8dWbFRUVEJCQmFhYV1dXXZ2NofDcXBwgLtfUFBQa2vr9evX29raSkpKrl692tvbm52dLS8vLysrm5SUZGdnR6VSq6qqWltbL168uHDhwvv378N3kVpbW+fNm4cdukaPHq2oqAjvuGKLOWLEiNra2oKCAk1NzZycHAkJCS0tLWz2cnNzFRUVKysra2pqioqK4B0nAwODo0ePwmaC8SspKWFrm8lkVlVVwSqtqalpa2tTVFRks9lkMvnUqVNUKjUoKGjChAlkMrmqqgr2pc7OTvhsGCYxZMgQbGAFBQVdXd1JkyYZGxu/efNGVlb2yJEjs2fPfvjwYVBQ0Lx585ycnNAbmALa8cKFC7dv396/fz8
cJhkMhpiYmL6+vq2trbu7++DBg5uamshkMjbplStXHjx4MDY2FhstjUYzMDCYNGmSubm5kZERtgPDTtvV1QUzbG5unpeXd+zYMXQIbGhogK9d+Pn5VVVVubi4HDx4sKury9nZ+eXLl0wm86+//rp8+TLcj7Zv3z5t2rRt27YVFxcjCNLQ0KCpqclisRISEvLy8j59+oStZxUVFWxBSkpKYJt6eXlhey+2IG5ubjDD2N1QQ0Pj+fPn2C6N7oyNjY15eXkNDQ15eXm6urq4nRcGrq6uPnDggIWFRUxMzPHjxyUlJV+8eHHs2LEpU6YwGIzCwsJz587hjjYtLS0hISGurq5SUlJUKnXYsGG7du2C7V5fX19dXf327dvly5fzy6SUlJSGhsaBAwfgVX5lZSVscWNjY9h2kyZNAgBMmjQJJl1eXt7Q0FBaWpqenl5QUFBfX19YWKimprZmzZr9+/efP3+eRqNlZWXV19fDzgP7JLqPzJkzB3ssYjKZcOfdu3fvwYMHS0tLx4wZs2PHjm8+Hv9kIL8NdDod/YsgSE9PD5vN5g5Go9H6Eo8A4M1GuMxms7u7u+Eyg8GAZ4WQiRMn1tTU9Pb29j3p3t5eJpMpOAx3VvlFi12PLsMLFNwmHA6np6dHcCqfrZbvC7YeBBdTcPZg/WObCfuT4NpmMplsNpvFYgkOBpPABmaz2XQ6nedWPHMiuIBUKpXFYnGvhG8DcucT7XJotLj8YDsw7LS4AH1paw6HQyaTudfTaLSOjo7PdnWeBcHFgy5j6wfdy9BS8OzSuJ0RBVt2bEJkMhlXyRQK5dv7vIBMMhgM7h6CPVwgCPL48eOWlhYB8TOZTA6H89l4uI9FKH1vqX8XxJzs/wy9vb1jxoy5cePGuHHj/um8EBAQ/FtJTk62srL6lhh+z2MRMfL9M7S2tjY3N5NIpB/2qTUBAQEBN7/nsYgY+QgICAgIfi9+8Xc7CQgICAgIcBAj33/o7Ow8cOBAQkKCgDA0Gm3//v2pqak8f+VwOHfu3MF+CRQVFXXw4EF+ye3atevjx4/omtjYWPj5PHSmbNq0CX2vHRfb+fPnoTMCDVlVVcVzk34FzTAkKytr165d2AAbNmwQcEeBQqH4+fndu3cPrYT3799v2LBh3bp1paWlAID6+nr4qV9NTc21a9dw3zn4+fl9uxfqB/DixYtXr14dO3YMXcPd9F8HhULZt2/fN0aCBd0FPn36JOALxZycHE9PzytXrtBotOfPn+/YseOLDHC4bvN1oLsA5OnTpzdu3PjGOL+avLw8d3f3fyp1gq+DGPn+A9ZWx0/9Jykp2dHRwe/TFm5nI09xIpochULp7u5G16DqwpCQECqVeurUKdxXw9z2QjTkhw8feG7Sr6AZfvfuHZ1ONzExiYqKwgbAqTpweHh4zJgxY8WKFevWraPT6QCA9PR0+BGIvr5+d3f34sWL58+fDwBgMpk9PT3oZ0YQOTk5OED2Ee42/Y7STn5ERES0tbXNnTt37dq16Erupv86lJWVsd/eoHx1udBdQFtbOyYmhl8/HzlyZEpKir6+vqSkZFZW1sqVK83NzfueimBFZx9Bd4G+6zcF83VOXciIESOgD/Mf50ulrL8zv+b3fGw2G9Va/vXXXxkZGah9rra2FnoRJ0+efOfOne3bt0NdMqpeRJ1+PF2FwsLCNTU1f//994QJE6ZMmYJ1Wg4fPhx1JeDEiTgd6Lt371paWnCn1VBdaGpqGhoaqqCgYG5ujtqneNoLFRUVYchBgwaFhYXBTfT09FADZ3t7O5RYysvLjx07Fl2fl5fX3Nzc3NwsKSkJpaNYCyK3ubStrc3X19fFxQV+Wrdy5cro6GhDQ8PGxkaY4VWrVp06dQp+/xQaGlpUVLRhwwYGgxEUFLRq1aoPHz7gkoMkJyfLy8sLCQmpqamlp6crKiqeOHEiNTUV6sGioqIsLCxkZGRERERSUlK49f/CwsJ0Ov3s2bOKioqrV68mkUivX7/
u7u6WlpaeNWsWAAD7L3eb2tvbQ3uhvLx8XFwc9Cg6OjqiBsj6+vrY2FhlZeXMzMwtW7YoKyvDdJlMZlBQEIlEmjBhwoABA3Jzc4uKihgMhouLS3V1NVaqyWAwHjx4oKWl9eTJk5ycnMOHD4uKiuKaHtU2qqio4JSVLS0tkZGRwsLC8+fPZ7FYWLEq3La8vDw5OXnJkiX8ZKS9vb2wKVesWPHmzRu4Ozg6Or569WrdunUIgrx69crV1TU2NrapqcnW1lZLSwvtwPPmzfvrr78uXLjAc/8SFxcXFhbev3//0qVLoYgO/K/xEvU92traRkRESElJJSQkmJubOzg48FN0cjichISErKwsPT09MzMzeA6Xm5sbFRW1atWqp0+f6ujozJo1y9/f38nJCe4CLS0t/PSbAAAWi4VLGmv4FBMTQz2Zzc3NWKcuh8O5du2akZGRnJxcbGysq6trU1NTU1OTjY0NVmCLPcLAenv69KmKisrkyZOxJ3z8dh91dXVcm/I7anHXIU6li24lJCSUlpaGij07OzufPXsGZTFQcJqTk7NhwwYZGRmsaxfbpdlsNnrQGDFiBKquRfv/L8Ovec2HIAiqtSwtLcXa51Avoqqq6pMnTxgMhqamJvb2y2ddhQ0NDUuXLl2zZk1RURHWaVlVVYWmjvUN4pycL1++LCoqWrhwIfK/DguoLlRXVzc0NDQzM0OHPYSPvRANOWnSJHQTrIETlVgOHjwYu55Op3t7ey9cuPDWrVv19fVYCyKFQuE2lyoqKqalpXV0dBgaGoaHh4uJiZHJ5EGDBqEZVlNTmzt3rpCQUFtbm5mZWU9PT3BwsLCw8JkzZ5hMJi45tMg2NjZxcXEcDodOpwsLCw8fPrympsbCwmLmzJlUKtXExERYWPjZs2eDBg3q7u6urKy8f/9+Tk4OtiHKysoWLVoUHBx869atp0+fVlRULFy48OnTpwEBAbh/udu0u7sb2gs1NDSwHkXUACkpKenl5WVkZKSurg49qJA///xz4sSJ5ubmQUFBlZWV586dW7x4MYvF8vT0xEk15eXlhwwZMnbsWBcXl5CQEA6Hg2t6rLYRp6xks9m7du1ydnYWEhJKTU3FNh+aE9iv+MlIFRUVsU2J7g4jRox4/vw5jUYjkUhycnLv37/v7Ow0MTFBJwyCSEtL5+fnC9jFtm/fTiaT0WEPWxas77G+vh7Ov7F27VovL6+amhqeik4AQHBwMJlMHjdu3NmzZ9ETTQMDg9u3b0MnQHl5uZCQEIvFEhcXh7sAP/0m3JZEImGTrq2txbYv1pOJc+oKCQkxGIzs7GxTU9OHDx8qKCg0NTUNHDgQt7OjVQpt1AUFBZKSklOmTMHd5+C5++jo6HC3Kb+jFq4Oa2tr+eVk5MiRWLGnrKzsq1ev8vLy3NzcFi1aNGLECHFx8cDAQJxrF9ul0YPGwIEDsepaDocjoDP8G/k1Rz4RERFUa4mzz2G9iPBMjZ8cj5+rcOzYscOGDZs5c2ZKSgpPpyXON4jTgT569Ah+N4N1LaLqQm742Qt5gjVwohJLXV1d7Hp5efmBAwcqKCjo6enV1tZiLYh1dXU8zaXr1q3z9/dvaGiorKxsamoSFRUVFhbmzrCioqK2traxsXFFRYWcnJy8vDwAAJccGvj06dMJCQm+vr7Z2dkWFhYAAElJyT///NPKyqqwsFBfX//YsWPz589/+/athYXFkSNHVq5cCcVXKMOHD9fR0VmxYkViYmJISAg8Cs+aNSs0NBT3r4A2xXoUsQZIdXV1JSUlfX39lStXYu9lpaena2hoDBs2bMuWLW/fvoW+rpkzZ4aGhuKkmtisiouLAwBwTY/VNuKUlQUFBRISEiIiIosXL54wYQK2+dA40f7DU0aK7XXW1tbo7gAA2Lx58927dyMjIx0cHOzs7JSUlD5+/Mg9UxKVShXg8Dt8+HB+fv7x48fhv9iyYPc4KJU1MzMbMGCAo6NjbGwsT0UnAKCxsbG4uNjY2Hj
kyJGw58B6mzNnTkREhIiISHBwcFlZmZGREc9dAKvfhGuEhYWxScfExGDbV7DMc+XKlc+ePevu7paUlIyIiKiurh46dCh2Z4+Li8NWKZPJ3Llz54wZM3jWFffuA09ocG3K76iFq8OYmBgBOcECK3n48OGDBg2SkpLS1NQcMmRISUkJTmaL7dLoQYPD4WDVtehp/S/DrznyYcHZ57BeRCjW4ie1E+wqJJFIxsbGOKcl/AnnG8TpQHm6FgHGoYdbz89eiFuDrkQNnLgM81uPtSCKiIjw1IfOnDkzPj6+qKho586dq1evhvIkbCahDQcXs+APZgYNGnTs2LGenh4vLy84MEAUFRUHDRoEM1ZfX6+srBwdHb1q1SoWi8XzqSFsCE1NTfgQFDYE7l/Ap01xJlWehkw6na6jo4P+W11dDV8jKi4uxqYCrfmCwTUlT0skRFJS8t27d/BEG77Kwd18AqqXzWYLkNA6OzvDa1AxMbFbt25VVlaOGjWKOzZxcXFJSUme8XM4HHl5+YiIiCdPnly5cgX8r4JSXV2dp++xp6dHV1eXX7ZdXV1pNFpKSgp2NgwAgLu7+5UrV2RlZfX19S9fvgwHKjQSfv5JHDBpbPvi/LS4nqCkpDR48ODDhw/fv3/fx8cHjsQ8d3aIqKjoihUrNm3axDN1nrsP4L9LAv7OTFgQATnhJweGQJ8nTmaL7dJoyC8VBf/r+DVHPhqNlp2dnZCQQKfTp0+fHhoa6uzsfOXKFR0dnUuXLjU0NEAvooODw5YtW2JiYmpqaqBqMi0tTUpKCjr9QkJC3r17B12FaMz6+vpRUVFPnz41MjKytLSETsuQkBDotITORnV1degb9PPza2pqYrFY0MkJJ3DZvn37pUuXTpw4UVlZib6JgKoL6+vr09PTExMT0YlYUQ0jjA3aC5OTkzs7O2FI7CbQwLljxw4ajZaWlgYllgAA7PqsrKyqqqr6+vqPHz/m5OS4uLjk5OQ4OTnBZxvYrKKlFhIScnR0tLa2XrBggbKyMvQRwwzDZzbHjx+PjY2tq6urqalJSkrKyckpKSmpra1NSkrCJYfG2d7efuvWLXFx8a1btwIAbty4sXfv3kePHk2fPh1N+tmzZwsXLpw8eXJ1dfXLly/t7OzQzeHLRK9evSooKNi8efOuXbtycnIiIiI+fPiwbds23L+opxHbpvD25vv376FHEfzXkFlRUREZGUmlUltaWoKDg2/fvo29E3j8+PHJkydv3769q6trzpw5WlpaQUFB169f9/b2Li4uRqWa+fn5VCo1Pz8/PT29tra2vr4+NTUV1/Surq4eHh7r169PT09H1aApKSk1NTXKysoTJ060srLat2+foqIitvnQnLx7966urg5ulZeXh5ORNjc3o00pJyeH7g4AAAkJiWnTpsEhhMViPXz4MCYmJj8/PycnB+4CCILQaDRDQ0MAwNKlS3E3mXNyckpLS8PDwyUlJT08PLZs2XLixAlnZ2e0LA4ODtg9DgDw9u3b8PBwRUVFa2trnKITlretrS0wMDAjIyMgIODmzZvoxGEAgGHDhikrKzs5Obm7u8NTora2NrgLoM2anp6O6jc/fPiAzS2atK2tLbZ9S0pKzp07N2bMGDs7OzQe7IYeHh46Ojp6enoDBgyAT46xO7uVlRVapfn5+fA+bU1Nza5duyoqKrS0tLAzu3LvPrhd8rNHLVxB+OXE2Ng4Pj4etQq3tbWVlZXl5eXl5ubW1NSUl5dnZGSUlpZ2dHRgJcbYLo0eNFB1rZ+fn6en5683V8Pv4u3EeimxXkQGg8FtBeTpKkSBz67Qf/k5LXG+Qaz+jsVi0el0bteiAPjZC7npi6VT8K88Q6K5xVkNBawUTF1dHbZuORxOQ0MDrk46OjoER0KlUvv4L8825RYVYg2QhoaGPT093HWOszXiEhUMrum5LZFYPtso/EDLxW+rz1pP4QMz5Et0rLiyoBH+/fffjx49+mwtXbt2rbGxsbS09O3bt7G
xsbiY4QJ3XfHTb/JLGqsqxXoyecYD08UmKlhgixIcHIyrT55F+GybCqhDfjnh6d7kBiezFWC1/SJR8L+IX/PdTm7Q6WSFhISw5y+4CWyxK0kkEvYuHIqIiAj20SCJROJ5Uwg3ZRd2PlthYWHBc/p8NjYBYBPqy3ruX3mGRDMsoMa+CHRyKAiJRMJOrwqBrzYIAFctAv7l2abcN5rgrxISEvX19a2trUwmk7txcb2i700DuJpecE/4bKPwAy0Xv62wHRgtMrrm06dPlpaW6HPlPiaKKwuMEEGQ+vp6SUlJfjdOUQIDA6uqqpSVlTU1NRcuXIiLGS5wdzMBHY9n0rjCogs844HpYn/it7PjGDt2LK7meRbhs20qoA755YTnvVNuYD2gjcuvlfsY278Rwl5GQMCDT58+UalUaWlpfm8eEfQFOp0OZzrV0dH57IEezvL62dOd/kj6Z+aXKchPBTHyERAQEBD8Xvyab7gQEBAQEBDw49d8zsdgMG7cuFFRUWFqasrhcMrKyjZv3jxgwIDIyEgGgzF37twfk42wsLDc3FwLC4vp06cLDhkaGhoZGXns2DE5Obn29vbr16+rqamNHj365s2bOjo6ysrKnZ2dkpKSTk5OV65cYTAYysrK4uLiK1as4L41RKPRcnJyvnGqLQ6H4+fnx2AwPDw8+rhJdXX1wYMHL1++XF5ebmho2PfnQ9/Oq1evwsPDR40aJSoqWl9fb21tPXHixJ+2rQsLC69fv66pqclms4cOHbpo0SIWi3Xjxo3y8vKRI0fC7rp+/fpXr16VlJSsXr0afm35pSAI4uvrq6SkhNPflJaWPnnyREVFhcFgdHR0rF+/Hn6W4OHhgT55/fTp09OnT2VkZEpLS8+ePYuL+bt0sK/g/fv3QUFBPj4+fQz/vTpkZ2fnkSNHXF1dTUxMvi6GPsKzGxcWFsbExHB/L8FisV68eBEbG7tnzx7BN+Rh7/r48eOoUaNoNJqzs3N7e/vt27fl5eVXr16trq4eFRWVmpqqrq7e2dmpoKCwevVqDodz7949ERGRT58+2djYTJgwoT/L/Q/xT75e05+EhoaOHz8eLqelpRUXF7e1tf31118nT578MRlobGxcs2YNh8Pp6urqS3g9PT0HBwf4gtm9e/dKSkoQBBk9evTbt29hgAcPHiAIsn///h07diAI4uPjM2vWLO54Tp8+/UVvjfLj9evXf/zxxxdtMm7cuPb2dg6Hc+XKlW/PQN+BDkz4EhqVSg0ODv7J23rChAmvXr1is9nz588/deoUwqu7hoaGjh49+ltyde/ePVwNZGVlWVhYUCgUBEG6u7sXLVqEIMjOnTt37tyJDTZv3ry8vDwEQfbv38895feXdrDY2NjvMq83nU43MzP7ok2+V4d0c3NLSUn5lhj6Anc3ZjAYz58/nzlzJnfgwMBAPz+/rq6uvrzvHRkZaW1tDRd0dHTYbLarq6uPjw+CIL6+vsuXL4cNWl5evmXLFgRBVqxY8fTpUwRBOBzOsmXLHj58+F0L+lPwa17zAQAkJCTQD585HA6cdHHIkCGoSxMLavAzMDCAvkddXV0LC4uWlhbUXCcmJhYYGAgFj0uWLME697CbwwjZbLa/v391dXVsbOzkyZOxDsmMjAwymVxSUrJgwQL4fRJk/fr1oaGhhw8fPnjwoLS0NHxjEC3Fp0+fZs+eDQAQFxeHHwCNGjXq4sWLuIKEhYXp6OgICwvTaDRUOlpQUEAmk2tqajgczqZNm0gkEqpqlJKSwukT0ajQ1/9Qk+fKlStv376tpqY2b96858+fm5iYaGtrY+sBbkIikYYOHfrs2bNFixbhsldVVRUREQGTW7duXXh4uJCQkJubGwAAKxLEKivz8vLQ6pKSkuIpEoSP/WFF5eXlOTk5fce2XrNmDdZI+dm2xpo8GxoaUAsi+v0ybEQSiSQkJGRmZgY/H8Z2VwkJiWHDhtXW1gp4nQGroKytrcX6QiUlJfPz8ysqKjIzMzU
1NbFbeXl5eXp6KikpAQCkpaX//vtvwOsNQyUlpbNnz167dg2OiJ/tYDIyMvwEpGJiYlDrunjxYlREaWdnh3WNWllZ4Wyo2Arn7pA9PT2w17m7uz99+vRbOiQAoKCgoKWlhUKhODk5JSUl4RyzPBW7/eTD5NmNR4wYwZ3nxsZG1O5rbGzMzyYKtWoAAHFxcbg8evTourq6jo4OCQkJCQkJFou1c+fOgoIC+OrpkCFDPDw88vPz3759e+/ePZiZrVu3Ll68eOnSpbw74r+WX/k5X319/ZUrV7Zt2xYSEiIgGNbgFxQURKVSHR0dHz58yGazseY6EomECh6xzj3s5micwsLCNjY22trakydPxjkky8rKfHx8xo8fj5PAioqKPnv2zM/P79WrV9j1L168uHTp0rJly3p6euCavLy8gwcPXrt27f79+7iyvHjxAuo9sdLRysrKixcvLly4MD09/fHjx1hVI7c+EQdWOhoYGKihoQFlYE1NTfr6+jx9kgAAMzOz58+fc8empaV1/PhxOTk5PT29DRs2LF++PDQ0tKqqCisSxCkr0epSUFAQLBK8cuXK0aNHeZbiW9oaa6T8bFvjTJ5YdSouJxEREdu2bauurj58+DBcA7vrgQMH+jLhDlZBifOF1tfX+/j4ODg4cH8lkpWVhR1LUMEjjpMnTzY0NJibmxcUFKAiMQjPDiZAQIpqXbEiSjKZjHWNgv9VR2Jj45k9tNd5eXl9Y4cEANy4ccPW1vbBgwetra04xyxPxS7oZx9mX7ox1u4r2CaK0tra+uzZMw8Pj4MHDyoqKsKVpaWlLBYLe/5tbGyck5Ojq6uLnocNGTIE7qEC8vNv5Fce+QYMGLBp0yZvb++pU6cKCIY1+D169MjGxkZUVPT8+fPl5eVYc11TUxMqeMQ69wQLAAEAOIektra2np6epaUltxZBVVU1KCho3bp1RUVF6Mp58+Zt2bLl6tWr6Ln5iBEjxMTEenp6uFOsqamB3nesoFJDQ0NfX19DQ2P58uXR0dFYVSNOn9jR0bFr165du3bl5eXBCHHSUXt7+w8fPrS2tsrKypJIJJ4+SQCAvLx8eXk5d1WIiorKyMgYGxvr6empqqqKi4vr6uriRII4ZSVaXWQyWbBIcNOmTfv27du2bdv3bWuskfKzbY0zeWLVqbiQ06dPLygoGD58OGoQhd318OHDuK/ZeIJVUOJ8oS9fvhw+fDj4XzEsRF5evrGx8bORq6qqvn79euvWrdOnT8eF59nBBAhI0Q1x+lycaxSrjsTGxt0hwf/Kab+xQwIA9uzZExkZ2d3d3dDQgHPM8lTsgn72YfalG2Ppo8NTWlra0tLSz88PO6ejvLx8b29vW1sbNqSqqmprayv6L/zU5Is+Wv1X8MuOfPD2N1yePHmygJBYgx8qsSwvL1dUVBRgrkOdezgBIAq8mwwA4HZI8gQam0aNGnX27NlDhw6hkcAFExOT2tpaCoUCywWvjeDdKiyDBw/u7OwEfKSjUPonQNUoJye3b9++ffv2QXMVAACnfxQSElq+fPnatWuhzwnwcQ92d3dzH3Z5wi0S5FZWQgSIBGFgWJzv3tZYI2V5eXnf21qAyZPD4QgJCT169OjSpUtQnIZtCzs7u+joaFzr4OZd46kYhfATwwIAnJ2dsdOSwFENG6anpyc+Pv7cuXMkEsnDw2PKlClJSUnYGHh2MAECUgibzeYnooRg1ZHY2ISFhXEdEoL2um/skGw228XFZdq0aUpKSl9Ukzi+iw+z790YmyUBDk9sYDExMR0dHXQAg5sPGDBgwoQJz549Q0M2NjZOmDCByWSi4u/3798vWbJEwESb/1J+zed8DAYjLCysrKwsNzcXne6nq6srJSWlp6enpaVFRUUFDVxTU/Pw4UMrKystLS0TE5MFCxbExMTMnDlz4cKF0Fynpqbm6enZ0dEBBY/Lly+Hzr3Zs2cvXbq0ra0N3RyNE0EQ6EKsra3dtWvX9u3boUNyz549t27
dysvLa2xsxHb6ly9fvnjxYuHChTo6OsuWLcvMzAQA5Ofnl5aWBgUF1dTUNDc3wzAJCQk0Gq2hoeH+/ftjxoyRk5P7448/UEPEvHnzsrOzjYyMQkJCdHV1oaCSTCbn5eWFhYWlp6f/+eefT548efr0KY1Gy8/Ph1LN3NxcDodTU1MDH1HA/EMHKTw5tbW1tbW1hYd7Nze3srIyGAxbD1VVVVBZOWPGjNzcXHt7++bmZjMzs9zcXPS+bnl5eUNDQ2lpaXp6ekFBQX19fWFhoZqamrq6OioSrK6uhsrKadOmbd26NSQkBK0ubHNgr5iDg4MBAEFBQc7Ozt+9rV1dXRcsWDBlypTp06draGg8f/5cQFvPmTMnJCQkKCjo48eP3t7eqAURe1+xsLCwuLg4MjJyypQpT58+nTt37u3bt9++fVtdXX3r1i0OhxMfH29nZ1dQUFBRUQHr5P379/ApLwpUUCooKBQUFNjY2KC+0Pb29gsXLty+fXv//v3wsMtgMNBhYN++fbt37968ebONjU1vby8UQiYmJrLZ7Js3b3Z2dgYEBAQEBOzZs0dCQkJJSYnJZOJeVeXZwYyMjNAqmjJlCrZHtbW1Qa3rtm3bDh48WFpaOmbMmG3btuXm5ioqKqKuUaiOtLe3X7p0KbbC4aSPkJiYGFhMbK/7xg7JZrPJZPKpU6eoVGpQUJCWlhbWMbt9+3YPD4+6ujroWUVfZ0V9mKKiop/1YcJCYXtC37sxh8OJjo6urq4uLy+HM/FCoLNXSUlpwYIF0OFJJpOhw9PX15dMJjs6OqK3iFgsVmhoaGlpaU5OzsiRIwEAZWVl6enp8NHm48eP//jjDzKZbGBgAG8jqaurv3jx4tixY1OmTGEwGIWFhefOnQO/Hv3z4sy/CZzBD529AcLPXIcNj92cH19qd/zq9zOvXLmCE1S+e/fOw8MDayvlqWoUAC4ktkJ4RnLy5Ek4ewO3wJAfOJEgv62+UST4dW2NGin7o62/GsGNSKVS+XUhFovV1NQkIGYajcZischkMs9fuTsY8jkBKfqTgKrDqiMFx8Ydz7d0SCaTyWazWSwWv6b/IsXuP+LD7KNNVABUKpXbkUuhUPpubf3XQThcfjWYTCb8sAxd8+jRo6dPnz579uzHWPgyMzN1dXXhOXh9fT3Oz0nwb4e7g/3kEB2SgBti5Pv1gZ8GqqqqwtfZCQgICH5ziJGPgICAgOD34pd9t5OAgICAgIAnv+a7nRDBgsHg4ODS0tLdu3f3PcKvsFmissGcnJyvlgf23RxIoVC8vb0FfwbLDU/Nqaqq6p07dyZOnIjKSiB9FwYCANhstp+fX1tbm7KyckNDw5gxY9LS0uzs7GxsbPqevYiIiJcvXxoZGZFIpIkTJ/K0Wnxfzp8/b29vP2zYsO7u7sjISPiCKPY9Qxxox1i6dOnZs2enT5/elwLm5+ffuHED62Vdu3at4E2ioqKCgoJGjBghJSUlIyPj5OQkLCzMHc/48eOh+lVBQWHo0KEzZ8784iroN1CdpoyMDLqyrq4OvvbM81G0AGMnv10jJibGxMSEW0kKKS8vb2lpQT966+3tvX37tpCQUEtLi6Ojo6KiorCwME5/81kQPqJUfnR2dnJ3la+uB36pZ2VlaWtrP3/+PDc3183NzcLCori4+MaNG/b29lAq1B9V8a/gV77mu3z58pgxYwAAr1+/5v5VR0cnOzv7iyIUEhLS0tLCfp3zWSwtLePi4gAApqamt2/f/qLkUOTk5CgUCtT6CUZZWRn3qVlfEBMT09XVTUtLc3V1Xb16tZOTU3d3N4VCefbsGfenVyEhIVQq9dSpU9g3+/mxbNkyNpu9c+dONzc3Y2Pj6upqKpVKoVC+KHtTpky5du3ahg0bli1bZmNjU1payi8kh8PBfff2dbi7u8OXyI8cOQI/ixYs7UU7hpycHJPJ7GMBhw8fnpycPHLkSFdXV09Pz758LDx58uS7d+/CV/+FhYUtLS2bm5u54zE
2Nk5PT7e0tNywYcPRo0fv3LnTx4L3B+/evaPT6ei/Ojo6xcXF8IM2FC0trezsbH5PXtCdiBueu4afn5+xsbG6ujqNRqPRaNyvtNy5c+fkyZPov9euXeNwOB4eHtbW1iUlJdra2jExMdw9XzAkEklGRgZOpNcXeHaVr64Hnqmnpqa2t7erqqoOHz48JiYGvpRkYGAgJyeHuvT6oyr+Ffyy13yoYDAhIWHr1q03b960tbXF+jOh049Go927d8/Ozs7IyIife/BbbJbo4UyAPBDrYPzw4UNfzIH8BJjl5eXJycnLly/n5wycMGECavmDpg8IT80p97neFwkDU1JS0tLS0E/p582bV1BQUF1d3dzcfPTo0UmTJo0fPx51MK5bt45CofAsFDz/FRISUlJSGjJkSGZmZkJCAirVxLbpqVOnUlNTtbS00O/namtrKysrGxoaFixYEBERISUllZCQYG5u7uDggE1aVlY2Ly+vqKhIQkJixowZsbGxBgYGIiIikZGRxsbGsbGx3d3dnp6eOL0kVtKIdgy4kJmZGRUV5ebmxuFwXrx4gVOKcFd7Zmams7OzAHcoDC8kJCQqKgo3cXJySkxMPHbsmI+PD0+/q5CQkJCQ0JgxY/Lz87GJtrW1+fr6uri4ZGdnt7a2rly5Mjo62tDQUF1dHVshaAWinUdPTw/2eVdX1/T0dKwMVlhYmKec1s7ODvV2oj4tWEUcDgf1x2ppaQkLCyclJcXGxq5YsUJfXx/dv1xdXbmNnTCHPHeNxsbGwsJCV1dXwGfScyqVOmjQoGfPnlVUVAwZMgQAoKSk5OPj4+TkNHnyZOilmzdv3l9//XXhwgV0q68Qpebk5GRkZCxduvTRo0dmZmby8vK4rdByYTsSrh6wRUYPJrh64Kdp9fX19fPzg50Be7dJQUHhW6ri1+CXveZDBYPm5uZycnK2trY4fyYMlpSUZGFhYWRkJMA9yB3597VZYh2MfTQH8hNgwowBPs7AwYMH4yx/2Dj7ojn9ImEgzhIJ/iuKbGpq2rx585YtWwDGwfjnn3/yKxTcNjQ0dNeuXbq6utOnT0elmrg2NTU11dfXx342fvz4cRMTExkZGRKJFBkZGRUVtXbtWi8vr9raWmzSVVVVjx49WrRoUUpKSkdHx/Pnz8vLy/X09GRlZe3t7e3t7R8/foyrVZykEYe5ufnDhw8BACQSCT3Q8OTFixfe3t6urq7wZilPdyi/bcePH5+RkYHGg/O7pqWlnT17NiMjw9PTE7uVoqJiWlpaR0eHoaFheHi4mJgYmUzW0dHh2W/RzqOtrY32+YCAAJwMlp+c1sDAAHo7cRpJAADWHwvXsFisCRMmTJs2jUajYdNCN8HmkN+uERUVJcCeAwAICgpauHChh4cHKnxfsmTJ5MmTTU1N/fz84GmutLQ07nThK0SpgwcP9vX1lZCQaGtrS09Px22FBuPuSGg9sFgsno2CXckv9bq6Ouyl9nesil+DX3bkQwWDKDh/JgCgoKDg2bNn8I6oAPdgf9sssQ7GPpoD+Qkw0WA8nYG6urpYy19MTAw2zj5qTlE+KwzkZ4k0MTFRUFCAwkZsXfErFNxq+PDhe/fuDQ4OVlRURKWa3G2KY/bs2VOmTJGSkhIWFlZVVTUzMxswYICjo2NMTAw26VevXsFp8I4dO6aqqqqqqoqNBD1fxtYqTtKIA0rur1y5EhkZyc+8DJk3b962bduuXr1aVVXFzx3Kb9vGxka0xbn9roMGDVq+fHl8fDy31GrdunX+/v4NDQ2VlZVNTU2ioqLQD87db9HOQ6PRsH0eJ4Ptu5wWBeuPhWtsbW2nTZsmISERFxeHTQvdBJtDfrsG946PIyYmJjAwkMFg3L9/H5rYREREzpw58+LFCy8vr5cvX8JgVCoVTooC+QpRKu42AG4rNBh3R0Lr4dOnTzwbBbuSX+rYehASEsKeH6B3d76uKn4NftmRDxUMAgDgQwVufya8OHjw4AH4Xz0jzj3Y3zb
Lr3Mw4oDmQDQMT2cg4G/567vmFE3ls8LA6dOn19fXFxQUoGsaGxu5S8GzrrCFgss6OjrcF0/cbYp7gDRixIjAwMDt27fD2oBAfyk2adTh2d3dXV9fz6+qsbWKkzRys3z58ufPn/f09EhKSvIMADDVbmNjo6GhIcATi4LmjUaj+fv7b9iwAfDyuyIIoq6uzj1dA2TmzJnx8fFFRUU7d+5cvXo1+tRHQFvg+jy6HlamYDktrlEg/PyxCgoKZmZmPNPC5pDfroHd8ZH/VZImJSVFR0evW7du/fr1Xl5e9vb2t27dAgCcOHECADBhwoQ9e/bAbgAAEBcXxzbcV+ykJBIJFhz6aLhrAMKvIykoKMAnlDwb5YvqYdCgQXCeRTRjAICvropfg1925IOCQQCAtLS0kJDQnTt3du3alZOTA/2Z27ZtS0lJqamp2bNnz549e/z8/FatWuXh4bF+/fr09HToUM/NzYVh2tvb4XMOERERnM1y7969ampqAAA3NzdVVVWsPHDHjh00Gi0uLg7KBgEAqDxQS0sL+2QbOhgrKioiIyNTUlJw5sBLly6dOHECmgPRTXgKMHNzc9+9e1dXV1dTUwOdgdu3b+/q6kKdgQAAaPkLCQlRV1dHRzis5hRNoqGhIT8/Pykpqbe3F10JhYGJiYldXV3YqKysrLKzsxMSErA3WFRUVF68eLFr164rV648efIEPnLIy8tLTk6GZczIyMDWFb9CwVvEqFe3vr4eSjUBALg2NTY2jo+Pj42NRfNw6dKlhoYGJycnOAa8ffs2PDxcUVHR1tYWm7SLi0tOTo6Tk9O1a9ckJCRycnKSk5PJZHJtbW16enpqamptbW1DQwO2Vul0OippjIqKgh2jrq4uOzs7LS0NQRAxMTFnZ2d4y93X1xe+Z4gFelnDw8NhpSkpKUGjo5+fH9Ydit3k9evXVCr1+vXrt27d2rt3799//z1hwgTU73r37t3Tp09v3ry5sbGxpKQkLCwM3vncvn37wYMHsfEICQk5OjpaW1svWLBAWVkZvqyErRA0JNp5lJSUcH0elcF6enriGiIpKQnaVgEA0NvZ0dEBI0R1miwW6+HDh1B5Wl5ebmxs/Pjx4xs3buzZs0dDQwObFroTYXPIb9eYNGlSRUUFAKCxsTExMTEuLu7mzZvnzp2bNGmSmJjYnj170PMnbW3t06dPl5SUVFdXHzhwICQkJCYmZt26dQAAGo2Gc2Rjd9Ls7GxUlJqfn79gwYKCgoL9+/dHR0cXFxfDmxkAACkpKQ0NjQMHDhQXF+fn5xcVFWG3amtrg12FRqNhbZ/YeoCzG3IfTLArFy9ezDN1TU1NdLBUVlZet26dm5vbkydPzp49O3HiRDKZ/NVV8YvwvXVoPxFQMIggCIPBQGcuFuBU/KwtEMd3lAcKcDB+qTkQFyc33275+9KoKBRKb28vv1+/fcJubJviEmKz2aiw9O+//3706BE2MC7pvuQEW6s4SSM3V69ehQt9F7F+X6MjpLu7OyQkBLcSzQ+2z3+2BtAA3DJYhP/OxW+3wnX7np4euIMIyAx2Jb9d4/Hjx9xTyQsAxkkmk9Go7t+/X1NTIzi3OPiJUhkMxmcbFNeRvrQe+KWelZX1/v179N+enh5400UAfayKX4Bf2eHyUwkGCXngPwuCIB4eHkOHDt21a9cPmHIlPj7++PHjW7duRW+A/4M0NDR890+yfrAM9ktJTk62srL6um0/ffpEpVJxn7H+SykoKNDS0sJNL9x3fqWqwPErj3wEBCh0Oh3ec9bR0eH5svv3hcPhdHR0oJNf/3oQMliCfzXEyEdAQEBA8Hvxy77hQkBAQEBAwBNi5PsfGAzGsWPHEhIS/umM9BXcN3mCef/+/R9//MHv14qKipUrVwIAYmNjsZ8P/zxs2LDhs7cocnJysC+jQshk8qNHj/qyOeRn7gadnZ27du36+PEjbuWBAwf6kuHz58+jn0gCAJ4+fXrjxg3uYFlZWbt27fr23P4
C8KzwlJSUe/funT9/XvC2DAbj8uXLnp6ePEWJ0HWAkpqaev78+cuXL6PflfOkvLw8NTUVLnM4nDt37ly7dq3vHUAAgo8Pvxi/y8gH1Z2f9TqKiYl9hVgSx/eyR34W6Cfse3gB3j8AwJAhQz58+AAAmDBhwg9zHPNUqvILc+zYsc++nMLTj3rs2LHx48efOnXqs5vDtvsu3aCf4Gmq7LsvFFWSwlpVVFQsLCzkDmZiYhIVFfWdsvz9aWpqwn7J0E+bQHhW+LFjx1atWuXu7i54W2jETU5Ohp4ELM3NzZs3b87JyYH/IgiycuXKbdu2rV+/Hn6VwQ+safPrhLH8QI8PP+wI9g/yy3o7sZ69tLQ0qO5MTEyEXkc1NTX0VzExMdTZOHfuXGFhYVQsaWVl9erVKwUFhczMzEGDBhkaGr548WLp0qW6urpYr2BycjJWtinAHuns7IxzP/K0HS5YsIBEIqHx0+n01NTUuro6R0dH1HCP9ROmp6enpaUZGRmNHTv23r17Ojo60Nowe/bslpaWyMhIYWHh+fPnQ5cEg8G4e/fulClTaDRadHT0H3/8wWazQ0ND4evXAIAPHz50dXVNnjz5xYsXKioqiYmJsCo4HE5CQkJWVpaenp6ZmRn8CKyjo+PZs2cGBgZv3rxZuHDhgAED7ty5s3379sjISCaT6eDggIsEW/yKigrYLu3t7dLS0sOHD3/48OGqVasUFRVRo2NlZSUMM2LEiKCgoFWrVomJiWErLSkpCVv53H7Ujx8/pqSkREVFzZo1i8FgCBZjom2H7QY4vyhWEZKXl9fU1EQikSZPnozaFy0tLfvSbdBhODU19dOnTxwOp7KycvXq1bdv37aysrK1tQUAYEsKuCSuqGlWX18fNYZAcnNzo6KiVq1a9fTpUx0dnVmzZvn7+zs5OUElaUtLC6xVYWFhFot15coVOTm5FStWoJvDr6pDQ0OLioo2bNhQXFyM1ZBCtamysnJmZuaWLVuUlZXRHmhnZ4caXKdOndrU1IR1e6JVZGJigla+m5sb2tyysrLPnj0zNzd/9erV3LlzW1pacnJyNmzYICMjg03Czc3N3NxcS0urt7cX1ueKFSvevHmDmmOTk5MpFIqBgQF8L5HNZsNNJCQkXr58ifbPiRMn4gy9uAbiqQaNi4vLz88PCQmZNWvWZ1WrEhISPN+oCgoKOnHihI+Pz927dwEAJBKJxWJdv37dw8Njx44d/A5r3KZNnCkGpaurC83MkiVLsL03MTERVk5xcTF2v0M/70P3AllZWfTAxS9L/1J+2Ws+rGcPVXeiXkfsr1hnI7QeoGJJISGhioqKJ0+erF69+vDhw21tbXPnzj106BDW2xkQEICTbQqwR4L/dT/ysx3Kyclh4w8ICKBSqcOHD0c/BwYYP2FxcXFiYuLixYvd3Nza29sNDQ3v3btXUFAwc+ZMNpu9a9cuZ2dn6KaCG4qJiRUVFRUXF48YMQJ+Xv3nn38aGBjMnTsXfp6Vk5Pz5s0b6IYIDw/38PDYvHkzACA4OJhMJo8bN+7s2bPoACwmJnb16tWOjo758+e7uLioqqo+efKEwWBoamoGBARwR4ItPtouCIK8fv1aXV09Nja2paUFa3REwwgLC585c4bJZOIqDVf5gMuPamJioqKiMnXqVHV19c+KMbFtx88viu1mR44cgecQWPtiH7sNGomYmNiZM2cmTJhQWlp6584dT09POHkWrqQ4UyXWNMvd/w0MDG7fvi0nJwcAKC8vFxISYrFY4uLiUEmK1ioAoKqqatWqVfCzLWwMbW1tZmZmPT09wcHBOA2ppKSkl5eXkZGRurr67t27sT3w06dPqMG1uroa6/bEVpGwsDBa+djmlpWVffXqVV5enpub26JFi0aMGCEuLh4YGIhNgkwmGxsbW1lZKSoqYusTTbe2tjYkJGTGjBnQuQwAEBYWhpsYGRlh+yfO0ItrIH5qUDs7OwkJCUdHR2Fh4S9VrULYbDaLxXJzc3v
79i1q+AsMDDx79uycOXP4WYEAL9MmP7A1jO291dXVaOXg9jt0W3QvwB64fjF+2ZEP69kT/CvW2QiPFFixJBQoy8vLDxgwQFdXd8iQISUlJThvJ062yZ0cao8E/+t+5Gc7rKmpwcZvY2Ozd+/ehIQE7MRAqJcvMjISQZCCgoL79+9LS0tPnjxZREREXV1dSEiooKBAQkJCRERk8eLF2Hl2cOeJoaGhhoaGEhISKioqJBIJnXVPTk5u2LBhSkpKsCoaGxuLi4uNjY1HjhyJfiEkKSkpLy8P9ZIKCgrV1dUwTjQJXCTY4vPLD7fREcYDE8VVGnfl8/OjlpeXf5EYk59fFBvGwMBg2rRpJiYmOPtiX7oNtmiqqqoaGhp6enpaWlqSkpJQqoArKc5UiTXNcmdeXFx8zpw5ERERIiIiwcHBZWVlRkZG4uLiOCUpAEBPT09GRsbAwKCyshK7XlFRUVtb29jYuKKiAqchVVdXV1JS0tfXX7lyZVxcHLYHysvLowZXnNsTW0VSUlJo5WObGypzhw8fPmjQICkpKU1NTVh1uE4Oc4itT2trazRdLS2trKys7du3W1tbc9cMtn/iDL24BuKnBv3qHoUSEhLS2Njo5+enp6d39epVuNLc3DwnJ0dLS8ve3p7fhtymTX5gaxjbe7GVw+96EQV74PrF+GVHPpxnDzUHwgXsr310NkKgSZKfwxBFgD0S637kZzvExS8sLBwfH//+/Xt0xh+A8fLp6OhQKBQ7OztLS8v29vbGxsZJkyadPXu2o6NDUlLy3bt3HA4HAIB9rwEaBREEgbc3u7u74fk+LDu/GnB1dYWmJX4z35JIJDU1NRg5P1chTigKKwpnOMQZHdHKhP8KVkQCXn5UaG1QUVHpixgTBuDOPE994po1a3bv3r148WLBGs8+dhtcnnElxRkaBZhmIe7u7leuXJGVldXX1798+TIcBtBy8SsmmjpuDU8NKZ1O19HRwfVA7tig25NfFfETeEJg1XEnwWaz+dVnQ0NDeHi4rq4ubq5KbGfj2T+59zt+1lz4b196FG5D+PwsPz//yJEj69evv3379vXr1+l0elxcXHJysrS09I0bNyorK9va2rij4mna7CNo78VWjgCzKFzPU3v7a/DLjnxYzx4AAKo7Ua8j9lcHBwfU2aikpIQVS6alpWVmZhYWFkJXZGJiYkZGRmNjI4vFwnoFs7KysLJNwfZIrPuRn+0Q50gMCQl59+7dpEmTsOeSqJ9w+vTpoaGhzs7O8GnNrl27Vq5c6ejo6ObmpqamNnHiRCsrq3379ikqKqLev+nTp1+4cOH69eskEqm0tPTQoUNr1qy5cOECnU5PS0tLSEjIy8vr6OjIzc3Ny8urrKysqakpKioKDAzMyMgICAi4efMmi8XC1vbTp08fP37s4eEhKSnp4OCwZcuWmJgYqDzFRYItPqpUHTduXFxc3Pnz52k0WlZWFtboSCaTYZjy8vLa2tqkpCRcpeEqH/zXj4rmrbq6uqys7PXr14qKip8VY8K2e/PmjQC/KDb8/v375eXlFy9ejNV4vnnzpi/dBo0kLS2trq6OTCZDVSx0hObm5uJKijNVurq6oqZZNpuN+kLRaIcNG6asrOzk5OTu7j5o0CAAQFtbG1SSojWfnp6el5fX0NCQl5cH32+CpKenQwFsUlJSTk4Oi8XCakgBAC0tLcHBwbdv3z59+jS2B6qpqWENrli3J7aKAgIC0MrHNndmZmZZWVleXl5ubm5NTU15eXlGRkZpaemECRPQJHR0dEaNGuXn59fc3IzWp5ycHJpuTU3NuXPnxowZY2dnh5YIblJcXIztn7W1tVhDL4lEwjYQPzVoQkJCdXV1VlbWZ1Wr0IhbUVFx7dq1y5cvOzs719fX37p1q6ysDJ50iouLIwiyb9++wYMH79y58+HDh6dOnVq5cqWioiLO9crTtFlcXMwtjIUBsHpbbO/FVg5uv0OPD+gRDKe9/aX4jia0nw2sZw9Vd6J
eR5yF7yvskQI2EWCPRLhMj/xsh2j8bDabTqdz2/+wfkIBmeH5EzzLQ+Ok0Wjw2QO/SBAEuXbtWmNjY2lp6du3b2NjY9H1EydOrKmpwRaZwWCw2WzUlYoDW3y0XZhMJofDQfPDs+2wCPCvon5UnnxWjMlPMcqzGrEt+1mNp4B4BIAtKc5U+VnTLE85J7qGXwPxA9WQIghiaGjY09ODjYG7XNxuT35VJFiJyTMJtJm4t2Kz2Ww2m3s9uong/tkXNSgu2m9XrXI4HDqd3tPT09raiib9FbZefmAPJtjKwe13KLCucAeuXwnC4fLv5lv8hF/KtGnTRo8eraysrKmpuXDhQjhrXW9v75gxY27cuAGfiPzjYP2oBN8LnIa0vr7e3Ny8tLQUPhfnx0/u9iT4nSFGPoIvoKGhQVpaGnu8a21tbW5uJpFIw4YN+wczRtCv4DSkUGQsLS2NvgzFE8LtSfDTQox8BAQEBAS/F7/sGy4EBAQEBAQ8IUa+/+endRV+qXYSwm3hwzkbIdXV1a6urjg5U//RR28khFslymKxAgMDN23aVFVVBRd+8PvWPC2O3xe01AQEBP0EMfIBAMC7d+/odPpP6yrsu3YSC2rhQ42FqLMRi46OTnFxMe4DxP6jj95ICLdKNCQkhEqlnjp16sOHD3AB+3V/3+mLL5QnPC2O3xe01AQEBP3Eb+HtbGxsjImJkZeXl5eXHzt2LE7A2NjYuGrVqlOnTi1YsABgXIXS0tKoKnDy5MlozKh7UFZWNiQkZN26dQiCvHr1avXq1TzthV5eXnFxcVBLqKWl1dnZmZSUVFxcbGJiYmVl1drayi1yzM3NLSoqYjAYLi4uxcXFqHYSvlrCT/P48uVLbgsfaixUU1P78OGDgYGBhIQETrqIGhywHki4LVqKOXPmZGVlLV269NGjR2ZmZvLy8jExMbq6ujk5ORs3bpSUlERlgNCU2NbW5uvr6+Likp2d3draunLlyujo6KFDhxYWFvbFG8lTJWpqahoaGqqgoDBo0KCwsDAFBQVzc3NTU1M02xoaGjxdhU1NTdjcZmRkwNShvqu2thbtHurq6i0tLRQKxcnJCWcEBVzaTKxUU4B7E4KtH2w9Y0WRHA4HW2oajcbtayUgIPh2ftlrPqyZU1RU1MvLa9CgQYMHD+YWMKqrq6upqc2dO1dISAjrKsSqAuvq6mBgrHvQxMTk+fPnNBqNRCLJycnxsxcmJiaiWkIAwF9//TVq1Cg6nQ6vL7lFjpWVlefOnVu8eDGLxfL09ES1kwMGDIAB+GkeEV4WPtRYqKKiAp2NOOkitsZwHkgYISzFkCFDfH19JSQk2tra0tPTtbS0/v77b319/ba2thcvXmBlgHBbRUXFtLS0jo4OQ0PD8PBwMTExMpk8cODAPnojeapE1dXVDQ0NzczMJk2aBBdMTU2x2ebnKsTlFps6AADbPW7cuGFra/vgwYPW1lacERRnccRJNfk1CgRbP9gM40SR2FIDAHj6WgkICL6dX3bkw5o5NTQ0VFRURo0apaury0/ACMG6CnmqAnF6xs2bN9+9exeKYPjZC3EWStR+OW7cuPLycm6R49u3b6GUa+bMmaGhodyZ5Kd5FGzhQ52NOOkiGoDbAwn9n7AUuMglJCTk5OR0dHSg8JCnKXHdunX+/v4NDQ2VlZVNTU2ioqKSkpJ99EbyU4lyg802P1chLre4GLDdY8+ePZGRkd3d3Q0NDTgjKM7iiJNq8msUmAS2frAZxokisaUGAPD0tRIQEHw7v+zIh/N2YuEpYITWD+wanjZCnHvQ2dk5JCSEw+GIiYnxsxfitIT79+9PSEhQVlZ2cHDgKR7E2hrhbAxQOiC4vAiCfNbCh1sJpYvov4I9kAIiB3xMiTNnzoyPjy8qKtq5c+fq1asnTZoE+uyN5KcSRasCXRCQbZ4NjU2de6WLi8u0adOUlJS4M4azOH5WH4rNNrZ+sBlWUlLCdgBcqXn6WgkICL6dX3bkw5o54+L
iamtrCwoKwP8q7NDAo0ePPn78eGxsLNZVOHnyZKwqEIbEugejo6MlJCSmTZsGL3T42QuxWsLy8nJfX9+cnBx/f/+QkBCcnxMmMWfOHC0traCgoOvXr3t7e6PaSfQwyk/zyM/CB42F6enp0NkI/le6WFVVVV1dnZKSgvVAwmtcGo2GlkJKSkpDQ+PAgQPFxcX5+flFRUUw8pSUlPz8/KqqKm5TopCQkKOjo7W19YIFC5SVlbW0tPrujeSpEq2vr09PT09MTEQXurq6sNnGihOxDV1cXIzNrYSEBEwdrU/YPdhsNplMPnXqFJVKDQoKwhlBcRZHnFSTX6PAJLCyRGyGtbW1sR0AW+qMjAysrxVncSQgIPgWfuUv2Xt7e8XFxel0Om5mSO41AAAmkykqKsodCXdgGC38CwBgsVjYu4s8I0dzIiwsfPPmzYULF1IolNevX2/cuJFnDgEAPT09XzE5CIvFEhYWZrPZuBueDAYDe+ljZGSUmZkpISGBe1kUygx51gOEyWSSSCTuu6lwOggGg4ErCJvNhhfH3NXLZDJFREQEvK1Kp9PFxMQQBBEwXZngbPOsWMGps1gsOKkvgiDcxYRqUxERETRLfWwmXP3gMozNJ7bUUKgoLCwsIiICL1I/WxUEBAR94Vce+X5CyGTy3LlzFyxYICwsbGpqOmPGjB+fhz5KFwkICAh+VYiR70eDIEhNTY2Ghga8ZPzx9FG6SEBAQPCrQox8BAQEBAS/F7/sGy4EBAQEBAQ8IUY+AgICAoLfC2LkIyAgICD4vSBGPgICAgKC3wti5CMgICAg+L0gRj4CAgICgt8LYuQjICAgIPi9IEY+AgICAoLfC2LkIyAgICD4vSBGPgICAgKC3wti5CMgICAg+L0gRj4CAgICgt8LYuQjICAgIPi9IEY+AgICAoLfC2LkIyAgICD4vSBGPgICAgKC3wti5CMgICAg+L0gRj4CAgICgt8LYuQjICAgIPi9EPmnM/CzQCKR/uksEPybQBDkn84CAQHBV0Jc8xEQEBAQ/F6QiFNXAgICAoLfCuKaj4Dg56W3t/f9+/fPnz8vKirKzc0FAHz48KG6uvorompoaIALVVXtz58XvX9f09DQXV7ehgvG4SCJibVkcvc35vyz5OU1ZWeT+xi4ubknOLiwtLQV/osgSGxsVVJSLfbE/ePHZmz4ly+LKyraAAA0Gq29vf0rcshgMFJTU2G1AwCYTGZqampqamp9ff1XxIbCZrNfvHjxLTHwJD8//+PHj+i/DAYjLi4ONnpYWBidTscG5nA4iYmJZPJn6p/BYERFRXV2dn733P7jECMfAcFPyocPH2bMmNHR0TF69OjExMRZs2YBACorK+EBi81mU6nUvsTD4XCuXr0qLS0NAPD0jPD2TjEyUpGTE3dzexEaWoILTKHQNm8Or67u+Pb8d3b2Cvjp4sXUsLDSvsRTXt62d290TEzluHG3YmOrAACenq9VVaXExUV27XoLAEAQ5M2bcjOzazB8RUXbvn0xU6cO8fX9EBlZLikpGR8fX1tb+6X5FxMT6+npGTlyZFBQEABAVFSUTqffuHFDQ0PjS6PC0tvbm5mZ+S0xcNPT03PixIl3796ha5qbm7dt21ZRUQEAyM3NxXUVCoWyefPmz55CNTQ0rFmzpq0Nf3r0C0C84fJl0Ol0CQmJfzoXBJ+BzWYDAISFhf/pjHw9VCp18eLF9+7dmzBhAgBgzZo18MJl0aJFMMC+ffuWL19uYmLy2aju3LljY2MjJyfn6/vh48fm6OiVcH1wsPP9+zm4wKqqUtract+e/4iIsqqq9g0bRvP8VU5OfPhwte5uRl+iqqhou3FjLgBATU06JKRIS0u2qKhl+HA1AMDmzeG1tZ0DB8pNnz5UXl4chr96Nd3aeqCUlKirq9nq1SEzZgydO3furl27zp49+6WlGDVqlL29/Zo1a4yMjIyNjUeNGpWbmysk9E0XDFJSUocPH/6WGHjGaWxsjF2jpaW
lo6MDl/fs2YMLr6qqqq2t/dlodXR01NTUvlcmfyqIa77/sGLFCm9v77Fjxx47dmzbtm3R0dHcYdLT0+fMmfPVSTCZTC8vr2nTpj19+vT06dN+fn7cD1kZjD4dC74o5HentLR0xIgRJ06cOHHixJYtW+Aw810Cfzt0On3r1q13796dPn26gLs0LBbr2LFj48ePv3Pnzpo1axISEvo1V19BSkpKV1cXHPYgW7Zs6ezs3Lx588WLFxsbG4ODgyMiIurr69+8eXPr1i1HR8dXr17xjMrPz8/U1BQA8OhR3vz5Ruh6KSnR1avN6XTW3r0x/v65Hh6h8fH/uQiIjCzz8Ah1dg4EAHz61HnoUNzz50Xr14ey2ciePdGuri8OH46bMOFuW9v/30Oj0Vg+PimnTyc6Oj6h0Vg3b2akp9dh72dGRVXcuJFx4kTC7dtZX1QV06YNQTNsbq754UP9kCGKcI2urkJaWh1cRl/Pbm2l1dZ2AAC0tGQLCprhT+Li4lFRUV+ULmTmzJleXl7z5s3r6OgQEhKCw15FRcWePXsePny4atWqT58+cW9169atqVOnHjlyxNzc3NfXNyMjY9iwYbdu3XJycoqPj4fNQafT9+7d6+/v7+HhER8fjw3z2QibmpqsrKw+fvyYm5urq6sLu3paWtqKFStGjBiBve1ZVVU1derUjIwMAMClS5fCw8OPHj0Kf4qMjPTw8HB2dgYAFBcXX7t2bePGjQcPHgQAxMfHX7p06datW5WVlV9RaT8/xDXff9i9e/fw4cPDw8Pt7e1NTU0LCwu7urokJSVFREQAAN3d3TIyMqNGjeJwOBwOh8lkCgsLw5/6jqio6Lhx4ygUCuxqa9eupVAoO3bsQBCko6NDQUGBTCZfuHDhyJEjIiIi3KnDSDo7O+Xk5LAhv3dNfB59fX11dfXZs2ePHDly/Pjxubm5I0aMOHXqlJKS0oYNGxoaGp48eTJkyBAymezu7s4d2NjY+K+//jI2NlZXV29sbFy5cuX+/fvh3jt79mwymVxWVpaZmTlx4sT6+vodO3Z8Ud4SEhIYDIa7u/uECRMoFIqcnFxKSkpubq68vHx8fPyFCxfQfNrZ2X348GH16tUDBw48c+aMjY3Nhw8fLl26tGLFivz8/IULF8bGxpaWlmpra6urq9vY2Hh7e4uLi8vKytbV1Z05c6af6haloqJCUVERu0ZMTExMTGzAgAFMJlNdXX3QoEGzZs1SUlI6efLkmjVr9PT0/v7777lz5+LiaWhoEBUV/W+cbYqKEv8bp/DZs0mqqlIrVphaWGhOnXq/rm47AMDGZtDkybojRvjW13f19DBtbAbZ2up4ekYICQF9faXeXtaBA3Z1dV3v3lU5ORnCqC5cSGGxOLq6ivn5TW/elFtYDFBQkDAz+/8bgzQaa/Zs/Y6O3s2bw9esMf/SCmGxODU1HX/8Me7ixVQpqf+USEpKlELpwYVcsMB4797oZctM4+Orhw79Tx0OHTo0PT196tSpX5ouAMDLyyszM3P58uWPHj2Cazw9PY8ePWpmZtbR0bFr167Hjx/jNrG0tLx169b+/ftXrFhhYmLS3t7e1tY2f/58d3d3AAAcLC9fvqyqqrpixQoLC4upU6fW1dVhwwiO0N3dXUFBgcVijRw5Evz3Gxs9Pb39+/efO3du//79wcHBcMPBgwfD0bqpqenevXuxsbGDBw+GP9nY2EyePHnEiBH19fV79uxxcnIyMzPbsmXLpk2bNm3alJOTIyQk5OPj8xU19vNDXPP9h+HDh6PLJBLp1atXJSUlbm5uAICLFy9WVlbevHkT/lpeXn727Nmurq6vSAX71aCHh8eDBw9aW1v37dsXGBj4/PnzsrKyjIyM2traU6dOcafe1tbm7+/v6+t7+PBhNOQ3lfkbIJFIiYmJV69e1dTUNDU1FRERkZeXHzhwIABg/fr1bm5ujo6OUlJSsNJwgcXFxUkk0ujRo+3t7SdNmiQqKioiIjJ69Ojx48d
fv35dT09v/fr1ra2tDg4OPE9+BWNlZZWRkbF27dqBAwfq6up2dHR4enquW7fO2dnZ2dkZm08SidTY2BgSEuLv77927VoAgKGhYWdn59SpU5csWaKoqKipqSkvL+/m5nby5ElRUdEdO3YoKSnp6+tv3Ljxe1cnD3R1daurq1ksFm497hZuWVnZ0KFDly1bdu7cOXhej6OtrU1MTOy/cSqUlbXiAmRlkSUkRAAARkYqvb1sCoUGAJCWFgMAKChItLXRhwxRrK3teP++GgDA4QBhYSFxcREAgJycOPZJ3ocP9W5u5suWjSgv93R0NODOibX1wIiIspISCovF+bK6AAAA4OeX7eVlIyIipKOj0NX1n3S7uhhDhyrhQs6Zo3/njmNSUi2F0jNhwn/u+ElKSjY1NX1FupC7d+/W1NQcOnQI/puVlQWfetjY2OTl5XGHFxYWlpWVBQAMHjxYSUmps7NTRERESUkJ/RUbiZGRUW9vL4VCwYb5bITcYeC2U6dOxR0ZYHJqamouLi7Gxsbo4z346FdBQaGtra2+vn7FihXr1q3r7e2FfQaOl5KSkl9aV/8KiJGPN+7u7kwms7q6mkajVVVVPX/+HF6oNTY2BgUF7d27F3c+/hVISkqSSCQFBYVVq1YBAFJSUgYOHKiurq6rq8sz9QcPHlCpVBMTEyUlJTTkdyjq12JgYDB69OjGxsaioiIAAIlEguN6dXW1goICAGDkyJHoXUTuwPHx8d7e3nAXhacaDx482LRpE/pwAgAwZMiQL82VtLR0YmKiurr6mDFjOjs7s7Oz4UkxAMDW1habTwCAgoKCmZmZoaEhvE9IIpHIZPLLly+9vb2lpaVJJFJ+fv727dv37dsnJyeHtviPqfaJEyfq6ureuHEDXVNRUcFkMhEEgSf4JBKJyWSqqqpGRkbCdxDS09O5bybr6emhZ2nr14++ezebSmXCfxEEKSmhmJioJiTUAADodLaysqSKihSaBFy4eTOzqYk6ebIum83B3p/H3atXU5N+9CgPAECns7KzySQSYDL/JzMbN4aNHKlubKyK25DN/s+/vb1834h5/bps6tQhAwbI1tV1jR49oLKyHa6vq+u0sNDkDj9qlKa9/bDo6MrDhyfBNW1tbfr6+uC/z4D7CJvNhuGlpaVfvHjh5+cH15uYmMC+3dLSYmFhISAGOp0uJyenrKyMXQlrAI2ETqcrKyurqKj0JUtohMLCwrDGenp6sFUKb4TikkMQ5NOnT+vWrYuIiNi8eTPyX9Bfu7q60tPTAQBVVVUsFis/Px92GwRBOJyvOVP5ySFGvv8B7UBHjhzR0dFRUlLicDh79+5VVlaGtyDU1NSys7Ozsr7sQQVPAgMDlyxZUlpa+uDBg9GjR8PuBTPAM3VxcXF5eXl7e/sFCxaAn8AhoqKiYmlpOX78+GfPnmHXy8rKwoNFTU0NOnRxB7a1td22bdugQYPgv1OmTKmsrIQnod/CixcvREREjh49amVl9ebNm4EDB6Jv83MjLi6uo6Ozbdu2u3fvMplMAICGhoaDg8POnTvhOfXw4cM5HM4/cktZRETk9evXERERO3fuvHfv3rVr12pqarq6upKTk5OTkzs7O62srI4dO0ahUJYuXWpubu7u7k6n07lf6hETExsyZAh8qX3FCtNdu6ydnJ5cvZp+507W9esZmpqynp5j6XTWs2cFN29m3Lgxt6Ghu6SEEh1dWVnZXlHRlpRUq6gocf9+jrd3irKy1I0bGSkpn/Lzm+rruzIyGtLS6tB+uG2b1bVrH2bOfHDkSLypqfqYMVqPH+e/fl2G5kRFRWr//tiYmMqKirY3b8qTk2s/fKhvb6evXfvyyZP87u5uRUXF5ORk7qoICip0cwuZNeuhkdEVV9cXOjryc+cOe/Qoz88ve82aUQoKEgCAqKgKCqUnNLSEw0FYLE5kZPnBg7GXL89G7+5+/PgRPqQ3MDCoq6vrSxP09vZevnw5JCSkqqoKAKCrq/vkyRN
Ywz4+PuHh4W/evImPjz9+/DjPzcvKyp4+fXrhwoWrV69mZma2trbCjxmio6MpFEpCQoKnpyedTn/27NnNmzdv3LiBDfPZCAEACxYs2Llz57lz59TV1RMTE4cNG5aQkBAREZGenv73339XV1cXFRXFxsaWlJSUlJS8efOmo6Nj3759NBrN3d2dTCaXlJRER0dXVlZWVFQkJSUdO3bMwcHBxcUlJibGxMRk165d9vb2R48epVKpP+FT8O8AQvBfysrKdHR0Tp06xWazp0+ffvr06SlTpgQHBx84cCAjI+P8+fNZWVkmJiYfP34cNmxYcnLyl8bPYDC2bdtmbW395MkTHx+f06dPs9nsmJiYyZMnX7hwwc7OrrGx0dLSMjExkWfqZDLZ1NR0586dkZGRdDodhuyPevgspaWlurq6f//99/Xr1+fNm1dZWclkMjdu3Hj06FEEQeLj448dO/b27dtdu3a1tbVxB6bT6fPmzbtw4UJgYODGjRsZDMaiRYvu3r1bUVEBx7+2trYxY8YkJSV9Rd7Onz+/b9++0NBQV1fXpqYmBEFOnjx57Nixd+/excbGovlkMpmHDh0aPnz4w4cPt2zZcvr0aQRBUlNTR48e/fr163PnzgUHB/v4+KxZs6a7u3vGjBlxcXEsFsvDw+P48ePftzI/C5VKbW5u5vkTjUaDC93d3QJiqK2tha9TodTXdzGZbOyazs5eDofDL4aeHiaCIHQ6C1728YPD4VCpDEz2mDzjwYaBwWDSCQkJJSUlAuLH0t5Ob2uj8fyppaWntfV/fqqvr/f29v5vHnr6mARP6HQ6utzR0cEvWH5+/tSpU7u6uthsNr8wkM7OTgE1LzhCmBn0TgCbze7s7OQXA3xHQUCA3t7e3t5e9N/u7m42m81gMPiF/1dDOFx4w+Fw0DfjSSQSjUaTkpLqp7R6e3vFxcXhXwaDISYmxi91DofDYrHgYxsYsp+y9O1QqdRvv4D7CmA1dnR0yMvLoytZLBabzRYXF//x+flJKCwsBAAYGRl9NuQ/SElJybBhw757tDQaLTQ0dOHChT/SzZuWlrZly5bU1NSfNsLfHGLkIyAgIPjOhIWFtbS0jBw50szM7OeM8DeHGPkICAgICH4viDdcCAgICAh+L4iRj4CAgIDg94JwuPwHYmZagi/ixzwmSEtLo9PpAwYMUFdXhx8y02i0jIyMtra20aNHa2pqAgC433oYOXIk/ES6qqoKFXZ8I52dnbGxsY6Ojt8YT1hY2JQpU/oiv83KysrPz583bx4seF8oLi7u7e2FbjB+sFgs9JN/LS0tLS2tb9/3KyoqmpubsWtMTU1jYmK4S8pgMJKTk4cNGwbbDgDQ3t6ekpIyc+ZMXJz81hN8F4hrvv/wz75iS/Cv4wf0yRUrVpSWlioqKrq7u0MDQGZmpouLi7S09Lhx406ePHnmzBkGg+Hl5dXa2nrno5/OawABAABJREFUzp39+/d3dnZeuXKltLQUANDQ0DB79uzv9RlyZWXlkiVL4PK3TFvDPW8A4DXvxJs3bwoKCpSVlfs4JVNnZyeDwTh16lRERITgkCIiIlVVVba2thwOJyIiYty4cbC6vgU1NbUNGzbcu3cPfvoZFBRUUlLCs6TYKRQg2dnZGzZs4I6T3/q+z9GB8ktOM/St/NMHEAICAt5oamoWFBQgCNLQ0BAREdHb26uvrx8XFwd/ZTAY+vr6ERERWVlZCIKcPHly+fLlCII0NzdXVFQgCOLt7W1paRkSEvK98qOiooIgSE1NzYYNG75XnBAvL6/8/Hzsms2bN79+/bqPm4eHh1+9ehVBkLNnz548efKz4fPz82VkZODyunXrtm/f/oX55cHChQvPnj0Llzs7O1tbW/mFnDdvXkJCAvpvd3f34MGDuYPxW89dV4Lp7e2dP39+38P/JhB3OwkIflLmz58/bdq0u3fvTps2bebMmSkpKdXV1TY2NvBXUVFRGxub58+fX79+HbuVioqKiooKk8l
kMBg7duzw9vZ2cHDABrh7925gYOCwYcNUVVVHjx5dU1Pz6tUrd3f3uXPnXrp0aejQoZmZmevWrXN0dLx16xabzXZ0dMzJyZGT+8/URYGBgZmZmW/evBkxYkRYWBj0aLu4uKDx37t3r7u7++XLl1evXo2JiTl37tzr16937Nhx+vRpYWFhd3f3U6dOWVhYoGmtXbs2ODhYWVlZUVFxwIABAIC8vLy8vDwpKSlFRUVTU9MjR44YGhomJiYuXbpUWlp6yZIlf/75Z1hY2PPnzwEAbDb75s2bCgoKUNn18ePHAwcOxMTEBAcHq6mpYTMzdOhQ7kr+8OHDypUro6KiKioqKBSKmpra7NmzV61aNWbMmNra2rKyspcvX0pLS1+/fp3BYCQmJvr7+x8+fLi7u7u6unrv3r1oc4D/qs5qamoiIiJmz569aNEiWFJ+ebhx44aMjAz2+o/nemze7O3t0bp6/PixiYlJcHDwhg0bzM3N0frct2/fmzdv0GYdMGBAdHT006dPFyxY8I/YiH5OiLudPwXop+tYqSB2GmXo1urvKX76D3iehf7LXRBcABgGlrpfweaEWw/NvRL5gV8B+fj4bNiwwcHBwcPDg8Vi1dbWqqioYCeHU1dX5yfiCgoKmjdv3vz580tKSrKzs7E/WVpa1tbWent7b9++/eTJk5KSknCeB+jynzBhwvz589XU1OBUAPCZGbbUlpaWAwcOnD59+vv379PT05cvX459rpaSkvL27VsFBQUREZErV66sXbvW1NT04MGD3t7eQ4cOxc0bANNC552Awx4AYMSIEbq6ura2tpaWluiEBp6eni4uLqNGjYITGsBhDwAgLCxsYWFhYWEBP3QbPHjw4cOHzczMoqKicJnBVgKDwThz5szKlSvnz5/v6elJo9Fmz57t4ODw4MEDTU1NGRmZCRMm3L9/f9iwYdevX79w4UJ3d7eWllZ+fn5sbKyurq6YmFhERAR22AMAJCYmXr58+cqVK11dXWhJ+eXh48ePERERS5cu9fDwwEbCvR6bN2xdycrKTps2beTIkS9fvsTWJ51OxzarhYWFpKQk1LXz7Wq/H0Rd/DhYLNapU6fCw8PXrFmTmJjo5uZmY2PT09Pz119/DR06VE1N7cmTJ1u3brWxsVmyZAmdTl+1atXChQvfvn0bGho6duxYMpmcnJx89OjR2bNne3p6Kisr5+Xlbdy4UUtLC94AcXBw+PDhw+7duw0MeJjy/0Hi4uKoVGpNTc2YMWMGDx787NkzXV3d+vp6OBkFLoCFhUVAQIC/v7+kpCQ6KUx/UFBQEBkZCefXXrJkycGDBydOnPj+/fsDBw7AAB0dHadPnx4/fnxjY6Obm1tLS8v69esPHz7cl8lgvx0owdm7d+/8+fOnTp06fPhwGxsbMpkMJTUwTHFxMbQwcxMVFQUHxREjRvj4+KCqZQCAiIgIFCij8zwsW7YM/gRd/jdu3MBNc8qTuXPnvnjxYsyYMQEBAejKDx8+zJo1CxvnX3/9NXfuXHSyG9y8AZ9NKysrC05S2JcJDcB/5x9QVFRsa2vjzgyKmJjYrl270H+tra2Dg4NVVFTgiQ46McKECROys7MbGhouXLigpaUF47l+/TrOQA2xtbXdunUrAAA+lIUl5ZeHhIQEaLXFTYbAvR6XNxQrK6vHjx+3tLSwWCxsfXI4HFyzEnBDXPP9OEREROzs7NTU1FavXu3i4gLneNuzZ4+pqemWLVucnZ2vXLnCYrECAgKmTJny8OHDhQsX1tTUQCnt0qVLt2/fbm9vb2BgICsru3DhwuXLl7u5uc2YMaOnp2fcuHG6urobN240NzfH3fv6GXj06JG+vr6NjU1SUlJkZKSIiMj06dOxbyRiAyAIEhkZefjw4YCAgH49Sz116pSjo+OSJUsuXrz47t07MTGxSZMmcTic+Ph4GMDHx2f27NmzZ88ODg6mUqkqKipwmPwxlJWVwcsaIyOjhQsX1tXVmZmZWVtbBwYGwgB
dXV0ZGRnr16+H/7LZbPRllrS0tOnTp+/YsWPHjh3Xrl17+vQpmUzmTgI3z0N1dTXW5c9vKgA4RwQAoKKi4tGjR3/++ecff/yB/gpP4GB46LZ9/fr1X3/9hYaBF/e4eQPQOFHQewB9mdAA3RzdCi7gMoOGhwZLbAwbN24cOXKksbEx7pq+trZ27Nixampq8CSMTqfjLqBRoE4TLhsaGn42DyoqKomJiYBrMgTu9bi8wcKy2Ww48ZaGhgauPlVUVHDTd6A3Nv69N42+O8TI90PhnhMuKChoxowZ8NcBAwZMnTpVV1e3pKRk5MiRVVVVb968sbS0RAcAOJ8R+hK2oaGhvr5+bGwsiUTq6enJz8+Pior69vfOvzvwPlVAQMD69evnzp0LJ6GGUz9zB6DRaGPGjFmzZs2ePXv6NVfooxQREZGEhAQ4O5KOjs6HDx9ggPT0dLhSSUmpuLgYAIC909jfDB06dO/evd7e3v7+/vn5+Zs3bwYABAYGxsTE3L59+9WrV1u3br1x4wY8yNbW1sbGxn748CErK4tMJm/duhWdtIHFYsnIyHh6enZ3d8M1b9++LS0t/fjxo7q6Onaeh+7ubtTlTyKRcFMBJCUltbW1vXv3ztjYOC8vz9fXNyEh4ebNm6qqqosWLUKzPW/evO7ubgsLi82bNw8cOHDv3r0jRoxYsWJFZGTkxYsXy8rKuOcNIJFIcN6JgoICGElVVVV2dnZkZCScXvGzExqMGTPm8ePHr169Sk1NTUlJIZPJaWlpaWlpDg4OaGbQ2a9YLNbTp097enqwdxRUVFT2798fExNTUVEBB56nT5++fPmSRqMtWbJk27Zt165dmzlz5pEjR/T19RMTE5OTk7EnEzk5OZmZmW/fvkUnxkNLiq0QHR0ddAoFe3t7GRmZlStX+vj4sFis3NxcuKGDgwNuPS5vsK6KiopERES2bNny6dOn169fl5SUoPWpoaGBm77DyMho79697e3tfZ+n4tenf16cIeBNQkLCrFmzqqqqjh07tm7dOgRBFBQUqqqquEMGBQXt27fPx8fH1dUV95OZmdmnT5/g8qJFi0JCQgIDA+3t7adPn37v3r3+LsJXkJ6efuXKlVGjRkVFRbW0tJw5c2bBggVHjhzhGQCuYbFYFhYWWC/+d6e4uHjTpk13794dOXLkqVOnHj58iCCIn5/f+fPnYYBZs2bV1dUhCOLq6pqZmYkgyObNm7/otbpvhEqlMpnM2tpanMufSqXCaSi+C+g8D9wuf9xUAChwjggo8qdSqdxxdnV1CU6UOy103gmefHZCAwGbfzYzEHhpC4uzcOHCuLg4bPY4HA7PkvYRfnno6OjgcDjcNYxbj80bgql/2EBUKpW7PrHTd7BYLDjlwjfOU/ErQVzz/Whwc8LNmDHj7du36K80Gg0umJiYKCoqTp06NTExkcFg4H6FNDc3l5aW2tnZIQiira396NGjU6dOoSePPw/e3t7wVsz9+/cDAgJGjx4dEBCAvfODDQDXCAsLjx49WlRUtP9yNWzYsMuXL/f29q5Zs2bUqFH19fUAgLq6ulGjRsEA6Ep4stx/OeGHlJSUiIiItrY27lNrKSkpVVXV75UKOqUGnLMX+9k4fKDIfc8Zfp0tJCQkKirKcw4TGRkZwYlypyX423ZZWVnB35sL2PyzmYHA52qwOFQqlUqlYrNHIpG+ZbYWfnmQk5MjkUjcNYxbj80bwNQ/bCApKSnu+sTOlCIsLAx3pV91gvWvgHjD5cfBYrGio6PLysoePXqUkpJy7NgxUVHRCxcu/PHHH1QqVVdXl06nT5s27dChQxYWFhQKZf369fAdh3Xr1jk4OLDZbE1NTVVV1dra2oCAADhH7v3796WlpePj4/Py8jgczr179xYvXvzkyZOfSui+aNEif39/LS2thQsXjhgx4ubNm9LS0rNnzwYArFu37sKFC9gAKSkpYWFhY8aMmT9/fr/eXaTT6eHh4QwGY8uWLQCAN2/eZGdnk0g
kOzs7X1/fkSNHbt682dfXl81m29vbS0lJUSiUjx8/fvjwwdjYmDD+/MJ8+vTJxcUFDn7/yExbBD8AYq6GnwIqlSosLAxP5RgMBo1Gw04vBwCgUCg8Xyf7t8BgMEgkEjzxRBCkp6cHHlNYLBY8scUGoFAoCgoK3NOLf18oFIqsrCx2jsOuri541sxms2HqbDa7t7e3/6ZmJCAg+EcgRj4CAgICgt8L4jkfAQEBAcHvBTHyERAQEBD8XhAj36/A9/Lx/+OgwjaeIrF/BGzdEh8CExD8GhAj3394+fKlvr5+Y2MjACA5OXnhwoUlJSWTJ0/29PR88uTJmTNnHj16FBgYOHTo0Nu3b2/cuDE0NPQrUqmpqRk3btyJEyc+fvxobGz87t07uNLe3r6wsNDT03PWrFnoRF9FRUUDBw48c+bMjh07bt68WVpaOmLEiBMnTpw4cWLLli3oUbiyshL7VfhPyKZNm1xcXFxcXNzc3K5fv75w4cIlS5aMGzcOfcbMYrEWLVo0d+7c0NDQnp6ev//+OyYm5vDhw/2aK2xO0tPT7e3tlyxZYmlpCdVTAICCgoKNGzeOHz8eTnzj7++fnJz8999/9/T09GvGCAgI+p1/8mPCnwkWizV9+nQbGxv4yefly5cRBFmzZs2LFy8QBOnp6amvr29ubjY0NEQQJDc319zc/OsSWrBgwfv37xEE2bVr15AhQ6qrq9Hk/P39t27dig1sYGDQ3t5OpVIVFBR6enqmTJmSnZ2NIIi1tTX8thpBkOvXrw8fPvyn/UaVw+HA7+4zMjIePHhQW1uLIAiTydy9ezca5uHDh76+vh0dHQiC3Llz5/bt2wiCrF27Nicnp/8yhs0JXEYQZNu2bWgAOJVMdXX1nDlzEASZPn06g8G4dOlSbm5u/+WKgIDgB0Bc8/0/S5cuHTNmzLZt28B/v94lkUjp6emvXr3at2+fpqYmiURisViVlZX37993dnb+ulTgN6cAAAMDg/Pnzy9YsIBOp6PJ4T4UI5FIHA4nLi5OT09PUlKSRCIlJiZevXpVU1MTCvJpNJqEhMT8+fPRb8B/NkgkkpaWFgDg5cuX9vb22traAICYmJiJEyeiYbDCNlQqpq6uXllZ2X8Zw+YELnd1daFz8QAAxo8fDwBgMBgWFhYAgFWrVk2dOlVMTGzEiBH9lysCAoIfADHy/Q9nzpwpKiq6e/cuusbAwGDs2LHorFq9vb3Xrl2jUCi7d+/+9uQcHR3t7e3Xr1+P8P+25PHjx93d3bGxsWh+Ro8e3djYCG/KPX78WEhISEtLy8fHR0AkPwPt7e3oR4pRUVFTpkxBf7Kysjp//vy5c+du375tb2/f09Nz9erV169fm5ub93eusDl59erVnDlzcAHCw8P/+usvAACJRHJ2dr548WJDQ0N/54qAgKBfIUa+/wCN5sLCwk+fPj1z5gw0nQMA5OTk1NTUNm7cCO8uSktLnzx5kkKh3Llz56vTggMAtMsfOHCgo6Pj9evXuDAlJSXQ2rds2bJFixah9iMVFRVLS8vx48c/e/YMANDe3j537lxnZ2dDQ0PuSH4ecnJy0FncYFVzm8mgsE1SUvL8+fMTJ07U1dUdNGhQv+YKl5PU1NQxY8ZgA0RFRS1ZskRMTKy7u/v58+cbN248dOhQWFhYv+aKgICgvyHsZf8hMDDwzZs3Li4uysrKT548efPmTVNTU15enri4eG9vb1VVVUNDw7hx4+rr64uLi+/fv29tbS0tLf1F9zxpNNqaNWsGDBgwfPjwvLy8R48ezZkzZ+DAgf7+/l5eXgwG4/379wUFBXDOrXfv3h09erS+vj4yMnLx4sUAgLKysvLy8hcvXqSmphYXF3t7e/v6+lKpVDiOmpiYHDhwwNLS8udUvTx//hxKwgAA7969s7W1hcvQXoYVtiEIkpiYGBUVhZ1Srp/A5qS7uxtVVUF7WXl5ua+vr7q6OpPJfPbs2ciRI2NjY+vr652cnPo7YwQEBP0K4XD
hDWrV+r7wMwH2U3I/D9iC0+l0MTEx6OSEBccK23p6ephMJk7e1k9gc8Jms1ksFlQAo/YyAaUgICD490KMfAQEBAQEvxe/8nUGAcG/HQqFAheEhIQUFRU/G769vR390FNRURE72QWbzS4rK+uP6ZZKSkqGDh3a34ZxAoLvCPGGCwHBT0pFRYXKf3F3d+/LJikpKVpaWioqKjdv3jx+/Li1tXVgYCD86fr164aGhpmZmd83kwUFBQYGBhcuXPi+0RIQ9CvE3c6fBSaTKSoqyu8JEz8YDIaoqCiHw/nJz7hhN0O/VuQuJofDwc3GB5UC8MFb/4HNCb/Kp9PpgudN7Sdu3brV3d0NK83Ozq6Pcy4aGhoWFxeTyWR1dfXr1697eHhcvHhxy5YtVVVV/v7+O3fu/L7Tk/b29p45c2bJkiXolz8EBD8/xN3OHweDwThy5EhWVlZISIiwsLCPj09ra+vBgwdjYmJCQ0PHjh1LJpOTk5OPHz8+f/78pUuXAgDq6+t9fHwqKioWLVq0bNmyHTt2YIeHjo6OSZMmvX//Pjk5eciQIUOGDPnnCieIuLg4KpVaU1MzZswYSUnJyMhIDQ0NAMCSJUtggPfv36elpb1+/TowMFBeXr6rq+vixYt2dnbW1tb9lysKhfLs2TNdXd36+no3N7fLly8PHz48JSXFy8sLDbN///7s7OxRo0bt2bPHyclJQUGhra1t4cKFfbwC+0bev3+voaFhYWGxaNGivs+Fiw25YsUKT09PLy8vd3f3gIAABoPR3NysqqoaEBDQ2tra3t6+e/duKSmpd+/e5efn9/b2Ll++PCoqqqioaPbs2b29vdHR0bq6uqtXr87Ozk5MTOzu7tbX158/f/6rV6+qq6u7urpmzJhRVFTU29v76dOnoUOHtre33717V0RERF9ff+bMmdnZ2UFBQdra2gMHDnz9+rWLi0u/NigBwRfwT8ljfk+ioqKmT58OxV0FBQUxMTHV1dWmpqbw0z0EQfz8/BAE4baUzZgxIz09HRdbd3e3vb19d3c3giC9vb3wL51Oh7/ClyThcmdnJ7rc1dUFF9hsNty2v1m3bl1JSUleXt7FixdXrlxZXl6OIMi4ceM4HA4M0NDQgCDI4cOHMzMzWSzWggULoPCsX3n48OHNmzc5HM769evr6uqWLVsGs4rmqqqqatOmTVAvRyaT4TXo+fPna2pq+jtvCILQ6XT0PdLx48c3NTX1cUNDQ0MAAJlMhv/q6OgAAHJzc8+ePQsASEhI2Lt379KlSxEE2b59e3t7e0BAgJycXG9v76RJk1atWhUXFwcAgGIEBQWFiRMntre3S0lJlZSU1NbWHjhw4O3btwMGDEAQJCQk5MWLFx8+fAAAHD16lE6nGxsbHz58mEajSUpK3r9/H0EQbW3tQYMGvXjxwtbWVl1dncVi9UtlERB8IcRzvh/NpUuX4uPjnz17JiQkJCQk9ObNG0tLS/SThlWrVgEAuC1lOLFZT0/Prl27cnNzS0pKOBzOrl27nj17Vl9f7+fnd/HixZ6enufPn6emps6ePZtMJp86daqkpMTNzQ0AcPHixcrKyps3b6ampkZHR2/cuPHt27f9XWRPT08XF5eAgID169ejcjIREZHOzk4YQENDIzExMT8/38DAICsrq6Ghwd/f393dvV9nbJg7d66/v7+Hh8fBgwcHDBggKyvr5OS0cuVKtJ4RBNHW1p4wYcLLly/V1dXhB+81NTUDBw7sv1yhiImJFRUVRUdHL1q0KDEx8cqVK98Sm4yMDHrDlkQiPXnyxN3dfePGjXJycidPnjQyMhITEztz5szu3buxt5fhJkJCQgiCzJw5MzEx0cvLi0Qi1dfXT5o0SVNTc8aMGWi04eHhBQUFw4YNk5CQGDhw4Pnz52EMampqjo6OY8aMaWxsJJPJ31IKAoLvBTHy/WjExcWDgoL27t2bm5sLAKBSqTyP7zhLGaSmpiYsLCwhIeHp06ejRo2ysrLS19cXEhK
CL+yxWKz79+/b2dlJSEj4+fn19vbOmzevtbXV3d2dyWRWV1fTaLSqqqrnz587Ozv7+vpyOBxoYe7vIsNP+MPCwt6/f79169YXL174+fl1dXVhP9obPnz4kCFDbt26lZ2d7eLi4uXlJSUlhTrb+gMGgzF37lwKhQIF2UOGDDEzMzt06BAaYPDgwV5eXmlpaT4+PnDNp0+foIP0B0AikbS1tSdPnhwQELBx40ZUKvRFMBiMhoaGAQMGDB48GF25ffv2BQsW3LlzZ+zYsVQqta6ujkqlAgAsLCyMjIy4I5GVlb13715vb6+Li8uFCxcmT568b9++tLQ0KyurjIwMNBicYwSeH4iJiTU1NeGK8xX5JyDoJ4iR74cCdVmampr379/fvHkzAGDq1KmJiYno8EOj0eACzlIGERUVlZGRkZKSAgDACZVIJBKCIPCwIiMj8+DBg+3bt+fk5HR3d0+bNm3jxo0KCgpHjhzR0dFRUlLicDh79+5VVlaGY+GIESOWLVv2A9yY3t7e69ati4iIuH///rBhwy5fvtzb27tmzRpsGHl5+Y0bNzY2Ng4ePLi8vBwAMGzYsPr6+v7LVUBAwOjRowMCApKTk4uKiqhU6sGDBw0MDMrKyrDB1NXVhw0bBpeDgoJ+mMAlOzvbw8MDXh/r6+s7ODj0cUMEQdC/jx49YrFY165dg/0EBrhz505AQEBCQgKVSv3w4cOYMWM+fvyYl5cHACCTyfAWa29vLwCAzWZzOJyPHz/KyMiUlpY6Ozu/fv364cOHmzdvLi4uNjAwePv2LRqttbW1kJBQd3c3AKCjo2PChAloNggIfjaIN1x+HL29va9evRo8ePCOHTvGjRt3/PhxAICJicnevXvXrVvn4OAAB0UNDQ2cpaysrKy0tPTNmze7d+/W1NQEAOjo6MyaNYvJZLa2tmZmZmZlZUlKSqqqqra3ty9btkxZWXnSpEmzZs2aNGnSpk2bCgsLHzx40N3d/ebNm+zsbEdHRwaDMWLEiLlz50Ln54ABA/q14IsWLfL399fS0lq4cCGdTg8PD2cwGFBmBu1l165d09bW7u7u/uOPP1RUVIKCgpKTk8vKyo4ePdp/uZo1a9bNmzelpaVnz55tYGDQ0dGRnJysqak5dOhQaC8rLCxkMpmysrKenp5wk7Kysh/2BiOTyQwODr5///6KFSumTp2KFXwL4PHjx/C84dixY+rq6h8+fEhKSho7diyLxXrz5g0AICYmJjc3d+/evaampiNHjrS0tDx//nxxcfHEiRPnzJnj7u5ua2s7ZsyYy5cvUygUFovV3t5eUlKCmmxXr17d2Ni4Y8eO+fPnKykpLViwAFpM4+Pjvby8zp49GxwcLCoqqqamdu7cuezs7IaGhvb29vz8/MTERABAbGzs8uXL+7HWCAj6BvFVwz8JVlpGoVC+SLnJ4XDgJw3oi/gIgsAbp/COE/ouPofDgV83CwsLk0gkGo0Grxp7e3vFxMR+zG0oBoNBIpFERUUpFIqsrKyYmBhcj9rLenp6FBQU0PBfWhtfB4IgPT096Isk3d3d0AwOP29AEIRCoaioqKDhf7y9rD9S7OzslJGRqa2t1dbWhp0HQZBPnz4NGDAA7Uv19fUDBgyAqdPpdHgxhyCIsrJyZ2ennJxcXV2doqIi7EhYaDRaa2vrD7snTEDwdRAjHwEBAQHB7wXxnI+AgICA4PeCeM73/cG+Fo+9lwj/RVfi/u3LJt8Y5luiFbAJvzBfscm/qIDEzRICgn8vxDXfPwzqFyb4R+hL/Xd1df2AnBAQEPwwiJHvPwQGBg4dOvT27dsbN24MDQ0FAPj7+w8dOvTx48cPHz5ctWpVU1PT5MmT//zzz89+AIdqArDL8F8qlfrHH39cvHjxyZMnTk5Ob9++nTNnTlFRkYBNsCsLCwu1tbXPnDmzffv2Gzdu8AyDIAh8JZ07Hn7R9iWMgE34hYELdDp95syZLi4uM2bMuHnzJgDg48ePp0+fLiwsxG4
SEhJy69YtaB65fPlybGzsiRMnvjS3X1RACoXi6+sbFRV1584dGo12/fr1tWvXent7Y8MUFBScOnWqrq4O3cTe3h6NnICA4N8KQoAgCII0NzcbGhoiCJKbm2tubo4gSG1trZmZGfy1rKwMQZA1a9Y8f/78W1Lx9PSE300jCFJXV/f27VsPD4/CwkLUKIYgSEdHB1yg0WhMJrOnpwdBEFQzZmBg0N7eTqVSFRQUenp6sLqylpYWBEEYDMb69euhzOxnAOf9gnOa4yxW0dHRf/75J1zmKRLrD7D2sqamJhaL1dvbu3jxYjQAd1YDAgLs7e37L0sEBAQ/BuKa7z+QSCQWi1VZWXn//n1nZ2e4pqOjIyws7OHDh6mpqYBLIfYVBAUFzZgxAy4PGDBg6tSpAICwsLCtW7cGBwe3tbX5+/v7+voePny4pKTE2tr6wYMH06dPf/r06e7du0NCQmAeOBxOXFycnp5eRkbG5MmTDx48WFtbu2PHjpKSkiVLltTV1SUlJX38+PFba+Q7gfN+PXz4UElJaefOndBuBTl//ryKisqyZcuioqJ4isT6A6y9TFVVlUql7tmzZ/HixWgAXFYjIyNhexEQEPzbIUa+/6e3t/fatWsUCmX37t1wjbS09Lhx4ywsLOBnXt8OT1eZvb29p6dnfHz8gwcPqFSqiYmJkpLSsGHD5OXlXV1dR4wYMXz48HXr1sXHx8Pwjx8/7u7ujo2NHT58uJKS0smTJyMiIoyMjKysrPT09KKjo+Xl5X+AmeWLQL1f2dnZW7du9fb2fvr0Kfr8rLa2dteuXRcvXjx06BDCSyTWH2DtZQAASUnJlStXnjhxAm0gbFZzc3OZTCb8hhLVjRIQEPxLIUa+/4AgiLS09MmTJykUyp07d+BKERERZWVlQ0PDiRMn4qRW+fn5X5HKjBkzsIZo1FUmIiLCZrPFxcXl5eXt7e0XLFiAhoEfFwsLC8PniwiCLFu2bNGiRTIyMiQSCf21pKQEAKCmpiYlJYX8fE+hUO8XKicbOHBga2sr/FVOTq6trU1ZWZlOpwsQiX1fsPYyAICoqOjIkSNHjx6NPsfFZvXTp08ZGRmXL1+uqamBF98EBAT/XoivGv5DdHR0fX19cXHx/fv3ra2tobqisbHxyZMnTCYTzoGQl5cnJCTU09NTWlrKYDC+wq114cKFP/74g0ql6urq0ul0W1vb8vLy7OxsJpNZVla2e/fuOXPmZGZmTps2raurq6GhoaGhoaKiIisrS0JCoqamJicnp76+PjIyEt6Uy8nJqaysbG5udnFxWb9+fUxMTGNj4/r1658+ffro0SM4w99PAur92rhx48mTJ5WUlAwNDXV0dKC97ODBg7du3Ro8ePDGjRtxIrH+yxLWXlZWVubv729tbT1r1iwpKSloL8Nmdfbs2bNnzwYAZGdnr1ixov9yRUBA8AMgPkv6B6BSqcLCwjyn+eZwOCwWC5V7fRE9PT3QJsVms0kkEm6K838WrIWLw+F0dHQoKioCjL+tt7eXxWJxi8T6FQRjL6NQKAoKCvAaGp2cHZtVAgKCXwZi5CMgICAg+L34iS4LCAgICAgIfgDEyEdAQEBA8HtBvOFCQPCT0tra+urVK7g8efLkgQMHoj9RqdSwsDDs14f8YDKZT5484XA42JVz5sxRUVGB0+bp6OhMmzbt/v37wsLC06ZNgxNAfguRkZE1NTWWlpYjR44EAGRkZOBehNbX1zc1Nb179+7q1au/+xxMZWVl79+/d3Nz+77REvxq/INf0f9U/NPtQPAv4wf0SfihIQBASEiooaEBrmSz2Q8fPtTQ0NDW1u5jPPCzDXt7ezqdXllZ6eDgkJKSAmfpe/DggaSkpKenp7Gx8eHDh11dXQVHdfjw4c7OTgEBYmJiAACBgYGDBw+GazgczqxZswAAqampLS0tN2/eXLduXWJiIolESk5O7kv+P5soluPHj8vKykJtEAEBP4i7nQQEPyldXV1QfcBgMDQ0NOB
KISGhpUuXmpiY9D0eeBmnoKAgLi5OoVDOnTsnKSlZVFTU3d0tJSWVnZ1dWVkpISGxYcOG06dPAwCam5vpdDq6eUtLC5PJBAD4+/sfPHiwp6cHF39nZyf6dX9GRgYAwMbG5t27d3ANiURSV1cHACgrK7e1tTk7O8+ZM2f06NENDQ2WlpYMBqO3txe+Q4u1h9fX18PrVDRRDofT29vLZDLZbDZcgGvgIId+G/rHH3+UlZWJior29vZCgS2FQsHmlkwm973qCH5ViJHvP/zTpyAE/zL6u0NyOJxDhw7JysrOnj0bflCPBQrhvpSqqqpr167p6emZmpq+fv0aABAREdHU1FRaWtrc3BwfH6+iouLi4nLnzp1FixYlJCRQqdSZM2cGBwebmZnFxcUFBwcjCHL37l3suLh9+/bjx48fPHhw165dPT09cXFxAAB/f38dHR1c6s3NzcePH+/p6XFwcPD29tbQ0IiNjb1z546EhIS7u7ujo6OOjk5HR0djY6OlpWVycvL48eObmprQRGtqaoYOHTp79uyqqqqhQ4dOnz69ra1t1KhRBgYGu3btGjJkyOnTp1kslrOzs7q6OpvNdnV1lZCQ2Ldvn5WVFfSMP3361MbG5s2bN7Kysi4uLl9RgQS/DMTIR0DwM0KlUr28vJYtWxYXF+fg4PDtERYXF585cwa94pk+fToAwNHR0cbGZvDgwerq6vPnz4+MjHz69OnChQv19fVPnTrl4+OTm5u7bt26FStWMJlMGxsbAMCGDRvQT1HfvXvn7e29bNkyFxeXs2fP5ubmomG4M3D79m043AIAYDAAgJWVFQBgxowZ58+fr6uri4mJ8fb27unpmThxIofD8ff3RyMcPHgwfNI5dOhQXV1dAICysvKwYcMkJCTOnj1ra2v78OFDERGRsWPHwpjHjRsHANiyZYuHh0dYWFhra+vFixfV1NRWrlzJZrO/6KKZ4NeDGPkICH5GZGVl//zzz/v37+fl5dXW1tbX139jhAYGBleuXIEDXmNjI88w0IEXGRk5fvz4rVu3ZmZmwnuJXl5ePG3deXl5AABpaWmoHRCs9PPy8rp06RLPn0RERMTFxQEADAajpKSksbHx3bt3f/75Z18U4dCEIC4uznP6MDRmJpO5YMGC8vLywsLC3t5edIAk+D0hRj4Cgp+R4uJiX1/f3t5efX39OXPmaGpqHj58+MWLF/BX+Lirj1Fh782uXbu2oaHhypUruJ/gAtTFqaurL1iwQE9PT19fn0KhZGdnA4xjls1md3d3w+Xhw4fDn+DDP/RCCnc3GPnv5IgODg6qqqqCczt06ND29nYbG5sFCxbIy8tjExUTE4Ol/robzrNmzbK2ti4pKamqqoJnAAS/LcTIR0DwM1JbW7t79259ff3Dhw/v378fAHDjxo3AwEAEQYKCgtLT0xsaGvz9/bmn/sDBZDKvXbsGAEhNTb179+7Zs2dHjx6tpaUVHR0NAAgODi4qKiopKSkvL09ISJg1a9a8efPWrFmzbNkyOPWVubm5g4PDzp07i4uLx4wZIyYmtmXLFjTRSZMmbdq06cGDB0+ePNmxY4eFhUVCQgIAwM/PD83Ahw8f4MulN2/ebG5uhlK92NhYAEBSUhJ8LpiSkgLDpKambt++XU9Pz9raev369SIiIthEFy5cmJGR4eXl1dDQ0N7enpWVVVhYWF9fn5+fX1BQ0NjYWF5enpiYCABITEyEc5tgY967d29gYOC5c+d27NiB3ncl+D0h7GUEBD8pbDabwWBISkrCf3t6eiQkJH6AjrWpqUlJSQneRQQANDQ0qKurw3SpVKqUlBRu3sT29nYSiYRen307CIKQyWT0y0JsohQKRV5enslkotXSdy5cuJCSkqKiotLR0REdHV1XV/e9Mkzwr4MY+QgICH4LbGxs3N3dra2tCwoK4uLivL29/+kcEfxjECMfAQHBbwGCINXV1WQyedCgQQMGDPins0PwT0KMfAQEBAQEvxfEGy4EBAQ
EBL8Xv6yx+vTp0yQSiUajsVgsRUXF1tbWI0eOwJ/q6ur27Nlz69YtOAEsh8Px8/NjMBgeHh6C42Sz2WfPnk1NTV2xYoWTk9MX5ae0tPTJkycqKioMBqOjo+PAgQNfVy6eUCgUb2/vr5gjHgAQGBgInZBaWloLFiwQEDI+Pv7x48fGxsadnZ2GhoaCA38758+ft7e3HzZsGPz36dOnHR0d69at++yGWVlZjx49OnPmTL9mry8gCOLr66ukpEQYQwgIfip+2ZFPQkLC09Pz1KlTXV1dhw8fvnDhAvqTlpZWdnY2eptXSEhIS0urL285BwYG2tvbT548eebMmePGjeu71T47O9vd3f3NmzdKSkpUKvW7i+SVlZW3b9/+FRvm5OQ8efIEDn63bt2Cg1lXV9e7d+/ExMQUFBQsLCzi4+PZbLa1tbWtra2zs/OOHTsGDx48ffr01tbWtWvXJiUldXV1wdjMzMxEREQ+fPgAANDU1DQ1NWWxWPDteYihoSG31Iof7u7uUOT/+vXrmTNnKioqpqSkCN7k3bt348aNMzExiYqK+ora+GpgDrnXk0gkGRmZ6urqH5kZAgKCz/LL3u3E+ZM2bNiQnJwcEREBj57CwsJJSUkHDhwoLS0F/9VAAABqa2tv37599+5dno8/p02bZmJiMmbMGEtLSwAAlUq9fv16XzLj5eXl6emppKQEAJCWlv77778BALm5uQEBAQ8ePGCxWFVVVb6+vikpKYcOHWpoaIB5AABUVVXdvXv35cuXf//9NxTvoqXo6uq6e/duQkLC7du3y8vLw8PD2Wx2UFBQXFzc8ePH4TdMHA4nPj7+woULYWFhPN/hVlRUjI2NTUpK0tTUdHV1hTXg7u4+fvx4Q0PDK1euiIiIfPr0KSYmBg5CEhISJBJJREREV1cXflPMZDLv3r1rZ2enq6ubm5urrKx87tw5dXX19PT0DRs2UCgUWMNBQUEDBw58+/Ytdx5yc3PPnz9PoVCuXr0aFhbG4XDu3bvX2dkZGxtbWlqakJCwdevW+Ph4YWFhFot15coVf39/npXc2Ni4atWqFy9ewNffQ0NDz549S6VSAQDp6elXrlyB0whwk5GRERYW5u3tXVNTg4ZksVivXr2Kjo4+dOjQy5cvAQAsFuvJkycPHz7Mz89ns9nh4eExMTFHjx6Nj4+HOQQAhIeHnz9/HhY5Pz//5cuXmZmZfekhBAQEP5JfduTDKX2ZTGZYWNjMmTP/+usvuIbFYk2YMGHatGnoZ7lMJvPChQvz5s0LCwsLCAjgjhMOXXQ6feDAgZqamiQSCRUYCiYrK0tfXx/919jYuLKy8ty5c4sXL2axWJ6enlpaWsePH5eTk9PT09uwYcPy5ctDQ0OrqqokJSW9vLyMjIzU1dV3794NZ2WDpRAWFr5y5Up7e/uoUaNgzoWFhXNzc8PDwz08PDZv3gwACA4OJpPJ48aNO3v2LPRL4Rg0aNClS5dmz569c+dOKNQ/duyYm5ubkpKSjo7OyZMnAcZNBXn48KGrq2tBQcHSpUsBADIyMrKyshISEhQKZdq0aTC8oqKiiYkJmUxWVVXdtGkTnCXA2NgYDq44DAwMbt++LScnBwAoLy8XEhJisVji4uLPnz8vLy83NzeXk5OztbUFAFRVVa1ater+/fs1NTXc8airq6upqc2dO1dISKitrc3MzKynpyc4OLi4uDgxMXHx4sVubm48h/+ysjIfH5/x48e3traiIevr6yMjI6OiotauXevl5VVbW7tlyxZTU9PFixcvW7asqqrq9evX0dHR06ZNs7CwgDn08/OTk5OzsbFxcHCor6/38fFxcHBA51ggICD4efhlRz4c0tLSzs7O7969Q429tra206ZNk5CQ+PTpE1xTWFjY3Nycn5+/ZcsWAVq/W7duwclcpKSkVq1a1ZfU5eXlcabEt2/fDh48GAAwc+bM0NBQUVFRGRkZY2NjPT09VVVVcXFxXV3dkpISdXV1JSUlfX39lSt
XxsXFYUshJSWlqKg4ZswYc3NzdM5SOTm5YcOGKSkpQYdhY2NjcXGxsbHxyJEj+X1ovHTp0pycnKSkJC8vLwBAWVmZmpoa/Innm9/Lli3z8/ObOHHizp074ZrKyspnz57duXMHW0XHjh27ceOGkJAQ9qtn9Noai7i4+Jw5cyIiIkRERIKDg8vKyoyMjMTFxbk1V3p6ejIyMgYGBpWVlfwrGwAAFBUVtbW1jY2NKyoqIiMjEQQpKCi4f/8+z3lQtbW19fT0LC0t4+Pj0ZBycnKqqqpmZmYDBgxwdHSMiYkJCwvT09MTFRW1traOi4tTV1c3NTXF9pNXr14xGAwajXbt2rWXL19Csxd2OlkCAoKfhF985ONwOPC+ZVFR0YULFyZNmiQsLIwNoKCggB7ftbW1a2pqbG1t7ezsUEshjsjISCcnJwUFhS+a5cvZ2Rl7EdnY2KipqZmWlgYAIJFIUDyPg0T6nw9O6HS6jo4Ov1Lw+zTF1dWVRqOlpKTwe/nl2rVr7e3tOjo6vr6+8IGchYUFfFDHEzShsWPHvn//Hi7r6uouWrTo0qVLtbW1cM2aNWskJCSg77EvuLu7X7lyRVZWVl9f//Lly9bW1ti0UFVjX6Jis9m4kDo6OhQKxc7OztLSsr29XcC2/EL29PTo6uqqqqpmZWUBAEgkEjxrwSYKABg0aBCHw4E3flksVm5uLvhawyQAoKSkJDU1NS0tjUKhpKampqamVlVVAQAKCwtTU1P5Kadx5Ofnf/z48StS7wtsNhv1iP4YEATJzs6GWu1vgcFgxMXFNTQ0fOmGny1yXl4ebKzs7Oy2travzyIX5eXlL1++hDfVvyNhYWHYOad+H37lkY9MJsfFxaWkpFRXV9Pp9JSUlGvXrsF3LoyNjR8/fnzjxo09e/aIioq+f/++sLAQPr6ytbXdu3cvet2D5fHjx56eni4uLlZWVtHR0c3NzZMmTepLTvbt26emprZ58+YnT57cu3evsLBwzpw5WlpaQUFB169f9/b2Li8vb2hoKC0tTU9PLygoqK+vLywshIfOlpaW4ODg27dvnz59GluKgICA6upq+OTs3bt3dXV1VVVVubm5eXl5lZWVNTU1RUVFgYGBGRkZAQEBN2/e5Cl4lJKS8vT0fP78+aVLl+A13549e5KTk8+dOxcZGRkZGclisdLT03Nzc6lU6vv37+vr6/38/C5evHjr1q2LFy8CAFJTU0tKSl6+fOnn53f37l0KhVJRUZGWlnbr1q2//voLZi8xMbGwsFDAgWDYsGHKyspOTk7u7u6DBg0CALS1teXk5CQnJ0tLSwsJCd25cyc9PT0vL6+hoSEvL4/f2Dx69Ojjx4/HxsbW1dXV1NQkJSXl5ORMnjw5NDTU2dn5ypUrPN+vSUpKysvLa2xsnD59Oi7k27dvw8PDFRUVbW1tfXx87t27FxISoq6ubmVllZ2dnZCQQKfT0Ry6urp6eHisX78+PT19xYoVBQUF+/fvj46OLi4u5jmNgGBkZGTmz59fWFioqKj44sWLVatWaWtrw/WnTp1SVFT8bAw9PT0nTpxAZ4j97vT29v7gp5idnZ0HDx6Ez7C/hebm5m3btlVUVMA4+77hZ4uspaW1aNGi8PDw2traVatWbdu27RuzCvnw4cPr16/nzJkTFxcHJ+wVDJvNhk+4Pwvctb85g/8+fqMv2Xt7e8XFxeFfAACNRhMXF+e2INLp9D4+vUPj7GNgNpvd2tqKvYnX09MjJSUleCsjI6PMzEz4agl3KQRz/fp1Jyenzs7OqqoqERGRiRMn4gLAwra1tUlISGBFiFQqVUJCAndl2a+w2WyYHJPJ5H5GKyIignNF8oN7c0jfmxUNeejQoWHDhjk6OqJthCAInU7nNkaiOWSz2RwOB81AT0+PuLj4V1fjoUOHSktLHzx40NHRoampmZ+fP2TIkOjoaHFxcXR+O8Ec+z/2zjquiuXx+0NIiUhKWJSBrSgqghigooCgIFiIigoSFmJ
iIAaYKCWgoqKglIJIC9JS0iLd3Rw4cGp/f8xz99nvnsMR496r3PP+g9eeZXZmdnZ3Znfi87l6VVBQ0MLC4scy8Bty8uTJOXPmjHCUgQl6eno2NjZLlizZvn17UFDQL8kbRFVV1dLS0tDQsKKiYtq0adD4/tWrVy9evDA2NiaTyWVlZWfPnr1+/Xpzc/O8efOam5uPHz+el5f39u1bISGhqqoqRUXF3t5eW1tbNE4rK6utW7fSP7/DcebMmV27drE8CJkwmr/5cMCmAm0weHl5GYr/jrzZw8Y2Ejg4OHBjV99s9hobGzs7O8lkMlrv486COYGBgXfv3n3z5k1LSwu0AMUBT1ZISAhXm48dO/afbPYAAGhy9O3WmDFjRtjsMTwcMvLLCkMiCNLY2FhXV4ctGTY2NoZCyWgOOTg4sBng4+P7mWLctWtXcHBwT09PV1eXoKAgnO6bkpKyYsWKr1+/enh4HD58+OLFi9hDHjx44OnpaWdnh/oQZWRk7N69e+7cubDb88GDB+/fv4e930+fPnV1dV2/fn1FRUV2dvb06dO9vb11dXWPHj0K5159/vzZ1taWSCTeu3fPyclp8+bN2I/XxMTEefPmAQDCw8OXLVt2//59TU1NR0dHLy8vTU3NmJiYpqamdevWnTt3ztjYWFlZub29/cmTJ5s2bTp27Ni1a9eio6Nv3Ljx8OHDw4cPk0gk5ok2NDRcvXrV19cX192HZltPTw93SExMTGRkpJ2dXUVFhbu7+969ewcGBo4ePYpdSltQUBAXF/fq1SsKhYItGQiNRsPlCj3l+vr6y5cvh4SEHDp0iOG16+3tffHixcaNG+Ezrqio2NfXp6+vv337dkNDQ9i9JCAgcOjQoY6OjpcvX/Lw8Dg6OioqKgIAzMzMVFVV0agqKioyMzPj4+NfvHghJyeXmZn55csXeXl5X19fXV1dFxcXAAD2fmhpaQkODo6IiMjPz1++fHlRUVF+fr6MjExvb+/r169Xr17t4uKycuXKlJSU6upqdXX17Oxs+gi7urpOnjzp5OS0cuVK7BD+6AFh8RtTV1dXUlJSV1f3wzE0Njb29PT8wiz9RyASiSUlJSUlJUQi8V/MxvLlyz08PJycnMLCwiZNmtTb2+vk5IQgiJ6e3rNnzx4+fMjFxdXR0QEDZ2Zmbt68GW5LSEgUFxc7ODjY29sjCHLr1i09Pb2Wlhb4PVFUVJSWlrZz505fX9+NGzceO3YMQRBRUVEYVVNTk5ycHJVKDQ4Orq+vv379+pUrV3x9fWVlZd+8eYPNnpCQEIIgnZ2d0tLSCILk5OQsWbIEQZDo6Oj9+/fDfEZERCAIsmfPHgcHh8LCwrlz5yIIMjQ0JC8vT6FQEAQxMDDw8PBgnui2bds+f/6MIIiFhYWPjw82D2i2cYeYm5v7+vp2dXU1NDSkp6fr6uoiCOLj43P06FEEQXR1dZOTk2FBIQiCLRls5Lhcoaf89evX2NhYEok0adIkOJkARUVFZefOnXZ2dsrKyvBiIQhSXl6+aNGi9PT0Fy9eJCQkIAji6+tra2tbX1+/ZMmSvLw8GCwyMtLU1JT+NtDX14dHrV+/PisrC0GQyZMn9/T0NDY2zps3j/5+UFdXLywsRBBkw4YNcO2ytLR0d3c3nCyNIMjLly+tra0RBNHQ0GAYoYuLy7Vr1xAEmTdvXnV19fB36J/Kf+ib709k0qRJM2bMgAM8P4akpCRcMMDiu+Dh4ZkxY8aMGTO+qw/gl7Nr165Hjx719PRs2rSJi4vLzMxMW1sbANDY2Lh79+6DBw8ODQ3BxTYAgM+fP6O5hS/74K+lOOrq6nV1dRMmTDAyMpo1a1ZNTU1WVpampubOnTvDw8Pv3LkDAODk5ISBJSQkli5dGhkZ2dLSMnHixKysrL179+7cubOiomLz5s3Y7MEvWnZ2djhrV0BAAPZGCAgIwDk4HBwc48aNAwCoqqo2NzdzcnKKiIgAAGpqavr6+uDhKioqBQUFzBN
NSkqSlZUFANB/c6PZxh1ia2t769YtCwuLkdgnYUsGux+XK/SUZWVl6+rq4CQv+oE3bW1te3v7+Ph4JycntCuVk5NTUFBQSEgI7WoqKyvT1ta+fPky/I4cCWjnBwcHBzc3t4CAABynZHg/0AOPAgCgB6J9ErgIFyxY8OHDBwKBIC8vz3DSw58Oq+VjweL3xdDQMDc3V1VVlY2NzcTEpKioaObMmQCAvr6+zMxMAEB1dTU6zXj27NnQGBYAAFc0ovG0trYuX768vr7+4MGDERERlpaWYmJi/v7+CIIAAOjnjFhZWdnb28PGZsKECS9fvgQADA4O4ubrInSzBOj3QOrq6rArQCZPnjwwMFBeXg4AaG9vh718TBIVERFJTU2F8Q83xQN3SH9/f2ZmpoiIiIeHBwcHx9DQEACgv78fl0M4LxdbMrgA2FyhJ+jl5dXa2rpmzRr6ucRo9jg5OXl4eNCZk3x8fDNmzNDU1JSXl+/s7EQQZMaMGUFBQba2tugVHK700P3we4V+P+5+YGNjI5PJAAD0xAcGBhgeOFyE0JE4MTHx+fPnP2CF+Pszalu+xMREc3PzBw8eXL16dSQj2B0dHU5OTra2tm5ubjdu3GC4Vvrvg0ajPX78GHpn/yQ+Pj7YfvmOjo7z58//TIQIgri5ufn7+wMAzM3Nh3s4wfBnUVlZaWxs/DN5+M8iIiKyb98+dXV1AMCePXu2bdsG91+9elVHR8fIyOjDhw/oYnllZeWdO3c6OjoGBwcbGBjIy8tPnz49OTk5IiIiMzPz0qVLPT0958+fJxKJpqamenp6BAJBUVHR0tJy6tSpOTk5nZ2d6JT9ZcuWTZkyBaZ77NgxDw+PDRs2XLlyBft1EhcX19HRkZycnJCQ0N7eXlZW9vHjx8rKyoqKiqSkpOrq6tbWVgDAq1evQkNDiUTi9u3bY2JiysrKioqKeHh4Hj9+7OjoGBMTQ6PR4O3BJNFr167Z2NicO3euoKAgNzcXVusAAGy2cYc8fvw4NjZWQUFh3bp1CxYsaGpqsrGxgQb0X758KSkpiY+Pp9FoCgoK586dq6+vR0sGN6iMzRV6ykJCQs+ePbt7966IiIiXlxcaODEx8cuXLwEBAXfv3t27d6+enh6UbE1ISKirqwsKCvL19TU2Nh47dmxKSkpubq6QkND169f19PRqa2vJZHJ8fHxhYSFuxVRra2tpaWl8fHx9fX1FRUV8fHx5eXlra2tqampCQkJbW1tFRQXufli+fPnVq1eLi4u3bt1qY2MDZZVSUlJSU1MbGxtra2sTEhJKSko+f/5cWloaHR1NH2FcXJyXl5e3t/eJEycKCwt/zd38W/GP9av+80hISJSVlZHJ5NWrV3t6eiIIUl5eHhkZmZ+fjyAIhUKJj49///59d3c3DO/h4bFz504EQeLi4mD3/eDgYHR0dORfkEik9PT0yMjIhIQEOLTw5csX9L+xsbE/k9vIyEg4AhEfH/8zY0tBQUGnT5/G7kHHgX6Yp0+f3rhxYyRRoWeBQ0FBgX4nHAEaIS0tLTk5OSMPP2oYGhoabhv7E4VIJGL3U6nU3t5euE2j0Wg0GvoTQZC+vr7h0iWTyeg2jUaDX0vfi76+/sePH7EpYqFSqbgMMEkUqgSQSCQmyWEPoVKpBAIBjZBGo5FIJGz8EAqFQiKR6EtmuFyhwK+owcFB+Nn3r4O7H9A6ZHBwEBnmFJjw7NmzmpqasrKy9PT0e/fu/cJ8/iaM2m8+wEhkUk5OzsLCYtq0aVQqdefOnbKyskuXLrW2toad++jKAXl5eQRBaDQaNzd3ampqRUWFqqpqf39/d3c3lUr18fGZOnXq1q1bKysrY2Njp02bdvPmzalTp6Iru8H/SkFitUCJRGJCQsKLFy86OzsfPnxYXl5eUFBw79498Je+CSo+SaPRUlJSQkNDv379ikbb3t7+8uXLx48fd3R0dHV1PXz4MCMj48qVK+gScgAABwfH4ODgrVu3Hj16hCAIlPQ
E/ys7CQD49OlTUFBQQECAk5NTe3u7o6MjnDU3MDDg7+9/+/ZtqEONFZ/s6uoKCgqCs/siIyPhYkEAQHFxcWJiYkhICKBTaaFSqW/fvg0ICIAv6TQaLS4uzs/Pr6GhARXkxO4Ef023gxqbaNFRKJS9e/cGBQW1trZiA/wXgI4iDLexP1F4eHiw+9nZ2eFIGwCAjY2NjY0N/QkAYKhpB8FeSjY2tm/OQ2ZIf39/f38/NkUs7OzsuAwwSXTMmDE8PDzDzd2lP4SdnX3s2LFohGxsbGPGjKFXEYLTcelLZrhcocA+QIYro/4VcPcDOuILB/YYngITXFxcXr9+XVJSAieI/sJ8/ib8Ftfs7wMnMgkAgCKToaGhEhISU6ZMERYW1tfXv337NvxvWVmZk5PThg0bHBwc4NgvPz//+PHj+fj4pKSkxMTExo4dy8PDIy0tzc3N3dvba2ZmJisrO2bMGDExMWynIioFKSAggNUCff36dX9//5w5c+DUwa9fv86dOxedgA4w4pN1dXVv375dv3492qpRqdQdO3Zs27Zt9erVq1evFhAQ8PPza2trW758OU6ipby83MDAIDg42NvbG0p6AgBwspNcXFw3b95UVVUtKyt7/PixtbX1qVOnAACXL19etWoVOzu7ra0tTnySg4Pj5s2bZDI5KCiov79/8+bNL168AAB4enquXLnS19e3s7MTV/62trYzZszQ1tYmEAgAgKSkpN7e3tmzZzs5OaGCnNidAIArV66sXbuWSCRiZVSDgoJmzZq1fPnyCRMmoAF+3W3C4m+hvr7eyMgINn7/dl5YfB8fPnyYP38+BwfHtm3boED/KGOUt3z0IpOQmpoaKNAMABATE0NH9aZNm2ZraxsbG3v69Gl0lldGRsazZ8+io6Phz9raWk1NTRMTE2jKg8aJ3UalIGtra7FaoCoqKufOnUtOTp44cSIanuHr2MSJEz9//nz8+HEo5QUAqKioGBoagp+wZDK5pqZm3LhxM2fOVFBQwI1KzpkzZ+rUqbt3705JSUF1I7Gykx8+fIC6lBISEvLy8hMnTuTl5YXdPp8+ffr69euiRYsOHTqEE58UEBCA0+RevnypoqIyZswYOCfwzJkzUVFRBAKBXg7q3bt3M2fO5OHhERUVBQCoqakJCwsXFRU1NjaiYXA7Z8yYAT0xhpNRRQMwuN4sficmTZpkbGysr6/PUC6Vxe/M2LFjNTQ0NDU1RzIz9k9kNLd8CCORSXRPcXEx3C4qKoIvNWh4KSkpCQkJ1A1OSUnJ2NjY1tYWfn5NmTLl8uXLT58+ZagHhgOnBcrBwZGYmJiUlOTv7w8lPxAEQYfrUahUalNT0/v372VkZFDjPVFR0fz8fNhty8fHh7bcw8HGxjZr1iz0pJjITqLAwHPmzFFTU+Pi4oLmDwAzAQz+5eXlhSKfFRUVJBLJyMhIQ0NDWFgYoZv8QiAQYKsM/+Xt7V1VVbVo0SL4E54Lbuf+/ftPnTq1bds2ehlVGB4N8M3CZ8GCBQuGjFpnWlRkUkxMLDo6GopMVlZWNjQ0ZGZmLl++PDEx8cmTJ+PHjy8pKbG3t+/s7IyNjS0oKPD09CwtLZ0/f76ent7Q0FBubu748ePHjRuXmpqqpqbW0NBQVlY2bdo0bW1tY2NjDw+P/v7+qqqq1NRUuNAKgkpBiouLQy3QlStXHj9+3NfXV0ZGZvXq1QsXLhQVFb1+/XpjYyMbG1tpaSnUDu3t7YXik6tXr37x4sXy5cvhKiIAgLCwsL29/e3btydMmGBtbU0mk6FQJ41Gq62t7erqglqOEydO/Pr1a1hYWHFx8alTpz58+ABFLKHsZHNzs7i4+Jo1a+C4WnNzc35+fn19/cqVK5uamvLz8/fs2aOiorJx40bYs/ro0SM7OztoZwHX1Kemptra2m7duvXDhw8bNmyYPHlyc3Ozo6Njf39/YGAgAACeBVxEePny5f3792tpaQ0ODmZnZ1MolFevXhGJRDiBDcpdYnd
WVFRcunTJ2tp627ZtwsLC2KJbtGiRj4/PjBkzHBwcYAAAwI4dO06dOjV//vx//P5iwYLFH8x/SLeTHjKZTCaTf2zo/rtApSBpNBqZTObg4IA9nFBkkkKh4Do8YRgAAIlEwq2kJpFI6MJhJjBUBEWGkZ0cLrdoVPTikzQajUqlwukGFAqFnZ0dfhfSZ2xwcJCLiwtBEBgDVByFSaByl9idNBqNSCSi/WPYzJBIJC4uLmyA79JNHWXQaLS0tDQ5ObkfswBsbW0tLi4euRTkd0EkEj98+KCurv5rr05WVpaYmBgUE+/q6srMzOzt7d2yZcvPzzEhEolxcXFaWlronubm5srKSnSs4Y+jqqoqPT192bJlDK1gWIzm3s5vMmbMmH+g2QOYeVbs7Ozc3Nxo8wBbDvrWYsyYMezs7Ozs7PQCIlxcXCOZpsXwvIaTnRwut2hU9OKT7Ozs6Cw7Tk5OdnZ2tDmnjw3+F/6EVSFMApW7xO6EU/IYZgZOXcMG+G82e1CJv6Ojw9LSEqc5MhKgSMfbt28dHR1/LAPf9Deor683MTH55fY3VVVVcK0bgUC4ePHiunXrCATCN80aR0J1dfX27duxe9ra2srKyuD2SPwcvsvz4bv4gZjb2trc3NyWLFkCBzhY0POfbvlYsPgTOX/+fHV1tZiY2A/I2tXV1UE7qkWLFv1Y6iQSae/evczDTJs2Dc5p+rUYGBjAuU5RUVFQo8vExEROTu7nY1ZQUMC98M2dOxc6QkRERMA5zEwYSZn8GOj1+i4SExM5OTnl5eW3bNnyd+RqFDBqx/lYsPijyc7O3r59u62tbXh4uIWFRWVlZUdHx4QJE7S0tIKDg0VEROCwblRU1JMnT7q6uuBY6cOHD0kkUkpKyvPnz+3t7QkEQk1Nzblz51BXo8DAwJycnOjoaBERkc7Oztu3b4eGhl66dGn16tW3b9+ePXt2cHCwubk5Dw+Ptrb2pUuXAgMD1dXVLS0t0Yyh/gZ6enrnz59ft27do0ePPD09CQRCeHg4XFUGhUsAAF5eXl1dXXv27MFOyHrw4IGcnFxOTs7+/fv37NmzZMmSurq68vLy0NDQjo6O+Pj4/Px8MTGxy5cvJyQkVFVV1dfXi4uLGxkZnT17dvr06Xv37g0PD+/r6/Pz83v+/PmaNWtsbGywIQ8ePIimRaPRTp8+jWYyMDAwMDBw+vTpYmJiysrK2EMQBPH29g4PD1dVVT127Jirq2tGRsaTJ0+8vLwEBQWXL1/Oy8s7XN4UFRVhmcjLy+/cubOkpMTHx8fX1/fdu3fnz59Hr0JFRQWBQAgNDXVzc5OTk0PLAbsgij7D8HqtXLnyypUrM2fOTElJ2bFjx7Rp03BFh75qtLS0xMXF1dXVvXv3rq2tDT1fIyMjLy+vOXPmREdHX716NTk5mUgk8vHxnT59Ojg4eObMmejN4+/vb29v39TUJCsrGxMTExoaOhI/yD+Mf2rJPAsWLL4P1IUgNDS0rq6usLBw1apVCIKgSvxaWlpxcXEIgsyZM6ehoQFnVuDi4mJlZYWLMzk5WV9fH0GQrKwsVVVVBEECAgIOHDiAIMjDhw8pFIqLi8ulS5cQOvF+LNDfgEgkPn78GPnLSeDVq1cHDx6kUqnQ7mDmzJlhYWExMTG4Y3GuCDgzB6znQFNT09q1a2FCL168QBDk6tWrt27dQhAEzeT169dv3bo1NDSEC4mCyyTWLAJ3iJCQEJFIbGtrmzZtGoIgKSkpWlpaCII4ODi4uLgg/+uHQJ83WCboRnV19YIFC2BW4VXAmWMM5w6ByzB6vW7evHn37l0EQYqKiiQlJWk0Gq7osJH4+PjY2NggCIKeL4IgmzZtgn4Xrq6uRkZGTU1NCIKcPn0aSpXibp5Hjx5BB4+DBw8GBwcjow5WbycLFr8pqAuBsrJyREREaWkp/UIaOOQpKCjY1dWFMytAjRGGAx4rJCTU1dUFAFi+fLmfn197eztMBSfeTw8PD4+IiEhYWBi
FQqFQKNra2n19fUuWLEEHX2/cuAHlm/v6+vT09PT09FxdXXGuCDgzB6znQEdHBxxL5uHhgUoU9OPNcJy4rKwMF3K4TKJlQn8IBwcHDw8PWhr0aTHP23CgKeLMMYZzh8BlGN2PGnEoKCjADOCKjnnq2BhQc4ycnJzg4OAbN24AOqcLeleHUQar5WPB4nfn8OHD8+fPR1dnokr88O0V3WBiqoAy3LFUKtXQ0NDQ0BB+sqDhEUZzv+HCyi9fvnh4eGhra/Pw8CAIUllZ+fLlS1tb26NHj8JgXl5eR44cqa6uHjduXEhISEhIiIWFxXCuCNDMAes5AABISkpqb28HAKBGDQhmXSn2xHEhUXCZRPfTH4ItDewGWmLM8wbLBD2Q3hRiwoQJWHOM4coBl2E0ddSIY3BwUEREBDuMivPBAHQODBA0BmiOQSaT9+/f7+XlNXbsWOhgxfDmYXgDjAJYLR8LFr8jWBcCUVFROzu7Dx8+VFZWpqSkQCX+jx8/lpaWxsXFVVVVVVZWpqamYs0Kpk2blpKSkpaWhvsamDVrVkFBgbu7Ozy8vr4+Njb269ev3d3dnJycVlZW9fX1kZGRycnJOPF+bCTQ32BoaKiwsPDSpUtQzDYxMdHLy0tMTMzAwAAuZq2urrawsFi/fj3WBQnrFwG/2LBmDljPgdmzZ58+fRqurCWTyZ2dnWlpaWlpae3t7RkZGVlZWc3NzVlZWZ8+feLn58eGxGaVm5sbm8moqChoFiEmJoY9JCsrq6OjIyUlJTY2tru7u7CwMCoq6uvXr9XV1UuWLPHz84uMjGSSN7RMuru7tbS0du7cGRUV1d3d/eXLF/Qq6OrqYs0x6MuBYYanT58Or5e1tfXg4GBAQICXl5enpycMjC06NIbe3t4PHz5kZGRUV1ej5hgAgHv37r1//z46OjoxMfHatWs3b97k5uZubW318PCIiIjA3jxz5sxJT08vLCxsbGzMzs7OyMgYfe3ff3o9HwsWfwpEIpGXlxddpolbc4mCIAictsAkquGOhYtNubm5GS4GxUKlUmk02pgxYwYHB7m5uREEgUstqVTqNxfIwgqHQCDAnjoDAwMrK6uFCxeiatFQEh0VXx4YGBjhSp7hQmIziTvxEUaOlhiTvKFlAgAYGhqCC0/pu0wJBAIU6caVA5MMY69XX18fPz8/bCnpi24koCoT9Izk5hk1sOZ2smDxBwAXYqK10nBO8SMxVRjuWLjYFAyzGBQLBwcHrNNhVGxsbOhaTOZeCuCvkTm0sqY3c8AZUIy8Ih4uJC6TPxA5eiCTvKFlAv5aZkrf7AGMOQauHJhkGJttbHjmPhjDMVyzB37CkeNPhNXbyYIFi38HlpnDD8Mqup+E1dvJggULFiz+W7C++ViwYMGCxX8L1jgfCxZ/MFQqNSkpae7cucyX7iEIkpeXx8fHN336dLintra2vr5eWVkZKwP9M0AbEOw0RZS0tDQymayiooKTlk5JSenu7l63bh0cHRwcHMzLy4O5XbZsGRqssrKyra0Ne+C8efPgwGdxcfGsWbNGnkOcCPXXr1+HhobmzZs33CEZGRmDg4NSUlLi4uLMR9S6urpKS0vhtoiIiKys7D/p1U5/fYeDTCZHRUXJysrSl1t5eXlHR8fQ0JCCgoKYmFh3d3d6evrSpUtHoYAL65uPBYs/mpaWlsOHDw+3kBmlt7f34sWL6OoCGo3m4+Pj5eUFMDLQP0NWVlZ+fj7DZu/JkyfFxcVeXl6o0yTk1q1bYWFhDg4OmpqacI+vr6+Li4uLi0t3dzc25IQJE8zNzZ8+fQpnUQYFBZWWlhIIBEdHRwsLi5FnEidCTSKRHB0dIyIihgu/e/fusrIyISEhU1PTkpIS5pGPHz8+JSVl8+bNCIKEh4fPmTMHriX4Z8Bd3+FAEOT69eudnZ379u2DC2ZQhoaGTp486eLi4ufnJyIiEh8f/+DBA1VVVWtr6y9fvvyNWf+XYH3zsWDxByMlJYU6ODJh/Pj
x2K8BdnZ2RUVF6HJgYGDwk3kgkUj+/v63bt2CPykUyuvXr1FlEw0NjUmTJqmrq2PlNAcGBpSVlZWVlfv7+yUlJbu7u8eNG/fhw4czZ87MnTsXFz8/P7+cnJycnJyioiIAYNasWRQKhZ+ff9OmTZGRkSPP59y5c2HkERER1dXV5ubms2fPZuIvHRcXd/bsWQUFBX9/f7i4m0Kh3LlzR0BAYPz48cHBwW5ubn19fUePHtXX1zc2Np4/f76QkNCyZcuWLVuWlZX1/PnzGzdufPnyxczMbP/+/Tw8PAkJCW5ubm/evPHx8dHV1S0rKzMxMZk2bZqNjY2srGxcXJy6unpxcfHp06dHck2x4K4vFuzl6O7utrGx4ePjQxDky5cvurq6aLDnz59v3Lhx27Zt0IQ9KSlJRERk7NixCgoKzc3NCgoK35Wf3x/WNx8LFr8pT58+dXV1Xb9+fUVFRXh4+LJly+7fv6+pqeno6Ojl5aWpqRkTEwNDvnjxQktLS0NDg0AgEInEe/fuOTk5bd68mUQiNTQ0XL161dfXNzExEQBAJpMdHBwCAgL8/PwAAL29vZaWlvfv3+/s7NTS0rpy5Yq5ufnu3bthtHZ2dp6enmvWrLl06VJfX9+DBw/ev3/v4OBAn09shyGVSg0ICEB/Tpo0iUajubi4QJUsCB8fH+x15OHhkZWVFRQUbGpq4uHhWbt2rYaGxsDAAH1pUKlUKpVaVVX1/Plz2P/G8BMTAGBiYnLy5MmmpiYlJSVos2BmZtbc3Ozi4mJsbEylUr28vDIzM2FjVlRUdOHCBRUVldbWVlw8W7Zs0dDQiImJkZCQ2LBhAwDg8uXL48ePNzMz2759+/79+wkEgqysbG9vL87GLz8/PysrS0dHBwCgoKBQUVFhbGy8bdu2I0eOAAAWL17c0tJiYmKiqKh44cIFAICBgcHhw4cbGxt1dHTOnDmDa4xv374dGRl58ODBz58/018m3PVlWG7o5RASEuLj4+vr64uJicGqkAMAEAR5+fLl5MmTob+0kZERutpdTU2NYcx/NKyWjwWL35H09PSYmBhBQUFOTk5XV1dlZeWWlhZra+tr164FBQUdOHDg+PHjr169goE1NTXfvXvHw8Pz+PFjZ2dnAoEwceLEwsLCiIiI48ePb9q0adeuXUuWLAEAeHt78/PzGxgYwPd9AQEBKSkpMpksLCwsLCw8f/58d3d3KGhSVFSUkZFx8ODBuXPnysjIEInEp0+fqqqq0hvfZGVlwWHC/v7+hoaG+vp6IpHY0NDQ0NAAdTvj4+PLy8tNTU3pTzMsLOzq1asAgEmTJj1+/Pjr169cXFx3796lD5mSkuLi4uLq6trX18e86MzNzYuLiyUlJdXV1ZuamgAA8+bNk5CQWLRoUVdXFwcHh6KioqKi4oIFCwAA0tLS9vb2CxYsiI2NxcVz7949c3NzHR0dMzMz2Bo9e/YM/UTesGHDlClTAABsbGxoG9zZ2eno6Hj//v1JkyZBzVUY4NOnT9HR0VDnjI2NDUGQnp6esLCw1atXAwCw2mNSUlK4Mddx48ZpaGjMnz8/NDSU/jLhri8WhpeDSqUGBwdDmRhs4AMHDsTHx7969crU1HRwcHD69Onm5ubu7u6NjY3/5IDlP8YoPCUWLEYBOIFjdnZ2OMolICCASgm3tLTAwLCSVVdXr6urw0kPJyUlycrKgr/WwicmJkJDO9SjGF1wjYoU8/PzEwiEKVOm1NXVNTQ08PLyzps3bziFZQBAV1cXXOKdmZnp4OBw48aNr1+/Ojg4ODg4EIlEAMDatWtDQkK4ublxxzY2Nra3t2/atAndIyQkdPr06fLycvoCWbly5ZEjR27durV582bmRbd06dLGxsbGxkYODg5/f//s7OzFixeDYZaW42S7Ufr7+zk5Oc+dO5eTkxMWFubh4QEA6O7uxn5o0kcoLCx86tQpb29vTU1NbDeyoKCgiIgIKhbT2dlpZWU1YcIEbA/wcDBUEgd/XSb
c9cXC8HJwcHDs2bMnMTExPDycPi1NTU1FRcWWlpbXr19LSEhkZ2enpaXBr8BRBqvlY8HidwQncIz913BrcFtbW5cvX46THhYREUEVmWk0mqioaEpKCvoTMFI3hj/HjRt38uTJT58+HT16dOHChcMpLAMAZs6c2dnZCQBYtWqVu7u7i4vLggUL3N3d3d3dYbsCmTRpkpiYGPhL3Lmrqys5ORl+CKKzIgEA7e3t2ImdECqViiY6c+bMb5be3r17TUxMduzYIS8v//DhQyUlJcBIhJpenxqlvLw8JCQEAKCgoKCvr9/Q0AAAUFNTi4+PR8PgNEJheULGjRuHtaSfMWOGoqLijh07SkpKEAQRERHx9vZOTk7GxjacODhDJXE0PO76YgMwuRw8PDyzZ89Gk8Aexc3NPWnSpMTExEmTJsEXkezsbEZl/GfDmuHCgsXviK6urru7u6KiorKy8tmzZxMSEtrb28vKypKSkiorKysqKpKSkqqrq1tbW+fNm/fq1aulS5eOHz9+y5Ytc+fO3bBhQ1xcnKKi4pUrV65du2ZjY5OUlFRQUEAmk62srKDIck9PT21tbWlpaVpaGicnZ11d3ZcvX9LS0qZPn15dXZ2ens7NzX3jxo0ZM2a8efPG0NBQWlrayclp9+7dOIVlAMDWrVtDQ0Oxn25Ydu7cqaysLCwsbGpqysfH9+zZs9TU1Lt372ppaXV0dFy8eHFoaMjNze3du3efP3/W19fv6uo6cOAANoa8vLycnJz+/n5DQ8PJkyfDnUQiMTQ0tLq6Oj8/n35Zwu7du7OyshQUFPbt2we/IKlUKlaE+vz583Jycp8+fSKTyc3NzRkZGRISEjQaDe3Zk5OT2759e3V1taioaGFh4bNnzwAArq6utra2nZ2dEhISQ0NDGzdurKmpqaqqioyMNDIyCgwMbGhocHZ27unpSUlJ8fHxAQAUFRW1t7e/ePGCi4srJCTExsYmOzu7tra2rq7Oz88POtCuWLGitLS0pqYmJibGxMQEeyLwW9/KykpMTCwyMnLHjh24y4S7vmQymYmAXEtLy4EDB4yMjHp7e+GwK7wcd+7cWbt27c6dOydMmHDp0iUODo59+/bduHGDi4srKirquybQ/imwNFxYsPh9QQWOmTM0NESj0dD+Lpz0MJlMplKpHBwcsE5EEIRAIPDw8DDX2ExNTUUQZNKkSZ2dnaGhoXAuBkOFZQDA3bt3Dx06xFD1kUgkdnV1SUlJwZ+oLjZ9yMbGRhEREYb/+gHgKSMIQqFQ6M90ONluLFCQurm5eeLEidjGvq+vj5ubG6fh+ffxTSVx3PVlDvw6R8cgsZH39vZKSEhgo21qapKUlBxJtH8crJaPBQsWDDhx4kRfX5+2tnZLS4u4uLi2tjaTwDQa7d27d9ra2sPNt2TB4reC1fKxYMGCAVQqNTs7u6GhYfHixWgfIwsWowNWy8eCBQsWLP5bsOZ2smDBggWL/xaslo8FCxYsWPy3YK1qYMHiT4JKpYaFhWEVF1tbW4uLi1etWkUfuKCgACqBcXNzT5069ZeI7v+7pgT/AC0tLWJiYkxOikQiff78GbtHWFh42rRpvzAPRCIxLi5OS0vru46ivze+SXh4+Nq1a+E014qKiqKiIkFBwZUrV35Xun8io+qWxVJQUPDp06dPnz7l5ubi1BmGhoaSkpJCQkJKSkry8/PhzvLy8k+fPiUmJqJ+KJ2dnREREZGRkfX19YWFhdXV1WiEvb29aGw1NTVwf05ODi6h76K7u3s4+d3q6uqQkJCkpKSmpqaKiorW1taEhITvjZ9CoXz6i/r6+n9gfBdBkNzcXOwi5R+IITo6+m+VkMjKyqIXJcFCpVKhqn1vb+/bt2+Zx1ZQUADVIP8+hoaGcnJy4Da8D9++fevo6Mgw8MSJEw0MDN6/f19XV7dnz55jx479fAb+XVOCfwAhIaFnz57hVqlj4eLiqqioWLlyJRsbGwcHR09Pj6ur66/NQ3V19fbt27/
3KOy9wRy0BsvPz4eu7llZWZGRkZs2bfr48SNuRfzoBBmldHR0TJ48+cKFC6Ghodra2kePHoX7MzMz1dTUwsLCamtrvb29paSkEAQZHBzU1dXdtWuXmZkZVIvw8fHR0tL69OlTRUXFyZMndXV1BwYGVq5cuXPnzqSkJBcXl40bN8bHxyMI0t/fv3z58j179iQlJe3cuXPfvn0/luH4+HhpaWn6/VZWVtAoJDc3d/369ffu3fP09NywYcMPJOHv78/FxZWamurp6amkpFRaWso8fE9Pzw+kgtLd3a2jo+Pj4/PDMZw7d662tvbBgwf9/f0/kxMmvH79Oj09nUmA/v5+Ozs7BEFyc3N5eXmZhOzp6TE1NXVwcPjFWRyG2tpac3NzBEGysrKY3A8qKipQC6a8vJyNja2/v39oaMjOzs7c3DwgIODRo0dPnjzp6OjYs2fP1atXb9++ff/+fQRBkpKSbGxsrl69ampq6u7u7ujoiI0zNjZ25syZcHvXrl2nTp36G8/z36ChoeHWrVtMAhQWFvLz88NtCoUCXyV/LaKior88TsjQ0NCWLVtwOy0tLWGF9h9h1LZ8CKNnnkAgyMjIJCYmomHg/e3l5eXp6dnd3Q135ubmiouLoz/RYCYmJpcuXYJ7iouLRURE8vLyEATZtWsXrO+gz1l1dfUP5JZAINC3fG5ubmvWrEF/9vf3Q6XaH2v5sI/rwYMHjx8/ziTw+/fv3dzcfiAVLDY2Nj/T8s2ZM2dwcPAn8/AL+WZldO/evV/V8uXm5k6ePLm+vv7evXurV6/u7OzMzc29ePHix48f586diyDInTt3li5dGhUVlZWVpaSkdOvWrZUrV3748AEXD3wKenp6Ll++vGnTJrjTy8sL3slUKhW+AO3fvz8yMhJBkLlz55aUlGRmZlKp1MjISFNTUwRB4Kp2FLTly8vLmzlzZkpKCoIgdXV1ly5dCg4OPnjwIIIg0dHRERER58+fLy8vHxgYuHv3rqOjo46OztDQEP3JEonEs2fPPnv27NChQx8/fuzo6Ni0aZO9vb2ZmdmuXbuwIbGpUKnUI0eOGBgYkMnknJyckydPYhPq6ek5ceLEoUOHNmzY8PHjx5MnT8bExED5EgRBzp8///Dhw9WrV1+8eLG3tzcqKsrLy0tHRyc0NBQmpK6uzuTNr7CwcOzYsRQKpb+///z58wiCVFRUnD592tfX19jYuK6u7vHjxxs3bjx69OjVq1fRo0pKStzd3c3NzS9cuIAgSExMzMOHD69du+bt7Y0gSHx8/OPHj+3t7R8+fIggiIiIiJeXl66u7u3bt7FJNzY2enl5PX361M/PD0EQuHHo0KFPnz6h9wa2HAYGBk6fPr1nz57Lly+rqKh0dnZmZWWNHz/e39+/vLx87dq1WVlZ5eXlS5cuvXDhgq+vr6ysbEZGBi7m4crhz2X0j/P19va+ePFi48aNfHx8cXFxfX19qqqq6H+trKzAXw4dJ06cePz4sb6+fmBgoKqqKvSpwgbDoqCgoKGh8fjx43v37qE7s7KyJCQkcO7Y9fX1jx49mjdvXmRk5MOHD1+/fu3u7r5169bXr19fv359xYoVnp6e/Pz8lZWV9Jl/+fKlkZER+pOPj2/fvn0FBQWdnZ23b98ODQ29dOnS6tWrnz59SiAQQkNDYVulq6trYWGRmZmJIIiGhkZUVJSCgsLp06exMWdlZW3fvt3GxoZAINTU1Jw7d25gYCAnJ0dISCgvL+/u3bteXl6CgoLLly+HkvY4YmNjKysrOzo6JkyYoKenZ2xsDGWCCQTC8+fPGxoafHx8pk6dmpiYOGfOHOyBmZmZO3futLa2Dg0NlZKS8vHxiY6Orq2tDQsLMzU1XbJkyf79+5ctW/bu3bsLFy60tLT4+/urqamVlpaiebt///7z588DAwOnT58+bty4r1+/SktLAwCSkpLOnTv38ePHioqKoKCgpqam8PBwLi4uLi4ubBkCAJ4+fTp58uSQkJCrV6+
ePXt2+vTpxsbGhw4dYhhPYmKipaUl2itOf0GfPHkCMyMmJgZ1EVtaWvT19Y2MjCwsLDw9PSUkJKBhzXcxf/782bNnNzY2Hjx48Nq1a2PHjq2vr9+7d+/UqVPr6+sBAEpKSqmpqevWrcvOzubm5j5x4sTUqVP9/Pyg/D+WsLCwoqKiuLg4dASIjY2tvr4+PT09KCjo5s2b4C8py+zsbDKZLC0tjRNSWb58OS5OaEpQVlaGmhIMDAyoqKisXLnS2toaQZCQkJAVK1acOHFiYGDA2dmZQqHIyMhA+wh6yWkXFxcxMbHdu3crKiqqq6s3NDRARwI7OzsFBYWOjg70mcKmwsbGdvr0aWj1Xl1dfeTIEWxC8fHxMjIyX79+jYiIGBwcrKioUFdXf/jwYU5OjqioaEZGxpUrV4qKimRkZMaMGXPjxo39+/fLy8tfunQJrtmXlJQsKiqiP3EUMpns4uIyNDT04cOHK1euWFtbOzg4LFiwoKen5+TJk+fPn7979y5OFfrMmTN6enoLFiywsrI6cuQIkUjcuHFjT0+PpaXl7t27HRwcYmNjBwcHg4ODAQA0Gm3Xrl26urrKyspYU9+kpKTMzEx3d/eSkpKcnJzPnz/fvXt36dKl1dXVq1evhvcGthyio6OnTZs2NDR04cKFhoaGhIQEPT09Xl5eQ0NDAAAczpSTk5s8efKaNWvU1NSeP3/Ozs6Oi3m4QvhzGbXjfJCwsLBbt25FRUVBi6nKykrcID+UIMI5dAwXDMf06dMbGxvh9qdPn9TU1Hx9fXNycnByU/BZ1dLSev/+PYIgixcvhve6ubn569evi4qKIiIiduzYYWZmRp/EcDmBNZ2VlZWfnx/OzkZeXp6Dg2Pjxo2PHz8OCQnR1dWFtTM8nEQi3bx509jYeMuWLSdOnJCRkeHi4oqIiFBSUrKwsDh58uShQ4fa29t9fHywTi70wIdWR0fH19f3u5xTlixZ0tXVtXfv3sjIyMTExPj4+Bs3bvDy8sJKR0JCgo+Pb/bs2Z8+fdq0adPYsWP19fWlpKSweXv06JGSklJdXd3du3ft7e0lJCRmzZrl6OgoJibGxsbm5OTU3NxcX18PK4hdu3bRizq6ublJSUmZm5ujHj2CgoLDxbNy5UpYmwx3QdHMnD17FgYQFxeHbmoAgK6uLubqJ0wwNTV98uRJVFSUqqpqWFhYTU0N9K+htwgYznAAoq2tbW9vHx8f7+TkFBQUBHfy8PAICgpiX+8+fvy4devWuLg4hvphXl5eenp6enp6HR0dgJEpgaysbF1dXVJSEgCARqPZ2treunXLwsJi/PjxOPsI+sg/f/4MJ1koKCgMDQ11dHTgHAnQkLhUJCQkli5dGhkZ2dLSMnHiRFxCnJycsMnk4eEREREJCwujUCgUCgVnQ1FeXi4nJ7dz587bt2+j6sy8vLz0jn1YuLi4jhw5Ymtr++jRI+wpqKioFBQUoEljaWxs3L1798GDB4eGhoSFhZWVlSMiIkpLSykUSllZGRQJ4+HhgS6yHBwcPDw89NdUW1u7r69vyZIl3NzcaWlpUG9MRkYGvvHAewNXDmhhCggIYOcoAEb3ElThoY95lDHKWz7cMy8jI1NTUzOcCzPq0CEjI8PQJwVHUVERWqsuXbrUzs4uPj5+aGgIFwz3rOLuwuTk5OFMRgAAw+UEW9Ph7GzAXyq3AIBx48Zxc3Pz8PBAsT4AABcX18mTJ589e3bu3Dk2Njb0+aypqenr64OPAXx0mZ879qEF3+OcAgDg5OQcO3YsOzv7ihUrcnNzcZUOfZVBnzdsmOHse7AVBC4DV69e1dDQCAsLA5gnn4kNEK52wF1QhnWckZHRx48f6+rq+Pn5f1jQS0dH58OHD+Xl5dBrZtKkSXA/MmLDAYAxEODk5OTh4UENBERFRWfOnHn+/PnCwkJ4rLq6+t27d/ft24eK92NjO3DgQEhISEhIiIiICENTAi8vr9bW1jVr1sCR8v7+/szMTBEREQ8
PD5x9BIVCwfkDzJ49Ozk5GQYQERERFRVF/4U7I1wqAAArKyt7e3t4v+ESQo/68uWLh4eHtrY2Dw8PgiA4GwoxMbGoqCjYwMBuXgBAV1cXnK6JyyqE3jsCPYX29nboHU9PX19fZmYmAKC6urq5ufnw4cPz58+fNWsWgiATJkxISkpqb28HAKDeC4DRNa2srHz58qWtre3Ro0clJSWDg4NhDqGhBww8XDmgUWEvMZoQdg99zKOM0dzy0T/zq1atkpGR8fT0RMNUVlZiJ3FBh469e/dmZGTk5eWh+0tKSsD/upCkpKTk5uaam5uDvx4DdXV1a2vrzZs3w7lSKPTPKgRu07vGYDl06NCTJ0/QCBEEQaelgL/uUSZ2NuB/Kw4ajTbcrK3JkycPDAzAVhY+umjFSl9PAQCwDy19ckycU7A0NjaqqqrSVzrY2BAEoc/bcBFiM4OtIHDBJCUl8/LywsLCsrOzGbYWDE8KZbgLimXMmDEWFhbbtm373onpuEi2bt26bt26mTNnSktLb9y4EQAQFxfX0dGRnJw8a9asgoICd3f3jx8/lpaW1tfXx8bGfv36FX3LAQAkJiZ++fIlICDg7t27e/fu1dPTMzIyIpFIaWlp2dnZISEhDx8+vHPnTkdHR2FhYWJi4ubNm+fNm2dubk4gEMhkcnx8fGFhIRy9Runo6EBNCezt7QMDA6EpAZwSeffuXThA9fjx49jYWAUFhXXr1h07dszDw2PDhg1XrlyZN2/ehQsXcN2/1tbWg4ODAQEB6Ig7dCSoqqqCjgRoSFwqAIBly5ZNmTJFXV0dAIBNaNq0aSkpKWlpac3Nzdzc3NCLlUql+vj41NfX37hxw8fHx9bWNjw8XFxcfMeOHQsXLoRdPvBFh0QizZgxo7q6GtddDwAYGhp6+fJlf38/6gwMALh379779++jo6MTExOvXbsWExNTVlaGm/V69epVHR0dIyOjDx8+SEhIiIqK2tnZffjwobKysrS09PTp0/Pnz4dOGrDvJCUlJTY2FpYGGklycrKXl5eYmJiBgYGOjo6oqOjcuXN37do1efJk9N7AlsOcOXPS09MLCwsbGxuzs7PhGJ6CggK0HiwtLY2Ojm5tbS0tLY2Pj6+vr6+oqIiPj8fF/EP37+/NzwwS/s58/PhRSEho69atd+7cMTY2trKyolAoCIJUVVVpaWmdOHHCx8fH3d09Pj6+v79/2bJlDx48ePXqFTqWGx8fr6amdu3aNR8fHzc3t9LS0uLiYgUFhZUrVz548MDR0dHKyqqmpgZBkOLi4mnTpq1ataqmpoZGo2lra2/YsKG2thbNiZ+f3+zZs+/cuTNnzhw3N7eXL19OmTKlpqbmxIkTy5Yta2trW7t27e7du69cuSIlJQWnzGBxdXXV0NBwdXV99OiRu7t7b2/vrVu3ZGRk6urqTp06NWfOnKamplWrVi1cuNDCwqKhoaGsrExUVDQgIKCoqIiHhyc8PDwjI4OTk/Pz58/nzp0DALx48QLGPDAwsHv37vXr1zc1NSEIEhAQYGpqGh0dfe7cOQqFEh0draSkFBERcebMmY0bN+Jydfjw4XXr1rm5uUlKSkZGRi5duvTixYuVlZUiIiL+/v6hoaGzZ88+e/bsunXrrK2tSSQS9lgxMTF3d/fnz5+7uLggCHLq1KmpU6fu378/MTGxvr5+/vz5p0+fJhKJ6enpY8aMefLkCZVKxeXtwYMH0HO8s7NzzZo1R48era+vX7x48enTp2tra6dPn+7q6urh4eHp6RkXF/f06VNc5g0NDVNSUuzs7BobG3V0dLZs2VJTUzNcPNCqOykpKSUlhYODIz4+HndB0cz09PQYGhrq6up2dXXB4tXT0/uZexhBEHjTIggy3MSQn4z/1zIwMIAgyODgIJVKpVKpsPmE/6LRaOgEXSqVSn9REATp7e2l0WjflQrcg6aCSwgLkUiEjuREIjElJSU5Obm6ujonJweds0YgENDAERERcXFx2ORGyDe
nQw8NDWEvJYwczXB/fz/2XBhCpVJJJBL2HBkmOlw5QCgUCu6pZMhPzu7+nfmP6nYODAwMDAygnSr0Dh2Qjo4OHh4erLvmj0EkEnl5eYeGhsaMGcNwhWxvb++4ceOoVCrq2oyjqalJTExsuP+CEdvZMIdGow0MDKDxQCcXGo0GZ6zhAsOT+gHnFElJSfiWgNrE9Pf3f7OQcXkbyblQqVQymUyfPRqN1tfXhx3i+l6+eUEBAHAKib6+/g+nMlqpq6sTFRVl2BP+z8DchqKmpqaysnJUDm6xQPmPtnwsRs6vradoNJqgoGBTU9PPv0/8tgwMDBw8eBAA4OPjw+RlhcW/BcuGggWr5WPxj5KdnV1YWCguLr5hw4Z/Oy9/I+Hh4UuXLsXO1GDBgsXvA6vlY8GCBQsW/y1G89xOFixGAUwEXUdIeHg4upjhb4VAIODWbv9NfP36FastMBwkEunjx49NTU3/QJYgCEZptqqqys/Pr6qq6tcm0dPTExQUhJvF3dXVBdP9T0hu/gpYLR8AAJDJZFTNuaKi4tOnT+jU8Kqqqk+fPmVkZFRWVsIA2AcpPz8f7kRXtjFRygYANDU1kUgk5pmpra2FSwJGTnNz88gP+Wb19E0R5x/mu6rgn6/xfxgqlZqQkACXbP/roItnfhhUlfjvpqysbO/evX9rEr29vSQSydHRMSIi4puB29rajh07xlAd6YdTZx4Ais40NzfX1ta6ubktWbIE5+qAg0qlfu+lOXr06Pr169EV9wAAAoFw8eLFdevWEQiEETa03zyRUQ+r5QMAgDFjxhAIhGXLlrGzs0+cOPHChQsbNmyAa9InTZp08eJFUVHRCRMmmJubP3z4EDt4IyIioqKiUlhYWFhYuHr16rCwMObq+NeuXfP392eSExqN5uPjAxcqjZy2traysjK4/c17+pvVU1VVFW4J1wj5ZtLfVQX/fI3/w7S0tBw+fJhJIfyTFQe9Ds73cubMGfq19n8HCxcupNcE+YVERES8ePGCi4tr9uzZIwk/ceJEqHrzC1NnHubt27cTJkywtLTMzMzk5OSUl5ffsmULk/Dnz5//Lm2wjo6OkpISfn5+S0tLdGdUVBQUkDMxMZGTk/tmJCM5kVEPq+X7f0CZrnnz5vHw8CgrK/Py8h4+fBgAMGbMGFVVVVlZWX5+fjk5udmzZ2On6U+cOFFQUHDBggVbtmw5fPjwpUuXhIWFp06dOmvWLG1t7bt37zo7O0ODNABAX19fc3MzVueTHnZ2diYrtYdj7ty5e/bsASO7p79ZPRkYGCxduvR78zCSpL+rCv75Gv+HkZKSmjhx4nD/raurw4mg/k14enq+fPny7t278Gd0dPSNGzcePnx4+PBhCoUSHh6+bNmy+/fva2pqOjo6enl5aWpqxsTEAABu374dGRkJFdSqq6vV1dWzs7O/fPkiLy/v6+urq6vr4uLCMEUTE5OTJ082NTUpKSnBq2lmZtbS0vL161cPD4/Dhw9fvHgRG76ysvLMmTMvXrzYs2cP1HijUqkODg5KSkonTpwAADx9+vTDhw9Q9hYbSXNz86ZNm65cubJ06dKcnBw5ObnU1FQajWZmZlZdXR0dHe3t7b1582aoswOhUqleXl6ZmZlQlKSoqOjChQsqKipQY+zp06eurq7r16+vqKigPy+ophYbG7t9+/a+vr4nT55s2rTp2LFj165de/z48cOHD3V1da2srL58+YI7UzT/uNRRsOnGxcVBpdmsrKy4uLjCwsJ3794RicR79+45OTlt3ryZRCLV1dXdu3fv2bNnBw8ebGlpCQ4OjoiIaGxsxBYUlgcPHnh6etrZ2bm6uiII4ufn19ra+urVK2ytEh4eXlRU5Ofnt3Hjxlu3blGp1DNnzpiYmNjb26uqqnZ1dWHzQCQS4YlkZGSYmpp6eXk1NjauX78+MjJyJHfIqGHUtnyDg4Pnzp17/vy5mZkZVLIY+UXl5OQMCAiIiYlxd3eHP0eSYlZW1pQpU9C
fWKVsuMfPz8/FxWVwcPDjx4/0h5PJZAcHh4CAAD8/P7gH9/w/ePDg/fv3Dg4OTU1N69atO3funLGxsbKycltbm4uLi7GxMfbhHK6qguCqJ2xCvb29lpaW9+/f7+zs1NLSunLlirm5+e7duwEAXV1dJ0+edHJyWrly5ePHj3ERoo+TjY2NmZmZpqZmYmIitrphUgXjzhRX4+PANgBDQ0O4hxwXODY21tPT8/r161BcEUtMTExkZKSdnV1FRYW7u/vevXsHBgaOHj164cIFGODFixdaWloaGhoEAgEbODAwMCcnJzo6ur6+/vLlyyEhIYcOHaKvbhhmfuTgBF1JJBJOvFRZWbmlpcXa2vratWtBQUEHDhw4fvw4VBUZN26chobG/PnzQ0NDpaWl4YpDBQUFEomko6Pj7u4+XKeCubl5cXGxpKSkuro67NWfN2+euLj4mTNnxo4du2DBghs3bmA1YqytrQ0NDXfu3Ll06dKTJ0/CfJ44cSIpKenZs2f5+fmoRCoAABsJFxcXKtC6aNGiTZs2FRUVsbOzz5kzR0JCAivliqbFwcGB1ZKVlpa2t7dfsGBBbGwsTrqW/rxIJJKCgoK6ujqFQsnJycFKrV6+fPnAgQN79uwhEAgKCgq4M0Xzj0sdgkt37dq1UGl28eLFS5cunTlzppaWlrOzM4FAgFoHERERR44c2bVrl7GxsYqKiqio6JQpUzQ1NaWkpLAFhQJb0IMHD165csXBwaGkpERbW1tYWNjQ0BCtVcaNG6eoqDhnzpzt27ejloHTpk0TFha+cOHCrFmzEhISsHmIjo6GJ6KkpDRt2rSBgQEpKSkpKamhoaGR3CGjhlHb8qHq79bW1kZGRjNnzvyuizphwoSQkJDTp09DaTHmvHz58vDhw/39/Wh7gFPKBgAgCNLa2iouLm5pacnws8/b25ufn9/AwAAK6g8ODmKf/9bW1qdPn6qqqm7ZskVSUpKfn19VVfXZs2fTp0/39PRctGhRV1cX9uEcrqqCYKunzMxMbEKoiDO9DvXLly+FhYVtbW17enrWrl2LjRBNWklJCauCja1uhquCcWfKXMIb1wD4+PjgHnJceKyyNu5fISEhHR0dJ06c4OXlXbRoUXd3Nx8f38KFC/v6+mAATU3Nd+/e8fDwQO1vNLCSktLkyZPXrVuHla5mZ2dnnpPvBSfoSi9eykRodPny5X5+fu3t7aiqKnqZuLm56WWLUaDhRmNjIwcHh7+/f3Z29uLFiwGd1DIaHifTDHPLy8vLzc2toqJSV1eHlUjFRYLVO7W0tPT29v706ZOKigpD/Wh6mEvX4sApVmOTnjNnTmRkJLyB6TOJzT8930wX0IlH5+XlCQoKAgCMjY2xXS8ME0KLFwCwfPnybzoAowqxOHHgbyqGo+DukCNHjujp6f1bgw5/K6O25RtO/Z3JY49DUVHRxcXFwMAANWQYjh07dri5uXl4eKC+CvTq+LGxse3t7bdv3+7q6oqNjaXvk0lMTIR99LCmwz3/EyZMMDIymjVrFpx7wsHBMW7cOACAqqpqc3Mzfe/lcFUVBFs9ZWZm4ioaXEUJ/tKhXrBgwYcPHwgEgry8/IQJE4YrjeEE8uljhtcCd6bMJbzpGwAmOvSATlkbC9ZMgOGJwHJTV1evq6tjGJi5FnlaWho0N2D4if9NcIKuzMVLsWuTqFSqoaGhoaGhhIQEwkj1GN1mqMi6d+9eExOTHTt2yMvLP3z4UElJCdBJLaOBmcg09/b2Lly4ECuROlwkAIDp06cLCQm9evVqwYIFDPWjIcOJdDOXrgV0itXYf504caKlpWXatGlw5AyXSWz+0dRR6NPFZQzQiUePHTsWztv68uVLV1cXGic2IfriBQB0dXUtWLAAdylxF5T+vwzzgCbKwcEBZzNAnTP6CJ2dnUNCQmDX1yhj1LZ8w6m/M1y/2NnZiSDIlClTYM2FTr/cvXu3kZERvGnA/ypWAwCePn0KGMlAM1THj4qKunfv3ok
TJ86dO2dgYHD//n1cHnA1He75r6mpOXjwYEREhKWlJfYU6urqli5dit7x6D3NpJbB0tvbu2LFClxFM9zzs3DhQh0dncTExOfPn9M3S/T1An11w7AKxp2psLAwEwlvJg0Awys7nLI2AABrJjBcFQAAaG1tXb58OTYweqbMtciXL18OzQ3Q7/7vQkdHh5+f39jY+N69exQKpbS09PHjx46OjjExMTQazdjYOCEhob29vays7OPHj5WVlRUVFUlJSdXV1e3t7ZycnFZWVvX19ZGRkcnJyVCVuLy8vLW1NTU1NSEhoa2traKigl45GgCwe/duCQkJBQWFffv2oT17OKllNDBOphkAICcn5+vr6+/vb2pqKiUldeXKlZKSkjVr1syZMwcbCZVK/fLlS2RkJDrX18rKau7cuQAAhvrRkCVLlvj5+YWFhX369Ck9Pb25uTkjIyMjI0NHR4dAICgqKlpaWmKntNTU1JSUlMTHx48ZMwarWB0VFYXKSV++fPn169eXL192dnamP1Ns/mHq2PnGurq62HQ/ffrU2NgYGBjY3d394cOHjIyM6upqnFr3zZs3TU1N161bl5iYKCQktHz58qtXrxYXF2MTQuNXVlbeuXOno6NjcHCwgYGBnJzcmzdvqqqqsrKy0DAkEikjIyMrK6u5uTkrK+vTp0/9/f04ieqjR49i84CeyPr16319fe3t7QcHB7Ozs+nvkB+4b/8YfkLz87emr68Pur86OzvHx8eXlZXx8fF9+PAhLCxs7Nix5eXl2MCrVq06cuSIvb09giA5OTnz5s1DncQpFMq2bdsQBMnLy5ORkVFTU7t///61a9dWrVrl7e39/v178JcMIAzPUCn7yZMnq1evbm9vRxCETCbb2Njw8PAkJCRg81BRUTFr1qzjx4/v379/1apVra2tWCnnwsJCKyurjIyMa9euIQiir69vZWX19u1bW1vboaGhS5cuycvLV1VVoTLTISEhEhIShoaGjx49oi+cFStWPH/+3M/P7+XLl8j/akZ3dHRAEefa2lp6Heq5c+fq6emZmZkVFBTg4oRJv337FlXBrqiomDx58sWLFw0MDHbu3FlSUiIjI3P16lX6a4HNAIlEYi7hjVWvJpFIZmZmWlpaDQ0Nq1atOnz4ME7yGKusnZycjP3X8ePHIyIi3Nzc8vPzyWTyggULoOWhpqYmgUA4fvy4nZ3du3fvHB0dcYG7urpkZWXd3Nyw0tWurq7Mc/Jj9PT00Gg0VMWYSqWidxoTqFQq9LJnolmMDK8cDWWyaTQaVtQYJ7WMyyT2Z19fHyq0TaVSu7u7RxIJ8r/C01j9aCxMRLqZlwxWsRrd2dLS8vz588bGxoKCgmfPnn3+/BmXSVz+Gab+zSuCE48mk8nYnzBOXEK4nDMptBGCywN6IjDmb8pkjz5GuYZLX1/fSAzSKBRKR0eHuLg4w/8ODQ0x9Or85SAIQiAQeHh40OmjqJQzvEwEAgF2choYGFhZWS1cuBD+xAJlpsFfX64MPXVhVLy8vOgL9Ug0o58/f66mpkYikTo6OtLT048cOTJc0tg93NzcCIKQSCTcv3DgMsBcwvu71KuHU9am0WhEIpGbmxsmgSAIhUKBnoUwwNDQEI1Gg1+3uMDomY5Euvq35V9Xjv7XCQsLu337tpWV1eDgIJlM3r59+z/zpLP41xnlLd9oZePGjfDr5J9MdOnSpQYGBjNnzmxqapo/fz4cAWLB4o+mtLS0oKBAXl5+/vz5/3ZeWPxzsFq+P4/6+voPHz7w8fFpamr+k44H/f39qampFApFWVn5Zyx+WLBgweLfhdXysWDBggWL/xZ/2MgECxYsWLBg8ZOwbDNZsPhNCQ8PX7t2LfOZQT8MjUZLS0uTk5PDrlL446BSqWFhYVD84VfR1NQkKSlJv39wcDAvLw8AgCDIsmXLmOxPTU3t7+9ftWoVOlWtvb0dt0hAWloaTqkrLi6eNWvWSDKGux+IRGJcXJyWlhbDwBkZGYO
Dg1JSUuLi4vTz4LB0dXWVlpbCbREREVlZ2T9urtYPMPrPkEqlvnnzhn4/HC3DbvxaCgoKoMQfuvFNmpqaEhMTsXt+lb8M6v8wEh+G3t7et2/f/nyiLH6Sv8ljAfoDdHR0WFpa/k2mHCPk54W/h4aGcnJyfklmAAA0Gs3NzW24sXNfX18XFxcXF5fu7m4m+21sbDg4OGRkZOBqKBhGUFDw6dOn1tbWHBwc7OzspaWlAQEBBALB0dHRwsJihNlD7wdYbtXV1du3b2cYcvfu3WVlZUJCQqampiUlJcyjHT9+fEpKyubNmxEECQ8PnzNnzjfFYkYD/8pain8GuMyov7/fzs4O9y8qlerh4aGuro5u/PKkTU1NHRwc0I2RHHX37l19fX3snmvXrsFVgD8DlUq9fPmyiYkJgiCvX79OT09nHj43N5eXl/cnE2Xx23L69OnCwkIEQbS0tL55M/x9vH//3s3N7d9KnSFeXl7Y9aNRUVFNTU1wm0KhbN++PT8/H3cIbn9PT4+srCzc1tfXf//+PRrSxcVFS0sLPaq+vh5BkIKCglWrVn1XJoeGhrZs2QK3RUVFGYaRlJQsLi5GEKSpqSkiIgLuDAgIuHHjhp+f3/Hjx8PCwioqKrS1teGCztjY2JkzZ8Jgu3btOnXq1Hdl6U9k1H7zoYL6fHx89vb2uP+ys7PDfgl049ciICAAtRjQjZFA707wS/xlsP4PI/FhmD9//j85ZZQFQ5gIfGdlZTk4OJibmxsaGkJzBshItLyx/gAAgKioKDMzM0NDQwAAzlUAjfbZs2f+/v5mZmawAw0rBN/d3W1oaHjq1KlTp04pKyuHh4fb2tpu3boVAODt7a2urn7lypWFCxe6u7tDKZyioqL8/HwZGZmuri6G6uqDg4Oo4jkq3NXU1OTt7Q2zQaFQbt++HRgYaGBg0NPTk5iYOG/ePFzmiUQi7txxRw13pj4+PjA2SHx8PNRBhXng4eFZu3athoYG6pNAv7+7uxv14Jw4cWJxcTH2msKl9J2dnU5OTtAMhH6pcV5e3pQpUxoaGpydndesWdPV1ZWXl3fp0iX0figoKIiLi3v16hWUC/D29tbT08Ophm7ZskVDQyMmJkZCQmLDhg0AgNjY2ODg4FOnThkZGZ05c2ZgYEBWVra3t1dZWRl7YH5+flZWFr2yz+hj1LZ8qKA++mxglfXpw1MoFFNT03379g0NDRUWFmJtaLAPXnl5+Zw5c9zd3fft27d3796XL1/u2bPnxo0bgKktAATrSIB1acH2aLW0tBw9enTmzJmvXr1C73Xwv2YlNBrtypUrhoaG169fX7Jkib+//+3bt9esWdPQ0AD+1zYF5/+A+jAAABISEp48eXLlyhVPT0+chwvDwsFWHLjqLzs7e/r06fAJxNV3ubm5M2fOBAD4+Pioq6uD/3VI+OGL+1+AiceCo6Pj2rVrHRwcsrKyNDQ0YPgRanmLi4uj/gAAABUVFQ8Pj+Li4sbGRpyrAAyfk5Pz+fNnIyOjU6dO9ff344Tgx48fLyEhMWvWLEdHRzExMTY2Nicnp+bm5vr6eiUlJQKBYGdnFxISYmNjIyQkJCgoSKFQ4MMI38bo1dUHBgZQxXMVFRWYh6SkpMzMzF27ds2bN+/BgweysrL6+vrm5uZdXV0rV66E7kg4OwLcueOOYnimTU1N6LBcU1NTQ0NDb29vS0tLQ0NDd3f3pEmTHj9+/PXrVy4uLqyLCG7/5MmT582b9/jx48LCwry8PJxbXnV1NewXZdIDOX/+/NmzZzc2Nh48eLCoqGjs2LH19fV79+5F7wdFRUVeXl5DQ0NOTk4ajbZr1y4vLy8PDw9sJPfu3TM3N9fR0TEzM4OKtc+fP9fX14f/FRUVha6BbGxsaNPb2dnp6Oh4//79SZMm0Sv9jj5G7QwXJSWl1NTUdevWAQDgswGV9VeuXGltbY3QreXg5OQ8d+7cxo0bx4wZU1xcfPDgQfRf8MFzd3cvKSmRl5fn4OD
YuHGjubm5oKCgq6vrli1b5s+ff/r0aWgL0NPTY2lpuX//flz80JFg//790JEgOzsburTY2dlhg40ZM+bevXvZ2dnr1q1rb2+H9zo0K4GjlZKSklDcD1qj8fDw5OTkODk5dXZ2xsfHy8vLx8TEaGpqQtuUadOmQf8HNja28PBw1IeBRCI5ODjExsYODg4GBwejHi4PHz7Mycmh15mEFYeenp6wsHBXV1dgYCCs/hQVFdXV1RsaGrq6urZs2WJqapqfn+/t7W1nZ7d79+7Zs2d3d3f39PQAAFavXg11EUNCQlasWHHixAnsizMLhuAEvjk4OOAAj5KSUkhIiK2tLfbznV7Le8mSJUy0vCHw415QUBA6Hjg7O0+cOHHnzp1ogLS0NDgFRkZGRkZGxtvbW1VVFWCE4IezjODh4YETK6SlpYWFhZkM6UF1dQAAfOiwRgoQbW3tN2/eLFmy5PXr16mpqfAzZc2aNdhSwmX+6dOn2HPHHcXwTLu6ulDBo9u3b0P1y9bWVlFR0VWrVsHPYiEhodOnT+MsurD72djYYmNjw8PDW1tbW1pali9fjg0mKysLlY+Yj72Zmpo+efJk3bp1qqqqYWFhLS0tmzZtApj7AYWDgwNKPmFdsaAc0rlz57Zs2aKurj5nzhxLS8vu7m7s9yW9NJKwsPCpU6cAAHfu3DEwMCgoKOjr6zM2NgYAqKurj3ww8k9h1H7zYYF3DE5Znz6YjIwMtDRrbGyEdgEQbW3tvr4+tB5BH/Vx48Zxc3Pz8PBAGyAmtgCAznsBMHrCwV/+AIsWLQIA9Pf3w5zTm5UMV93gbFNw/g9oUZSVlcHXWx4enh07dtCbKuBITU2Fnwhr1qyRlpam98Hg5OSEOUdNJIar777pkMACBQ5IYH/CDT09PRkZmZKSEqz10si1vJk4HmAV/WFgSUnJ4OBgaJiQlpY2nBA8w4Qgg4ODAgICIiIiqCz4wMAAgiAjV1evrKx8+fKlra3t0aNHJSUlAwIC4FHQFwlhZEeAyxLuKIaB5eXl0Q6PW7duubu7b9iw4fz58+7u7rDZg7S3t8PxEZzNBbqfn5/f0NDwy5cvlpaWWEFErKw57AgZDh0dnQ8fPpSXlzs4ONy4cWPSpEnoucAY0KQRRkLw5eXlISEhAAAFBQV9fX3YFaSmphYfH4+Gwfbxgv+tD8eNGwdn1Y0bNw5Kro++Zg+M4pYPax0Ab4vhlPWxnDp1yt7eHjenGfvg4cJj42FiCwDoHAno3WGwwD4ffn5+eE/Tm5UMlwecbQrO/wFg6rikpKT29nYAQGpqKhMPFwiu4mBe/UHQ+g6miHogYE0PAF31wQKlvLx8OI+Fhw8f+vv7Ozs7nz9/HjVfhD6CqJnDrl27cIL96JWF/gAfP34sLS2Ni4urqqqqrKxMTU3FuQrAwDo6OqKionPnzt21a9fkyZOtra0HBwcDAgK8vLw8PT27urrgyFBDQ0NJSUlcXFxdXV1lZeWnT5/gKbx69crZ2dnNzQ0AsHXrVhsbm9u3b4uLi6ekpKCOAVh7BDjPMC0tDdsEJicne3l5iYmJGRgY2NjYhIaGqqioXLhwQUFBIS4urqOjIzk5GZv5OXPm4M79xIkT2KMYnikXF5esrOxwU6nv3Lmze/fut2/fdnV1HThwAAAwY8aMhoYG+v11dXVOTk5sbGyHDx9GD29ubo6MjMzLy8OaEBGJxNDQ0Orq6vz8fGxaY8aM2bp167p162bOnCktLb1x40bs/QAAUFBQOHfuXHx8fEdHR0pKSmxsbHd395cvX+DhcnJy586du3v37vPnzwsLC6H7koWFBYVCuXHjxvv37wMDA1tbW8vLy6uqqiIjIzs6OgIDA+HIor29fWBgoI+Pz4/csn8Wv3bCzO8DKqgfGxsLAEhKSsIq67u5uTk6OsrKylZUVKAb8MAtW7bgFO49PDw8PT3j4uKePn1aVlYmKioaEBBQVFTEw8MTHh6ekZHBycmZn5+PtQWIiooyNDT
U1dWtra2FG11dXVhHgvr6erSPFE2ovLwc9m84OztnZ2eXlZVBcwMEQWxtbW/cuBEUFOTq6kqhUE6cOKGmptbQ0AAdTxoaGgwMDIyNjbu7u1etWrVw4UILC4uGhgac/8PXr1+hD0NPTw90QtDV1U1ISMCZKiQmJnJwcMTHx6MZq6mpmTZt2ooVK44ePUomk3E+GNnZ2VxcXCEhIQiCFBYWSktL+/v737hxA5pR7N+/f/v27Xfu3Jk6dWplZSXW9KCqqgqdUcZihNBotHv37rW3t5eUlCQkJPj6+mL/O0Izh+EcD3CK/ig4N4be3t5vOlEUFhaqq6v39fWhvlcIgkAHCdQZAOsYwMSOgEqlkkgkbMZw+WGeeYZHMQxcV1eHmrTQ09DQAPMPgV+u9PsrKyuxp/xjoGYXDIuFQqFgbTTo6e/vJ5PJdXV1uMs0NDTU29v7k3kbHYxm9TJ664BvKusjCHL//n2cCwGckUUmk3Fi//QMZwuAMhJLhJ6eHoY9gYODg+zs7MN5L2AhEAiojwFC5/+AMjAwwMXFhZoPfNNUobe3V0BAAP3J0AejqKjo6NGjISEhfHx8aAkPDQ1xcXFB71ac6QEssW+eEQuU1tbWFStWnD9/XkBAoL29ff369VOmTPm3M8WAjIwMKysr+PH3BwG/nBQUFP7tjLD4exnNLd/3cubMma9fv96+fVtGRubfzsufyh9a3/1ZtLS0pKenCwkJKSsrM3Rx+h0IDw9vb2+fP38+rnOeBYvfAVbL9//59OnT+PHjmQ8+s2AOq75jwYLF7w+r5WPBggULFv8tftOuEhYsWPwDdHd3p6enQ5kPJpDJ5KioKFlZWXptZRKJVFtbKy8vDwBoaWmprq6G+0VFRdF13L9Erxl8j2RzTU0NnB3KwcEhKSkJBVN+EiqVmpWVBbfHjRsnIyPzvUPUw2lhjwT6K9XU1FRWVrZy5Up0T1ZWlpiY2NSpU38sCSZpjT5G7aoGFixYfJPc3Fxzc3PmYRAEuX79emdn5759+3Di73AR+osXL+DPq1evPn782NfX18HBISgoCA32S/SawfdINouLi9+8edPe3p5AINy6dWv16tUEAmHkCTGEg4ODg4Nj2bJlcOGsmpoalEYaOc3NzXCV0Q9Af6VevXr14MED7J6qqiqGCyJ/Pq3RB+ubjwWL35G2trZjx47NmjVr+vTpJSUlWlpaY8eONTMz27FjR11dnbKy8rp162xsbGRlZePi4tTV1YuLi0+fPv29Hzf0UrGQsrKy9vZ2KEHS3d1tY2PDx8eHIMiXL1+wlkBSUlKqqqodHR3w59GjR6EExNmzZ6E+FoSTk3P27Nl1dXWLFy8GACxatKi5uZmfn3/Tpk2RkZEjz+2ZM2cAACQSae/evUFBQQoKCsPNQ+bh4VFQUGhvb1+1apWamtrUqVOTk5M3bNjw6tWrFy9eGBsbk8nksrKys2fPXr9+vbm5ed68ec3NzcePH8/Ly3v79q2QkFBVVZWiomJvb6+trS0a7cKFCwEAS5cuFRcXJ5PJzs7OwzW9DFm4cOGVK1dmz54tKCg48qMg9FdqyZIluHbUwMDge6MdYVqjD1bLx4LF74iYmBgAYMWKFWpqagMDAx0dHZMnT66pqdm1a1dvb+/ChQvr6+uh/vjz5891dHQ2b96MSjcAAOrr6x89ejRv3rzIyEg3N7fz5883NTXJysrGxMSEhoYKCQl5enry8/NXVlYyTL2ysrKkpAS2fEJCQgCAvr6+mJgYd3d3XEjsshZU+ai0tBT2f2KBq4N6enoePnwI2zB6vWYAQF5enra2dlpaWmBg4Nu3b4OCgmpra0NCQkxMTExNTR0dHQEAULJ569atCIJ4e3uHh4erqqoeP36cPjYymQyX9q9YsQIAoKio6OHhARUsv379ysnJKSMjMzAwcOjQIWtr65cvXyoqKkI1gKqqKjMzs7S0NIblU19fHxYWBuPBFvXDhw9jYmKoVGpKSoqJiYmUlNTDhw9
JJFJKSkpAQAAXF9eyZctu3brl4ODwS64UlPmNjIy8fPmypqbm2bNnp0+fbm1tHR0dnZOTIyQklJeXd//+/aioqCtXruzYsSMiImLVqlXCwsLBwcHHjx/X0NCIjo6ura0NCwszNTXV1tZmfleMJlgtHwsWvylsbGzFxcXs7OwpKSlQQp2NjY1IJAYHB8PRHaxuJ5SXQ8Gq1LKzs0+bNm1oaOjChQsNDQ0JCQnTp0+PiIgICQlpb2/HCax3dnYSicS2trbu7u6GhgZOTk5xcXEqlRocHFxYWHjp0qXbt28zz3ZeXt7cuXPp90O95p6eHuZi5VjJ5mvXrqGSzVOnTsVJNgMAoFqNrq6usrIyfcuXl5fn5OSUlpamqakJNYzY2Nh6e3s/ffpUUVExceLEGTNmsLGx0Wi0hoaG9PR0U1NTrFcDAAAnvAlxdXUdGBhoaWmBy/5wgsBYZVpnZ2cKhSIjIwOlsTdv3iwnJ3fr1i1sbD98pQCdzC+qymthYVFSUsLBwbFt27ZHjx5t27atpaXF2tpaVVX10KFDGRkZ0tLSr169UlVVxYoJy8rKMklrlMFq+Viw+H3h5+cXEhLCCiM8ePAgMjIS52BMj6ysbGpqKqpSCzWvwV8KzsnJyfD7jH6ORmBg4OfPn2tra7u7u5ubm6WkpOzs7Dg4OPbs2aOnp6ekpPTNli8gIMDIyIhhlkai1wx+WrIZZf78+efOnQMA6OjonD9/Ho6KcXJyCgoKCgkJoUoLZWVl2traV69exTV7EC8vr/fv3wMAvL29YS+lhYWFuLh4Tk7OsmXLampqcEVta2urp6f3/v17T09PemlsXl7e1tZWXLH82JUCw8j80muXGxkZMZT5RcWEYfYePnyIS+vIkSO1tbUSEhL03/p/OqwZLixY/L5MmTJlzpw51tbWhYWFAAAEQWxtbZcsWXL9+nU0DMOFScOp1MJtekFXlIMHD7q7u1tbW2/bts3d3R3rJcLDwzN79mzwLbXV3Nxcek/Kkes1g5+WbIYwVGEGAPDx8c2YMUNTU1NeXr6zsxNBkBkzZgQFBdna2qLTQ7CxHThwAAo3i4iI4OKECu+4osYq09JLY3d1dU2bNg2bzx++Uig4mV8m2uW4U8OJCQsLC+PScnZ2DgkJGX3NHmB987Fg8XvS2tr65csXKChcUFAgICAwZsyY5ubm+Pj4mzdvrlmzRkRExNzcvLS0tKamJiYmxsTEBHu4kJCQq6vrmDFjREREPD09CwoK6uvroYIzkUi8e/euu7u7sbHx9OnTKRRKfn4+w88dSEtLy4EDB4yMjHp7e6EV5YEDB9atW2dkZNTa2hofH0+hUOrq6iZPngwAyM/Pp2/2oF5zQUFBdnY2WhFj9ZpxqTOXbF68eDGUbFZXV4eSzX19fVCyGVUdKy0tTUhI6O/vv3//fn19PYFAuHnzJgAgISGhrq4uKCiISCQ+ffr03bt3KSkp1dXVQkJC169f19PTe/XqlaSkZHx8fGFhYXNzM3RoglCpVG9vbwCAs7OzoKBgUlKSp6fn5MmTsUXt5eVVXl6uoaGhoKCgoqLCw8OzYcOGuLg4RUXFK1euAACys7NRn7yfvFISEhItLS3v3r2rrKz09PTs7OxMS0vj5OQ8ePAg1C7ftm0bjUYzNjZ+9+5de3t7WVlZUlJSZWVlRUVFUlJSdXU1Gxvbjh07Fi5cqK6uvmfPHl1d3YcPH47wrvjTYa1kZ8FidPJNldre3t5x48ZRqdRvSqBBRwjUsBSqvDKcnwI/rYabcjlyqFQq7K8jkUj0WrVUKpVGo9FL0f5bYIsa/kSVaREEIRKJsL+aRqOdO3cO+71Of/j3XqnhZH5pNNrAwACq38sEnJjwyO+KPxpWy8eCBQsW/wQhISFr1qxhOVP+DrBaPhYsWLBg8d9iNH/PQiIjI3l5edXU1NA9jY2NAQEBU6dOxa7J/SaJiYklJSXi4uJwhayysvKvz+t3UlhYmJGRwcPDM27cuOnTp8+
YMQP7XwKB8OTJk3379n3TF2kkZGVlFRUVAQDY2NgmTZo0b948hoa038uTJ09WrlyJylyxYMGCxT/B32X893uQm5vLy8t76dIldE9jY+OSJUugpdmtW7dGEgmZTN66dStcKUwikY4cObJ48eK/I7cNDQ13794dYeDc3NwFCxZQKJS+vj4NDY3IyEhcgJSUFDY2trS0tF+SNyqVumzZMgBAeXl5QkKCpKTklStXfjJOEonEz89/48aNX5JDFixYsBgho3lVQ19f37t374hEInZnQUFBcnLyly9fxMXFv7koCnLr1q2goCA7OztOTk64dBRd30oikVpaWuA29JUGALS2tiIIAgBob29Hgw0NDdFotLa2NjghG+5BMG7U3d3durq6FRUVFAoFAEAmk+EcaxiGRCIBALCLlqKjo9vb2xsaGvj5+e/fv48OZbe3t0Mtj8WLFzc1NSkpKcH9jY2NcKYymk9UdAqCVfxDU0dhZ2eHH3mioqJqamonTpyws7MLDg6G/+3t7YVSiszLAQBApVLb2trgNhsbW0VFxYkTJygUytDQEPz78/qKLFiwYMGc0dzyeXh44KZ6AwDWrVvHxcXFx8cnJSXFUKCBnlevXgEA4EomyN27dwEA/v7+27Zti4yMXLNmTU9Pj5GREQ8Pz/Xr15cvX75q1aqbN28qKytra2sDAHx8fHh4eIyNjW1sbGRkZKqqqhwdHXl4eFJTUy9dusTDwwO7UvPy8mDDnJycvGbNmvDw8K1btxIIhNWrV0tLS2/ZskVUVLSmpgbmQUVFpb6+fvbs2WfPnhUTE1uxYkV/f/+GDRuCg4MXLFiQlJR09+5dCQmJ+Pj4lpYWJSWltLS0FStWtLe3m5iY8PDwnD9/fvny5VDn/suXL6qqqpGRkbKysl1dXdjUhysTqCECW77jx49fu3bt4sWLJ0+epFAoTMrB3d399OnTL168UFBQqKioyMrKEhcXv3z5cklJCT8//8aNG01NTSUkJD5+/DjyqzyKCQ8PR1ehDUd9ff2HDx++N2YikRgeHg5fUEZCU1MTw/1tbW3BwcFlZWW4/UNDQ1FRURkZGbj9ODGw9vb2T/8L+h4Ju9ZHCK6giETiu3fvmITPyMhITEwsLy/v6+tjHjOVSkXzVlxcjHuN/rshkUixsbHoOyVzhoaGcnJy6Pf39PTExcWhb59tbW3oqydKbm4u9ipkZ2czCTxK+Fe/OP9GwsLC8vPz4fXG9nZC8vPzly9fDj+2vomoqCg7OztuZ19f39ixY+/fv48gCC8v7/Xr1+FSp66urqtXrwIA2tranJycAAAdHR1wGbKvry+ZTObh4Tly5AgU6k1OTo6NjQUAJCQkIAjCz89/9OhRBEGWLl168OBBWN1kZWWZmZlxcXH19fWFh4eTyWQ0Dw8ePBg3bhwAQFpaur6+3sHBQVJSEkGQ69evx8TEJCcnAwBiY2NPnTo1e/bs9vZ2JSWlW7du3bt3DwDQ3NwMxTg6OjrU1dW3b99Oo9FMTEzq6upwqaPJwWayu7sbQRAow7F+/fr4+HgAQH5+fnp6OgAgLS2NSTlYWlra29vDJ/nBgwcIgnBycp4/fx5BEEFBQTMzMwRBBAQEoAoUi2vXrrW3tzMJQKVSPTw81NXVcft7enqYx1xaWioqKgovJXOoVKqrqyvDCMvLyw8cOGBhYSEsLPzhwwd0P5lMNjQ0bGlpef369cWLF9H9ERERy5Ytw8ZAJpPNzc2VlJQyMzMzMjKePXv24MEDGo0WFRXFycn5zbyhoAUF81lcXIwu66Zn165dvr6++fn5ampqGRkZ34w8MzMTAPD27dunT58uWbLk5cuXI8/YT1JdXT1lypTq6uqRBLa1tT19+jRup5+f35kzZwgEAvxZUFAwc+ZMWNtgqamp4eTk9Pb2zsrKSkpKMjQ0ZBJ4dDBqZ7jcv3+/q6sL9vt5enru2rULnUYxODjo6uoaHh5OIpEGBwcFBASYR6WgoJCUlNTR0SEiIoL
urKqqQtfBjB07trCwEGoVjhkzBuoDjRkzBq5DQnWEOTk5oXJSbW0t8xRLS0t5eHhSUlICAwOhoZeAgAD8KkLDfP782dLScufOnVeuXLl3796zZ89ycnJIJBKCIFDjEdVxLy0tbWlpSUhIsLW1lZeXT0hIgJmB+SSTyTk5OatWrWJjY3vy5AnD1OmBfaELFiwoKCiAJQDXIcE2frhyuHDhwu3bt2HTS68DAtcPcXFxwa5dFlDWmQns7OzLli0LDAzE7kStDJgcOG3atBFOUHr8+LGKigr6jGA9HOACagDAhAkT3r59u3r1ahgGzimbMGGCgYHB8ePHjx49Kigo2NTU1NjYiIucoYcDGxvbunXrvmvq/8g9HAAAcXFxZ8+eVVBQ8Pf3h7oqAIDAwMCKioqpU6dmZmauXr161qxZR48e1dfXNzY2ZujP8M84aUydOnXChAn0+ykUyuvXr3fs2IHuSUxMHDduHLRwQomKigoICMDeCXPmzMH2XaFMmTJFUFBwwYIFUGcAToMYLvDoYNT2du7du3fXrl2wv27evHmcnJyHDx8uLi5GEOTQoUMSEhLe3t5GRkYMV+PiOHnyJAAgPDwc3RMUFCQjIzN27FjY+0EkEul1K+ih0WhkMrm7u3vWrFmwMUD1ipC/1pbQaLS+vj45Obmurq4tW7ZoaWnBFgWhW3zy5MmT9vZ2ISGhO3fuLFy4kIODY9q0aR0dHfBhxnbLyMnJdXd3q6iobN26lWGFMm3atMTERNjeEIlE+tTpeffunaio6LFjx+CJE4nEgYEB8L99wvRs2rQpLy/P0tLym2XForq6Wl1dHfY7PXv2zN/f38zMLCMjo76+/vLlyyEhIYcOHWJ4YEFBAbQyIJFItra2sbGx27dv7+vra2pq8vb2hlGhgb28vJycnFpaWmJiYiIjI+3s7HBy0j4+Plghj8rKSrQPU0NDA27w8fHB5gHS2dlZV1cHt0VERCoqKhAECQoKwvoWYYEeDp2dnU5OTmjbQP9g9vT0LFmyJCQkJD09fcqUKeXl5b29vfv27auoqIAFhZ44hUJBEMTb21tPT+/OnTu4eLZs2aKhoRETEyMhIQHNV2NjY4ODg0+dOmVkZHTmzJmBgQFZWdne3l7c/G2sPwPqpKGvr3/8+HEREZFp06ZBJw0LC4t9+/YBAAwMDA4fPtzY2Kijo3PmzBk4fo+eMvbSvH79evXq1S4uLitXroQvrImJiQ8ePPD29q6qqqIvMSqVGhAQgP7s6OhoaGigVwm3s7OTlJQ8efIklJ4ZrmBRKBQKmUx+8+YN2rk9kurxD2XUfvNB36yWlpYLFy4sW7asv7/f09Nz0aJFjx49evbsGQwzZ84c2FvIHG1tbR8fnwsXLlRXV0+cOLGystLIyIifn9/DwyMwMFBAQEBJSenQoUO7du0CAGRkZMB+v/T0dFhHfPr0CX5uvn371tfXV1pa2srKSkBAYMqUKXZ2djNmzBg7dmxaWtqqVatUVFRCQkJWrlx58+ZNXV3dFStWrFq16vjx49nZ2Z2dnVFRUevXr0dzNTQ0ZGxsfPLkycbGRmFhYXNz88HBwejoaB0dHUNDw127dsF+yNTU1OPHj797905ZWVldXf38+fNwXk96ejocdPn06dPdu3c3b96srKysq6t74MABbOqXLl2CyWVkZHz+/BkAcPPmTT4+vsbGxszMTHFxcXFxcQsLC19fXxKJdOLEicWLF0MHFoblICgomJaW5uTkJCgoGBsbu3DhQgqFkpGRkZWV1dfX9/nz59zc3J6envz8fKhq8YvuhT8SaWlp+NqRk5Pz+fPnu3fvLl26tLq6GucMQH8gamUwODiooKCgrq7+8OHDnJyclpaWzMxMd3d3VDA6KSlJRkbmwIEDAACswwAaVVNTE6qTwtDDAQBAoVBqa2uPHj2KHqWhoXH69OmMjIzBwcGWlhYZGRk/P79t27YNV42O0MNh/Pjx+vr65eXlenp6kyZN6u7uFhISWr9
+vZyc3Hd5ONy7d09SUlJHR2fPnj0uLi6cnJzPnz9HFcVERUVhC83GxobNMM6fAfyckwaJRMJemsWLF/f09FhaWoqIiLx+/Xrp0qUWFhZ5eXns7OywgwSlv7+/u7t7cHCQSCQ2NDQAACQlJX19fa2trUNDQ7EhiURiWVlZVFQUPz//ihUrBAUFcapp9Lx+/RpqZ586dYp5yNHAv9fR+k/T19f3M4fTaLTKykpcn/vg4GBzc/M3j4V9gH5+fq2trTQaDe6kUChtbW2Dg4Pwyw+XyaGhoba2NiZxwv9WVlbW1dVh9zc2NmIjRDPf2NjIJDYymdzU1IT+/GbqOLq6ukYyaESj0VpaWhAE6e7uJhKJI4//v8mGDRuysrJcXFywCz/IZPKTJ0/i4uImTZpEoVByc3Ppx/kkJCTgxtu3b0NDQ3V1dWNjYwcGBrZv375o0aLy8nIEQWbOnLlixYqoqCgYsqqqasGCBTt27EDHhBAEKSoqWrduHdx++PChmZnZxo0blZWVzczM7O3t4X4vL6+GhgZcBmpra58/fx4RETF//vz29nZ1dfXTp08fPXpUSkrq2rVr2JAuLi5aWlpw+8uXL+h+UVFR+gJpamqaN29eQ0PD7t27LS0tHz16BO8iWFDYE4eHUygUXDzo2RUXF0tJScHBZh0dneDgYFxaq1atggUFv9XgY56dnT1mzBj4KO3atevZs2cFBQXOzs7wEHl5eXt7e2VlZfjRCVm2bFl9fT39uWAvTXV1NRwBfffunYmJSUlJyaJFi2CwxYsXY+uc+Ph4MzMzU1NTaWlpMzMzMzOzgICAnTt3nj59esuWLcuWLfP394ch+/r6xMXF4falS5eOHz8Ot/X19RkO3YmKisIy7OnpQS/ocIFHAaP2m4+ekUjYMYGNjU1GRga3k5ubG775Mgd2/hQWFmLdWzg4OOjHWtBMcnFxMR+Jgf+lzxLDkTk2NrbhRuwgnJycWHHeb6aOY4Qe02xsbHDcgiXgNBLgIyopKeno6GhjY8PBwZGWlpabm9vX12diYoKT9scCx1C/fPni4eHx/v17OCmjsrLy5cuXr169Onr0aFhYGADAy8try5YtERER0tLS0GHg+PHjHh4eJ06cgPHIy8ujsx8PHjwIAIiKiiopKYFmQwCAyMhIdXV1KSmphoYGQUFBHh4eKLY5efLkXbt27dmzx9vbW0BAAH649Pb2fvjwAdfn+V0eDhISEnJycufPn/f09FRSUpo+fTocz4MFBUbg4VBeXl5ZWamnp6egoKCvrw8/m9TU1OLj4/X09GAYnFIoQ38G+BM6acyZM6ewsHDOnDkIgtja2nZ0dFy/fv38+fPYnODAXRp0P9wWFhYuLCzs6+sbN24c8r8WDatWrVq1atXQ0JCRkRG0UOju7oafoXFxcfn5+dCAl0ql8vPzS0pKtre3wwl6qGnwcKAXQkBA4JtTH0YB/6GW719EUlISzrTEicOyYDEcqDWBra3to0eP5s6du2jRohs3btTU1GCdAfr6+iorKysrK7FVG7QyMDAwgF6yVCrVx8dnxYoVqampcnJyBgYGBQUFLS0t1dXVFhYW69ev9/HxCQwMRB0G0Hi4uLhkZWUHBwcZThgJCgqytLSELz2TJk2KiYnZt28f9HDIzc19+fKlsbExnLoCR397enqgQxAaA0MPBwBAbGxsR0fHu3fvNm7ciBtpNjMz+/r1Kw8Pj7a29tq1a8F3ejjIyclt3769urpaVFS0sLAQDnxYWFgcO3bsxo0b8+bNGxgYWLZs2eDgYFVVVWRkpJmZGUN/hp900uDm5sZemg0bNjQ2NtbW1iYkJJSUlHBxcZ08eVJLS0tDQ6O/vz85OZn+BRdFUFAQXoLy8vKmpqZJkyY9e/YsNTXVw8Pj3r17Fy9e1NXVrampgW8zZWVlOTk5CQkJCxcuxDZvb9++7erqevXq1axZs1AzyOECjw5Yup0sWPwB9Pb2orXPN6X9USsDaKqAIAj8jqFSqWQ
yGetzi0Kj0bAOAyj19fVxcXF79uwZSSZhcmQyuaWlBZoW/R1AG4cf9nAYGBjg4uJqbm6eOHEidiQPikuMZOD/l4C9NAxfLPr7+3l5ealU6vdaUsCZdOjM7fb2dub9Pf9NWC0fCxYsmPHlyxcAAPrZxILFKIDV8rFgwYIFi/8Wo3Y9HwsWLFiwYMEQVsvHggULFiz+W7BaPhYsWLBg8d+C1fKxYPEnQaVS37x5g93T2toKtVjpQ/4dPgNdXV1otOXl5djVZiOBSCR2d3f/ZB6YmBgMVxq/hN7e3rdv32I3fhIEQaKjo6HyalVVlZ+fH0O5sn+YHzMA+bMYtS1fVlbWp0+fSkpKqqqq4FPa2dkJAIDb31ULUKnUhIQEnJvd38rXr1/z8/N/+PA/xYTlh/n76oiamhpYGllZWXCZ8+8G1owGVv1v3751dHSkD8nBwcHBwbFs2bKWlpasrCw1NTU/P7+fz8D48eNTUlI2b96MIEh4ePicOXO+y1GIl5c3MTERFfb8MZqamvbv34+1qwTfKo1fQlVVFZRFRDeY802DITs7OwUFhebm5traWjc3tyVLlkCNwJ+J8yeh0Wjh4eHXr1//W1P51xm1LV9fX9/y5ct5eXklJCROnTrl5eUlJCQEACgtLU1KSvouTciWlpbDhw/jnFr/Jnp7e0kkkqOjY0REBPNgw/1r9+7dZWVlQkJCpqamqELjcPxNleN38b1Pcltb2wjriB9AXFz85s2b9vb2BALh1q1bq1ev/t2ccvn4+Ozt7QEAdXV1UC5y0aJFwwVGfQaMjY0PHTrk7OwMACCRSBcuXDh8+HBgYODjx499fHw6OztNTEyuXbt2586dBw8eAADc3NycnJysrKwuXrx47do1rN0dOzv7/PnzhYSEli1bduTIEUVFxefPn3/XKWhra8Oc/DD0JgYjKY2fZ/78+VCJAt1gApolJrx9+3bChAmWlpaZmZmcnJzy8vLD6XpDIiIiXrx48b3Z/i6gAcjfmsTvwKht+VavXq2mphYTE8PLy3vmzJm4uDi4v6mp6fDhw98VlZSU1Pfai/wY8Lbm4uJi7njA/ImKi4tbtGjR3Llz/f390e/UwMBAR0dHf3//EydOvHv3rrKyUkdHBwpYMKwc29radu3ade3atcDAQAcHh9zc3LKysrVr1z569OjSpUvR0dEAABsbGzc3t61bt7q7u1tZWf3YF9IPPMmJiYkjqSN+DB4eHgUFhSlTpqxaterOnTsVFRVQfOfLly9qamrPnj17/fo1vH/evHmjq6vr4+Nz7ty5srKy/v5+ExMTLy+vTZs2+fj4/LwfRV5e3pQpUxoaGpydndesWdPV1ZWXl3fp0qXExERonhAYGJiTkwOvRWdn5+3bt6EKF8PYsD4DXFxcU6ZMERcX19fXNzExWbFihbCwMCcnp6Ki4vHjx728vL5+/aqoqGhra8vPzz99+vSzZ89i/bmw5OfnZ2Vl6ejoAABiY2M9PT2vX7/+6NEjAMDTp08/fPhgZWUFAPj69auHh8fhw4cvXrwIAGBjY+Pm5obOlDh+2MSASWlER0d7e3tv3rwZarahoDmk0WhXrlwxNDS8fv36kiVL/P39b9++vWbNmoaGBlx+6DNMJBLv3bvn5OS0efNmEomEtbzAZgmbqKur6/r16ysqKuLi4lpaWvz9/bOysuLi4goLC9+9e4eLkEKh3L59OzAw0MDAoLOz08vLKzMzEzVXAgAMDg6eO3fu+fPnZmZmiYmJX758kZeX9/X11dXVdXFxwRbscOeYm5sLdeN8fHzU1dUZXujRx6ht+QAAu3fvfvz4MQCgtbV1YGAAmmcODg7y8fFh7z/6A3E3E9z54sULqCdEIBCw9zf2Tu3t7bWxsTEzM9PU1IyIiFi3bt2tW7cAAK6urm/evME9/zioVCr2ti4qKrpw4YKKigoUucY+fgyfKJTfwYQFpbKy8syZMy9evNizZ099fT3uGUNPOSMj48iRIyYmJkePHp0
9e3Z6evpwpRoaGorWEbjL1NzcvGnTpitXrixdujQoKGjZsmX379/X1NR0dHT08vLS1NSMiYkB/1sjM4FMJkdERNBoNCiECE3kjY2Nt23bBlUrFy9e3NLSYmJioqioeOHCBQqFcubMGWNj49LSUhMTk507dzKP/5vMnz9/9uzZjY2NBw8eLCoqGjt2bH19/d69e1euXFlfXw8AUFJSmjx58rp16wAA3NzcJ06csLKyYvjJ7urqeu/ePZzPQH19fXp6+qlTp6ZNmwb3kMnk7OxsMpksLS2N9RkAAEBDPiydnZ2Ojo7379+fNGmSsLAwAIBIJG7cuFFHR8fX1xcA4ObmJiUlZW5uDgA4c+bM2LFjFyxYcOPGDTjoICcnBx1fcaAmBhQKBWtiYG5u/vr1awqFYmFhYWFhYWpqipWZZVIag4ODN27c4OXllZeXR41HIGgO2dnZ58yZw8fHd+bMmR07duTk5Jw4cWL58uXx8fG4/NBn2NnZmUAgTJw4sbCwMCIiIiQkpKOj48SJE7y8vNgsQdLT02NiYgQFBTk5OV1dXdeuXTt27Fh9ff3FixcvXbp05syZWlpauAgfPHggKyurr69vbm7e29urqKioqKi4YMECNE4XFxcxMbHdu3dbW1sbGRnNnDmTRCLp6Oi4u7t7eXmhwZic44IFC3p6egAAq1ev/ifHdP5dRrNu59atWy0sLEpKSsrKyg4fPvzo0SMuLi5VVVV4/2lqasL7j97BC72ZFBUV1dXV4deMpqbmtWvXtLW1Hz9+XFJSglq6ODs7UygUGRmZwsLC+Ph4GRmZr1+/wr5KIpEIbSERBNHV1d2yZYuent6CBQusrKyOHDkC6wsUDg4ORUVF6A8ZFxcnLS1tb2/f2dkZGxu7ZcsWrKeJkpJSamoq9onC8juYsKBYW1s7ODjAR+vkyZN+fn7oM+bs7IyespKSUnp6emNj440bN3x8fOzs7NauXTtcqXZ1dY0dO1ZLS+vWrVu4y8THxzd79mw7O7uuri4bGxtra2tVVdVDhw5lZGRIS0u/evVKQ0PDzc3t6dOnsEYejry8PCcnp7S0NE1NTXQGBxsb26dPn6CS/YwZM9jY2BAE6enpCQsLW7169fjx48ePHz80NAQD0zcVP4CpqemTJ0/WrVunqqoaFhbW0tKyadMmAABUhcYCe96EhIRwQ18QCwsLcXHxnJycZcuW1dTUQC0rHh4eQUFBrHT4x48fAwICUlNTofAVjiNHjtTW1kpISEChZGFhYehlc+fOHSgEqqysHBwcLCoqCt+Brl69qqGhYWlpOWvWrMbGxt27d4O/ZK8BALy8vK2trfSp8PDwiIiIhIWFQWFoDg4OmBkBAYHe3t6KigouLi4o2MZkwAJbGuXl5XJycjt37qR/F8HmkJ2dHcq2CQgIwLZZQECgpaUFlx/6tLKyspydnSdOnAjjnz9/vp6e3vv376FnL31gTU1NhpkZLkIDAwP4hrpmzRqG4T9//qyqqgoAUFBQGBoa6ujogIXGwcGBG0cY7hyHywmgu+ijidH8zTd+/HhtbW07O7upU6fu2bPnzZs3r1+/VlNTQ++/8PBw+mYPAPD582eopIfeTAAA2FCpq6vX1dXZ2treunXLwsJi/PjxWVlZe/fu3blzZ0VFxebNmzk5OdGuoc2bN8OJElAMED7/Bw8eHBoawjV79GCf3m8+fij9/f2cnJznzp3LyckJCwvz8PAAAHR3d2MbOZwwI8TCwuLWrVuurq56enqoLyU/P7+QkBBW5vHBgwfPnj0b+bgOWpIqKirQvX04ODg4YCmpqqo2NzczKVX6yNHLhAbDPuRo1Qkfcljf4Tq+cMyfP//cuXPv3r1raWlBRfcBAIKCgiIiImgBdnZ2WllZTZgwAa3QsaSlpenp6enp6X38+PGbBcUQHR2dDx8+lJeXOzg43LhxY9KkSXA/1F2CX2kA40hAb03AxGdAVFR05syZ58+fhxZaCIKoq6vfvXt33759OMc
DiLOzc0hICKwBcdEODg4CAA4fPjx//vxZs2bBoyQlJfPy8sLCwrKzs/v6+uAXXnV1NRwv7+rqgt+a6LsCBJoYaGtr8/DwMDExgHuw2RiuNMTExKKiouALQWZmJnpquBxi84BNd7j8oEyYMOHly5cAgMHBwdzcXGh5ISIi4uHhgWYJGxi6CAEAoDsm/bXDRSgpKQlNaPv6+goKCujjnD17NuyQHxwcFBERQS1WmItz4f4LU+/v78ftx170UcZobvkAALt373737p2+vr60tPTy5cs5ODjY2dnp7z8cw91MAIDW1tbly5dj72/cnYqNh4ODY+/evcbGxtra2gAA+ucfx3BPL+7xo7/7UcrLy0NCQgAA9CYsaBjovY7yTRMWa2trtHK0tbVdsmQJdt4X8wcMLcn29nYoxo97xujPpa6ubunSpUxKFS0cJpcJFx77c7j6brgCgdU6ZMaMGYqKijt27CgpKUEQRERExNvbOzk5GS1ebFrLly8PCQkJCQlRU1NjUkRMGDNmzNatW9etWzdz5kxpaemNGzcCAOLi4jo6OpKTk2fNmlVQUODu7v7x48fS0tL6+vrY2NivX7/C13kAAJVKRX0GnJycjh8/Dn0GSCRSWlpadnZ2SEjIw4cP79y509HRUVhYmJiYuHnz5nnz5pmbmxMIhI6OjpycnJSUFNyM346OjsDAQDgAaW9vHxgY6OPjAwAQFRW1s7P78OFDZWVlSkrKlStXSkpK1qxZM2fOnKtXr+ro6BgZGX348AH2UhYVFW3atIlAIAgJCWGfQZyJQVJSEkMTAwcHB2higB44XGmMGTNmx44dCxcuNDU1HRwcxH4uozlUUFBISkoqKytrbGxMT0/PzMxsbGzMzs7Oz89HEASXn66uroSEhNTUVLhx7NgxDw+PDRs2XLlyZd68eY8fP46NjVVQUFi3bh2aJTRFXV1dAoGgqKhoaWk5derUT58+NTY2BgYGdnd3f/jwISMjo7q6GhehjY1NaGioiorKhQsXFBQUlixZ4ufnFxkZicZpbW09ODgYEBDg5eXl6elZXl7e2tqampqakJDQ1taGjuZQqdThzpFIJGppae3cuTMqKqq7u7uqqioqKgoagPzYfftn8A3/vj8cEolkbm4Ot58/f56cnIwgyNDQ0KpVqxYuXGhhYUFvqokgSF9f39atW1+/fu3s7BwfH48gyPHjx+3s7N69e+fo6Ah/RkREuLm55efnl5aWysrKrl+//uzZswQCYffu3evXr0ddXtvb2w8ePAi3Q0JCJCQkDA0NHz16xDC30dHRSkpKoaGh+vr6mzdvbmpq0tDQgHM1J0+efPHiRQMDg507d7a1tcnKyrq5uTHMuYKCwp07d549e7ZmzRpoiTk4OGhubn79+vXw8PCAgIC6urqysrKpU6e6uLhQKBQ3NzcAwJkzZxwdHbW0tLy8vBAEaWlpUVRUPH36dFBQ0KVLl+7cuVNSUjJ27NiIiAgSiaSiogJT//r1q4SExJMnT4Yr/6KiIj09vaioqIsXL8LM7N+/f/v27Xfu3Jk6dWplZSU85YiICBcXl3Xr1oWEhNja2tbX1w9Xqj09Pbt371ZVVa2qqsJdpvr6+vnz558+fZpIJL5580ZQULC0tPTRo0eSkpLl5eU3b96cM2dOS0uLoaFhSkqKnZ3d4OAgfYa/fv2qrKw8f/58Z2fnkydP6ujowDuksLCQh4fH19f39evX27dvz87O9vX1lZKSKi8vr6mpmTFjBry13r59y8/PX1BQMFyBfC+ox+nQ0BD9f383d9+BgQHkr9caKpWKNSseGhpCT6GxsfHu3btwOzk5ubS0FBsJkUik0WhUKnW4syMQCFQqlUQi4fYzKQ2s3S4KLofD8c380Gg0eMowTgKBQCaTmWTpmxbZ2AghPT092PzQH9Lb24taXv8Yg4ODNBoN66k7uhn9itWomwkcNkD7/QgEAnOv2r6+Pn5+fjT80NAQjUaDows4SxcEQYhEIkPzF5g
u2j8Gv7fo3VVQhvNCw3maDBcM/DYmLFiwDjsAgKGhIS4uLhqNBl/A4bm4urp2dXVZWVmhI0/MSxUFd5m+CY1G6+vrY1nj/lsQiUTYDQMvWWlp6fTp0//tTLH4zzH6Wz4WfwROTk4dHR1/3xpkFixYsEAZ5eN8LP4IBgYGpkyZMm/ePDhfnwULFiz+VljffCxYsGDB4r/FaF7Px4LF6CA8PHzt2rXoyG53d3d6ejqUKfglEf4YZWVlXV1dSkpKIwnc3NwsLi7OcDg2Nze3ra1NRUUFu0SvrKwMnaQqKysLpRVgYOxCCCg9AwBoa2sDfykwfJOsrCwxMbGpU6fCnzQaLS0tTU5ODrc6HkImkz98+DBhwgRubu5Zs2Yxj7mmpgZO2+bg4JCUlPxntJ8gCILk5eXx8fF9c9yURCLV1tbKy8vT/6u4uBh3jmlpab9kcervBqu3kwWL3xR0JXJ+fn5/fz+6Pzc3l/lK/G+Ci5B56gwZHBx89OjRCFVes7Ky8vPzGTZ7L168yMrKUlJSMjExQdfbkMnkI0eOPH/+3NfX19zcHCtUJiwsrKKiUlhYyMnJSSaTb968CQAoLCxcuXJlcXHxSDIDAKiqqoLtE5VK7e/v7+josLS0rKmpYXiaK1euFBUV7ejo0NPT+2bM/6Lua29v78WLFxku08ICVxXTSwYSCARHR0cLCwvszsjIyOPHj//ijP4ejM6W7w+1IPgnaW5uTk1N/YcTbWtrG050bTgaGxsZ7u/o6Hjz5g1DBYqcnJxPnz6hCbW2tn769Ck9Pf3Lly/YYCQS6dP/UlZWhuYTfkOMEPrC/Em3DZi9vXv3wu0zZ85gF/IvWbLkZ2Kmj5Ceb+qp8vDwQMXXb0Iikfz9/VHJIQqFAldqQi5fvmxsbDx+/PipU6dCsx4AQFdX1+vXr11cXB48eDBt2jTs+U6ZMgXqHCkqKqqoqDg5OQEA5syZw1zqFoeBgQFUIDp//nx1dbWYmBiqEoCjpqamvr5+3rx56urqN27cgG8DFArFycnJw8PDz8/PwMCgra0tPj5eTU0tLy/vX9R9HT9+PMOvPVyBS0lJQc0XHPz8/FAhCKWpqWm4p28UMDpbvt/BguDvAL6i/pKo2tra0Ir+V/FN14WUlBQoeTVCAgMDGcoENzQ0uLi4tLe3Kykp4Rq/3NzcK1euuLi4oNY5jo6OLi4urq6uOL0rLi6uioqKlStXsrGxcXBw9PT0uLq6gu//gAD/W5gjdNv4JgUFBXFxca9evaqoqFBXV4fr7j09PV++fHn37l0YJjo6+saNGw8fPjx8+DCFQgkPDx9OrfT27duRkZEHDx78/PlzdXU1jHA4dWOshCxOhxqrWIue786dO6FOzYMHD96/f+/g4IA9kadPn0KJbTRyKEoCAGhsbGRjY4OLfOTl5dHvlQkTJsAVR/X19ZKSkvQfixQKhUwmv3nzBtUbog/T09OzZMmSkJCQ9PT0KVOmlJeX9/b27tu3r6ury9LS8v79+y0tLcHBwREREbB+j4qKMjMzMzQ0xEYyY8YMQUFBNTW1iooKPT09uDjn8uXL48ePNzMz2759+/79+wkEwurVqysqKubPn48e+Gt1X7GXj/6qNTQ0XL161dfXFxUZxoIt8OHKin4/giBBQUF/hyj8b8KoHedDLQjExcXJZLKzs/NI/LR+c86fP79r167ver0djrlz586dO/fn40Gpq6u7fv06XBc/HN81YPDly5fW1tYZM2bAn2VlZe3t7TAGBEGg6ndKSkplZaW4uDh6lLe397Fjx5YvXz5mzBgAQGlp6eDg4LVr1yZPnkyfxPz587m4uOBIFZVKhQX7vR8QAFOYERER1dXV5ubms2fPZq4z900UFRV5eXlhRQyVKouKiqAmcnt7+6NHj0gkEpSl5eDg2LZt26NHj7Zt29bS0sJQrXTcuHEaGhoVFRWhoaEXL16EESooKEB147Vr127YsAH9zsBKyIaFhW3cuBHKRu/fvz8kJAR
VrAUAEInEFy9euLu7CwgItLa2Pn36ND4+XlpaGnsiWVlZO3bsAAD09/d3d3cPDg4SiUSoLtTU1ISu1+Tj46OXSw4KCmLYx/j69evU1NSkpCQmL1Ljx4/X19cvLy/X09ObNGlSd3e3kJDQ+vXrhYSEpKSkyGSyuLj4lClTNDU1oeqsiorKmjVr5s6d29jYiNWhjY+PP3bs2Ny5c52dnQ8cOAAAePbsGeqQtWHDBqiIhm02frnuK+7y4a7a8ePHz5w5s2DBgvT0dOxR9AUuKSkJL/038fPz27Zt28iXyf5xjM5vPixYCwKc6UFTU5O3t/ezZ8/8/f3B/xqI4CLBvVzTm6dgwRqL9PT04PwKhnsxHxoawvkVuLu77927d2Bg4OjRoxcuXMC+omKNV7Kzs6dPn+7t7U1fR2Df0Ovq6u7du/fs2TOoKePi4mJsbAz+10Gms7NTS0vrypUr5ubmUGIYexSgs5vBgjpI1NfXX758OSQk5NChQwAAXCFDjhw58urVq4GBASbOCQ4ODlpaWujPysrKjIwMuA37psrKyri5uXF1BDc3t5mZmYKCAvxoa2pqam1tnT59+nC+TgiCUKnUgYGBS5cuofMRhnvgTUxMTp482dTUpKSkBDsDzczMmpubYWEycdtgGNvIgUv+k5OTZWVlwV9izTU1NX19ffBfUBaViVrp8uXL/fz82tvbYXuMinhBdWOoB80waWVl5YiIiNLSUnggVrEWAPD58+fAwEDYek2YMMHIyGjWrFm4AbOuri74VZeZmQkFSL9+/erg4ODg4CAuLo5+0/f19cnJyeFS//jxo4qKCn2uduzYcezYMR8fH4YvNCh79uzx9fVtbGyUl5d/+vTp27dvN2/eDBhJfoO/lHIFBQWxqt8EAkFUVPT58+fPnz+3tLSE2rM4IVz62H6J7isW+suHvWpJSUnYGwOFvsBHOO7T0dHx5MmTu3fvXrp0qba2dlS61I7ylg/nz4LzHElKSsrMzNy1a9e8efNwBiLYSODL9cmTJw8dOgRft3HmKbhEscYiXV1d1tbWhoaGO3fuXLp06cmTJ5WVleGL+bVr14KCgg4cOHD8+PFXr15xc3PD2WX37t07efKknZ3dokWLuru7+fj4Fi5c2NfXh76iCgsLY41XFBUVu7q6tmzZAhU7sWA9U44cObJr1y5jY2MVFRUajbZo0SL4hGMdZAAAwsLC8+fPd3d3z8rK6ujowB5FpVLp7WZQUE+WgYEBFRUVLS2t9+/fIwiCLWQAAIIgr169Mjc3NzQ05OPjw3rZ4MjJyYEtXGdnZ0NDQ1tbW3d3d0NDA6zKOzs73759GxgYiNOevn37dkFBwY4dO2CfkpqaWkBAwOfPn1+/fk3/jgIAIJPJLi4uLi4uHz58+ObtZG5uXlxcLCkpqa6uDvvZ5s2bJyEhAQsTfiqhJjLQbWPBggUMjehGAlY5GkEQUVFReAoIgtBotMmTJw8MDJSXlwOMLCoKdrUSlUo1NDQ0NDSUkJBAGGlb0y9tQvVUcTrUWMVaAICysrKysjL88Kqvrz948GBERISlpSU2wpkzZ8JbZdWqVe7u7i4uLgsWLHB3d3d3d588ebKYmBjswC8qKlq7di32rBsbG8XExOjbFSqVCuMXEBAYziQEIiEhIScnd/78eU9Pz48fP/b398PprOjpf1P128/PD34tbd26debMmbBfFCeEixOe/VW6r9jzxV0+XGARERE4zIz8r4o3fYEzdNPFqnhDBAQE7t27t2vXrm3btqHuLqOMUd7y4SwIcKYH2trafX19S5Ys4ebmZmLgQP9yjTNPwSWampoKH8g1a9ZIS0vj/AqYvJjj/AqGOynUeOX27dtw+IeTk5Oh+QP2DT0vL09QUBAAYGxsDMdBYRicgwR6avz8/AQCAXfUSOwmZGVl6+rqkpKSAAA0Gg1byACA3t7ea9euoSJqTJwT+vr6YM8MdMf18/OLjo52cHCA/i/CwsI2NjYvXrwIDg7GHcj
BwXHmzBl0BAgAMHPmzL1798JGAgcXF9eRI0dsbW3hOBZzli5d2tjY2NjYyMHB4e/vn52dvXjxYsD0G2I456CRoKCgAG03SktLo6OjdXR0+Pn5jY2N7927R6FQSktLHz9+7OjoGBMTQ6PRjI2NExIS2tvby8rKPn78WFlZWVFRkZSUVF1d3d7ezsnJaWVlVV9fHxkZCaUyo6Ojh1M3BgCgysg4HWpUkVlNTS05ObmkpMTMzCwgIOD8+fMNDQ3nz58nEommpqbYT6KtW7cydLaD3Lhx4+bNmwkJCTw8PKtWrXr27Bk6vTAwMJC+G+Pt27ddXV2wwwDdWVZWlpOTk5CQQP8wmpmZLVy4kIeHR1tbG7asnZ2daWlpaWlpvb29y5cvv3r1KhS5jouLq6qqqqysxE5WkpKS2r9/v4+Pz82bN6dNmwaNW11dXQMCAry9vd+9excUFEShUGJjY9va2uCVSkhISEtLu3//vq2tLYFAuHz5MgCgqKiovb39xYsXAQEBu3fvHhgY+PjxY21tbV1dnZ+fn7m5OXyniY6Obm5uhgLxKLDGwF4+3FW7du2ajY3NuXPnCgoKcnNzh5Ozh7S2tsbHx3/+/Lmurg4AgBY4kUgMDQ2trq7Oz88fM2bM7NmzZ8+ePWvWLD4+PnTEYVTxd4iB/g7AGdLNzc0IgpSWlrKxsdXW1hYXF2tqaiIIYmRkFBMTU1hYiCCIv7+/lpbWq1evtLS0oOpramoqNioikThu3LiysjIEQezs7B4/flxdXb1s2TIEQcLCwkxMTHBJW1lZnThxAkGQ3t7e/Px8DQ0NKAMdFxdnbGzc3d0tLy+PIEh5efmKFSsQBElPT9fS0kIQxMXFxcHBAUGQ+Pj4/fv3Z2Zmwty6uroeOXIEQRANDY3Pnz83NzdPnjy5s7MTQZCMjAwKhQJfBukpLCwkk8lWVla3bt2aO3duWFgYgiDFxcXw4d+0aROCILNmzcrIyEAQpKqqqqmpycTEJDIyEkEQRUXF6upq3FG4wNi0UlJSNm/ejCCIm5sb1PWWlJQkk8nYQm5ubl6wYEF0dLSysjLULy4sLOzo6FixYkVWVhYu86qqqlih4cjIyHv37uHCfPr0yd7eHsEoO0Pa2tp27tyJ3XPs2DGYEyx5eXljx46lLzd9ff2EhASGRers7KyhoVFcXGxoaHjgwAG4Ey3Mq1evQi3mmzdvXr9+HUGQc+fOPXjwgGFU34RCodDrMvf09NBoNFQTmUqlflMBGQaDCt04KWQmoMrIOB1qrCIzFhqNRqPRent76f91584dJukODQ2h9xKaTwRBenp64OfdTwLvDYaS38i3VL8HBwfhKbe0tOD+1dvbO1ycv5xvXj4SiUQkEunvlpHH/F9jdH7zDefPgvNASUxM9PLyEhMTMzAwwBmIYGPj4eHBvVynpqZizVOg2yoKzljk3r1779+/j46OTkxMvHbt2nAv5nA0KDEx8c2bNxEREZcvX16wYEFTU5ONjU1JSUlpaWl/fz98Re3o6MAar+Tl5XV2dr5584a+HLCeKTdv3jQ1NV23bl1iYqKAgEBUVNTXr1+rq6uxDjI8PDxfvnxJS0urqqqqrq5OT0/HHiUkJERvN4OCerIICQk9e/bs7t27IiIiXl5eycnJaCHHxMS0tLTIyspKSkrq6+tXV1djvWxwmd+2bRtD224AwPv3701NTQMDAzMyMmxsbAAABw4c8Pf3T09PX716tZ+fX2ho6O3btwEAO3fuPHPmTGhoqJqaGm7eytDQ0MuXL/v7+1+9eoXdz+QDAgCwe/duCQkJBQWFffv2wV5NKpWKFib8VAoLC4PrKJqbmzMyMjIyMrB9UCOHg4MDztPBIiAgwMbGho4VsbOzMxdeR4PBb+5vKoCjoOvc4egRPJCdnX3s2LEMLR6h1zFDSfQjR47ExsYiw8hFcXFxofcSmk8AgICAwAinYzAHfpEPJxPPfDk/Nzc3POUJEybg/jVu3Dgm0vO/lm9evjF
jxvDw8NDfLSOP+b/Gf069DGt6wMXFRaVSyWQyej8xMXCg0WgDAwMjqWUgOIMC3E+GMPQroFAo2JoOdWno7+9n2GuPyzPWUwJ+Q9A/PMwdJHBHMQmM5o1IJPLy8g4NDcFHEVfIuBwO55xApVKdnJzOnDnDMFeNjY1CQkLokD68rGxsbNCfFo2QSqU2NTVJSUn9kjoUjZODgwNeGvq6homNBgsWLH4T/nMt3+8My68AR2dnZ0FBwQ87u7JgwYIFQ1gt3+/CwMBAaGgolUpVU1MbTlGCBQsWLFj8PKyWjwULFixY/LcYtRouLFj80VRWVuK0Q+fNm8fLy8tEaH84vsuaAPw9hgNUKjUrKwtujxs3TkZGBrfs+ps0NTVJSkr+QNJVVVW5ubkTJ05cuHDhD8wB+eXU1tbW19crKyv/2xn5TzM653ZCqFTqx48f/fz86FWRfjJahhMpfxXd3d2RkZEjD48KVDLZTyaT3717x1CLkolw83AxDwd9ybS2tiYkJHxXJJCWlpbvmg9Jo9HQ/GNXeo0cEokUGxuLzudEECQ3N7e0tBQAEB4ejl2PzHDPL2fChAnm5uZPnz6Fs5OCgoJKS0uHE9pnzsitCSB/h+HAL5HSpV+7yRwEQY4cORIYGLhy5crBwcHNmzdjbR/+FWg0mo+Pj5eXF/2/vil7+0sO+Q2T+FcYzS3fsWPHFi1aJC8vP0JTghFe46GhISYrc3+ekXvQIAgSHR0N59Yz2Y8gyPXr1zs7O/ft20ffZjMUbh4uZuZgSwYW5tu3b793wg48EC6NYL4mFws7O/v48eOXLVuGlYL8Lpqamvbv34+uOsd6vqCePsPZBv0d8PPzy8nJycnJKSoqLl261M7ObsqUKcMJ7TNn5NYEkOEMB169eqWjoxMYGOjn52dvb0+hUK5cuWJhYfHw4cPLly/39fUxNxxApXSNjY0PHTrk7Oz8XWchKSmJIMh3GYw8ffq0oKDg5MmTIiIiK1eu3LZtm4mJyXcl+sthZ2fHqe1A6urqhhPYG44fOOR7wRqGjDb+hTWE/wjt7e1qamojDz80NLRly5a/LTvfAYFAkJaWHu6/T58+xe0RERFhGBLd39nZCRfA+vj4XLt2jT5kYWEhPz8/3KZQKPX19cxj/ia1tbXm5uYIgmRlZW3YsGHkB2KvQkNDw61bt74rXQBAdXX1dx2CZfHixdjDbWxsfHx8GObtn0FfX9/R0ZFCoVRWVsLXEQRBXFxcLl26xDD8nj17bGxsGhsblyxZ4uvriyDIoUOHysrKLCwsnJ2dm5ubp0+ffvPmzYaGBi0trcuXLx86dGjbtm0Mozp//ryZmRmJRAoPD584cSJcn15WVrZq1SoYAOpvPX/+/PTp0wiCWFlZeXh4dHd3/x975x3XRPL38aEXkSJFUSwUERQLImIDG3aqgqKo4IkFERt2sYKKvdEUVAROQBE8EKUJSm8iICC9hd5LIBCSzPPH/G6fvU2CnOVOubz/8BWX2ZnvzJbZ3Zn5fAoKCnp7e5FWA0ERAmlO1tfXk0gkQ0PDa9euQQhJJNK5c+eCgoKQnGxtba2Hh8eTJ0/8/Px6enpu3bp15coVAwMDtGa8u7t78eLFg2/AFStWXLx4Efsves1FHYaFhcX58+cXLFiAFCHw4MulUCj79+83NTXt7+/PzMw8cuQI/q8dHR12dna7du1auXKlp6fnuHHjMjMz6XS6jY1NZWUlIVsqlerg4PDs2TNzc3NLS0s6nX7kyJGoqCgzM7POzs6bN29qaWlFREQMvkGwXZgr7uXl5ezsvHz58pKSkgcPHkyaNKm8vHzt2rUlJSUQQpTbrl270AcSLM/Ozs59+/ZZWFjs379/8uTJycnJGRkZYmJi/v7+LLULfmmG7Dufn59fU1OTv7//wYMHDQwMAADMMtN43WfMFObDhw8qKioAAC8vL11dXWYR57i4OCRByZxhRkaGo6MjEqVE7jB48FLOSAPT0tLywoUL2tra6FWD4EH
Drl6ELV/0HJGQkBAWFu7q6oqKimLn+wX/jnBzdnb2uHHjampq7ty5s2TJkra2tuzs7HPnzmEtg6lXAwBaW1tv3LiBSR0StL8JqtzYUaDRaKNHjw4PD/+Kjy3szHcwCM47cXFx9+7d8/T0RJ/CCJ4vmKcPS9uge/fuPXjw4PTp0y4uLiyP6TeSmJiILJZYujURYCkrqqSkxM6awN3dPT8/n50HGzIccHV1xQwHuLi4Ojs7U1NTnz59ij6fcnFxMRiMmpqalJSUuXPniomJ4ZWuWFpzEKR0B1B5vXPnDplMHjNmTG5uLrJ8EhYWrqmpGfxncBKJhHdpR54e9fX1EydOHDFixJkzZyZPnsz8NR5fbkRExPHjxzMzM7m5uSsqKvbv34//a2xsrLy8PD8//5s3b7Zv366rq5uXl8fNzS0nJzdu3DhCtp6eniIiIkg0AzBpCA9G9pbQINguhIIIEsQ7duyYNm3a2bNnb926paiomJmZ+fHjRzMzs2PHjnV3d+PzjImJIUgHY4YhLLULfm3+5Z73h5Gbm4ueT5OTk5E2WGlpqbq6OoTw6dOn+/bto1Aoixcv9vX1PXTo0MyZMyGEmAYY+lFRUTFjxgwI4ZYtW/744w8IoYqKSnNzM4RQQkKCOUMIoYmJSVJSUnNzs4KCAnNIhYWF0dHRVCpVTk6OwWA8fPjw4MGDEMKdO3cGBQXl5uYaGRlBCJuampjf+fr7+6urq6urqxctWoR+IE0pCKGUlBTLFsBvp9FoXl5e06dPP3ToEMu24ufnv3379pUrV+bNm8cyBwIrV65MS0vr6emRkZHp6+t79eoVeltCLZOQkGBiYgIhzMjI0NbWhhA+f/58x44dfX19SkpKSE3K1NTU3d09JSUF1drLy+vAgQP4o4BanvDeMDDgz3e+sWPHdnR01NbWTps2jTlZSEgIiURCZ0h/f7+amhpSyZoyZUpFRcX69es/fvwIIbSxsUHvfMuWLUPialhsaEt6ejoSbEN/ys/PJxzTwUfOEhMTE+yt9/Pnz+jHAO98EMIZM2Yg/UwNDY2MjIzU1FQIoZOTE8pHV1cXSbjp6emlpKRACJG/OXM+6J0P/dbX10ci1CUlJbNnzy4oKHj9+nVcXByE0NfX19jYWF1dHd2mEdg7H4QwKSnJyMjIyMjo3bt32DsfhPDDhw98fHy1tbX9/f2PHz9++/atnJwcjUbr6enZuHHjzJkzS0pK1q1bh31+wFBWVm5raxtkAxoaGp46dQr7b3Z2NgCgra3Ny8sLvaoSXusRzOVu2rQpLCzMzc2N+a/u7u7Y4cjPz1dXV//w4cP79++ZgzEzMwsJCYEQvnr1Ckke/vHHHyEhIUZGRtHR0dglM/gGwXYhcO/ePfTGj/Hx40c5OTn0duvs7Ozk5MSuspiAYklJiZqaGvzr9TiUGLLvfBjYiwtBZppZ95kdBBFn8KceErNu9ezZs4ODgyGEaFiFAEHKmbA7wYOGQGNjI/IZKS0tRT8KCgoG3wg8PDwWFhZxcXFhYWEsE/wt4WYAgJWV1ePHjyMiIrS1tUNDQysrK9HUQWbtZrxwM7P298ClCAkJfZ3Fz8DmO3jnndLSUn5+fqTwglqe2fOFuVJoC6ZFDgCYO3duXl7ewFLmfxfMlAAAgL5DfJFt27ZZWlpu2rRJSUnp/v37yHoQXepgENYEGOwMB5B+8apVq5SUlNCddNKkSS9evDh69CimsY7PcO7cucHBwcHBwQsXLiTkiVTjPTw8GhsblyxZgipbVlb29OnTo0ePHjhwQEZGBpmJ9/b2IuMnAAAfH5+4uDizvQBL9u7dGxQUhI0WR0REbN26FSmwM4eKwVyura3thQsX0FnBMioEGhy9ceMGy7FYgtXG58+f3d3d9fX1BQUFIYTYoRl8g2C7MMfv7++PqpacnAwhDA8PP3ny5IEDBwAAsrKyQUFBqAGTk5PZVYdEIqGb2CCb+pdjyPZ82DnNfG2j/0pLS0dERKBPUunp6XQ
6nWAKg8bG2OXJcruxsbG8vHxBQYGvry9zSIRzmrA74cIg7Dt69GjkMzJlyhT0A80XIDDwaSooKMjSc/Ur7rAGBgYxMTElJSXIAAybMTHwHZbZWIeHhwe5cWKtja9CW1vbxIkTv1gvfNH4pmN5XwN/dd4ZMWJEbm4u+paIWp7Z8wWrBeEMmTJlCpr6gULFTwjCisa8Rv8u2dnZmZmZUVFRSFMfQRDaZ4ZZVvRvWRMg2BkOvHv3jkQivXjxwtfXd+vWrcOGDUtMTMzKypKQkLh8+bKxsXFVVRVgYzjATkp3AJXXgwcPuru7r1y50sHBAX1FJ5FIOjo64E+Z1i+2oa6u7smTJw8ePPjmzZs7d+5UVlY6OzszGIyUlJTc3Nza2toPHz4gBXb8XszlzpkzZ9y4ccioAf/XiRMnJiYmJicnY73+kSNHJk+ezHKY4ODBg6GhoXZ2di9fvqyqquru7sZrCCsrK39R9pYQGKaUSygIL0E8duzYU6dOTZ06dcuWLREREXfv3jUwMJCSkpo6dermzZvHjh3LXFm8dDD40zCkvb39i639i/GN74w/La6uruPGjauoqHBycpo4cWJ1dfXTp0/HjRtXWVlpZ2c3Z86c9vb2Y8eOjR8/fvv27ejTjY6OzsmTJ9va2rZv375x48abN2+OHz8+KytLS0vr7NmzZWVlkpKS/v7+yG4tPj6eOcPDhw/r6OiYmJicOHGipaWFEJKfn9+UKVNu3ryppqbm4uKye/duPT29mpqaRYsW7dmzp6+vb+nSpVu2bHFwcBg9enR2djbLehEmjERFRXFxcYWGhtLp9PLychUVFebt9fX1+vr6yDu7qKiIkGFvby/yV0PPiSxzZhnJyZMnUZAmJiZoABxrmba2NgUFBVdX1+vXr8vLy5NIpGPHjqmpqbW0tDx//tzKyioyMvLUqVM0Gq2/v3/GjBl2dna2trarVq0ik8nYUYAQGhoa0mg0fL3YgVxhAQDnzp3LyckRFhaOiYkJDQ0dNmwYGtXHs2fPnuXLl7u6usrKyiYkJJw6dUpHR8fBwUFFRcXb2zskJGTKlCknT55cvnz5vn37Pn/+LC8vjyZKoNg+fPiAbTl69KiTk9OLFy/QOB/hmIaHh0tKSmLfpf8Z0MdkBoPBUrl/YGuCfwXUPsgVgU6nU6lUzJGAwWDg3QmuXbvW2NgIIaRQKMhWZZDU1NT8LR8DQrkQQvwUD+a/Yjx79oz5Cy1+x87OTiwSVAs6nY4OCsEcYzANMsDRHNjBo6Ojg2V1nJ2dHRwc2tvbsb+yNAwZAvzXNVzwus90Op3BYKC1rn19ffz8/Oib5CCzghDevXt38+bNzc3N9fX11dXV5ubmhDR4KWeWGsqdnZ3Dhw+n0+lfN6SM8mfejqxB2TnqfR1IuBkAgLS/CX8dQLiZoP0N/6rKjR2F8PBwfn7+JUuWAPb1+mpQhj09PWgJRHd3t5CQEJ1OR0e/v78f1Y6w8Bl/huBrys3NzU7v29vbe/Pmzd9RL/s/S2pqqpiY2CC/Sfzz5OTkXLlyRUFBwcHB4d+O5ev570gH/9d7vu9IY2Pj/Pnz7e3tRUVFm5ubV6xYwTy/i8MgqaysLCsrW7x48b8dyDfR09PT2trKUWH9L9DV1RUbG7t69epfdxrkf0o6mNPzfU8aGhpSUlIkJCTmzZv3614AHDhw4DC04fR8HDhw4MDhvwXnvYQDh5+RgWWjs7KySkpK5OXlRUVFZWRkxMTEGhsby8vLIYRiYmJokXhfX19aWlpzc7OqqipaRYoUWbm5ucePH4+ZjLe1tSF5Ui4uLjk5ObTO/e/CrMLc0dERFxc3ZcoUKpXKPDhXX19fVlaGpa+oqPj48aOUlJSSklJPT8+ECRNCQ0PRiu/B8HNKbH+Rv6UkXlpampeXJy4ujma3fi/CwsKWLl36H/RS5gy8c+DwM8JONrqlpWX16tUJCQmampqtra2rV6+uqKgAAFy5cgWpvaAbdEZGxooVKzo6Omb
NmpWYmLhq1SpxcfHg4ODffvuNRqPFxMQYGhrev38f9ZQBAQE7d+5kMBg+Pj6ampp/V32GWYU5Nzd3w4YNEydOjImJYakt2dTUhAmj79u379atW6qqqqKiotu2bXv16hWzAOzXtdW38F0ktgdm8EriGRkZ4eHha9asef/+/WAkbFCGg4nhH1Cg/Un512aV/nioVGp4eHhmZmZeXt4XE7e2tqb8SXFxMbup/N+dnp6e0NBQ/Ja6urrExMR/pvS/RXt7e0hISGlpKaYnAiEsKysLCgpKTU1FU59JJFLKXykuLkYpa2trkfrid4REIr19+/b75skMy8g/fvz4/PnzjIyMoqKixsZGVFm0OOx7gQmpMBiMsWPHvnnzBkK4fv36CxcuYGmSkpKSkpIKCwv37NlTVVWFNpLJZHl5ebRWB4E0XLy8vDDhTTKZPGPGDFdXVwihp6enrq4u2q6iosIsa/JFMF0ShLu7+4YNG9Dvx48fD7Cjq6vrkiVLsP92d3cjqRQEJgD7RVi2lb+/v76+/vPnz58+fXr+/Pn+/v4LFy7s2bMHCa90dnaSyWQLC4sHDx6sXr368ePHNjY2+DzxojOenp5aWlqDieQrOH78OEFbh8DevXtjY2O/IkMO7Biy73y9vb06OjpSUlItLS3GxsZfTC8mJpaYmIjEqMLCwtTU1P6uR8/XUVFRsXHjRvwW/OPwzwPzUzxkZQHz4MGDDx8+9PT06OjolJSUlJeXYzOkL126NJjVx4OHwWCEhYVdvnwZ/ffH2akQImd+66qtrS0tLZ07dy5eH+R70d/f/+bNGwaDMX/+/L6+vqCgoLVr12J/nTt37qxZs+rq6hobG5WVldGhSUlJ6erqwiuJ2NraErIdNmzY7t2779+/j99IIpFaWlrwHyfxGqfMwqT9/f2Ojo7Pnz8nvA+tXLny1atXVlZWXV1dyB6BRqPduHEjMDDQ1NS0vb3d2dl569atAICnT5/iqyMsLPzbb78xC8BSKJTbt29fvXrV0NCQSqUOpq0AABoaGl1dXSYmJhs3bkTik+j78K5du1paWp4+fUqj0U6cOLF169aioiJLS0vmZUiI6urq0NBQExMTAACDwTh69Gh0dPTGjRu7urrq6uo8PT29vb3RGfLkyRMXF5cVK1aUlpaifTs6OjQ1NYODg1NSUsaNG1dSUtLZ2fnbb7+1tbXt3bv37t27DQ0NQUFBb968QQKqERERu3fv3rBhA1Z6aWlpenp6bGzs77//rqiomJ6ezqxMW1hY6O7uvmfPnrNnz2IZ5uTkIGmhnJwceXn5zs5OgtQwpknLnGFbW9uRI0euXr2qo6Pz6NEjdg3+6zJke77Kysrq6upp06bp6uo6OTmh2yKNRrt69aq7u7ufn5+pqWlTU1NsbOzChQuzs7O5ubmnT58uISExZ86c/fv3a2ho+Pj4AAA+f/68cOFCb2/vZ8+e7dmzBwDw8uVLIyMjLy+vU6dOoS7q8OHDrq6u69atc3Nzs7W1rampGXycqqqqhI/sU6dOtbCw+J5t8T1ITEwUFxdXUVGxsrJCAzAsLWBWr169Z8+exYsXc3Nzz5kzx8zMbMeOHQCArq6u+vr627dvf8eQUBHo95s3b/6ua90gYY58z549c+fO3bt37/jx45ctW+bt7d3T0zN9+vRhw4YhxZnvCEE2uqqqikajSUhI4NPw8fEtXLjw+fPnHz9+fPbsWWJiYllZGSENy+WGysrKmFx1dXW1hYXF2rVro6Ki8Np7FApl9erVBgYGvr6+PDw8BK1nggozxvjx45OSkj59+qSqqpqSkgIAuHfvnoKCgomJibW1dXt7+8yZM9E3VZah6ujoVFdXAwAwUWZm9eovthX4MRLbBKVpvKI0QSoa7S4mJmZiYlJSUjJnzhw5Obn29vb+/v4VK1ZISEgMUklcUVFx7NixS5YsMTc3nzhxIjc3Nxq4NTAwcHNzQx+ZT5w4MWzYsBkzZjg5OfHx8aEMp02bJi4uTqP
R0GMEhHDWrFkdHR179+61trZ+9uzZhAkT0EpT5gyfPn06YsSIo0ePdnR0LF26lGWD/9IM2Z5v0qRJ4uLiCxcuLC0tNTY2FhUVBQCcP39eTExs9+7dGzdu3L59O5lMXrx4cWlp6fTp0/H75uTkZGRkIIcHVVXV0tLSrVu3rl+/fv/+/QCAWbNmNTQ0WFpaamhonDlzBgBgamq6Z8+e2tpaAwODEydOoI8keAjmAO/evXv8+LGDg8ODBw8AABBCT09PY2PjmzdvQgixx2H8c9znz58VFRWTkpIYDMbu3bsrKirwRhMAgHv37r1+/drR0ZFQNEsDAfyTqaWl5ZEjR+rq6mbPno06j927dzc0NBDyYX6K9/f3R3pOiCVLlsTFxWHD9RhIOtLPz8/Z2bm3t/f9+/fMB6u3t/fw4cO7d+9etWpVQkIC4cEZq1pWVhbeRgNfRw8Pj/T09KysrKioqPDw8NOnT2MP3Xi+wlqBEDm7ty6W+34706dPP3Xq1KtXrxoaGuzt7ceOHcvDw4Pk35hRUVHZtm0bmvlSWVnJfB4SyMvLQ/dEAICcnJyTk1NNTQ1hnAmvcQqYtGrj4uIUFRUBk9gsmUyeNm1acnLyxo0bjYyM6HR6UlISurMvWbJkwoQJmECEvLw8y+oQFCQyMjK2bdtmbm5eWlpqaGg4mLZCG3l5ecXFxSUkJDAxgeLiYn19/fPnz2N1x5OcnGxsbGxsbIydqDY2NtevX3dxcTE2Nq6rqxMUFJSUlAwNDUW6o/r6+l1dXZqamgICAhkZGatWrTI3Nw8LC7t58yaWp4WFha+vb21trZKS0pMnT/744w9UBZYqGUhYQ1xcnOUJSVAhxuRhkWXxzp07+/r6BhCsYNaVxWIgZDhjxoyYmBgymaykpIRNhhpKDNmeDwAQGxurqKg4depUbOzd29vb1NQU/V65ciVaaY4X2Wttbb1y5crdu3fl5OSwE4iLiys1NTUyMhIpHHJxcUEIOzo6QkND0VJr/DPy6NGjme/++AdnKpXq6Oi4bdu2I0eOIB0TBoOxefNmDw8Pd3d3Li4u7HEY/xw3cuTINWvWIA8UNTW1UaNGOTk5CQkJKSkpnTt3rrGx8cmTJ9ra2vg7MoL5OZ3wZMrS2gb5ueBhfopnaQHD8n0XQtjY2Dhy5Mi9e/eyfO0TFBTE3F54eXnx4eGrNmPGjI6ODgDA4sWLW1pa8HXU0NDQ0NCYMWNGcHBwS0uLnZ0d80y8jIyMt2/f7ty508HBwdHRsaioaGC3GpaRs3vrYt7322GWjRYUFLSwsMBbL3V1deFthtra2mbNmrVo0SJ5eXn0XIUoKyvr7+/HZ9jY2Oji4nLu3Dnwp3CrrKysv7+/paVlfn4+lgyvcYqPDQ4oNnvr1i06nc7NzX3y5Mnm5ube3l5ZWdnnz5+jgD99+oTGWgAAu3btevz4MTbJAkKIJprCvwrAMgsrEwRR/zGJbYLSNEFRGi8Vje07atQoRUVFe3v7Bw8evH//vru7G33mwRrhi0rikI0KMfa7q6srPT0dAFBRUVFfX49liOniIkW0wWeorq5uYGAQFxfn4+Pz3Se1/gwM2VUNZDJZSkrKx8fHyMho06ZNc+bMmTp1ant7O76fY37mGjFiBBKxvHnzpqmpKWYmgG7E2JtQa2urra2trKzszp07BxPMvHnzgoKCpKSkaDRacXExulEKCgpu2rQJhSEoKMjHx4c6PCwq9BwHAECl7N27d8uWLdOmTVuwYAFmNIGNTJiZmU2ePPnBgweTJ08mlE540KupqUFPpti+tbW1tbW1PDw8/v7+ixcvZvkGgz3FHzt2zMjIqKamZuLEifj3g8LCQgCAkpIS877R0dHNzc03btzo7e2Njo4uLS1FLwp4eHl5JSUlAQDYg/NgqsbM0aNHjY2NX79+jb/vI77CWoE5cuyt64uz/7u6utC7u66uro2NzRcjJ4Bko7u7u+/evVtdXU0mk69duwYAuHv37pEjR7Zs2bJ
kyRIajSYsLLxp0yZzc/Nx48bNnTt34cKFSJQ8PDzc1ta2rKxs6tSpFApFRUVFVFT09evXxcXFd+/eBQAgT6V58+Y1Nze/fv06Nzc3LS1NR0fn1KlTenp6AQEBmpqaAAApKanTp08bGRmVlZXFx8enpKRUV1cjrWcKhXLo0CEDA4P+/v6Ojo6qqqqmpib0MNTV1bVnzx5dXd2IiIhLly4NGzbs8OHDurq6KSkpmpqaTk5OTk5OhYWFFRUVW7Zs6erqMjY2NjIyEhQUpFKp5ubmb9++bWlpSUhIUFNTQ6LMBw8eXLly5du3bzU0NBwcHCIjIzdt2kQikdBNmV1bYRLbFArlyZMnr169QoNbmMR2QEDAuHHjMIltNTU1rP3xEtvi4uLx8fFIYrusrAyvND1//vykpCRFRUX0ydfNzU1DQ2PevHknT57EH83du3cXFhYKCgrq6+ujj4dISZyXlxdTEt+7dy9SEh85ciRSEkfxNDY2FhUVoef40tLS2NhYcXHxxsbGpKSk7u7upqam0tLSixcvGhgYLFy4cPny5b/99hvK8Pz58+vWrTt8+LChoeHIkSMTExM7Oztra2urqqrevXtXUFDw8ePHoqKiyMhI5gzz8/M9PDyUlJRCQ0NtbGzwLTNE+P6TZn4OHjx4gEnHTps2LTw8HEKor6//4sULLA2ajignJ4f+GxkZiSkjP3jwAPMYwxKgJz4SiTRnzpy+vr558+bFxMRguWlpabETq12/fn1qampBQcH8+fMbGxuHDRvW1NQEIURzOJH1eX9/P/qRnJy8Zs0aCOHkyZPRXMHy8vK6ujoI4YoVK5D9W319/dixY9EzbFpaWkVFRUdHx6dPnxQUFJiVfDE3Mjs7Oy8vr4CAAD09PZQMud/duXNn2bJl+fn5GzZs2LFjB8sqXLhwAUkht7a28vDwkMnkqKgoNDyAEly9enXr1q1Yen5+fkwd287ODtu+bds25GVIAPM5I4RHIpHwVRs5ciSDwcjLy5s+fTqEMCsrC01KvHjx4q1btyCEubm5/f39tra2zH7uiYmJY8aMQb8XLVpUXFxMaBnmqFhG/ttvv5mammLb0cNETk4O5msPIYyPj2dWyv6O0Gi0mpoa/H9JJBLzhGR0I/vGstDrAjuNZoIKM1YuhLCpqQmZuWPghZIJIK8+5u2YKDNBrPnJkyf/2ARs5pAwpWmCojRkLxWNLh9205u/XUm8r68PnzmWYW9vL/yr4vZg8Pb2rqysLC4uTklJuX379jfG9hMyZL92jh49evv27V5eXteuXZs4cSIaFnJxcXn+/Lmnp+erV69evHhBo9Gio6ObmpoyMzNbWloCAwORz/iFCxcCAwO9vLwAAHl5ec3Nzb///vvz58+3bNnS09Pz/v37qqoqEonk5+dnbW2NvvYUFRVVVlYy+7Aj0INzTExMWVlZUVHR8ePHp0+fbmxs3N/fn5GR0dLSkpiYGB0d3d7ejmyg0eMweo4zMzOLiYlBi1ttbW2nTp0KABg5cuSmTZvU1dWtrKx6e3vJZLK9vT2FQrGysiI4pDB7shgaGmImJujDLLO1DTPoKf758+dHjhxBT/EsLWBQYj8/PyqV+vvvv1MoFC8vL9S8AAAajSYpKYm++eAzp1AomNsL3mNl/PjxHR0d+Krp6emZm5tHRES0t7eXl5dHRESUlZWVlZVpamr6+fmFh4c/evQoOjpaVVWV2at63rx55ubmV65cCQoKMjU1VVBQGNithl3kd+/elZKS2rJly+PHjz08PEJCQkaMGOHn59fT03Px4sW7d+8eOnRo165dzO+13xEeHh78SycPD4+cnByzLrawsLCUlNQ3loXeq5C0NzNcXFzDhw8nfO9FiaWkpIYPH47fjobbWSIrK8tS8A97Tefi4sJi6OnpWbJkyb+lAy4oKMjFxcXNzS0oKMjNzc3Hx4dvHEyKnQD6lsNO2fzb15Lz8/PjM8cyRF81/q6YorOz87NnzwoKCtAE0W+M7Sd
kyKqXIT8ECoXS3d1NGKHt6uoSEBBgdwr+IAjmAD09Pfz8/F88HdEEbnyoNBoN2wszmkAHkUwmE240A0Amk/GXKLImgBDSaDSWo1Yo8ubmZgEBAUIptbW10tLS33esCwuPuWrsbDTQMBiDwaBQKAICAuzadmBrhcFDp9MbGhq+TvGEA4efnO7u7qSkJBqNNm/ePDExsX87nO/PkO35OHDgwIEDB5YM2a+dHDhw4MCBA0s4PR8HDhw4cPhvwen5OHD4D9HR0fHixQv8arPvQm9v79/Vuf4iPyhURH19PVqeOwBhYWG9vb1UKvX9+/doqSs7qqurY2JiviKMhoaGwShQfwVUKjU6Opp5rc5XhzrEGPo9H1KyR9Dp9NQ/KS0tJYhctLa2vnnzJjw8vLq6Ojc3l2VuSL4WANDXR4+PrwoOLigoaM7JISqeAAAaG7vfvatg3v596ezs++OPwsGkbGrqCQr6XFzcim3JyqqPiiqjUGj4NE1NPeh3Xx89IqI0Le1/K9MHvvIHD7sLEsFgMBITE7ElxgNTVlaWmpra2vq/GpWXl6empqalpaHteCgUyqdPn9Dv3NxcCoXyXeoyGMrLy/38/MrLy39E5l8hVXrgwIEVK1Z8+PDhO4bR0tLy5MmT765Z+iNCxRiMOi7yMWhqajp48GBZWRm7ZAT9WHawPFgSEhLe3t5o1fn3pa6ubvv27YTHkUGG+l9g6Pd8RkZG2H2Hh4enrq5u7ty5aDnB9OnTU1NT0Z+ePHliYWEhKSmprKx89+7d06dPM2eVkZGRk5PDxcWVkVG7YoVPR0fvrFmjExNJq1axUIz844/CK1cSvz1+Oh12d7O9MMrL2zduDPxiJqWlbadOvY2JKZ8zxzM2tgIA8PvvnzIyamfPHmNp+ZJKpQMAcnMbdXQe5+c3AQBoNIaFRbC6+qjKyo5z596hTIKCgr69OiwvSDAIoxZmRo8efebMmZUrVyKVCjk5ubNnz0pJScnIyFhbWz958gRN73zx4kVRUZG0tPSiRYvS09NramqWLVuGt9T5CgbZ5TQ1Nbm6umpqan78+PFbimMJiURiaQA0AC0tLQUFBSIiInv37v2OkTg5OVlYWBCW03wjPyhUjMGo4544cUJSUnLMmDHMqkx48Pqx7GCnK8vPz798+XKkLfB9wVswYgwm1P8IQ1bDBfHu3buRI0feu3cPk9GbOnUqDw/PggUL0F/d3d21tLSys7OPHTtWWFiI5u9evXr1xo0bhKyoVKq/v//169e7u/vXr3/+5ImxtvY4AMD27ert7b3MRc+cKRsU9Pnbq2BvH7N587QpU6RZ/nX69JHDhn15gn5ZWduDB/oAABmZYX/8UbB48YTz59/l5u7h5+cZP14sMDB/06apamoyWCnh4SVCQnwyMsNMTScfOhRx4MAcWVlZCGFSUhLeffQrYHlBAgDs7e03b948ZcoUOTm5QWYlKCiIxAT27Nnz8OFDPj4+bW1tBQUFAICioqKioqKGhgYAYPLkyUhvTEpKSl1dff78+e3t7cePH0dS2l8BlUrdtm3bixcvvpgyLi6Ol5dXSUmJpbTNF7l3756AgACJRBo1apSpqamhoaGnpyedTjc0NMzOzsZ8DKqqqvz9/RcuXBgUFLRz505ra2u0MTQ01MrKavTo0Rs3bjx69CjyIG1sbAwICNDX109ISMjMzJSQkEBCz+fOnSOTyZWVlfv373/8+PGECRMAAPHx8adOnXr//n1paemLFy/q6urCwsLQujEzMzMUZGFhYWtrK1o9FhkZieV59+5dHx+fwMBAZWVlaWlppGni6elJCPXJkydkMjkkJMTV1TUuLg6ll5KSQl67KNSHDx9i7bB9+3Z7e3sUqoWFhaOjo42NTXp6OoRw2bJlERERqqqqx48fj4qKotPpiYmJlpaW2MJKbKOFhUV4eHhaWpqXl9fFixdzc3NnzJgRFBRkZ2dXU1MTFhbm4+PT399vZWV15coVdBYBABgMxvHjx5c
vX/7w4cMHDx4UFRVhDYtE4Gg02u7duxkMhpubW3Fxsa+vr5OTE/hTV1ZcXHzu3LmioqIeHh5qamqRkZEXL15EVsDh4eE7duxAix0JpaDFPO3t7bt27WJ5UHp7ex0cHFRUVBITEzdt2qSjoxMXF5ednS0kJISe+AsLC2NjY3NycqSlpc+fP084x9zc3NLS0lxcXE6ePCkqKnrhwoUnT56MHTs2ODj43r17X3HS/jL8a2vo/xEcHByqq6tHjBiBaUmUlJTw8vJCCHt7excsWHDt2jUIob29vYmJCX5HZqmFBw8ePHnyBEIYHV0mJXX1r4lpEMLS0tbjx6N9fXO2bg0mkToyMmpnz/a4fj1JR+dxTEw5hPD69aQ3b4p37AjJzKzLz29SVLzj45NtaOh3714qPreCgmY3t3Rr61dnzsTW15OVle9du5ZYU/O/+Ol0xpEjkVFRpWZmgZ2dfRBCQjADc+1aopdXVk1Np7LyPbTl/v2MvXtfo98mJs/evauAED55krV06RO0cfp0t4yMWghhd3f34sWL2eVMp9OPHDkSFRVlZmbW2dkZEBCwaNGie/fuaWtrJyQkQAjRGnAPDw9JScmKigr8vvX19crKyteuXaupqdHT0zt//vyuXbvWr18PISwoKHBzc7O2tj5z5gxzoY6Ojg0NDWPHjkU+c05OTn9WxOTKlSs0Gq25ufnu3btoo5ycHIrkyJEjSCUHT21t7bJly06ePLlly5a5c+c2NTWRSKRz586hezSFQrGzs9u1a9fKlSs9PT3FxMT8/f0JuhgUCuXkyZPe3t67du16//59fX29tbW1np4ewX9xkKSnpyPPLAjhqFGj8vPzV65cmZWVBSGcMGFCe3t7QkICOmmzs7ORdVx5ebmwsHBnZ+fixYt9fX0PHTo0c+ZMCCHy6oIQVlRUzJo1C0LY19enpKSEVEVMTU3d3d2dnZ1tbW1Rcfv27UOKNgYGBmFhYRDCefPmkUgkZGBLp9PxhpdPnz5F4jvMeebm5k6dOhVfKUKo8fHx5ubmvr6+q1evPnjwID49FipzO+BDnTZtGjqXxMTEuru7KRSKsrIyhNDa2trX17etrQ2vdIPfmJiYqKenByEMCgpCzoI3b948cuQIhPDkyZM+Pj4QwmXLlmVkZEAIjYyMEhISKBTKo0ePIIQmJibv3r3DNyymJVRWVqaiokKn0wMCAkpLS7GiHR0dnZ2dIYRr1qz5+PEjhNDFxcXMzAz9dcuWLUhNCULIXMrAB+XatWtIvSgvL09WVpZKpaqpqSF1mylTplRUVBgbG3t7e9+/f5+fn7+lpQULFZGSkmJkZAQh9PLyOnDgAIRw9uzZnz9/Hoyn6S/NUP7aSSKR5OTkxowZs3TpUrzFFIPBOHnypJKS0rFjxw4fPgzYWKUQcsvIyEAfPcrK2iQkBP+amAcAsG/fmw0bppibT9XSGnPkSBQAQECAx85urq3tbD+/TwCA4cP5ly1TnD59VEhIoaqqFJVKNzCY5Oam5+GRic/txInoYcP4Z8wY5eSUwMfHPW6c2KpVE0eP/t86biqVrqoqraurQKMxMjP/3tgbjcaoquowN59aX08WFv7fwnNhYb6Wlh5CymXLFPPzm9LSauLiKhsauuXlxQEAwsLCNTU17AbkCe4tBD8UGo1mY2NjY2NjZWWF9GjwDGDUgpftxob08MjIyAQHBx8/fhyJ6WAkJiY6OzvfunULP9AbEBAwYcKEESNGBAYSPxHLysqKiIhoa2t7e3srKyvfv3+/p6dnwYIFenp6r1+/FhAQwDS1t2/fLiQkhPze8Dk4OztLS0tv2bJl3759ZmZmMjIyWlpaKioqenp6LFtsYJhVRtml5OHhQW8GqGrZ2dlI0/XGjRtokIyXl5eg319ZWdnV1YWkABYsWPDp0ydMNxUAwM3NjaomKiqK6Zo2NDTgfQmwrNra2tDFMnCeLENFZvGYuQFzepbtwDLU4cOHCwgICAoKopPk6NGj169ft7G
xwa/Cxm/EZBDYVRYw6foSXBpYNqy8vPz06dNDQkJqa2vR5wd21UFNhDYKCQk1NjayK2XgOLEMVVVV+/r6Pnz4wM/Pj9RtkPjOIG0cMC5evLhs2TLkALN//35jY2Nra+sv7vXLMZR7vt9//72mpubGjRvS0tJ3797Fbtnc3NwODg4TJ07E5PnZWaXgwa5weXnxysoOGo3YAXz8WC8oyAsAWLBg3KdPDQAA9B1SQkKora0XADB37lg/v0/NzT1oXx4ebgEBXlFRgc7Ov6jO19Z2bdkybedOjb4++xEjiCrpgoK8kpJCoaFFNBqDOYaB8fLKOn58AS8v9/jx4l1d/yu0q4uqqEi8HmRlRVJTdxQVtfT09I8cOQwfBrshLsIVSxCDLi0tJVyQA4A3ahnMdauhoeHs7Gxqaop3LdDR0dm/f7+jo+P27duxjRs2bLCwsAgNDWUpfIXdl7W1tevr6xUUFEgkUnx8PACAwWCwvC/jIdyD8G4SX8GUKVMSEhLQ77a2thkzZhB09zE9foze3l5RUdGJEydGRESgkdT09HQ6nY4lQE+7AICxY8f29PSgc765uRn7oMcMxCld4H0JsI0qKiqosxl8nlio8vLyLM0N8KEyt8NgQu3u7k5PT5eUlHR3d2e5EcufXQ6AyceA4NLALoxjx45duHBBVlYWvxE7WFh18E3U1taGOTsOphT8dizD3t5eSUlJRUXF3Nzcrq4u8KeBBsHGgZAVdlIh6VEAgKysbHZ2dmho6IcPH+7cuRMcHOzm5sausr8uQ7bno1AoPT09p06dsrOzQ3qSf/zxB/jTkAX5Evj7+yP72W3btqWlpWVnZ2O7FxQUEDLErvBFi1wLqkoAAK/RSURBVCbIy4s/ePD/U87Kytr6+xlTpkgnJFQBAJqbezQ0RmOXDfpBp8MNG55v2KA2apQIZGULgtHVRU1PrwUAVFS019eTubhAf///37w+f252d8/Q11cWFOQl7EunQwAAjUbD3+wwwsNLdHUVRo8eXlPTJSjIKy09DE2cyctrXLpUnjn92LGimzdP8/PL9fQ0wDby8fGJi4sTDGL+DIz1FYt+jxgxgnBBEnZnZ9Qy8HWL2XNv2bLFzMwMGdmAv3rWqKqqoqOMDv3Zs2eHDx+OTIbZQSKRtLS0PDw8GhsblyxZgnbEJ2DZwoR7kJSUFMvb6yAhqIwqKSkh3f0bN24g3f3JkycjHwMAQElJSUBAwJ07d1xdXQmartnZ2a2trS9fvoQQvnz5sry8PCMjQ1BQ8NGjR1euXImKimIwGKampphualtbG/KnrKmpKSgoePv2LYlEQtNlExISPDw8pKWlMasvAMCcOXPQGxIhz61bt0ZFRRUXFxPeVvGhEjRasfT4UAntMGbMGCzUkpKS6urq5OTk/Pz85ubmyMjI9PT09vb2T58+sZRvxTYiE4nCwsLS0tL4+Pji4uLa2tqUlJT09HQk4pqTk5Obm4t8DCorKwsKCmJjY/n4+PAuDenp6ahhAQCYfiwAQF1dXV5eXl9fH19rTFf29u3br1+/joyMjIuLu3TpEvorlUrFDHIFBATwpaDXvgEOyr59+3p7e58/f+7h4fHgwQNpaekjR47o6ek5Ojp2d3cnJCQQ5H/xoQIAZsyYUVdXd/jw4YKCgqKiou7ubgcHh4KCgiVLlgxBfwY83+276c9Ef3//jh07Dh48iEZimpubly5dqqqqWlJScvbsWQAAGrFLSkoSFRVF3/SROfulS5e8vLxcXV0xnwGMrKysCxcuoN/l5W16ek/t7CK8vLLc3NJjY8shhHl5jcbG/hERJWfPxlZXd16/niQvf5tE6jh2LEpNzbW5uWfqVNddu0Lt7WM0NO7Hx1cKC1+MiSkPDS0cNuxiSUkrVlBw8OdRo65v2PD84cNMCOGZM7EmJs/y8hrRX0tLW8eOvXn2bKyp6TNz8xdxcZU8POdjY8vLy9tUVJwhhCdOnFi9ejUh+MDA/FGjrquoOKuoOOvqekM
I372rOHs2Nja23M4uAqUpKmpRULhz7ty7jo5eCOHHj3VHjkRGR5dhmVRVVVlbW0MI586de/UqcXAROficPXvW1NTU3Nzcx8dn3LhxlZWVdnZ2c+bMaW9vP3XqlI6ODhqN9/b2Jux+5swZNLAxceJER0fHsrIyWVnZ+/fvBwcHjxo1asOGDQ8fPiTskpmZOW3aNMxjgUajoaHB7OxseXn5ZcuWubi43Lx5c/ny5a6urhEREdzc3La2tmQyubm5efz48UePHiX4D5iYmNja2v7xxx9Hjx6l0Wh+fn5Tpky5efOmmpranTt3tmzZsmLFCmSaoaOjc/Lkyba2NvzuXV1d69ate/bs2Z07d2JjYzs6OrZs2aKtrV1eXg6/FgqFgh9yJujuIz3+3NxcXV3drq4uvHcBmUz+YuZ0Op2dsQC79ARfAkRoaCgagPxinixDHUwMhHYYTKhkMpkwEMty498C79LALg2DwWBpboDfBe9Z8ebNm7dv3/7dUgh0dnbiTVrIZDI6WOi/BBsH5oCpVCrWLHQ6vb29fZDl/roMzZ7vq2lubh7glnHz5k38Zd/dTW1qIt4FULfBEjqd0dtLQzsOHEZfHw3NmkFQKISZFP0MBoNOZxC29/T0QwjpdDrq179IXx+tro71Taevj1ZVRTSUuXbtWmNjI4Swra0NvUMQ+OIVS7ggmXdnE8xA1y0e1DF8NSYmJu/fv8cb66Dvir29vQRDHBqNxq4WhHvQP0Bqaurs2bP/yRKZef/+fXNz8xeT/Qyh/lCOHz9ubGxcVlb25aQQQggrKirwTmcc/jE4itV/AwaD8erVK319/e+7dOn7QiKRpKSkvruNcmpqqpiYmIqKCgCgqKhIWVn5++b/M7B69WpbW9tVq1b924H8PcLCwpqbm6dPnz7AGNhPwi8U6teBv0w4/Mxwej4OHAD4U9VJWFh41apVaIoNBw4chiqcno8DBw4cOPy3GLJzOzlw+NVBisl/axcIYWRkJPNqxe9CRUVFWFhYXV3dACKWGJhQalZW1ncXsx4YMpkcFhb2xWTMErJVVVVIxvorWp7DrwWn5+PA4ScFKSb/rV1Onz6tqqpaX1/f00NUJ/hGPD0979y5o6ysfPny5cEot40ZM8bU1PT169ckEsnCwuLgwYPfN54BKC4u3rZt2xeTESRkGQyGl5cXEnT9ipbn8GsxlL929vf3x8TEyMjICAgITJ48mfDX+vr6srKyWbNmEdSER4wYISUlVVRUBADg4uJCwnr4BBUVFR8/fpSSklJSUurp6cFUAfE5d3fzNzdTAADjxonJyop8/7qxp7q6s6ioZckSFkv08BQVteTmNi5cOEFSUggAACF8965SQIBn7lw5bP5Ofn7T5Mn/U/Ls72fEx1fKy0sgPZe6ujrCWl1msrKy8Cv/eHl5B17d/EWqq6uLioqWLFnCLkFGRoa0tPTA+sIDM8gcKioq8vLyZs6cSaFQmKU6vj2Mr2bq1KkZGRl4jZXvxcaNGxcuXLh7927wp7w7ACArKys4OFhZWZlEIgEAdu3adfbsWTExsePHjw8bNkxbW3vv3r0bNmwoLS2dOHEimUzm5eV1dHRsbm5esmRJZ2cnNze3gYHBoUOHlJWVBQUF+fj4bG1tXV1dyWQyiUQaMWKEgIDAtGnTvkIER1ZWdjDuIpqamoGBgdiRCgsLCwwMfPz48d8tjsMvx5B95+vt7dXR0UHCesbGxswJkE0JPz9/aWmpjo4OAICHh6ejo8PFxUVMTAxJFDIYDB8fH01NTezBcN++fbdu3VJVVRUVFd22bdurV68I2SI/h5EjRaytX92/nyElJfzdq0bQfMHDYMCwsOLLlxMGzuHVqyI3twx//9ypU12RLdG+feHS0sICArxId41Mpl65kmhj8xqlb2rqsbIKmT17THDw58ePswAA9fX1BLUwZkaMGLFgwYLc3FxeXt7+/v5r1679nVoyV42twQryeQAAlJeXD9LhiB2DyeGLbz/fHgY
AoKKiQldX98OHD58/f1ZSUvL19TUyMkKaDBkZGY6OjtbW1hs2bIiKisJ2efv2bUNDg7+/f3p6+po1axwcHLS0tFpaWk6dOuXj47N79+64uDgAwIEDB4KDg7dv375lyxYAwJMnT1xcXFasWFFaWvrs2bPFixc7Ozvr6OgQju/atWsPHjx4+/ZtBoOBur2GhoYdO3acOHHC3Nz82LFjYmJiEhISI0aMGD9+PH6KUGdn5++//7569WphYWF+fv5x48aNHDnSxMTE0tJy/vz5I0aMQI9Ehw4d8vDwKCws1NDQOHr0qIiIiLKy8smTJ/G6ORQK5fbt21evXjU0NERKzRs2bLh8+bKmpqa/v/+NGzeWLFlSU1MDAKDT6Y6OjrNnz7azswMAFBYWuru779mzBy3njYuLu3fvnqenJ9J07u/vd3R0fP78uZ+f38At39bWduTIkatXr+ro6Dx69Kiurs7T09Pb29vf3/8bDzeHf5p/c0nFj6SgoEBOTg4tugoKCsIvHSWQm5srIiKCftPpdBKJBCH09PTEdF1VVFTQcmlXV9clS5ZgO3Z3d7u5ueGz6uvrs7OzQ79NTJ5dv570PasEIYSwqqrD2vrVAAmysurRWvUBiIz8n5bu4sVewcGfCwubsV3mzPFEK/k+fWpYtOh/i8Tfv68wMXkGIUxMrDpzJhZtvHDhAmEpNzNSUlJI8xdCWFlZOXDiL0IQ28U4fvx4bm7uN2Y+eMzMzLDjjq2j/xFgisljx47t6Oiora2dNm0ahNDExCQpKam5uVlBQYGwy4QJE9CCVBMTkxcvXkAICYrGDAajrq6urq5u3Lhx5eXlycnJeM3o0tJSdXV1COHTp0/37dtHyDwoKGj06NHz5s1DLpWPHj3ClKPhn+vrz5075+npibYsWLDA3Nz89OnT8+bNw6QPPD09rayskpOTDx8+jLZYWVmFhoZmZGSoqKhgKzKPHz/u6+tLCODy5csODg6+vr4KCgovX74cQGxaTEysp6ent7dXSkoqOzsbr9rc0NBA0HR2dXVFTfT8+XOUIbuWd3Z2vnTpEvxTKZulhDeHX4Ih+843adIkcXHxhQsXlpaWGhsbi4qK0mi0GzduBAYGmpqatre3Ozs7b926FSWGENLp9P7+/gcPHhBWwpFIpJaWFrRA5+nTp2vXrsX+JCws/Ntvv+ETP3nyZNq0aexCcnPL2Lbtj56e/gMHws+ciWUw4NGjUdHRZRs3vujqon7+3KykdNfXN8fIyN/ZOe2v2Wa7uKSvWOFbWtoWGJifmVkXGVmK/bW6uvP8+ffBwQW7dhFfQNmxbNn/PtAhaeyMjFoFhf8JdsvLiyM3WvyaRS0tudLS1kOHIiIjS/ft00Ib58yZc/369S+WRaPR+vv7X758WVdX19vbi3//+PDhg7KysqenJ/6lHL8R/4yPCZUBAKKjox88eHD58uWHDx82NDQEBQW9efOmsLBw7969yOosMjLSycnp/v37e/bsodFozK8y9+7de/36taOjIz7Uzs5OlENra6uenp6Dg4O1tTV6McLD/PZDIpFu377t7e29c+dOLJNBHosBwBSTkQgqUkAFAMyePTs4OBhCqKWlxW5fTGWUWU101KhRe/bsOXny5IQJEzIyMvCa0QS1VXyG3d3dxsbG+fn5/Pz8NjY2AID29nb8SULQ70bo6+tfuHAhNjb26tWr2PuxoKCguLg4Xkv6/fv369ate/v27cDfaTMyMrZt22Zubl5aWmpoaDiA2LSQkJCQkJCAgMCCBQtIJBJe/RUJ8OIlZOPi4tCYBXbts2v5GTNmxMTEkMlkJSUlGRkZlhLeHH4JhmzPBwCIjY1VVFScOnUqGrW+d++egoKCiYmJtbV1e3v7zJkzsW+Y/f39zs7Od+/exYQfAQDV1dUWFhZr166NiopCt5gvWjpgfg4smTlTtr29V1iYT11dtquLSnBdYOfekJJSHRVVKi4uyMvL7eKSNnv2mLFjxZYv///BxZ6e/gU
LxunpKb9+XQz/zqgtidSpqTl6wgTxL1o3CAjwXLy49O3b8vfvK/n4/nfOKCoqIkXNgXn27Jmzs7O3tzdgcjNAh2Dt2rXBwcFYeg0NDWzjnTt3yGTymDFjcnNz37x5g6WhUCirV682MDDw9fXFfB4mTZo0evTo/v5+KpVqY2Nz5MiRXbt2NTc3P3z4kGAc0djY+OTJE21tbfxzDABAVFQU5TBixIgRI0ZMnz7dzc0tIyODoD1tamr69OnTa9euaWtro/vs/v37N2/evHXr1gULFgwbNgxlMvgDwQ70cIr/L/phbGwsLy9fUFDg6+s78C6AlZros2fPOjs7d+7cWV9fLykpyVIzmvlEunr1KgBATExs37596IsisoLDJFLxjyYI7E+8vLyCgoLYbEkpKSkVFRV7e/vc3FxUlq6u7q1bt3777TdMEJXlmSwjI4Ou0N7e3qysLELFmdMDADo7O9XV1fHqrzQajSAhKyUlhZ6H4J+KsuxaXl1d3cDAIC4uzsfHR0hIiCDhzVLNlcPPyZDt+chkspSUlI+Pj4+Pz969ez99+pSUlITmqixZsmTChAl4CxJ+fv79+/fb2dk9efJEWPh/I3NycnJOTk41NTWYRfgXLR0wP4fBwOy6wNK9ISOjdtWqiebmU8PCNt28uYI5HwUFCRKpIz6+EgDAxkGIBTQaw8/v08mT2gCAL1o3VFS0Bwd//vhx1+TJ0idPvkUb8dYqA7Bp06aDBw96eXmNHTuW+f2D2ecF4MxfCM/4WIJ58+a9efOmqKgIb+MC/nxUZ7bLIbzKyMjImJmZTZ48mdn8nfCwDwAQEREhk8n4NMxvP9nZ2eLi4gCArVu38vDwoEy6urqMjY2NjY1dXFy+2ErMlJSUIMXkkpKSxsbGpKSkd+/eNTU1lZaW3r9/39/f/86dO/b29njnptTU1Nra2sDAQBKJ9Pnz5/Dw8N7eXoKicXt7O9Kpefbs2fHjx9etW4fXjE5KSqqtra2qqnr37l1BQUFHRweWeWJi4pkzZ168ePHw4UP0rqyhoWFlZWVra/vq1asXL17k5eW1trZ++PAhMTGRTCbHxcV9/vz5+fPnt27d2rZtm7GxsZmZGZVKTU5O/vDhQ3Bw8P3792/evNnS0pKbmxsXF2doaDht2jRra2symdzS0pKZmZmYmEihUPBtcvDgQXd395UrVzo4OEyZMoWd2DSFQlFUVPT19fX390fevHjV5ilTphA0nQ8ePBgaGmpnZ/fy5cuqqqq0tDR2Lf/27VsPDw9PT087O7vc3FyChPekSZPQMwGHX4B/7LvqP8yDBw+qq6vR72nTpoWHh9va2qJBuM7OzpycnOTkZGRPmpOTg43zQQjj4uLKysru37+/dOlSCOH79+8lJCTQd3xvb29sHAVCyGAwCgsL8YWePn361av/DcKtXRuAH+fz8spKT69ZtcoXQujikrZ//5v8/Cb0XzOzwKioUgjhhAm3e3tpXV19Eyb8v+JtQECunt5TJAWZlERKTKwyNPTDF+rqmn7lSgKEUFb2en8/HT/OR6MxICs1SwaD4eWVhWQ/CwubGxu7lyz5n9Tn/PkP29qQDnIjNs73/HnesWNREMKOjt65c/83ipOXl2dqagohRGakLJGQkEB+2YiLFy+am5tDCHt6eiZOnAghHDVqFPNe2EZra2s0PkShUD5+/IiN861fvz41NbWgoGD+/PkQwmXLliG3z8uXL1+7do1CoQwfPry4uBgdkUePHlVUVMyZMwdCGBoaamlpSSKROjo6Pn36pKCgQNDYRDlACC0tLcPDwyGEGhoaBB9dzCM3KCgIZTt16lRkP5ufn9/a2opl8iNAgsjNzc0FBQXv3r1jHgxjycBqooPRjEaKtdXV1QQNVQaDgdxZ/xkYDAazZDZLurq68GcmQf2VICHLYDA6OzvZabFieHt7V1ZWFhcXp6Sk3L59myDhjVReOfwSsPg0PzQYPXr09u3bzczMmpqaJk6cqKuri9xJUlJ
SNDU1nZycnJycCgsLi4qK/Pz8KBTKjRs3hISEqqurX758GRcX9/r169zc3LS0NB0dnVOnTunp6QUEBGzZsgU9yBsZGQkKClKpVHNzc3yh69atCwkJWbNmTU5Ow8ePdS0tPfz8PGQyNTKydPPmaTNmjKqrIx8+HEml0ktKWul0Rm5u47lz7+h0hpdX1tixYo2N3UlJJCSEXVrapqgoAQAwMlJxc8vQ0Hgwb97Ykye1hYX5Pn1qdHPLsLaehQqVkBB0cUnj4+ORlBT28Mjs6uorK2srK2tLSKhKSiK5u+stXrzY2Nj4yJEjWJwHD0a8fFng5JTAYMD166c4OCzW11d++vQTlUrfvn2muLgghUILCSmsqGjPyWmYNm3k8uWKvr45ISGF+flNtrb/G1v68OGDiYkJAGDSpEnv378fM2YM4RD88ccfbW1tAQEBkydPRm/S+/bts7S0fP78eV1d3YMHDzIzM5HPi5GREbYXfuPBgwdXrlz59u1bDQ0NBweH69evI4MVKSmp06dPGxkZlZWVJSYmzp079+LFi4cOHUpOTubl5d25cyeyy1m/fj2yy3n27Bn+Vaampubq1atbtmyxsrLCj1S1traiHNA7U3JysrKyckVFRUpKCv4jNnr7mT59+uPHj9Hbz7Vr1ywsLKZNm7Zu3TpTU1OUSWdnp6io6LefyQSampqcnZ3FxcVFRUWbm5tXrGDxGYAZ5DvIDhGRLy+8QUeQ+ShzcXENxu/0e8HFxYV9lRkYQqUIH2PQ7FPMppGLi2vgJkI4Ozs3NDSoqKjU1dXNnTuXm5ubm5ubj+9/IwXfXSyXw49jyK7n6+vr4+Pjo1Ao3d3dMjIy2Pbvcj+qq6uTlpZmOaR/69atXbt2sbs4IYQ0GuTiAry83ACA3l6agAAPhIBKpSNXW3aQyVQRkf9dur29NEJiCoUmJMTb10fn4+Pm5v7frZzBgP39DAEBnvb29vfv3+O/FrKko6MPQiguLsjyrxDCmpouCQmhYcP4AAAMBuPUqVNojQGFQvlb13xXV5eIiMggVb8hhBQKhbk9UaE9PT3oT729vZhzN4LBYPT09LC8p6NznkwmD+ZmxwwqtKamRlpaGrufIveGQd6Uv5GGhoaUlBQJCYl58+axPAk5/CC6u7uTkpJoNNq8efPwM3Q4/HIM2Z7v3+In9HP4EdYKwcHBS5Ys4Vz8HDhw+BXh9HwcOHDgwOG/xZCd28mBAwcOHDiwhNPzceDAgQOH/xacno8DBw4cOPy3GLITw5iNAhQUFAZwYGhtbU1NTeXi4lJTU2tvb1dTU/unI/6Tzs7O2NjYL07FHHqQyeT379+vWbMG2/JFcwZ2ENwSGAxGcnKyoqLiqFGj2O1SWFjY19c3gPjcYEAGIPPmzfuWTDhw4PCjGbLvfMxGAQM4MCDXFUlJSWVl5bt3754+ffpfjLy8vHzjxo3oN0E78UfzDxdHgGCrNoA5wxfB3BKQjUNLS8vevXuZFVsQaAnzlStX8AJpXwcyAPnGTDhw4PDD+fcW0f9wmI0CWDowZGVljRw5sr29Hdvx+vXr/3iwf0FKSgpCWFVVZW1t/Y8V+g8XxxKCpAs7c4bBg9k46OnppaSkMCd4/fq1q6srhPD69etOTk7fUhYHDhx+FYbs104EjUaj0WgFBQVFRUXjxo3DtuMdGAIDA7W1tfFL02xtbZmzKisr8/DwUFNTi4yMvHjxYmhoaFpamouLy8mTJ0VFRffs2bN9+/Y5c+a8evUqJiYGKUQ8fvw4MDBQWVlZWlp61qxZVVVVoaGhVlZW+vr69+7dU1RUzMzM3Llzp6GhoaenJ51ONzQ0zM7OxhbaBwYGZmZmRkZGTp06NSwsjJ+fn5+f38zMDP2VwWAcP358+fLlDx8+fPDgQWBgIFbWunXrYmNjc3JypKWlz58/j9J//vxZX1//3LlzgYGBurq6e/fuJdQIK2758uVY6925c2f8+PEBAQGenp4tLS349L29vUZGRjY2Nki
fbNmyZREREaqqqgcPHjx69GhHR4e4uHhUVNTDhw/l5OQePnyINOTu37+PbxYBAYEpU6YEBQVZW1urq6sjW7WQkBBtbe0bN25gjV9YWMhcIwCApaWltLT0oUOHDA0N9+/fb25uvnv37sOHD9++fVtZWXnDhg1BQUGSkpJIZzwiIuLx48dIVgbtTqfTPTw8xMXF586dCwDIy8s7c+ZMTExMUFCQjIzMvXv3BAQESCTSqFGjbGxsoqKi6HR6YmKipaUljUZjjgdC6OLikpaW9vjxY3t7+7q6OgUFhaioqJCQEILQOQcOHP5l/u2u9wciJSV16NChW7dubd26NTAwEELo6empoqKydevWWbNmZWVloWSbNm3asWPHF3Nbs2YNEod0cXExMzNLSUkxMjKCEHp5eR04cADiHNEwcnNzp06dCiGkUCiLFy/29fU9dOjQzJkzGxoaNDQ0Ojs7kRzoypUrUTATJkxAr57onS8hIcHExARCyNIGjEKhPHr0CJX77t07rCwIId6NDK+pSDAbI9QIKw7j5s2bQUFBEMK3b9+Wl5cT0sM/XcoghGJiYt3d3RQKRVlZGUJ4586dY8eOQQgfP36sq6tbWFgYHR1NpVLl5OQYDAY+1Pv379NoNGdn53PnzkEmWzXsnY9djVJSUlavXg0hPHHiBJLKdHFxgRBevHgRvbjr6upi73xv376FEKqpqdXU1GA5ODo6Ojs7QwivX79++vRpCKGNjc3vv/+enp5uaGiI0owaNSo/P9/a2trX17etra2mpoZdPImJiXp6ehDChw8fHjx4EEK4c+dO1IYcOHD4eRiy43yITZs2HThw4OHDh1OnTkVbvsKBAYH5DCD5f+YEmCMa85aSkhJFRUVzc/MbN258+PBhAK8AlrC0ARMUFJSUlAwNDUXvtfjS8W5keE1FgtnYF2tEcLdgTo8ZpA0fPlxAQEBQUBBZB/Dw8CBhMG1t7fr6egUFBRKJFB8fDwBgMBj4UOfOnevn59fc3IxcFwi2algk7GqkpaVVW1tbW1vLw8Pj7+//4cOHWbNmAZzlAh70Ii4uLo6N77JMICEh0dbWhlUWBZmXl3f06NHr16/b2NiIiYkN0ML4pgasjO44cODwrzOUez68PZi0tPSrV6+QEbOsrKy/v7+lpWV+fj4AYNu2bWlpadnZ2diOBQUFAAD81FCA8zlrbm7W0NDg4eFBCZBS+8CRSEtLR0REoBtuenp6ZWXlzp0737x5s3fvXgghlhXSesf24uLiQjZvBBswxOfPn93d3fX19QUFBQkB4N3I0EQPPFhiQo2w4jBkZWWfP3+OMvz06RMhPbts8ZBIJC0tLQ8Pj8bGxiVLlqD2x/5Kp9M3bNiwYcMGNLyH3xHZqg2mRtu2bbO0tNy0aZOSktL9+/dnz54NcP5qWKWwLdgPQiMTEmCVBQC0tbXNmDGju7s7PT1dUlLS3d2dXTyEzNk1CwcOHP5dhuw4X0hISGtr66NHj9LS0tra2n7//XdkrMXswKCpqfnq1av9+/evWLFi9OjRPT09urq6ZDJZRkbm7du3aAQIAHD79m17e/tx48YlJSVdunRp5MiRdXV1hw8fplKpJSUlxcXFyBFNS0sLe1eIiooqLi7Oy8ubMmXKpk2b1NXVdXV1LSwsyGTyjRs3MK+AdevWHT582NDQcOTIkYmJieid4927dzNmzPj06ZObmxs3N3dSUpKioiKyAUMICAjk5uaeO3eOTqd7eXnNnj0bKwu5kS1cuHD58uWYazxmNtbd3Y3Mxgg1GjZsGCrO2toa7XL48GHM3eLatWuE9CUlJdXV1cnJyZMnT25ubo6MjJSWlm5vb0evg3FxcVOmTElOTj5//nx8fLyLiwsfH5+kpKSHhwedTkehTp48mZeX19bWVlpaOjw8fPfu3chWjZeXF9mq+fr6InMGljVCbNmyJSMjQ1VV9bfffkPv7pjlQmdnJ7Jx2Lt3b1FR0du3b0eOHFlWVpaUlIStWtHU1LS
3t1dUVExNTe3v76+vr09LS0Ou5ebm5leuXJk4caKpqamSkpKdnd2yZctUVVUXLFigqKjIHA+dTo+IiCgsLCwrK0tJSamurkaOcRQKZevWrT+PjisHDhw4up3/T0tLi6CgIPrkBQBITEyUkZGZOHEiPg3e6gFCSKPRuLi4BqmX393djTKHTF4BfX19AgIC6KMlfhdkQcBgMOh0en9/P8EKoLe3V0BAAEJIpVIJTgXIIHswNrn4GjE7HgAmd4vBmF24uLi0tbXZ2tpi84aQtQIy0MCsYQAADAajv79fQEAAc10gk8lCQkLMnysHqBGdTufh4UGHA7OMwWBZqUEm6O3t5ebmRoUyGAwKhSIgIICO0eBbmAMHDj8bnJ6PLT/C4uA/wtWrV1taWq5cufJvB/Jr09raGhoain4vWbJk7Nix6HdfX5+vry+dTt+yZcsX/aH6+/v9/f2xL/+INWvWSElJ5ebmJiYmjh8/ftmyZd7e3jw8PMuWLZOVlf3GsCMiIqqqqmbPnj19+nQshmfPnnFzcwsJCTEYjLVr134xZj8/P3RrUldXnzZtWnBwcGdnJ3rb/sbwOHAAYEjP7eTwr9Dd3e3n5+fr60sikf7tWH5tHj58iC5Sbm7uuro6tJHBYBgaGqIZW2ge6RdJTk5GiXt7e8vLyw0MDFJSUjo7O0VERHx9fYWEhPbt2zd58uQLFy5YWloOnNWFCxc6OzsHSBATEwMACAwMnDBhArbR1NTU398fQvjq1asFCxbg07Nzin/9+jUAYNmyZei/bW1t1tbWWGJ2ezFvH8CJnsN/GU7Px4HDT8rt27e7u7vR3F1sY25ubllZGYRQT09PVFR0MPlUVFQAADZv3gwhzMjIKC4uzs7OTktLAwAEBQUVFhbq6+vPnDmzqampsbERQtjY2EihULDdm5qaqFQqhNDb25uLi6u+vp6Qf0dHR0dHB/p97do1AEB9fT1a7oIYNmzYmTNn0G9PT0/0IzIycseOHa9fv3Zzc1u3bh2NRjtz5oycnBxalwIhnDFjBj8/f0NDA4Tw2bNnpaWlEML6+noLCwt/f/87d+4cOnSot7eXZW5Y0fv27auurka/6+vr16xZM2vWLB8fn1WrVhUVFQ2m9TgMSTg9HwcOPyN0Ol1CQoKbm3v58uWFhYXMCQ4ePLhixYrBZIX1fOXl5VZWVmjjhQsXAAA7duyIj49XUVEZO3bsixcvGAzGhg0bnJyc9PT04uPjyWTyihUr7t+/P3ny5Hfv3hkZGQEALl++jO8XDx48eOzYsQMHDhw+fLi7u1tPTw8AgNZWYiBthLlz50ZERKAtRUVFgoKCaHkohBD1i9HR0QCAkpIStPHRo0cAgIsXL0IIMXmdhQsX2tjYoN8zZ85Ey0ZZ5gYhJJPJEhISaJkmYvfu3bNmzYIQysrKnjhxYjCtx2FIMmR7vo8f61JSqlNSqslk6j9cdE5Ow8ePdQMkqKurQx9hEhKqXr0qolLpaDuJ1PHqVVFDAxlL2ddHKy7+/4XS5eVtb9+W9fXRIIT19fVokQCerq6uV69efa+K/KJkZmZ6e3sP/FGOHenp6fiXlX+Rzs7OK1eubNmyRUBAYNKkSYS/UiiU6dOno5e/L4J6Pk1NzT179mAfSFNSUgAA6GxZuXIl6g+QcmlJScnBgwf19PQcHR1lZWUhhJcvX46Kirp+/ToAAK/zFxsbCwDIyclBuSUnJzs5OQEAyGQyPoDKysqFCxeiL7eOjo4QQicnJ7SeBCVAlwOh56NQKFJSUmPHji0vL3/69CmEsLa2FgDw4MEDlGDbtm3jx49nlxuEMDAw8Pjx4yNHjuzr60Nbdu/erays7OHhMX/+/PLy8sG0HochyZBdzzdihNCCBY9ycxuHDSNO9vt2Ojv7BvjT3bupYWFsZYszMjJycnK4uLiuX08KDS10dIxbtcoXAFBS0nrxYvzy5Ypnz74rKWkFANTWdm3ZEvz77/9bY+7oGFdc3KqmJrNmzVM6HUp
ISHh7exNW4BFEn/+DREZG5ufnS0pKDlIlgAAmdf2vM3z48KNHj3p7e3/69IlEIqGbPsalS5e8vLzk5eUJ2wdg0qRJLi4u6PWroaGBZRpkZhIRETF//vz9+/dnZmai75zHjx/X1dVlTo9WsAwbNkxERAQAkJuby5ymq6urv7//3bt30dHREydOdHJy6u/vp1AoAABspQfLJR+CgoI7d+4kkUi7d+9Gr5to2Ss2n5afnx/NsGWXW2Nj45YtW5qamgIDA7FsRUVFFyxYUFFRERwcPHCLcRjCDNmeb9w4MXFxwRkz2LrSfDUkUufx49Hs/ioqKqCmJsPur1Qq1d/ff/ny5T09/fPmjXVy0o2O3pqWVtPe3nvu3LtNm6by8XHr6ytfvZoIABg9eri29v9rjcbElI8ePVxGZpiwMF9fH42fn3/58uV3797F56+urs5SvuS/Q2hoqJSU1OrVq7/OZ8rU1FRLS+u7R/UVFBYWurm59fX1TZw4cc2aNbKyshcuXHj58iUAwMXFpba2Njo6eu/evVlZWV/MCuLmb+/YsaOurs7FxYXwJ/QDzZwcOXLkunXrlJSUJk6c2NLSgopAvQsAgE6nk8lk9Bs1MoVC6enpAQBMmTKFucTOzs779+8DAJYuXYq0WLm5uVeuXAkAyMvLQ2mqq6tZRm5tbY38xdAU1gkTJkyZMgVTXCouLl69ejUAgGVuaWlpQkJCJBJpyZIl9+7dw2eroqIye/ZsLy+vLzYdh6HKkF3JzkxjY7ehob+npwGdzjA09M/O3t3Z2ffw4cdp00aGh5e4uq6xt4+pq+tSUJCIiioLCdkoIfG/NV4UCu3+/QwqlZ6YSHr+3DQwMD8zsy4ysnT58v9NsI6OLisra2tpocjIDNu+XZ19CODJkyfIAU5YmG/evLEAAEFBXgUFCXFxwbS0mmvXlgMAlJRGJCf/70aAfxbetWuWgYHf/v1aW7dOFxbmAwCMHj06PDx8x44d+AV2BNFngtbzu3fvysvLq6urR44caWVlhde89vX1xWtwX7hw4cmTJ2PHjg0ODr537x47zejs7Gx9ff3k5OTAwMA//vjjxYsXVVVVwcHB586de/LkCZlMDgkJcXV1xU9Gv3HjBqZSzWAwNm7cePTo0bCwsKdPn96/f59KpSYmJj5//hx7tK+rq7OwsNDU1CSRSCUlJSEhIcOGDcNS+vj4XLhwgUwmV1ZWbty48dOnT8LCwhISEtOmTXNwcFBRUUlMTNy0adOwYcOwgqysrBwcHDZt2vTmzZtFixaNGDEiKCjo0KFDWlpaJ0+eVFZW3rx589atW5E0GplM9vHxaWtru3TpElICsrS0xK+mxwtws9QK/zpIJNKxY8cuX75sZWWFbLMePHiwaNEiLi6uffv2YasUzp07N3A+/f397u7uAIDU1NTHjx/X1NS4ubmdOXPm7du3AICgoCAlJaWioqLW1taEhIRVq1YZGRlt3749KCho9+7ddnZ2kZGRBgYGGzZs2Lx5s6amJj8/v62tLdZxLl682MbGxtfXl0ql2tnZaWhoXLp0CQDg5eW1d+9eLIYnT56MGjVKS0vLz8/P3d2dh4dnzpw5Li4ue/fu3b9/P4RQSEhIVlY2MjISAIBkJdCOcnJya9euxbf2s2fPDhw48O7dOxKJJCoqiibUMOcmISFx/vz5a9euTZo0KTAw0NPT09fXV1dXNycnp6am5tmzZ/z8/FevXv2WA8Th1+Zf+87645GSupqRUYvfsnKlb1ZWPYRwwoTb7e29hYXN0dFlVCpdTu4mg8F4+DDz4MFwCOHOnaFBQZ+xvS5fjndweO/rm6OgcOfly4KEhCoTk2f4bENCCkmkjtzcxkWLvCCEt2+nODrGsQxp586d7969w28JDv786lURhHD48Evt7b0QQhKpQ1b2fzZJzs5p5879Lz2VSrewCJaSuhoQkIvtvmXLlqSkJHyGBNFnvLZyXV3d0qVLIYQUCuX3338naF4za3DPnj378+f
PSCabnUYzhHDlypVpaWk9PT0yMjJ9fX2vXr2qqKhITk42Nzf39fVdvXo10m7GIKhUS0lJoQwvX77s4ODg6+uroKDw8uVL/C7GxsZv3ryBEFpYWDg6OhJSOjs729raopSWlpZo7OratWu3bt2CEObl5cnKyjIYDKyg1tZWNOc+MzNTU1MTQhgZGbl9+3aIk7resmXLH3/8ASFUUVFpbm52dna+dOkSxIl0YwxGK/zroNFoSNAO0d3dzTyy+yNoaGjAxswghLW1tVi5ZDKZeZ1AW1sbfvCPAJVK7ejoIJPJ+fn5SOoPo7+/v6qqauBgmpubmTfW1tYylziY3DhwQAzZr52DQUFBgkTqiI+vBAAwGICHh1tAgBcAICoqgB/Jy8io3bZN3dx8amnpPkPDScz5zJs39s2bkqKiFhqNwfxXPG1tbXjVj9rarubmnjVrJgIAxo8X7+rqAwB0dVEVFUcw77t//5tTp3Tevt166FBEXd3/PjcJCQk1NjbikxFEn/Hayi0tLUjiRFBQcNOmTQTNa+YSL168uGzZMrSYmp1GMwDAysrq8ePHERER2traoaGhlZWV48ePz8jIWLVqlbm5eVhY2M2bN/HpCSrVvLy8KMOMjIxt27aZm5uXlpYSLOkJEtiElMxa4QCnx62qqorqjhWECW2LiopiutJo6ItZclpERIRMJs+YMSMmJoZMJispKcnI/OVrNjutcD4+PmNjY2NjY+wN6e/Cw8ODX6guLCyMl7/5ccjIyOC1hGRlZbFyhw0bxjwmJy4ujjf5IsDHxycqKjps2DBVVVWCCBEvLy+2PJ8dzEcWhcRc4mBy48ABMZR7Pgbj/wcb2tt7Q0IKeXi4+vpoAICenn4IoYdHZmNj95Il8nQ6A+JGJuBfdW1kZIY9ffoJANDbS8vKqufiAv39dHyCPXvCpk8fOXmyNGFHOp2oj6OiooLcDAAAbW29CQlVVlYzAQBFRS0rVijm5zcBAPLyGpculWeuTlISSVZWZNq0kevXT8nJafgzkzaCvhoGEn3GaysDAOLj45ubmwEASUlJBM1rZg1uWVnZ7Ozs0NDQDx8+DKAZbWBgEBMTU1JS4ujo6OTkJCcnBwCQkZFBi5cBAGgl9Z9twlalWkZG5unTpwCA3t5edsNXSAJ7gJTogQ7g9Lh7e3slJSWlpKRYZgiZBKaZtwAA1NXVDQwM4uLifHx8BpBNwbdSd3d3cHBwcHCwjY0Nu/QcOHD4VxiyPV9ISGFrK8XVNd3ZOe3s2Xeamh5TpsisWzf58OHIGzeSR44clphIkpAQ9PbOvnUrRVJS+MGDDykp1bm5jbW1XR8+1KWl1WB3wIMH57q7Z6xc6evgEDdt2sjJk6U/fWp0c8vAypKSEj59OjYmprysrC0ysjQ5mZSRUdve3rtjR4i//19mu61bty4zMxMAQKHQ9PSenjkTq6rqoqBwp6ys7fDhecHBBWlpNZGRpYcPzwMANDZ2x8aWf/xYRyJ1AgD27dOyt4+Jji5rb+/V0RmPMqRSqZMm/eU1FIk++/v7I9FnpPVsZmYWExMzZcqU48ePT58+3djYGKll4jWv1dTUkAY3MvLt7u52cHAoKChYsmSJmpoaPp9Ro/4yb4iPj2/dunXLly9XUVGZMGECmnRgZGREJpM1NDT27t07fvx4LDF637K1ta2urg4PD3/z5k1rayuauHHw4EF3d/eVK1c6ODig0VA8AQEBISEhFApl48aN+JQTJ05MTExMTk5Gq6ezsrIiIiI6Ojr27dvX29v7/PlzDw+PBw8eZGZmYgW9e/euubm5uLj4/fv3ZWVlpaWl8fHxFRUVRUVFycnJycnJJBLp8+fPycnJ5eXlFRUVKSkpb9++RYrndnZ2hBmMmC45AGCAVuLAgcPPw39Ot7Ovjy4gwEOjMXh5uQEAFApNSIi3r4/Ox8fNzc1WTR9CSKHQ0LwSAEBvL01Q8C+Tg1A+PT39WBqUTECAh/B16NatW7t27SJ89sFKqa0
ly8qKsIuETKZ2dPSNHi2C8gwPD+fn51+yZAlTsr+IPhO0lXt6evj5+dHnLILmNfyrBjeDwejq6sI+K31RMxqlwScgk8lovjseZpVqfAtQKBTmxjE1NbW1tVVXV8c0vtmlJNDV1SUiIvLtPgk+Pj4LFy6kUqktLS0pKSnYFAxmOErWHDj8/Pzner5/HQaD8erVK319/W+8HVdWVpaVlS1evPh7BfYzs3r1altb21WrVv1bAWhpaZmamqqoqNTV1U2fPh25AHLgwOEXhdPzcfjZqa6ujomJERYWXrVqFeYh9Q/T3d2dlJREo9HmzZs3wGwODhw4/BJwej4OHDhw4PDfYsjOcOHAgQMHDhxYwun5OHDgwIHDf4sh2/OhwaEBEtTX1yclJX11/lQq9f3793V1dYPfJSMjA5NRbmtri4yMDAwMJJhlD0xYWFhvb+/fC5R9DN8d5jZpb28PDw//iqyYj843Hi88DAYjMTERvyoRy/yLpw27HL47RUVFqampaWlpLS0tqampqampaEXm58+fU1NT2UlO/5M0Nja+e/du8Ol/6Ln3g+jo6Hjx4gV+QepXMPhTF0KYlZWFdMO/DjqdjpbufAvf8Vr7aRmaPR+DwQgLC7t8+TLLv3Z2dgIAmpqaiovZOip8kaampoMHD5aVlQ1+F8wHgEwmnz17dvny5WQyuby8/Is7ooABADk5Od3d3V8XMCGGHwFzm2RlZVlbW/+tTJiPznc5XnhaWlr27t2LvwujzAc+bRB0Or27u5s5h++OiIjI2rVrP3/+LCEh8fLlSwsLCyQRICIicuXKFQkJiR9X9BdBR+SPP/64cuXK4Pf6vucedlH8UA4cOLBixYoPHz58SyaDP3U7OzvPnj37LR1tX18fWjH8LXzHa+3n5Z+QSPs3yMrK0tXVZd7++vVrV1fX71KEkZFRQkLCV+wYGBh49uzZQSbu6+tbu3btV5Tyr0BoEzKZjBQyBwnz0fmOxwuPnp5eSkoK83Z2pw3G8ePHc3NzB8jhO3Lu3Dlzc3MIYXt7u5CQEDIlj46Ojo+P/6HlDkxVVZW1tTWEMCMjY+XKlf9KDP/MRdHc3DxnzpwfXQqBw4cPe3l5/cOF/gcZ+l4N0dHRZWVlLS0tMjIylpaWHh4e4uLic+bMSUxMTEtL8/b2Lisr8/DwUFNTi4yMvHjxYlJSkpub27p16549e3b58uX58+djWeGNDnbu3Ik2MhgMvOMBmUwOCwvj5+fn5+c3MzO7d++eoqJiZmbmvn37kA/Atm3bwsLCurq6/Pz8fHx8lixZcvjwYULOBDeDt2/fBgQEzJo1a9euXVeuXNHQ0Lh3756AgACJRBo1atTGjRsJxgL46p8+fXrs2LH+/v46Ojo7d+68dOmSsrLysmXL9PT0fHx85syZs2fPnuPHjxcVFVVVVYWGhlpZWSkpKenr6587dy4wMFBXVxcvup+RkREeHl5TU9Pa2mplZaWtrY33Q9DR0cFSPnjwQEREhPmdODIyEito/vz5+Mi9vLwIR+fx48fMx4vgGoE3lCCU1dHRoaure/LkSVlZ2fXr18fExMjIyBw4cMDDwwMAEBER8fjx47a2Nn9/fxcXF5Q5ti9Lb4qGhoagoCBJSUn0yoXlEBAQQKFQMPsIPz8/ZKfg5uZWXFzs6+uL/Fr/Lps3b546dWpHR0dbW5u4uPjjx48dHBwSExNPnz7NzjoDgT9/1NXVsZPQ3t4e31z4Y6Gvr4//0wCtGhgYmJmZGRkZKSkp2draeuPGjZCQkHPnzi1evJiQIX6vzs5OdP5v3bp1165dEyZMAADEx8efOnXq/fv3paWlL1688PT09Pf3X7hwYVBQ0M6dO62trfHn+fbt2+3t7ZEph4mJCboojI2N7e3tsauvurqacOoSrix2/iHYyZmZmSkhIZGdnX3nzh0/P7/GxsaAgAB9fX2CZgK+hcePHz/ABQghRGfX48eP7e3t6+rqFBQUoqKiQkJC8C/uNTU1Xl5e48e
Pj4uLQ95P+LpbW1tfvHgxNzd3xowZQUFBdnZ2NTU1YWFhPj4+srKy+PvPx48f9+7dm5OT8+zZM3b3MURUVBSdTk9MTLS0tOTn53/x4sWIESMSEhLu37/PfDkMQf7trvdHgT28h4SEkEik3NzcRYsWQQgdHR2dnZ0hhImJicifes2aNR8/foQQuri4mJmZlZaWqqurQwifPn26b98+LMO+vj680QH88/2G4HgQEBCwc+dOOp2el5fX0NCgoaHR2dmJ7A4wHwDMpuDy5cvXr19nzpngZoAkLiGEy5Yty8jISE9PNzQ0RFtGjRqVn59PMBbAYs7NzV2+fDmEcN++fehBEovB1tYWeVvfu3ePQqEsXrzY19f30KFDM2fOhBCOHTu2o6OjtrZ22rRp+FY1MTFJSkpqbm5WUFCArPwQUJvk5uYi24empib8Ox9zQYTImY8O8xaCHwLeUIIZJyenq1evQgjnzp2bnp7e3NyM1ET19PTevn0LIVRTU6upqcEyx04bdq4Lurq62DsfPgeCfURZWZmKigqdTg8ICEDval/H3Llz3d3dr169GhoaKicn19nZiaozsCkE/vwhnIRYczEfC3xLDtCqCQkJJiYmEMKMjAxtbW0I4fPnz3fs2MGcIQHs3MPORgMDg7CwMAjhvHnzSCRSdna2lpYWhLC8vFxYWDg5OZlwnuNNOdBFQbj64F9PXcKVNYB/CISwr69PSUmJRqNBCE1NTd3d3SsqKpBVPTOEK5TdBYjAzq6HDx+icnfu3BkUFIRPs379enQXsrGx8fLyYr7Gg4KCLC0tIYQ3b948cuQIhPDkyZM+Pj7MLSAhIQEhZHcfw7C2tvb19W1ra6upqTE2Nm5qaoIQPnnyhEajYQEPYYbmOB+eefPmvXnzpqioiGBHgIl7YaL+CxYs+PTpEybSLyoqih9LKC4uxhsdYNsJjgf6+vpdXV2ampoCAgIyMjJmZmaTJ09GA0LMnrFIxoU5Z4KbASFmLGCUMi8vj2AsgKUfN24ciUSqqakREhJCSphYDHv37vX09ExNTV2wYEFJSYmioqK5ufmNGzfQkAbKkNACAIDZs2cHBwdDCJF9K7MfAkqWkJCgoKAAACCIO7MriDly5rbCthBcI/CGEsxYWFj4+vrW1tYqKSk9efLkjz/+wFwg0KJ4cXHxtrY25uIG8KbAwOdAsI+Ql5efPn16SEhIbW0taoqvY/PmzQ8fPuzo6FizZg0/P//u3bvRu9TA4eHPH8JJiDUX87HAt+TArUpoAQkJiba2NuYMCWCNzM4rA3PkmDBhwogRI1JSUgjnObMpB7PfCP7UJVxZA/iHAAAqKyu7urpQkOhWMEDFCVcou9OYUHHCvSU5ORm5ebx//z4+Ph5/yTBf4+wajWULMJfFXIWjR49ev37dxsZGTEwsOztbXFwcALB161YeHh4s4P379xsbG//dofpfgqHf8+3Zs2f69OmTJ0+GEAIAuLi4+vv7AStR/+bmZg0NDWxHyOQkgDc6wLYTHA/KysqePn169OjRAwcOVFdX79y5882bN3v37sWeNcBfjbAhhIScmd0M6HQ6Pj0WMACgra1txowZ7GIePnz4kSNHUlNTDxw4oK6ujq+1srKyhIREQEDAjBkzpKWlIyIi2traAADp6en44giNaWxsLC8vX1BQ4OvrC9j7IUhJSSUmJqIc8JNX2RWElcV8dJi3EFwj8IYSNBoNnycAYNSoUYqKivb29g8ePHj//n13dze6oeCPBf7QYLDzpmCOBzuIBPuIY8eOXbhwQVZWFnwDGzZsyMrK0tbW5uLisrS0zMvLU1FRGSA8wOSGQTgJseaqq6sjHAt8S+J/E0Ji1wIDHFwEcyMDVucYAKC3t1dUVHT27NnsznPw50VBuPoI2RKuLHb+IYixY8f29PQgw3d0K2AZMHMLf7E6zPmg/86dOxe5eSxcuFBSUhLdVdAlM8A1TiiFuQUGExIAoLu7Oz09XVJS0t3dfdiwYWgO9uf
Pn9va2rCA79y5Exwc7ObmxjKHX5oh2/NFRESUlZWVlZVJSUmdPn06JiamrKwsMTFRU1PTz8/v9evXERERhYWFFRUVt2/ffv36dWRkZFxc3KVLl5KSkmpra6uqqt69e1dQUNDR0YEylJaWxhsdVFZWFhQUxMbG8vHx4R0P4uLiPDw8pKWlTU1NOzo67O3tKRSKlZVVW1sb8gFobm5OS0vLyMhAPnOpqakiIiL4nAluBtXV1aqqqqdOncrMzCwqKoqMjJw3b565ufmVK1eCgoJMTU2lpKQIxgJYI9TX1zs5OXl5eSEv8tbWVhQDega0tbWdOnUqAGDkyJGbNm1SV1e3srLq7e0tLy9vbGxMSkp69+5dU1NTaWkpluH9+/f9/f3v3Lljb2/f2tpK8EPA2kRPT09ERGTr1q23b9+m0Wg5OTlod0JBXV1dhMiZjw7zFoIfAt5Q4syZMwYGBoQzYffu3erq6oKCgvr6+kuXLgUA1NXVFRUVvX37try8vKysLCEhAcscO23YuS7MnTv34sWL79+/x+eQlJTEbDShrq4uLy9PGO76u0hKSv7222+6uroAAAsLi/Xr16PtA5hCEM6foqIi7CTk4uLCmktHRwd/LHh4ePAtif9NCGny5MmfPn1yc3NDjVBdXR0dHV1YWMjHx0fIEL8Xdu5VVVXl5ORkZGTU1NQUFBS8ffuWRCKVlZWlpqYCAEpKSgICAu7cuePq6ko4z8eMGYOZcgAA0EXR19eHv/oKCwvxp25nZyf+ymLnH4IQFBR89OjRlStXoqKiGAzGli1bXr58WV5enpGRQUhJaOH8/Hx2FyAAgE6no7OrrKwsJSUlNze3trb2w4cPaWlp+D7p0qVLhw8fPnXq1KdPn7KysjQ1NfF1l5eXj4+PLy4urq2tTUlJSU9PR5nk5ORACPEtEBkZ2dLSkpCQwO4+hvHo0aPo6GhVVdXly5dfu3bNyspq+fLlcXFxoqKi2OXwxfPzF4btd9AhBDK2xvygKRQKc5qOjo7BZNXd3Y23q8YyZDAYdDqdQqHQ6XQqlYrKYjAYDAajs7Pz7+ZMp9N7e3uxmGk0GpVKZS60r69v4DwTExMTEhIqKioyMzPRgAQBfF3IZPLAuTEYjNu3bzc3NxcUFLx7987X1xdt7+zsZPbphhB2dHQgWwbC9oELYj46zFv6+vqwutPpdMyem06nP3nyhDlPNHjzxeYaoJSB40EwGAy85zhqrr9VIrsw2P1mVyP8+UM4CfHNBf96LAgtOYDNOrsWgIM4iwYgNzdXV1e3q6sLbz3P7jzHLgr81ccyW8I129XVNUAMdDp94ARYMvwV+l2gUqkUCgV/pQ/mGoeDaAGW0Ol0MpmMtUx/f/93rMvPD0e3cyhjZ2fX1dWlr6/f0NAwcuTIb3z/aGxsnD9/vr29vaioaHNz84oVK8aNG/e9Qv0ukEgkKSmpAZxj/zFOnDhRWFh448YNeXkWJsMcWJKWlmZra4te/jhw+KFwer6hDJ1O//DhQ01NzaxZs8aOHfvtGTY0NKSkpEhISMybNw+Nt3NgSWpqqpiYGBqT4zBIwsLCmpubp0+fThjW4sDhu8Pp+Thw4MCBw3+LITvDhQMHDhw4cGAJp+cD4Dvp0nZ3d7NTZ/52pWkC311SdvDS0p2dnX/88ccACX4eudufJxIOHDj8VHB6PgC+ky5tV1cXNn2fADulaSR//LdKYSfo/I0MXlq6vLx848aNg4zte/F36/jjIvkJ+XYPkL9FXV1dXFzcAAnQvPyvyxyJ/33dvt/ItzwnUSiUV69eDT59RkbG8+fPCwsLv644dnz3J+whDKfnAy0tLQUFBSIiIniByq9g1KhRR48eZfmnEydOELQnEPb29n9r0cybN29+//13AMDUqVMtLCzwW74RTU3NQaacPn06Uu74Ymzfi79bxx8XyU8F9tj01R4gX0dAQACzmCdGZ2fn3bt3v7r3Ki4u3rZt29eG9pV8y3MS2reiooLd4yAzrq6
uAIBFixZFRkYOvojB8O1eLv8dhub0vL6+vqNHj3Z0dIiLi0dFRT18+HDOnDl4sdq4uLjAwEBlZWUpKSkxMTFMl/bhw4csFXItLCwcHR1tbGzS09MhhMuWLYuIiFBVVT1+/DhWaGBg4J07d+Lj4wkS2DQazcrK6sqVK8LCwng5XVNTU0z+ePTo0SgTvOYvQTna2tqanaDz7Nmz3dzcWEokV1dXP3z4cNq0aeHh4a6ursyauSylpevr67dv3z5nzpxXr17FxMQEBgYy6/wScv6iFDhaX8FSCBtf65kzZ27ZsmXVqlXc3NwCAgJYrTU0NJ49e4YOx4kTJ169eoWp9AoJCd25c2f8+PEBAQH379//lkh+Iezt7Tdv3jxlyhRTU1O0JSIiAimZWVpa/rhyNTU1kUAPS0RFRdXU1JgVvAaJuro6s5LcD+XNmzcVFRXW1tZTp05Fwg6Dh0qlbtu27cWLF6qqqpjY2Be5f//+7t27ubm5bW1tB1/EYHI+ceLEIGPgMGRXst+5c+fYsWMQwsePH+vq6hLEanNzc6dOnYpSYrq0zCqxeIXcadOmVVRUQAjFxMS6u7spFIqysjK+xJaWFiUlJcgkgQ3/VJqGTErQmPwx4ovK0QMIOrOTSC4sLIyOjqZSqXJycgwGg6CZy05aGkJoYmLy4sULCCGzzq+UlBRzzl+UAmeuDrtaZ2VlIT8KlADLGTscBJXemzdvIv1fJKry1ZH8bHh5eTk7Oy9fvhwpmyxatOjevXva2toJCQn19fXKysrXrl0rKCiwsbG5c+dOZ2fntm3bTExMnj59umrVqmvXrkEIY2NjHz16dOHChfv372PZ0un0/fv3m5qa9vf3Z2ZmHjlyhEKhnDx50tvbe9euXe/fv4cQPnnyxM/Pb9euXampqXQ6/ciRI1FRUWZmZp2dnQkJCdra2vv37580aZK/v7+rq6ulpWV3d/f+/ftPnz4NIbx9+7ajoyOE8Pr162/evNmxY0dmZmZGRsbEiRM9PDyMjIzu37+vqanZ1NTU0dGxfft2JDSBIS0t7eDgoKmpeejQofr6+gULFqCjef/+faQKjairq1u9evWFCxdmz57d1NR069atK1euGBgYdHZ27tu3z8LCYv/+/ZMnT05OTia0JIlEOnfuHLKDoNFoxsbG27Zty8zMvHfv3pYtW2g02vHjxy0sLM6fP79gwYLW1lYI4cOHD93d3Q0NDffu3Zufn4/FkJGRISYm5u/v39/fLykpiWp348YNCGFBQYGbm5u1tfWZM2fwtUtKSho+fLifn5+rqyuSls7Pz1dUVPTx8TE0NETnfEREhIeHh4GBQUhICFZERkbGpEmTIISPHz9eunQpc5zl5eVLly7NyMhgzjA9Pd3BwWH37t3r16+PjIzEx4OPs7q6evHixVevXr1+/Tp29Q1VhuzXTkz9VltbG+mE4cVqmaVvASuVWHwyTDF2+PDhAgICgoKCra2thBIJ+WC6twTJWnYasoNXjmZ+LmYnkaygoEAikeLj4wEADAaDoGPLTloaAIDVnZ3OLyHnQbYDc3WYaz19+vQJEyYwvzdgIRFUepOSktBL85IlS5D9zddF8lORkpISFRUlLi7Oy8vr4uIya9asjo6OvXv3WltbP3v2bOTIkePGjVu1atWkSZNGjx7d398/fPhwDQ0NNTW1jRs36ujocHFxUalUR0fHbdu2HTlyREREBMuZm5v7+PHjmZmZ3NzcFRUV+/fvd3Z2lpaW3rJly759+8zMzD58+PDx40czM7Njx451d3dTqVRVVVVdXV0ajYZcT/n4+G7fvv3777/v2bNHXV29vb1dWFhYXV29q6sLX4Xhw4cvW7YMnZYaGhptbW1r164NDg62srLq6uoik8lkMllHR4dw7lGpVDs7u/j4eG9v74aGhp07d378+BEA0NbWhpdiGDVqlLCw8JQpU1JTUz09Pclk8pgxY3Jzc2NiYhQVFUeNGnX79u0jR46cPn2a0JI9PT0LFiz
Q09N7/fo1Nze3hoaGhoaGurr6zJkzkXb5xIkTR4wYcebMmcmTJyPT+fPnz+/YscPCwoJMJquqqmIxaGhoCAkJbdiwgZeXl8FgbN682cPDw93dHQBw4sSJYcOGzZgxw8nJCX+jmDt3roCAgJmZmbW1NTc3NwBAVVWVSqUaGBi4ubl5eHj09vY6OTkJCQkpKSmdO3cOK0JDQwMpkC1evLilpYU5zgkTJrDMEABw5cqVpUuXOjo6ZmRkLFu2DN/a+DiFhIRu3boVEBAgICDwi34IGTxDtufDIJFIWlpaA4jVokcAgNNfBqxUYgm7DFAiswQ2VgRhd0z8F/FF5egBBJ0BG4lkDw+PxsbGJUuWIEUoQgzspKXxsGs6Qs5fLQXOXOv4+PgdO3Z4e3t//vyZuZUAk0qvrKzs8+fPAQBdXV2fPn36ukiYFZb/XQgPHANI7w/eAwRj1KhRWlpa4eHhDQ0NY8aMIXhuREdHIy1QeXn5xYsXM7sBoG+qM2fOBAAMMKWCYGjAy8uLduTm5t6/f//t27fDwsLWrVtH2EtISEhISEhAQGDBggUkEsnMzOz9+/ckEklERATVCwP/cIY3yhj4qfeLT2zMTa2mphYeHs7Dw4M/eQjw8PAICgoi2wowOK8PrDr457AvWl7gSyTEye4Jm2CxgocQJ+Ghk+PV8EsSFxf38uXLN2/enD9/niBWGxUVVVxcjOzHMF3aARRyS0pKqqurk5OT8/Pzm5ubIyMj09PT29vbWVqZECSwS0pKkNJ0SUkJQQkayR/n5+ejHb+oHM1O0BktSGApkSwhIeHt7X3r1i1JSckHDx4QNHP19fVZSkvX1NR8/vw5PDy8t7eX0HRJSUltbW3v3r3D5+zh4fFFKXDm6rOsdUlJye3bt1esWPHbb79t3LiRRCKhnENCQrDDISAggFfpPXDgQEhIyIIFC86cOaOqqvoVkXh7e9vY2PzAc/Hvw+6BY4DHIOxPaAs7dxGEra3thQsX0Bs/wXNj4sSJQUFB6FEgOTmZnR9CT0+PvLy8iIhIX18fAACpPmJ/HcDQAABgaWmJ/JlZzpZCdHZ2qqur8/Hx2djYrF+/Xk9Pb4C2IhhlIFg+9X7xiY25qe3s7BoaGiZOnMj8JkR4PMUyGcBMA3+YmLczPwgSrFoI7QyYjjvzdoLFCh5CnISHziHs1TBkx/mcnZ0dHBwIqruD0aIdpEosM62trXgfy0FKYDOLzH61oDM7iWQ0jtLb24sXAsbDTloaD8umI+T8LVLgX1FrgkovvqC/GwkmQPzz0NfXt2jRInV1dRsbm5qamqdPn44bN66ystLOzm7OnDnt7e1nzpxBRsEGBgZr165tamrasmWLnp5eXV3dunXrTExMenp6HBwcRo8ebWRkhDxLCZiamqID19XVhcy779y5Exsb29/fv3r1alVVVXNzcxKJVFpaOnbs2LNnz5qampqbmxcWFmpra4eGht65c+fDhw/9/f0zZsyws7OztbVdtWpVXV3dhg0bjIyMWltbp06dumvXLnt7ew0NjdevX/Pz86M3D4Sjo2NCQgJzVPPnz/fx8fHz83v69Cna0tPTY2xsTEhWXV09ffr048ePUyiUoqIiBQWFFStWnDx5kk6noyG94ODgo0ePVldXE1rSz89vypQpN2/eVFNTc3V1jYyMnD17dlhY2Llz55SUlEpLS3fv3q2np1dTU7No0aI9e/YwGAwdHZ2VK1du2LCB+frS0dE5efJkTEwMACAhIeHNmzc8PDz5+fnBwcGjRo3asGHDw4cP8elzcnJ4eHhiY2NTUlK4uLhSU1OLi4uFhYVjYmJCQ0OHDRtWUlJy7Nix8ePHb9++PS4uDiuira1t+/btGzduvHnz5vjx40tKSghxFhUVycvLX7x4kTnDw4cP6+jomJiYnDhxguBjjI+zoKBg7dq1EMJr165Nnz69qqpqUGfqr8mQVS+7evVqS0vLlStX/pnibt++raWlFR0dffr06X+mRAIcieQ
hCZlMxg/REejt7f3ilMKenh5+fn6WIqs0Gg2/vaurC/9FsbOzU1RUFCtIQEAAQkilUlGJHR0dYmJi6K8QQhqNxsXFRSgFPU4JCAj09PQICwsTSr9z587+/fvZ1VpISAj7dlddXZ2SkmJiYjJANSGEFAoFleLi4tLW1mZra4tFCP7akhQKRUhIqK+vj4+Pj5ube+BmbGxsjIyMXLp0aUtLy8ePH6dOnYofB6HT6QwGA31VJkClUgEA/Pz8A4TNju7ubuxtGF9EX18fPz8/GrAfZFYQwrt3727evLm5ubm+vr66utrc3Px7xfnrMjRXNfT09IwbN27MmDHV1dVycnL/QIlaWlpkMvnf6vYAAEZGRmJiYpxub4gxQLcHABjMTHrmLgeD0FGhsTEMrNvDCuLi4sJKxHcqXFxcLG/9aF0Kcwy+vr5hYWED9GRYrXt6enbu3AkA8PLyYpcYiwErpbu7u6urCx8h+GtLojk1KDbwpWZEM2iEhITQtw38DBcAAN7BnMC39CX4j8D4IlDMf2vhR1NTk7Ozs7i4OGax8h3j/HUZsu98HDhw+DmpqKiorKxcuHDhYBKHhYVpaWlJSUkNMvOenp6QkBA6nb5w4cLv9dRbVFT06dMnJSWl6dOnf5cM/2E4FivMcHo+Dhw4cODw32Ioz+3kwIEDBw4cmOG8+XLg8GtDp9NDQ0ONjIx+dEEfP35Eoj+EEcEBqKioyMvLmzlzJoVCwQssMEOn0zMyMtDv4cOHy8vLM6srfHfCwsKWLl2KjfNRKJS3b9+yWz7x6dOnnp4eAICAgMD48eMlJCS+utyKioqGhgYAAFqN3tTUVFZWJiAg8A9b8hYVFbW1tXFxcSkqKpaUlAAARo4cOWHChM+fP3d2dk6YMGHkyJH/ZDz/JJx3Pg4cfm36+vqQtMq38EUtm8jIyPz8fElJScwX4ot4enreuXNHWVn58uXLX1SeRPM45syZ09DQkJGRsXDhQj8/v0EW9NVgEs+DEZ4eM2aMqanp69evSSSShYXFwYMHv7pcSUlJOzs7Nzc3tMhdUlLy+vXr/7BgKQBARERk7dq1nz9/lpCQePnypYWFBRoZFRERuXLlyrd07T8/nHE+Dhz+62CqzQOksbW11dPTY54ZOAAbN25cuHDh7t27AQBPnjxBvhmBgYGlpaXjx49PT09fvHjx5MmTDxw4YGJisnXrVjqdzsvLW19fP3LkyIcPH3p4eKSkpDQ1NR08eHDy5MnKysoFBQV6enrDhg3bvXv3pk2bSCTSvHnzli9f7urqSiaTSSTSiBEjBAQEpk2bNsCyd2aoVOrGjRtR3ywtLd3U1MQupba29t69ezds2FBaWjpx4kQymYxNKHVycjp69CjSDwMA3L9/n5+fPzk5ef/+/TIyMoQqzJgxIzY21szMrKqqSkBAgE6nnzx5Ei3B+vTpU0xMzOjRowsLC3fu3CkjIwMAyMrKCg4OVlZWJpFIAABMKB9faFtb2+nTp0eMGCEjI9PV1XX06NHBdKXnz59HGvcdHR2ysrK5ubkKCgpv375FGjqDb8Nfj39vKSEHDhzYkpaWNnHixHv37i1btszCwgL+VcgYr9ccERGB5NdfvXqlpaV1586dlStXOjk5PXjwYOXKlUihGL8vQdEYU23++PFjZGTkmzdv7O3tS0pK8MHk5OQsXLjw6NGjqampBHlrvBQ1oQrPnj0TFBS8desWJqEQFRW1ceNG9LupqSkgIABCuHDhwuLiYgghEjmrr68nkUiGhoZIdBtCaG5ujlbid3d3o+XVioqKvb29jY2NY8aMgRCmpKRACI8fP+7r6wshTEpKwoeRlZU1duzY6urq27dvL168uLW1NSsr6+zZs5jE8wDC0wQWLFjg7+/f0dFx/vz5NWvWYNubmppmzZoVEhKC/tvd3a2rqwshrK+vj4mJYVkFCKG6ujoSXg8NDY2NjYUQtrS0zJo1i0qlQgjLy8vnz5/PYDDq6+tnzZqFxBkYDIarqyv
LQiGEp06d8vLyghBu3LgxNDSUzZn1F0pKSoSEhNrb28vLy2VlZe3t7SGE58+fZzAY7ES3hwacno8Dh58UKSkpMplMp9Pl5eVjYmIIjhaYmQaEEKn+t7a2IsONzMxMTU1NCGFkZOT27dsH7wFibW3t6+vb1tZWU1NDCMbS0vLVq1cQwmvXrt26dQtCmJeXJysry2AwpKSkCMogGEFBQaNHj543b159fT2EcOvWrVjMEEIkG7Ro0SLU0aKe7/Tp00inBhUHIdy8eTNyFrt8+TLaoqSk1NbW5u7ujvWjENfzMbNy5cq0tLSenh4ZGZm+vr5Xr14h3xXMRAVJrKGWpFAoTU1NEydOZM5nwYIF5ubmp0+fnjdv3tWrV7HtSO5r6dKl2BYDA4NNmzZhskHMVYAQ+vj4qKmpQQgPHTrEYDAghE+fPv3tt9+wBGpqasXFxY8ePcIcY7BGY1movb392bNnw8LC1qxZ09jYyLIpmJk7d667u/vVq1dDQ0Pl5OQ6OztR1YyNjb29vdHLK7vj++vCGefjwOEnhZeXd9iwYdzc3PPnz8/KyiIIGeONRNB3LcxORFRUFNMybmhoGLwHyNGjR69fv25jY0NYBo6HIG/d0tKCSVET6O7uNjY2zs/P5+fnR7Ko7e3teNVplmvLbGxsrl+/7uLiYmxsXFdXhzaKiIhISEjgV8Tfu3fP29vbx8dnMC1pZWX1+PHjiIgIpLtWWVk5fvx4wGpJOEF4mhl9ff0LFy7ExsZevXoVfSCl0+l5eXkNDQ2NjY2Y8m1wcLC6urqWllZ1dTW7KmzYsKGtre3x48ejR49GzUKhULD19QAAfn7+np4elo3GslAAAB8f3/Dhw3l4eAbvsrt58+aHDx92dHSsWbOGn59/9+7dSPt3MKLbvy6cno8Dh5+d2tpabW1tlj4eCMg0Wo/fMngPkO7u7vT0dElJSWS1Q8gQ/tX4AslbD7DG/OrVqwAAMTGxffv21dTUAAAWLlwYGxuLJUC6WRh454Thw4dj1hAAgHHjxqmpqe3bty83NxcFc/ToUU1NzcuXLw/QCBgGBgYxMTElJSWOjo5OTk7YCnesUuyEpwlgEfLy8goKCiKfitDQUFNT0wkTJhw6dOj27dsAgOTk5I6OjsOHD1tYWAQEBLCsAgCAj49v3759tra2mzdvRlsWLVqELJkAAL29vZ2dnSoqKjo6OnFxcVjRqNGYC0XIyclpa2sbGBj4+/uDwTmQbNiwISsrS1tbm4uLy9LSMi8vT0VFBQwouj0E4Kxq4MDhJ4VOp7u7u6MJeLNmzUKOFrq6uhYWFvX19chMQ0tLKzExsaWlJSEhoaWlpbm5ubi4OD4+vqysrLS0ND4+vqKigouLC78v5gHS3d2NeYDY29urqKhERUUtW7ZMVVWVMLuhoqIiKytr+PDh2tra+/bts7S0fP78eV1d3YMHDzIzM1tbW1++fMm8rCIxMfHMmTPTp09//Pixo6MjAMDGxubgwYNOTk7Tpk3r6emZM2cO8iQJDw/fvXu3p6cnAODOnTvi4uLx8fEPHjwYO3ZsY2MjqmlLS8unT59ERUX5+Pjq6+tjY2OvXbu2ZMkSSUlJa2vrlpaWzMzMzs7OtWvXMi+H4OPjW7du3fLly1VUVCZMmLB69WoAAGaiMmvWLFVV1VOnTunq6ra0tCQmJnZ1dbW3t3/+/BmvVRYXF/f58+fnz5/X1tZmZWUZGxubmZk1NDR4eXk9evRoxIgR8+fP3717t5WVFS8v7759+5DTiLW1NXMV1NTUUJ47d+7Mzc3FFg8oKCjY2tra29vPnj07Ojrax8eHn59fQ0PDysoKCYL39fUpKCiMHj2auVAVFZWPHz/W1dXx8PBERkaePXsWALBjx47ly5ebmZkNcJpJSkr+9ttvurq6AAALCwvsPfjixYsGBgYLFy5cvnz5b7/99nfO3F8AztxODhx+UmRlZSsrKxkMBrbgDC9k/HcZeF+k2sxgMNAHty9qXBHkrVm
ChKpramqkpaXx4pBUKrWvr2/wiwK/C3Q6Hd3TqVQqs1LlAMLTX1cWAIBMJg/w0Rijr68P/4UTAAAhbGtrI3xgZLnxiyCp8YEPE/hrmxB+gyEq7Mnp+Thw+BlhMBji4uJ1dXVf3dVx4MCBHUO/53N3d1+1ahUa0EY0NjaGhYVJSEisWrWK8LTFTE1NTXR0NABgxowZmF4ttnH69OnMsgthYWHNzc0rV67EKyAgB68lS5YQEvf39/v5+UEI0WonZjIyMvLy8gAAXFxccnJy06ZNG7x6LzvIZPLjx49/++03zl31p+XDhw/oO9jKlSv/7Vg4cBhy/HPTSP8N7t+/DwDA23JWVVVdvXoVdTOXLl0aTCbXr18XERFB08QRFy5cAAAYGBigucgEXr58iRUaGRmJFlQtX7585cqVLPM/cODAAAeCTqfPmTMHAFBSUvLu3TtZWVkHB4fBhD0AiYmJXFxcycnJ35gPBw4cOPyKDOW5ndnZ2cyqTkJCQkeOHLl//z4vL297e/tg8hk1apSFhUV6enpqaioAoKurC42CjBs3jouLq6+vD3m4ox8A5wSWnZ29cePG9vZ2Go0WEBDw7NkzBoPR19dHpVL7+/tbWlpQMrxzWH9/P2EaFTc3N3rJk5KSWrhwoZ2d3enTp4OCgtBfGxoa0Ld4LGcGg4FEKCCEra2tWD50Oh0Tp5g1a1ZdXd3s2bPRiAuDwejo6MBPb2tqakKqThw4cOAw9BiyPV9PT09MTMyyZcsI26WkpLq6uk6dOsXHx4dNJv4iO3fuFBAQuHfvHgAgMDDQ0NAQbafRaIaGhiIiImQy2cDAgGBxGRMT09LSEhERkZ6ePnPmzNWrV7e1tc2aNUtJSenChQsaGhr29vb49AkJCUuWLAkLC1u3bh27SHR0dAAAQUFBXV1dixcvjo2NNTIyev78eWNj45QpU6ZNm3b8+HF5eXlra+sTJ04oKCigmeVubm7Hjx///fffVVVVS0tLb926NWrUqNjY2EePHgkKClpZWRkaGo4fP76jo6OxsdHY2DgxMXH58uWDbBwOHDhw+LUYsj2fh4cHcnNmhsFgoCXA7IbWmBk5cuT69evRTO729nZsyhYvL6+6ujoAYPjw4bNnzybshXopfX39uXPnKioqAgAkJSVVVFQEBAQcHBx27Njh5OTU3NyMpT98+PDkyZPXrFkTFBSEVhwzg5yym5ubb9y4kZOTY2ZmtnDhQisrKwkJiYkTJwoKCl69enXevHnx8fFOTk7z58///fffAQD5+fmioqLbt28vKCh48+YNNmd97ty5AIAVK1bcvHmzpqYmJiYmOjo6ISGBm5v74cOHg2wcDj+O0tLSkJCQuLi4AdJACLOysoqKitglaGxsfPfu3XePrb6+PikpafDpqVTq+/fvscXpGD8ovJ+T8vJyPz+/8vLyv7VXWlpaXFxcSUlJV1fXwCnb2tpS/6SkpAS/RJIDnqHZ8/X09Fy/fn3RokVHjx4FAKB/McTExE6cOOHq6pqdnY3Wog4GW1tbKpW6adMm5lkqfxf0sVRKSopOp9fW1mLbi4qKCgsLExMTAwMDZWVlWe6LvoXOmDHj06dPaH6KiIhIZ2cnkrJF07IFBATQD35+fvQt9MyZM93d3WjFK/PiVl5eXjTTh0qlrl69eurUqYaGhmg9EId/kYyMjPDw8DVr1rx//36AW1hnZ+fZs2eTk5NZ/gkA8McffyA15O8FyrapqWnwQiEo/cGDB8vKygjbv3t4Pw46nf4towBNTU2urq6amprYcvXBsGXLluLiYgkJCSsrq4KCgoETi4mJJSYmGhoaQgjDwsLU1NTQ/DgOBIZmz8fHx3f48OHNmzejlxsdHZ2urq49e/bk5+fHxsYiYYJJkyYtWrSI8H2SJRDC/v5+TU1NLS0tKpU6ffp0CCH4U9CBn58fm+fCcncGg0F4UkM7NjY2CgoKysvLY9sVFRXb2trWrl2rp6eHib4TePXqlZSU1MGDB9XU1Cg
UCgCgp6dHVFR07NixA1RhzZo12dnZe/fu/WJlU1NTw8PDnzx5EhgYOPSEG34tnjx5MmXKFB4entOnT7M7HwAAYmJiysrKzNtJJBIS9Z85c+Z3jOrNmzfoQ8LUqVMH/9UEADBmzBj8FGuM7xveD8Xe3r6iouKrd4+Li+Pl5VVSUlq7du3g93r79u3MmTOnTp3q7++PJgfQaLSrV6+6u7v7+fmZmpoiez8DAwNvb29ubu7p06dLSEjMmTNn//79GhoaSODt8+fPCxcu9Pb2fvbs2Z49ewAASHzAy8vr1KlT6Anm8OHDrq6u69atc3Nzs7W1RbI7Q5WhqeHCx8e3f/9+AEBAQIC3t7eenh6JRHrw4MHMmTOzs7Pd3d3t7OxEREQGI/pXXV3t7++fnJzs6Oi4b98+BoNBo9FcXFwAADExMZmZmWvWrLl586alpWVdXZ2UlFRsbCz6dBMfH79//35paemrV6+KiIgUFxfTaDTUlzQ3N585c8bV1fXy5cuCgoLok1FCQsK1a9eMjIzmz5+/aNGic+fOoQDS0tLQE+K1a9eEhYVra2vT09NHjhx56NChmJiYgICAd+/eeXh4NDc3FxQUUCiUvLy84uLi1tbWvLy8wsLChoaGyspKcXHx5OTkq1eviouLR0dHo2f2pKQk9Nk2JSUFSVulpqaOHDkyIiJiwYIFJiYmo0aN+hFHh8NgKC0tTU9PHzFiRE1NzZkzZ/z9/UVERPT19c+dOxcYGKirq7t3796amhovL6/x48fHxcVhsiAYgYGBmZmZkZGRkpKSra2tN27cCAkJOXfu3OLFi588eUImk0NCQlxdXdF3eIze3l4HBwcVFZXExMRNmzZNnDjRwsJCU1OTRCKVlJQEBwd7eHiIi4vPmTMnMTExLS3Ny8vr4sWLubm5M2bMCAoKsrOzq6mpCQsL8/HxkZWVPX78+PLlyx8+fPjgwYMBlq7jw5s8ebKJiYmZmZmNjc2DBw9GjRrV2Njo7++/cOHCoKCgnTt3Wltb4+OPi4sLDAxUVlaWkJBoaWnp6OgQFxePiop6+PChnJzcw4cPp02bFh4efv/+/cePH6OU0tLSAgICU6ZMCQoKsra2Hj58uJGRkY2NTXp6OoRw2bJlERERqqqqx48fLywsjI2NzcnJkZaW3rNnT1BQkKSkpISERG5ublVVVWhoqJWVlaam5vbt25HE9tOnT2NjY/n5+fn5+QnKKQ0NDW/fviWRSK9evdLV1cU3srKyMpZDTEwMYa3R2rVrly1b9vjx42XLlqElLufPn5eTk9u1axcAQEJCgkwmKygodHZ2zps3D79jTk5ORkYGMp9CA/xbt24FAKDVWbNmzbpy5YqlpWVQUNCZM2dQJ6qlpeXj42NgYGBoaIgE7YYs/8aE0n+Hrq4u9KO5ubmzs/M75kwmk7u6unp6epj/RKVSe3t78VtMTExUVFQoFAqm446nr6+vqalp8EXX1dWhmaUDw2AwGhoaIITt7e3I7oQdZDKZwWAMXuidw4/DxMQErY1ZsWIFshQgeCysX7/+48ePEEIbGxtkT4MnISHBxMQEQpiRkaGtrQ0hfP78+Y4dO5KTk83NzX19fVevXn3w4EHCXsxWDMbGxm/evIEQWlhYODo6YsYOiYmJenp6EMKgoCBLS0sI4c2bN48cOQIhPHnypI+PD4VCQS48WEWMjIwSEhIIJRLCgxB6e3tv374dQujk5MRgMLKzs7W0tCCE5eXlwsLC8fHx+Phzc3ORSROE8M6dO8eOHYMQPn78WFdXt7CwMDo6mkqlysnJMRgMfMr79+/TaDRnZ+dz585BCKdNm4bcG8TExLq7uykUirKyMmTyK9DV1c3NzWX2vsB8MwICAnbu3InkpJkPqJeX1+HDh1k2Mt55g0B/f7+jo6OgoOCuXbuQUcO4cePw5gk0Gg3iLC+io6NlZGScnJy2b9+uq6v7+fNnlExOTi4lJSUiIgIdlOrqai0trfb2dktLy/v372O5zZk
zp7q6mmUkQ4mh+bWTJdjiAUlJye+rnDRs2DARERFmtUAAAB8fH36xPJVKbWxsrK2t7e7uRnNVCPDz8/+theqjRo0ajLYQFxcXsrgUExMb+APvsGHDuLi4pKWlBx8Dhx8Npj5F8FiIj49XUFAAALA89/Cg1whkQZCRkbFq1Spzc/OwsLCbN28SUjJbMfDw8KDrRVtbG/8BHBN4ZOcRISgoKCkpGRoaitee/mJ4AAAzM7P379+TSCQkkIYFMGHChBEjRiQmJuLjJ3hW4ENVUFAgkUjx8fEAAAaDgU85d+5cPz+/5uZmFBhWheHDhwsICAgKCqIVQSz9Cpi9L7Cc9fX1u7q6NDU1B5bIYOl3gcWGp7u7m5eX99SpU5mZmaGhoUhJnODewOw4MWLEiGPHjnl6eq5atcrU1BTbLi4uLikpiUnTtba22traysjIsJwM2NXVZWxsbGxsjD5xDTH+Qz3fz0B3d/elS5dev37d2Nj4b8fC4WcH/jlyjJ5SmbdLSkqiT+UQQuYpMJgDA7Y7+iEjI+Pv74+2MM+LGcCKgUQiaWlpMWfLMmYAwOfPn93d3fX19QUFBZlT4neBf3VI4OPjs7GxWb9+PcFavbe3V1RUVF5efoD48aF6eHg0NjYuWbIEWeNif6XT6Rs2bNiwYQOy5RugCgS/AlR3dt4XAICysrKnT58ePXoUyVOwq+ng/S7QF2YAgKqqqomJCUvLC8JnSYLlBX4S36RJkzQ0NDZt2lRQUAAhlJSU9PT0TEhIwOeGVX/48OHBwcHBwcHIYWqIMTTH+X5aJCQk5s+f/29HweEXoLGxsaioKDY2VlFRsbS0NDY2VlxcnOCxcOnSpcOHD8fHx3/69Km/v7+/vx+vuTx58uRPnz65ubn19PQUFRVVV1dHR0cXFhbq6Oi4ublpaGjMmzfv5MmThHIJVgxoY0BAQEtLC4VC2bhxY0xMjL29vbKycnp6emFhIXKEKC4urq2tTUlJqaqqqq2t/fDhg5CQ0KpVq3Jzc8+dO0en0728vOTl5QsKCmJjY+fOnYufsPP+/Xt8eK2trSNGjNi1a1dcXBw2I6akpCQgIKCiosLV1XXu3Ln4+IOCgoqLi/Py8qZMmQIAiIuLmzJlSnJy8vnz5+Pj411cXPj4+JDTOp1ORyknT57My8tra2srLS0dHh6+cuXK6urq5OTkyZMnNzc3R0ZGSktLt7e3f/r0ieBXMHfu3IsXL54/f56db0ZCQkJSUpKioiL+TQvR2dkZExNTUVFRUVFBaOSamhosB8InGUVFxY0bN1ZUVEhJSeXm5np7ewMAXFxcjh492traOmrUqL6+vtWrV1dWViLLCzMzs8DAwJqamjt37nR0dCQmJnp5eQEA8vLympubf//9d35+/uDg4MOHD3/48KGqqopEIvn5+aGx2Pnz5xcVFVVWVkZFRVlaWn6Hk/gnZujrdnLgMITp7+9HLgQsfQaQAwPLHclkMl48iADeisHU1NTW1lZdXR0bIxggW+YABAQEIIRUKnWQuyCqq6tTUlJMTEwAAHl5eQcOHAgODhYWFsa6TJbxu7i4tLW12draYituKRSKkJBQX18fHx8fvrtlMBj9/f0CAgLIUGKASAh+BVjdWXpfMBgMOp3e398/cJ6IwfhdAAB6enr4+fnr6+vHjBmDT9zV1SUgIDAkjRT+ATjvfBw4/MLw8fEN4K0zQGczQLcHAMAPhHd3d3d3d+O3DL4PQym5uLgGv0tPTw8adkIvKyiAzs5OQsAs4+/u7u7q6sJ7A6ERUOZRN25ubrTxi10UoWvBKsJS7Z2bm5ubm3uQbkeDnG2AIsTcdP/u7hxYwhnn48CBA1uqq6vNzMxQ5/fPlCgsLLxx48bbt29jEzGampr27NmTlZU18I49PT3jxo2bNm1adXX1D4+Swy8O52snBw4cOHD4b8F55+PAgQMHDv8tOD0fBw4/I99RephGo6F8cnJy0JaPHz+mpqbi1dL/ATo6Ol68eMFuHcJ
XQ6fTkSPmFwkLC8NP8W9vbw8PD/8uJX5RdJvBYCQmJv4gOcCPHz/6+PgQJBI5ItcDMzR7vp6eHnQga2trs7Oz0W8ajdbV1ZWamsrOBoEddDr93bt3mJ3eP0BhYSF2k/oKBn/S0+l07KTPz89HQqA/P18neD8YKisrUWtkZGT8u7qF31F6mJeXt6amZs6cOZh4Ap1Od3FxERcX/27hDoIDBw6sWLHiw4cPaBn+96Kvr4/ZhpMlOTk5+NHKrKwspOz1LSV+URMciVy3tLTs3bu3srLyK4obmMjIyPz8fElJSXzmHJHrL/MDdGH+fRgMxvnz5xUVFWk0WnV19YgRI7y9vdH2HTt2pKSk/K3campqVFVVc3Nzf0ywf6Gjo6Ovr2/btm1OTk4DJ2P3p82bN/v6+ubk5CxcuDAtLe2LJaKFun/88ceTJ080NTWfPn36NXF/AwPUhSWNjY2HDx8uLi5mp/b0LVAolHXr1q1evTo2NvbAgQOLFi3CRO/+eaKjo1VUVNDvzZs3I10uRGlpqZ+fH/bf4uLiGzdu3Lhx49SpUxBCf39/fX3958+fP3369Pz58yiNjo4OUsyCELq7uxcWFkIIaTTa3bt3X7z4v/bOPB6q9v//p5QoFVnTHinlbtO+KEUbWSpRCHcSN2lR3S3aZUkLZblDEiJlqYQIFYWk1ZJkH/u+DDPDzFy/P67ffR7nc2ZJy939vc31/GvmzHXe13Kuc95zttcrytvbG74eDgDo7e11c3Pz9fUNCwvbsmULLmVHrLS3t/fKlSsWFhaBgYHHjh2rq6vj35empqZFixYBABgMxqZNm35kWH4WVCp14sSJPxKhsrLSxsYGAJCTk7Nu3TquZY4cOQIPHdra2t965OkLdnZ2jx8/Ji0cPXp0QUEBAKC2thYqzwEA7t275+rqGh4efuDAgdjY2JKSko0bN966dQvwnWn9lf55zjdgwAAHB4f6+vrS0tIxY8ZYWVmlpKTA5fLy8gsXLvymaPLy8mPGjPlnWvo/QBV8YWFh+E4uL3ANfq5wKrtjGBYZGenm5nbnzh0HB4dHjx7hyu4YhkF/wYULF+7YsWP37t2enp4YhjU2NpqYmDg7O0dGRjo5Ob1///7Lly+rV6++cePG6dOnk5KSsJ+k7I4L//ed7xO87yMiIiLKysrjx49fuXLl5cuXS0pKoNbGV6Xuu7q6zM3N/f39tbS0goKC+mKL0Xeg9LCOjg6+JCkp6erVq/jXwMBAVVXVAwcOwJmjqqra2dm5ZcuWbdu2GRoawjIODg6enp5QcKSoqAjaOzg7O4uIiGzatOmPP/5ISUmJi4vDMOzMmTMjR460trbetm3bzp07qVQqZ6WDBg1SUVEBAFhYWEyYMMHd3Z3U5mvXrvn5+Z04ccLb2xsAEB4e3tDQEBER8fr165SUlIiICCaTeevWLW9v77Vr15aUlNy9e1ddXd3Ly0tNTe3ly5fEUEwm89KlS5GRkQYGBu3t7aWlpUePHr19+7aZmVlVVVVaWtrMmTMxDOOM8OzZs5s3b547d87Pz6+8vFxDQwNe7/Hz8wsLC7ty5QqM//nz57/++uuPP/4gOXNdv35dU1OzpaXFzMxsz549bDbb19c3KSkJrxHXBMf+Ft0mqavU19dHR0cnJCRAM7LExERra2u4Reh0+vHjx0NCQqytraH/4rVr1+Lj452cnOBQBwQE6OrqxsbGEptE6ntubm5ubm5qamp2djaxGBS5fvLkiZycHBS5Tk5Ojo6O/vPPP42MjI4ePdrd3c1H5Jo40/ot/3bq/QcxNTU9evQoAODPP/8UExOD+ww8+QsKCvLy8lqzZg3UeCVBo9GOHTsWHBy8e/fu58+fAwA0NDSOHj2qpaWloaHR2dmZlJSUkJDg6OhYXFzc3d195coVNzc3HR2d9vZ2BweH3bt3r1u3Lj4+XlNT093dHQDg5eUVExNTWFjo6+trY2Nz8uRJzkqZTKa+vr6FhcW7d+8uXrxoamp64sS
JpUuX1tfXs1isQ4cOPXnyxMjIqKOj4/LlywsXLkxMTOTaa1tb2zFjxiQlJeFLnjx5sm3bNvi5sbExIiICALBixYovX77AejEMq6uro1Aourq6sMEAAGNjY6gy3NXVVVlZCQBQUFCg0+kNDQ1jxowBAMA/sFDftrq6Ggr+kigpKTly5EhoaOiOHTsoFMq7d++mTp0KALh58+bq1avxLr969cre3t7MzGzv3r3Tp0/PzMzkNaoPHjywsbHR1taOjY0lbaba2toNGzacPXt2wYIFkZGRCxcu9PT0XLdunaurq5+f37p16+CYBAUFpaSk2NnZ8Zo2jo6O1tbWPT09cXFxY8aMwcXNYa8BAFD5iUKhwJOYqKgoIyOjtra2wsJCOp2uqKgIAMjIyOAVv+/wkh7u6uqKiIgwNTVNS0uDS9LS0iZPnhwXFwe/FhcXz507Nysr6/bt23AjAgBYLNaUKVMiIyPfvHkTGhoKFyopKcGNCwCIjIw0NjYGPASROStNTk7W1tZ+/vy5qalpamoqseWvX7+GV88AAHJycgUFBeXl5fPmzcOXAABIwtklJSVz5swBAISFhdnb2xOjXb58OTo6GgCQkpJSVlampaUFdbq9vb2NjIwAABISEgAAUgQGg7F69WoAAI1Gu337NgBAU1MzJycnLy9PT08PANDY2AjP+Uiy1Hi9NBpt8uTJdDo9MTFRS0sLAODj4wMtyWCNvDTBiY2HItcAAG1t7ZSUFACAiopKdXU1SbS6rq5OVVW1o6MjPz+fUxEbh7Pv5ubmjx49Av8Lp8j1jh07iNdI4MKvilz3Y/rnOR/ExMQkODi4vb1dUlJy3rx5d+7ciY6O1tfXz8rKevLkibi4+KBBg7iKsXp5eUlLS5uamtrb28PphWHY+vXrHz16JCIiEhgYGBMT09zc7ODgICoq6unpSaVSx4wZk5eX9/Tp00mTJgkLCyckJKxfv97a2hoaDAEA9PT0jh49OmzYsNmzZ7u6ukI9XCJCQkKqqqqqqqqzZ8/GMGzixIlnz56dPXs2FJtXVlbW0NBgMplv375dsGDBuHHj1qxZw7XXHh4eNjY2Ojo61tbWMKuFhIRALQwMw6SkpOCp0oABA4h6EN7e3h4eHvX19crKynDJgAEDCgoK0tPTr169Cs3/BgwYQKPRoqOjodc88dRZXl6eq/Wavb29oaGhsbHxwoULDx06NHv27Pb2dgzD1NXVoRQy7PKCBQsUFBTk5OQ8PDwOHTp04sQJXqOqo6OzcOHCadOmaWtrkzaTrKzs0KFDZ8yY8erVq1WrVtXX19vb2zs7O0dFRe3atevAgQMREREYhvn4+MjLy/O/wfPhw4cLFy74+PisX78ev+E/YMCAV69eJSUlQanMAQMGAADa29tjY2PV1dVHjhw5depUPAI0u/9xuEoPR0REDBs2bNasWdBqGMOw5cuXJyQknDt37vz583DJoEGDxMXFJSQkcOGSgQMH7t+///Lly5GRkZs3b4YLaTQa/pa3sLBwd3c3xkMQmbNSWMvQoUNHjhyZl5dHbDauyAyHgut9I5JwNhTjxjAM1+PGycjIkJeXxzBs1apVEydOxIMvW7YsNzcXbyEpwpcvX+Ab5SIiItu3b8eLvXjxgiTzzVWWGq64YcOGBw8eQOmviooKYWFhODKcItEk0W2uwDLi4uKtra0k0WohISEjI6Pp06dXVFRwKmJzDized076InKNvyuJwznTkGL1f5XVq1ez2WwrKys9PT0LC4uAgIC2tjYxMTH+WvUYNyV1DMPgLqGhoUGhUA4fPnzx4kVbW9uRI0fm5ORYWFgYGxuXlJTo6uoSNdd1dXXhgxJQbYHXDsYV4o7Ud83775v0GIbZ2tpevHjR29tbX1+/trYWLhQTE5OQkCCKXFy7di04OLgvvoaQvuyoEJLKPp9R5QzOKXjPyz0Aw7Dz589ramqSLiKRmDVr1vHjxx89elRfX+/o6Igv/yap+8zMTHjgeP78+VcHiiu
8pIfb2tpUVFS2bNlSXl4OH/OJj49XUlJKTU3FlTaHDh06derU9evXKyoq4n+zzMzMPn/+TBQS09TUfPXqFfz89u1bDQ0NjIcgMmelcEDmzZu3b98+f39/YstxRWYMw1pbW2fPng3/aMMl8IorL+FswPGG8ejRo+/du4dhWGdnZ25uLh68qalJVVWVcxX4VUZGJj09HT6/iut6AwCkpKTgtVDwt8w3SZaaGGrXrl0eHh5SUlLm5ubGxsYbN24kVsFLE5wYgVcZkmg1nU63srJKSEiws7OTkpLipYjNte+kGvsicg0l2XC4zrT+rVjdnzOfkJDQtm3bKisrp0yZsmXLlsLCQjhX+GvVY3yV1BsaGhYvXtzV1fX69WtJScm//vpLRkYmLCwMFibJTAgJCVlYWOzYsQPuMHx2MAivnYSkeY8X4+THJz0xuY4fP15FRcXe3h7+owcAHD58eP78+S4uLvgqnMcpriNJ2lG7urpIxw4cqLLPZ1Txwemj4D2phaNHj/7w4UNsbCyvR3x/ROqeWNfixYvhgWPFihV8hogXzc3NuPTw2bNnIyMjoZqXj4/PgAEDJkyYMG7cOBUVlcOHD9Pp9Dt37gQFBT18+BD+W3/27BmFQomKioLXmXE1r6FDh1pbW0N7Uoibm1tSUlJ4eLi3tzeNRoNmp97e3vfu3QsICHj06FFUVBSTyeSslEqlPn/+vLCwMCIi4uLFi8T/BxiGLVmyxNjY2M3NLTo62sDAQEFB4f79+2VlZTk5ORiGKSsrHz9+fOXKlVQqVVVV1c7ObsKECRkZGTU1NZWVlc+ePSssLITXBiAHDx58+PDhsmXLTp48qays7OHhER8fD++3OTs7p6SkNDc3Q6loYgRhYeEjR47MmjVLX1+/t7e3uLi4qKgoKSlJR0dHTExsx44dHh4eTCbz48ePUJbayMgoNTWV5MY8c+ZMeXn5rVu3btu2TVFREVp94TXimuCcotvEaXD+/HlYAF6tLS0tzcjIsLe3p9Pp9+7d8/f39/Pza29vd3R0pNFolpaWcnJyUBHb0tKSTqcTzy9JfS8vL3///n1iYiJxuBQUFI4fP37lypWQkJC8vDx4y9nW1pbJZLq6usbHx0dGRjY0NBQXF0ORa14zrZ/zU66Z/p/l7du3Pj4+8PPevXuhKSuDwVi5cuWcOXNsbW2rq6s51+rs7Ny8efPdu3c9PT2fPn0KADhw4MCJEycePXrk5uYGvyYkJPj4+Hz8+LGoqGjy5Mlr1649duwYlUo1NTVdu3ZtbW0tDNXU1GRlZQU/x8TEyMnJGRoa3rhxg2trk5KSFixY8PDhwy1btujq6tbW1mpqasIHlMeNG3fq1CkDAwNjY+PGxsbJkyfj/SK1XFlZ+fLly8HBwatWrYIOk3Q63cbGxsXFJS4u7t69exQK5cuXLxMmTPDy8oIHNQzDjh496ubmpq2t7e/vDwCor69XVVU9cuRIVFTU6dOnL1++XFhYOGzYsISEhJ6enmXLlsHaP3/+LCcnd/PmTV7jn5+fr6+vn5iYeOrUKdiYnTt3btu27fLlyxMmTCgtLYVdTkhIgLddY2JiDh8+XFVVxWtU29vbTU1Nly9fXlZWRtpMVVVVs2bNOnLkCI1Gu3//vri4eFFR0Y0bN0aPHl1cXOzu7q6iolJfX29oaPjy5csTJ06QHIMhnz9/XrJkyaxZszw9PQ8dOqSjowNnSF5enoiISGho6N27d7dt2wZvlcnLyxcXF1dUVEydOhUarj548EBMTCw3N5fXgPxDMBgMJpNJpVK/WpJrrzs6Ojj9jbku/CZoNBrXCEwms6enB37u+6OzpGeA+/hIcFdXF7ynxRkNilbDrwwGg1dn4W1OWIbzV/4+z18t09HRAW8cstlsNptNdMzmszW/2nfYawqFAoPjMBiMn2vK/d+l/6uX9fT0QM1Z/AOEv1Y9xqGkzmAw2Gw2vDfAZrPhDRJ44QsAQKPReEnfMplM/PoYSfedE14q+CT
Nez5i+byU3Xt6ehgMxr+ictvR0UG04WUwGMLCwmw2G/6ZhX3hVNnnP6o4fRS8x2Gz2SRRYwQCIWj0/8yH+E9w4cKF5uZmXq8DIxAIxE+kP9/nQ/xXQCr7CATiV4LO+RAIBAIhWKBzPgQCgUAIFv3Tk53FYsHnpzEMGz58+KRJk/C3Vn8xDQ0NBQUFK1eu/OmRc3NzWSwWfO0dkpOTIy0tjb9R3tPTk5aWtmDBAuLTJd8EKeCPQKPRUlNTNTQ0ON2x+VBbWzt69GjO5VQqFaqWTJs2jfPX7OzsefPmwTe4MzIyurq6Vq5cyWmTXVpa2tjYSFwyc+ZMUVHRnp6eyspKRUXFvrezrq6utLSUKAT1+fNnBoMBNa7+RcrLy/Pz8+fOnQvlSPiUZDKZ8DUPUVFR2Ox379719PQoKCjwel0Egfjv0j/P+YSEhISEhBYtWlRfX5+Tk7NixYrw8PBf3Iavirj/YPCrV69ClUUoBo9hWFlZGfE1wdra2p07d/JRlPgqpID828O/QFVVlbm5OfHdOP6w2WwfHx/4Oj+J3t5eMzMzNTW15OTkqKgo0q+VlZUbN26E7wgePHhQSEho0qRJW7du5byqLyMjY2Njc+vWLfjkbVRUVFFREVQb+FYp0cbGxi9fvsDPHR0dPT09bm5uCQkJ3xTkpxMQEODp6amkpOTi4sI5UCT+j/g5IBC/hv55zocRhJhlZWV7e3s9PT23bdv2y2qnUCguLi4+Pj5z586Njo7+6fFHjBihoqICpYQdHR1NTExmzJiBq1tBJkyYAF+8/W5IAXnR09NjYWHB/9g6ZcqUbzp1CAwMXLZsGX4UTkpKmjlzJnzL+NatW7NnzxYTEzMzM1u4cCEuxIVhGJvNjo+Ph0pXHR0dMTExFy9exDBs0KBBjx8/Xr9+PbEKMTExBQUFBQUF+Ir99OnTmUymhITE8uXLv9WR6rfffvvtt98wDEtISCgvL7exsZkxYwZ/tZ2v0tjYuH///unTpyspKRUWFmpraw8bNsza2nr79u0UCmXJkiVr1qw5ePDg5MmTU1JSNDQ0CgoKjhw5QpRWT0lJWbFixZQpU65evXrr1i24MDIysqSkZMKECa9fv1ZXV1+6dOmpU6dGjhx55MiRTZs2qamp3b9/f9++fRiGvXnzxtHRcdCgQSwWy8fHZ8yYMXV1dZKSklBwmclkXr58ecSIESNHjoyOjvbx8ZGWlsYwrLS0NDs728jICJbx8vL6+PHj8uXLi4uL7e3tZWVlf2RMEIifRb/NfDhVVVWxsbFQuJLNZh85cmTNmjU3btzw8/OjUqlxcXHCwsLCwsJGRka3bt2iUqkPHz708fFRUFAgxbl27dqQIUMoFIqcnNy2bdt27NixcOHCmpoaKpXKqeaFi7hLSkpCEfeHDx+ePn1aXV09KSmpsrIyNjbW0tISF0OCtLW17d69e+LEiRiGpaenHz9+/Pnz5yUlJVFRUb6+vtnZ2d7e3seOHRsxYsTZs2fhKlAMHkpqXbt2TUlJyd7ePi0t7cOHD6KiolBlqrS01N/fX0VFJSkp6fz583Q6XU9Pz9bW9vXr1wAATU3NxMREZWXlI0eOJCcnl5aWNjc3y8jIGBgYHDt2TElJycTEhH9nc3Nzofq+vr6+o6Mjr+GFhf39/VtbW83MzD5+/MhisV6+fGlubs452kFBQZaWlvjXp0+fysrKwsyXnZ2trq6OYdjw4cPb2tqam5txYbPQ0FAjI6Pr16/DwaytrYUvcY4ZM6agoICU+SAsFovFYlVWViYkJEATBj6vBpqbm0tLSx84cEBXV3fv3r3GxsbW1tanT5+OjIzMzs6+efOmv7+/uLg4FO3Mz88/efJkampqdHT0d/wFgYlk6dKlK1as6O7ubm5uHjduXEVFhYmJSUdHx5w5c6qqqgwMDBYuXBgSEqKjo6Orq0tSw9m0adOOHTvodDpUA8f+FuyH4jgaGhqpqakSEhKjRo0aO3YsPL12cHDYu3fvnj1
7hISEioqKoKSLs7OznJwclHu1srISExPT0tI6c+bM2LFjYQEJCQkqlQobnJSUFBwcDDc39HP48OGDhYWFn5+fu7s7/COCQPzr9M+rnTgkIWaS9HN6evrr169NTExmzpzJX8Y6JycnJSXFysrq3LlzTk5O9fX1o0aNmjVrlq+vb05ODucpAlFUesiQIQ4ODnv27AkPD6fT6a6urqKiooqKiqdPnyatJS4uLicnN336dDc3N2lp6QEDBly4cKGurq6qqmru3LltbW1Dhw6dM2cO0W9WVlZ2/Pjx69evnzp1qry8fG9vL5PJtLW1tbW1hTJIGIdstKKiopCQ0IYNG6D0tp6e3vXr12/evIlhGI1G27Bhg46OTmho6IgRI2DAUaNG8e+sqqqqqKiooaEhm83mNbywZHp6+qRJkw4fPiwrK0tU/SYFrK2txW/L1dbWVldXd3R01NfXV1dXt7W11dXV4a+3Dx06FG/PmzdvJk2ahF+dGzdu3MyZMwMDA/Py8j58+MCZXCEvX7708vLy9vb+qosvhmE2NjYFBQWjR4/W0NCA6qbwTHTu3Lmtra18NMe/GpkrPygabmBgEBYW5u7uvnz5cqhZ+lXtcm1t7cGDB9+/f//t27dz586FC0NDQ6HTDYZha9euhTcOgoOD8UsC69atGz9+PIZh3d3do0aNUlRUTE9Px7vQ1NSUlpb24sULLS2t7xsHBOKn088zH0mImST9vHHjxs7Ozvnz5w8ZMoS/jDWn9jwuDC8mJoYbmHGFqD3NR4Udwkdq+atASZSSkhJhYWH4fAdMKpyy0Xgtw4cPHzJkiIiICFQaXLJkSUJCQlFREbxShwsG9rGzfIYXFnB1dcVVMYmq36Q4ra2tuMzNpUuXnJycMjIy/P39nZycEhMTJ0yYgGep7u5u/HDv4uISHx9/9OjRmpoaR0fHlpaW5ORkCQmJhoaG+vp6XuYJampqe/fuvXjxoq6u7ldHGJ741tTUCAkJ3blz582bN/PmzcO4KfdjfRPv/yrfJBpOEtfv6urS19cvKCgQFhaGosNf1S7/lX4OCMS/SL/NfFyFmEnSz6WlpWFhYYcPH963bx9/GWtO7Xn8J64vRPLSnpaWlualws4JMbKQkBCDwcAwDNd65lXXqFGj8vLyYG4AALDZbE7ZaF61/PHHH7NmzZo+fTrgITzP6+1P2BE+wwuL+fv77927t7y8HHYEV/0mRVNUVMRz28WLF319fdetW+fo6Ojr62toaLh27dqCggIMwxobG6dOnTpkyBBYu7Ozs4mJiYmJiYSExLZt24YPHy4mJmZoaPjp0yc7Ozuud5hYLBbeI66PiXJiYWFhbm6+fft2RUXF69evL1iwgDhQfRHv/ya+STScJK5/4cIFDMNGjhxpb2/fF+1yyC/wc+A/7RGIX0D/vM/HYrECAgIwDPP09BQXF09PT/fz8xs3blxpaWleXt7p06dZLFZQUNDSpUszMjIUFBQMDAz09PR8fX1VVVWXLFly7NgxUkBce37KlCkGBgZSUlKfPn3KzMxUUlIqLy/PysoiXWjCRdy7u7uJIu6DBw+GKuwaGhpmZmakc4XW1taPHz9iGLZy5crCwsKUlJSpU6eWlpa+evXKysqqtrb24MGDPT09xcXFdXV1mZmZDAajra0NisEfOHAgMzNz0KBBVlZWhw4d0tbW1tTU7OrqevHihYeHh6Oj4/jx4zMyMpydnYuLi6uqqjIzM6dPn97U1JSUlCQtLd3W1pabmyslJXXixAk9Pb3S0tKEhAQYkEKh8O8s9rf6voGBAa/hzc3Nra+vLy8vt7W1Xbt2bVBQUGRkpKamprKy8rJly0jRhIWFoSMoV21SLS2tuLi4Fy9exMfHw7PzqVOnPn/+HJqMYxgmKio6Y8YMYWFhCoUSHh4uJiYGb+CR+PDhw9u3b7u6ugwNDeG1RAzDGhoanj59ymQyKRQKvpCIqalpTk6OsrLy77//XlxcDCdbYmLi58+fy8vL58+
f7+joqKCg8OrVq97e3rq6uuzsbDk5OTabjfvk9ZGGhoZPnz5BKf3c3NwRI0YMHjy4rq7u6dOn7u7uq1atkpSUtLGxKSoqqqioePLkibm5OSnCy5cvT548OWvWrJs3b0Knb1tb2/3797u6us6cObO7u3vRokVDhw598+ZNRUWFoaEhlLGFfg5bt27F47i5uZ05c4ZKpba0tBD9HA4fPtzS0iInJ8dgMDZs2ODv7w/9HNhsNvRzuHnzJu7n8OzZM+jnUF5evn79+k+fPn3TaCAQPxeB03AhSj8LCwuzWKze3l78ahJ/GWs6nT5w4EA+etOkwrxEpbu6urg+r88fAACTyRwwYADnRSrOurq6ukRFRVksFn7DjCQbzQsajSYqKtrd3f1VqWgiLBaLzWYPHjyY//ASIal+k6iqqkpJSYHPZXClvr5eXFwcXoWDbeYsU1ZWNmHChG9NOV+FxWIJCQnBzcH5miCf7f6LgRuxurpaWlqaOGm/ql3OYDA4X7vs7OwcMmQIafJzXfhVeG0vBOKXIXCZD/FfAZ4W4B7xCAQC8bNAmQ+BQCAQgkW/fcIFgUAgEAiuoMyHQCAQCMECZT4EAoFACBb9OfOxWKznz5+Hh4d/qwzjV8Pev3//JwbkSltb2+PHj79plaqqqri4uIaGBtLyxsbGhw8flpaWwq+9vb2PHj2Cr8Rxpaen59X/gssxNzY2kvwN+gLniDU0NDx79uxb42AYVl9fT3xT8ztgs9l4v+BL2d9HT09PcnIyUaq7srIyIyMDw7C4uLi+a3P/OFCk7dfUhffxlwEAeP/+fVFR0a+s9EeAon2/uFJe+z7k/fv38I1MAMDTp08zMjJIj3fAAji4yMb37e//Cfpz5tu/f//cuXMVFRX7uK9+1XAAwmAw3r59+2NN+zrv37+3sbHpe/ni4uLz58+vWbPm1KlT8D0zSGlpqaOjo4aGhq+vb2JiIgDAxcWlpaXl999/55W/hYWFS0pK1NTUBgwYICQk1N7eDjVB8vLy1NTU+KRMXhBH7LstLOCKEhISwcHBJIHKb2LgwIEjR45ctGhRbW3tN725QYJkhcFms4OCguDL2h8/foTuGb8GZ2fnO3fu/IKKiH38ZXR0dJw6dYpTXALr8w77g6t8K1++fLGwsPinayHCa9/HMKyjo8PS0rK9vR0K3dnb20tLSw8ZMuTQoUPEYqNGjVq2bFleXt6gQYN6e3vd3d2xH9jf/xuAfkpTU9OKFSv6Xp7BYGzatOkfa843Q6VSJ06cyL9MYmJibW0t/GxsbJyWlgYAiIuL27VrF17GwcEhKCgIAJCXl7dgwYKWlhaoAhMUFOTs7Mwrcl5enpiYGPzMZDKrqqrg582bNz979uy7O1VZWWljYwMAyMnJWbduXd9XJG6d6urqixcvfncbIBiGlZeX/2CQefPmEYM8evTI3Nz8B2N+Kx0dHVu2bJkzZ86vqe5f6ePBgwfhHCaCz6W+Ex8f7+Pj8/PaxRM5Obl/uoq+7Ps9PT0aGhpQmB4A8PnzZw0NDfh50aJFlZWVxIBSUlI5OTnwc0VFBfzwg/v7/2X67TlfeHh4Y2PjnTt39u/fr6Ojg2HY3bt31dXVvby81NTUXr58iWFYUlJSQECArq5ubGwsbjjw5s0bKGQVFBSkoaHR0tKira197tw5GxsbU1NTDMPS0tKgBDNnwJycHCcnJxsbG0NDwydPnnBtWFVV1ZkzZ2JiYnbv3s1isY4ePWpubn727Nnly5fDEwg/P7+wsLArV658tY9Pnz7FJT2zs7OhmaqioiLxD3JLSwuFQsEwDPoVQBHIzs7OJ0+e2NnZ8QkOAGCxWN3d3adPn8a9b/j4GGAY9uHDh/Hjx1dXV3t6eq5ataq1tfXDhw+nT5/GRwy3sIANu3TpEi6ClZSU5Orqev369T/++IPJZPr6+lpYWHR3d+/bt+/kyZP41mEymfLy8o8fP/4pf94/ffqkqKgYGhqqp6fn5eXFq1hycrKfn5+
Li8uNGzcwDEtLS7t27VpAQAAU6Ort7XVycrp37x6Uci4vL9fQ0OAqyvpPEB4e7uXlRafTnz9/DnukoKCQkZHBZrOtra3Ly8uJk/zNmzdKSkoBAQH6+vrEeYhhWGtr66FDhy5cuKCmphYYGEij0Tw8PC5cuKCrq9vT00PqI5FLly49fvzYysrq3bt3GIYFBwffuXPH2toamldoaWnt37/f2dm5tLT06NGjt2/fNjMzq6qqqq2tDQgIgIUxDLt27Vp8fDwUmsGprq4+f/58aGhoWloahmFsNvvw4cPJycnbtm3r7OzE5xKpI0+ePHn8+PGJEydKSkqIvaDRaP7+/q9fv37//j2xDOeQXr9+XVNTs6WlxczMbM+ePWw229fXNykpiTQmeHk6nX78+PGQkBBra2vYVBaL5eTktGDBAgcHBz5N6ujoOHjwoLW19fr162/fvs1nw3E2si/7/pMnT8rKyp4+fbp3796ysrKcnBzcnXjSpEnZ2dmkmEwms7e39/79+1CNHfva/v6fpt9mPnV1dRkZGSMjI0NDQwAAhmHz5s1rb2+3s7OzsbG5e/cuyTYBNxxQVVVtb2+HEZqbmzmdCtTU1KqqqjgDYhjm5ua2evVqJyennJwcTU1Nrg3r7u5etmyZtrZ2fHz8wIEDp0yZMmrUqJMnT06fPv3Zs2f5+fkJCQnbt2+3trbm0zs+DgZE+wIMwzZv3hwZGVlWVvbgwQPoV8BisaKjo6HMGJ8qent7vby8vLy8UlNT+zjms2bNmjFjRk1NjZWVVX5+/rBhw6qqqiwsLPAR42Vh0dPTY2tre+jQod27dzc1Nd24cYPkTYFvHSj4Mnr06Pz8/D62ig/Kyso9PT06Ojq+vr58LuIRLSw4rTACAgLExMSgAB6GYRMnTvzpqjG8AAA0NDTIysra2dlBkWhlZWUtLa38/PyBAweqqKjIycmRJnlra+umTZtiYmKI8xAAEBYWNmrUqMOHD7e3t69evdrT05NKpY4ZMyYvLy8hIYHURyLDhw/X1NScNWvWw4cP3759++7dOyMjoz///LOrq2vBggUUCuXKlSvHjh0jGYYQfTwaGhpu3bq1fPly6B2Bc+DAAS0tLRMTk/nz52McRiv4XCJ1hOgBQuxFUlIS7qTBxycEwzAzM7PS0tJhw4YZGxuXlZXBrampqUkaE7y8l5eXtLS0qampvb29kZERAKCnp8fBwSE9PT04OPjjx4+8mvT06dNJkyYJCwsnJCQYGxvz2XDE5vV938/IyDA0NDx06NCqVas2btzIy+QE5+7du15eXsHBwd8wBf+z9NvMh4P/bcENB0aMGNHR0fFV2wQcTqcCqLdJCohh2IIFC2JiYgAARPsYEpMnT6ZQKNDGhc1mk4K8ePEC/i/jL+/Ex8Ggs7OT6MijpaUVGBiYkZHR3Ny8fPly2GwzM7O0tDT+9+GFhYX37t17+PBheKLTRywtLW/evJmYmLh8+fLY2NiKigoo8snpZkC0MqioqOjs7IRlcEMJPoiKivK6mf+twPHHtyBXiBYWnFYYaWlpcMDxTQY7kpmZCZ0T4NnYP0FycnJTU9OlS5daW1uTk5PhGYydnV1AQMCrV6+WLVvGOckHDRo0atQojGMezp49OzU1lUqlKioqysjI5OTkWFhYGBsbl5SU6OrqcvYRZ/HixeHh4U1NTUwmMzMzE/4bmDRpkrq6+qBBg3DrRJJhCNHHA/5DnT59ekVFBTFyeno6cV8gOYHgxUgdIXqAkHqBr8LHJwRWtGHDhgcPHuTn5zc1NVVUVAgLCw8YMIBXNLxrysrKDAajublZVFRUVFR0yJAhy5Yto1AofJpEHCL+Gw6n7/s+g8GA2rPr1q0rKSmRkZHBheBJJSHbt2/fv39/UFAQV8Xafka/zXzg74eX4FVdzp84bRNwCXm4CqcrAjEm1+X6+vqTJk0qLCwMDQ3l1TB/f/+GhoZVq1YRjQLwIFJSUvDCKQCAz0OMfBwM8vPzV69ejRE
U8efOnautrZ2SkoL72WIYJiIiMmPGDF7xv8PEAKKjo5OamlpcXOzk5OTq6jp27Fhi73hZGYwbN667uxvenIeGEpzeFESB/9bW1ilTpmDfq/oPAxKHF/BVMiJaWHBaYXBuMtipxYsXQ+eEFStWfEcj+0JiYqKHh4eDgwOUC7969SqGYUpKShISEhEREbNnz+bjDUKah3PmzNHR0UlLSwsJCREVFZWRkYEGtnQ6/f3797ymJYvFMjQ0NDQ0hHe2Ro8eHR0dDWshPZNCMgwh+nhUVVVZWVklJCTY2dkRN4SkpCR8Ng1WSnICwecSqSNEDxBSL/BVSD4hcKYR2bVrl4eHh5SUlLm5ubGxMXSQJkXj7BqdTpeUlJSSksJ/gh7CfJpErLSPG67v+/6SJUvg8gEDBkhLS69duxY30KiuribZtuC7PDTm5Jxs/Yz+6dWAYVh6enppaWlFRcXz58+Lioqqq6szMjJqamoqKyufPXtWWFgoIiJCsk2AhgPQ6MDY2Hj+/PltbW0fPnwgORUUFxc3Nze/ePGCQqEQA7a3t1+/fj07Ozs1NTU+Pv7gwYPwzzUJCQkJb2/vwYMHS0pK+vn55ebmVlVV1dTUvHnzhkajXblyxdfXd8eOHUpKSkwm8+PHj7itK38OHjx45swZcXHxpKQkooOBrKxsSkpKQkKCl5eXhIREfX39rl27jIyMOjo6XF1duYZiMBhhYWFdXV0RERGGhob48i9fvrx9+/bZs2dz5szhJX49ePDgzZs3r1mzZtq0aRMnTtywYQOGYSkpKXDEVFRUuFpYdHd3BwYGurm5bd26lc1m79ixAwBA9Kbo6urCt464uHhPT8/UqVO/T/WfzWYHBgZiGBYcHLxp06aGhoaMjIyurq7GxsaSkhKuBrZEC4uioiKSFcb+/fs3btzY29vb3t5eWVmZnZ1dVFSUlJQErfv+OYKCgt6+fQst6ZlMpqSkpJeX16ZNm1asWLFnzx54TiwrK0uc5B8+fGhpabl//76enh5xHvr7+48dO9bf319RUTE2NhZaOqxbty4lJUVVVfXcuXOkPjY2NkIHdmj0uGfPHmlp6cePH1taWkpJSf32229z5851dXW9f//+ly9f8vPzZ8yYQTIMefToEe7j0d7efuHCBVNTU0tLS+KNJWdn54MHD6anp+fm5vb29i5dupToBOLh4QHnEqkjxcXFuAeIiIgIsReNjY2Ojo7Tpk178uQJXiYpKWn79u0UCoV4Ojtz5kx5efmtW7fS6fSsrCwZGRkMw0hjghe2t7c3Nze/d+9ebW2tn58fhmEKCgqhoaGDBg2ytLSUl5e/dOkS1yYdO3bs8uXLDQ0NdXV18FyZ14bjagCJw2vf19PTi42NhSev7u7u0tLSGzduDAsL6+np2blzJ+7hjGHYgwcPWltbIyIipk+fjl8R7cv+/t9F0HU7ibYJuOEAhmEMBkNYWBhejexjKADA1atXTUxMmpqaoJG6sbEx15JQq57BYAwePJjrPaGOjo7hw4ezWCyuPgZ8GlBTUzN69GgYE9bS3Nw8cOBACQkJvBg0oeWalX8K0MoAwzBo10D6lY+VAZvN7u7uxr0ywP96U+Bb5/Hjx8LCwqtWrcJ+oeo/ycKCZIUBAKBSqSIiIpzWDf8WTCYTnzy8vEGI8/D27dsrVqzo6elpbm7Oysrau3cvAIBGo+HHQV59ZLPZvb29Q4YMwQeHjysI/hObzcZ9POAhiEqlctpH9Pb2wukEKyU6gYiIiOBzidgR7G8rXdh9Ui/gKiSfkODgYBMTE9KeyHUak6IR6ezsFBMTwzM3lUoVFRWFEUjV8QmC9W3DccJ134c/NTY2iomJ4V/b29sBAMS0J5gIeub7iTQ0NCxdutTR0XHEiBFNTU1r164dP378v92o/kZFRUVpaam6uvq/3ZD+xsKFCw0MDKZNm1ZbWztr1izouCsIdHd3t7S04JflEQICynw/k/r6+qysLAkJiSVLlnzT6RoC8e/S1dW
VkZHBZDKXLFnC9bkPBKI/gTIfAoFAIASLfvtsJwKBQCAQXEGZD4FAIBCCRf+/F1VXV1daWrpkyZIfD5WdnU2n0+Xl5WVlZTmfQyPS2tqKq8tLSkpOnjz5l+l6fDcAgCdPnkAdyH+oChqNlpKSoq2t/U1rcQ47AODDhw8lJSXTp09XVlbGMKygoAB/SxcyadIk+DB6eXn5xIkTf1ofMAzGzM/Pnzt3Lo1GwxWhcHJycqSlpeEr/D+dhoaGgoKClStX/vTI393s3t7e1NRUGRmZIUOGTJ8+nX/h8vJyKLsF1YsaGxtLS0uHDBkye/bs72v29/H+/Xvia3yDBg0ivd/247DZ7MzMTAUFBfjGwjcRFxe3evVqXk9Bc0Lcdq2tra9fv+7o6Ni0aVNfDjtwbxo6dKiSktK3tvO/y//1w/GPAFU5GhsbcZOdH8HU1PTLly8SEhKWlpaFhYX8C48cOfLly5e6uroAgLi4OBUVlZ+ittV3vkPW8sSJE8rKynV1dT/i3cO/PeXl5du2bfumFTmHvbm5efPmzZWVlatWrcrKyjIxMaHRaKdPn6ZQKG/evNm4cWNXV1daWlpERASMoKenh7/A+1MICAjw9PRUUlJycXGJioriLFBWVlZXV/cTayTyHTYXfeT7mk2n09XU1KSkpJqbm/X19b9aXlJS0sHBwdfXF75XIykpefHixb6/O/Sz4OpO8HNpbm62s7MjCdP0kT7afbBYLFgM33ZUKvXUqVNr1qyhUql9nPZ83DD6M9+lc/0f4Kfrso8ePbqgoAAAUFtbm5CQAADo7e11c3Pz9fUNCwvbsmVLQ0NDamqqmpra+/fvAQDJycnTpk2D65qYmPz5558AgIKCAjU1tVu3bkVERECl+ZiYGF1d3Zs3bx47dqyoqAgA4ODg4O3tvWnTJh8fHzs7O9wnoe98n++EiooKnU7/1rW+tT1SUlLftC7nsJuZmZ05cwYvYGZmdvLkyczMTABAVlbW2LFj4fJXr14BAJ4+fbpmzZr9+/f/jH78f4yMjHx9feFnTg+Bf5pvtbn4pyksLBw7dmxPTw8AIDo6Gr4uVl9f7+joGBQUdP36dejtQNw14AkinGxMJvPw4cMw1MePHz08PO7evXvu3Ln6+nq48N27dydPngwNDXVxcXFxccHrdXFxgbIjAICWlhZbW9sTJ05cu3bN2dmZyWT2peVc3Ql+Ltra2llZWf9EZMiRI0fy8vKISyIjI0+dOvWtcbi6YfRv+mfmYzKZ+vr6FhYWb9++vXbtmqmpKQCgpKTkyJEjoaGhO3bsoFAoBQUFCgoKISEhurq6165d+2pMW1vbMWPGJCUl4UscHR3/+usv+DkhIaG0tBQAMGbMGLgEz3wfPnyYNm3ay5cv4XK8QGFhIQCAQqEsWrQIABAVFQXlbuGusmjRoqqqqurqaq5mOomJiS4uLn/99ZeNjQ2VSt25c6efn191dfWaNWsSEhJycnJGjhx5586dzMzMKVOmXLt2TVNT08zMDFbq6+trY2Nz8uTJ2traDRs2nD17dsGCBY8ePZKWlg4KCiorKyMNVGBg4IYNG/bt2+fk5HT27NmtW7c6OzvPmzcvPDz84sWL6urqMDdfvXo1Li7u3LlznK3F29Pb2wuFNvT09C5dukRqT1+GncViiYqK4oMJALh165aSkhL8TMx8kHPnzlVVVY0aNaqjo4NrfBqNduzYseDg4N27dz9//ry5uVlLS+vs2bPW1tYmJiZcV7l7966IiMiVK1fwI29lZeWVK1du3bq1a9eu9vZ2W1tbT09Pruv2nffv348bN66qqsrDw0NdXb2lpeX9+/enTp3KyclZsGDBxYsX1dTUUlNTAQCJiYn+/v46OjoPHz7kP6t9fHzMzc27urr27t174sQJQNhqeLO5joCjo+P169fV1dVPnTrFOZIqKiqLFy8uLi7Gl6irq+fn58PP3t7e8AM+8wEAc+bMCQwMBADExsY+ffoUANDc3Dxv3jyYQcv
KypYuXcpms+vq6ubNm0ej0QAAbDYb/y/b2Ng4b968hw8f4gGPHz8Oj93btm2LjY3tywhLSUllZWX19PTExMRkZWWRZgJx76BSqcQVg4KCvLy81qxZU1xcTKFQTp8+HR0dbWVlBf53JgAAtLW1z5w5s3v37q1bt3I2gLjLPH36NDAw8OzZs9evXy8rK1u9ejXMysS6IiIiVq5cee3ateXLl7948aKurk5JScnd3b2wsBBuu46ODgsLiy1btoSFha1fv97d3Z0UmdSAqqoqJyenkJCQBQsWwNG7evXq9evXHR0dvby8+jKG/136Z+YDADg5OcGN9/LlS21tbQCAlpbWu3fvAADe3t4wx4wbN669vb2mpmbmzJlfDQiNWkRERHbv3t3b2wsAGD9+fHNzM14A/tPEj7zJyckyMjKurq47d+7U0ND49OkTXD527NisrKzExES451dVVS1cuLCtrc3c3Jw4NWHm49oSBoOhqKgIqzMwMPjrr79cXV09PDwAAObm5vfv3wcEhzApKSkqlcpisSZNmvTixQt9ff3g4ODr168LCws3Nzdv2bIlKioKlpw4cSLcw0kDlZeX99tvv8Ey0dHR8C/85cuXDx06BAA4duxYSEhIfX29qqpqR0cHfrwjgbdHQkKCRqM1NjZOmTIFAEBqz1eHHWo7wZNjyOPHj4cNGwY/kzJfZWXlzZs34SjB8eHE3d39ypUrAID8/PzRo0ez2WxTU9MHDx4AAKZNm9bU1MR1rejoaHl5+SVLltTV1cFeNDY2AgBu3brFZDLPnz//4w6CAIB169ZlZ2d3d3fLyMgwGIxHjx6Vl5fn5OQsX74cAHDv3r1du3bRaDR1dfXQ0NADBw7MnTsX8J3VWVlZenp6AICgoKB9+/aRthrebNII5OXlrVmzBgBgb2/P9cygsbHRxMREVFTUz88PAFBaWopPGAAA3FkAYdcAAISEhKioqAAADhw4wGazAQBhYWG///47XkBFReXLly+BgYF79uzhDBUcHPzp06fVq1fjPzk6Op46dSouLk5LS6uhoaEvwyslJXXgwIHLly/r6+tnZWVxzgTi3oGTmZlpbGwcGhq6YcOG/fv3f/78OTk5uaenZ+zYsWw2mzQToF4u7E51dTUxDnHwGQwG7AuNRrt9+zYAQFNTMycnh1RXSUkJNGIMCwuzt7cHAGhoaMBzPnzbeXl5nT59GgDg4uJy8eJFzshEtm7dCvd0W1vboKCg169fwxs0AAA5OTl4raW/0p/v80HwWwgktXisbzr9kK6urkGDBh0/fvzt27exsbFQ6LatrY0oM8h5r2LUqFF//vlnQEDA+vXrDQwM8OXi4uKSkpL4q+4tLS179uyRkZGxsrLqS4++ydlg0KBBw4YNGzhw4NKlS+vq6mpqakxNTa2srBgMxqhRo4hS8TikgSKWgTqNGIaNGDECt5ior6/nJbfPiZCQkIiICLRowDCM1B5SYc5hl5aWFhcXJ9by+fNnqF7Nye3bt6urqy9duiQtLX316lWuCuCcQvuc1hycrdLX1y8oKBAWFra1tcUw7MOHD1AOaseOHUJCQnDTdHZ2Qq8G6Gj/HfAyviDaXHCK+vd9VpO2Gj6BSSMwfvx4CoVSXV0tKirKqSJLpVKlpKRCQkJCQkLs7Oxyc3NJ+wVXSQdDQ8PW1tabN2/Ky8vDwlDfCy8gLCzc3d3NNRSLxcrPz6+vr29oaPj48SP+6+DBg4cPHy4kJNT3+/pEdwLOmcB178jJyVm/fr2xsXFcXNzly5dJThGkmYD9vbHExcXhhMchDv6XL1+g7hoUE8b+3hakujjNYXA4Dz5w3DgjEyG5YeAjgGHY4sWL8/Pz9+7dq6+vb2Nj08fx/A/RbzMfpy0ASS0eLwn+fpefyWTy0v4vLi6OiYnBMExZWXnLli3V1dUYhuGuqhBYHQ7xODt8+HA6nY5/nTp1qqqq6vbt2+EFT0lJyYCAgBcvXhCjAd4KA9/qbACpqalRVVXt7Ox
8/fo1hmHl5eWkxxm+OlAkiC3kJbePQ/TBINbFpz0Yj2G3tbXFn17BMCwxMdHe3h6vBR92Go3W3d19/PhxBwcH6Dr74MEDzobxEdrntQkuXLiAYdjIkSPt7e1hk4YNG/b48WMMwz59+tTa2gp7N3z4cOjVALPjd8DV+AIfOviBl6g/18aT5glpq+GRSSMwfPjwQ4cOvXr1at++fXPmzCHFDA8Ph4OwefPmadOm1dTUKCsrt7a21tTUwAJEE1ecwYMH29vb79mzx8TEBC5ZuXIl9LbFMIxOp3d0dEybNk1NTS0tLQ3fpjBUbGysgYHBxIkTDxw4AF0JIWPHjl2+fLmOjg50u/2qjwfJnYDPTCAiIyNz584duGJmZibJKYLrTMC4OcYQB19aWjo9Pb2pqQnDMNyeAgBAqgtfFw/F1fwELwMjkCITIblh4COAYVhra+vs2bM9PT1jYmJ8fX35j+R/kX6b+ebPnx8eHh4fH5+YmPj58+fy8nIPD4/4+PikpKS0tDRnZ+fi4mKo0//s2TOo03/y5Eno3s6JgoLC8ePHr1y5EhISkpeXB93Mvb297927FxAQ8OjRo6ioKCaTmZyc3NjYCBX0IyMjoTv52bNnIyMjg4KCMAyDjl+3b9++d++eqalpd3f38+fPKysrKRRKeHi4jY0N9IIpKiqqqKjg5eouIiICnQ2ePHkCnQ3Wrl0bGhp69uxZOp0O//hDZ4O2tjYWi/XXX3+FhoZu2rRp4sSJ58+f19HRMTIySk1NZbFYnz59evz4MZ1Of/XqVU1NTWRkJJvNJg3UkydPoOI+i8VKT0//8uVLTU1NVlbW69evocXEx48fGxoaHB0daTQaSW4fB7bn6dOnzc3NL1++TE5Obmtr+/TpE7E9nA9/cx32kydPSklJubi4PH78+M8//1y1apWFhQWGYZ2dnTExMXV1ddDCbe/evVQqFRq5tbS0TJo06fjx4+Xl5aQq7O3t6XT6vXv3/P39/fz8YKsyMzPLysqgNQdnX16+fHny5MmoqKgbN25AG3F3d3dLS8s1a9akpaUBADIzMzMzM3/cNZ6r8QX0HsFtLgYPHgxF/S0tLel0ellZGWlWEwPOnj0bOmAUFhYWFRXV1dXhW621tRU2m0KhkEagrq7O1dU1KCjo8OHDnJ6O8vLyO3fuDAoKcnd3nzJlioaGhoiISEhIiIODQ1RU1P379+H/OXzXwFe0srLatGmTrKws/Dp58uQ9e/Y4Ojo+fPjw8OHDISEhwsLCqqqqlpaWe/bsgbsYPNULCgqaNGnShAkTli5dGhYWlpGR0dLS8u7du/T09NDQ0KSkJOjqvGvXLpgCuYK7E+BPMpNmQnV1Nb53EFfU09OjUqmqqqp2dnYTJkyQkJAIDg6+cuUKvIFNnAl0Or2oqCglJaWsrKy0tJSUeNrb2/HBl5GROXLkyKxZs/T19Xt7e4uLi6HdB6kukttMe3v74sWLz58/n/k3TU1N2dnZOTk5dXV1OTk5r169EhMTI0YmDQJ0wzh+/Hhubu779+/nz59vbGzs5uYWHR1tYGAAfd77K/1ZvYyrLQAfIXk2mw0f6+D6a3d3t7CwcF1d3ZgxY4gH987OziFDhnCaEvzTkJwNoKI8LvSOOxuMHj26oqKCzWbjQwH/OH+1wXwGihPAW24fQvTBIMG/PbyGncVi1dXV4dfKfhCS0D5/oClBdXW1tLQ03mwmk9nT08NLgP+74W98gfNNov64A8ZXtxokIyMDADB27NiWlpaHDx+eOnWK+Ct0SKDRaF1dXfAFSpyWlhYJCQk+o8pgMIhXOGHzWltbSde9uS78KtDY4VunRx9nApVKxXc9kvVKH2cC5+DDqc55cZhYFyd8zE9weEXGONwwYMCBAwf++gPaL6Y/Z75vhUKhSElJ/RrXm18Dm80WFxevra3t42ERgeDEwcGhs7Nz48aN9fX1srKy0KYVgfhPgzJff+bNmzd5eXmysrLr1q37t9uC+K/CYrHevHl
TXV09b968cePG/dvNQSB+AijzIRAIBEKw6LdPuCAQCAQCwRWU+RAIBAIhWKDMh0AgEAjBAmU+BAKBQAgWKPMhEAgEQrBAmQ+BQCAQggXKfAgEAoEQLFDmQyAQCIRggTIfAoFAIAQLlPkQCAQCIVigzIdAIBAIwQJlPgQCgUAIFijzIRAIBEKwQJkPgUAgEIIFynwIBAKBECxQ5kMgEAiEYIEyHwKBQCAEC5T5EAgEAiFYoMyHQCAQCMECZT4EAoFACBYo8yEQCARCsECZD4FAIBCCBcp8CAQCgRAsUOZDIBAIhGCBMh8CgUAgBAuU+RAIBAIhWKDMh0AgEAjBAmU+BAKBQAgWKPMhEAgEQrBAmQ+BQCAQggXKfAgEAoEQLFDmQyAQCIRggTIfAoFAIAQLlPkQCAQCIVigzIdAIBAIwQJlPgQCgUAIFijzIRAIBEKwQJkPgUAgEIIFynwIBAKBECxQ5kMgEAiEYIEyHwKBQCAEC5T5EAgEAiFYoMyHQCAQCMECZT4EAoFACBYo8yEQCARCsECZD4FAIBCCBcp8CAQCgRAsUOZDIBAIhGCBMh8CgUAgBAuU+RAIBAIhWKDMh0AgEAjBAmU+BAKBQAgWKPMhEAgEQrBAmQ+BQCAQggXKfAgEAoEQLFDmQyAQCIRggTIfAoFAIAQLlPkQCAQCIVigzIdAIBAIwQJlPgQCgUAIFijzIRAIBEKwQJkPgUAgEIIFynwIBAKBECxQ5kMgEAiEYIEyHwKBQCAEC5T5EAgEAiFYoMyHQCAQCMECZT4EAoFACBYo8yEQCARCsECZD4FAIBCCBcp8CAQCgRAsUOZDIBAIhGCBMh8CgUAgBAuU+RAIBAIhWKDMh0AgEAjB4v8BmGoNiKO4Gz0AAAAASUVORK5CYII="
# SpanGroup payload covering the entire first page of the fixture PDF: one
# span over all 3696 symbols, with a page-level bounding box (coordinates
# appear normalized to page size — width/height < 1; page dims in metadata).
PAGE_JSON = {
    "spans": [
        {
            "start": 0,
            "end": 3696,
            "box": {
                "left": 0.12100741176470588,
                "top": 0.08015236441805225,
                "width": 0.7625643173109246,
                "height": 0.8289201816627079,
                "page": 0,
            },
        }
    ],
    "id": 0,
    "metadata": {"width": 595.0, "height": 842.0, "user_unit": 1.0},
}
# First ten token SpanGroup payloads from page 0 of the fixture PDF.  Each
# entry records the token's character span, its bounding box (coordinates
# appear normalized — all values < 1), and PDF font metadata (name + size).
FIRST_10_TOKENS_JSON = [
    {
        "spans": [
            {
                "start": 0,
                "end": 5,
                "box": {
                    "left": 0.14541159663865547,
                    "top": 0.08015236441805225,
                    "width": 0.031124640759663848,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 0,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 6,
                "end": 10,
                "box": {
                    "left": 0.2218368002857143,
                    "top": 0.08015236441805225,
                    "width": 0.028109224561344556,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 1,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 11,
                "end": 18,
                "box": {
                    "left": 0.28294983802016804,
                    "top": 0.08015236441805225,
                    "width": 0.04515740219831938,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 2,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 19,
                "end": 23,
                "box": {
                    "left": 0.5239827089210084,
                    "top": 0.08015236441805225,
                    "width": 0.03749755185546227,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 3,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 24,
                "end": 25,
                "box": {
                    "left": 0.6157472036638656,
                    "top": 0.08015236441805225,
                    "width": 0.010051387327731112,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 4,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    # Tokens 5 and 9 use a smaller font size (7.17 vs 8.97) and a slightly
    # lower top coordinate — superscript/subscript-like glyphs in the source.
    {
        "spans": [
            {
                "start": 26,
                "end": 29,
                "box": {
                    "left": 0.6266233613445378,
                    "top": 0.08181785724465564,
                    "width": 0.02369895794957974,
                    "height": 0.00851912114014249,
                    "page": 0,
                },
            }
        ],
        "id": 5,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 7.173099999999977,
        },
    },
    {
        "spans": [
            {
                "start": 30,
                "end": 31,
                "box": {
                    "left": 0.6508250420168067,
                    "top": 0.08015236441805225,
                    "width": 0.005018158890756309,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 6,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    # Note: token 7 starts at symbol 31, abutting token 6 with no gap.
    {
        "spans": [
            {
                "start": 31,
                "end": 35,
                "box": {
                    "left": 0.6558673121815126,
                    "top": 0.08015236441805225,
                    "width": 0.02927711439327727,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 7,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 36,
                "end": 37,
                "box": {
                    "left": 0.7629575354285715,
                    "top": 0.08015236441805225,
                    "width": 0.008378667697478945,
                    "height": 0.010648907363420378,
                    "page": 0,
                },
            }
        ],
        "id": 8,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 8.966379999999958,
        },
    },
    {
        "spans": [
            {
                "start": 38,
                "end": 40,
                "box": {
                    "left": 0.7722364705882353,
                    "top": 0.08181785724465564,
                    "width": 0.012888674302521032,
                    "height": 0.00851912114014249,
                    "page": 0,
                },
            }
        ],
        "id": 9,
        "metadata": {
            "fontname": "HXONRZ+NimbusRomNo9L-Regu",
            "size": 7.173099999999977,
        },
    },
]
# First five row SpanGroup payloads from page 0.  Rows carry spans and boxes
# like tokens but no font metadata; row 4 spans multiple tokens (24-35).
FIRST_5_ROWS_JSON = [
    {
        "spans": [
            {
                "start": 0,
                "end": 5,
                "box": {
                    "left": 0.14541159663865547,
                    "top": 0.08015236441805225,
                    "width": 0.03112464075966384,
                    "height": 0.010648907363420376,
                    "page": 0,
                },
            }
        ],
        "id": 0,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 6,
                "end": 10,
                "box": {
                    "left": 0.2218368002857143,
                    "top": 0.08015236441805225,
                    "width": 0.02810922456134457,
                    "height": 0.010648907363420376,
                    "page": 0,
                },
            }
        ],
        "id": 1,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 11,
                "end": 18,
                "box": {
                    "left": 0.28294983802016804,
                    "top": 0.08015236441805225,
                    "width": 0.045157402198319374,
                    "height": 0.010648907363420376,
                    "page": 0,
                },
            }
        ],
        "id": 2,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 19,
                "end": 23,
                "box": {
                    "left": 0.5239827089210084,
                    "top": 0.08015236441805225,
                    "width": 0.03749755185546222,
                    "height": 0.010648907363420376,
                    "page": 0,
                },
            }
        ],
        "id": 3,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 24,
                "end": 35,
                "box": {
                    "left": 0.6157472036638656,
                    "top": 0.08015236441805225,
                    "width": 0.06939722291092432,
                    "height": 0.010648907363420376,
                    "page": 0,
                },
            }
        ],
        "id": 4,
        "metadata": {},
    },
]
# First three layout-block SpanGroup payloads from page 0.  Unlike tokens and
# rows, each block also carries a `box_group` whose metadata holds the layout
# type label ("Table" / "Text").  Block 1 is made of three disjoint spans
# (note the gaps 1183->1185 and 1289->1291 between consecutive spans).
FIRST_3_BLOCKS_JSON = [
    {
        "spans": [
            {
                "start": 0,
                "end": 851,
                "box": {
                    "left": 0.14541159663865547,
                    "top": 0.08015236441805225,
                    "width": 0.7133684323462186,
                    "height": 0.2190099524940618,
                    "page": 0,
                },
            }
        ],
        "id": 0,
        "metadata": {},
        "box_group": {
            "boxes": [
                {
                    "left": 0.14228497673483456,
                    "top": 0.07860914035534348,
                    "width": 0.7309202049960609,
                    "height": 0.22434301670826529,
                    "page": 0,
                }
            ],
            "metadata": {"type": "Table"},
        },
    },
    {
        "spans": [
            {
                "start": 852,
                "end": 1183,
                "box": {
                    "left": 0.12100823529411764,
                    "top": 0.31637727296912105,
                    "width": 0.7625634937815128,
                    "height": 0.040477662327790986,
                    "page": 0,
                },
            },
            {
                "start": 1185,
                "end": 1289,
                "box": {
                    "left": 0.1266559638184874,
                    "top": 0.3591322037054633,
                    "width": 0.7513104815193276,
                    "height": 0.011832114014251716,
                    "page": 0,
                },
            },
            {
                "start": 1291,
                "end": 1461,
                "box": {
                    "left": 0.12100823529411764,
                    "top": 0.37338398517814736,
                    "width": 0.7624131321277309,
                    "height": 0.025941021377672124,
                    "page": 0,
                },
            },
        ],
        "id": 1,
        "metadata": {},
        "box_group": {
            "boxes": [
                {
                    "left": 0.12920637371159402,
                    "top": 0.31513023829516773,
                    "width": 0.7484953551733193,
                    "height": 0.08984719319468157,
                    "page": 0,
                }
            ],
            "metadata": {"type": "Text"},
        },
    },
    {
        "spans": [
            {
                "start": 1462,
                "end": 1588,
                "box": {
                    "left": 0.14803378151260504,
                    "top": 0.43017611738717343,
                    "width": 0.31311072265546214,
                    "height": 0.069223729216152,
                    "page": 0,
                },
            }
        ],
        "id": 2,
        "metadata": {},
        "box_group": {
            "boxes": [
                {
                    "left": 0.13913303663750656,
                    "top": 0.4264316875974243,
                    "width": 0.3255823984867384,
                    "height": 0.0700013076890959,
                    "page": 0,
                }
            ],
            "metadata": {"type": "Table"},
        },
    },
]
# First ten VILA (visual layout) predictions: span ranges with a layout-type
# label in metadata, and no boxes.  Spans tile the document contiguously.
FIRST_10_VILA_JSONS = [
    {"spans": [{"start": 0, "end": 851}], "metadata": {"type": "Table"}},
    {"spans": [{"start": 852, "end": 1461}], "metadata": {"type": "Caption"}},
    {"spans": [{"start": 1462, "end": 1588}], "metadata": {"type": "Table"}},
    {"spans": [{"start": 1589, "end": 1679}], "metadata": {"type": "Caption"}},
    {"spans": [{"start": 1680, "end": 1803}], "metadata": {"type": "Paragraph"}},
    {"spans": [{"start": 1804, "end": 1831}], "metadata": {"type": "Section"}},
    {"spans": [{"start": 1832, "end": 2309}], "metadata": {"type": "Paragraph"}},
    {"spans": [{"start": 2310, "end": 2330}], "metadata": {"type": "Section"}},
    {"spans": [{"start": 2331, "end": 2604}], "metadata": {"type": "Paragraph"}},
    {"spans": [{"start": 2605, "end": 2642}], "metadata": {"type": "Section"}},
]
# A run of five consecutive word SpanGroups (ids 895-899).  Words 896 and 897
# carry an overriding `metadata.text` (dehyphenation-style replacements); the
# others have empty metadata, i.e. their text comes from the raw symbols.
SEGMENT_OF_WORD_JSONS = [
    {
        "spans": [
            {
                "start": 3370,
                "end": 3372,
            }
        ],
        "id": 895,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 3373,
                "end": 3382,
            }
        ],
        "id": 896,
        "metadata": {"text": "in-domain"},
    },
    {
        "spans": [
            {
                "start": 3383,
                "end": 3394,
            }
        ],
        "id": 897,
        "metadata": {"text": "sci-entific"},
    },
    {
        "spans": [
            {
                "start": 3395,
                "end": 3405,
            }
        ],
        "id": 898,
        "metadata": {},
    },
    {
        "spans": [
            {
                "start": 3406,
                "end": 3408,
            }
        ],
        "id": 899,
        "metadata": {},
    },
]
| 249,118 | 509.489754 | 234,906 |
py
|
mmda
|
mmda-main/tests/test_utils/test_stringify.py
|
"""
@kylel
"""
import json
import pathlib
import unittest
from mmda.types.annotation import SpanGroup
from mmda.types.box import Box
from mmda.types.document import Document
from mmda.types.span import Span
from mmda.utils.stringify import stringify_span_group
class TestStringify(unittest.TestCase):
    """Tests for `stringify_span_group`, which renders a query SpanGroup as a
    string by stitching together the text of the `words` annotations it
    overlaps (rather than slicing `doc.symbols` directly)."""

    def test_stringify(self):
        """Single, multi-span, disjoint, and whole-document queries stringify
        from overlapping words."""
        doc = Document.from_json(
            {
                "symbols": '[1] Alan Higgins and R Wohlford,\n"Keyword recognition, "in Proc. ICASSP , 1990, pp. 1233– 1236.',
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 3}], "text": "[1]"},
                    {"id": 1, "spans": [{"start": 4, "end": 8}], "text": "Alan"},
                    {"id": 2, "spans": [{"start": 9, "end": 16}], "text": "Higgins"},
                    {"id": 3, "spans": [{"start": 17, "end": 20}], "text": "and"},
                    {"id": 4, "spans": [{"start": 21, "end": 22}], "text": "R"},
                    {"id": 5, "spans": [{"start": 23, "end": 31}], "text": "Wohlford"},
                    {"id": 6, "spans": [{"start": 31, "end": 32}], "text": ","},
                    {"id": 7, "spans": [{"start": 33, "end": 34}], "text": '"'},
                    {"id": 8, "spans": [{"start": 34, "end": 41}], "text": "Keyword"},
                    {
                        "id": 9,
                        "spans": [{"start": 42, "end": 53}],
                        "text": "recognition",
                    },
                    {"id": 10, "spans": [{"start": 53, "end": 54}], "text": ","},
                    {"id": 11, "spans": [{"start": 55, "end": 56}], "text": '"'},
                    {"id": 12, "spans": [{"start": 56, "end": 58}], "text": "in"},
                    {"id": 13, "spans": [{"start": 59, "end": 63}], "text": "Proc"},
                    {"id": 14, "spans": [{"start": 63, "end": 64}], "text": "."},
                    {"id": 15, "spans": [{"start": 67, "end": 73}], "text": "ICASSP"},
                    {"id": 16, "spans": [{"start": 74, "end": 75}], "text": ","},
                    {"id": 17, "spans": [{"start": 76, "end": 80}], "text": "1990"},
                    {"id": 18, "spans": [{"start": 80, "end": 81}], "text": ","},
                    {"id": 19, "spans": [{"start": 82, "end": 85}], "text": "pp."},
                    {"id": 20, "spans": [{"start": 86, "end": 90}], "text": "1233"},
                    {"id": 21, "spans": [{"start": 90, "end": 91}], "text": "–"},
                    {"id": 22, "spans": [{"start": 92, "end": 96}], "text": "1236"},
                    {"id": 23, "spans": [{"start": 96, "end": 97}], "text": "."},
                ],
            }
        )
        # make sure test fixture is defined correctly
        for word in doc.words:
            assert word.text == doc.symbols[word.start : word.end]
        # SpanGroup with single span
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 16},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "[1] Alan Higgins",
        )
        # SpanGroup with multiple spans
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 16},
                    {"start": 17, "end": 20},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "[1] Alan Higgins and",
        )
        # SpanGroup with disjoint spans -> grabs symbols & join, but ignore text in-between
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 16},
                    {"start": 23, "end": 31},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "[1] Alan Higgins Wohlford",
        )
        # stringify the whole bib entry
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 97},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            '[1] Alan Higgins and R Wohlford, "Keyword recognition, "in Proc. ICASSP , 1990, pp. 1233– 1236.',
        )

    def test_multiple_whitespace(self):
        """Runs of whitespace between words collapse to single spaces."""
        doc = Document.from_json(
            {
                "symbols": "This is a \n test.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 4}], "text": "This"},
                    {"id": 1, "spans": [{"start": 5, "end": 7}], "text": "is"},
                    {"id": 2, "spans": [{"start": 8, "end": 9}], "text": "a"},
                    {"id": 3, "spans": [{"start": 18, "end": 22}], "text": "test"},
                    {"id": 4, "spans": [{"start": 22, "end": 23}], "text": "."},
                ],
            }
        )
        # make sure test fixture is defined correctly
        for word in doc.words:
            assert word.text == doc.symbols[word.start : word.end]
        # does whitespace normalize fine?
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 24},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "This is a test.",
        )

    def test_partial_word_match(self):
        """A query overlapping only part of a word still pulls in the whole word."""
        doc = Document.from_json(
            {
                "symbols": "This is a test.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 4}], "text": "This"},
                    {"id": 1, "spans": [{"start": 5, "end": 7}], "text": "is"},
                    {"id": 2, "spans": [{"start": 8, "end": 9}], "text": "a"},
                    {"id": 3, "spans": [{"start": 10, "end": 14}], "text": "test"},
                    {"id": 4, "spans": [{"start": 14, "end": 15}], "text": "."},
                ],
            }
        )
        # make sure test fixture is defined correctly
        for word in doc.words:
            assert word.text == doc.symbols[word.start : word.end]
        # does it grab partial word matches?
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 2, "end": 7},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "This is",
        )
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 6, "end": 13},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "is a test",
        )

    def test_use_word_metadata_text(self):
        """A word's `text` field ("test") overrides its raw hyphenated symbols
        ("te-\\nst") in the stringified output."""
        doc = Document.from_json(
            {
                "symbols": "This is a te-\nst.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 4}], "text": "This"},
                    {"id": 1, "spans": [{"start": 5, "end": 7}], "text": "is"},
                    {"id": 2, "spans": [{"start": 8, "end": 9}], "text": "a"},
                    {"id": 3, "spans": [{"start": 10, "end": 16}], "text": "test"},
                    {"id": 4, "spans": [{"start": 16, "end": 17}], "text": "."},
                ],
            }
        )
        # make sure test fixture is defined correctly
        for i, word in enumerate(doc.words):
            if i != 3:
                assert word.text == doc.symbols[word.start : word.end]
            else:
                # word 3 deliberately diverges from symbols: it has been dehyphenated
                assert word.text == "test"
                assert doc.symbols[word.start : word.end] == "te-\nst"
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 5, "end": 17},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "is a test.",
        )

    def test_normalize_whitespace(self):
        """Interaction of the `replace_newlines_with` and `normalize_whitespace`
        options; see the inline comments for the expected behavior of each combo."""
        doc = Document.from_json(
            {
                "symbols": " This has \n \n white\n space",
                "words": [
                    {"id": 0, "spans": [{"start": 1, "end": 5}], "text": "This"},
                    {"id": 1, "spans": [{"start": 8, "end": 11}], "text": "has"},
                    {"id": 2, "spans": [{"start": 16, "end": 22}], "text": "white\n"},
                    {"id": 3, "spans": [{"start": 24, "end": 30}], "text": "space"},
                ],
            }
        )
        for i, word in enumerate(doc.words):
            assert word.text == doc.symbols[word.start : word.end]
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 30},
                ]
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "This has white space",
        )
        # now try again but with newline replacement
        # should avoid newlines that aren't in words, since they were
        # never part of stringify considered text
        # but should replace the newline that's within the word
        # given the flag
        self.assertEqual(
            stringify_span_group(
                span_group=query_span_group, document=doc, replace_newlines_with="XXX"
            ),
            "This has whiteXXX space",
        )
        # `replace_newlines_with` defaults to replacing `\n` with a whitespace char
        # but then setting normalize flag to False means we are left with two whitespace chars
        self.assertEqual(
            stringify_span_group(
                span_group=query_span_group,
                document=doc,
                normalize_whitespace=False,
            ),
            "This has white space",
        )
        # combining the two
        self.assertEqual(
            stringify_span_group(
                span_group=query_span_group,
                document=doc,
                replace_newlines_with="XXX",
                normalize_whitespace=False,
            ),
            "This has whiteXXX space",
        )

    def test_how_words_relate_to_stringify(self):
        """This test is a comprehensive dive into how `words` interacts
        with `stringify()`. There are 4 cases defined here:
        1. `words` arent comprehensive. That is, `doc.symbols` contains chars
        that arent whitespace but also not part of any `word.
        2. `words` are comprehensive. each word is effectively a token.
        3. `words` are comprehensive. each word is a bigger chunk (includes punct)
        4. `words` are comprehensive and big chunks. they also override the text.
        """
        # 1) for example, what might happen is puncts and newlines aren't included in words
        doc = Document.from_json(
            {
                "symbols": "Symbols in-\nclude hyph- ens.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 7}], "text": "Symbols"},
                    {"id": 1, "spans": [{"start": 8, "end": 10}], "text": "in"},
                    {"id": 2, "spans": [{"start": 12, "end": 17}], "text": "clude"},
                    {"id": 3, "spans": [{"start": 18, "end": 22}], "text": "hyph"},
                    {"id": 4, "spans": [{"start": 24, "end": 27}], "text": "ens"},
                ],
            }
        )
        # make sure test fixture is defined correctly
        for i, word in enumerate(doc.words):
            assert word.text == doc.symbols[word.start : word.end]
        query_span_group = SpanGroup.from_json(
            {
                "spans": [
                    {"start": 0, "end": 28},
                ]
            }
        )
        # intended behavior here is that default should return just
        # the strings from the `word.text`, stitched together by whitespace
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "Symbols in clude hyph ens",
        )
        # 2) now repeat this test, but keeping the punctuation as indiv words
        doc = Document.from_json(
            {
                "symbols": "Symbols in-\nclude hyph- ens.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 7}], "text": "Symbols"},
                    {"id": 1, "spans": [{"start": 8, "end": 10}], "text": "in"},
                    {"id": 2, "spans": [{"start": 10, "end": 11}], "text": "-"},
                    {"id": 3, "spans": [{"start": 12, "end": 17}], "text": "clude"},
                    {"id": 4, "spans": [{"start": 18, "end": 22}], "text": "hyph"},
                    {"id": 5, "spans": [{"start": 22, "end": 23}], "text": "-"},
                    {"id": 6, "spans": [{"start": 24, "end": 27}], "text": "ens"},
                    {"id": 7, "spans": [{"start": 27, "end": 28}], "text": "."},
                ],
            }
        )
        for i, word in enumerate(doc.words):
            assert word.text == doc.symbols[word.start : word.end]
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "Symbols in- clude hyph- ens.",
        )
        # 3) repeat this test, but merging hyphen into bigger word chunks
        doc = Document.from_json(
            {
                "symbols": "Symbols in-\nclude hyph- ens.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 7}], "text": "Symbols"},
                    {"id": 1, "spans": [{"start": 8, "end": 11}], "text": "in-"},
                    {"id": 2, "spans": [{"start": 12, "end": 17}], "text": "clude"},
                    {"id": 3, "spans": [{"start": 18, "end": 23}], "text": "hyph-"},
                    {"id": 4, "spans": [{"start": 24, "end": 28}], "text": "ens."},
                ],
            }
        )
        for i, word in enumerate(doc.words):
            assert word.text == doc.symbols[word.start : word.end]
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "Symbols in- clude hyph- ens.",
        )
        # 4) finally, let's override the word text with alternative string
        doc = Document.from_json(
            {
                "symbols": "Symbols in-\nclude hyph- ens.",
                "words": [
                    {"id": 0, "spans": [{"start": 0, "end": 7}], "text": "Symbols"},
                    {"id": 1, "spans": [{"start": 8, "end": 11}], "text": "IN"},
                    {"id": 2, "spans": [{"start": 12, "end": 17}], "text": "clude"},
                    {"id": 3, "spans": [{"start": 18, "end": 23}], "text": "HYPH"},
                    {"id": 4, "spans": [{"start": 24, "end": 28}], "text": "ENS"},
                ],
            }
        )
        self.assertEqual(
            stringify_span_group(span_group=query_span_group, document=doc),
            "Symbols IN clude HYPH ENS",
        )
| 15,427 | 38.660668 | 128 |
py
|
mmda
|
mmda-main/tests/test_utils/test_tools.py
|
"""
@kylel
"""
import json
import pathlib
import unittest
from mmda.types.annotation import BoxGroup, SpanGroup
from mmda.types.span import Span
from mmda.types.box import Box
from mmda.types.document import Document
from mmda.utils.tools import MergeSpans
from mmda.utils.tools import box_groups_to_span_groups
# Directory holding shared on-disk fixtures for these tests: <tests>/fixtures/utils
fixture_path = pathlib.Path(__file__).parent.parent / "fixtures" / "utils"
class TestMergeNeighborSpans(unittest.TestCase):
    """Tests for MergeSpans.merge_neighbor_spans_by_symbol_distance."""

    def test_merge_multiple_neighbor_spans(self):
        """Three spans each 1 symbol apart collapse into a single span."""
        spans = [Span(start=0, end=10), Span(start=11, end=20), Span(start=21, end=30)]
        merge_spans = MergeSpans(list_of_spans=spans, index_distance=1)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 1
        assert isinstance(out[0], Span)
        assert out[0].start == 0
        assert out[0].end == 30

    def test_different_index_distances(self):
        """Spans 5 symbols apart merge only once index_distance reaches 5."""
        spans = [Span(start=0, end=10), Span(start=15, end=20)]
        merge_spans = MergeSpans(list_of_spans=spans, index_distance=1)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert out == spans  # no merge happened

        merge_spans = MergeSpans(list_of_spans=spans, index_distance=2)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert out == spans  # no merge happened

        merge_spans = MergeSpans(list_of_spans=spans, index_distance=4)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert out == spans  # no merge happened

        merge_spans = MergeSpans(list_of_spans=spans, index_distance=5)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 1
        assert isinstance(out[0], Span)
        assert out[0].start == 0
        assert out[0].end == 20

    def test_zero_index_distance(self):
        """Directly adjacent spans (end == next start) merge even at distance 0."""
        spans = [Span(start=0, end=10), Span(start=10, end=20)]
        out = MergeSpans(list_of_spans=spans, index_distance=0).merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 1
        assert isinstance(out[0], Span)
        assert out[0].start == 0
        assert out[0].end == 20

    def test_handling_of_boxes(self):
        """Merged spans carry a merged bounding box; spans on other pages or
        too far away keep their original box (or None)."""
        spans = [
            Span(start=0, end=10, box=Box(l=0, t=0, w=1, h=1, page=0)),
            Span(start=11, end=20, box=Box(l=1, t=1, w=2, h=2, page=0)),
            Span(start=21, end=150, box=Box(l=2, t=2, w=3, h=3, page=1))
        ]
        merge_spans = MergeSpans(list_of_spans=spans, index_distance=1)
        # BUGFIX: previously the merge was computed twice here and the first
        # result discarded; a single call is sufficient.
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 2
        assert isinstance(out[0], Span)
        assert isinstance(out[1], Span)
        assert out[0].start == 0
        assert out[0].end == 20
        assert out[1].start == 21
        assert out[1].end == 150
        assert out[0].box == Box(l=0, t=0, w=3, h=3, page=0)
        # unmerged spans from separate pages keep their original box
        assert out[1].box == spans[-1].box

        spans = [
            Span(start=0, end=10, box=Box(l=0, t=0, w=1, h=1, page=1)),
            Span(start=11, end=20, box=Box(l=1, t=1, w=2, h=2, page=1)),
            Span(start=100, end=150, box=Box(l=2, t=2, w=3, h=3, page=1))
        ]
        merge_spans = MergeSpans(list_of_spans=spans, index_distance=1)
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 2
        assert isinstance(out[0], Span)
        assert isinstance(out[1], Span)
        assert out[0].start == 0
        assert out[0].end == 20
        assert out[1].start == 100
        assert out[1].end == 150
        assert out[0].box == Box(l=0, t=0, w=3, h=3, page=1)
        # unmerged spans that were too far apart in symbol distance keep their original box
        assert out[1].box == spans[-1].box

        spans = [
            Span(start=0, end=10, box=Box(l=0, t=0, w=1, h=1, page=0)),
            Span(start=11, end=20),
            Span(start=21, end=150),
            Span(start=155, end=200)
        ]
        merge_spans = MergeSpans(list_of_spans=spans, index_distance=1)
        # BUGFIX: removed a second, redundant merge call whose result was discarded.
        out = merge_spans.merge_neighbor_spans_by_symbol_distance()
        assert len(out) == 3
        assert isinstance(out[0], Span)
        assert isinstance(out[1], Span)
        assert out[0].start == 0
        assert out[0].end == 10
        assert out[1].start == 11
        assert out[1].end == 150
        # spans without boxes are able to group together
        assert out[1].box is None
        # or not
        assert out[2].start == 155
        assert out[2].end == 200
        # BUGFIX: this previously re-checked out[1].box; the "or not" comment
        # indicates the unmerged, box-less span out[2] was intended.
        assert out[2].box is None
# Fixture: token spans (with boxes) from page 4 of a real PDF parse; used by
# the box-coordinate and symbol-distance merge tests below.
list_of_spans_to_merge = [
    Span(start=3944, end=3948,
         box=Box(l=0.19238134915568578, t=0.22752901673615306, w=0.06941334053447479, h=0.029442207414270286,
                 page=4)),
    Span(start=3949, end=3951,
         box=Box(l=0.27220460878651254, t=0.22752901673615306, w=0.03468585042904468, h=0.029442207414270286,
                 page=4)),
    Span(start=4060, end=4063,
         box=Box(l=0.4204075769894973, t=0.34144142726484455, w=0.023417310961637895, h=0.014200429984914883,
                 page=4)),
    Span(start=4072, end=4075,
         box=Box(l=0.5182742633669088, t=0.34144142726484455, w=0.029000512031393755, h=0.014200429984914883,
                 page=4)),
    Span(start=4076, end=4083,
         box=Box(l=0.5522956396696659, t=0.34144142726484455, w=0.06440764687304719, h=0.014200429984914883,
                 page=4)),
    Span(start=4119, end=4128,
         box=Box(l=0.2686971421659869, t=0.36273518298114954, w=0.08479235581478171, h=0.014200429984914883,
                 page=4)),
    Span(start=4134, end=4144,
         box=Box(l=0.40387889180816966, t=0.36273518298114954, w=0.08368776567508182, h=0.014200429984914883,
                 page=4)),
    Span(start=4145, end=4148,
         box=Box(l=0.4943548659781345, t=0.36273518298114954, w=0.042396177907390975, h=0.014200429984914883,
                 page=4)),
    Span(start=4149, end=4162,
         box=Box(l=0.5435392523804085, t=0.36273518298114954, w=0.11491754144296094, h=0.014200429984914883,
                 page=4)),
    Span(start=4166, end=4177,
         box=Box(l=0.6876581404256177, t=0.36273518298114954, w=0.09146006356715199, h=0.014200429984914883,
                 page=4)),
    Span(start=4419, end=4427,
         box=Box(l=0.2686971421659869, t=0.4479113936500019, w=0.06846450520430858, h=0.014200429984914883,
                 page=4)),
    Span(start=4497, end=4505,
         box=Box(l=0.2686971421659869, t=0.46920514936630686, w=0.06846450520430858, h=0.014200429984914883,
                 page=4)),
    Span(start=4517, end=4520,
         box=Box(l=0.42195400318507725, t=0.46920514936630686, w=0.029000512031393755, h=0.014200429984914883,
                 page=4)),
    Span(start=4574, end=4581,
         box=Box(l=0.2686971421659869, t=0.49049890508261185, w=0.07810456460532592, h=0.014200429984914883,
                 page=4)),
    Span(start=4582, end=4587,
         box=Box(l=0.35061756361754887, t=0.49049890508261185, w=0.03904224057412029, h=0.014200429984914883,
                 page=4)),
    Span(start=4588, end=4591,
         box=Box(l=0.39347566103790516, t=0.49049890508261185, w=0.023417310961637943, h=0.014200429984914883,
                 page=4)),
    Span(start=4592, end=4601,
         box=Box(l=0.4207088288457791, t=0.49049890508261185, w=0.08254300862121101, h=0.014200429984914883,
                 page=4)),
    Span(start=4602, end=4613,
         box=Box(l=0.5070676943132262, t=0.49049890508261185, w=0.09481400090042272, h=0.014200429984914883,
                 page=4)),
]
# Fixture: a contiguous run of token spans from page 19 of a real PDF parse
# (three consecutive text lines); with suitable x/y tolerances these merge
# into a single span (see test_merge_spans).
list_of_spans_to_merge_2 = [
    Span(start=30113, end=30119,
         box=Box(l=0.12095229775767885, t=0.3578497466414853, w=0.05243790645011725,
                 h=0.014200429984914883, page=19)),
    Span(start=30120, end=30124,
         box=Box(l=0.17929474059091924, t=0.3578497466414853, w=0.030687522426571887,
                 h=0.014200429984914883, page=19)),
    Span(start=30125, end=30129,
         box=Box(l=0.21799556239458678, t=0.3578497466414853, w=0.04350076804709073,
                 h=0.014200429984914883, page=19)),
    Span(start=30130, end=30135,
         box=Box(l=0.26740086682480063, t=0.3578497466414853, w=0.050208642713631964,
                 h=0.014200429984914883, page=19)),
    Span(start=30136, end=30141,
         box=Box(l=0.32351404592155575, t=0.3578497466414853, w=0.0446254416438761,
                 h=0.014200429984914883, page=19)),
    Span(start=30142, end=30151,
         box=Box(l=0.37404402394855496, t=0.3578497466414853, w=0.0769598075514552,
                 h=0.014200429984914883, page=19)),
    Span(start=30152, end=30155,
         box=Box(l=0.4569284513402187, t=0.3578497466414853, w=0.029000512031393852,
                 h=0.014200429984914883, page=19)),
    Span(start=30156, end=30165,
         box=Box(l=0.4918334997547357, t=0.3578497466414853, w=0.0792091547450259,
                 h=0.014200429984914883, page=19)),
    Span(start=30166, end=30175,
         box=Box(l=0.5769471908828846, t=0.3578497466414853, w=0.07175819216632291,
                 h=0.014200429984914883, page=19)),
    Span(start=30176, end=30179,
         box=Box(l=0.6576023545380633, t=0.3578497466414853, w=0.03122977576787907,
                 h=0.014200429984914883, page=19)),
    Span(start=30180, end=30184,
         box=Box(l=0.6947366666890655, t=0.3578497466414853, w=0.03904224057412024,
                 h=0.014200429984914883, page=19)),
    Span(start=30185, end=30190,
         box=Box(l=0.7396834436463088, t=0.3578497466414853, w=0.05020864271363187,
                 h=0.014200429984914883, page=19)),
    Span(start=30191, end=30193,
         box=Box(l=0.7957966227430638, t=0.3578497466414853, w=0.015624929612482252,
                 h=0.014200429984914883, page=19)),
    Span(start=30194, end=30197,
         box=Box(l=0.12095229775767885, t=0.37500875791374183, w=0.024541984558423317,
                 h=0.014200429984914883, page=19)),
    Span(start=30198, end=30207,
         box=Box(l=0.1518205712980198, t=0.37500875791374183, w=0.07695980755145514,
                 h=0.014200429984914883, page=19)),
    Span(start=30208, end=30210,
         box=Box(l=0.2351066678313926, t=0.37500875791374183, w=0.013395665875996984,
                 h=0.014200429984914883, page=19)),
    Span(start=30211, end=30214,
         box=Box(l=0.2548286226893072, t=0.37500875791374183, w=0.02231272082193805,
                 h=0.014200429984914883, page=19)),
    Span(start=30215, end=30217,
         box=Box(l=0.283467632493163, t=0.37500875791374183, w=0.015624929612482252,
                 h=0.014200429984914883, page=19)),
    Span(start=30218, end=30221,
         box=Box(l=0.3054188510875629, t=0.37500875791374183, w=0.024541984558423317,
                 h=0.014200429984914883, page=19)),
    Span(start=30222, end=30229,
         box=Box(l=0.33628712462790383, t=0.37500875791374183, w=0.055570925755447906,
                 h=0.014200429984914883, page=19)),
    Span(start=30230, end=30235,
         box=Box(l=0.3981843393652693, t=0.37500875791374183, w=0.04183384110899822,
                 h=0.014200429984914883, page=19)),
    Span(start=30236, end=30240,
         box=Box(l=0.44668588822663785, t=0.37500875791374183, w=0.03570838669793504,
                 h=0.014200429984914883, page=19)),
    Span(start=30241, end=30244,
         box=Box(l=0.4887205639064905, t=0.37500875791374183, w=0.020083457085452783,
                 h=0.014200429984914883, page=19)),
    Span(start=30245, end=30255,
         box=Box(l=0.5151303099738609, t=0.37500875791374183, w=0.08810612623388145,
                 h=0.014200429984914883, page=19)),
    Span(start=30256, end=30259,
         box=Box(l=0.6095627251896601, t=0.37500875791374183, w=0.022312720821938,
                 h=0.014200429984914883, page=19)),
    Span(start=30260, end=30262,
         box=Box(l=0.6382017349935157, t=0.37500875791374183, w=0.015624929612482252,
                 h=0.014200429984914883, page=19)),
    Span(start=30263, end=30268,
         box=Box(l=0.6601529535879158, t=0.37500875791374183, w=0.03958449391542752,
                 h=0.014200429984914883, page=19)),
    Span(start=30269, end=30273,
         box=Box(l=0.7098795933314969, t=0.37500875791374183, w=0.035708386697935225,
                 h=0.014200429984914883, page=19)),
    Span(start=30274, end=30276,
         box=Box(l=0.7519142690113497, t=0.37500875791374183, w=0.013395665875997033,
                 h=0.014200429984914883, page=19)),
    Span(start=30277, end=30278,
         box=Box(l=0.7716362238692644, t=0.37500875791374183, w=0.008917054945941066,
                 h=0.014200429984914883, page=19)),
    Span(start=30279, end=30281,
         box=Box(l=0.7868795677971232, t=0.37500875791374183, w=0.02454198455842322,
                 h=0.014200429984914883, page=19)),
    Span(start=30282, end=30291,
         box=Box(l=0.12095229775767885, t=0.3921677691859983, w=0.08031374488472577,
                 h=0.014200429984914883, page=19)),
    Span(start=30292, end=30296,
         box=Box(l=0.2062869069137678, t=0.3921677691859983, w=0.03904224057412024,
                 h=0.014200429984914883, page=19)),
    Span(start=30297, end=30302,
         box=Box(l=0.25035001175925126, t=0.3921677691859983, w=0.050208642713631964,
                 h=0.014200429984914883, page=19)),
    Span(start=30303, end=30311,
         box=Box(l=0.30557951874424644, t=0.3921677691859983, w=0.08143841848151108,
                 h=0.014200429984914883, page=19)),
    Span(start=30312, end=30314,
         box=Box(l=0.3920388014971207, t=0.3921677691859983, w=0.016729519752182193,
                 h=0.014200429984914883, page=19)),
    Span(start=30315, end=30321,
         box=Box(l=0.4137891855206661, t=0.3921677691859983, w=0.0535625800469026,
                 h=0.014200429984914883, page=19)),
    Span(start=30322, end=30328,
         box=Box(l=0.47237262983893197, t=0.3921677691859983, w=0.05354249658981717,
                 h=0.014200429984914883, page=19)),
    Span(start=30329, end=30333,
         box=Box(l=0.5309359907001122, t=0.3921677691859983, w=0.03681297683763493,
                 h=0.014200429984914883, page=19)),
    Span(start=30334, end=30336,
         box=Box(l=0.5727698318091105, t=0.3921677691859983, w=0.01672951975218224,
                 h=0.014200429984914883, page=19)),
    Span(start=30337, end=30344,
         box=Box(l=0.5945202158326559, t=0.3921677691859983, w=0.060230287799273016,
                 h=0.014200429984914883, page=19)),
    Span(start=30345, end=30348,
         box=Box(l=0.6597713679032922, t=0.3921677691859983, w=0.029000512031393946,
                 h=0.014200429984914883, page=19)),
    Span(start=30349, end=30359,
         box=Box(l=0.6937927442060494, t=0.3921677691859983, w=0.07834556609035141,
                 h=0.014200429984914883, page=19)),
]
def test_merge_spans():
    """Box-coordinate merging: with zero x/y tolerance nothing merges; with
    realistic tolerances neighboring boxes collapse into fewer spans."""
    assert len(list_of_spans_to_merge) == (len(MergeSpans(list_of_spans_to_merge, 0, 0)
                                               .merge_neighbor_spans_by_box_coordinate()))

    assert 4 == len(MergeSpans(list_of_spans_to_merge, 0.04387334, 0.01421097).merge_neighbor_spans_by_box_coordinate())

    merge_spans = MergeSpans(list_of_spans_to_merge_2, 0.04387334, 0.01421097)
    # PERF: previously the merge was recomputed on every assertion (three
    # times in total); compute it once and assert on the cached result.
    merged = merge_spans.merge_neighbor_spans_by_box_coordinate()
    assert 1 == len(merged)
    assert [30113, 30359] == [merged[0].start, merged[0].end]
def test_merge_neighbor_spans_by_symbol_distance():
    """Symbol-distance merging over the module-level fixture plus a small
    two-span fixture whose gap is exactly 2 symbols."""
    assert 7 == (len(MergeSpans(list_of_spans_to_merge, index_distance=10)
                     .merge_neighbor_spans_by_symbol_distance()))

    assert 10 == len(MergeSpans(list_of_spans_to_merge, index_distance=1).merge_neighbor_spans_by_symbol_distance())

    # local fixture: two boxed spans on the same page, 2 symbols apart
    small_fixture = [
        Span(start=1, end=3, box=Box(l=0.1, t=0.2, w=0.2, h=0.2, page=11)),
        Span(start=5, end=7, box=Box(l=0.3, t=0.2, w=0.2, h=0.2, page=11)),
    ]

    # index_distance=1 does not bridge the gap: both spans survive unchanged
    unmerged = MergeSpans(small_fixture, index_distance=1).merge_neighbor_spans_by_symbol_distance()
    assert 2 == len(unmerged)
    assert {(1, 3), (5, 7)} == {(entry.start, entry.end) for entry in unmerged}

    # index_distance=4 bridges the gap: one merged span with a merged box
    merged = MergeSpans(small_fixture, index_distance=4).merge_neighbor_spans_by_symbol_distance()
    assert 1 == len(merged)
    assert {(1, 7)} == {(entry.start, entry.end) for entry in merged}
    assert [Box(l=0.1, t=0.2, w=0.4, h=0.2, page=11)] == [entry.box for entry in merged]
def test_from_span_groups_with_box_groups():
    """MergeSpans built via from_span_groups_with_box_groups behaves the same
    as when built from raw spans carrying boxes directly."""
    # convert test fixtures into SpanGroup with BoxGroup format
    fixture_as_span_groups = [
        SpanGroup(
            spans=[Span(start=span.start, end=span.end)],
            box_group=BoxGroup(boxes=[span.box]),
        )
        for span in list_of_spans_to_merge
    ]

    assert 7 == (len(MergeSpans.from_span_groups_with_box_groups(
        fixture_as_span_groups,
        index_distance=10).merge_neighbor_spans_by_symbol_distance())
    )

    assert len(list_of_spans_to_merge) == (len(MergeSpans.from_span_groups_with_box_groups(
        fixture_as_span_groups,
        0,
        0).merge_neighbor_spans_by_box_coordinate()))
def test_box_groups_to_span_groups():
    """box_groups_to_span_groups: center=True snaps boxes to token centers for
    cleaner spans, and pad_x=True recovers leading characters clipped by the box."""
    # basic doc annotated with pages and tokens, from pdfplumber parser split at punctuation
    with open(fixture_path / "20fdafb68d0e69d193527a9a1cbe64e7e69a3798__pdfplumber_doc.json", "r") as f:
        raw_json = f.read()
        fixture_doc_json = json.loads(raw_json)
        doc = Document.from_json(fixture_doc_json)

    # boxes drawn neatly around bib entries
    with open(fixture_path / "20fdafb68d0e69d193527a9a1cbe64e7e69a3798__bib_entries.json", "r") as f:
        raw_json = f.read()
        fixture_bib_entries_json = json.loads(raw_json)["bib_entries"]

    # make box_groups from test fixture bib entry span groups (we will test the method to generate better spans)
    box_groups = [BoxGroup.from_json(bib_entry["box_group"]) for bib_entry in fixture_bib_entries_json]

    # generate span_groups with different settings
    overlap_span_groups = box_groups_to_span_groups(box_groups, doc, center=False)
    overlap_at_token_center_span_groups = box_groups_to_span_groups(box_groups, doc, center=True)
    overlap_at_token_center_span_groups_x_padded = box_groups_to_span_groups(box_groups, doc, center=True, pad_x=True)

    assert (len(box_groups) == len(overlap_span_groups) == len(overlap_at_token_center_span_groups) == len(overlap_at_token_center_span_groups_x_padded))

    # annotate all onto doc to extract texts:
    doc.annotate(overlap_span_groups=overlap_span_groups)
    doc.annotate(overlap_at_token_center_span_groups=overlap_at_token_center_span_groups)
    doc.annotate(overlap_at_token_center_span_groups_x_padded=overlap_at_token_center_span_groups_x_padded)

    # when center=False, any token overlap with BoxGroup becomes part of the SpanGroup
    # in this example, tokens from bib entry '29 and '31' overlap with the box drawn neatly around '30'
    """
    Recommendation with Hypergraph Attention Networks. In SDM’21 .
    [30] Meirui Wang, Pengjie Ren, Lei Mei, Zhumin Chen, Jun Ma, and Maarten de Rijke. 2019. A Collaborative Session-Based Recommendation Approach with
    Parallel Memory Modules. In SIGIR’19 . 345–354. [31] Pengfei Wang, Jiafeng Guo, Yanyan Lan, Jun Xu, Shengxian Wan, and Xueqi
    """
    assert "[30]" in doc.overlap_span_groups[29].text
    assert "[31]" in doc.overlap_span_groups[29].text
    # and the starting text includes tokens from actual bib entry 29
    # (BUGFIX: this assertion was accidentally duplicated; one copy removed)
    assert not doc.overlap_span_groups[29].text.startswith("[30]")

    # better text for same box when `center=True`:
    """
    [30] Meirui Wang, Pengjie Ren, Lei Mei, Zhumin Chen, Jun Ma, and Maarten de Rijke. 2019. A Collaborative Session-Based Recommendation Approach with
    Parallel Memory Modules. In SIGIR’19 . 345–354.
    """
    assert doc.overlap_at_token_center_span_groups[29].text.startswith("[30]")
    assert "[31]" not in doc.overlap_at_token_center_span_groups[29].text

    # same results for padded version on this bib entry
    assert doc.overlap_at_token_center_span_groups_x_padded[29].text.startswith("[30]")
    assert "[31]" not in doc.overlap_at_token_center_span_groups_x_padded[29].text

    # without padding, starting "[" is missed from some bib entries
    assert doc.overlap_at_token_center_span_groups[6].text.startswith("6]")
    assert doc.overlap_at_token_center_span_groups_x_padded[6].text.startswith("[6]")

    # original box_group boxes are saved
    assert all([sg.box_group is not None for sg in doc.overlap_at_token_center_span_groups])
| 25,116 | 55.064732 | 153 |
py
|
mmda
|
mmda-main/tests/test_utils/__init__.py
| 0 | 0 | 0 |
py
|
|
mmda
|
mmda-main/tests/test_utils/test_outline_metadata.py
|
"""
Test extraction of outline metadata from a PDF.
@rauthur
"""
import pathlib
import unittest
from mmda.parsers.pdfplumber_parser import PDFPlumberParser
from mmda.utils.outline_metadata import (
Outline,
PDFMinerOutlineExtractor,
PDFMinerOutlineExtractorError,
)
class TestPDFMinerOutlineExtractor(unittest.TestCase):
    """Exercise PDFMinerOutlineExtractor against fixture PDFs with and
    without embedded outline (bookmark) metadata."""

    def setUp(self) -> None:
        self.fixture_path = pathlib.Path(__file__).parent.parent / "fixtures"
        self.parser = PDFPlumberParser()
        self.extractor = PDFMinerOutlineExtractor()

    def test_query(self):
        """A PDF with bookmarks yields 18 outline items with the expected
        titles and nesting levels."""
        pdf_path = self.fixture_path / "4be952924cd565488b4a239dc6549095029ee578.pdf"
        doc = self.parser.parse(input_pdf_path=pdf_path)

        outline_pred = self.extractor.extract(input_pdf_path=pdf_path, doc=doc)
        doc.add_metadata(outline=outline_pred.to_metadata_dict())

        self.assertIsNotNone(doc.metadata.outline)
        self.assertEqual(18, len(doc.metadata.outline["items"]))

        outline = Outline.from_metadata_dict(doc.metadata)

        top_level_item = outline.items[0]
        self.assertEqual("I Introduction", top_level_item.title)
        self.assertEqual(0, top_level_item.level)

        nested_item = outline.items[4]
        self.assertEqual("IV-A Overview", nested_item.title)
        self.assertEqual(1, nested_item.level)

    def test_raise_exceptions(self):
        """With raise_exceptions=True a PDF lacking an outline raises."""
        pdf_path = self.fixture_path / "1903.10676.pdf"
        doc = self.parser.parse(input_pdf_path=pdf_path)

        with self.assertRaises(PDFMinerOutlineExtractorError):
            self.extractor.extract(
                input_pdf_path=pdf_path, doc=doc, raise_exceptions=True
            )

    def test_swallow_exceptions(self):
        """By default extraction errors are swallowed and an empty outline is returned."""
        pdf_path = self.fixture_path / "1903.10676.pdf"
        doc = self.parser.parse(input_pdf_path=pdf_path)

        outline = self.extractor.extract(input_pdf_path=pdf_path, doc=doc)
        doc.add_metadata(outline=outline.to_metadata_dict())

        self.assertEqual(0, len(doc.metadata.outline["items"]))

    def test_does_not_capture_file_missing_exception(self):
        """A missing input file propagates FileNotFoundError regardless of flags."""
        pdf_path = self.fixture_path / "this-pdf-does-not-exist.pdf"

        with self.assertRaises(FileNotFoundError):
            self.extractor.extract(input_pdf_path=pdf_path, doc=None)
| 2,325 | 30.432432 | 85 |
py
|
mmda
|
mmda-main/tests/test_parsers/test_override.py
|
import os
import pathlib
import unittest
from typing import List
from mmda.types.document import Document
from mmda.types.annotation import SpanGroup
from mmda.types.names import TokensField
from mmda.parsers.pdfplumber_parser import PDFPlumberParser
from mmda.predictors.base_predictors.base_predictor import BasePredictor
# sample PDF used to exercise parse + annotate-overwrite behavior
PDF_FIXTURE = (
    pathlib.Path(__file__).parent.parent
    / "fixtures/1903.10676.pdf"
)
class MockPredictor(BasePredictor):
    """Trivial predictor that re-emits a document's tokens as new SpanGroups."""

    REQUIRED_BACKENDS = []  # pyright: ignore
    REQUIRED_DOCUMENT_FIELDS = []  # pyright: ignore

    def predict(self, document: Document) -> List[SpanGroup]:
        """Clone each token of *document* into a standalone SpanGroup."""
        predictions: List[SpanGroup] = []
        for token in getattr(document, TokensField, []):
            clone = SpanGroup(
                spans=token.spans,
                box_group=token.box_group,
                metadata=token.metadata,
            )
            predictions.append(clone)
        return predictions
class TestPDFPlumberParser(unittest.TestCase):
    def test_parse(self):
        """Re-annotating an existing field must fail unless is_overwrite=True."""
        doc = PDFPlumberParser().parse(input_pdf_path=str(PDF_FIXTURE))
        tokens = MockPredictor().predict(doc)

        # "tokens" already exists on the parsed doc, so plain annotation fails
        with self.assertRaises(AssertionError):
            doc.annotate(tokens=tokens)

        # explicitly opting into overwriting succeeds
        doc.annotate(tokens=tokens, is_overwrite=True)
| 1,360 | 26.22 | 72 |
py
|
mmda
|
mmda-main/tests/test_parsers/test_pdf_plumber_parser.py
|
"""
@kylel
"""
import json
import os
import pathlib
import re
import unittest
import numpy as np
from mmda.parsers import PDFPlumberParser
from mmda.types import Box, BoxGroup, Document, Span, SpanGroup
class TestPDFPlumberParser(unittest.TestCase):
    """Regression tests for PDFPlumberParser.

    NOTE(review): a large group of older tests below is disabled by being
    wrapped in a module-level triple-quoted string; only the stability test
    currently runs.
    """

    # NOTE(review): parameter is named `cls` but this is an instance method;
    # unittest passes the test instance here, so it behaves like `self`.
    def setUp(cls) -> None:
        # directory holding the PDF/JSON fixtures shared by these tests
        cls.fixture_path = pathlib.Path(__file__).parent.parent / "fixtures"
'''
def test_parse(self):
parser = PDFPlumberParser()
doc = parser.parse(input_pdf_path=self.fixture_path / "1903.10676.pdf")
# right output type
assert isinstance(doc, Document)
# the right fields
assert doc.symbols
assert doc.pages
assert doc.tokens
assert doc.rows
# roughly the right content
for keyword in ["Field", "Task", "SOTA", "Base", "Frozen", "Finetune", "NER"]:
assert keyword in doc.symbols[:100]
def test_parse_page_dims(self):
parser = PDFPlumberParser()
doc = parser.parse(input_pdf_path=self.fixture_path / "1903.10676.pdf")
for page in doc.pages:
self.assertEqual(595.0, page.metadata.width)
self.assertEqual(842.0, page.metadata.height)
self.assertEqual(1.0, page.metadata.user_unit)
def test_non_default_user_unit(self):
parser = PDFPlumberParser()
doc = parser.parse(input_pdf_path=self.fixture_path / "test-uu.pdf")
for page in doc.pages:
self.assertEqual(595.0, page.metadata.width)
self.assertEqual(842.0, page.metadata.height)
self.assertEqual(2.0, page.metadata.user_unit)
def test_parse_fontinfo(self):
parser = PDFPlumberParser()
doc = parser.parse(input_pdf_path=self.fixture_path / "1903.10676.pdf")
metadata = doc.tokens[0].metadata # pylint: disable=no-member
self.assertEqual("HXONRZ+NimbusRomNo9L-Regu", metadata["fontname"])
self.assertAlmostEqual(8.96638, metadata["size"])
def test_split_punctuation(self):
no_split_parser = PDFPlumberParser(split_at_punctuation=False)
no_split_doc = no_split_parser.parse(
input_pdf_path=self.fixture_path / "2107.07170.pdf"
)
no_split_tokens_with_numbers = [
token.text
for token in no_split_doc.tokens
if re.search(r"[0-9]", token.text)
]
assert "[1-5]" in no_split_tokens_with_numbers
assert "GPT-3[10]" in no_split_tokens_with_numbers
custom_split_parser = PDFPlumberParser(split_at_punctuation=",.[]:")
custom_split_doc = custom_split_parser.parse(
input_pdf_path=self.fixture_path / "2107.07170.pdf"
)
custom_split_tokens_with_numbers = [
token.text
for token in custom_split_doc.tokens
if re.search(r"[0-9]", token.text)
]
assert "[1-5]" not in custom_split_tokens_with_numbers
assert "1-5" in custom_split_tokens_with_numbers
assert "GPT-3[10]" not in custom_split_tokens_with_numbers
assert "GPT-3" in custom_split_tokens_with_numbers
default_split_parser = PDFPlumberParser(split_at_punctuation=True)
default_split_doc = default_split_parser.parse(
input_pdf_path=os.path.join(self.fixture_path, "2107.07170.pdf")
)
default_split_tokens_with_numbers = [
token.text
for token in default_split_doc.tokens
if re.search(r"[0-9]", token.text)
]
assert "1-5" not in default_split_tokens_with_numbers
assert "GPT-3" not in default_split_tokens_with_numbers
assert (
len(no_split_tokens_with_numbers)
< len(custom_split_tokens_with_numbers)
< len(default_split_tokens_with_numbers)
)
def test_align_coarse_and_fine_tokens(self):
parser = PDFPlumberParser()
# example
coarse_tokens = ["abc", "def"]
fine_tokens = ["ab", "c", "d", "ef"]
out = parser._align_coarse_and_fine_tokens(
coarse_tokens=coarse_tokens, fine_tokens=fine_tokens
)
assert out == [0, 0, 1, 1]
# minimal case
coarse_tokens = []
fine_tokens = []
out = parser._align_coarse_and_fine_tokens(
coarse_tokens=coarse_tokens, fine_tokens=fine_tokens
)
assert out == []
# identical case
coarse_tokens = ["a", "b", "c"]
fine_tokens = ["a", "b", "c"]
out = parser._align_coarse_and_fine_tokens(
coarse_tokens=coarse_tokens, fine_tokens=fine_tokens
)
assert out == [0, 1, 2]
# misaligned case
with self.assertRaises(AssertionError):
coarse_tokens = ["a", "b"]
fine_tokens = ["ab"]
parser._align_coarse_and_fine_tokens(
coarse_tokens=coarse_tokens, fine_tokens=fine_tokens
)
# same num of chars, but chars mismatch case
with self.assertRaises(AssertionError):
coarse_tokens = ["ab"]
fine_tokens = ["a", "c"]
parser._align_coarse_and_fine_tokens(
coarse_tokens=coarse_tokens, fine_tokens=fine_tokens
)
def test_convert_nested_text_to_doc_json(self):
parser = PDFPlumberParser()
# example
token_dicts = [
{"text": text, "bbox": Box(l=0.0, t=0.1, w=0.2, h=0.3, page=4)}
for text in ["ab", "c", "d", "ef", "gh", "i", "j", "kl"]
]
word_ids = [0, 0, 1, 2, 3, 4, 5, 5]
row_ids = [0, 0, 1, 1, 2, 2, 3, 3]
page_ids = [0, 0, 0, 0, 1, 1, 1, 1]
page_dims = [(100, 200, 1.), (400, 800, 1.)]
out = parser._convert_nested_text_to_doc_json(
token_dicts=token_dicts,
word_ids=word_ids,
row_ids=row_ids,
page_ids=page_ids,
dims=page_dims,
)
assert out["symbols"] == "abc\nd ef\ngh i\njkl"
tokens = [
SpanGroup.from_json(span_group_dict=t_dict) for t_dict in out["tokens"]
]
assert [(t.start, t.end) for t in tokens] == [
(0, 2),
(2, 3),
(4, 5),
(6, 8),
(9, 11),
(12, 13),
(14, 15),
(15, 17),
]
assert [out["symbols"][t.start : t.end] for t in tokens] == [
"ab",
"c",
"d",
"ef",
"gh",
"i",
"j",
"kl",
]
rows = [SpanGroup.from_json(span_group_dict=r_dict) for r_dict in out["rows"]]
assert [(r.start, r.end) for r in rows] == [(0, 3), (4, 8), (9, 13), (14, 17)]
assert [out["symbols"][r.start : r.end] for r in rows] == [
"abc",
"d ef",
"gh i",
"jkl",
]
pages = [SpanGroup.from_json(span_group_dict=p_dict) for p_dict in out["pages"]]
assert [(p.start, p.end) for p in pages] == [(0, 8), (9, 17)]
assert [out["symbols"][p.start : p.end] for p in pages] == [
"abc\nd ef",
"gh i\njkl",
]
'''
def test_parser_stability(self):
"""
We need output to be stable from release to release. Failure of this test is caused
by changes to core output: document text, tokenization, and bbox localization.
It deliberately excludes `metadata` from consideration as we are expanding
its scope of coverage, but that should probably be locked down too the moment
we depend on particular fields.
Updates that break this test should be considered potentially breaking to downstream
models and require re-evaluation and possibly retraining of all components in the DAG.
"""
parser = PDFPlumberParser()
current_doc = parser.parse(input_pdf_path=self.fixture_path / "4be952924cd565488b4a239dc6549095029ee578.pdf")
with open(self.fixture_path / "4be952924cd565488b4a239dc6549095029ee578__pdfplumber_doc.json", "r") as f:
raw_json = f.read()
fixture_doc_json = json.loads(raw_json)
fixture_doc = Document.from_json(fixture_doc_json)
self.assertEqual(current_doc.symbols, fixture_doc.symbols, msg="Current parse has extracted different text from pdf.")
def compare_span_groups(current_doc_sgs, fixture_doc_sgs, annotation_name):
current_doc_sgs_simplified = [
[(s.start, s.end) for s in sg.spans] for sg in current_doc_sgs
]
fixture_doc_sgs_simplified = [
[(s.start, s.end) for s in sg.spans] for sg in fixture_doc_sgs
]
self.assertEqual(
current_doc_sgs_simplified,
fixture_doc_sgs_simplified,
msg=f"Current parse produces different SpanGroups for `{annotation_name}`"
)
current_doc_sg_boxes = [[list(s.box.xywh) + [s.box.page] for s in sg] for sg in current_doc_sgs]
fixture_doc_sg_boxes = [[list(s.box.xywh) + [s.box.page] for s in sg] for sg in current_doc_sgs]
self.assertAlmostEqual(
current_doc_sg_boxes,
fixture_doc_sg_boxes,
places=3,
msg=f"Boxes generated for `{annotation_name}` have changed."
)
compare_span_groups(current_doc.tokens, fixture_doc.tokens, "tokens")
compare_span_groups(current_doc.rows, fixture_doc.rows, "rows")
compare_span_groups(current_doc.pages, fixture_doc.pages, "pages")
| 9,608 | 35.536122 | 126 |
py
|
mmda
|
mmda-main/tests/test_parsers/test_grobid_header_parser.py
|
import os
import pathlib
import unittest
import unittest.mock as um
import pytest
from mmda.parsers.grobid_parser import GrobidHeaderParser
# tests resolve fixture files relative to this test file's directory
os.chdir(pathlib.Path(__file__).parent)

# canned Grobid TEI responses: full header, header missing the title, header missing the abstract
XML_OK = open("../fixtures/grobid-tei-maml-header.xml").read()
XML_NO_TITLE = open("../fixtures/grobid-tei-no-title.xml").read()
XML_NO_ABS = open("../fixtures/grobid-tei-no-abstract.xml").read()
def mock_post(*args, **kwargs):
    """Stand-in for requests.post: the URL suffix selects which canned
    Grobid TEI response (and status code) to return."""

    class MockResponse:
        def __init__(self, xml: str, status_code: int) -> None:
            self._xml = xml
            self._status_code = status_code

        @property
        def text(self):
            return self._xml

        @property
        def status_code(self):
            return self._status_code

    requested_url = args[0]
    canned_responses = (
        ("ok", XML_OK, 200),
        ("no-title", XML_NO_TITLE, 200),
        ("no-abs", XML_NO_ABS, 200),
        ("err", None, 500),
    )
    for suffix, xml, status in canned_responses:
        if requested_url.endswith(suffix):
            return MockResponse(xml, status)

    return MockResponse(None, 404)
class TestGrobidHeaderParser(unittest.TestCase):
    """Tests for GrobidHeaderParser against a mocked Grobid service."""

    @um.patch("requests.post", side_effect=mock_post)
    def test_processes_header(self, mock_post):
        """A complete TEI response yields both title and abstract."""
        parser = GrobidHeaderParser(url="http://localhost/ok")

        with um.patch("builtins.open", um.mock_open(read_data="it's xml")):
            document = parser.parse(input_pdf_path="some-location")

        assert document.title[0].text.startswith("Model-Agnostic Meta-Learning")
        assert document.abstract[0].text.startswith("We propose an algorithm")

        assert document.title[0].symbols[0:2] == ["Model-Agnostic", "Meta-Learning"]
        assert document.abstract[0].symbols[0:2] == ["We", "propose"]

    @um.patch("requests.post", side_effect=mock_post)
    def test_processes_header_without_title(self, mock_post):
        """A TEI response with no title yields an empty title but a valid abstract."""
        parser = GrobidHeaderParser(url="http://localhost/no-title")

        with um.patch("builtins.open", um.mock_open(read_data="it's xml")):
            document = parser.parse(input_pdf_path="some-location")

        assert document.title[0].text == ""
        assert document.abstract[0].text.startswith("We propose an algorithm")
        assert document.abstract[0].symbols[0:2] == ["We", "propose"]

    # BUGFIX: this method was previously also named
    # `test_processes_header_without_title`, which shadowed the method above so
    # the no-title case never ran under unittest discovery.
    @um.patch("requests.post", side_effect=mock_post)
    def test_processes_header_without_abstract(self, mock_post):
        """A TEI response with no abstract yields an empty abstract but a valid title."""
        parser = GrobidHeaderParser(url="http://localhost/no-abs")

        with um.patch("builtins.open", um.mock_open(read_data="it's xml")):
            document = parser.parse(input_pdf_path="some-location")

        assert document.abstract[0].text == ""
        assert document.title[0].text.startswith("Model-Agnostic Meta-Learning")
        assert document.title[0].symbols[0:2] == ["Model-Agnostic", "Meta-Learning"]

    @um.patch("requests.post", side_effect=mock_post)
    def test_processes_header_server_error_raises(self, mock_post):
        """A 500 from the Grobid service surfaces as a RuntimeError."""
        parser = GrobidHeaderParser(url="http://localhost/err")

        with pytest.raises(RuntimeError) as ex:
            with um.patch("builtins.open", um.mock_open(read_data="it's xml")):
                parser.parse(input_pdf_path="some-location")

        assert "Unable to process" in str(ex.value)
| 3,260 | 35.640449 | 84 |
py
|
mmda
|
mmda-main/tests/test_parsers/test_grobid_augment_existing_document_parser.py
|
import json
import logging
import os
import pathlib
import unittest
import unittest.mock as um
import pytest
from mmda.types.document import Document
from mmda.parsers.grobid_augment_existing_document_parser import (
GrobidAugmentExistingDocumentParser,
)
# Run relative to this test file so the fixture paths below resolve.
os.chdir(pathlib.Path(__file__).parent)
PDF_PATH = "../fixtures/grobid_augment_existing_document_parser/e5910c027af0ee9c1901c57f6579d903aedee7f4.pdf"
PDFPLUMBER_DOC_PATH = "../fixtures/grobid_augment_existing_document_parser/e5910c027af0ee9c1901c57f6579d903aedee7f4__pdfplumber_doc.json"
OK_CONFIG_PATH = "../fixtures/grobid_augment_existing_document_parser/grobid.config"
# Read fixture XML via pathlib so the file handles are closed promptly;
# the previous bare open(...).read() leaked the handles (ResourceWarning).
XML_OK = pathlib.Path(
    "../fixtures/grobid_augment_existing_document_parser/e5910c027af0ee9c1901c57f6579d903aedee7f4.xml"
).read_text()
NO_AUTHORS_CONFIG_PATH = (
    "../fixtures/grobid_augment_existing_document_parser/grobid-no-authors.config"
)
XML_NO_AUTHORS = pathlib.Path(
    "../fixtures/grobid_augment_existing_document_parser/e5910c027af0ee9c1901c57f6579d903aedee7f4_no_authors.xml"
).read_text()
def mock_request(*args, **kwargs):
    """Stand-in for requests.request that serves canned Grobid responses.

    The second positional argument (the config-file url) selects the fixture:
    'ok*' returns the full XML, 'no-authors*' the XML without authors, and
    anything else a 404 with no body.
    """
    class MockResponse:
        def __init__(self, xml: str, status_code: int) -> None:
            self._xml = xml
            self._status_code = status_code
        @property
        def text(self):
            return self._xml
        @property
        def status_code(self):
            return self._status_code
    url = args[1]
    if url.startswith("no-authors"):
        return MockResponse(XML_NO_AUTHORS, 200)
    if url.startswith("ok"):
        return MockResponse(XML_OK, 200)
    return MockResponse(None, 404)
class TestGrobidAugmentExistingDocumentParser(unittest.TestCase):
    """Tests for GrobidAugmentExistingDocumentParser against the mocked Grobid server."""

    @um.patch("requests.request", side_effect=mock_request)
    def test_processes_full_text(self, mock_request):
        with open(PDFPLUMBER_DOC_PATH) as f_in:
            doc_dict = json.load(f_in)
        doc = Document.from_json(doc_dict)
        augmenter_parser = GrobidAugmentExistingDocumentParser(
            config_path=OK_CONFIG_PATH, check_server=False
        )
        augmented_doc = augmenter_parser.parse(input_pdf_path=PDF_PATH, doc=doc)
        # bib_entries
        # Bug fix: these counts were compared with `is`, which tests object
        # identity and only "worked" via CPython's small-int cache; it raises
        # SyntaxWarning on Python 3.8+. Use == for value comparison.
        assert len(augmented_doc.bib_entries) == 40
        assert augmented_doc.bib_entries[0].text.startswith(
            "ISPRS 2D Semantic Labeling Challenge."
        )
        for b in augmented_doc.bib_entries:
            assert b.box_group.metadata.grobid_id is not None
        # authors
        assert len(augmented_doc.authors) == 4
        # citation_mentions
        assert len(augmented_doc.citation_mentions) == 67
        bib_entry_grobid_ids = [
            sg.box_group.metadata.grobid_id for sg in augmented_doc.bib_entries
        ]
        mentions_with_targets = 0
        for m in augmented_doc.citation_mentions:
            if m.box_group.metadata.target_id:
                mentions_with_targets += 1
                assert m.box_group.metadata.target_id.startswith("b")
                assert m.box_group.metadata.target_id in bib_entry_grobid_ids
        assert mentions_with_targets == 66

    @um.patch("requests.request", side_effect=mock_request)
    def test_passes_if_xml_missing_authors(self, mock_request):
        with open(PDFPLUMBER_DOC_PATH) as f_in:
            doc_dict = json.load(f_in)
        doc = Document.from_json(doc_dict)
        augmenter_parser = GrobidAugmentExistingDocumentParser(
            config_path=NO_AUTHORS_CONFIG_PATH, check_server=False
        )
        augmented_doc = augmenter_parser.parse(input_pdf_path=PDF_PATH, doc=doc)
        assert len(augmented_doc.authors) == 0
| 3,677 | 34.028571 | 137 |
py
|
mmda
|
mmda-main/tests/test_internal_ai2/test_api.py
|
import unittest
from pydantic.error_wrappers import ValidationError
import ai2_internal.api as mmda_api
import mmda.types.annotation as mmda_ann
from mmda.types import Metadata
from mmda.types.box import Box as mmdaBox
from mmda.types.span import Span as mmdaSpan
class ClassificationAttributes(mmda_api.Attributes):
    """Attributes schema for classifier output: a predicted label and its score."""
    label: str
    score: float
class ClassificationSpanGroup(mmda_api.SpanGroup):
    """SpanGroup whose attributes are validated against ClassificationAttributes."""
    attributes: ClassificationAttributes
class TestApi(unittest.TestCase):
    """Round-trip tests between mmda annotation types and their API (pydantic) mirrors."""

    def test_vanilla_span_group(self) -> None:
        """Base SpanGroup: text and id survive conversion; attributes stay empty."""
        sg_ann = mmda_ann.SpanGroup.from_json({
            'spans': [{'start': 0, 'end': 1}],
            'id': 1,
            'metadata': {'text': 'hello', 'id': 999}  # note id not used; it's just in metadata
        })
        sg_api = mmda_api.SpanGroup.from_mmda(sg_ann)
        self.assertEqual(sg_api.text, 'hello')
        self.assertEqual(sg_api.id, 1)
        self.assertEqual(sg_api.attributes.dict(), {})

    def test_classification_span_group(self) -> None:
        """Typed attributes are validated; extra metadata fields are ignored.

        NOTE: sg_ann.metadata is mutated in place between assertions, so the
        statement order in this test is significant.
        """
        sg_ann = mmda_ann.SpanGroup.from_json({
            'spans': [{'start': 0, 'end': 1}],
            'metadata': {'text': 'hello', 'id': 1}
        })
        with self.assertRaises(ValidationError):
            # this should fail because metadata is missing label
            # and confidence
            ClassificationSpanGroup.from_mmda(sg_ann)
        sg_ann.metadata.label = 'label'
        sg_ann.metadata.score = 0.5
        sg_api = ClassificationSpanGroup.from_mmda(sg_ann)
        self.assertEqual(
            sg_api.attributes.dict(), {'label': 'label', 'score': 0.5}
        )
        # extra field should just get ignored
        sg_ann.metadata.extra = 'extra'
        self.assertEqual(
            sg_api.attributes.dict(), {'label': 'label', 'score': 0.5}
        )
        with self.assertRaises(ValidationError):
            # this should fail bc score is not a float
            sg_ann.metadata.score = 'not a float'
            ClassificationSpanGroup.from_mmda(sg_ann)

    def test_equivalence(self):
        """mmda -> API -> mmda round trip preserves both JSON form and __dict__."""
        sg_ann = mmda_ann.SpanGroup.from_json({
            'spans': [{'start': 0, 'end': 1}],
            'metadata': {'label': 'label', 'score': 0.5}
        })
        sg_ann_2 = ClassificationSpanGroup.from_mmda(sg_ann).to_mmda()
        self.assertDictEqual(sg_ann.to_json(), sg_ann_2.to_json())
        self.assertDictEqual(sg_ann.__dict__, sg_ann_2.__dict__)

    def test_box(self):
        """API Box <-> mmda Box conversion is lossless."""
        box = mmda_api.Box(left=0.1, top=0.1, width=0.1, height=0.1, page=0)
        assert box.to_mmda() == mmdaBox(l=0.1, t=0.1, w=0.1, h=0.1, page=0)
        assert mmda_api.Box.from_mmda(box.to_mmda()) == box

    def test_span(self):
        """API Span converts to an mmda Span, including its nested box."""
        span = mmda_api.Span(start=0, end=1, box=mmda_api.Box(left=0.1, top=0.1, width=0.1, height=0.1, page=0))
        assert span.to_mmda() == mmdaSpan(start=0, end=1, box=mmdaBox(l=0.1, t=0.1, w=0.1, h=0.1, page=0))

    def test_box_group(self):
        """BoxGroup survives a round trip through mmda (see attribute note below)."""
        box_group = mmda_api.BoxGroup(
            boxes=[
                mmda_api.Box(left=0.1, top=0.1, width=0.1, height=0.1, page=0)
            ],
            id=0,
            type='test',
            # these attributes are going to be discarded because
            # BoxGroup is using the default Attributes class
            attributes={'one': 'Test string'}
        )
        self.assertEqual(
            mmda_api.BoxGroup.from_mmda(box_group.to_mmda()),
            box_group
        )

    def test_span_group(self):
        """SpanGroup round trip including a nested box_group."""
        box_group = mmda_api.BoxGroup(
            boxes=[
                mmda_api.Box(left=0.1, top=0.1, width=0.1, height=0.1, page=0)
            ],
            id=0,
            type='test',
            attributes={'one': 'Test string'}
        )
        span_group = mmda_api.SpanGroup(
            spans=[],
            box_group=box_group,
            attributes={'one': 'Test string'},
            id=0,
            type='test',
            text='this is a test'
        )
        self.assertEqual(
            mmda_api.SpanGroup.from_mmda(span_group.to_mmda()),
            span_group
        )
| 4,062 | 31.766129 | 112 |
py
|
mmda
|
mmda-main/tests/test_eval/test_metrics.py
|
import unittest
from mmda.eval.metrics import box_overlap, levenshtein
from mmda.types.box import Box
class TestLevenshteinDistance(unittest.TestCase):
    """Exercises the levenshtein() edit-distance function and its options."""

    def test_calculates(self):
        # Plain edit distances over a table of (a, b, expected) cases.
        for a, b, expected in [
            ("hello", "kelm", 3),
            ("kelm", "hello", 3),
            ("", "hello", 5),
            ("heck", "hecko", 1),
            ("ecko", "hecko", 1),
        ]:
            assert levenshtein(a, b) == expected

    def test_unicode_edits(self):
        # Superscript plus (U+207A) is a single substitution away from '+'.
        assert levenshtein("Na+", "Na\u207a") == 1

    def test_case_sensitivity(self):
        assert levenshtein("Hello", "heLlo") == 2
        assert levenshtein("Hello", "heLlo", case_sensitive=False) == 0

    def test_strips_spaces(self):
        assert levenshtein("\nHel lo\r", "Hello") == 3
        assert levenshtein(" Hel lo ", "Hello", strip_spaces=True) == 0

    def test_normalizes(self):
        assert levenshtein("\nHel lo\r", "Hello") == 3
        assert levenshtein(" Hel lo ", "Hello", normalize=True) == 0.375
class TestBoxOverlap(unittest.TestCase):
    """Exercises box_overlap() fractional-intersection computation."""

    def _box(self, l, t, w, h):
        # All test boxes live on page 0.
        return Box(l=l, t=t, w=w, h=h, page=0)

    def _assert_half_overlap(self, box, container):
        # Half of `box` lies inside `container`; the reverse direction is the
        # same intersection area divided by the container's area of 200.
        assert box_overlap(box, container) == 0.5
        assert box_overlap(container, box) == 1.0 / 200.0

    def test_consumed(self):
        inner = self._box(1.0, 2.0, 3.0, 3.0)
        outer = self._box(0.0, 0.0, 4.0, 5.0)
        assert box_overlap(inner, outer) == 1.0

    def test_no_overlap(self):
        first = self._box(0.0, 0.0, 1.0, 1.0)
        second = self._box(2.0, 2.0, 1.0, 1.0)
        assert box_overlap(first, second) == 0.0

    def test_partially_contained_top(self):
        self._assert_half_overlap(
            self._box(1.0, 0.0, 1.0, 2.0), self._box(0.0, 1.0, 100.0, 2.0)
        )

    def test_partially_contained_bottom(self):
        self._assert_half_overlap(
            self._box(1.0, 1.0, 1.0, 2.0), self._box(0.0, 0.0, 100.0, 2.0)
        )

    def test_partially_contained_left(self):
        self._assert_half_overlap(
            self._box(0.0, 2.0, 2.0, 1.0), self._box(1.0, 1.0, 2.0, 100.0)
        )

    def test_partially_contained_right(self):
        self._assert_half_overlap(
            self._box(1.0, 1.0, 2.0, 1.0), self._box(0.0, 0.0, 2.0, 100.0)
        )

    def test_partially_contained_corner(self):
        box = self._box(1.0, 0.0, 2.0, 2.0)
        container = self._box(0.0, 1.0, 2.0, 2.0)
        assert box_overlap(box, container) == 0.25
| 2,670 | 31.975309 | 72 |
py
|
mmda
|
mmda-main/release/pypi-aliases/scipdf/src/scipdf/__init__.py
|
"""Alias package: re-exports mmda's public subpackages under this name."""
from mmda import (
    eval,
    featurizers,
    parsers,
    predictors,
    rasterizers,
    types,
    utils
)
# Public API mirrors mmda's top-level subpackages one-to-one.
__all__ = [
    "eval",
    "featurizers",
    "parsers",
    "predictors",
    "rasterizers",
    "types",
    "utils"
]
| 238 | 10.95 | 18 |
py
|
mmda
|
mmda-main/release/pypi-aliases/papermage/src/papermage/__init__.py
|
"""Alias package: re-exports mmda's public subpackages under this name."""
from mmda import (
    eval,
    featurizers,
    parsers,
    predictors,
    rasterizers,
    types,
    utils
)
# Public API mirrors mmda's top-level subpackages one-to-one.
__all__ = [
    "eval",
    "featurizers",
    "parsers",
    "predictors",
    "rasterizers",
    "types",
    "utils"
]
| 238 | 10.95 | 18 |
py
|
PRISim
|
PRISim-master/setup.py
|
import glob
import os
import re
from subprocess import Popen, PIPE
from setuptools import setup, find_packages
# Record the current git revision alongside the package so builds are
# traceable; fall back to 'unknown' when git or the .git directory is absent.
githash = 'unknown'
if os.path.isdir(os.path.dirname(os.path.abspath(__file__)) + '/.git'):
    try:
        gitproc = Popen(['git', 'rev-parse', 'HEAD'], stdout=PIPE)
        githash = gitproc.communicate()[0]
        if gitproc.returncode != 0:
            # Bug fix: these warnings were Python-2 style bare `print`
            # statements split across two lines, which are silent no-ops on
            # Python 3 -- the message was never shown.
            print("unable to run git, assuming githash to be unknown")
            githash = 'unknown'
        elif isinstance(githash, bytes):
            # Popen.communicate() returns bytes on Python 3; decode before
            # the str.replace() below (which would otherwise raise TypeError).
            githash = githash.decode('utf-8')
    except EnvironmentError:
        print("unable to run git, assuming githash to be unknown")
    githash = githash.replace('\n', '')
    # Context manager ensures the file handle is always closed.
    with open(os.path.dirname(os.path.abspath(__file__)) + '/prisim/githash.txt', 'w+') as githash_file:
        githash_file.write(githash)

# Scrape metadata (__version__, __description__, ...) from the package's
# __init__.py without importing it. Raw string avoids invalid-escape warnings.
with open(os.path.dirname(os.path.abspath(__file__)) + '/prisim/__init__.py') as metafile_obj:
    metafile = metafile_obj.read()
metadata = dict(re.findall(r"__([a-z]+)__\s*=\s*'([^']+)'", metafile))

with open("README.rst") as readme_file:
    long_description = readme_file.read()

setup(name='PRISim',
      version=metadata['version'],
      description=metadata['description'],
      long_description=long_description,
      url=metadata['url'],
      author=metadata['author'],
      author_email=metadata['authoremail'],
      license='MIT',
      classifiers=['Development Status :: 4 - Beta',
                   'Intended Audience :: Science/Research',
                   'License :: OSI Approved :: MIT License',
                   'Programming Language :: Python :: 2.7',
                   'Topic :: Scientific/Engineering',
                   'Topic :: Scientific/Engineering :: Astronomy',
                   'Topic :: Utilities'],
      packages=find_packages(),
      package_data={'prisim': ['*.txt', 'examples/simparms/*.yaml',
                               'examples/schedulers/*.txt',
                               'examples/dbparms/*.yaml',
                               'examples/ioparms/*.yaml',
                               'examples/codes/BispectrumPhase/*.yaml',
                               'examples/codes/BispectrumPhase/*.py',
                               'examples/codes/BispectrumPhase/*.ipynb',
                               'data/catalogs/*.txt', 'data/catalogs/*.csv',
                               'data/catalogs/*.fits', 'data/beams/*.hmap',
                               'data/beams/*.txt', 'data/beams/*.hdf5',
                               'data/beams/*.FITS', 'data/array_layouts/*.txt',
                               'data/phasedarray_layouts/*.txt',
                               'data/bandpass/*.fits', 'data/bandpass/*.txt']},
      include_package_data=True,
      scripts=glob.glob('scripts/*.py'),
      install_requires=[
          'astropy>=1.0, <3.0',
          'astroutils @ git+git://github.com/nithyanandan/AstroUtils',
          'healpy>=1.5.3',
          'ipdb>=0.6.1',
          'matplotlib>=1.4.3, <3.0',
          'mpi4py>=1.2.2',
          'numpy>=1.8.1',
          'progressbar>=2.3',
          'psutil>=2.2.1',
          'pyephem>=3.7.5.3',
          'pyyaml>=3.11',
          'scipy>=0.15.1',
          'h5py>=2.6.0',
          'pyuvdata>=1.1',
          'gdown',
          'aipy',
      ],
      tests_require=['pytest'],
      zip_safe=False)
| 3,158 | 38.987342 | 100 |
py
|
PRISim
|
PRISim-master/prisim/interferometry.py
|
from __future__ import division
import numpy as NP
import scipy.constants as FCNST
from scipy import interpolate, ndimage
import datetime as DT
import progressbar as PGB
import os, ast
import copy
import astropy
from astropy.io import fits, ascii
from astropy.coordinates import Galactic, SkyCoord, ICRS, FK5, AltAz, EarthLocation
from astropy import units
from astropy.time import Time
import warnings
import h5py
from distutils.version import LooseVersion
import psutil
import astroutils
from astroutils import geometry as GEOM
from astroutils import gridding_modules as GRD
from astroutils import constants as CNST
from astroutils import DSP_modules as DSP
from astroutils import catalog as SM
from astroutils import lookup_operations as LKP
from astroutils import nonmathops as NMO
import prisim
import baseline_delay_horizon as DLY
import primary_beams as PB
try:
import pyuvdata
from pyuvdata import UVData
from pyuvdata import utils as UVUtils
except ImportError:
uvdata_module_found = False
else:
uvdata_module_found = True
try:
from mwapy.pb import primary_beam as MWAPB
except ImportError:
mwa_tools_found = False
else:
mwa_tools_found = True
prisim_path = prisim.__path__[0]+'/'
################################################################################
def _astropy_columns(cols, tabtype='BinTableHDU'):
    """
    ----------------------------------------------------------------------------
    !!! FOR INTERNAL USE ONLY !!!
    This internal routine checks for Astropy version and produces the FITS
    columns based on the version

    Inputs:

    cols    [list of Astropy FITS columns] These are a list of Astropy FITS
            columns

    tabtype [string] specifies table type - 'BinTableHDU' (default) for binary
            tables and 'TableHDU' for ASCII tables

    Outputs:

    columns [Astropy FITS column data]

    Raises NotImplementedError for astropy versions that are neither exactly
    0.4 nor >= 0.4.2 (the two APIs handled here).
    ----------------------------------------------------------------------------
    """
    try:
        cols
    except NameError:
        raise NameError('Input cols not specified')

    if tabtype not in ['BinTableHDU', 'TableHDU']:
        raise ValueError('tabtype specified is invalid.')
    use_ascii = False
    if tabtype == 'TableHDU':
        use_ascii = True
    if astropy.__version__ == '0.4':
        # Astropy 0.4 used the tbtype keyword.
        columns = fits.ColDefs(cols, tbtype=tabtype)
    elif LooseVersion(astropy.__version__) >= LooseVersion('0.4.2'):
        # Astropy >= 0.4.2 replaced tbtype with a boolean ascii keyword.
        columns = fits.ColDefs(cols, ascii=use_ascii)
    else:
        # Bug fix: versions falling between the two branches previously
        # fell through and raised an opaque NameError on `columns`.
        raise NotImplementedError('astropy version {0} is not supported'.format(astropy.__version__))
    return columns
################################################################################
def thermalNoiseRMS(A_eff, df, dt, Tsys, nbl=1, nchan=1, ntimes=1,
                    flux_unit='Jy', eff_Q=1.0):
    """
    -------------------------------------------------------------------------
    Generates thermal noise RMS from instrument parameters for a complex-
    valued visibility measurement by an interferometer.

    [Based on equations 9-12 through 9-15 or section 5 in chapter 9 on
    Sensitivity in SIRA II wherein the equations are for real and imaginary
    parts separately.]

    A_eff      [scalar or numpy array] Effective area of the interferometer
               (in m^2). A scalar applies identically to all interferometers;
               an array must have a shape broadcastable to (nbl,nchan,ntimes),
               i.e. three axes each of length 1 or the full length. Must be
               specified. No defaults.

    df         [scalar] Frequency resolution (in Hz). Must be specified.

    dt         [scalar] Time resolution (in seconds). Must be specified.

    Tsys       [scalar or numpy array] System temperature (in K). Same scalar/
               broadcasting rules as A_eff. Must be specified. No defaults.

    nbl        [integer] Number of baseline vectors. Default=1

    nchan      [integer] Number of frequency channels. Default=1

    ntimes     [integer] Number of time stamps. Default=1

    flux_unit  [string] Units of thermal noise RMS to be returned. Accepted
               values are 'K' or 'Jy' (default)

    eff_Q      [scalar or numpy array] Efficiency of the interferometer(s),
               between 0 and 1. Same scalar/broadcasting rules as A_eff.
               Default=1.0

    Output:

    Numpy array of thermal noise RMS (in units of K or Jy depending on
    flux_unit) broadcastable to (nbl, nchan, ntimes) expected on a complex-
    valued visibility measurement from an interferometer. 1/sqrt(2) of this
    goes each into the real and imaginary parts.
    -------------------------------------------------------------------------
    """
    try:
        A_eff, df, dt, Tsys
    except NameError:
        raise NameError('Inputs A_eff, df, dt, and Tsys must be specified')

    if not isinstance(df, (int,float)):
        raise TypeError('Input channel resolution must be a scalar')
    df = float(df)
    if not isinstance(dt, (int,float)):
        raise TypeError('Input time resolution must be a scalar')
    dt = float(dt)

    if not isinstance(nbl, int):
        raise TypeError('Input nbl must be an integer')
    if nbl <= 0:
        raise ValueError('Input nbl must be positive')
    if not isinstance(nchan, int):
        raise TypeError('Input nchan must be an integer')
    if nchan <= 0:
        raise ValueError('Input nchan must be positive')
    if not isinstance(ntimes, int):
        raise TypeError('Input ntimes must be an integer')
    if ntimes <= 0:
        raise ValueError('Input ntimes must be positive')

    def _validated_array(val, name, shape_errmsg):
        # Coerce input to a float array and verify it broadcasts to
        # (nbl,nchan,ntimes): exactly 3 axes, each of length 1 or full.
        # NOTE: NP.float was removed in numpy>=1.20; use the builtin float.
        if not isinstance(val, (int, float, list, NP.ndarray)):
            raise TypeError('Input {0} must be a scalar, float, list or numpy array'.format(name))
        if isinstance(val, (int, float)):
            arr = NP.asarray(val, dtype=float).reshape(1,1,1)
        else:
            arr = NP.asarray(val, dtype=float)
        if NP.any(arr < 0.0):
            raise ValueError('Value(s) in {0} cannot be negative'.format(name))
        if (arr.ndim != 3) or any(ax not in (1, full) for ax, full in zip(arr.shape, (nbl, nchan, ntimes))):
            raise IndexError(shape_errmsg)
        return arr

    Tsys = _validated_array(Tsys, 'Tsys', 'System temperature specified has incompatible dimensions')
    A_eff = _validated_array(A_eff, 'A_eff', 'Effective area specified has incompatible dimensions')
    # Bug fix: the original raised "Effective area specified has incompatible
    # dimensions" for a badly shaped eff_Q as well (copy-paste error).
    eff_Q = _validated_array(eff_Q, 'eff_Q', 'Efficiency specified has incompatible dimensions')

    if not isinstance(flux_unit, str):
        raise TypeError('Input flux_unit must be a string')
    if flux_unit.lower() not in ['k', 'jy']:
        raise ValueError('Input flux_unit must be set to K or Jy')

    if flux_unit.lower() == 'k':
        # Radiometer equation in temperature units
        rms = Tsys/eff_Q/NP.sqrt(dt*df)
    else:
        # Convert to flux density via 2 k_B / A_eff, expressed in Jy
        rms = 2.0 * FCNST.k / NP.sqrt(dt*df) * (Tsys/A_eff/eff_Q) / CNST.Jy
    return rms
################################################################################
def generateNoise(noiseRMS=None, A_eff=None, df=None, dt=None, Tsys=None, nbl=1,
                  nchan=1, ntimes=1, flux_unit='Jy', eff_Q=None):
    """
    -------------------------------------------------------------------------
    Generates thermal noise from instrument parameters for a complex-valued
    visibility measurement from an interferometer.

    [Based on equations 9-12 through 9-15 or section 5 in chapter 9 on
    Sensitivity in SIRA II wherein the equations are for real and imaginary
    parts separately.]

    noiseRMS   [NoneType, scalar or numpy array] If None (default), the RMS
               is derived from the remaining instrument parameters via
               thermalNoiseRMS(). Otherwise it is used directly and the
               other parameters are ignored for the RMS. An array must have
               a shape broadcastable to (nbl,nchan,ntimes), i.e. three axes
               each of length 1 or the full length. It is the total RMS over
               real and imaginary parts, so 1/sqrt(2) of it goes into each.

    A_eff, df, dt, Tsys, flux_unit, eff_Q
               Passed through to thermalNoiseRMS() when noiseRMS is None;
               see that function for details.

    nbl        [integer] Number of baseline vectors. Default=1

    nchan      [integer] Number of frequency channels. Default=1

    ntimes     [integer] Number of time stamps. Default=1

    Output:

    Complex numpy array of shape (nbl, nchan, ntimes) of zero-mean Gaussian
    noise with the specified RMS (units of noiseRMS if given, else per
    flux_unit).
    -------------------------------------------------------------------------
    """
    if noiseRMS is None:
        noiseRMS = thermalNoiseRMS(A_eff, df, dt, Tsys, nbl=nbl, nchan=nchan, ntimes=ntimes, flux_unit=flux_unit, eff_Q=eff_Q)
    else:
        if not isinstance(noiseRMS, (int,float,list,NP.ndarray)):
            raise TypeError('Input noiseRMS must be a scalar, float, list or numpy array')
        if isinstance(noiseRMS, (int,float)):
            # NOTE: NP.float was removed in numpy>=1.20; use the builtin float.
            noiseRMS = NP.asarray(noiseRMS, dtype=float).reshape(1,1,1)
        else:
            noiseRMS = NP.asarray(noiseRMS, dtype=float)
        if NP.any(noiseRMS < 0.0):
            raise ValueError('Value(s) in noiseRMS cannot be negative')
        # Shape must broadcast to (nbl,nchan,ntimes): 3 axes, each 1 or full.
        if (noiseRMS.ndim != 3) or any(ax not in (1, full) for ax, full in zip(noiseRMS.shape, (nbl, nchan, ntimes))):
            raise IndexError('Noise RMS specified has incompatible dimensions')

    # sqrt(2.0) splits equal uncertainty into real and imaginary parts
    return noiseRMS / NP.sqrt(2.0) * (NP.random.randn(nbl,nchan,ntimes) + 1j * NP.random.randn(nbl,nchan,ntimes))
################################################################################
def read_gaintable(gainsfile, axes_order=None):
"""
---------------------------------------------------------------------------
Read gain table from file and return
Input:
gainsfile [string] Filename including the full path that contains the
instrument gains. It must be in HDF5 format. It must contain
the following structure:
'antenna-based' [dictionary] Contains antenna-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key 'ordering'.
If there is no variations in
gains along an axis, then the
corresponding nax may be set
to 1 and the gains will be
replicated along that axis
using numpy array broadcasting.
For example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as a
scalar, it will be replicated
along all three axes, namely,
'label', 'frequency' and 'time'
'label' [None or list or numpy array]
List of antenna labels that
correspond to the nax along
the 'label' axis. If the
nax=1 along the 'label' axis,
this may be set to None, else
it must be specified and must
match the nax.
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nax=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nax.
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
nax=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the nax. It must be
a float and can be in seconds,
hours, days, etc.
'baseline-based' [dictionary] Contains baseline-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key 'ordering'.
If there is no variations in
gains along an axis, then the
corresponding nax may be set
to 1 and the gains will be
replicated along that axis
using numpy array broadcasting.
For example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as a
scalar, it will be replicated
along all three axes, namely,
'label', 'frequency' and 'time'
'label' [None or list or numpy array]
List of baseline labels that
correspond to the nax along
the 'label' axis. If the
nax=1 along the 'label' axis
this may be set to None, else
it must be specified and must
match the nax.
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nax=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nax.
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
nax=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the nax. It must be
a float and can be in seconds,
hours, days, etc.
axes_order [None or list or numpy array] The gaintable which is read is
stored in this axes ordering. If set to None, it will store in
this order ['label', 'frequency', 'time']
Output:
gaintable [None or dictionary] If set to None, all antenna- and baseline-
based gains must be set to unity. If returned as dictionary, it
contains the loaded gains. It contains the following keys and
values:
'antenna-based' [None or dictionary] Contains antenna-based
instrument gain information. If set to None,
all antenna-based gains are set to unity.
If returned as dictionary, it has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as specified
in input axes_order
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nant, nchan, nts)
If there is no variations in
gains along an axis, then the
corresponding nax may be set
to 1 and the gains will be
replicated along that axis
using numpy array broadcasting.
For example, shapes (nant,1,1),
(1,1,1), (1,nchan,nts) are
acceptable. If specified as a
scalar, it will be replicated
along all three axes, namely,
'label', 'frequency' and 'time'
'label' [None or list or numpy array]
List of antenna labels that
correspond to nant along
the 'label' axis. If nant=1,
this may be set to None, else
it will be specified and will
match the nant.
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nchan=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nchan.
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
ntimes=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the ntimes. It will
be a float and in same units as
given in input
'baseline-based' [None or dictionary] Contains baseline-based
instrument gain information. If set to None,
all baseline-based gains are set to unity.
If returned as dictionary, it has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as specified
in input axes_order
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nbl, nchan, nts)
If there is no variations in
gains along an axis, then the
corresponding nax may be set
to 1 and the gains will be
replicated along that axis
using numpy array broadcasting.
For example, shapes (nbl,1,1),
(1,1,1), (1,nchan,nts) are
acceptable. If specified as a
scalar, it will be replicated
along all three axes, namely,
'label', 'frequency' and 'time'
'label' [None or list or numpy array]
List of baseline labels that
correspond to nbl along the
'label' axis. If nbl=1 along
the 'label' axis this may be
set to None, else it will be
specified and will match nbl.
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nchan=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nchan.
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
ntimes=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the ntimes. It will
be a float and in same units as
given in input
---------------------------------------------------------------------------
"""
if axes_order is None:
axes_order = ['label', 'frequency', 'time']
elif not isinstance(axes_order, (list, NP.ndarray)):
raise TypeError('axes_order must be a list')
else:
if len(axes_order) != 3:
raise ValueError('axes_order must be a three element list')
for orderkey in ['label', 'frequency', 'time']:
if orderkey not in axes_order:
raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
gaintable = {}
try:
with h5py.File(gainsfile, 'r') as fileobj:
for gainkey in fileobj:
try:
gaintable[gainkey] = {}
grp = fileobj[gainkey]
if isinstance(grp['gains'].value, (NP.float32, NP.float64, NP.complex64, NP.complex128)):
gaintable[gainkey]['gains'] = NP.asarray(grp['gains'].value).reshape(1,1,1)
elif isinstance(grp['gains'].value, NP.ndarray):
if 'ordering' in grp:
ordering = list(grp['ordering'].value)
else:
raise KeyError('Axes ordering for gains not specified')
if len(ordering) != 3:
raise ValueError('Ordering must contain three elements')
elif ('time' not in ordering) or ('label' not in ordering) or ('frequency' not in ordering):
raise ValueError('Required elements not found in ordering of instrument gains')
else:
if grp['gains'].value.ndim == 3:
transpose_order = NMO.find_list_in_list(ordering, axes_order)
gaintable[gainkey]['gains'] = NP.transpose(grp['gains'].value, axes=transpose_order)
for subkey in ['time', 'label', 'frequency']:
gaintable[gainkey][subkey] = None
if isinstance(grp[subkey].value, NP.ndarray):
if gaintable[gainkey]['gains'].shape[axes_order.index(subkey)] > 1:
if subkey not in grp:
raise KeyError('Key "{0}" not specified'.format(subkey))
else:
if not isinstance(grp[subkey].value, (list, NP.ndarray)):
raise TypeError('"{0} key must be specified as a list or numpy array'.format(subkey))
gaintable[gainkey][subkey] = NP.asarray(grp[subkey].value).ravel()
if gaintable[gainkey][subkey].size != gaintable[gainkey]['gains'].shape[axes_order.index(subkey)]:
raise ValueError('List of labels and the gains do not match in dimensions')
else:
raise TypeError('Value of key "{0}" in {1} gains must be a numpy array'.format(subkey, gainkey))
else:
raise ValueError('Gains array must be three-dimensional. Use fake dimension if there is no variation along any particular axis.')
else:
warnings.warn('Invalid data type specified for {0} instrument gains. Proceeding with defaults (unity gains)'.format(gainkey))
gaintable[gainkey]['ordering'] = axes_order
except KeyError:
warnings.warn('No info found on {0} instrument gains. Proceeding with defaults (unity gains)'.format(gainkey))
except IOError:
warnings.warn('Invalid file specified for instrument gains. Proceeding with defaults (unity gains)')
gaintable = None
if not gaintable:
gaintable = None
return gaintable
################################################################################
def extract_gains(gaintable, bl_labels, freq_index=None, time_index=None,
                  axes_order=None):
    """
    ---------------------------------------------------------------------------
    Extract complex instrument gains for given baselines from the gain table.

    Inputs:

    gaintable   [None or dictionary] If set to None, all antenna- and baseline-
                based gains must be set to unity. If returned as dictionary, it
                contains the loaded gains. It contains the following keys and
                values:
                'antenna-based'     [None or dictionary] Contains antenna-based
                                    instrument gain information. If set to None,
                                    all antenna-based gains are set to unity.
                                    If returned as dictionary, it has the
                                    following keys and values:
                                    'ordering'    [list or numpy array] Three
                                                  element list of strings
                                                  indicating the ordering of
                                                  axes - 'time', 'label',
                                                  and 'frequency'. Must be
                                                  specified (no defaults)
                                    'gains'       [scalar or numpy array]
                                                  Complex antenna-based
                                                  instrument gains. Must be
                                                  of shape (nant, nchan, nts)
                                                  If there is no variations in
                                                  gains along an axis, then the
                                                  corresponding nax may be set
                                                  to 1 and the gains will be
                                                  replicated along that axis
                                                  using numpy array broadcasting.
                                                  For example, shapes (nant,1,1),
                                                  (1,1,1), (1,nchan,nts) are
                                                  acceptable. If specified as a
                                                  scalar, it will be replicated
                                                  along all three axes, namely,
                                                  'label', 'frequency' and
                                                  'time'.
                                    'label'       [None or list or numpy array]
                                                  List of antenna labels that
                                                  correspond to nant along
                                                  the 'label' axis. If nant=1,
                                                  this may be set to None, else
                                                  it will be specified and will
                                                  match the nant.
                                    'frequency'   [None or list or numpy array]
                                                  Frequency channels that
                                                  correspond to the nax along
                                                  the 'frequency' axis. If the
                                                  nchan=1 along the 'frequency'
                                                  axis, this may be set to None,
                                                  else it must be specified and
                                                  must match the nchan
                                    'time'        [None or list or numpy array]
                                                  Observation times that
                                                  correspond to the nax along
                                                  the 'time' axis. If the
                                                  ntimes=1 along the 'time'
                                                  axis, this may be set to None,
                                                  else it must be specified and
                                                  must match the ntimes. It must
                                                  be a float and can be in
                                                  seconds, hours, days, etc.
                'baseline-based'    [None or dictionary] Contains baseline-based
                                    instrument gain information. If set to None,
                                    all baseline-based gains are set to unity.
                                    If returned as dictionary, it has the
                                    following keys and values:
                                    'ordering'    [list or numpy array] Three
                                                  element list of strings
                                                  indicating the ordering of
                                                  axes - 'time', 'label',
                                                  and 'frequency'. Must be
                                                  specified (no defaults)
                                    'gains'       [scalar or numpy array]
                                                  Complex baseline-based
                                                  instrument gains. Must be
                                                  of shape (nbl, nchan, nts)
                                                  If there is no variations in
                                                  gains along an axis, then the
                                                  corresponding nax may be set
                                                  to 1 and the gains will be
                                                  replicated along that axis
                                                  using numpy array broadcasting.
                                                  For example, shapes (nbl,1,1),
                                                  (1,1,1), (1,nchan,nts) are
                                                  acceptable. If specified as a
                                                  scalar, it will be replicated
                                                  along all three axes, namely,
                                                  'label', 'frequency' and
                                                  'time'.
                                    'label'       [None or list or numpy array]
                                                  List of baseline labels that
                                                  correspond to nbl along
                                                  the 'label' axis. If nbl=1
                                                  along the 'label' axis
                                                  this may be set to None, else
                                                  it will be specified and will
                                                  match nbl.
                                    'frequency'   [None or list or numpy array]
                                                  Frequency channels that
                                                  correspond to the nax along
                                                  the 'frequency' axis. If the
                                                  nchan=1 along the 'frequency'
                                                  axis, this may be set to None,
                                                  else it must be specified and
                                                  must match the nchan
                                    'time'        [None or list or numpy array]
                                                  Observation times that
                                                  correspond to the nax along
                                                  the 'time' axis. If the
                                                  ntimes=1 along the 'time'
                                                  axis, this may be set to None,
                                                  else it must be specified and
                                                  must match the ntimes. It must
                                                  be a float and can be in
                                                  seconds, hours, days, etc.

    bl_labels   [Numpy structured array tuples] Labels of antennas in the pair
                used to produce the baseline vector under fields 'A2' and 'A1'
                for second and first antenna respectively. The baseline vector
                is obtained by position of antennas under 'A2' minus position
                of antennas under 'A1'

    freq_index  [None, int, list or numpy array] Index (scalar) or indices
                (list or numpy array) along the frequency axis at which gains
                are to be extracted. If set to None, gains at all frequencies
                in the gain table will be extracted.

    time_index  [None, int, list or numpy array] Index (scalar) or indices
                (list or numpy array) along the time axis at which gains
                are to be extracted. If set to None, gains at all times in the
                gain table will be extracted.

    axes_order  [None or list or numpy array] Axes ordering for extracted
                gains. It must contain the three elements 'label',
                'frequency', and 'time'. If set to None, it will be returned
                in the same order as in the input gaintable.

    Outputs:

    [numpy array] Complex gains of shape nbl x nchan x nts for the specified
    baselines, frequencies and times.
    ---------------------------------------------------------------------------
    """
    try:
        gaintable, bl_labels
    except NameError:
        raise NameError('Inputs gaintable and bl_labels must be specified')
    # Unity gain, shaped (1,1,1) so it broadcasts against any (nbl,nchan,nts)
    blgains = NP.asarray(1.0).reshape(1,1,1)
    if gaintable is not None:
        a1_labels = bl_labels['A1']
        a2_labels = bl_labels['A2']
        # Antenna-based and baseline-based gains multiply cumulatively into blgains
        for gainkey in ['antenna-based', 'baseline-based']:
            if gainkey in gaintable:
                # Bring gains into canonical (label, frequency, time) order first
                temp_axes_order = ['label', 'frequency', 'time']
                inp_order = gaintable[gainkey]['ordering']
                temp_transpose_order = NMO.find_list_in_list(inp_order, temp_axes_order)
                if NP.all(inp_order == temp_axes_order):
                    gains = NP.copy(gaintable[gainkey]['gains'])
                else:
                    gains = NP.transpose(NP.copy(gaintable[gainkey]['gains']), axes=temp_transpose_order)
                # NOTE(review): freq_index/time_index resolved on the first
                # gainkey are reused for the second; this assumes both entries
                # share nchan/nts (or compatible shapes) -- confirm
                if freq_index is None:
                    freq_index = NP.arange(gains.shape[1])
                elif isinstance(freq_index, (int,list,NP.ndarray)):
                    freq_index = NP.asarray(freq_index).ravel()
                    if NP.any(freq_index >= gains.shape[1]):
                        raise IndexError('Input freq_index cannot exceed the frequency dimensions in the gain table')
                if time_index is None:
                    time_index = NP.arange(gains.shape[2])
                elif isinstance(time_index, (int,list,NP.ndarray)):
                    time_index = NP.asarray(time_index).ravel()
                    if NP.any(time_index >= gains.shape[2]):
                        raise IndexError('Input time_index cannot exceed the time dimensions in the gain table')
                if gains.shape[0] == 1:
                    # Single label: gain applies identically to every baseline
                    # NOTE(review): gains[:,freq_index,time_index] is paired
                    # fancy indexing, not an outer product; it requires
                    # freq_index and time_index sizes to be broadcast
                    # compatible -- NP.ix_ may have been intended; confirm
                    blgains = blgains * gains[:,freq_index,time_index].reshape(1,freq_index.size,time_index.size)
                else:
                    labels = gaintable[gainkey]['label']
                    if gainkey == 'antenna-based':
                        # Net antenna gain for baseline (A2,A1) is g(A2) * conj(g(A1))
                        ind1 = NMO.find_list_in_list(labels, a1_labels)
                        ind2 = NMO.find_list_in_list(labels, a2_labels)
                        if NP.sum(ind1.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        if NP.sum(ind2.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        blgains = blgains * gains[NP.ix_(ind2,freq_index,time_index)].reshape(ind2.size,freq_index.size,time_index.size) * gains[NP.ix_(ind1,freq_index,time_index)].conj().reshape(ind1.size,freq_index.size,time_index.size)
                    else:
                        # Baseline-based: also match reversed (A1,A2) labels,
                        # whose gains are the conjugates of the tabulated ones
                        labels_conj = [tuple(reversed(label)) for label in labels]
                        labels_conj = NP.asarray(labels_conj, dtype=labels.dtype)
                        labels_conj_appended = NP.concatenate((labels, labels_conj), axis=0)
                        gains_conj_appended = NP.concatenate((gains, gains.conj()), axis=0)
                        ind = NMO.find_list_in_list(labels_conj_appended, bl_labels)
                        selected_gains = gains_conj_appended[NP.ix_(ind.compressed(),freq_index,time_index)]
                        if ind.compressed().size == 1:
                            selected_gains = selected_gains.reshape(NP.sum(~ind.mask),freq_index.size,time_index.size)
                        # Baselines with no matching entry keep their current gain
                        blgains[~ind.mask, ...] = blgains[~ind.mask, ...] * selected_gains
    # Reorder output axes as requested (default: same order as the input table)
    # NOTE(review): inp_order is only bound inside the loop above; reaching
    # here with gaintable=None or an empty table raises NameError -- confirm
    if axes_order is None:
        axes_order = inp_order
    elif not isinstance(axes_order, (list, NP.ndarray)):
        raise TypeError('axes_order must be a list')
    else:
        if len(axes_order) != 3:
            raise ValueError('axes_order must be a three element list')
        for orderkey in ['label', 'frequency', 'time']:
            if orderkey not in axes_order:
                raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
    transpose_order = NMO.find_list_in_list(inp_order, axes_order)
    blgains = NP.transpose(blgains, axes=transpose_order)
    return blgains
################################################################################
def hexagon_generator(spacing, n_total=None, n_side=None, orientation=None,
                      center=None):
    """
    ------------------------------------------------------------------------
    Generate a grid of antenna locations filling a regular hexagon.
    Primarily intended for HERA experiment.

    Inputs:

    spacing     [scalar] positive scalar specifying the spacing between
                antennas. Must be specified, no default.

    n_total     [scalar] positive integer specifying the total number of
                antennas to be placed in the hexagonal array. This value
                will be checked if it is valid for a regular hexagon. If
                n_total is specified, n_side must not be specified.
                Default = None.

    n_side      [scalar] positive integer specifying the number of antennas
                on the side of the hexagonal array. If n_side is specified,
                n_total should not be specified. Default = None

    orientation [scalar] counter-clockwise angle (in degrees) by which the
                principal axis of the hexagonal array is to be rotated.
                Default = None (means 0 degrees)

    center      [2-element list or numpy array] specifies the center of the
                array. Must be in the same units as spacing. The hexagonal
                array will be centered on this position.

    Outputs:

    Two element tuple with these elements in the following order:

    xy          [2-column array] x- and y-locations. x is in the first
                column, y is in the second column. Number of xy-locations
                is equal to the number of rows which is equal to n_total

    id          [list of strings] unique antenna identifiers, '0' through
                str(n_total-1)

    Notes:

    If n_side is the number of antennas on the side of the hexagon, then
    n_total = 3*n_side**2 - 3*n_side + 1

    Fixes over the previous revision: zip() result is materialized as a
    list (the Py3 zip iterator has no len()), the identifier list is a
    real list (map() is an iterator on Py3), and the removed NumPy aliases
    NP.int / NP.float are replaced by the builtins.
    ------------------------------------------------------------------------
    """
    try:
        spacing
    except NameError:
        raise NameError('No spacing provided.')
    if not isinstance(spacing, (int, float)):
        raise TypeError('spacing must be scalar value')
    if spacing <= 0:
        raise ValueError('spacing must be positive')
    if orientation is not None:
        if not isinstance(orientation, (int, float)):
            raise TypeError('orientation must be a scalar')
    if center is not None:
        if not isinstance(center, (list, NP.ndarray)):
            raise TypeError('center must be a list or numpy array')
        center = NP.asarray(center)
        if center.size != 2:
            raise ValueError('center should be a 2-element vector')
        center = center.reshape(1,-1)
    if (n_total is None) and (n_side is None):
        raise NameError('n_total or n_side must be provided')
    elif (n_total is not None) and (n_side is not None):
        raise ValueError('Only one of n_total or n_side must be specified.')
    elif n_total is not None:
        if not isinstance(n_total, int):
            raise TypeError('n_total must be an integer')
        if n_total <= 0:
            raise ValueError('n_total must be positive')
    else:
        if not isinstance(n_side, int):
            raise TypeError('n_side must be an integer')
        if n_side <= 0:
            raise ValueError('n_side must be positive')
    if n_total is not None:
        # Solve 3*n_side**2 - 3*n_side + (1 - n_total) = 0 for n_side
        sqroots = NP.roots([3.0, -3.0, 1.0-n_total])
        valid_ind = NP.logical_and(sqroots.real >= 1, sqroots.imag == 0.0)
        if not NP.any(valid_ind):
            raise ValueError('No valid root found for the quadratic equation with the specified n_total')
        # Take the scalar real root so range() below accepts n_side;
        # builtin int replaces the removed NP.int alias
        n_side = int(NP.round(sqroots[valid_ind][0].real))
        if (3*n_side**2 - 3*n_side + 1 != n_total):
            raise ValueError('n_total is not a valid number for a hexagonal array')
    else:
        n_total = 3*n_side**2 - 3*n_side + 1
    xref = NP.arange(2*n_side-1, dtype=float)
    xloc, yloc = [], []
    for i in range(1,n_side):
        # Row i above/below the center: one fewer antenna, displaced by i*cos(60)
        x = xref[:-i] + i * NP.cos(NP.pi/3)
        y = i*NP.sin(NP.pi/3) * NP.ones(2*n_side-1-i)
        xloc += x.tolist() * 2   # same x for the mirrored top and bottom rows
        yloc += y.tolist()       # top row
        yloc += (-y).tolist()    # bottom row
    xloc += xref.tolist()              # central row of antennas
    yloc += [0.0] * int(2*n_side-1)
    if len(xloc) != len(yloc):
        raise ValueError('Sizes of x- and y-locations do not agree')
    xy = list(zip(xloc, yloc))   # list() needed: Py3 zip object has no len()
    if len(xy) != n_total:
        raise ValueError('Sizes of x- and y-locations do not agree with n_total')
    xy = NP.asarray(xy)
    xy = xy - NP.mean(xy, axis=0, keepdims=True)   # shift the center to origin
    if orientation is not None:                    # perform any rotation
        angle = NP.radians(orientation)
        rot_matrix = NP.asarray([[NP.cos(angle), -NP.sin(angle)],
                                 [NP.sin(angle),  NP.cos(angle)]])
        xy = NP.dot(xy, rot_matrix.T)
    xy *= spacing                                  # scale by the spacing
    if center is not None:                         # shift the center
        xy += center
    return (NP.asarray(xy), [str(i) for i in range(n_total)])
################################################################################
def rectangle_generator(spacing, n_side, orientation=None, center=None):
    """
    ------------------------------------------------------------------------
    Generate a grid of antenna locations filling a rectangular array.
    Primarily intended for HIRAX, CHIME and PAPER experiments

    Inputs:

    spacing     [scalar or 2-element list/numpy array] positive values
                specifying the spacing between antennas along the x- and
                y-axes. A scalar is used for both axes. Must be specified,
                no default.

    n_side      [scalar or 2-element list/numpy array] positive integers
                specifying the number of antennas along the x- and y-axes.
                A scalar is used for both axes. Must be specified, no
                default.

    orientation [scalar] counter-clockwise angle (in degrees) by which the
                principal axis of the rectangular array is to be rotated.
                Default = None (means 0 degrees)

    center      [2-element list or numpy array] specifies the center of the
                array. Must be in the same units as spacing. The rectangular
                array will be centered on this position.

    Outputs:

    Two element tuple with these elements in the following order:

    xy          [2-column array] x- and y-locations. x is in the first
                column, y is in the second column. Number of xy-locations
                is equal to the number of rows which is equal to n_total

    id          [list of strings] unique antenna identifiers, '0' through
                str(n_total-1)

    Notes:

    Fixes over the previous revision: removed the dead
    NP.prod(..., dtype=NP.uint8) which silently overflowed above 255
    antennas, replaced the removed NP.asscalar with ndarray.item(),
    rejected any non-positive spacing/n_side component (previously only
    rejected when ALL components were non-positive), and returned a real
    list of identifiers (map() is an iterator on Py3).
    ------------------------------------------------------------------------
    """
    try:
        spacing
    except NameError:
        raise NameError('No spacing provided.')
    if spacing is not None:
        if not isinstance(spacing, (int, float, list, NP.ndarray)):
            raise TypeError('spacing must be a scalar or list/numpy array')
        spacing = NP.asarray(spacing)
        if spacing.size < 2:
            spacing = NP.resize(spacing,(1,2))   # replicate scalar to both axes
        if NP.any(NP.less_equal(spacing,NP.zeros((1,2)))):
            raise ValueError('spacing must be positive')
    if orientation is not None:
        if not isinstance(orientation, (int, float)):
            raise TypeError('orientation must be a scalar')
    if center is not None:
        if not isinstance(center, (list, NP.ndarray)):
            raise TypeError('center must be a list or numpy array')
        center = NP.asarray(center)
        if center.size != 2:
            raise ValueError('center should be a 2-element vector')
        center = center.reshape(1,-1)
    if n_side is None:
        raise NameError('Atleast one value of n_side must be provided')
    if not isinstance(n_side, (int, float, list, NP.ndarray)):
        raise TypeError('n_side must be a scalar or list/numpy array')
    n_side = NP.asarray(n_side)
    if n_side.size < 2:
        n_side = NP.resize(n_side,(1,2))         # replicate scalar to both axes
    if NP.any(NP.less_equal(n_side,NP.zeros((1,2)))):
        raise ValueError('n_side must be positive')
    # .item() replaces NP.asscalar (removed in NumPy 1.23); int() so that
    # NP.linspace receives an integer count
    xn, yn = NP.hsplit(n_side,2)
    xn = int(xn.item())
    yn = int(yn.item())
    xs, ys = NP.hsplit(spacing,2)
    xs = xs.item()
    ys = ys.item()
    n_total = xn * yn
    # Regularly spaced, zero-mean coordinates along each axis
    x = NP.linspace(0, xn-1, xn)
    x = (x - NP.mean(x)) * xs
    y = NP.linspace(0, yn-1, yn)
    y = (y - NP.mean(y)) * ys
    xv, yv = NP.meshgrid(x,y)
    xy = NP.hstack((xv.reshape(-1,1),yv.reshape(-1,1)))
    if len(xy) != n_total:
        raise ValueError('Sizes of x- and y-locations do not agree with n_total')
    if orientation is not None:   # perform any rotation
        angle = NP.radians(orientation)
        rot_matrix = NP.asarray([[NP.cos(angle), -NP.sin(angle)],
                                 [NP.sin(angle),  NP.cos(angle)]])
        xy = NP.dot(xy, rot_matrix.T)
    if center is not None:        # shift the center
        xy += center
    return (NP.asarray(xy), [str(i) for i in range(n_total)])
################################################################################
def circular_antenna_array(antsize, minR, maxR=None):
    """
    ---------------------------------------------------------------------------
    Create antenna layout in a circular ring of minimum and maximum radius with
    antennas of a given size

    Inputs:

    antsize  [scalar] Antenna size. Critical to determining number of antenna
             elements that can be placed on a circle. No default.

    minR     [scalar] Minimum radius of the circular ring. Must be in same
             units as antsize. No default. Must be greater than 0.5*antsize.

    maxR     [scalar] Maximum radius of circular ring. Must be >= minR.
             Default=None means maxR is set equal to minR.

    Outputs:

    Two element tuple with these elements in the following order:

    xy       [2-column numpy array] Antenna locations in the same units as
             antsize returned as a 2-column numpy array where the number of
             rows equals the number of antenna locations generated and x,
             and y locations make the two columns.

    id       [list of strings] unique antenna identifiers, '0' through
             str(n_antennas-1)

    Notes:

    Fixes over the previous revision: builtin int replaces the removed
    NP.int alias and the identifier list is a real list (map() is an
    iterator on Py3).
    ---------------------------------------------------------------------------
    """
    try:
        antsize, minR
    except NameError:
        raise NameError('antsize, and minR must be specified')
    if (antsize is None) or (minR is None):
        raise ValueError('antsize and minR cannot be NoneType')
    if not isinstance(antsize, (int, float)):
        raise TypeError('antsize must be a scalar')
    if antsize <= 0.0:
        raise ValueError('antsize must be positive')
    if not isinstance(minR, (int, float)):
        raise TypeError('minR must be a scalar')
    if minR <= 0.0:
        raise ValueError('minR must be positive')
    if minR < 0.5*antsize:
        minR = 0.5*antsize
    if maxR is None:
        maxR = minR
    if not isinstance(maxR, (int, float)):
        raise TypeError('maxR must be a scalar')
    elif maxR < minR:
        maxR = minR
    # Concentric rings separated by one antenna size; a single ring if the
    # radial extent cannot accommodate more
    if maxR - minR < antsize:
        radii = minR + NP.zeros(1)
    else:
        radii = minR + antsize * NP.arange((maxR-minR)/antsize)
    # Number of antennas that fit on each ring's circumference
    nants = (2 * NP.pi * radii / antsize).astype(int)
    xpos, ypos = [], []
    for r, n in zip(radii, nants):
        theta = 2 * NP.pi * NP.arange(n) / n   # uniformly spaced around the ring
        xpos += (r * NP.cos(theta)).tolist()
        ypos += (r * NP.sin(theta)).tolist()
    x = NP.asarray(xpos)
    y = NP.asarray(ypos)
    xy = NP.hstack((x.reshape(-1,1), y.reshape(-1,1)))
    return (xy, [str(i) for i in range(int(NP.sum(nants)))])
################################################################################
def baseline_generator(antenna_locations, ant_label=None, ant_id=None,
                       auto=False, conjugate=False):
    """
    ---------------------------------------------------------------------------
    Generate baselines from antenna locations.

    Inputs:

    antenna_locations: List of tuples containing antenna coordinates,
                       or list of instances of class Point containing
                       antenna coordinates, or Numpy array (Nx3) array
                       with each row specifying an antenna location.

    Input keywords:

    ant_label   [list of strings] Unique string identifier for each
                antenna. Default = None. If None provided, antennas will
                be indexed by an integer starting from 0 to N(ants)-1

    ant_id      [list of integers] Unique integer identifier for each
                antenna. Default = None. If None provided, antennas will
                be indexed by an integer starting from 0 to N(ants)-1

    auto        [Default=False] If True, compute zero spacings of
                antennas with themselves.

    conjugate   [Default=False] If True, compute conjugate baselines.

    Output:

    baseline_locations: Baseline locations in the same data type as
                        antenna locations (list of tuples, list of
                        instances of class Point or Numpy array of size
                        Nb x 3 with each row specifying one baseline
                        vector)

    antpair_labels      [Numpy structured array tuples] Labels of antennas
                        in the pair used to produce the baseline vector
                        under fields 'A2' and 'A1' for second and first
                        antenna respectively. The baseline vector is
                        obtained by position of antennas under 'A2' minus
                        position of antennas under 'A1'

    antpair_ids         [Numpy structured array tuples] IDs of antennas in
                        the pair used to produce the baseline vector under
                        fields 'A2' and 'A1' for second and first antenna
                        respectively. The baseline vector is obtained by
                        position of antennas under 'A2' minus position of
                        antennas under 'A1'

    Notes:

    Fixes over the previous revision: xrange (Py2-only) replaced by range,
    the single-antenna numpy-array return NP.zeros(1,3) (TypeError: 3 is
    not a dtype) corrected to NP.zeros((1,3)), an unrecognized list
    element type now raises TypeError instead of a later NameError, and
    the triplicated pair-building comprehensions are generated once from
    a shared list of index pairs.
    ---------------------------------------------------------------------------
    """
    try:
        antenna_locations
    except NameError:
        warnings.warn('No antenna locations supplied. Returning from baseline_generator()')
        return None
    # Classify the input container and normalize locations to 3 components
    inp_type = 'tbd'
    if not isinstance(antenna_locations, NP.ndarray):
        if isinstance(antenna_locations, list):
            if isinstance(antenna_locations[0], GEOM.Point):
                inp_type = 'loo'   # list of objects
            elif isinstance(antenna_locations[0], tuple):
                inp_type = 'lot'   # list of tuples
                # Remove empty tuples and pad every location to 3 components
                antenna_locations = [(tuple(loc) if len(loc) == 3 else (tuple([loc[0],0.0,0.0]) if len(loc) == 1 else (tuple([loc[0],loc[1],0.0]) if len(loc) == 2 else (tuple([loc[0],loc[1],loc[2]]))))) for loc in antenna_locations if len(loc) != 0]
        elif isinstance(antenna_locations, GEOM.Point):
            if not auto:
                warnings.warn('No non-zero spacings found since auto=False.')
                return None
            else:
                return GEOM.Point()
        elif isinstance(antenna_locations, tuple):
            if not auto:
                warnings.warn('No non-zero spacings found since auto=False.')
                return None
            else:
                return (0.0,0.0,0.0)
        else:
            if not auto:
                warnings.warn('No non-zero spacings found since auto=False.')
                return None
            else:
                return (0.0,0.0,0.0)
    else:
        inp_type = 'npa'   # a numpy array
        if antenna_locations.shape[0] == 1:
            if not auto:
                warnings.warn('No non-zero spacings found since auto=False.')
                return None
            else:
                # Fixed: NP.zeros(1,3) passed 3 as a dtype and raised TypeError
                return NP.zeros((1,3))
        else:
            # Truncate or zero-pad columns so every location has 3 components
            if antenna_locations.shape[1] > 3:
                antenna_locations = antenna_locations[:,:3]
            elif antenna_locations.shape[1] < 3:
                antenna_locations = NP.hstack((antenna_locations, NP.zeros((antenna_locations.shape[0],3-antenna_locations.shape[1]))))
    if isinstance(antenna_locations, list):
        num_ants = len(antenna_locations)
    else:
        num_ants = antenna_locations.shape[0]
    # Validate or default the per-antenna labels and integer ids
    if ant_label is not None:
        if isinstance(ant_label, list):
            if len(ant_label) != num_ants:
                raise ValueError('Dimensions of ant_label and antenna_locations do not match.')
        elif isinstance(ant_label, NP.ndarray):
            if ant_label.size != num_ants:
                raise ValueError('Dimensions of ant_label and antenna_locations do not match.')
            ant_label = ant_label.tolist()
    else:
        ant_label = ['{0:0d}'.format(i) for i in range(num_ants)]
    if ant_id is not None:
        if isinstance(ant_id, list):
            if len(ant_id) != num_ants:
                raise ValueError('Dimensions of ant_id and antenna_locations do not match.')
        elif isinstance(ant_id, NP.ndarray):
            if ant_id.size != num_ants:
                raise ValueError('Dimensions of ant_id and antenna_locations do not match.')
            ant_id = ant_id.tolist()
    else:
        ant_id = list(range(num_ants))
    # Build the (second, first) antenna index pairs once; the same pair list
    # drives locations, labels and ids so they always stay aligned
    if auto:
        pairs = [(j, i) for i in range(num_ants) for j in range(num_ants) if j >= i]
    else:
        pairs = [(j, i) for i in range(num_ants) for j in range(num_ants) if j > i]
    if conjugate:
        pairs += [(j, i) for i in range(num_ants) for j in range(num_ants) if j < i]
    # Baseline vector = position(A2) - position(A1), in the input's own type
    if inp_type == 'loo':
        baseline_locations = [antenna_locations[j] - antenna_locations[i] for j, i in pairs]
    elif inp_type == 'lot':
        baseline_locations = [tuple((antenna_locations[j][0]-antenna_locations[i][0],
                                     antenna_locations[j][1]-antenna_locations[i][1],
                                     antenna_locations[j][2]-antenna_locations[i][2])) for j, i in pairs]
    elif inp_type == 'npa':
        baseline_locations = [antenna_locations[j,:] - antenna_locations[i,:] for j, i in pairs]
    else:
        # Previously fell through and raised NameError on baseline_locations
        raise TypeError('antenna_locations must be a numpy array, a list of tuples, or a list of GEOM.Point instances')
    antpair_labels = [(ant_label[j], ant_label[i]) for j, i in pairs]
    antpair_ids = [(ant_id[j], ant_id[i]) for j, i in pairs]
    baseline_locations = NP.asarray(baseline_locations)
    maxlen = max(len(albl) for albl in ant_label)
    antpair_labels = NP.asarray(antpair_labels, dtype=[('A2', '|S{0:0d}'.format(maxlen)), ('A1', '|S{0:0d}'.format(maxlen))])
    antpair_ids = NP.asarray(antpair_ids, dtype=[('A2', int), ('A1', int)])
    return baseline_locations, antpair_labels, antpair_ids
#################################################################################
def uniq_baselines(baseline_locations, redundant=None):
    """
    ---------------------------------------------------------------------------
    Identify unique, redundant or non-redundant baselines from a given set of
    baseline locations.

    Inputs:

    baseline_locations [2- or 3-column numpy array] Each row of the array
                       specifies a baseline vector from which the required
                       set of baselines have to be identified

    redundant          [None or boolean] If set to None (default), all the
                       unique baselines including redundant and non-redundant
                       baselines are returned. If set to True, only redundant
                       baselines that occur more than once are returned. If set
                       to False, only non-redundant baselines that occur
                       exactly once are returned.

    Output:

    4-element tuple with the selected baselines, their unique indices in the
    input, their count and the indices of all occurences of each unique
    baseline. The first element of this tuple is a 3-column numpy array
    which is a subset of baseline_locations containing the requested type of
    baselines. The second element of the tuple contains the selected indices
    of the input array from which the first element in the tuple is determined
    relative to the input array. The third element of the tuple contains the
    count of these selected baselines. In case of redundant and unique
    baselines, the order of repeated baselines does not matter and any one of
    those baselines could be returned without preserving the order. The fourth
    element in the tuple contains a list of lists where each element in the
    top level list corresponds to a unique baseline and consists of indices
    of all occurrences of input baselines redundant with this unique baseline

    Raises:

    NameError if baseline_locations is not provided, TypeError if inputs have
    invalid types.
    ---------------------------------------------------------------------------
    """

    try:
        baseline_locations
    except NameError:
        raise NameError('baseline_locations not provided')

    if not isinstance(baseline_locations, NP.ndarray):
        raise TypeError('baseline_locations must be a numpy array')

    if redundant is not None:
        if not isinstance(redundant, bool):
            raise TypeError('keyword "redundant" must be set to None or a boolean value')

    # Pad or truncate to exactly three columns (East, North, Up)
    blshape = baseline_locations.shape
    if blshape[1] > 3:
        baseline_locations = baseline_locations[:,:3]
    elif blshape[1] < 3:
        baseline_locations = NP.hstack((baseline_locations, NP.zeros((blshape[0],3-blshape[1]))))

    # Fold azimuthal orientation into [0, 180) degrees so that a baseline and
    # its conjugate map to the same string signature below
    blo = NP.angle(baseline_locations[:,0] + 1j * baseline_locations[:,1], deg=True)
    blo[blo >= 180.0] -= 180.0
    blo[blo < 0.0] += 180.0
    bll = NP.sqrt(NP.sum(baseline_locations**2, axis=1))
    blza = NP.degrees(NP.arccos(baseline_locations[:,2] / bll))

    # String signature "length_zenithangle_orientation" (angles scaled to
    # milli-arcsec-like precision) used to bin redundant baselines
    blstr = ['{0[0]:.2f}_{0[1]:.3f}_{0[2]:.3f}'.format(lo) for lo in zip(bll,3.6e3*blza,3.6e3*blo)]

    uniq_blstr, ind, invind = NP.unique(blstr, return_index=True, return_inverse=True)

    # Single-pass occurrence count (O(n)) instead of calling list.count() once
    # per unique signature (O(n^2)). Works on numpy versions that lack
    # return_counts in NP.unique (numpy < 1.9.0).
    from collections import Counter
    occurrences = Counter(blstr)
    counts_all = NP.asarray([occurrences[ubstr] for ubstr in uniq_blstr])

    if redundant is None:
        # All unique baselines, with their multiplicities
        retind = NP.copy(ind)
        counts = counts_all
    elif not redundant:
        # Only baselines that occur exactly once. NP.where() yields an integer
        # index array even when empty, avoiding the float-dtype indexing error
        # that NP.asarray([]) would produce.
        sel = NP.where(counts_all == 1)[0]
        retind = ind[sel]
        counts = NP.ones(retind.size)
    else:
        # Only baselines that occur more than once; also safe when no
        # redundancy exists (empty selection)
        sel = NP.where(counts_all > 1)[0]
        retind = ind[sel]
        counts = counts_all[sel]

    # For each selected unique baseline, gather indices of all input baselines
    # that share its signature
    allinds_where_found = NMO.find_all_occurrences_list1_in_list2(invind[retind], invind)

    return (baseline_locations[retind,:], retind, counts, allinds_where_found)
#################################################################################
def getBaselineInfo(inpdict):
    """
    ---------------------------------------------------------------------------
    Generate full baseline info from a given layout and return information
    about redundancy and the mapping between unique and redundant baselines
    Input:
    inpdict     [dictionary] It contains the following keys and values:
                'array'     [dictionary] It contains the following keys and values:
                            'redundant' [boolean] If this key is present, it says
                                        whether the array could be redundant (true)
                                        or not (false). If key is absent, this
                                        value is assumed to be true. When it is set
                                        to true, it basically checks for redundancy
                                        otherwise not. It is not meant to say if
                                        the array is actually redundant or not but
                                        only used for redundancy check to happen or
                                        not
                            'layout'    [string] Preset array layouts mutually
                                        exclusive to antenna file. Only one of
                                        these must be specified. Accepted
                                        values are 'MWA-I-128T'
                                        (MWA Phase I 128-tile),
                                        'MWA-II-Hex-LB' (MWA Phase II Hex
                                        and Long Baselines),
                                        'MWA-II-compact' (MWA Phase II
                                        compact=core + 2Hex baselines),
                                        'MWA-II-LB' (MWA Phase II Long
                                        Baselines), 'HERA-7', 'HERA-19',
                                        'HERA-37', 'HERA-61', 'HERA-91',
                                        'HERA-127', 'HERA-169', 'HERA-217',
                                        'HERA-271', 'HERA-331', 'PAPER-64',
                                        'PAPER-112', 'HIRAX-1024', 'CHIME', 'GMRT',
                                        'CIRC', or None (if layout file is
                                        specified).
                            'file'      [string] File containing antenna locations
                                        parsed according to info in parser (see
                                        below). If preset layout is specified, this
                                        must be set to None.
                            'filepathtype'
                                        [string] Accepted values are 'default' (if
                                        layout file can be found in prisim path,
                                        namely, prisim/data/array_layouts folder)
                                        and 'custom'. If set to 'default', only
                                        filename should be specified in file and it
                                        will be searched in the default
                                        array_layouts folder
                                        prisim/data/array_layouts.
                                        If set to 'custom' then the full path
                                        to the file must be specified.
                            'parser'    [dictionary] Will be used for parsing the
                                        file if file is specified for array layout.
                                        It contains the following keys and values:
                                        'comment'   [string] Character used to
                                                    denote commented lines to be
                                                    ignored. Default=None ('#')
                                        'delimiter' [string] Delimiter string.
                                                    Accepted values are whitespace
                                                    (default or None), ',' and '|'
                                        'data_start'
                                                    [integer] Line index for the
                                                    start of data not counting
                                                    comment or blank lines. A line
                                                    with only whitespace is
                                                    considered blank. It is
                                                    required. No defaults.
                                                    Indexing starts from 0
                                        'data_end'  [integer] Line index for the end
                                                    of data not counting comment or
                                                    blank lines. This value can be
                                                    negative to count from the end.
                                                    Default is None (all the way to
                                                    end of file). Indexing starts
                                                    from 0.
                                        'header_start'
                                                    [integer] Line index for the
                                                    header line not counting comment
                                                    or blank lines. A line with only
                                                    whitespace is considered blank.
                                                    Must be provided. No defaults
                                        'label'     [string] String in the header
                                                    containing antenna labels. If
                                                    set to None (default), antenna
                                                    labels will be automatically
                                                    assigned. e.g. of some accepted
                                                    values are None, 'label', 'id',
                                                    'antid', etc. This must be found
                                                    in the header
                                        'east'      [string] String specifying East
                                                    coordinates in the header and
                                                    data. Must be provided. No
                                                    defaults.
                                        'north'     [string] String specifying North
                                                    coordinates in the header and
                                                    data. Must be provided. No
                                                    defaults.
                                        'up'        [string] String specifying
                                                    elevation coordinates in the
                                                    header and data. Must be
                                                    provided. No defaults.
                            'minR'      [string] Minimum radius of circular ring.
                                        Applies only when layout = 'CIRC'
                            'maxR'      [string] Maximum radius of circular ring.
                                        Applies only when layout = 'CIRC'
                            'rms_tgtplane'
                                        [float] Perturbation of antenna positions
                                        (in m) in tangent plane. Default=0.0
                            'rms_elevation'
                                        [float] Perturbation of antenna positions
                                        (in m) in perpendicular to tangent plane.
                                        Default=0.0
                            'seed'      [integer] Random number seed for antenna
                                        position perturbations. Default=None means
                                        no fixed seed
                'baseline'  [dictionary] Parameters specifying baseline
                            selection criteria. It consists of the following keys
                            and values:
                            'min'       [float] Minimum baseline in distance
                                        units (m). Default=None (0.0)
                            'max'       [float] Maximum baseline in distance
                                        units (m). Default=None (max baseline)
                            'direction' [string] Baseline vector directions to
                                        select. Default=None (all directions).
                                        Other accepted values are 'E' (east)
                                        'SE' (south-east), 'NE' (north-east),
                                        and 'N' (north). Multiple values from
                                        this accepted list can be specified
                                        as a list of strings. e.g., ['N', 'E'],
                                        ['NE', 'SE', 'E'], ['SE', 'E', 'NE', 'N']
                                        which is equivalent to None, etc.
                'skyparm'   [dictionary] Sky model specification. It contains the
                            following keys and values:
                            'model'     [string] Sky model. Accepted values
                                        are 'csm' (NVSS+SUMSS point sources),
                                        'dsm' (diffuse emission), 'asm' (both
                                        point sources and diffuse emission),
                                        'sumss' (SUMSS catalog), nvss (NVSS
                                        catalog), 'mss' (Molonglo Sky Survey),
                                        'gleam' (GLEAM catalog), 'custom'
                                        (user-defined catalog), 'usm' (uniform
                                        sky model), 'mwacs' (MWACS catalog),
                                        'HI_monopole' (global EoR), HI_cube (HI
                                        cube from external simulations), and
                                        'HI_fluctuations' (HI fluctuations with
                                        the global mean signal removed). If set
                                        'HI_monopole' or 'monopole' the orientation
                                        of the baseline vector does not matter
                                        and only unique baseline lengths will be
                                        selected if value under 'redundant' key is
                                        set to True.
    Output:
    Dictionary containing the following keys and values.
    'bl'        [numpy array] Baseline vectors (unique ones or all depending on
                value in key 'redundant'). It is of shape nbl x 3 and will
                consist of unique baselines if value under key 'redundant' was
                set to True. Otherwise, redundancy will not be checked and all
                baselines will be returned.
    'label'     [numpy recarray] A unique label of each of the baselines.
                Shape is nbl where each element is a recarray under fields 'A1'
                (first antenna label) and 'A2' (second antenna label)
    'id'        [numpy recarray] A unique identifier of each of the baselines.
                Shape is nbl where each element is a recarray under fields 'A1'
                (first antenna id) and 'A2' (second antenna id)
    'redundancy'
                [boolean] If the array was originally found to be made of unique
                baselines (False) or redundant baselines were found (True). Even
                if set to False, the baselines may still be redundant because
                redundancy may never have been checked if value under key
                'redundant' was set to False
    'groups'
                [dictionary] Contains the grouping of unique baselines and the
                redundant baselines as numpy recarray under each unique baseline
                category/flavor. It contains as keys the labels (tuple of A1, A2)
                of unique baselines and the value under each of these keys is a
                list of baseline labels that are redundant under that category
    'reversemap'
                [dictionary] Contains the baseline category for each baseline.
                The keys are baseline labels as tuple and the value under each
                key is the label of the unique baseline category that it falls
                under.
    'layout_info'
                [dictionary] Contains the antenna layout information with the
                following keys and values:
                'positions' [numpy array] Antenna locations with shape nant x 3
                'labels'    [numpy array of strings] Antenna labels of size nant
                'ids'       [numpy array of strings] Antenna IDs of size nant
                'coords'    [string] Coordinate system in which antenna locations
                            are specified. Currently only returns 'ENU' for East-
                            North-Up coordinate system
    ---------------------------------------------------------------------------
    """

    # --- Input validation ---
    try:
        inpdict
    except NameError:
        raise NameError('Input inpdict must be specified')
    if not isinstance(inpdict, dict):
        raise TypeError('Input inpdict must be a dictionary')
    if 'array' in inpdict:
        if 'redundant' in inpdict['array']:
            array_is_redundant = inpdict['array']['redundant']
        else:
            # Redundancy check is performed by default
            array_is_redundant = True
    else:
        raise KeyError('Key "array" not found in input inpdict')
    sky_str = inpdict['skyparm']['model']
    # For a monopole sky the baseline orientation is irrelevant; only unique
    # baseline lengths are kept further below
    use_HI_monopole = False
    if sky_str == 'HI_monopole':
        use_HI_monopole = True
    antenna_file = inpdict['array']['file']
    array_layout = inpdict['array']['layout']
    minR = inpdict['array']['minR']
    maxR = inpdict['array']['maxR']
    antpos_rms_tgtplane = inpdict['array']['rms_tgtplane']
    antpos_rms_elevation = inpdict['array']['rms_elevation']
    antpos_rms_seed = inpdict['array']['seed']
    if antpos_rms_seed is None:
        # No seed given: pick a random one so the perturbation is reproducible
        # only within this call
        antpos_rms_seed = NP.random.randint(1, high=100000)
    elif isinstance(antpos_rms_seed, (int,float)):
        antpos_rms_seed = int(NP.abs(antpos_rms_seed))
    else:
        raise ValueError('Random number seed must be a positive integer')
    minbl = inpdict['baseline']['min']
    maxbl = inpdict['baseline']['max']
    bldirection = inpdict['baseline']['direction']
    # Exactly one of antenna file or preset layout must be given
    if (antenna_file is None) and (array_layout is None):
        raise ValueError('One of antenna array file or layout must be specified')
    if (antenna_file is not None) and (array_layout is not None):
        raise ValueError('Only one of antenna array file or layout must be specified')

    # --- Determine antenna locations and labels ---
    if antenna_file is not None:
        # Parse antenna layout from a user-supplied file using the parser spec
        if not isinstance(antenna_file, str):
            raise TypeError('Filename containing antenna array elements must be a string')
        if inpdict['array']['filepathtype'] == 'default':
            antenna_file = prisim_path+'data/array_layouts/'+antenna_file

        antfile_parser = inpdict['array']['parser']
        if 'comment' in antfile_parser:
            comment = antfile_parser['comment']
            if comment is None:
                comment = '#'
            elif not isinstance(comment, str):
                raise TypeError('Comment expression must be a string')
        else:
            comment = '#'
        if 'delimiter' in antfile_parser:
            delimiter = antfile_parser['delimiter']
            if delimiter is not None:
                if not isinstance(delimiter, str):
                    raise TypeError('Delimiter expression must be a string')
            else:
                delimiter = ' '
        else:
            delimiter = ' '

        if 'data_start' in antfile_parser:
            data_start = antfile_parser['data_start']
            if not isinstance(data_start, int):
                raise TypeError('data_start parameter must be an integer')
        else:
            raise KeyError('data_start parameter not provided')
        if 'data_end' in antfile_parser:
            data_end = antfile_parser['data_end']
            if data_end is not None:
                if not isinstance(data_end, int):
                    raise TypeError('data_end parameter must be an integer')
        else:
            data_end = None
        if 'header_start' in antfile_parser:
            header_start = antfile_parser['header_start']
            if not isinstance(header_start, int):
                raise TypeError('header_start parameter must be an integer')
        else:
            raise KeyError('header_start parameter not provided')

        if 'label' not in antfile_parser:
            antfile_parser['label'] = None
        elif antfile_parser['label'] is not None:
            antfile_parser['label'] = str(antfile_parser['label'])

        if 'east' not in antfile_parser:
            raise KeyError('Keyword for "east" coordinates not provided')
        else:
            if not isinstance(antfile_parser['east'], str):
                raise TypeError('Keyword for "east" coordinates must be a string')
        if 'north' not in antfile_parser:
            raise KeyError('Keyword for "north" coordinates not provided')
        else:
            if not isinstance(antfile_parser['north'], str):
                raise TypeError('Keyword for "north" coordinates must be a string')
        if 'up' not in antfile_parser:
            raise KeyError('Keyword for "up" coordinates not provided')
        else:
            if not isinstance(antfile_parser['up'], str):
                raise TypeError('Keyword for "up" coordinates must be a string')

        try:
            ant_info = ascii.read(antenna_file, comment=comment, delimiter=delimiter, header_start=header_start, data_start=data_start, data_end=data_end, guess=False)
        except IOError:
            raise IOError('Could not open file containing antenna locations.')

        if (antfile_parser['east'] not in ant_info.colnames) or (antfile_parser['north'] not in ant_info.colnames) or (antfile_parser['up'] not in ant_info.colnames):
            raise KeyError('One of east, north, up coordinates incompatible with the table in antenna_file')

        if antfile_parser['label'] is not None:
            ant_label = ant_info[antfile_parser['label']].data.astype('str')
        else:
            # Auto-assign labels as stringified running indices
            ant_label = NP.arange(len(ant_info)).astype('str')

        east = ant_info[antfile_parser['east']].data
        north = ant_info[antfile_parser['north']].data
        elev = ant_info[antfile_parser['up']].data

        # NOTE(review): NP.float was removed in numpy >= 1.20; this code
        # presumably targets older numpy / Python 2 -- confirm environment
        if (east.dtype != NP.float) or (north.dtype != NP.float) or (elev.dtype != NP.float):
            raise TypeError('Antenna locations must be of floating point type')

        ant_locs = NP.hstack((east.reshape(-1,1), north.reshape(-1,1), elev.reshape(-1,1)))
    else:
        # Preset layouts: either read from bundled files or generate
        # programmatically (hexagon / rectangle / circle generators)
        if array_layout not in ['MWA-I-128T', 'MWA-II-Hex-LB', 'MWA-II-compact', 'MWA-II-LB', 'HERA-7', 'HERA-19', 'HERA-37', 'HERA-61', 'HERA-91', 'HERA-127', 'HERA-169', 'HERA-217', 'HERA-271', 'HERA-331', 'PAPER-64', 'PAPER-112', 'HIRAX-1024', 'CHIME', 'GMRT', 'CIRC']:
            raise ValueError('Invalid array layout specified')

        if array_layout in ['MWA-I-128T', 'MWA-II-Hex-LB', 'MWA-II-compact', 'MWA-II-LB']:
            comment = '#'
            delimiter = ' '
            header_start = 0
            data_start = 2
            data_end = None
            antfile = array_layout + '_tile_coordinates.txt'
            ant_info = ascii.read(prisim_path+'data/array_layouts/'+antfile, comment=comment, delimiter=delimiter, header_start=header_start, data_start=data_start, data_end=data_end, guess=False)
            ant_label = ant_info['Tile'].data.astype('str')
            east = ant_info['East'].data
            north = ant_info['North'].data
            elev = ant_info['Height'].data
            ant_locs = NP.hstack((east.reshape(-1,1), north.reshape(-1,1), elev.reshape(-1,1)))
        elif array_layout == 'HERA-7':
            # HERA layouts: hexagonal grids with 14.6 m element spacing
            ant_locs, ant_label = hexagon_generator(14.6, n_total=7)
        elif array_layout == 'HERA-19':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=19)
        elif array_layout == 'HERA-37':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=37)
        elif array_layout == 'HERA-61':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=61)
        elif array_layout == 'HERA-91':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=91)
        elif array_layout == 'HERA-127':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=127)
        elif array_layout == 'HERA-169':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=169)
        elif array_layout == 'HERA-217':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=217)
        elif array_layout == 'HERA-271':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=271)
        elif array_layout == 'HERA-331':
            ant_locs, ant_label = hexagon_generator(14.6, n_total=331)
        elif array_layout == 'PAPER-64':
            ant_locs, ant_label = rectangle_generator([30.0, 4.0], [8, 8])
        elif array_layout == 'PAPER-112':
            ant_locs, ant_label = rectangle_generator([15.0, 4.0], [16, 7])
        elif array_layout == 'HIRAX-1024':
            ant_locs, ant_label = rectangle_generator(7.0, n_side=32)
        elif array_layout == 'CHIME':
            ant_locs, ant_label = rectangle_generator([20.0, 0.3], [5, 256])
        elif array_layout == 'GMRT':
            comment = '#'
            delimiter = ' '
            header_start = 0
            data_start = 2
            data_end = None
            antfile = 'GMRT_antenna_coordinates.txt'
            ant_info = ascii.read(prisim_path+'data/array_layouts/'+antfile, comment=comment, delimiter=delimiter, header_start=header_start, data_start=data_start, data_end=data_end, guess=False)
            ant_label = ant_info['Station'].data.astype('str')
            east = ant_info['east'].data
            north = ant_info['north'].data
            elev = ant_info['up'].data
            ant_locs = NP.hstack((east.reshape(-1,1), north.reshape(-1,1), elev.reshape(-1,1)))
        elif array_layout == 'CIRC':
            # NOTE(review): element_size is not defined anywhere in this
            # function; presumably a module-level global -- verify, otherwise
            # this branch raises NameError
            ant_locs, ant_label = circular_antenna_array(element_size, minR, maxR=maxR)
    ant_label = NP.asarray(ant_label)
    if ant_locs.shape[1] == 2:
        # Generators may return 2-D (E,N) positions; pad zero elevation
        ant_locs = NP.hstack((ant_locs, NP.zeros(ant_label.size).reshape(-1,1)))

    # --- Apply random position perturbations (seeded for reproducibility) ---
    # Tangent-plane RMS is split equally between East and North components
    antpos_rstate = NP.random.RandomState(antpos_rms_seed)
    deast = antpos_rms_tgtplane/NP.sqrt(2.0) * antpos_rstate.randn(ant_label.size)
    dnorth = antpos_rms_tgtplane/NP.sqrt(2.0) * antpos_rstate.randn(ant_label.size)
    dup = antpos_rms_elevation * antpos_rstate.randn(ant_label.size)
    denu = NP.hstack((deast.reshape(-1,1), dnorth.reshape(-1,1), dup.reshape(-1,1)))
    ant_locs = ant_locs + denu
    ant_locs_orig = NP.copy(ant_locs)
    ant_label_orig = NP.copy(ant_label)
    ant_id = NP.arange(ant_label.size, dtype=int)
    ant_id_orig = NP.copy(ant_id)
    layout_info = {'positions': ant_locs_orig, 'labels': ant_label_orig, 'ids': ant_id_orig, 'coords': 'ENU'}

    # --- Generate all baselines (no autos, no conjugates) ---
    bl_orig, bl_label_orig, bl_id_orig = baseline_generator(ant_locs_orig, ant_label=ant_label_orig, ant_id=ant_id_orig, auto=False, conjugate=False)

    # Flip baselines whose orientation falls outside (-67.5, 112.5] degrees so
    # every vector lies in one canonical half-plane; the label/id pair order is
    # reversed accordingly
    blo = NP.angle(bl_orig[:,0] + 1j * bl_orig[:,1], deg=True)
    neg_blo_ind = (blo < -67.5) | (blo > 112.5)
    bl_orig[neg_blo_ind,:] = -1.0 * bl_orig[neg_blo_ind,:]
    blo = NP.angle(bl_orig[:,0] + 1j * bl_orig[:,1], deg=True)

    maxlen = max(max(len(albl[0]), len(albl[1])) for albl in bl_label_orig)
    # NOTE: xrange implies Python 2
    bl_label_orig = [tuple(reversed(bl_label_orig[i])) if neg_blo_ind[i] else bl_label_orig[i] for i in xrange(bl_label_orig.size)]
    bl_label_orig = NP.asarray(bl_label_orig, dtype=[('A2', '|S{0:0d}'.format(maxlen)), ('A1', '|S{0:0d}'.format(maxlen))])
    bl_id_orig = [tuple(reversed(bl_id_orig[i])) if neg_blo_ind[i] else bl_id_orig[i] for i in xrange(bl_id_orig.size)]
    bl_id_orig = NP.asarray(bl_id_orig, dtype=[('A2', int), ('A1', int)])

    # Stable sort of all baseline arrays by baseline length
    bl_length_orig = NP.sqrt(NP.sum(bl_orig**2, axis=1))
    sortind_orig = NP.argsort(bl_length_orig, kind='mergesort')
    bl_orig = bl_orig[sortind_orig,:]
    blo = blo[sortind_orig]
    bl_label_orig = bl_label_orig[sortind_orig]
    bl_id_orig = bl_id_orig[sortind_orig]
    bl_length_orig = bl_length_orig[sortind_orig]

    bl = NP.copy(bl_orig)
    bl_label = NP.copy(bl_label_orig)
    bl_id = NP.copy(bl_id_orig)
    bl_orientation = NP.copy(blo)

    # --- Redundancy reduction (optional) ---
    if array_is_redundant:
        bl, select_bl_ind, bl_count, allinds = uniq_baselines(bl)
    else:
        # Keep all baselines, each its own singleton group
        select_bl_ind = NP.arange(bl.shape[0])
        bl_count = NP.ones(bl.shape[0], dtype=int)
        allinds = select_bl_ind.reshape(-1,1).tolist()
    bl_label = bl_label[select_bl_ind]
    bl_id = bl_id[select_bl_ind]
    bl_orientation = bl_orientation[select_bl_ind]
    if NP.any(bl_count > 1):
        redundancy = True
    else:
        redundancy = False

    # Re-sort selected baselines by length (selection may have disturbed order)
    bl_length = NP.sqrt(NP.sum(bl**2, axis=1))
    sortind = NP.argsort(bl_length, kind='mergesort')
    bl = bl[sortind,:]
    bl_label = bl_label[sortind]
    bl_id = bl_id[sortind]
    bl_length = bl_length[sortind]
    bl_orientation = bl_orientation[sortind]
    bl_count = bl_count[sortind]
    select_bl_ind = select_bl_ind[sortind]
    allinds = [allinds[i] for i in sortind]

    # --- Baseline length and direction cuts ---
    if minbl is None:
        minbl = 0.0
    elif not isinstance(minbl, (int,float)):
        raise TypeError('Minimum baseline length must be a scalar')
    elif minbl < 0.0:
        minbl = 0.0
    if maxbl is None:
        maxbl = bl_length.max()
    elif not isinstance(maxbl, (int,float)):
        raise TypeError('Maximum baseline length must be a scalar')
    elif maxbl < minbl:
        maxbl = bl_length.max()
    min_blo = -67.5
    max_blo = 112.5
    # NOTE(review): NP.bool was removed in numpy >= 1.20; older numpy assumed
    subselect_bl_ind = NP.zeros(bl_length.size, dtype=NP.bool)
    if bldirection is not None:
        if isinstance(bldirection, str):
            if bldirection not in ['SE', 'E', 'NE', 'N']:
                raise ValueError('Invalid baseline direction criterion specified')
            else:
                bldirection = [bldirection]
        if isinstance(bldirection, list):
            # Each direction selects a 45-degree sector of baseline orientation
            for direction in bldirection:
                if direction in ['SE', 'E', 'NE', 'N']:
                    if direction == 'SE':
                        oind = (bl_orientation >= -67.5) & (bl_orientation < -22.5)
                        subselect_bl_ind[oind] = True
                    elif direction == 'E':
                        oind = (bl_orientation >= -22.5) & (bl_orientation < 22.5)
                        subselect_bl_ind[oind] = True
                    elif direction == 'NE':
                        oind = (bl_orientation >= 22.5) & (bl_orientation < 67.5)
                        subselect_bl_ind[oind] = True
                    else:
                        oind = (bl_orientation >= 67.5) & (bl_orientation < 112.5)
                        subselect_bl_ind[oind] = True
        else:
            raise TypeError('Baseline direction criterion must specified as string or list of strings')
    else:
        subselect_bl_ind = NP.ones(bl_length.size, dtype=NP.bool)
    subselect_bl_ind = subselect_bl_ind & (bl_length >= minbl) & (bl_length <= maxbl)
    bl_label = bl_label[subselect_bl_ind]
    bl_id = bl_id[subselect_bl_ind]
    bl = bl[subselect_bl_ind,:]
    bl_length = bl_length[subselect_bl_ind]
    bl_orientation = bl_orientation[subselect_bl_ind]
    bl_count = bl_count[subselect_bl_ind]
    select_bl_ind = select_bl_ind[subselect_bl_ind]
    allinds = [allinds[i] for i in range(subselect_bl_ind.size) if subselect_bl_ind[i]]

    # --- For a monopole sky keep only one baseline per unique length ---
    if use_HI_monopole:
        # NOTE(review): in Python 3 map() returns an iterator, so the
        # .count() call below would fail -- Python 2 assumed
        bllstr = map(str, bl_length)
        uniq_bllstr, ind_uniq_bll = NP.unique(bllstr, return_index=True)
        count_uniq_bll = [bllstr.count(ubll) for ubll in uniq_bllstr]
        count_uniq_bll = NP.asarray(count_uniq_bll)

        bl = bl[ind_uniq_bll,:]
        bl_label = bl_label[ind_uniq_bll]
        bl_id = bl_id[ind_uniq_bll]
        bl_orientation = bl_orientation[ind_uniq_bll]
        bl_length = bl_length[ind_uniq_bll]
        bl_count = bl_count[ind_uniq_bll]
        select_bl_ind = select_bl_ind[ind_uniq_bll]
        allinds = [allinds[i] for i in ind_uniq_bll]

        sortind = NP.argsort(bl_length, kind='mergesort')
        bl = bl[sortind,:]
        bl_label = bl_label[sortind]
        bl_id = bl_id[sortind]
        bl_length = bl_length[sortind]
        bl_orientation = bl_orientation[sortind]
        count_uniq_bll = count_uniq_bll[sortind]
        bl_count = bl_count[sortind]
        select_bl_ind = select_bl_ind[sortind]
        allinds = [allinds[i] for i in sortind]

    # --- Build group and reverse-map dictionaries ---
    # blgroups: unique-baseline label (tuple) -> recarray of redundant labels
    # blgroups_reversemap: each member label (tuple) -> its group's label
    blgroups = {}
    blgroups_reversemap = {}
    for labelind, label in enumerate(bl_label_orig[select_bl_ind]):
        # NOTE(review): bl_count entries are always >= 1 here, so this check
        # presumably always passes -- confirm intent
        if bl_count[labelind] > 0:
            blgroups[tuple(label)] = bl_label_orig[NP.asarray(allinds[labelind])]
            for lbl in bl_label_orig[NP.asarray(allinds[labelind])]:
                # blgroups_reversemap[tuple(lbl)] = tuple(label)
                blgroups_reversemap[tuple(lbl)] = NP.asarray([label], dtype=bl_label.dtype)

    if array_is_redundant:
        if bl_label_orig.size == bl_label.size:
            warnings.warn('No redundant baselines found. Proceeding...')

    outdict = {'bl': bl, 'id': bl_id, 'label': bl_label, 'groups': blgroups, 'reversemap': blgroups_reversemap, 'redundancy': redundancy, 'layout_info': layout_info}

    return outdict
#################################################################################
def getBaselineGroupKeys(inp_labels, blgroups_reversemap):
    """
    ---------------------------------------------------------------------------
    Find redundant baseline group keys of groups that contain the input
    baseline labels

    Inputs:

    inp_labels
            [list] List where each element in the list is a two-element tuple
            that corresponds to a baseline / antenna pair label.
            e.g. [('1', '2'), ('3', '0'), ('2', '2'), ...]

    blgroups_reversemap
            [dictionary] Contains the baseline category for each baseline.
            The keys are baseline labels as tuple and the value under each
            key is the label of the unique baseline category that it falls
            under. That label could be a two-element Numpy RecArray or a tuple.
            Each element in this two-element tuple must be an antenna label
            specified as a string. e.g. {('9','8'): ('2','3'),
            ('12','11'): ('2','3'), ('1','4'): ('6','7'),...} or {('9','8'):
            array[('2','3')], ('12','11'): array[('2','3')],
            ('1','4'): array[('6','7')],...}

    Output:

    Tuple containing two values. The first value is a list of all baseline
    group keys corresponding to the input keys. If any input keys were not
    found in blgroups_reversemap, those corresponding position in this list
    will be filled with None to indicate the label was not found. The second
    value in the tuple indicates if the ordering of the input label had to be
    flipped in order to find the baseline group key. Positions where an input
    label was found as is will contain False, but if it had to be flipped will
    contain True. If the input label was not found, it will be filled with
    None.

    Example:

    blkeys, flipped = getBaselineGroupKeys(inp_labels, blgroups_reversemap)
    blkeys --> [('2','3'), ('11','16'), None, ('5','1'),...]
    flipped --> [False, True, None, False],...)
    ---------------------------------------------------------------------------
    """

    try:
        inp_labels, blgroups_reversemap
    except NameError:
        raise NameError('Inputs inp_label and blgroups_reversemap must be provided')
    if not isinstance(blgroups_reversemap, dict):
        raise TypeError('Input blgroups_reversemap must be a dictionary')
    if not isinstance(inp_labels, list):
        inp_labels = [inp_labels]
    blgrpkeys = []
    flip_order = []
    for lbl in inp_labels:
        # Look the label up as given, then with the antenna pair flipped.
        # Direct membership tests avoid building .keys() lists (O(n) on Py2)
        # and avoid repeated dict lookups per label.
        if lbl in blgroups_reversemap:
            grpkey = blgroups_reversemap[lbl]
            flipped = False
        elif lbl[::-1] in blgroups_reversemap:
            grpkey = blgroups_reversemap[lbl[::-1]]
            flipped = True
        else:
            # Label not found in either orientation
            blgrpkeys += [None]
            flip_order += [None]
            continue
        # Normalize the stored group key (1-element recarray or plain tuple)
        # to a tuple
        if isinstance(grpkey, NP.ndarray):
            blgrpkeys += [tuple(grpkey[0])]
        elif isinstance(grpkey, tuple):
            blgrpkeys += [grpkey]
        else:
            raise TypeError('Invalid type found in blgroups_reversemap')
        flip_order += [flipped]
    return (blgrpkeys, flip_order)
#################################################################################
def getBaselinesInGroups(inp_labels, blgroups_reversemap, blgroups):
    """
    ---------------------------------------------------------------------------
    Retrieve, for each input baseline label, the full set of redundant
    baseline labels belonging to the group that contains it.

    Inputs:

    inp_labels
            [list] Two-element tuples of antenna-pair labels,
            e.g. [('1', '2'), ('3', '0'), ('2', '2'), ...]

    blgroups_reversemap
            [dictionary] Maps each baseline label (tuple) to the label of the
            unique-baseline category it belongs to. The mapped value may be a
            two-element tuple of antenna-label strings or a one-element numpy
            recarray holding such a pair.

    blgroups
            [dictionary] Maps each unique-baseline label (tuple of A1, A2) to
            the numpy recarray of baseline labels redundant with it.

    Output:

    Two-element tuple. The first element is a list, parallel to inp_labels,
    whose entries are the numpy recarrays of redundant labels for the matching
    group, or None where the input label was not found. The second element is
    a parallel list flagging whether each label had to be reversed to match
    (False = matched as given, True = matched after flipping, None = no match).

    Example:

    list_blgrps, flipped = getBaselineGroupKeys(inplabels, bl_revmap, blgrps)
    list_blgrps --> [array([('2','3'), ('11','16')]), None,
                     array([('5','1')]), ...],
    flipped --> [False, True, None, ...])
    ---------------------------------------------------------------------------
    """

    if not isinstance(blgroups, dict):
        raise TypeError('Input blgroups must be a dictionary')
    # Resolve each input label to its group key (and flip status) first,
    # then expand each found key to its group's member recarray
    group_keys, flip_order = getBaselineGroupKeys(inp_labels, blgroups_reversemap)
    blgrps = [None if key is None else blgroups[key] for key in group_keys]
    return (blgrps, flip_order)
#################################################################################
def antenna_power(skymodel, telescope_info, pointing_info, freq_scale=None):
    """
    ---------------------------------------------------------------------------
    Generate antenna power received from sky when a sky model, telescope and
    pointing parameters are provided.

    Inputs:

    skymodel  [instance of class SkyModel] Sky model specified as an instance
              of class SkyModel

    telescope_info
              [dictionary] dictionary that specifies the type of element,
              element size and orientation. It consists of the following keys
              and values:
              'latitude'    [float] latitude of the telescope site (in degrees).
                            If this key is not present, the latitude of MWA
                            (-26.701 degrees) will be assumed.
              'id'          [string] If set, will ignore the other keys and use
                            telescope details for known telescopes. Accepted
                            values are 'mwa', 'vla', 'gmrt', 'ugmrt', 'hera',
                            'paper', 'hirax' and 'chime'
              'shape'       [string] Shape of antenna element. Accepted values
                            are 'dipole', 'delta', and 'dish'. Will be ignored
                            if key 'id' is set. 'delta' denotes a delta
                            function for the antenna element which has an
                            isotropic radiation pattern. 'delta' is the default
                            when keys 'id' and 'shape' are not set.
              'size'        [scalar] Diameter of the telescope dish (in meters)
                            if the key 'shape' is set to 'dish' or length of
                            the dipole if key 'shape' is set to 'dipole'. Will
                            be ignored if key 'shape' is set to 'delta'. Will
                            be ignored if key 'id' is set and a preset value
                            used for the diameter or dipole.
              'orientation' [list or numpy array] If key 'shape' is set to
                            dipole, it refers to the orientation of the dipole
                            element unit vector whose magnitude is specified by
                            length. If key 'shape' is set to 'dish', it refers
                            to the position on the sky to which the dish is
                            pointed. For a dipole, this unit vector must be
                            provided in the local ENU coordinate system aligned
                            with the direction cosines coordinate system or in
                            the Alt-Az coordinate system. This will be
                            used only when key 'shape' is set to 'dipole'.
                            This could be a 2-element vector (transverse
                            direction cosines) where the third (line-of-sight)
                            component is determined, or a 3-element vector
                            specifying all three direction cosines or a two-
                            element coordinate in Alt-Az system. If not provided
                            it defaults to an eastward pointing dipole. If key
                            'shape' is set to 'dish', the orientation refers
                            to the pointing center of the dish on the sky. It
                            can be provided in Alt-Az system as a two-element
                            vector or in the direction cosine coordinate
                            system as a two- or three-element vector. If not
                            set in the case of a dish element, it defaults to
                            zenith. This is not to be confused with the key
                            'pointing_center' in dictionary 'pointing_info'
                            which refers to the beamformed pointing center of
                            the array. The coordinate system is specified by
                            the key 'ocoords'
              'ocoords'     [scalar string] specifies the coordinate system
                            for key 'orientation'. Accepted values are 'altaz'
                            and 'dircos'.
              'element_locs'
                            [2- or 3-column array] Element locations that
                            constitute the tile. Each row specifies
                            location of one element in the tile. The
                            locations must be specified in local ENU
                            coordinate system. First column specifies along
                            local east, second along local north and the
                            third along local up. If only two columns are
                            specified, the third column is assumed to be
                            zeros. If 'elements_locs' is not provided, it
                            assumed to be a one-element system and not a
                            phased array as far as determination of primary
                            beam is concerned.
              'groundplane' [scalar] height of telescope element above the
                            ground plane (in meteres). Default = None will
                            denote no ground plane effects.
              'ground_modify'
                            [dictionary] contains specifications to modify
                            the analytically computed ground plane pattern. If
                            absent, the ground plane computed will not be
                            modified. If set, it may contain the following
                            keys:
                            'scale' [scalar] positive value to scale the
                                    modifying factor with. If not set, the
                                    scale factor to the modification is unity.
                            'max'   [scalar] positive value to clip the
                                    modified and scaled values to. If not set,
                                    there is no upper limit

    pointing_info
              [dictionary] Contains information about the pointing. It carries
              the following keys and values:
              'lst'    [numpy array] LST values (in degrees) for each pointing
              'pointing_coords'
                       [string scalar] Coordinate system in which the
                       pointing_center is specified. Accepted values are
                       'radec', 'hadec', 'altaz' or 'dircos'. Must be specified
                       if pointing_center is specified
              'pointing_center'
                       [numpy array] coordinates of pointing center (in the
                       coordinate system specified under key 'pointing_coords').
                       Mx2 array when value under key 'pointing_coords' is set
                       to 'radec', 'hadec' or 'altaz', or Mx3 array when the
                       value in 'pointing_coords' is set to 'dircos'. Number of
                       rows M should be equal to number of pointings and LST.
                       If only one row (M=1) is provided the same pointing
                       center in the given coordinate system will apply to all
                       pointings.

    freq_scale
              [string scalar] Units of frequency. Accepted values are 'Hz',
              'kHz', 'MHz' or 'GHz'. If None provided, default is set to 'GHz'

    Output:

    2-dimensional numpy array containing the antenna power. The rows denote
    the different pointings and columns denote the frequency spectrum obtained
    from the frequencies specified in the sky model.

    Notes:

    For each pointing the visible sky spectrum is multiplied with the power
    pattern and summed over all sky locations to obtain the received antenna
    power as a function of pointings and frequency.
    ---------------------------------------------------------------------------
    """
    try:
        skymodel, telescope_info, pointing_info
    except NameError:
        raise NameError('Sky model, telescope and pointing information must be provided')
    if not isinstance(skymodel, SM.SkyModel):
        raise TypeError('Input parameter skymodel must be an instance of class SkyModel')
    if not isinstance(telescope_info, dict):
        raise TypeError('Input parameter telescope_info must be a dictionary')
    if not isinstance(pointing_info, dict):
        raise TypeError('Input parameter pointing_info must be a dictionary')

    if 'latitude' in telescope_info:
        latitude = telescope_info['latitude']
    else:
        latitude = -26.701  # Default: latitude of MWA (degrees)

    n_src = skymodel.location.shape[0]
    nchan = skymodel.frequency.size

    if 'lst' not in pointing_info:
        raise KeyError('Key "lst" not provided in input parameter pointing_info')
    else:
        lst = NP.asarray(pointing_info['lst'])
    n_lst = lst.size

    if 'pointing_center' not in pointing_info:
        # Default to zenith pointing (alt=90 deg) for every LST
        pointing_center = NP.repeat(NP.asarray([90.0, 270.0]).reshape(1,-1), n_lst, axis=0)
        pointing_coords = 'altaz'
    else:
        if 'pointing_coords' not in pointing_info:
            raise KeyError('key "pointing_coords" not found in input parameter pointing_info')
        pointing_coords = pointing_info['pointing_coords']
        if not isinstance(pointing_info['pointing_center'], NP.ndarray):
            raise TypeError('Value in key "pointing_center" in input parameter pointing_info must be a numpy array')
        pointing_center = pointing_info['pointing_center']
        if len(pointing_center.shape) > 2:
            raise ValueError('Value under key "pointing_center" in input parameter pointing_info cannot exceed two dimensions')
        if len(pointing_center.shape) < 2:
            pointing_center = pointing_center.reshape(1,-1)
        # Direction cosines need 3 columns; every other coordinate system
        # needs 2. The checks must be nested, otherwise a valid 3-column
        # dircos array would also hit the 2-column test and be rejected.
        if pointing_coords == 'dircos':
            if pointing_center.shape[1] != 3:
                raise ValueError('Value under key "pointing_center" in input parameter pointing_info must be a 3-column array for direction cosine coordinate system')
        elif pointing_center.shape[1] != 2:
            raise ValueError('Value under key "pointing_center" in input parameter pointing_info must be a 2-column array for RA-Dec, HA-Dec and Alt-Az coordinate systems')

    # One pointing per LST; a single pointing (M=1) is replicated to all LST
    n_pointings = pointing_center.shape[0]
    if (n_pointings != n_lst) and (n_pointings != 1):
        raise ValueError('Number of pointing centers and number of LST must match')
    if n_pointings < n_lst:
        pointing_center = NP.repeat(pointing_center, n_lst, axis=0)
    n_snaps = lst.size

    # Convert the pointing centers to Alt-Az
    if pointing_coords == 'dircos':
        pointings_altaz = GEOM.dircos2altaz(pointing_center, units='degrees')
    elif pointing_coords == 'hadec':
        pointings_altaz = GEOM.hadec2altaz(pointing_center, latitude, units='degrees')
    elif pointing_coords == 'radec':
        # HA = LST - RA
        pointings_altaz = GEOM.hadec2altaz(NP.hstack(((lst-pointing_center[:,0]).reshape(-1,1), pointing_center[:,1].reshape(-1,1))), latitude, units='degrees')
    else:
        pointings_altaz = NP.copy(pointing_center)

    # Convert sky model locations to Alt-Az, one set per snapshot
    if skymodel.coords == 'radec':
        lst_temp = NP.hstack((lst.reshape(-1,1),NP.zeros(n_snaps).reshape(-1,1))) # Prepare fake LST for numpy broadcasting
        lst_temp = lst_temp.T
        lst_temp = lst_temp[NP.newaxis,:,:]
        sky_hadec = lst_temp - skymodel.location[:,:,NP.newaxis] # Reverses sign of declination
        sky_hadec[:,1,:] *= -1 # Correct for the reversal of sign in the declination
        sky_hadec = NP.concatenate(NP.split(sky_hadec, n_snaps, axis=2), axis=0)
        sky_hadec = NP.squeeze(sky_hadec, axis=2)
        sky_altaz = GEOM.hadec2altaz(sky_hadec, latitude, units='degrees')
    elif skymodel.coords == 'hadec':
        sky_altaz = GEOM.hadec2altaz(skymodel.location, latitude, units='degrees')
    elif skymodel.coords == 'dircos':
        sky_altaz = GEOM.dircos2altaz(skymodel.location, units='degrees')
    else:
        sky_altaz = NP.copy(skymodel.location)

    sky_altaz = NP.split(sky_altaz, range(0,sky_altaz.shape[0],n_src)[1:], axis=0) # Split sky_altaz into a list of arrays
    retval = []

    progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(), PGB.ETA()], maxval=len(sky_altaz)).start()
    for i in xrange(len(sky_altaz)):
        pinfo = {}
        pinfo['pointing_center'] = pointings_altaz[i,:]
        pinfo['pointing_coords'] = 'altaz'
        # if 'element_locs' in telescope_info:
        #     pinfo['element_locs'] = telescope_info['element_locs']

        # Only sources above the horizon (alt >= 0) contribute
        upper_hemisphere_ind = sky_altaz[i][:,0] >= 0.0
        upper_skymodel = skymodel.subset(indices=NP.where(upper_hemisphere_ind)[0])
        pb = PB.primary_beam_generator(sky_altaz[i][upper_hemisphere_ind,:], skymodel.frequency, telescope_info, freq_scale=freq_scale, skyunits='altaz', pointing_info=pinfo)
        spectrum = upper_skymodel.generate_spectrum(interp_method='pchip')

        # NOTE(review): this is a beam-weighted average (divides by sum of
        # the power pattern), whereas the docstring describes a plain
        # beam-weighted sum -- confirm the intended normalization
        retval += [NP.sum(pb*spectrum, axis=0) / NP.sum(pb, axis=0)]

        progress.update(i+1)
    progress.finish()

    return NP.asarray(retval)
#################################################################################
class GainInfo(object):
"""
----------------------------------------------------------------------------
Class to manage instrument gains
Attributes:
gaintable [None or dictionary] If set to None, all antenna- and
baseline-based gains will be set to unity. If returned as
dictionary, it contains the loaded gains. It contains the
following keys and values:
'antenna-based' [None or dictionary] Contains antenna-
based instrument gain information. If
set to None, all antenna-based gains are
set to unity. If returned as dictionary,
it has the following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as specified
in input axes_order
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nant, nchan, nts)
If there is no variations in
gains along an axis, then
the corresponding nax may be
set to 1 and the gains will
be replicated along that
axis using numpy array
broadcasting. For example,
shapes (nant,1,1), (1,1,1),
(1,nchan,nts) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List or antenna
labels that correspond to
nant along the 'label' axis.
If nant=1, this may be set
to None, else it will be
specified and will match the
nant.
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nchan=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nchan
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
ntimes=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the ntimes. It must
be a float and can be in
seconds, hours, days, etc.
'baseline-based' [None or dictionary] Contains baseline-
based instrument gain information. If
set to None, all baseline-based gains
are set to unity. If returned as
dictionary, it has the following keys
and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as
specified in input
axes_order
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nbl, nchan, nts)
If there is no variations in
gains along an axis, then
the corresponding nax may be
set to 1 and the gains will
be replicated along that
axis using numpy array
broadcasting. For example,
shapes (nant,1,1), (1,1,1),
(1,nchan,nts) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List or baseline
labels that correspond to
nbl along the 'label' axis.
If nbl=1 along the 'label'
axis this may be set to
None, else it will be
specified and will match nbl
'frequency' [None or list or numpy array]
Frequency channels that
correspond to the nax along
the 'frequency' axis. If the
nchan=1 along the 'frequency'
axis, this may be set to None,
else it must be specified and
must match the nchan
'time' [None or list or numpy array]
Observation times that
correspond to the nax along
the 'time' axis. If the
ntimes=1 along the 'time'
axis, this may be set to None,
else it must be specified and
must match the ntimes. It must
be a float and can be in
seconds, hours, days, etc.
interpfuncs [dictionary] Determined in member function interpolator().
Contains interpolation information under two keys, namely,
'antenna-based' and 'baseline-based'. Under each of these keys
is another dictionary with the following keys and values:
'dims' [numpy array of strings] Contains the axes labels
of the interpolated axes for antenna or baseline
labels. It could contain a single element ['time'],
of ['frequency'] indicating 1D splines along that
axis or contain two elements 'time' and 'frequency'
indicating 2D splines. 1D splines will have been
obtained with scipy.interpolate.interp1d while
2D splines obtained with scipy.interpolate.interp2d
'interp' [numpy recArray] Holds the interpolation functions
(instances of scipy.interpolate.interp1d or
scipy.interpolate.interp2d depending on the value
under 'dims' key) for each antenna or baseline
label. It is of size nbl. Each entry in this
numpy recArray has two fields, 'real' for
interpolation of real part and 'imag' for the
imaginary part. If it is a one element recarray,
then it applies to all antennas and baselines
Member function interpolate_gains() uses this attribute to
return interpolated gains
splinefuncs [dictionary] Determined in member function splinator().
Contains spline information under two keys, namely,
'antenna-based' and 'baseline-based'. Under each of these keys
is another dictionary with the following keys and values:
'dims' [numpy array of strings] Contains the axes labels
of the interpolated axes for antenna or baseline
labels. It could contain a single element ['time'],
of ['frequency'] indicating 1D splines along that
axis or contain two elements 'time' and 'frequency'
indicating 2D splines. 1D splines will have been
obtained with scipy.interpolate.UnivariateSpline
while 2D splines obtained with
scipy.interpolate.RectBivariateSpline
'interp' [numpy recArray] Holds the spline functions
(instances of scipy.interpolate.UnivariateSpline or
scipy.interpolate.RectBivariateSpline depending on
the value under 'dims' key) for each antenna or
baseline label. It is of size nbl. Each entry in
this numpy recArray has two fields, 'real' for
interpolation of real part and 'imag' for the
imaginary part. If it is a one element recarray,
then it applies to all antennas and baselines.
Member function spline_gains() uses this attribute to return
spline-interpolated gains
Member functions:
__init__() Initialize an instance of class GainInfo from a file
read_gaintable()
Read gain table from file in HDF5 format and return and/or
store as attribute
eval_gains()
Extract complex instrument gains for given baselines from the
gain table
interpolator()
Sets up interpolation functions and stores them in the
attribute interpfuncs. Better alternative is to use splinator()
splinator() Sets up spline functions and stores them in the attribute
splinefuncs. Better alternative to interpolator()
interpolate_gains()
Interpolate at the specified baselines for the given
frequencies and times using attribute interpfuncs. Better
alternative is to use spline_gains()
spline_gains()
Evaluate spline at the specified baselines for the given
frequencies and times using attribute splinefuncs. Better
alternative to interpolate_gains()
nearest_gains()
Extract complex instrument gains for given baselines from the
gain table determined by nearest neighbor logic
write_gaintable()
Write gain table with specified axes ordering to external file
in HDF5 format
-----------------------------------------------------------------------------
"""
def __init__(self, init_file=None, axes_order=None):
"""
------------------------------------------------------------------------
Initialize an instance of class GainInfo from a file
Attributes initialized are:
gaintable, interpfuncs, splinefuncs
Read docstring of class GainInfo for details on these attributes
Keyword Inputs:
gainsfile [string] Filename including the full path that contains the
instrument gains. It must be in HDF5 format. It must contain
the following structure:
'antenna-based' [dictionary] Contains antenna-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key
'ordering'. If there is no
variations in gains along an
axis, then the corresponding
nax may be set to 1 and the
gains will be replicated
along that axis using numpy
array broadcasting. For
example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy array]
List or antenna labels that
correspond to the nax along
the 'label' axis. If the
nax=1 along the 'label' axis,
this may be set to None, else
it must be specified and must
match the nax.
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the
nax along the 'frequency'
axis. If the nax=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nax.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the nax=1 along the 'time'
axis, this may be set to
None, else it must be
specified and must match the
nax. It must be a float and
can be in seconds, hours,
days, etc.
'baseline-based' [dictionary] Contains baseline-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key
'ordering'. If there is no
variations in gains along an
axis, then the corresponding
nax may be set to 1 and the
gains will be replicated
along that axis using numpy
array broadcasting. For
example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List of baseline
labels that correspond to
the nax along the 'label'
axis. If the nax=1 along the
'label' axis this may be set
to None, else it must be
specified and must match the
nax.
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the
nax along the 'frequency'
axis. If the nax=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nax.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the nax=1 along the 'time'
axis, this may be set to
None, else it must be
specified and must match the
nax. It must be a float and
can be in seconds, hours,
days, etc.
axes_order [None or list or numpy array] The gaintable which is read is
stored in this axes ordering. If set to None, it will store
in this order ['label', 'frequency', 'time']
------------------------------------------------------------------------
"""
self.gaintable = None
self.interpfuncs = {key: None for key in ['antenna-based', 'baseline-based']}
self.splinefuncs = {key: None for key in ['antenna-based', 'baseline-based']}
if init_file is not None:
self.gaintable = self.read_gaintable(init_file, axes_order=axes_order, action='return')
self.interpolator()
self.splinator(smoothness=None)
#############################################################################
def read_gaintable(self, gainsfile, axes_order=None, action='return'):
"""
------------------------------------------------------------------------
Read gain table from file in HDF5 format and return and/or store as
attribute
Input:
gainsfile [string] Filename including the full path that contains the
instrument gains. It must be in HDF5 format. It must contain
the following structure:
'antenna-based' [dictionary] Contains antenna-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key
'ordering'. If there is no
variations in gains along an
axis, then the corresponding
nax may be set to 1 and the
gains will be replicated
along that axis using numpy
array broadcasting. For
example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List or antenna
labels that correspond to
the nax along the 'label'
axis. If the nax=1 along the
'label' axis, this may be
set to None, else it must be
specified and must match the
nax.
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the nax
along the 'frequency' axis.
If the nax=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nax.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the nax=1 along the 'time'
axis, this may be set to
None, else it must be
specified and must match the
nax. It must be a float and
can be in seconds, hours,
days, etc.
'baseline-based' [dictionary] Contains baseline-based
instrument gain information. It has the
following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency'. Must be
specified (no defaults)
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nax1, nax2, nax3)
where ax1, ax2 and ax3 are
specified by the axes
ordering under key
'ordering'. If there is no
variations in gains along an
axis, then the corresponding
nax may be set to 1 and the
gains will be replicated
along that axis using numpy
array broadcasting. For
example, shapes (nax1,1,1),
(1,1,1), (1,nax2,nax3) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List of baseline
labels that correspond to
the nax along the 'label'
axis. If the nax=1 along the
'label' axis this may be set
to None, else it must be
specified and must match the
nax.
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the nax
along the 'frequency' axis.
If the nax=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nax.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the nax=1 along the 'time'
axis, this may be set to
None, else it must be
specified and must match the
nax. It must be a float and
can be in seconds, hours,
days, etc.
axes_order [None or list or numpy array] The gaintable which is read is
stored in this axes ordering. If set to None, it will store
in this order ['label', 'frequency', 'time']
action [string] If set to 'store' (default), the gain table will
be stored as attribute in addition to being returned. If set
to 'return' the gain table will be returned.
Output:
gaintable [None or dictionary] If set to None, all antenna- and
baseline-based gains will be set to unity. If returned as
dictionary, it contains the loaded gains. It contains the
following keys and values:
'antenna-based' [None or dictionary] Contains antenna-
based instrument gain information. If
set to None, all antenna-based gains are
set to unity. If returned as dictionary,
it has the following keys and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as specified
in input axes_order
'gains' [scalar or numpy array]
Complex antenna-based
instrument gains. Must be
of shape (nant, nchan, nts)
If there is no variations in
gains along an axis, then
the corresponding nax may be
set to 1 and the gains will
be replicated along that
axis using numpy array
broadcasting. For example,
shapes (nant,1,1), (1,1,1),
(1,nchan,nts) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List or antenna
labels that correspond to
nant along the 'label' axis.
If nant=1, this may be set
to None, else it will be
specified and will match the
nant.
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the nax
along the 'frequency' axis.
If the nchan=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nchan.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the ntimes=1 along the
'time' axis, this may be set
to None, else it must be
specified and must match the
ntimes. It will be a float
and in same units as given
in input
'baseline-based' [None or dictionary] Contains baseline-
based instrument gain information. If
set to None, all baseline-based gains
are set to unity. If returned as
dictionary, it has the following keys
and values:
'ordering' [list or numpy array] Three
element list of strings
indicating the ordering of
axes - 'time', 'label',
and 'frequency' as
specified in input
axes_order
'gains' [scalar or numpy array]
Complex baseline-based
instrument gains. Must be
of shape (nbl, nchan, nts)
If there is no variations in
gains along an axis, then
the corresponding nax may be
set to 1 and the gains will
be replicated along that
axis using numpy array
broadcasting. For example,
shapes (nant,1,1), (1,1,1),
(1,nchan,nts) are
acceptable. If specified as
a scalar, it will be
replicated along all three
axes, namely, 'label',
'frequency' and 'time'.
'label' [None or list or numpy
array] List or baseline
labels that correspond to
nbl along the 'label' axis.
If nbl=1 along the 'label'
axis this may be set to
None, else it will be
specified and will match nbl
'frequency' [None or list or numpy
array] Frequency channels
that correspond to the nax
along the 'frequency' axis.
If the nchan=1 along the
'frequency' axis, this may
be set to None, else it must
be specified and must match
the nchan.
'time' [None or list or numpy
array] Observation times
that correspond to the nax
along the 'time' axis. If
the ntimes=1 along the
'time' axis, this may be set
to None, else it must be
specified and must match the
ntimes. It will be a float
and in same units as given
in input
------------------------------------------------------------------------
"""
if not isinstance(action, str):
return TypeError('Input parameter action must be a string')
action = action.lower()
if action not in ['store', 'return']:
raise ValueError('Invalid value specified for input parameter action')
gaintable = read_gaintable(gainsfile, axes_order=axes_order)
if action == 'store':
self.gaintable = gaintable
return gaintable
#############################################################################
    def interpolator(self, kind='linear'):
        """
        ------------------------------------------------------------------------
        Sets up interpolation functions and stores them in the attribute
        interpfuncs. Better alternative is to use splinator()

        Inputs:

        kind    [string] Type of interpolation. Accepted values are
                'linear' (default), 'cubic' or 'quintic'. See documentation
                of scipy.interpolate.interp1d and scipy.interpolate.interp2d
                for details
        ------------------------------------------------------------------------
        """
        kind = kind.lower()
        if kind not in ['linear', 'cubic', 'quintic']:
            raise ValueError('Specified kind of interpolation invalid')
        if self.gaintable is not None:
            # Loop over the two gain tables: 'antenna-based' and 'baseline-based'
            for gainkey in self.gaintable:
                if self.gaintable[gainkey] is not None:
                    self.interpfuncs[gainkey] = None
                    if self.gaintable[gainkey]['gains'] is not None:
                        if isinstance(self.gaintable[gainkey]['gains'], NP.ndarray):
                            if self.gaintable[gainkey]['gains'].ndim != 3:
                                raise ValueError('Gains must be a 3D numpy array')
                            # if self.gaintable[gainkey]['gains'].size > 1:
                            # Interpolators are only built when the gains actually
                            # vary along the frequency and/or time axis; otherwise
                            # interpfuncs[gainkey] stays None (broadcast handles it)
                            if (self.gaintable[gainkey]['gains'].shape[self.gaintable[gainkey]['ordering'].index('frequency')] > 1) or (self.gaintable[gainkey]['gains'].shape[self.gaintable[gainkey]['ordering'].index('time')] > 1):
                                # Bring gains into canonical (label, frequency, time)
                                # ordering before setting up the interpolators
                                temp_axes_order = ['label', 'frequency', 'time']
                                inp_order = self.gaintable[gainkey]['ordering']
                                temp_transpose_order = NMO.find_list_in_list(inp_order, temp_axes_order)
                                if NP.all(inp_order == temp_axes_order):
                                    gains = NP.copy(self.gaintable[gainkey]['gains'])
                                else:
                                    gains = NP.transpose(NP.copy(self.gaintable[gainkey]['gains']), axes=temp_transpose_order)
                                # dims collects the names of the non-degenerate axes
                                # among 'frequency' and 'time': one name -> 1D
                                # interpolation, two names -> 2D interpolation
                                dims = []
                                for ax in NP.arange(1,3):
                                    if gains.shape[ax] > 1:
                                        dims += [temp_axes_order[ax]]
                                dims = NP.asarray(dims)
                                interpf = []
                                # One (real, imag) pair of interpolators per antenna
                                # or baseline label; real and imaginary parts are
                                # interpolated separately
                                for labelind in xrange(gains.shape[0]):
                                    if dims.size == 1:
                                        # 1D case: interpolate along the single varying
                                        # axis; bounds_error=True so out-of-range
                                        # queries raise instead of extrapolating
                                        interpf_real = interpolate.interp1d(self.gaintable[gainkey][dims[0]], gains[labelind,:,:].real.ravel(), kind=kind, bounds_error=True)
                                        interpf_imag = interpolate.interp1d(self.gaintable[gainkey][dims[0]], gains[labelind,:,:].imag.ravel(), kind=kind, bounds_error=True)
                                    else:
                                        # 2D case: interp2d(x=time, y=frequency, z) with
                                        # z of shape (nchan, nts) matching (len(y), len(x))
                                        interpf_real = interpolate.interp2d(self.gaintable[gainkey]['time'], self.gaintable[gainkey]['frequency'], gains[labelind,:,:].real, kind=kind, bounds_error=True)
                                        interpf_imag = interpolate.interp2d(self.gaintable[gainkey]['time'], self.gaintable[gainkey]['frequency'], gains[labelind,:,:].imag, kind=kind, bounds_error=True)
                                    interpf += [(copy.copy(interpf_real), copy.copy(interpf_imag))]
                                self.interpfuncs[gainkey] = {'interp': NP.asarray(interpf, dtype=[('real', NP.object), ('imag', NP.object)]), 'dims': dims}
############################################################################
def splinator(self, smoothness=None):
"""
-----------------------------------------------------------------------
Sets up spline functions and stores them in the attribute splinefuncs.
Better alternative to interpolator()
Inputs:
smoothness [integer or float] Smoothness of spline interpolation. Must
be positive. If set to None (default), it will set equal to
the number of samples using which the spline functions are
estimated. Read documentation of
scipy.interpolate.UnivariateSpline and
scipy.interpolate.RectBivariateSpline for more details
-----------------------------------------------------------------------
"""
if smoothness is not None:
if not isinstance(smoothness, (int,float)):
raise TypeError('Input smoothness must be a scalar')
if smoothness <= 0.0:
raise ValueError('Input smoothness must be a positive number')
if self.gaintable is not None:
for gainkey in self.gaintable:
if self.gaintable[gainkey] is not None:
self.splinefuncs[gainkey] = None
if self.gaintable[gainkey]['gains'] is not None:
if isinstance(self.gaintable[gainkey]['gains'], NP.ndarray):
if self.gaintable[gainkey]['gains'].ndim != 3:
raise ValueError('Gains must be a 3D numpy array')
# if self.gaintable[gainkey]['gains'].size > 1:
if (self.gaintable[gainkey]['gains'].shape[self.gaintable[gainkey]['ordering'].index('frequency')] > 1) or (self.gaintable[gainkey]['gains'].shape[self.gaintable[gainkey]['ordering'].index('time')] > 1):
temp_axes_order = ['label', 'frequency', 'time']
inp_order = self.gaintable[gainkey]['ordering']
temp_transpose_order = NMO.find_list_in_list(inp_order, temp_axes_order)
if NP.all(inp_order == temp_axes_order):
gains = NP.copy(self.gaintable[gainkey]['gains'])
else:
gains = NP.transpose(NP.copy(self.gaintable[gainkey]['gains']), axes=temp_transpose_order)
dims = []
for ax in NP.arange(1,3):
if gains.shape[ax] > 1:
dims += [temp_axes_order[ax]]
dims = NP.asarray(dims)
interpf = []
for labelind in xrange(gains.shape[0]):
if dims.size == 1:
if smoothness is None:
smoothness = self.gaintable[gainkey][dims[0]].size
interpf_real = interpolate.UnivariateSpline(self.gaintable[gainkey][dims[0]], gains[labelind,:,:].real.ravel(), s=smoothness, ext='raise')
interpf_imag = interpolate.UnivariateSpline(self.gaintable[gainkey][dims[0]], gains[labelind,:,:].imag.ravel(), s=smoothness, ext='raise')
else:
if smoothness is None:
smoothness = gains.shape[1]*gains.shape[2]
interpf_real = interpolate.RectBivariateSpline(self.gaintable[gainkey]['time'], self.gaintable[gainkey]['frequency'], gains[labelind,:,:].real.T, bbox=[self.gaintable[gainkey]['time'].min(), self.gaintable[gainkey]['time'].max(), self.gaintable[gainkey]['frequency'].min(), self.gaintable[gainkey]['frequency'].max()], s=smoothness)
interpf_imag = interpolate.RectBivariateSpline(self.gaintable[gainkey]['time'], self.gaintable[gainkey]['frequency'], gains[labelind,:,:].imag.T, bbox=[self.gaintable[gainkey]['time'].min(), self.gaintable[gainkey]['time'].max(), self.gaintable[gainkey]['frequency'].min(), self.gaintable[gainkey]['frequency'].max()], s=smoothness)
interpf += [(copy.copy(interpf_real), copy.copy(interpf_imag))]
self.splinefuncs[gainkey] = {'interp': NP.asarray(interpf, dtype=[('real', NP.object), ('imag', NP.object)]), 'dims': dims}
#############################################################################
    def interpolate_gains(self, bl_labels, freqs=None, times=None,
                          axes_order=None):
        """
        ------------------------------------------------------------------------
        Interpolate at the specified baselines for the given frequencies and
        times using attribute interpfuncs. Better alternative is to use
        spline_gains()

        Inputs:

        bl_labels   [Numpy structured array tuples] Labels of antennas in the
                    pair used to produce the baseline vector under fields 'A2'
                    and 'A1' for second and first antenna respectively. The
                    baseline vector is obtained by position of antennas under
                    'A2' minus position of antennas under 'A1'. The array is of
                    size nbl

        freqs       [None or numpy array] Array of frequencies at which the
                    gains are to be interpolated using the attribute
                    interpfuncs. If set to None (default), all frequencies in
                    the gaintable are assumed. The specified frequencies must
                    always lie within the range which was used in creating the
                    interpolation functions, otherwise an exception will be
                    raised. The array is of size nchan

        times       [None or numpy array] Array of times at which the gains
                    are to be interpolated using the attribute interpfuncs. If
                    set to None (default), all times in the gaintable are
                    assumed. The specified times must always lie within the
                    range which was used in creating the interpolation
                    functions, otherwise an exception will be raised. The
                    array is of size nts

        axes_order  [None or list or numpy array] Axes ordering for extracted
                    gains. It must contain the three elements 'label',
                    'frequency', and 'time'. If set to None, it will be
                    returned in the same order as in the input gaintable.

        Outputs:

        [numpy array] Complex gains of shape nbl x nchan x nts for the
        specified baselines, frequencies and times.

        Raises:

        NameError if bl_labels is not specified; TypeError/ValueError/
        KeyError/IndexError on invalid inputs or labels missing from the
        gain table.
        ------------------------------------------------------------------------
        """

        try:
            bl_labels
        except NameError:
            raise NameError('Input bl_labels must be specified')

        # Start from a scalar unity gain that broadcasts against any
        # (label, frequency, time)-shaped gain product accumulated below
        blgains = NP.asarray(1.0).reshape(1,1,1)
        if self.gaintable is not None:
            a1_labels = bl_labels['A1']
            a2_labels = bl_labels['A2']
            # Antenna-based and baseline-based gains are multiplied
            # independently into the final baseline gains
            for key in ['antenna-based', 'baseline-based']:
                if self.interpfuncs[key] is not None:
                    labels = self.gaintable[key]['label']
                    # Default to the tabulated frequencies when freqs not given
                    if freqs is None:
                        if self.gaintable[key]['frequency'] is not None:
                            freqs = self.gaintable[key]['frequency']
                    elif isinstance(freqs, (int,list,NP.ndarray)):
                        freqs = NP.asarray(freqs).ravel()
                    else:
                        raise TypeError('Input freqs must be a scalar, list or numpy array')
                    # Default to the tabulated times when times not given
                    if times is None:
                        if self.gaintable[key]['time'] is not None:
                            times = self.gaintable[key]['time']
                    elif isinstance(times, (int,list,NP.ndarray)):
                        times = NP.asarray(times).ravel()
                    else:
                        raise TypeError('Input times must be a scalar, list or numpy array')
                    # Mark in-bounds (ib) and out-of-bounds (oob) frequencies
                    # relative to the tabulated range; extrapolation is refused
                    if self.gaintable[key]['frequency'] is not None:
                        ib_freq_index = NP.logical_and(freqs <= NP.amax(self.gaintable[key]['frequency']), freqs >= NP.amin(self.gaintable[key]['frequency']))
                        oobl_freq_index = freqs < NP.amin(self.gaintable[key]['frequency'])
                        oobr_freq_index = freqs > NP.amax(self.gaintable[key]['frequency'])
                        oob_freq_index = NP.logical_not(ib_freq_index)
                        if NP.any(oob_freq_index):
                            raise ValueError('One or more of the frequencies outside interpolation range')
                    else:
                        if freqs is not None:
                            ib_freq_index = NP.ones(freqs.size, dtype=NP.bool)
                            oob_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                            oobl_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                            oobr_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                        else:
                            ib_freq_index = None
                            oob_freq_index = None
                    # Same bounds bookkeeping for times
                    if self.gaintable[key]['time'] is not None:
                        ib_time_index = NP.logical_and(times <= NP.amax(self.gaintable[key]['time']), times >= NP.amin(self.gaintable[key]['time']))
                        oobl_time_index = times < NP.amin(self.gaintable[key]['time'])
                        oobr_time_index = times > NP.amax(self.gaintable[key]['time'])
                        oob_time_index = NP.logical_not(ib_time_index)
                        if NP.any(oob_time_index):
                            raise ValueError('One or more of the times outside interpolation range')
                    else:
                        if times is not None:
                            ib_time_index = NP.ones(times.size, dtype=NP.bool)
                            oob_time_index = NP.zeros(times.size, dtype=NP.bool)
                            oobl_time_index = NP.zeros(times.size, dtype=NP.bool)
                            oobr_time_index = NP.zeros(times.size, dtype=NP.bool)
                        else:
                            ib_time_index = None
                            oob_time_index = None
                    # Validate the structure of interpfuncs for this key
                    if isinstance(self.interpfuncs[key], dict):
                        if 'dims' not in self.interpfuncs[key]:
                            raise KeyError('Key "dims" not found in attribute interpfuncs[{0}]'.format(key))
                        if not isinstance(self.interpfuncs[key]['dims'], NP.ndarray):
                            raise TypeError('Key "dims" in attribute interpfuncs[{0}] must contain a numpy array'.format(key))
                    # One varying axis -> 1D interpolation on that axis;
                    # otherwise 2D interpolation over (time, frequency)
                    if self.interpfuncs[key]['dims'].size == 1:
                        if self.interpfuncs[key]['dims'][0] == 'time':
                            ntimes = ib_time_index.size
                            if freqs is None:
                                nchan = 1
                            else:
                                nchan = ib_freq_index.size
                            inp = times[ib_time_index]
                        else:
                            nchan = ib_freq_index.size
                            if times is None:
                                ntimes = 1
                            else:
                                ntimes = ib_time_index.size
                            inp = freqs[ib_freq_index]
                    else:
                        inp_times = times[ib_time_index]
                        inp_freqs = freqs[ib_freq_index]
                        ntimes = ib_time_index.size
                        nchan = ib_freq_index.size
                    if key == 'antenna-based':
                        # Baseline gain from antenna gains: g2 * conj(g1).
                        # The real/imag parts are interpolated separately and
                        # recombined; g1's imaginary part is negated in place
                        # to form the conjugate.
                        ind1 = NMO.find_list_in_list(labels, a1_labels)
                        ind2 = NMO.find_list_in_list(labels, a2_labels)
                        if NP.sum(ind1.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        if NP.sum(ind2.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        g1_conj = None
                        g2 = None
                        for i in xrange(ind1.size):
                            if self.interpfuncs[key]['dims'].size == 1:
                                if g1_conj is None:
                                    g1_conj = (self.interpfuncs[key]['interp']['real'][ind1[i]](inp) - 1j * self.interpfuncs[key]['interp']['imag'][ind1[i]](inp)).reshape(1,nchan,ntimes)
                                    g2 = (self.interpfuncs[key]['interp']['real'][ind2[i]](inp) + 1j * self.interpfuncs[key]['interp']['imag'][ind2[i]](inp)).reshape(1,nchan,ntimes)
                                else:
                                    g1_conj = NP.concatenate((g1_conj, (self.interpfuncs[key]['interp']['real'][ind1[i]](inp) - 1j * self.interpfuncs[key]['interp']['imag'][ind1[i]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                    g2 = NP.concatenate((g2, (self.interpfuncs[key]['interp']['real'][ind2[i]](inp) + 1j * self.interpfuncs[key]['interp']['imag'][ind2[i]](inp)).reshape(1,nchan,ntimes)), axis=0)
                            else:
                                if g1_conj is None:
                                    g1_conj = (self.interpfuncs[key]['interp']['real'][ind1[i]](inp_times,inp_freqs) - 1j * self.interpfuncs[key]['interp']['imag'][ind1[i]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)
                                    g2 = (self.interpfuncs[key]['interp']['real'][ind2[i]](inp_times,inp_freqs) + 1j * self.interpfuncs[key]['interp']['imag'][ind2[i]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)
                                else:
                                    g1_conj = NP.concatenate((g1_conj, (self.interpfuncs[key]['interp']['real'][ind1[i]](inp_times,inp_freqs) - 1j * self.interpfuncs[key]['interp']['imag'][ind1[i]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)), axis=0)
                                    g2 = NP.concatenate((g2, (self.interpfuncs[key]['interp']['real'][ind2[i]](inp_times,inp_freqs) + 1j * self.interpfuncs[key]['interp']['imag'][ind2[i]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)), axis=0)
                        blgains = blgains * g1_conj * g2 * NP.ones((1,nchan,ntimes), dtype=NP.complex)
                    else:
                        # Baseline-based gains: use the label directly when
                        # present; use the reversed label with conjugation
                        # otherwise; fall back to unity gain when absent
                        g12 = None
                        for labelind,label in enumerate(bl_labels):
                            if label in labels:
                                ind = NP.where(self.gaintable[key]['label'] == label)[0]
                                if self.interpfuncs[key]['dims'].size == 1:
                                    if g12 is None:
                                        g12 = (self.interpfuncs[key]['interp']['real'][ind[0]](inp) + 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.interpfuncs[key]['interp']['real'][ind[0]](inp) + 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                else:
                                    if g12 is None:
                                        g12 = (self.interpfuncs[key]['interp']['real'][ind[0]](inp_times,inp_freqs) + 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.interpfuncs[key]['interp']['real'][ind[0]](inp_times,inp_freqs) + 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)), axis=0)
                            elif NP.asarray([tuple(reversed(label))], dtype=bl_labels.dtype)[0] in labels:
                                ind = NP.where(labels == NP.asarray([tuple(reversed(label))], dtype=bl_labels.dtype)[0])[0]
                                if self.interpfuncs[key]['dims'].size == 1:
                                    if g12 is None:
                                        g12 = (self.interpfuncs[key]['interp']['real'][ind[0]](inp) - 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.interpfuncs[key]['interp']['real'][ind[0]](inp) - 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                else:
                                    if g12 is None:
                                        g12 = (self.interpfuncs[key]['interp']['real'][ind[0]](inp_times,inp_freqs) - 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.interpfuncs[key]['interp']['real'][ind[0]](inp_times,inp_freqs) - 1j * self.interpfuncs[key]['interp']['imag'][ind[0]](inp_times,inp_freqs)).reshape(1,nchan,ntimes)), axis=0)
                            else:
                                if g12 is None:
                                    g12 = NP.ones((1,nchan,ntimes), dtype=NP.complex)
                                else:
                                    g12 = NP.concatenate((g12, NP.ones((1,nchan,ntimes), dtype=NP.complex)), axis=0)
                        blgains = blgains * g12 * NP.ones((1,nchan,ntimes), dtype=NP.complex)
            # blgains is built in (label, frequency, time) order; transpose to
            # the requested (or gaintable-default) axes ordering
            interp_axes_order = ['label', 'frequency', 'time']
            if axes_order is None:
                axes_order = self.gaintable['antenna-based']['ordering']
            elif not isinstance(axes_order, (list, NP.ndarray)):
                raise TypeError('axes_order must be a list')
            else:
                if len(axes_order) != 3:
                    raise ValueError('axes_order must be a three element list')
                for orderkey in ['label', 'frequency', 'time']:
                    if orderkey not in axes_order:
                        raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
            transpose_order = NMO.find_list_in_list(interp_axes_order, axes_order)
            blgains = NP.transpose(blgains, axes=transpose_order)
        return blgains
#############################################################################
    def spline_gains(self, bl_labels, freqs=None, times=None, axes_order=None):
        """
        ------------------------------------------------------------------------
        Evaluate spline at the specified baselines for the given frequencies and
        times using attribute splinefuncs. Better alternative to
        interpolate_gains()

        Inputs:

        bl_labels   [Numpy structured array tuples] Labels of antennas in the
                    pair used to produce the baseline vector under fields 'A2'
                    and 'A1' for second and first antenna respectively. The
                    baseline vector is obtained by position of antennas under
                    'A2' minus position of antennas under 'A1'. The array is of
                    size nbl

        freqs       [None or numpy array] Array of frequencies at which the
                    gains are to be interpolated using the attribute
                    splinefuncs. If set to None (default), all frequencies in
                    the gaintable are assumed. The specified frequencies must
                    always lie within the range which was used in creating the
                    interpolation functions, otherwise an exception will be
                    raised. The array is of size nchan

        times       [None or numpy array] Array of times at which the gains
                    are to be interpolated using the attribute splinefuncs. If
                    set to None (default), all times in the gaintable are
                    assumed. The specified times must always lie within the
                    range which was used in creating the interpolation
                    functions, otherwise an exception will be raised. The array
                    is of size nts

        axes_order  [None or list or numpy array] Axes ordering for extracted
                    gains. It must contain the three elements 'label',
                    'frequency', and 'time'. If set to None, it will be
                    returned in the same order as in the input gaintable.

        Outputs:

        [numpy array] Complex gains of shape nbl x nchan x nts for the specified
        baselines, frequencies and times.

        Raises:

        NameError if bl_labels is not specified; TypeError/KeyError/IndexError
        on invalid inputs, out-of-range frequencies/times, or labels missing
        from the gain table.
        ------------------------------------------------------------------------
        """

        try:
            bl_labels
        except NameError:
            raise NameError('Input bl_labels must be specified')

        # Start from a scalar unity gain that broadcasts against any
        # (label, frequency, time)-shaped gain product accumulated below
        blgains = NP.asarray(1.0).reshape(1,1,1)
        if self.gaintable is not None:
            a1_labels = bl_labels['A1']
            a2_labels = bl_labels['A2']
            # Antenna-based and baseline-based gains are multiplied
            # independently into the final baseline gains
            for key in ['antenna-based', 'baseline-based']:
                if self.splinefuncs[key] is not None:
                    labels = self.gaintable[key]['label']
                    # Default to the tabulated frequencies when freqs not given
                    if freqs is None:
                        if self.gaintable[key]['frequency'] is not None:
                            freqs = self.gaintable[key]['frequency']
                    elif isinstance(freqs, (int,list,NP.ndarray)):
                        freqs = NP.asarray(freqs).ravel()
                    else:
                        raise TypeError('Input freqs must be a scalar, list or numpy array')
                    # Default to the tabulated times when times not given
                    if times is None:
                        if self.gaintable[key]['time'] is not None:
                            times = self.gaintable[key]['time']
                    elif isinstance(times, (int,list,NP.ndarray)):
                        times = NP.asarray(times).ravel()
                    else:
                        raise TypeError('Input times must be a scalar, list or numpy array')
                    # Mark in-bounds (ib) and out-of-bounds (oob) frequencies
                    # relative to the tabulated range; extrapolation is refused
                    if self.gaintable[key]['frequency'] is not None:
                        ib_freq_index = NP.logical_and(freqs <= NP.amax(self.gaintable[key]['frequency']), freqs >= NP.amin(self.gaintable[key]['frequency']))
                        oobl_freq_index = freqs < NP.amin(self.gaintable[key]['frequency'])
                        oobr_freq_index = freqs > NP.amax(self.gaintable[key]['frequency'])
                        oob_freq_index = NP.logical_not(ib_freq_index)
                        if NP.any(oob_freq_index):
                            raise IndexError('One or more of the frequencies outside interpolation range')
                    else:
                        if freqs is not None:
                            ib_freq_index = NP.ones(freqs.size, dtype=NP.bool)
                            oob_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                            oobl_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                            oobr_freq_index = NP.zeros(freqs.size, dtype=NP.bool)
                        else:
                            ib_freq_index = None
                            oob_freq_index = None
                    # Same bounds bookkeeping for times
                    if self.gaintable[key]['time'] is not None:
                        ib_time_index = NP.logical_and(times <= NP.amax(self.gaintable[key]['time']), times >= NP.amin(self.gaintable[key]['time']))
                        oobl_time_index = times < NP.amin(self.gaintable[key]['time'])
                        oobr_time_index = times > NP.amax(self.gaintable[key]['time'])
                        oob_time_index = NP.logical_not(ib_time_index)
                        if NP.any(oob_time_index):
                            raise IndexError('One or more of the times outside interpolation range')
                    else:
                        if times is not None:
                            ib_time_index = NP.ones(times.size, dtype=NP.bool)
                            oob_time_index = NP.zeros(times.size, dtype=NP.bool)
                            oobl_time_index = NP.zeros(times.size, dtype=NP.bool)
                            oobr_time_index = NP.zeros(times.size, dtype=NP.bool)
                        else:
                            ib_time_index = None
                            oob_time_index = None
                    # Validate the structure of splinefuncs for this key
                    if isinstance(self.splinefuncs[key], dict):
                        if 'dims' not in self.splinefuncs[key]:
                            raise KeyError('Key "dims" not found in attribute splinefuncs[{0}]'.format(key))
                        if not isinstance(self.splinefuncs[key]['dims'], NP.ndarray):
                            raise TypeError('Key "dims" in attribute splinefuncs[{0}] must contain a numpy array'.format(key))
                    # One varying axis -> 1D spline on that axis; otherwise a
                    # 2D spline evaluated pointwise on the (time, freq) grid
                    if self.splinefuncs[key]['dims'].size == 1:
                        if self.splinefuncs[key]['dims'][0] == 'time':
                            ntimes = ib_time_index.size
                            if freqs is None:
                                nchan = 1
                            else:
                                nchan = ib_freq_index.size
                            inp = times[ib_time_index]
                        else:
                            nchan = ib_freq_index.size
                            if times is None:
                                ntimes = 1
                            else:
                                ntimes = ib_time_index.size
                            inp = freqs[ib_freq_index]
                    else:
                        inp_times = times[ib_time_index]
                        inp_freqs = freqs[ib_freq_index]
                        ntimes = ib_time_index.size
                        nchan = ib_freq_index.size
                        # Flatten the (time, frequency) grid so the 2D spline
                        # can be evaluated pointwise via its .ev() method
                        tgrid, fgrid = NP.meshgrid(inp_times, inp_freqs)
                        tvec = tgrid.ravel()
                        fvec = fgrid.ravel()
                    if key == 'antenna-based':
                        # Baseline gain from antenna gains: g2 * conj(g1).
                        # Real/imag parts are splined separately and recombined;
                        # g1's imaginary part is negated to form the conjugate.
                        ind1 = NMO.find_list_in_list(labels, a1_labels)
                        ind2 = NMO.find_list_in_list(labels, a2_labels)
                        if NP.sum(ind1.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        if NP.sum(ind2.mask) > 0:
                            raise IndexError('Some antenna gains could not be found')
                        g1_conj = None
                        g2 = None
                        for i in xrange(ind1.size):
                            if self.splinefuncs[key]['dims'].size == 1:
                                if g1_conj is None:
                                    g1_conj = (self.splinefuncs[key]['interp']['real'][ind1[i]](inp) - 1j * self.splinefuncs[key]['interp']['imag'][ind1[i]](inp)).reshape(1,nchan,ntimes)
                                    g2 = (self.splinefuncs[key]['interp']['real'][ind2[i]](inp) + 1j * self.splinefuncs[key]['interp']['imag'][ind2[i]](inp)).reshape(1,nchan,ntimes)
                                else:
                                    g1_conj = NP.concatenate((g1_conj, (self.splinefuncs[key]['interp']['real'][ind1[i]](inp) - 1j * self.splinefuncs[key]['interp']['imag'][ind1[i]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                    g2 = NP.concatenate((g2, (self.splinefuncs[key]['interp']['real'][ind2[i]](inp) + 1j * self.splinefuncs[key]['interp']['imag'][ind2[i]](inp)).reshape(1,nchan,ntimes)), axis=0)
                            else:
                                if g1_conj is None:
                                    g1_conj = (self.splinefuncs[key]['interp']['real'][ind1[i]].ev(tvec,fvec) - 1j * self.splinefuncs[key]['interp']['imag'][ind1[i]].ev(tvec,fvec)).reshape(1,nchan,ntimes)
                                    g2 = (self.splinefuncs[key]['interp']['real'][ind2[i]].ev(tvec,fvec) + 1j * self.splinefuncs[key]['interp']['imag'][ind2[i]].ev(tvec,fvec)).reshape(1,nchan,ntimes)
                                else:
                                    g1_conj = NP.concatenate((g1_conj, (self.splinefuncs[key]['interp']['real'][ind1[i]].ev(tvec,fvec) - 1j * self.splinefuncs[key]['interp']['imag'][ind1[i]].ev(tvec,fvec)).reshape(1,nchan,ntimes)), axis=0)
                                    g2 = NP.concatenate((g2, (self.splinefuncs[key]['interp']['real'][ind2[i]].ev(tvec,fvec) + 1j * self.splinefuncs[key]['interp']['imag'][ind2[i]].ev(tvec,fvec)).reshape(1,nchan,ntimes)), axis=0)
                        blgains = blgains * g1_conj * g2 * NP.ones((1,nchan,ntimes), dtype=NP.complex)
                    else:
                        # Baseline-based gains: use the label directly when
                        # present; use the reversed label with conjugation
                        # otherwise; fall back to unity gain when absent
                        g12 = None
                        for labelind,label in enumerate(bl_labels):
                            if label in labels:
                                ind = NP.where(self.gaintable[key]['label'] == label)[0]
                                if self.splinefuncs[key]['dims'].size == 1:
                                    if g12 is None:
                                        g12 = (self.splinefuncs[key]['interp']['real'][ind[0]](inp) + 1j * self.splinefuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.splinefuncs[key]['interp']['real'][ind[0]](inp) + 1j * self.splinefuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                else:
                                    if g12 is None:
                                        g12 = (self.splinefuncs[key]['interp']['real'][ind[0]].ev(tvec,fvec) + 1j * self.splinefuncs[key]['interp']['imag'][ind[0]].ev(tvec,fvec)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.splinefuncs[key]['interp']['real'][ind[0]].ev(tvec,fvec) + 1j * self.splinefuncs[key]['interp']['imag'][ind[0]].ev(tvec,fvec)).reshape(1,nchan,ntimes)), axis=0)
                            elif NP.asarray([tuple(reversed(label))], dtype=bl_labels.dtype)[0] in labels:
                                ind = NP.where(labels == NP.asarray([tuple(reversed(label))], dtype=bl_labels.dtype)[0])[0]
                                if self.splinefuncs[key]['dims'].size == 1:
                                    if g12 is None:
                                        g12 = (self.splinefuncs[key]['interp']['real'][ind[0]](inp) - 1j * self.splinefuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.splinefuncs[key]['interp']['real'][ind[0]](inp) - 1j * self.splinefuncs[key]['interp']['imag'][ind[0]](inp)).reshape(1,nchan,ntimes)), axis=0)
                                else:
                                    if g12 is None:
                                        g12 = (self.splinefuncs[key]['interp']['real'][ind[0]].ev(tvec,fvec) - 1j * self.splinefuncs[key]['interp']['imag'][ind[0]].ev(tvec,fvec)).reshape(1,nchan,ntimes)
                                    else:
                                        g12 = NP.concatenate((g12, (self.splinefuncs[key]['interp']['real'][ind[0]].ev(tvec,fvec) - 1j * self.splinefuncs[key]['interp']['imag'][ind[0]].ev(tvec,fvec)).reshape(1,nchan,ntimes)), axis=0)
                            else:
                                if g12 is None:
                                    g12 = NP.ones((1,nchan,ntimes), dtype=NP.complex)
                                else:
                                    g12 = NP.concatenate((g12, NP.ones((1,nchan,ntimes), dtype=NP.complex)), axis=0)
                        blgains = blgains * g12 * NP.ones((1,nchan,ntimes), dtype=NP.complex)
            # blgains is built in (label, frequency, time) order; transpose to
            # the requested (or gaintable-default) axes ordering
            interp_axes_order = ['label', 'frequency', 'time']
            if axes_order is None:
                axes_order = self.gaintable['antenna-based']['ordering']
            elif not isinstance(axes_order, (list, NP.ndarray)):
                raise TypeError('axes_order must be a list')
            else:
                if len(axes_order) != 3:
                    raise ValueError('axes_order must be a three element list')
                for orderkey in ['label', 'frequency', 'time']:
                    if orderkey not in axes_order:
                        raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
            transpose_order = NMO.find_list_in_list(interp_axes_order, axes_order)
            blgains = NP.transpose(blgains, axes=transpose_order)
        return blgains
#############################################################################
    def nearest_gains(self, bl_labels, freqs=None, times=None, axes_order=None):
        """
        ------------------------------------------------------------------------
        Extract complex instrument gains for given baselines from the gain table
        determined by nearest neighbor logic

        Inputs:

        bl_labels   [Numpy structured array tuples] Labels of antennas in the
                    pair used to produce the baseline vector under fields 'A2'
                    and 'A1' for second and first antenna respectively. The
                    baseline vector is obtained by position of antennas under
                    'A2' minus position of antennas under 'A1'

        freqs       [None or numpy array] Array of frequencies at which the
                    gains are to be looked up by nearest-neighbor matching
                    against the tabulated frequencies. If set to None
                    (default), all frequencies in the gaintable are assumed.
                    The array is of size nchan

        times       [None or numpy array] Array of times at which the gains
                    are to be looked up by nearest-neighbor matching against
                    the tabulated times. If set to None (default), all times
                    in the gaintable are assumed. The array is of size nts

        axes_order  [None or list or numpy array] Axes ordering for extracted
                    gains. It must contain the three elements 'label',
                    'frequency', and 'time'. If set to None, it will be
                    returned in the same order as in the input gaintable.

        Outputs:

        [numpy array] Complex gains of shape nbl x nchan x nts for the specified
        baselines, frequencies and times.
        ------------------------------------------------------------------------
        """

        try:
            bl_labels
        except NameError:
            raise NameError('Input bl_labels must be specified')

        # Start from a scalar unity gain that broadcasts against any
        # (label, frequency, time)-shaped gain selection accumulated below
        blgains = NP.asarray(1.0).reshape(1,1,1)
        if self.gaintable is not None:
            a1_labels = bl_labels['A1']
            a2_labels = bl_labels['A2']
            for gainkey in ['antenna-based', 'baseline-based']:
                if gainkey in self.gaintable:
                    # Work in canonical (label, frequency, time) order
                    temp_axes_order = ['label', 'frequency', 'time']
                    inp_order = self.gaintable[gainkey]['ordering']
                    temp_transpose_order = NMO.find_list_in_list(inp_order, temp_axes_order)
                    if NP.all(inp_order == temp_axes_order):
                        gains = NP.copy(self.gaintable[gainkey]['gains'])
                    else:
                        gains = NP.transpose(NP.copy(self.gaintable[gainkey]['gains']), axes=temp_transpose_order)
                    # Nearest-neighbor match requested frequencies against the
                    # tabulated frequency axis
                    freqs_to_search = copy.copy(freqs)
                    if freqs_to_search is None:
                        freqs_to_search = copy.copy(self.gaintable[gainkey]['frequency'])
                    if freqs_to_search is not None:
                        if self.gaintable[gainkey]['frequency'] is not None:
                            inpind, refind_freqs, distNN= LKP.find_1NN(self.gaintable[gainkey]['frequency'].reshape(-1,1), freqs_to_search.reshape(-1,1), remove_oob=True)
                        else:
                            refind_freqs = None
                    # NOTE(review): if freqs_to_search is None here (both freqs
                    # and the tabulated frequencies are None), refind_freqs is
                    # never assigned before the check below -- TODO confirm
                    # against the original indentation
                    if refind_freqs is None:
                        refind_freqs = NP.arange(gains.shape[1])
                    # Same nearest-neighbor matching for times
                    times_to_search = copy.copy(times)
                    if times_to_search is None:
                        times_to_search = copy.copy(self.gaintable[gainkey]['time'])
                    if times_to_search is not None:
                        if self.gaintable[gainkey]['time'] is not None:
                            inpind, refind_times, distNN = LKP.find_1NN(self.gaintable[gainkey]['time'].reshape(-1,1), times_to_search.reshape(-1,1), remove_oob=True)
                        else:
                            refind_times = None
                    if refind_times is None:
                        refind_times = NP.arange(gains.shape[2])
                    if gains.shape[0] == 1:
                        # Single shared gain applies to every baseline.
                        # NOTE(review): this fancy indexing pairs refind_freqs
                        # with refind_times elementwise and appears to require
                        # equal sizes -- presumably NP.ix_ was intended as in
                        # the multi-label branch; verify
                        blgains = blgains * gains[:,refind_freqs,refind_times].reshape(1,refind_freqs.size,refind_times.size)
                    else:
                        labels = self.gaintable[gainkey]['label']
                        if gainkey == 'antenna-based':
                            # Baseline gain from antenna gains: g2 * conj(g1)
                            ind1 = NMO.find_list_in_list(labels, a1_labels)
                            ind2 = NMO.find_list_in_list(labels, a2_labels)
                            if NP.sum(ind1.mask) > 0:
                                raise IndexError('Some antenna gains could not be found')
                            if NP.sum(ind2.mask) > 0:
                                raise IndexError('Some antenna gains could not be found')
                            blgains = blgains * gains[NP.ix_(ind2,refind_freqs,refind_times)].reshape(ind2.size,refind_freqs.size,refind_times.size) * gains[NP.ix_(ind1,refind_freqs,refind_times)].conj().reshape(ind1.size,refind_freqs.size,refind_times.size)
                        else:
                            # Baseline-based: append conjugates under reversed
                            # labels so either orientation of a baseline can be
                            # matched in one lookup
                            labels_conj = [tuple(reversed(label)) for label in labels]
                            labels_conj = NP.asarray(labels_conj, dtype=labels.dtype)
                            labels_conj_appended = NP.concatenate((labels, labels_conj), axis=0)
                            gains_conj_appended = NP.concatenate((gains, gains.conj()), axis=0)
                            ind = NMO.find_list_in_list(labels_conj_appended, bl_labels)
                            selected_gains = gains_conj_appended[NP.ix_(ind.compressed(),refind_freqs,refind_times)]
                            if ind.compressed().size == 1:
                                selected_gains = selected_gains.reshape(NP.sum(~ind.mask),refind_freqs.size,refind_times.size)
                            # Only baselines found in the table are scaled;
                            # unmatched ones keep their current gain
                            blgains[~ind.mask, ...] = blgains[~ind.mask, ...] * selected_gains
            # Transpose the result to the requested axes ordering.
            # NOTE(review): transpose uses inp_order (the last gainkey's stored
            # ordering) although blgains was assembled in canonical
            # (label, frequency, time) order -- verify this is intended
            if axes_order is None:
                axes_order = inp_order
            elif not isinstance(axes_order, (list, NP.ndarray)):
                raise TypeError('axes_order must be a list')
            else:
                if len(axes_order) != 3:
                    raise ValueError('axes_order must be a three element list')
                for orderkey in ['label', 'frequency', 'time']:
                    if orderkey not in axes_order:
                        raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
            transpose_order = NMO.find_list_in_list(inp_order, axes_order)
            blgains = NP.transpose(blgains, axes=transpose_order)
        return blgains
#############################################################################
def eval_gains(self, bl_labels, freq_index=None, time_index=None,
axes_order=None):
"""
------------------------------------------------------------------------
Extract complex instrument gains for given baselines from the gain table
Inputs:
bl_labels [Numpy structured array tuples] Labels of antennas in the
pair used to produce the baseline vector under fields 'A2'
and 'A1' for second and first antenna respectively. The
baseline vector is obtained by position of antennas under
'A2' minus position of antennas under 'A1'
freq_index [None, int, list or numpy array] Index (scalar) or indices
(list or numpy array) along the frequency axis at which
gains are to be extracted. If set to None, gains at all
frequencies in the gain table will be extracted.
time_index [None, int, list or numpy array] Index (scalar) or indices
(list or numpy array) along the time axis at which gains
are to be extracted. If set to None, gains at all timesin
the gain table will be extracted.
axes_order [None or list or numpy array] Axes ordering for extracted
gains. It must contain the three elements 'label',
'frequency', and 'time'. If set to None, it will be
returned in the same order as in the input gaintable.
Outputs:
[numpy array] Complex gains of shape nbl x nchan x nts for the specified
baselines, frequencies and times.
------------------------------------------------------------------------
"""
return extract_gains(self.gaintable, bl_labels, freq_index=None,
time_index=None, axes_order=None)
#############################################################################
def write_gaintable(self, outfile, axes_order=None, compress=True,
compress_fmt='gzip', compress_opts=9):
"""
------------------------------------------------------------------------
Write gain table with specified axes ordering to external file in HDF5
format
Inputs:
outfile [string] Filename including full path into which the gain
table will be written
axes_order [None or list or numpy array] The axes ordering of gain
table that will be written to external file specified in
outfile. If set to None, it will store in the same order
as in the attribute gaintable
compress [boolean] Specifies if the gain table is written in
compressed format. The compression format and compression
parameters are specified in compress_fmt and compress_opts
respectively
compress_fmt
[string] Accepted values are 'gzip' (default) or 'lzf'. See
h5py module documentation for comparison of these
compression formats
compress_opts
[integer] Applies only if compress_fmt is set to 'gzip'. It
must be an integer in the range 0 to 9. Default=9 implies
maximum compression
------------------------------------------------------------------------
"""
try:
outfile
except NameError:
raise NameError('outfile not specified')
if axes_order is not None:
if not isinstance(axes_order, (list, NP.ndarray)):
raise TypeError('axes_order must be a list')
else:
if len(axes_order) != 3:
raise ValueError('axes_order must be a three element list')
for orderkey in ['label', 'frequency', 'time']:
if orderkey not in axes_order:
raise ValueError('axes_order does not contain key "{0}"'.format(orderkey))
if not isinstance(compress, bool):
raise TypeError('Input parameter compress must be boolean')
if compress:
if not isinstance(compress_fmt, str):
raise TypeError('Input parameter compress_fmt must be a string')
compress_fmt = compress_fmt.lower()
if compress_fmt not in ['gzip', 'lzf']:
raise ValueError('Input parameter compress_fmt invalid')
if compress_fmt == 'gzip':
if not isinstance(compress_opts, int):
raise TypeError('Input parameter compress_opts must be an integer')
compress_opts = NP.clip(compress_opts, 0, 9)
with h5py.File(outfile, 'w') as fileobj:
for gainkey in self.gaintable:
if self.gaintable[gainkey] is not None:
if axes_order is not None:
transpose_order = NMO.find_list_in_list(self.gaintable[gainkey]['ordering'], axes_order)
else:
axes_order = self.gaintable[gainkey]['ordering']
if NP.all(self.gaintable[gainkey]['ordering'] == axes_order):
gains = NP.copy(self.gaintable[gainkey]['gains'])
else:
gains = NP.transpose(NP.copy(self.gaintable[gainkey]['gains']), axes=transpose_order)
grp = fileobj.create_group(gainkey)
for subkey in self.gaintable[gainkey]:
if subkey == 'gains':
if compress:
chunkshape = []
for ind,axis in enumerate(axes_order):
if axis == 'frequency':
chunkshape += [gains.shape[ind]]
else:
chunkshape += [1]
chunkshape = tuple(chunkshape)
if compress_fmt == 'gzip':
dset = grp.create_dataset(subkey, data=gains, chunks=chunkshape, compression=compress_fmt, compression_opts=compress_opts)
else:
dset = grp.create_dataset(subkey, data=gains, chunks=chunkshape, compression=compress_fmt)
else:
grp.create_dataset(subkey, data=gains, chunks=chunkshape)
elif subkey == 'ordering':
dset = grp.create_dataset(subkey, data=axes_order)
else:
if isinstance(self.gaintable[gainkey][subkey], NP.ndarray):
dset = grp.create_dataset(subkey, data=self.gaintable[gainkey][subkey])
#################################################################################
class ROI_parameters(object):
"""
----------------------------------------------------------------------------
Class to manage information on the regions of interest for different
snapshots in an observation.
Attributes:
skymodel [instance of class SkyModel] The common sky model for all the
observing instances from which the ROI is determined based on
a subset corresponding to each snapshot observation.
freq [numpy vector] Frequency channels (with units specified by the
attribute freq_scale)
freq_scale [string] string specifying the units of frequency. Accepted
values are 'GHz', 'MHz' and 'Hz'. Default = 'GHz'
telescope [dictionary] Contains information about the telescope parameters
using which the primary beams in the regions of interest are
determined. It specifies the type of element, element size and
orientation. It consists of the following keys and information:
'id' [string] If set, will ignore the other keys and use
telescope details for known telescopes. Accepted
values are 'mwa', 'vla', 'gmrt', 'ugmrt', 'hera',
'paper', 'hirax', 'chime' and 'mwa_tools'. If using
'mwa_tools', the MWA_Tools and mwapb modules must
be installed and imported.
'shape' [string] Shape of antenna element. Accepted values
are 'dipole', 'delta', and 'dish'. Will be ignored
if key 'id' is set. 'delta' denotes a delta
function for the antenna element which has an
isotropic radiation pattern. 'delta' is the default
when keys 'id' and 'shape' are not set.
'size' [scalar] Diameter of the telescope dish (in meters)
if the key 'shape' is set to 'dish' or length of
the dipole if key 'shape' is set to 'dipole'. Will
be ignored if key 'shape' is set to 'delta'. Will
be ignored if key 'id' is set and a preset value
used for the diameter or dipole.
'orientation' [list or numpy array] If key 'shape' is set to
dipole, it refers to the orientation of the dipole
element unit vector whose magnitude is specified by
length. If key 'shape' is set to 'dish', it refers
to the position on the sky to which the dish is
pointed. For a dipole, this unit vector must be
provided in the local ENU coordinate system aligned
with the direction cosines coordinate system or in
the Alt-Az coordinate system. This will be
used only when key 'shape' is set to 'dipole'.
This could be a 2-element vector (transverse
direction cosines) where the third (line-of-sight)
component is determined, or a 3-element vector
specifying all three direction cosines or a two-
element coordinate in Alt-Az system. If not provided
it defaults to an eastward pointing dipole. If key
'shape' is set to 'dish', the orientation refers
to the pointing center of the dish on the sky. It
can be provided in Alt-Az system as a two-element
vector or in the direction cosine coordinate
system as a two- or three-element vector. If not
set in the case of a dish element, it defaults to
zenith. This is not to be confused with the key
'pointing_center' in dictionary 'pointing_info'
which refers to the beamformed pointing center of
the array. The coordinate system is specified by
the key 'ocoords'
'ocoords' [scalar string] specifies the coordinate system
for key 'orientation'. Accepted values are 'altaz'
and 'dircos'.
'element_locs'
[2- or 3-column array] Element locations that
constitute the tile. Each row specifies
location of one element in the tile. The
locations must be specified in local ENU
coordinate system. First column specifies along
local east, second along local north and the
third along local up. If only two columns are
specified, the third column is assumed to be
                                  zeros. If 'elements_locs' is not provided, it
                                  is assumed to be a one-element system and not a
phased array as far as determination of primary
beam is concerned.
'groundplane' [scalar] height of telescope element above the
ground plane (in meteres). Default = None will
denote no ground plane effects.
'ground_modify'
[dictionary] contains specifications to modify
the analytically computed ground plane pattern. If
absent, the ground plane computed will not be
modified. If set, it may contain the following
keys:
'scale' [scalar] positive value to scale the
modifying factor with. If not set, the
scale factor to the modification is unity.
'max' [scalar] positive value to clip the
modified and scaled values to. If not set,
there is no upper limit
'latitude' [scalar] specifies latitude of the telescope site
(in degrees). Default = None (advisable to specify
a real value)
                   'longitude' [scalar] specifies longitude of the telescope site
                               (in degrees). Default = 0 (GMT)
'altitude' [scalar] Specifies altitude of the telescope site
(in m) above the surface of the Earth. Default=0m
'pol' [string] specifies polarization when using
MWA_Tools for primary beam computation. Value of
key 'id' in attribute dictionary telescope must be
set to 'mwa_tools'. 'X' or 'x' denotes
X-polarization. Y-polarization is specified by 'Y'
or 'y'. If polarization is not specified when 'id'
of telescope is set to 'mwa_tools', it defaults
to X-polarization.
info [dictionary] contains information about the region of interest.
It consists of the following keys and information:
'radius' [list of scalars] list of angular radii (in degrees),
one entry for each snapshot observation which defines
the region of interest.
'center' [list of numpy vectors] list of centers of regions of
interest. For each snapshot, there is one element in
the list each of which is a center of corresponding
region of interest. Each numpy vector could be made of
two elements (Alt-Az) or three elements (direction
cosines).
'ind' [list of numpy vectors] list of vectors of indices
that define the region of interest as a subset of the
sky model. Each element of the list is a numpy vector
of indices indexing into the sky model corresponding
to each snapshot.
'pbeam' [list of numpy arrays] list of array of primary beam
values in the region of interest. The size of each
element in the list corresponding to each snapshot is
n_roi x nchan where n_roi is the number of pixels in
region of interest.
pinfo [list of dictionaries] Each dictionary element in the list
corresponds to a specific snapshot. It contains information
relating to the pointing center. The pointing center can be
specified either via element delay compensation or by directly
specifying the pointing center in a certain coordinate system.
Default = None (pointing centered at zenith). Each dictionary
element may consist of the following keys and information:
'gains' [numpy array] Complex element gains. Must be of
size equal to the number of elements as
specified by the number of rows in
'element_locs'. If set to None (default), all
element gains are assumed to be unity.
'delays' [numpy array] Delays (in seconds) to be applied
to the tile elements. Size should be equal to
number of tile elements (number of rows in
antpos). Default = None will set all element
delays to zero phasing them to zenith.
'pointing_center' [numpy array] This will apply in the absence of
key 'delays'. This can be specified as a row
vector. Should have two-columns if using Alt-Az
coordinates, or two or three columns if using
direction cosines. There is no default. The
coordinate system must be specified in
'pointing_coords' if 'pointing_center' is to be
used.
'pointing_coords' [string scalar] Coordinate system in which the
pointing_center is specified. Accepted values
are 'altaz' or 'dircos'. Must be provided if
'pointing_center' is to be used. No default.
'delayerr' [int, float] RMS jitter in delays used in the
beamformer. Random jitters are drawn from a
normal distribution with this rms. Must be
a non-negative scalar. If not provided, it
defaults to 0 (no jitter).
Member functions:
__init__() Initializes an instance of class ROI_parameters using default
values or using a specified initialization file
append_settings()
Determines and appends ROI (regions of interest) parameter
information for each snapshot observation using the input
parameters provided. Optionally also computes the primary beam
values in the region of interest using the telescope parameters.
save() Saves the information about the regions of interest to a FITS
file on disk
-----------------------------------------------------------------------------
"""
def __init__(self, init_file=None):
"""
-------------------------------------------------------------------------
Initializes an instance of class ROI_parameters using default values or
using a specified initialization file
Class attribute initialized are:
skymodel, freq, freq_scale, telescope, info, and pinfo
Read docstring of class ROI_parameters for details on these attributes.
Keyword input(s):
init_file [string] Location of the initialization file from which an
instance of class ROI_parameters will be created. File
format must be compatible with the one saved to disk by
member function save()
-------------------------------------------------------------------------
"""
argument_init = False
init_file_success = False
if init_file is not None:
try:
hdulist = fits.open(init_file)
except IOError:
argument_init = True
warnings.warn('\tinit_file provided but could not open the initialization file. Attempting to initialize with input parameters...')
if not argument_init:
n_obs = hdulist[0].header['n_obs']
extnames = [hdulist[i].header['EXTNAME'] for i in xrange(1,len(hdulist))]
self.info = {}
self.info['radius'] = []
self.info['center'] = []
self.info['ind'] = []
self.info['pbeam'] = []
self.telescope = {}
if 'id' in hdulist[0].header:
self.telescope['id'] = hdulist[0].header['telescope']
if 'latitude' in hdulist[0].header:
self.telescope['latitude'] = hdulist[0].header['latitude']
else:
self.telescope['latitude'] = None
if 'longitude' in hdulist[0].header:
self.telescope['longitude'] = hdulist[0].header['longitude']
else:
self.telescope['longitude'] = 0.0
if 'altitude' in hdulist[0].header:
self.telescope['altitude'] = hdulist[0].header['altitude']
else:
self.telescope['altitude'] = 0.0
try:
self.telescope['shape'] = hdulist[0].header['element_shape']
except KeyError:
raise KeyError('Antenna element shape not found in the init_file header')
try:
self.telescope['size'] = hdulist[0].header['element_size']
except KeyError:
raise KeyError('Antenna element size not found in the init_file header')
try:
self.telescope['ocoords'] = hdulist[0].header['element_ocoords']
except KeyError:
raise KeyError('Antenna element orientation coordinate system not found in the init_file header')
if 'ANTENNA ELEMENT ORIENTATION' in extnames:
self.telescope['orientation'] = hdulist['ANTENNA ELEMENT ORIENTATION'].data.reshape(1,-1)
else:
raise KeyError('Extension named "orientation" not found in init_file.')
if 'ANTENNA ELEMENT LOCATIONS' in extnames:
self.telescope['element_locs'] = hdulist['ANTENNA ELEMENT LOCATIONS'].data
if 'ground_plane' in hdulist[0].header:
self.telescope['groundplane'] = hdulist[0].header['ground_plane']
if 'ground_modify_scale' in hdulist[0].header:
if 'ground_modify' not in self.telescope:
self.telescope['ground_modify'] = {}
self.telescope['ground_modify']['scale'] = hdulist[0].header['ground_modify_scale']
if 'ground_modify_max' in hdulist[0].header:
if 'ground_modify' not in self.telescope:
self.telescope['ground_modify'] = {}
self.telescope['ground_modify']['max'] = hdulist[0].header['ground_modify_max']
else:
self.telescope['groundplane'] = None
if 'FREQ' in extnames:
self.freq = hdulist['FREQ'].data
else:
raise KeyError('Extension named "FREQ" not found in init_file.')
self.info['ind'] = [hdulist['IND_{0:0d}'.format(i)].data for i in range(n_obs)]
self.info['pbeam'] = [hdulist['PB_{0:0d}'.format(i)].data for i in range(n_obs)]
self.pinfo = []
if 'ANTENNA ELEMENT LOCATIONS' in extnames:
for i in range(n_obs):
self.pinfo += [{}]
# try:
# self.pinfo[-1]['delays'] = hdulist['DELAYS_{0:0d}'.format(i)].data
# except KeyError:
# raise KeyError('Extension DELAYS_{0:0d} for phased array beamforming not found in init_file'.format(i))
if 'DELAYS_{0:0d}'.format(i) in extnames:
self.pinfo[-1]['delays'] = hdulist['DELAYS_{0:0d}'.format(i)].data
if 'DELAYERR' in hdulist['DELAYS_{0:0d}'.format(i)].header:
delayerr = hdulist['DELAYS_{0:0d}'.format(i)].header['delayerr']
if delayerr <= 0.0:
self.pinfo[-1]['delayerr'] = None
else:
self.pinfo[-1]['delayerr'] = delayerr
len_pinfo = len(self.pinfo)
if len_pinfo > 0:
if len_pinfo != n_obs:
raise ValueError('Inconsistency in number of pointings in header and number of phased array delay settings')
for i in range(n_obs):
if 'POINTING_CENTER_{0:0d}'.format(i) in extnames:
if len_pinfo == 0:
self.pinfo += [{}]
self.pinfo[i]['pointing_center'] = hdulist['POINTING_CENTER_{0:0d}'.format(i)].data
try:
self.pinfo[i]['pointing_coords'] = hdulist['POINTING_CENTER_{0:0d}'.format(i)].header['pointing_coords']
except KeyError:
raise KeyError('Header of extension POINTING_CENTER_{0:0d} not found to contain key "pointing_coords" in init_file'.format(i))
len_pinfo = len(self.pinfo)
if len_pinfo > 0:
if len_pinfo != n_obs:
raise ValueError('Inconsistency in number of pointings in header and number of pointing centers')
hdulist.close()
init_file_success = True
return
else:
argument_init = True
if (not argument_init) and (not init_file_success):
raise ValueError('Initialization failed with the use of init_file.')
self.skymodel = None
self.telescope = None
self.info = {}
self.info['radius'] = []
self.info['ind'] = []
self.info['pbeam'] = []
self.info['center'] = []
self.info['center_coords'] = None
self.pinfo = []
self.freq = None
#############################################################################
def append_settings(self, skymodel, freq, pinfo=None, lst=None,
time_jd=None, roi_info=None, telescope=None,
freq_scale='GHz'):
"""
------------------------------------------------------------------------
Determines and appends ROI (regions of interest) parameter information
for each snapshot observation using the input parameters provided.
Optionally also computes the primary beam values in the region of
interest using the telescope parameters.
Inputs:
skymodel
[instance of class SkyModel] The common sky model for all the
observing instances from which the ROI is determined based on
a subset corresponding to each snapshot observation. If set
to None, the corresponding entries are all set to empty values
freq [numpy vector] Frequency channels (with units specified by the
attribute freq_scale)
pinfo [list of dictionaries] Each dictionary element in the list
corresponds to a specific snapshot. It contains information
relating to the pointing center. The pointing center can be
specified either via element delay compensation or by directly
specifying the pointing center in a certain coordinate system.
Default = None (pointing centered at zenith). Each dictionary
element may consist of the following keys and information:
'gains' [numpy array] Complex element gains. Must be
of size equal to the number of elements as
specified by the number of rows in
'element_locs'. If set to None (default), all
element gains are assumed to be unity.
'delays' [numpy array] Delays (in seconds) to be
applied to the tile elements. Size should be
equal to number of tile elements (number of
rows in antpos). Default = None will set all
element delays to zero phasing them to zenith
'pointing_center' [numpy array] This will apply in the absence
of key 'delays'. This can be specified as a
row vector. Should have two-columns if using
Alt-Az coordinates, or two or three columns
if using direction cosines. There is no
default. The coordinate system must be
specified in 'pointing_coords' if
'pointing_center' is to be used.
'pointing_coords' [string scalar] Coordinate system in which
the pointing_center is specified. Accepted
values are 'altaz' or 'dircos'. Must be
provided if 'pointing_center' is to be used.
No default.
'delayerr' [int, float] RMS jitter in delays used in
the beamformer. Random jitters are drawn
from a normal distribution with this rms.
Must be a non-negative scalar. If not
provided, it defaults to 0 (no jitter).
lst [scalar] LST in degrees. Will be used in determination of sky
coordinates inside ROI if not provided. Default=None.
time_jd [scalar] Time of the snapshot in JD. Will be used in
determination of sky coordinates inside ROI if not provided.
Default=None.
telescope
[dictionary] Contains information about the telescope parameters
using which the primary beams in the regions of interest are
determined. It specifies the type of element, element size and
orientation. It consists of the following keys and information:
'id' [string] If set, will ignore the other keys and
use telescope details for known telescopes.
Accepted values are 'mwa', 'vla', 'gmrt', 'ugmrt',
'hera', 'paper', 'hirax', 'chime' and 'mwa_tools'. If
using 'mwa_tools', the MWA_Tools and mwapb modules
must be installed and imported.
'shape' [string] Shape of antenna element. Accepted values
are 'dipole', 'delta', and 'dish'. Will be ignored
if key 'id' is set. 'delta' denotes a delta
function for the antenna element which has an
isotropic radiation pattern. 'delta' is the
default when keys 'id' and 'shape' are not set.
'size' [scalar] Diameter of the telescope dish (in
meters) if the key 'shape' is set to 'dish' or
length of the dipole if key 'shape' is set to
'dipole'. Will be ignored if key 'shape' is set to
'delta'. Will be ignored if key 'id' is set and a
preset value used for the diameter or dipole.
'orientation' [list or numpy array] If key 'shape' is set to
dipole, it refers to the orientation of the dipole
element unit vector whose magnitude is specified
by length. If key 'shape' is set to 'dish', it
refers to the position on the sky to which the
dish is pointed. For a dipole, this unit vector
must be provided in the local ENU coordinate
system aligned with the direction cosines
coordinate system or in the Alt-Az coordinate
system. This will be used only when key 'shape'
is set to 'dipole'. This could be a 2-element
vector (transverse direction cosines) where the
third (line-of-sight) component is determined,
or a 3-element vector specifying all three
direction cosines or a two-element coordinate in
Alt-Az system. If not provided it defaults to an
eastward pointing dipole. If key
'shape' is set to 'dish', the orientation refers
to the pointing center of the dish on the sky. It
can be provided in Alt-Az system as a two-element
vector or in the direction cosine coordinate
system as a two- or three-element vector. If not
set in the case of a dish element, it defaults to
zenith. This is not to be confused with the key
'pointing_center' in dictionary 'pointing_info'
which refers to the beamformed pointing center of
the array. The coordinate system is specified by
the key 'ocoords'
'ocoords' [scalar string] specifies the coordinate system
for key 'orientation'. Accepted values are 'altaz'
and 'dircos'.
'element_locs'
[2- or 3-column array] Element locations that
constitute the tile. Each row specifies
location of one element in the tile. The
locations must be specified in local ENU
coordinate system. First column specifies along
local east, second along local north and the
third along local up. If only two columns are
specified, the third column is assumed to be
zeros. If 'elements_locs' is not provided, it
assumed to be a one-element system and not a
phased array as far as determination of primary
beam is concerned.
'groundplane' [scalar] height of telescope element above the
ground plane (in meteres). Default = None will
denote no ground plane effects.
'ground_modify'
[dictionary] contains specifications to modify
the analytically computed ground plane pattern. If
absent, the ground plane computed will not be
modified. If set, it may contain the following
keys:
'scale' [scalar] positive value to scale the
modifying factor with. If not set, the
scale factor to the modification is unity.
'max' [scalar] positive value to clip the
modified and scaled values to. If not set,
there is no upper limit
'latitude' [scalar] specifies latitude of the telescope site
(in degrees). Default = None, otherwise should
equal the value specified during initialization
of the instance
'longitude' [scalar] specifies longitude of the telescope site
(in degrees). Default = None, otherwise should
equal the value specified during initialization
of the instance
'altitude' [scalar] specifies altitude of the telescope site
(in m). Default = None, otherwise should
equal the value specified during initialization
of the instance
'pol' [string] specifies polarization when using
MWA_Tools for primary beam computation. Value of
key 'id' in attribute dictionary telescope must be
set to 'mwa_tools'. 'X' or 'x' denotes
X-polarization. Y-polarization is specified by 'Y'
or 'y'. If polarization is not specified when 'id'
of telescope is set to 'mwa_tools', it defaults
to X-polarization.
------------------------------------------------------------------------
"""
try:
skymodel, freq, pinfo
except NameError:
raise NameError('skymodel, freq, and pinfo must be specified.')
if self.freq is None:
if freq is None:
raise ValueError('freq must be specified using a numpy array')
elif not isinstance(freq, NP.ndarray):
raise TypeError('freq must be specified using a numpy array')
self.freq = freq.ravel()
if (freq_scale is None) or (freq_scale == 'Hz') or (freq_scale == 'hz'):
self.freq = NP.asarray(freq)
elif freq_scale == 'GHz' or freq_scale == 'ghz':
self.freq = NP.asarray(freq) * 1.0e9
elif freq_scale == 'MHz' or freq_scale == 'mhz':
self.freq = NP.asarray(freq) * 1.0e6
elif freq_scale == 'kHz' or freq_scale == 'khz':
self.freq = NP.asarray(freq) * 1.0e3
else:
raise ValueError('Frequency units must be "GHz", "MHz", "kHz" or "Hz". If not set, it defaults to "Hz"')
self.freq_scale = 'Hz'
if self.telescope is None:
if isinstance(telescope, dict):
self.telescope = telescope
else:
raise TypeError('Input telescope must be a dictionary.')
if skymodel is None:
self.info['pbeam'] += [NP.asarray([])]
self.info['ind'] += [NP.asarray([])]
self.pinfo += [None]
elif not isinstance(skymodel, SM.SkyModel):
raise TypeError('skymodel should be an instance of class SkyModel.')
else:
self.skymodel = skymodel
if self.freq is None:
if freq is None:
raise ValueError('freq must be specified using a numpy array')
elif not isinstance(freq, NP.ndarray):
raise TypeError('freq must be specified using a numpy array')
self.freq = freq.ravel()
if (freq_scale is None) or (freq_scale == 'Hz') or (freq_scale == 'hz'):
self.freq = NP.asarray(freq)
elif freq_scale == 'GHz' or freq_scale == 'ghz':
self.freq = NP.asarray(freq) * 1.0e9
elif freq_scale == 'MHz' or freq_scale == 'mhz':
self.freq = NP.asarray(freq) * 1.0e6
elif freq_scale == 'kHz' or freq_scale == 'khz':
self.freq = NP.asarray(freq) * 1.0e3
else:
raise ValueError('Frequency units must be "GHz", "MHz", "kHz" or "Hz". If not set, it defaults to "Hz"')
self.freq_scale = 'Hz'
if roi_info is None:
raise ValueError('roi_info dictionary must be set.')
pbeam_input = False
if 'ind' in roi_info:
if roi_info['ind'] is not None:
self.info['ind'] += [roi_info['ind']]
if roi_info['ind'].size > 0:
if 'pbeam' in roi_info:
if roi_info['pbeam'] is not None:
try:
pb = roi_info['pbeam'].reshape(-1,self.freq.size)
except ValueError:
raise ValueError('Number of columns of primary beam in key "pbeam" of dictionary roi_info must be equal to number of frequency channels.')
if NP.asarray(roi_info['ind']).size == pb.shape[0]:
self.info['pbeam'] += [roi_info['pbeam'].astype(NP.float32)]
else:
raise ValueError('Number of elements in values in key "ind" and number of rows of values in key "pbeam" must be identical.')
pbeam_input = True
if not pbeam_input: # Will require sky positions in Alt-Az coordinates
if skymodel.coords == 'radec':
skycoords = SkyCoord(ra=skymodel.location[:,0]*units.deg, dec=skymodel.location[:,1]*units.deg, frame='fk5', equinox=Time(skymodel.epoch, format='jyear_str', scale='utc'))
if self.telescope['latitude'] is None:
raise ValueError('Latitude of the observatory must be provided.')
if lst is None:
raise ValueError('LST must be provided.')
if time_jd is None:
raise ValueError('Time in JD must be provided')
skycoords_altaz = skycoords.transform_to(AltAz(obstime=Time(time_jd, format='jd', scale='utc'), location=EarthLocation(lon=self.telescope['longitude']*units.deg, lat=self.telescope['latitude']*units.deg, height=self.telescope['altitude']*units.m)))
skypos_altaz = NP.hstack((skycoords_altaz.alt.deg.reshape(-1,1), skycoords_altaz.az.deg.reshape(-1,1)))
# skypos_altaz = GEOM.hadec2altaz(NP.hstack((NP.asarray(lst-skymodel.location[:,0]).reshape(-1,1), skymodel.location[:,1].reshape(-1,1))), self.telescope['latitude'], units='degrees') # Need to accurately take ephemeris into account
elif skymodel.coords == 'hadec':
if self.telescope['latitude'] is None:
raise ValueError('Latitude of the observatory must be provided.')
skypos_altaz = GEOM.hadec2altaz(skymodel.location, self.telescope['latitude'], units='degrees')
elif skymodel.coords == 'dircos':
skypos_altaz = GEOM.dircos2altaz(skymodel.location, units='degrees')
elif skymodel.coords == 'altaz':
skypos_altaz = skymodel.location
else:
raise KeyError('skycoords invalid or unspecified in skymodel')
if 'radius' in roi_info:
self.info['radius'] += [roi_info['radius']]
if 'center' in roi_info:
self.info['center'] += [roi_info['center']]
else:
if roi_info['radius'] is None:
roi_info['radius'] = 90.0
else:
roi_info['radius'] = max(0.0, min(roi_info['radius'], 90.0))
self.info['radius'] += [roi_info['radius']]
if roi_info['center'] is None:
self.info['center'] += [NP.asarray([90.0, 270.0]).reshape(1,-1)]
else:
roi_info['center'] = NP.asarray(roi_info['center']).reshape(1,-1)
if roi_info['center_coords'] == 'dircos':
self.info['center'] += [GEOM.dircos2altaz(roi_info['center'], units='degrees')]
elif roi_info['center_coords'] == 'altaz':
self.info['center'] += [roi_info['center']]
elif roi_info['center_coords'] == 'hadec':
self.info['center'] += [GEOM.hadec2altaz(roi_info['center'], self.telescope['latitude'], units='degrees')]
elif roi_info['center_coords'] == 'radec':
if lst is None:
raise KeyError('LST not provided for coordinate conversion')
hadec = NP.asarray([lst-roi_info['center'][0,0], roi_info['center'][0,1]]).reshape(1,-1)
self.info['center'] += [GEOM.hadec2altaz(hadec, self.telescope['latitude'], units='degrees')]
elif roi_info['center_coords'] == 'dircos':
self.info['center'] += [GEOM.dircos2altaz(roi_info['center'], units='degrees')]
else:
raise ValueError('Invalid coordinate system specified for center')
if skymodel.coords == 'radec':
if self.telescope['latitude'] is None:
raise ValueError('Latitude of the observatory must be provided.')
if lst is None:
raise ValueError('LST must be provided.')
if time_jd is None:
raise ValueError('Time in JD must be provided')
skycoords = SkyCoord(ra=skymodel.location[:,0]*units.deg, dec=skymodel.location[:,1]*units.deg, frame='fk5', equinox=Time(skymodel.epoch, format='jyear_str', scale='utc'))
skycoords_altaz = skycoords.transform_to(AltAz(obstime=Time(time_jd, format='jd', scale='utc'), location=EarthLocation(lon=self.telescope['longitude']*units.deg, lat=self.telescope['latitude']*units.deg, height=self.telescope['altitude']*units.m)))
skypos_altaz = NP.hstack((skycoords_altaz.alt.deg.reshape(-1,1), skycoords_altaz.az.deg.reshape(-1,1)))
# skypos_altaz = GEOM.hadec2altaz(NP.hstack((NP.asarray(lst-skymodel.location[:,0]).reshape(-1,1), skymodel.location[:,1].reshape(-1,1))), self.telescope['latitude'], units='degrees')
elif skymodel.coords == 'hadec':
if self.telescope['latitude'] is None:
raise ValueError('Latitude of the observatory must be provided.')
skypos_altaz = GEOM.hadec2altaz(skymodel.location, self.telescope['latitude'], units='degrees')
elif skymodel.coords == 'dircos':
skypos_altaz = GEOM.dircos2altaz(skymodel.location, units='degrees')
elif skymodel.coords == 'altaz':
skypos_altaz = skymodel.location
else:
raise KeyError('skycoords invalid or unspecified in skymodel')
dtheta = GEOM.sphdist(self.info['center'][-1][0,1], self.info['center'][-1][0,0], 270.0, 90.0)
if dtheta > 1e-2: # ROI center is not zenith
m1, m2, d12 = GEOM.spherematch(self.info['center'][-1][0,0], self.info['center'][-1][0,1], skypos_altaz[:,0], skypos_altaz[:,1], roi_info['radius'], maxmatches=0)
else:
m2, = NP.where(skypos_altaz[:,0] >= 90.0-roi_info['radius']) # select sources whose altitude (angle above horizon) is 90-radius
self.info['ind'] += [m2]
if self.info['center_coords'] is None:
if 'center_coords' in roi_info:
if (roi_info['center_coords'] == 'altaz') or (roi_info['center_coords'] == 'dircos') or (roi_info['center_coords'] == 'hadec') or (roi_info['center_coords'] == 'radec'):
self.info['center_coords'] = roi_info['center_coords']
if not pbeam_input:
if pinfo is None:
raise ValueError('Pointing info dictionary pinfo must be specified.')
self.pinfo += [pinfo]
if 'pointing_coords' in pinfo: # Convert pointing coordinate to Alt-Az
if (pinfo['pointing_coords'] != 'dircos') and (pinfo['pointing_coords'] != 'altaz'):
if self.telescope['latitude'] is None:
raise ValueError('Latitude of the observatory must be provided.')
if pinfo['pointing_coords'] == 'radec':
if lst is None:
raise ValueError('LST must be provided.')
self.pinfo[-1]['pointing_center'] = NP.asarray([lst-pinfo['pointing_center'][0,0], pinfo['pointing_center'][0,1]]).reshape(1,-1)
self.pinfo[-1]['pointing_center'] = GEOM.hadec2altaz(self.pinfo[-1]['pointing_center'], self.telescope['latitude'], units='degrees')
elif pinfo[-1]['pointing_coords'] == 'hadec':
self.pinfo[-1]['pointing_center'] = GEOM.hadec2altaz(pinfo[-1]['pointing_center'], self.telescope['latitude'], units='degrees')
else:
raise ValueError('pointing_coords in dictionary pinfo must be "dircos", "altaz", "hadec" or "radec".')
self.pinfo[-1]['pointing_coords'] = 'altaz'
if 'pbeam_chromaticity' not in roi_info:
roi_info['pbeam_chromaticity'] = False
if 'pbeam_reffreq' not in roi_info:
roi_info['pbeam_reffreq'] = self.freq[self.freq.size//2]
beam_chromaticity = roi_info['pbeam_chromaticity']
if beam_chromaticity:
freqs_to_compute = self.freq
else:
nearest_freq_ind = NP.argmin(NP.abs(self.freq - roi_info['pbeam_reffreq']))
freqs_to_compute = NP.asarray(roi_info['pbeam_reffreq']).reshape(-1)
ind = self.info['ind'][-1]
if ind.size > 0:
if 'id' in self.telescope:
if self.telescope['id'] == 'mwa_tools':
if not mwa_tools_found:
raise ImportError('MWA_Tools could not be imported which is required for power pattern computation.')
pbeam = NP.empty((ind.size, self.freq.size))
for i in range(freqs_to_compute.size):
pbx_MWA, pby_MWA = MWAPB.MWA_Tile_advanced(NP.radians(90.0-skypos_altaz[ind,0]).reshape(-1,1), NP.radians(skypos_altaz[ind,1]).reshape(-1,1), freq=freqs_to_compute[i], delays=self.pinfo[-1]['delays']/435e-12)
if 'pol' in self.telescope:
if (self.telescope['pol'] == 'X') or (self.telescope['pol'] == 'x'):
pbeam[:,i] = pbx_MWA.ravel()
elif (self.telescope['pol'] == 'Y') or (self.telescope['pol'] == 'y'):
pbeam[:,i] = pby_MWA.ravel()
else:
raise ValueError('Key "pol" in attribute dictionary telescope is invalid.')
else:
self.telescope['pol'] = 'X'
pbeam[:,i] = pbx_MWA.ravel()
else:
pbeam = PB.primary_beam_generator(skypos_altaz[ind,:], freqs_to_compute, self.telescope, freq_scale=self.freq_scale, skyunits='altaz', pointing_info=self.pinfo[-1])
else:
pbeam = PB.primary_beam_generator(skypos_altaz[ind,:], freqs_to_compute, self.telescope, freq_scale=self.freq_scale, skyunits='altaz', pointing_info=self.pinfo[-1])
self.info['pbeam'] += [pbeam.astype(NP.float64) * NP.ones(self.freq.size).reshape(1,-1)]
else:
self.info['pbeam'] += [NP.asarray([])]
#############################################################################
def save(self, infile, tabtype='BinTableHDU', overwrite=False, verbose=True):
    """
    ------------------------------------------------------------------------
    Saves the information about the regions of interest to a FITS file on
    disk

    Inputs:

    infile      [string] Filename with full path to be saved to. Will be
                appended with '.fits' extension

    Keyword Input(s):

    tabtype     [string] indicates table type for one of the extensions in
                the FITS file. Allowed values are 'BinTableHDU' and
                'TableHDU' for binary and ascii tables respectively.
                Default is 'BinTableHDU'.
                NOTE(review): this argument is currently never referenced
                in the body -- every extension is written as an ImageHDU --
                so it has no effect at present.

    overwrite   [boolean] True indicates overwrite even if a file already
                exists. Default = False (does not overwrite)

    verbose     [boolean] If True (default), prints diagnostic and progress
                messages. If False, suppress printing such messages.
    ------------------------------------------------------------------------
    """
    # NOTE(review): `infile` is a required positional argument, so this
    # NameError guard can never actually trigger; kept as-is for parity
    # with similar save() methods elsewhere in this module.
    try:
        infile
    except NameError:
        raise NameError('No filename provided. Aborting ROI_parameters.save()...')

    filename = infile + '.fits'

    if verbose:
        print('\nSaving information about regions of interest...')

    # Primary HDU: carries scalar telescope/observation metadata in its
    # header. Subsequent extensions are appended in a fixed order that the
    # corresponding reader relies on, so do not reorder these statements.
    hdulist = []
    hdulist += [fits.PrimaryHDU()]
    hdulist[0].header['EXTNAME'] = 'PRIMARY'
    hdulist[0].header['n_obs'] = (len(self.info['ind']), 'Number of observations')
    if 'id' in self.telescope:
        hdulist[0].header['telescope'] = (self.telescope['id'], 'Telescope Name')
    hdulist[0].header['element_shape'] = (self.telescope['shape'], 'Antenna element shape')
    hdulist[0].header['element_size'] = (self.telescope['size'], 'Antenna element size [m]')
    hdulist[0].header['element_ocoords'] = (self.telescope['ocoords'], 'Antenna element orientation coordinates')
    # Optional location metadata: written only when set on the instance.
    # Note longitude is written unconditionally whenever latitude is set.
    if self.telescope['latitude'] is not None:
        hdulist[0].header['latitude'] = (self.telescope['latitude'], 'Latitude (in degrees)')
        hdulist[0].header['longitude'] = (self.telescope['longitude'], 'Longitude (in degrees)')
    if self.telescope['altitude'] is not None:
        hdulist[0].header['altitude'] = (self.telescope['altitude'], 'Altitude (in m)')
    # Ground-plane height and its optional modification parameters.
    if self.telescope['groundplane'] is not None:
        hdulist[0].header['ground_plane'] = (self.telescope['groundplane'], 'Antenna element height above ground plane [m]')
        if 'ground_modify' in self.telescope:
            if 'scale' in self.telescope['ground_modify']:
                hdulist[0].header['ground_modify_scale'] = (self.telescope['ground_modify']['scale'], 'Ground plane modification scale factor')
            if 'max' in self.telescope['ground_modify']:
                hdulist[0].header['ground_modify_max'] = (self.telescope['ground_modify']['max'], 'Maximum ground plane modification')

    # Array-valued telescope information goes into named image extensions.
    hdulist += [fits.ImageHDU(self.telescope['orientation'], name='Antenna element orientation')]
    if verbose:
        print('\tCreated an extension for antenna element orientation.')

    if 'element_locs' in self.telescope:
        hdulist += [fits.ImageHDU(self.telescope['element_locs'], name='Antenna element locations')]

    # Frequency channels used for the primary beams below.
    hdulist += [fits.ImageHDU(self.freq, name='FREQ')]
    if verbose:
        print('\t\tCreated an extension HDU of {0:0d} frequency channels'.format(self.freq.size))

    # One set of extensions per observation/snapshot i:
    #   IND_i    -- sky indices inside the region of interest
    #   PB_i     -- primary beam values at those indices
    #   DELAYS_i / POINTING_CENTER_i -- optional pointing information
    # Observations with an empty ROI get no IND/PB extensions; readers
    # must therefore look extensions up by name, not by position.
    for i in range(len(self.info['ind'])):
        if self.info['ind'][i].size > 0:
            hdulist += [fits.ImageHDU(self.info['ind'][i], name='IND_{0:0d}'.format(i))]
            hdulist += [fits.ImageHDU(self.info['pbeam'][i], name='PB_{0:0d}'.format(i))]
        if self.pinfo:  # if self.pinfo is not empty
            if self.pinfo[i] is not None:  # if the specific i-th entry in self.pinfo is not empty
                if 'delays' in self.pinfo[i]:
                    hdulist += [fits.ImageHDU(self.pinfo[i]['delays'], name='DELAYS_{0:0d}'.format(i))]
                    # delayerr rides along as a header card on the DELAYS
                    # extension just created (hdulist[-1]); absent/None
                    # jitter is normalized to 0.0 seconds.
                    if 'delayerr' in self.pinfo[i]:
                        if self.pinfo[i]['delayerr'] is not None:
                            hdulist[-1].header['delayerr'] = (self.pinfo[i]['delayerr'], 'Jitter in delays [s]')
                        else:
                            hdulist[-1].header['delayerr'] = (0.0, 'Jitter in delays [s]')
                if 'pointing_center' in self.pinfo[i]:
                    hdulist += [fits.ImageHDU(self.pinfo[i]['pointing_center'], name='POINTING_CENTER_{0:0d}'.format(i))]
                    # A pointing center without its coordinate system is
                    # unusable, so this is a hard error.
                    if 'pointing_coords' in self.pinfo[i]:
                        hdulist[-1].header['pointing_coords'] = (self.pinfo[i]['pointing_coords'], 'Pointing coordinate system')
                    else:
                        raise KeyError('Key "pointing_coords" not found in attribute pinfo.')
    if verbose:
        print('\t\tCreated HDU extensions for {0:0d} observations containing ROI indices and primary beams'.format(len(self.info['ind'])))

    if verbose:
        print('\tNow writing FITS file to disk...')

    # Assemble and write; `overwrite` is forwarded to astropy's writeto().
    hdu = fits.HDUList(hdulist)
    hdu.writeto(filename, overwrite=overwrite)

    if verbose:
        print('\tRegions of interest information written successfully to FITS file on disk:\n\t\t{0}\n'.format(filename))
#################################################################################
class InterferometerArray(object):
"""
----------------------------------------------------------------------------
Class to manage information on a multi-element interferometer array.
Attributes:
astroutils_githash
[string] Git# of the AstroUtils version used to create/save
the instance of class InterferometerArray
prisim_githash
[string] Git# of the PRISim version used to create/save
the instance of class InterferometerArray
A_eff [scalar, list or numpy vector] Effective area of the
interferometers (in m^2). If a scalar is provided, it is assumed
to be identical for all interferometers. Otherwise, one value
must be specified for each interferometer. Default is
pi * (25/2)^2, appropriate for a 25 m VLA dish.
baselines: [M x 3 Numpy array] The baseline vectors associated with the
M interferometers in SI units. The coordinate system of these
vectors is specified by another attribute baseline_coords.
baseline_coords
[string] Coordinate system for the baseline vectors. Default is
'localenu'. Other accepted values are 'equatorial'
baseline_lengths
[M-element numpy array] Lengths of the baseline in SI units
projected_baselines
[M x 3 x n_snaps Numpy array] The projected baseline vectors
associated with the M interferometers and number of snapshots in
SI units. The coordinate system of these vectors is specified by
either pointing_center, phase_center or as specified in input to
member function project_baselines().
bp [numpy array] Bandpass weights of size n_baselines x nchan x
n_acc, where n_acc is the number of accumulations in the
observation, nchan is the number of frequency channels, and
n_baselines is the number of baselines
bp_wts [numpy array] Additional weighting to be applied to the bandpass
shapes during the application of the member function
delay_transform(). Same size as attribute bp.
channels [list or numpy vector] frequency channels in Hz
eff_Q [scalar, list or numpy vector] Efficiency of the interferometers,
one value for each interferometer. Default = 0.89, appropriate
for the VLA. Has to be between 0 and 1. If only a scalar value
provided, it will be assumed to be identical for all the
interferometers. Otherwise, one value must be provided for each
of the interferometers.
freq_resolution
[scalar] Frequency resolution (in Hz)
labels [list of 2-element tuples] A unique identifier (tuple of
strings) for each of the interferometers.
lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as channels. This is
computed in member function delay_transform().
lag_kernel [numpy array] Inverse Fourier Transform of the frequency
bandpass shape. In other words, it is the impulse response
corresponding to frequency bandpass. Same size as attributes
bp and bp_wts. It is initialized in __init__() member function
but effectively computed in member function delay_transform()
latitude [Scalar] Latitude of the interferometer's location. Default
is 34.0790 degrees North corresponding to that of the VLA.
altitude [Scalar] Altitude of the interferometer's location. Default
is 0 m.
lst [list] List of LST (in degrees) for each timestamp
n_acc [scalar] Number of accumulations
groups [dictionary] Contains the grouping of unique baselines and the
redundant baselines as numpy recarray under each unique baseline
category/flavor. It contains as keys the labels (tuple of A1, A2)
of unique baselines and the value under each of these keys is a
list of baseline labels that are redundant under that category
bl_reversemap
[dictionary] Contains the baseline category for each baseline.
The keys are baseline labels as tuple and the value under each
key is the label of the unique baseline category that it falls
under.
gaininfo [None or instance of class GainInfo] Instance of class
Gaininfo. If set to None, default gains assumed to be unity.
gradient_mode
[string] If set to None, visibilities will be simulated as
usual. If set to string, both visibilities and visibility
gradients with respect to the quantity specified in the
string will be simulated. Currently accepted value is
'baseline'. Plan to incorporate gradients with respect to
'skypos' and 'frequency' as well in the future.
gradient [dictionary] If gradient_mode is set to None, it is an empty
dictionary. If gradient_mode is not None, this quantity holds
the gradient under the key specified by gradient_mode.
Currently, supports 'baseline' key. Other gradients will be
supported in future. It contains the following keys and values.
If gradient_mode == 'baseline':
'baseline' [numpy array] Visibility gradients with respect to
baseline vector. Complex numpy array of shape
3 x nbl x nchan x nts
obs_catalog_indices
[list of lists] Each element in the top list corresponds to a
timestamp. Inside each top list is a list of indices of sources
from the catalog which are observed inside the region of
interest. This is computed inside member function observe().
pointing_center
[2-column numpy array] Pointing center (latitude and
longitude) of the observation at a given timestamp. This is
where the telescopes will be phased up to as reference.
Coordinate system for the pointing_center is specified by another
attribute pointing_coords.
phase_center
[2-column numpy array] Phase center (latitude and
longitude) of the observation at a given timestamp. This is
where the telescopes will be phased up to as reference.
Coordinate system for the phase_center is specified by another
attribute phase_center_coords.
pointing_coords
[string] Coordinate system for telescope pointing. Accepted
values are 'radec' (RA-Dec), 'hadec' (HA-Dec) or 'altaz'
(Altitude-Azimuth). Default = 'hadec'.
phase_center_coords
[string] Coordinate system for array phase center. Accepted
values are 'radec' (RA-Dec), 'hadec' (HA-Dec) or 'altaz'
(Altitude-Azimuth). Default = 'hadec'.
skycoords [string] Coordinate system for the sky positions of sources.
Accepted values are 'radec' (RA-Dec), 'hadec' (HA-Dec) or
'altaz' (Altitude-Azimuth). Default = 'radec'.
skyvis_freq [numpy array] Complex visibility due to sky emission (in Jy or K)
along frequency axis for each interferometer estimated from the
specified external catalog. Same size as vis_freq. Used in the
member function observe(). Read its docstring for more details.
Has dimensions n_baselines x nchan x n_snaps.
skyvis_lag [numpy array] Complex visibility due to sky emission (in Jy Hz or
K Hz) along the delay axis for each interferometer obtained by
FFT of skyvis_freq along frequency axis. Same size as vis_freq.
Created in the member function delay_transform(). Read its
docstring for more details. Same dimensions as skyvis_freq
telescope [dictionary] dictionary that specifies the type of element,
element size and orientation. It consists of the following keys
and values:
'id' [string] If set, will ignore the other keys and use
telescope details for known telescopes. Accepted
values are 'mwa', 'vla', 'gmrt', 'ugmrt', 'hera',
'paper', 'hirax', 'chime' and other custom values.
Default = 'mwa'
'shape' [string] Shape of antenna element. Accepted values
are 'dipole', 'delta', and 'dish'. Will be ignored
if key 'id' is set. 'delta' denotes a delta
function for the antenna element which has an
isotropic radiation pattern. 'dish' is the default
when keys 'id' and 'shape' are not set.
'size' [scalar] Diameter of the telescope dish (in meters)
if the key 'shape' is set to 'dish' or length of
the dipole if key 'shape' is set to 'dipole'. Will
be ignored if key 'shape' is set to 'delta'. Will
be ignored if key 'id' is set and a preset value
used for the diameter or dipole. Default = 25.0.
'orientation' [list or numpy array] If key 'shape' is set to
dipole, it refers to the orientation of the dipole
element unit vector whose magnitude is specified by
length. If key 'shape' is set to 'dish', it refers
to the position on the sky to which the dish is
pointed. For a dipole, this unit vector must be
provided in the local ENU coordinate system aligned
with the direction cosines coordinate system or in
the Alt-Az coordinate system.
This could be a 2-element vector (transverse
direction cosines) where the third (line-of-sight)
component is determined, or a 3-element vector
specifying all three direction cosines or a two-
element coordinate in Alt-Az system. If not provided
it defaults to an eastward pointing dipole. If key
'shape' is set to 'dish', the orientation refers
to the pointing center of the dish on the sky. It
can be provided in Alt-Az system as a two-element
vector or in the direction cosine coordinate
system as a two- or three-element vector. If not
set in the case of a dish element, it defaults to
zenith. The coordinate system is specified by
the key 'ocoords'
'ocoords' [scalar string] specifies the coordinate system
for key 'orientation'. Accepted values are 'altaz'
and 'dircos'.
'groundplane' [scalar] height of telescope element above the
ground plane (in meters). Default = None will
denote no ground plane effects.
'ground_modify'
[dictionary] contains specifications to modify
the analytically computed ground plane pattern. If
absent, the ground plane computed will not be
modified. If set, it may contain the following
keys:
'scale' [scalar] positive value to scale the
modifying factor with. If not set, the
scale factor to the modification is unity.
'max' [scalar] positive value to clip the
modified and scaled values to. If not set,
there is no upper limit
layout [dictionary] contains array layout information (on the full
array even if only a subset of antennas or baselines are used
in the simulation). It contains the following keys and
information:
'positions' [numpy array] Antenna positions (in m) as a
nant x 3 array in coordinates specified by key
'coords'
'coords' [string] Coordinate system in which antenna
positions are specified. Currently accepts 'ENU'
for local ENU system
'labels' [list or numpy array of strings] Unique string
identifiers for antennas. Must be of same length
as nant.
'ids' [list or numpy array of integers] Unique integer
identifiers for antennas. Must be of same length
as nant.
timestamp [list] List of timestamps during the observation (Julian date)
t_acc [list] Accumulation time (sec) corresponding to each timestamp
t_obs [scalar] Total observing duration (sec)
Tsys [scalar, list or numpy vector] System temperature in Kelvin. At
end of the simulation, it will be a numpy array of size
n_baselines x nchan x n_snaps.
Tsysinfo [list of dictionaries] Contains a list of system temperature
information for each timestamp of observation. Each dictionary
element in the list following keys and values:
'Trx' [scalar] Receiver temperature (in K) that is
applicable to all frequencies and baselines
'Tant' [dictionary] contains antenna temperature info
from which the antenna temperature is estimated.
Used only if the key 'Tnet' is absent or set to
None. It has the following keys and values:
'f0' [scalar] Reference frequency (in Hz)
from which antenna temperature will
be estimated (see formula below)
'T0' [scalar] Antenna temperature (in K) at
the reference frequency specified in
key 'f0'. See formula below.
'spindex' [scalar] Antenna temperature spectral
index. See formula below.
Tsys = Trx + Tant['T0'] * (f/Tant['f0'])**spindex
'Tnet' [numpy array] Pre-computed Tsys (in K)
information that will be used directly to set the
Tsys. If specified, the information under keys
'Trx' and 'Tant' will be ignored. If a scalar
value is provided, it will be assumed to be
identical for all interferometers and all
frequencies. If a vector is provided whose length
is equal to the number of interferometers, it
will be assumed identical for all frequencies. If
a vector is provided whose length is equal to the
number of frequency channels, it will be assumed
identical for all interferometers. If a 2D array
is provided, it should be of size
n_baselines x nchan. Tsys = Tnet
vis_freq [numpy array] The simulated complex visibility (in Jy or K)
observed by each of the interferometers along frequency axis for
each timestamp of observation per frequency channel. It is the
sum of skyvis_freq and vis_noise_freq. It can be either directly
initialized or simulated in observe(). Same dimensions as
skyvis_freq.
vis_lag [numpy array] The simulated complex visibility (in Jy Hz or K Hz)
along delay axis for each interferometer obtained by FFT of
vis_freq along frequency axis. Same size as vis_noise_lag and
skyvis_lag. It is evaluated in member function delay_transform().
vis_noise_freq
[numpy array] Complex visibility noise (in Jy or K) generated
using an rms of vis_rms_freq along frequency axis for each
interferometer which is then added to the generated sky
visibility. Same dimensions as skyvis_freq. Used in the member
function observe(). Read its docstring for more details.
vis_noise_lag
[numpy array] Complex visibility noise (in Jy Hz or K Hz) along
delay axis for each interferometer generated using an FFT of
vis_noise_freq along frequency axis. Same size as vis_noise_freq.
Created in the member function delay_transform(). Read its
docstring for more details.
vis_rms_freq
[list of float] Theoretically estimated thermal noise rms (in Jy
or K) in visibility measurements. Same size as vis_freq. This
will be estimated and used to inject simulated noise when a call
to member function observe() is made. Read the docstring of
observe() for more details. The noise rms is estimated from the
instrument parameters as:
(2 k T_sys / (A_eff x sqrt(2 x channel_width x t_acc))) / Jy, or
T_sys / sqrt(2 x channel_width x t_acc)
simparms_file
[string] Full path to filename containing simulation parameters
in YAML format
Member functions:
__init__() Initializes an instance of class InterferometerArray
observe() Simulates an observing run with the interferometer
specifications and an external sky catalog thus producing
visibilities. The simulation generates visibilities
observed by the interferometer for the specified
parameters.
observing_run() Simulate an extended observing run in 'track' or 'drift'
mode, by an instance of the InterferometerArray class, of
the sky when a sky catalog is provided. The simulation
generates visibilities observed by the interferometer
array for the specified parameters. Uses member function
observe() and builds the observation from snapshots. The
timestamp for each snapshot is the current time at which
the snapshot is generated.
generate_noise() Generates thermal noise from attributes that describe
system parameters which can be added to sky visibilities
add_noise() Adds the thermal noise generated in member function
generate_noise() to the sky visibilities after
extracting and applying complex instrument gains
apply_gradients() Apply the perturbations in combination with the
gradients to determine perturbed visibilities
duplicate_measurements()
Duplicate visibilities based on redundant baselines
specified. This saves time when compared to simulating
visibilities over redundant baselines. Thus, it is more
efficient to simulate unique baselines and duplicate
measurements for redundant baselines
getBaselineGroupKeys()
Find redundant baseline group keys of groups that
contain the input baseline labels
getBaselinesInGroups()
Find all redundant baseline labels in groups that
contain the given input baseline labels
getThreePointCombinations()
Return all or only unique 3-point combinations of
baselines
getClosurePhase() Get closure phases of visibilities from triplets of
antennas
rotate_visibilities()
Centers the phase of visibilities around any given phase
center. Project baseline vectors with respect to a
reference point on the sky. Essentially a wrapper to
member functions phase_centering() and
project_baselines()
phase_centering() Centers the phase of visibilities around any given phase
center.
project_baselines() Project baseline vectors with respect to a reference
point on the sky. Assigns the projected baselines to the
attribute projected_baselines
conjugate() Flips the baseline vectors and conjugates the visibilities
for a specified subset of baselines.
delay_transform() Transforms the visibilities from frequency axis onto
delay (time) axis using an IFFT. This is performed for
noiseless sky visibilities, thermal noise in visibilities,
and observed visibilities.
concatenate() Concatenates different visibility data sets from instances
of class InterferometerArray along baseline, frequency or
time axis.
save() Saves the interferometer array information to disk in
HDF5, FITS, NPZ and UVFITS formats
pyuvdata_write() Saves the interferometer array information to disk in
various formats through pyuvdata module
----------------------------------------------------------------------------
"""
def __init__(self, labels, baselines, channels, telescope=None, eff_Q=0.89,
latitude=34.0790, longitude=0.0, altitude=0.0,
skycoords='radec', A_eff=NP.pi*(25.0/2)**2,
pointing_coords='hadec', layout=None, blgroupinfo=None,
baseline_coords='localenu', freq_scale=None, gaininfo=None,
init_file=None, simparms_file=None):
"""
------------------------------------------------------------------------
Initialize the InterferometerArray class which manages information on a
multi-element interferometer.
Class attributes initialized are:
astroutils_githash, prisim_githash, labels, baselines, channels,
telescope, latitude, longitude, altitude, skycoords, eff_Q, A_eff,
pointing_coords, baseline_coords, baseline_lengths, channels, bp,
bp_wts, freq_resolution, lags, lst, obs_catalog_indices,
pointing_center, skyvis_freq, skyvis_lag, timestamp, t_acc, Tsys,
Tsysinfo, vis_freq, vis_lag, t_obs, n_acc, vis_noise_freq,
vis_noise_lag, vis_rms_freq, geometric_delays, projected_baselines,
simparms_file, layout, gradient, gradient_mode, gaininfo, blgroups,
bl_reversemap
Read docstring of class InterferometerArray for details on these
attributes.
Keyword input(s):
init_file [string] Location of the initialization file from which an
instance of class InterferometerArray will be created.
File format must be compatible with the one saved to disk
by member function save().
simparms_file
[string] Location of the simulation parameters in YAML
format that went into making the simulated data product
Other input parameters have their usual meanings. Read the docstring of
class InterferometerArray for details on these inputs.
------------------------------------------------------------------------
"""
argument_init = False
init_file_success = False
if init_file is not None:
try:
with h5py.File(init_file+'.hdf5', 'r') as fileobj:
self.astroutils_githash = None
self.prisim_githash = None
self.simparms_file = None
self.latitude = 0.0
self.longitude = 0.0
self.altitude = 0.0
self.skycoords = 'radec'
self.flux_unit = 'JY'
self.telescope = {}
self.telescope['shape'] = 'delta'
self.telescope['size'] = 1.0
self.telescope['groundplane'] = None
self.Tsysinfo = []
self.layout = {}
self.blgroups = None
self.bl_reversemap = None
self.lags = None
self.vis_lag = None
self.skyvis_lag = None
self.vis_noise_lag = None
self.gradient_mode = None
self.gradient = {}
self.gaininfo = None
for key in ['header', 'telescope_parms', 'spectral_info', 'simparms', 'antenna_element', 'timing', 'skyparms', 'array', 'layout', 'instrument', 'visibilities', 'gradients', 'gaininfo', 'blgroupinfo']:
try:
grp = fileobj[key]
except KeyError:
if key in ['gradients', 'gaininfo']:
pass
elif key not in ['simparms', 'blgroupinfo']:
raise KeyError('Key {0} not found in init_file'.format(key))
if key == 'header':
self.flux_unit = grp['flux_unit'].value
if 'AstroUtils#' in grp:
self.astroutils_githash = grp['AstroUtils#'].value
else:
self.astroutils_githash = astroutils.__githash__
if 'PRISim#' in grp:
self.prisim_githash = grp['PRISim#'].value
else:
self.prisim_githash = prisim.__githash__
if key == 'telescope_parms':
if 'latitude' in grp:
self.latitude = grp['latitude'].value
if 'longitude' in grp:
self.longitude = grp['longitude'].value
if 'altitude' in grp:
self.altitude = grp['altitude'].value
if 'id' in grp:
self.telescope['id'] = grp['id'].value
if key == 'layout':
if 'positions' in grp:
self.layout['positions'] = grp['positions'].value
else:
raise KeyError('Antenna layout positions is missing')
try:
self.layout['coords'] = grp['positions'].attrs['coords']
except KeyError:
raise KeyError('Antenna layout position coordinate system is missing')
if 'labels' in grp:
self.layout['labels'] = grp['labels'].value
else:
raise KeyError('Layout antenna labels is missing')
if 'ids' in grp:
self.layout['ids'] = grp['ids'].value
else:
raise KeyError('Layout antenna ids is missing')
if key == 'antenna_element':
if 'shape' in grp:
self.telescope['shape'] = grp['shape'].value
if 'size' in grp:
self.telescope['size'] = grp['size'].value
if 'ocoords' in grp:
self.telescope['ocoords'] = grp['ocoords'].value
else:
raise KeyError('Keyword "ocoords" not found in init_file')
if 'orientation' in grp:
self.telescope['orientation'] = grp['orientation'].value.reshape(1,-1)
else:
raise KeyError('Key "orientation" not found in init_file')
if 'groundplane' in grp:
self.telescope['groundplane'] = grp['groundplane'].value
if key == 'simparms':
if 'simfile' in grp:
self.simparms_file = grp['simfile'].value
if key == 'spectral_info':
self.freq_resolution = grp['freq_resolution'].value
self.channels = grp['freqs'].value
if 'lags' in grp:
self.lags = grp['lags'].value
if 'bp' in grp:
self.bp = grp['bp'].value
else:
raise KeyError('Key "bp" not found in init_file')
if 'bp_wts' in grp:
self.bp_wts = grp['bp_wts'].value
else:
self.bp_wts = NP.ones_like(self.bp)
self.bp_wts = grp['bp_wts'].value
if key == 'skyparms':
if 'pointing_coords' in grp:
self.pointing_coords = grp['pointing_coords'].value
if 'phase_center_coords' in grp:
self.phase_center_coords = grp['phase_center_coords'].value
if 'skycoords' in grp:
self.skycoords = grp['skycoords'].value
self.lst = grp['LST'].value
self.pointing_center = grp['pointing_center'].value
self.phase_center = grp['phase_center'].value
if key == 'timing':
if 'timestamps' in grp:
self.timestamp = grp['timestamps'].value.tolist()
else:
raise KeyError('Key "timestamps" not found in init_file')
if 't_acc' in grp:
self.t_acc = grp['t_acc'].value.tolist()
self.t_obs = grp['t_obs'].value
self.n_acc = grp['n_acc'].value
else:
raise KeyError('Key "t_acc" not found in init_file')
if key == 'instrument':
if ('Trx' in grp) or ('Tant' in grp) or ('spindex' in grp) or ('Tnet' in grp):
for ti in range(grp['Trx'].value.size):
tsysinfo = {}
tsysinfo['Trx'] = grp['Trx'].value[ti]
tsysinfo['Tant'] = {'T0': grp['Tant0'].value[ti], 'f0': grp['f0'].value[ti], 'spindex': grp['spindex'].value[ti]}
tsysinfo['Tnet'] = None
if 'Tnet' in grp:
if grp['Tnet'].value[ti] > 0:
tsysinfo['Tnet'] = grp['Tnet'].value[ti]
self.Tsysinfo += [tsysinfo]
if 'Tsys' in grp:
self.Tsys = grp['Tsys'].value
else:
raise KeyError('Key "Tsys" not found in init_file')
if 'effective_area' in grp:
self.A_eff = grp['effective_area'].value
else:
raise KeyError('Key "effective_area" not found in init_file')
if 'efficiency' in grp:
self.eff_Q = grp['efficiency'].value
else:
raise KeyError('Key "effeciency" not found in init_file')
if key == 'array':
if 'labels' in grp:
self.labels = grp['labels'].value
else:
self.labels = ['B{0:0d}'.format(i+1) for i in range(self.baseline_lengths.size)]
if 'baselines' in grp:
self.baselines = grp['baselines'].value
self.baseline_lengths = NP.sqrt(NP.sum(self.baselines**2, axis=1))
else:
raise KeyError('Key "baselines" not found in init_file')
if 'baseline_coords' in grp:
self.baseline_coords = grp['baseline_coords'].value
else:
self.baseline_coords = 'localenu'
if 'projected_baselines' in grp:
self.projected_baselines = grp['projected_baselines'].value
if key == 'visibilities':
if 'freq_spectrum' in grp:
subgrp = grp['freq_spectrum']
if 'rms' in subgrp:
self.vis_rms_freq = subgrp['rms'].value
else:
self.vis_rms_freq = None
# raise KeyError('Key "rms" not found in init_file')
if 'vis' in subgrp:
self.vis_freq = subgrp['vis'].value
else:
self.vis_freq = None
if 'skyvis' in subgrp:
self.skyvis_freq = subgrp['skyvis'].value
else:
raise KeyError('Key "skyvis" not found in init_file')
if 'noise' in subgrp:
self.vis_noise_freq = subgrp['noise'].value
else:
self.vis_noise_freq = None
else:
raise KeyError('Key "freq_spectrum" not found in init_file')
if 'delay_spectrum' in grp:
subgrp = grp['delay_spectrum']
if 'vis' in subgrp:
self.vis_lag = subgrp['vis'].value
if 'skyvis' in subgrp:
self.skyvis_lag = subgrp['skyvis'].value
if 'noise' in subgrp:
self.vis_noise_lag = subgrp['noise'].value
if key == 'gradients':
if key in fileobj:
for gradkey in grp:
self.gradient_mode = gradkey
self.gradient[gradkey] = grp[gradkey].value
if key == 'gaininfo':
if key in fileobj:
self.gaininfo = GainInfo(init_file=grp['gainsfile'].value)
if key == 'blgroupinfo':
if key in fileobj:
self.blgroups = {}
self.bl_reversemap = {}
for blkey in grp['groups']:
self.blgroups[ast.literal_eval(blkey)] = grp['groups'][blkey].value
for blkey in grp['reversemap']:
self.bl_reversemap[ast.literal_eval(blkey)] = grp['reversemap'][blkey].value
except IOError: # Check if a FITS file is available
try:
hdulist = fits.open(init_file+'.fits')
except IOError:
argument_init = True
warnings.warn('\tinit_file provided but could not open the initialization file. Attempting to initialize with input parameters...')
extnames = [hdulist[i].header['EXTNAME'] for i in xrange(1,len(hdulist))]
self.simparms_file = None
if 'simparms' in hdulist[0].header:
if isinstance(hdulist[0].header['simparms'], str):
self.simparms_file = hdulist[0].header['simparms']
else:
warnings.warn('\tInvalid specification found in header for simulation parameters file. Proceeding with None as default.')
try:
self.gradient_mode = hdulist[0].header['gradient_mode']
except KeyError:
self.gradient_mode = None
self.gradient = {}
try:
self.freq_resolution = hdulist[0].header['freq_resolution']
except KeyError:
hdulist.close()
raise KeyError('Keyword "freq_resolution" not found in header.')
try:
self.latitude = hdulist[0].header['latitude']
except KeyError:
warnings.warn('\tKeyword "latitude" not found in header. Assuming 34.0790 degrees for attribute latitude.')
self.latitude = 34.0790
try:
self.longitude = hdulist[0].header['longitude']
except KeyError:
warnings.warn('\tKeyword "longitude" not found in header. Assuming 0.0 degrees for attribute longitude.')
self.longitude = 0.0
try:
self.altitude = hdulist[0].header['altitude']
except KeyError:
warnings.warn('\tKeyword "altitude" not found in header. Assuming 0m for attribute altitude.')
self.altitude = 0.0
self.telescope = {}
if 'telescope' in hdulist[0].header:
self.telescope['id'] = hdulist[0].header['telescope']
try:
self.telescope['shape'] = hdulist[0].header['element_shape']
except KeyError:
warnings.warn('\tKeyword "element_shape" not found in header. Assuming "delta" for attribute antenna element shape.')
self.telescope['shape'] = 'delta'
try:
self.telescope['size'] = hdulist[0].header['element_size']
except KeyError:
warnings.warn('\tKeyword "element_size" not found in header. Assuming 25.0m for attribute antenna element size.')
self.telescope['size'] = 1.0
try:
self.telescope['ocoords'] = hdulist[0].header['element_ocoords']
except KeyError:
raise KeyError('\tKeyword "element_ocoords" not found in header. No defaults.')
try:
self.telescope['groundplane'] = hdulist[0].header['groundplane']
except KeyError:
self.telescope['groundplane'] = None
if 'ANTENNA ELEMENT ORIENTATION' not in extnames:
raise KeyError('No extension found containing information on element orientation.')
else:
self.telescope['orientation'] = hdulist['ANTENNA ELEMENT ORIENTATION'].data.reshape(1,-1)
try:
self.baseline_coords = hdulist[0].header['baseline_coords']
except KeyError:
warnings.warn('\tKeyword "baseline_coords" not found in header. Assuming "localenu" for attribute baseline_coords.')
self.baseline_coords = 'localenu'
try:
self.pointing_coords = hdulist[0].header['pointing_coords']
except KeyError:
warnings.warn('\tKeyword "pointing_coords" not found in header. Assuming "hadec" for attribute pointing_coords.')
self.pointing_coords = 'hadec'
try:
self.phase_center_coords = hdulist[0].header['phase_center_coords']
except KeyError:
warnings.warn('\tKeyword "phase_center_coords" not found in header. Assuming "hadec" for attribute phase_center_coords.')
self.phase_center_coords = 'hadec'
try:
self.skycoords = hdulist[0].header['skycoords']
except KeyError:
warnings.warn('\tKeyword "skycoords" not found in header. Assuming "radec" for attribute skycoords.')
self.skycoords = 'radec'
try:
self.flux_unit = hdulist[0].header['flux_unit']
except KeyError:
warnings.warn('\tKeyword "flux_unit" not found in header. Assuming "jy" for attribute flux_unit.')
self.flux_unit = 'JY'
if 'POINTING AND PHASE CENTER INFO' not in extnames:
raise KeyError('No extension table found containing pointing information.')
else:
self.lst = hdulist['POINTING AND PHASE CENTER INFO'].data['LST'].tolist()
self.pointing_center = NP.hstack((hdulist['POINTING AND PHASE CENTER INFO'].data['pointing_longitude'].reshape(-1,1), hdulist['POINTING AND PHASE CENTER INFO'].data['pointing_latitude'].reshape(-1,1)))
self.phase_center = NP.hstack((hdulist['POINTING AND PHASE CENTER INFO'].data['phase_center_longitude'].reshape(-1,1), hdulist['POINTING AND PHASE CENTER INFO'].data['phase_center_latitude'].reshape(-1,1)))
if 'TIMESTAMPS' in extnames:
self.timestamp = hdulist['TIMESTAMPS'].data['timestamps'].tolist()
else:
raise KeyError('Extension named "TIMESTAMPS" not found in init_file.')
self.Tsysinfo = []
if 'TSYSINFO' in extnames:
self.Tsysinfo = [{'Trx': elem['Trx'], 'Tant': {'T0': elem['Tant0'], 'f0': elem['f0'], 'spindex': elem['spindex']}, 'Tnet': None} for elem in hdulist['TSYSINFO'].data]
if 'TSYS' in extnames:
self.Tsys = hdulist['Tsys'].data
else:
raise KeyError('Extension named "Tsys" not found in init_file.')
if 'BASELINES' in extnames:
self.baselines = hdulist['BASELINES'].data.reshape(-1,3)
self.baseline_lengths = NP.sqrt(NP.sum(self.baselines**2, axis=1))
else:
raise KeyError('Extension named "BASELINES" not found in init_file.')
if 'PROJ_BASELINES' in extnames:
self.projected_baselines = hdulist['PROJ_BASELINES'].data
if 'LABELS' in extnames:
# self.labels = hdulist['LABELS'].data.tolist()
a1 = hdulist['LABELS'].data['A1']
a2 = hdulist['LABELS'].data['A2']
self.labels = zip(a2,a1)
else:
self.labels = ['B{0:0d}'.format(i+1) for i in range(self.baseline_lengths.size)]
self.layout = {}
if 'LAYOUT' in extnames:
for key in ['positions', 'ids', 'labels']:
self.layout[key] = hdulist['LAYOUT'].data[key]
self.layout['coords'] = hdulist['LAYOUT'].header['COORDS']
if 'EFFECTIVE AREA' in extnames:
self.A_eff = hdulist['EFFECTIVE AREA'].data
else:
raise KeyError('Extension named "EFFECTIVE AREA" not found in init_file.')
if 'INTERFEROMETER EFFICIENCY' in extnames:
self.eff_Q = hdulist['INTERFEROMETER EFFICIENCY'].data
else:
raise KeyError('Extension named "INTERFEROMETER EFFICIENCY" not found in init_file.')
if 'SPECTRAL INFO' not in extnames:
raise KeyError('No extension table found containing spectral information.')
else:
self.channels = hdulist['SPECTRAL INFO'].data['frequency']
try:
self.lags = hdulist['SPECTRAL INFO'].data['lag']
except KeyError:
self.lags = None
if 'BANDPASS' in extnames:
self.bp = hdulist['BANDPASS'].data
else:
raise KeyError('Extension named "BANDPASS" not found in init_file.')
if 'BANDPASS_WEIGHTS' in extnames:
self.bp_wts = hdulist['BANDPASS_WEIGHTS'].data
else:
self.bp_wts = NP.ones_like(self.bp)
if 'T_ACC' in extnames:
self.t_acc = hdulist['t_acc'].data.tolist()
self.n_acc = len(self.t_acc)
self.t_obs = sum(self.t_acc)
else:
raise KeyError('Extension named "T_ACC" not found in init_file.')
if 'FREQ_CHANNEL_NOISE_RMS_VISIBILITY' in extnames:
self.vis_rms_freq = hdulist['freq_channel_noise_rms_visibility'].data
else:
self.vis_rms_freq = None
if 'REAL_FREQ_OBS_VISIBILITY' in extnames:
self.vis_freq = hdulist['real_freq_obs_visibility'].data
if 'IMAG_FREQ_OBS_VISIBILITY' in extnames:
self.vis_freq = self.vis_freq.astype(NP.complex128)
self.vis_freq += 1j * hdulist['imag_freq_obs_visibility'].data
else:
self.vis_freq = None
if 'REAL_FREQ_SKY_VISIBILITY' in extnames:
self.skyvis_freq = hdulist['real_freq_sky_visibility'].data
if 'IMAG_FREQ_SKY_VISIBILITY' in extnames:
self.skyvis_freq = self.skyvis_freq.astype(NP.complex128)
self.skyvis_freq += 1j * hdulist['imag_freq_sky_visibility'].data
else:
raise KeyError('Extension named "REAL_FREQ_SKY_VISIBILITY" not found in init_file.')
if 'REAL_FREQ_NOISE_VISIBILITY' in extnames:
self.vis_noise_freq = hdulist['real_freq_noise_visibility'].data
if 'IMAG_FREQ_NOISE_VISIBILITY' in extnames:
self.vis_noise_freq = self.vis_noise_freq.astype(NP.complex128)
self.vis_noise_freq += 1j * hdulist['imag_freq_noise_visibility'].data
else:
self.vis_noise_freq = None
if self.gradient_mode is not None:
self.gradient = {}
if 'real_freq_sky_visibility_gradient_wrt_{0}'.format(self.gradient_mode) in extnames:
self.gradient[self.gradient_mode] = hdulist['real_freq_sky_visibility_gradient_wrt_{0}'.format(self.gradient_mode)].data
if 'imag_freq_sky_visibility_gradient_wrt_{0}'.format(self.gradient_mode) in extnames:
self.gradient[self.gradient_mode] = self.gradient[self.gradient_mode].astype(NP.complex128)
self.gradient[self.gradient_mode] += 1j * hdulist['imag_freq_sky_visibility_gradient_wrt_{0}'.format(self.gradient_mode)].data
try:
gainsfile = hdulist[0].header['gainsfile']
except KeyError:
warnings.warn('\tKeyword "gainsfile" not found in header. Assuming default unity gains.')
self.gaininfo = None
else:
self.gaininfo = GainInfo(init_file=gainsfile, axes_order=['label', 'frequency', 'time'])
if 'REAL_LAG_VISIBILITY' in extnames:
self.vis_lag = hdulist['real_lag_visibility'].data
if 'IMAG_LAG_VISIBILITY' in extnames:
self.vis_lag = self.vis_lag.astype(NP.complex128)
self.vis_lag += 1j * hdulist['imag_lag_visibility'].data
else:
self.vis_lag = None
if 'REAL_LAG_SKY_VISIBILITY' in extnames:
self.skyvis_lag = hdulist['real_lag_sky_visibility'].data
if 'IMAG_LAG_SKY_VISIBILITY' in extnames:
self.skyvis_lag = self.skyvis_lag.astype(NP.complex128)
self.skyvis_lag += 1j * hdulist['imag_lag_sky_visibility'].data
else:
self.skyvis_lag = None
if 'REAL_LAG_NOISE_VISIBILITY' in extnames:
self.vis_noise_lag = hdulist['real_lag_noise_visibility'].data
if 'IMAG_LAG_NOISE_VISIBILITY' in extnames:
self.vis_noise_lag = self.vis_noise_lag.astype(NP.complex128)
self.vis_noise_lag += 1j * hdulist['imag_lag_noise_visibility'].data
else:
self.vis_noise_lag = None
hdulist.close()
init_file_success = True
return
else:
argument_init = True
if (not argument_init) and (not init_file_success):
raise ValueError('Initialization failed with the use of init_file.')
self.astroutils_githash = astroutils.__githash__
self.prisim_githash = prisim.__githash__
self.baselines = NP.asarray(baselines)
if len(self.baselines.shape) == 1:
if self.baselines.size == 2:
self.baselines = NP.hstack((self.baselines.reshape(1,-1), NP.zeros(1)))
elif self.baselines.size == 3:
self.baselines = self.baselines.reshape(1,-1)
else:
raise ValueError('Baseline(s) must be a 2- or 3-column array.')
elif len(self.baselines.shape) == 2:
if self.baselines.shape[1] == 2:
self.baselines = NP.hstack((self.baselines, NP.zeros(self.baselines.shape[0]).reshape(-1,1)))
elif self.baselines.shape[1] != 3:
raise ValueError('Baseline(s) must be a 2- or 3-column array')
else:
raise ValueError('Baseline(s) array contains more than 2 dimensions.')
self.baseline_lengths = NP.sqrt(NP.sum(self.baselines**2, axis=1))
self.baseline_orientations = NP.angle(self.baselines[:,0] + 1j * self.baselines[:,1])
self.projected_baselines = None
if not isinstance(labels, (list, tuple, NP.ndarray)):
raise TypeError('Interferometer array labels must be a list or tuple of unique identifiers')
elif len(labels) != self.baselines.shape[0]:
raise ValueError('Number of labels do not match the number of baselines specified.')
else:
self.labels = labels
self.simparms_file = None
if isinstance(simparms_file, str):
self.simparms_file = simparms_file
else:
warnings.warn('\tInvalid specification found in header for simulation parameters file. Proceeding with None as default.')
if isinstance(telescope, dict):
self.telescope = telescope
else:
self.telescope = {}
self.telescope['id'] = 'vla'
self.telescope['shape'] = 'dish'
self.telescope['size'] = 25.0
self.telescope['ocoords'] = 'altaz'
self.telescope['orientation'] = NP.asarray([90.0, 270.0]).reshape(1,-1)
self.telescope['groundplane'] = None
self.layout = {}
if isinstance(layout, dict):
if 'positions' in layout:
if isinstance(layout['positions'], NP.ndarray):
if layout['positions'].ndim == 2:
if (layout['positions'].shape[1] == 2) or (layout['positions'].shape[1] == 3):
if layout['positions'].shape[1] == 2:
layout['positions'] = NP.hstack((layout['positions'], NP.zeros(layout['positions'].shape[0]).reshape(-1,1)))
self.layout['positions'] = layout['positions']
else:
raise ValueError('Incompatible shape in array layout')
else:
raise ValueError('Incompatible shape in array layout')
else:
raise TypeError('Array layout positions must be a numpy array')
else:
raise KeyError('Array layout positions missing')
if 'coords' in layout:
if isinstance(layout['coords'], str):
self.layout['coords'] = layout['coords']
else:
raise TypeError('Array layout coordinates must be a string')
else:
raise KeyError('Array layout coordinates missing')
if 'labels' in layout:
if isinstance(layout['labels'], (list,NP.ndarray)):
self.layout['labels'] = layout['labels']
else:
raise TypeError('Array antenna labels must be a list or numpy array')
else:
raise KeyError('Array antenna labels missing')
if 'ids' in layout:
if isinstance(layout['ids'], (list,NP.ndarray)):
self.layout['ids'] = layout['ids']
else:
raise TypeError('Array antenna ids must be a list or numpy array')
else:
raise KeyError('Array antenna ids missing')
if (layout['positions'].shape[0] != layout['labels'].size) or (layout['ids'].size != layout['labels'].size):
raise ValueError('Antenna layout positions, labels and IDs must all be for same number of antennas')
if self.layout:
antlabel_dtype = self.layout['labels'].dtype
self.labels = NP.asarray(self.labels, dtype=[('A2', antlabel_dtype), ('A1', antlabel_dtype)])
self.blgroups = None
self.bl_reversemap = None
if blgroupinfo is not None:
if not isinstance(blgroupinfo, dict):
raise TypeError('Input blgroupinfo must be a dictionary')
self.blgroups = blgroupinfo['groups']
self.bl_reversemap = blgroupinfo['reversemap']
self.latitude = latitude
self.longitude = longitude
self.altitude = altitude
self.vis_freq = None
self.skyvis_freq = None
self.vis_noise_freq = None
self.gradient_mode = None
self.gradient = {}
self.gaininfo = None
if gaininfo is not None:
if not isinstance(gaininfo, GainInfo):
raise TypeError('Input gaininfo must be an instance of class GainInfo')
self.gaininfo = gaininfo
if (freq_scale is None) or (freq_scale == 'Hz') or (freq_scale == 'hz'):
self.channels = NP.asarray(channels)
elif freq_scale == 'GHz' or freq_scale == 'ghz':
self.channels = NP.asarray(channels) * 1.0e9
elif freq_scale == 'MHz' or freq_scale == 'mhz':
self.channels = NP.asarray(channels) * 1.0e6
elif freq_scale == 'kHz' or freq_scale == 'khz':
self.channels = NP.asarray(channels) * 1.0e3
else:
raise ValueError('Frequency units must be "GHz", "MHz", "kHz" or "Hz". If not set, it defaults to "Hz"')
self.bp = NP.ones((self.baselines.shape[0],self.channels.size)) # Inherent bandpass shape
self.bp_wts = NP.ones((self.baselines.shape[0],self.channels.size)) # Additional bandpass weights
self.lag_kernel = DSP.FT1D(self.bp*self.bp_wts, ax=1, inverse=True, use_real=False, shift=True)
self.Tsys = NP.zeros((self.baselines.shape[0],self.channels.size))
self.Tsysinfo = []
self.flux_unit = 'JY'
self.timestamp = []
self.t_acc = []
self.t_obs = 0.0
self.n_acc = 0
self.pointing_center = NP.empty([1,2])
self.phase_center = NP.empty([1,2])
self.lst = []
if isinstance(eff_Q, (int, float)):
if (eff_Q >= 0.0) or (eff_Q <= 1.0):
self.eff_Q = eff_Q * NP.ones((self.baselines.shape[0], self.channels.size))
else:
raise ValueError('Efficiency value of interferometer is invalid.')
elif isinstance(eff_Q, (list, tuple, NP.ndarray)):
eff_Q = NP.asarray(eff_Q)
if (NP.any(eff_Q < 0.0)) or (NP.any(eff_Q > 1.0)):
raise ValueError('One or more values of eff_Q found to be outside the range [0,1].')
if eff_Q.size == self.baselines.shape[0]:
self.eff_Q = NP.repeat(eff_Q.reshape(-1,1), self.channels.size, axis=1)
elif eff_Q.size == self.channels.size:
self.eff_Q = NP.repeat(eff_Q.reshape(1,-1), self.channels.size, axis=0)
elif eff_Q.size == self.baselines.shape[0]*self.channels.size:
self.eff_Q = eff_Q.reshape(-1,self.channels.size)
else:
raise ValueError('Efficiency values of interferometers incompatible with the number of interferometers and/or frequency channels.')
else:
raise TypeError('Efficiency values of interferometers must be provided as a scalar, list, tuple or numpy array.')
if isinstance(A_eff, (int, float)):
if A_eff >= 0.0:
self.A_eff = A_eff * NP.ones((self.baselines.shape[0], self.channels.size))
else:
raise ValueError('Negative value for effective area is invalid.')
elif isinstance(A_eff, (list, tuple, NP.ndarray)):
A_eff = NP.asarray(A_eff)
if NP.any(A_eff < 0.0):
raise ValueError('One or more values of A_eff found to be negative.')
if A_eff.size == self.baselines.shape[0]:
self.A_eff = NP.repeat(A_eff.reshape(-1,1), self.channels.size, axis=1)
elif A_eff.size == self.channels.size:
self.A_eff = NP.repeat(A_eff.reshape(1,-1), self.channels.size, axis=0)
elif A_eff.size == self.baselines.shape[0]*self.channels.size:
self.A_eff = A_eff.reshape(-1,self.channels.size)
else:
raise ValueError('Effective area(s) of interferometers incompatible with the number of interferometers and/or frequency channels.')
else:
raise TypeError('Effective area(s) of interferometers must be provided as a scalar, list, tuple or numpy array.')
self.vis_rms_freq = None
self.freq_resolution = self.channels[1] - self.channels[0]
self.baseline_coords = baseline_coords
self.lags = None
self.skyvis_lag = None
self.vis_noise_lag = None
self.vis_lag = None
self.obs_catalog_indices = []
self.geometric_delays = []
if (pointing_coords == 'radec') or (pointing_coords == 'hadec') or (pointing_coords == 'altaz'):
self.pointing_coords = pointing_coords
self.phase_center_coords = pointing_coords
else:
raise ValueError('Pointing center of the interferometer must be "radec", "hadec" or "altaz". Check inputs.')
if (skycoords == 'radec') or (skycoords == 'hadec') or (skycoords == 'altaz'):
self.skycoords = skycoords
else:
raise ValueError('Sky coordinates must be "radec", "hadec" or "altaz". Check inputs.')
if (baseline_coords == 'equatorial') or (baseline_coords == 'localenu'):
self.baseline_coords = baseline_coords
else:
raise ValueError('Baseline coordinates must be "equatorial" or "local". Check inputs.')
#############################################################################
def observe(self, timeobj, Tsysinfo, bandpass, pointing_center, skymodel,
t_acc, pb_info=None, brightness_units=None, bpcorrect=None,
roi_info=None, roi_radius=None, roi_center=None, lst=None,
gradient_mode=None, memsave=False, vmemavail=None,
store_prev_skymodel_file=None):
"""
-------------------------------------------------------------------------
Simulate a snapshot observation, by an instance of the
InterferometerArray class, of the sky when a sky catalog is provided. The
simulation generates visibilities observed by the interferometers for the
specified parameters. See member function observing_run() for simulating
an extended observing run in 'track' or 'drift' mode.
Inputs:
timeobj [instance of class astropy.time.Time] Time object
associated with each integration in the observation
Tsysinfo [dictionary] Contains system temperature information for
specified timestamp of observation. It contains the
following keys and values:
                        'Trx'      [scalar] Receiver temperature (in K) that is
                                   applicable to all frequencies and baselines
'Tant' [dictionary] contains antenna temperature info
from which the antenna temperature is estimated.
Used only if the key 'Tnet' is absent or set to
None. It has the following keys and values:
'f0' [scalar] Reference frequency (in Hz)
from which antenna temperature will
be estimated (see formula below)
'T0' [scalar] Antenna temperature (in K) at
the reference frequency specified in
key 'f0'. See formula below.
'spindex' [scalar] Antenna temperature spectral
index. See formula below.
Tsys = Trx + Tant['T0'] * (f/Tant['f0'])**spindex
'Tnet' [numpy array] Pre-computed Tsys (in K)
information that will be used directly to set the
Tsys. If specified, the information under keys
'Trx' and 'Tant' will be ignored. If a scalar
value is provided, it will be assumed to be
identical for all interferometers and all
frequencies. If a vector is provided whose length
is equal to the number of interferoemters, it
will be assumed identical for all frequencies. If
a vector is provided whose length is equal to the
number of frequency channels, it will be assumed
identical for all interferometers. If a 2D array
is provided, it should be of size
n_baselines x nchan. Tsys = Tnet
bandpass [numpy array] Bandpass weights associated with the
interferometers for the specified timestamp of observation
pointing_center
[2-element numpy vector or list] Pointing center (latitude
and longitude) of the observation at a given timestamp.
This is where the telescopes will be phased up to as
reference. Coordinate system for the pointing_center is
specified by the attribute pointing_coords initialized in
__init__().
skymodel [instance of class SkyModel] It consists of source flux
densities, their positions, and spectral indices. Read
class SkyModel docstring for more information.
t_acc [scalar] Accumulation time (sec) corresponding to timestamp
brightness_units
[string] Units of flux density in the catalog and for the
generated visibilities. Accepted values are 'Jy' (Jansky)
and 'K' (Kelvin for temperature). If None set, it defaults
to 'Jy'
Keyword Inputs:
roi_info [instance of class ROI_parameters] It consists of indices
in the polskymodel object, polarized beams for different
baseline types for every time stamp that will be simulated
roi_radius [scalar] Radius of the region of interest (degrees) inside
which sources are to be observed. Default = 90 degrees,
which is the entire horizon.
roi_center [string] Center of the region of interest around which
roi_radius is used. Accepted values are 'pointing_center'
and 'zenith'. If set to None, it defaults to 'zenith'.
gradient_mode
[string] If set to None, visibilities will be simulated as
usual. If set to string, both visibilities and visibility
gradients with respect to the quantity specified in the
string will be simulated. Currently accepted value is
'baseline'. Plan to incorporate gradients with respect to
'skypos' and 'frequency' as well in the future.
memsave [boolean] If set to True, enforce computations in single
precision, otherwise enforce double precision (default)
vmemavail [NoneType, int or float] Amount of virtual memory available
(in bytes). If set to None (default), it will be determined
using psutil functions though that may be less reliable
than setting it explicitly if the available virtual memory
is known.
store_prev_skymodel_file
[string] Filename including full path to store source
indices and spectrum from previous computation which can
be read during the next iteration to generate spectrum
only of new sources that come into the field of view thus
saving computations. If set to None (default), the full
spectrum of all sources in the field of view will be
computed in each iteration.
------------------------------------------------------------------------
"""
if len(bandpass.shape) == 1:
if bandpass.size != self.channels.size:
raise ValueError('Specified bandpass incompatible with the number of frequency channels')
if len(self.bp.shape) == 2:
self.bp = NP.expand_dims(NP.repeat(bandpass.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2)
else:
self.bp = NP.dstack((self.bp, NP.repeat(bandpass.reshape(1,-1), self.baselines.shape[0], axis=0)))
elif len(bandpass.shape) == 2:
if bandpass.shape[1] != self.channels.size:
raise ValueError('Specified bandpass incompatible with the number of frequency channels')
elif bandpass.shape[0] != self.baselines.shape[0]:
raise ValueError('Specified bandpass incompatible with the number of interferometers')
if len(self.bp.shape) == 2:
self.bp = NP.expand_dims(bandpass, axis=2)
else:
self.bp = NP.dstack((self.bp, bandpass))
elif len(bandpass.shape) == 3:
if bandpass.shape[1] != self.channels.size:
raise ValueError('Specified bandpass incompatible with the number of frequency channels')
elif bandpass.shape[0] != self.baselines.shape[0]:
raise ValueError('Specified bandpass incompatible with the number of interferometers')
elif bandpass.shape[2] != 1:
raise ValueError('Bandpass can have only one layer for this instance of accumulation.')
if len(self.bp.shape) == 2:
self.bp = bandpass
else:
self.bp = NP.dstack((self.bp, bandpass))
self.bp_wts = NP.ones_like(self.bp) # All additional bandpass shaping weights are set to unity.
if isinstance(Tsysinfo, dict):
set_Tsys = False
if 'Tnet' in Tsysinfo:
if Tsysinfo['Tnet'] is not None:
Tsys = Tsysinfo['Tnet']
set_Tsys = True
if not set_Tsys:
try:
Tsys = Tsysinfo['Trx'] + Tsysinfo['Tant']['T0'] * (self.channels/Tsysinfo['Tant']['f0']) ** Tsysinfo['Tant']['spindex']
except KeyError:
raise KeyError('One or more keys not found in input Tsysinfo')
Tsys = Tsys.reshape(1,-1) + NP.zeros(self.baselines.shape[0]).reshape(-1,1) # nbl x nchan
else:
raise TypeError('Input Tsysinfo must be a dictionary')
self.Tsysinfo += [Tsysinfo]
if bpcorrect is not None:
if not isinstance(bpcorrect, NP.ndarray):
raise TypeError('Input specifying bandpass correction must be a numpy array')
if bpcorrect.size == self.channels.size:
bpcorrect = bpcorrect.reshape(1,-1)
elif bpcorrect.size == self.baselines.shape[0]:
bpcorrect = bpcorrect.reshape(-1,1)
elif bpcorrect.size == self.baselines.shape[0] * self.channels.size:
bpcorrect = bpcorrect.reshape(-1,self.channels.size)
else:
raise ValueError('Input bpcorrect has dimensions incompatible with the number of baselines and frequencies')
Tsys = Tsys * bpcorrect
if isinstance(Tsys, (int,float)):
if Tsys < 0.0:
raise ValueError('Tsys found to be negative.')
if len(self.Tsys.shape) == 2:
self.Tsys = Tsys + NP.zeros((self.baselines.shape[0], self.channels.size, 1))
else:
self.Tsys = NP.dstack((self.Tsys, Tsys + NP.zeros((self.baselines.shape[0], self.channels.size, 1))))
elif isinstance(Tsys, (list, tuple, NP.ndarray)):
Tsys = NP.asarray(Tsys)
if NP.any(Tsys < 0.0):
raise ValueError('Tsys should be non-negative.')
if Tsys.size == self.baselines.shape[0]:
if self.Tsys.ndim == 2:
self.Tsys = NP.expand_dims(NP.repeat(Tsys.reshape(-1,1), self.channels.size, axis=1), axis=2)
elif self.Tsys.ndim == 3:
self.Tsys = NP.dstack((self.Tsys, NP.expand_dims(NP.repeat(Tsys.reshape(-1,1), self.channels.size, axis=1), axis=2)))
elif Tsys.size == self.channels.size:
if self.Tsys.ndim == 2:
self.Tsys = NP.expand_dims(NP.repeat(Tsys.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2)
elif self.Tsys.ndim == 3:
self.Tsys = NP.dstack((self.Tsys, NP.expand_dims(NP.repeat(Tsys.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2)))
elif Tsys.size == self.baselines.shape[0]*self.channels.size:
if self.Tsys.ndim == 2:
self.Tsys = NP.expand_dims(Tsys.reshape(-1,self.channels.size), axis=2)
elif self.Tsys.ndim == 3:
self.Tsys = NP.dstack((self.Tsys, NP.expand_dims(Tsys.reshape(-1,self.channels.size), axis=2)))
else:
raise ValueError('Specified Tsys has incompatible dimensions with the number of baselines and/or number of frequency channels.')
else:
raise TypeError('Tsys should be a scalar, list, tuple, or numpy array')
# if (brightness_units is None) or (brightness_units=='Jy') or (brightness_units=='JY') or (brightness_units=='jy'):
# if self.vis_rms_freq is None:
# self.vis_rms_freq = 2.0 * FCNST.k / NP.sqrt(2.0*t_acc*self.freq_resolution) * NP.expand_dims(self.Tsys[:,:,-1]/self.A_eff/self.eff_Q, axis=2) / CNST.Jy
# elif len(self.vis_rms_freq.shape) == 3:
# self.vis_rms_freq = NP.dstack((self.vis_rms_freq, 2.0 * FCNST.k / NP.sqrt(2.0*t_acc*self.freq_resolution) * NP.expand_dims(self.Tsys[:,:,-1]/self.A_eff/self.eff_Q, axis=2)/CNST.Jy))
# self.flux_unit = 'JY'
# elif (brightness_units=='K') or (brightness_units=='k'):
# if len(self.vis_rms_freq.shape) == 2:
# self.vis_rms_freq = 1 / NP.sqrt(2.0*t_acc*self.freq_resolution) * NP.expand_dims(self.Tsys[:,:,-1]/self.eff_Q, axis=2)
# elif len(self.vis_rms_freq.shape) == 3:
# self.vis_rms_freq = NP.dstack((self.vis_rms_freq, 1 / NP.sqrt(2.0*t_acc*self.freq_resolution) * NP.expand_dims(self.Tsys[:,:,-1]/self.eff_Q, axis=2)))
# self.flux_unit = 'K'
# else:
# raise ValueError('Invalid brightness temperature units specified.')
if not self.timestamp:
self.pointing_center = NP.asarray(pointing_center).reshape(1,-1)
self.phase_center = NP.asarray(pointing_center).reshape(1,-1)
else:
self.pointing_center = NP.vstack((self.pointing_center, NP.asarray(pointing_center).reshape(1,-1)))
self.phase_center = NP.vstack((self.phase_center, NP.asarray(pointing_center).reshape(1,-1)))
pointing_lon = self.pointing_center[-1,0]
pointing_lat = self.pointing_center[-1,1]
lst = timeobj.sidereal_time('apparent').deg
if self.skycoords == 'radec':
if self.pointing_coords == 'hadec':
if lst is not None:
pointing_lon = lst - self.pointing_center[-1,0]
pointing_lat = self.pointing_center[-1,1]
else:
raise ValueError('LST must be provided. Sky coordinates are in RA-Dec format while pointing center is in HA-Dec format.')
elif self.pointing_coords == 'altaz':
pointing_lonlat = GEOM.altaz2hadec(self.pointing_center[[-1],:], self.latitude, units='degrees').squeeze() # Should now be of shape (2,)
pointing_lon = lst - pointing_lonlat[0]
pointing_lat = pointing_lonlat[1]
elif self.skycoords == 'hadec':
if self.pointing_coords == 'radec':
if lst is not None:
pointing_lon = lst - self.pointing_center[-1,0]
pointing_lat = self.pointing_center[-1,1]
else:
raise ValueError('LST must be provided. Sky coordinates are in RA-Dec format while pointing center is in HA-Dec format.')
elif self.pointing_coords == 'altaz':
pointing_lonlat = lst - GEOM.altaz2hadec(self.pointing_center[[-1],:], self.latitude, units='degrees').squeeze()
pointing_lon = pointing_lonlat[0]
pointing_lat = pointing_lonlat[1]
else:
if self.pointing_coords == 'radec':
if lst is not None:
pointing_lonlat = GEOM.hadec2altaz(NP.asarray([lst-self.pointing_center[-1,0], self.pointing_center[-1,1]]), self.latitude, units='degrees')
pointing_lon = pointing_lonlat[0]
pointing_lat = pointing_lonlat[1]
else:
raise ValueError('LST must be provided. Sky coordinates are in Alt-Az format while pointing center is in RA-Dec format.')
elif self.pointing_coords == 'hadec':
pointing_lonlat = GEOM.hadec2altaz(self.pointing_center,
self.latitude,
units='degrees').squeeze()
pointing_lon = pointing_lonlat[0]
pointing_lat = pointing_lonlat[1]
baselines_in_local_frame = self.baselines
if self.baseline_coords == 'equatorial':
baselines_in_local_frame = GEOM.xyz2enu(self.baselines, self.latitude, 'degrees')
pc_altaz = self.pointing_center[-1,:] # Convert pointing center to Alt-Az coordinates
if self.pointing_coords == 'hadec':
pc_altaz = GEOM.hadec2altaz(self.pointing_center[-1,:], self.latitude, units='degrees')
elif self.pointing_coords == 'radec':
if lst is not None:
pc_altaz = GEOM.hadec2altaz(NP.asarray([lst-self.pointing_center[-1,0], self.pointing_center[-1,1]]), self.latitude, units='degrees')
else:
raise ValueError('LST must be provided. Sky coordinates are in Alt-Az format while pointing center is in RA-Dec format.')
pc_dircos = GEOM.altaz2dircos(pc_altaz, 'degrees') # Convert pointing center to direction cosine coordinates
pc_delay_offsets = DLY.geometric_delay(baselines_in_local_frame, pc_dircos, altaz=False, hadec=False, dircos=True, latitude=self.latitude)
if memsave:
pc_delay_offsets = pc_delay_offsets.astype(NP.float32)
# pointing_phase = 2.0 * NP.pi * NP.repeat(NP.dot(baselines_in_local_frame, pc_dircos.reshape(-1,1)), self.channels.size, axis=1) * NP.repeat(self.channels.reshape(1,-1), self.baselines.shape[0], axis=0)/FCNST.c
if not isinstance(skymodel, SM.SkyModel):
raise TypeError('skymodel should be an instance of class SkyModel.')
skycoords = SkyCoord(ra=skymodel.location[:,0]*units.deg, dec=skymodel.location[:,1]*units.deg, frame='fk5', equinox=Time(skymodel.epoch, format='jyear_str', scale='utc')).transform_to(FK5(equinox=timeobj))
if self.skycoords == 'hadec':
skypos_altaz = GEOM.hadec2altaz(skymodel.location, self.latitude, units='degrees')
elif self.skycoords == 'radec':
src_altaz = skycoords.transform_to(AltAz(obstime=timeobj, location=EarthLocation(lon=self.longitude*units.deg, lat=self.latitude*units.deg, height=self.altitude*units.m)))
skypos_altaz = NP.hstack((src_altaz.alt.deg.reshape(-1,1), src_altaz.az.deg.reshape(-1,1)))
if memsave:
datatype = NP.complex64
else:
datatype = NP.complex128
skyvis = NP.zeros( (self.baselines.shape[0], self.channels.size), dtype=datatype)
pb = None
if roi_info is not None:
if ('ind' not in roi_info) or ('pbeam' not in roi_info):
raise KeyError('Both "ind" and "pbeam" keys must be present in dictionary roi_info')
if (roi_info['ind'] is not None) and (roi_info['pbeam'] is not None):
m2 = roi_info['ind']
if m2.size > 0:
try:
pb = roi_info['pbeam'].reshape(-1,len(self.channels))
except ValueError:
raise ValueError('Number of columns of primary beam in key "pbeam" of dictionary roi_info must be equal to number of frequency channels.')
if NP.asarray(roi_info['ind']).size != pb.shape[0]:
raise ValueError('Values in keys ind and pbeam in must carry same number of elements.')
else:
if roi_radius is None:
roi_radius = 90.0
if roi_center is None:
roi_center = 'zenith'
elif (roi_center != 'zenith') and (roi_center != 'pointing_center'):
raise ValueError('Center of region of interest, roi_center, must be set to "zenith" or "pointing_center".')
if roi_center == 'pointing_center':
m1, m2, d12 = GEOM.spherematch(pointing_lon, pointing_lat, skycoords.ra.deg, skycoords.dec.deg, roi_radius, maxmatches=0)
else: # roi_center = 'zenith'
m2 = NP.arange(skypos_altaz.shape[0])
m2 = m2[NP.where(skypos_altaz[:,0] >= 90.0-roi_radius)] # select sources whose altitude (angle above horizon) is 90-roi_radius
if len(m2) > 0:
skypos_altaz_roi = skypos_altaz[m2,:]
coords_str = 'altaz'
prev_skymodel_success = False
if store_prev_skymodel_file is not None:
if not isinstance(store_prev_skymodel_file, str):
raise TypeError('Input store_prev_skymodel_file must be a string')
try:
with h5py.File(store_prev_skymodel_file, 'a') as fileobj:
if 'ind' in fileobj:
stored_ind_dset = fileobj['ind']
stored_spectrum_dset = fileobj['spectrum']
stored_ind = stored_ind_dset.value
stored_spectrum = stored_spectrum_dset.value
ind_of_m2_in_prev = NMO.find_list_in_list(stored_ind, m2)
fluxes = NP.zeros((m2.size, self.channels.size))
if NP.sum(~ind_of_m2_in_prev.mask) > 0: # Previously stored
fluxes[NP.where(~ind_of_m2_in_prev.mask)[0],:] = stored_spectrum[ind_of_m2_in_prev[~ind_of_m2_in_prev.mask],:]
if NP.sum(ind_of_m2_in_prev.mask) > 0: # Previously unavailable and have to be generated fresh
fluxes[NP.where(ind_of_m2_in_prev.mask)[0],:] = skymodel.generate_spectrum(ind=m2[NP.where(ind_of_m2_in_prev.mask)[0]], frequency=self.channels, interp_method='pchip')
del fileobj['ind']
del fileobj['spectrum']
else:
fluxes = skymodel.generate_spectrum(ind=m2, frequency=self.channels, interp_method='pchip')
ind_dset = fileobj.create_dataset('ind', data=m2)
spec_dset = fileobj.create_dataset('spectrum', data=fluxes, compression='gzip', compression_opts=9)
prev_skymodel_success = True
except:
prev_skymodel_success = False
if not prev_skymodel_success:
fluxes = skymodel.generate_spectrum(ind=m2, frequency=self.channels, interp_method='pchip')
if pb is None:
pb = PB.primary_beam_generator(skypos_altaz_roi, self.channels/1.0e9, skyunits='altaz', telescope=self.telescope, pointing_info=pb_info, pointing_center=pc_altaz, freq_scale='GHz')
pbfluxes = pb * fluxes
geometric_delays = DLY.geometric_delay(baselines_in_local_frame, skypos_altaz_roi, altaz=(coords_str=='altaz'), hadec=(coords_str=='hadec'), latitude=self.latitude)
vis_wts = None
if skymodel.src_shape is not None:
eps = 1.0e-13
f0 = self.channels[int(0.5*self.channels.size)]
wl0 = FCNST.c / f0
wl = FCNST.c / self.channels
skypos_dircos_roi = GEOM.altaz2dircos(skypos_altaz_roi, units='degrees')
# projected_spatial_frequencies = NP.sqrt(self.baseline_lengths.reshape(1,-1)**2 - (FCNST.c * geometric_delays)**2) / wl0
projected_spatial_frequencies = NP.sqrt(self.baseline_lengths.reshape(1,-1,1)**2 - (FCNST.c * geometric_delays[:,:,NP.newaxis])**2) / wl.reshape(1,1,-1)
src_FWHM = NP.sqrt(skymodel.src_shape[m2,0] * skymodel.src_shape[m2,1])
src_FWHM_dircos = 2.0 * NP.sin(0.5*NP.radians(src_FWHM)).reshape(-1,1) # assuming the projected baseline is perpendicular to source direction
# src_sigma_spatial_frequencies = 2.0 * NP.sqrt(2.0 * NP.log(2.0)) / (2 * NP.pi * src_FWHM_dircos) # estimate 1
src_sigma_spatial_frequencies = 1.0 / NP.sqrt(2.0*NP.log(2.0)) / src_FWHM_dircos # estimate 2 created by constraint that at lambda/D_proj, visibility weights are half
# # Tried deriving below an alternate expression but previous expression for src_FWHM_dircos seems better
# dtheta_radial = NP.radians(src_FWHM).reshape(-1,1)
# dtheta_circum = NP.radians(src_FWHM).reshape(-1,1)
# src_FWHM_dircos = NP.sqrt(skypos_dircos_roi[:,2].reshape(-1,1)**2 * dtheta_radial**2 + dtheta_circum**2) / NP.sqrt(2.0) # from 2D error propagation (another approximation to commented expression above for the same quantity). Add in quadrature and divide by sqrt(2) to get radius of error circle
# arbitrary_factor_for_src_width = NP.sqrt(2.0) # An arbitrary factor that can be adjusted based on what the longest baseline measures for a source of certain finite width
# src_sigma_spatial_frequencies = 2.0 * NP.sqrt(2.0 * NP.log(2.0)) / (2 * NP.pi * src_FWHM_dircos) * arbitrary_factor_for_src_width
# extended_sources_flag = 1/NP.clip(projected_spatial_frequencies, 0.5, NP.amax(projected_spatial_frequencies)) < src_FWHM_dircos
vis_wts = NP.ones_like(projected_spatial_frequencies)
# vis_wts = NP.exp(-0.5 * (projected_spatial_frequencies/src_sigma_spatial_frequencies)**2)
vis_wts = NP.exp(-0.5 * (projected_spatial_frequencies/src_sigma_spatial_frequencies[:,:,NP.newaxis])**2) # nsrc x nbl x nchan
if memsave:
pbfluxes = pbfluxes.astype(NP.float32, copy=False)
self.geometric_delays = self.geometric_delays + [geometric_delays.astype(NP.float32)]
if vis_wts is not None:
vis_wts = vis_wts.astype(NP.float32, copy=False)
else:
self.geometric_delays = self.geometric_delays + [geometric_delays]
# memory_available = psutil.phymem_usage().available
if vmemavail is None:
memory_available = psutil.virtual_memory().available
else:
memory_available = vmemavail
# memory_available = min([vmemavail, psutil.virtual_memory().available])
if gradient_mode is None:
if memsave:
memory_required = len(m2) * self.channels.size * self.baselines.shape[0] * 4.0 * 2 # bytes, 4 bytes per float, factor 2 is because the phase involves complex values
else:
memory_required = len(m2) * self.channels.size * self.baselines.shape[0] * 8.0 * 2 # bytes, 8 bytes per float, factor 2 is because the phase involves complex values
else:
if not isinstance(gradient_mode, str):
raise TypeError('Input gradient_mode must be a string')
if gradient_mode.lower() not in ['baseline', 'skypos', 'frequency']:
raise ValueError('Invalid value specified in input gradient_mode')
if self.gradient_mode is None:
self.gradient_mode = gradient_mode
if gradient_mode.lower() == 'baseline':
skyvis_gradient = NP.zeros((3, self.baselines.shape[0], self.channels.size), dtype=datatype)
if memsave:
memory_required = 3 * len(m2) * self.channels.size * self.baselines.shape[0] * 4.0 * 2 # bytes, 4 bytes per float, factor 2 is because the phase involves complex values, factor 3 because of three vector components of the gradient
else:
memory_required = 3 * len(m2) * self.channels.size * self.baselines.shape[0] * 8.0 * 2 # bytes, 8 bytes per float, factor 2 is because the phase involves complex values, factor 3 because of three vector components of the gradient
memory_sufficient = float(memory_available) > memory_required
if memory_sufficient:
try:
if memsave:
phase_matrix = NP.exp(-1j * NP.asarray(2.0 * NP.pi).astype(NP.float32) * (self.geometric_delays[-1][:,:,NP.newaxis].astype(NP.float32) - pc_delay_offsets.astype(NP.float32).reshape(1,-1,1)) * self.channels.astype(NP.float32).reshape(1,1,-1)).astype(NP.complex64)
if vis_wts is not None:
# phase_matrix *= vis_wts[:,:,NP.newaxis]
phase_matrix *= vis_wts
skyvis = NP.sum(pbfluxes[:,NP.newaxis,:] * phase_matrix, axis=0) # SUM(nsrc x nbl x nchan, axis=0) = nbl x nchan
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient = NP.sum(skypos_dircos_roi[:,:,NP.newaxis,NP.newaxis].astype(NP.float32) * pbfluxes[:,NP.newaxis,NP.newaxis,:] * phase_matrix[:,NP.newaxis,:,:], axis=0) # SUM(nsrc x 3 x nbl x nchan, axis=0) = 3 x nbl x nchan
else:
phase_matrix = 2.0 * NP.pi * (self.geometric_delays[-1][:,:,NP.newaxis].astype(NP.float64) - pc_delay_offsets.astype(NP.float64).reshape(1,-1,1)) * self.channels.astype(NP.float64).reshape(1,1,-1)
if vis_wts is not None:
# skyvis = NP.sum(pbfluxes[:,NP.newaxis,:] * NP.exp(-1j*phase_matrix) * vis_wts[:,:,NP.newaxis], axis=0) # Don't apply bandpass here
skyvis = NP.sum(pbfluxes[:,NP.newaxis,:] * NP.exp(-1j*phase_matrix) * vis_wts, axis=0) # SUM(nsrc x nbl x nchan, axis=0) = nbl x nchan
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient = NP.sum(skypos_dircos_roi[:,:,NP.newaxis,NP.newaxis].astype(NP.float64) * pbfluxes[:,NP.newaxis,NP.newaxis,:] * NP.exp(-1j*phase_matrix[:,NP.newaxis,:,:]) * vis_wts[:,NP.newaxis,:,:], axis=0) # SUM(nsrc x 3 x nbl x nchan, axis=0) = 3 x nbl x nchan
else:
skyvis = NP.sum(pbfluxes[:,NP.newaxis,:] * NP.exp(-1j*phase_matrix), axis=0) # SUM(nsrc x nbl x nchan, axis=0) = nbl x nchan
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient = NP.sum(skypos_dircos_roi[:,:,NP.newaxis,NP.newaxis].astype(NP.float64) * pbfluxes[:,NP.newaxis,NP.newaxis,:] * NP.exp(-1j*phase_matrix[:,NP.newaxis,:,:]), axis=0) # SUM(nsrc x 3 x nbl x nchan, axis=0) = 3 x nbl x nchan
except MemoryError as memxption:
print(memxption)
memory_sufficient = False
raise
if not memory_sufficient:
warnings.warn('\t\tDetecting memory shortage. Serializing over sky direction.')
downsize_factor = NP.ceil(memory_required/float(memory_available))
n_src_stepsize = int(len(m2)/downsize_factor)
src_indices = range(0,len(m2),n_src_stepsize)
if memsave:
warnings.warn('\t\tEnforcing single precision computations.')
for i in xrange(len(src_indices)):
phase_matrix = NP.exp(-1j * NP.asarray(2.0 * NP.pi).astype(NP.float32) * (self.geometric_delays[-1][src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,NP.newaxis].astype(NP.float32) - pc_delay_offsets.astype(NP.float32).reshape(1,-1,1)) * self.channels.astype(NP.float32).reshape(1,1,-1)).astype(NP.complex64, copy=False)
if vis_wts is not None:
phase_matrix *= vis_wts[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,:].astype(NP.float32)
# phase_matrix *= vis_wts[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,NP.newaxis].astype(NP.float32)
phase_matrix *= pbfluxes[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),NP.newaxis,:].astype(NP.float32)
skyvis += NP.sum(phase_matrix, axis=0)
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient += NP.sum(skypos_dircos_roi[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,NP.newaxis,NP.newaxis].astype(NP.float32) * phase_matrix[:,NP.newaxis,:,:], axis=0)
else:
for i in xrange(len(src_indices)):
phase_matrix = NP.exp(-1j * NP.asarray(2.0 * NP.pi).astype(NP.float64) * (self.geometric_delays[-1][src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,NP.newaxis].astype(NP.float64) - pc_delay_offsets.astype(NP.float64).reshape(1,-1,1)) * self.channels.astype(NP.float64).reshape(1,1,-1)).astype(NP.complex128, copy=False)
if vis_wts is not None:
phase_matrix *= vis_wts[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,:].astype(NP.float64)
phase_matrix *= pbfluxes[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),NP.newaxis,:].astype(NP.float64)
skyvis += NP.sum(phase_matrix, axis=0)
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient += NP.sum(skypos_dircos_roi[src_indices[i]:min(src_indices[i]+n_src_stepsize,len(m2)),:,NP.newaxis,NP.newaxis].astype(NP.float64) * phase_matrix[:,NP.newaxis,:,:], axis=0)
self.obs_catalog_indices = self.obs_catalog_indices + [m2]
else:
warnings.warn('No sources found in the catalog within matching radius. Simply populating the observed visibilities and/or gradients with noise.')
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
skyvis_gradient = NP.zeros( (3, self.baselines.shape[0], self.channels.size), dtype=datatype)
if self.timestamp == []:
self.skyvis_freq = skyvis[:,:,NP.newaxis]
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
self.gradient[gradient_mode] = skyvis_gradient[:,:,:,NP.newaxis]
else:
self.skyvis_freq = NP.dstack((self.skyvis_freq, skyvis[:,:,NP.newaxis]))
if gradient_mode is not None:
if gradient_mode.lower() == 'baseline':
self.gradient[gradient_mode] = NP.concatenate((self.gradient[gradient_mode], skyvis_gradient[:,:,:,NP.newaxis]), axis=3)
self.timestamp = self.timestamp + [timeobj.jd]
self.t_acc = self.t_acc + [t_acc]
self.t_obs += t_acc
self.n_acc += 1
self.lst = self.lst + [lst]
numbytes = []
variables = []
var = None
obj = None
for var,obj in locals().iteritems():
if isinstance(obj, NP.ndarray):
variables += [var]
numbytes += [obj.nbytes]
nGB = NP.asarray(numbytes) / 2.0**30
totalmemGB = NP.sum(nGB)
############################################################################
    def observing_run(self, pointing_init, skymodel, t_acc, duration, channels,
                      bpass, Tsys, lst_init, roi_radius=None, roi_center=None,
                      mode='track', pointing_coords=None, freq_scale=None,
                      brightness_units=None, verbose=True, memsave=False):
        """
        -------------------------------------------------------------------------
        Simulate an extended observing run in 'track' or 'drift' mode, by an
        instance of the InterferometerArray class, of the sky when a sky catalog
        is provided. The simulation generates visibilities observed by the
        interferometer array for the specified parameters. Uses member function
        observe() and builds the observation from snapshots. The timestamp for
        each snapshot is the current time at which the snapshot is generated.

        Inputs:

        pointing_init [2-element list or numpy array] The inital pointing
                      of the telescope at the start of the observing run.
                      This is where the telescopes will be initially phased up to
                      as reference. Coordinate system for the pointing_center is
                      specified by the input pointing_coords

        skymodel      [instance of class SkyModel] It consists of source flux
                      densities, their positions, and spectral indices. Read
                      class SkyModel docstring for more information.

        t_acc         [scalar] Accumulation time (sec) corresponding to timestamp

        brightness_units
                      [string] Units of flux density in the catalog and for the
                      generated visibilities. Accepted values are 'Jy' (Jansky)
                      and 'K' (Kelvin for temperature). If None set, it defaults
                      to 'Jy'

        duration      [scalar] Duration of observation in seconds

        channels      [list or numpy vector] frequency channels in units as
                      specified in freq_scale

        bpass         [list, list of lists or numpy array] Bandpass weights in
                      the form of M x N array or list of N-element lists. N must
                      equal the number of channels. If M=1, the same bandpass
                      will be used in all the snapshots for the entire
                      observation, otherwise M must equal the number of
                      snapshots which is int(duration/t_acc)

        Tsys          [scalar, list or numpy array] System temperature (in K). If
                      a scalar is provided, the same Tsys will be used in all the
                      snapshots for the duration of the observation. If a list or
                      numpy array is provided, the number of elements must equal
                      the number of snapshots which is int(duration/t_int)

        lst_init      [scalar] Initial LST (in degrees) at the beginning of the
                      observing run corresponding to pointing_init

        Keyword Inputs:

        roi_radius    [scalar] Radius of the region of interest (degrees) inside
                      which sources are to be observed. Default = 90 degrees,
                      which is the entire horizon.

        roi_center    [string] Center of the region of interest around which
                      roi_radius is used. Accepted values are 'pointing_center'
                      and 'zenith'. If set to None, it defaults to 'zenith'.

        freq_scale    [string] Units of frequencies specified in channels.
                      Accepted values are 'Hz', 'hz', 'khz', 'kHz', 'mhz',
                      'MHz', 'GHz' and 'ghz'. If None provided, defaults to 'Hz'

        mode          [string] Mode of observation. Accepted values are 'track'
                      and 'drift'. If using 'track', pointing center is fixed to
                      a specific point on the sky coordinate frame. If using
                      'drift', pointing center is fixed to a specific point on
                      the antenna's reference frame.

        pointing_coords
                      [string] Coordinate system for pointing_init. Accepted
                      values are 'radec', 'hadec' and 'altaz'. If None provided,
                      default is set based on observing mode. If mode='track',
                      pointing_coords defaults to 'radec', and if mode='drift',
                      it defaults to 'hadec'

        verbose       [boolean] If set to True, prints progress and diagnostic
                      messages. Default = True
        ------------------------------------------------------------------------
        """

        if verbose:
            print('Preparing an observing run...\n')
            print('\tVerifying input arguments to observing_run()...')

        # NOTE(review): parameters without defaults can never be unbound inside
        # the function body -- a missing argument raises TypeError at the call
        # site, not NameError here -- so this guard is effectively dead code.
        try:
            pointing_init, skymodel, t_acc, duration, bpass, Tsys, lst_init
        except NameError:
            raise NameError('One or more of pointing_init, skymodel, t_acc, duration, bpass, Tsys, lst_init not specified.')

        # --- Validate and normalize pointing_init to a flat 2-element array ---
        if isinstance(pointing_init, list):
            pointing_init = NP.asarray(pointing_init)
        elif not isinstance(pointing_init, NP.ndarray):
            raise TypeError('pointing_init must be a list or numpy array.')

        if pointing_init.size != 2:
            raise ValueError('pointing_init must be a 2-element vector.')
        pointing_init = pointing_init.ravel()

        if not isinstance(skymodel, SM.SkyModel):
            raise TypeError('skymodel must be an instance of class SkyModel.')

        if not isinstance(t_acc, (int, float)):
            raise TypeError('t_acc must be a scalar integer or float.')

        if t_acc <= 0.0:
            raise ValueError('t_acc must be positive.')

        if not isinstance(duration, (int, float)):
            raise TypeError('duration must be a scalar integer or float.')

        # Clamp duration up to at least one accumulation interval
        if duration <= t_acc:
            if verbose:
                warnings.warn('\t\tDuration specified to be shorter than t_acc. Will set it equal to t_acc')
            duration = t_acc

        # Number of snapshots (accumulations) in the run
        n_acc = int(duration / t_acc)
        if verbose:
            print('\t\tObserving run will have {0} accumulations.'.format(n_acc))

        if isinstance(channels, list):
            channels = NP.asarray(channels)
        elif not isinstance(channels, NP.ndarray):
            raise TypeError('channels must be a list or numpy array')

        # Convert channel frequencies to Hz based on freq_scale
        # NOTE(review): the converted local 'channels' is never used below;
        # all subsequent logic uses self.channels. Confirm whether
        # self.channels was meant to be updated from this argument.
        if (freq_scale is None) or (freq_scale == 'Hz') or (freq_scale == 'hz'):
            channels = NP.asarray(channels)
        elif freq_scale == 'GHz' or freq_scale == 'ghz':
            channels = channels * 1.0e9
        elif freq_scale == 'MHz' or freq_scale == 'mhz':
            channels = channels * 1.0e6
        elif freq_scale == 'kHz' or freq_scale == 'khz':
            channels = channels * 1.0e3
        else:
            raise ValueError('Frequency units must be "GHz", "MHz", "kHz" or "Hz". If not set, it defaults to "Hz"')

        # --- Broadcast bpass to shape (nbl, nchan, n_acc-compatible) ---
        if isinstance(bpass, (list, tuple, NP.ndarray)):
            bpass = NP.asarray(bpass)
        else:
            raise TypeError('bpass must be a list, tuple or numpy array')

        if bpass.size == self.channels.size:
            # One bandpass for all baselines and accumulations
            bpass = NP.expand_dims(NP.repeat(bpass.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2)
            if verbose:
                warnings.warn('\t\tSame bandpass will be applied to all baselines and all accumulations in the observing run.')
        elif bpass.size == self.baselines.shape[0] * self.channels.size:
            # Per-baseline bandpass, shared across accumulations
            bpass = NP.expand_dims(bpass.reshape(-1,self.channels.size), axis=2)
            if verbose:
                warnings.warn('\t\tSame bandpass will be applied to all accumulations in the observing run.')
        elif bpass.size == self.baselines.shape[0] * self.channels.size * n_acc:
            # Fully specified per baseline, channel and accumulation
            bpass = bpass.reshape(-1,self.channels.size,n_acc)
        else:
            raise ValueError('Dimensions of bpass incompatible with the number of frequency channels, baselines and number of accumulations.')

        # --- Broadcast Tsys to shape (nbl, nchan, n_acc-compatible) ---
        if isinstance(Tsys, (int, float, list, tuple, NP.ndarray)):
            Tsys = NP.asarray(Tsys).reshape(-1)
        else:
            raise TypeError('Tsys must be a scalar, list, tuple or numpy array')

        if Tsys.size == 1:
            if verbose:
                warnings.warn('\t\tTsys = {0:.1f} K will be assumed for all frequencies, baselines, and accumulations.'.format(Tsys[0]))
            Tsys = Tsys + NP.zeros((self.baselines.shape[0], self.channels.size, 1))
        elif Tsys.size == self.channels.size:
            Tsys = NP.expand_dims(NP.repeat(Tsys.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2)
            if verbose:
                warnings.warn('\t\tSame Tsys will be assumed for all baselines and all accumulations in the observing run.')
        elif Tsys.size == self.baselines.shape[0]:
            Tsys = NP.expand_dims(NP.repeat(Tsys.reshape(-1,1), self.channels.size, axis=1), axis=2)
            if verbose:
                warnings.warn('\t\tSame Tsys will be assumed for all frequency channels and all accumulations in the observing run.')
        elif Tsys.size == self.baselines.shape[0] * self.channels.size:
            Tsys = NP.expand_dims(Tsys.reshape(-1,self.channels.size), axis=2)
            if verbose:
                warnings.warn('\t\tSame Tsys will be assumed for all accumulations in the observing run.')
        elif Tsys.size == self.baselines.shape[0] * self.channels.size * n_acc:
            Tsys = Tsys.reshape(-1,self.channels.size,n_acc)
        else:
            raise ValueError('Dimensions of Tsys incompatible with the number of frequency channels, baselines and number of accumulations.')

        if not isinstance(lst_init, (int, float)):
            raise TypeError('Starting LST should be a scalar')

        if verbose:
            print('\tVerified input arguments.')
            print('\tProceeding to schedule the observing run...')

        # One LST per accumulation; t_acc/3.6e3 converts seconds to hours and
        # the factor 15.0 converts hours of LST to degrees
        lst = (lst_init + (t_acc/3.6e3) * NP.arange(n_acc)) * 15.0 # in degrees
        if verbose:
            print('\tCreated LST range for observing run.')

        # --- Determine the fixed pointing in the frame implied by mode ---
        if mode == 'track':
            # Tracking: phase/pointing center fixed in RA-Dec
            if pointing_coords == 'hadec':
                pointing = NP.asarray([lst_init - pointing_init[0], pointing_init[1]])
            elif (pointing_coords == 'radec') or (pointing_coords is None):
                pointing = pointing_init
            elif pointing_coords == 'altaz':
                hadec = GEOM.altaz2hadec(pointing_init, self.latitude, units='degrees')
                pointing = NP.asarray([lst_init - hadec[0], hadec[1]])
            else:
                raise ValueError('pointing_coords can only be set to "hadec", "radec" or "altaz".')
            self.pointing_coords = 'radec'
            self.phase_center_coords = 'radec'
        elif mode == 'drift':
            # Drifting: phase/pointing center fixed in HA-Dec (antenna frame)
            if pointing_coords == 'radec':
                pointing = NP.asarray([lst_init - pointing_init[0], pointing_init[1]])
            elif (pointing_coords == 'hadec') or (pointing_coords is None):
                pointing = pointing_init
            elif pointing_coords == 'altaz':
                pointing = GEOM.altaz2hadec(pointing_init, self.latitude, units='degrees')
            else:
                raise ValueError('pointing_coords can only be set to "hadec", "radec" or "altaz".')
            self.pointing_coords = 'hadec'
            self.phase_center_coords = 'hadec'
        # NOTE(review): any other value of mode silently leaves 'pointing'
        # undefined and would raise NameError in the loop below -- confirm
        # whether mode should be validated explicitly here.

        if verbose:
            print('\tPreparing to observe in {0} mode'.format(mode))

        if verbose:
            # NOTE(review): 'milestones' is computed but never used
            milestones = range(max(1,int(n_acc/10)), int(n_acc), max(1,int(n_acc/10)))
            progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(), PGB.ETA()], maxval=n_acc).start()
        for i in range(n_acc):
            timestamp = str(DT.datetime.now())
            # Modulo indexing lets a single bandpass/Tsys plane be reused
            # across all accumulations
            self.observe(timestamp, Tsys[:,:,i%Tsys.shape[2]],
                         bpass[:,:,i%bpass.shape[2]], pointing, skymodel,
                         t_acc, brightness_units=brightness_units,
                         roi_radius=roi_radius, roi_center=roi_center,
                         lst=lst[i], memsave=memsave)

            if verbose:
                progress.update(i+1)

        if verbose:
            progress.finish()

        self.t_obs = duration
        self.n_acc = n_acc

        if verbose:
            print('Observing run completed successfully.')
#############################################################################
def generate_noise(self):
"""
-------------------------------------------------------------------------
Generates thermal noise from attributes that describe system parameters
which can be added to sky visibilities. Thermal RMS here corresponds to
a complex value comprising of both real and imaginary parts. Thus only
1/sqrt(2) goes into each real and imaginary parts.
[Based on equations 9-12 through 9-15 or section 5 in chapter 9 on
Sensitivity in SIRA II wherein the equations are for real and imaginary
parts separately.]
-------------------------------------------------------------------------
"""
eff_Q = self.eff_Q
A_eff = self.A_eff
t_acc = NP.asarray(self.t_acc)
if len(eff_Q.shape) == 2:
eff_Q = eff_Q[:,:,NP.newaxis]
if len(A_eff.shape) == 2:
A_eff = A_eff[:,:,NP.newaxis]
t_acc = t_acc[NP.newaxis,NP.newaxis,:]
if (self.flux_unit == 'JY') or (self.flux_unit == 'jy') or (self.flux_unit == 'Jy'):
self.vis_rms_freq = 2.0 * FCNST.k / NP.sqrt(t_acc*self.freq_resolution) * (self.Tsys/A_eff/eff_Q) / CNST.Jy
elif (self.flux_unit == 'K') or (self.flux_unit == 'k'):
self.vis_rms_freq = 1 / NP.sqrt(t_acc*self.freq_resolution) * self.Tsys/eff_Q
else:
raise ValueError('Flux density units can only be in Jy or K.')
self.vis_noise_freq = self.vis_rms_freq / NP.sqrt(2.0) * (NP.random.randn(self.baselines.shape[0], self.channels.size, len(self.timestamp)) + 1j * NP.random.randn(self.baselines.shape[0], self.channels.size, len(self.timestamp))) # sqrt(2.0) is to split equal uncertainty into real and imaginary parts
#############################################################################
def add_noise(self):
"""
-------------------------------------------------------------------------
Adds the thermal noise generated in member function generate_noise() to
the sky visibilities after extracting and applying complex instrument
gains
-------------------------------------------------------------------------
"""
gains = 1.0
if self.gaininfo is not None:
try:
gains = self.gaininfo.spline_gains(self.labels, freqs=self.channels, times=NP.asarray(self.timestamp))
except IndexError:
try:
gains = self.gaininfo.spline_gains(self.labels, freqs=self.channels, times=NP.asarray(self.timestamp)-self.timestamp[0])
except IndexError:
try:
gains = self.gaininfo.nearest_gains(self.labels, freqs=self.channels, times=NP.asarray(self.timestamp))
except:
warnings.warn('Interpolation and nearest neighbour logic failed. Proceeding with default unity gains')
else:
warnings.warn('Gain table absent. Proceeding with default unity gains')
self.vis_freq = gains * self.skyvis_freq + self.vis_noise_freq
#############################################################################
def apply_gradients(self, gradient_mode=None, perturbations=None):
"""
-------------------------------------------------------------------------
Apply the perturbations in combination with the gradients to determine
perturbed visibilities
Inputs:
perturbations [dictionary] Contains perturbations on one of the
following quantities (specified as keys):
'baseline' [numpy array] nseed x 3 x nbl baseline
perturbations (in same units as attribute
baselines). The first dimension denotes the
number of realizations, the second denotes
the x-, y- and z-axes and the third
denotes the number of baselines. It can also
handle arrays of shapes (n1, n2, ..., 3, nbl)
gradient_mode [string] Specifies the quantity on which perturbations
are provided and perturbed visibilities to be computed.
This string must be one of the keys in the input
dictionary perturbations and must be found in the
attribute gradient_mode and gradient. Currently
accepted values are 'baseline'
Output:
Perturbed visibilities as a n1 x n2 x ... x nbl x nchan x ntimes
complex array
-------------------------------------------------------------------------
"""
if gradient_mode is None:
gradient_mode = self.gradient_mode
if perturbations is None:
perturbations = {gradient_mode: NP.zeros((1,1,1))}
if self.gradient_mode is None:
raise AttributeError('No gradient attribute found')
else:
if not self.gradient:
raise AttributeError('No gradient attribute found')
if not isinstance(perturbations, dict):
raise TypeError('Input perturbations must be a dictionary')
if not isinstance(gradient_mode, str):
raise TypeError('Input gradient_mode must be a string')
if gradient_mode not in ['baseline']:
raise KeyError('Specified gradient mode {0} not currently supported'.format(gradient_mode))
if gradient_mode not in perturbations:
raise KeyError('{0} key not found in input perturbations'.format(gradient_key))
if gradient_mode != self.gradient_mode:
raise ValueError('Specified gradient mode {0} not found in attribute'.format(gradient_mode))
if not isinstance(perturbations[gradient_mode], NP.ndarray):
raise TypeError('Perturbations must be specified as a numpy array')
if perturbations[gradient_mode].ndim == 2:
perturbations[gradient_mode] = perturbations[gradient_mode][NP.newaxis,...]
if perturbations[gradient_mode].ndim < 2:
raise ValueError('Perturbations must be two--dimensions or higher')
inpshape = perturbations[gradient_mode].shape
if perturbations[gradient_mode].ndim > 3:
perturbations[gradient_mode] = perturbations[gradient_mode].reshape(-1,inpshape[-2],inpshape[-1])
if perturbations[gradient_mode].shape[2] != self.gradient[self.gradient_mode].shape[1]:
raise ValueError('Number of {0} perturbations not equal to that in the gradient attribute'.format(gradient_mode))
if perturbations[gradient_mode].shape[1] == 1:
warnings.warn('Only {0}-dimensional coordinates specified. Proceeding with zero perturbations in other coordinate axes.'.format(perturbations[gradient_mode].shape[1]))
perturbations[gradient_mode] = NP.hstack((perturbations[gradient_mode], NP.zeros((perturbations[gradient_mode].shape[0],2,perturbations[gradient_mode].shape[2])))) # nseed x 3 x nbl
elif perturbations[gradient_mode].shape[1] == 2:
warnings.warn('Only {0}-dimensional coordinates specified. Proceeding with zero perturbations in other coordinate axes.'.format(perturbations[gradient_mode].shape[1]))
perturbations[gradient_mode] = NP.hstack((perturbations[gradient_mode], NP.zeros((perturbations[gradient_mode].shape[0],1,perturbations[gradient_mode].shape[2])))) # nseed x 3 x nbl
elif perturbations[gradient_mode].shape[1] > 3:
warnings.warn('{0}-dimensional coordinates specified. Proceeding with only the first three dimensions of coordinate axes.'.format(3))
perturbations[gradient_mode] = perturbations[gradient_mode][:,:3,:] # nseed x 3 x nbl
wl = FCNST.c / self.channels
if gradient_mode == 'baseline':
delta_skyvis_freq = -1j * 2.0 * NP.pi / wl.reshape(1,1,-1,1) * NP.sum(perturbations[gradient_mode][...,NP.newaxis,NP.newaxis] * self.gradient[gradient_mode][NP.newaxis,...], axis=1) # nseed x nbl x nchan x ntimes
outshape = list(inpshape[:-2])
outshape += [self.labels.size, self.channels.size, self.lst.size]
outshape = tuple(outshape)
delta_skyvis_freq = delta_skyvis_freq.reshape(outshape)
return delta_skyvis_freq
#############################################################################
    def duplicate_measurements(self, blgroups=None):
        """
        -------------------------------------------------------------------------
        Duplicate visibilities based on redundant baselines specified. This saves
        time when compared to simulating visibilities over redundant baselines.
        Thus, it is more efficient to simulate unique baselines and duplicate
        measurements for redundant baselines

        Inputs:

        blgroups [dictionary] Dictionary of baseline groups where the keys are
                 tuples containing baseline labels. Under each key is a numpy
                 recarray of baseline labels that are redundant and fall under
                 the baseline label key. Any number of sets of redundant
                 measurements can be duplicated in this depending on the
                 baseline label keys and recarrays specified here. It results
                 in updating attributes where a new number of baselines are
                 formed from original baselines and new redundant baselines.
                 If set to None (default), attribute blgroups will be used to
                 create redundant sets

        Side effects: expands skyvis_freq, gradient, labels, baselines,
        projected_baselines, baseline_lengths and (where per-baseline) Tsys,
        eff_Q, A_eff, bp and bp_wts by repeating each unique baseline's entry
        once per redundant copy, then regenerates and re-applies noise.
        -------------------------------------------------------------------------
        """

        if blgroups is None:
            blgroups = self.blgroups
        if not isinstance(blgroups, dict):
            raise TypeError('Input blgroups must be a dictionary')

        # Total number of baselines after duplication: either counted from the
        # group contents or taken from the reverse map when available
        if self.bl_reversemap is None:
            nbl = NP.sum(NP.asarray([len(blgroups[blkey]) for blkey in blgroups]))
        else:
            nbl = len(self.bl_reversemap)

        # Only duplicate when the current labels are fewer than the redundant
        # total; otherwise there is nothing to expand
        if self.labels.size < nbl:
            # NOTE(review): passing dict.keys() to NP.asarray assumes Python 2
            # (list of keys); under Python 3 this would need list(blgroups.keys())
            label_keys = NP.asarray(blgroups.keys(), dtype=self.labels.dtype)
            for label_key in label_keys:
                # Accept a group key in either (a,b) or reversed (b,a) order
                if label_key not in self.labels:
                    if NP.asarray([tuple(reversed(label_key))], dtype=self.labels.dtype)[0] not in self.labels:
                        raise KeyError('Input label {0} not found in attribute labels'.format(label_key))
                    else:
                        label_key = NP.asarray([tuple(reversed(label_key))], dtype=self.labels.dtype)[0]
                if label_key.dtype != blgroups[tuple(label_key)].dtype:
                    warnings.warn('Datatype of attribute labels does not match that of the keys in attribute blgroups. Need to fix. Processing with forced matching of the two datatypes')
                # Ensure the group key itself is a member of its own group so
                # it gets counted among the duplicates
                if tuple(label_key) not in map(tuple, blgroups[tuple(label_key)]):
                # if NP.isin(label_key, blgroups[tuple(label_key)], invert=True):
                # if label_key not in blgroups[tuple(label_key)]:
                    # blgroups[tuple(label_key)] += [label_key]
                    blgroups[tuple(label_key)] = NP.hstack((label_key.astype(blgroups[tuple(label_key)].dtype), blgroups[tuple(label_key)]))

            # num_list[i] = number of copies of the i-th current label;
            # uniq_inplabels = the expanded label list in matching order
            uniq_inplabels = []
            num_list = []
            for label in self.labels:
                if label in label_keys:
                    num_list += [blgroups[tuple(label)].size]
                    for lbl in blgroups[tuple(label)]:
                        if tuple(lbl) not in uniq_inplabels:
                            uniq_inplabels += [tuple(lbl)]
                        else:
                            raise ValueError('Label {0} repeated in more than one baseline group'.format(lbl))
                else:
                    num_list += [1]
                    uniq_inplabels += [tuple(label)]
            if len(num_list) != len(self.labels):
                raise ValueError('Fatal error in counting and matching labels in input blgroups')

            # Repeat every per-baseline attribute according to num_list
            if self.skyvis_freq is not None:
                self.skyvis_freq = NP.repeat(self.skyvis_freq, num_list, axis=0)
            if self.gradient_mode is not None:
                # Gradient arrays carry the baseline axis at position 1
                self.gradient[self.gradient_mode] = NP.repeat(self.gradient[self.gradient_mode], num_list, axis=1)
            self.labels = NP.asarray(uniq_inplabels, dtype=self.labels.dtype)
            self.baselines = NP.repeat(self.baselines, num_list, axis=0)
            self.projected_baselines = NP.repeat(self.projected_baselines, num_list, axis=0)
            self.baseline_lengths = NP.repeat(self.baseline_lengths, num_list)
            # Attributes with a leading baseline axis are only expanded when
            # they actually vary per baseline (first dimension > 1)
            if self.Tsys.shape[0] > 1:
                self.Tsys = NP.repeat(self.Tsys, num_list, axis=0)
            if self.eff_Q.shape[0] > 1:
                self.eff_Q = NP.repeat(self.eff_Q, num_list, axis=0)
            if self.A_eff.shape[0] > 1:
                self.A_eff = NP.repeat(self.A_eff, num_list, axis=0)
            if self.bp.shape[0] > 1:
                self.bp = NP.repeat(self.bp, num_list, axis=0)
            if self.bp_wts.shape[0] > 1:
                self.bp_wts = NP.repeat(self.bp_wts, num_list, axis=0)

            # Fresh independent noise for the expanded baseline set
            self.generate_noise()
            self.add_noise()
############################################################################
def getBaselineGroupKeys(self, inp_labels):
"""
------------------------------------------------------------------------
Find redundant baseline group keys of groups that contain the input
baseline labels
Inputs:
inp_labels
[list] List where each element in the list is a two-element
tuple that corresponds to a baseline / antenna pair label.
e.g. [('1', '2'), ('3', '0'), ('2', '2'), ...]
Output:
Tuple containing two values. The first value is a list of all baseline
group keys corresponding to the input keys. If any input keys were not
found in blgroups_reversemap, those corresponding position in this list
will be filled with None to indicate the label was not found. The second
value in the tuple indicates if the ordering of the input label had to
be flipped in order to find the baseline group key. Positions where an
input label was found as is will contain False, but if it had to be
flipped will contain True. If the input label was not found, it will be
filled with None.
Example:
blkeys, flipped = InterferometerArray.getBaselineGroupKeys(inp_labels)
blkeys --> [('2','3'), ('11','16'), None, ('5','1'),...]
flipped --> [False, True, None, False],...)
------------------------------------------------------------------------
"""
return getBaselineGroupKeys(inp_labels, self.bl_reversemap)
#################################################################################
def getBaselinesInGroups(self, inp_labels):
"""
---------------------------------------------------------------------------
Find all redundant baseline labels in groups that contain the given input
baseline labels
Inputs:
inp_labels
[list] List where each element in the list is a two-element tuple
that corresponds to a baseline / antenna pair label.
e.g. [('1', '2'), ('3', '0'), ('2', '2'), ...]
Output:
Tuple with two elements where the first element is a list of numpy
RecArrays where each RecArray corresponds to the entry in inp_label and is
an array of two-element records corresponding to the baseline labels in
that redundant group. If the input baseline is not found, the corresponding
element in the list is None to indicate the baseline label was not found.
The second value in the tuple indicates if the ordering of the input label
had to be flipped in order to find the baseline group key. Positions where
an input label was found as is will contain False, but if it had to be
flipped will contain True. If the input label was not found, it will
contain a None entry.
Example:
list_blgrps, flipped = InterferometerArray.getBaselineGroupKeys(inplabels)
list_blgrps --> [array([('2','3'), ('11','16')]), None,
array([('5','1')]), ...],
flipped --> [False, True, None, ...])
---------------------------------------------------------------------------
"""
return getBaselinesInGroups(inp_labels, self.bl_reversemap, self.blgroups)
#################################################################################
    def getThreePointCombinations(self, unique=False):
        """
        -------------------------------------------------------------------------
        Return all or only unique 3-point combinations of baselines

        Input:

        unique      [boolean] If set to True, only unique 3-point combinations
                    of baseline triads are returned. If set to False (default),
                    all 3-point combinations are returned.

        Output:

        Tuple containing two lists. The first list is a list of triplet tuples
        of antenna labels in the form [(a1,a2,a3), (a1,a4,a6), ...]. The second
        list contains, for each antenna triplet, a list of the three baseline
        vectors [bl12, bl23, bl31] (each a 3-element array). Note: despite the
        older description mentioning string-encoded baselines, the actual
        second return value is the list of baseline-vector triplets (see the
        commented-out return below).
        -------------------------------------------------------------------------
        """

        if not isinstance(unique, bool):
            raise TypeError('Input unique must be boolean')
        # String-encode the simulated reference baseline vectors (2 decimal
        # places per component) for fast membership tests below
        bl = self.baselines + 0.0 # to avoid any weird negative sign before 0.0
        blstr = NP.unique(['{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(lo) for lo in bl])
        bltriplets = []
        blvecttriplets = []
        anttriplets = []
        # Loop over ordered antenna pairs (a1, a2); a third loop over a3 forms
        # the triangle. Zero-length baselines (a pair with itself or
        # co-located antennas) are skipped via the length checks.
        for aind1,albl1 in enumerate(self.layout['labels']):
            for aind2,albl2 in enumerate(self.layout['labels']):
                bl12 = self.layout['positions'][aind2] - self.layout['positions'][aind1]
                bl12 += 0.0 # to avoid any weird negative sign before 0.0
                bl12[NP.abs(bl12) < 1e-10] = 0.0
                bl12_len = NP.sqrt(NP.sum(bl12**2))
                if bl12_len > 0.0:
                    bl12str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl12)
                    # If not found as-is among the reference baselines, try
                    # the reversed (negated) baseline vector.
                    # NOTE(review): the flip mutates bl12 in place, so the
                    # vector stored in blvecttriplets is the flipped one —
                    # presumably intentional to match the simulated set;
                    # confirm against downstream closure-phase usage.
                    if bl12str not in blstr:
                        bl12 *= -1
                        bl12 += 0.0 # to avoid any weird negative sign before 0.0
                        bl12str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl12)
                    if bl12str not in blstr:
                        warnings.warn('A baseline not found in the simulated reference baselines. Proceeding with the rest')
                        # raise IndexError('A baseline not found in reference baselines')
                    else:
                        for aind3,albl3 in enumerate(self.layout['labels']):
                            # Close the triangle: a2->a3 and a3->a1
                            bl23 = self.layout['positions'][aind3] - self.layout['positions'][aind2]
                            bl31 = self.layout['positions'][aind1] - self.layout['positions'][aind3]
                            bl23 += 0.0 # to avoid any weird negative sign before 0.0
                            bl31 += 0.0 # to avoid any weird negative sign before 0.0
                            bl23[NP.abs(bl23) < 1e-10] = 0.0
                            bl31[NP.abs(bl31) < 1e-10] = 0.0
                            bl23_len = NP.sqrt(NP.sum(bl23**2))
                            bl31_len = NP.sqrt(NP.sum(bl31**2))
                            if (bl23_len > 0.0) and (bl31_len > 0.0):
                                bl23str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl23)
                                if bl23str not in blstr:
                                    bl23 *= -1
                                    bl23 += 0.0 # to avoid any weird negative sign before 0.0
                                    bl23str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl23)
                                if bl23str not in blstr:
                                    warnings.warn('A baseline not found in the simulated reference baselines. Proceeding with the rest')
                                    # raise IndexError('A baseline not found in reference baselines')
                                else:
                                    bl31str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl31)
                                    if bl31str not in blstr:
                                        bl31 *= -1
                                        bl31 += 0.0 # to avoid any weird negative sign before 0.0
                                        bl31str = '{0[0]:.2f}_{0[1]:.2f}_{0[2]:.2f}'.format(bl31)
                                    if bl31str not in blstr:
                                        warnings.warn('A baseline not found in the simulated reference baselines. Proceeding with the rest')
                                        # raise IndexError('A baseline not found in reference baselines')
                                    else:
                                        list123_str = [bl12str, bl23str, bl31str]
                                        if len(list123_str) == 3:
                                            if len(bltriplets) == 0:
                                                # First triplet: accept unconditionally
                                                bltriplets += [list123_str]
                                                blvecttriplets += [[bl12, bl23, bl31]]
                                                anttriplets += [(albl1, albl2, albl3)]
                                            else:
                                                found = False
                                                if unique:
                                                    # Linear scan for a previously recorded
                                                    # triplet containing the same baseline
                                                    # strings (order-insensitive via setdiff1d)
                                                    ind = 0
                                                    while (not found) and (ind < len(bltriplets)):
                                                        bltriplet = bltriplets[ind]
                                                        if NP.setdiff1d(list123_str, bltriplet).size == 0:
                                                            found = True
                                                        else:
                                                            ind += 1
                                                # When unique=False, found stays False and
                                                # every combination (all orderings) is kept
                                                if not found:
                                                    bltriplets += [list123_str]
                                                    blvecttriplets += [[bl12, bl23, bl31]]
                                                    anttriplets += [(albl1, albl2, albl3)]
        # return (anttriplets, bltriplets)
        return (anttriplets, blvecttriplets)
#############################################################################
def getClosurePhase(self, antenna_triplets=None, delay_filter_info=None,
specsmooth_info=None, spectral_window_info=None,
unique=False):
"""
-------------------------------------------------------------------------
Get closure phases of visibilities from triplets of antennas.
Inputs:
antenna_triplets
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. If set to None (default), all
the unique triplets based on the antenna layout attribute
in class InterferometerArray
unique [boolean] If set to True, only unique 3-point combinations
of baseline triads are returned. If set to False (default),
all 3-point combinations are returned. Applies only if
antenna_triplets is set to None, otherwise the 3-point
combinations of the specified antenna_triplets is returned.
delay_filter_info
[NoneType or dictionary] Info containing delay filter
parameters. If set to None (default), no delay filtering is
performed. Otherwise, delay filter is applied on each of the
visibilities in the triplet before computing the closure
phases. The delay filter parameters are specified in a
dictionary as follows:
'type' [string] 'horizon' (default) or 'regular'. If
set to 'horizon', the horizon delay limits are
estimated from the respective baseline lengths
in the triplet. If set to 'regular', the extent
of the filter is determined by the 'min' and
'width' keys (see below).
'min' [scalar] Non-negative number (in seconds) that
specifies the minimum delay in the filter span.
If not specified, it is assumed to be 0. If
'type' is set to 'horizon', the 'min' is ignored
and set to 0.
'width' [scalar] Non-negative number (in numbers of
inverse bandwidths). If 'type' is set to
'horizon', the width represents the delay
buffer beyond the horizon. If 'type' is set to
'regular', this number has to be positive and
determines the span of the filter starting from
the minimum delay in key 'min'.
'mode' [string] 'discard' (default) or 'retain'. If set
to 'discard', the span defining the filter is
discarded and the rest retained. If set to
'retain', the span defining the filter is
retained and the rest discarded. For example,
if 'type' is set to 'horizon' and 'mode' is set
to 'discard', the horizon-to-horizon is
filtered out (discarded).
specsmooth_info
[NoneType or dictionary] Spectral smoothing window to be
applied prior to the delay transform. If set to None, no
smoothing is done. This is usually set if spectral
smoothing is to be done such as in the case of RFI. The
smoothing window parameters are specified using the
following keys and values:
'op_type' [string] Smoothing operation type.
Default='median' (currently accepts only
'median' or 'interp').
'window_size' [integer] Size of smoothing window (in
pixels) along frequency axis. Applies only
if op_type is set to 'median'
'maskchans' [NoneType or numpy array] Numpy boolean array
of size nchan. False entries imply those
channels are not masked and will be used in
in interpolation while True implies they are
masked and will not be used in determining the
interpolation function. If set to None, all
channels are assumed to be unmasked (False).
'evalchans' [NoneType or numpy array] Channel numbers at
which visibilities are to be evaluated. Will
be useful for filling in RFI flagged channels.
If set to None, channels masked in 'maskchans'
will be evaluated
'noiseRMS' [NoneType or scalar or numpy array] If set to
None (default), the rest of the parameters are
used in determining the RMS of thermal noise.
If specified as scalar, all other parameters
will be ignored in estimating noiseRMS and
this value will be used instead. If specified
as a numpy array, it must be of shape
broadcastable to (nbl,nchan,ntimes). So
accpeted shapes can be (1,1,1), (1,1,ntimes),
(1,nchan,1), (nbl,1,1), (1,nchan,ntimes),
(nbl,nchan,1), (nbl,1,ntimes), or
(nbl,nchan,ntimes).
spectral_window_info
[NoneType or dictionary] Spectral window parameters to
determine the spectral weights and apply to the visibilities
in the frequency domain before filtering in the delay domain.
THESE PARAMETERS ARE APPLIED ON THE INDIVIDUAL VISIBILITIES
THAT GO INTO THE CLOSURE PHASE. THESE ARE NOT TO BE CONFUSED
WITH THE PARAMETERS THAT WILL BE USED IN THE ACTUAL DELAY
TRANSFORM OF CLOSURE PHASE SPECTRA WHICH ARE SPECIFIED
SEPARATELY FURTHER BELOW.
If set to None (default), unity spectral weights are applied.
If spectral weights are to be applied, it must be a provided
as a dictionary with the following keys and values:
bw_eff [scalar] effective bandwidths (in Hz) for the
spectral window
freq_center [scalar] frequency center (in Hz) for the
spectral window
shape [string] frequency window shape for the
spectral window. Accepted values are 'rect' or
'RECT' (for rectangular), 'bnw' and 'BNW' (for
Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
fftpow [scalar] power to which the FFT of the window
will be raised. The value must be a positive
scalar.
Output:
Dictionary containing closure phase information under the following keys
and values:
'closure_phase_skyvis' [numpy array] Closure phases (in radians) for
the given antenna triplets from the noiseless
visibilities. It is of shape
ntriplets x nchan x ntimes
'closure_phase_vis' [numpy array] Closure phases (in radians) for
the given antenna triplets for noisy
visibilities. It is of shape
ntriplets x nchan x ntimes
'closure_phase_noise' [numpy array] Closure phases (in radians) for
the given antenna triplets for thermal noise in
visibilities. It is of shape
ntriplets x nchan x ntimes
'antenna_triplets' [list of tuples] List of three-element tuples of
antenna IDs for which the closure phases are
calculated.
'baseline_triplets' [numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'.
'skyvis' [numpy array] Noiseless visibilities that went
into the triplet used for estimating closure
phases. It has size ntriplets x 3 nchan x ntimes
where 3 is for the triplet of visibilities or
baselines involved.
'vis' [numpy array] Same as 'skyvis' but for noisy
visibilities
'noisevis' [numpy array] Same as 'skyvis' but for the
noise in the visibilities
'spectral_weights' [numpy array] Spectral weights applied in the
frequency domain before filtering. This is
derived based on the parameters in the input
spectral_window_info. If spectral_window_info is
set to None, the spectral weights are set to 1.0
with shape (1,). If spectral_window_info is
specified as not None, the shape of the spectral
weights is (nchan,).
-------------------------------------------------------------------------
"""
if antenna_triplets is None:
antenna_triplets, bltriplets = self.getThreePointCombinations(unique=unique)
if not isinstance(antenna_triplets, list):
raise TypeError('Input antenna triplets must be a list of triplet tuples')
# Check if spectral smoothing is to be applied
if specsmooth_info is not None:
if not isinstance(specsmooth_info, dict):
raise TypeError('Input specsmooth_info must be a dictionary')
if 'op_type' not in specsmooth_info:
raise KeyError('Key "op_type" not found in input specsmooth_info')
if specsmooth_info['op_type'].lower() not in ['median', 'interp']:
raise ValueError('op_type specified in specsmooth_info currently not supported')
if specsmooth_info['op_type'].lower() == 'median':
if 'window_size' not in specsmooth_info:
raise KeyError('Input "window_size" not found in specsmooth_info')
if specsmooth_info['window_size'] <= 0:
raise ValueError('Spectral filter window size must be positive')
if specsmooth_info['op_type'].lower() == 'interp':
if 'maskchans' not in specsmooth_info:
specsmooth_info['maskchans'] = NP.zeros(self.channels.size, dtype=NP.bool)
elif specsmooth_info['maskchans'] is None:
specsmooth_info['maskchans'] = NP.zeros(self.channels.size, dtype=NP.bool)
elif not isinstance(specsmooth_info['maskchans'], NP.ndarray):
raise TypeError('Value under key "maskchans" must be a numpy array')
else:
if specsmooth_info['maskchans'].dtype != bool:
raise TypeError('Value under key "maskchans" must be a boolean numpy array')
if specsmooth_info['maskchans'].size != self.channels.size:
raise ValueError('Size of numpy array under key "maskchans" is not equal to the number of frequency channels')
specsmooth_info['maskchans'] = specsmooth_info['maskchans'].ravel()
if 'evalchans' not in specsmooth_info:
specsmooth_info['evalchans'] = NP.where(specsmooth_info['maskchans'])[0]
elif specsmooth_info['evalchans'] is None:
specsmooth_info['evalchans'] = NP.where(specsmooth_info['maskchans'])[0]
elif not isinstance(specsmooth_info['evalchans'], (int,list,NP.ndarray)):
raise TypeError('Values under key "evalchans" must be an integer, list or numpy array')
else:
specsmooth_info['evalchans'] = NP.asarray(specsmooth_info['evalchans']).reshape(-1)
unmasked_chans = NP.where(NP.logical_not(specsmooth_info['maskchans']))[0]
# Check if spectral windowing is to be applied
if spectral_window_info is not None:
freq_center = spectral_window_info['freq_center']
bw_eff = spectral_window_info['bw_eff']
shape = spectral_window_info['shape']
fftpow = spectral_window_info['fftpow']
if freq_center is None:
freq_center = self.channels[self.channels.size/2]
if shape is None:
shape = 'rect'
else:
shape = shape.lower()
if bw_eff is None:
if shape == 'rect':
bw_eff = self.channels.size * self.freq_resolution
elif shape == 'bhw':
bw_eff = 0.5 * self.channels.size * self.freq_resolution
else:
raise ValueError('Specified window shape not currently supported')
if fftpow is None:
fftpow = 1.0
elif isinstance(fftpow, (int,float)):
if fftpow <= 0.0:
raise ValueError('Value fftpow must be positive')
else:
raise ValueError('Value fftpow must be a scalar (int or float)')
freq_wts = NP.empty(self.channels.size, dtype=NP.float_)
frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.freq_resolution).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.channels.reshape(-1,1), NP.asarray(freq_center).reshape(-1,1), distance_ULIM=0.5*self.freq_resolution, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
# n_window = n_window[sortind]
window = NP.sqrt(frac_width * n_window) * DSP.window_fftpow(n_window, shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.channels[ind_channels[0]] + self.freq_resolution * (NP.arange(n_window) - int(n_window/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.channels.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.freq_resolution, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.channels.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts = window
else:
freq_wts = NP.asarray(1.0).reshape(-1)
# Check if delay filter is to be performed
filter_unmask = NP.ones(self.channels.size)
if delay_filter_info is not None:
fft_delays = DSP.spectral_axis(self.channels.size, delx=self.freq_resolution, shift=False, use_real=False)
dtau = fft_delays[1] - fft_delays[0]
if not isinstance(delay_filter_info, dict):
raise TypeError('Delay filter info must be specified as a dictionary')
if 'mode' not in delay_filter_info:
filter_mode = 'discard'
else:
filter_mode = delay_filter_info['mode']
if filter_mode.lower() not in ['discard', 'retain']:
raise ValueError('Invalid delay filter mode specified')
if 'type' not in delay_filter_info:
filter_type = 'horizon'
else:
filter_type = delay_filter_info['type']
if filter_type.lower() not in ['horizon', 'regular']:
raise ValueError('Invalid delay filter type specified')
if filter_type.lower() == 'regular':
if ('min' not in delay_filter_info) or ('width' not in delay_filter_info):
raise KeyError('Keys "min" and "width" must be specified in input delay_filter_info')
delay_min = delay_filter_info['min']
delay_width = delay_filter_info['width']
if delay_min is None:
delay_min = 0.0
elif isinstance(delay_min, (int,float)):
delay_min = max([0.0, delay_min])
else:
raise TypeError('Minimum delay in the filter must be a scalar value (int or float)')
if isinstance(delay_width, (int,float)):
if delay_width <= 0.0:
raise ValueError('Delay filter width must be positive')
else:
raise TypeError('Delay width in the filter must be a scalar value (int or float)')
else:
if 'width' not in delay_filter_info:
delay_width = 0.0
else:
delay_width = delay_filter_info['width']
if delay_width is None:
delay_width = 0.0
elif isinstance(delay_width, (int,float)):
if delay_width <= 0.0:
raise ValueError('Delay filter width must be positive')
else:
raise TypeError('Delay width in the filter must be a scalar value (int or float)')
delay_width = delay_width * dtau
skyvis_freq = NP.copy(self.skyvis_freq)
vis_freq = NP.copy(self.vis_freq)
vis_noise_freq = NP.copy(self.vis_noise_freq)
phase_skyvis123 = []
phase_vis123 = []
phase_noise123 = []
blvecttriplets = []
skyvis_triplets = []
vis_triplets = []
noise_triplets = []
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Triplets '.format(len(antenna_triplets)), PGB.ETA()], maxval=len(antenna_triplets)).start()
for tripletind,anttriplet in enumerate(antenna_triplets):
blvecttriplets += [NP.zeros((3,3))]
a1, a2, a3 = anttriplet
a1 = str(a1)
a2 = str(a2)
a3 = str(a3)
bl12_id = (a2, a1)
conj12 = False
if bl12_id in self.bl_reversemap:
bl12_id_ref = self.bl_reversemap[bl12_id]
elif tuple(reversed(bl12_id)) in self.bl_reversemap:
bl12_id_ref = self.bl_reversemap[tuple(reversed(bl12_id))]
conj12 = True
else:
raise ValueError('Baseline ({0[0]:0d}, {0[1]:0d}) not found in simulated baselines'.format(bl12_id))
ind12 = NP.where(self.labels == bl12_id_ref)[0][0]
if not conj12:
skyvis12 = skyvis_freq[ind12,:,:]
vis12 = vis_freq[ind12,:,:]
noise12 = vis_noise_freq[ind12,:,:]
blvecttriplets[-1][0,:] = self.baselines[ind12,:]
bpwts12 = self.bp[ind12,:,:] * self.bp_wts[ind12,:,:]
else:
skyvis12 = skyvis_freq[ind12,:,:].conj()
vis12 = vis_freq[ind12,:,:].conj()
noise12 = vis_noise_freq[ind12,:,:].conj()
blvecttriplets[-1][0,:] = -self.baselines[ind12,:]
bpwts12 = self.bp[ind12,:,:].conj() * self.bp_wts[ind12,:,:].conj()
bl23_id = (a3, a2)
conj23 = False
if bl23_id in self.bl_reversemap:
bl23_id_ref = self.bl_reversemap[bl23_id]
elif tuple(reversed(bl23_id)) in self.bl_reversemap:
bl23_id_ref = self.bl_reversemap[tuple(reversed(bl23_id))]
conj23 = True
else:
raise ValueError('Baseline ({0[0]:0d}, {0[1]:0d}) not found in simulated baselines'.format(bl23_id))
ind23 = NP.where(self.labels == bl23_id_ref)[0][0]
if not conj23:
skyvis23 = skyvis_freq[ind23,:,:]
vis23 = vis_freq[ind23,:,:]
noise23 = vis_noise_freq[ind23,:,:]
blvecttriplets[-1][1,:] = self.baselines[ind23,:]
bpwts23 = self.bp[ind23,:,:] * self.bp_wts[ind23,:,:]
else:
skyvis23 = skyvis_freq[ind23,:,:].conj()
vis23 = vis_freq[ind23,:,:].conj()
noise23 = vis_noise_freq[ind23,:,:].conj()
blvecttriplets[-1][1,:] = -self.baselines[ind23,:]
bpwts23 = self.bp[ind23,:,:].conj() * self.bp_wts[ind23,:,:].conj()
bl31_id = (a1, a3)
conj31 = False
if bl31_id in self.bl_reversemap:
bl31_id_ref = self.bl_reversemap[bl31_id]
elif tuple(reversed(bl31_id)) in self.bl_reversemap:
bl31_id_ref = self.bl_reversemap[tuple(reversed(bl31_id))]
conj31 = True
else:
raise ValueError('Baseline ({0[0]:0d}, {0[1]:0d}) not found in simulated baselines'.format(bl31_id))
ind31 = NP.where(self.labels == bl31_id_ref)[0][0]
if not conj31:
skyvis31 = skyvis_freq[ind31,:,:]
vis31 = vis_freq[ind31,:,:]
noise31 = vis_noise_freq[ind31,:,:]
blvecttriplets[-1][2,:] = self.baselines[ind31,:]
bpwts31 = self.bp[ind31,:,:] * self.bp_wts[ind31,:,:]
else:
skyvis31 = skyvis_freq[ind31,:,:].conj()
vis31 = vis_freq[ind31,:,:].conj()
noise31 = vis_noise_freq[ind31,:,:].conj()
blvecttriplets[-1][2,:] = -self.baselines[ind31,:]
bpwts31 = self.bp[ind31,:,:].conj() * self.bp_wts[ind31,:,:].conj()
if specsmooth_info is not None:
# Perform interpolation for each triplet if op_type is 'interp'.
# If op_type is 'median' it can be performed triplet by triplet
# or on all triplets as once depending on if delay-filtering
# and spectral windowing is set or not.
if specsmooth_info['op_type'].lower() == 'interp':
if specsmooth_info['evalchans'].size > 0:
# Obtain the noise RMS on the required baselines
if 'noiseRMS' not in specsmooth_info:
specsmooth_info['noiseRMS'] = NP.copy(self.vis_rms_freq[NP.ix_([ind12,ind23,ind31], specsmooth_info['evalchans'], NP.arange(skyvis12.shape[1]))])
else:
specsmooth_info['noiseRMS'] = specsmooth_info['noiseRMS'][:,specsmooth_info['evalchans'],:]
noise123 = generateNoise(noiseRMS=specsmooth_info['noiseRMS'], nbl=3, nchan=specsmooth_info['evalchans'].size, ntimes=skyvis12.shape[1])
noise12[specsmooth_info['evalchans'],:] = noise123[0,:,:]
noise23[specsmooth_info['evalchans'],:] = noise123[1,:,:]
noise31[specsmooth_info['evalchans'],:] = noise123[2,:,:]
interpfunc_skyvis12_real = interpolate.interp1d(unmasked_chans, skyvis12[unmasked_chans,:].real, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
interpfunc_skyvis12_imag = interpolate.interp1d(unmasked_chans, skyvis12[unmasked_chans,:].imag, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
skyvis12[specsmooth_info['evalchans'],:] = interpfunc_skyvis12_real(specsmooth_info['evalchans']) + 1j * interpfunc_skyvis12_imag(specsmooth_info['evalchans'])
interpfunc_skyvis23_real = interpolate.interp1d(unmasked_chans, skyvis23[unmasked_chans,:].real, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
interpfunc_skyvis23_imag = interpolate.interp1d(unmasked_chans, skyvis23[unmasked_chans,:].imag, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
skyvis23[specsmooth_info['evalchans'],:] = interpfunc_skyvis23_real(specsmooth_info['evalchans']) + 1j * interpfunc_skyvis23_imag(specsmooth_info['evalchans'])
interpfunc_skyvis31_real = interpolate.interp1d(unmasked_chans, skyvis31[unmasked_chans,:].real, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
interpfunc_skyvis31_imag = interpolate.interp1d(unmasked_chans, skyvis31[unmasked_chans,:].imag, axis=0, kind='cubic', bounds_error=True, assume_sorted=True)
skyvis31[specsmooth_info['evalchans'],:] = interpfunc_skyvis31_real(specsmooth_info['evalchans']) + 1j * interpfunc_skyvis31_imag(specsmooth_info['evalchans'])
vis12[specsmooth_info['evalchans'],:] = skyvis12[specsmooth_info['evalchans'],:] + noise12[specsmooth_info['evalchans'],:]
vis23[specsmooth_info['evalchans'],:] = skyvis23[specsmooth_info['evalchans'],:] + noise23[specsmooth_info['evalchans'],:]
vis31[specsmooth_info['evalchans'],:] = skyvis31[specsmooth_info['evalchans'],:] + noise31[specsmooth_info['evalchans'],:]
# Apply the spectral ('median') smoothing first if delay filter
# and / or spectral windowing is to be performed, otherwise apply
# later on the full array instead of inside the antenna triplet loop
if (delay_filter_info is not None) or (spectral_window_info is not None):
if specsmooth_info is not None:
if specsmooth_info['op_type'].lower() == 'median':
skyvis12 = ndimage.median_filter(skyvis12.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(skyvis12.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
skyvis23 = ndimage.median_filter(skyvis23.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(skyvis23.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
skyvis31 = ndimage.median_filter(skyvis31.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(skyvis31.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
vis12 = ndimage.median_filter(vis12.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(vis12.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
vis23 = ndimage.median_filter(vis23.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(vis23.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
vis31 = ndimage.median_filter(vis31.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(vis31.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
noise12 = ndimage.median_filter(noise12.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(noise12.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
noise23 = ndimage.median_filter(noise23.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(noise23.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
noise31 = ndimage.median_filter(noise31.real, size=(specsmooth_info[specsmooth_info['window_size']],1)) + 1j * ndimage.median_filter(noise31.imag, size=(specsmooth_info[specsmooth_info['window_size']],1))
# Check if delay filter is to be performed
if delay_filter_info is not None:
if filter_type.lower() == 'regular':
delay_max = delay_min + delay_width
if filter_mode.lower() == 'discard':
mask_ind = NP.logical_and(NP.abs(fft_delays) >= delay_min, NP.abs(fft_delays) <= delay_max)
else:
mask_ind = NP.logical_or(NP.abs(fft_delays) <= delay_min, NP.abs(fft_delays) >= delay_max)
filter_unmask[mask_ind] = 0.0
skyvis12 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis12,ax=0,inverse=False), ax=0, inverse=True)
skyvis23 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis23,ax=0,inverse=False), ax=0, inverse=True)
skyvis31 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis31,ax=0,inverse=False), ax=0, inverse=True)
vis12 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis12,ax=0,inverse=False), ax=0, inverse=True)
vis23 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis23,ax=0,inverse=False), ax=0, inverse=True)
vis31 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis31,ax=0,inverse=False), ax=0, inverse=True)
noise12 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise12,ax=0,inverse=False), ax=0, inverse=True)
noise23 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise23,ax=0,inverse=False), ax=0, inverse=True)
noise31 = DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise31,ax=0,inverse=False), ax=0, inverse=True)
# skyvis12 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(skyvis12,ax=0,inverse=False), ax=0, inverse=True)
# skyvis23 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(skyvis23,ax=0,inverse=False), ax=0, inverse=True)
# skyvis31 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(skyvis31,ax=0,inverse=False), ax=0, inverse=True)
# vis12 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(vis12,ax=0,inverse=False), ax=0, inverse=True)
# vis23 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(vis23,ax=0,inverse=False), ax=0, inverse=True)
# vis31 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(vis31,ax=0,inverse=False), ax=0, inverse=True)
# noise12 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(noise12,ax=0,inverse=False), ax=0, inverse=True)
# noise23 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(noise23,ax=0,inverse=False), ax=0, inverse=True)
# noise31 = 1.0 * fft_delays.size / NP.sum(filter_unmask) * DSP.FT1D(filter_unmask[:,NP.newaxis] * DSP.FT1D(noise31,ax=0,inverse=False), ax=0, inverse=True)
else:
filter_unmask12 = 1.0 * filter_unmask
filter_unmask23 = 1.0 * filter_unmask
filter_unmask31 = 1.0 * filter_unmask
delay_max12 = self.baseline_lengths[ind12] / FCNST.c + delay_width
delay_max23 = self.baseline_lengths[ind23] / FCNST.c + delay_width
delay_max31 = self.baseline_lengths[ind31] / FCNST.c + delay_width
if filter_mode.lower() == 'discard':
mask_ind12 = NP.abs(fft_delays) <= delay_max12
mask_ind23 = NP.abs(fft_delays) <= delay_max23
mask_ind31 = NP.abs(fft_delays) <= delay_max31
else:
mask_ind12 = NP.abs(fft_delays) >= delay_max12
mask_ind23 = NP.abs(fft_delays) >= delay_max23
mask_ind31 = NP.abs(fft_delays) >= delay_max31
filter_unmask12[mask_ind12] = 0.0
filter_unmask23[mask_ind23] = 0.0
filter_unmask31[mask_ind31] = 0.0
skyvis12 = DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis12,ax=0,inverse=False), ax=0, inverse=True)
skyvis23 = DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis23,ax=0,inverse=False), ax=0, inverse=True)
skyvis31 = DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*skyvis31,ax=0,inverse=False), ax=0, inverse=True)
vis12 = DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis12,ax=0,inverse=False), ax=0, inverse=True)
vis23 = DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis23,ax=0,inverse=False), ax=0, inverse=True)
vis31 = DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*vis31,ax=0,inverse=False), ax=0, inverse=True)
noise12 = DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise12,ax=0,inverse=False), ax=0, inverse=True)
noise23 = DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise23,ax=0,inverse=False), ax=0, inverse=True)
noise31 = DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(freq_wts.reshape(-1,1)*noise31,ax=0,inverse=False), ax=0, inverse=True)
# skyvis12 = 1.0 * fft_delays.size / NP.sum(filter_unmask12) * DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(skyvis12,ax=0,inverse=False), ax=0, inverse=True)
# skyvis23 = 1.0 * fft_delays.size / NP.sum(filter_unmask23) * DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(skyvis23,ax=0,inverse=False), ax=0, inverse=True)
# skyvis31 = 1.0 * fft_delays.size / NP.sum(filter_unmask31) * DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(skyvis31,ax=0,inverse=False), ax=0, inverse=True)
# vis12 = 1.0 * fft_delays.size / NP.sum(filter_unmask12) * DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(vis12,ax=0,inverse=False), ax=0, inverse=True)
# vis23 = 1.0 * fft_delays.size / NP.sum(filter_unmask23) * DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(vis23,ax=0,inverse=False), ax=0, inverse=True)
# vis31 = 1.0 * fft_delays.size / NP.sum(filter_unmask31) * DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(vis31,ax=0,inverse=False), ax=0, inverse=True)
# noise12 = 1.0 * fft_delays.size / NP.sum(filter_unmask12) * DSP.FT1D(filter_unmask12[:,NP.newaxis] * DSP.FT1D(noise12,ax=0,inverse=False), ax=0, inverse=True)
# noise23 = 1.0 * fft_delays.size / NP.sum(filter_unmask23) * DSP.FT1D(filter_unmask23[:,NP.newaxis] * DSP.FT1D(noise23,ax=0,inverse=False), ax=0, inverse=True)
# noise31 = 1.0 * fft_delays.size / NP.sum(filter_unmask31) * DSP.FT1D(filter_unmask31[:,NP.newaxis] * DSP.FT1D(noise31,ax=0,inverse=False), ax=0, inverse=True)
else:
skyvis12 = freq_wts.reshape(-1,1)*skyvis12
skyvis23 = freq_wts.reshape(-1,1)*skyvis23
skyvis31 = freq_wts.reshape(-1,1)*skyvis31
vis12 = freq_wts.reshape(-1,1)*vis12
vis23 = freq_wts.reshape(-1,1)*vis23
vis31 = freq_wts.reshape(-1,1)*vis31
noise12 = freq_wts.reshape(-1,1)*noise12
noise23 = freq_wts.reshape(-1,1)*noise23
noise31 = freq_wts.reshape(-1,1)*noise31
skyvis_triplets += [[skyvis12*bpwts12, skyvis23*bpwts23, skyvis31*bpwts31]]
vis_triplets += [[vis12*bpwts12, vis23*bpwts23, vis31*bpwts31]]
noise_triplets += [[noise12*bpwts12, noise23*bpwts23, noise31*bpwts31]]
progress.update(tripletind+1)
progress.finish()
skyvis_triplets = NP.asarray(skyvis_triplets)
vis_triplets = NP.asarray(vis_triplets)
noise_triplets = NP.asarray(noise_triplets)
# Apply the spectral smoothing now on the entire triplet arrays
# if none of delay filter or spectral windowing is to be performed,
# otherwise it must have been applied prior to either one of those
# operations
if (delay_filter_info is None) and (spectral_window_info is None) and (specsmooth_info is not None):
if specsmooth_info['op_type'].lower() == 'median':
skyvis_triplets = ndimage.median_filter(skyvis_triplets.real, size=(1,1,specsmooth_info['window_size'],1)) + 1j * ndimage.median_filter(skyvis_triplets.imag, size=(1,1,specsmooth_info['window_size'],1))
vis_triplets = ndimage.median_filter(vis_triplets.real, size=(1,1,specsmooth_info['window_size'],1)) + 1j * ndimage.median_filter(vis_triplets.imag, size=(1,1,specsmooth_info['window_size'],1))
noise_triplets = ndimage.median_filter(noise_triplets.real, size=(1,1,specsmooth_info['window_size'],1)) + 1j * ndimage.median_filter(noise_triplets.imag, size=(1,1,specsmooth_info['window_size'],1))
phase_skyvis123 = NP.angle(NP.prod(skyvis_triplets, axis=1))
phase_vis123 = NP.angle(NP.prod(vis_triplets, axis=1))
phase_noise123 = NP.angle(NP.prod(noise_triplets, axis=1))
return {'closure_phase_skyvis': phase_skyvis123, 'closure_phase_vis': phase_vis123, 'closure_phase_noise': phase_noise123, 'antenna_triplets': antenna_triplets, 'baseline_triplets': blvecttriplets, 'skyvis': skyvis_triplets, 'vis': vis_triplets, 'noisevis': noise_triplets, 'spectral_weights': freq_wts}
#############################################################################
def rotate_visibilities(self, ref_point, do_delay_transform=False,
verbose=True):
"""
-------------------------------------------------------------------------
Centers the phase of visibilities around any given phase center.
Project baseline vectors with respect to a reference point on the sky.
Essentially a wrapper to member functions phase_centering() and
project_baselines()
Input(s):
ref_point [dictionary] Contains information about the reference
position to which projected baselines and rotated
visibilities are to be computed. No defaults. It must be
contain the following keys with the following values:
'coords' [string] Refers to the coordinate system in
which value in key 'location' is specified in.
Accepted values are 'radec', 'hadec', 'altaz'
and 'dircos'
'location' [numpy array] Must be a Mx2 (if value in key
'coords' is set to 'radec', 'hadec', 'altaz' or
'dircos') or Mx3 (if value in key 'coords' is
set to 'dircos'). M can be 1 or equal to number
of timestamps. If M=1, the same reference point
in the same coordinate system will be repeated
for all tiemstamps. If value under key 'coords'
is set to 'radec', 'hadec' or 'altaz', the
value under this key 'location' must be in
units of degrees.
do_delay_transform
[boolean] If set to True (default), also recompute the
delay transform after the visibilities are rotated to the
new phase center
verbose: [boolean] If set to True (default), prints progress and
diagnostic messages.
-------------------------------------------------------------------------
"""
try:
ref_point
except NameError:
raise NameError('Input ref_point must be provided')
if ref_point is None:
raise ValueError('Invalid input specified in ref_point')
elif not isinstance(ref_point, dict):
raise TypeError('Input ref_point must be a dictionary')
else:
if ('location' not in ref_point) or ('coords' not in ref_point):
raise KeyError('Both keys "location" and "coords" must be specified in input dictionary ref_point')
self.phase_centering(ref_point, do_delay_transform=do_delay_transform, verbose=verbose)
self.project_baselines(ref_point)
#############################################################################
    def phase_centering(self, ref_point, do_delay_transform=False, verbose=True):
        """
        -------------------------------------------------------------------------
        Centers the phase of visibilities around any given phase center.

        Inputs:

        ref_point   [dictionary] Contains information about the reference
                    position to which projected baselines and rotated
                    visibilities are to be computed. No defaults. It must
                    contain the following keys with the following values:
                    'coords'    [string] Refers to the coordinate system in
                                which value in key 'location' is specified in.
                                Accepted values are 'radec', 'hadec', 'altaz'
                                and 'dircos'
                    'location'  [numpy array] Must be a Mx2 (if value in key
                                'coords' is set to 'radec', 'hadec', 'altaz' or
                                'dircos') or Mx3 (if value in key 'coords' is
                                set to 'dircos'). M can be 1 or equal to number
                                of timestamps. If M=1, the same reference point
                                in the same coordinate system will be repeated
                                for all timestamps. If value under key 'coords'
                                is set to 'radec', 'hadec' or 'altaz', the
                                value under this key 'location' must be in
                                units of degrees.

        do_delay_transform
                    [boolean] If set to True, also recompute the delay
                    transform after the visibilities are rotated to the new
                    phase center. If set to False (default), this is skipped

        verbose:    [boolean] If set to True (default), prints progress and
                    diagnostic messages.
        -------------------------------------------------------------------------
        """
        try:
            ref_point
        except NameError:
            raise NameError('Input ref_point must be provided')
        if ref_point is None:
            raise ValueError('Invalid input specified in ref_point')
        elif not isinstance(ref_point, dict):
            raise TypeError('Input ref_point must be a dictionary')
        else:
            if ('location' not in ref_point) or ('coords' not in ref_point):
                raise KeyError('Both keys "location" and "coords" must be specified in input dictionary ref_point')
        phase_center = ref_point['location']
        phase_center_coords = ref_point['coords']
        if phase_center is None:
            raise ValueError('Valid phase center not specified in input ref_point')
        elif not isinstance(phase_center, NP.ndarray):
            raise TypeError('Phase center must be a numpy array')
        elif phase_center.shape[0] == 1:
            # Single reference point: replicate it for every timestamp
            phase_center = NP.repeat(phase_center, len(self.lst), axis=0)
        elif phase_center.shape[0] != len(self.lst):
            raise ValueError('One phase center must be provided for every timestamp.')
        # "+ 0.0" / "+ ''" force copies so the originals are never mutated
        phase_center_current = self.phase_center + 0.0
        phase_center_new = phase_center + 0.0
        phase_center_coords_current = self.phase_center_coords + ''
        phase_center_coords_new = phase_center_coords + ''
        phase_center_temp = phase_center_new + 0.0
        phase_center_coords_temp = phase_center_coords_new + ''
        # Convert the NEW phase center step-by-step (dircos -> altaz -> hadec
        # -> radec) until its coordinate system matches the CURRENT one; each
        # "if" performs one conversion stage only when still unmatched.
        if phase_center_coords_new is None:
            raise NameError('Coordinates of phase center not provided.')
        elif phase_center_coords_new == 'dircos':
            if (phase_center_new.shape[1] < 2) or (phase_center_new.shape[1] > 3):
                raise ValueError('Dimensions incompatible for direction cosine positions')
            if NP.any(NP.sqrt(NP.sum(phase_center_new**2, axis=1)) > 1.0):
                raise ValueError('direction cosines found to be exceeding unit magnitude.')
            if phase_center_new.shape[1] == 2:
                # Complete the third direction cosine from the first two
                n = 1.0 - NP.sqrt(NP.sum(phase_center_new**2, axis=1))
                phase_center_new = NP.hstack((phase_center_new, n.reshape(-1,1)))
            phase_center_temp = phase_center_new + 0.0
            phase_center_coords_temp = 'dircos'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp = GEOM.dircos2altaz(phase_center_temp, units='degrees')
                phase_center_coords_temp = 'altaz'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp = GEOM.altaz2hadec(phase_center_temp, self.latitude, units='degrees')
                phase_center_coords_temp = 'hadec'
            # NOTE(review): the next two stanzas both apply lst - HA. The first
            # labels the result 'hadec' (not 'radec'), so when the current
            # coords are 'radec' the subtraction is applied twice, undoing the
            # hadec -> radec conversion while labeling the values 'radec'.
            # Looks like a copy-paste bug -- confirm intended behavior.
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp[:,0] = self.lst - phase_center_temp[:,0]
                phase_center_coords_temp = 'hadec'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp[:,0] = self.lst - phase_center_temp[:,0]
                phase_center_coords_temp = 'radec'
            if phase_center_coords_temp != phase_center_coords_current:
                raise ValueError('Pointing coordinates of interferometer array instance invalid.')
        elif phase_center_coords_new == 'altaz':
            phase_center_temp = phase_center_new + 0.0
            phase_center_coords_temp = 'altaz'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp = GEOM.altaz2hadec(phase_center_temp, self.latitude, units='degrees')
                phase_center_coords_temp = 'hadec'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp[:,0] = self.lst - phase_center_temp[:,0]
                phase_center_coords_temp = 'radec'
            if phase_center_coords_temp != phase_center_coords_current:
                raise ValueError('Pointing coordinates of interferometer array instance invalid.')
            phase_center_coords_temp = phase_center_coords_current + ''
            # phase_center_new is always carried forward in direction cosines
            phase_center_new = GEOM.altaz2dircos(phase_center_new, units='degrees')
        elif phase_center_coords_new == 'hadec':
            phase_center_temp = phase_center_new + 0.0
            phase_center_coords_temp = 'hadec'
            if phase_center_coords_temp != phase_center_coords_current:
                if self.pointing_coords == 'radec':
                    phase_center_temp[:,0] = self.lst - phase_center_temp[:,0]
                    phase_center_coords_temp = 'radec'
                else:
                    phase_center_temp = GEOM.hadec2altaz(phase_center_temp, self.latitude, units='degrees')
                    phase_center_coords_temp = 'altaz'
                    if phase_center_coords_temp != phase_center_coords_current:
                        phase_center_temp = GEOM.altaz2dircos(phase_center_temp, units='degrees')
                        phase_center_coords_temp = 'dircos'
                    if phase_center_coords_temp != phase_center_coords_current:
                        raise ValueError('Pointing coordinates of interferometer array instance invalid.')
            phase_center_new = GEOM.hadec2altaz(phase_center_new, self.latitude, units='degrees')
            phase_center_new = GEOM.altaz2dircos(phase_center_new, units='degrees')
        elif phase_center_coords_new == 'radec':
            phase_center_temp = phase_center_new + 0.0
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp[:,0] = self.lst - phase_center_temp[:,0]
                phase_center_coords_temp = 'hadec'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp = GEOM.hadec2altaz(phase_center_temp, self.latitude, units='degrees')
                phase_center_coords_temp = 'altaz'
            if phase_center_coords_temp != phase_center_coords_current:
                phase_center_temp = GEOM.altaz2dircos(phase_center_temp, units='degrees')
                phase_center_coords_temp = 'dircos'
            if phase_center_coords_temp != phase_center_coords_current:
                raise ValueError('Pointing coordinates of interferometer array instance invalid.')
            phase_center_new[:,0] = self.lst - phase_center_new[:,0]
            phase_center_new = GEOM.hadec2altaz(phase_center_new, self.latitude, units='degrees')
            phase_center_new = GEOM.altaz2dircos(phase_center_new, units='degrees')
        else:
            raise ValueError('Invalid phase center coordinate system specified')
        # Convert the CURRENT phase center to direction cosines through the
        # same stage chain (radec -> hadec -> altaz -> dircos)
        phase_center_current_temp = phase_center_current + 0.0
        phase_center_coords_current_temp = phase_center_coords_current + ''
        if phase_center_coords_current_temp == 'radec':
            phase_center_current_temp[:,0] = self.lst - phase_center_current_temp[:,0]
            phase_center_coords_current_temp = 'hadec'
        if phase_center_coords_current_temp == 'hadec':
            phase_center_current_temp = GEOM.hadec2altaz(phase_center_current_temp, self.latitude, units='degrees')
            phase_center_coords_current_temp = 'altaz'
        if phase_center_coords_current_temp == 'altaz':
            phase_center_current_temp = GEOM.altaz2dircos(phase_center_current_temp, units='degrees')
            phase_center_coords_current_temp = 'dircos'
        # Phase rotation: apply exp(-2*pi*i * f/c * b.(l_old - l_new)) per
        # baseline (axis 0), channel (axis 1) and timestamp (axis 2)
        pos_diff_dircos = phase_center_current_temp - phase_center_new
        b_dot_l = NP.dot(self.baselines, pos_diff_dircos.T)
        self.phase_center = phase_center_temp + 0.0
        self.phase_center_coords = phase_center_coords_temp + ''
        self.skyvis_freq = self.skyvis_freq * NP.exp(-1j * 2 * NP.pi * b_dot_l[:,NP.newaxis,:] * self.channels.reshape(1,-1,1) / FCNST.c)
        if self.vis_freq is not None:
            self.vis_freq = self.vis_freq * NP.exp(-1j * 2 * NP.pi * b_dot_l[:,NP.newaxis,:] * self.channels.reshape(1,-1,1) / FCNST.c)
        if self.vis_noise_freq is not None:
            self.vis_noise_freq = self.vis_noise_freq * NP.exp(-1j * 2 * NP.pi * b_dot_l[:,NP.newaxis,:] * self.channels.reshape(1,-1,1) / FCNST.c)
        if do_delay_transform:
            self.delay_transform()
            print('Running delay_transform() with defaults inside phase_centering() after rotating visibility phases. Run delay_transform() again with appropriate inputs.')
#############################################################################
def project_baselines(self, ref_point):
"""
------------------------------------------------------------------------
Project baseline vectors with respect to a reference point on the sky.
Assigns the projected baselines to the attribute projected_baselines
Input(s):
ref_point [dictionary] Contains information about the reference
position to which projected baselines are to be computed.
No defaults. It must be contain the following keys with the
following values:
'coords' [string] Refers to the coordinate system in
which value in key 'location' is specified in.
Accepted values are 'radec', 'hadec', 'altaz'
and 'dircos'
'location' [numpy array] Must be a Mx2 (if value in key
'coords' is set to 'radec', 'hadec', 'altaz' or
'dircos') or Mx3 (if value in key 'coords' is
set to 'dircos'). M can be 1 or equal to number
of timestamps. If M=1, the same reference point
in the same coordinate system will be repeated
for all tiemstamps. If value under key 'coords'
is set to 'radec', 'hadec' or 'altaz', the
value under this key 'location' must be in
units of degrees.
------------------------------------------------------------------------
"""
try:
ref_point
except NameError:
raise NameError('Input ref_point must be provided')
if ref_point is None:
raise ValueError('Invalid input specified in ref_point')
elif not isinstance(ref_point, dict):
raise TypeError('Input ref_point must be a dictionary')
else:
if ('location' not in ref_point) or ('coords' not in ref_point):
raise KeyError('Both keys "location" and "coords" must be specified in input dictionary ref_point')
phase_center = ref_point['location']
phase_center_coords = ref_point['coords']
if not isinstance(phase_center, NP.ndarray):
raise TypeError('The specified reference point must be a numpy array')
if not isinstance(phase_center_coords, str):
raise TypeError('The specified coordinates of the reference point must be a string')
if phase_center_coords not in ['radec', 'hadec', 'altaz', 'dircos']:
raise ValueError('Specified coordinates of reference point invalid')
if phase_center.ndim == 1:
phase_center = phase_center.reshape(1,-1)
if phase_center.ndim > 2:
raise ValueError('Reference point has invalid dimensions')
if (phase_center.shape[0] != self.n_acc) and (phase_center.shape[0] != 1):
raise ValueError('Reference point has dimensions incompatible with the number of timestamps')
if phase_center.shape[0] == 1:
phase_center = phase_center + NP.zeros(self.n_acc).reshape(-1,1)
if phase_center_coords == 'radec':
if phase_center.shape[1] != 2:
raise ValueError('Reference point has invalid dimensions')
ha = NP.asarray(self.lst) - phase_center[:,0]
dec = phase_center[:,1]
elif phase_center_coords == 'hadec':
if phase_center.shape[1] != 2:
raise ValueError('Reference point has invalid dimensions')
ha = phase_center[:,0]
dec = phase_center[:,1]
elif phase_center_coords == 'altaz':
if phase_center.shape[1] != 2:
raise ValueError('Reference point has invalid dimensions')
hadec = GEOM.altaz2hadec(phase_center, self.latitude, units='degrees')
ha = hadec[:,0]
dec = hadec[:,1]
else: # phase_center_coords = 'dircos'
if (phase_center.shape[1] < 2) or (phase_center.shape[1] > 3):
raise ValueError('Reference point has invalid dimensions')
if NP.any(NP.sqrt(NP.sum(phase_center**2, axis=1)) > 1.0):
raise ValueError('direction cosines found to be exceeding unit magnitude.')
if NP.any(NP.max(NP.abs(phase_center), axis=1) > 1.0):
raise ValueError('direction cosines found to be exceeding unit magnitude.')
if phase_center.shape[1] == 2:
n = 1.0 - NP.sqrt(NP.sum(phase_center**2, axis=1))
phase_center = NP.hstack((phase_center, n.reshape(-1,1)))
altaz = GEOM.dircos2altaz(phase_center, units='degrees')
hadec = GEOM.altaz2hadec(phase_center, self.latitude, units='degrees')
ha = hadec[:,0]
dec = hadec[:,1]
ha = NP.radians(ha).ravel()
dec = NP.radians(dec).ravel()
eq_baselines = GEOM.enu2xyz(self.baselines, self.latitude, units='degrees')
rot_matrix = NP.asarray([[NP.sin(ha), NP.cos(ha), NP.zeros(ha.size)],
[-NP.sin(dec)*NP.cos(ha), NP.sin(dec)*NP.sin(ha), NP.cos(dec)],
[NP.cos(dec)*NP.cos(ha), -NP.cos(dec)*NP.sin(ha), NP.sin(dec)]])
if rot_matrix.ndim == 2:
rot_matrix = rot_matrix[:,:,NP.newaxis] # To ensure correct dot product is obtained in the next step
self.projected_baselines = NP.dot(eq_baselines, rot_matrix) # (n_bl x [3]).(3 x [3] x n_acc) -> n_bl x (first 3) x n_acc
# proj_baselines = NP.empty((eq_baselines.shape[0], eq_baselines.shape[1], len(self.lst)))
# for i in xrange(len(self.lst)):
# rot_matrix = NP.asarray([[NP.sin(ha[i]), NP.cos(ha[i]), 0.0],
# [-NP.sin(dec[i])*NP.cos(ha[i]), NP.sin(dec[i])*NP.sin(ha[i]), NP.cos(dec[i])],
# [NP.cos(dec[i])*NP.cos(ha[i]), -NP.cos(dec[i])*NP.sin(ha[i]), NP.sin(dec[i])]])
# proj_baselines[:,:,i] = NP.dot(eq_baselines, rot_matrix.T)
# self.projected_baselines = proj_baselines
#############################################################################
def conjugate(self, ind=None, verbose=True):
"""
------------------------------------------------------------------------
Flips the baseline vectors and conjugates the visibilies for a specified
subset of baselines.
Inputs:
ind [scalar, list or numpy array] Indices pointing to specific
baseline vectors which need to be flipped. Default = None means
no flipping or conjugation. If all baselines are to be
flipped, either provide all the indices in ind or set ind="all"
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
------------------------------------------------------------------------
"""
if ind is not None:
if isinstance(ind, str):
if ind != 'all':
raise ValueError('Value of ind must be "all" if set to string')
ind = NP.arange(self.baselines.shape[0])
elif isinstance(ind, int):
ind = [ind]
elif isinstance(ind, NP.ndarray):
ind = ind.tolist()
elif not isinstance(ind, list):
raise TypeError('ind must be string "all", scalar interger, list or numpy array')
ind = NP.asarray(ind)
if NP.any(ind >= self.baselines.shape[0]):
raise IndexError('Out of range indices found.')
self.labels = [tuple(reversed(self.labels[i])) if i in ind else self.labels[i] for i in xrange(len(self.labels))]
self.baselines[ind,:] = -self.baselines[ind,:]
self.baseline_orientations = NP.angle(self.baselines[:,0] + 1j * self.baselines[:,1])
if self.vis_freq is not None:
self.vis_freq[ind,:,:] = self.vis_freq[ind,:,:].conj()
if self.skyvis_freq is not None:
self.skyvis_freq[ind,:,:] = self.skyvis_freq[ind,:,:].conj()
if self.vis_noise_freq is not None:
self.vis_noise_freq[ind,:,:] = self.vis_noise_freq[ind,:,:].conj()
if self.projected_baselines is not None:
self.projected_baselines[ind,:,:] = -self.projected_baselines[ind,:,:]
if verbose:
warnings.warn('Certain baselines have been flipped and their visibilities conjugated. Use delay_transform() to update the delay spectra.')
#############################################################################
    def delay_transform(self, pad=1.0, freq_wts=None, verbose=True):
        """
        ------------------------------------------------------------------------
        Transforms the visibilities from frequency axis onto delay (time) axis
        using an IFFT. This is performed for noiseless sky visibilities, thermal
        noise in visibilities, and observed visibilities.

        Inputs:

        pad      [scalar] Non-negative scalar indicating padding fraction
                 relative to the number of frequency channels. For e.g., a
                 pad of 1.0 pads the frequency axis with zeros of the same
                 width as the number of channels. After the delay transform,
                 the transformed visibilities are downsampled by a factor of
                 1+pad. If a negative value is specified, delay transform
                 will be performed with no padding

        freq_wts [numpy vector or array] window shaping to be applied before
                 computing delay transform. It can either be a vector or size
                 equal to the number of channels (which will be applied to all
                 time instances for all baselines), or a nchan x n_snapshots
                 numpy array which will be applied to all baselines, or a
                 n_baselines x nchan numpy array which will be applied to all
                 timestamps, or a n_baselines x nchan x n_snapshots numpy
                 array. Default (None) will not apply windowing and only the
                 inherent bandpass will be used.

        verbose  [boolean] If set to True (default), print diagnostic and
                 progress messages. If set to False, no such messages are
                 printed.
        ------------------------------------------------------------------------
        """
        if verbose:
            print('Preparing to compute delay transform...\n\tChecking input parameters for compatibility...')
        if not isinstance(pad, (int, float)):
            raise TypeError('pad fraction must be a scalar value.')
        if pad < 0.0:
            # Negative pad is silently clamped to zero (with a warning)
            pad = 0.0
            if verbose:
                warnings.warn('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
        if freq_wts is not None:
            # Broadcast freq_wts to full (n_bl, nchan, n_acc) shape based on
            # its total size; note self.bp_wts is only updated when freq_wts
            # is provided -- otherwise whatever bp_wts was set earlier is kept
            if freq_wts.size == self.channels.size:
                freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
            elif freq_wts.size == self.channels.size * self.n_acc:
                freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.channels.size, -1), axis=0), self.baselines.shape[0], axis=0)
            elif freq_wts.size == self.channels.size * self.baselines.shape[0]:
                freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.channels.size), axis=2), self.n_acc, axis=2)
            elif freq_wts.size == self.channels.size * self.baselines.shape[0] * self.n_acc:
                freq_wts = freq_wts.reshape(self.baselines.shape[0], self.channels.size, self.n_acc)
            else:
                raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
            self.bp_wts = freq_wts
            if verbose:
                print('\tFrequency window weights assigned.')
        if verbose:
            print('\tInput parameters have been verified to be compatible.\n\tProceeding to compute delay transform.')
        self.lags = DSP.spectral_axis(self.channels.size, delx=self.freq_resolution, use_real=False, shift=True)
        if pad == 0.0:
            # IFFT along the frequency axis (axis 1); scaling by
            # nchan * freq_resolution converts to delay-spectrum units
            self.vis_lag = DSP.FT1D(self.vis_freq * self.bp * self.bp_wts, ax=1, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
            self.skyvis_lag = DSP.FT1D(self.skyvis_freq * self.bp * self.bp_wts, ax=1, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
            self.vis_noise_lag = DSP.FT1D(self.vis_noise_freq * self.bp * self.bp_wts, ax=1, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
            self.lag_kernel = DSP.FT1D(self.bp * self.bp_wts, ax=1, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
            if verbose:
                print('\tDelay transform computed without padding.')
        else:
            # Zero-pad the frequency axis by npad channels before the IFFT,
            # then downsample by 1+pad to restore the original sample count
            npad = int(self.channels.size * pad)
            self.vis_lag = DSP.FT1D(NP.pad(self.vis_freq * self.bp * self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
            self.skyvis_lag = DSP.FT1D(NP.pad(self.skyvis_freq * self.bp * self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
            self.vis_noise_lag = DSP.FT1D(NP.pad(self.vis_noise_freq * self.bp * self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
            self.lag_kernel = DSP.FT1D(NP.pad(self.bp * self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
            if verbose:
                print('\tDelay transform computed with padding fraction {0:.1f}'.format(pad))
            self.vis_lag = DSP.downsampler(self.vis_lag, 1+pad, axis=1)
            self.skyvis_lag = DSP.downsampler(self.skyvis_lag, 1+pad, axis=1)
            self.vis_noise_lag = DSP.downsampler(self.vis_noise_lag, 1+pad, axis=1)
            self.lag_kernel = DSP.downsampler(self.lag_kernel, 1+pad, axis=1)
            if verbose:
                print('\tDelay transform products downsampled by factor of {0:.1f}'.format(1+pad))
                print('delay_transform() completed successfully.')
#############################################################################
def multi_window_delay_transform(self, bw_eff, freq_center=None, shape=None,
pad=1.0, verbose=True):
"""
------------------------------------------------------------------------
Computes delay transform on multiple frequency windows with specified
weights
Inputs:
bw_eff [scalar, list, numpy array] Effective bandwidths of the
selected frequency windows. If a scalar is provided, the
same will be applied to all frequency windows.
freq_center [scalar, list, numpy array] Frequency centers of the
selected frequency windows. If a scalar is provided, the
same will be applied to all frequency windows. Default=None
uses the center frequency from the class attribute named
channels
shape [string] specifies frequency window shape. Accepted values
are 'rect' or 'RECT' (for rectangular), 'bnw' and 'BNW'
(for Blackman-Nuttall), and 'bhw' or 'BHW' (for Blackman-
Harris). Default=None sets it to 'rect' (rectangular
window)
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. After the delay transform,
the transformed visibilities are downsampled by a factor of
1+pad. If a negative value is specified, delay transform
will be performed with no padding
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
Output:
A dictionary containing information under the following keys:
skyvis_lag Numpy array of pure sky visibilities delay spectra of
size n_bl x n_windows x nchan x n_snaps
vis_noise_lag Numpy array of noise delay spectra of size
size n_bl x n_windows x nchan x n_snaps
lag_kernel Numpy array of delay kernel of size
size n_bl x n_windows x nchan x n_snaps
lag_corr_length Numpy array of correlation length (in units of number
of delay samples) due to convolving kernel in delay
space. This is the number by which the delay spectra
obtained have to be downsampled by to get independent
samples of delay spectra
------------------------------------------------------------------------
"""
try:
bw_eff
except NameError:
raise NameError('Effective bandwidth must be specified')
else:
if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
raise TypeError('Effective bandwidth must be a scalar, list or numpy array')
bw_eff = NP.asarray(bw_eff).reshape(-1)
if NP.any(bw_eff <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = NP.asarray(self.channels[int(0.5*self.channels.size)]).reshape(-1)
elif isinstance(freq_center, (int, float, list, NP.ndarray)):
freq_center = NP.asarray(freq_center).reshape(-1)
if NP.any((freq_center <= self.channels.min()) | (freq_center >= self.channels.max())):
raise ValueError('Frequency centers must lie strictly inside the observing band')
else:
raise TypeError('Frequency center(s) must be scalar, list or numpy array')
if (bw_eff.size == 1) and (freq_center.size > 1):
bw_eff = NP.repeat(bw_eff, freq_center.size)
elif (bw_eff.size > 1) and (freq_center.size == 1):
freq_center = NP.repeat(freq_center, bw_eff.size)
elif bw_eff.size != freq_center.size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, str):
raise TypeError('Window shape must be a string')
if shape not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = 'rect'
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
warnings.warn('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
freq_wts = NP.empty((bw_eff.size, self.channels.size))
frac_width = DSP.window_N2width(n_window=None, shape=shape)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.freq_resolution).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.channels.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.freq_resolution, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = DSP.windowing(n_window[i], shape=shape, centering=True)
window_chans = self.channels[ind_chan] + self.freq_resolution * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.channels.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.freq_resolution, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.channels.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
lags = DSP.spectral_axis(self.channels.size, delx=self.freq_resolution, use_real=False, shift=True)
if pad == 0.0:
skyvis_lag = DSP.FT1D(self.skyvis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ax=2, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
vis_noise_lag = DSP.FT1D(self.vis_noise_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ax=2, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
lag_kernel = DSP.FT1D(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ax=2, inverse=True, use_real=False, shift=True) * self.channels.size * self.freq_resolution
if verbose:
print('\tMulti-window delay transform computed without padding.')
else:
npad = int(self.channels.size * pad)
skyvis_lag = DSP.FT1D(NP.pad(self.skyvis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
vis_noise_lag = DSP.FT1D(NP.pad(self.vis_noise_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
lag_kernel = DSP.FT1D(NP.pad(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.channels.size) * self.freq_resolution
if verbose:
print('\tMulti-window delay transform computed with padding fraction {0:.1f}'.format(pad))
skyvis_lag = DSP.downsampler(skyvis_lag, 1+pad, axis=2)
vis_noise_lag = DSP.downsampler(vis_noise_lag, 1+pad, axis=2)
lag_kernel = DSP.downsampler(lag_kernel, 1+pad, axis=2)
if verbose:
print('\tMulti-window delay transform products downsampled by factor of {0:.1f}'.format(1+pad))
print('multi_window_delay_transform() completed successfully.')
return {'skyvis_lag': skyvis_lag, 'vis_noise_lag': vis_noise_lag, 'lag_kernel': lag_kernel, 'lag_corr_length': self.channels.size / NP.sum(freq_wts, axis=1)}
#############################################################################
def concatenate(self, others, axis):
    """
    -------------------------------------------------------------------------
    Concatenates different visibility data sets from instances of class
    InterferometerArray along baseline, frequency or time axis.

    Inputs:

    others     [instance of class InterferometerArray or list of such
               instances] Instance or list of instances of class
               InterferometerArray whose visibility data have to be
               concatenated to the current instance.

    axis       [scalar] Axis along which visibility data sets are to be
               concatenated. Accepted values are 0 (concatenate along
               baseline axis), 1 (concatenate frequency channels), or 2
               (concatenate along time/snapshot axis). No default

    Raises TypeError if others is not an InterferometerArray (or list of
    them) or if axis is not an integer; ValueError for an invalid axis.
    -------------------------------------------------------------------------
    """
    try:
        others, axis
    except NameError:
        raise NameError('An instance of class InterferometerArray or a list of such instances and the axis along which they are to be concatenated must be provided.')

    # Normalize input to a list of objects ("loo") with self first
    if isinstance(others, list):
        for other in others:
            if not isinstance(other, InterferometerArray):
                raise TypeError('The interferometer array data to be concatenated must be an instance of class InterferometerArray or a list of such instances')
        loo = [self]+others
    elif isinstance(others, InterferometerArray):
        loo = [self, others]
    else:
        # Bug fix: this branch previously tested the undefined name 'other'
        # (the loop variable of the list branch), raising NameError instead
        # of the intended TypeError for invalid input
        raise TypeError('The interferometer array data to be concatenated must be an instance of class InterferometerArray or a list of such instances')
    if not isinstance(axis, int):
        raise TypeError('axis must be an integer')

    self_shape = self.skyvis_freq.shape

    if axis >= len(self_shape):
        raise ValueError('Specified axis not found in the visibility data.')
    elif axis == -1:
        axis = len(self_shape) - 1
    elif axis < -1:
        raise ValueError('Specified axis not found in the visibility data.')

    # Concatenate the core visibility products (optional ones only if present)
    self.skyvis_freq = NP.concatenate(tuple([elem.skyvis_freq for elem in loo]), axis=axis)
    if self.vis_freq is not None:
        self.vis_freq = NP.concatenate(tuple([elem.vis_freq for elem in loo]), axis=axis)
    if self.vis_noise_freq is not None:
        self.vis_noise_freq = NP.concatenate(tuple([elem.vis_noise_freq for elem in loo]), axis=axis)
    if self.vis_rms_freq is not None:
        self.vis_rms_freq = NP.concatenate(tuple([elem.vis_rms_freq for elem in loo]), axis=axis)
    self.bp = NP.concatenate(tuple([elem.bp for elem in loo]), axis=axis)
    self.bp_wts = NP.concatenate(tuple([elem.bp_wts for elem in loo]), axis=axis)
    self.Tsys = NP.concatenate(tuple([elem.Tsys for elem in loo]), axis=axis)
    if self.gradient_mode is not None:
        # Gradient arrays carry an extra leading axis, hence axis+1
        self.gradient[self.gradient_mode] = NP.concatenate(tuple([elem.gradient[self.gradient_mode] for elem in loo]), axis=axis+1)
    if not self.Tsysinfo:
        # Inherit Tsys metadata from the first constituent that has it
        for elem in loo:
            if elem.Tsysinfo:
                self.Tsysinfo = elem.Tsysinfo

    if axis != 1:
        # Delay-spectrum products cannot be concatenated along frequency
        if self.skyvis_lag is not None:
            self.skyvis_lag = NP.concatenate(tuple([elem.skyvis_lag for elem in loo]), axis=axis)
        if self.vis_lag is not None:
            self.vis_lag = NP.concatenate(tuple([elem.vis_lag for elem in loo]), axis=axis)
        if self.vis_noise_lag is not None:
            self.vis_noise_lag = NP.concatenate(tuple([elem.vis_noise_lag for elem in loo]), axis=axis)

    if axis == 0: # baseline axis
        for elem in loo:
            if elem.baseline_coords != self.baseline_coords:
                raise ValueError('Coordinate systems for the baseline vectors are mismatched.')
        self.baselines = NP.vstack(tuple([elem.baselines for elem in loo]))
        self.baseline_lengths = NP.sqrt(NP.sum(self.baselines**2, axis=1))
        self.baseline_orientations = NP.angle(self.baselines[:,0] + 1j * self.baselines[:,1])
        self.projected_baselines = NP.vstack(tuple([elem.projected_baselines for elem in loo]))
        self.labels = [label for elem in loo for label in elem.labels]
        self.A_eff = NP.vstack(tuple([elem.A_eff for elem in loo]))
        self.eff_Q = NP.vstack(tuple([elem.eff_Q for elem in loo]))
    elif axis == 1: # Frequency axis
        self.channels = NP.hstack(tuple([elem.channels for elem in loo]))
        self.A_eff = NP.hstack(tuple([elem.A_eff for elem in loo]))
        self.eff_Q = NP.hstack(tuple([elem.eff_Q for elem in loo]))
        # self.delay_transform()
    elif axis == 2: # time axis
        # self.timestamp = [timestamp for elem in loo for timestamp in elem.timestamp]
        self.t_acc = [t_acc for elem in loo for t_acc in elem.t_acc]
        self.n_acc = len(self.t_acc)
        self.t_obs = sum(self.t_acc)
        self.pointing_center = NP.vstack(tuple([elem.pointing_center for elem in loo]))
        self.phase_center = NP.vstack(tuple([elem.phase_center for elem in loo]))
        self.lst = [lst for elem in loo for lst in elem.lst]
        self.timestamp = [timestamp for elem in loo for timestamp in elem.timestamp]
        self.Tsysinfo = [Tsysinfo for elem in loo for Tsysinfo in elem.Tsysinfo]
#############################################################################
def save(self, outfile, fmt='HDF5', tabtype='BinTableHDU', npz=True,
         overwrite=False, uvfits_parms=None, verbose=True):
    """
    -------------------------------------------------------------------------
    Saves the interferometer array information to disk in HDF5, FITS, NPZ
    and UVFITS formats

    Inputs:

    outfile      [string] Filename with full path to be saved to. Will be
                 appended with '.hdf5' or '.fits' extension depending on
                 input keyword fmt. If input npz is set to True, the
                 simulated visibilities will also get stored in '.npz'
                 format. Depending on parameters in uvfits_parms, three
                 UVFITS files will also be created whose names will be
                 outfile+'-noiseless', outfile+'-noisy' and
                 outfile+'-noise' appended with '.uvfits'

    Keyword Input(s):

    fmt          [string] string specifying the format of the output.
                 Accepted values are 'HDF5' (default) and 'FITS'.
                 The file names will be appended with '.hdf5' or '.fits'
                 respectively

    tabtype      [string] indicates table type for one of the extensions in
                 the FITS file. Allowed values are 'BinTableHDU' and
                 'TableHDU' for binary and ascii tables respectively. Default
                 is 'BinTableHDU'. Only applies if input fmt is set to 'FITS'

    npz          [boolean] True (default) indicates a numpy NPZ format file
                 is created in addition to the FITS file to store essential
                 attributes of the class InterferometerArray for easy
                 handing over of python files

    overwrite    [boolean] True indicates overwrite even if a file already
                 exists. Default = False (does not overwrite). Beware this
                 may not work reliably for UVFITS output when uvfits_method
                 is set to None or 'uvdata' and hence always better to make
                 sure the output file does not exist already

    uvfits_parms [dictionary] specifies basic parameters required for
                 saving in UVFITS format. If set to None (default), the
                 data will not be saved in UVFITS format. To save in UVFITS
                 format, the following keys and values are required:
                 'ref_point'   [dictionary] Contains information about the
                               reference position to which projected
                               baselines and rotated visibilities are to
                               be computed. Default=None (no additional
                               phasing will be performed). It must
                               contain the following keys with the
                               following values:
                               'coords'    [string] Refers to the
                                           coordinate system in which value
                                           in key 'location' is specified
                                           in. Accepted values are 'radec',
                                           'hadec', 'altaz' and 'dircos'
                               'location'  [numpy array] Must be a Mx2 (if
                                           value in key 'coords' is set to
                                           'radec', 'hadec', 'altaz' or
                                           'dircos') or Mx3 (if value in
                                           key 'coords' is set to
                                           'dircos'). M can be 1 or equal
                                           to number of timestamps. If M=1,
                                           the same reference point in the
                                           same coordinate system will be
                                           repeated for all timestamps. If
                                           value under key 'coords' is set
                                           to 'radec', 'hadec' or 'altaz',
                                           the value under this key
                                           'location' must be in units of
                                           degrees.
                 'method'      [string] specifies method to be used in
                               saving in UVFITS format. Accepted values are
                               'uvdata', 'uvfits' or None (default). If set
                               to 'uvdata', the UVFITS writer in uvdata
                               module is used. If set to 'uvfits', the
                               in-house UVFITS writer is used. If set to
                               None, first uvdata module will be attempted
                               but if it fails then the in-house UVFITS
                               writer will be tried.

    verbose      [boolean] If True (default), prints diagnostic and progress
                 messages. If False, suppress printing such messages.
    -------------------------------------------------------------------------
    """
    try:
        outfile
    except NameError:
        raise NameError('No filename provided. Aborting InterferometerArray.save()...')

    if fmt.lower() not in ['hdf5', 'fits']:
        raise ValueError('Invalid output file format specified')
    # Extension follows the (validated) format name for both HDF5 and FITS
    filename = outfile + '.' + fmt.lower()

    if verbose:
        print('\nSaving information about interferometer...')

    if fmt.lower() == 'fits':
        use_ascii = False
        if tabtype == 'TableHDU':
            use_ascii = True

        # Primary HDU: scalar metadata about the array and the simulation
        hdulist = []
        hdulist += [fits.PrimaryHDU()]
        hdulist[0].header['latitude'] = (self.latitude, 'Latitude of interferometer')
        hdulist[0].header['longitude'] = (self.longitude, 'Longitude of interferometer')
        hdulist[0].header['altitude'] = (self.altitude, 'Altitude of interferometer')
        hdulist[0].header['baseline_coords'] = (self.baseline_coords, 'Baseline coordinate system')
        hdulist[0].header['freq_resolution'] = (self.freq_resolution, 'Frequency Resolution (Hz)')
        hdulist[0].header['pointing_coords'] = (self.pointing_coords, 'Pointing coordinate system')
        hdulist[0].header['phase_center_coords'] = (self.phase_center_coords, 'Phase center coordinate system')
        hdulist[0].header['skycoords'] = (self.skycoords, 'Sky coordinate system')
        if 'id' in self.telescope:
            hdulist[0].header['telescope'] = (self.telescope['id'], 'Telescope Name')
        if self.telescope['groundplane'] is not None:
            hdulist[0].header['groundplane'] = (self.telescope['groundplane'], 'Ground plane height')
        if self.simparms_file is not None:
            hdulist[0].header['simparms'] = (self.simparms_file, 'YAML file with simulation parameters')
        if self.gradient_mode is not None:
            hdulist[0].header['gradient_mode'] = (self.gradient_mode, 'Visibility Gradient Mode')
        if self.gaininfo is not None:
            hdulist[0].header['gainsfile'] = (outfile+'.gains.hdf5', 'Gains File')
        hdulist[0].header['element_shape'] = (self.telescope['shape'], 'Antenna element shape')
        hdulist[0].header['element_size'] = (self.telescope['size'], 'Antenna element size')
        hdulist[0].header['element_ocoords'] = (self.telescope['ocoords'], 'Antenna element orientation coordinates')
        hdulist[0].header['t_obs'] = (self.t_obs, 'Observing duration (s)')
        hdulist[0].header['n_acc'] = (self.n_acc, 'Number of accumulations')
        hdulist[0].header['flux_unit'] = (self.flux_unit, 'Unit of flux density')
        hdulist[0].header['EXTNAME'] = 'PRIMARY'
        if verbose:
            print('\tCreated a primary HDU.')

        hdulist += [fits.ImageHDU(self.telescope['orientation'], name='Antenna element orientation')]
        if verbose:
            print('\tCreated an extension for antenna element orientation.')

        cols = []
        if self.lst:
            cols += [fits.Column(name='LST', format='D', array=NP.asarray(self.lst).ravel())]
            cols += [fits.Column(name='pointing_longitude', format='D', array=self.pointing_center[:,0])]
            cols += [fits.Column(name='pointing_latitude', format='D', array=self.pointing_center[:,1])]
            cols += [fits.Column(name='phase_center_longitude', format='D', array=self.phase_center[:,0])]
            cols += [fits.Column(name='phase_center_latitude', format='D', array=self.phase_center[:,1])]
        columns = _astropy_columns(cols, tabtype=tabtype)
        # NOTE(review): fits.new_table() was removed in modern astropy
        # (use fits.BinTableHDU.from_columns instead) -- this code assumes
        # a legacy astropy; confirm against the pinned dependency version
        tbhdu = fits.new_table(columns)
        tbhdu.header.set('EXTNAME', 'POINTING AND PHASE CENTER INFO')
        hdulist += [tbhdu]
        if verbose:
            print('\tCreated pointing and phase center information table.')

        # Extract maximum label width from the layout labels dtype (e.g. '|S4')
        maxlen = int(self.layout['labels'].dtype.str.split('|')[1][1:])
        labels = NP.asarray(self.labels, dtype=[('A2', '|S{0:0d}'.format(maxlen)), ('A1', '|S{0:0d}'.format(maxlen))])
        cols = []
        cols += [fits.Column(name='A1', format='{0:0d}A'.format(maxlen), array=labels['A1'])]
        cols += [fits.Column(name='A2', format='{0:0d}A'.format(maxlen), array=labels['A2'])]
        columns = _astropy_columns(cols, tabtype=tabtype)
        tbhdu = fits.new_table(columns)
        tbhdu.header.set('EXTNAME', 'LABELS')
        hdulist += [tbhdu]
        if verbose:
            print('\tCreated extension table containing baseline labels.')

        hdulist += [fits.ImageHDU(self.baselines, name='baselines')]
        if verbose:
            print('\tCreated an extension for baseline vectors.')

        if self.projected_baselines is not None:
            hdulist += [fits.ImageHDU(self.projected_baselines, name='proj_baselines')]
            if verbose:
                print('\tCreated an extension for projected baseline vectors.')

        if self.layout:
            label_lengths = [len(label) for label in self.layout['labels']]
            maxlen = max(label_lengths)
            cols = []
            cols += [fits.Column(name='labels', format='{0:0d}A'.format(maxlen), array=self.layout['labels'])]
            cols += [fits.Column(name='ids', format='J', array=self.layout['ids'])]
            cols += [fits.Column(name='positions', format='3D', array=self.layout['positions'])]
            columns = _astropy_columns(cols, tabtype=tabtype)
            tbhdu = fits.new_table(columns)
            tbhdu.header.set('EXTNAME', 'LAYOUT')
            tbhdu.header.set('COORDS', self.layout['coords'])
            hdulist += [tbhdu]

        hdulist += [fits.ImageHDU(self.A_eff, name='Effective area')]
        if verbose:
            print('\tCreated an extension for effective area.')

        hdulist += [fits.ImageHDU(self.eff_Q, name='Interferometer efficiency')]
        if verbose:
            print('\tCreated an extension for interferometer efficiency.')

        cols = []
        cols += [fits.Column(name='frequency', format='D', array=self.channels)]
        if self.lags is not None:
            cols += [fits.Column(name='lag', format='D', array=self.lags)]
        columns = _astropy_columns(cols, tabtype=tabtype)
        tbhdu = fits.new_table(columns)
        tbhdu.header.set('EXTNAME', 'SPECTRAL INFO')
        hdulist += [tbhdu]
        if verbose:
            print('\tCreated spectral information table.')

        if self.t_acc:
            hdulist += [fits.ImageHDU(self.t_acc, name='t_acc')]
            if verbose:
                print('\tCreated an extension for accumulation times.')

        cols = []
        if isinstance(self.timestamp[0], str):
            cols += [fits.Column(name='timestamps', format='24A', array=NP.asarray(self.timestamp))]
        elif isinstance(self.timestamp[0], float):
            cols += [fits.Column(name='timestamps', format='D', array=NP.asarray(self.timestamp))]
        else:
            raise TypeError('Invalid data type for timestamps')
        columns = _astropy_columns(cols, tabtype=tabtype)
        tbhdu = fits.new_table(columns)
        tbhdu.header.set('EXTNAME', 'TIMESTAMPS')
        hdulist += [tbhdu]
        if verbose:
            print('\tCreated extension table containing timestamps.')

        if self.Tsysinfo:
            # dtype=float replaces the removed NP.float alias (NumPy >= 1.24)
            cols = []
            cols += [fits.Column(name='Trx', format='D', array=NP.asarray([elem['Trx'] for elem in self.Tsysinfo], dtype=float))]
            cols += [fits.Column(name='Tant0', format='D', array=NP.asarray([elem['Tant']['T0'] for elem in self.Tsysinfo], dtype=float))]
            cols += [fits.Column(name='f0', format='D', array=NP.asarray([elem['Tant']['f0'] for elem in self.Tsysinfo], dtype=float))]
            cols += [fits.Column(name='spindex', format='D', array=NP.asarray([elem['Tant']['spindex'] for elem in self.Tsysinfo], dtype=float))]
            columns = _astropy_columns(cols, tabtype=tabtype)
            tbhdu = fits.new_table(columns)
            tbhdu.header.set('EXTNAME', 'TSYSINFO')
            hdulist += [tbhdu]

        hdulist += [fits.ImageHDU(self.Tsys, name='Tsys')]
        if verbose:
            print('\tCreated an extension for Tsys.')

        if self.vis_rms_freq is not None:
            hdulist += [fits.ImageHDU(self.vis_rms_freq, name='freq_channel_noise_rms_visibility')]
            if verbose:
                print('\tCreated an extension for simulated visibility noise rms per channel.')

        if self.vis_freq is not None:
            hdulist += [fits.ImageHDU(self.vis_freq.real, name='real_freq_obs_visibility')]
            hdulist += [fits.ImageHDU(self.vis_freq.imag, name='imag_freq_obs_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of observed visibility frequency spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.vis_freq.shape))

        if self.skyvis_freq is not None:
            hdulist += [fits.ImageHDU(self.skyvis_freq.real, name='real_freq_sky_visibility')]
            hdulist += [fits.ImageHDU(self.skyvis_freq.imag, name='imag_freq_sky_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of noiseless sky visibility frequency spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.skyvis_freq.shape))

        if self.vis_noise_freq is not None:
            hdulist += [fits.ImageHDU(self.vis_noise_freq.real, name='real_freq_noise_visibility')]
            hdulist += [fits.ImageHDU(self.vis_noise_freq.imag, name='imag_freq_noise_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of visibility noise frequency spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.vis_noise_freq.shape))

        if self.gradient_mode is not None:
            for gradkey in self.gradient:
                hdulist += [fits.ImageHDU(self.gradient[gradkey].real, name='real_freq_sky_visibility_gradient_wrt_{0}'.format(gradkey))]
                hdulist += [fits.ImageHDU(self.gradient[gradkey].imag, name='imag_freq_sky_visibility_gradient_wrt_{0}'.format(gradkey))]
                if verbose:
                    print('\tCreated extensions for real and imaginary parts of gradient of sky visibility frequency spectrum wrt {0} of size {1[0]} x {1[1]} x {1[2]} x {1[3]}'.format(gradkey, self.gradient[gradkey].shape))

        hdulist += [fits.ImageHDU(self.bp, name='bandpass')]
        if verbose:
            print('\tCreated an extension for bandpass functions of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp.shape))

        hdulist += [fits.ImageHDU(self.bp_wts, name='bandpass_weights')]
        if verbose:
            print('\tCreated an extension for bandpass weights of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp_wts.shape))

        if self.vis_lag is not None:
            hdulist += [fits.ImageHDU(self.vis_lag.real, name='real_lag_visibility')]
            hdulist += [fits.ImageHDU(self.vis_lag.imag, name='imag_lag_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of observed visibility delay spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.vis_lag.shape))

        if self.skyvis_lag is not None:
            hdulist += [fits.ImageHDU(self.skyvis_lag.real, name='real_lag_sky_visibility')]
            hdulist += [fits.ImageHDU(self.skyvis_lag.imag, name='imag_lag_sky_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of noiseless sky visibility delay spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.skyvis_lag.shape))

        if self.vis_noise_lag is not None:
            hdulist += [fits.ImageHDU(self.vis_noise_lag.real, name='real_lag_noise_visibility')]
            hdulist += [fits.ImageHDU(self.vis_noise_lag.imag, name='imag_lag_noise_visibility')]
            if verbose:
                print('\tCreated extensions for real and imaginary parts of visibility noise delay spectrum of size {0[0]} x {0[1]} x {0[2]}'.format(self.vis_noise_lag.shape))

        if verbose:
            print('\tNow writing FITS file to disk...')

        hdu = fits.HDUList(hdulist)
        hdu.writeto(filename, overwrite=overwrite)
        if self.gaininfo is not None:
            self.gaininfo.write_gaintable(outfile+'.gains.hdf5')
    elif fmt.lower() == 'hdf5':
        # 'w-' fails if the file already exists; 'w' truncates
        write_str = 'w' if overwrite else 'w-'
        with h5py.File(filename, write_str) as fileobj:
            hdr_group = fileobj.create_group('header')
            hdr_group['AstroUtils#'] = astroutils.__githash__
            hdr_group['PRISim#'] = prisim.__githash__
            hdr_group['flux_unit'] = self.flux_unit
            tlscp_group = fileobj.create_group('telescope_parms')
            tlscp_group['latitude'] = self.latitude
            tlscp_group['longitude'] = self.longitude
            tlscp_group['altitude'] = self.altitude
            tlscp_group['latitude'].attrs['units'] = 'deg'
            tlscp_group['longitude'].attrs['units'] = 'deg'
            tlscp_group['altitude'].attrs['units'] = 'm'
            if 'id' in self.telescope:
                tlscp_group['id'] = self.telescope['id']
            spec_group = fileobj.create_group('spectral_info')
            spec_group['freq_resolution'] = self.freq_resolution
            spec_group['freq_resolution'].attrs['units'] = 'Hz'
            spec_group['freqs'] = self.channels
            spec_group['freqs'].attrs['units'] = 'Hz'
            if self.lags is not None:
                spec_group['lags'] = self.lags
                spec_group['lags'].attrs['units'] = 's'
            spec_group['bp'] = self.bp
            spec_group['bp_wts'] = self.bp_wts
            if self.simparms_file is not None:
                sim_group = fileobj.create_group('simparms')
                sim_group['simfile'] = self.simparms_file
            antelem_group = fileobj.create_group('antenna_element')
            antelem_group['shape'] = self.telescope['shape']
            antelem_group['size'] = self.telescope['size']
            antelem_group['size'].attrs['units'] = 'm'
            antelem_group['ocoords'] = self.telescope['ocoords']
            antelem_group['orientation'] = self.telescope['orientation']
            if self.telescope['ocoords'] != 'dircos':
                antelem_group['orientation'].attrs['units'] = 'deg'
            if 'groundplane' in self.telescope:
                if self.telescope['groundplane'] is not None:
                    antelem_group['groundplane'] = self.telescope['groundplane']
            if self.layout:
                layout_group = fileobj.create_group('layout')
                layout_group['positions'] = self.layout['positions']
                layout_group['positions'].attrs['units'] = 'm'
                layout_group['positions'].attrs['coords'] = self.layout['coords']
                layout_group['labels'] = self.layout['labels']
                layout_group['ids'] = self.layout['ids']
            timing_group = fileobj.create_group('timing')
            timing_group['t_obs'] = self.t_obs
            timing_group['n_acc'] = self.n_acc
            if self.t_acc:
                timing_group['t_acc'] = self.t_acc
            timing_group['timestamps'] = NP.asarray(self.timestamp)
            sky_group = fileobj.create_group('skyparms')
            sky_group['pointing_coords'] = self.pointing_coords
            sky_group['phase_center_coords'] = self.phase_center_coords
            sky_group['skycoords'] = self.skycoords
            sky_group['LST'] = NP.asarray(self.lst).ravel()
            sky_group['LST'].attrs['units'] = 'deg'
            sky_group['pointing_center'] = self.pointing_center
            sky_group['phase_center'] = self.phase_center
            array_group = fileobj.create_group('array')
            # Reuse the string dtype of the layout labels for baseline labels
            str_dtype = self.layout['labels'].dtype.str
            labels = NP.asarray(self.labels, dtype=[('A2', str_dtype), ('A1', str_dtype)])
            array_group['labels'] = labels
            array_group['baselines'] = self.baselines
            array_group['baseline_coords'] = self.baseline_coords
            array_group['baselines'].attrs['coords'] = 'local-ENU'
            array_group['baselines'].attrs['units'] = 'm'
            array_group['projected_baselines'] = self.projected_baselines
            # Bug fix: these attributes belong to the 'projected_baselines'
            # dataset; the old code re-wrote the 'baselines' attributes,
            # clobbering 'local-ENU' with 'eq-XYZ'
            array_group['projected_baselines'].attrs['coords'] = 'eq-XYZ'
            array_group['projected_baselines'].attrs['units'] = 'm'
            instr_group = fileobj.create_group('instrument')
            instr_group['effective_area'] = self.A_eff
            instr_group['effective_area'].attrs['units'] = 'm^2'
            instr_group['efficiency'] = self.eff_Q
            if self.Tsysinfo:
                # dtype=float replaces the removed NP.float alias (NumPy >= 1.24)
                instr_group['Trx'] = NP.asarray([elem['Trx'] for elem in self.Tsysinfo], dtype=float)
                instr_group['Tant0'] = NP.asarray([elem['Tant']['T0'] for elem in self.Tsysinfo], dtype=float)
                instr_group['f0'] = NP.asarray([elem['Tant']['f0'] for elem in self.Tsysinfo], dtype=float)
                instr_group['spindex'] = NP.asarray([elem['Tant']['spindex'] for elem in self.Tsysinfo], dtype=float)
                instr_group['Trx'].attrs['units'] = 'K'
                instr_group['Tant0'].attrs['units'] = 'K'
                instr_group['f0'].attrs['units'] = 'Hz'
                # -999 is the sentinel for "no net temperature specified"
                instr_group['Tnet'] = NP.asarray([elem['Tnet'] if elem['Tnet'] is not None else -999 for elem in self.Tsysinfo], dtype=float)
                instr_group['Tnet'].attrs['units'] = 'K'
            instr_group['Tsys'] = self.Tsys
            instr_group['Tsys'].attrs['units'] = 'K'
            vis_group = fileobj.create_group('visibilities')
            visfreq_group = vis_group.create_group('freq_spectrum')
            if self.vis_rms_freq is not None:
                visfreq_group['rms'] = self.vis_rms_freq
                visfreq_group['rms'].attrs['units'] = 'Jy'
            if self.vis_freq is not None:
                visfreq_group['vis'] = self.vis_freq
                visfreq_group['vis'].attrs['units'] = 'Jy'
            if self.skyvis_freq is not None:
                visfreq_group['skyvis'] = self.skyvis_freq
                visfreq_group['skyvis'].attrs['units'] = 'Jy'
            if self.vis_noise_freq is not None:
                visfreq_group['noise'] = self.vis_noise_freq
                visfreq_group['noise'].attrs['units'] = 'Jy'
            vislags_group = vis_group.create_group('delay_spectrum')
            if self.vis_lag is not None:
                vislags_group['vis'] = self.vis_lag
                vislags_group['vis'].attrs['units'] = 'Jy Hz'
            if self.skyvis_lag is not None:
                vislags_group['skyvis'] = self.skyvis_lag
                vislags_group['skyvis'].attrs['units'] = 'Jy Hz'
            if self.vis_noise_lag is not None:
                vislags_group['noise'] = self.vis_noise_lag
                vislags_group['noise'].attrs['units'] = 'Jy Hz'
            if self.gradient_mode is not None:
                visgradient_group = fileobj.create_group('gradients')
                for gradkey in self.gradient:
                    visgradient_group[gradkey] = self.gradient[gradkey]
            if self.gaininfo is not None:
                gains_group = fileobj.create_group('gaininfo')
                gains_group['gainsfile'] = outfile+'.gains.hdf5'
                # Bug fix: Dataset.value was removed in h5py 3.x; the path is
                # the same string just stored above, so use it directly
                self.gaininfo.write_gaintable(outfile+'.gains.hdf5')
            if self.blgroups is not None:
                blinfo = fileobj.create_group('blgroupinfo')
                blgrp = blinfo.create_group('groups')
                for blkey in self.blgroups:
                    blgrp[str(blkey)] = self.blgroups[blkey]
                revmap = blinfo.create_group('reversemap')
                for blkey in self.bl_reversemap:
                    revmap[str(blkey)] = self.bl_reversemap[blkey]

    if verbose:
        print('\tInterferometer array information written successfully to file on disk:\n\t\t{0}\n'.format(filename))

    if npz:
        if (self.vis_freq is not None) and (self.vis_noise_freq is not None):
            NP.savez_compressed(outfile+'.npz', skyvis_freq=self.skyvis_freq, vis_freq=self.vis_freq, vis_noise_freq=self.vis_noise_freq, lst=self.lst, freq=self.channels, timestamp=self.timestamp, bl=self.baselines, bl_length=self.baseline_lengths)
        else:
            NP.savez_compressed(outfile+'.npz', skyvis_freq=self.skyvis_freq, lst=self.lst, freq=self.channels, timestamp=self.timestamp, bl=self.baselines, bl_length=self.baseline_lengths)
        if verbose:
            print('\tInterferometer array information written successfully to NPZ file on disk:\n\t\t{0}\n'.format(outfile+'.npz'))

    if uvfits_parms is not None:
        self.write_uvfits(outfile, uvfits_parms=uvfits_parms, overwrite=overwrite, verbose=verbose)
#############################################################################
def pyuvdata_write(self, outfile, formats=None, uvfits_parms=None,
                   datapool=None, overwrite=False, verbose=True):
    """
    -------------------------------------------------------------------------
    Saves the interferometer array information to disk in various formats
    through pyuvdata module

    Inputs:

    outfile      [string] Filename with full path to be saved to. Three
                 UVFITS files will also be created whose names will be
                 outfile+'-noiseless', outfile+'-noisy' and
                 outfile+'-noise' appended with '.uvfits'

    Keyword Input(s):

    formats      [list] List of formats for the data to be written in.
                 Accepted values include 'uvfits', and 'uvh5'. If 'uvfits'
                 is included in this list, then uvfits_parms must be
                 provided.

    uvfits_parms
                 [dictionary] specifies basic parameters required for
                 saving in UVFITS format. This will be used only if the
                 keyword input formats includes 'uvfits'. If set to None
                 (default), the data will not be saved in UVFITS format.
                 To save in UVFITS format, the following keys and
                 values are required:
                 'ref_point'   [dictionary] Contains information about the
                               reference position to which projected
                               baselines and rotated visibilities are to
                               be computed. Default=None (no additional
                               phasing will be performed). It must
                               contain the following keys with the
                               following values:
                               'coords'    [string] Refers to the
                                           coordinate system in which value
                                           in key 'location' is specified
                                           in. Accepted values are 'radec',
                                           'hadec', 'altaz' and 'dircos'
                               'location'  [numpy array] Must be a Mx2 (if
                                           value in key 'coords' is set to
                                           'radec', 'hadec', 'altaz' or
                                           'dircos') or Mx3 (if value in
                                           key 'coords' is set to
                                           'dircos'). M can be 1 or equal
                                           to number of timestamps. If M=1,
                                           the same reference point in the
                                           same coordinate system will be
                                           repeated for all timestamps. If
                                           value under key 'coords' is set
                                           to 'radec', 'hadec' or 'altaz',
                                           the value under this key
                                           'location' must be in units of
                                           degrees.
                 'method'      [string] specifies method to be used in
                               saving in UVFITS format. Accepted values are
                               'uvdata', 'uvfits' or None (default). If set
                               to 'uvdata', the UVFITS writer in uvdata
                               module is used. If set to 'uvfits', the
                               in-house UVFITS writer is used. If set to
                               None, first uvdata module will be attempted
                               but if it fails then the in-house UVFITS
                               writer will be tried.

    'datapool'   [NoneType or list] Indicates which portion of the data
                 is to be written to the external file. If set to None
                 (default), all of skyvis_freq, vis_freq, and
                 vis_noise_freq attributes will be written. Otherwise,
                 accepted values are a list of strings that can include
                 'noiseless' (skyvis_freq attribute), 'noisy' (vis_freq
                 attribute), and 'noise' (vis_noise_freq attribute).

    overwrite    [boolean] True indicates overwrite even if a file already
                 exists. Default = False (does not overwrite). Beware this
                 may not work reliably if uvfits_method is set to None or
                 'uvdata' and hence always better to make sure the output
                 file does not exist already

    verbose      [boolean] If True (default), prints diagnostic and progress
                 messages. If False, suppress printing such messages.
    -------------------------------------------------------------------------
    """
    if datapool is None:
        datapool = ['noiseless', 'noisy', 'noise']
    if not isinstance(datapool, list):
        raise TypeError('Keyword input datapool must be a list')
    else:
        datapool_list = [dpool.lower() for dpool in datapool if (isinstance(dpool, str) and dpool.lower() in ['noiseless', 'noise', 'noisy'])]
        if len(datapool_list) == 0:
            raise ValueError('No valid datapool string found in input datapool')
        datapool = datapool_list

    # Bug fix: the old code iterated formats without validation, so the
    # default formats=None crashed with an opaque TypeError, and an
    # unrecognized entry left dataobj/uvfits_method unbound (NameError)
    if not isinstance(formats, list):
        raise TypeError('Keyword input formats must be a list of strings from ["uvfits", "uvh5"]')
    for fileformat in formats:  # renamed from 'format' to avoid shadowing the builtin
        if not isinstance(fileformat, str) or fileformat.lower() not in ['uvfits', 'uvh5']:
            raise ValueError('Accepted values in keyword input formats are "uvfits" and "uvh5"')
        if fileformat.lower() == 'uvh5':
            dataobj = InterferometerData(self, ref_point=None, datakeys=datapool)
            uvfits_method = None
        if fileformat.lower() == 'uvfits':
            if uvfits_parms is not None:
                if not isinstance(uvfits_parms, dict):
                    raise TypeError('Input uvfits_parms must be a dictionary')
                if 'ref_point' not in uvfits_parms:
                    uvfits_parms['ref_point'] = None
                if 'method' not in uvfits_parms:
                    uvfits_parms['method'] = None
            else:
                uvfits_parms = {'ref_point': None, 'method': None}
            uvfits_method = uvfits_parms['method']
            dataobj = InterferometerData(self, ref_point=uvfits_parms['ref_point'], datakeys=datapool)

        filextn = fileformat.lower()
        # Write one file per requested data product (noiseless/noisy/noise)
        for datakey in dataobj.infodict['data_array']:
            if dataobj.infodict['data_array'][datakey] is not None:
                dataobj.write(outfile+'-{0}.{1}'.format(datakey, filextn), datatype=datakey, fmt=fileformat.upper(), uvfits_method=uvfits_method, overwrite=overwrite)
#################################################################################
class ApertureSynthesis(object):
    """
    ----------------------------------------------------------------------------
    Class to manage aperture synthesis of visibility measurements of a
    multi-element interferometer array.

    Attributes:

    ia          [instance of class InterferometerArray] Instance of class
                InterferometerArray created at the time of instantiating an
                object of class ApertureSynthesis

    baselines   [M x 3 Numpy array] The baseline vectors associated with the
                M interferometers in SI units. The coordinate system of these
                vectors is local East, North, Up system

    blxyz       [M x 3 Numpy array] The baseline vectors associated with the
                M interferometers in SI units. The coordinate system of these
                vectors is X, Y, Z in equatorial coordinates

    uvw_lambda  [M x 3 x Nt numpy array] Baseline vectors phased to the phase
                center of each accumulation. M is the number of baselines, Nt
                is the number of accumulations and 3 denotes U, V and W
                components. This is in units of physical distance (usually in m)

    uvw         [M x 3 x Nch x Nt numpy array] Baseline vectors phased to the
                phase center of each accumulation at each frequency. M is the
                number of baselines, Nt is the number of accumulations, Nch is
                the number of frequency channels, and 3 denotes U, V and W
                components. This is uvw_lambda / wavelength and in units of
                number of wavelengths

    blc         [numpy array] 3-element numpy array specifying bottom left
                corner of the grid coincident with bottom left interferometer
                location in UVW coordinate system (same units as uvw)

    trc         [numpy array] 3-element numpy array specifying top right
                corner of the grid coincident with top right interferometer
                location in UVW coordinate system (same units as uvw)

    grid_blc    [numpy array] 3-element numpy array specifying bottom left
                corner of the grid in UVW coordinate system including any
                padding used (same units as uvw)

    grid_trc    [numpy array] 3-element numpy array specifying top right
                corner of the grid in UVW coordinate system including any
                padding used (same units as uvw)

    gridu       [numpy array] 3-dimensional numpy meshgrid array specifying
                grid u-locations in units of uvw in the UVW coordinate system
                whose corners are specified by attributes grid_blc and grid_trc

    gridv       [numpy array] 3-dimensional numpy meshgrid array specifying
                grid v-locations in units of uvw in the UVW coordinate system
                whose corners are specified by attributes grid_blc and grid_trc

    gridw       [numpy array] 3-dimensional numpy meshgrid array specifying
                grid w-locations in units of uvw in the UVW coordinate system
                whose corners are specified by attributes grid_blc and grid_trc

    grid_ready  [boolean] set to True if the gridding has been performed,
                False if grid is not available yet. Set to False in case
                blc, trc, grid_blc or grid_trc is updated indicating gridding
                is to be performed again

    f           [numpy vector] frequency channels in Hz

    df          [scalar] Frequency resolution (in Hz)

    latitude    [Scalar] Latitude of the interferometer's location. Default
                is 34.0790 degrees North corresponding to that of the VLA.

    lst         [list] List of LST (in degrees) for each timestamp

    n_acc       [scalar] Number of accumulations

    pointing_center
                [2-column numpy array] Pointing center (latitude and
                longitude) of the observation at a given timestamp. This is
                where the telescopes will be phased up to as reference.
                Coordinate system for the pointing_center is specified by
                another attribute pointing_coords.

    phase_center
                [2-column numpy array] Phase center (latitude and
                longitude) of the observation at a given timestamp. This is
                where the telescopes will be phased up to as reference.
                Coordinate system for the phase_center is specified by another
                attribute phase_center_coords.

    pointing_coords
                [string] Coordinate system for telescope pointing. Accepted
                values are 'radec' (RA-Dec), 'hadec' (HA-Dec) or 'altaz'
                (Altitude-Azimuth). Default = 'hadec'.

    phase_center_coords
                [string] Coordinate system for array phase center. Accepted
                values are 'radec' (RA-Dec), 'hadec' (HA-Dec) or 'altaz'
                (Altitude-Azimuth). Default = 'hadec'.

    timestamp   [list] List of timestamps during the observation

    Member functions:

    __init__()      Initialize an instance of class ApertureSynthesis which
                    manages information on a aperture synthesis with an
                    interferometer array.

    genUVW()        Generate U, V, W (in units of number of wavelengths) by
                    phasing the baseline vectors to the phase centers of each
                    pointing at all frequencies

    reorderUVW()    Reorder U, V, W (in units of number of wavelengths) of
                    shape nbl x 3 x nchan x n_acc to 3 x (nbl x nchan x n_acc)

    setUVWgrid()    Set up U, V, W grid (in units of number of wavelengths)
                    based on the synthesized U, V, W
    ----------------------------------------------------------------------------
    """

    def __init__(self, interferometer_array=None):
        """
        ------------------------------------------------------------------------
        Intialize the ApertureSynthesis class which manages information on a
        aperture synthesis with an interferometer array.

        Class attributes initialized are:
        ia, f, df, lst, timestamp, baselines, blxyz, phase_center, n_acc,
        phase_center_coords, pointing_center, pointing_coords, latitude, blc,
        trc, grid_blc, grid_trc, grid_ready, uvw, uvw_lambda, gridu, gridv,
        gridw

        Read docstring of class ApertureSynthesis for details on these
        attributes.

        Keyword input(s):

        interferometer_array
                    [instance of class InterferometerArray] Instance of class
                    InterferometerArray used to initialize an instance of
                    class ApertureSynthesis
        ------------------------------------------------------------------------
        """

        if interferometer_array is not None:
            if isinstance(interferometer_array, InterferometerArray):
                self.ia = interferometer_array
            else:
                raise TypeError('Input interferometer_array must be an instance of class InterferometerArray')
        else:
            raise NameError('No input interferometer_array provided')

        self.f = self.ia.channels
        self.df = interferometer_array.freq_resolution
        self.n_acc = interferometer_array.n_acc
        self.lst = interferometer_array.lst
        self.phase_center = interferometer_array.phase_center
        self.pointing_center = interferometer_array.pointing_center
        self.phase_center_coords = interferometer_array.phase_center_coords
        self.pointing_coords = interferometer_array.pointing_coords
        self.baselines = interferometer_array.baselines
        self.timestamp = interferometer_array.timestamp
        self.latitude = interferometer_array.latitude
        # Convert local ENU baseline vectors to equatorial XYZ
        self.blxyz = GEOM.enu2xyz(self.baselines, self.latitude, units='degrees')
        self.uvw_lambda = None
        self.uvw = None
        # Grid corners live in 3D (u,v,w) space; placeholders until
        # setUVWgrid() computes the actual extents
        self.blc = NP.zeros(3)
        self.trc = NP.zeros(3)
        self.grid_blc = NP.zeros(3)
        self.grid_trc = NP.zeros(3)
        self.gridu, self.gridv, self.gridw = None, None, None
        self.grid_ready = False

    #############################################################################

    def genUVW(self):
        """
        ------------------------------------------------------------------------
        Generate U, V, W (in units of number of wavelengths) by phasing the
        baseline vectors to the phase centers of each pointing at all
        frequencies
        ------------------------------------------------------------------------
        """

        # Express the phase center in HA-Dec regardless of the native
        # coordinate system of the attribute phase_center
        if self.phase_center_coords == 'hadec':
            pc_hadec = self.phase_center
        elif self.phase_center_coords == 'radec':
            # HA = LST - RA; for RA = LST the HA is zero at each timestamp
            pc_hadec = NP.hstack((NP.asarray(self.lst).reshape(-1,1), NP.zeros(len(self.lst)).reshape(-1,1)))
        elif self.phase_center_coords == 'altaz':
            pc_altaz = self.phase_center
            pc_hadec = GEOM.altaz2hadec(pc_altaz, self.latitude, units='degrees')
        else:
            raise ValueError('Attribute phase_center_coords must be set to one of "hadec", "radec" or "altaz"')

        pc_hadec = NP.radians(pc_hadec)
        ha = pc_hadec[:,0]
        dec = pc_hadec[:,1]
        # Standard XYZ -> UVW rotation matrix, one 3x3 matrix per timestamp
        # (shape 3 x 3 x Nt)
        rotmat = NP.asarray([[NP.sin(ha), NP.cos(ha), NP.zeros_like(ha)],
                             [-NP.sin(dec)*NP.cos(ha), NP.sin(dec)*NP.sin(ha), NP.cos(dec)],
                             [NP.cos(dec)*NP.cos(ha), -NP.cos(dec)*NP.sin(ha), NP.sin(dec)]])
        # (M x 3) . (3 x 3 x Nt) -> M x 3 x Nt, in physical length units
        self.uvw_lambda = NP.tensordot(self.blxyz, rotmat, axes=[1,1])
        wl = FCNST.c / self.f
        # Divide by wavelength per channel -> M x 3 x Nch x Nt, in number of
        # wavelengths
        self.uvw = self.uvw_lambda[:,:,NP.newaxis,:] / wl.reshape(1,1,-1,1)

    #############################################################################

    def reorderUVW(self):
        """
        ------------------------------------------------------------------------
        Reorder U, V, W (in units of number of wavelengths) of shape
        nbl x 3 x nchan x n_acc to 3 x (nbl x nchan x n_acc)
        ------------------------------------------------------------------------
        """

        reorderedUVW = NP.swapaxes(self.uvw, 0, 1) # now 3 x Nbl x nchan x n_acc
        reorderedUVW = reorderedUVW.reshape(3,-1) # now 3 x (Nbl x nchan x n_acc)
        return reorderedUVW

    #############################################################################

    def setUVWgrid(self, spacing=0.5, pad=None, pow2=True):
        """
        ------------------------------------------------------------------------
        Routine to produce a grid based on the UVW spacings of the
        interferometer array

        Inputs:

        spacing     [Scalar] Positive value indicating the upper limit on grid
                    spacing in uvw-coordinates desirable at the lowest
                    wavelength (max frequency). Default = 0.5

        pad         [List] Padding to be applied around the locations
                    before forming a grid. List elements should be positive. If
                    it is a one-element list, the element is applicable to all
                    x, y and z axes. If list contains four or more elements,
                    only the first three elements are considered one for each
                    axis. Default = None (no padding).

        pow2        [Boolean] If set to True, the grid is forced to have a size
                    a next power of 2 relative to the actual size required. If
                    False, gridding is done with the appropriate size as
                    determined by spacing. Default = True.
        ------------------------------------------------------------------------
        """

        if self.uvw is None:
            self.genUVW()
        uvw = self.reorderUVW()
        blc = NP.amin(uvw, axis=1)
        trc = NP.amax(uvw, axis=1)
        # Make the grid symmetric about the origin in all three axes
        self.trc = NP.amax(NP.abs(NP.vstack((blc, trc))), axis=0)
        self.blc = -1 * self.trc
        # Honor the caller's pow2 choice (previously hard-coded to True)
        self.gridu, self.gridv, self.gridw = GRD.grid_3d([(self.blc[0], self.trc[0]), (self.blc[1], self.trc[1]), (self.blc[2], self.trc[2])], pad=pad, spacing=spacing, pow2=pow2)
        self.grid_blc = NP.asarray([self.gridu.min(), self.gridv.min(), self.gridw.min()])
        self.grid_trc = NP.asarray([self.gridu.max(), self.gridv.max(), self.gridw.max()])

        self.grid_ready = True
################################################################################
class InterferometerData(object):
"""
----------------------------------------------------------------------------
Class to act as an interface between PRISim object and external data
formats.
Attributes:
infodict [dictionary] Dictionary consisting of many attributes loaded
from the PRISim object. This will be used to convert to info
required in external data formats
Member functions:
__init__() Initialize an instance of class InterferometerData
createUVData()
Create an instance of class UVData
write() Write an instance of class InterferometerData into specified
formats. Currently writes in UVFITS format
----------------------------------------------------------------------------
"""
def __init__(self, prisim_object, ref_point=None, datakeys=None):
"""
------------------------------------------------------------------------
Initialize an instance of class InterferometerData.
Class attributes initialized are:
infodict
Inputs:
prisim_object
[instance of class InterferometerArray] Instance of
class InterferometerArray used to initialize an
instance of class InterferometerData.
ref_point [dictionary] Contains information about the reference
position to which projected baselines and rotated
visibilities are to be computed. Default=None (no additional
phasing will be performed). It must be contain the following
keys with the following values:
'coords' [string] Refers to the coordinate system in
which value in key 'location' is specified in.
Accepted values are 'radec', 'hadec', 'altaz'
and 'dircos'
'location' [numpy array] Must be a Mx2 (if value in key
'coords' is set to 'radec', 'hadec', 'altaz' or
'dircos') or Mx3 (if value in key 'coords' is
set to 'dircos'). M can be 1 or equal to number
of timestamps. If M=1, the same reference point
in the same coordinate system will be repeated
for all tiemstamps. If value under key 'coords'
is set to 'radec', 'hadec' or 'altaz', the
value under this key 'location' must be in
units of degrees.
datakeys [NoneType or list] Indicates which portion of the data
is to be written to the UVFITS file. If set to None
(default), all of skyvis_freq, vis_freq, and
vis_noise_freq attributes will be written. Otherwise,
accepted values are a list of strings that can include
'noiseless' (skyvis_freq attribute), 'noisy' (vis_freq
attribute), and 'noise' (vis_nosie_freq attribute).
------------------------------------------------------------------------
"""
try:
prisim_object
except NameError:
raise NameError('Input prisim_object not specified')
if ref_point is not None:
prisim_object.rotate_visibilities(ref_point)
if not isinstance(prisim_object, InterferometerArray):
raise TypeError('Inout prisim_object must be an instance of class InterferometerArray')
if datakeys is None:
datakeys = ['noiseless', 'noisy', 'noise']
if not isinstance(datakeys, list):
raise TypeError('Input datakeys must be a list')
else:
datapool_list = [dpool.lower() for dpool in datakeys if (isinstance(dpool, str) and dpool.lower() in ['noiseless', 'noise', 'noisy'])]
if len(datapool_list) == 0:
raise ValueError('No valid datapool string found in input uvfits_parms')
datakeys = datapool_list
# datatypes = ['noiseless', 'noisy', 'noise']
visibilities = {key: None for key in datakeys}
for key in visibilities:
# Conjugate visibilities for compatibility with UVFITS and CASA imager
if key == 'noiseless':
visibilities[key] = prisim_object.skyvis_freq.conj()
if key == 'noisy':
if prisim_object.vis_freq is not None:
visibilities[key] = prisim_object.vis_freq.conj()
if key == 'noise':
if prisim_object.vis_noise_freq is not None:
visibilities[key] = prisim_object.vis_noise_freq.conj()
self.infodict = {}
self.infodict['Ntimes'] = prisim_object.n_acc
self.infodict['Nbls'] = prisim_object.baselines.shape[0]
self.infodict['Nblts'] = self.infodict['Nbls'] * self.infodict['Ntimes']
self.infodict['Nfreqs'] = prisim_object.channels.size
self.infodict['Npols'] = 1
self.infodict['Nspws'] = 1
self.infodict['data_array'] = {'noiseless': None, 'noisy': None, 'noise': None}
for key in visibilities:
if visibilities[key] is not None:
self.infodict['data_array'][key] = NP.transpose(NP.transpose(visibilities[key], (2,0,1)).reshape(self.infodict['Nblts'], self.infodict['Nfreqs'], self.infodict['Nspws'], self.infodict['Npols']), (0,2,1,3)) # (Nbls, Nfreqs, Ntimes) -> (Ntimes, Nbls, Nfreqs) -> (Nblts, Nfreqs, Nspws=1, Npols=1) -> (Nblts, Nspws=1, Nfreqs, Npols=1)
self.infodict['vis_units'] = 'Jy'
self.infodict['nsample_array'] = NP.ones((self.infodict['Nblts'], self.infodict['Nspws'], self.infodict['Nfreqs'], self.infodict['Npols']))
self.infodict['flag_array'] = NP.zeros((self.infodict['Nblts'], self.infodict['Nspws'], self.infodict['Nfreqs'], self.infodict['Npols']), dtype=NP.bool)
self.infodict['spw_array'] = NP.arange(self.infodict['Nspws'])
self.infodict['uvw_array'] = NP.transpose(prisim_object.projected_baselines, (2,0,1)).reshape(self.infodict['Nblts'], 3)
time_array = NP.asarray(prisim_object.timestamp).reshape(-1,1) + NP.zeros(self.infodict['Nbls']).reshape(1,-1)
self.infodict['time_array'] = time_array.ravel()
lst_array = NP.radians(NP.asarray(prisim_object.lst).reshape(-1,1)) + NP.zeros(self.infodict['Nbls']).reshape(1,-1)
self.infodict['lst_array'] = lst_array.ravel()
labels_A1 = prisim_object.labels['A1']
labels_A2 = prisim_object.labels['A2']
if prisim_object.layout:
id_A1 = [prisim_object.layout['ids'][prisim_object.layout['labels'].tolist().index(albl)] for albl in labels_A1]
id_A2 = [prisim_object.layout['ids'][prisim_object.layout['labels'].tolist().index(albl)] for albl in labels_A2]
id_A1 = NP.asarray(id_A1, dtype=int)
id_A2 = NP.asarray(id_A2, dtype=int)
else:
try:
id_A1 = prisim_object.labels['A1'].astype(NP.int)
id_A2 = prisim_object.labels['A2'].astype(NP.int)
except ValueError:
raise ValueError('Could not convert antenna labels to numbers')
ant_1_array = id_A1
ant_2_array = id_A2
ant_1_array = ant_1_array.reshape(1,-1) + NP.zeros(self.infodict['Ntimes'], dtype=NP.int).reshape(-1,1)
ant_2_array = ant_2_array.reshape(1,-1) + NP.zeros(self.infodict['Ntimes'], dtype=NP.int).reshape(-1,1)
self.infodict['ant_1_array'] = ant_1_array.ravel()
self.infodict['ant_2_array'] = ant_2_array.ravel()
self.infodict['baseline_array'] = 2048 * (self.infodict['ant_2_array'] + 1) + (self.infodict['ant_1_array'] + 1) + 2**16
self.infodict['freq_array'] = prisim_object.channels.reshape(self.infodict['Nspws'],-1)
self.infodict['polarization_array'] = NP.asarray([-5]).reshape(self.infodict['Npols']) # stokes 1:4 (I,Q,U,V); circular -1:-4 (RR,LL,RL,LR); linear -5:-8 (XX,YY,XY,YX)
if uvdata_module_found:
if LooseVersion(pyuvdata.__version__)>=LooseVersion('1.3.2'):
self.infodict['integration_time'] = prisim_object.t_acc[0] + NP.zeros(self.infodict['Nblts']) # Replicate to be of shape (Nblts,) to be Baseline-Dependent-Averaging compliant with pyuvdata >= v1.3.2
else:
self.infodict['integration_time'] = prisim_object.t_acc[0]
else:
self.infodict['integration_time'] = prisim_object.t_acc[0] + NP.zeros(self.infodict['Nblts'])
self.infodict['channel_width'] = prisim_object.freq_resolution
# ----- Observation information ------
pointing_center = prisim_object.pointing_center
pointing_coords = prisim_object.pointing_coords
if pointing_coords == 'dircos':
pointing_center_dircos = pointing_center
pointing_center_altaz = GEOM.dircos2altaz(pointing_center_dircos, units='degrees')
pointing_center_hadec = GEOM.altaz2hadec(pointing_center_altaz, prisim_object.latitude, units='degrees')
pointing_center_ra = NP.asarray(prisim_object.lst) - pointing_center_hadec[:,0]
pointing_center_radec = NP.hstack((pointing_center_ra.reshape(-1,1), pointing_center_hadec[:,1].reshape(-1,1)))
pointing_coords = 'radec'
elif pointing_coords == 'altaz':
pointing_center_altaz = pointing_center
pointing_center_hadec = GEOM.altaz2hadec(pointing_center_altaz, prisim_object.latitude, units='degrees')
pointing_center_ra = NP.asarray(prisim_object.lst) - pointing_center_hadec[:,0]
pointing_center_radec = NP.hstack((pointing_center_ra.reshape(-1,1), pointing_center_hadec[:,1].reshape(-1,1)))
pointing_coords = 'radec'
elif pointing_coords == 'hadec':
pointing_center_hadec = pointing_center
pointing_center_ra = NP.asarray(prisim_object.lst) - pointing_center_hadec[:,0]
pointing_center_radec = NP.hstack((pointing_center_ra.reshape(-1,1), pointing_center_hadec[:,1].reshape(-1,1)))
pointing_coords = 'radec'
elif pointing_coords == 'radec':
pointing_center_radec = pointing_center
else:
raise ValueError('Invalid pointing center coordinates')
phase_center = prisim_object.phase_center
phase_center_coords = prisim_object.phase_center_coords
if phase_center_coords == 'dircos':
phase_center_dircos = phase_center
phase_center_altaz = GEOM.dircos2altaz(phase_center_dircos, units='degrees')
phase_center_hadec = GEOM.altaz2hadec(phase_center_altaz, prisim_object.latitude, units='degrees')
phase_center_ra = NP.asarray(prisim_object.lst) - phase_center_hadec[:,0]
phase_center_radec = NP.hstack((phase_center_ra.reshape(-1,1), phase_center_hadec[:,1].reshape(-1,1)))
phase_center_coords = 'radec'
elif phase_center_coords == 'altaz':
phase_center_altaz = phase_center
phase_center_hadec = GEOM.altaz2hadec(phase_center_altaz, prisim_object.latitude, units='degrees')
phase_center_ra = NP.asarray(prisim_object.lst) - phase_center_hadec[:,0]
phase_center_radec = NP.hstack((phase_center_ra.reshape(-1,1), phase_center_hadec[:,1].reshape(-1,1)))
phase_center_coords = 'radec'
elif phase_center_coords == 'hadec':
phase_center_hadec = phase_center
phase_center_ra = NP.asarray(prisim_object.lst) - phase_center_hadec[:,0]
phase_center_radec = NP.hstack((phase_center_ra.reshape(-1,1), phase_center_hadec[:,1].reshape(-1,1)))
phase_center_coords = 'radec'
elif phase_center_coords == 'radec':
phase_center_radec = phase_center
else:
raise ValueError('Invalid phase center coordinates')
pointing_centers = SkyCoord(ra=pointing_center_radec[:,0], dec=pointing_center_radec[:,1], frame='icrs', unit='deg')
phase_centers = SkyCoord(ra=phase_center_radec[:,0], dec=phase_center_radec[:,1], frame='icrs', unit='deg')
pointing_center_obscenter = pointing_centers[int(prisim_object.n_acc/2)]
phase_center_obscenter = phase_centers[int(prisim_object.n_acc/2)]
self.infodict['object_name'] = 'J{0}{1}'.format(pointing_center_obscenter.ra.to_string(sep='', precision=2, pad=True), pointing_center_obscenter.dec.to_string(sep='', precision=2, alwayssign=True, pad=True))
if 'id' not in prisim_object.telescope:
self.infodict['telescope_name'] = 'custom'
else:
self.infodict['telescope_name'] = prisim_object.telescope['id']
self.infodict['instrument'] = self.infodict['telescope_name']
self.infodict['telescope_location'] = NP.asarray([prisim_object.latitude, prisim_object.longitude, prisim_object.altitude])
self.infodict['history'] = 'PRISim'
self.infodict['phase_center_epoch'] = 2000.0
is_phased = NP.allclose(phase_centers.ra.value, phase_centers.ra.value[::-1]) and NP.allclose(phase_centers.dec.value, phase_centers.dec.value[::-1])
self.infodict['is_phased'] = is_phased
# ----- antenna information ------
self.infodict['Nants_data'] = len(set(prisim_object.labels['A1']) | set(prisim_object.labels['A2']))
if prisim_object.layout:
# self.infodict['Nants_telescope'] = len(set(prisim_object.labels['A1']) | set(prisim_object.labels['A2']))
self.infodict['Nants_telescope'] = prisim_object.layout['ids'].size
else:
self.infodict['Nants_telescope'] = self.infodict['Nants_data']
if prisim_object.layout:
self.infodict['antenna_names'] = prisim_object.layout['labels']
self.infodict['antenna_numbers'] = prisim_object.layout['ids']
else:
self.infodict['antenna_names'] = NP.asarray(list(set(prisim_object.labels['A1']) | set(prisim_object.labels['A2'])))
try:
self.infodict['antenna_numbers'] = NP.asarray(list(set(prisim_object.labels['A1']) | set(prisim_object.labels['A2']))).astype(NP.int)
except ValueError:
raise ValueError('Count not convert antenna labels to numbers')
# ----- Optional information ------
self.infodict['dateobs'] = Time(prisim_object.timestamp[0], format='jd', scale='utc').iso
self.infodict['phase_center_ra'] = NP.radians(phase_center_obscenter.ra.value)
self.infodict['phase_center_dec'] = NP.radians(phase_center_obscenter.dec.value)
self.infodict['antenna_positions'] = NP.zeros((self.infodict['Nants_telescope'],3), dtype=NP.float)
if hasattr(prisim_object, 'layout'):
if prisim_object.layout:
if not isinstance(prisim_object.layout['positions'], NP.ndarray):
warnings.warn('Antenna positions must be a numpy array. Proceeding with default values.')
else:
if prisim_object.layout['positions'].shape != (self.infodict['Nants_telescope'],3):
warnings.warn('Number of antennas in prisim_object found to be incompatible with number of unique antennas found. Proceeding with default values.')
else:
x, y, z = GEOM.lla2ecef(*self.infodict['telescope_location'], units='degrees')
telscp_loc = NP.asarray([x[0], y[0], z[0]])
self.infodict['antenna_positions'] = GEOM.enu2ecef(prisim_object.layout['positions'], {'lat': prisim_object.latitude, 'lon': prisim_object.longitude, 'alt': prisim_object.altitude, 'units': 'degrees'}) - telscp_loc.reshape(1,-1)
# self.infodict['antenna_positions'] = UVUtils.ECEF_from_ENU(prisim_object.layout['positions'], NP.radians(prisim_object.latitude), NP.radians(prisim_object.longitude), prisim_object.altitude) - telscp_loc.reshape(1,-1)
self.infodict['gst0'] = 0.0
self.infodict['rdate'] = ''
self.infodict['earth_omega'] = 360.985
self.infodict['dut1'] = 0.0
self.infodict['timesys'] = 'UTC'
#############################################################################
def createUVData(self, datatype='noiseless'):
"""
------------------------------------------------------------------------
Create an instance of class UVData.
Inputs:
datatype [string] Specifies which visibilities are to be used in
creating the UVData object. Accepted values are 'noiseless'
(default) for noiseless pure-sky visibilities, 'noisy' for
sky visibilities to which noise has been added, or 'noise'
for pure noise visibilities.
Outputs:
dataobj [instance of class UVData] an instance of class UVData
containing visibilities of type specified in datatype. This
object can be used to write to some common external formats
such as UVFITS, etc.
------------------------------------------------------------------------
"""
if not uvdata_module_found:
raise ImportError('uvdata module not found')
if datatype not in ['noiseless', 'noisy', 'noise']:
raise ValueError('Invalid input datatype specified')
attributes_of_uvdata = ['Ntimes', 'Nbls', 'Nblts', 'Nfreqs', 'Npols', 'Nspws', 'data_array', 'vis_units', 'nsample_array', 'flag_array', 'spw_array', 'uvw_array', 'time_array', 'lst_array', 'ant_1_array', 'ant_2_array', 'baseline_array', 'freq_array', 'polarization_array', 'integration_time', 'channel_width', 'object_name', 'telescope_name', 'instrument', 'telescope_location', 'history', 'phase_center_epoch', 'is_phased', 'phase_type', 'Nants_data', 'Nants_telescope', 'antenna_names', 'antenna_numbers', 'dateobs', 'phase_center_ra', 'phase_center_dec', 'antenna_positions']
dataobj = UVData()
for attrkey in attributes_of_uvdata:
if attrkey == 'telescope_location':
x, y, z = GEOM.lla2ecef(*self.infodict[attrkey], units='degrees')
setattr(dataobj, attrkey, NP.asarray([x[0],y[0],z[0]]))
elif attrkey == 'phase_type':
if self.infodict['is_phased']:
setattr(dataobj, attrkey, 'phased')
else:
setattr(dataobj, attrkey, 'drift')
elif attrkey != 'data_array':
setattr(dataobj, attrkey, self.infodict[attrkey])
else:
if datatype in self.infodict[attrkey]:
if self.infodict[attrkey][datatype] is not None:
setattr(dataobj, attrkey, self.infodict[attrkey][datatype])
else:
raise KeyError('Data of specified datatype not found in InterferometerData object')
else:
raise KeyError('Specified datatype not found in InterferometerData object')
return dataobj
#############################################################################
def _blnum_to_antnums(self, blnum):
if self.infodict['Nants_telescope'] > 2048:
raise StandardError('error Nants={Nants}>2048 not supported'.format(Nants=self.infodict['Nants_telescope']))
if NP.min(blnum) > 2**16:
i = (blnum - 2**16) % 2048 - 1
j = (blnum - 2**16 - (i + 1)) / 2048 - 1
else:
i = (blnum) % 256 - 1
j = (blnum - (i + 1)) / 256 - 1
return NP.int32(i), NP.int32(j)
#############################################################################
def _antnums_to_blnum(self, i, j, attempt256=False):
# set the attempt256 keyword to True to (try to) use the older
# 256 standard used in many uvfits files
# (will use 2048 standard if there are more than 256 antennas)
i, j = NP.int64((i, j))
if self.infodict['Nants_telescope'] > 2048:
raise StandardError('cannot convert i,j to a baseline index '
'with Nants={Nants}>2048.'
.format(Nants=self.infodict['Nants_telescope']))
if attempt256:
if (NP.max(i) < 255 and NP.max(j) < 255):
return 256 * (j + 1) + (i + 1)
else:
print('Max antnums are {} and {}'.format(NP.max(i), NP.max(j)))
message = 'antnums_to_baseline: found > 256 antennas, using ' \
'2048 baseline indexing. Beware compatibility ' \
'with CASA etc'
warnings.warn(message)
return NP.int64(2048 * (j + 1) + (i + 1) + 2**16)
#############################################################################
def write(self, outfile, datatype='noiseless', fmt='UVFITS',
          uvfits_method=None, overwrite=False):
    """
    ------------------------------------------------------------------------
    Write an instance of class InterferometerData into specified formats.
    Currently writes in UVFITS format

    Inputs:

    outfile     [string] Filename into which data will be written

    datatype    [string] Specifies which visibilities are to be used in
                creating the UVData object. Accepted values are 'noiseless'
                (default) for noiseless pure-sky visibilities, 'noisy' for
                sky visibilities to which noise has been added, or 'noise'
                for pure noise visibilities.

    fmt         [string] Output file format. Currently accepted values are
                'UVFITS' and 'UVH5'. Default='UVFITS'

    uvfits_method
                [string] Method using which UVFITS output is produced. It
                is only used if fmt is set to 'UVFITS'. Accepted values
                are 'uvdata', 'uvfits' or None (default). If set to
                'uvdata', the UVFITS writer in uvdata module is used. If
                set to 'uvfits', the in-house UVFITS writer is used. If
                set to None, first uvdata module will be attempted but if
                it fails then the in-house UVFITS writer will be tried.

    overwrite   [boolean] True indicates overwrite even if a file already
                exists. Default = False (does not overwrite). Beware this
                may not work reliably if uvfits_method is set to None or
                'uvdata' and hence always better to make sure the output
                file does not exist already

    Raises:

    NameError/TypeError/ValueError on invalid inputs; IOError if the
    in-house UVFITS writer fails.
    ------------------------------------------------------------------------
    """
    try:
        outfile
    except NameError:
        raise NameError('Output filename not specified')
    if not isinstance(outfile, str):
        raise TypeError('Output filename must be a string')
    if datatype not in ['noiseless', 'noisy', 'noise']:
        raise ValueError('Invalid input datatype specified')
    if fmt.lower() not in ['uvfits', 'uvh5']:
        raise ValueError('Output format not supported')

    uvdataobj = self.createUVData(datatype=datatype)
    if fmt.lower() == 'uvh5':
        uvdataobj.write_uvh5(outfile, clobber=overwrite)
    if fmt.lower() == 'uvfits':
        write_successful = False
        if uvfits_method not in [None, 'uvfits', 'uvdata']:
            uvfits_method = None
        if (uvfits_method is None) or (uvfits_method == 'uvdata'):
            try:
                uvdataobj.write_uvfits(outfile, spoof_nonessential=True)
            except Exception as xption1:
                write_successful = False
                if uvfits_method == 'uvdata':
                    warnings.warn('Output through UVData module did not work due to the following exception:')
                    raise xption1
                else:
                    warnings.warn('Output through UVData module did not work. Trying with built-in UVFITS writer')
            else:
                write_successful = True
                print('Data successfully written using uvdata module to {0}'.format(outfile))
                return

        # Try with in-house UVFITS writer
        try:
            weights_array = self.infodict['nsample_array'] * NP.where(self.infodict['flag_array'], -1, 1)
            data_array = self.infodict['data_array'][datatype][:, NP.newaxis, NP.newaxis, :, :, :, NP.newaxis]
            weights_array = weights_array[:, NP.newaxis, NP.newaxis, :, :, :, NP.newaxis]
            # uvfits_array_data shape will be (Nblts,1,1,[Nspws],Nfreqs,Npols,3)
            uvfits_array_data = NP.concatenate([data_array.real, data_array.imag, weights_array], axis=6)

            uvw_array_sec = self.infodict['uvw_array'] / FCNST.c
            # jd_midnight = NP.floor(self.infodict['time_array'][0] - 0.5) + 0.5
            tzero = NP.float32(self.infodict['time_array'][0])

            # uvfits convention is that time_array + relevant PZERO = actual JD
            # We are setting PZERO4 = float32(first time of observation)
            time_array = NP.float32(self.infodict['time_array'] - NP.float64(tzero))
            # NOTE: NP.float was removed in NumPy 1.20; the builtin float is
            # the documented replacement
            int_time_array = (NP.zeros_like((time_array), dtype=float) + self.infodict['integration_time'])
            baselines_use = self._antnums_to_blnum(self.infodict['ant_1_array'], self.infodict['ant_2_array'], attempt256=True)

            # Set up dictionaries for populating hdu
            # Note that uvfits antenna arrays are 1-indexed so we add 1
            # to our 0-indexed arrays
            group_parameter_dict = {'UU ': uvw_array_sec[:, 0],
                                    'VV ': uvw_array_sec[:, 1],
                                    'WW ': uvw_array_sec[:, 2],
                                    'DATE ': time_array,
                                    'BASELINE': baselines_use,
                                    'ANTENNA1': self.infodict['ant_1_array'] + 1,
                                    'ANTENNA2': self.infodict['ant_2_array'] + 1,
                                    'SUBARRAY': NP.ones_like(self.infodict['ant_1_array']),
                                    'INTTIM': int_time_array}
            pscal_dict = {'UU ': 1.0, 'VV ': 1.0, 'WW ': 1.0,
                          'DATE ': 1.0, 'BASELINE': 1.0, 'ANTENNA1': 1.0,
                          'ANTENNA2': 1.0, 'SUBARRAY': 1.0, 'INTTIM': 1.0}
            pzero_dict = {'UU ': 0.0, 'VV ': 0.0, 'WW ': 0.0,
                          'DATE ': tzero, 'BASELINE': 0.0, 'ANTENNA1': 0.0,
                          'ANTENNA2': 0.0, 'SUBARRAY': 0.0, 'INTTIM': 0.0}

            # list contains arrays of [u,v,w,date,baseline];
            # each array has shape (Nblts)
            if (NP.max(self.infodict['ant_1_array']) < 255 and
                    NP.max(self.infodict['ant_2_array']) < 255):
                # if the number of antennas is less than 256 then include both the
                # baseline array and the antenna arrays in the group parameters.
                # Otherwise just use the antenna arrays
                parnames_use = ['UU ', 'VV ', 'WW ',
                                'DATE ', 'BASELINE', 'ANTENNA1',
                                'ANTENNA2', 'SUBARRAY', 'INTTIM']
            else:
                parnames_use = ['UU ', 'VV ', 'WW ', 'DATE ',
                                'ANTENNA1', 'ANTENNA2', 'SUBARRAY', 'INTTIM']

            group_parameter_list = [group_parameter_dict[parname] for
                                    parname in parnames_use]
            hdu = fits.GroupData(uvfits_array_data, parnames=parnames_use,
                                 pardata=group_parameter_list, bitpix=-32)
            hdu = fits.GroupsHDU(hdu)

            for i, key in enumerate(parnames_use):
                hdu.header['PSCAL' + str(i + 1) + ' '] = pscal_dict[key]
                hdu.header['PZERO' + str(i + 1) + ' '] = pzero_dict[key]

            # ISO string of first time in self.infodict['time_array']
            # hdu.header['DATE-OBS'] = Time(self.infodict['time_array'][0], scale='utc', format='jd').iso
            hdu.header['DATE-OBS'] = self.infodict['dateobs']
            hdu.header['CTYPE2 '] = 'COMPLEX '
            hdu.header['CRVAL2 '] = 1.0
            hdu.header['CRPIX2 '] = 1.0
            hdu.header['CDELT2 '] = 1.0
            hdu.header['CTYPE3 '] = 'STOKES '
            hdu.header['CRVAL3 '] = self.infodict['polarization_array'][0]
            hdu.header['CRPIX3 '] = 1.0
            try:
                hdu.header['CDELT3 '] = NP.diff(self.infodict['polarization_array'])[0]
            except(IndexError):
                # single polarization: no spacing to derive
                hdu.header['CDELT3 '] = 1.0
            hdu.header['CTYPE4 '] = 'FREQ '
            hdu.header['CRVAL4 '] = self.infodict['freq_array'][0, 0]
            hdu.header['CRPIX4 '] = 1.0
            hdu.header['CDELT4 '] = NP.diff(self.infodict['freq_array'][0])[0]
            hdu.header['CTYPE5 '] = 'IF '
            hdu.header['CRVAL5 '] = 1.0
            hdu.header['CRPIX5 '] = 1.0
            hdu.header['CDELT5 '] = 1.0
            hdu.header['CTYPE6 '] = 'RA'
            hdu.header['CRVAL6 '] = NP.degrees(self.infodict['phase_center_ra'])
            hdu.header['CTYPE7 '] = 'DEC'
            hdu.header['CRVAL7 '] = NP.degrees(self.infodict['phase_center_dec'])
            hdu.header['BUNIT '] = self.infodict['vis_units']
            hdu.header['BSCALE '] = 1.0
            hdu.header['BZERO '] = 0.0
            hdu.header['OBJECT '] = self.infodict['object_name']
            hdu.header['TELESCOP'] = self.infodict['telescope_name']
            hdu.header['LAT '] = self.infodict['telescope_location'][0]
            hdu.header['LON '] = self.infodict['telescope_location'][1]
            hdu.header['ALT '] = self.infodict['telescope_location'][2]
            hdu.header['INSTRUME'] = self.infodict['instrument']
            hdu.header['EPOCH '] = float(self.infodict['phase_center_epoch'])
            for line in self.infodict['history'].splitlines():
                hdu.header.add_history(line)

            # ADD the ANTENNA table
            staxof = NP.zeros(self.infodict['Nants_telescope'])
            # 0 specifies alt-az, 6 would specify a phased array
            mntsta = NP.zeros(self.infodict['Nants_telescope'])
            # beware, X can mean just about anything
            poltya = NP.full((self.infodict['Nants_telescope']), 'X', dtype=NP.object_)
            polaa = [90.0] + NP.zeros(self.infodict['Nants_telescope'])
            poltyb = NP.full((self.infodict['Nants_telescope']), 'Y', dtype=NP.object_)
            polab = [0.0] + NP.zeros(self.infodict['Nants_telescope'])
            col1 = fits.Column(name='ANNAME', format='8A',
                               array=self.infodict['antenna_names'])
            col2 = fits.Column(name='STABXYZ', format='3D',
                               array=self.infodict['antenna_positions'])
            # convert to 1-indexed from 0-indexed indicies
            col3 = fits.Column(name='NOSTA', format='1J',
                               array=self.infodict['antenna_numbers'] + 1)
            col4 = fits.Column(name='MNTSTA', format='1J', array=mntsta)
            col5 = fits.Column(name='STAXOF', format='1E', array=staxof)
            col6 = fits.Column(name='POLTYA', format='1A', array=poltya)
            col7 = fits.Column(name='POLAA', format='1E', array=polaa)
            # col8 = fits.Column(name='POLCALA', format='3E', array=polcala)
            col9 = fits.Column(name='POLTYB', format='1A', array=poltyb)
            col10 = fits.Column(name='POLAB', format='1E', array=polab)
            # col11 = fits.Column(name='POLCALB', format='3E', array=polcalb)
            # note ORBPARM is technically required, but we didn't put it in
            cols = fits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col9, col10])
            ant_hdu = fits.BinTableHDU.from_columns(cols)
            ant_hdu.header['EXTNAME'] = 'AIPS AN'
            ant_hdu.header['EXTVER'] = 1
            # write XYZ coordinates if not already defined
            ant_hdu.header['ARRAYX'] = self.infodict['telescope_location'][0]
            ant_hdu.header['ARRAYY'] = self.infodict['telescope_location'][1]
            ant_hdu.header['ARRAYZ'] = self.infodict['telescope_location'][2]
            # ant_hdu.header['FRAME'] = 'ITRF'
            ant_hdu.header['FRAME'] = None
            ant_hdu.header['GSTIA0'] = self.infodict['gst0']
            ant_hdu.header['FREQ'] = self.infodict['freq_array'][0, 0]
            ant_hdu.header['RDATE'] = self.infodict['rdate']
            ant_hdu.header['UT1UTC'] = self.infodict['dut1']
            ant_hdu.header['TIMSYS'] = self.infodict['timesys']
            if self.infodict['timesys'] == 'IAT':
                warnings.warn('This file has an "IAT" time system. Files of '
                              'this type are not properly supported')
            ant_hdu.header['ARRNAM'] = self.infodict['telescope_name']
            ant_hdu.header['NO_IF'] = self.infodict['Nspws']
            ant_hdu.header['DEGPDY'] = self.infodict['earth_omega']
            # ant_hdu.header['IATUTC'] = 35.
            # set mandatory parameters which are not supported by this object
            # (or that we just don't understand)
            ant_hdu.header['NUMORB'] = 0
            # note: Bart had this set to 3. We've set it 0 after aips 117. -jph
            ant_hdu.header['NOPCAL'] = 0
            ant_hdu.header['POLTYPE'] = 'X-Y LIN'
            # note: we do not support the concept of "frequency setups"
            # -- lists of spws given in a SU table.
            ant_hdu.header['FREQID'] = -1
            # if there are offsets in images, this could be the culprit
            ant_hdu.header['POLARX'] = 0.0
            ant_hdu.header['POLARY'] = 0.0
            ant_hdu.header['DATUTC'] = 0  # ONLY UTC SUPPORTED
            # we always output right handed coordinates
            ant_hdu.header['XYZHAND'] = 'RIGHT'
            # ADD the FQ table
            # skipping for now and limiting to a single spw
            # write the file
            hdulist = fits.HDUList(hdus=[hdu, ant_hdu])
            hdulist.writeto(outfile, overwrite=overwrite)
        except Exception as xption2:
            print(xption2)
            raise IOError('Could not write to UVFITS file')
        else:
            write_successful = True
            print('Data successfully written using in-house uvfits writer to {0}'.format(outfile))
            return
#################################################################################
| 579,057 | 57.526177 | 587 |
py
|
PRISim
|
PRISim-master/prisim/baseline_delay_horizon.py
|
import numpy as NP
import scipy.constants as FCNST
from astroutils import geometry as GEOM
#################################################################################
def delay_envelope(bl, dircos=None, units='mks'):
    """
    ---------------------------------------------------------------------------
    Estimates the delay envelope determined by the sky horizon for the given
    baseline vectors and provides the shift in these envelopes for given
    phase centers specified in direction cosines.

    Inputs:

    bl:      E, N, and U components of baseline vectors in a Mx3 numpy
             array in local ENU coordinates

    dircos:  Nx3 (direction cosines) numpy array of sky positions that will
             act as phase centers. If None (default), the zenith, whose
             direction cosines are (l,m,n)=(0,0,1), is assumed.

    units:   'mks' or 'cgs' units. Default='mks'

    Outputs:

    delaymatrix: NxMx2 matrix. delaymatrix[:,:,0] contains the maximum delay if
                 there was no shift due to non-zenith phase center.
                 delaymatrix[:,:,1] contains the delay shift. To determine the
                 minimum delay, use -delaymatrix[:,:,1]-delaymatrix[:,:,0]. To
                 determine effective maximum delay, use
                 delaymatrix[:,:,0]-delaymatrix[:,:,1]. Minimum delay without
                 shift is -delaymatrix[:,:,0]
    ---------------------------------------------------------------------------
    """
    if bl is None:
        raise NameError('No baseline(s) provided. Aborting delay_envelope().')

    if dircos is None:
        # Previously the zenith fallback was unreachable (dircos was a
        # required argument) and used NP.zeros(3), which is not a unit
        # vector and would fail the magnitude check below. The correct
        # zenith direction cosines are (0,0,1).
        print('No sky position in direction cosine units provided. Assuming zenith for phase center in delay_envelope().')
        dircos = NP.asarray([0.0, 0.0, 1.0]).reshape(1,3)

    if (units != 'mks') and (units != 'cgs'):
        print('Units should be specified to be one of MKS or CGS. Default=MKS')
        print('Proceeding with MKS units.')
        units = 'mks'

    # Set the speed of light in MKS or CGS units
    if units == 'mks': c = FCNST.c
    elif units == 'cgs': c = FCNST.c * 1e2

    if len(bl.shape) == 1: bl = bl.reshape(1,len(bl))
    if len(dircos.shape) == 1: dircos = dircos.reshape(1,len(dircos))

    blshape = bl.shape
    dcshape = dircos.shape

    # Trim both inputs to a common number of coordinate columns (at most 3)
    bl = bl[:,:min(blshape[1],dcshape[1])]
    dircos = dircos[:,:min(blshape[1],dcshape[1])]

    if blshape[1] > min(3,blshape[1],dcshape[1]):
        bl = bl[:,:min(3,blshape[1],dcshape[1])]

    if dcshape[1] > min(3,blshape[1],dcshape[1]):
        dircos = dircos[:,:min(3,blshape[1],dcshape[1])]

    blshape = bl.shape
    dcshape = dircos.shape
    eps = 1.0e-10

    if NP.any(NP.sqrt(NP.sum(dircos**2,axis=1)) > 1.0+eps):
        raise ValueError('Certain direction cosines exceed unit magnitude. Check inputs.')
    elif dcshape[1] == 3:
        if NP.any(NP.absolute(NP.sqrt(NP.sum(dircos**2,axis=1)) - 1.0) > eps):
            raise ValueError('Magnitude of vector of direction cosines have to equal unity. Check inputs.')

    if NP.any(dircos[:,2] < 0.0):
        raise ValueError('Direction cosines should lie on the upper hemisphere. Check inputs.')

    # Maximum geometric delay |b|/c for each baseline, replicated per phase
    # center: shape (N, M)
    delaymatrix_max = NP.repeat(NP.sqrt(NP.sum(bl.T**2,axis=0)).reshape(1,blshape[0]), dcshape[0], axis=0)/c

    # Delay shift b.s/c introduced by phasing to each phase center: shape (N, M)
    delaymatrix_shift = NP.dot(dircos, bl.T)/c

    delaymatrix = NP.dstack((delaymatrix_max, delaymatrix_shift))

    return delaymatrix
#################################################################################
def horizon_delay_limits(bl, dircos, units='mks'):
    """
    ---------------------------------------------------------------------------
    Estimates the delay envelope determined by the sky horizon for given
    baseline(s) for the phase centers specified by sky positions in direction
    cosines.

    Inputs:

    bl:      E, N, and U components of baseline vectors in a Mx3 numpy
             array in local ENU coordinates

    dircos:  Nx3 (direction cosines) numpy array of sky positions

    units:   'mks' or 'cgs' units. Default='mks'

    Outputs:

    horizon_envelope:
         NxMx2 matrix. horizon_envelope[:,:,0] contains the minimum delay
         after accounting for (any) non-zenith phase center.
         horizon_envelope[:,:,1] contains the maximum delay after accounting
         for (any) non-zenith phase center.
    ---------------------------------------------------------------------------
    """
    # Pass the caller's units through; previously this was hard-coded to
    # 'mks', silently ignoring the units argument
    delay_matrix = delay_envelope(bl, dircos, units=units)
    # Lower/upper horizon limits: -(max+shift) and (max-shift)
    horizon_envelope = NP.dstack((-delay_matrix[:,:,0]-delay_matrix[:,:,1], delay_matrix[:,:,0]-delay_matrix[:,:,1]))
    return horizon_envelope
################################################################################
def geometric_delay(baselines, skypos, altaz=False, dircos=False, hadec=True,
                    units='mks', latitude=None):
    """
    ---------------------------------------------------------------------
    Estimates the geometric delays matrix for different baselines from
    different sky positions.

    Inputs:

    baselines: x, y, and z components of baseline vectors in a Mx3 numpy
               array

    skypos: Nx2 (Alt-Az or HA-Dec) or Nx3 (direction cosines) numpy array
            of sky positions

    altaz: [Boolean flag, default=False] If True, skypos is in Alt-Az
           coordinates system

    hadec: [Boolean flag, default=True] If True, skypos is in HA-Dec
           coordinates system

    dircos: [Boolean flag, default=False] If True, skypos is in direction
            cosines coordinates system

    units: Units of baselines. Default='mks'. Alternative is 'cgs'.

    latitude: Latitude of the observatory. Required if hadec is True.

    Outputs:

    geometric delays [NxM numpy array] Geometric delay for every combination
                     of baselines and skypos.
    ---------------------------------------------------------------------
    """
    if (baselines is None) or (skypos is None):
        raise NameError('baselines and/or skypos not defined in geometric_delay().')

    if (altaz)+(dircos)+(hadec) != 1:
        raise ValueError('One and only one of altaz, dircos, hadec must be set to True.')

    if hadec and (latitude is None):
        raise ValueError('Latitude must be specified when skypos is in HA-Dec format.')

    if (units != 'mks') and (units != 'cgs'):
        print('Units should be specified to be one of MKS or CGS. Default=MKS')
        print('Proceeding with MKS units.')
        units = 'mks'

    if not isinstance(baselines, NP.ndarray):
        raise TypeError('baselines should be a Nx3 numpy array in geometric_delay().')

    if len(baselines.shape) == 1:
        baselines = baselines.reshape(1,-1)

    # Pad missing components with zeros so baselines is Nx3. NP.hstack takes
    # a single tuple of arrays (the original passed two positional args,
    # raising TypeError), and the number of rows is shape[0], not size.
    if baselines.shape[1] == 1:
        baselines = NP.hstack((baselines, NP.zeros((baselines.shape[0],2))))
    elif baselines.shape[1] == 2:
        baselines = NP.hstack((baselines, NP.zeros((baselines.shape[0],1))))
    elif baselines.shape[1] > 3:
        baselines = baselines[:,:3]

    if altaz or hadec:
        if len(skypos.shape) < 2:
            if skypos.size != 2:
                raise ValueError('Sky position in altitude-azimuth or HA-Dec should consist of 2 elements.')
            else:
                skypos = skypos.reshape(1,-1)
        elif len(skypos.shape) > 2:
            raise ValueError('Sky positions should be a Nx2 numpy array if using altitude-azimuth of HA-Dec.')
        else:
            if skypos.shape[1] != 2:
                raise ValueError('Sky positions should be a Nx2 numpy array if using altitude-azimuth of HA-Dec.')

        if altaz:
            dc = GEOM.altaz2dircos(skypos, 'degrees')
        else:
            # HA-Dec is first converted to Alt-Az using the site latitude
            dc = GEOM.altaz2dircos(GEOM.hadec2altaz(skypos, latitude, 'degrees'), 'degrees')
    else:
        if len(skypos.shape) < 2:
            if skypos.size != 3:
                raise ValueError('Sky position in direction cosines should consist of 3 elements.')
            else:
                skypos = skypos.reshape(1,-1)
        elif len(skypos.shape) > 2:
            raise ValueError('Sky positions should be a Nx3 numpy array if using direction cosines.')
        else:
            if skypos.shape[1] != 3:
                raise ValueError('Sky positions should be a Nx3 numpy array if using direction cosines.')

        dc = skypos

    # Set the speed of light in MKS or CGS units
    if units == 'mks': c = FCNST.c
    elif units == 'cgs': c = FCNST.c * 1e2

    # Geometric delay tau = b.s/c for every (sky position, baseline) pair
    geometric_delays = NP.dot(dc, baselines.T)/c
    return geometric_delays
##########################################################################
| 9,054 | 36.110656 | 122 |
py
|
PRISim
|
PRISim-master/prisim/bispectrum_phase.py
|
from __future__ import division
import glob
import numpy as NP
from functools import reduce
import numpy.ma as MA
import progressbar as PGB
import h5py
import healpy as HP
import warnings
import copy
import astropy.cosmology as CP
from astropy.time import Time, TimeDelta
from astropy.io import fits
from astropy import units as U
from astropy import constants as FCNST
from scipy import interpolate
from astroutils import DSP_modules as DSP
from astroutils import constants as CNST
from astroutils import nonmathops as NMO
from astroutils import mathops as OPS
from astroutils import lookup_operations as LKP
import prisim
from prisim import interferometry as RI
from prisim import primary_beams as PB
from prisim import delay_spectrum as DS
# Optional dependency: record whether pyuvdata's UVBeam class is importable
# so downstream code can branch on beam-file support
try:
    from pyuvdata import UVBeam
except ImportError:
    uvbeam_module_found = False
else:
    uvbeam_module_found = True
# Filesystem location of the installed prisim package (with trailing slash)
prisim_path = prisim.__path__[0]+'/'
cosmoPlanck15 = CP.Planck15 # Planck 2015 cosmology
cosmo100 = cosmoPlanck15.clone(name='Modified Planck 2015 cosmology with h=1.0', H0=100.0) # Modified Planck 2015 cosmology with h=1.0, H= 100 km/s/Mpc
################################################################################
def write_PRISim_bispectrum_phase_to_npz(infile_prefix, outfile_prefix,
                                         triads=None, bltriplet=None,
                                         hdf5file_prefix=None, infmt='npz',
                                         datakey='noisy', blltol=0.1):
    """
    ----------------------------------------------------------------------------
    Write closure phases computed in a PRISim simulation to a NPZ file with
    appropriate format for further analysis.

    Inputs:

    infile_prefix
                [string] HDF5 file or NPZ file created by a PRISim simulation or
                its replication respectively. If infmt is specified as 'hdf5',
                then hdf5file_prefix will be ignored and all the observing
                info will be read from here. If infmt is specified as 'npz',
                then hdf5file_prefix needs to be specified in order to read the
                observing parameters.

    triads      [list or numpy array or None] Antenna triads given as a list of
                3-element lists or a ntriads x 3 array. Each element in the
                inner list is an antenna label. They will be converted to
                strings internally. If set to None, then all triads determined
                by bltriplet will be used. If specified, then inputs in blltol
                and bltriplet will be ignored.

    bltriplet   [numpy array or None] 3x3 numpy array containing the 3 baseline
                vectors. The first axis denotes the three baselines, the second
                axis denotes the East, North, Up coordinates of the baseline
                vector. Units are in m. Will be used only if triads is set to
                None.

    outfile_prefix
                [string] Prefix of the NPZ file. It will be appended by
                '_noiseless', '_noisy', and '_noise' and further by extension
                '.npz'

    infmt       [string] Format of the input file containing visibilities.
                Accepted values are 'npz' (default), and 'hdf5'. If infmt is
                specified as 'npz', then hdf5file_prefix also needs to be
                specified for reading the observing parameters

    datakey     [string] Specifies which -- 'noiseless', 'noisy' (default), or
                'noise' -- visibilities are to be written to the output. If set
                to None, and infmt is 'hdf5', then all three sets of
                visibilities are written. The datakey string will also be added
                as a suffix in the output file.

    blltol      [scalar] Baseline length tolerance (in m) for matching baseline
                vectors in triads. It must be a scalar. Default = 0.1 m. Will
                be used only if triads is set to None and bltriplet is to be
                used.
    ----------------------------------------------------------------------------
    """
    if not isinstance(infile_prefix, str):
        raise TypeError('Input infile_prefix must be a string')
    if not isinstance(outfile_prefix, str):
        raise TypeError('Input outfile_prefix must be a string')
    if (triads is None) and (bltriplet is None):
        raise ValueError('One of triads or bltriplet must be set')
    if triads is None:
        if not isinstance(bltriplet, NP.ndarray):
            raise TypeError('Input bltriplet must be a numpy array')
        if not isinstance(blltol, (int,float)):
            raise TypeError('Input blltol must be a scalar')
        if bltriplet.ndim != 2:
            raise ValueError('Input bltriplet must be a 2D numpy array')
        if bltriplet.shape[0] != 3:
            raise ValueError('Input bltriplet must contain three baseline vectors')
        if bltriplet.shape[1] != 3:
            raise ValueError('Input bltriplet must contain baseline vectors along three corrdinates in the ENU frame')
    else:
        if not isinstance(triads, (list, NP.ndarray)):
            raise TypeError('Input triads must be a list or numpy array')
        triads = NP.asarray(triads).astype(str)

    if not isinstance(infmt, str):
        raise TypeError('Input infmt must be a string')
    if infmt.lower() not in ['npz', 'hdf5']:
        raise ValueError('Input file format must be npz or hdf5')
    if infmt.lower() == 'npz':
        if not isinstance(hdf5file_prefix, str):
            raise TypeError('If infmt is npz, then hdf5file_prefix needs to be specified for observing parameters information')

    if datakey is None:
        datakey = ['noisy']
    if isinstance(datakey, str):
        datakey = [datakey]
    elif not isinstance(datakey, list):
        raise TypeError('Input datakey must be a list')
    for dkey in datakey:
        if dkey.lower() not in ['noiseless', 'noisy', 'noise']:
            raise ValueError('Invalid input found in datakey')

    if infmt.lower() == 'hdf5':
        fullfnames_with_extension = glob.glob(infile_prefix + '*' + infmt.lower())
        fullfnames_without_extension = [fname.split('.hdf5')[0] for fname in fullfnames_with_extension]
    else:
        fullfnames_without_extension = [infile_prefix]
    if len(fullfnames_without_extension) == 0:
        raise IOError('No input files found with pattern {0}'.format(infile_prefix))

    try:
        if infmt.lower() == 'hdf5':
            simvis = RI.InterferometerArray(None, None, None, init_file=fullfnames_without_extension[0])
        else:
            simvis = RI.InterferometerArray(None, None, None, init_file=hdf5file_prefix)
    except Exception:
        # narrowed from a bare except: so KeyboardInterrupt/SystemExit pass through
        raise IOError('Input PRISim file does not contain a valid PRISim output')

    latitude = simvis.latitude
    longitude = simvis.longitude
    location = ('{0:.5f}d'.format(longitude), '{0:.5f}d'.format(latitude))
    last = simvis.lst / 15.0 / 24.0  # from degrees to fraction of day
    last = last.reshape(-1,1)
    daydata = NP.asarray(simvis.timestamp[0]).ravel()

    if infmt.lower() == 'npz':
        simvisinfo = NP.load(fullfnames_without_extension[0]+'.'+infmt.lower())
        skyvis = simvisinfo['noiseless'][0,...]
        vis = simvisinfo['noisy']
        noise = simvisinfo['noise']
        n_realize = vis.shape[0]
    else:
        n_realize = len(fullfnames_without_extension)

    cpdata = {}
    outfile = {}
    for fileind in range(n_realize):
        if infmt.lower() == 'npz':
            simvis.vis_freq = vis[fileind,...]
            simvis.vis_noise_freq = noise[fileind,...]
        else:
            simvis = RI.InterferometerArray(None, None, None, init_file=fullfnames_without_extension[fileind])

        if fileind == 0:
            if triads is None:
                # Determine all triads in the simulation and keep only those
                # whose baseline vectors match the requested bltriplet
                triads, bltriplets = simvis.getThreePointCombinations(unique=False)
                # triads = NP.asarray(prisim_BSP_info['antenna_triplets']).reshape(-1,3)
                # bltriplets = NP.asarray(prisim_BSP_info['baseline_triplets'])
                triads = NP.asarray(triads).reshape(-1,3)
                bltriplets = NP.asarray(bltriplets)
                matchinfo = LKP.find_NN(bltriplet, bltriplets.reshape(-1,3), distance_ULIM=blltol)
                revind = []
                for blnum in NP.arange(bltriplet.shape[0]):
                    if len(matchinfo[0][blnum]) == 0:
                        revind += [blnum]
                if len(revind) > 0:
                    # Retry with the unmatched baseline vectors reversed
                    # (NP.float alias removed in NumPy 1.20; use builtin float)
                    flip_factor = NP.ones(3, dtype=float)
                    flip_factor[NP.array(revind)] = -1
                    rev_bltriplet = bltriplet * flip_factor.reshape(-1,1)
                    matchinfo = LKP.find_NN(rev_bltriplet, bltriplets.reshape(-1,3), distance_ULIM=blltol)
                    for blnum in NP.arange(bltriplet.shape[0]):
                        if len(matchinfo[0][blnum]) == 0:
                            raise ValueError('Some baselines in the triplet are not found in the model triads')
                triadinds = []
                for blnum in NP.arange(bltriplet.shape[0]):
                    triadind, blind = NP.unravel_index(NP.asarray(matchinfo[0][blnum]), (bltriplets.shape[0], bltriplets.shape[1]))
                    triadinds += [triadind]

                triadind_intersection = NP.intersect1d(triadinds[0], NP.intersect1d(triadinds[1], triadinds[2]))
                if triadind_intersection.size == 0:
                    raise ValueError('Specified triad not found in the PRISim model. Try other permutations of the baseline vectors and/or reverse individual baseline vectors in the triad before giving up.')

                triads = triads[triadind_intersection,:]
                selected_bltriplets = bltriplets[triadind_intersection,:,:].reshape(-1,3,3)

        prisim_BSP_info = simvis.getClosurePhase(antenna_triplets=triads.tolist(),
                                                 delay_filter_info=None,
                                                 specsmooth_info=None,
                                                 spectral_window_info=None,
                                                 unique=False)
        if fileind == 0:
            triads = NP.asarray(prisim_BSP_info['antenna_triplets']).reshape(-1,3)  # Re-establish the triads returned after the first iteration (to accunt for any order flips)

        for outkey in datakey:
            if fileind == 0:
                outfile[outkey] = outfile_prefix + '_{0}.npz'.format(outkey)
            if outkey == 'noiseless':
                if fileind == 0:
                    # cpdata = prisim_BSP_info['closure_phase_skyvis'][triadind_intersection,:,:][NP.newaxis,...]
                    cpdata[outkey] = prisim_BSP_info['closure_phase_skyvis'][NP.newaxis,...]
                else:
                    # cpdata = NP.concatenate((cpdata, prisim_BSP_info['closure_phase_skyvis'][triadind_intersection,:,:][NP.newaxis,...]), axis=0)
                    cpdata[outkey] = NP.concatenate((cpdata[outkey], prisim_BSP_info['closure_phase_skyvis'][NP.newaxis,...]), axis=0)
            if outkey == 'noisy':
                if fileind == 0:
                    # cpdata = prisim_BSP_info['closure_phase_vis'][triadind_intersection,:,:][NP.newaxis,...]
                    cpdata[outkey] = prisim_BSP_info['closure_phase_vis'][NP.newaxis,...]
                else:
                    # cpdata = NP.concatenate((cpdata, prisim_BSP_info['closure_phase_vis'][triadind_intersection,:,:][NP.newaxis,...]), axis=0)
                    cpdata[outkey] = NP.concatenate((cpdata[outkey], prisim_BSP_info['closure_phase_vis'][NP.newaxis,...]), axis=0)
            if outkey == 'noise':
                if fileind == 0:
                    # cpdata = prisim_BSP_info['closure_phase_noise'][triadind_intersection,:,:]
                    cpdata[outkey] = prisim_BSP_info['closure_phase_noise'][NP.newaxis,:,:]
                else:
                    # cpdata = NP.concatenate((cpdata, prisim_BSP_info['closure_phase_noise'][triadind_intersection,:,:][NP.newaxis,...]), axis=0)
                    cpdata[outkey] = NP.concatenate((cpdata[outkey], prisim_BSP_info['closure_phase_noise'][NP.newaxis,...]), axis=0)

    for outkey in datakey:
        # Reorder to (nlst, ndays, ntriads, nchan) expected by loadnpz()
        cpdata[outkey] = NP.rollaxis(cpdata[outkey], 3, start=0)
        # NP.bool alias removed in NumPy 1.20; use builtin bool
        flagsdata = NP.zeros(cpdata[outkey].shape, dtype=bool)
        NP.savez_compressed(outfile[outkey], closures=cpdata[outkey],
                            flags=flagsdata, triads=triads,
                            last=last+NP.zeros((1,n_realize)),
                            days=daydata+NP.arange(n_realize))
################################################################################
def loadnpz(npzfile, longitude=0.0, latitude=0.0, lst_format='fracday'):
    """
    ----------------------------------------------------------------------------
    Read an input NPZ file containing closure phase data output from CASA and
    return a dictionary

    Inputs:

    npzfile     [string] Input NPZ file including full path containing closure
                phase data. It must have the following files/keys inside:
                'closures'  [numpy array] Closure phase (radians). It is of
                            shape (nlst,ndays,ntriads,nchan)
                'triads'    [numpy array] Array of triad tuples, of shape
                            (ntriads,3)
                'flags'     [numpy array] Array of flags (boolean), of shape
                            (nlst,ndays,ntriads,nchan)
                'last'      [numpy array] Array of LST for each day (CASA units
                            which is MJD+6713). Shape is (nlst,ndays)
                'days'      [numpy array] Array of days, shape is (ndays,)
                'averaged_closures'
                            [numpy array] optional array of closure phases
                            averaged across days. Shape is (nlst,ntriads,nchan)
                'std_dev_lst'
                            [numpy array] optional array of standard deviation
                            of closure phases across days. Shape is
                            (nlst,ntriads,nchan)
                'std_dev_triads'
                            [numpy array] optional array of standard deviation
                            of closure phases across triads. Shape is
                            (nlst,ndays,nchan)

    latitude    [scalar int or float] Latitude of site (in degrees).
                Default=0.0 deg.

    longitude   [scalar int or float] Longitude of site (in degrees).
                Default=0.0 deg.

    lst_format  [string] Specifies the format/units in which the 'last' key
                is to be interpreted. If set to 'hourangle', the LST is in
                units of hour angle. If set to 'fracday', the fractional
                portion of the 'last' value is the LST in units of days.

    Output:

    cpinfo      [dictionary] Contains one top level keys, namely, 'raw'
                Under key 'raw' which holds a dictionary, the subkeys
                include 'cphase' (nlst,ndays,ntriads,nchan),
                'triads' (ntriads,3), 'lst' (nlst,ndays), and 'flags'
                (nlst,ndays,ntriads,nchan), and some other optional keys
    ----------------------------------------------------------------------------
    """
    npzdata = NP.load(npzfile)
    cpdata = npzdata['closures']
    triadsdata = npzdata['triads']
    flagsdata = npzdata['flags']
    location = ('{0:.5f}d'.format(longitude), '{0:.5f}d'.format(latitude))
    daydata = Time(npzdata['days'].astype(NP.float64), scale='utc', format='jd', location=location)
    # lstdata = Time(npzdata['last'].astype(NP.float64) - 6713.0, scale='utc', format='mjd', location=('+21.4278d', '-30.7224d')).sidereal_time('apparent') # Subtract 6713 based on CASA convention to obtain MJD
    if lst_format.lower() == 'hourangle':
        lstHA = npzdata['last']
        lstday = daydata.reshape(1,-1) + TimeDelta(NP.zeros(lstHA.shape[0]).reshape(-1,1)*U.s)
    elif lst_format.lower() == 'fracday':
        lstfrac, lstint = NP.modf(npzdata['last'])
        lstday = Time(lstint.astype(NP.float64) - 6713.0, scale='utc', format='mjd', location=location)  # Subtract 6713 based on CASA convention to obtain MJD
        lstHA = lstfrac * 24.0  # in hours
    else:
        raise ValueError('Input lst_format invalid')

    cp = cpdata.astype(NP.float64)
    # NP.bool alias removed in NumPy 1.20; use builtin bool
    flags = flagsdata.astype(bool)

    cpinfo = {}
    datapool = ['raw']
    for dpool in datapool:
        cpinfo[dpool] = {}
        if dpool == 'raw':
            qtys = ['cphase', 'triads', 'flags', 'lst', 'lst-day', 'days', 'dayavg', 'std_triads', 'std_lst']
            for qty in qtys:
                if qty == 'cphase':
                    cpinfo[dpool][qty] = NP.copy(cp)
                elif qty == 'triads':
                    cpinfo[dpool][qty] = NP.copy(triadsdata)
                elif qty == 'flags':
                    cpinfo[dpool][qty] = NP.copy(flags)
                elif qty == 'lst':
                    cpinfo[dpool][qty] = NP.copy(lstHA)
                elif qty == 'lst-day':
                    cpinfo[dpool][qty] = NP.copy(lstday.jd)
                elif qty == 'days':
                    cpinfo[dpool][qty] = NP.copy(daydata.jd)
                elif qty == 'dayavg':
                    # Optional keys: read straight from the NPZ. The previous
                    # code referenced undefined locals (cp_dayavg etc.),
                    # raising NameError whenever these keys were present.
                    if 'averaged_closures' in npzdata:
                        cpinfo[dpool][qty] = NP.copy(npzdata['averaged_closures'])
                elif qty == 'std_triads':
                    # Key name fixed to 'std_dev_triads' to match the
                    # documented NPZ contents (was 'std_dev_triad')
                    if 'std_dev_triads' in npzdata:
                        cpinfo[dpool][qty] = NP.copy(npzdata['std_dev_triads'])
                elif qty == 'std_lst':
                    if 'std_dev_lst' in npzdata:
                        cpinfo[dpool][qty] = NP.copy(npzdata['std_dev_lst'])

    return cpinfo
################################################################################
def npz2hdf5(npzfile, hdf5file, longitude=0.0, latitude=0.0,
             lst_format='fracday'):
    """
    ----------------------------------------------------------------------------
    Read an input NPZ file containing closure phase data output from CASA and
    save it to HDF5 format

    Inputs:

    npzfile     [string] Input NPZ file including full path containing closure
                phase data. It must have the following files/keys inside:
                'closures'  [numpy array] Closure phase (radians). It is of
                            shape (nlst,ndays,ntriads,nchan)
                'triads'    [numpy array] Array of triad tuples, of shape
                            (ntriads,3)
                'flags'     [numpy array] Array of flags (boolean), of shape
                            (nlst,ndays,ntriads,nchan)
                'last'      [numpy array] Array of LST for each day (CASA units
                            which is MJD+6713). Shape is (nlst,ndays)
                'days'      [numpy array] Array of days, shape is (ndays,)
                'averaged_closures'
                            [numpy array] optional array of closure phases
                            averaged across days. Shape is (nlst,ntriads,nchan)
                'std_dev_lst'
                            [numpy array] optional array of standard deviation
                            of closure phases across days. Shape is
                            (nlst,ntriads,nchan)
                'std_dev_triads'
                            [numpy array] optional array of standard deviation
                            of closure phases across triads. Shape is
                            (nlst,ndays,nchan)

    hdf5file    [string] Output HDF5 file including full path.

    latitude    [scalar int or float] Latitude of site (in degrees).
                Default=0.0 deg.

    longitude   [scalar int or float] Longitude of site (in degrees).
                Default=0.0 deg.

    lst_format  [string] Specifies the format/units in which the 'last' key
                is to be interpreted. If set to 'hourangle', the LST is in
                units of hour angle. If set to 'fracday', the fractional
                portion of the 'last' value is the LST in units of days.

    Raises ValueError if lst_format is neither 'hourangle' nor 'fracday'.
    ----------------------------------------------------------------------------
    """
    npzdata = NP.load(npzfile)
    cpdata = npzdata['closures']
    triadsdata = npzdata['triads']
    flagsdata = npzdata['flags']
    location = ('{0:.5f}d'.format(longitude), '{0:.5f}d'.format(latitude))
    daydata = Time(npzdata['days'].astype(NP.float64), scale='utc', format='jd', location=location)
    if lst_format.lower() == 'hourangle':
        lstHA = npzdata['last']
        lstday = daydata.reshape(1,-1) + TimeDelta(NP.zeros(lstHA.shape[0]).reshape(-1,1)*U.s)
    elif lst_format.lower() == 'fracday':
        lstfrac, lstint = NP.modf(npzdata['last'])
        # Subtract 6713 based on CASA convention to obtain MJD
        lstday = Time(lstint.astype(NP.float64) - 6713.0, scale='utc', format='mjd', location=location)
        lstHA = lstfrac * 24.0 # in hours
    else:
        raise ValueError('Input lst_format invalid')
    cp = cpdata.astype(NP.float64)
    # NP.bool was deprecated in NumPy 1.20 and removed in NumPy 1.24; the
    # builtin bool is the equivalent dtype
    flags = flagsdata.astype(bool)
    if 'averaged_closures' in npzdata:
        day_avg_cpdata = npzdata['averaged_closures']
        cp_dayavg = day_avg_cpdata.astype(NP.float64)
    if 'std_dev_triad' in npzdata:
        std_triads_cpdata = npzdata['std_dev_triad']
        cp_std_triads = std_triads_cpdata.astype(NP.float64)
    if 'std_dev_lst' in npzdata:
        std_lst_cpdata = npzdata['std_dev_lst']
        cp_std_lst = std_lst_cpdata.astype(NP.float64)
    with h5py.File(hdf5file, 'w') as fobj:
        datapool = ['raw']
        for dpool in datapool:
            if dpool == 'raw':
                qtys = ['cphase', 'triads', 'flags', 'lst', 'lst-day', 'days', 'dayavg', 'std_triads', 'std_lst']
                for qty in qtys:
                    data = None
                    if qty == 'cphase':
                        data = NP.copy(cp)
                    elif qty == 'triads':
                        data = NP.copy(triadsdata)
                    elif qty == 'flags':
                        data = NP.copy(flags)
                    elif qty == 'lst':
                        data = NP.copy(lstHA)
                    elif qty == 'lst-day':
                        data = NP.copy(lstday.jd)
                    elif qty == 'days':
                        data = NP.copy(daydata.jd)
                    elif qty == 'dayavg':
                        if 'averaged_closures' in npzdata:
                            data = NP.copy(cp_dayavg)
                    elif qty == 'std_triads':
                        if 'std_dev_triad' in npzdata:
                            data = NP.copy(cp_std_triads)
                    elif qty == 'std_lst':
                        if 'std_dev_lst' in npzdata:
                            data = NP.copy(cp_std_lst)
                    if data is not None:
                        # Datasets live under '<datapool>/<quantity>'; optional
                        # quantities absent from the input NPZ are skipped
                        dset = fobj.create_dataset('{0}/{1}'.format(dpool, qty), data=data, compression='gzip', compression_opts=9)
################################################################################
def save_CPhase_cross_power_spectrum(xcpdps, outfile):
    """
    ----------------------------------------------------------------------------
    Save closure phase cross-power spectrum information in a dictionary to a
    HDF5 file.

    Inputs:

    xcpdps      [dictionary] Cross-power spectrum dictionary, essentially an
                output of the member function compute_power_spectrum() of
                class ClosurePhaseDelaySpectrum. Header-level keys ('triads',
                'triads_ind', 'lst', 'lst_ind', 'dlst', 'days', 'day_ind',
                'dday') are stored under a 'header' group. Each sampling mode
                present ('oversampled', 'resampled') becomes a group holding
                the spectral-axis arrays ('z', 'kprll', 'lags', 'freq_center',
                'bw_eff', 'shape', 'freq_wts', 'lag_corr_length') and, for
                each data pool present ('whole', 'submodel', 'residual',
                'errinfo'), a subgroup with bookkeeping entries
                ('diagoffsets', 'diagweights', 'axesmap', 'nsamples_incoh',
                'nsamples_coh') and the statistics 'mean' and 'median'.
                Statistics are saved as SI values with the unit string
                recorded in the dataset attribute 'units'; list-valued
                statistics are saved as datasets named 'diagcomb_<i>' under a
                group named after the statistic.

    outfile     [string] Full path to the external HDF5 file where the cross-
                power spectrum information provided in xcpdps will be saved

    Raises TypeError if xcpdps is not a dictionary.
    ----------------------------------------------------------------------------
    """
    if not isinstance(xcpdps, dict):
        raise TypeError('Input xcpdps must be a dictionary')

    header_keys = ['triads', 'triads_ind', 'lst', 'lst_ind', 'dlst', 'days',
                   'day_ind', 'dday']
    spectral_keys = ['z', 'kprll', 'lags', 'freq_center', 'bw_eff', 'shape',
                     'freq_wts', 'lag_corr_length']
    pool_names = ['whole', 'submodel', 'residual', 'errinfo']
    bookkeeping_keys = ['diagoffsets', 'diagweights', 'axesmap',
                        'nsamples_incoh', 'nsamples_coh']

    with h5py.File(outfile, 'w') as fileobj:
        # Header-level quantities common to all sampling modes
        header_group = fileobj.create_group('header')
        for hkey in header_keys:
            header_group.create_dataset(hkey, data=xcpdps[hkey])

        for smplng in ['oversampled', 'resampled']:
            if smplng not in xcpdps:
                continue
            sampling_group = fileobj.create_group(smplng)
            for skey in spectral_keys:
                sampling_group.create_dataset(skey, data=xcpdps[smplng][skey])
            for pool in pool_names:
                if pool not in xcpdps[smplng]:
                    continue
                pool_info = xcpdps[smplng][pool]
                pool_group = sampling_group.create_group(pool)
                for bkey in bookkeeping_keys:
                    if bkey not in pool_info:
                        continue
                    if isinstance(pool_info[bkey], dict):
                        # Dictionary-valued entries become one dataset per key
                        subgroup = pool_group.create_group(bkey)
                        for subkey in pool_info[bkey]:
                            subgroup.create_dataset(str(subkey), data=pool_info[bkey][subkey])
                    else:
                        pool_group.create_dataset(bkey, data=pool_info[bkey])
                for stat in ['mean', 'median']:
                    if stat not in pool_info:
                        continue
                    statval = pool_info[stat]
                    if isinstance(statval, list):
                        # One dataset per diagonal combination; units in attrs
                        for ii in range(len(statval)):
                            dset = pool_group.create_dataset(stat+'/diagcomb_{0}'.format(ii), data=statval[ii].si.value)
                            dset.attrs['units'] = str(statval[ii].si.unit)
                    else:
                        dset = pool_group.create_dataset(stat, data=statval.si.value)
                        dset.attrs['units'] = str(statval.si.unit)
################################################################################
def read_CPhase_cross_power_spectrum(infile):
    """
    ----------------------------------------------------------------------------
    Read information about cross power spectrum from an external HDF5 file into
    a dictionary. This is the counterpart to save_CPhase_cross_power_spectrum()

    Input:

    infile      [string] Full path to the external HDF5 file that contains info
                about cross-power spectrum, organized as written by
                save_CPhase_cross_power_spectrum()

    Output:

    xcpdps      [dictionary] Dictionary with the same structure as the output
                of the member function compute_power_spectrum() of class
                ClosurePhaseDelaySpectrum. It contains the header-level keys
                'triads', 'triads_ind', 'lst', 'lst_ind', 'dlst', 'days',
                'day_ind', 'dday', and, for each sampling mode present in the
                file ('oversampled', 'resampled'), a dictionary holding the
                spectral-axis arrays ('z', 'kprll', 'lags', 'freq_center',
                'bw_eff', 'shape', 'freq_wts', 'lag_corr_length') and, for
                each data pool present ('whole', 'submodel', 'residual',
                'errinfo'), a dictionary with bookkeeping entries
                ('diagoffsets', 'diagweights', 'axesmap', 'nsamples_incoh',
                'nsamples_coh') and the statistics 'mean' and 'median'
                restored as quantities with units (or lists thereof for
                diagonal combinations) using the 'units' attribute saved
                alongside each dataset

    Raises TypeError if infile is not a string, or if an HDF5 object of an
    unexpected type is encountered while reading.
    ----------------------------------------------------------------------------
    """
    if not isinstance(infile, str):
        raise TypeError('Input infile must be a string')
    xcpdps = {}
    with h5py.File(infile, 'r') as fileobj:
        hdrgrp = fileobj['header']
        hdrkeys = ['triads', 'triads_ind', 'lst', 'lst_ind', 'dlst', 'days', 'day_ind', 'dday']
        for key in hdrkeys:
            # Dataset.value was removed in h5py 3.0; dset[()] reads the full array
            xcpdps[key] = hdrgrp[key][()]
        sampling = ['oversampled', 'resampled']
        sampling_keys = ['z', 'kprll', 'lags', 'freq_center', 'bw_eff', 'shape', 'freq_wts', 'lag_corr_length']
        dpool_keys = ['whole', 'submodel', 'residual', 'errinfo']
        for smplng in sampling:
            if smplng in fileobj:
                smplgrp = fileobj[smplng]
                xcpdps[smplng] = {}
                for key in sampling_keys:
                    xcpdps[smplng][key] = smplgrp[key][()]
                for dpool in dpool_keys:
                    if dpool in smplgrp:
                        xcpdps[smplng][dpool] = {}
                        dpoolgrp = smplgrp[dpool]
                        keys = ['diagoffsets', 'diagweights', 'axesmap', 'nsamples_incoh', 'nsamples_coh']
                        for key in keys:
                            if key in dpoolgrp:
                                if isinstance(dpoolgrp[key], h5py.Group):
                                    # A group here means the entry was saved from a
                                    # dict keyed by (integer) axis index
                                    xcpdps[smplng][dpool][key] = {}
                                    for subkey in dpoolgrp[key]:
                                        xcpdps[smplng][dpool][key][int(subkey)] = dpoolgrp[key][subkey][()]
                                elif isinstance(dpoolgrp[key], h5py.Dataset):
                                    xcpdps[smplng][dpool][key] = dpoolgrp[key][()]
                                else:
                                    raise TypeError('Invalid h5py data type encountered')
                        for stat in ['mean', 'median']:
                            if stat in dpoolgrp:
                                if isinstance(dpoolgrp[stat], h5py.Dataset):
                                    valunits = dpoolgrp[stat].attrs['units']
                                    xcpdps[smplng][dpool][stat] = dpoolgrp[stat][()] * U.Unit(valunits)
                                elif isinstance(dpoolgrp[stat], h5py.Group):
                                    # Reassemble the list of diagonal combinations
                                    # in index order from 'diagcomb_<i>' datasets
                                    xcpdps[smplng][dpool][stat] = []
                                    for diagcomb_ind in range(len(dpoolgrp[stat].keys())):
                                        if 'diagcomb_{0}'.format(diagcomb_ind) in dpoolgrp[stat]:
                                            valunits = dpoolgrp[stat]['diagcomb_{0}'.format(diagcomb_ind)].attrs['units']
                                            xcpdps[smplng][dpool][stat] += [dpoolgrp[stat]['diagcomb_{0}'.format(diagcomb_ind)][()] * U.Unit(valunits)]
    return xcpdps
################################################################################
def incoherent_cross_power_spectrum_average(xcpdps, excpdps=None, diagoffsets=None):
"""
----------------------------------------------------------------------------
Perform incoherent averaging of cross power spectrum along specified axes
Inputs:
xcpdps [dictionary or list of dictionaries] If provided as a list of
dictionaries, each dictionary consists of cross power spectral
information coming possible from different sources, and they
will be averaged incoherently. If a single
dictionary is provided instead of a list of dictionaries, the
said averaging does not take place. Each dictionary is
essentially an output of the member function
compute_power_spectrum() of class ClosurePhaseDelaySpectrum. It
has the following key-value structure:
'triads' ((ntriads,3) array), 'triads_ind',
((ntriads,) array), 'lstXoffsets' ((ndlst_range,) array), 'lst'
((nlst,) array), 'dlst' ((nlst,) array), 'lst_ind' ((nlst,)
array), 'days' ((ndays,) array), 'day_ind' ((ndays,) array),
'dday' ((ndays,) array), 'oversampled' and 'resampled'
corresponding to whether resample was set to False or True in
call to member function FT(). Values under keys 'triads_ind'
and 'lst_ind' are numpy array corresponding to triad and time
indices used in selecting the data. Values under keys
'oversampled' and 'resampled' each contain a dictionary with
the following keys and values:
'z' [numpy array] Redshifts corresponding to the band
centers in 'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has shape=(nlags,)
'kprll' [numpy array] k_parallel modes (in h/Mpc) corresponding
to 'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in Hz)
of the frequency subbands of the subband delay spectra.
It is of size n_win. It is roughly equivalent to
redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on
each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in Hz)
of the subbands being delay transformed. It is of size
n_win. It is roughly equivalent to width in redshift or
along line-of-sight
'shape' [string] shape of the frequency window function applied.
Usual values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window was
raised. The value is be a positive scalar with
default = 1.0
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is proportional
to inverse of effective bandwidth. It is of size n_win.
The unit size of a pixel is determined by the difference
between adjacent pixels in lags under key 'lags' which
in turn is effectively inverse of the effective
bandwidth of the subband specified in bw_eff
It further contains 3 keys named 'whole', 'submodel', and
'residual' each of which is a dictionary. 'whole' contains power
spectrum info about the input closure phases. 'submodel'
contains power spectrum info about the model that will have been
subtracted (as closure phase) from the 'whole' model. 'residual'
contains power spectrum info about the closure phases obtained
as a difference between 'whole' and 'submodel'. It contains the
following keys and values:
'mean' [numpy array] Delay power spectrum incoherently
estimated over the axes specified in xinfo['axes']
using the 'mean' key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has shape that
depends on the combination of input parameters. See
examples below. If both collapse_axes and avgcov are
not set, those axes will be replaced with square
covariance matrices. If collapse_axes is provided but
avgcov is False, those axes will be of shape 2*Naxis-1.
'median'
[numpy array] Delay power spectrum incoherently averaged
over the axes specified in incohax using the 'median'
key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has shape that
depends on the combination of input parameters. See
examples below. If both collapse_axes and avgcov are not
set, those axes will be replaced with square covariance
matrices. If collapse_axes is provided but avgcov is
False, those axes will be of shape 2*Naxis-1.
'diagoffsets'
[dictionary] Same keys corresponding to keys under
'collapse_axes' in input containing the diagonal
offsets for those axes. If 'avgcov' was set, those
entries will be removed from 'diagoffsets' since all the
leading diagonal elements have been collapsed (averaged)
further. Value under each key is a numpy array where
each element in the array corresponds to the index of
that leading diagonal. This should match the size of the
output along that axis in 'mean' or 'median' above.
'diagweights'
[dictionary] Each key is an axis specified in
collapse_axes and the value is a numpy array of weights
corresponding to the diagonal offsets in that axis.
'axesmap'
[dictionary] If covariance in cross-power is calculated
but is not collapsed, the number of dimensions in the
output will have changed. This parameter tracks where
the original axis is now placed. The keys are the
original axes that are involved in incoherent
cross-power, and the values are the new locations of
those original axes in the output.
'nsamples_incoh'
[integer] Number of incoherent samples in producing the
power spectrum
'nsamples_coh'
[integer] Number of coherent samples in producing the
power spectrum
excpdps [dictionary or list of dictionaries] If provided as a list of
dictionaries, each dictionary consists of cross power spectral
information of subsample differences coming possible from
different sources, and they will be averaged
incoherently. This is optional. If not set (default=None), no
incoherent averaging happens. If a single dictionary is provided
instead of a list of dictionaries, the said averaging does not
take place. Each dictionary is essentially an output of the
member function compute_power_spectrum_uncertainty() of class
ClosurePhaseDelaySpectrum. It has the following key-value
structure:
'triads' ((ntriads,3) array), 'triads_ind',
((ntriads,) array), 'lstXoffsets' ((ndlst_range,) array), 'lst'
((nlst,) array), 'dlst' ((nlst,) array), 'lst_ind' ((nlst,)
array), 'days' ((ndaycomb,) array), 'day_ind' ((ndaycomb,)
array), 'dday' ((ndaycomb,) array), 'oversampled' and
'resampled' corresponding to whether resample was set to False
or True in call to member function FT(). Values under keys
'triads_ind' and 'lst_ind' are numpy array corresponding to
triad and time indices used in selecting the data. Values under
keys 'oversampled' and 'resampled' each contain a dictionary
with the following keys and values:
'z' [numpy array] Redshifts corresponding to the band
centers in 'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has shape=(nlags,)
'kprll' [numpy array] k_parallel modes (in h/Mpc) corresponding
to 'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in Hz) of
the frequency subbands of the subband delay spectra. It
is of size n_win. It is roughly equivalent to
redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on each
frequency sub-band during the subband delay transform.
It is of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in Hz)
of the subbands being delay transformed. It is of size
n_win. It is roughly equivalent to width in redshift or
along line-of-sight
'shape' [string] shape of the frequency window function applied.
Usual values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window was
raised. The value is be a positive scalar with
default = 1.0
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is proportional
to inverse of effective bandwidth. It is of size n_win.
The unit size of a pixel is determined by the difference
between adjacent pixels in lags under key 'lags' which
in turn is effectively inverse of the effective
bandwidth of the subband specified in bw_eff
It further contains a key named 'errinfo' which is a dictionary.
It contains information about power spectrum uncertainties
obtained from subsample differences. It contains the following
keys and values:
'mean' [numpy array] Delay power spectrum uncertainties
incoherently estimated over the axes specified in
xinfo['axes'] using the 'mean' key in input cpds or
attribute cPhaseDS['errinfo']['dspec']. It has shape
that depends on the combination of input parameters. See
examples below. If both collapse_axes and avgcov are not
set, those axes will be replaced with square covariance
matrices. If collapse_axes is provided but avgcov is
False, those axes will be of shape 2*Naxis-1.
'median'
[numpy array] Delay power spectrum uncertainties
incoherently averaged over the axes specified in incohax
using the 'median' key in input cpds or attribute
cPhaseDS['errinfo']['dspec']. It has shape that depends
on the combination of input parameters. See examples
below. If both collapse_axes and avgcov are not set,
those axes will be replaced with square covariance
matrices. If collapse_axes is provided but avgcov is
False, those axes will be of shape 2*Naxis-1.
'diagoffsets'
[dictionary] Same keys corresponding to keys under
'collapse_axes' in input containing the diagonal offsets
for those axes. If 'avgcov' was set, those entries will
be removed from 'diagoffsets' since all the leading
diagonal elements have been collapsed (averaged) further.
Value under each key is a numpy array where each element
in the array corresponds to the index of that leading
diagonal. This should match the size of the output along
that axis in 'mean' or 'median' above.
'diagweights'
[dictionary] Each key is an axis specified in
collapse_axes and the value is a numpy array of weights
corresponding to the diagonal offsets in that axis.
'axesmap'
[dictionary] If covariance in cross-power is calculated
but is not collapsed, the number of dimensions in the
output will have changed. This parameter tracks where
the original axis is now placed. The keys are the
original axes that are involved in incoherent
cross-power, and the values are the new locations of
those original axes in the output.
'nsamples_incoh'
[integer] Number of incoherent samples in producing the
power spectrum
'nsamples_coh'
[integer] Number of coherent samples in producing the
power spectrum
diagoffsets [NoneType or dictionary or list of dictionaries] This info is
used for incoherent averaging along specified diagonals along
specified axes. This incoherent averaging is performed after
incoherently averaging multiple cross-power spectra (if any).
If set to None, this incoherent averaging is not performed.
Many combinations of axes and diagonals can be specified as
individual dictionaries in a list. If only one dictionary is
specified, then it assumed that only one combination of axes
and diagonals is requested. If a list of dictionaries is given,
each dictionary in the list specifies a different combination
for incoherent averaging. Each dictionary should have the
following key-value pairs. The key is the axis number (allowed
values are 1, 2, 3) that denote the axis type (1=LST, 2=Days,
3=Triads to be averaged), and the value under the keys is a
list or numpy array of diagonals to be averaged incoherently.
These axes-diagonal combinations apply to both the inputs
xcpdps and excpdps, except axis=2 does not apply to excpdps
(since it is made of subsample differences already) and will be
skipped.
Outputs:
A tuple consisting of two dictionaries. The first dictionary contains the
incoherent averaging of xcpdps as specified by the inputs, while the second
consists of incoherent of excpdps as specified by the inputs. The structure
of these dictionaries are practically the same as the dictionary inputs
xcpdps and excpdps respectively. The only differences in dictionary
structure are:
* Under key ['oversampled'/'resampled']['whole'/'submodel'/'residual'
/'effinfo']['mean'/'median'] is a list of numpy arrays, where each
array in the list corresponds to the dictionary in the list in input
diagoffsets that defines the axes-diagonal combination.
----------------------------------------------------------------------------
"""
if isinstance(xcpdps, dict):
xcpdps = [xcpdps]
if not isinstance(xcpdps, list):
raise TypeError('Invalid data type provided for input xcpdps')
if excpdps is not None:
if isinstance(excpdps, dict):
excpdps = [excpdps]
if not isinstance(excpdps, list):
raise TypeError('Invalid data type provided for input excpdps')
if len(xcpdps) != len(excpdps):
raise ValueError('Inputs xcpdps and excpdps found to have unequal number of values')
out_xcpdps = {'triads': xcpdps[0]['triads'], 'triads_ind': xcpdps[0]['triads_ind'], 'lst': xcpdps[0]['lst'], 'lst_ind': xcpdps[0]['lst_ind'], 'dlst': xcpdps[0]['dlst'], 'days': xcpdps[0]['days'], 'day_ind': xcpdps[0]['day_ind'], 'dday': xcpdps[0]['dday']}
out_excpdps = None
if excpdps is not None:
out_excpdps = {'triads': excpdps[0]['triads'], 'triads_ind': excpdps[0]['triads_ind'], 'lst': excpdps[0]['lst'], 'lst_ind': excpdps[0]['lst_ind'], 'dlst': excpdps[0]['dlst'], 'days': excpdps[0]['days'], 'day_ind': excpdps[0]['day_ind'], 'dday': excpdps[0]['dday']}
for smplng in ['oversampled', 'resampled']:
if smplng in xcpdps[0]:
out_xcpdps[smplng] = {'z': xcpdps[0][smplng]['z'], 'kprll': xcpdps[0][smplng]['kprll'], 'lags': xcpdps[0][smplng]['lags'], 'freq_center': xcpdps[0][smplng]['freq_center'], 'bw_eff': xcpdps[0][smplng]['bw_eff'], 'shape': xcpdps[0][smplng]['shape'], 'freq_wts': xcpdps[0][smplng]['freq_wts'], 'lag_corr_length': xcpdps[0][smplng]['lag_corr_length']}
if excpdps is not None:
out_excpdps[smplng] = {'z': excpdps[0][smplng]['z'], 'kprll': excpdps[0][smplng]['kprll'], 'lags': excpdps[0][smplng]['lags'], 'freq_center': excpdps[0][smplng]['freq_center'], 'bw_eff': excpdps[0][smplng]['bw_eff'], 'shape': excpdps[0][smplng]['shape'], 'freq_wts': excpdps[0][smplng]['freq_wts'], 'lag_corr_length': excpdps[0][smplng]['lag_corr_length']}
for dpool in ['whole', 'submodel', 'residual']:
if dpool in xcpdps[0][smplng]:
out_xcpdps[smplng][dpool] = {'diagoffsets': xcpdps[0][smplng][dpool]['diagoffsets'], 'axesmap': xcpdps[0][smplng][dpool]['axesmap']}
for stat in ['mean', 'median']:
if stat in xcpdps[0][smplng][dpool]:
out_xcpdps[smplng][dpool][stat] = {}
arr = []
diagweights = []
for i in range(len(xcpdps)):
arr += [xcpdps[i][smplng][dpool][stat].si.value]
arr_units = xcpdps[i][smplng][dpool][stat].si.unit
if isinstance(xcpdps[i][smplng][dpool]['diagweights'], dict):
diagwts = 1.0
diagwts_shape = NP.ones(xcpdps[i][smplng][dpool][stat].ndim, dtype=NP.int)
for ax in xcpdps[i][smplng][dpool]['diagweights']:
tmp_shape = NP.copy(diagwts_shape)
tmp_shape[xcpdps[i][smplng][dpool]['axesmap'][ax]] = xcpdps[i][smplng][dpool]['diagweights'][ax].size
diagwts = diagwts * xcpdps[i][smplng][dpool]['diagweights'][ax].reshape(tuple(tmp_shape))
elif isinstance(xcpdps[i][smplng][dpool]['diagweights'], NP.ndarray):
diagwts = NP.copy(xcpdps[i][smplng][dpool]['diagweights'])
else:
raise TypeError('Diagonal weights in input must be a dictionary or a numpy array')
diagweights += [diagwts]
diagweights = NP.asarray(diagweights)
arr = NP.asarray(arr)
arr = NP.nansum(arr * diagweights, axis=0) / NP.nansum(diagweights, axis=0) * arr_units
diagweights = NP.nansum(diagweights, axis=0)
out_xcpdps[smplng][dpool][stat] = arr
out_xcpdps[smplng][dpool]['diagweights'] = diagweights
for dpool in ['errinfo']:
if dpool in excpdps[0][smplng]:
out_excpdps[smplng][dpool] = {'diagoffsets': excpdps[0][smplng][dpool]['diagoffsets'], 'axesmap': excpdps[0][smplng][dpool]['axesmap']}
for stat in ['mean', 'median']:
if stat in excpdps[0][smplng][dpool]:
out_excpdps[smplng][dpool][stat] = {}
arr = []
diagweights = []
for i in range(len(excpdps)):
arr += [excpdps[i][smplng][dpool][stat].si.value]
arr_units = excpdps[i][smplng][dpool][stat].si.unit
if isinstance(excpdps[i][smplng][dpool]['diagweights'], dict):
diagwts = 1.0
diagwts_shape = NP.ones(excpdps[i][smplng][dpool][stat].ndim, dtype=NP.int)
for ax in excpdps[i][smplng][dpool]['diagweights']:
tmp_shape = NP.copy(diagwts_shape)
tmp_shape[excpdps[i][smplng][dpool]['axesmap'][ax]] = excpdps[i][smplng][dpool]['diagweights'][ax].size
diagwts = diagwts * excpdps[i][smplng][dpool]['diagweights'][ax].reshape(tuple(tmp_shape))
elif isinstance(excpdps[i][smplng][dpool]['diagweights'], NP.ndarray):
diagwts = NP.copy(excpdps[i][smplng][dpool]['diagweights'])
else:
raise TypeError('Diagonal weights in input must be a dictionary or a numpy array')
diagweights += [diagwts]
diagweights = NP.asarray(diagweights)
arr = NP.asarray(arr)
arr = NP.nansum(arr * diagweights, axis=0) / NP.nansum(diagweights, axis=0) * arr_units
diagweights = NP.nansum(diagweights, axis=0)
out_excpdps[smplng][dpool][stat] = arr
out_excpdps[smplng][dpool]['diagweights'] = diagweights
if diagoffsets is not None:
if isinstance(diagoffsets, dict):
diagoffsets = [diagoffsets]
if not isinstance(diagoffsets, list):
raise TypeError('Input diagoffsets must be a list of dictionaries')
for ind in range(len(diagoffsets)):
for ax in diagoffsets[ind]:
if not isinstance(diagoffsets[ind][ax], (list, NP.ndarray)):
raise TypeError('Values in input dictionary diagoffsets must be a list or numpy array')
diagoffsets[ind][ax] = NP.asarray(diagoffsets[ind][ax])
for smplng in ['oversampled', 'resampled']:
if smplng in out_xcpdps:
for dpool in ['whole', 'submodel', 'residual']:
if dpool in out_xcpdps[smplng]:
masks = []
for ind in range(len(diagoffsets)):
mask_ones = NP.ones(out_xcpdps[smplng][dpool]['diagweights'].shape, dtype=NP.bool)
mask_agg = None
for ax in diagoffsets[ind]:
mltdim_slice = [slice(None)] * mask_ones.ndim
mltdim_slice[out_xcpdps[smplng][dpool]['axesmap'][ax].squeeze()] = NP.where(NP.isin(out_xcpdps[smplng][dpool]['diagoffsets'][ax], diagoffsets[ind][ax]))[0]
mask_tmp = NP.copy(mask_ones)
mask_tmp[tuple(mltdim_slice)] = False
if mask_agg is None:
mask_agg = NP.copy(mask_tmp)
else:
mask_agg = NP.logical_or(mask_agg, mask_tmp)
masks += [NP.copy(mask_agg)]
diagwts = NP.copy(out_xcpdps[smplng][dpool]['diagweights'])
out_xcpdps[smplng][dpool]['diagweights'] = []
for stat in ['mean', 'median']:
if stat in out_xcpdps[smplng][dpool]:
arr = NP.copy(out_xcpdps[smplng][dpool][stat].si.value)
arr_units = out_xcpdps[smplng][dpool][stat].si.unit
out_xcpdps[smplng][dpool][stat] = []
for ind in range(len(diagoffsets)):
masked_diagwts = MA.array(diagwts, mask=masks[ind])
axes_to_avg = tuple([out_xcpdps[smplng][dpool]['axesmap'][ax][0] for ax in diagoffsets[ind]])
out_xcpdps[smplng][dpool][stat] += [MA.sum(arr * masked_diagwts, axis=axes_to_avg, keepdims=True) / MA.sum(masked_diagwts, axis=axes_to_avg, keepdims=True) * arr_units]
if len(out_xcpdps[smplng][dpool]['diagweights']) < len(diagoffsets):
out_xcpdps[smplng][dpool]['diagweights'] += [MA.sum(masked_diagwts, axis=axes_to_avg, keepdims=True)]
if excpdps is not None:
for smplng in ['oversampled', 'resampled']:
if smplng in out_excpdps:
for dpool in ['errinfo']:
if dpool in out_excpdps[smplng]:
masks = []
for ind in range(len(diagoffsets)):
mask_ones = NP.ones(out_excpdps[smplng][dpool]['diagweights'].shape, dtype=NP.bool)
mask_agg = None
for ax in diagoffsets[ind]:
if ax != 2:
mltdim_slice = [slice(None)] * mask_ones.ndim
mltdim_slice[out_excpdps[smplng][dpool]['axesmap'][ax].squeeze()] = NP.where(NP.isin(out_excpdps[smplng][dpool]['diagoffsets'][ax], diagoffsets[ind][ax]))[0]
mask_tmp = NP.copy(mask_ones)
mask_tmp[tuple(mltdim_slice)] = False
if mask_agg is None:
mask_agg = NP.copy(mask_tmp)
else:
mask_agg = NP.logical_or(mask_agg, mask_tmp)
masks += [NP.copy(mask_agg)]
diagwts = NP.copy(out_excpdps[smplng][dpool]['diagweights'])
out_excpdps[smplng][dpool]['diagweights'] = []
for stat in ['mean', 'median']:
if stat in out_excpdps[smplng][dpool]:
arr = NP.copy(out_excpdps[smplng][dpool][stat].si.value)
arr_units = out_excpdps[smplng][dpool][stat].si.unit
out_excpdps[smplng][dpool][stat] = []
for ind in range(len(diagoffsets)):
masked_diagwts = MA.array(diagwts, mask=masks[ind])
axes_to_avg = tuple([out_excpdps[smplng][dpool]['axesmap'][ax][0] for ax in diagoffsets[ind] if ax!=2])
out_excpdps[smplng][dpool][stat] += [MA.sum(arr * masked_diagwts, axis=axes_to_avg, keepdims=True) / MA.sum(masked_diagwts, axis=axes_to_avg, keepdims=True) * arr_units]
if len(out_excpdps[smplng][dpool]['diagweights']) < len(diagoffsets):
out_excpdps[smplng][dpool]['diagweights'] += [MA.sum(masked_diagwts, axis=axes_to_avg, keepdims=True)]
return (out_xcpdps, out_excpdps)
################################################################################
def incoherent_kbin_averaging(xcpdps, kbins=None, num_kbins=None, kbintype='log'):
    """
    ----------------------------------------------------------------------------
    Averages the power spectrum incoherently by binning in bins of k. Returns
    the power spectrum in units of both the standard power spectrum (K^2 Mpc^3)
    and Delta^2 (K^2).

    Inputs:

    xcpdps      [dictionary] Contains the incoherently averaged power spectrum
                along LST and/or triads axes, as returned by
                incoherent_cross_power_spectrum_average(). Must contain the
                top-level keys 'triads', 'triads_ind', 'lst', 'lst_ind',
                'dlst', 'days', 'day_ind', 'dday', and per-sampling
                ('oversampled'/'resampled') sub-dictionaries with keys 'z',
                'kprll', 'lags', 'freq_center', 'bw_eff', 'shape', 'freq_wts',
                'lag_corr_length' and datapools 'whole'/'submodel'/'residual'/
                'errinfo' holding 'mean'/'median' statistics.

    kbins       [NoneType, list or numpy array] Bin edges in k. If None
                (default), edges are determined automatically from num_kbins
                and kbintype.

    num_kbins   [NoneType or integer] Number of k-bins. Used only if kbins is
                None. Defaults to 10 when kbintype='log' and num_kbins is None.
                For kbintype='linear' the bin count follows the k-resolution
                of the input power spectrum.

    kbintype    [string] Type of automatic binning, used only if kbins is
                None. Accepted values are 'linear' and 'log' (default).

    Outputs:

    Dictionary mirroring the structure of the input xcpdps. Under each
    sampling key, each datapool's 'mean'/'median' entry becomes a dictionary
    with keys 'PS' (standard power spectrum) and 'Del2' (Delta^2 power
    spectrum), each a list of numpy arrays (one per axes-diagonal combination)
    whose last axis has nkbins elements. A 'kbininfo' dictionary holds
    'counts', 'kbin_edges', 'kbinnum' and 'ri' (reverse indices) per spectral
    window, plus weighted mean k values per datapool and statistic.
    ----------------------------------------------------------------------------
    """
    if not isinstance(xcpdps, dict):
        raise TypeError('Input xcpdps must be a dictionary')
    if kbins is not None:
        if not isinstance(kbins, (list, NP.ndarray)):
            raise TypeError('Input kbins must be a list or numpy array')
    else:
        if not isinstance(kbintype, str):
            raise TypeError('Input kbintype must be a string')
        if kbintype.lower() not in ['linear', 'log']:
            raise ValueError('Input kbintype must be set to "linear" or "log"')
        if kbintype.lower() == 'log':
            if num_kbins is None:
                num_kbins = 10

    psinfo = {}
    keys = ['triads', 'triads_ind', 'lst', 'lst_ind', 'dlst', 'days', 'day_ind', 'dday']
    for key in keys:
        psinfo[key] = xcpdps[key]

    sampling = ['oversampled', 'resampled']
    sampling_keys = ['z', 'freq_center', 'bw_eff', 'shape', 'freq_wts', 'lag_corr_length']
    dpool_keys = ['whole', 'submodel', 'residual', 'errinfo']
    for smplng in sampling:
        if smplng in xcpdps:
            psinfo[smplng] = {}
            for key in sampling_keys:
                psinfo[smplng][key] = xcpdps[smplng][key]
            kprll = xcpdps[smplng]['kprll']
            lags = xcpdps[smplng]['lags']
            eps = 1e-10
            if kbins is None:
                dkprll = NP.max(NP.mean(NP.diff(kprll, axis=-1), axis=-1))
                if kbintype.lower() == 'linear':
                    # BUGFIX: use floor division -- 'shape[1]/2+1' is a float
                    # in Python 3 and NP.linspace(num=...) requires an integer
                    bins_kprll = NP.linspace(eps, NP.abs(kprll).max()+eps, num=kprll.shape[1]//2+1, endpoint=True)
                else:
                    bins_kprll = NP.geomspace(eps, NP.abs(kprll).max()+eps, num=num_kbins+1, endpoint=True)
                # Prepend -eps so that |k| ~ 0 falls inside the first bin
                bins_kprll = NP.insert(bins_kprll, 0, -eps)
            else:
                bins_kprll = NP.asarray(kbins)
            num_kbins = bins_kprll.size - 1

            psinfo[smplng]['kbininfo'] = {'counts': [], 'kbin_edges': [], 'kbinnum': [], 'ri': []}
            for spw in range(kprll.shape[0]):
                counts, kbin_edges, kbinnum, ri = OPS.binned_statistic(NP.abs(kprll[spw,:]), statistic='count', bins=bins_kprll)
                # BUGFIX: NP.int alias was removed in NumPy 1.24; use builtin int
                counts = counts.astype(int)
                psinfo[smplng]['kbininfo']['counts'] += [NP.copy(counts)]
                psinfo[smplng]['kbininfo']['kbin_edges'] += [kbin_edges / U.Mpc]
                psinfo[smplng]['kbininfo']['kbinnum'] += [NP.copy(kbinnum)]
                psinfo[smplng]['kbininfo']['ri'] += [NP.copy(ri)]

            for dpool in dpool_keys:
                if dpool in xcpdps[smplng]:
                    psinfo[smplng][dpool] = {}
                    psinfo[smplng]['kbininfo'][dpool] = {}
                    keys = ['diagoffsets', 'diagweights', 'axesmap']
                    for key in keys:
                        psinfo[smplng][dpool][key] = xcpdps[smplng][dpool][key]
                    for stat in ['mean', 'median']:
                        if stat in xcpdps[smplng][dpool]:
                            psinfo[smplng][dpool][stat] = {'PS': [], 'Del2': []}
                            psinfo[smplng]['kbininfo'][dpool][stat] = []
                            for combi in range(len(xcpdps[smplng][dpool][stat])):
                                outshape = NP.asarray(xcpdps[smplng][dpool][stat][combi].shape)
                                outshape[-1] = num_kbins
                                # BUGFIX: NP.complex / NP.float aliases were
                                # removed in NumPy 1.24; use builtin types
                                tmp_dps = NP.full(tuple(outshape), NP.nan, dtype=complex) * U.Unit(xcpdps[smplng][dpool][stat][combi].unit)
                                tmp_Del2 = NP.full(tuple(outshape), NP.nan, dtype=complex) * U.Unit(xcpdps[smplng][dpool][stat][combi].unit / U.Mpc**3)
                                tmp_kprll = NP.full(tuple(outshape), NP.nan, dtype=float) / U.Mpc
                                for spw in range(kprll.shape[0]):
                                    counts = NP.copy(psinfo[smplng]['kbininfo']['counts'][spw])
                                    ri = NP.copy(psinfo[smplng]['kbininfo']['ri'][spw])
                                    print('Processing datapool={0}, stat={1}, LST-Day-Triad combination={2:0d}, spw={3:0d}...'.format(dpool, stat, combi, spw))
                                    progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} k-bins '.format(num_kbins), PGB.ETA()], maxval=num_kbins).start()
                                    for binnum in range(num_kbins):
                                        if counts[binnum] > 0:
                                            # Reverse indices select the k values falling in this bin
                                            ind_kbin = ri[ri[binnum]:ri[binnum+1]]
                                            tmp_dps[spw,...,binnum] = NP.nanmean(NP.take(xcpdps[smplng][dpool][stat][combi][spw], ind_kbin, axis=-1), axis=-1)
                                            # Broadcast |k| along the last axis only
                                            k_shape = NP.ones(NP.take(xcpdps[smplng][dpool][stat][combi][spw], ind_kbin, axis=-1).ndim, dtype=int)
                                            k_shape[-1] = -1
                                            # Delta^2 = k^3 P(k) / (2 pi^2)
                                            tmp_Del2[spw,...,binnum] = NP.nanmean(NP.abs(kprll[spw,ind_kbin].reshape(tuple(k_shape))/U.Mpc)**3 * NP.take(xcpdps[smplng][dpool][stat][combi][spw], ind_kbin, axis=-1), axis=-1) / (2*NP.pi**2)
                                            # Power-weighted mean |k| in the bin
                                            tmp_kprll[spw,...,binnum] = NP.nansum(NP.abs(kprll[spw,ind_kbin].reshape(tuple(k_shape))/U.Mpc) * NP.abs(NP.take(xcpdps[smplng][dpool][stat][combi][spw], ind_kbin, axis=-1)), axis=-1) / NP.nansum(NP.abs(NP.take(xcpdps[smplng][dpool][stat][combi][spw], ind_kbin, axis=-1)), axis=-1)
                                        progress.update(binnum+1)
                                    progress.finish()
                                psinfo[smplng][dpool][stat]['PS'] += [copy.deepcopy(tmp_dps)]
                                psinfo[smplng][dpool][stat]['Del2'] += [copy.deepcopy(tmp_Del2)]
                                psinfo[smplng]['kbininfo'][dpool][stat] += [copy.deepcopy(tmp_kprll)]
    return psinfo
################################################################################
class ClosurePhase(object):
"""
----------------------------------------------------------------------------
Class to hold and operate on Closure Phase information.
It has the following attributes and member functions.
Attributes:
extfile [string] Full path to external file containing information
of ClosurePhase instance. The file is in HDF5 format
cpinfo [dictionary] Contains the following top level keys,
namely, 'raw', 'processed', and 'errinfo'
Under key 'raw' which holds a dictionary, the subkeys
include 'cphase' (nlst,ndays,ntriads,nchan),
'triads' (ntriads,3), 'lst' (nlst,ndays), and 'flags'
(nlst,ndays,ntriads,nchan).
Under the 'processed' key are more subkeys, namely,
'native', 'prelim', and optionally 'submodel' and 'residual'
each holding a dictionary.
Under 'native' dictionary, the subsubkeys for further
dictionaries are 'cphase' (masked array:
(nlst,ndays,ntriads,nchan)), 'eicp' (complex masked
array: (nlst,ndays,ntriads,nchan)), and 'wts' (masked
array: (nlst,ndays,ntriads,nchan)).
Under 'prelim' dictionary, the subsubkeys for further
dictionaries are 'tbins' (numpy array of tbin centers
after smoothing), 'dtbins' (numpy array of tbin
intervals), 'wts' (masked array:
(ntbins,ndays,ntriads,nchan)), 'eicp' and 'cphase'.
The dictionaries under 'eicp' are indexed by keys
'mean' (complex masked array:
(ntbins,ndays,ntriads,nchan)), and 'median' (complex
masked array: (ntbins,ndays,ntriads,nchan)).
The dictionaries under 'cphase' are indexed by keys
'mean' (masked array: (ntbins,ndays,ntriads,nchan)),
'median' (masked array: (ntbins,ndays,ntriads,nchan)),
'rms' (masked array: (ntbins,ndays,ntriads,nchan)), and
'mad' (masked array: (ntbins,ndays,ntriads,nchan)). The
last one denotes Median Absolute Deviation.
Under 'submodel' dictionary, the subsubkeys for further
dictionaries are 'cphase' (masked array:
(nlst,ndays,ntriads,nchan)), and 'eicp' (complex masked
array: (nlst,ndays,ntriads,nchan)).
Under 'residual' dictionary, the subsubkeys for further
dictionaries are 'cphase' and 'eicp'. These are
dictionaries too. The dictionaries under 'eicp' are
indexed by keys 'mean' (complex masked array:
(ntbins,ndays,ntriads,nchan)), and 'median' (complex
masked array: (ntbins,ndays,ntriads,nchan)).
The dictionaries under 'cphase' are indexed by keys
'mean' (masked array: (ntbins,ndays,ntriads,nchan)),
and 'median' (masked array:
(ntbins,ndays,ntriads,nchan)).
Under key 'errinfo', it contains the following keys and
values:
'list_of_pair_of_pairs'
List of pair of pairs for which differences of
complex exponentials have been computed, where the
elements are bins of days. The number of elements
in the list is ncomb. And each element is a smaller
(4-element) list of pair of pairs
'eicp_diff'
Difference of complex exponentials between pairs
of day bins. This will be used in evaluating noise
properties in power spectrum. It is a dictionary
with two keys '0' and '1' where each contains the
difference from a pair of subsamples. Each of these
keys contains a numpy array of shape
(nlstbins,ncomb,2,ntriads,nchan)
'wts' Weights in difference of complex exponentials
obtained by sum of squares of weights that are
associated with the pair that was used in the
differencing. It is a dictionary with two keys '0'
and '1' where each contains the weights associated
It is of shape (nlstbins,ncomb,2,ntriads,nchan)
Member functions:
__init__() Initialize an instance of class ClosurePhase
expicp() Compute and return complex exponential of the closure phase
as a masked array
smooth_in_tbins()
Smooth the complex exponentials of closure phases in LST
bins. Both mean and median smoothing is produced.
subtract() Subtract complex exponential of the bispectrum phase
from the current instance and updates the cpinfo attribute
subsample_differencing()
Create subsamples and differences between subsamples to
evaluate noise properties from the data set.
save() Save contents of attribute cpinfo in external HDF5 file
----------------------------------------------------------------------------
"""
def __init__(self, infile, freqs, infmt='npz'):
"""
------------------------------------------------------------------------
Initialize an instance of class ClosurePhase
Inputs:
infile [string] Input file including full path. It could be a NPZ
with raw data, or a HDF5 file that could contain raw or
processed data. The input file format is specified in the
input infmt. If it is a NPZ file, it must contain the
following keys/files:
'closures' [numpy array] Closure phase (radians). It is of
shape (nlst,ndays,ntriads,nchan)
'triads' [numpy array] Array of triad tuples, of shape
(ntriads,3)
'flags' [numpy array] Array of flags (boolean), of shape
(nlst,ndays,ntriads,nchan)
'last' [numpy array] Array of LST for each day (CASA
units which is MJD+6713). Shape is (nlst,ndays)
'days' [numpy array] Array of days, shape is (ndays,)
'averaged_closures'
[numpy array] optional array of closure phases
averaged across days. Shape is
(nlst,ntriads,nchan)
'std_dev_lst'
[numpy array] optional array of standard
deviation of closure phases across days. Shape
is (nlst,ntriads,nchan)
'std_dev_triads'
[numpy array] optional array of standard
deviation of closure phases across triads.
Shape is (nlst,ndays,nchan)
freqs [numpy array] Frequencies (in Hz) in the input. Size is
nchan.
infmt [string] Input file format. Accepted values are 'npz'
(default) and 'hdf5'.
------------------------------------------------------------------------
"""
if not isinstance(infile, str):
raise TypeError('Input infile must be a string')
if not isinstance(freqs, NP.ndarray):
raise TypeError('Input freqs must be a numpy array')
freqs = freqs.ravel()
if not isinstance(infmt, str):
raise TypeError('Input infmt must be a string')
if infmt.lower() not in ['npz', 'hdf5']:
raise ValueError('Input infmt must be "npz" or "hdf5"')
if infmt.lower() == 'npz':
infilesplit = infile.split('.npz')
infile_noext = infilesplit[0]
self.cpinfo = loadnpz(infile)
# npz2hdf5(infile, infile_noext+'.hdf5')
self.extfile = infile_noext + '.hdf5'
else:
# if not isinstance(infile, h5py.File):
# raise TypeError('Input infile is not a valid HDF5 file')
self.extfile = infile
self.cpinfo = NMO.load_dict_from_hdf5(self.extfile)
if freqs.size != self.cpinfo['raw']['cphase'].shape[-1]:
raise ValueError('Input frequencies do not match with dimensions of the closure phase data')
self.f = freqs
self.df = freqs[1] - freqs[0]
force_expicp = False
if 'processed' not in self.cpinfo:
force_expicp = True
else:
if 'native' not in self.cpinfo['processed']:
force_expicp = True
self.expicp(force_action=force_expicp)
if 'prelim' not in self.cpinfo['processed']:
self.cpinfo['processed']['prelim'] = {}
self.cpinfo['errinfo'] = {}
############################################################################
def expicp(self, force_action=False):
"""
------------------------------------------------------------------------
Compute the complex exponential of the closure phase as a masked array
Inputs:
force_action [boolean] If set to False (default), the complex
exponential is computed only if it has not been done so
already. Otherwise the computation is forced.
------------------------------------------------------------------------
"""
if 'processed' not in self.cpinfo:
self.cpinfo['processed'] = {}
force_action = True
if 'native' not in self.cpinfo['processed']:
self.cpinfo['processed']['native'] = {}
force_action = True
if 'cphase' not in self.cpinfo['processed']['native']:
self.cpinfo['processed']['native']['cphase'] = MA.array(self.cpinfo['raw']['cphase'].astype(NP.float64), mask=self.cpinfo['raw']['flags'])
force_action = True
if not force_action:
if 'eicp' not in self.cpinfo['processed']['native']:
self.cpinfo['processed']['native']['eicp'] = NP.exp(1j * self.cpinfo['processed']['native']['cphase'])
self.cpinfo['processed']['native']['wts'] = MA.array(NP.logical_not(self.cpinfo['raw']['flags']).astype(NP.float), mask=self.cpinfo['raw']['flags'])
else:
self.cpinfo['processed']['native']['eicp'] = NP.exp(1j * self.cpinfo['processed']['native']['cphase'])
self.cpinfo['processed']['native']['wts'] = MA.array(NP.logical_not(self.cpinfo['raw']['flags']).astype(NP.float), mask=self.cpinfo['raw']['flags'])
############################################################################
def smooth_in_tbins(self, daybinsize=None, ndaybins=None, lstbinsize=None):
    """
    ------------------------------------------------------------------------
    Smooth the complex exponentials of closure phases in time bins. Both
    mean and median smoothing is produced.

    Inputs:

    daybinsize  [Nonetype or scalar] Day bin size (in days) over which mean
                and median are estimated across different days for a fixed
                LST bin. If set to None, it will look for value in input
                ndaybins. If both are None, no smoothing is performed. Only
                one of daybinsize or ndaybins must be set to non-None value.

    ndaybins    [NoneType or integer] Number of bins along day axis. Only
                if daybinsize is set to None. It produces bins that roughly
                consist of equal number of days in each bin regardless of
                how much the days in each bin are separated from each other.
                If both are None, no smoothing is performed. Only one of
                daybinsize or ndaybins must be set to non-None value.

    lstbinsize  [NoneType or scalar] LST bin size (in seconds) over which
                mean and median are estimated across the LST. If set to
                None, no smoothing is performed

    Action: Populates self.cpinfo['processed']['prelim'] with day- and/or
    LST-binned weights ('wts'), complex exponentials ('eicp') and closure
    phase statistics ('cphase' with keys 'mean', 'median', 'rms', 'mad'),
    together with the bin centers and bin widths ('daybins'/'diff_dbins',
    'lstbins'/'dlstbins').
    ------------------------------------------------------------------------
    """
    if (ndaybins is not None) and (daybinsize is not None):
        raise ValueError('Only one of daybinsize or ndaybins should be set')

    if (daybinsize is not None) or (ndaybins is not None):
        if daybinsize is not None:
            if not isinstance(daybinsize, (int,float)):
                raise TypeError('Input daybinsize must be a scalar')
            dres = NP.diff(self.cpinfo['raw']['days']).min() # in days
            dextent = self.cpinfo['raw']['days'].max() - self.cpinfo['raw']['days'].min() + dres # in days
            if daybinsize > dres:
                daybinsize = NP.clip(daybinsize, dres, dextent)
            eps = 1e-10
            daybins = NP.arange(self.cpinfo['raw']['days'].min(), self.cpinfo['raw']['days'].max() + dres + eps, daybinsize)
            ndaybins = daybins.size
            daybins = NP.concatenate((daybins, [daybins[-1]+daybinsize+eps]))
            if ndaybins > 1:
                daybinintervals = daybins[1:] - daybins[:-1]
                daybincenters = daybins[:-1] + 0.5 * daybinintervals
            else:
                daybinintervals = NP.asarray(daybinsize).reshape(-1)
                daybincenters = daybins[0] + 0.5 * daybinintervals
            counts, daybin_edges, daybinnum, ri = OPS.binned_statistic(self.cpinfo['raw']['days'], statistic='count', bins=daybins)
            counts = counts.astype(int) # builtin int: NP.int alias was removed in numpy >= 1.24
            # Accumulators shaped (nlst, ndaybins, ntriads, nchan)
            wts_daybins = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            eicp_dmean = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]), dtype=NP.complex128)
            eicp_dmedian = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]), dtype=NP.complex128)
            cp_drms = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            cp_dmad = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            for binnum in range(counts.size): # range: xrange is Python-2-only
                ind_daybin = ri[ri[binnum]:ri[binnum+1]]
                wts_daybins[:,binnum,:,:] = NP.sum(self.cpinfo['processed']['native']['wts'][:,ind_daybin,:,:].data, axis=1)
                eicp_dmean[:,binnum,:,:] = NP.exp(1j*NP.angle(MA.mean(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:], axis=1)))
                eicp_dmedian[:,binnum,:,:] = NP.exp(1j*NP.angle(MA.median(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:].real, axis=1) + 1j * MA.median(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:].imag, axis=1)))
                cp_drms[:,binnum,:,:] = MA.std(self.cpinfo['processed']['native']['cphase'][:,ind_daybin,:,:], axis=1).data
                cp_dmad[:,binnum,:,:] = MA.median(NP.abs(self.cpinfo['processed']['native']['cphase'][:,ind_daybin,:,:] - NP.angle(eicp_dmedian[:,binnum,:,:][:,NP.newaxis,:,:])), axis=1).data
        else:
            if not isinstance(ndaybins, int):
                raise TypeError('Input ndaybins must be an integer')
            if ndaybins <= 0:
                raise ValueError('Input ndaybins must be positive')
            # Split along the day axis into roughly equal-sized groups
            days_split = NP.array_split(self.cpinfo['raw']['days'], ndaybins)
            daybincenters = NP.asarray([NP.mean(days) for days in days_split])
            daybinintervals = NP.asarray([days.max()-days.min() for days in days_split])
            counts = NP.asarray([days.size for days in days_split])
            wts_split = NP.array_split(self.cpinfo['processed']['native']['wts'].data, ndaybins, axis=1)
            wts_daybins = NP.asarray([NP.sum(wtsitem, axis=1) for wtsitem in wts_split]) # ndaybins x nlst x ntriads x nchan
            wts_daybins = NP.moveaxis(wts_daybins, 0, 1) # nlst x ndaybins x ntriads x nchan
            mask_split = NP.array_split(self.cpinfo['processed']['native']['eicp'].mask, ndaybins, axis=1)
            eicp_split = NP.array_split(self.cpinfo['processed']['native']['eicp'].data, ndaybins, axis=1)
            eicp_dmean = MA.array([MA.mean(MA.array(eicp_split[i], mask=mask_split[i]), axis=1) for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            eicp_dmean = NP.exp(1j * NP.angle(eicp_dmean))
            eicp_dmean = NP.moveaxis(eicp_dmean, 0, 1) # nlst x ndaybins x ntriads x nchan
            eicp_dmedian = MA.array([MA.median(MA.array(eicp_split[i].real, mask=mask_split[i]), axis=1) + 1j * MA.median(MA.array(eicp_split[i].imag, mask=mask_split[i]), axis=1) for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            eicp_dmedian = NP.exp(1j * NP.angle(eicp_dmedian))
            eicp_dmedian = NP.moveaxis(eicp_dmedian, 0, 1) # nlst x ndaybins x ntriads x nchan
            cp_split = NP.array_split(self.cpinfo['processed']['native']['cphase'].data, ndaybins, axis=1)
            cp_drms = NP.array([MA.std(MA.array(cp_split[i], mask=mask_split[i]), axis=1).data for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            cp_drms = NP.moveaxis(cp_drms, 0, 1) # nlst x ndaybins x ntriads x nchan
            cp_dmad = NP.array([MA.median(NP.abs(cp_split[i] - NP.angle(eicp_dmedian[:,[i],:,:])), axis=1).data for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            cp_dmad = NP.moveaxis(cp_dmad, 0, 1) # nlst x ndaybins x ntriads x nchan
        # Store the day-binned statistics; locations with zero weight are masked
        if 'prelim' not in self.cpinfo['processed']:
            self.cpinfo['processed']['prelim'] = {}
        self.cpinfo['processed']['prelim']['eicp'] = {}
        self.cpinfo['processed']['prelim']['cphase'] = {}
        self.cpinfo['processed']['prelim']['daybins'] = daybincenters
        self.cpinfo['processed']['prelim']['diff_dbins'] = daybinintervals
        mask = wts_daybins <= 0.0
        self.cpinfo['processed']['prelim']['wts'] = MA.array(wts_daybins, mask=mask)
        self.cpinfo['processed']['prelim']['eicp']['mean'] = MA.array(eicp_dmean, mask=mask)
        self.cpinfo['processed']['prelim']['eicp']['median'] = MA.array(eicp_dmedian, mask=mask)
        self.cpinfo['processed']['prelim']['cphase']['mean'] = MA.array(NP.angle(eicp_dmean), mask=mask)
        self.cpinfo['processed']['prelim']['cphase']['median'] = MA.array(NP.angle(eicp_dmedian), mask=mask)
        self.cpinfo['processed']['prelim']['cphase']['rms'] = MA.array(cp_drms, mask=mask)
        self.cpinfo['processed']['prelim']['cphase']['mad'] = MA.array(cp_dmad, mask=mask)

    rawlst = NP.degrees(NP.unwrap(NP.radians(self.cpinfo['raw']['lst'] * 15.0), discont=NP.pi, axis=0)) / 15.0 # in hours but unwrapped to have no discontinuities
    if NP.any(rawlst > 24.0):
        rawlst -= 24.0

    if rawlst.shape[0] > 1: # LST bin only if there are multiple LST
        if lstbinsize is not None:
            if not isinstance(lstbinsize, (int,float)):
                raise TypeError('Input lstbinsize must be a scalar')
            lstbinsize = lstbinsize / 3.6e3 # in hours
            tres = NP.diff(rawlst[:,0]).min() # in hours
            textent = rawlst[:,0].max() - rawlst[:,0].min() + tres # in hours
            eps = 1e-10
            if 'prelim' not in self.cpinfo['processed']:
                self.cpinfo['processed']['prelim'] = {}
            no_change_in_lstbins = False
            if lstbinsize > tres:
                lstbinsize = NP.clip(lstbinsize, tres, textent)
                lstbins = NP.arange(rawlst[:,0].min(), rawlst[:,0].max() + tres + eps, lstbinsize)
                nlstbins = lstbins.size
                lstbins = NP.concatenate((lstbins, [lstbins[-1]+lstbinsize+eps]))
                if nlstbins > 1:
                    lstbinintervals = lstbins[1:] - lstbins[:-1]
                    lstbincenters = lstbins[:-1] + 0.5 * lstbinintervals
                else:
                    lstbinintervals = NP.asarray(lstbinsize).reshape(-1)
                    lstbincenters = lstbins[0] + 0.5 * lstbinintervals
                self.cpinfo['processed']['prelim']['lstbins'] = lstbincenters
                self.cpinfo['processed']['prelim']['dlstbins'] = lstbinintervals
                no_change_in_lstbins = False
            else:
                # Perform no binning and keep the current LST resolution, data and weights
                warnings.warn('LST bin size found to be smaller than the LST resolution in the data. No LST binning/averaging will be performed.')
                lstbinsize = tres
                lstbins = NP.arange(rawlst[:,0].min(), rawlst[:,0].max() + lstbinsize + eps, lstbinsize)
                nlstbins = lstbins.size - 1
                if nlstbins > 1:
                    lstbinintervals = lstbins[1:] - lstbins[:-1]
                else:
                    lstbinintervals = NP.asarray(lstbinsize).reshape(-1)
                self.cpinfo['processed']['prelim']['dlstbins'] = lstbinintervals
                self.cpinfo['processed']['prelim']['lstbins'] = lstbins[:-1]
                # Ensure that the LST bins are inside the min/max envelope to
                # error-free interpolation later
                self.cpinfo['processed']['prelim']['lstbins'][0] += eps
                self.cpinfo['processed']['prelim']['lstbins'][-1] -= eps
                no_change_in_lstbins = True
            counts, lstbin_edges, lstbinnum, ri = OPS.binned_statistic(rawlst[:,0], statistic='count', bins=lstbins)
            counts = counts.astype(int)
            # If day binning already ran, bin its output; otherwise bin the native data
            if 'wts' not in self.cpinfo['processed']['prelim']:
                outshape = (counts.size, self.cpinfo['processed']['native']['eicp'].shape[1], self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3])
            else:
                outshape = (counts.size, self.cpinfo['processed']['prelim']['wts'].shape[1], self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3])
            wts_lstbins = NP.zeros(outshape)
            eicp_tmean = NP.zeros(outshape, dtype=NP.complex128)
            eicp_tmedian = NP.zeros(outshape, dtype=NP.complex128)
            cp_trms = NP.zeros(outshape)
            cp_tmad = NP.zeros(outshape)
            for binnum in range(counts.size):
                if no_change_in_lstbins:
                    ind_lstbin = [binnum]
                else:
                    ind_lstbin = ri[ri[binnum]:ri[binnum+1]]
                if 'wts' not in self.cpinfo['processed']['prelim']:
                    indict = self.cpinfo['processed']['native']
                else:
                    indict = self.cpinfo['processed']['prelim']
                wts_lstbins[binnum,:,:,:] = NP.sum(indict['wts'][ind_lstbin,:,:,:].data, axis=0)
                if 'wts' not in self.cpinfo['processed']['prelim']:
                    eicp_tmean[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.mean(indict['eicp'][ind_lstbin,:,:,:], axis=0)))
                    eicp_tmedian[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.median(indict['eicp'][ind_lstbin,:,:,:].real, axis=0) + 1j * MA.median(self.cpinfo['processed']['native']['eicp'][ind_lstbin,:,:,:].imag, axis=0)))
                    cp_trms[binnum,:,:,:] = MA.std(indict['cphase'][ind_lstbin,:,:,:], axis=0).data
                    cp_tmad[binnum,:,:,:] = MA.median(NP.abs(indict['cphase'][ind_lstbin,:,:,:] - NP.angle(eicp_tmedian[binnum,:,:,:][NP.newaxis,:,:,:])), axis=0).data
                else:
                    eicp_tmean[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.mean(NP.exp(1j*indict['cphase']['mean'][ind_lstbin,:,:,:]), axis=0)))
                    eicp_tmedian[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.median(NP.cos(indict['cphase']['median'][ind_lstbin,:,:,:]), axis=0) + 1j * MA.median(NP.sin(indict['cphase']['median'][ind_lstbin,:,:,:]), axis=0)))
                    cp_trms[binnum,:,:,:] = MA.std(indict['cphase']['mean'][ind_lstbin,:,:,:], axis=0).data
                    cp_tmad[binnum,:,:,:] = MA.median(NP.abs(indict['cphase']['median'][ind_lstbin,:,:,:] - NP.angle(eicp_tmedian[binnum,:,:,:][NP.newaxis,:,:,:])), axis=0).data
            mask = wts_lstbins <= 0.0
            self.cpinfo['processed']['prelim']['wts'] = MA.array(wts_lstbins, mask=mask)
            if 'eicp' not in self.cpinfo['processed']['prelim']:
                self.cpinfo['processed']['prelim']['eicp'] = {}
            if 'cphase' not in self.cpinfo['processed']['prelim']:
                self.cpinfo['processed']['prelim']['cphase'] = {}
            self.cpinfo['processed']['prelim']['eicp']['mean'] = MA.array(eicp_tmean, mask=mask)
            self.cpinfo['processed']['prelim']['eicp']['median'] = MA.array(eicp_tmedian, mask=mask)
            self.cpinfo['processed']['prelim']['cphase']['mean'] = MA.array(NP.angle(eicp_tmean), mask=mask)
            self.cpinfo['processed']['prelim']['cphase']['median'] = MA.array(NP.angle(eicp_tmedian), mask=mask)
            self.cpinfo['processed']['prelim']['cphase']['rms'] = MA.array(cp_trms, mask=mask)
            self.cpinfo['processed']['prelim']['cphase']['mad'] = MA.array(cp_tmad, mask=mask)

    if (rawlst.shape[0] <= 1) or (lstbinsize is None):
        # No LST binning possible/requested; record LST metadata only
        nlstbins = rawlst.shape[0]
        lstbins = NP.mean(rawlst, axis=1)
        if 'prelim' not in self.cpinfo['processed']:
            self.cpinfo['processed']['prelim'] = {}
        self.cpinfo['processed']['prelim']['lstbins'] = lstbins
        if lstbinsize is not None:
            self.cpinfo['processed']['prelim']['dlstbins'] = NP.asarray(lstbinsize).reshape(-1)
        else:
            self.cpinfo['processed']['prelim']['dlstbins'] = NP.zeros(1)
############################################################################
def subtract(self, cphase):
    """
    ------------------------------------------------------------------------
    Subtract complex exponential of the bispectrum phase from the current
    instance and updates the cpinfo attribute

    Inputs:

    cphase [masked array] Bispectrum phase array as a masked array. It
           must be of same size as freqs along the axis specified in
           input axis.

    Action: Updates 'submodel' and 'residual' keys under attribute
    cpinfo under key 'processed'
    ------------------------------------------------------------------------
    """
    if not isinstance(cphase, NP.ndarray):
        raise TypeError('Input cphase must be a numpy array')
    if not isinstance(cphase, MA.MaskedArray):
        # Treat NaN entries as flagged
        cphase = MA.array(cphase, mask=NP.isnan(cphase))
    if not OPS.is_broadcastable(cphase.shape, self.cpinfo['processed']['prelim']['cphase']['median'].shape):
        raise ValueError('Input cphase has shape incompatible with that in instance attribute')
    # Left-pad the shape with singleton axes so the input broadcasts against
    # the stored closure phases. Builtin int replaces the NP.int alias that
    # was removed in numpy >= 1.24.
    minshape = tuple(NP.ones(self.cpinfo['processed']['prelim']['cphase']['median'].ndim - cphase.ndim, dtype=int)) + cphase.shape
    cphase = cphase.reshape(minshape)
    eicp = NP.exp(1j*cphase)
    self.cpinfo['processed']['submodel'] = {}
    self.cpinfo['processed']['submodel']['cphase'] = cphase
    self.cpinfo['processed']['submodel']['eicp'] = eicp
    self.cpinfo['processed']['residual'] = {'eicp': {}, 'cphase': {}}
    for key in ['mean', 'median']:
        # Residual eicp is a difference; residual phase comes from the ratio
        # so it stays wrapped into (-pi, pi]
        eicpdiff = self.cpinfo['processed']['prelim']['eicp'][key] - eicp
        eicpratio = self.cpinfo['processed']['prelim']['eicp'][key] / eicp
        self.cpinfo['processed']['residual']['eicp'][key] = eicpdiff
        self.cpinfo['processed']['residual']['cphase'][key] = MA.array(NP.angle(eicpratio.data), mask=self.cpinfo['processed']['residual']['eicp'][key].mask)
############################################################################
def subsample_differencing(self, daybinsize=None, ndaybins=4, lstbinsize=None):
    """
    ------------------------------------------------------------------------
    Create subsamples and differences between subsamples to evaluate noise
    properties from the data set.

    Inputs:

    daybinsize  [Nonetype or scalar] Day bin size (in days) over which mean
                and median are estimated across different days for a fixed
                LST bin. If set to None, it will look for value in input
                ndaybins. If both are None, no smoothing is performed. Only
                one of daybinsize or ndaybins must be set to non-None value.
                Must yield greater than or equal to 4 bins

    ndaybins    [NoneType or integer] Number of bins along day axis. Only
                if daybinsize is set to None. It produces bins that roughly
                consist of equal number of days in each bin regardless of
                how much the days in each bin are separated from each other.
                If both are None, no smoothing is performed. Only one of
                daybinsize or ndaybins must be set to non-None value. If set,
                it must be set to greater than or equal to 4

    lstbinsize  [NoneType or scalar] LST bin size (in seconds) over which
                mean and median are estimated across the LST. If set to
                None, no smoothing is performed

    Action: Populates self.cpinfo['errinfo'] with day/LST bin metadata,
    the list of non-overlapping pairs of day-bin pairs
    ('list_of_pair_of_pairs') and the half-differences of binned complex
    exponentials ('eicp_diff') and weights ('wts') for the two subsamples.
    ------------------------------------------------------------------------
    """
    if (ndaybins is not None) and (daybinsize is not None):
        raise ValueError('Only one of daybinsize or ndaybins should be set')

    if (daybinsize is not None) or (ndaybins is not None):
        if daybinsize is not None:
            if not isinstance(daybinsize, (int,float)):
                raise TypeError('Input daybinsize must be a scalar')
            dres = NP.diff(self.cpinfo['raw']['days']).min() # in days
            dextent = self.cpinfo['raw']['days'].max() - self.cpinfo['raw']['days'].min() + dres # in days
            if daybinsize > dres:
                daybinsize = NP.clip(daybinsize, dres, dextent)
            eps = 1e-10
            daybins = NP.arange(self.cpinfo['raw']['days'].min(), self.cpinfo['raw']['days'].max() + dres + eps, daybinsize)
            ndaybins = daybins.size
            daybins = NP.concatenate((daybins, [daybins[-1]+daybinsize+eps]))
            # At least 4 day bins are needed to form two disjoint pairs
            if ndaybins >= 4:
                daybinintervals = daybins[1:] - daybins[:-1]
                daybincenters = daybins[:-1] + 0.5 * daybinintervals
            else:
                raise ValueError('Could not find at least 4 bins along repeating days. Adjust binning interval.')
            counts, daybin_edges, daybinnum, ri = OPS.binned_statistic(self.cpinfo['raw']['days'], statistic='count', bins=daybins)
            counts = counts.astype(int) # builtin int: NP.int alias removed in numpy >= 1.24
            wts_daybins = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            eicp_dmean = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]), dtype=NP.complex128)
            eicp_dmedian = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]), dtype=NP.complex128)
            cp_drms = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            cp_dmad = NP.zeros((self.cpinfo['processed']['native']['eicp'].shape[0], counts.size, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3]))
            for binnum in range(counts.size): # range: xrange is Python-2-only
                ind_daybin = ri[ri[binnum]:ri[binnum+1]]
                wts_daybins[:,binnum,:,:] = NP.sum(self.cpinfo['processed']['native']['wts'][:,ind_daybin,:,:].data, axis=1)
                eicp_dmean[:,binnum,:,:] = NP.exp(1j*NP.angle(MA.mean(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:], axis=1)))
                eicp_dmedian[:,binnum,:,:] = NP.exp(1j*NP.angle(MA.median(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:].real, axis=1) + 1j * MA.median(self.cpinfo['processed']['native']['eicp'][:,ind_daybin,:,:].imag, axis=1)))
                cp_drms[:,binnum,:,:] = MA.std(self.cpinfo['processed']['native']['cphase'][:,ind_daybin,:,:], axis=1).data
                cp_dmad[:,binnum,:,:] = MA.median(NP.abs(self.cpinfo['processed']['native']['cphase'][:,ind_daybin,:,:] - NP.angle(eicp_dmedian[:,binnum,:,:][:,NP.newaxis,:,:])), axis=1).data
        else:
            if not isinstance(ndaybins, int):
                raise TypeError('Input ndaybins must be an integer')
            if ndaybins < 4:
                raise ValueError('Input ndaybins must be greater than or equal to 4')
            days_split = NP.array_split(self.cpinfo['raw']['days'], ndaybins)
            daybincenters = NP.asarray([NP.mean(days) for days in days_split])
            daybinintervals = NP.asarray([days.max()-days.min() for days in days_split])
            counts = NP.asarray([days.size for days in days_split])
            wts_split = NP.array_split(self.cpinfo['processed']['native']['wts'].data, ndaybins, axis=1)
            wts_daybins = NP.asarray([NP.sum(wtsitem, axis=1) for wtsitem in wts_split]) # ndaybins x nlst x ntriads x nchan
            wts_daybins = NP.moveaxis(wts_daybins, 0, 1) # nlst x ndaybins x ntriads x nchan
            mask_split = NP.array_split(self.cpinfo['processed']['native']['eicp'].mask, ndaybins, axis=1)
            eicp_split = NP.array_split(self.cpinfo['processed']['native']['eicp'].data, ndaybins, axis=1)
            eicp_dmean = MA.array([MA.mean(MA.array(eicp_split[i], mask=mask_split[i]), axis=1) for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            eicp_dmean = NP.exp(1j * NP.angle(eicp_dmean))
            eicp_dmean = NP.moveaxis(eicp_dmean, 0, 1) # nlst x ndaybins x ntriads x nchan
            eicp_dmedian = MA.array([MA.median(MA.array(eicp_split[i].real, mask=mask_split[i]), axis=1) + 1j * MA.median(MA.array(eicp_split[i].imag, mask=mask_split[i]), axis=1) for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            eicp_dmedian = NP.exp(1j * NP.angle(eicp_dmedian))
            eicp_dmedian = NP.moveaxis(eicp_dmedian, 0, 1) # nlst x ndaybins x ntriads x nchan
            cp_split = NP.array_split(self.cpinfo['processed']['native']['cphase'].data, ndaybins, axis=1)
            cp_drms = NP.array([MA.std(MA.array(cp_split[i], mask=mask_split[i]), axis=1).data for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            cp_drms = NP.moveaxis(cp_drms, 0, 1) # nlst x ndaybins x ntriads x nchan
            cp_dmad = NP.array([MA.median(NP.abs(cp_split[i] - NP.angle(eicp_dmedian[:,[i],:,:])), axis=1).data for i in range(daybincenters.size)]) # ndaybins x nlst x ntriads x nchan
            cp_dmad = NP.moveaxis(cp_dmad, 0, 1) # nlst x ndaybins x ntriads x nchan
        mask = wts_daybins <= 0.0
        wts_daybins = MA.array(wts_daybins, mask=mask)
        cp_dmean = MA.array(NP.angle(eicp_dmean), mask=mask)
        cp_dmedian = MA.array(NP.angle(eicp_dmedian), mask=mask)
        self.cpinfo['errinfo']['daybins'] = daybincenters
        self.cpinfo['errinfo']['diff_dbins'] = daybinintervals
        self.cpinfo['errinfo']['wts'] = {'{0}'.format(ind): None for ind in range(2)}
        self.cpinfo['errinfo']['eicp_diff'] = {'{0}'.format(ind): {} for ind in range(2)}

    rawlst = NP.degrees(NP.unwrap(NP.radians(self.cpinfo['raw']['lst'] * 15.0), discont=NP.pi, axis=0)) / 15.0 # in hours but unwrapped to have no discontinuities
    if NP.any(rawlst > 24.0):
        rawlst -= 24.0

    if rawlst.shape[0] > 1: # LST bin only if there are multiple LST
        if lstbinsize is not None:
            if not isinstance(lstbinsize, (int,float)):
                raise TypeError('Input lstbinsize must be a scalar')
            lstbinsize = lstbinsize / 3.6e3 # in hours
            tres = NP.diff(rawlst[:,0]).min() # in hours
            textent = rawlst[:,0].max() - rawlst[:,0].min() + tres # in hours
            eps = 1e-10
            no_change_in_lstbins = False
            if lstbinsize > tres:
                lstbinsize = NP.clip(lstbinsize, tres, textent)
                lstbins = NP.arange(rawlst[:,0].min(), rawlst[:,0].max() + tres + eps, lstbinsize)
                nlstbins = lstbins.size
                lstbins = NP.concatenate((lstbins, [lstbins[-1]+lstbinsize+eps]))
                if nlstbins > 1:
                    lstbinintervals = lstbins[1:] - lstbins[:-1]
                    lstbincenters = lstbins[:-1] + 0.5 * lstbinintervals
                else:
                    lstbinintervals = NP.asarray(lstbinsize).reshape(-1)
                    lstbincenters = lstbins[0] + 0.5 * lstbinintervals
                self.cpinfo['errinfo']['lstbins'] = lstbincenters
                self.cpinfo['errinfo']['dlstbins'] = lstbinintervals
                no_change_in_lstbins = False
            else:
                # Perform no binning and keep the current LST resolution
                warnings.warn('LST bin size found to be smaller than the LST resolution in the data. No LST binning/averaging will be performed.')
                lstbinsize = tres
                lstbins = NP.arange(rawlst[:,0].min(), rawlst[:,0].max() + lstbinsize + eps, lstbinsize)
                nlstbins = lstbins.size - 1
                if nlstbins > 1:
                    lstbinintervals = lstbins[1:] - lstbins[:-1]
                else:
                    lstbinintervals = NP.asarray(lstbinsize).reshape(-1)
                self.cpinfo['errinfo']['dlstbins'] = lstbinintervals
                self.cpinfo['errinfo']['lstbins'] = lstbins[:-1]
                # Ensure that the LST bins are inside the min/max envelope to
                # error-free interpolation later
                self.cpinfo['errinfo']['lstbins'][0] += eps
                self.cpinfo['errinfo']['lstbins'][-1] -= eps
                no_change_in_lstbins = True
            counts, lstbin_edges, lstbinnum, ri = OPS.binned_statistic(rawlst[:,0], statistic='count', bins=lstbins)
            counts = counts.astype(int)
            outshape = (counts.size, wts_daybins.shape[1], self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3])
            wts_lstbins = NP.zeros(outshape)
            eicp_tmean = NP.zeros(outshape, dtype=NP.complex128)
            eicp_tmedian = NP.zeros(outshape, dtype=NP.complex128)
            cp_trms = NP.zeros(outshape)
            cp_tmad = NP.zeros(outshape)
            for binnum in range(counts.size):
                if no_change_in_lstbins:
                    ind_lstbin = [binnum]
                else:
                    ind_lstbin = ri[ri[binnum]:ri[binnum+1]]
                wts_lstbins[binnum,:,:,:] = NP.sum(wts_daybins[ind_lstbin,:,:,:].data, axis=0)
                eicp_tmean[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.mean(NP.exp(1j*cp_dmean[ind_lstbin,:,:,:]), axis=0)))
                eicp_tmedian[binnum,:,:,:] = NP.exp(1j*NP.angle(MA.median(NP.cos(cp_dmedian[ind_lstbin,:,:,:]), axis=0) + 1j * MA.median(NP.sin(cp_dmedian[ind_lstbin,:,:,:]), axis=0)))
            mask = wts_lstbins <= 0.0
            wts_lstbins = MA.array(wts_lstbins, mask=mask)
            eicp_tmean = MA.array(eicp_tmean, mask=mask)
            eicp_tmedian = MA.array(eicp_tmedian, mask=mask)
        else:
            # No LST binning requested: carry the day-binned quantities forward
            wts_lstbins = MA.copy(wts_daybins)
            mask = wts_lstbins.mask
            eicp_tmean = MA.array(NP.exp(1j*NP.angle(NP.exp(1j*cp_dmean))), mask=mask)
            eicp_tmedian = MA.array(NP.exp(1j*NP.angle(NP.cos(cp_dmedian) + 1j * NP.sin(cp_dmedian))), mask=mask)

    if (rawlst.shape[0] <= 1) or (lstbinsize is None):
        nlstbins = rawlst.shape[0]
        lstbins = NP.mean(rawlst, axis=1)
        self.cpinfo['errinfo']['lstbins'] = lstbins
        if lstbinsize is not None:
            self.cpinfo['errinfo']['dlstbins'] = NP.asarray(lstbinsize).reshape(-1)
        else:
            self.cpinfo['errinfo']['dlstbins'] = NP.zeros(1)

    # Number of unique, disjoint pairs of day-bin pairs
    ncomb = NP.sum(NP.asarray([(ndaybins-i-1)*(ndaybins-i-2)*(ndaybins-i-3)/2 for i in range(ndaybins-3)])).astype(int)
    diff_outshape = (nlstbins, ncomb, self.cpinfo['processed']['native']['eicp'].shape[2], self.cpinfo['processed']['native']['eicp'].shape[3])
    for diffind in range(2):
        # builtin complex/float: NP.complex and NP.float aliases were
        # removed in numpy >= 1.24
        self.cpinfo['errinfo']['eicp_diff']['{0}'.format(diffind)]['mean'] = MA.empty(diff_outshape, dtype=complex)
        self.cpinfo['errinfo']['eicp_diff']['{0}'.format(diffind)]['median'] = MA.empty(diff_outshape, dtype=complex)
        self.cpinfo['errinfo']['wts']['{0}'.format(diffind)] = MA.empty(diff_outshape, dtype=float)
    ind = -1
    self.cpinfo['errinfo']['list_of_pair_of_pairs'] = []
    list_of_pair_of_pairs = []
    for i in range(ndaybins-1):
        for j in range(i+1,ndaybins):
            for k in range(ndaybins-1):
                if (k != i) and (k != j):
                    for m in range(k+1,ndaybins):
                        if (m != i) and (m != j):
                            pair_of_pairs = [set([i,j]), set([k,m])]
                            # Keep only combinations not seen in either order
                            if (pair_of_pairs not in list_of_pair_of_pairs) and (pair_of_pairs[::-1] not in list_of_pair_of_pairs):
                                ind += 1
                                list_of_pair_of_pairs += [copy.deepcopy(pair_of_pairs)]
                                self.cpinfo['errinfo']['list_of_pair_of_pairs'] += [[i,j,k,m]]
                                for stat in ['mean', 'median']:
                                    if stat == 'mean':
                                        self.cpinfo['errinfo']['eicp_diff']['0'][stat][:,ind,:,:] = MA.array(0.5 * (eicp_tmean[:,j,:,:].data - eicp_tmean[:,i,:,:].data), mask=NP.logical_or(eicp_tmean[:,j,:,:].mask, eicp_tmean[:,i,:,:].mask))
                                        self.cpinfo['errinfo']['eicp_diff']['1'][stat][:,ind,:,:] = MA.array(0.5 * (eicp_tmean[:,m,:,:].data - eicp_tmean[:,k,:,:].data), mask=NP.logical_or(eicp_tmean[:,m,:,:].mask, eicp_tmean[:,k,:,:].mask))
                                        self.cpinfo['errinfo']['wts']['0'][:,ind,:,:] = MA.array(NP.sqrt(wts_lstbins[:,j,:,:].data**2 + wts_lstbins[:,i,:,:].data**2), mask=NP.logical_or(wts_lstbins[:,j,:,:].mask, wts_lstbins[:,i,:,:].mask))
                                        self.cpinfo['errinfo']['wts']['1'][:,ind,:,:] = MA.array(NP.sqrt(wts_lstbins[:,m,:,:].data**2 + wts_lstbins[:,k,:,:].data**2), mask=NP.logical_or(wts_lstbins[:,m,:,:].mask, wts_lstbins[:,k,:,:].mask))
                                    else:
                                        self.cpinfo['errinfo']['eicp_diff']['0'][stat][:,ind,:,:] = MA.array(0.5 * (eicp_tmedian[:,j,:,:].data - eicp_tmedian[:,i,:,:].data), mask=NP.logical_or(eicp_tmedian[:,j,:,:].mask, eicp_tmedian[:,i,:,:].mask))
                                        self.cpinfo['errinfo']['eicp_diff']['1'][stat][:,ind,:,:] = MA.array(0.5 * (eicp_tmedian[:,m,:,:].data - eicp_tmedian[:,k,:,:].data), mask=NP.logical_or(eicp_tmedian[:,m,:,:].mask, eicp_tmedian[:,k,:,:].mask))
                                    # Re-mask wherever the combined weights are non-positive
                                    mask0 = self.cpinfo['errinfo']['wts']['0'] <= 0.0
                                    mask1 = self.cpinfo['errinfo']['wts']['1'] <= 0.0
                                    self.cpinfo['errinfo']['eicp_diff']['0'][stat] = MA.array(self.cpinfo['errinfo']['eicp_diff']['0'][stat], mask=mask0)
                                    self.cpinfo['errinfo']['eicp_diff']['1'][stat] = MA.array(self.cpinfo['errinfo']['eicp_diff']['1'][stat], mask=mask1)
                                    self.cpinfo['errinfo']['wts']['0'] = MA.array(self.cpinfo['errinfo']['wts']['0'], mask=mask0)
                                    self.cpinfo['errinfo']['wts']['1'] = MA.array(self.cpinfo['errinfo']['wts']['1'], mask=mask1)
############################################################################
def save(self, outfile=None):
    """
    ------------------------------------------------------------------------
    Save contents of attribute cpinfo in external HDF5 file

    Inputs:

    outfile [NoneType or string] Output file (HDF5) to save contents to.
            If set to None (default), it will be saved in the file
            pointed to by the extfile attribute of class ClosurePhase
    ------------------------------------------------------------------------
    """
    # Fall back to the instance's own external file when none is given
    target = self.extfile if outfile is None else outfile
    compressinfo = {'compress_fmt': 'gzip', 'compress_opts': 9}
    NMO.save_dict_to_hdf5(self.cpinfo, target, compressinfo=compressinfo)
################################################################################
class ClosurePhaseDelaySpectrum(object):
"""
----------------------------------------------------------------------------
Class to hold and operate on Closure Phase information.
It has the following attributes and member functions.
Attributes:
cPhase [instance of class ClosurePhase] Instance of class
ClosurePhase
f [numpy array] Frequencies (in Hz) in closure phase spectra
df [float] Frequency resolution (in Hz) in closure phase
spectra
cPhaseDS [dictionary] Possibly oversampled Closure Phase Delay
Spectrum information.
cPhaseDS_resampled
[dictionary] Resampled Closure Phase Delay Spectrum
information.
Member functions:
__init__() Initialize instance of class ClosurePhaseDelaySpectrum
FT() Fourier transform of complex closure phase spectra mapping
from frequency axis to delay axis.
subset() Return triad and time indices to select a subset of
processed data
compute_power_spectrum()
Compute power spectrum of closure phase data. It is in units
of Mpc/h.
rescale_power_spectrum()
Rescale power spectrum to dimensional quantity by converting
the ratio given visibility amplitude information
average_rescaled_power_spectrum()
Average the rescaled power spectrum with physical units
along certain axes with inverse variance or regular
averaging
beam3Dvol() Compute three-dimensional volume of the antenna power
pattern along two transverse axes and one LOS axis.
----------------------------------------------------------------------------
"""
def __init__(self, cPhase):
"""
------------------------------------------------------------------------
Initialize instance of class ClosurePhaseDelaySpectrum
Inputs:
cPhase [class ClosurePhase] Instance of class ClosurePhase
------------------------------------------------------------------------
"""
if not isinstance(cPhase, ClosurePhase):
raise TypeError('Input cPhase must be an instance of class ClosurePhase')
self.cPhase = cPhase
self.f = self.cPhase.f
self.df = self.cPhase.df
self.cPhaseDS = None
self.cPhaseDS_resampled = None
############################################################################
def FT(self, bw_eff, freq_center=None, shape=None, fftpow=None, pad=None,
datapool='prelim', visscaleinfo=None, method='fft', resample=True,
apply_flags=True):
"""
------------------------------------------------------------------------
Fourier transform of complex closure phase spectra mapping from
frequency axis to delay axis.
Inputs:
bw_eff [scalar or numpy array] effective bandwidths (in Hz) on the
selected frequency windows for subband delay transform of
closure phases. If a scalar value is provided, the same
will be applied to all frequency windows
freq_center [scalar, list or numpy array] frequency centers (in Hz) of
the selected frequency windows for subband delay transform
of closure phases. The value can be a scalar, list or numpy
array. If a scalar is provided, the same will be applied to
all frequency windows. Default=None uses the center
frequency from the class attribute named channels
shape [string] frequency window shape for subband delay transform
of closure phases. Accepted values for the string are
'rect' or 'RECT' (for rectangular), 'bnw' and 'BNW' (for
Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
(rectangular window)
fftpow [scalar] the power to which the FFT of the window will be
raised. The value must be a positive scalar. Default = 1.0
pad [scalar] padding fraction relative to the number of
frequency channels for closure phases. Value must be a
non-negative scalar. For e.g., a pad of 1.0 pads the
frequency axis with zeros of the same width as the number
of channels. After the delay transform, the transformed
closure phases are downsampled by a factor of 1+pad. If a
negative value is specified, delay transform will be
performed with no padding. Default=None sets to padding
factor to 1.0
datapool [string] Specifies which data set is to be Fourier
transformed
visscaleinfo
[dictionary] Dictionary containing reference visibilities
based on which the closure phases will be scaled to units
of visibilities. It contains the following keys and values:
'vis' [numpy array or instance of class
InterferometerArray] Reference visibilities from the
baselines that form the triad. It can be an instance
of class RI.InterferometerArray or a numpy array.
If an instance of class InterferometerArray, the
baseline triplet must be set in key 'bltriplet'
and value in key 'lst' will be ignored. If the
value under this key 'vis' is set to a numpy array,
it must be of shape (nbl=3, nlst_vis, nchan). In
this case the value under key 'bltriplet' will be
ignored. The nearest LST will be looked up and
applied after smoothing along LST based on the
smoothing parameter 'smooth'
'bltriplet'
[Numpy array] Will be used in searching for matches
to these three baseline vectors if the value under
key 'vis' is set to an instance of class
InterferometerArray. However, if value under key
'vis' is a numpy array, this key 'bltriplet' will
be ignored.
'lst' [numpy array] Reference LST (in hours). It is of
shape (nlst_vis,). It will be used only if value
under key 'vis' is a numpy array, otherwise it will
be ignored and read from the instance of class
InterferometerArray passed under key 'vis'. If the
specified LST range does not cover the data LST
range, those LST will contain NaN in the delay
spectrum
'smoothinfo'
[dictionary] Dictionary specifying smoothing and/or
interpolation parameters. It has the following keys
and values:
'op_type' [string] Specifies the interpolating
operation. Must be specified (no
default). Accepted values are
'interp1d' (scipy.interpolate),
'median' (skimage.filters), 'tophat'
(astropy.convolution) and 'gaussian'
(astropy.convolution)
'interp_kind' [string (optional)] Specifies the
interpolation kind (if 'op_type' is
set to 'interp1d'). For accepted
values, see
scipy.interpolate.interp1d()
'window_size' [integer (optional)] Specifies the
size of the interpolating/smoothing
kernel. Only applies when 'op_type'
is set to 'median', 'tophat' or
'gaussian' The kernel is a tophat
function when 'op_type' is set to
'median' or 'tophat'. If refers to
FWHM when 'op_type' is set to
'gaussian'
resample [boolean] If set to True (default), resample the delay
spectrum axis to independent samples along delay axis. If
set to False, return the results as is even if they may be
be oversampled and not all samples may be independent
method [string] Specifies the Fourier transform method to be used.
Accepted values are 'fft' (default) for FFT and 'nufft' for
non-uniform FFT
apply_flags [boolean] If set to True (default), weights determined from
flags will be applied. If False, no weights from flagging
will be applied, and thus even flagged data will be included
Outputs:
A dictionary that contains the oversampled (if resample=False) or
resampled (if resample=True) delay spectrum information. It has the
following keys and values:
'freq_center' [numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow' [scalar] the power to which the FFT of the window was
raised. The value is be a positive scalar with
default = 1.0
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nlags=nchan+npad if resample=True, where
npad is the number of frequency channels padded
specified under the key 'npad'. If resample=False,
nlags = number of delays after resampling only
independent delays. The lags roughly correspond to
k_parallel.
'lag_kernel' [numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_win x nlst x ndays x ntriads x nlags.
nlags=nchan+npad if resample=True, where npad is the
number of frequency channels padded specified under
the key 'npad'. If resample=False, nlags = number of
delays after resampling only independent delays.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth of
the subband specified in bw_eff
'whole' [dictionary] Delay spectrum results corresponding to
bispectrum phase in 'prelim' key of attribute cpinfo.
Contains the following keys and values:
'dspec' [dictionary] Contains the following keys and
values:
'twts' [numpy array] Weights from time-based
flags that went into time-averaging.
Shape=(nlst,ndays,ntriads,nchan)
'mean' [numpy array] Delay spectrum of closure
phases based on their mean across time
intervals.
Shape=(nspw,nlst,ndays,ntriads,nlags)
'median'
[numpy array] Delay spectrum of closure
phases based on their median across time
intervals.
Shape=(nspw,nlst,ndays,ntriads,nlags)
'submodel' [dictionary] Delay spectrum results corresponding to
bispectrum phase in 'submodel' key of attribute cpinfo.
Contains the following keys and values:
'dspec' [numpy array] Delay spectrum of closure phases
Shape=(nspw,nlst,ndays,ntriads,nlags)
'residual' [dictionary] Delay spectrum results corresponding to
bispectrum phase in 'residual' key of attribute cpinfo
after subtracting 'submodel' bispectrum phase from that
of 'prelim'. It contains the following keys and values:
'dspec' [dictionary] Contains the following keys and
values:
'twts' [numpy array] Weights from time-based
flags that went into time-averaging.
Shape=(nlst,ndays,ntriads,nchan)
'mean' [numpy array] Delay spectrum of closure
phases based on their mean across time
intervals.
Shape=(nspw,nlst,ndays,ntriads,nlags)
'median'
[numpy array] Delay spectrum of closure
phases based on their median across time
intervals.
Shape=(nspw,nlst,ndays,ntriads,nlags)
'errinfo' [dictionary] It has two keys 'dspec0' and 'dspec1' each
of which are dictionaries with the following keys and
values:
'twts' [numpy array] Weights for the subsample
difference. It is of shape (nlst, ndays,
ntriads, nchan)
'mean' [numpy array] Delay spectrum of the
subsample difference obtained by using the
mean statistic. It is of shape (nspw, nlst,
ndays, ntriads, nlags)
'median'
[numpy array] Delay spectrum of the subsample
difference obtained by using the median
statistic. It is of shape (nspw, nlst, ndays,
ntriads, nlags)
------------------------------------------------------------------------
"""
try:
bw_eff
except NameError:
raise NameError('Effective bandwidth must be specified')
else:
if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
bw_eff = NP.asarray(bw_eff).reshape(-1)
if NP.any(bw_eff <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = NP.asarray(self.f[self.f.size/2]).reshape(-1)
elif isinstance(freq_center, (int, float, list, NP.ndarray)):
freq_center = NP.asarray(freq_center).reshape(-1)
if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
else:
raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
if (bw_eff.size == 1) and (freq_center.size > 1):
bw_eff = NP.repeat(bw_eff, freq_center.size)
elif (bw_eff.size > 1) and (freq_center.size == 1):
freq_center = NP.repeat(freq_center, bw_eff.size)
elif bw_eff.size != freq_center.size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, str):
raise TypeError('Window shape must be a string')
if shape not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = 'rect'
if fftpow is None:
fftpow = 1.0
else:
if not isinstance(fftpow, (int, float)):
raise TypeError('Power to raise window FFT by must be a scalar value.')
if fftpow < 0.0:
raise ValueError('Power for raising FFT of window by must be positive.')
if pad is None:
pad = 1.0
else:
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if not isinstance(datapool, str):
raise TypeError('Input datapool must be a string')
if datapool.lower() not in ['prelim']:
raise ValueError('Specified datapool not supported')
if visscaleinfo is not None:
if not isinstance(visscaleinfo, dict):
raise TypeError('Input visscaleinfo must be a dictionary')
if 'vis' not in visscaleinfo:
raise KeyError('Input visscaleinfo does not contain key "vis"')
if not isinstance(visscaleinfo['vis'], RI.InterferometerArray):
if 'lst' not in visscaleinfo:
raise KeyError('Input visscaleinfo does not contain key "lst"')
lst_vis = visscaleinfo['lst'] * 15.0
if not isinstance(visscaleinfo['vis'], (NP.ndarray,MA.MaskedArray)):
raise TypeError('Input visibilities must be a numpy or a masked array')
if not isinstance(visscaleinfo['vis'], MA.MaskedArray):
visscaleinfo['vis'] = MA.array(visscaleinfo['vis'], mask=NP.isnan(visscaleinfo['vis']))
vistriad = MA.copy(visscaleinfo['vis'])
else:
if 'bltriplet' not in visscaleinfo:
raise KeyError('Input dictionary visscaleinfo does not contain key "bltriplet"')
blind, blrefind, dbl = LKP.find_1NN(visscaleinfo['vis'].baselines, visscaleinfo['bltriplet'], distance_ULIM=0.2, remove_oob=True)
if blrefind.size != 3:
blind_missing = NP.setdiff1d(NP.arange(3), blind, assume_unique=True)
blind_next, blrefind_next, dbl_next = LKP.find_1NN(visscaleinfo['vis'].baselines, -1*visscaleinfo['bltriplet'][blind_missing,:], distance_ULIM=0.2, remove_oob=True)
if blind_next.size + blind.size != 3:
raise ValueError('Exactly three baselines were not found in the reference baselines')
else:
blind = NP.append(blind, blind_missing[blind_next])
blrefind = NP.append(blrefind, blrefind_next)
else:
blind_missing = []
vistriad = NP.transpose(visscaleinfo['vis'].skyvis_freq[blrefind,:,:], (0,2,1))
if len(blind_missing) > 0:
vistriad[-blrefind_next.size:,:,:] = vistriad[-blrefind_next.size:,:,:].conj()
vistriad = MA.array(vistriad, mask=NP.isnan(vistriad))
lst_vis = visscaleinfo['vis'].lst
viswts = MA.array(NP.ones_like(vistriad.data), mask=vistriad.mask, dtype=NP.float)
lst_out = self.cPhase.cpinfo['processed']['prelim']['lstbins'] * 15.0
if lst_vis.size == 1: # Apply the visibility scaling from one reference LST to all LST
vis_ref = vistriad * NP.ones(lst_out.size).reshape(1,-1,1)
wts_ref = viswts * NP.ones(lst_out.size).reshape(1,-1,1)
else:
vis_ref, wts_ref = OPS.interpolate_masked_array_1D(vistriad, viswts, 1, visscaleinfo['smoothinfo'], inploc=lst_vis, outloc=lst_out)
if not isinstance(method, str):
raise TypeError('Input method must be a string')
if method.lower() not in ['fft', 'nufft']:
raise ValueError('Specified FFT method not supported')
if not isinstance(apply_flags, bool):
raise TypeError('Input apply_flags must be boolean')
flagwts = 1.0
visscale = 1.0
if datapool.lower() == 'prelim':
if method.lower() == 'fft':
freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=NP.float_) # nspw x nchan
frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.51*self.df, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.51*self.df, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
npad = int(self.f.size * pad)
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
result = {'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'fftpow': fftpow, 'npad': npad, 'lags': lags, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=-1), 'whole': {'dspec': {'twts': self.cPhase.cpinfo['processed'][datapool]['wts']}}, 'residual': {'dspec': {'twts': self.cPhase.cpinfo['processed'][datapool]['wts']}}, 'errinfo': {'dspec0': {'twts': self.cPhase.cpinfo['errinfo']['wts']['0']}, 'dspec1': {'twts': self.cPhase.cpinfo['errinfo']['wts']['1']}}, 'submodel': {}}
if visscaleinfo is not None:
visscale = NP.nansum(NP.transpose(vis_ref[NP.newaxis,NP.newaxis,:,:,:], axes=(0,3,1,2,4)) * freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:], axis=-1, keepdims=True) / NP.nansum(freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:], axis=-1, keepdims=True) # nspw x nlst x (ndays=1) x (nbl=3) x (nchan=1)
visscale = NP.sqrt(1.0/NP.nansum(1/NP.abs(visscale)**2, axis=-2, keepdims=True)) # nspw x nlst x (ndays=1) x (ntriads=1) x (nchan=1)
for dpool in ['errinfo', 'prelim', 'submodel', 'residual']:
if dpool.lower() == 'errinfo':
for diffind in range(2):
if apply_flags:
flagwts = NP.copy(self.cPhase.cpinfo['errinfo']['wts']['{0}'.format(diffind)].data)
flagwts = flagwts[NP.newaxis,...] # nlst x ndays x ntriads x nchan --> (nspw=1) x nlst x ndays x ntriads x nchan
flagwts = 1.0 * flagwts / NP.mean(flagwts, axis=-1, keepdims=True) # (nspw=1) x nlst x ndays x ntriads x nchan
for stat in self.cPhase.cpinfo[dpool]['eicp_diff']['{0}'.format(diffind)]:
eicp = NP.copy(self.cPhase.cpinfo[dpool]['eicp_diff']['{0}'.format(diffind)][stat].data) # Minimum shape as stored
# eicp = NP.copy(self.cPhase.cpinfo[dpool]['eicp_diff']['{0}'.format(diffind)][stat].filled(0.0)) # Minimum shape as stored
eicp = NP.broadcast_to(eicp, self.cPhase.cpinfo[dpool]['eicp_diff']['{0}'.format(diffind)][stat].shape) # Broadcast to final shape
eicp = eicp[NP.newaxis,...] # nlst x ndayscomb x ntriads x nchan --> (nspw=1) x nlst x ndayscomb x ntriads x nchan
ndim_padtuple = [(0,0)]*(eicp.ndim-1) + [(0,npad)] # [(0,0), (0,0), (0,0), (0,0), (0,npad)]
result[dpool]['dspec{0}'.format(diffind)][stat] = DSP.FT1D(NP.pad(eicp*flagwts*freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:]*visscale.filled(NP.nan), ndim_padtuple, mode='constant'), ax=-1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
else:
if dpool in self.cPhase.cpinfo['processed']:
if apply_flags:
flagwts = NP.copy(self.cPhase.cpinfo['processed'][datapool]['wts'].data)
flagwts = flagwts[NP.newaxis,...] # nlst x ndays x ntriads x nchan --> (nspw=1) x nlst x ndays x ntriads x nchan
flagwts = 1.0 * flagwts / NP.mean(flagwts, axis=-1, keepdims=True) # (nspw=1) x nlst x ndays x ntriads x nchan
if dpool == 'submodel':
eicp = NP.copy(self.cPhase.cpinfo['processed'][dpool]['eicp'].data) # Minimum shape as stored
# eicp = NP.copy(self.cPhase.cpinfo['processed'][dpool]['eicp'].filled(1.0)) # Minimum shape as stored
eicp = NP.broadcast_to(eicp, self.cPhase.cpinfo['processed'][datapool]['eicp']['mean'].shape) # Broadcast to final shape
eicp = eicp[NP.newaxis,...] # nlst x ndays x ntriads x nchan --> (nspw=1) x nlst x ndays x ntriads x nchan
ndim_padtuple = [(0,0)]*(eicp.ndim-1) + [(0,npad)] # [(0,0), (0,0), (0,0), (0,0), (0,npad)]
result[dpool]['dspec'] = DSP.FT1D(NP.pad(eicp*flagwts*freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:]*visscale.filled(NP.nan), ndim_padtuple, mode='constant'), ax=-1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
else:
for key in self.cPhase.cpinfo['processed'][dpool]['eicp']:
eicp = NP.copy(self.cPhase.cpinfo['processed'][dpool]['eicp'][key].data)
# eicp = NP.copy(self.cPhase.cpinfo['processed'][dpool]['eicp'][key].filled(1.0))
eicp = eicp[NP.newaxis,...] # nlst x ndays x ntriads x nchan --> (nspw=1) x nlst x ndays x ntriads x nchan
ndim_padtuple = [(0,0)]*(eicp.ndim-1) + [(0,npad)] # [(0,0), (0,0), (0,0), (0,0), (0,npad)]
if dpool == 'prelim':
result['whole']['dspec'][key] = DSP.FT1D(NP.pad(eicp*flagwts*freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:]*visscale.filled(NP.nan), ndim_padtuple, mode='constant'), ax=-1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
else:
result[dpool]['dspec'][key] = DSP.FT1D(NP.pad(eicp*flagwts*freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:]*visscale.filled(NP.nan), ndim_padtuple, mode='constant'), ax=-1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['lag_kernel'] = DSP.FT1D(NP.pad(flagwts*freq_wts[:,NP.newaxis,NP.newaxis,NP.newaxis,:], ndim_padtuple, mode='constant'), ax=-1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
self.cPhaseDS = result
if resample:
result_resampled = copy.deepcopy(result)
downsample_factor = NP.min((self.f.size + npad) * self.df / bw_eff)
result_resampled['lags'] = DSP.downsampler(result_resampled['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
result_resampled['lag_kernel'] = DSP.downsampler(result_resampled['lag_kernel'], downsample_factor, axis=-1, method='interp', kind='linear')
for dpool in ['errinfo', 'prelim', 'submodel', 'residual']:
if dpool.lower() == 'errinfo':
for diffind in self.cPhase.cpinfo[dpool]['eicp_diff']:
for key in self.cPhase.cpinfo[dpool]['eicp_diff'][diffind]:
result_resampled[dpool]['dspec'+diffind][key] = DSP.downsampler(result_resampled[dpool]['dspec'+diffind][key], downsample_factor, axis=-1, method='FFT')
if dpool in self.cPhase.cpinfo['processed']:
if dpool == 'submodel':
result_resampled[dpool]['dspec'] = DSP.downsampler(result_resampled[dpool]['dspec'], downsample_factor, axis=-1, method='FFT')
else:
for key in self.cPhase.cpinfo['processed'][datapool]['eicp']:
if dpool == 'prelim':
result_resampled['whole']['dspec'][key] = DSP.downsampler(result_resampled['whole']['dspec'][key], downsample_factor, axis=-1, method='FFT')
else:
result_resampled[dpool]['dspec'][key] = DSP.downsampler(result_resampled[dpool]['dspec'][key], downsample_factor, axis=-1, method='FFT')
self.cPhaseDS_resampled = result_resampled
return result_resampled
else:
return result
############################################################################
def subset(self, selection=None):
"""
------------------------------------------------------------------------
Return triad and time indices to select a subset of processed data
Inputs:
selection [NoneType or dictionary] Selection parameters based on which
triad, LST, and day indices will be returned. If set to None
(default), all triad, LST, and day indices will be returned.
Otherwise it must be a dictionary with the following keys
and values:
'triads' [NoneType or list of 3-element tuples] If set
to None (default), indices of all triads are
returned. Otherwise, the specific triads must
be specified such as [(1,2,3), (1,2,4), ...]
and their indices will be returned
'lst' [NoneType, list or numpy array] If set to None
(default), indices of all LST are returned.
Otherwise must be a list or numpy array
containing indices to LST.
'days' [NoneType, list or numpy array] If set to None
(default), indices of all days are returned.
Otherwise must be a list or numpy array
containing indices to days.
Outputs:
Tuple (triad_ind, lst_ind, day_ind, day_ind_eicpdiff) containing the
triad, LST, day, and day-pair (for subsample differences) indices,
each as a numpy array
------------------------------------------------------------------------
"""
if selection is None:
selsection = {}
else:
if not isinstance(selection, dict):
raise TypeError('Input selection must be a dictionary')
triads = map(tuple, self.cPhase.cpinfo['raw']['triads'])
if 'triads' not in selection:
selection['triads'] = triads
if selection['triads'] is None:
selection['triads'] = triads
triad_ind = [triads.index(triad) for triad in selection['triads']]
triad_ind = NP.asarray(triad_ind)
lst_ind = None
if 'lst' not in selection:
if 'prelim' in self.cPhase.cpinfo['processed']:
lst_ind = NP.arange(self.cPhase.cpinfo['processed']['prelim']['wts'].shape[0])
else:
if selection['lst'] is None:
if 'prelim' in self.cPhase.cpinfo['processed']:
lst_ind = NP.arange(self.cPhase.cpinfo['processed']['prelim']['wts'].shape[0])
elif isinstance(selection['lst'], (list,NP.ndarray)):
if 'prelim' in self.cPhase.cpinfo['processed']:
lst_ind = selection['lst']
if NP.any(NP.logical_or(lst_ind < 0, lst_ind >= self.cPhase.cpinfo['processed']['prelim']['wts'].shape[0])):
raise ValueError('Input processed lst indices out of bounds')
else:
raise TypeError('Wrong type for processed lst indices')
if lst_ind is None:
raise ValueError('LST index selection could not be performed')
day_ind = None
day_ind_eicpdiff = None
if 'days' not in selection:
if 'prelim' in self.cPhase.cpinfo['processed']:
day_ind = NP.arange(self.cPhase.cpinfo['processed']['prelim']['wts'].shape[1])
if 'errinfo' in self.cPhase.cpinfo:
day_ind_eicpdiff = NP.arange(len(self.cPhase.cpinfo['errinfo']['list_of_pair_of_pairs']))
else:
if selection['days'] is None:
if 'prelim' in self.cPhase.cpinfo['processed']:
day_ind = NP.arange(self.cPhase.cpinfo['processed']['prelim']['wts'].shape[1])
if 'errinfo' in self.cPhase.cpinfo:
day_ind_eicpdiff = NP.arange(len(self.cPhase.cpinfo['errinfo']['list_of_pair_of_pairs']))
elif isinstance(selection['days'], (list,NP.ndarray)):
if 'prelim' in self.cPhase.cpinfo['processed']:
day_ind = selection['days']
if NP.any(NP.logical_or(day_ind < 0, day_ind >= self.cPhase.cpinfo['processed']['prelim']['wts'].shape[1])):
raise ValueError('Input processed day indices out of bounds')
if 'errinfo' in self.cPhase.cpinfo:
day_ind_eicpdiff = [i for i,item in enumerate(self.cPhase.cpinfo['errinfo']['list_of_pair_of_pairs']) if len(set(item)-set(selection['days']))==0]
else:
raise TypeError('Wrong type for processed day indices')
if day_ind is None:
raise ValueError('Day index selection could not be performed')
return (triad_ind, lst_ind, day_ind, day_ind_eicpdiff)
############################################################################
def compute_power_spectrum(self, cpds=None, selection=None, autoinfo=None,
xinfo=None, cosmo=cosmo100, units='K', beamparms=None):
"""
------------------------------------------------------------------------
Compute power spectrum of closure phase data. It is in units of Mpc/h
Inputs:
cpds [dictionary] A dictionary that contains the 'oversampled' (if
resample=False) and/or 'resampled' (if resample=True) delay
spectrum information. If it is not specified the attributes
cPhaseDS['processed'] and cPhaseDS_resampled['processed'] are
used. Under each of these keys, it holds a dictionary that has
the following keys and values:
'freq_center' [numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is
roughly equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband
delay transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed.
It is of size n_win. It is roughly equivalent to
width in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow' [scalar] the power to which the FFT of the window
was raised. The value is be a positive scalar
with default = 1.0
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform.
It is of size nlags. The lags roughly correspond
to k_parallel.
'lag_kernel' [numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale
(in pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth.
It is of size n_win. The unit size of a pixel is
determined by the difference between adjacent
pixels in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
of the subband specified in bw_eff
'processed' [dictionary] Contains the following keys and
values:
'dspec' [dictionary] Contains the following keys
and values:
'twts' [numpy array] Weights from
time-based flags that went into
time-averaging.
Shape=(ntriads,npol,nchan,nt)
'mean' [numpy array] Delay spectrum of
closure phases based on their
mean across time intervals.
Shape=(nspw,npol,nt,ntriads,nlags)
'median'
[numpy array] Delay spectrum of
closure phases based on their
median across time intervals.
Shape=(nspw,npol,nt,ntriads,nlags)
selection [NoneType or dictionary] Selection parameters based on which
triad, LST, and day indices will be returned. If set to None
(default), all triad, LST, and day indices will be returned.
Otherwise it must be a dictionary with the following keys
and values:
'triads' [NoneType or list of 3-element tuples] If set
to None (default), indices of all triads are
returned. Otherwise, the specific triads must
be specified such as [(1,2,3), (1,2,4), ...]
and their indices will be returned
'lst' [NoneType, list or numpy array] If set to None
(default), indices of all LST are returned.
Otherwise must be a list or numpy array
containing indices to LST.
'days' [NoneType, list or numpy array] If set to None
(default), indices of all days are returned.
Otherwise must be a list or numpy array
containing indices to days.
autoinfo
[NoneType or dictionary] Specifies parameters for processing
before power spectrum in auto or cross modes. If set to None,
a dictionary will be created with the default values as
described below. The dictionary must have the following keys
and values:
'axes' [NoneType/int/list/tuple/numpy array] Axes that will
be averaged coherently before squaring (for auto) or
cross-multiplying (for cross) power spectrum. If set
to None (default), no axes are averaged coherently.
If set to int, list, tuple or numpy array, those axes
will be averaged coherently after applying the weights
specified under key 'wts' along those axes. 1=lst,
2=days, 3=triads.
'wts' [NoneType/list/numpy array] If not provided (equivalent
to setting it to None) or set to None (default), it is
set to a one element list which is a one element numpy
array of unity. Otherwise, it must be a list of same
number of elements as in key 'axes' and each of these
must be a numpy broadcast compatible array corresponding
to each of the axis specified in 'axes'
xinfo [NoneType or dictionary] Specifies parameters for processing
cross power spectrum. If set to None, a dictionary will be
created with the default values as described below. The
dictionary must have the following keys and values:
'axes' [NoneType/int/list/tuple/numpy array] Axes over which
power spectrum will be computed incoherently by cross-
multiplication. If set to None (default), no cross-
power spectrum is computed. If set to int, list, tuple
or numpy array, cross-power over those axes will be
computed incoherently by cross-multiplication. The
cross-spectrum over these axes will be computed after
applying the pre- and post- cross-multiplication
weights specified in key 'wts'. 1=lst, 2=days,
3=triads.
'collapse_axes'
[list] The axes that will be collpased after the
cross-power matrix is produced by cross-multiplication.
If this key is not set, it will be initialized to an
empty list (default), in which case none of the axes
is collapsed and the full cross-power matrix will be
output. it must be a subset of values under key 'axes'.
This will reduce it from a square matrix along that axis
to collapsed values along each of the leading diagonals.
1=lst, 2=days, 3=triads.
'dlst' [scalar] LST interval (in mins) or difference between LST
pairs which will be determined and used for
cross-power spectrum. Will only apply if values under
'axes' contains the LST axis(=1).
'dlst_range'
[scalar, numpy array, or NoneType] Specifies the LST
difference(s) in minutes that are to be used in the
computation of cross-power spectra. If a scalar, only
the diagonal consisting of pairs with that LST
difference will be computed. If a numpy array, those
diagonals consisting of pairs with that LST difference
will be computed. If set to None (default), the main
diagonal (LST difference of 0) and the first off-main
diagonal (LST difference of 1 unit) corresponding to
pairs with 0 and 1 unit LST difference are computed.
Applies only if key 'axes' contains LST axis (=1).
'avgcov'
[boolean] It specifies if the collapse of square
covariance matrix is to be collapsed further to a single
number after applying 'postX' weights. If not set or
set to False (default), this late stage collapse will
not be performed. Otherwise, it will be averaged in a
weighted average sense where the 'postX' weights would
have already been applied during the collapsing
operation
'wts' [NoneType or Dictionary] If not set, a default
dictionary (see default values below) will be created.
It must have the follwoing keys and values:
'preX' [list of numpy arrays] It contains pre-cross-
multiplication weights. It is a list where
each element in the list is a numpy array, and
the number of elements in the list must match
the number of entries in key 'axes'. If 'axes'
is set None, 'preX' may be set to a list
with one element which is a numpy array of ones.
The number of elements in each of the numpy
arrays must be numpy broadcastable into the
number of elements along that axis in the
delay spectrum.
'preXnorm'
[boolean] If False (default), no normalization
is done after the application of weights. If
set to True, the delay spectrum will be
normalized by the sum of the weights.
'postX' [list of numpy arrays] It contains post-cross-
multiplication weights. It is a list where
each element in the list is a numpy array, and
the number of elements in the list must match
the number of entries in key 'axes'. If 'axes'
is set None, 'preX' may be set to a list
with one element which is a numpy array of ones.
The number of elements in each of the numpy
arrays must be numpy broadcastable into the
number of elements along that axis in the
delay spectrum.
'preXnorm'
[boolean] If False (default), no normalization
is done after the application of 'preX' weights.
If set to True, the delay spectrum will be
normalized by the sum of the weights.
'postXnorm'
[boolean] If False (default), no normalization
is done after the application of postX weights.
If set to True, the delay cross power spectrum
will be normalized by the sum of the weights.
cosmo [instance of cosmology class from astropy] An instance of class
FLRW or default_cosmology of astropy cosmology module. Default
uses Planck 2015 cosmology, with H0=100 h km/s/Mpc
units [string] Specifies the units of output power spectum. Accepted
values are 'Jy' and 'K' (default)) and the power spectrum will
be in corresponding squared units.
Output:
Dictionary with the keys 'triads' ((ntriads,3) array), 'triads_ind',
((ntriads,) array), 'lstXoffsets' ((ndlst_range,) array), 'lst'
((nlst,) array), 'dlst' ((nlst,) array), 'lst_ind' ((nlst,) array),
'days' ((ndays,) array), 'day_ind' ((ndays,) array), 'dday'
((ndays,) array), 'oversampled' and 'resampled' corresponding to whether
resample was set to False or True in call to member function FT().
Values under keys 'triads_ind' and 'lst_ind' are numpy array
corresponding to triad and time indices used in selecting the data.
Values under keys 'oversampled' and 'resampled' each contain a
dictionary with the following keys and values:
'z' [numpy array] Redshifts corresponding to the band centers in
'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has shape=(nlags,).
'kprll' [numpy array] k_parallel modes (in h/Mpc) corresponding to
'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in Hz) of the
frequency subbands of the subband delay spectra. It is of size
n_win. It is roughly equivalent to redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on each
frequency sub-band during the subband delay transform. It is
of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in Hz) of the
subbands being delay transformed. It is of size n_win. It is
roughly equivalent to width in redshift or along line-of-sight
'shape' [string] shape of the frequency window function applied. Usual
values are 'rect' (rectangular), 'bhw' (Blackman-Harris),
'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window was raised.
The value is be a positive scalar with default = 1.0
'lag_corr_length'
[numpy array] It is the correlation timescale (in pixels) of
the subband delay spectra. It is proportional to inverse of
effective bandwidth. It is of size n_win. The unit size of a
pixel is determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is effectively inverse
of the effective bandwidth of the subband specified in bw_eff
It further contains 3 keys named 'whole', 'submodel', and 'residual'
each of which is a dictionary. 'whole' contains power spectrum info
about the input closure phases. 'submodel' contains power spectrum info
about the model that will have been subtracted (as closure phase) from
the 'whole' model. 'residual' contains power spectrum info about the
closure phases obtained as a difference between 'whole' and 'submodel'.
It contains the following keys and values:
'mean' [numpy array] Delay power spectrum incoherently estiamted over
the axes specified in xinfo['axes'] using the 'mean' key in input
cpds or attribute cPhaseDS['processed']['dspec']. It has shape
that depends on the combination of input parameters. See
examples below. If both collapse_axes and avgcov are not set,
those axes will be replaced with square covariance matrices. If
collapse_axes is provided but avgcov is False, those axes will be
of shape 2*Naxis-1.
'median'
[numpy array] Delay power spectrum incoherently averaged over
the axes specified in incohax using the 'median' key in input
cpds or attribute cPhaseDS['processed']['dspec']. It has shape
that depends on the combination of input parameters. See
examples below. If both collapse_axes and avgcov are not set,
those axes will be replaced with square covariance matrices. If
collapse_axes is provided bu avgcov is False, those axes will be
of shape 2*Naxis-1.
'diagoffsets'
[dictionary] Same keys corresponding to keys under
'collapse_axes' in input containing the diagonal offsets for
those axes. If 'avgcov' was set, those entries will be removed
from 'diagoffsets' since all the leading diagonal elements have
been collapsed (averaged) further. Value under each key is a
numpy array where each element in the array corresponds to the
index of that leading diagonal. This should match the size of
the output along that axis in 'mean' or 'median' above.
'diagweights'
[dictionary] Each key is an axis specified in collapse_axes and
the value is a numpy array of weights corresponding to the
diagonal offsets in that axis.
'axesmap'
[dictionary] If covariance in cross-power is calculated but is
not collapsed, the number of dimensions in the output will have
changed. This parameter tracks where the original axis is now
placed. The keys are the original axes that are involved in
incoherent cross-power, and the values are the new locations of
those original axes in the output.
'nsamples_incoh'
[integer] Number of incoherent samples in producing the power
spectrum
'nsamples_coh'
[integer] Number of coherent samples in producing the power
spectrum
Examples:
(1)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': None, 'avgcov': False, 'collapse_axes': [],
'wts':{'preX': None, 'preXnorm': False,
'postX': None, 'postXnorm': False}}
Output delay power spectrum has shape (Nspw, Nlst, 1, Ntriads, Nlags)
(2)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [],
'wts':{'preX': None, 'preXnorm': False,
'postX': None, 'postXnorm': False},
'dlst_range': None}
Output delay power spectrum has shape
(Nspw, 2, Nlst, 1, Ntriads, Ntriads, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range)},
axesmap = {1: [1,2], 3: [4,5]}
(3)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [3],
'dlst_range': [0.0, 1.0, 2.0]}
Output delay power spectrum has shape
(Nspw, 3, Nlst, 1, 2*Ntriads-1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range),
3: NP.arange(-Ntriads,Ntriads)},
axesmap = {1: [1,2], 3: [4]}
(4)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [1,3],
'dlst_range': [1.0, 2.0, 3.0, 4.0]}
Output delay power spectrum has shape
(Nspw, 4, Ndays, 2*Ntriads-1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range),
3: NP.arange(-Ntriads,Ntriads)},
axesmap = {1: [1], 3: [3]}
(5)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': True, 'collapse_axes': [3],
'dlst_range': None}
Output delay power spectrum has shape
(Nspw, 2, Nlst, Ndays, 1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range)}, axesmap = {1: [1,2], 3: [4]}
(6)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': True, 'collapse_axes': []}
Output delay power spectrum has shape
(Nspw, 1, Ndays, 1, Nlags)
diagoffsets = {}, axesmap = {1: [1], 3: [3]}
------------------------------------------------------------------------
"""
if not isinstance(units,str):
raise TypeError('Input parameter units must be a string')
if units.lower() == 'k':
if not isinstance(beamparms, dict):
raise TypeError('Input beamparms must be a dictionary')
if 'freqs' not in beamparms:
beamparms['freqs'] = self.f
beamparms_orig = copy.deepcopy(beamparms)
if autoinfo is None:
autoinfo = {'axes': None, 'wts': [NP.ones(1, dtpye=NP.float)]}
elif not isinstance(autoinfo, dict):
raise TypeError('Input autoinfo must be a dictionary')
if 'axes' not in autoinfo:
autoinfo['axes'] = None
else:
if autoinfo['axes'] is not None:
if not isinstance(autoinfo['axes'], (list,tuple,NP.ndarray,int)):
raise TypeError('Value under key axes in input autoinfo must be an integer, list, tuple or numpy array')
else:
autoinfo['axes'] = NP.asarray(autoinfo['axes']).reshape(-1)
if 'wts' not in autoinfo:
if autoinfo['axes'] is not None:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)] * len(autoinfo['axes'])
else:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)]
else:
if autoinfo['axes'] is not None:
if not isinstance(autoinfo['wts'], list):
raise TypeError('wts in input autoinfo must be a list of numpy arrays')
else:
if len(autoinfo['wts']) != len(autoinfo['axes']):
raise ValueError('Input list of wts must be same as length of autoinfo axes')
else:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)]
if xinfo is None:
xinfo = {'axes': None, 'wts': {'preX': [NP.ones(1, dtpye=NP.float)], 'postX': [NP.ones(1, dtpye=NP.float)], 'preXnorm': False, 'postXnorm': False}}
elif not isinstance(xinfo, dict):
raise TypeError('Input xinfo must be a dictionary')
if 'axes' not in xinfo:
xinfo['axes'] = None
else:
if not isinstance(xinfo['axes'], (list,tuple,NP.ndarray,int)):
raise TypeError('Value under key axes in input xinfo must be an integer, list, tuple or numpy array')
else:
xinfo['axes'] = NP.asarray(xinfo['axes']).reshape(-1)
if 'wts' not in xinfo:
xinfo['wts'] = {}
for xkey in ['preX', 'postX']:
if xinfo['axes'] is not None:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)] * len(xinfo['axes'])
else:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)]
xinfo['wts']['preXnorm'] = False
xinfo['wts']['postXnorm'] = False
else:
if xinfo['axes'] is not None:
if not isinstance(xinfo['wts'], dict):
raise TypeError('wts in input xinfo must be a dictionary')
for xkey in ['preX', 'postX']:
if not isinstance(xinfo['wts'][xkey], list):
raise TypeError('{0} wts in input xinfo must be a list of numpy arrays'.format(xkey))
else:
if len(xinfo['wts'][xkey]) != len(xinfo['axes']):
raise ValueError('Input list of {0} wts must be same as length of xinfo axes'.format(xkey))
else:
for xkey in ['preX', 'postX']:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)]
if 'preXnorm' not in xinfo['wts']:
xinfo['wts']['preXnorm'] = False
if 'postXnorm' not in xinfo['wts']:
xinfo['wts']['postXnorm'] = False
if not isinstance(xinfo['wts']['preXnorm'], NP.bool):
raise TypeError('preXnorm in input xinfo must be a boolean')
if not isinstance(xinfo['wts']['postXnorm'], NP.bool):
raise TypeError('postXnorm in input xinfo must be a boolean')
if 'avgcov' not in xinfo:
xinfo['avgcov'] = False
if not isinstance(xinfo['avgcov'], NP.bool):
raise TypeError('avgcov under input xinfo must be boolean')
if 'collapse_axes' not in xinfo:
xinfo['collapse_axes'] = []
if not isinstance(xinfo['collapse_axes'], (int,list,tuple,NP.ndarray)):
raise TypeError('collapse_axes under input xinfo must be an integer, tuple, list or numpy array')
else:
xinfo['collapse_axes'] = NP.asarray(xinfo['collapse_axes']).reshape(-1)
if (autoinfo['axes'] is not None) and (xinfo['axes'] is not None):
if NP.intersect1d(autoinfo['axes'], xinfo['axes']).size > 0:
raise ValueError("Inputs autoinfo['axes'] and xinfo['axes'] must have no intersection")
cohax = autoinfo['axes']
if cohax is None:
cohax = []
incohax = xinfo['axes']
if incohax is None:
incohax = []
if selection is None:
selection = {'triads': None, 'lst': None, 'days': None}
else:
if not isinstance(selection, dict):
raise TypeError('Input selection must be a dictionary')
if cpds is None:
cpds = {}
sampling = ['oversampled', 'resampled']
for smplng in sampling:
if smplng == 'oversampled':
cpds[smplng] = copy.deepcopy(self.cPhaseDS)
else:
cpds[smplng] = copy.deepcopy(self.cPhaseDS_resampled)
triad_ind, lst_ind, day_ind, day_ind_eicpdiff = self.subset(selection=selection)
result = {'triads': self.cPhase.cpinfo['raw']['triads'][triad_ind], 'triads_ind': triad_ind, 'lst': self.cPhase.cpinfo['processed']['prelim']['lstbins'][lst_ind], 'lst_ind': lst_ind, 'dlst': self.cPhase.cpinfo['processed']['prelim']['dlstbins'][lst_ind], 'days': self.cPhase.cpinfo['processed']['prelim']['daybins'][day_ind], 'day_ind': day_ind, 'dday': self.cPhase.cpinfo['processed']['prelim']['diff_dbins'][day_ind]}
dlstbin = NP.mean(self.cPhase.cpinfo['processed']['prelim']['dlstbins'])
if 'dlst_range' in xinfo:
if xinfo['dlst_range'] is None:
dlst_range = None
lstshifts = NP.arange(2) # LST index offsets of 0 and 1 are only estimated
else:
dlst_range = NP.asarray(xinfo['dlst_range']).ravel() / 60.0 # Difference in LST between a pair of LST (in hours)
if dlst_range.size == 1:
dlst_range = NP.insert(dlst_range, 0, 0.0)
lstshifts = NP.arange(max([0, NP.ceil(1.0*dlst_range.min()/dlstbin).astype(NP.int)]), min([NP.ceil(1.0*dlst_range.max()/dlstbin).astype(NP.int), result['lst'].size]))
else:
dlst_range = None
lstshifts = NP.arange(2) # LST index offsets of 0 and 1 are only estimated
result['lstXoffsets'] = lstshifts * dlstbin # LST interval corresponding to diagonal offsets created by the LST covariance
for smplng in sampling:
result[smplng] = {}
wl = FCNST.c / (cpds[smplng]['freq_center'] * U.Hz)
z = CNST.rest_freq_HI / cpds[smplng]['freq_center'] - 1
dz = CNST.rest_freq_HI / cpds[smplng]['freq_center']**2 * cpds[smplng]['bw_eff']
dkprll_deta = DS.dkprll_deta(z, cosmo=cosmo)
kprll = dkprll_deta.reshape(-1,1) * cpds[smplng]['lags']
rz_los = cosmo.comoving_distance(z) # in Mpc/h
drz_los = FCNST.c * cpds[smplng]['bw_eff']*U.Hz * (1+z)**2 / (CNST.rest_freq_HI * U.Hz) / (cosmo.H0 * cosmo.efunc(z)) # in Mpc/h
if units == 'Jy':
jacobian1 = 1 / (cpds[smplng]['bw_eff'] * U.Hz)
jacobian2 = drz_los / (cpds[smplng]['bw_eff'] * U.Hz)
temperature_from_fluxdensity = 1.0
elif units == 'K':
beamparms = copy.deepcopy(beamparms_orig)
omega_bw = self.beam3Dvol(beamparms, freq_wts=cpds[smplng]['freq_wts'])
jacobian1 = 1 / (omega_bw * U.Hz) # The steradian is present but not explicitly assigned
jacobian2 = rz_los**2 * drz_los / (cpds[smplng]['bw_eff'] * U.Hz)
temperature_from_fluxdensity = wl**2 / (2*FCNST.k_B)
else:
raise ValueError('Input value for units invalid')
factor = jacobian1 * jacobian2 * temperature_from_fluxdensity**2
result[smplng]['z'] = z
result[smplng]['kprll'] = kprll
result[smplng]['lags'] = NP.copy(cpds[smplng]['lags'])
result[smplng]['freq_center'] = cpds[smplng]['freq_center']
result[smplng]['bw_eff'] = cpds[smplng]['bw_eff']
result[smplng]['shape'] = cpds[smplng]['shape']
result[smplng]['freq_wts'] = cpds[smplng]['freq_wts']
result[smplng]['lag_corr_length'] = cpds[smplng]['lag_corr_length']
for dpool in ['whole', 'submodel', 'residual']:
if dpool in cpds[smplng]:
result[smplng][dpool] = {}
inpshape = list(cpds[smplng]['whole']['dspec']['mean'].shape)
inpshape[1] = lst_ind.size
inpshape[2] = day_ind.size
inpshape[3] = triad_ind.size
if len(cohax) > 0:
nsamples_coh = NP.prod(NP.asarray(inpshape)[NP.asarray(cohax)])
else:
nsamples_coh = 1
if len(incohax) > 0:
nsamples = NP.prod(NP.asarray(inpshape)[NP.asarray(incohax)])
nsamples_incoh = nsamples * (nsamples - 1)
else:
nsamples_incoh = 1
twts_multidim_idx = NP.ix_(lst_ind,day_ind,triad_ind,NP.arange(1)) # shape=(nlst,ndays,ntriads,1)
dspec_multidim_idx = NP.ix_(NP.arange(wl.size),lst_ind,day_ind,triad_ind,NP.arange(inpshape[4])) # shape=(nspw,nlst,ndays,ntriads,nchan)
max_wt_in_chan = NP.max(NP.sum(cpds[smplng]['whole']['dspec']['twts'].data, axis=(0,1,2)))
select_chan = NP.argmax(NP.sum(cpds[smplng]['whole']['dspec']['twts'].data, axis=(0,1,2)))
twts = NP.copy(cpds[smplng]['whole']['dspec']['twts'].data[:,:,:,[select_chan]]) # shape=(nlst,ndays,ntriads,nlags=1)
if nsamples_coh > 1:
awts_shape = tuple(NP.ones(cpds[smplng]['whole']['dspec']['mean'].ndim, dtype=NP.int))
awts = NP.ones(awts_shape, dtype=NP.complex)
awts_shape = NP.asarray(awts_shape)
for caxind,caxis in enumerate(cohax):
curr_awts_shape = NP.copy(awts_shape)
curr_awts_shape[caxis] = -1
awts = awts * autoinfo['wts'][caxind].reshape(tuple(curr_awts_shape))
for stat in ['mean', 'median']:
if dpool == 'submodel':
dspec = NP.copy(cpds[smplng][dpool]['dspec'][dspec_multidim_idx])
else:
dspec = NP.copy(cpds[smplng][dpool]['dspec'][stat][dspec_multidim_idx])
if nsamples_coh > 1:
if stat == 'mean':
dspec = NP.sum(twts[twts_multidim_idx][NP.newaxis,...] * awts * dspec[dspec_multidim_idx], axis=cohax, keepdims=True) / NP.sum(twts[twts_multidim_idx][NP.newaxis,...] * awts, axis=cohax, keepdims=True)
else:
dspec = NP.median(dspec[dspec_multidim_idx], axis=cohax, keepdims=True)
if nsamples_incoh > 1:
expandax_map = {}
wts_shape = tuple(NP.ones(dspec.ndim, dtype=NP.int))
preXwts = NP.ones(wts_shape, dtype=NP.complex)
wts_shape = NP.asarray(wts_shape)
for incaxind,incaxis in enumerate(xinfo['axes']):
curr_wts_shape = NP.copy(wts_shape)
curr_wts_shape[incaxis] = -1
preXwts = preXwts * xinfo['wts']['preX'][incaxind].reshape(tuple(curr_wts_shape))
dspec1 = NP.copy(dspec)
dspec2 = NP.copy(dspec)
preXwts1 = NP.copy(preXwts)
preXwts2 = NP.copy(preXwts)
for incax in NP.sort(incohax)[::-1]:
dspec1 = NP.expand_dims(dspec1, axis=incax)
preXwts1 = NP.expand_dims(preXwts1, axis=incax)
if incax == 1:
preXwts1_outshape = list(preXwts1.shape)
preXwts1_outshape[incax+1] = dspec1.shape[incax+1]
preXwts1_outshape = tuple(preXwts1_outshape)
preXwts1 = NP.broadcast_to(preXwts1, preXwts1_outshape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
preXwts2_tmp = NP.expand_dims(preXwts2, axis=incax)
preXwts2_shape = NP.asarray(preXwts2_tmp.shape)
preXwts2_shape[incax] = lstshifts.size
preXwts2_shape[incax+1] = preXwts1_outshape[incax+1]
preXwts2_shape = tuple(preXwts2_shape)
preXwts2 = NP.broadcast_to(preXwts2_tmp, preXwts2_shape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
dspec2_tmp = NP.expand_dims(dspec2, axis=incax)
dspec2_shape = NP.asarray(dspec2_tmp.shape)
dspec2_shape[incax] = lstshifts.size
# dspec2_shape = NP.insert(dspec2_shape, incax, lstshifts.size)
dspec2_shape = tuple(dspec2_shape)
dspec2 = NP.broadcast_to(dspec2_tmp, dspec2_shape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
for lstshiftind, lstshift in enumerate(lstshifts):
dspec2[:,lstshiftind,...] = NP.roll(dspec2_tmp[:,0,...], lstshift, axis=incax)
dspec2[:,lstshiftind,:lstshift,...] = NP.nan
preXwts2[:,lstshiftind,...] = NP.roll(preXwts2_tmp[:,0,...], lstshift, axis=incax)
preXwts2[:,lstshiftind,:lstshift,...] = NP.nan
else:
dspec2 = NP.expand_dims(dspec2, axis=incax+1)
preXwts2 = NP.expand_dims(preXwts2, axis=incax+1)
expandax_map[incax] = incax + NP.arange(2)
for ekey in expandax_map:
if ekey > incax:
expandax_map[ekey] += 1
result[smplng][dpool][stat] = factor.reshape((-1,)+tuple(NP.ones(dspec1.ndim-1, dtype=NP.int))) * (dspec1*U.Unit('Jy Hz') * preXwts1) * (dspec2*U.Unit('Jy Hz') * preXwts2).conj()
if xinfo['wts']['preXnorm']:
result[smplng][dpool][stat] = result[smplng][dpool][stat] / NP.nansum(preXwts1 * preXwts2.conj(), axis=NP.union1d(NP.where(logical_or(NP.asarray(preXwts1.shape)>1, NP.asarray(preXwts2.shape)>1))), keepdims=True) # Normalize by summing the weights over the expanded axes
if (len(xinfo['collapse_axes']) > 0) or (xinfo['avgcov']):
# if any one of collapsing of incoherent axes or
# averaging of full covariance is requested
diagoffsets = {} # Stores the correlation index difference along each axis.
diagweights = {} # Stores the number of points summed in the trace along the offset diagonal
for colaxind, colax in enumerate(xinfo['collapse_axes']):
if colax == 1:
shp = NP.ones(dspec.ndim, dtype=NP.int)
shp[colax] = lst_ind.size
multdim_idx = tuple([NP.arange(axdim) for axdim in shp])
diagweights[colax] = NP.sum(NP.logical_not(NP.isnan(dspec[multdim_idx]))) - lstshifts
# diagweights[colax] = result[smplng][dpool][stat].shape[expandax_map[colax][-1]] - lstshifts
if stat == 'mean':
result[smplng][dpool][stat] = NP.nanmean(result[smplng][dpool][stat], axis=expandax_map[colax][-1])
else:
result[smplng][dpool][stat] = NP.nanmedian(result[smplng][dpool][stat], axis=expandax_map[colax][-1])
diagoffsets[colax] = lstshifts
else:
pspec_unit = result[smplng][dpool][stat].si.unit
result[smplng][dpool][stat], offsets, diagwts = OPS.array_trace(result[smplng][dpool][stat].si.value, offsets=None, axis1=expandax_map[colax][0], axis2=expandax_map[colax][1], outaxis='axis1')
diagwts_shape = NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int)
diagwts_shape[expandax_map[colax][0]] = diagwts.size
diagoffsets[colax] = offsets
diagweights[colax] = NP.copy(diagwts)
result[smplng][dpool][stat] = result[smplng][dpool][stat] * pspec_unit / diagwts.reshape(diagwts_shape)
for ekey in expandax_map:
if ekey > colax:
expandax_map[ekey] -= 1
expandax_map[colax] = NP.asarray(expandax_map[colax][0]).ravel()
wts_shape = tuple(NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int))
postXwts = NP.ones(wts_shape, dtype=NP.complex)
wts_shape = NP.asarray(wts_shape)
for colaxind, colax in enumerate(xinfo['collapse_axes']):
curr_wts_shape = NP.copy(wts_shape)
curr_wts_shape[expandax_map[colax]] = -1
postXwts = postXwts * xinfo['wts']['postX'][colaxind].reshape(tuple(curr_wts_shape))
result[smplng][dpool][stat] = result[smplng][dpool][stat] * postXwts
axes_to_sum = tuple(NP.asarray([expandax_map[colax] for colax in xinfo['collapse_axes']]).ravel()) # for post-X normalization and collapse of covariance matrix
if xinfo['wts']['postXnorm']:
result[smplng][dpool][stat] = result[smplng][dpool][stat] / NP.nansum(postXwts, axis=axes_to_sum, keepdims=True) # Normalize by summing the weights over the collapsed axes
if xinfo['avgcov']:
# collapse the axes further (postXwts have already
# been applied)
diagoffset_weights = 1.0
for colaxind in zip(*sorted(zip(NP.arange(xinfo['collapse_axes'].size), xinfo['collapse_axes']), reverse=True))[0]:
# It is important to sort the collapsable axes in
# reverse order before deleting elements below,
# otherwise the axes ordering may be get messed up
diagoffset_weights_shape = NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int)
diagoffset_weights_shape[expandax_map[xinfo['collapse_axes'][colaxind]][0]] = diagweights[xinfo['collapse_axes'][colaxind]].size
diagoffset_weights = diagoffset_weights * diagweights[xinfo['collapse_axes'][colaxind]].reshape(diagoffset_weights_shape)
del diagoffsets[xinfo['collapse_axes'][colaxind]]
result[smplng][dpool][stat] = NP.nansum(result[smplng][dpool][stat]*diagoffset_weights, axis=axes_to_sum, keepdims=True) / NP.nansum(diagoffset_weights, axis=axes_to_sum, keepdims=True)
else:
result[smplng][dpool][stat] = factor.reshape((-1,)+tuple(NP.ones(dspec.ndim-1, dtype=NP.int))) * NP.abs(dspec * U.Jy)**2
diagoffsets = {}
expandax_map = {}
if units == 'Jy':
result[smplng][dpool][stat] = result[smplng][dpool][stat].to('Jy2 Mpc')
elif units == 'K':
result[smplng][dpool][stat] = result[smplng][dpool][stat].to('K2 Mpc3')
else:
raise ValueError('Input value for units invalid')
result[smplng][dpool]['diagoffsets'] = diagoffsets
result[smplng][dpool]['diagweights'] = diagweights
result[smplng][dpool]['axesmap'] = expandax_map
result[smplng][dpool]['nsamples_incoh'] = nsamples_incoh
result[smplng][dpool]['nsamples_coh'] = nsamples_coh
return result
############################################################################
def compute_power_spectrum_uncertainty(self, cpds=None, selection=None,
autoinfo=None,xinfo=None,
cosmo=cosmo100, units='K',
beamparms=None):
"""
------------------------------------------------------------------------
Compute uncertainty in the power spectrum of closure phase data. It is
in units of Mpc/h
Inputs:
cpds [dictionary] A dictionary that contains the 'oversampled' (if
resample=False) and/or 'resampled' (if resample=True) delay
spectrum information on the key 'errinfo'. If it is not
specified the attributes cPhaseDS['errinfo'] and
cPhaseDS_resampled['errinfo'] are used. Under each of these
sampling keys, it holds a dictionary that has the following
keys and values:
'freq_center' [numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is
roughly equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband
delay transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed.
It is of size n_win. It is roughly equivalent to
width in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow' [scalar] the power to which the FFT of the window
was raised. The value is be a positive scalar
with default = 1.0
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform.
It is of size nlags. The lags roughly correspond
to k_parallel.
'lag_kernel' [numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale
(in pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth.
It is of size n_win. The unit size of a pixel is
determined by the difference between adjacent
pixels in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
of the subband specified in bw_eff
'errinfo' [dictionary] It has two keys 'dspec0' and
'dspec1' each of which are dictionaries with
the following keys and values:
'twts' [numpy array] Weights for the subsample
difference. It is of shape (nlst, ndays,
ntriads, nchan)
'mean' [numpy array] Delay spectrum of the
subsample difference obtained by using
the mean statistic. It is of shape
(nspw, nlst, ndays, ntriads, nlags)
'median'
[numpy array] Delay spectrum of the
subsample difference obtained by using
the median statistic. It is of shape
(nspw, nlst, ndays, ntriads, nlags)
selection [NoneType or dictionary] Selection parameters based on which
triad, LST, and day indices will be returned. If set to None
(default), all triad, LST, and day indices will be returned.
Otherwise it must be a dictionary with the following keys
and values:
'triads' [NoneType or list of 3-element tuples] If set
to None (default), indices of all triads are
returned. Otherwise, the specific triads must
be specified such as [(1,2,3), (1,2,4), ...]
and their indices will be returned
'lst' [NoneType, list or numpy array] If set to None
(default), indices of all LST are returned.
Otherwise must be a list or numpy array
containing indices to LST.
'days' [NoneType, list or numpy array] If set to None
(default), indices of all days are returned.
Otherwise must be a list or numpy array
containing indices to days.
autoinfo
[NoneType or dictionary] Specifies parameters for processing
before power spectrum in auto or cross modes. If set to None,
a dictionary will be created with the default values as
described below. The dictionary must have the following keys
and values:
'axes' [NoneType/int/list/tuple/numpy array] Axes that will
be averaged coherently before squaring (for auto) or
cross-multiplying (for cross) power spectrum. If set
to None (default), no axes are averaged coherently.
If set to int, list, tuple or numpy array, those axes
will be averaged coherently after applying the weights
specified under key 'wts' along those axes. 1=lst,
3=triads. Value of 2 for axes is not allowed since
that denotes repeated days and it is along this axis
that cross-power is computed regardless.
'wts' [NoneType/list/numpy array] If not provided (equivalent
to setting it to None) or set to None (default), it is
set to a one element list which is a one element numpy
array of unity. Otherwise, it must be a list of same
number of elements as in key 'axes' and each of these
must be a numpy broadcast compatible array corresponding
to each of the axis specified in 'axes'
xinfo [NoneType or dictionary] Specifies parameters for processing
cross power spectrum. If set to None, a dictionary will be
created with the default values as described below. The
dictionary must have the following keys and values:
'axes' [NoneType/int/list/tuple/numpy array] Axes over which
power spectrum will be computed incoherently by cross-
multiplication. If set to None (default), no cross-
power spectrum is computed. If set to int, list, tuple
or numpy array, cross-power over those axes will be
computed incoherently by cross-multiplication. The
cross-spectrum over these axes will be computed after
applying the pre- and post- cross-multiplication
weights specified in key 'wts'. 1=lst, 3=triads. Value
of 2 for axes is not allowed since that denotes
repeated days and it is along this axis that
cross-power is computed regardless.
'collapse_axes'
[list] The axes that will be collpased after the
cross-power matrix is produced by cross-multiplication.
If this key is not set, it will be initialized to an
empty list (default), in which case none of the axes
is collapsed and the full cross-power matrix will be
output. it must be a subset of values under key 'axes'.
This will reduce it from a square matrix along that axis
to collapsed values along each of the leading diagonals.
1=lst, 3=triads.
'dlst' [scalar] LST interval (in mins) or difference between LST
pairs which will be determined and used for
cross-power spectrum. Will only apply if values under
'axes' contains the LST axis(=1).
'dlst_range'
[scalar, numpy array, or NoneType] Specifies the LST
difference(s) in minutes that are to be used in the
computation of cross-power spectra. If a scalar, only
the diagonal consisting of pairs with that LST
difference will be computed. If a numpy array, those
diagonals consisting of pairs with that LST difference
will be computed. If set to None (default), the main
diagonal (LST difference of 0) and the first off-main
diagonal (LST difference of 1 unit) corresponding to
pairs with 0 and 1 unit LST difference are computed.
Applies only if key 'axes' contains LST axis (=1).
'avgcov'
[boolean] It specifies if the collapse of square
covariance matrix is to be collapsed further to a single
number after applying 'postX' weights. If not set or
set to False (default), this late stage collapse will
not be performed. Otherwise, it will be averaged in a
weighted average sense where the 'postX' weights would
have already been applied during the collapsing
operation
'wts' [NoneType or Dictionary] If not set, a default
dictionary (see default values below) will be created.
It must have the follwoing keys and values:
'preX' [list of numpy arrays] It contains pre-cross-
multiplication weights. It is a list where
each element in the list is a numpy array, and
the number of elements in the list must match
the number of entries in key 'axes'. If 'axes'
is set None, 'preX' may be set to a list
with one element which is a numpy array of ones.
The number of elements in each of the numpy
arrays must be numpy broadcastable into the
number of elements along that axis in the
delay spectrum.
'preXnorm'
[boolean] If False (default), no normalization
is done after the application of weights. If
set to True, the delay spectrum will be
normalized by the sum of the weights.
'postX' [list of numpy arrays] It contains post-cross-
multiplication weights. It is a list where
each element in the list is a numpy array, and
the number of elements in the list must match
the number of entries in key 'axes'. If 'axes'
is set None, 'preX' may be set to a list
with one element which is a numpy array of ones.
The number of elements in each of the numpy
arrays must be numpy broadcastable into the
number of elements along that axis in the
delay spectrum.
'preXnorm'
[boolean] If False (default), no normalization
is done after the application of 'preX' weights.
If set to True, the delay spectrum will be
normalized by the sum of the weights.
'postXnorm'
[boolean] If False (default), no normalization
is done after the application of postX weights.
If set to True, the delay cross power spectrum
will be normalized by the sum of the weights.
cosmo [instance of cosmology class from astropy] An instance of class
FLRW or default_cosmology of astropy cosmology module. Default
uses Planck 2015 cosmology, with H0=100 h km/s/Mpc
units [string] Specifies the units of output power spectum. Accepted
values are 'Jy' and 'K' (default)) and the power spectrum will
be in corresponding squared units.
Output:
Dictionary with the keys 'triads' ((ntriads,3) array), 'triads_ind',
((ntriads,) array), 'lstXoffsets' ((ndlst_range,) array), 'lst'
((nlst,) array), 'dlst' ((nlst,) array), 'lst_ind' ((nlst,) array),
'days' ((ndaycomb,) array), 'day_ind' ((ndaycomb,) array), 'dday'
((ndaycomb,) array), 'oversampled' and 'resampled' corresponding to
whether resample was set to False or True in call to member function
FT(). Values under keys 'triads_ind' and 'lst_ind' are numpy array
corresponding to triad and time indices used in selecting the data.
Values under keys 'oversampled' and 'resampled' each contain a
dictionary with the following keys and values:
'z' [numpy array] Redshifts corresponding to the band centers in
'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has shape=(nlags,).
'kprll' [numpy array] k_parallel modes (in h/Mpc) corresponding to
'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in Hz) of the
frequency subbands of the subband delay spectra. It is of size
n_win. It is roughly equivalent to redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on each
frequency sub-band during the subband delay transform. It is
of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in Hz) of the
subbands being delay transformed. It is of size n_win. It is
roughly equivalent to width in redshift or along line-of-sight
'shape' [string] shape of the frequency window function applied. Usual
values are 'rect' (rectangular), 'bhw' (Blackman-Harris),
'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window was raised.
The value is be a positive scalar with default = 1.0
'lag_corr_length'
[numpy array] It is the correlation timescale (in pixels) of
the subband delay spectra. It is proportional to inverse of
effective bandwidth. It is of size n_win. The unit size of a
pixel is determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is effectively inverse
of the effective bandwidth of the subband specified in bw_eff
It further contains a key named 'errinfo' which is a dictionary. It
contains information about power spectrum uncertainties obtained from
subsample differences. It contains the following keys and values:
'mean' [numpy array] Delay power spectrum uncertainties incoherently
estimated over the axes specified in xinfo['axes'] using the
'mean' key in input cpds or attribute
cPhaseDS['errinfo']['dspec']. It has shape that depends on the
combination of input parameters. See examples below. If both
collapse_axes and avgcov are not set, those axes will be
replaced with square covariance matrices. If collapse_axes is
provided but avgcov is False, those axes will be of shape
2*Naxis-1.
'median'
[numpy array] Delay power spectrum uncertainties incoherently
averaged over the axes specified in incohax using the 'median'
key in input cpds or attribute cPhaseDS['errinfo']['dspec'].
It has shape that depends on the combination of input
parameters. See examples below. If both collapse_axes and
avgcov are not set, those axes will be replaced with square
covariance matrices. If collapse_axes is provided but avgcov is
False, those axes will be of shape 2*Naxis-1.
'diagoffsets'
[dictionary] Same keys corresponding to keys under
'collapse_axes' in input containing the diagonal offsets for
those axes. If 'avgcov' was set, those entries will be removed
from 'diagoffsets' since all the leading diagonal elements have
been collapsed (averaged) further. Value under each key is a
numpy array where each element in the array corresponds to the
index of that leading diagonal. This should match the size of
the output along that axis in 'mean' or 'median' above.
'diagweights'
[dictionary] Each key is an axis specified in collapse_axes and
the value is a numpy array of weights corresponding to the
diagonal offsets in that axis.
'axesmap'
[dictionary] If covariance in cross-power is calculated but is
not collapsed, the number of dimensions in the output will have
changed. This parameter tracks where the original axis is now
placed. The keys are the original axes that are involved in
incoherent cross-power, and the values are the new locations of
those original axes in the output.
'nsamples_incoh'
[integer] Number of incoherent samples in producing the power
spectrum
'nsamples_coh'
[integer] Number of coherent samples in producing the power
spectrum
Examples:
(1)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': None, 'avgcov': False, 'collapse_axes': [],
'wts':{'preX': None, 'preXnorm': False,
'postX': None, 'postXnorm': False}}
This will not do anything because axes cannot include value 2 which
denote the 'days' axis and the uncertainties are obtained through
subsample differencing along days axis regardless.
Output delay power spectrum has shape (Nspw, Nlst, Ndaycomb, Ntriads,
Nlags)
(2)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [],
'wts':{'preX': None, 'preXnorm': False,
'postX': None, 'postXnorm': False},
'dlst_range': None}
This will not do anything about coherent averaging along axis=2 because
axes cannot include value 2 which denote the 'days' axis and the
uncertainties are obtained through subsample differencing along days
axis regardless.
Output delay power spectrum has shape
(Nspw, 2, Nlst, Ndaycomb, Ntriads, Ntriads, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range)},
axesmap = {1: [1,2], 3: [4,5]}
(3)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': 2, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [3],
'dlst_range': [0.0, 1.0, 2.0]}
This will not do anything about coherent averaging along axis=2 because
axes cannot include value 2 which denote the 'days' axis and the
uncertainties are obtained through subsample differencing along days
axis regardless.
Output delay power spectrum has shape
(Nspw, 3, Nlst, 1, 2*Ntriads-1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range),
3: NP.arange(-Ntriads,Ntriads)},
axesmap = {1: [1,2], 3: [4]}
(4)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': False, 'collapse_axes': [1,3],
'dlst_range': [1.0, 2.0, 3.0, 4.0]}
Output delay power spectrum has shape
(Nspw, 4, Ndaycomb, 2*Ntriads-1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range),
3: NP.arange(-Ntriads,Ntriads)},
axesmap = {1: [1], 3: [3]}
(5)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': True, 'collapse_axes': [3],
'dlst_range': None}
Output delay power spectrum has shape
(Nspw, 2, Nlst, Ndays, 1, Nlags)
diagoffsets = {1: NP.arange(n_dlst_range)}, axesmap = {1: [1,2], 3: [4]}
(6)
Input delay spectrum of shape (Nspw, Nlst, Ndays, Ntriads, Nlags)
autoinfo = {'axes': None, 'wts': None}
xinfo = {'axes': [1,3], 'avgcov': True, 'collapse_axes': []}
Output delay power spectrum has shape
(Nspw, 1, Ndays, 1, Nlags)
diagoffsets = {}, axesmap = {1: [1], 3: [3]}
------------------------------------------------------------------------
"""
if not isinstance(units,str):
raise TypeError('Input parameter units must be a string')
if units.lower() == 'k':
if not isinstance(beamparms, dict):
raise TypeError('Input beamparms must be a dictionary')
if 'freqs' not in beamparms:
beamparms['freqs'] = self.f
beamparms_orig = copy.deepcopy(beamparms)
if autoinfo is None:
autoinfo = {'axes': None, 'wts': [NP.ones(1, dtpye=NP.float)]}
elif not isinstance(autoinfo, dict):
raise TypeError('Input autoinfo must be a dictionary')
if 'axes' not in autoinfo:
autoinfo['axes'] = None
else:
if autoinfo['axes'] is not None:
if not isinstance(autoinfo['axes'], (list,tuple,NP.ndarray,int)):
raise TypeError('Value under key axes in input autoinfo must be an integer, list, tuple or numpy array')
else:
autoinfo['axes'] = NP.asarray(autoinfo['axes']).reshape(-1)
if 'wts' not in autoinfo:
if autoinfo['axes'] is not None:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)] * len(autoinfo['axes'])
else:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)]
else:
if autoinfo['axes'] is not None:
if not isinstance(autoinfo['wts'], list):
raise TypeError('wts in input autoinfo must be a list of numpy arrays')
else:
if len(autoinfo['wts']) != len(autoinfo['axes']):
raise ValueError('Input list of wts must be same as length of autoinfo axes')
else:
autoinfo['wts'] = [NP.ones(1, dtype=NP.float)]
if xinfo is None:
xinfo = {'axes': None, 'wts': {'preX': [NP.ones(1, dtpye=NP.float)], 'postX': [NP.ones(1, dtpye=NP.float)], 'preXnorm': False, 'postXnorm': False}}
elif not isinstance(xinfo, dict):
raise TypeError('Input xinfo must be a dictionary')
if 'axes' not in xinfo:
xinfo['axes'] = None
else:
if not isinstance(xinfo['axes'], (list,tuple,NP.ndarray,int)):
raise TypeError('Value under key axes in input xinfo must be an integer, list, tuple or numpy array')
else:
xinfo['axes'] = NP.asarray(xinfo['axes']).reshape(-1)
if 'wts' not in xinfo:
xinfo['wts'] = {}
for xkey in ['preX', 'postX']:
if xinfo['axes'] is not None:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)] * len(xinfo['axes'])
else:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)]
xinfo['wts']['preXnorm'] = False
xinfo['wts']['postXnorm'] = False
else:
if xinfo['axes'] is not None:
if not isinstance(xinfo['wts'], dict):
raise TypeError('wts in input xinfo must be a dictionary')
for xkey in ['preX', 'postX']:
if not isinstance(xinfo['wts'][xkey], list):
raise TypeError('{0} wts in input xinfo must be a list of numpy arrays'.format(xkey))
else:
if len(xinfo['wts'][xkey]) != len(xinfo['axes']):
raise ValueError('Input list of {0} wts must be same as length of xinfo axes'.format(xkey))
else:
for xkey in ['preX', 'postX']:
xinfo['wts'][xkey] = [NP.ones(1, dtype=NP.float)]
if 'preXnorm' not in xinfo['wts']:
xinfo['wts']['preXnorm'] = False
if 'postXnorm' not in xinfo['wts']:
xinfo['wts']['postXnorm'] = False
if not isinstance(xinfo['wts']['preXnorm'], NP.bool):
raise TypeError('preXnorm in input xinfo must be a boolean')
if not isinstance(xinfo['wts']['postXnorm'], NP.bool):
raise TypeError('postXnorm in input xinfo must be a boolean')
if 'avgcov' not in xinfo:
xinfo['avgcov'] = False
if not isinstance(xinfo['avgcov'], NP.bool):
raise TypeError('avgcov under input xinfo must be boolean')
if 'collapse_axes' not in xinfo:
xinfo['collapse_axes'] = []
if not isinstance(xinfo['collapse_axes'], (int,list,tuple,NP.ndarray)):
raise TypeError('collapse_axes under input xinfo must be an integer, tuple, list or numpy array')
else:
xinfo['collapse_axes'] = NP.asarray(xinfo['collapse_axes']).reshape(-1)
if (autoinfo['axes'] is not None) and (xinfo['axes'] is not None):
if NP.intersect1d(autoinfo['axes'], xinfo['axes']).size > 0:
raise ValueError("Inputs autoinfo['axes'] and xinfo['axes'] must have no intersection")
cohax = autoinfo['axes']
if cohax is None:
cohax = []
if 2 in cohax: # Remove axis=2 from cohax
if isinstance(cohax, list):
cohax.remove(2)
if isinstance(cohax, NP.ndarray):
cohax = cohax.tolist()
cohax.remove(2)
cohax = NP.asarray(cohax)
incohax = xinfo['axes']
if incohax is None:
incohax = []
if 2 in incohax: # Remove axis=2 from incohax
if isinstance(incohax, list):
incohax.remove(2)
if isinstance(incohax, NP.ndarray):
incohax = incohax.tolist()
incohax.remove(2)
incohax = NP.asarray(incohax)
if selection is None:
selection = {'triads': None, 'lst': None, 'days': None}
else:
if not isinstance(selection, dict):
raise TypeError('Input selection must be a dictionary')
if cpds is None:
cpds = {}
sampling = ['oversampled', 'resampled']
for smplng in sampling:
if smplng == 'oversampled':
cpds[smplng] = copy.deepcopy(self.cPhaseDS)
else:
cpds[smplng] = copy.deepcopy(self.cPhaseDS_resampled)
triad_ind, lst_ind, day_ind, day_ind_eicpdiff = self.subset(selection=selection)
result = {'triads': self.cPhase.cpinfo['raw']['triads'][triad_ind], 'triads_ind': triad_ind, 'lst': self.cPhase.cpinfo['errinfo']['lstbins'][lst_ind], 'lst_ind': lst_ind, 'dlst': self.cPhase.cpinfo['errinfo']['dlstbins'][lst_ind], 'days': self.cPhase.cpinfo['errinfo']['daybins'][day_ind], 'day_ind': day_ind_eicpdiff, 'dday': self.cPhase.cpinfo['errinfo']['diff_dbins'][day_ind]}
dlstbin = NP.mean(self.cPhase.cpinfo['errinfo']['dlstbins'])
if 'dlst_range' in xinfo:
if xinfo['dlst_range'] is None:
dlst_range = None
lstshifts = NP.arange(2) # LST index offsets of 0 and 1 are only estimated
else:
dlst_range = NP.asarray(xinfo['dlst_range']).ravel() / 60.0 # Difference in LST between a pair of LST (in hours)
if dlst_range.size == 1:
dlst_range = NP.insert(dlst_range, 0, 0.0)
lstshifts = NP.arange(max([0, NP.ceil(1.0*dlst_range.min()/dlstbin).astype(NP.int)]), min([NP.ceil(1.0*dlst_range.max()/dlstbin).astype(NP.int), result['lst'].size]))
else:
dlst_range = None
lstshifts = NP.arange(2) # LST index offsets of 0 and 1 are only estimated
result['lstXoffsets'] = lstshifts * dlstbin # LST interval corresponding to diagonal offsets created by the LST covariance
for smplng in sampling:
result[smplng] = {}
wl = FCNST.c / (cpds[smplng]['freq_center'] * U.Hz)
z = CNST.rest_freq_HI / cpds[smplng]['freq_center'] - 1
dz = CNST.rest_freq_HI / cpds[smplng]['freq_center']**2 * cpds[smplng]['bw_eff']
dkprll_deta = DS.dkprll_deta(z, cosmo=cosmo)
kprll = dkprll_deta.reshape(-1,1) * cpds[smplng]['lags']
rz_los = cosmo.comoving_distance(z) # in Mpc/h
drz_los = FCNST.c * cpds[smplng]['bw_eff']*U.Hz * (1+z)**2 / (CNST.rest_freq_HI * U.Hz) / (cosmo.H0 * cosmo.efunc(z)) # in Mpc/h
if units == 'Jy':
jacobian1 = 1 / (cpds[smplng]['bw_eff'] * U.Hz)
jacobian2 = drz_los / (cpds[smplng]['bw_eff'] * U.Hz)
temperature_from_fluxdensity = 1.0
elif units == 'K':
beamparms = copy.deepcopy(beamparms_orig)
omega_bw = self.beam3Dvol(beamparms, freq_wts=cpds[smplng]['freq_wts'])
jacobian1 = 1 / (omega_bw * U.Hz) # The steradian is present but not explicitly assigned
jacobian2 = rz_los**2 * drz_los / (cpds[smplng]['bw_eff'] * U.Hz)
temperature_from_fluxdensity = wl**2 / (2*FCNST.k_B)
else:
raise ValueError('Input value for units invalid')
factor = jacobian1 * jacobian2 * temperature_from_fluxdensity**2
result[smplng]['z'] = z
result[smplng]['kprll'] = kprll
result[smplng]['lags'] = NP.copy(cpds[smplng]['lags'])
result[smplng]['freq_center'] = cpds[smplng]['freq_center']
result[smplng]['bw_eff'] = cpds[smplng]['bw_eff']
result[smplng]['shape'] = cpds[smplng]['shape']
result[smplng]['freq_wts'] = cpds[smplng]['freq_wts']
result[smplng]['lag_corr_length'] = cpds[smplng]['lag_corr_length']
dpool = 'errinfo'
if dpool in cpds[smplng]:
result[smplng][dpool] = {}
inpshape = list(cpds[smplng][dpool]['dspec0']['mean'].shape)
inpshape[1] = lst_ind.size
inpshape[2] = day_ind_eicpdiff.size
inpshape[3] = triad_ind.size
if len(cohax) > 0:
nsamples_coh = NP.prod(NP.asarray(inpshape)[NP.asarray(cohax)])
else:
nsamples_coh = 1
if len(incohax) > 0:
nsamples = NP.prod(NP.asarray(inpshape)[NP.asarray(incohax)])
nsamples_incoh = nsamples * (nsamples - 1)
else:
nsamples_incoh = 1
twts_multidim_idx = NP.ix_(lst_ind,day_ind_eicpdiff,triad_ind,NP.arange(1)) # shape=(nlst,ndays,ntriads,1)
dspec_multidim_idx = NP.ix_(NP.arange(wl.size),lst_ind,day_ind_eicpdiff,triad_ind,NP.arange(inpshape[4])) # shape=(nspw,nlst,ndays,ntriads,nchan)
max_wt_in_chan = NP.max(NP.sum(cpds[smplng]['errinfo']['dspec0']['twts'].data, axis=(0,1,2,3)))
select_chan = NP.argmax(NP.sum(cpds[smplng]['errinfo']['dspec0']['twts'].data, axis=(0,1,2,3)))
twts = {'0': NP.copy(cpds[smplng]['errinfo']['dspec0']['twts'].data[:,:,:,[select_chan]]), '1': NP.copy(cpds[smplng]['errinfo']['dspec1']['twts'].data[:,:,:,[select_chan]])}
if nsamples_coh > 1:
awts_shape = tuple(NP.ones(cpds[smplng]['errinfo']['dspec']['mean'].ndim, dtype=NP.int))
awts = NP.ones(awts_shape, dtype=NP.complex)
awts_shape = NP.asarray(awts_shape)
for caxind,caxis in enumerate(cohax):
curr_awts_shape = NP.copy(awts_shape)
curr_awts_shape[caxis] = -1
awts = awts * autoinfo['wts'][caxind].reshape(tuple(curr_awts_shape))
for stat in ['mean', 'median']:
dspec0 = NP.copy(cpds[smplng][dpool]['dspec0'][stat][dspec_multidim_idx])
dspec1 = NP.copy(cpds[smplng][dpool]['dspec1'][stat][dspec_multidim_idx])
if nsamples_coh > 1:
if stat == 'mean':
dspec0 = NP.sum(twts['0'][NP.newaxis,...] * awts * dspec0, axis=cohax, keepdims=True) / NP.sum(twts['0'][twts_multidim_idx][NP.newaxis,...] * awts, axis=cohax, keepdims=True)
dspec1 = NP.sum(twts['1'][NP.newaxis,...] * awts * dspec1, axis=cohax, keepdims=True) / NP.sum(twts['1'][twts_multidim_idx][NP.newaxis,...] * awts, axis=cohax, keepdims=True)
else:
dspec0 = NP.median(dspec0, axis=cohax, keepdims=True)
dspec1 = NP.median(dspec1, axis=cohax, keepdims=True)
if nsamples_incoh > 1:
expandax_map = {}
wts_shape = tuple(NP.ones(dspec0.ndim, dtype=NP.int))
preXwts = NP.ones(wts_shape, dtype=NP.complex)
wts_shape = NP.asarray(wts_shape)
for incaxind,incaxis in enumerate(xinfo['axes']):
curr_wts_shape = NP.copy(wts_shape)
curr_wts_shape[incaxis] = -1
preXwts = preXwts * xinfo['wts']['preX'][incaxind].reshape(tuple(curr_wts_shape))
preXwts0 = NP.copy(preXwts)
preXwts1 = NP.copy(preXwts)
for incax in NP.sort(incohax)[::-1]:
dspec0 = NP.expand_dims(dspec0, axis=incax)
preXwts0 = NP.expand_dims(preXwts0, axis=incax)
if incax == 1:
preXwts0_outshape = list(preXwts0.shape)
preXwts0_outshape[incax+1] = dspec0.shape[incax+1]
preXwts0_outshape = tuple(preXwts0_outshape)
preXwts0 = NP.broadcast_to(preXwts0, preXwts0_outshape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
preXwts1_tmp = NP.expand_dims(preXwts1, axis=incax)
preXwts1_shape = NP.asarray(preXwts1_tmp.shape)
preXwts1_shape[incax] = lstshifts.size
preXwts1_shape[incax+1] = preXwts0_outshape[incax+1]
preXwts1_shape = tuple(preXwts1_shape)
preXwts1 = NP.broadcast_to(preXwts1_tmp, preXwts1_shape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
dspec1_tmp = NP.expand_dims(dspec1, axis=incax)
dspec1_shape = NP.asarray(dspec1_tmp.shape)
dspec1_shape[incax] = lstshifts.size
# dspec1_shape = NP.insert(dspec1_shape, incax, lstshifts.size)
dspec1_shape = tuple(dspec1_shape)
dspec1 = NP.broadcast_to(dspec1_tmp, dspec1_shape).copy() # For some strange reason the NP.broadcast_to() creates a "read-only" immutable array which is changed to writeable by copy()
for lstshiftind, lstshift in enumerate(lstshifts):
dspec1[:,lstshiftind,...] = NP.roll(dspec1_tmp[:,0,...], lstshift, axis=incax)
dspec1[:,lstshiftind,:lstshift,...] = NP.nan
preXwts1[:,lstshiftind,...] = NP.roll(preXwts1_tmp[:,0,...], lstshift, axis=incax)
preXwts1[:,lstshiftind,:lstshift,...] = NP.nan
else:
dspec1 = NP.expand_dims(dspec1, axis=incax+1)
preXwts1 = NP.expand_dims(preXwts1, axis=incax+1)
expandax_map[incax] = incax + NP.arange(2)
for ekey in expandax_map:
if ekey > incax:
expandax_map[ekey] += 1
result[smplng][dpool][stat] = factor.reshape((-1,)+tuple(NP.ones(dspec0.ndim-1, dtype=NP.int))) * (dspec0*U.Unit('Jy Hz') * preXwts0) * (dspec1*U.Unit('Jy Hz') * preXwts1).conj()
if xinfo['wts']['preXnorm']:
result[smplng][dpool][stat] = result[smplng][dpool][stat] / NP.nansum(preXwts0 * preXwts1.conj(), axis=NP.union1d(NP.where(logical_or(NP.asarray(preXwts0.shape)>1, NP.asarray(preXwts1.shape)>1))), keepdims=True) # Normalize by summing the weights over the expanded axes
if (len(xinfo['collapse_axes']) > 0) or (xinfo['avgcov']):
# Remove axis=2 if present
if 2 in xinfo['collapse_axes']:
# Remove axis=2 from cohax
if isinstance(xinfo['collapse_axes'], list):
xinfo['collapse_axes'].remove(2)
if isinstance(xinfo['collapse_axes'], NP.ndarray):
xinfo['collapse_axes'] = xinfo['collapse_axes'].tolist()
xinfo['collapse_axes'].remove(2)
xinfo['collapse_axes'] = NP.asarray(xinfo['collapse_axes'])
if (len(xinfo['collapse_axes']) > 0) or (xinfo['avgcov']):
# if any one of collapsing of incoherent axes or
# averaging of full covariance is requested
diagoffsets = {} # Stores the correlation index difference along each axis.
diagweights = {} # Stores the number of points summed in the trace along the offset diagonal
for colaxind, colax in enumerate(xinfo['collapse_axes']):
if colax == 1:
shp = NP.ones(cpds[smplng][dpool]['dspec0'][stat].ndim, dtype=NP.int)
shp[colax] = lst_ind.size
multdim_idx = tuple([NP.arange(axdim) for axdim in shp])
diagweights[colax] = NP.sum(NP.logical_not(NP.isnan(cpds[smplng][dpool]['dspec0'][stat][dspec_multidim_idx][multdim_idx]))) - lstshifts
# diagweights[colax] = result[smplng][dpool][stat].shape[expandax_map[colax][-1]] - lstshifts
if stat == 'mean':
result[smplng][dpool][stat] = NP.nanmean(result[smplng][dpool][stat], axis=expandax_map[colax][-1])
else:
result[smplng][dpool][stat] = NP.nanmedian(result[smplng][dpool][stat], axis=expandax_map[colax][-1])
diagoffsets[colax] = lstshifts
else:
pspec_unit = result[smplng][dpool][stat].si.unit
result[smplng][dpool][stat], offsets, diagwts = OPS.array_trace(result[smplng][dpool][stat].si.value, offsets=None, axis1=expandax_map[colax][0], axis2=expandax_map[colax][1], outaxis='axis1')
diagwts_shape = NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int)
diagwts_shape[expandax_map[colax][0]] = diagwts.size
diagoffsets[colax] = offsets
diagweights[colax] = NP.copy(diagwts)
result[smplng][dpool][stat] = result[smplng][dpool][stat] * pspec_unit / diagwts.reshape(diagwts_shape)
for ekey in expandax_map:
if ekey > colax:
expandax_map[ekey] -= 1
expandax_map[colax] = NP.asarray(expandax_map[colax][0]).ravel()
wts_shape = tuple(NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int))
postXwts = NP.ones(wts_shape, dtype=NP.complex)
wts_shape = NP.asarray(wts_shape)
for colaxind, colax in enumerate(xinfo['collapse_axes']):
curr_wts_shape = NP.copy(wts_shape)
curr_wts_shape[expandax_map[colax]] = -1
postXwts = postXwts * xinfo['wts']['postX'][colaxind].reshape(tuple(curr_wts_shape))
result[smplng][dpool][stat] = result[smplng][dpool][stat] * postXwts
axes_to_sum = tuple(NP.asarray([expandax_map[colax] for colax in xinfo['collapse_axes']]).ravel()) # for post-X normalization and collapse of covariance matrix
if xinfo['wts']['postXnorm']:
result[smplng][dpool][stat] = result[smplng][dpool][stat] / NP.nansum(postXwts, axis=axes_to_sum, keepdims=True) # Normalize by summing the weights over the collapsed axes
if xinfo['avgcov']:
# collapse the axes further (postXwts have already
# been applied)
diagoffset_weights = 1.0
result[smplng][dpool][stat] = NP.nanmean(result[smplng][dpool][stat], axis=axes_to_sum, keepdims=True)
for colaxind in zip(*sorted(zip(NP.arange(xinfo['collapse_axes'].size), xinfo['collapse_axes']), reverse=True))[0]:
# It is import to sort the collapsable axes in
# reverse order before deleting elements below,
# otherwise the axes ordering may be get messed up
diagoffset_weights_shape = NP.ones(result[smplng][dpool][stat].ndim, dtype=NP.int)
diagoffset_weights_shape[expandax_map[xinfo['collapse_axes'][colaxind]][0]] = diagweights[xinfo['collapse_axes'][colaxind]].size
diagoffset_weights = diagoffset_weights * diagweights[xinfo['collapse_axes'][colaxind]].reshape(diagoffset_weights_shape)
del diagoffsets[xinfo['collapse_axes'][colaxind]]
result[smplng][dpool][stat] = NP.nansum(result[smplng][dpool][stat]*diagoffset_weights, axis=axes_to_sum, keepdims=True) / NP.nansum(diagoffset_weights, axis=axes_to_sum, keepdims=True)
else:
result[smplng][dpool][stat] = factor.reshape((-1,)+tuple(NP.ones(dspec.ndim-1, dtype=NP.int))) * NP.abs(dspec * U.Jy)**2
diagoffsets = {}
expandax_map = {}
if units == 'Jy':
result[smplng][dpool][stat] = result[smplng][dpool][stat].to('Jy2 Mpc')
elif units == 'K':
result[smplng][dpool][stat] = result[smplng][dpool][stat].to('K2 Mpc3')
else:
raise ValueError('Input value for units invalid')
result[smplng][dpool]['diagoffsets'] = diagoffsets
result[smplng][dpool]['diagweights'] = diagweights
result[smplng][dpool]['axesmap'] = expandax_map
result[smplng][dpool]['nsamples_incoh'] = nsamples_incoh
result[smplng][dpool]['nsamples_coh'] = nsamples_coh
return result
############################################################################
def rescale_power_spectrum(self, cpdps, visfile, blindex, visunits='Jy'):
"""
------------------------------------------------------------------------
Rescale power spectrum to dimensional quantity by converting the ratio
given visibility amplitude information
Inputs:
cpdps [dictionary] Dictionary with the keys 'triads',
'triads_ind', 'lstbins', 'lst', 'dlst', 'lst_ind',
'oversampled' and 'resampled' corresponding to whether
resample was set to False or True in call to member function
FT(). Values under keys 'triads_ind' and 'lst_ind' are numpy
array corresponding to triad and time indices used in
selecting the data. Values under keys 'oversampled' and
'resampled' each contain a dictionary with the following keys
and values:
'z' [numpy array] Redshifts corresponding to the band
centers in 'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has
shape=(nlags,).
'kprll' [numpy array] k_parallel modes (in h/Mpc)
corresponding to 'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in
Hz) of the frequency subbands of the subband delay
spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on
each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in
Hz) of the subbands being delay transformed. It is
of size n_win. It is roughly equivalent to width in
redshift or along line-of-sight
'shape' [string] shape of the frequency window function
applied. Usual values are 'rect' (rectangular),
'bhw' (Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window
was raised.
The value is be a positive scalar with default = 1.0
'mean' [numpy array] Delay power spectrum incoherently
averaged over the axes specified in incohax using
the 'mean' key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has
shape=(nspw,nlst,ndays,ntriads,nchan). It has units
of Mpc/h. If incohax was set, those axes will be set
to 1.
'median'
[numpy array] Delay power spectrum incoherently
averaged over the axes specified in incohax using
the 'median' key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has
shape=(nspw,nlst,ndays,ntriads,nchan). It has units
of Mpc/h. If incohax was set, those axes will be set
to 1.
visfile [string] Full path to the visibility file in NPZ format that
consists of the following keys and values:
'vis' [numpy array] Complex visibilities averaged over
all redundant baselines of different classes of
baselines. It is of shape (nlst,nbl,nchan)
'last' [numpy array] Array of LST in units of days where
the fractional part is LST in days.
blindex [numpy array] 3-element array of baseline indices to use in
selecting the triad corresponding to closure phase power
spectrum in cpdps. It will index into the 'vis' array in
NPZ file visfile
visunits [string] Units of visibility in visfile. Accepted values
are 'Jy' (default; for Jansky) and 'K' (for Kelvin)
Outputs:
Same dictionary as input cpdps except it has the following additional
keys and values. Under 'resampled' and 'oversampled' keys, there are
now new keys called 'mean-absscale' and 'median-absscale' keys which
are each dictionaries with the following keys and values:
'converted' [numpy array] Values of power (in units of visunits^2) with
same shape as the values under 'mean' and 'median' keys --
(nspw,nlst,ndays,ntriads,nchan) unless some of those axes
have already been averaged coherently or incoherently
'units' [string] Units of power in key 'converted'. Its values are
square of the input visunits -- 'Jy^2' or 'K^2'
------------------------------------------------------------------------
"""
if not isinstance(cpdps, dict):
raise TypeError('Input cpdps must be a dictionary')
if not isinstance(visfile, str):
raise TypeError('Input visfile must be a string containing full file path')
if isinstance(blindex, NP.ndarray):
raise TypeError('Input blindex must be a numpy array')
if blindex.size != 3:
raise ValueError('Input blindex must be a 3-element array')
if not isinstance(visunits, str):
raise TypeError('Input visunits must be a string')
if visunits not in ['Jy', 'K']:
raise ValueError('Input visunits currently not accepted')
datapool = []
for dpool in ['resampled', 'oversampled']:
if dpool in cpdps:
datapool += [dpool]
scaleinfo = NP.load(visfile)
vis = scaleinfo['vis'][:,blindex,:] # shape=(nlst,nbl,nchan)
vis_lstfrac, vis_lstint = NP.modf(scaleinfo['last']) # shape=(nlst,)
vis_lstHA = vis_lstfrac * 24.0 # in hours
vis_lstdeg = vis_lstHA * 15.0 # in degrees
cpdps_lstdeg = 15.0*cpdps['lst'] # in degrees
lstmatrix = cpdps_lstdeg.reshape(-1,1) - vis_lstdeg.reshape(1,-1)
lstmatrix[NP.abs(lstmatrix) > 180.0] -= 360.0
ind_minlstsep = NP.argmin(NP.abs(lstmatrix), axis=1)
vis_nearestLST = vis[blindex,ind_minlstsep,:] # nlst x nbl x nchan
for dpool in datapool:
freq_wts = cpdps[dpool]['freq_wts'] # nspw x nchan
freqwtd_avgvis_nearestLST = NP.sum(freq_wts[:,NP.newaxis,NP.newaxis,:] * vis_nearestLST[NP.newaxis,:,:,:], axis=-1, keepdims=True) / NP.sum(freq_wts[:,NP.newaxis,NP.newaxis,:], axis=-1, keepdims=True) # nspw x nlst x nbl x (nchan=1)
vis_square_multscalar = 1 / NP.sum(1/NP.abs(freqwtd_avgvis_nearestLST)**2, axis=2, keepdims=True) # nspw x nlst x (nbl=1) x (nchan=1)
for stat in ['mean', 'median']:
cpdps[dpool][stat+'-absscale'] = {}
cpdps[dpool][stat+'-absscale']['converted'] = cpdps[dpool][stat] * vis_square_multscalar[:,:,NP.newaxis,:,:] # nspw x nlst x ndays x ntriads x nlags
cpdps[dpool][stat+'-absscale']['units'] = '{0}^2'.format(visunits)
return cpdps
############################################################################
def average_rescaled_power_spectrum(rcpdps, avgax, kprll_llim=None):
"""
------------------------------------------------------------------------
Average the rescaled power spectrum with physical units along certain
axes with inverse variance or regular averaging
Inputs:
rcpdps [dictionary] Dictionary with the keys 'triads',
'triads_ind', 'lstbins', 'lst', 'dlst', 'lst_ind',
'oversampled' and 'resampled' corresponding to whether
resample was set to False or True in call to member function
FT(). Values under keys 'triads_ind' and 'lst_ind' are numpy
array corresponding to triad and time indices used in
selecting the data. Values under keys 'oversampled' and
'resampled' each contain a dictionary with the following keys
and values:
'z' [numpy array] Redshifts corresponding to the band
centers in 'freq_center'. It has shape=(nspw,)
'lags' [numpy array] Delays (in seconds). It has
shape=(nlags,).
'kprll' [numpy array] k_parallel modes (in h/Mpc)
corresponding to 'lags'. It has shape=(nspw,nlags)
'freq_center'
[numpy array] contains the center frequencies (in
Hz) of the frequency subbands of the subband delay
spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts'
[numpy array] Contains frequency weights applied on
each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff'
[numpy array] contains the effective bandwidths (in
Hz) of the subbands being delay transformed. It is
of size n_win. It is roughly equivalent to width in
redshift or along line-of-sight
'shape' [string] shape of the frequency window function
applied. Usual values are 'rect' (rectangular),
'bhw' (Blackman-Harris), 'bnw' (Blackman-Nuttall).
'fftpow'
[scalar] the power to which the FFT of the window
was raised.
The value is be a positive scalar with default = 1.0
'mean' [numpy array] Delay power spectrum incoherently
averaged over the axes specified in incohax using
the 'mean' key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has
shape=(nspw,nlst,ndays,ntriads,nchan). It has units
of Mpc/h. If incohax was set, those axes will be set
to 1.
'median'
[numpy array] Delay power spectrum incoherently
averaged over the axes specified in incohax using
the 'median' key in input cpds or attribute
cPhaseDS['processed']['dspec']. It has
shape=(nspw,nlst,ndays,ntriads,nchan). It has units
of Mpc/h. If incohax was set, those axes will be set
to 1.
'mean-absscale' and 'median-absscale'
[dictionary] Each dictionary consists of the
following keys and values:
'converted' [numpy array] Values of power (in units
of value in key 'units') with same shape
as the values under 'mean' and 'median'
keys -- (nspw,nlst,ndays,ntriads,nchan)
unless some of those axes have already
been averaged coherently or incoherently
'units' [string] Units of power in key
'converted'. Its values are square of
either 'Jy^2' or 'K^2'
avgax [int, list, tuple] Specifies the axes over which the power
in absolute scale (with physical units) should be averaged.
This counts as incoherent averaging. The averaging is done
with inverse-variance weighting if the input kprll_llim is
set to choose the range of kprll from which the variance
and inverse variance will be determined. Otherwise, a
regular averaging is performed.
kprll_llim [float] Lower limit of absolute value of kprll (in Mpc/h)
beyond which the variance will be determined in order to
estimate the inverse variance weights. If set to None, the
weights are uniform. If set to a value, values beyond this
kprll_llim are used to estimate the variance and hence the
inverse-variance weights.
Outputs:
Dictionary with the same structure as the input dictionary rcpdps except
with the following additional keys and values. Under the dictionaries
under keys 'mean-absscale' and 'median-absscale', there is an additional
key-value pair:
'avg' [numpy array] Values of power (in units of value in key 'units')
with same shape as the values under 'converted' --
(nspw,nlst,ndays,ntriads,nchan) except those axes which were
averaged in this member function, and those axes will be
retained but with axis size=1.
------------------------------------------------------------------------
"""
if not isinstance(rcpdps, dict):
raise TypeError('Input rcpdps must be a dictionary')
if isinstance(avgax, int):
if avgax >= 4:
raise ValueError('Input avgax has a value greater than the maximum axis number over which averaging can be performed')
avgax = NP.asarray(avgax)
elif isinstance(avgax, (list,tuple)):
avgax = NP.asarray(avgax)
if NP.any(avgax >= 4):
raise ValueError('Input avgax contains a value greater than the maximum axis number over which averaging can be performed')
else:
raise TypeError('Input avgax must be an integer, list, or tuple')
if kprll_llim is not None:
if not isinstance(kprll_llim, (int,float)):
raise TypeError('Input kprll_llim must be a scalar')
kprll_llim = NP.abs(kprll_llim)
for dpool in datapool:
for stat in ['mean', 'median']:
wts = NP.ones((1,1,1,1,1))
if kprll_llim is not None:
kprll_ind = NP.abs(rcpdps[dpool]['kprll']) >= kprll_llim # nspw x nlags
if NP.any(kprll_ind):
if rcpdps[dpool]['z'].size > 1:
indsets = [NP.where(kprll_ind[i,:])[0] for i in range(rcpdps[dpool]['z'].size)]
common_kprll_ind = reduce(NP.intersect1d(indsets))
multidim_idx = NP.ix_(NP.arange(rcpdps[dpool]['freq_center'].size), NP.arange(rcpdps['lst'].size), NP.arange(rcpdps['days'].size), NP.arange(rcpdps['triads'].size), common_kprll_ind)
else:
multidim_idx = NP.ix_(NP.arange(rcpdps[dpool]['freq_center'].size), NP.arange(rcpdps['lst'].size), NP.arange(rcpdps['days'].size), NP.arange(rcpdps['triads'].size), kprll_ind[0,:])
else:
multidim_idx = NP.ix_(NP.arange(rcpdps[dpool]['freq_center'].size), NP.arange(rcpdps['lst'].size), NP.arange(rcpdps['days'].size), NP.arange(rcpdps['triads'].size), rcpdps[dpool]['lags'].size)
wts = 1 / NP.var(rcpdps[dpool][stat]['absscale']['rescale'][multidim_idx], axis=avgax, keepdims=True)
rcpdps[dpool][stat]['absscale']['avg'] = NP.sum(wts * rcpdps[dpool][stat]['absscale']['rescale'], axis=avgax, keepdims=True) / NP.sum(wts, axis=avgax, keepdims=True)
return rcpdps
############################################################################
def beam3Dvol(self, beamparms, freq_wts=None):
"""
------------------------------------------------------------------------
Compute three-dimensional (transverse-LOS) volume of the beam in units
of "Sr Hz".
Inputs:
beamparms [dictionary] Contains beam information. It contains the
following keys and values:
'beamfile' [string] If set to string, should contain the
filename relative to default path or absolute
path containing the power pattern. If both
'beamfile' and 'telescope' are set, the
'beamfile' will be used. The latter is used for
determining analytic beam.
'filepathtype'
[string] Specifies if the beamfile is to be
found at the 'default' location or a 'custom'
location. If set to 'default', the PRISim path
is searched for the beam file. Only applies if
'beamfile' key is set.
'filefmt' [string] External file format of the beam.
Accepted values are 'uvbeam', 'fits' and 'hdf5'
'telescope' [dictionary] Information used to analytically
determine the power pattern. used only if
'beamfile' is not set or set to None. This
specifies the type of element, its size and
orientation. It consists of the following keys
and values:
'id' [string] If set, will ignore the other keys
and use telescope details for known
telescopes. Accepted values are 'mwa',
'vla', 'gmrt', 'hera', 'paper', 'hirax',
and 'chime'
'shape' [string] Shape of antenna element. Accepted
values are 'dipole', 'delta', 'dish',
'gaussian', 'rect' and 'square'. Will be
ignored if key 'id' is set. 'delta' denotes
a delta function for the antenna element
which has an isotropic radiation pattern.
'delta' is the default when keys 'id' and
'shape' are not set.
'size' [scalar or 2-element list/numpy array]
Diameter of the telescope dish (in meters)
if the key 'shape' is set to 'dish', side
of the square aperture (in meters) if the
key 'shape' is set to 'square', 2-element
sides if key 'shape' is set to 'rect', or
length of the dipole if key 'shape' is set
to 'dipole'. Will be ignored if key 'shape'
is set to 'delta'. Will be ignored if key
'id' is set and a preset value used for the
diameter or dipole.
'orientation'
[list or numpy array] If key 'shape' is set
to dipole, it refers to the orientation of
the dipole element unit vector whose
magnitude is specified by length. If key
'shape' is set to 'dish', it refers to the
position on the sky to which the dish is
pointed. For a dipole, this unit vector must
be provided in the local ENU coordinate
system aligned with the direction cosines
coordinate system or in the Alt-Az
coordinate system. This will be used only
when key 'shape' is set to 'dipole'. This
could be a 2-element vector (transverse
direction cosines) where the third
(line-of-sight) component is determined, or
a 3-element vector specifying all three
direction cosines or a two-element
coordinate in Alt-Az system. If not provided
it defaults to an eastward pointing dipole.
If key 'shape' is set to 'dish' or
'gaussian', the orientation refers to the
pointing center of the dish on the sky. It
can be provided in Alt-Az system as a
two-element vector or in the direction
cosine coordinate system as a two- or
three-element vector. If not set in the case
of a dish element, it defaults to zenith.
This is not to be confused with the key
'pointing_center' in dictionary
'pointing_info' which refers to the
beamformed pointing center of the array. The
coordinate system is specified by the key
'ocoords'
'ocoords' [string] specifies the coordinate system
for key 'orientation'. Accepted values are
'altaz' and 'dircos'.
'element_locs'
[2- or 3-column array] Element locations that
constitute the tile. Each row specifies
location of one element in the tile. The
locations must be specified in local ENU
coordinate system. First column specifies along
local east, second along local north and the
third along local up. If only two columns are
specified, the third column is assumed to be
zeros. If 'elements_locs' is not provided, it
assumed to be a one-element system and not a
phased array as far as determination of
primary beam is concerned.
'groundplane'
[scalar] height of telescope element above
the ground plane (in meteres). Default=None
will denote no ground plane effects.
'ground_modify'
[dictionary] contains specifications to
modify the analytically computed ground
plane pattern. If absent, the ground plane
computed will not be modified. If set, it
may contain the following keys:
'scale' [scalar] positive value to scale the
modifying factor with. If not set, the
scale factor to the modification is
unity.
'max' [scalar] positive value to clip the
modified and scaled values to. If not
set, there is no upper limit
'freqs' [numpy array] Numpy array denoting frequencies
(in Hz) at which beam integrals are to be
evaluated. If set to None, it will automatically
be set from the class attribute.
'nside' [integer] NSIDE parameter for determining and
interpolating the beam. If not set, it will be
set to 64 (default).
'chromatic' [boolean] If set to true, a chromatic power
pattern is used. If false, an achromatic power
pattern is used based on a reference frequency
specified in 'select_freq'.
'select_freq'
[scalar] Selected frequency for the achromatic
beam. If not set, it will be determined to be
mean of the array in 'freqs'
'spec_interp'
[string] Method to perform spectral
interpolation. Accepted values are those
accepted in scipy.interpolate.interp1d() and
'fft'. Default='cubic'.
freq_wts [numpy array] Frequency weights centered on different
spectral windows or redshifts. Its shape is (nwin,nchan)
and should match the number of spectral channels in input
parameter 'freqs' under 'beamparms' dictionary
Output:
omega_bw [numpy array] Integral of the square of the power pattern
over transverse and spectral axes. Its shape is (nwin,)
------------------------------------------------------------------------
"""
if not isinstance(beamparms, dict):
raise TypeError('Input beamparms must be a dictionary')
if ('beamfile' not in beamparms) and ('telescope' not in beamparms):
raise KeyError('Input beamparms does not contain either "beamfile" or "telescope" keys')
if 'freqs' not in beamparms:
raise KeyError('Key "freqs" not found in input beamparms')
if not isinstance(beamparms['freqs'], NP.ndarray):
raise TypeError('Key "freqs" in input beamparms must contain a numpy array')
if 'nside' not in beamparms:
beamparms['nside'] = 64
if not isinstance(beamparms['nside'], int):
raise TypeError('"nside" parameter in input beamparms must be an integer')
if 'chromatic' not in beamparms:
beamparms['chromatic'] = True
else:
if not isinstance(beamparms['chromatic'], bool):
raise TypeError('Beam chromaticity parameter in input beamparms must be a boolean')
theta, phi = HP.pix2ang(beamparms['nside'], NP.arange(HP.nside2npix(beamparms['nside'])))
theta_phi = NP.hstack((theta.reshape(-1,1), phi.reshape(-1,1)))
if beamparms['beamfile'] is not None:
if 'filepathtype' in beamparms:
if beamparms['filepathtype'] == 'default':
beamparms['beamfile'] = prisim_path+'data/beams/'+beamparms['beamfile']
if 'filefmt' not in beamparms:
raise KeyError('Input beam file format must be specified for an external beam')
if beamparms['filefmt'].lower() in ['hdf5', 'fits', 'uvbeam']:
beamparms['filefmt'] = beamparms['filefmt'].lower()
else:
raise ValueError('Invalid beam file format specified')
if 'pol' not in beamparms:
raise KeyError('Beam polarization must be specified')
if not beamparms['chromatic']:
if 'select_freq' not in beamparms:
raise KeyError('Input reference frequency for achromatic behavior must be specified')
if beamparms['select_freq'] is None:
beamparms['select_freq'] = NP.mean(beamparms['freqs'])
if 'spec_interp' not in beamparms:
beamparms['spec_interp'] = 'cubic'
if beamparms['filefmt'] == 'fits':
external_beam = fits.getdata(beamparms['beamfile'], extname='BEAM_{0}'.format(beamparms['pol']))
external_beam_freqs = fits.getdata(beamparms['beamfile'], extname='FREQS_{0}'.format(beamparms['pol'])) # in MHz
external_beam = external_beam.reshape(-1,external_beam_freqs.size) # npix x nfreqs
elif beamparms['filefmt'] == 'uvbeam':
if uvbeam_module_found:
uvbm = UVBeam()
uvbm.read_beamfits(beamparms['beamfile'])
axis_vec_ind = 0 # for power beam
spw_ind = 0 # spectral window index
if beamparms['pol'].lower() in ['x', 'e']:
beam_pol_ind = 0
else:
beam_pol_ind = 1
external_beam = uvbm.data_array[axis_vec_ind,spw_ind,beam_pol_ind,:,:].T # npix x nfreqs
external_beam_freqs = uvbm.freq_array.ravel() # nfreqs (in Hz)
else:
raise ImportError('uvbeam module not installed/found')
if NP.abs(NP.abs(external_beam).max() - 1.0) > 1e-10:
external_beam /= NP.abs(external_beam).max()
else:
raise ValueError('Specified beam file format not currently supported')
if beamparms['chromatic']:
if beamparms['spec_interp'] == 'fft':
external_beam = external_beam[:,:-1]
external_beam_freqs = external_beam_freqs[:-1]
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(external_beam), theta_phi=theta_phi, inloc_axis=external_beam_freqs, outloc_axis=beamparms['freqs'], axis=1, kind=beamparms['spec_interp'], assume_sorted=True)
else:
nearest_freq_ind = NP.argmin(NP.abs(external_beam_freqs - beamparms['select_freq']))
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(NP.repeat(external_beam[:,nearest_freq_ind].reshape(-1,1), beamparms['freqs'].size, axis=1)), theta_phi=theta_phi, inloc_axis=beamparms['freqs'], outloc_axis=beamparms['freqs'], axis=1, assume_sorted=True)
interp_logbeam_max = NP.nanmax(interp_logbeam, axis=0)
interp_logbeam_max[interp_logbeam_max <= 0.0] = 0.0
interp_logbeam_max = interp_logbeam_max.reshape(1,-1)
interp_logbeam = interp_logbeam - interp_logbeam_max
beam = 10**interp_logbeam
else:
altaz = NP.array([90.0, 0.0]).reshape(1,-1) + NP.array([-1,1]).reshape(1,-1) * NP.degrees(theta_phi)
if beamparms['chromatic']:
beam = PB.primary_beam_generator(altaz, beamparms['freqs'], beamparms['telescope'], skyunits='altaz', pointing_info=None, pointing_center=None, freq_scale='Hz', east2ax1=0.0)
else:
beam = PB.primary_beam_generator(altaz, beamparms['select_freq'], beamparms['telescope'], skyunits='altaz', pointing_info=None, pointing_center=None, freq_scale='Hz', east2ax1=0.0)
beam = beam.reshape(-1,1) * NP.ones(beamparms['freqs'].size).reshape(1,-1)
omega_bw = DS.beam3Dvol(beam, beamparms['freqs'], freq_wts=freq_wts, hemisphere=True)
return omega_bw
############################################################################
| 309,270 | 62.310338 | 533 |
py
|
PRISim
|
PRISim-master/prisim/primary_beams.py
|
import numpy as NP
import scipy.constants as FCNST
import scipy.special as SPS
import h5py
from astroutils import geometry as GEOM
#################################################################################
def primary_beam_generator(skypos, frequency, telescope, freq_scale='GHz',
skyunits='degrees', east2ax1=0.0, pointing_info=None,
pointing_center=None, short_dipole_approx=False,
half_wave_dipole_approx=False):
"""
-----------------------------------------------------------------------------
A wrapper for estimating the power patterns of different telescopes such as
the VLA, GMRT, MWA, HERA, PAPER, HIRAX, CHIME, etc. For the VLA and GMRT,
polynomial power patterns are estimated as specified in AIPS task PBCOR. For
MWA, it is based on theoretical expressions for dipole (element) pattern
multiplied with the array pattern of isotropic radiators.
Inputs:
skypos [numpy array] Sky positions at which the power pattern is to be
estimated. Size is M x N where M is the number of locations and
N = 1 (if skyunits = degrees, for azimuthally symmetric
telescopes such as VLA and GMRT which have parabolic dishes),
N = 2 (if skyunits = altaz denoting Alt-Az coordinates), or N = 3
(if skyunits = dircos denoting direction cosine coordinates)
frequency [scalar, list or numpy vector] frequencies at which the power
pattern is to be estimated. Units can be GHz, MHz or kHz (see
input freq_scale)
telescope [dictionary] dictionary that specifies the type of element,
element size and orientation. It consists of the following keys
and values:
'id' [string] If set, will ignore the other keys and use
telescope details for known telescopes. Accepted
values are 'mwa', 'vla', 'gmrt', 'ugmrt', 'hera',
'paper', 'hirax', and 'chime'
'shape' [string] Shape of antenna element. Accepted values
are 'dipole', 'delta', 'dish', 'gaussian', 'rect'
and 'square'. Will be ignored if key 'id' is set.
'delta' denotes a delta function for the antenna
element which has an isotropic radiation pattern.
'delta' is the default when keys 'id' and 'shape'
are not set.
'size' [scalar or 2-element list/numpy array] Diameter of
the telescope dish (in meters) if the key 'shape'
is set to 'dish', side of the square aperture (in
meters) if the key 'shape' is set to 'square',
2-element sides if key 'shape' is set to 'rect',
or length of the dipole if key 'shape' is set to
'dipole'. Will be ignored if key 'shape' is set to
'delta'. Will be ignored if key 'id' is set and a
preset value used for the diameter or dipole.
'orientation' [list or numpy array] If key 'shape' is set to
dipole, it refers to the orientation of the dipole
element unit vector whose magnitude is specified by
length. If key 'shape' is set to 'dish', it refers
to the position on the sky to which the dish is
pointed. For a dipole, this unit vector must be
provided in the local ENU coordinate system aligned
with the direction cosines coordinate system or in
the Alt-Az coordinate system. This will be
used only when key 'shape' is set to 'dipole'.
This could be a 2-element vector (transverse
direction cosines) where the third (line-of-sight)
component is determined, or a 3-element vector
specifying all three direction cosines or a two-
element coordinate in Alt-Az system. If not provided
it defaults to an eastward pointing dipole. If key
'shape' is set to 'dish' or 'gaussian', the
orientation refers to the pointing center of the
dish on the sky. It can be provided in Alt-Az
system as a two-element vector or in the direction
cosine coordinate system as a two- or three-element
vector. If not set in the case of a dish element,
it defaults to zenith. This is not to be confused
with the key 'pointing_center' in dictionary
'pointing_info' which refers to the beamformed
pointing center of the array. The coordinate system
is specified by the key 'ocoords'
'ocoords' [scalar string] specifies the coordinate system
for key 'orientation'. Accepted values are 'altaz'
and 'dircos'.
'element_locs'
[2- or 3-column array] Element locations that
constitute the tile. Each row specifies
location of one element in the tile. The
locations must be specified in local ENU
coordinate system. First column specifies along
local east, second along local north and the
third along local up. If only two columns are
specified, the third column is assumed to be
zeros. If 'elements_locs' is not provided, it
assumed to be a one-element system and not a
phased array as far as determination of primary
beam is concerned.
'groundplane' [scalar] height of telescope element above the
ground plane (in meteres). Default = None will
denote no ground plane effects.
'ground_modify'
[dictionary] contains specifications to modify
the analytically computed ground plane pattern. If
absent, the ground plane computed will not be
modified. If set, it may contain the following
keys:
'scale' [scalar] positive value to scale the
modifying factor with. If not set, the
scale factor to the modification is unity.
'max' [scalar] positive value to clip the
modified and scaled values to. If not set,
there is no upper limit
freq_scale [scalar] string specifying the units of frequency. Accepted
values are 'GHz', 'MHz' and 'Hz'. Default = 'GHz'
skyunits [string] string specifying the coordinate system of the sky
positions. Accepted values are 'degrees', 'altaz', and 'dircos'.
Default = 'degrees'. If 'dircos', the direction cosines are
aligned with the local East, North, and Up
east2ax1 [scalar] Angle (in degrees) the primary axis of the aperture
makes with the local East (positive anti-clockwise).
pointing_info
[dictionary] A dictionary consisting of information relating to
pointing center in case of a phased array. The pointing center
can be specified either via element delay compensation or by
directly specifying the pointing center in a certain coordinate
system. Default = None (pointing centered at zenith). This
dictionary consists of the following tags and values:
'gains' [numpy array] Complex element gains. Must be of
size equal to the number of elements as
specified by the number of rows in antpos. If
set to None (default), all element gains are
assumed to be unity. Used only in phased array
mode.
'gainerr' [int, float] RMS error in voltage amplitude in
dB to be used in the beamformer. Random jitters
are drawn from a normal distribution in
logarithm units which are then converted to
linear units. Must be a non-negative scalar.
If not provided, it defaults to 0 (no jitter).
Used only in phased array mode.
'delays' [numpy array] Delays (in seconds) to be applied
to the tile elements. Size should be equal to
number of tile elements (number of rows in
antpos). Default = None will set all element
delays to zero phasing them to zenith. Used only
in phased array mode.
'pointing_center' [numpy array] This will apply in the absence of
key 'delays'. This can be specified as a row
vector. Should have two-columns if using Alt-Az
coordinates, or two or three columns if using
direction cosines. There is no default. The
coordinate system must be specified in
'pointing_coords' if 'pointing_center' is to be
used.
'pointing_coords' [string scalar] Coordinate system in which the
pointing_center is specified. Accepted values
are 'altaz' or 'dircos'. Must be provided if
'pointing_center' is to be used. No default.
'delayerr' [int, float] RMS jitter in delays used in the
beamformer. Random jitters are drawn from a
normal distribution with this rms. Must be
a non-negative scalar. If not provided, it
defaults to 0 (no jitter). Used only in phased
array mode.
'nrand' [int] number of random realizations of gainerr
and/or delayerr to be averaged. Must be
positive. If none provided, it defaults to 1.
Used only in phased array mode.
pointing_center
[list or numpy array] coordinates of pointing center (in the same
coordinate system as that of sky coordinates specified by
skyunits). 2-element vector if skyunits='altaz'. 2- or 3-element
vector if skyunits='dircos'. Only used with phased array primary
beams, dishes excluding VLA and GMRT, or uniform rectangular or
square apertures. For all telescopes except MWA, pointing_center
is used in place of pointing_info. For MWA, this is used if
pointing_info is not provided.
short_dipole_approx
[boolean] if True, indicates short dipole approximation
is to be used. Otherwise, a more accurate expression is used
for the dipole pattern. Default=False. Both
short_dipole_approx and half_wave_dipole_approx cannot be set
to True at the same time
half_wave_dipole_approx
[boolean] if True, indicates half-wave dipole approximation
is to be used. Otherwise, a more accurate expression is used
for the dipole pattern. Default=False
Output:
[Numpy array] Power pattern at the specified sky positions. Shape is
(nsrc, nchan)
-----------------------------------------------------------------------------
"""
try:
skypos, frequency, telescope
except NameError:
raise NameError('Sky positions, frequency and telescope inputs must be specified.')
if (freq_scale == 'ghz') or (freq_scale == 'GHz'):
frequency = frequency * 1.0e9
elif (freq_scale == 'mhz') or (freq_scale == 'MHz'):
frequency = frequency * 1.0e6
elif (freq_scale == 'khz') or (freq_scale == 'kHz'):
frequency = frequency * 1.0e3
frequency = NP.asarray(frequency).reshape(-1)
if (telescope is None) or (not isinstance(telescope, dict)):
raise TypeError('telescope must be specified as a dictionary')
if 'id' in telescope:
if (telescope['id'] == 'vla') or ('gmrt' in telescope['id']):
if skyunits == 'altaz':
angles = 90.0 - skypos[:,0]
elif skyunits == 'dircos':
angles = NP.arccos(NP.sqrt(1.0 - NP.sum(skypos[:,2]**2, axis=1)))
elif skyunits == 'degrees':
angles = skypos
else:
raise ValueError('skyunits must be "altaz", "dircos" or "degrees".')
if telescope['id'] == 'vla':
pb = VLA_primary_beam_PBCOR(angles, frequency/1e9, 'degrees')
elif 'gmrt' in telescope['id']:
pb = GMRT_primary_beam(angles, frequency/1e9, 'degrees', instrument=telescope['id'])
elif (telescope['id'] == 'hera') or (telescope['id'] == 'hirax'):
if telescope['id'] == 'hera':
dish_dia = 14.0
else:
dish_dia = 6.0
pb = airy_disk_pattern(dish_dia, skypos, frequency, skyunits=skyunits,
peak=1.0, pointing_center=NP.asarray(telescope['orientation']),
pointing_coords=telescope['ocoords'],
power=True, small_angle_tol=1e-10)
elif telescope['id'] == 'mwa':
if (skyunits == 'altaz') or (skyunits == 'dircos'):
if ('orientation' in telescope) and ('ocoords' in telescope):
orientation = NP.asarray(telescope['orientation']).reshape(1,-1)
ocoords = telescope['ocoords']
elif ('orientation' not in telescope) and ('ocoords' in telescope):
ocoords = telescope['ocoords']
if telescope['ocoords'] == 'altaz':
orientation = NP.asarray([0.0, 90.0]).reshape(1,-1)
elif telescope['ocoords'] == 'dircos':
orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1)
else:
raise ValueError('key "ocoords" in telescope dictionary contains invalid value')
elif ('orientation' in telescope) and ('ocoords' not in telescope):
raise KeyError('key "ocoords" in telescope dictionary not specified.')
else:
ocoords = 'dircos'
orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1)
ep = dipole_field_pattern(0.74, skypos, dipole_coords=ocoords,
dipole_orientation=orientation,
skycoords=skyunits, wavelength=FCNST.c/frequency,
short_dipole_approx=short_dipole_approx,
half_wave_dipole_approx=half_wave_dipole_approx,
power=False)
ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random ralizations
if pointing_info is None: # Use analytical formula
if skyunits == 'altaz':
pointing_center = NP.asarray([90.0, 270.0]).reshape(1,-1)
elif skyunits == 'dircos':
pointing_center = NP.asarray([0.0, 0.0, 1.0]).reshape(1,-1)
else:
raise ValueError('skyunits for MWA must be "altaz" or "dircos"')
irap = isotropic_radiators_array_field_pattern(4, 4, 1.1, 1.1, skypos,
FCNST.c/frequency, east2ax1=east2ax1,
pointing_center=pointing_center,
skycoords=skyunits, power=False)
irap = irap[:,:,NP.newaxis] # add an axis to be compatible with random ralizations
else: # Call the beamformer
if 'element_locs' not in telescope:
nrand = 1
xlocs, ylocs = NP.meshgrid(1.1*NP.linspace(-1.5,1.5,4), 1.1*NP.linspace(1.5,-1.5,4))
element_locs = NP.hstack((xlocs.reshape(-1,1), ylocs.reshape(-1,1), NP.zeros(xlocs.size).reshape(-1,1)))
else:
element_locs = telescope['element_locs']
pinfo = {}
gains = None
if 'delays' in pointing_info:
pinfo['delays'] = pointing_info['delays']
if 'delayerr' in pointing_info:
pinfo['delayerr'] = pointing_info['delayerr']
if 'pointing_center' in pointing_info:
pinfo['pointing_center'] = pointing_info['pointing_center']
if 'pointing_coords' in pointing_info:
pinfo['pointing_coords'] = pointing_info['pointing_coords']
if 'gains' in pointing_info:
pinfo['gains'] = pointing_info['gains']
if 'gainerr' in pointing_info:
pinfo['gainerr'] = pointing_info['gainerr']
if 'nrand' in pointing_info:
pinfo['nrand'] = pointing_info['nrand']
irap = array_field_pattern(element_locs, skypos,
skycoords=skyunits,
pointing_info=pinfo,
wavelength=FCNST.c/frequency,
power=False)
nrand = irap.shape[-1]
pb = NP.mean(NP.abs(ep * irap)**2, axis=2) # Power pattern is square of the field pattern
else:
raise ValueError('skyunits must be in Alt-Az or direction cosine coordinates for MWA.')
elif (telescope['id'] == 'mwa_dipole') or (telescope['id'] == 'paper'):
if telescope['id'] == 'mwa_dipole':
dipole_size = 0.74
else:
dipole_size = 2.0
if (skyunits == 'altaz') or (skyunits == 'dircos'):
if ('orientation' in telescope) and ('ocoords' in telescope):
orientation = NP.asarray(telescope['orientation']).reshape(1,-1)
ocoords = telescope['ocoords']
elif ('orientation' not in telescope) and ('ocoords' in telescope):
ocoords = telescope['ocoords']
if telescope['ocoords'] == 'altaz':
orientation = NP.asarray([0.0, 90.0]).reshape(1,-1)
elif telescope['ocoords'] == 'dircos':
orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1)
else:
raise ValueError('key "ocoords" in telescope dictionary contains invalid value')
elif ('orientation' in telescope) and ('ocoords' not in telescope):
raise KeyError('key "ocoords" in telescope dictionary not specified.')
else:
ocoords = 'dircos'
orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1)
ep = dipole_field_pattern(dipole_size, skypos, dipole_coords=ocoords,
dipole_orientation=orientation,
skycoords=skyunits, wavelength=FCNST.c/frequency,
short_dipole_approx=short_dipole_approx,
half_wave_dipole_approx=half_wave_dipole_approx,
power=False)
pb = NP.abs(ep)**2 # Power pattern is square of the field pattern
else:
raise ValueError('skyunits must be in Alt-Az or direction cosine coordinates for MWA dipole.')
else:
raise ValueError('No presets available for the specified telescope ID. Set custom parameters instead in input parameter telescope.')
else:
if 'shape' not in telescope:
telescope['shape'] = 'delta'
ep = 1.0
elif telescope['shape'] == 'delta':
ep = 1.0
elif telescope['shape'] == 'dipole':
ep = dipole_field_pattern(telescope['size'], skypos,
dipole_coords=telescope['ocoords'],
dipole_orientation=telescope['orientation'],
skycoords=skyunits, wavelength=FCNST.c/frequency,
short_dipole_approx=short_dipole_approx,
half_wave_dipole_approx=half_wave_dipole_approx,
power=False)
ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random ralizations
elif telescope['shape'] == 'dish':
ep = airy_disk_pattern(telescope['size'], skypos, frequency, skyunits=skyunits,
peak=1.0, pointing_center=pointing_center,
power=False, small_angle_tol=1e-10)
ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random ralizations
elif telescope['shape'] == 'gaussian':
ep = gaussian_beam(telescope['size'], skypos, frequency, skyunits=skyunits,
pointing_center=pointing_center, power=False)
ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random ralizations
elif telescope['shape'] == 'rect':
ep = uniform_rectangular_aperture(telescope['size'], skypos, frequency, skyunits=skyunits, east2ax1=east2ax1, pointing_center=pointing_center, power=False)
elif telescope['shape'] == 'square':
ep = uniform_square_aperture(telescope['size'], skypos, frequency, skyunits=skyunits, east2ax1=east2ax1, pointing_center=pointing_center, power=False)
else:
raise ValueError('Value in key "shape" of telescope dictionary invalid.')
if pointing_info is not None: # Call the beamformer
if 'element_locs' not in telescope:
nrand = 1
irap = NP.ones(skypos.shape[0]*frequency.size).reshape(skypos.shape[0],frequency.size,nrand)
else:
element_locs = telescope['element_locs']
pinfo = {}
gains = None
gainerr = None
if 'delays' in pointing_info:
pinfo['delays'] = pointing_info['delays']
if 'delayerr' in pointing_info:
pinfo['delayerr'] = pointing_info['delayerr']
if 'pointing_center' in pointing_info:
pinfo['pointing_center'] = pointing_info['pointing_center']
if 'pointing_coords' in pointing_info:
pinfo['pointing_coords'] = pointing_info['pointing_coords']
if 'gains' in pointing_info:
pinfo['gains'] = pointing_info['gains']
if 'gainerr' in pointing_info:
pinfo['gainerr'] = pointing_info['gainerr']
if 'nrand' in pointing_info:
pinfo['nrand'] = pointing_info['nrand']
irap = array_field_pattern(element_locs, skypos, skycoords=skyunits,
pointing_info=pinfo,
wavelength=FCNST.c/frequency, power=False)
nrand = irap.shape[-1]
else:
nrand = 1
irap = NP.ones(skypos.shape[0]*frequency.size).reshape(skypos.shape[0],frequency.size,nrand) # Last axis indicates number of random realizations
pb = NP.mean(NP.abs(ep * irap)**2, axis=2) # Power pattern is square of the field pattern averaged over all random realizations of delays and gains if specified
if 'groundplane' in telescope:
gp = 1.0
if telescope['groundplane'] is not None:
if 'shape' in telescope:
if telescope['shape'] != 'dish': # If shape is not dish, compute ground plane pattern
modifier = None
if 'ground_modify' in telescope:
modifier = telescope['ground_modify']
gp = ground_plane_field_pattern(telescope['groundplane'], skypos, skycoords=skyunits,
wavelength=FCNST.c/frequency, angle_units='degrees',
modifier=modifier, power=False)
else:
modifier = None
if 'ground_modify' in telescope:
modifier = telescope['ground_modify']
gp = ground_plane_field_pattern(telescope['groundplane'], skypos, skycoords=skyunits,
wavelength=FCNST.c/frequency, angle_units='degrees',
modifier=modifier, power=False)
pb *= gp**2
return pb
#################################################################################
def VLA_primary_beam_PBCOR(skypos, frequency, skyunits='degrees'):
    """
    -----------------------------------------------------------------------------
    Primary beam power pattern for the VLA dishes based on the polynomial formula
    in AIPS task PBCOR

    Inputs:

    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations and N = 1 (if skyunits = degrees), N = 2 (if
                skyunits = altaz denoting Alt-Az coordinates), or N = 3 (if
                skyunits = dircos denoting direction cosine coordinates)

    frequency   [list or numpy vector] frequencies (in GHz) at which the power
                pattern is to be estimated. Frequencies differing by too much
                and extending over the usual bands cannot be given.

    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'degrees', 'altaz', and 'dircos'.
                Default = 'degrees'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up

    Output:

    [Numpy array] Power pattern at the specified sky positions. Shape is
    (nsrc, nchan)

    Raises:

    ValueError if skyunits is unrecognized or if the polynomial evaluates to
    significantly more than unity (formula used outside its validity range)
    -----------------------------------------------------------------------------
    """

    try:
        skypos, frequency
    except NameError:
        raise NameError('skypos and frequency are required in VLA_primary_beam_PBCOR().')

    frequency = NP.asarray(frequency).ravel()

    # Reference band centers (GHz) and the matching PBCOR polynomial
    # coefficients (G1, G2, G3), one row per band
    freq_ref = NP.asarray([0.0738, 0.3275, 1.465, 4.885, 8.435, 14.965, 22.485, 43.315]).reshape(-1,1)
    parms_ref = NP.asarray([[-0.897, 2.71, -0.242],
                            [-0.935, 3.23, -0.378],
                            [-1.343, 6.579, -1.186],
                            [-1.372, 6.940, -1.309],
                            [-1.306, 6.253, -1.100],
                            [-1.305, 6.155, -1.030],
                            [-1.417, 7.332, -1.352],
                            [-1.321, 6.185, -0.983]])

    idx = NP.argmin(NP.abs(freq_ref - frequency[0])) # Index of closest reference band

    skypos = NP.asarray(skypos)

    # Angular offset from the beam axis (degrees), as a column vector so that
    # multiplying by the frequency row vector broadcasts to (nsrc, nchan).
    # (The previous implementation repeated frequency to skypos.size rows,
    # which broke for altaz/dircos input where skypos.size = 2*nsrc or 3*nsrc.)
    if skyunits == 'degrees':
        angle = skypos.reshape(-1,1)
    elif skyunits == 'altaz':
        angle = 90.0 - skypos[:,0].reshape(-1,1)
    elif skyunits == 'dircos':
        angle = NP.degrees(NP.arccos(skypos[:,-1].reshape(-1,1)))
    else:
        raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in VLA_primary_beam_PBCOR().')

    # PBCOR polynomial argument: (offset in arcmin x frequency in GHz)**2
    x = (angle * 60.0 * frequency.reshape(1,-1))**2

    pb = 1.0 + parms_ref[idx,0]*x/1e3 + parms_ref[idx,1]*(x**2)/1e7 + \
         parms_ref[idx,2]*(x**3)/1e10

    eps = 0.01
    if NP.any(pb >= 1+eps):
        raise ValueError('Primary beam exceeds unity by a significant amount. Check the validity of the Primary beam equation for the angles specified. Consider using a narrower field of view radius and frequency range over which the equations are valid.')

    return pb
##########################################################################
def airy_disk_pattern(diameter, skypos, frequency, skyunits='altaz', peak=1.0,
                      pointing_center=None, pointing_coords=None,
                      small_angle_tol=1e-10, power=True):
    """
    -----------------------------------------------------------------------------
    Field pattern of a uniformly illuminated dish

    Inputs:

    diameter    [scalar] Diameter of the dish (in m)

    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations and N = 1 (if skyunits = degrees), N = 2 (if
                skyunits = altaz denoting Alt-Az coordinates), or N = 3 (if
                skyunits = dircos denoting direction cosine coordinates). If
                skyunits = altaz, then altitude and azimuth must be in degrees

    frequency   [list or numpy vector] frequencies (in Hz) at which the power
                pattern is to be estimated (the wavenumber is computed as
                2*pi*frequency/c). Frequencies differing by too much and
                extending over the usual bands cannot be given.

    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'degrees', 'altaz', and 'dircos'.
                Default = 'degrees'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up. If 'altaz', then
                altitude and azimuth must be in degrees.

    peak        [scalar] value of the pattern at the pointing center.
                Default = 1.0

    pointing_center
                [numpy array] 1xN numpy array, where N is the same as in skypos.
                If None specified, pointing_center is assumed to be at zenith.

    pointing_coords
                [string] Coordinates of the pointing center. If None specified,
                it is assumed to be same as skyunits. Same allowed values as
                skyunits. Default = None.

    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern.

    small_angle_tol
                [scalar] Small angle limit (in radians) below which division by
                zero is to be avoided. Default = 1e-10

    Output:

    [Numpy array] Field or Power pattern at the specified sky positions.
    -----------------------------------------------------------------------------
    """

    try:
        diameter, skypos, frequency
    except NameError:
        raise NameError('diameter, skypos and frequency are required in airy_disk_pattern().')

    skypos = NP.asarray(skypos)
    frequency = NP.asarray(frequency).ravel()

    if pointing_center is None:
        # Pointing assumed at zenith; x is the angle from the beam axis (radians)
        if skyunits == 'degrees':
            x = NP.radians(skypos)
        elif skyunits == 'altaz':
            x = NP.radians(90.0 - skypos[:,0])
        elif skyunits == 'dircos':
            x = NP.arcsin(NP.sqrt(skypos[:,0]**2 + skypos[:,1]**2))
        else:
            raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in airy_disk_pattern().')
        zero_ind = x >= NP.pi/2 # Determine positions beyond the horizon
    else:
        if pointing_coords is None:
            pointing_coords = skyunits
        if skyunits == 'degrees':
            # skypos already holds the angular offset from the pointing axis
            x = NP.radians(skypos)
            # Fix: zero_ind was previously left undefined on this branch,
            # causing a NameError when blanking below the horizon
            zero_ind = x >= NP.pi/2
        else:
            pc_altaz = pointing_center.reshape(1,-1)
            if pointing_coords == 'altaz':
                if pc_altaz.size != 2:
                    raise IndexError('Pointing center in Alt-Az coordinates must contain exactly two elements.')
            elif pointing_coords == 'dircos':
                if pc_altaz.size != 3:
                    raise IndexError('Pointing center in direction cosine coordinates must contain exactly three elements.')
                pc_altaz = GEOM.dircos2altaz(pc_altaz, units='degrees')

            skypos_altaz = NP.copy(skypos)
            if skyunits == 'dircos':
                skypos_altaz = GEOM.dircos2altaz(skypos, units='degrees')
            elif skyunits != 'altaz':
                raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in airy_disk_pattern().')
            x = GEOM.sphdist(skypos_altaz[:,1], skypos_altaz[:,0], pc_altaz[0,1], pc_altaz[0,0])
            x = NP.radians(x)
            zero_ind = NP.logical_or(x >= NP.pi/2, skypos_altaz[:,0] <= 0.0) # Determine positions beyond the horizon of the sky as well as those beyond the horizon of the dish, if it is pointed away from the horizon

    k = 2*NP.pi*frequency/FCNST.c       # wavenumber (rad/m), one per channel
    k = k.reshape(1,-1)

    # Substitute a small positive angle where x ~ 0 to avoid division by zero
    # in the Airy expression 2*J1(z)/z
    small_angles_ind = x < small_angle_tol
    x = NP.where(small_angles_ind, small_angle_tol, x)
    x = x.reshape(-1,1)

    pattern = 2 * SPS.j1(k*0.5*diameter*NP.sin(x)) / (k*0.5*diameter*NP.sin(x))

    pattern[zero_ind,:] = 0.0   # Blank all values beyond the horizon

    # Normalize by the on-axis response so the peak equals the requested value
    maxval = 2 * SPS.j1(k*0.5*diameter*NP.sin(small_angle_tol)) / (k*0.5*diameter*NP.sin(small_angle_tol))
    if power:
        pattern = NP.abs(pattern)**2
        maxval = maxval**2

    pattern *= peak / maxval
    return pattern
##########################################################################
def gaussian_beam(diameter, skypos, frequency, skyunits='altaz',
                  pointing_center=None, pointing_coords=None, power=True):
    """
    -----------------------------------------------------------------------------
    Field/power pattern of a Gaussian illumination

    Inputs:

    diameter    [scalar] FWHM diameter of the dish (in m)

    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations and N = 1 (if skyunits = degrees), N = 2 (if
                skyunits = altaz denoting Alt-Az coordinates), or N = 3 (if
                skyunits = dircos denoting direction cosine coordinates). If
                skyunits = altaz, then altitude and azimuth must be in degrees

    frequency   [list or numpy vector] frequencies (in Hz) at which the power
                pattern is to be estimated (used to convert the aperture FWHM
                into units of wavelength via c/frequency). Frequencies
                differing by too much and extending over the usual bands
                cannot be given.

    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'degrees', 'altaz', and 'dircos'.
                Default = 'degrees'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up. If 'altaz', then
                altitude and azimuth must be in degrees.

    pointing_center
                [numpy array] 1xN numpy array, where N is the same as in skypos.
                If None specified, pointing_center is assumed to be at zenith.

    pointing_coords
                [string] Coordinates of the pointing center. If None specified,
                it is assumed to be same as skyunits. Same allowed values as
                skyunits. Default = None.

    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern.

    Output:

    [Numpy array] Field or Power pattern at the specified sky positions.
    -----------------------------------------------------------------------------
    """

    try:
        diameter, skypos, frequency
    except NameError:
        raise NameError('diameter, skypos and frequency are required in gaussian_beam().')

    skypos = NP.asarray(skypos)
    frequency = NP.asarray(frequency).ravel()

    if pointing_center is None:
        # Pointing assumed at zenith; x is the angle from the beam axis (radians)
        if skyunits == 'degrees':
            x = NP.radians(skypos)
        elif skyunits == 'altaz':
            x = NP.radians(90.0 - skypos[:,0])
        elif skyunits == 'dircos':
            x = NP.arcsin(NP.sqrt(skypos[:,0]**2 + skypos[:,1]**2))
        else:
            raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in gaussian_beam().')
        zero_ind = x >= NP.pi/2 # Determine positions beyond the horizon
    else:
        if pointing_coords is None:
            pointing_coords = skyunits
        if skyunits == 'degrees':
            # skypos already holds the angular offset from the pointing axis
            x = NP.radians(skypos)
            # Fix: zero_ind was previously left undefined on this branch,
            # causing a NameError when blanking below the horizon
            zero_ind = x >= NP.pi/2
        else:
            pc_altaz = pointing_center.reshape(1,-1)
            if pointing_coords == 'altaz':
                if pc_altaz.size != 2:
                    raise IndexError('Pointing center in Alt-Az coordinates must contain exactly two elements.')
            elif pointing_coords == 'dircos':
                if pc_altaz.size != 3:
                    raise IndexError('Pointing center in direction cosine coordinates must contain exactly three elements.')
                pc_altaz = GEOM.dircos2altaz(pc_altaz, units='degrees')

            skypos_altaz = NP.copy(skypos)
            if skyunits == 'dircos':
                skypos_altaz = GEOM.dircos2altaz(skypos, units='degrees')
            elif skyunits != 'altaz':
                raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in gaussian_beam().')
            x = GEOM.sphdist(skypos_altaz[:,1], skypos_altaz[:,0], pc_altaz[0,1], pc_altaz[0,0])
            x = NP.radians(x)
            zero_ind = NP.logical_or(x >= NP.pi/2, skypos_altaz[:,0] <= 0.0) # Determine positions beyond the horizon of the sky as well as those beyond the horizon of the dish, if it is pointed away from the horizon

    x = x.reshape(-1,1) # nsrc x 1

    # Convert the aperture-plane FWHM to a Gaussian sigma in wavelengths ("u"),
    # then Fourier-transform to the sky (direction cosine) domain:
    # exp(-a t**2) <--> exp(-(pi*f)**2/a)
    # 2 x sigma_aprtr**2 = 1/a
    # 2 x sigma_dircos**2 = a / pi**2 = 1 / (2 * pi**2 * sigma_aprtr**2)
    sigma_aprtr = diameter / (2.0 * NP.sqrt(2.0 * NP.log(2.0))) / (FCNST.c/frequency)
    sigma_dircos = 1.0 / (2 * NP.pi * sigma_aprtr)
    sigma_dircos = sigma_dircos.reshape(1,-1) # 1 x nchan
    dircos = NP.sin(x)
    pattern = NP.exp(-0.5 * (dircos/sigma_dircos)**2)

    pattern[zero_ind,:] = 0.0   # Blank all values beyond the horizon

    if power:
        pattern = NP.abs(pattern)**2

    return pattern
##########################################################################
def GMRT_primary_beam(skypos, frequency, skyunits='degrees', instrument='gmrt'):
    """
    -----------------------------------------------------------------------------
    Primary beam power pattern for the GMRT dishes based on the polynomial
    formula in AIPS task PBCOR

    Inputs:

    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations and N = 1 (if skyunits = degrees), N = 2 (if
                skyunits = altaz denoting Alt-Az coordinates), or N = 3 (if
                skyunits = dircos denoting direction cosine coordinates)

    frequency   [list or numpy vector] frequencies (in GHz) at which the power
                pattern is to be estimated. Frequencies differing by too much
                and extending over the usual bands cannot be given.

    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'degrees', 'altaz', and 'dircos'.
                Default = 'degrees'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up

    instrument  [string] string specifying if the instrument is the new GMRT
                ('ugmrt') or the old GMRT ('gmrt'). Default='gmrt'.

    Output:

    [Numpy array] Power pattern at the specified sky positions. Shape is
    (nsrc, nchan)

    Raises:

    ValueError if skyunits is unrecognized, if the polynomial yields NaN
    (e.g. no uGMRT coefficients exist for the selected band), or if the beam
    significantly exceeds unity (formula used outside its validity range)
    -----------------------------------------------------------------------------
    """

    try:
        skypos, frequency
    except NameError:
        raise NameError('skypos and frequency are required in GMRT_primary_beam().')

    frequency = NP.asarray(frequency).ravel()

    # Reference band centers (GHz) and 4th-order PBCOR polynomial coefficients
    # per band for the legacy GMRT and the upgraded GMRT. The uGMRT has no
    # coefficients for the 0.235 GHz band (NaN sentinels trigger the check below).
    freq_ref = NP.asarray([0.235, 0.325, 0.610, 1.420]).reshape(-1,1)
    parms_ref = {}
    parms_ref['gmrt'] = NP.asarray([[-3.366  , 46.159  , -29.963  ,  7.529  ],
                                    [-3.397  , 47.192  , -30.931  ,  7.803  ],
                                    [-3.486  , 47.749  , -35.203  , 10.399  ],
                                    [-2.27961, 21.4611 ,  -9.7929 ,  1.80153]])
    parms_ref['ugmrt'] = NP.asarray([[NP.nan  , NP.nan  , NP.nan  , NP.nan ],
                                     [-2.939  , 33.312  , -16.659 ,  3.006 ],
                                     [-3.190  , 38.642  , -20.471 ,  3.964 ],
                                     [-2.608  , 27.357  , -13.091 ,  2.365 ]])

    idx = NP.argmin(NP.abs(freq_ref - frequency[0])) # Index of closest reference band

    skypos = NP.asarray(skypos)

    # Angular offset from the beam axis (degrees), as a column vector so that
    # multiplying by the frequency row vector broadcasts to (nsrc, nchan).
    # (The previous implementation repeated frequency to skypos.size rows,
    # which broke for altaz/dircos input where skypos.size = 2*nsrc or 3*nsrc.)
    if skyunits == 'degrees':
        angle = skypos.reshape(-1,1)
    elif skyunits == 'altaz':
        angle = 90.0 - skypos[:,0].reshape(-1,1)
    elif skyunits == 'dircos':
        angle = NP.degrees(NP.arccos(skypos[:,-1].reshape(-1,1)))
    else:
        raise ValueError('skyunits must be "degrees", "altaz" or "dircos" in GMRT_primary_beam().')

    # PBCOR polynomial argument: (offset in arcmin x frequency in GHz)**2
    x = (angle * 60.0 * frequency.reshape(1,-1))**2

    pb = 1.0 + parms_ref[instrument][idx,0]*x/1e3 + parms_ref[instrument][idx,1]*(x**2)/1e7 + parms_ref[instrument][idx,2]*(x**3)/1e10 + parms_ref[instrument][idx,3]*(x**4)/1e13

    if NP.any(NP.isnan(pb)):
        raise ValueError('Primary beam values were found to be NaN in some case(s). Check if the polynomial equations are valid for the frequencies specified.')

    eps = 0.01
    if NP.any(pb >= 1+eps):
        raise ValueError('Primary beam exceeds unity by a significant amount. Check the validity of the Primary beam equation for the angles specified. Consider using a narrower field of view radius and frequency range over which the equations are valid.')

    return pb
#################################################################################
def ground_plane_field_pattern(height, skypos, skycoords=None, wavelength=1.0,
                               angle_units=None, modifier=None, power=True):
    """
    -----------------------------------------------------------------------------
    Compute the field pattern of ground plane of specified height at the
    specified sky positions at the specified wavelength.

    Inputs:

    height      [scalar] height of the dipole above ground plane (in meters)

    skypos      [numpy array] Sky positions at which the field pattern is to
                be estimated. Size is M x N where M is the number of
                locations and N = 2 (if skycoords = 'altaz'), N = 2 or 3 (if
                skycoords = 'dircos'). If only transverse direction cosines
                are provided (N=2, skycoords='dircos'), the line-of-sight
                component will be determined appropriately.

    Keyword Inputs:

    skycoords   [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'altaz' and 'dircos'. Must be
                specified; there is no default.

    wavelength  [scalar, list or numpy vector] Wavelengths at which the field
                dipole pattern is to be estimated. Must be in the same units as
                the dipole length

    angle_units [string] Units of angles used when Alt-Az coordinates are
                used in case of skypos or dipole_orientation. Accepted
                values are 'degrees' and 'radians'. If none given,
                default='degrees' is used.

    modifier    [dictionary] Dictionary specifying modifications to the
                ground plane. If modifier is set to None, the ground plane
                is not modified from the analytical value. If not set to
                None, it may contain the following two keys:
                'scale'   [scalar] positive value to scale the modifying
                          factor with. If not set, the scale factor to the
                          modification is unity.
                'max'     [scalar] positive value to clip the modified and
                          scaled values to. If not set, there is no upper
                          limit

    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern.

    Output:

    Ground plane electric field or power pattern, a numpy array with number of
    rows equal to the number of sky positions (which is equal to the number of
    rows in skypos) and number of columns equal to number of wavelengths
    specified.
    -----------------------------------------------------------------------------
    """

    try:
        height, skypos
    except NameError:
        raise NameError('Dipole height above ground plane and sky positions must be specified. Check inputs.')

    if not isinstance(height, (int,float)):
        raise TypeError('Dipole height above ground plane should be a scalar.')
    if height <= 0.0:
        raise ValueError('Dipole height above ground plane should be positive.')

    # Normalize wavelength to a 1-D numpy array of positive values
    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')

    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')

    if skycoords is not None:
        if not isinstance(skycoords, str):
            raise TypeError('skycoords must be a string. Allowed values are "altaz" and "dircos"')
        elif (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos".')
    else:
        raise ValueError('skycoords must be specified. Allowed values are "altaz" and "dircos"')

    if skycoords == 'altaz':
        # Validate Alt-Az input and convert to direction cosines
        if angle_units is None:
            angle_units = 'degrees'
        elif not isinstance(angle_units, str):
            raise TypeError('angle_units must be a string. Allowed values are "degrees" and "radians".')
        elif (angle_units != 'degrees') and (angle_units != 'radians'):
            raise ValueError('angle_units must be "degrees" or "radians".')

        skypos = NP.asarray(skypos)
        if angle_units == 'radians':
            skypos = NP.degrees(skypos)

        if skypos.ndim < 2:
            if len(skypos) == 2:
                skypos = NP.asarray(skypos).reshape(1,2)
            else:
                raise ValueError('skypos must be a Nx2 Numpy array.')
        elif skypos.ndim > 2:
            raise ValueError('skypos must be a Nx2 Numpy array.')
        else:
            if skypos.shape[1] != 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')

        skypos_dircos = GEOM.altaz2dircos(skypos, units='degrees')
    else:
        # Validate direction-cosine input; fill in or repair the line-of-sight
        # component so all positions are unit vectors
        if skypos.ndim < 2:
            if (len(skypos) == 2) or (len(skypos) == 3):
                skypos = NP.asarray(skypos).reshape(1,-1)
            else:
                raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
        elif skypos.ndim > 2:
            raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
        else:
            if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if NP.any(NP.abs(skypos[:,0]) > 1.0) or NP.any(NP.abs(skypos[:,1]) > 1.0):
                    raise ValueError('skypos in transverse direction cosine coordinates found to be exceeding unity.')
                else:
                    if skypos.shape[1] == 3:
                        eps = 1.0e-10
                        if NP.any(NP.abs(NP.sqrt(NP.sum(skypos**2, axis=1)) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                            print('Warning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                            # Fix: the line-of-sight direction cosine of a unit
                            # vector is n = sqrt(1 - l^2 - m^2), not
                            # 1 - sqrt(l^2 + m^2) as previously computed (the two
                            # agree only at zenith). Clip guards against small
                            # negative arguments from round-off or positions
                            # beyond the horizon.
                            skypos[:,2] = NP.sqrt(NP.clip(1.0 - NP.sum(skypos[:,:2]**2, axis=1), 0.0, None))
                    else:
                        # Same fix as above when only (l, m) are supplied
                        skypos = NP.hstack((skypos, NP.sqrt(NP.clip(1.0 - NP.sum(skypos[:,:2]**2, axis=1), 0.0, None)).reshape(-1,1)))

        skypos_dircos = skypos

    k = 2 * NP.pi / wavelength      # wavenumber, one per wavelength

    skypos_altaz = GEOM.dircos2altaz(skypos_dircos, units='radians')
    # Image-dipole interference above a perfect ground plane:
    # 2*sin(k*h*sin(alt)); broadcasting yields (nsrc, nwavelength)
    ground_pattern = 2 * NP.sin(k.reshape(1,-1) * height * NP.sin(skypos_altaz[:,0].reshape(-1,1))) # array broadcasting

    if modifier is not None:
        if isinstance(modifier, dict):
            # Empirical modification emphasizing low-elevation response
            val = 1.0 / NP.sqrt(NP.abs(skypos_dircos[:,2]))
            if 'scale' in modifier:
                val *= modifier['scale']
            if 'max' in modifier:
                val = NP.clip(val, 0.0, modifier['max'])
            val = val[:,NP.newaxis]
            ground_pattern *= val

    # Normalize by the zenith response
    max_pattern = 2 * NP.sin(k.reshape(1,-1) * height * NP.sin(NP.pi/2).reshape(-1,1)) # array broadcasting
    ground_pattern = ground_pattern / max_pattern

    if power:
        return NP.abs(ground_pattern)**2
    else:
        return ground_pattern
#################################################################################
def dipole_field_pattern(length, skypos, dipole_coords=None, skycoords=None,
                         dipole_orientation=None, wavelength=1.0, angle_units=None,
                         short_dipole_approx=False, half_wave_dipole_approx=True,
                         power=True):
    """
    -----------------------------------------------------------------------------
    Compute the dipole field (or power) pattern of a dipole of specified length
    at the specified sky positions and wavelengths.

    Inputs:

    length     [scalar] length of the dipole (same units as wavelength)

    skypos     [numpy array] Sky positions at which the field pattern is to
               be estimated. Size is M x N where M is the number of
               locations and N = 2 (if skycoords = 'altaz'), N = 2 or 3 (if
               skycoords = 'dircos'). If only transverse direction cosines
               are provided (N=2, skycoords='dircos'), the line-of-sight
               component is determined as n = sqrt(1 - l**2 - m**2)

    Keyword Inputs:

    dipole_coords  [string] coordinate system for the unit vector in
                   dipole_orientation. Accepted values are 'altaz' (Alt-Az)
                   and 'dircos' (direction cosines). If None, defaults to
                   the value of skycoords

    dipole_orientation
                   [list or numpy array] Orientation of the dipole element
                   unit vector in the coordinate system specified by
                   dipole_coords. If dipole_coords='altaz', a 2-element
                   vector. If 'dircos', a 2-element vector (transverse
                   direction cosines, the line-of-sight component is
                   determined) or a 3-element vector of all three direction
                   cosines. If None, defaults to an eastward pointing dipole
                   ([0.0, 90.0] in 'altaz', [1.0, 0.0, 0.0] in 'dircos')

    skycoords  [string] coordinate system of the sky positions. Accepted
               values are 'altaz' and 'dircos'. If None, defaults to the
               value of dipole_coords

    wavelength [scalar, list or numpy vector] Wavelengths at which the dipole
               field pattern is to be estimated. Must be in the same units
               as the dipole length

    angle_units    [string] Units of angles used when Alt-Az coordinates are
                   used for skypos or dipole_orientation. Accepted values
                   are 'degrees' and 'radians'. Default: 'degrees'

    short_dipole_approx
                   [boolean] if True, use the short dipole approximation.
                   Default=False. Cannot be set together with
                   half_wave_dipole_approx

    half_wave_dipole_approx
                   [boolean] if True, use the half-wave dipole approximation.
                   Default=True. Cannot be set together with
                   short_dipole_approx

    power      [boolean] If set to True (default), compute power pattern,
               otherwise compute field pattern.

    Output:

    Dipole electric field or power pattern, a numpy array with number of rows
    equal to the number of sky positions (rows in skypos) and number of
    columns equal to number of wavelengths specified.
    -----------------------------------------------------------------------------
    """

    try:
        length, skypos
    except NameError:
        raise NameError('Dipole length and sky positions must be specified. Check inputs.')

    if not isinstance(length, (int,float)):
        raise TypeError('Dipole length should be a scalar.')
    if length <= 0.0:
        raise ValueError('Dipole length should be positive.')

    if short_dipole_approx and half_wave_dipole_approx:
        raise ValueError('Both short dipole and half-wave dipole approximations cannot be made at the same time')

    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')
    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')

    if dipole_coords is not None:
        if not isinstance(dipole_coords, str):
            raise TypeError('dipole_coords must be a string. Allowed values are "altaz" and "dircos"')
        elif (dipole_coords != 'altaz') and (dipole_coords != 'dircos'):
            raise ValueError('dipole_coords must be "altaz" or "dircos".')

    if skycoords is not None:
        if not isinstance(skycoords, str):
            raise TypeError('skycoords must be a string. Allowed values are "altaz" and "dircos"')
        elif (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos".')

    # When only one of the two coordinate systems is given, the other
    # defaults to it
    if (dipole_coords is None) and (skycoords is None):
        raise ValueError('At least one of dipole_coords and skycoords must be specified. Allowed values are "altaz" and "dircos"')
    elif (dipole_coords is not None) and (skycoords is None):
        skycoords = dipole_coords
    elif (dipole_coords is None) and (skycoords is not None):
        dipole_coords = skycoords

    if (skycoords == 'altaz') or (dipole_coords == 'altaz'):
        if angle_units is None:
            angle_units = 'degrees'
        elif not isinstance(angle_units, str):
            raise TypeError('angle_units must be a string. Allowed values are "degrees" and "radians".')
        elif (angle_units != 'degrees') and (angle_units != 'radians'):
            raise ValueError('angle_units must be "degrees" or "radians".')

    if skycoords == 'altaz':
        skypos = NP.asarray(skypos)
        if angle_units == 'radians':
            skypos = NP.degrees(skypos)

        if skypos.ndim < 2:
            if len(skypos) == 2:
                skypos = NP.asarray(skypos).reshape(1,2)
            else:
                raise ValueError('skypos must be a Nx2 Numpy array.')
        elif skypos.ndim > 2:
            raise ValueError('skypos must be a Nx2 Numpy array.')
        else:
            if skypos.shape[1] != 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')

        skypos_dircos = GEOM.altaz2dircos(skypos, units='degrees')
    else:
        skypos = NP.asarray(skypos)  # accept lists as well as ndarrays
        if skypos.ndim < 2:
            if (len(skypos) == 2) or (len(skypos) == 3):
                skypos = skypos.reshape(1,-1)
            else:
                raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
        elif skypos.ndim > 2:
            raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
        else:
            if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if NP.any(NP.abs(skypos[:,0]) > 1.0) or NP.any(NP.abs(skypos[:,1]) > 1.0):
                    raise ValueError('skypos in transverse direction cosine coordinates found to be exceeding unity.')
                else:
                    if skypos.shape[1] == 3:
                        eps = 1.0e-10
                        if NP.any(NP.abs(NP.sqrt(NP.sum(skypos**2, axis=1)) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                            print('Warning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                            # Fixed: n = sqrt(1 - l^2 - m^2), not 1 - sqrt(l^2 + m^2)
                            skypos[:,2] = NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2,axis=1))
                    else:
                        # Fill in line-of-sight component n = sqrt(1 - l^2 - m^2)
                        # (fixed: previously 1 - sqrt(l^2 + m^2), which is not a
                        # valid direction cosine and disagrees with the other
                        # beam routines in this module)
                        skypos = NP.hstack((skypos, NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2,axis=1)).reshape(-1,1)))
        skypos_dircos = skypos

    if dipole_coords == 'altaz':
        if dipole_orientation is not None:
            dipole_orientation = NP.asarray(dipole_orientation)
            if angle_units == 'radians':
                dipole_orientation = NP.degrees(dipole_orientation)

            if dipole_orientation.ndim < 2:
                if len(dipole_orientation) == 2:
                    dipole_orientation = dipole_orientation.reshape(1,2)
                else:
                    raise ValueError('dipole_orientation must be a Nx2 Numpy array.')
            elif dipole_orientation.ndim > 2:
                raise ValueError('dipole_orientation must be a Nx2 Numpy array.')
            else:
                if dipole_orientation.shape[1] != 2:
                    raise ValueError('dipole_orientation must be a Nx2 Numpy array.')
                elif NP.any(dipole_orientation[:,0] < 0.0) or NP.any(dipole_orientation[:,0] > 90.0):
                    raise ValueError('Altitudes in dipole_orientation have to be positive and <= 90 degrees')
        else:
            # Default dipole orientation points towards east.
            # Fixed: this assignment used the misspelled name
            # "dipole_orietnation", leaving dipole_orientation undefined and
            # raising NameError on the conversion below.
            dipole_orientation = NP.asarray([0.0, 90.0]).reshape(1,-1)

        dipole_orientation_dircos = GEOM.altaz2dircos(dipole_orientation, units='degrees')
    else:
        if dipole_orientation is not None:
            dipole_orientation = NP.asarray(dipole_orientation)  # accept lists too
            if dipole_orientation.ndim < 2:
                if (len(dipole_orientation) == 2) or (len(dipole_orientation) == 3):
                    dipole_orientation = dipole_orientation.reshape(1,-1)
                else:
                    raise ValueError('dipole_orientation must be a Nx2 Nx3 Numpy array.')
            elif dipole_orientation.ndim > 2:
                raise ValueError('dipole_orientation must be a Nx2 or Nx3 Numpy array.')
            else:
                if (dipole_orientation.shape[1] < 2) or (dipole_orientation.shape[1] > 3):
                    raise ValueError('dipole_orientation must be a Nx2 or Nx3 Numpy array.')
                else:
                    if NP.any(NP.abs(dipole_orientation[:,0]) > 1.0) or NP.any(NP.abs(dipole_orientation[:,1]) > 1.0):
                        raise ValueError('dipole_orientation in transverse direction cosine coordinates found to be exceeding unity.')
                    else:
                        if dipole_orientation.shape[1] == 3:
                            eps = 1.0e-10
                            if NP.any(NP.abs(NP.sqrt(NP.sum(dipole_orientation**2, axis=1)) - 1.0) > eps) or NP.any(dipole_orientation[:,2] < 0.0):
                                print('Warning: dipole_orientation in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                                # Fixed: n = sqrt(1 - l^2 - m^2)
                                dipole_orientation[:,2] = NP.sqrt(1.0 - NP.sum(dipole_orientation[:,:2]**2,axis=1))
                        else:
                            # Fixed: n = sqrt(1 - l^2 - m^2)
                            dipole_orientation = NP.hstack((dipole_orientation, NP.sqrt(1.0 - NP.sum(dipole_orientation[:,:2]**2,axis=1)).reshape(-1,1)))
        else:
            dipole_orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1) # Default dipole orientation points towards east
        dipole_orientation_dircos = dipole_orientation

    k = 2 * NP.pi / wavelength  # wavenumber(s)
    h = 0.5 * length            # half-length of the dipole

    # Angle between the dipole axis and each sky direction
    dot_product = NP.dot(dipole_orientation_dircos, skypos_dircos.T).reshape(-1,1)
    angles = NP.arccos(dot_product)

    eps = 1.e-10
    # Directions (anti-)parallel to the dipole axis need special handling
    # because the general expression below divides by sin(angle)
    zero_angles_ind = NP.abs(NP.abs(dot_product) - 1.0) < eps
    n_zero_angles = NP.sum(zero_angles_ind)

    max_pattern = 1.0 # Normalization factor
    if short_dipole_approx:
        field_pattern = NP.sin(angles)
        field_pattern = NP.repeat(field_pattern.reshape(-1,1), wavelength.size, axis=1) # Repeat along wavelength axis
    elif half_wave_dipole_approx:
        field_pattern = NP.cos(0.5 * NP.pi * NP.cos(angles)) / NP.sin(angles)
        field_pattern = NP.repeat(field_pattern.reshape(-1,1), wavelength.size, axis=1) # Repeat along wavelength axis
    else:
        k = k.reshape(1,-1)
        max_pattern = 1.0 - NP.cos(k * h) # Maximum occurs at angle = NP.pi / 2
        field_pattern = (NP.cos(k*h*NP.cos(angles)) - NP.cos(k*h)) / NP.sin(angles)
        if n_zero_angles > 0:
            field_pattern[zero_angles_ind.ravel(),:] = k*h * NP.sin(k*h * NP.cos(angles[zero_angles_ind])) * NP.tan(angles[zero_angles_ind]) # Correct expression from L' Hospital rule

    if power:
        return NP.abs(field_pattern / max_pattern)**2
    else:
        return field_pattern / max_pattern
#################################################################################
def isotropic_radiators_array_field_pattern(nax1, nax2, sep1, sep2=None,
                                            skypos=None, wavelength=1.0,
                                            east2ax1=None, skycoords='altaz',
                                            pointing_center=None, power=True):
    """
    -----------------------------------------------------------------------------
    Compute the electric field (or power) pattern at the specified sky positions
    due to a regular rectangular array of isotropic radiator elements.

    Inputs:

    nax1       [scalar] Number of radiator elements along axis #1
    nax2       [scalar] Number of radiator elements along axis #2
    sep1       [scalar] Distance between two adjacent radiator elements along
               axis #1

    Keyword Inputs:

    sep2       [scalar] Distance between two adjacent radiator elements along
               axis #2. If None, sep2 is set equal to sep1. Same units as sep1.

    skypos     [numpy array] Sky positions at which the field pattern is to be
               estimated. Size is M x N where M is the number of locations and
               N = 2 (skycoords='altaz') or N = 2 or 3 (skycoords='dircos')

    skycoords  [string] coordinate system of the sky positions. Accepted values
               are 'altaz' and 'dircos'. If 'dircos', the direction cosines are
               aligned with the local East, North, and Up

    wavelength [scalar, list or numpy vector] Wavelengths at which the field
               pattern is to be estimated. Same units as sep1 and sep2

    east2ax1   [scalar] Angle (in degrees) the primary axis of the array makes
               with the local East (positive anti-clockwise). If None, no
               rotation between the array axes and the East-North frame is
               applied

    pointing_center [list or numpy array] coordinates of pointing center (in
               the coordinate system specified by skycoords). 2-element vector
               if skycoords='altaz'; 2- or 3-element vector if
               skycoords='dircos'. Defaults to zenith.

    power      [boolean] If set to True (default), compute power pattern,
               otherwise compute field pattern.

    Output:

    Array electric field or power pattern, number of rows equal to the number
    of sky positions (rows in skypos) and number of columns equal to the
    number of wavelengths. The pattern is the product of the patterns along
    each axis.
    -----------------------------------------------------------------------------
    """

    try:
        nax1, nax2, sep1, skypos
    except NameError:
        raise NameError('Number of radiators along axis 1 and 2 and their separation must be specified. Check inputs.')

    if skypos is None:
        raise NameError('skypos must be specified in Alt-Az or direction cosine units as a Numpy array. Check inputs.')

    if not isinstance(nax1, int):
        raise TypeError('nax1 must be a positive integer.')
    elif nax1 <= 0:
        raise ValueError('nax1 must be a positive integer.')

    if not isinstance(nax2, int):
        raise TypeError('nax2 must be a positive integer.')
    elif nax2 <= 0:
        raise ValueError('nax2 must be a positive integer.')

    if not isinstance(sep1, (int,float)):
        raise TypeError('sep1 must be a positive scalar.')
    elif sep1 <= 0:
        raise ValueError('sep1 must be a positive value.')

    if sep2 is None:
        sep2 = sep1

    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')
    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')

    # east2ax1 is optional (None means no rotation). Fixed: the type check used
    # to run unconditionally and raised TypeError for the documented default of
    # None.
    if east2ax1 is not None:
        if not isinstance(east2ax1, (int,float)):
            raise TypeError('east2ax1 must be a scalar.')

    if not isinstance(skypos, NP.ndarray):
        raise TypeError('skypos must be a Numpy array.')

    if skycoords is not None:
        if (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos" or None (default).')
        elif skycoords == 'altaz':
            if skypos.ndim < 2:
                if skypos.size == 2:
                    skypos = NP.asarray(skypos).reshape(1,2)
                else:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            else:
                if skypos.shape[1] != 2:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
                elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                    raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')
        else:
            if skypos.ndim < 2:
                if (skypos.size == 2) or (skypos.size == 3):
                    skypos = NP.asarray(skypos).reshape(1,-1)
                else:
                    raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                    raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
                elif skypos.shape[1] == 2:
                    if NP.any(NP.sum(skypos**2, axis=1) > 1.0):
                        raise ValueError('skypos in direction cosine coordinates are invalid.')
                    skypos = NP.hstack((skypos, NP.sqrt(1.0-NP.sum(skypos**2, axis=1)).reshape(-1,1)))
                else:
                    eps = 1.0e-10
                    if NP.any(NP.abs(NP.sum(skypos**2, axis=1) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                        # Fixed: previously guarded by an undefined name
                        # "verbose" (NameError) and sliced rows instead of
                        # columns (skypos[:2] -> skypos[:,:2])
                        print('\tWarning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                        skypos[:,2] = NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2, axis=1))
    else:
        raise ValueError('skycoords has not been set.')

    if pointing_center is None:
        if skycoords == 'altaz':
            pointing_center = NP.asarray([90.0, 0.0]) # Zenith in Alt-Az coordinates
        else:
            pointing_center = NP.asarray([0.0, 0.0, 1.0]) # Zenith in direction-cosine coordinates
    else:
        if not isinstance(pointing_center, (list, NP.ndarray)):
            raise TypeError('pointing_center must be a list or numpy array')

        pointing_center = NP.asarray(pointing_center)
        if (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos" or None (default).')
        elif skycoords == 'altaz':
            if pointing_center.size != 2:
                raise ValueError('pointing_center must be a 2-element vector in Alt-Az coordinates.')
            else:
                pointing_center = pointing_center.ravel()

            if NP.any(pointing_center[0] < 0.0) or NP.any(pointing_center[0] > 90.0):
                raise ValueError('Altitudes in pointing_center have to be positive and <= 90 degrees')
        else:
            if (pointing_center.size < 2) or (pointing_center.size > 3):
                raise ValueError('pointing_center must be a 2- or 3-element vector in direction cosine coordinates')
            else:
                pointing_center = pointing_center.ravel()

            if pointing_center.size == 2:
                if NP.sum(pointing_center**2) > 1.0:
                    raise ValueError('pointing_center in direction cosine coordinates are invalid.')
                pointing_center = NP.hstack((pointing_center, NP.sqrt(1.0-NP.sum(pointing_center**2))))
            else:
                eps = 1.0e-10
                if (NP.abs(NP.sum(pointing_center**2) - 1.0) > eps) or (pointing_center[2] < 0.0):
                    # Fixed: was guarded by undefined name "verbose"
                    print('\tWarning: pointing_center in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                    pointing_center[2] = NP.sqrt(1.0 - NP.sum(pointing_center[:2]**2))

    if east2ax1 is not None:
        if skycoords == 'altaz':
            # Rotate in Az. Az is measured clockwise from North whereas
            # east2ax1 is measured anti-clockwise from East, so
            # newAz = Az + east2ax1 wrt the principal axis
            skypos_dircos_rotated = GEOM.altaz2dircos(NP.hstack((skypos[:,0].reshape(-1,1),NP.asarray(skypos[:,1]+east2ax1).reshape(-1,1))), units='degrees')
            pointing_center_dircos_rotated = GEOM.altaz2dircos([pointing_center[0], pointing_center[1]+east2ax1], units='degrees')
        else:
            angle = NP.radians(east2ax1)
            rotation_matrix = NP.asarray([[NP.cos(angle), NP.sin(angle), 0.0],
                                          [-NP.sin(angle), NP.cos(angle),  0.0],
                                          [0.0,            0.0,           1.0]])
            skypos_dircos_rotated = NP.dot(skypos, rotation_matrix.T)
            pointing_center_dircos_rotated = NP.dot(pointing_center, rotation_matrix.T)

        skypos_dircos_relative = skypos_dircos_rotated - NP.repeat(pointing_center_dircos_rotated.reshape(1,-1), skypos.shape[0], axis=0)
    else:
        # No rotation between array axes and the East-North frame.
        # Fixed: this branch used to subtract the (None) east2ax1 from the
        # azimuth and referenced undefined names in the dircos case.
        if skycoords == 'altaz':
            skypos_dircos = GEOM.altaz2dircos(skypos, units='degrees')
            pointing_center_dircos = GEOM.altaz2dircos([pointing_center[0], pointing_center[1]], units='degrees')
        else:
            skypos_dircos = skypos
            pointing_center_dircos = pointing_center
        skypos_dircos_relative = skypos_dircos - NP.repeat(NP.asarray(pointing_center_dircos).reshape(1,-1), skypos.shape[0], axis=0)

    # Phase gradients across the two array axes (rows: sky positions,
    # columns: wavelengths)
    phi = 2 * NP.pi * sep1 * NP.repeat(skypos_dircos_relative[:,0].reshape(-1,1), wavelength.size, axis=1) / NP.repeat(wavelength.reshape(1,-1), skypos.shape[0], axis=0)
    psi = 2 * NP.pi * sep2 * NP.repeat(skypos_dircos_relative[:,1].reshape(-1,1), wavelength.size, axis=1) / NP.repeat(wavelength.reshape(1,-1), skypos.shape[0], axis=0)

    eps = 1.0e-10
    zero_phi = NP.abs(phi) < eps
    zero_psi = NP.abs(psi) < eps

    term1 = NP.sin(0.5*nax1*phi) / NP.sin(0.5*phi) / nax1
    term1_zero_phi = NP.cos(0.5*nax1*phi[zero_phi]) / NP.cos(0.5*phi[zero_phi]) # L'Hospital rule
    term1[zero_phi] = term1_zero_phi.ravel()

    # Fixed: the axis-2 factor used nax1 instead of nax2
    term2 = NP.sin(0.5*nax2*psi) / NP.sin(0.5*psi) / nax2
    term2_zero_psi = NP.cos(0.5*nax2*psi[zero_psi]) / NP.cos(0.5*psi[zero_psi]) # L'Hospital rule
    term2[zero_psi] = term2_zero_psi.ravel()

    pb = term1 * term2
    if power:
        pb = NP.abs(pb)**2
    return pb
#################################################################################
def array_field_pattern(antpos, skypos, skycoords='altaz', pointing_info=None,
                        wavelength=1.0, power=True):
    """
    -----------------------------------------------------------------------------
    Generate the field (or power) pattern from an array of generic shape made of
    isotropic radiator elements. This can supersede the functionality of
    isotropic_radiators_array_field_pattern() because the latter can only handle
    rectangular or square arrays with equally spaced elements. Secondly, this
    routine can handle beam pointing through specification of pointing center or
    beamformer delays. Effect of jitter in the delay settings of the beamformer
    can also be taken into account.

    Inputs:

    antpos    [2- or 3-column numpy array] The position of elements in the tile.
              The coordinates are assumed to be in the local ENU coordinate
              system in meters. If a 2-column array is provided, the third
              column is assumed to be made of zeros. Each row is one element.

    skypos    [2- or 3-column numpy array] The positions on the sky for which
              the array field pattern is to be estimated, in the coordinate
              system specified by skycoords. If skycoords='altaz', a 2-column
              array (altitude, azimuth in degrees). If skycoords='dircos', a
              3- or 2-column array of direction cosines (the third column is
              derived from direction cosine rules if absent): first column l
              (east), second m (north), third n (up).

    skycoords [string] Coordinate system of skypos: 'altaz' or 'dircos'

    pointing_info
              [dictionary] Information relating to the pointing center, given
              either via element delay compensation or directly as a pointing
              center. Default = None (pointing centered at zenith). Tags:
              'delays'          [numpy array] element delays (seconds); size
                                equal to number of rows in antpos. None means
                                zero delays (zenith phasing).
              'pointing_center' [numpy array] used in the absence of 'delays'.
                                Row vector; 2 columns for Alt-Az, 2 or 3 for
                                direction cosines. Requires 'pointing_coords'.
              'pointing_coords' [string] 'altaz' or 'dircos'; coordinate
                                system of 'pointing_center'.
              'delayerr'        [int, float] non-negative RMS jitter (seconds)
                                added to the beamformer delays. Default 0.
              'gains'           [numpy array] complex element gains, size equal
                                to number of rows in antpos. None => unity.
              'gainerr'         [int, float] non-negative RMS voltage-amplitude
                                error in dB applied to the gains. Default 0.
              'nrand'           [int] number of random realizations of gainerr
                                and/or delayerr. Default 1.

    wavelength [scalar, list or numpy vector] Wavelengths at which the field
              pattern is to be estimated. Same units as antpos.

    power     [boolean] If set to True (default), compute power pattern,
              otherwise compute field pattern.

    Output:

    A complex electric field or power pattern as a M x N x nrand numpy array,
    M = number of sky positions, N = number of wavelengths, nrand = number of
    random realizations.
    -----------------------------------------------------------------------------
    """

    try:
        antpos, skypos
    except NameError:
        raise NameError('antpos and skypos must be provided.')

    if not isinstance(antpos, NP.ndarray):
        raise TypeError('antenna positions in antpos must be a numpy array.')
    else:
        if (len(antpos.shape) != 2):
            raise ValueError('antpos must be a 2-dimensional 2- or 3-column numpy array')
        else:
            if antpos.shape[1] == 2:
                antpos = NP.hstack((antpos, NP.zeros(antpos.shape[0]).reshape(-1,1)))
            elif antpos.shape[1] != 3:
                raise ValueError('antpos must be a 2- or 3-column array')
            antpos = antpos.astype(NP.float32)

    if pointing_info is None:
        delays = NP.zeros(antpos.shape[0])
        gains = NP.ones(antpos.shape[0])
        nrand = 1
    else:
        if 'nrand' in pointing_info:
            nrand = pointing_info['nrand']
            if nrand is None:
                nrand = 1
            elif not isinstance(nrand, int):
                raise TypeError('nrand must be an integer')
            elif nrand < 1:
                raise ValueError('nrand must be positive')
        else:
            nrand = 1

        if 'delays' in pointing_info:
            delays = pointing_info['delays']
            if delays is None:
                delays = NP.zeros(antpos.shape[0])
            elif not isinstance(delays, NP.ndarray):
                raise TypeError('delays must be a numpy array')
            else:
                if delays.size != antpos.shape[0]:
                    raise ValueError('size of delays must be equal to the number of antennas')
            delays = delays.ravel()
        elif 'pointing_center' in pointing_info:
            if 'pointing_coords' not in pointing_info:
                raise KeyError('pointing_coords not specified.')
            elif pointing_info['pointing_coords'] == 'altaz':
                pointing_center = GEOM.altaz2dircos(pointing_info['pointing_center'].reshape(1,-1), units='degrees')
            elif pointing_info['pointing_coords'] == 'dircos':
                # Fixed: parenthesis was misplaced as NP.sum(pc**2 > 1.0),
                # which sums a boolean array and fails to flag direction
                # cosine vectors whose squared norm exceeds unity
                if NP.sum(pointing_info['pointing_center']**2) > 1.0:
                    raise ValueError('Invalid direction cosines specified in pointing_center')
                pointing_center = pointing_info['pointing_center'].reshape(1,-1)
            else:
                raise ValueError('pointing_coords must be set to "dircos" or "altaz"')
            # Opposite sign to the geometric delays computed later because
            # this is delay compensation
            delays = NP.dot(antpos, pointing_center.T) / FCNST.c
        else:
            delays = NP.zeros(antpos.shape[0], dtype=NP.float32)

        if 'gains' in pointing_info:
            gains = pointing_info['gains']
            if gains is None:
                gains = NP.ones(antpos.shape[0])
            elif not isinstance(gains, NP.ndarray):
                raise TypeError('gains must be a numpy array')
            else:
                if gains.size != antpos.shape[0]:
                    raise ValueError('size of gains must be equal to the number of antennas')
            gains = gains.ravel()
        else:
            gains = NP.ones(antpos.shape[0], dtype=NP.float32)

        if 'delayerr' in pointing_info:
            delayerr = pointing_info['delayerr']
            if delayerr is not None:
                if isinstance(delayerr, (int, float)):
                    if delayerr < 0.0:
                        raise ValueError('delayerr must be non-negative')
                    delays = delays.reshape(antpos.shape[0],1) + delayerr * NP.random.standard_normal((antpos.shape[0],nrand))
                else:
                    raise TypeError('delayerr must be an integer or float')

        if 'gainerr' in pointing_info:
            gainerr = pointing_info['gainerr']
            if gainerr is not None:
                if isinstance(gainerr, (int, float)):
                    if gainerr < 0.0:
                        raise ValueError('gainerr must be non-negative')
                    gainerr /= 10.0         # Convert from dB to logarithmic units
                    gains = gains.reshape(antpos.shape[0],1) * 10**(gainerr * NP.random.standard_normal((antpos.shape[0],nrand)))
                else:
                    raise TypeError('gainerr must be an integer or float')

        gains = gains.astype(NP.float32)
        delays = delays.astype(NP.float32)

    if not isinstance(skypos, NP.ndarray):
        raise TypeError('skypos must be a Numpy array.')

    if skycoords is not None:
        if (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos" or None (default).')
        elif skycoords == 'altaz':
            if skypos.ndim < 2:
                if skypos.size == 2:
                    skypos = NP.asarray(skypos).reshape(1,2)
                else:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            else:
                if skypos.shape[1] != 2:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
                elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                    raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')

            skypos = GEOM.altaz2dircos(skypos, 'degrees') # Convert sky positions to direction cosines
        else:
            if skypos.ndim < 2:
                if (skypos.size == 2) or (skypos.size == 3):
                    skypos = NP.asarray(skypos).reshape(1,-1)
                else:
                    raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                    raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
                elif skypos.shape[1] == 2:
                    if NP.any(NP.sum(skypos**2, axis=1) > 1.0):
                        raise ValueError('skypos in direction cosine coordinates are invalid.')
                    skypos = NP.hstack((skypos, NP.sqrt(1.0-NP.sum(skypos**2, axis=1)).reshape(-1,1)))
                else:
                    eps = 1.0e-10
                    if NP.any(NP.abs(NP.sum(skypos**2, axis=1) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                        # Fixed: previously guarded by an undefined name
                        # "verbose" (NameError) and sliced rows instead of
                        # columns (skypos[:2] -> skypos[:,:2])
                        print('\tWarning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                        skypos[:,2] = NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2, axis=1))
    else:
        raise ValueError('skycoords has not been set.')

    skypos = skypos.astype(NP.float32, copy=False)

    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')
    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')
    wavelength = wavelength.astype(NP.float32)

    # Geometric delay of each element towards each sky direction
    geometric_delays = -NP.dot(antpos, skypos.T) / FCNST.c
    geometric_delays = geometric_delays[:,:,NP.newaxis,NP.newaxis].astype(NP.float32, copy=False) # Add axes for wavelengths and random realizations of beamformer settings

    gains = gains.reshape(antpos.shape[0],1,1,nrand).astype(NP.complex64, copy=False)
    delays = delays.reshape(antpos.shape[0],1,1,nrand)
    wavelength = wavelength.reshape(1,1,-1,1).astype(NP.float32, copy=False)

    # Beamformed voltage pattern: gain-weighted coherent sum of per-element
    # phasors exp(i 2 pi (c/lambda) (tau_geom + tau_delay)), normalized by the
    # number of elements
    retvalue = geometric_delays + delays
    retvalue = retvalue.astype(NP.complex64, copy=False)
    retvalue = NP.exp(1j * 2*NP.pi * FCNST.c/wavelength * retvalue).astype(NP.complex64, copy=False)
    retvalue *= gains/antpos.shape[0]
    retvalue = NP.sum(retvalue.astype(NP.complex64), axis=0)

    if power:
        retvalue = NP.abs(retvalue)**2

    return retvalue
#################################################################################
def generic_aperture_field_pattern(elementpos, skypos, skycoords='altaz',
                                   pointing_info=None, wavelength=1.0,
                                   power=True):
    """
    -----------------------------------------------------------------------------
    A routine to generate field pattern from an aperture of generic shape made of
    isotropic radiator elements. This can supercede the functionality of
    isotropic_radiators_array_field_pattern() as well as array_field_pattern()
    because this can handle frequency-dependent gains as well as delays applied
    on the aperture elements of any arbitrary shape. This can model aperture
    surface imperfections including frequency dependent variations.
    Inputs:
    elementpos
               [2- or 3-column numpy array] The position of elements in tile. The
               coordinates are assumed to be in the local ENU coordinate system
               in meters. If a 2-column array is provided, the third column is
               assumed to be made of zeros. Each row is for one element. No
               default.
    skypos     [2- or 3-column numpy array] The positions on the sky for which
               the array field pattern is to be estimated. The coordinate system
               specified using the keyword input skycoords. If skycoords is set
               to 'altaz', skypos must be a 2-column array that obeys Alt-Az
               conventions with altitude in the first column and azimuth in the
               second column. Both altitude and azimuth must be in degrees. If
               skycoords is set to 'dircos', a 3- or 2-column (the
               third column is automatically determined from direction cosine
               rules), it must obey conventions of direction cosines. The first
               column is l (east), the second is m (north) and third is n (up).
               Default will be set to zenith position in the coordinate system
               specified.
    skycoords  [string scalar] Coordinate system of sky positions specified in
               skypos. Accepted values are 'altaz' (Alt-Az) or 'dircos' (direction
               cosines)
    pointing_info
              [dictionary] A dictionary consisting of information relating to
              pointing center. The pointing center can be specified either via
              element delay compensation or by directly specifying the pointing
              center in a certain coordinate system. Default = None (pointing
              centered at zenith). This dictionary consists of the following
              tags and values:
              'delays'          [numpy array] Delays (in seconds) to be applied
                                to the tile elements. Size should be equal to
                                number of tile elements (number of rows in
                                elementpos). Default = None will set all element
                                delays to zero phasing them to zenith.
              'pointing_center' [numpy array] This will apply in the absence of
                                key 'delays'. This can be specified as a row
                                vector. Should have two-columns if using Alt-Az
                                coordinates, or two or three columns if using
                                direction cosines. There is no default. The
                                coordinate system must be specified in
                                'pointing_coords' if 'pointing_center' is to be
                                used.
              'pointing_coords' [string scalar] Coordinate system in which the
                                pointing_center is specified. Accepted values
                                are 'altaz' or 'dircos'. Must be provided if
                                'pointing_center' is to be used. No default.
              'delayerr'        [int, float] RMS jitter in delays used in the
                                beamformer. Random jitters are drawn from a
                                normal distribution with this rms. Must be
                                a non-negative scalar. If not provided, it
                                defaults to 0 (no jitter).
              'gains'           [numpy array] Complex element gains. Must be of
                                size equal n_elements specified by the number of
                                rows in elementpos. If set to None (default), all
                                element gains are assumed to be unity.
              'gainerr'         [int, float] RMS error in voltage amplitude in
                                dB to be used in the beamformer. Random jitters
                                are drawn from a normal distribution in
                                logarithm units which are then converted to
                                linear units. Must be a non-negative scalar. If
                                not provided, it defaults to 0 (no jitter).
              'nrand'           [int] number of random realizations of gainerr
                                and/or delayerr to be generated. Must be
                                positive. If none provided, it defaults to 1.
    wavelength [scalar, list or numpy vector] Wavelengths at which the field
               dipole pattern is to be estimated. Must be in the same units as
               element positions in elementpos.
    power      [boolean] If set to True (default), compute power pattern,
               otherwise compute field pattern.
    Output:
    Returns a complex electric field or power pattern as a MxNxnrand numpy array,
    M=number of sky positions, N=number of wavelengths, nrand=number of random
    realizations
    -----------------------------------------------------------------------------
    """
    try:
        elementpos, skypos
    except NameError:
        raise NameError('elementpos and skypos must be provided.')
    if not isinstance(elementpos, NP.ndarray):
        raise TypeError('antenna positions in elementpos must be a numpy array.')
    else:
        if (len(elementpos.shape) != 2):
            raise ValueError('elementpos must be a 2-dimensional 2- or 3-column numpy array')
        else:
            if elementpos.shape[1] == 2:
                # Third (up) coordinate defaults to zero when not supplied
                elementpos = NP.hstack((elementpos, NP.zeros(elementpos.shape[0]).reshape(-1,1)))
            elif elementpos.shape[1] != 3:
                raise ValueError('elementpos must be a 2- or 3-column array')
            elementpos = elementpos.astype(NP.float32)
    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')
    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')
    wavelength = wavelength.astype(NP.float32)
    if pointing_info is None:
        # Default: zenith pointing, unit gains, single realization
        nrand = 1
        delays = NP.asarray([0.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
        gains = NP.asarray([1.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
    else:
        if 'nrand' in pointing_info:
            nrand = pointing_info['nrand']
            if nrand is None:
                nrand = 1
            elif not isinstance(nrand, int):
                raise TypeError('nrand must be an integer')
            elif nrand < 1:
                raise ValueError('nrand must be positive')
        else:
            nrand = 1
        if 'delays' in pointing_info:
            delays = pointing_info['delays']
            if delays is None:
                delays = NP.asarray([0.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
            elif not isinstance(delays, NP.ndarray):
                raise TypeError('delays must be a numpy array')
            else:
                # Broadcast delays to (nelements, nsky, nchan, nrand) layout
                if delays.size == 1:
                    delays = delays.reshape(1,1,1,1)
                elif delays.size == elementpos.shape[0]:
                    delays = delays.reshape(-1,1,1,1)
                elif delays.size == wavelength.size:
                    delays = delays.reshape(1,1,-1,1)
                elif delays.shape == (elementpos.shape[0], wavelength.size):
                    delays = delays[:,NP.newaxis,:,NP.newaxis]
                else:
                    raise ValueError('size of delays provided is inconsistent')
        elif 'pointing_center' in pointing_info:
            if 'pointing_coords' not in pointing_info:
                raise KeyError('pointing_coords not specified.')
            elif pointing_info['pointing_coords'] == 'altaz':
                pointing_center = GEOM.altaz2dircos(pointing_info['pointing_center'].reshape(1,-1), units='degrees')
            elif pointing_info['pointing_coords'] == 'dircos':
                # Fixed: test the sum of squared direction cosines against 1,
                # not the sum of a boolean array (misplaced parenthesis)
                if NP.sum(pointing_info['pointing_center']**2) > 1.0:
                    raise ValueError('Invalid direction cosines specified in pointing_center')
                pointing_center = pointing_info['pointing_center'].reshape(1,-1)
            else:
                raise ValueError('pointing_coords must be set to "dircos" or "altaz"')
            delays = NP.dot(elementpos, pointing_center.T) / FCNST.c # Opposite sign as that used for determining geometric delays later because this is delay compensation, shape = (nelements x nsky)
            delays = delays[:,:,NP.newaxis,NP.newaxis]
        else:
            delays = NP.asarray([0.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
        if 'gains' in pointing_info:
            gains = pointing_info['gains']
            if gains is None:
                gains = NP.asarray([1.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
            elif not isinstance(gains, NP.ndarray):
                raise TypeError('gains must be a numpy array')
            else:
                # Broadcast gains to (nelements, nsky, nchan, nrand) layout
                if gains.size == 1:
                    gains = gains.reshape(1,1,1,1)
                elif gains.size == elementpos.shape[0]:
                    gains = gains.reshape(-1,1,1,1)
                elif gains.size == wavelength.size:
                    gains = gains.reshape(1,1,-1,1)
                elif gains.shape == (elementpos.shape[0], wavelength.size):
                    gains = gains[:,NP.newaxis,:,NP.newaxis]
                else:
                    raise ValueError('size of gains provided is inconsistent')
        else:
            gains = NP.asarray([1.0]).reshape(1,1,1,1) # (nelements=1)x(nsky=1)x(nchan=1)x(nrand=1)
        if 'delayerr' in pointing_info:
            delayerr = pointing_info['delayerr']
            if delayerr is not None:
                if isinstance(delayerr, (int, float)):
                    if delayerr < 0.0:
                        raise ValueError('delayerr must be non-negative')
                    # Fixed typo: was "delayserr" which raised NameError
                    delays = delays + delayerr * NP.random.standard_normal((elementpos.shape[0],1,1,nrand))
                else:
                    raise TypeError('delayerr must be an integer or float')
        if 'gainerr' in pointing_info:
            gainerr = pointing_info['gainerr']
            if gainerr is not None:
                if isinstance(gainerr, (int, float)):
                    if gainerr < 0.0:
                        raise ValueError('gainerr must be non-negative')
                    gainerr /= 10.0         # Convert from dB to logarithmic units
                    gains = gains * 10**(gainerr * NP.random.standard_normal((elementpos.shape[0],1,1,nrand)))
                else:
                    raise TypeError('gainerr must be an integer or float')
    gains = gains.astype(NP.float32)
    delays = delays.astype(NP.float32)
    if not isinstance(skypos, NP.ndarray):
        raise TypeError('skypos must be a Numpy array.')
    if skycoords is not None:
        if (skycoords != 'altaz') and (skycoords != 'dircos'):
            raise ValueError('skycoords must be "altaz" or "dircos" or None (default).')
        elif skycoords == 'altaz':
            if skypos.ndim < 2:
                if skypos.size == 2:
                    skypos = NP.asarray(skypos).reshape(1,2)
                else:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            else:
                if skypos.shape[1] != 2:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
                elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                    raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')
            skypos = GEOM.altaz2dircos(skypos, 'degrees') # Convert sky positions to direction cosines
        else:
            if skypos.ndim < 2:
                if (skypos.size == 2) or (skypos.size == 3):
                    skypos = NP.asarray(skypos).reshape(1,-1)
                else:
                    raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                    raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
                elif skypos.shape[1] == 2:
                    if NP.any(NP.sum(skypos**2, axis=1) > 1.0):
                        raise ValueError('skypos in direction cosine coordinates are invalid.')
                    skypos = NP.hstack((skypos, NP.sqrt(1.0-NP.sum(skypos**2, axis=1)).reshape(-1,1)))
                else:
                    eps = 1.0e-10
                    if NP.any(NP.abs(NP.sum(skypos**2, axis=1) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                        # Fixed: was a Python-2 print statement guarded by an
                        # undefined "verbose"; also slice columns l,m with
                        # skypos[:,:2] (skypos[:2] sliced rows)
                        print('\tWarning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                        skypos[:,2] = NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2, axis=1))
    else:
        raise ValueError('skycoords has not been set.')
    skypos = skypos.astype(NP.float32, copy=False)
    geometric_delays = -NP.dot(elementpos, skypos.T) / FCNST.c
    geometric_delays = geometric_delays[:,:,NP.newaxis,NP.newaxis].astype(NP.float32, copy=False) # Add an axis for wavelengths, and random realizations of beamformer settings
    gains = gains.astype(NP.complex64, copy=False)
    wavelength = wavelength.reshape(1,1,-1,1).astype(NP.float32, copy=False)
    retvalue = geometric_delays + delays
    retvalue = retvalue.astype(NP.complex64, copy=False)
    retvalue = NP.exp(1j * 2*NP.pi * FCNST.c/wavelength * retvalue).astype(NP.complex64, copy=False)
    retvalue = NP.sum(gains*retvalue, axis=0) / elementpos.shape[0]
    if power:
        retvalue = NP.abs(retvalue)**2
    return retvalue
#################################################################################
def uniform_rectangular_aperture(sides, skypos, frequency, skyunits='altaz',
                                 east2ax1=None, pointing_center=None,
                                 power=True):
    """
    -----------------------------------------------------------------------------
    Compute the electric field or power pattern at the specified sky positions
    due to a uniformly illuminated rectangular aperture
    Inputs:
    sides       [scalar, list or numpy array]  Sides of the rectangle (in m). If
                scalar, it will be assumed to be identical for both sides which
                is a square. If a list or numpy array, it must have two
                elements
    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations, N = 2 (if skyunits = altaz denoting Alt-Az
                coordinates), or N = 3 (if skyunits = dircos denoting direction
                cosine coordinates). If skyunits = altaz, then altitude and
                azimuth must be in degrees
    frequency   [list or numpy vector] frequencies (in GHz) at which the power
                pattern is to be estimated. Frequencies differing by too much
                and extending over the usual bands cannot be given.
    Keyword Inputs:
    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'altaz', and 'dircos'.
                Default = 'altaz'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up. If 'altaz', then
                altitude and azimuth must be in degrees.
    east2ax1    [scalar] Angle (in degrees) the primary axis of the array makes
                with the local East (positive anti-clockwise). If None
                (default), no rotation is applied.
    pointing_center
                [list or numpy array] coordinates of pointing center (in the same
                coordinate system as that of sky coordinates specified by
                skyunits). 2-element vector if skyunits='altaz'. 2- or
                3-element vector if skyunits='dircos'.
    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern
    Output:
    Electric field pattern or power pattern, number of rows equal to the number
    of sky positions (which is equal to the number of rows in skypos), and
    number of columns equal to the number of wavelengths.
    -----------------------------------------------------------------------------
    """
    try:
        sides, skypos, frequency
    except NameError:
        raise NameError('Rectangular antenna sides, skypos, frequency must be specified')
    if isinstance(sides, (int,float)):
        sides = NP.asarray([sides]*2, dtype=float)
    elif isinstance(sides, list):
        sides = NP.asarray(sides).astype(float)
    elif not isinstance(sides, NP.ndarray):
        raise TypeError('Antenna sides must be a scalar, list or numpy array')
    sides = sides.astype(float)
    if sides.size == 1:
        sides = sides.ravel() + NP.zeros(2)
    elif sides.size == 2:
        sides = sides.ravel()
    else:
        raise ValueError('Antenna sides must not have more than 2 elements')
    if NP.any(sides < 0.0):
        raise ValueError('Antenna sides must not be negative')
    if isinstance(frequency, list):
        frequency = NP.asarray(frequency)
    elif isinstance(frequency, (int, float)):
        frequency = NP.asarray(frequency).reshape(-1)
    elif not isinstance(frequency, NP.ndarray):
        raise TypeError('Frequency should be a scalar, list or numpy array.')
    if NP.any(frequency <= 0.0):
        raise ValueError('Frequency(s) should be positive.')
    # Fixed: east2ax1=None is the documented default and is handled below;
    # only reject non-scalar, non-None values
    if east2ax1 is not None:
        if not isinstance(east2ax1, (int,float)):
            raise TypeError('east2ax1 must be a scalar.')
    if not isinstance(skypos, NP.ndarray):
        raise TypeError('skypos must be a Numpy array.')
    frequency = NP.asarray(frequency).ravel()
    wavelength = FCNST.c / frequency
    # Fixed: the coordinate-system keyword for this function is "skyunits";
    # the body previously referenced an undefined name "skycoords"
    if skyunits is not None:
        if (skyunits != 'altaz') and (skyunits != 'dircos'):
            raise ValueError('skyunits must be "altaz" or "dircos" or None (default).')
        elif skyunits == 'altaz':
            if skypos.ndim < 2:
                if skypos.size == 2:
                    skypos = NP.asarray(skypos).reshape(1,2)
                else:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 Numpy array.')
            else:
                if skypos.shape[1] != 2:
                    raise ValueError('skypos must be a Nx2 Numpy array.')
                elif NP.any(skypos[:,0] < 0.0) or NP.any(skypos[:,0] > 90.0):
                    raise ValueError('Altitudes in skypos have to be positive and <= 90 degrees')
        else:
            if skypos.ndim < 2:
                if (skypos.size == 2) or (skypos.size == 3):
                    skypos = NP.asarray(skypos).reshape(1,-1)
                else:
                    raise ValueError('skypos must be a Nx2 Nx3 Numpy array.')
            elif skypos.ndim > 2:
                raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
            else:
                if (skypos.shape[1] < 2) or (skypos.shape[1] > 3):
                    raise ValueError('skypos must be a Nx2 or Nx3 Numpy array.')
                elif skypos.shape[1] == 2:
                    if NP.any(NP.sum(skypos**2, axis=1) > 1.0):
                        raise ValueError('skypos in direction cosine coordinates are invalid.')
                    skypos = NP.hstack((skypos, NP.sqrt(1.0-NP.sum(skypos**2, axis=1)).reshape(-1,1)))
                else:
                    eps = 1.0e-10
                    if NP.any(NP.abs(NP.sum(skypos**2, axis=1) - 1.0) > eps) or NP.any(skypos[:,2] < 0.0):
                        # Fixed: removed guard on undefined "verbose"; slice
                        # columns l,m with skypos[:,:2] (skypos[:2] sliced rows)
                        print('\tWarning: skypos in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                        skypos[:,2] = NP.sqrt(1.0 - NP.sum(skypos[:,:2]**2, axis=1))
    else:
        raise ValueError('skyunits has not been set.')
    if pointing_center is None:
        if skyunits == 'altaz':
            pointing_center = NP.asarray([90.0, 0.0]) # Zenith in Alt-Az coordinates
        else:
            pointing_center = NP.asarray([0.0, 0.0, 1.0]) # Zenith in direction-cosine coordinates
    else:
        if not isinstance(pointing_center, (list, NP.ndarray)):
            raise TypeError('pointing_center must be a list or numpy array')
        pointing_center = NP.asarray(pointing_center)
        if (skyunits != 'altaz') and (skyunits != 'dircos'):
            raise ValueError('skyunits must be "altaz" or "dircos" or None (default).')
        elif skyunits == 'altaz':
            if pointing_center.size != 2:
                raise ValueError('pointing_center must be a 2-element vector in Alt-Az coordinates.')
            else:
                pointing_center = pointing_center.ravel()
            if NP.any(pointing_center[0] < 0.0) or NP.any(pointing_center[0] > 90.0):
                raise ValueError('Altitudes in pointing_center have to be positive and <= 90 degrees')
        else:
            if (pointing_center.size < 2) or (pointing_center.size > 3):
                raise ValueError('pointing_center must be a 2- or 3-element vector in direction cosine coordinates')
            else:
                pointing_center = pointing_center.ravel()
            if pointing_center.size == 2:
                if NP.sum(pointing_center**2) > 1.0:
                    raise ValueError('pointing_center in direction cosine coordinates are invalid.')
                pointing_center = NP.hstack((pointing_center, NP.sqrt(1.0-NP.sum(pointing_center**2))))
            else:
                eps = 1.0e-10
                if (NP.abs(NP.sum(pointing_center**2) - 1.0) > eps) or (pointing_center[2] < 0.0):
                    print('\tWarning: pointing_center in direction cosine coordinates along line of sight found to be negative or some direction cosines are not unit vectors. Resetting to correct values.')
                    pointing_center[2] = NP.sqrt(1.0 - NP.sum(pointing_center[:2]**2))
    if east2ax1 is not None:
        if skyunits == 'altaz':
            # Rotate in Az. Remember Az is measured clockwise from North
            # whereas east2ax1 is measured anti-clockwise from East.
            # Therefore, newAz = Az + East2ax1 wrt to principal axis
            skypos_dircos_rotated = GEOM.altaz2dircos(NP.hstack((skypos[:,0].reshape(-1,1),NP.asarray(skypos[:,1]+east2ax1).reshape(-1,1))), units='degrees')
            pointing_center_dircos_rotated = GEOM.altaz2dircos([pointing_center[0], pointing_center[1]+east2ax1], units='degrees')
        else:
            angle = NP.radians(east2ax1)
            rotation_matrix = NP.asarray([[NP.cos(angle), NP.sin(angle), 0.0],
                                          [-NP.sin(angle), NP.cos(angle),  0.0],
                                          [0.0,            0.0,           1.0]])
            skypos_dircos_rotated = NP.dot(skypos, rotation_matrix.T)
            pointing_center_dircos_rotated = NP.dot(pointing_center, rotation_matrix.T)
        skypos_dircos_relative = skypos_dircos_rotated - NP.repeat(pointing_center_dircos_rotated.reshape(1,-1), skypos.shape[0], axis=0)
    else:
        # No rotation requested. Fixed: previous code subtracted None from the
        # azimuth and referenced undefined names in the dircos path
        if skyunits == 'altaz':
            skypos_dircos = GEOM.altaz2dircos(skypos, units='degrees')
            pointing_center_dircos = GEOM.altaz2dircos([pointing_center[0], pointing_center[1]], units='degrees')
        else:
            skypos_dircos = skypos
            pointing_center_dircos = pointing_center
        skypos_dircos_relative = skypos_dircos - NP.repeat(NP.asarray(pointing_center_dircos).reshape(1,-1), skypos.shape[0], axis=0)
    # Fraunhofer pattern of a uniformly illuminated rectangle: product of sincs
    arg1 = sides[0] * skypos_dircos_relative[:,0].reshape(-1,1) / wavelength.reshape(1,-1)
    arg2 = sides[1] * skypos_dircos_relative[:,1].reshape(-1,1) / wavelength.reshape(1,-1)
    ab = NP.sinc(arg1) * NP.sinc(arg2)
    if power:
        ab = NP.abs(ab)**2
    return ab
################################################################################
def uniform_square_aperture(side, skypos, frequency, skyunits='altaz',
                            east2ax1=None, pointing_center=None,
                            power=True):
    """
    -----------------------------------------------------------------------------
    Compute the electric field or power pattern at the specified sky positions
    due to a uniformly illuminated square aperture
    Inputs:
    side        [scalar] Sides of the square (in m)
    skypos      [list or numpy vector] Sky positions at which the power pattern
                is to be estimated. Size is M x N where M is the number of
                locations, N = 2 (if skyunits = altaz denoting Alt-Az
                coordinates), or N = 3 (if skyunits = dircos denoting direction
                cosine coordinates). If skyunits = altaz, then altitude and
                azimuth must be in degrees
    frequency   [list or numpy vector] frequencies (in GHz) at which the power
                pattern is to be estimated. Frequencies differing by too much
                and extending over the usual bands cannot be given.
    Keyword Inputs:
    skyunits    [string] string specifying the coordinate system of the sky
                positions. Accepted values are 'altaz', and 'dircos'.
                Default = 'altaz'. If 'dircos', the direction cosines are
                aligned with the local East, North, and Up. If 'altaz', then
                altitude and azimuth must be in degrees.
    east2ax1    [scalar] Angle (in degrees) the primary axis of the array makes
                with the local East (positive anti-clockwise).
    pointing_center
                [list or numpy array] coordinates of pointing center (in the same
                coordinate system as that of sky coordinates specified by
                skyunits). 2-element vector if skyunits='altaz'. 2- or
                3-element vector if skyunits='dircos'.
    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern
    Output:
    Electric field pattern or power pattern, number of rows equal to the number
    of sky positions (which is equal to the number of rows in skypos), and number
    of columns equal to the number of wavelengths.
    -----------------------------------------------------------------------------
    """
    try:
        side, skypos, frequency
    except NameError:
        raise NameError('Square antenna side, skypos, frequency must be specified')
    # Fixed: validate the input "side"; previous code tested the as-yet
    # undefined name "sides" which raised NameError on every call
    if not isinstance(side, (int,float)):
        raise TypeError('Antenna side must be a scalar')
    # A square is a rectangle with equal sides: delegate to the general routine
    sides = NP.asarray([side]*2, dtype=float)
    ab = uniform_rectangular_aperture(sides, skypos, frequency,
                                      skyunits=skyunits,
                                      east2ax1=east2ax1,
                                      pointing_center=pointing_center,
                                      power=power)
    return ab
################################################################################
def feed_illumination_of_aperture(aperture_locs, feedinfo, wavelength=1.0,
                                  short_dipole_approx=False,
                                  half_wave_dipole_approx=True):
    """
    -----------------------------------------------------------------------------
    Compute the illumination by a specified feed of the aperture at specified
    locations.
    Inputs:
    aperture_locs
                [numpy array] Nx3 array of N locations defined by three
                coordinates x, y, z. If Nx1 or Nx2 array is specified, y
                and z are accordingly assumed to be zero.
    feedinfo    [dictionary] dictionary that specifies feed including the type
                of element, element size and orientation. It consists of the
                following keys and values:
                'position'    [numpy array] 3-element array specifying x,y,z-
                              coordinates of the center of the feed (in meters).
                              If not specified or set to None, it is assumed to
                              be at the origin
                'shape'       [string] Shape of antenna element. Accepted values
                              are 'dipole', 'delta', 'dish', 'rect' and 'square'.
                              Will be ignored if key 'id' is set. 'delta' denotes
                              a delta function for the antenna element which has
                              an isotropic radiation pattern. 'delta' is the
                              default when keys 'id' and 'shape' are not set.
                'size'        [scalar or 2-element list/numpy array] Diameter of
                              the telescope dish (in meters) if the key 'shape'
                              is set to 'dish', side of the square aperture (in
                              meters) if the key 'shape' is set to 'square',
                              2-element sides if key 'shape' is set to 'rect',
                              or length of the dipole if key 'shape' is set to
                              'dipole'. Will be ignored if key 'shape' is set to
                              'delta'. Will be ignored if key 'id' is set and a
                              preset value used for the diameter or dipole.
                'orientation' [list or numpy array] If key 'shape' is set to
                              dipole, it refers to the orientation of the dipole
                              element unit vector whose magnitude is specified by
                              length. If key 'shape' is set to 'dish', it refers
                              to the position on the sky to which the dish is
                              pointed. For a dipole, this unit vector must be
                              provided in the local ENU coordinate system aligned
                              with the direction cosines coordinate system or in
                              the Alt-Az coordinate system. This will be
                              used only when key 'shape' is set to 'dipole'.
                              This could be a 2-element vector (transverse
                              direction cosines) where the third (line-of-sight)
                              component is determined, or a 3-element vector
                              specifying all three direction cosines or a two-
                              element coordinate in Alt-Az system. If not provided
                              it defaults to an eastward pointing dipole. If key
                              'shape' is set to 'dish', the orientation refers
                              to the pointing center of the dish on the sky. It
                              can be provided in Alt-Az system as a two-element
                              vector or in the direction cosine coordinate
                              system as a two- or three-element vector. If not
                              set in the case of a dish element, it defaults to
                              zenith. This is not to be confused with the key
                              'pointing_center' in dictionary 'pointing_info'
                              which refers to the beamformed pointing center of
                              the array. The coordinate system is specified by
                              the key 'ocoords'
                'ocoords'     [scalar string] specifies the coordinate system
                              for key 'orientation'. Accepted values are 'altaz'
                              and 'dircos'.
                'element_locs'
                              [2- or 3-column array] Element locations that
                              constitute the tile. Each row specifies
                              location of one element in the tile. The
                              locations must be specified in local ENU
                              coordinate system. First column specifies along
                              local east, second along local north and the
                              third along local up. If only two columns are
                              specified, the third column is assumed to be
                              zeros. If 'elements_locs' is not provided, it
                              assumed to be a one-element system and not a
                              phased array as far as determination of primary
                              beam is concerned.
                'groundplane' [scalar] height of telescope element above the
                              ground plane (in meteres). Default = None will
                              denote no ground plane effects.
    wavelength  [scalar, list or numpy vector] Wavelengths at which the field
                pattern of the feed is to be estimated. Must be in the same
                units as aperture_locs and the feed dimensions
    short_dipole_approx
                [boolean] if True, indicates short dipole approximation
                is to be used. Otherwise, a more accurate expression is used
                for the dipole pattern. Default=False. Both
                short_dipole_approx and half_wave_dipole_approx cannot be set
                to True at the same time
    half_wave_dipole_approx
                [boolean] if True, indicates half-wave dipole approximation
                is to be used. Otherwise, a more accurate expression is used
                for the dipole pattern. Default=False
    Output:
    Dictionary containing the following keys and values:
    'aperture_locs' [numpy array] Nx3 array of sampled x, y, z aperture
                    locations
    'illumination'  [numpy array] Complex electric field illumination pattern
                    at the sampled aperture locations. It is an array of shape
                    N x nchan where nchan is the number of frequency channels
    -----------------------------------------------------------------------------
    """
    try:
        aperture_locs, feedinfo
    except NameError:
        raise NameError('Input aperture_locs must be specified')
    # Validate feedinfo is a dict before indexing it (the dict-type check used
    # to come after feedinfo['position'] was already accessed)
    if not isinstance(feedinfo, dict):
        raise TypeError('Input feedinfo must be a dictionary')
    if 'position' not in feedinfo:
        feedinfo['position'] = NP.asarray([0.0, 0.0, 0.0])
    elif feedinfo['position'] is None:
        feedinfo['position'] = NP.asarray([0.0, 0.0, 0.0])
    elif not isinstance(feedinfo['position'], NP.ndarray):
        raise TypeError('"position" key in input feedinfo must be a numpy array')
    else:
        if feedinfo['position'].ndim > 1:
            feedinfo['position'] = feedinfo['position'].ravel()
        feedinfo['position'] = feedinfo['position'].reshape(-1)
        if feedinfo['position'].size > 3:
            raise ValueError('Feed position must be a 3-element array')
        else:
            # Zero-pad missing y/z coordinates of the feed position
            feedinfo['position'] = NP.pad(feedinfo['position'], [(0,3-feedinfo['position'].size)], mode='constant', constant_values=[(0.0, 0.0)])
    if not isinstance(aperture_locs, NP.ndarray):
        raise TypeError('Input aperture_locs must be a numpy array')
    # Normalize aperture_locs to an Nx3 array, zero-filling missing y/z columns
    # (previous broadcast with NP.zeros((1,3)) replicated x into y and z)
    if aperture_locs.ndim == 1:
        aperture_locs = aperture_locs.reshape(-1,1)
    if aperture_locs.ndim == 2:
        if aperture_locs.shape[1] < 3:
            aperture_locs = NP.hstack((aperture_locs, NP.zeros((aperture_locs.shape[0], 3-aperture_locs.shape[1]), dtype=float)))
        elif aperture_locs.shape[1] != 3:
            raise ValueError('Input aperture_locs must not have more than three coordinates')
    else:
        raise ValueError('Input aperture_locs has too many dimensions')
    # Work in a frame centered on the feed
    aperture_locs = aperture_locs - feedinfo['position'].reshape(1,-1)
    if isinstance(wavelength, list):
        wavelength = NP.asarray(wavelength)
    elif isinstance(wavelength, (int, float)):
        wavelength = NP.asarray(wavelength).reshape(-1)
    elif not isinstance(wavelength, NP.ndarray):
        raise TypeError('Wavelength should be a scalar, list or numpy array.')
    if NP.any(wavelength <= 0.0):
        raise ValueError('Wavelength(s) should be positive.')
    wavelength = wavelength.astype(NP.float32)
    # Fixed: test the mean z-coordinate; NP.mean(aperture_locs, axis=2) raised
    # IndexError on an Nx3 (2-D) array
    if NP.mean(aperture_locs[:,2]) < 0.0: # Invert the aperture and compute the feed illumination on the aperture as the feed can "only point upwards"
        r, aperture_locs_alt, aperture_locs_az = GEOM.xyz2sph(-aperture_locs[:,0], -aperture_locs[:,1], -aperture_locs[:,2], units='degrees')
    else:
        r, aperture_locs_alt, aperture_locs_az = GEOM.xyz2sph(aperture_locs[:,0], aperture_locs[:,1], aperture_locs[:,2], units='degrees')
    aperture_locs_altaz = NP.hstack((aperture_locs_alt.reshape(-1,1), aperture_locs_az.reshape(-1,1)))
    # 'phased_array_feed' is optional; default to a single-element feed
    phased_array_feed = feedinfo.get('phased_array_feed', False)
    if 'shape' not in feedinfo:
        feedinfo['shape'] = 'delta'
        ep = 1.0
    elif feedinfo['shape'] == 'delta':
        ep = 1.0
    elif feedinfo['shape'] == 'dipole':
        ep = dipole_field_pattern(feedinfo['size'], aperture_locs_altaz,
                                  dipole_coords=feedinfo['ocoords'],
                                  dipole_orientation=feedinfo['orientation'],
                                  skycoords='altaz', wavelength=wavelength,
                                  short_dipole_approx=short_dipole_approx,
                                  half_wave_dipole_approx=half_wave_dipole_approx,
                                  power=False)
        ep = ep[:,:,NP.newaxis]  # add an axis to be compatible with random realizations
    elif feedinfo['shape'] == 'dish':
        ep = airy_disk_pattern(feedinfo['size'], aperture_locs_altaz,
                               FCNST.c/wavelength, skyunits='altaz',
                               peak=1.0, pointing_center=None,
                               gaussian=False, power=False,
                               small_angle_tol=1e-10)
        ep = ep[:,:,NP.newaxis]  # add an axis to be compatible with random realizations
    elif feedinfo['shape'] == 'rect':
        if phased_array_feed:
            raise ValueError('Phased array feed cannot be used with the feed shape specified')
        ep = uniform_rectangular_aperture(feedinfo['size'], aperture_locs_altaz, FCNST.c/wavelength, skyunits='altaz', east2ax1=feedinfo['east2ax1'], pointing_center=None, power=False)
    elif feedinfo['shape'] == 'square':
        if phased_array_feed:
            raise ValueError('Phased array feed cannot be used with the feed shape specified')
        ep = uniform_square_aperture(feedinfo['size'], aperture_locs_altaz, FCNST.c/wavelength, skyunits='altaz', east2ax1=feedinfo['east2ax1'], pointing_center=None, power=False)
    else:
        raise ValueError('Value in key "shape" of feedinfo dictionary invalid.')
    if phased_array_feed:
        element_locs = feedinfo['element_locs']
        # Fixed: pass the wavelengths this function received; "frequency" was
        # an undefined name here
        irap = array_field_pattern(element_locs, aperture_locs_altaz, skycoords='altaz',
                                   pointing_info=None, wavelength=wavelength,
                                   power=False)
    else:
        irap = 1.0
    compute_ground_pattern = False
    gp = 1.0
    ground_modifier = None
    if 'groundplane' in feedinfo:
        if feedinfo['groundplane'] is not None:
            if 'shape' in feedinfo:
                if feedinfo['shape'] != 'dish':  # If shape is not dish, compute ground plane pattern
                    compute_ground_pattern = True
            else:
                compute_ground_pattern = True
    if compute_ground_pattern:
        # Fixed: use the provided wavelengths; "frequency" was undefined
        gp = ground_plane_field_pattern(feedinfo['groundplane'], aperture_locs_altaz, skycoords='altaz',
                                        wavelength=wavelength, angle_units='degrees',
                                        modifier=ground_modifier, power=False)
    # Total illumination = element pattern x array factor x ground-plane factor
    fp = ep * irap * gp
    return {'aperture_locs': aperture_locs, 'illumination': fp}
#################################################################################
def feed_aperture_combined_field_pattern(aperture_locs, feedinfo, skypos,
                                         skycoords='altaz', pointing_info=None,
                                         wavelength=1.0, short_dipole_approx=False,
                                         half_wave_dipole_approx=True,
                                         power=False):
    """
    -----------------------------------------------------------------------------
    Compute the combined field pattern of a feed-aperture assembly where the feed
    illuminates the aperture.
    Inputs:
    aperture_locs
                [numpy array] Nx3 array of N locations defined by three
                coordinates x, y, z. If Nx1 or Nx2 array is specified, y
                and z are accordingly assumed to be zero.
    feedinfo    [dictionary] dictionary that specifies feed including the type
                of element, element size and orientation. It consists of the
                following keys and values:
                'position'    [numpy array] 3-element array specifying x,y,z-
                              coordinates of the center of the feed (in meters).
                              If not specified or set to None, it is assumed to
                              be at the origin
                'shape'       [string] Shape of antenna element. Accepted values
                              are 'dipole', 'delta', 'dish', 'rect' and 'square'.
                              Will be ignored if key 'id' is set. 'delta' denotes
                              a delta function for the antenna element which has
                              an isotropic radiation pattern. 'delta' is the
                              default when keys 'id' and 'shape' are not set.
                'size'        [scalar or 2-element list/numpy array] Diameter of
                              the telescope dish (in meters) if the key 'shape'
                              is set to 'dish', side of the square aperture (in
                              meters) if the key 'shape' is set to 'square',
                              2-element sides if key 'shape' is set to 'rect',
                              or length of the dipole if key 'shape' is set to
                              'dipole'. Will be ignored if key 'shape' is set to
                              'delta'. Will be ignored if key 'id' is set and a
                              preset value used for the diameter or dipole.
                'orientation' [list or numpy array] If key 'shape' is set to
                              dipole, it refers to the orientation of the dipole
                              element unit vector whose magnitude is specified by
                              length. If key 'shape' is set to 'dish', it refers
                              to the position on the sky to which the dish is
                              pointed. For a dipole, this unit vector must be
                              provided in the local ENU coordinate system aligned
                              with the direction cosines coordinate system or in
                              the Alt-Az coordinate system. This will be
                              used only when key 'shape' is set to 'dipole'.
                              This could be a 2-element vector (transverse
                              direction cosines) where the third (line-of-sight)
                              component is determined, or a 3-element vector
                              specifying all three direction cosines or a two-
                              element coordinate in Alt-Az system. If not provided
                              it defaults to an eastward pointing dipole. If key
                              'shape' is set to 'dish', the orientation refers
                              to the pointing center of the dish on the sky. It
                              can be provided in Alt-Az system as a two-element
                              vector or in the direction cosine coordinate
                              system as a two- or three-element vector. If not
                              set in the case of a dish element, it defaults to
                              zenith. This is not to be confused with the key
                              'pointing_center' in dictionary 'pointing_info'
                              which refers to the beamformed pointing center of
                              the array. The coordinate system is specified by
                              the key 'ocoords'
                'ocoords'     [scalar string] specifies the coordinate system
                              for key 'orientation'. Accepted values are 'altaz'
                              and 'dircos'.
                'element_locs'
                              [2- or 3-column array] Element locations that
                              constitute the tile. Each row specifies
                              location of one element in the tile. The
                              locations must be specified in local ENU
                              coordinate system. First column specifies along
                              local east, second along local north and the
                              third along local up. If only two columns are
                              specified, the third column is assumed to be
                              zeros. If 'elements_locs' is not provided, it
                              assumed to be a one-element system and not a
                              phased array as far as determination of primary
                              beam is concerned.
                'groundplane' [scalar] height of telescope element above the
                              ground plane (in metres). Default = None will
                              denote no ground plane effects.
    skypos      [2- or 3-column numpy array] The positions on the sky for which
                the array field pattern is to be estimated. The coordinate system
                specified using the keyword input skycoords. If skycoords is set
                to 'altaz', skypos must be a 2-column array that obeys Alt-Az
                conventions with altitude in the first column and azimuth in the
                second column. Both altitude and azimuth must be in degrees. If
                skycoords is set to 'dircos', a 3- or 2-column (the
                third column is automatically determined from direction cosine
                rules), it must obey conventions of direction cosines. The first
                column is l (east), the second is m (north) and third is n (up).
                Default will be set to zenith position in the coordinate system
                specified.
    skycoords   [string scalar] Coordinate system of sky positions specified in
                skypos. Accepted values are 'altaz' (Alt-Az) or 'dircos'
                (direction cosines)
    pointing_info
                [dictionary] A dictionary consisting of information relating to
                pointing center. The pointing center can be specified either via
                element delay compensation or by directly specifying the pointing
                center in a certain coordinate system. Default = None (pointing
                centered at zenith). This dictionary consists of the following
                tags and values:
                'delays'          [numpy array] Delays (in seconds) to be applied
                                  to the tile elements. Size should be equal to
                                  number of tile elements (number of rows in
                                  elementpos). Default = None will set all element
                                  delays to zero phasing them to zenith.
                'pointing_center' [numpy array] This will apply in the absence of
                                  key 'delays'. This can be specified as a row
                                  vector. Should have two-columns if using Alt-Az
                                  coordinates, or two or three columns if using
                                  direction cosines. There is no default. The
                                  coordinate system must be specified in
                                  'pointing_coords' if 'pointing_center' is to be
                                  used.
                'pointing_coords' [string scalar] Coordinate system in which the
                                  pointing_center is specified. Accepted values
                                  are 'altaz' or 'dircos'. Must be provided if
                                  'pointing_center' is to be used. No default.
                'delayerr'        [int, float] RMS jitter in delays used in the
                                  beamformer. Random jitters are drawn from a
                                  normal distribution with this rms. Must be
                                  a non-negative scalar. If not provided, it
                                  defaults to 0 (no jitter).
                'gains'           [numpy array] Complex element gains. Must be of
                                  size equal n_elements specified by the number of
                                  rows in elementpos. If set to None (default),
                                  all element gains are assumed to be unity.
                'gainerr'         [int, float] RMS error in voltage amplitude in
                                  dB to be used in the beamformer. Random jitters
                                  are drawn from a normal distribution in
                                  logarithm units which are then converted to
                                  linear units. Must be a non-negative scalar. If
                                  not provided, it defaults to 0 (no jitter).
                'nrand'           [int] number of random realizations of gainerr
                                  and/or delayerr to be generated. Must be
                                  positive. If none provided, it defaults to 1.
    wavelength
                [scalar, list or numpy vector] Wavelengths at which the field
                pattern of the feed is to be estimated. Must be in the same
                units as aperture_locs and the feed dimensions
    short_dipole_approx
                [boolean] if True, indicates short dipole approximation
                is to be used. Otherwise, a more accurate expression is used
                for the dipole pattern. Default=False. Both
                short_dipole_approx and half_wave_dipole_approx cannot be set
                to True at the same time
    half_wave_dipole_approx
                [boolean] if True, indicates half-wave dipole approximation
                is to be used. Otherwise, a more accurate expression is used
                for the dipole pattern. Default=False
    power       [boolean] If set to True (default), compute power pattern,
                otherwise compute field pattern.
    Output:
    Complex electric field pattern or power pattern of shaped nsrc x nchan
    -----------------------------------------------------------------------------
    """
    try:
        aperture_locs, feedinfo, skypos
    except NameError:
        raise NameError('Input aperture_locs, feedinfo and skypos must be specified')
    if not isinstance(feedinfo, dict):
        raise TypeError('Input feedinfo must be a dictionary')
    # Treat a missing 'phased_array_feed' key as a single-element feed instead
    # of raising KeyError
    is_phased_array_feed = feedinfo.get('phased_array_feed', False)
    if 'shape' not in feedinfo:
        feedinfo['shape'] = 'delta'
        ep = 1.0
    elif feedinfo['shape'] == 'delta':
        ep = 1.0
    elif feedinfo['shape'] == 'dipole':
        ep = dipole_field_pattern(feedinfo['size'], skypos,
                                  dipole_coords=feedinfo['ocoords'],
                                  dipole_orientation=feedinfo['orientation'],
                                  skycoords=skycoords, wavelength=wavelength,
                                  short_dipole_approx=short_dipole_approx,
                                  half_wave_dipole_approx=half_wave_dipole_approx,
                                  power=False)
        ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random realizations
    elif feedinfo['shape'] == 'dish':
        ep = airy_disk_pattern(feedinfo['size'], skypos, FCNST.c/wavelength,
                               skyunits=skycoords, peak=1.0,
                               pointing_center=None, gaussian=False,
                               power=False, small_angle_tol=1e-10)
        ep = ep[:,:,NP.newaxis] # add an axis to be compatible with random realizations
    elif feedinfo['shape'] == 'rect':
        if is_phased_array_feed:
            raise ValueError('Phased array feed cannot be used with the feed shape specified')
        ep = uniform_rectangular_aperture(feedinfo['size'], skypos, FCNST.c/wavelength, skyunits=skycoords, east2ax1=feedinfo['east2ax1'], pointing_center=None, power=False)
    elif feedinfo['shape'] == 'square':
        if is_phased_array_feed:
            raise ValueError('Phased array feed cannot be used with the feed shape specified')
        ep = uniform_square_aperture(feedinfo['size'], skypos, FCNST.c/wavelength, skyunits=skycoords, east2ax1=feedinfo['east2ax1'], pointing_center=None, power=False)
    else:
        raise ValueError('Value in key "shape" of feedinfo dictionary invalid.')
    if is_phased_array_feed:
        element_locs = feedinfo['element_locs']
        # Bugfix: was wavelength=FCNST.c/frequency with `frequency` undefined
        # (NameError); the wavelength argument is already in wavelength units
        irap = array_field_pattern(element_locs, skypos, skycoords=skycoords,
                                   pointing_info=None, wavelength=wavelength,
                                   power=False)
    else:
        irap = 1.0
    compute_ground_pattern = False
    gp = 1.0
    ground_modifier = None
    if 'groundplane' in feedinfo:
        if feedinfo['groundplane'] is not None:
            if 'shape' in feedinfo:
                if feedinfo['shape'] != 'dish': # If shape is not dish, compute ground plane pattern
                    compute_ground_pattern = True
            else:
                compute_ground_pattern = True
    if compute_ground_pattern:
        # Bugfix: was wavelength=FCNST.c/frequency with `frequency` undefined
        gp = ground_plane_field_pattern(feedinfo['groundplane'], skypos,
                                        skycoords=skycoords,
                                        wavelength=wavelength,
                                        angle_units='degrees',
                                        modifier=ground_modifier, power=False)
    feed_field_pattern = ep * irap * gp
    # Bugfix: was hal_wave_dipole_approx (NameError)
    illumination_info = feed_illumination_of_aperture(aperture_locs, feedinfo, wavelength=wavelength, short_dipole_approx=short_dipole_approx, half_wave_dipole_approx=half_wave_dipole_approx)
    pinfo = copy.copy(pointing_info)
    if (pinfo is None) or not isinstance(pinfo, dict):
        pinfo = {}
        pinfo['gains'] = illumination_info['illumination']
    else:
        # 'gains' may be absent or None (interpreted as unity gains per the
        # docstring); guard against KeyError/TypeError in those cases
        prior_gains = pinfo.get('gains', None)
        if prior_gains is None:
            pinfo['gains'] = illumination_info['illumination']
        else:
            pinfo['gains'] = prior_gains * illumination_info['illumination']
    aperture_field_pattern = generic_aperture_field_pattern(illumination_info['aperture_locs'], skypos, skycoords=skycoords, pointing_info=pinfo, wavelength=wavelength, power=False)
    if power:
        return NP.abs(aperture_field_pattern*feed_field_pattern)**2
    else:
        return aperture_field_pattern*feed_field_pattern
#################################################################################
| 152,224 | 52.808766 | 256 |
py
|
PRISim
|
PRISim-master/prisim/__init__.py
|
import os as _os
# Package metadata for PRISim (Precision Radio Interferometry Simulator);
# these dunder attributes are exposed at the top level of the `prisim` package.
__version__='2.2.1'
__description__='Precision Radio Interferometry Simulator'
__author__='Nithyanandan Thyagarajan'
__authoremail__='[email protected]'
__maintainer__='Nithyanandan Thyagarajan'
__maintaineremail__='[email protected]'
__url__='http://github.com/nithyanandan/prisim'
# Read the git commit hash from githash.txt alongside this module and expose
# it as prisim.__githash__. NOTE(review): githash.txt is presumably written at
# build/packaging time -- confirm in the setup scripts; import fails if absent.
with open(_os.path.dirname(_os.path.abspath(__file__))+'/githash.txt', 'r') as _githash_file:
    __githash__ = _githash_file.readline()
| 453 | 33.923077 | 93 |
py
|
PRISim
|
PRISim-master/prisim/delay_spectrum.py
|
from __future__ import division
import numpy as NP
import multiprocessing as MP
import itertools as IT
import progressbar as PGB
# import aipy as AP
import astropy
from astropy.io import fits
import astropy.cosmology as CP
import scipy.constants as FCNST
import healpy as HP
from distutils.version import LooseVersion
import yaml, h5py
from astroutils import writer_module as WM
from astroutils import constants as CNST
from astroutils import DSP_modules as DSP
from astroutils import mathops as OPS
from astroutils import geometry as GEOM
from astroutils import lookup_operations as LKP
import prisim
from prisim import primary_beams as PB
from prisim import interferometry as RI
from prisim import baseline_delay_horizon as DLY
try:
from pyuvdata import UVBeam
except ImportError:
uvbeam_module_found = False
else:
uvbeam_module_found = True
prisim_path = prisim.__path__[0]+'/'
# cosmo100 = CP.FlatLambdaCDM(H0=100.0, Om0=0.27) # Using H0 = 100 km/s/Mpc
cosmoPlanck15 = CP.Planck15 # Planck 2015 cosmology
cosmo100 = cosmoPlanck15.clone(name='Modified Planck 2015 cosmology with h=1.0', H0=100.0) # Modified Planck 2015 cosmology with h=1.0, H= 100 km/s/Mpc
#################################################################################
def _astropy_columns(cols, tabtype='BinTableHDU'):
    """
    ----------------------------------------------------------------------------
    !!! FOR INTERNAL USE ONLY !!!
    This internal routine checks for Astropy version and produces the FITS
    columns based on the version
    Inputs:
    cols    [list of Astropy FITS columns] These are a list of Astropy FITS
            columns
    tabtype [string] specifies table type - 'BinTableHDU' (default) for binary
            tables and 'TableHDU' for ASCII tables
    Outputs:
    columns [Astropy FITS column data]
    ----------------------------------------------------------------------------
    """
    try:
        cols
    except NameError:
        raise NameError('Input cols not specified')
    if tabtype not in ['BinTableHDU', 'TableHDU']:
        raise ValueError('tabtype specified is invalid.')
    use_ascii = (tabtype == 'TableHDU')
    if astropy.__version__ == '0.4':
        # Astropy 0.4.0 used the older `tbtype` keyword
        columns = fits.ColDefs(cols, tbtype=tabtype)
    else:
        # Astropy >= 0.4.2 takes the `ascii` keyword. Previously, versions
        # strictly between 0.4.0 and 0.4.2 fell through both branches and
        # left `columns` unbound (UnboundLocalError); default to the modern
        # signature instead. This also removes the deprecated
        # distutils.version.LooseVersion comparison.
        columns = fits.ColDefs(cols, ascii=use_ascii)
    return columns
################################################################################
# def _gentle_clean(dd, _w, tol=1e-1, area=None, stop_if_div=True, maxiter=100,
# verbose=False, autoscale=True):
# if verbose:
# print("Performing gentle clean...")
# scale_factor = 1.0
# if autoscale:
# scale_factor = NP.nanmax(NP.abs(_w))
# dd /= scale_factor
# _w /= scale_factor
# cc, info = AP.deconv.clean(dd, _w, tol=tol, area=area, stop_if_div=False,
# maxiter=maxiter, verbose=verbose)
# #dd = info['res']
# cc = NP.zeros_like(dd)
# inside_res = NP.std(dd[area!=0])
# outside_res = NP.std(dd[area==0])
# initial_res = inside_res
# #print(inside_res,'->',)
# ncycle=0
# if verbose:
# print("inside_res outside_res")
# print(inside_res, outside_res)
# inside_res = 2*outside_res #just artifically bump up the inside res so the loop runs at least once
# while(inside_res>outside_res and maxiter>0):
# if verbose: print('.',)
# _d_cl, info = AP.deconv.clean(dd, _w, tol=tol, area=area, stop_if_div=stop_if_div, maxiter=maxiter, verbose=verbose, pos_def=True)
# res = info['res']
# inside_res = NP.std(res[area!=0])
# outside_res = NP.std(res[area==0])
# dd = info['res']
# cc += _d_cl
# ncycle += 1
# if verbose: print(inside_res*scale_factor, outside_res*scale_factor)
# if ncycle>1000: break
# info['ncycle'] = ncycle-1
# dd *= scale_factor
# _w *= scale_factor
# cc *= scale_factor
# info['initial_residual'] = initial_res * scale_factor
# info['final_residual'] = inside_res * scale_factor
# return cc, info
#################################################################################
def complex1dClean_arg_splitter(args, **kwargs):
    # Unpack a tuple of positional arguments and forward them, along with any
    # keyword arguments, to complex1dClean(). NOTE(review): this shim looks
    # intended for parallel dispatch (e.g. a multiprocessing pool mapping a
    # single iterable of argument tuples) -- confirm against the callers.
    return complex1dClean(*args, **kwargs)
def complex1dClean(inp, kernel, cbox=None, gain=0.1, maxiter=10000,
                   threshold=5e-3, threshold_type='relative', verbose=False,
                   progressbar=False, pid=None, progressbar_yloc=0):
    """
    ----------------------------------------------------------------------------
    Hogbom CLEAN algorithm applicable to 1D complex array
    Inputs:
    inp      [numpy vector] input 1D array to be cleaned. Can be complex.
    kernel   [numpy vector] 1D array that acts as the deconvolving kernel. Can
             be complex. Must be of same size as inp
    cbox     [boolean array] 1D boolean array that acts as a mask for pixels
             which should be cleaned. Same size as inp. Only pixels with values
             True are to be searched for maxima in residuals for cleaning and
             the rest are not searched for. Default=None (means all pixels are
             to be searched for maxima while cleaning)
    gain     [scalar] gain factor to be applied while subtracting clean
             component from residuals. This is the fraction of the maximum in
             the residuals that will be subtracted. Must lie between 0 and 1.
             A lower value will have a smoother convergence but take a longer
             time to converge. Default=0.1
    maxiter  [scalar] maximum number of iterations for cleaning process. Will
             terminate if the number of iterations exceed maxiter. Default=10000
    threshold
             [scalar] represents the cleaning depth either as a fraction of the
             maximum in the input (when threshold_type is set to 'relative') or
             the absolute value (when threshold_type is set to 'absolute') in
             same units of input down to which inp should be cleaned. Value must
             always be positive. When threshold_type is set to 'relative',
             threshold must lie between 0 and 1. Default=5e-3 (found to work
             well and converge fast) assuming threshold_type is set to 'relative'
    threshold_type
             [string] represents the type of threshold specified by value in
             input threshold. Accepted values are 'relative' and 'absolute'. If
             set to 'relative' the threshold value is the fraction (between 0
             and 1) of maximum in input down to which it should be cleaned. If
             set to 'absolute' it is the actual value down to which inp should
             be cleaned. Default='relative'
    verbose  [boolean] If set to True (default), print diagnostic and progress
             messages. If set to False, no such messages are printed.
    progressbar
             [boolean] If set to False (default), no progress bar is displayed
    pid      [string or integer] process identifier (optional) relevant only in
             case of parallel processing and if progressbar is set to True. If
             pid is not specified, it defaults to the Pool process id
    progressbar_yloc
             [integer] row number where the progressbar is displayed on the
             terminal. Default=0
    Output:
    outdict  [dictionary] It consists of the following keys and values at
             termination:
             'termination' [dictionary] consists of information on the
                           conditions for termination with the following keys
                           and values:
                           'threshold' [boolean] If True, the cleaning process
                                       terminated because the threshold was
                                       reached
                           'maxiter'   [boolean] If True, the cleaning process
                                       terminated because the number of
                                       iterations reached maxiter
                           'inrms<outrms'
                                       [boolean] If True, the cleaning process
                                       terminated because the rms inside the
                                       clean box is below the rms outside of it
             'iter'        [scalar] number of iterations performed before
                           termination
             'rms'         [numpy vector] rms of the residuals as a function of
                           iteration
             'inrms'       [numpy vector] rms of the residuals inside the clean
                           box as a function of iteration
             'outrms'      [numpy vector] rms of the residuals outside the clean
                           box as a function of iteration
             'res'         [numpy array] uncleaned residuals at the end of the
                           cleaning process. Complex valued and same size as
                           inp
             'cc'          [numpy array] clean components at the end of the
                           cleaning process. Complex valued and same size as
                           inp
    ----------------------------------------------------------------------------
    """
    try:
        inp, kernel
    except NameError:
        raise NameError('Inputs inp and kernel not specified')
    if not isinstance(inp, NP.ndarray):
        raise TypeError('inp must be a numpy array')
    if not isinstance(kernel, NP.ndarray):
        raise TypeError('kernel must be a numpy array')
    if threshold_type not in ['relative', 'absolute']:
        raise ValueError('invalid specification for threshold_type')
    if not isinstance(threshold, (int,float)):
        raise TypeError('input threshold must be a scalar')
    else:
        threshold = float(threshold)
        if threshold <= 0.0:
            raise ValueError('input threshold must be positive')
    inp = inp.flatten()
    kernel = kernel.flatten()
    kernel /= NP.abs(kernel).max()
    kmaxind = NP.argmax(NP.abs(kernel))
    if inp.size != kernel.size:
        raise ValueError('inp and kernel must have same size')
    # NOTE: NP.bool was removed in NumPy >= 1.24; use the builtin bool dtype
    if cbox is None:
        cbox = NP.ones(inp.size, dtype=bool)
    elif isinstance(cbox, NP.ndarray):
        cbox = cbox.flatten()
        if cbox.size != inp.size:
            raise ValueError('Clean box must be of same size as input')
        cbox = NP.where(cbox > 0.0, True, False)
    else:
        raise TypeError('cbox must be a numpy array')
    cbox = cbox.astype(bool)
    if threshold_type == 'relative':
        lolim = threshold
    else:
        lolim = threshold / NP.abs(inp).max()
        if lolim >= 1.0:
            raise ValueError('incompatible value specified for threshold')
    # rms estimates use the median absolute deviation rather than NP.std
    inrms = [NP.median(NP.abs(inp[cbox] - NP.median(inp[cbox])))]
    if inp.size - NP.sum(cbox) <= 2:
        # Too few pixels outside the clean box to estimate an outside rms
        outrms = None
    else:
        outrms = [NP.median(NP.abs(inp[NP.invert(cbox)] - NP.median(inp[NP.invert(cbox)])))]
    if not isinstance(gain, float):
        raise TypeError('gain must be a floating point number')
    else:
        if (gain <= 0.0) or (gain >= 1.0):
            raise TypeError('gain must lie between 0 and 1')
    if not isinstance(maxiter, int):
        raise TypeError('maxiter must be an integer')
    else:
        if maxiter <= 0:
            raise ValueError('maxiter must be positive')
    cc = NP.zeros_like(inp)
    res = NP.copy(inp)
    # Bugfix: cond3 was previously never initialized (an unused cond4 was set
    # instead), so building outdict raised NameError whenever outrms is None
    cond1 = cond2 = cond3 = False
    prevrms = NP.median(NP.abs(res - NP.median(res)))
    currentrms = [NP.median(NP.abs(res - NP.median(res)))]
    itr = 0
    terminate = False
    if progressbar:
        if pid is None:
            pid = MP.current_process().name
        else:
            pid = '{0:0d}'.format(pid)
        progressbar_loc = (0, progressbar_yloc)
        writer=WM.Writer(progressbar_loc)
        progress = PGB.ProgressBar(widgets=[pid+' ', PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Iterations '.format(maxiter), PGB.ETA()], maxval=maxiter, fd=writer).start()
    while not terminate:
        itr += 1
        # Locate the peak residual inside the clean box and subtract a
        # gain-scaled, kernel-shaped component centered on it
        indmaxres = NP.argmax(NP.abs(res*cbox))
        maxres = res[indmaxres]
        ccval = gain * maxres
        cc[indmaxres] += ccval
        res = res - ccval * NP.roll(kernel, indmaxres-kmaxind)
        prevrms = NP.copy(currentrms[-1])
        currentrms += [NP.median(NP.abs(res - NP.median(res)))]
        inrms += [NP.median(NP.abs(res[cbox] - NP.median(res[cbox])))]
        cond1 = NP.abs(maxres) <= lolim * NP.abs(inp).max()
        cond2 = itr >= maxiter
        terminate = cond1 or cond2
        if outrms is not None:
            outrms += [NP.median(NP.abs(res[NP.invert(cbox)] - NP.median(res[NP.invert(cbox)])))]
            cond3 = inrms[-1] <= outrms[-1]
            terminate = terminate or cond3
        if progressbar:
            progress.update(itr)
    if progressbar:
        progress.finish()
    inrms = NP.asarray(inrms)
    currentrms = NP.asarray(currentrms)
    if outrms is not None:
        outrms = NP.asarray(outrms)
    outdict = {'termination':{'threshold': cond1, 'maxiter': cond2, 'inrms<outrms': cond3}, 'iter': itr, 'rms': currentrms, 'inrms': inrms, 'outrms': outrms, 'cc': cc, 'res': res}
    return outdict
################################################################################
def dkprll_deta(redshift, cosmo=cosmo100):
    """
    ----------------------------------------------------------------------------
    Jacobian for converting delays (eta or tau) into line-of-sight wavenumbers
    k_parallel (h/Mpc) for the HI 21 cm line at the given redshift(s) and
    cosmology.
    Inputs:
    redshift  [scalar, list or numpy array] redshift(s). Must be non-negative.
    cosmo     [instance of an astropy cosmology class] An instance of class
              FLRW or default_cosmology from the astropy cosmology module.
              Default is a flat Lambda-CDM cosmology with Omega_m=0.27 and
              H0=100 km/s/Mpc.
    Outputs:
    Jacobian d(k_parallel)/d(eta), same shape as redshift.
    ----------------------------------------------------------------------------
    """
    if not isinstance(redshift, (int, float, list, NP.ndarray)):
        raise TypeError('redshift must be a scalar, list or numpy array')
    z = NP.asarray(redshift)
    if NP.any(z < 0.0):
        raise ValueError('redshift(s) must be non-negative')
    if not isinstance(cosmo, (CP.FLRW, CP.default_cosmology)):
        raise TypeError('Input cosmology must be a cosmology class defined in Astropy')
    # Factor of 1e3 converts H0 from km/s/Mpc to m/s/Mpc so units cancel with c
    jacobian = 2 * NP.pi * cosmo.H0.value * CNST.rest_freq_HI * cosmo.efunc(z) / FCNST.c / (1+z)**2 * 1e3
    return jacobian
################################################################################
def beam3Dvol(beam, freqs, freq_wts=None, hemisphere=True):
    """
    ----------------------------------------------------------------------------
    Compute 3D volume relevant for power spectrum given an antenna power
    pattern. It is estimated by summing square of the beam in angular and
    frequency coordinates and in units of "Sr Hz".
    Inputs:
    beam        [numpy array] Antenna power pattern with peak normalized to
                unity. It can be of shape (npix x nchan) or (npix x 1) or
                (npix,). npix must be a HEALPix compatible value. nchan is the
                number of frequency channels, same as the size of input freqs.
                If it is of shape (npix x 1) or (npix,), the beam will be
                assumed to be identical for all frequency channels.
    freqs       [list or numpy array] Frequency channels (in Hz) of size nchan
    freq_wts    [numpy array] Frequency weights to be applied to the
                beam. Must be of shape (nchan,) or (nwin, nchan)
    Keyword Inputs:
    hemisphere  [boolean] If set to True (default), the 3D volume will be
                estimated using the upper hemisphere. If False, the full sphere
                is used.
    Output:
    The product Omega x bandwidth (in Sr Hz) computed using the integral of
    squared power pattern. It is of shape (nwin,)
    ----------------------------------------------------------------------------
    """
    try:
        beam, freqs
    except NameError:
        raise NameError('Both inputs beam and freqs must be specified')
    if not isinstance(beam, NP.ndarray):
        raise TypeError('Input beam must be a numpy array')
    if not isinstance(freqs, (list, NP.ndarray)):
        raise TypeError('Input freqs must be a list or numpy array')
    # NOTE: NP.float was removed in NumPy >= 1.24; use the builtin float dtype
    freqs = NP.asarray(freqs, dtype=float).reshape(-1)
    if freqs.size < 2:
        raise ValueError('Input freqs does not have enough elements to determine frequency resolution')
    if beam.ndim > 2:
        raise ValueError('Invalid dimensions for beam')
    elif beam.ndim == 2:
        if beam.shape[1] != 1:
            if beam.shape[1] != freqs.size:
                raise ValueError('Dimensions of beam do not match the number of frequency channels')
    elif beam.ndim == 1:
        beam = beam.reshape(-1,1)
    else:
        raise ValueError('Invalid dimensions for beam')
    if freq_wts is not None:
        if not isinstance(freq_wts, NP.ndarray):
            raise TypeError('Input freq_wts must be a numpy array')
        if freq_wts.ndim == 1:
            freq_wts = freq_wts.reshape(1,-1)
        elif freq_wts.ndim > 2:
            raise ValueError('Input freq_wts must be of shape nwin x nchan')
        freq_wts = NP.asarray(freq_wts, dtype=float)
        # Check compatibility explicitly; previously an incompatible size
        # surfaced as a raw numpy reshape error before this ValueError
        if freq_wts.shape[1] != freqs.size:
            raise ValueError('Input freq_wts does not have shape compatible with freqs')
    else:
        freq_wts = NP.ones(freqs.size, dtype=float).reshape(1,-1)
    eps = 1e-10
    if beam.max() > 1.0+eps:
        raise ValueError('Input beam maximum exceeds unity. Input beam should be normalized to peak of unity')
    nside = HP.npix2nside(beam.shape[0])
    domega = HP.nside2pixarea(nside, degrees=False)
    df = freqs[1] - freqs[0]
    bw = df * freqs.size
    # Broadcast to (npix, nwin, nchan) so each window weighting is applied
    weighted_beam = beam[:,NP.newaxis,:] * freq_wts[NP.newaxis,:,:]
    theta, phi = HP.pix2ang(nside, NP.arange(beam.shape[0]))
    if hemisphere:
        ind, = NP.where(theta <= NP.pi/2)   # Select upper hemisphere
    else:
        ind = NP.arange(beam.shape[0])
    omega_bw = domega * df * NP.nansum(weighted_beam[ind,:,:]**2, axis=(0,2))
    if NP.any(omega_bw > 4*NP.pi*bw):
        raise ValueError('3D volume estimated from beam exceeds the upper limit. Check normalization of the input beam')
    return omega_bw
################################################################################
class DelaySpectrum(object):
"""
----------------------------------------------------------------------------
Class to manage delay spectrum information on a multi-element interferometer
array.
Attributes:
ia [instance of class InterferometerArray] An instance of class
InterferometerArray that contains the results of the simulated
interferometer visibilities
bp [numpy array] Bandpass weights of size n_baselines x nchan x
n_acc, where n_acc is the number of accumulations in the
observation, nchan is the number of frequency channels, and
n_baselines is the number of baselines
bp_wts [numpy array] Additional weighting to be applied to the bandpass
shapes during the application of the member function
delay_transform(). Same size as attribute bp.
f [list or numpy vector] frequency channels in Hz
cc_freq [list or numpy vector] frequency channels in Hz associated with
clean components of delay spectrum. Same size as cc_lags. This
computed inside member function delayClean()
df [scalar] Frequency resolution (in Hz)
lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as channels. This is
computed in member function delay_transform().
cc_lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as cc_freq. This is computed in
member function delayClean().
lag_kernel [numpy array] Inverse Fourier Transform of the frequency
bandpass shape. In other words, it is the impulse response
corresponding to frequency bandpass. Same size as attributes
bp and bp_wts. It is initialized in __init__() member function
but effectively computed in member functions delay_transform()
and delayClean()
cc_lag_kernel
[numpy array] Inverse Fourier Transform of the frequency
bandpass shape. In other words, it is the impulse response
corresponding to frequency bandpass shape used in complex delay
clean routine. It is initialized in __init__() member function
but effectively computed in member function delayClean()
n_acc [scalar] Number of accumulations
horizon_delay_limits
[numpy array] NxMx2 numpy array denoting the neagtive and
positive horizon delay limits where N is the number of
timestamps, M is the number of baselines. The 0 index in the
third dimenstion denotes the negative horizon delay limit while
the 1 index denotes the positive horizon delay limit
skyvis_lag [numpy array] Complex visibility due to sky emission (in Jy Hz or
K Hz) along the delay axis for each interferometer obtained by
FFT of skyvis_freq along frequency axis. Same size as vis_freq.
Created in the member function delay_transform(). Read its
docstring for more details. Same dimensions as skyvis_freq
vis_lag [numpy array] The simulated complex visibility (in Jy Hz or K Hz)
along delay axis for each interferometer obtained by FFT of
vis_freq along frequency axis. Same size as vis_noise_lag and
skyis_lag. It is evaluated in member function delay_transform().
vis_noise_lag
[numpy array] Complex visibility noise (in Jy Hz or K Hz) along
delay axis for each interferometer generated using an FFT of
vis_noise_freq along frequency axis. Same size as vis_noise_freq.
Created in the member function delay_transform(). Read its
docstring for more details.
cc_skyvis_lag
[numpy array] Complex cleaned visibility delay spectra (in
Jy Hz or K Hz) of noiseless simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst
cc_skyvis_res_lag
[numpy array] Complex residuals from cleaned visibility delay
spectra (in Jy Hz or K Hz) of noiseless simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst
cc_skyvis_net_lag
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals (in Jy Hz or K Hz) of noiseless simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst. cc_skyvis_net_lag = cc_skyvis_lag +
cc_skyvis_res_lag
cc_vis_lag
[numpy array] Complex cleaned visibility delay spectra (in
Jy Hz or K Hz) of noisy simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst
cc_vis_res_lag
[numpy array] Complex residuals from cleaned visibility delay
spectra (in Jy Hz or K Hz) of noisy simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst
cc_vis_net_lag
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals (in Jy Hz or K Hz) of noisy simulated sky
visibilities for each baseline at each LST. Size is
nbl x nlags x nlst. cc_vis_net_lag = cc_vis_lag +
cc_vis_res_lag
cc_skyvis_freq
[numpy array] Complex cleaned visibility delay spectra
transformed to frequency domain (in Jy or K.Sr) obtained from
noiseless simulated sky visibilities for each baseline at each
LST. Size is nbl x nlags x nlst
cc_skyvis_res_freq
[numpy array] Complex residuals from cleaned visibility delay
spectra transformed to frequency domain (in Jy or K.Sr) obtained
from noiseless simulated sky visibilities for each baseline at
each LST. Size is nbl x nlags x nlst
cc_skyvis_net_freq
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals transformed to frequency domain (in Jy or K.Sr)
obtained from noiseless simulated sky visibilities for each
baseline at each LST. Size is nbl x nlags x nlst.
cc_skyvis_net_freq = cc_skyvis_freq + cc_skyvis_res_freq
cc_vis_freq
[numpy array] Complex cleaned visibility delay spectra
transformed to frequency domain (in Jy or K.Sr) obtained from
noisy simulated sky visibilities for each baseline at each LST.
Size is nbl x nlags x nlst
cc_vis_res_freq
[numpy array] Complex residuals from cleaned visibility delay
spectra transformed to frequency domain (in Jy or K.Sr) of noisy
simulated sky visibilities for each baseline at each LST. Size
is nbl x nlags x nlst
cc_vis_net_freq
[numpy array] Sum of complex cleaned visibility delay spectra
and residuals transformed to frequency domain (in Jy or K.Sr)
obtained from noisy simulated sky visibilities for each baseline
at each LST. Size is nbl x nlags x nlst.
cc_vis_net_freq = cc_vis_freq + cc_vis_res_freq
clean_window_buffer
[scalar] number of inverse bandwidths to extend beyond the
horizon delay limit to include in the CLEAN deconvolution.
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. After the delay transform,
the transformed visibilities are downsampled by a factor of
1+pad. If a negative value is specified, delay transform
will be performed with no padding
subband_delay_spectra
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Under each of these keys is information about delay
spectra of different frequency sub-bands (n_win in number) in the
form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'bpcorrect' [boolean] If True (default), correct for frequency
weights that were applied during the original
delay transform using which the delay CLEAN was
done. This would flatten the bandpass after delay
CLEAN. If False, do not apply the correction,
namely, inverse of bandpass weights. This applies
only CLEAned visibilities under the 'cc' key and
hence is present only if the top level key is 'cc'
and absent for key 'sim'
'npad'      [scalar] Number of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nchan+npad where npad is the number of
frequency channels padded specified under the key
'npad'. It roughly corresponds to k_parallel.
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated.
'skyvis_lag'
[numpy array] subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on
whether the top level key is 'cc' or 'sim'
respectively, after applying the frequency weights
under the key 'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_lag' [numpy array] subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_noise_lag'
[numpy array] subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is
'sim' and absent for 'cc'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'skyvis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'vis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'skyvis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'vis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
subband_delay_spectra_resampled
[dictionary] Very similar to the attribute
subband_delay_spectra except now it has been resampled along
delay axis to contain usually only independent delay bins. It
contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Under each of these keys is information about delay
spectra of different frequency sub-bands (n_win in number) after
resampling to independent number of delay bins in the
form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins. It roughly corresponds to
k_parallel.
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
usually approximately inverse of the effective
bandwidth of the subband
'skyvis_lag'
[numpy array] subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on
whether the top level key is 'cc' or 'sim'
respectively, after applying the frequency weights
under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_lag' [numpy array] subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_noise_lag'
[numpy array] subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is
'sim' and absent for 'cc'. It is of size
n_bl x n_win x nlags x n_t.
'skyvis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'vis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'skyvis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'vis_net_lag'
[numpy array] subband delay spectra of sum of
residuals and clean components
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present
for top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
Member functions:
__init__() Initializes an instance of class DelaySpectrum
delay_transform()
Transforms the visibilities from frequency axis onto
delay (time) axis using an IFFT. This is performed for
noiseless sky visibilities, thermal noise in visibilities,
and observed visibilities.
delay_transform_allruns()
Transforms the visibilities of multiple runs from frequency
axis onto delay (time) axis using an IFFT.
clean() Transforms the visibilities from frequency axis onto delay
(time) axis using an IFFT and deconvolves the delay transform
quantities along the delay axis. This is performed for noiseless
sky visibilities, thermal noise in visibilities, and observed
visibilities.
delayClean()
Transforms the visibilities from frequency axis onto delay
(time) axis using an IFFT and deconvolves the delay transform
quantities along the delay axis. This is performed for noiseless
sky visibilities, thermal noise in visibilities, and observed
visibilities. This calls an in-house module complex1dClean
instead of the clean routine in AIPY module. It can utilize
parallelization
subband_delay_transform()
Computes delay transform on multiple frequency sub-bands with
specified weights
subband_delay_transform_allruns()
Computes delay transform on multiple frequency sub-bands with
specified weights for multiple realizations of visibilities
subband_delay_transform_closure_phase()
Computes delay transform of closure phases on antenna triplets
on multiple frequency sub-bands with specified weights
get_horizon_delay_limits()
Estimates the delay envelope determined by the sky horizon
for the baseline(s) for the phase centers
set_horizon_delay_limits()
Estimates the delay envelope determined by the sky horizon for
the baseline(s) for the phase centers of the DelaySpectrum
instance. No output is returned. Uses the member function
get_horizon_delay_limits()
save() Saves the interferometer array delay spectrum information to
disk.
----------------------------------------------------------------------------
"""
def __init__(self, interferometer_array=None, init_file=None):
"""
------------------------------------------------------------------------
Intialize the DelaySpectrum class which manages information on delay
spectrum of a multi-element interferometer.
Class attributes initialized are:
f, bp, bp_wts, df, lags, skyvis_lag, vis_lag, n_acc, vis_noise_lag, ia,
pad, lag_kernel, horizon_delay_limits, cc_skyvis_lag, cc_skyvis_res_lag,
cc_skyvis_net_lag, cc_vis_lag, cc_vis_res_lag, cc_vis_net_lag,
cc_skyvis_freq, cc_skyvis_res_freq, cc_sktvis_net_freq, cc_vis_freq,
cc_vis_res_freq, cc_vis_net_freq, clean_window_buffer, cc_freq, cc_lags,
cc_lag_kernel, subband_delay_spectra, subband_delay_spectra_resampled
Read docstring of class DelaySpectrum for details on these
attributes.
Input(s):
interferometer_array
[instance of class InterferometerArray] An instance of
class InterferometerArray from which certain attributes
will be obtained and used
init_file [string] full path to filename in FITS format containing
delay spectrum information of interferometer array
Other input parameters have their usual meanings. Read the docstring of
class DelaySpectrum for details on these inputs.
------------------------------------------------------------------------
"""
argument_init = False
init_file_success = False
if init_file is not None:
try:
hdulist = fits.open(init_file)
except IOError:
argument_init = True
print('\tinit_file provided but could not open the initialization file. Attempting to initialize with input parameters...')
extnames = [hdulist[i].header['EXTNAME'] for i in xrange(1,len(hdulist))]
try:
self.df = hdulist[0].header['freq_resolution']
except KeyError:
hdulist.close()
raise KeyError('Keyword "freq_resolution" not found in header')
try:
self.n_acc = hdulist[0].header['N_ACC']
except KeyError:
hdulist.close()
raise KeyError('Keyword "N_ACC" not found in header')
try:
self.pad = hdulist[0].header['PAD']
except KeyError:
hdulist.close()
raise KeyError('Keyword "PAD" not found in header')
try:
self.clean_window_buffer = hdulist[0].header['DBUFFER']
except KeyError:
hdulist.close()
raise KeyError('Keyword "DBUFFER" not found in header')
try:
iarray_init_file = hdulist[0].header['IARRAY']
except KeyError:
hdulist.close()
raise KeyError('Keyword "IARRAY" not found in header')
self.ia = RI.InterferometerArray(None, None, None, init_file=iarray_init_file)
# if 'SPECTRAL INFO' not in extnames:
# raise KeyError('No extension table found containing spectral information.')
# else:
# self.f = hdulist['SPECTRAL INFO'].data['frequency']
# try:
# self.lags = hdulist['SPECTRAL INFO'].data['lag']
# except KeyError:
# self.lags = None
try:
self.f = hdulist['FREQUENCIES'].data
except KeyError:
hdulist.close()
raise KeyError('Extension "FREQUENCIES" not found in header')
self.lags = None
if 'LAGS' in extnames:
self.lags = hdulist['LAGS'].data
self.cc_lags = None
if 'CLEAN LAGS' in extnames:
self.cc_lags = hdulist['CLEAN LAGS'].data
self.cc_freq = None
if 'CLEAN FREQUENCIES' in extnames:
self.cc_freq = hdulist['CLEAN FREQUENCIES'].data
if 'BANDPASS' in extnames:
self.bp = hdulist['BANDPASS'].data
else:
raise KeyError('Extension named "BANDPASS" not found in init_file.')
if 'BANDPASS WEIGHTS' in extnames:
self.bp_wts = hdulist['BANDPASS WEIGHTS'].data
else:
self.bp_wts = NP.ones_like(self.bp)
if 'HORIZON LIMITS' in extnames:
self.horizon_delay_limits = hdulist['HORIZON LIMITS'].data
else:
self.set_horizon_delay_limits()
self.lag_kernel = None
if 'LAG KERNEL REAL' in extnames:
self.lag_kernel = hdulist['LAG KERNEL REAL'].data
if 'LAG KERNEL IMAG' in extnames:
self.lag_kernel = self.lag_kernel.astype(NP.complex)
self.lag_kernel += 1j * hdulist['LAG KERNEL IMAG'].data
self.cc_lag_kernel = None
if 'CLEAN LAG KERNEL REAL' in extnames:
self.cc_lag_kernel = hdulist['CLEAN LAG KERNEL REAL'].data
if 'CLEAN LAG KERNEL IMAG' in extnames:
self.cc_lag_kernel = self.cc_lag_kernel.astype(NP.complex)
self.cc_lag_kernel += 1j * hdulist['CLEAN LAG KERNEL IMAG'].data
self.skyvis_lag = None
if 'NOISELESS DELAY SPECTRA REAL' in extnames:
self.skyvis_lag = hdulist['NOISELESS DELAY SPECTRA REAL'].data
if 'NOISELESS DELAY SPECTRA IMAG' in extnames:
self.skyvis_lag = self.skyvis_lag.astype(NP.complex)
self.skyvis_lag += 1j * hdulist['NOISELESS DELAY SPECTRA IMAG'].data
self.vis_lag = None
if 'NOISY DELAY SPECTRA REAL' in extnames:
self.vis_lag = hdulist['NOISY DELAY SPECTRA REAL'].data
if 'NOISY DELAY SPECTRA IMAG' in extnames:
self.vis_lag = self.vis_lag.astype(NP.complex)
self.vis_lag += 1j * hdulist['NOISY DELAY SPECTRA IMAG'].data
self.vis_noise_lag = None
if 'DELAY SPECTRA NOISE REAL' in extnames:
self.vis_noise_lag = hdulist['DELAY SPECTRA NOISE REAL'].data
if 'DELAY SPECTRA NOISE IMAG' in extnames:
self.vis_noise_lag = self.vis_noise_lag.astype(NP.complex)
self.vis_noise_lag += 1j * hdulist['DELAY SPECTRA NOISE IMAG'].data
self.cc_skyvis_lag = None
if 'CLEAN NOISELESS DELAY SPECTRA REAL' in extnames:
self.cc_skyvis_lag = hdulist['CLEAN NOISELESS DELAY SPECTRA REAL'].data
if 'CLEAN NOISELESS DELAY SPECTRA IMAG' in extnames:
self.cc_skyvis_lag = self.cc_skyvis_lag.astype(NP.complex)
self.cc_skyvis_lag += 1j * hdulist['CLEAN NOISELESS DELAY SPECTRA IMAG'].data
self.cc_vis_lag = None
if 'CLEAN NOISY DELAY SPECTRA REAL' in extnames:
self.cc_vis_lag = hdulist['CLEAN NOISY DELAY SPECTRA REAL'].data
if 'CLEAN NOISY DELAY SPECTRA IMAG' in extnames:
self.cc_vis_lag = self.cc_vis_lag.astype(NP.complex)
self.cc_vis_lag += 1j * hdulist['CLEAN NOISY DELAY SPECTRA IMAG'].data
self.cc_skyvis_res_lag = None
if 'CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL' in extnames:
self.cc_skyvis_res_lag = hdulist['CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL'].data
if 'CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG' in extnames:
self.cc_skyvis_res_lag = self.cc_skyvis_res_lag.astype(NP.complex)
self.cc_skyvis_res_lag += 1j * hdulist['CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG'].data
self.cc_vis_res_lag = None
if 'CLEAN NOISY DELAY SPECTRA RESIDUALS REAL' in extnames:
self.cc_vis_res_lag = hdulist['CLEAN NOISY DELAY SPECTRA RESIDUALS REAL'].data
if 'CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG' in extnames:
self.cc_vis_res_lag = self.cc_vis_res_lag.astype(NP.complex)
self.cc_vis_res_lag += 1j * hdulist['CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG'].data
self.cc_skyvis_freq = None
if 'CLEAN NOISELESS VISIBILITIES REAL' in extnames:
self.cc_skyvis_freq = hdulist['CLEAN NOISELESS VISIBILITIES REAL'].data
if 'CLEAN NOISELESS VISIBILITIES IMAG' in extnames:
self.cc_skyvis_freq = self.cc_skyvis_freq.astype(NP.complex)
self.cc_skyvis_freq += 1j * hdulist['CLEAN NOISELESS VISIBILITIES IMAG'].data
self.cc_vis_freq = None
if 'CLEAN NOISY VISIBILITIES REAL' in extnames:
self.cc_vis_freq = hdulist['CLEAN NOISY VISIBILITIES REAL'].data
if 'CLEAN NOISY VISIBILITIES IMAG' in extnames:
self.cc_vis_freq = self.cc_vis_freq.astype(NP.complex)
self.cc_vis_freq += 1j * hdulist['CLEAN NOISY VISIBILITIES IMAG'].data
self.cc_skyvis_res_freq = None
if 'CLEAN NOISELESS VISIBILITIES RESIDUALS REAL' in extnames:
self.cc_skyvis_res_freq = hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS REAL'].data
if 'CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG' in extnames:
self.cc_skyvis_res_freq = self.cc_skyvis_res_freq.astype(NP.complex)
self.cc_skyvis_res_freq += 1j * hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG'].data
self.cc_vis_res_freq = None
if 'CLEAN NOISY VISIBILITIES RESIDUALS REAL' in extnames:
self.cc_vis_res_freq = hdulist['CLEAN NOISY VISIBILITIES RESIDUALS REAL'].data
if 'CLEAN NOISY VISIBILITIES RESIDUALS IMAG' in extnames:
self.cc_vis_res_freq = self.cc_vis_res_freq.astype(NP.complex)
self.cc_vis_res_freq += 1j * hdulist['CLEAN NOISY VISIBILITIES RESIDUALS IMAG'].data
self.cc_skyvis_net_lag = None
if (self.cc_skyvis_lag is not None) and (self.cc_skyvis_res_lag is not None):
self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
self.cc_vis_net_lag = None
if (self.cc_vis_lag is not None) and (self.cc_vis_res_lag is not None):
self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
self.cc_skyvis_net_freq = None
if (self.cc_skyvis_freq is not None) and (self.cc_skyvis_res_freq is not None):
self.cc_skyvis_net_freq = self.cc_skyvis_freq + self.cc_skyvis_res_freq
self.cc_vis_net_freq = None
if (self.cc_vis_freq is not None) and (self.cc_vis_res_freq is not None):
self.cc_vis_net_freq = self.cc_vis_freq + self.cc_vis_res_freq
self.subband_delay_spectra = {}
self.subband_delay_spectra_resampled = {}
if 'SBDS' in hdulist[0].header:
for key in ['cc', 'sim']:
if '{0}-SBDS'.format(key) in hdulist[0].header:
self.subband_delay_spectra[key] = {}
self.subband_delay_spectra[key]['shape'] = hdulist[0].header['{0}-SBDS-WSHAPE'.format(key)]
if key == 'cc':
self.subband_delay_spectra[key]['bpcorrect'] = bool(hdulist[0].header['{0}-SBDS-BPCORR'.format(key)])
self.subband_delay_spectra[key]['npad'] = hdulist[0].header['{0}-SBDS-NPAD'.format(key)]
self.subband_delay_spectra[key]['freq_center'] = hdulist['{0}-SBDS-F0'.format(key)].data
self.subband_delay_spectra[key]['freq_wts'] = hdulist['{0}-SBDS-FWTS'.format(key)].data
self.subband_delay_spectra[key]['bw_eff'] = hdulist['{0}-SBDS-BWEFF'.format(key)].data
self.subband_delay_spectra[key]['lags'] = hdulist['{0}-SBDS-LAGS'.format(key)].data
self.subband_delay_spectra[key]['lag_kernel'] = hdulist['{0}-SBDS-LAGKERN-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-LAGKERN-IMAG'.format(key)].data
self.subband_delay_spectra[key]['lag_corr_length'] = hdulist['{0}-SBDS-LAGCORR'.format(key)].data
self.subband_delay_spectra[key]['skyvis_lag'] = hdulist['{0}-SBDS-SKYVISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-SKYVISLAG-IMAG'.format(key)].data
self.subband_delay_spectra[key]['vis_lag'] = hdulist['{0}-SBDS-VISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-VISLAG-IMAG'.format(key)].data
if key == 'sim':
self.subband_delay_spectra[key]['vis_noise_lag'] = hdulist['{0}-SBDS-NOISELAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-NOISELAG-IMAG'.format(key)].data
if key == 'cc':
self.subband_delay_spectra[key]['skyvis_res_lag'] = hdulist['{0}-SBDS-SKYVISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-SKYVISRESLAG-IMAG'.format(key)].data
self.subband_delay_spectra[key]['vis_res_lag'] = hdulist['{0}-SBDS-VISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDS-VISRESLAG-IMAG'.format(key)].data
self.subband_delay_spectra[key]['skyvis_net_lag'] = self.subband_delay_spectra[key]['skyvis_lag'] + self.subband_delay_spectra[key]['skyvis_res_lag']
self.subband_delay_spectra[key]['vis_net_lag'] = self.subband_delay_spectra[key]['vis_lag'] + self.subband_delay_spectra[key]['vis_res_lag']
if 'SBDS-RS' in hdulist[0].header:
for key in ['cc', 'sim']:
if '{0}-SBDS-RS'.format(key) in hdulist[0].header:
self.subband_delay_spectra_resampled[key] = {}
self.subband_delay_spectra_resampled[key]['freq_center'] = hdulist['{0}-SBDSRS-F0'.format(key)].data
self.subband_delay_spectra_resampled[key]['bw_eff'] = hdulist['{0}-SBDSRS-BWEFF'.format(key)].data
self.subband_delay_spectra_resampled[key]['lags'] = hdulist['{0}-SBDSRS-LAGS'.format(key)].data
self.subband_delay_spectra_resampled[key]['lag_kernel'] = hdulist['{0}-SBDSRS-LAGKERN-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-LAGKERN-IMAG'.format(key)].data
self.subband_delay_spectra_resampled[key]['lag_corr_length'] = hdulist['{0}-SBDSRS-LAGCORR'.format(key)].data
self.subband_delay_spectra_resampled[key]['skyvis_lag'] = hdulist['{0}-SBDSRS-SKYVISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-SKYVISLAG-IMAG'.format(key)].data
self.subband_delay_spectra_resampled[key]['vis_lag'] = hdulist['{0}-SBDSRS-VISLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-VISLAG-IMAG'.format(key)].data
if key == 'sim':
self.subband_delay_spectra_resampled[key]['vis_noise_lag'] = hdulist['{0}-SBDSRS-NOISELAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-NOISELAG-IMAG'.format(key)].data
if key == 'cc':
self.subband_delay_spectra_resampled[key]['skyvis_res_lag'] = hdulist['{0}-SBDSRS-SKYVISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-SKYVISRESLAG-IMAG'.format(key)].data
self.subband_delay_spectra_resampled[key]['vis_res_lag'] = hdulist['{0}-SBDSRS-VISRESLAG-REAL'.format(key)].data + 1j * hdulist['{0}-SBDSRS-VISRESLAG-IMAG'.format(key)].data
self.subband_delay_spectra_resampled[key]['skyvis_net_lag'] = self.subband_delay_spectra_resampled[key]['skyvis_lag'] + self.subband_delay_spectra_resampled[key]['skyvis_res_lag']
self.subband_delay_spectra_resampled[key]['vis_net_lag'] = self.subband_delay_spectra_resampled[key]['vis_lag'] + self.subband_delay_spectra_resampled[key]['vis_res_lag']
hdulist.close()
init_file_success = True
return
else:
argument_init = True
if (not argument_init) and (not init_file_success):
raise ValueError('Initialization failed with the use of init_file.')
if not isinstance(interferometer_array, RI.InterferometerArray):
raise TypeError('Input interferometer_array must be an instance of class InterferometerArray')
self.ia = interferometer_array
self.f = interferometer_array.channels
self.df = interferometer_array.freq_resolution
self.n_acc = interferometer_array.n_acc
self.horizon_delay_limits = self.get_horizon_delay_limits()
self.bp = interferometer_array.bp # Inherent bandpass shape
self.bp_wts = interferometer_array.bp_wts # Additional bandpass weights
self.pad = 0.0
self.lags = DSP.spectral_axis(self.f.size, delx=self.df, use_real=False, shift=True)
self.lag_kernel = None
self.skyvis_lag = None
self.vis_lag = None
self.vis_noise_lag = None
self.clean_window_buffer = 1.0
self.cc_lags = None
self.cc_freq = None
self.cc_lag_kernel = None
self.cc_skyvis_lag = None
self.cc_skyvis_res_lag = None
self.cc_vis_lag = None
self.cc_vis_res_lag = None
self.cc_skyvis_net_lag = None
self.cc_vis_net_lag = None
self.cc_skyvis_freq = None
self.cc_skyvis_res_freq = None
self.cc_vis_freq = None
self.cc_vis_res_freq = None
self.cc_skyvis_net_freq = None
self.cc_vis_net_freq = None
self.subband_delay_spectra = {}
self.subband_delay_spectra_resampled = {}
#############################################################################
def delay_transform(self, pad=1.0, freq_wts=None, downsample=True,
action=None, verbose=True):
"""
------------------------------------------------------------------------
Transforms the visibilities from frequency axis onto delay (time) axis
using an IFFT. This is performed for noiseless sky visibilities, thermal
noise in visibilities, and observed visibilities.
Inputs:
pad [scalar] Non-negative scalar indicating padding fraction
relative to the number of frequency channels. For e.g., a
pad of 1.0 pads the frequency axis with zeros of the same
width as the number of channels. After the delay transform,
the transformed visibilities are downsampled by a factor of
1+pad. If a negative value is specified, delay transform
will be performed with no padding
freq_wts [numpy vector or array] window shaping to be applied before
computing delay transform. It can either be a vector or size
equal to the number of channels (which will be applied to all
time instances for all baselines), or a nchan x n_snapshots
numpy array which will be applied to all baselines, or a
n_baselines x nchan numpy array which will be applied to all
timestamps, or a n_baselines x nchan x n_snapshots numpy
array. Default (None) will not apply windowing and only the
inherent bandpass will be used.
downsample [boolean] If set to True (default), the delay transform
quantities will be downsampled by exactly the same factor
that was used in padding. For instance, if pad is set to
1.0, the downsampling will be by a factor of 2. If set to
False, no downsampling will be done even if the original
quantities were padded
action [boolean] If set to None (default), just return the delay-
transformed quantities. If set to 'store', these quantities
will be stored as internal attributes
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
------------------------------------------------------------------------
"""
if verbose:
print('Preparing to compute delay transform...\n\tChecking input parameters for compatibility...')
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if freq_wts is not None:
if freq_wts.size == self.f.size:
freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
elif freq_wts.size == self.f.size * self.n_acc:
freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
else:
raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
else:
freq_wts = self.bp_wts
if verbose:
print('\tFrequency window weights assigned.')
if not isinstance(downsample, bool):
raise TypeError('Input downsample must be of boolean type')
if verbose:
print('\tInput parameters have been verified to be compatible.\n\tProceeding to compute delay transform.')
result = {}
result['freq_wts'] = freq_wts
result['pad'] = pad
result['lags'] = DSP.spectral_axis(int(self.f.size*(1+pad)), delx=self.df, use_real=False, shift=True)
if pad == 0.0:
result['vis_lag'] = DSP.FT1D(self.ia.vis_freq * self.bp * freq_wts, ax=1, inverse=True, use_real=False, shift=True) * self.f.size * self.df
result['skyvis_lag'] = DSP.FT1D(self.ia.skyvis_freq * self.bp * freq_wts, ax=1, inverse=True, use_real=False, shift=True) * self.f.size * self.df
result['vis_noise_lag'] = DSP.FT1D(self.ia.vis_noise_freq * self.bp * freq_wts, ax=1, inverse=True, use_real=False, shift=True) * self.f.size * self.df
result['lag_kernel'] = DSP.FT1D(self.bp * freq_wts, ax=1, inverse=True, use_real=False, shift=True) * self.f.size * self.df
if verbose:
print('\tDelay transform computed without padding.')
else:
npad = int(self.f.size * pad)
result['vis_lag'] = DSP.FT1D(NP.pad(self.ia.vis_freq * self.bp * freq_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['skyvis_lag'] = DSP.FT1D(NP.pad(self.ia.skyvis_freq * self.bp * freq_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['vis_noise_lag'] = DSP.FT1D(NP.pad(self.ia.vis_noise_freq * self.bp * freq_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['lag_kernel'] = DSP.FT1D(NP.pad(self.bp * freq_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
if verbose:
print('\tDelay transform computed with padding fraction {0:.1f}'.format(pad))
if downsample:
result['vis_lag'] = DSP.downsampler(result['vis_lag'], 1+pad, axis=1)
result['skyvis_lag'] = DSP.downsampler(result['skyvis_lag'], 1+pad, axis=1)
result['vis_noise_lag'] = DSP.downsampler(result['vis_noise_lag'], 1+pad, axis=1)
result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], 1+pad, axis=1)
result['lags'] = DSP.downsampler(result['lags'], 1+pad)
result['lags'] = result['lags'].flatten()
if verbose:
print('\tDelay transform products downsampled by factor of {0:.1f}'.format(1+pad))
print('delay_transform() completed successfully.')
if action == 'store':
self.pad = pad
self.lags = result['lags']
self.bp_wts = freq_wts
self.vis_lag = result['vis_lag']
self.skyvis_lag = result['skyvis_lag']
self.vis_noise_lag = result['vis_noise_lag']
self.lag_kernel = result['lag_kernel']
return result
#############################################################################
# def clean(self, pad=1.0, freq_wts=None, clean_window_buffer=1.0,
# verbose=True):
# """
# ------------------------------------------------------------------------
# TO BE DEPRECATED!!! USE MEMBER FUNCTION delayClean()
# Transforms the visibilities from frequency axis onto delay (time) axis
# using an IFFT and deconvolves the delay transform quantities along the
# delay axis. This is performed for noiseless sky visibilities, thermal
# noise in visibilities, and observed visibilities.
# Inputs:
# pad [scalar] Non-negative scalar indicating padding fraction
# relative to the number of frequency channels. For e.g., a
# pad of 1.0 pads the frequency axis with zeros of the same
# width as the number of channels. If a negative value is
# specified, delay transform will be performed with no padding
# freq_wts [numpy vector or array] window shaping to be applied before
# computing delay transform. It can either be a vector or size
# equal to the number of channels (which will be applied to all
# time instances for all baselines), or a nchan x n_snapshots
# numpy array which will be applied to all baselines, or a
# n_baselines x nchan numpy array which will be applied to all
# timestamps, or a n_baselines x nchan x n_snapshots numpy
# array. Default (None) will not apply windowing and only the
# inherent bandpass will be used.
# verbose [boolean] If set to True (default), print diagnostic and
# progress messages. If set to False, no such messages are
# printed.
# ------------------------------------------------------------------------
# """
# if not isinstance(pad, (int, float)):
# raise TypeError('pad fraction must be a scalar value.')
# if pad < 0.0:
# pad = 0.0
# if verbose:
# print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
# if freq_wts is not None:
# if freq_wts.size == self.f.size:
# freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
# elif freq_wts.size == self.f.size * self.n_acc:
# freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
# elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
# freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
# elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
# freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
# else:
#                 raise ValueError('window shape dimensions incompatible with number of channels and/or number of timestamps.')
# self.bp_wts = freq_wts
# if verbose:
# print('\tFrequency window weights assigned.')
# bw = self.df * self.f.size
# pc = self.ia.phase_center
# pc_coords = self.ia.phase_center_coords
# if pc_coords == 'hadec':
# pc_altaz = GEOM.hadec2altaz(pc, self.ia.latitude, units='degrees')
# pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')
# elif pc_coords == 'altaz':
# pc_dircos = GEOM.altaz2dircos(pc, units='degrees')
# npad = int(self.f.size * pad)
# lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=False)
# dlag = lags[1] - lags[0]
# clean_area = NP.zeros(self.f.size + npad, dtype=int)
# skyvis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.skyvis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# vis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.vis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# lag_kernel = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.bp, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
# ccomponents_noiseless = NP.zeros_like(skyvis_lag)
# ccres_noiseless = NP.zeros_like(skyvis_lag)
# ccomponents_noisy = NP.zeros_like(vis_lag)
# ccres_noisy = NP.zeros_like(vis_lag)
# for snap_iter in xrange(self.n_acc):
# progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Baselines '.format(self.ia.baselines.shape[0]), PGB.ETA()], maxval=self.ia.baselines.shape[0]).start()
# for bl_iter in xrange(self.ia.baselines.shape[0]):
# clean_area[NP.logical_and(lags <= self.horizon_delay_limits[snap_iter,bl_iter,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[snap_iter,bl_iter,0]-clean_window_buffer/bw)] = 1
# cc_noiseless, info_noiseless = _gentle_clean(skyvis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], area=clean_area, stop_if_div=False, verbose=False, autoscale=True)
# ccomponents_noiseless[bl_iter,:,snap_iter] = cc_noiseless
# ccres_noiseless[bl_iter,:,snap_iter] = info_noiseless['res']
# cc_noisy, info_noisy = _gentle_clean(vis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], area=clean_area, stop_if_div=False, verbose=False, autoscale=True)
# ccomponents_noisy[bl_iter,:,snap_iter] = cc_noisy
# ccres_noisy[bl_iter,:,snap_iter] = info_noisy['res']
# progress.update(bl_iter+1)
# progress.finish()
# deta = lags[1] - lags[0]
# cc_skyvis = NP.fft.fft(ccomponents_noiseless, axis=1) * deta
# cc_skyvis_res = NP.fft.fft(ccres_noiseless, axis=1) * deta
# cc_vis = NP.fft.fft(ccomponents_noisy, axis=1) * deta
# cc_vis_res = NP.fft.fft(ccres_noisy, axis=1) * deta
# self.skyvis_lag = NP.fft.fftshift(skyvis_lag, axes=1)
# self.vis_lag = NP.fft.fftshift(vis_lag, axes=1)
# self.lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
# self.cc_skyvis_lag = NP.fft.fftshift(ccomponents_noiseless, axes=1)
# self.cc_skyvis_res_lag = NP.fft.fftshift(ccres_noiseless, axes=1)
# self.cc_vis_lag = NP.fft.fftshift(ccomponents_noisy, axes=1)
# self.cc_vis_res_lag = NP.fft.fftshift(ccres_noisy, axes=1)
# self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
# self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
# self.lags = NP.fft.fftshift(lags)
# self.cc_skyvis_freq = cc_skyvis
# self.cc_skyvis_res_freq = cc_skyvis_res
# self.cc_vis_freq = cc_vis
# self.cc_vis_res_freq = cc_vis_res
# self.cc_skyvis_net_freq = cc_skyvis + cc_skyvis_res
# self.cc_vis_net_freq = cc_vis + cc_vis_res
# self.clean_window_buffer = clean_window_buffer
#############################################################################
def delay_transform_allruns(self, vis, pad=1.0, freq_wts=None,
                            downsample=True, verbose=True):
        """
        ------------------------------------------------------------------------
        Transforms the visibilities of multiple runs from frequency axis onto
        delay (time) axis using an IFFT.
        Inputs:
        vis        [numpy array] Visibilities which will be delay transformed.
                   It must be of shape (...,nbl,nchan,ntimes)
        pad        [scalar] Non-negative scalar indicating padding fraction
                   relative to the number of frequency channels. For e.g., a
                   pad of 1.0 pads the frequency axis with zeros of the same
                   width as the number of channels. After the delay transform,
                   the transformed visibilities are downsampled by a factor of
                   1+pad. If a negative value is specified, delay transform
                   will be performed with no padding
        freq_wts   [numpy vector or array] window shaping to be applied before
                   computing delay transform. It can either be a vector or size
                   equal to the number of channels (which will be applied to all
                   time instances for all baselines), or a nchan x n_snapshots
                   numpy array which will be applied to all baselines, or a
                   n_baselines x nchan numpy array which will be applied to all
                   timestamps, or a n_baselines x nchan x n_snapshots numpy
                   array or have shape identical to input vis. Default (None)
                   will not apply windowing and only the inherent bandpass will
                   be used.
        downsample [boolean] If set to True (default), the delay transform
                   quantities will be downsampled by exactly the same factor
                   that was used in padding. For instance, if pad is set to
                   1.0, the downsampling will be by a factor of 2. If set to
                   False, no downsampling will be done even if the original
                   quantities were padded
        verbose    [boolean] If set to True (default), print diagnostic and
                   progress messages. If set to False, no such messages are
                   printed.
        Output:
        Dictionary containing delay spectrum information. It contains the
        following keys and values:
        'lags'     [numpy array] lags of the subband delay spectra with or
                   without resampling. If not resampled it is of size
                   nlags=nchan+npad where npad is the number of frequency
                   channels padded specified under the key 'npad'. If
                   resampled, it is of shape nlags where nlags is the number
                   of independent delay bins
        'lag_kernel'
                   [numpy array] The delay kernel which is the result of the
                   bandpass shape and the spectral window used in determining
                   the delay spectrum. It is of shape
                   n_bl x n_win x nlags x n_t.
        'vis_lag'  [numpy array] delay spectra of visibilities, after
                   applying the frequency weights under the key 'freq_wts'. It
                   is of size n_win x (n1xn2x... n_runs dims) x n_bl x nlags x
                   x n_t.
        ------------------------------------------------------------------------
        """
        if verbose:
            print('Preparing to compute delay transform...\n\tChecking input parameters for compatibility...')
        # NOTE: the original guarded "vis" with try/except NameError; that is
        # dead code for a required positional parameter (a missing argument
        # raises TypeError at call time), so it has been removed.
        if not isinstance(vis, NP.ndarray):
            raise TypeError('Input vis must be a numpy array')
        elif vis.ndim < 3:
            raise ValueError('Input vis must be at least 3-dimensional')
        elif vis.shape[-3:] == (self.ia.baselines.shape[0], self.f.size, self.n_acc):
            if vis.ndim == 3:
                # Promote to 4D so there is always at least one leading "run" axis
                vis = vis.reshape((1,) + vis.shape)
        else:
            raise ValueError('Input vis does not have compatible shape')
        if not isinstance(pad, (int, float)):
            raise TypeError('pad fraction must be a scalar value.')
        if pad < 0.0:
            pad = 0.0
            if verbose:
                print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
        # Prefix of 1's covering the leading (run) axes so that the weights and
        # bandpass broadcast against vis of shape (..., n_bl, nchan, n_t).
        # (Built-in int replaces the NP.int alias removed in NumPy >= 1.24.)
        lead = (1,) * (vis.ndim - 3)
        if freq_wts is not None:
            if freq_wts.shape == self.f.shape:
                freq_wts = freq_wts.reshape(lead + (1, -1, 1))
            elif freq_wts.shape == (self.f.size, self.n_acc):
                freq_wts = freq_wts.reshape(lead + (1, self.f.size, self.n_acc))
            elif freq_wts.shape == (self.ia.baselines.shape[0], self.f.size):
                freq_wts = freq_wts.reshape(lead + (self.ia.baselines.shape[0], self.f.size, 1))
            elif freq_wts.shape == (self.ia.baselines.shape[0], self.f.size, self.n_acc):
                freq_wts = freq_wts.reshape(lead + (self.ia.baselines.shape[0], self.f.size, self.n_acc))
            elif freq_wts.shape != vis.shape:
                # BUGFIX: was "elif not freq_wts.shape != vis.shape:", which
                # rejected the documented vis-shaped weights and silently
                # accepted genuinely incompatible shapes.
                raise ValueError('window shape dimensions incompatible with number of channels and/or number of timestamps.')
        else:
            freq_wts = self.bp_wts.reshape(lead + self.bp_wts.shape)
        bp = self.bp.reshape(lead + self.bp.shape)
        if verbose:
            print('\tFrequency window weights assigned.')
        if not isinstance(downsample, bool):
            raise TypeError('Input downsample must be of boolean type')
        if verbose:
            print('\tInput parameters have been verified to be compatible.\n\tProceeding to compute delay transform.')
        result = {}
        result['freq_wts'] = freq_wts
        result['pad'] = pad
        result['lags'] = DSP.spectral_axis(int(self.f.size*(1+pad)), delx=self.df, use_real=False, shift=True)
        if pad == 0.0:
            result['vis_lag'] = DSP.FT1D(vis * bp * freq_wts, ax=-2, inverse=True, use_real=False, shift=True) * self.f.size * self.df
            result['lag_kernel'] = DSP.FT1D(bp * freq_wts, ax=-2, inverse=True, use_real=False, shift=True) * self.f.size * self.df
            if verbose:
                print('\tDelay transform computed without padding.')
        else:
            npad = int(self.f.size * pad)
            # Zero-pad only the frequency axis (second-to-last); leading run
            # axes and the baseline/time axes get no padding.
            pad_shape = [[0, 0] for _ in range(vis.ndim - 3)] + [[0, 0], [0, npad], [0, 0]]
            result['vis_lag'] = DSP.FT1D(NP.pad(vis * bp * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
            result['lag_kernel'] = DSP.FT1D(NP.pad(bp * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
            if verbose:
                print('\tDelay transform computed with padding fraction {0:.1f}'.format(pad))
        if downsample:
            # Undo the resolution oversampling introduced by the padding
            result['vis_lag'] = DSP.downsampler(result['vis_lag'], 1+pad, axis=-2)
            result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], 1+pad, axis=-2)
            result['lags'] = DSP.downsampler(result['lags'], 1+pad)
            result['lags'] = result['lags'].flatten()
            if verbose:
                print('\tDelay transform products downsampled by factor of {0:.1f}'.format(1+pad))
                print('delay_transform() completed successfully.')
        return result
#############################################################################
def delayClean(self, pad=1.0, freq_wts=None, clean_window_buffer=1.0,
               gain=0.1, maxiter=10000, threshold=5e-3,
               threshold_type='relative', parallel=False, nproc=None,
               verbose=True):
        """
        ------------------------------------------------------------------------
        Transforms the visibilities from frequency axis onto delay (time) axis
        using an IFFT and deconvolves the delay transform quantities along the
        delay axis. This is performed for noiseless sky visibilities, thermal
        noise in visibilities, and observed visibilities. This calls an in-house
        module complex1dClean instead of the clean routine in AIPY module. It
        can utilize parallelization
        Inputs:
        pad      [scalar] Non-negative scalar indicating padding fraction
                 relative to the number of frequency channels. For e.g., a
                 pad of 1.0 pads the frequency axis with zeros of the same
                 width as the number of channels. If a negative value is
                 specified, delay transform will be performed with no padding
        freq_wts [numpy vector or array] window shaping to be applied before
                 computing delay transform. It can either be a vector or size
                 equal to the number of channels (which will be applied to all
                 time instances for all baselines), or a nchan x n_snapshots
                 numpy array which will be applied to all baselines, or a
                 n_baselines x nchan numpy array which will be applied to all
                 timestamps, or a n_baselines x nchan x n_snapshots numpy
                 array. Default (None) will not apply windowing and only the
                 inherent bandpass will be used.
        gain     [scalar] gain factor to be applied while subtracting clean
                 component from residuals. This is the fraction of the maximum in
                 the residuals that will be subtracted. Must lie between 0 and 1.
                 A lower value will have a smoother convergence but take a longer
                 time to converge. Default=0.1
        maxiter  [scalar] maximum number of iterations for cleaning process. Will
                 terminate if the number of iterations exceed maxiter.
                 Default=10000
        threshold
                 [scalar] represents the cleaning depth either as a fraction of
                 the maximum in the input (when threshold_type is set to
                 'relative') or the absolute value (when threshold_type is set
                 to 'absolute') in same units of input down to which inp should
                 be cleaned. Value must always be positive. When threshold_type
                 is set to 'relative', threshold must lie between 0 and 1.
                 Default=5e-3 (found to work well and converge fast) assuming
                 threshold_type is set to 'relative'
        threshold_type
                 [string] represents the type of threshold specified by value in
                 input threshold. Accepted values are 'relative' and 'absolute'.
                 If set to 'relative' the threshold value is the fraction
                 (between 0 and 1) of maximum in input down to which it should
                 be cleaned. If set to 'absolute' it is the actual value down to
                 which inp should be cleaned. Default='relative'
        parallel [boolean] specifies if parallelization is to be invoked.
                 False (default) means only serial processing
        nproc    [integer] specifies number of independent processes to spawn.
                 Default = None, means automatically determines the number of
                 process cores in the system and use one less than that to
                 avoid locking the system for other processes. Applies only
                 if input parameter 'parallel' (see above) is set to True.
                 If nproc is set to a value more than the number of process
                 cores in the system, it will be reset to number of process
                 cores in the system minus one to avoid locking the system out
                 for other processes
        verbose  [boolean] If set to True (default), print diagnostic and
                 progress messages. If set to False, no such messages are
                 printed.
        ------------------------------------------------------------------------
        """
        if not isinstance(pad, (int, float)):
            raise TypeError('pad fraction must be a scalar value.')
        if pad < 0.0:
            pad = 0.0
            if verbose:
                print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
        if freq_wts is not None:
            # Broadcast the supplied window to shape (n_bl, nchan, n_t)
            if freq_wts.size == self.f.size:
                freq_wts = NP.repeat(NP.expand_dims(NP.repeat(freq_wts.reshape(1,-1), self.ia.baselines.shape[0], axis=0), axis=2), self.n_acc, axis=2)
            elif freq_wts.size == self.f.size * self.n_acc:
                freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(self.f.size, -1), axis=0), self.ia.baselines.shape[0], axis=0)
            elif freq_wts.size == self.f.size * self.ia.baselines.shape[0]:
                freq_wts = NP.repeat(NP.expand_dims(freq_wts.reshape(-1, self.f.size), axis=2), self.n_acc, axis=2)
            elif freq_wts.size == self.f.size * self.ia.baselines.shape[0] * self.n_acc:
                freq_wts = freq_wts.reshape(self.ia.baselines.shape[0], self.f.size, self.n_acc)
            else:
                raise ValueError('window shape dimensions incompatible with number of channels and/or number of tiemstamps.')
            self.bp_wts = freq_wts
            if verbose:
                print('\tFrequency window weights assigned.')
        bw = self.df * self.f.size
        # NOTE(review): pc_dircos computed below is not used anywhere in this
        # method as currently written — retained for backward compatibility.
        pc = self.ia.phase_center
        pc_coords = self.ia.phase_center_coords
        if pc_coords == 'hadec':
            pc_altaz = GEOM.hadec2altaz(pc, self.ia.latitude, units='degrees')
            pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')
        elif pc_coords == 'altaz':
            pc_dircos = GEOM.altaz2dircos(pc, units='degrees')
        npad = int(self.f.size * pad)
        lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=False)
        clean_area = NP.zeros(self.f.size + npad, dtype=int)
        # Zero-padded inverse FFT along the frequency axis (axis=1) of the
        # weighted visibilities gives the (unshifted) delay spectra
        skyvis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.skyvis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
        vis_lag = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.ia.vis_freq*self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
        lag_kernel = (npad + self.f.size) * self.df * DSP.FT1D(NP.pad(self.bp*self.bp_wts, ((0,0),(0,npad),(0,0)), mode='constant'), ax=1, inverse=True, use_real=False, shift=False)
        ccomponents_noiseless = NP.zeros_like(skyvis_lag)
        ccres_noiseless = NP.zeros_like(skyvis_lag)
        ccomponents_noisy = NP.zeros_like(vis_lag)
        ccres_noisy = NP.zeros_like(vis_lag)
        if parallel:
            if nproc is None:
                nproc = min(max(MP.cpu_count()-1, 1), self.ia.baselines.shape[0]*self.n_acc)
            else:
                nproc = min(max(MP.cpu_count()-1, 1), self.ia.baselines.shape[0]*self.n_acc, nproc)
            # One CLEAN task per (baseline, timestamp) pair, each with its own
            # CLEAN window derived from that pair's horizon delay limits
            list_of_skyvis_lag = []
            list_of_vis_lag = []
            list_of_dkern = []
            list_of_cboxes = []
            for bli in xrange(self.ia.baselines.shape[0]):
                for ti in xrange(self.n_acc):
                    list_of_skyvis_lag += [skyvis_lag[bli,:,ti]]
                    list_of_vis_lag += [vis_lag[bli,:,ti]]
                    list_of_dkern += [lag_kernel[bli,:,ti]]
                    clean_area = NP.zeros(self.f.size + npad, dtype=int)
                    clean_area[NP.logical_and(lags <= self.horizon_delay_limits[ti,bli,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[ti,bli,0]-clean_window_buffer/bw)] = 1
                    list_of_cboxes += [clean_area]
            list_of_gains = [gain] * self.ia.baselines.shape[0]*self.n_acc
            list_of_maxiter = [maxiter] * self.ia.baselines.shape[0]*self.n_acc
            list_of_thresholds = [threshold] * self.ia.baselines.shape[0]*self.n_acc
            list_of_threshold_types = [threshold_type] * self.ia.baselines.shape[0]*self.n_acc
            list_of_verbosity = [verbose] * self.ia.baselines.shape[0]*self.n_acc
            list_of_pid = range(self.ia.baselines.shape[0]*self.n_acc)
            # list_of_pid = [None] * self.ia.baselines.shape[0]*self.n_acc
            list_of_progressbars = [True] * self.ia.baselines.shape[0]*self.n_acc
            list_of_progressbar_ylocs = NP.arange(self.ia.baselines.shape[0]*self.n_acc) % min(nproc, WM.term.height)
            list_of_progressbar_ylocs = list_of_progressbar_ylocs.tolist()
            pool = MP.Pool(processes=nproc)
            list_of_noiseless_cleanstates = pool.map(complex1dClean_arg_splitter, IT.izip(list_of_skyvis_lag, list_of_dkern, list_of_cboxes, list_of_gains, list_of_maxiter, list_of_thresholds, list_of_threshold_types, list_of_verbosity, list_of_progressbars, list_of_pid, list_of_progressbar_ylocs))
            list_of_noisy_cleanstates = pool.map(complex1dClean_arg_splitter, IT.izip(list_of_vis_lag, list_of_dkern, list_of_cboxes, list_of_gains, list_of_maxiter, list_of_thresholds, list_of_threshold_types, list_of_verbosity, list_of_progressbars, list_of_pid, list_of_progressbar_ylocs))
            for bli in xrange(self.ia.baselines.shape[0]):
                for ti in xrange(self.n_acc):
                    ind = bli * self.n_acc + ti
                    noiseless_cleanstate = list_of_noiseless_cleanstates[ind]
                    ccomponents_noiseless[bli,:,ti] = noiseless_cleanstate['cc']
                    ccres_noiseless[bli,:,ti] = noiseless_cleanstate['res']
                    noisy_cleanstate = list_of_noisy_cleanstates[ind]
                    ccomponents_noisy[bli,:,ti] = noisy_cleanstate['cc']
                    ccres_noisy[bli,:,ti] = noisy_cleanstate['res']
        else:
            for snap_iter in xrange(self.n_acc):
                progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Baselines '.format(self.ia.baselines.shape[0]), PGB.ETA()], maxval=self.ia.baselines.shape[0]).start()
                for bl_iter in xrange(self.ia.baselines.shape[0]):
                    # BUGFIX: reset the CLEAN window for every (baseline,
                    # timestamp) pair. Previously the 1's accumulated across
                    # iterations so the window grew monotonically, making
                    # serial results differ from the parallel branch above.
                    clean_area = NP.zeros(self.f.size + npad, dtype=int)
                    clean_area[NP.logical_and(lags <= self.horizon_delay_limits[snap_iter,bl_iter,1]+clean_window_buffer/bw, lags >= self.horizon_delay_limits[snap_iter,bl_iter,0]-clean_window_buffer/bw)] = 1
                    cleanstate = complex1dClean(skyvis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], cbox=clean_area, gain=gain, maxiter=maxiter, threshold=threshold, threshold_type=threshold_type, verbose=verbose)
                    ccomponents_noiseless[bl_iter,:,snap_iter] = cleanstate['cc']
                    ccres_noiseless[bl_iter,:,snap_iter] = cleanstate['res']
                    cleanstate = complex1dClean(vis_lag[bl_iter,:,snap_iter], lag_kernel[bl_iter,:,snap_iter], cbox=clean_area, gain=gain, maxiter=maxiter, threshold=threshold, threshold_type=threshold_type, verbose=verbose)
                    ccomponents_noisy[bl_iter,:,snap_iter] = cleanstate['cc']
                    ccres_noisy[bl_iter,:,snap_iter] = cleanstate['res']
                    progress.update(bl_iter+1)
                progress.finish()
        deta = lags[1] - lags[0]
        pad_factor = (1.0 + 1.0*npad/self.f.size) # to make sure visibilities after CLEANing are at the same amplitude level as before CLEANing
        cc_skyvis = NP.fft.fft(ccomponents_noiseless, axis=1) * deta * pad_factor
        cc_skyvis_res = NP.fft.fft(ccres_noiseless, axis=1) * deta * pad_factor
        cc_vis = NP.fft.fft(ccomponents_noisy, axis=1) * deta * pad_factor
        cc_vis_res = NP.fft.fft(ccres_noisy, axis=1) * deta * pad_factor
        self.lags = lags
        # Store fftshifted (zero-delay-centered) lag-domain products
        self.skyvis_lag = NP.fft.fftshift(skyvis_lag, axes=1)
        self.vis_lag = NP.fft.fftshift(vis_lag, axes=1)
        self.lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
        self.cc_lag_kernel = NP.fft.fftshift(lag_kernel, axes=1)
        self.cc_skyvis_lag = NP.fft.fftshift(ccomponents_noiseless, axes=1)
        self.cc_skyvis_res_lag = NP.fft.fftshift(ccres_noiseless, axes=1)
        self.cc_vis_lag = NP.fft.fftshift(ccomponents_noisy, axes=1)
        self.cc_vis_res_lag = NP.fft.fftshift(ccres_noisy, axes=1)
        self.cc_skyvis_net_lag = self.cc_skyvis_lag + self.cc_skyvis_res_lag
        self.cc_vis_net_lag = self.cc_vis_lag + self.cc_vis_res_lag
        self.cc_lags = NP.fft.fftshift(lags)
        self.cc_skyvis_freq = cc_skyvis
        self.cc_skyvis_res_freq = cc_skyvis_res
        self.cc_vis_freq = cc_vis
        self.cc_vis_res_freq = cc_vis_res
        self.cc_skyvis_net_freq = cc_skyvis + cc_skyvis_res
        self.cc_vis_net_freq = cc_vis + cc_vis_res
        self.clean_window_buffer = clean_window_buffer
#############################################################################
def subband_delay_transform(self, bw_eff, freq_center=None, shape=None,
fftpow=None, pad=None, bpcorrect=False, action=None,
verbose=True):
"""
------------------------------------------------------------------------
Computes delay transform on multiple frequency sub-bands with specified
weights
Inputs:
bw_eff [dictionary] dictionary with two keys 'cc' and 'sim' to
specify effective bandwidths (in Hz) on the selected
frequency windows for subband delay
transform of CLEANed and simulated visibilities
respectively. The values under these keys can be a scalar,
list or numpy array and are independent of each other. If
a scalar value is provided, the same will be applied to all
frequency windows under that key
freq_center [dictionary] dictionary with two keys 'cc' and 'sim' to
specify frequency centers (in Hz) of the selected frequency
windows for subband delay transform of CLEANed and
simulated visibilities respectively. The values under these
keys can be a scalar, list or numpy array and are
independent of each other. If a scalar is provided, the
same will be applied to all frequency windows. Default=None
uses the center frequency from the class attribute named
channels for both keys 'cc' and 'sim'
shape [dictionary] dictionary with two keys 'cc' and 'sim' to
specify frequency window shape for subband delay transform
of CLEANed and simulated visibilities respectively. Values
held by the keys must be a string. Accepted values for the
string are 'rect' or 'RECT' (for rectangular), 'bnw' and
'BNW' (for Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
(rectangular window) for both keys
fftpow [dictionary] dictionary with two keys 'cc' and 'sim' to
specify the power to which the FFT of the window will be
raised. The values under these keys must be a positive
scalar. Default = 1.0 for each key
pad [dictionary] dictionary with two keys 'cc' and 'sim' to
specify padding fraction relative to the number of frequency
channels for CLEANed and simualted visibilities respectively.
Values held by the keys must be a non-negative scalar. For
e.g., a pad of 1.0 pads the frequency axis with zeros of
the same width as the number of channels. After the delay
transform, the transformed visibilities are downsampled by a
factor of 1+pad. If a negative value is specified, delay
transform will be performed with no padding. Default=None
sets to padding factor to 1.0 under both keys.
bpcorrect [boolean] Only applicable on delay CLEANed visibilities.
If True, correct for frequency weights that were applied
during the original delay transform using which the delay
CLEAN was done. This would flatten the bandpass after delay
CLEAN. If False (default), do not apply the correction,
namely, inverse of bandpass weights
action [string or None] If set to None (default) just updates the
attribute. If set to 'return_oversampled' it returns the
output dictionary corresponding to oversampled delay space
quantities and updates its attribute
subband_delay_spectra with full resolution in delay space.
If set to 'return_resampled' it returns the output
dictionary corresponding to resampled/downsampled delay
space quantities and updates the attribute.
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
Output:
If keyword input action is set to None (default), the output
is internally stored in the class attributes
subband_delay_spectra and subband_delay_spectra_resampled. If action is
set to 'return_oversampled', the following
output is returned. The output is a dictionary that contains two top
level keys, namely, 'cc' and 'sim' denoting information about CLEAN
and simulated visibilities respectively. Under each of these keys is
information about delay spectra of different frequency sub-bands (n_win
in number) in the form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'bpcorrect' [boolean] If True (default), correct for frequency
weights that were applied during the original
delay transform using which the delay CLEAN was
done. This would flatten the bandpass after delay
CLEAN. If False, do not apply the correction,
namely, inverse of bandpass weights. This applies only
CLEAned visibilities under the 'cc' key and hence is
present only if the top level key is 'cc' and absent
for key 'sim'
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nchan+npad where npad is the number of
frequency channels padded specified under the key
'npad'
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated.
'skyvis_lag'
[numpy array] subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_lag' [numpy array] subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'vis_noise_lag'
[numpy array] subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is 'sim'
and absent for 'cc'. It is of size
n_bl x n_win x (nchan+npad) x n_t.
'skyvis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simualted noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present for
top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
'vis_res_lag'
[numpy array] subband delay spectra of residuals
after delay CLEAN of simualted noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present for
top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x (nchan+npad) x n_t
If action is set to 'return_resampled', the following
output is returned. The output is a dictionary that contains two top
level keys, namely, 'cc' and 'sim' denoting information about CLEAN
and simulated visibilities respectively. Under each of these keys is
information about delay spectra of different frequency sub-bands (n_win
in number) in the form of a dictionary under the following keys:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
'skyvis_lag'
[numpy array] resampled subband delay spectra of simulated
or CLEANed noiseless visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_lag' [numpy array] resampled subband delay spectra of simulated
or CLEANed noisy visibilities, depending on whether
the top level key is 'cc' or 'sim' respectively,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'vis_noise_lag'
[numpy array] resampled subband delay spectra of simulated
noise after applying the frequency weights under
the key 'freq_wts'. Only present if top level key is 'sim'
and absent for 'cc'. It is of size
n_bl x n_win x nlags x n_t.
'skyvis_res_lag'
[numpy array] resampled subband delay spectra of residuals
after delay CLEAN of simulated noiseless
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present for
top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
'vis_res_lag'
[numpy array] resampled subband delay spectra of residuals
after delay CLEAN of simulated noisy
visibilities obtained after applying frequency
weights specified under key 'freq_wts'. Only present for
top level key 'cc' and absent for 'sim'. It is of
size n_bl x n_win x nlags x n_t
------------------------------------------------------------------------
"""
try:
bw_eff
except NameError:
raise NameError('Effective bandwidth must be specified')
else:
if not isinstance(bw_eff, dict):
raise TypeError('Effective bandwidth must be specified as a dictionary')
for key in ['cc','sim']:
if key in bw_eff:
if not isinstance(bw_eff[key], (int, float, list, NP.ndarray)):
raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
bw_eff[key] = NP.asarray(bw_eff[key]).reshape(-1)
if NP.any(bw_eff[key] <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = {key: NP.asarray(self.f[self.f.size/2]).reshape(-1) for key in ['cc', 'sim']}
# freq_center = NP.asarray(self.f[self.f.size/2]).reshape(-1)
elif isinstance(freq_center, dict):
for key in ['cc', 'sim']:
if isinstance(freq_center[key], (int, float, list, NP.ndarray)):
freq_center[key] = NP.asarray(freq_center[key]).reshape(-1)
if NP.any((freq_center[key] <= self.f.min()) | (freq_center[key] >= self.f.max())):
raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
else:
raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
else:
raise TypeError('Input frequency center must be specified as a dictionary')
for key in ['cc', 'sim']:
if (bw_eff[key].size == 1) and (freq_center[key].size > 1):
bw_eff[key] = NP.repeat(bw_eff[key], freq_center[key].size)
elif (bw_eff[key].size > 1) and (freq_center[key].size == 1):
freq_center[key] = NP.repeat(freq_center[key], bw_eff[key].size)
elif bw_eff[key].size != freq_center[key].size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, dict):
raise TypeError('Window shape must be specified as a dictionary')
for key in ['cc', 'sim']:
if not isinstance(shape[key], str):
raise TypeError('Window shape must be a string')
if shape[key] not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = {key: 'rect' for key in ['cc', 'sim']}
# shape = 'rect'
if fftpow is None:
fftpow = {key: 1.0 for key in ['cc', 'sim']}
else:
if not isinstance(fftpow, dict):
raise TypeError('Power to raise FFT of window by must be specified as a dictionary')
for key in ['cc', 'sim']:
if not isinstance(fftpow[key], (int, float)):
raise TypeError('Power to raise window FFT by must be a scalar value.')
if fftpow[key] < 0.0:
raise ValueError('Power for raising FFT of window by must be positive.')
if pad is None:
pad = {key: 1.0 for key in ['cc', 'sim']}
else:
if not isinstance(pad, dict):
raise TypeError('Padding for delay transform must be specified as a dictionary')
for key in ['cc', 'sim']:
if not isinstance(pad[key], (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad[key] < 0.0:
pad[key] = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if not isinstance(bpcorrect, bool):
raise TypeError('Input keyword bpcorrect must be of boolean type')
vis_noise_freq = NP.copy(self.ia.vis_noise_freq)
result = {}
for key in ['cc', 'sim']:
if (key == 'sim') or ((key == 'cc') and (self.cc_lags is not None)):
freq_wts = NP.empty((bw_eff[key].size, self.f.size), dtype=NP.float_)
frac_width = DSP.window_N2width(n_window=None, shape=shape[key], fftpow=fftpow[key], area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff[key] / self.df).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center[key].reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape[key], fftpow=fftpow[key], centering=True, peak=None, area_normalize=False, power_normalize=True)
# window = NP.sqrt(frac_width * n_window[i]) * DSP.windowing(n_window[i], shape=shape[key], centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
bpcorrection_factor = 1.0
npad = int(self.f.size * pad[key])
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
if key == 'cc':
skyvis_freq = self.cc_skyvis_freq[:,:self.f.size,:]
vis_freq = self.cc_vis_freq[:,:self.f.size,:]
skyvis_res_freq = self.cc_skyvis_res_freq[:,:self.f.size,:]
vis_res_freq = self.cc_vis_res_freq[:,:self.f.size,:]
skyvis_net_freq = self.cc_skyvis_net_freq[:,:self.f.size,:]
vis_net_freq = self.cc_vis_net_freq[:,:self.f.size,:]
if bpcorrect:
bpcorrection_factor = NP.where(NP.abs(self.bp_wts)>0.0, 1/self.bp_wts, 0.0)
bpcorrection_factor = bpcorrection_factor[:,NP.newaxis,:,:]
else:
skyvis_freq = NP.copy(self.ia.skyvis_freq)
vis_freq = NP.copy(self.ia.vis_freq)
skyvis_lag = DSP.FT1D(NP.pad(skyvis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
vis_lag = DSP.FT1D(NP.pad(vis_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
vis_noise_lag = DSP.FT1D(NP.pad(vis_noise_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
lag_kernel = DSP.FT1D(NP.pad(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result[key] = {'freq_center': freq_center[key], 'shape': shape[key], 'freq_wts': freq_wts, 'bw_eff': bw_eff[key], 'npad': npad, 'lags': lags, 'skyvis_lag': skyvis_lag, 'vis_lag': vis_lag, 'lag_kernel': lag_kernel, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=1)}
if key == 'cc':
skyvis_res_lag = DSP.FT1D(NP.pad(skyvis_res_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
vis_res_lag = DSP.FT1D(NP.pad(vis_res_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
skyvis_net_lag = DSP.FT1D(NP.pad(skyvis_net_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
vis_net_lag = DSP.FT1D(NP.pad(vis_net_freq[:,NP.newaxis,:,:] * self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result[key]['vis_res_lag'] = vis_res_lag
result[key]['skyvis_res_lag'] = skyvis_res_lag
result[key]['vis_net_lag'] = vis_net_lag
result[key]['skyvis_net_lag'] = skyvis_net_lag
result[key]['bpcorrect'] = bpcorrect
else:
result[key]['vis_noise_lag'] = vis_noise_lag
if verbose:
print('\tSub-band(s) delay transform computed')
self.subband_delay_spectra = result
result_resampled = {}
for key in ['cc', 'sim']:
if key in result:
result_resampled[key] = {}
result_resampled[key]['freq_center'] = result[key]['freq_center']
result_resampled[key]['bw_eff'] = result[key]['bw_eff']
downsample_factor = NP.min((self.f.size + npad) * self.df / result_resampled[key]['bw_eff'])
result_resampled[key]['lags'] = DSP.downsampler(result[key]['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
result_resampled[key]['lag_kernel'] = DSP.downsampler(result[key]['lag_kernel'], downsample_factor, axis=2, method='interp', kind='linear')
result_resampled[key]['skyvis_lag'] = DSP.downsampler(result[key]['skyvis_lag'], downsample_factor, axis=2, method='FFT')
result_resampled[key]['vis_lag'] = DSP.downsampler(result[key]['vis_lag'], downsample_factor, axis=2, method='FFT')
dlag = result_resampled[key]['lags'][1] - result_resampled[key]['lags'][0]
result_resampled[key]['lag_corr_length'] = (1/result[key]['bw_eff']) / dlag
if key == 'cc':
result_resampled[key]['skyvis_res_lag'] = DSP.downsampler(result[key]['skyvis_res_lag'], downsample_factor, axis=2, method='FFT')
result_resampled[key]['vis_res_lag'] = DSP.downsampler(result[key]['vis_res_lag'], downsample_factor, axis=2, method='FFT')
result_resampled[key]['skyvis_net_lag'] = DSP.downsampler(result[key]['skyvis_net_lag'], downsample_factor, axis=2, method='FFT')
result_resampled[key]['vis_net_lag'] = DSP.downsampler(result[key]['vis_net_lag'], downsample_factor, axis=2, method='FFT')
else:
result_resampled[key]['vis_noise_lag'] = DSP.downsampler(result[key]['vis_noise_lag'], downsample_factor, axis=2, method='FFT')
if verbose:
print('\tDownsampled Sub-band(s) delay transform computed')
self.subband_delay_spectra_resampled = result_resampled
if action is not None:
if action == 'return_oversampled':
return result
if action == 'return_resampled':
return result_resampled
#############################################################################
def subband_delay_transform_allruns(self, vis, bw_eff, freq_center=None,
shape=None, fftpow=None, pad=None,
bpcorrect=False, action=None,
verbose=True):
"""
------------------------------------------------------------------------
Computes delay transform on multiple frequency sub-bands with specified
weights for multiple realizations of visibilities
Inputs:
vis [numpy array] Visibilities which will be delay transformed.
It must be of shape (...,nbl,nchan,ntimes)
bw_eff [scalar, list or numpy array] effective bandwidths (in Hz)
on the selected frequency windows for subband delay
transform of visibilities. The values can be a scalar, list
or numpy array. If a scalar value is provided, the same
will be applied to all frequency windows.
freq_center [scalar, list or numpy array] frequency centers (in Hz) of
the selected frequency windows for subband delay transform
of visibilities. The values can be a scalar, list or numpy
array. If a scalar is provided, the same will be applied
to all frequency windows. Default=None uses the center
frequency from the class attribute
shape [string] frequency window shape for subband delay transform
of visibilities. It must be a string. Accepted values for the
string are 'rect' or 'RECT' (for rectangular), 'bnw' and
'BNW' (for Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
(rectangular window)
fftpow [scalar] the power to which the FFT of the window will be
raised. The value must be a positive scalar. Default = 1.0
pad [scalar] padding fraction relative to the number of
frequency channels. Value must be a non-negative scalar.
For e.g., a pad of 1.0 pads the frequency axis with zeros
of the same width as the number of channels. After the
delay transform, the transformed visibilities are
downsampled by a factor of 1+pad. If a negative value is
specified, delay transform will be performed with no
padding. Default=None sets to padding factor to 1.0
action [string or None] If set to 'return_oversampled' it returns
the output dictionary corresponding to oversampled delay
space quantities with full resolution in delay space. If
set to None (default) or 'return_resampled' it returns the
output dictionary corresponding to resampled/downsampled
delay space quantities.
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
Output:
The output is a dictionary that contains information about delay spectra
of different frequency sub-bands (n_win in number). If action is set to
'return_resampled', it contains the following keys and values:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nchan+npad where npad is the number of
frequency channels padded specified under the key
'npad'
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_win x (1 x 1 x ... nruns times) x n_bl x
(nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated. It is of size n_win
'vis_lag' [numpy array] subband delay spectra of visibilities,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_win x (n1xn2x... n_runs dims) x n_bl x (nchan+npad) x
x n_t.
If action is set to 'return_resampled', the following
output is returned. The output is a dictionary that contains
information about delay spectra of different frequency sub-bands
(n_win in number) with the following keys and values:
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_win x (1 x 1 x ... nruns times) x n_bl x nlags x n_t
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated. It is of size n_win
'vis_lag' [numpy array] subband delay spectra of visibilities,
after applying the frequency weights under the key
'freq_wts'. It is of size
n_win x (n1xn2x... n_runs dims) x n_bl x nlags x n_t
------------------------------------------------------------------------
"""
try:
vis, bw_eff
except NameError:
raise NameError('Input visibilities and effective bandwidth must be specified')
else:
if not isinstance(vis, NP.ndarray):
raise TypeError('Input vis must be a numpy array')
elif vis.ndim < 3:
raise ValueError('Input vis must be at least 3-dimensional')
elif vis.shape[-3:] == (self.ia.baselines.shape[0],self.f.size,self.n_acc):
if vis.ndim == 3:
shp = (1,) + vis.shape
else:
shp = vis.shape
vis = vis.reshape(shp)
else:
raise ValueError('Input vis does not have compatible shape')
if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
bw_eff = NP.asarray(bw_eff).reshape(-1)
if NP.any(bw_eff <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = NP.asarray(self.f[self.f.size/2]).reshape(-1)
elif isinstance(freq_center, (int, float, list, NP.ndarray)):
freq_center = NP.asarray(freq_center).reshape(-1)
if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
else:
raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
if (bw_eff.size == 1) and (freq_center.size > 1):
bw_eff = NP.repeat(bw_eff, freq_center.size)
elif (bw_eff.size > 1) and (freq_center.size == 1):
freq_center = NP.repeat(freq_center, bw_eff.size)
elif bw_eff.size != freq_center.size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, str):
raise TypeError('Window shape must be a string')
if shape.lower() not in ['rect', 'bhw', 'bnw']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = 'rect'
if fftpow is None:
fftpow = 1.0
else:
if not isinstance(fftpow, (int, float)):
raise TypeError('Power to raise window FFT by must be a scalar value.')
if fftpow < 0.0:
raise ValueError('Power for raising FFT of window by must be positive.')
if pad is None:
pad = 1.0
else:
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
result = {}
freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=NP.float_)
frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
freq_wts = freq_wts.reshape((bw_eff.size,)+tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+(1,self.f.size,1))
bp = self.bp.reshape(tuple(NP.ones(len(vis.shape[:-3]),dtype=NP.int))+self.bp.shape)
npad = int(self.f.size * pad)
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
pad_shape = [[0,0]] + NP.zeros((len(vis.shape[:-3]),2), dtype=NP.int).tolist()
pad_shape += [[0,0], [0,npad], [0,0]]
vis_lag = DSP.FT1D(NP.pad(vis[NP.newaxis,...] * bp[NP.newaxis,...] * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
lag_kernel = DSP.FT1D(NP.pad(bp[NP.newaxis,...] * freq_wts, pad_shape, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result = {'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'npad': npad, 'lags': lags, 'vis_lag': vis_lag, 'lag_kernel': lag_kernel, 'lag_corr_length': self.f.size / NP.squeeze(NP.sum(freq_wts, axis=-2))}
if verbose:
print('\tSub-band(s) delay transform computed')
if action is not None:
action = 'return_resampled'
if action == 'return_oversampled':
return result
elif action == 'return_resampled':
downsample_factor = NP.min((self.f.size + npad) * self.df / result['bw_eff'])
result['lags'] = DSP.downsampler(result['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
result['lag_kernel'] = DSP.downsampler(result['lag_kernel'], downsample_factor, axis=-2, method='interp', kind='linear')
result['vis_lag'] = DSP.downsampler(result['vis_lag'], downsample_factor, axis=-2, method='FFT')
dlag = result['lags'][1] - result['lags'][0]
result['lag_corr_length'] = (1/result['bw_eff']) / dlag
return result
else:
raise ValueError('Invalid value specified for keyword input action')
if verbose:
print('\tDownsampled Sub-band(s) delay transform computed')
#############################################################################
def subband_delay_transform_closure_phase(self, bw_eff, cpinfo=None,
antenna_triplets=None,
specsmooth_info=None,
delay_filter_info=None,
spectral_window_info=None,
freq_center=None, shape=None,
fftpow=None, pad=None, action=None,
verbose=True):
"""
------------------------------------------------------------------------
Computes delay transform of closure phases on antenna triplets on
multiple frequency sub-bands with specified weights. It will have units
of Hz
Inputs:
bw_eff [scalar or numpy array] effective bandwidths (in Hz) on the
selected frequency windows for subband delay transform of
closure phases. If a scalar value is provided, the same
will be applied to all frequency windows
cpinfo [dictionary] If set to None, it will be determined based on
other inputs. Otherwise, it will be used directly. The
dictionary will contain the following keys and values:
'closure_phase_skyvis' [numpy array] [optional] Closure
phases (in radians) for the given
antenna triplets from the noiseless
visibilities. It is of shape
ntriplets x ... x nchan x ntimes
'closure_phase_vis' [numpy array] [optional] Closure
phases (in radians) for the given
antenna triplets for noisy
visibilities. It is of shape
ntriplets x ... x nchan x ntimes
'closure_phase_noise' [numpy array] [optional] Closure
phases (in radians) for the given
antenna triplets for thermal noise
in visibilities. It is of shape
ntriplets x ... x nchan x ntimes
'antenna_triplets' [list of tuples] List of
three-element tuples of antenna IDs
for which the closure phases are
calculated.
'baseline_triplets' [numpy array] List of 3x3 numpy
arrays. Each 3x3 unit in the list
represents triplets of baseline
vectors where the three rows denote
the three baselines in the triplet
and the three columns define the x-,
y- and z-components of the triplet.
The number of 3x3 unit elements in
the list will equal the number of
elements in the list under key
'antenna_triplets'.
antenna_triplets
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. If set to None (default), all
the unique triplets based on the antenna layout attribute
in class InterferometerArray
specsmooth_info
[NoneType or dictionary] Spectral smoothing window to be
applied prior to the delay transform. If set to None, no
smoothing is done. This is usually set if spectral
smoothing is to be done such as in the case of RFI. The
smoothing window parameters are specified using the
following keys and values:
'op_type' [string] Smoothing operation type.
Default='median' (currently accepts only
'median' or 'interp').
'window_size' [integer] Size of smoothing window (in
pixels) along frequency axis. Applies only
if op_type is set to 'median'
'maskchans' [NoneType or numpy array] Numpy boolean array
of size nchan. False entries imply those
channels are not masked and will be used in
in interpolation while True implies they are
masked and will not be used in determining the
interpolation function. If set to None, all
channels are assumed to be unmasked (False).
'evalchans' [NoneType or numpy array] Channel numbers at
which visibilities are to be evaluated. Will
be useful for filling in RFI flagged channels.
If set to None, all channels will be evaluated
'noiseRMS' [NoneType or scalar or numpy array] If set to
None (default), the rest of the parameters are
used in determining the RMS of thermal noise.
If specified as scalar, all other parameters
will be ignored in estimating noiseRMS and
this value will be used instead. If specified
as a numpy array, it must be of shape
broadcastable to (nbl,nchan,ntimes). So
accepted shapes can be (1,1,1), (1,1,ntimes),
(1,nchan,1), (nbl,1,1), (1,nchan,ntimes),
(nbl,nchan,1), (nbl,1,ntimes), or
(nbl,nchan,ntimes).
delay_filter_info
[NoneType or dictionary] Info containing delay filter
parameters. If set to None (default), no delay filtering is
performed. Otherwise, delay filter is applied on each of the
visibilities in the triplet before computing the closure
phases. The delay filter parameters are specified in a
dictionary as follows:
'type' [string] 'horizon' (default) or 'regular'. If
set to 'horizon', the horizon delay limits are
estimated from the respective baseline lengths
in the triplet. If set to 'regular', the extent
of the filter is determined by the 'min' and
'width' keys (see below).
'min' [scalar] Non-negative number (in seconds) that
specifies the minimum delay in the filter span.
If not specified, it is assumed to be 0. If
'type' is set to 'horizon', the 'min' is ignored
and set to 0.
'width' [scalar] Non-negative number (in numbers of
inverse bandwidths). If 'type' is set to
'horizon', the width represents the delay
buffer beyond the horizon. If 'type' is set to
'regular', this number has to be positive and
determines the span of the filter starting from
the minimum delay in key 'min'.
'mode' [string] 'discard' (default) or 'retain'. If set
to 'discard', the span defining the filter is
discarded and the rest retained. If set to
'retain', the span defining the filter is
retained and the rest discarded. For example,
if 'type' is set to 'horizon' and 'mode' is set
to 'discard', the horizon-to-horizon is
filtered out (discarded).
spectral_window_info
[NoneType or dictionary] Spectral window parameters to
determine the spectral weights and apply to the visibilities
in the frequency domain before filtering in the delay domain.
THESE PARAMETERS ARE APPLIED ON THE INDIVIDUAL VISIBILITIES
THAT GO INTO THE CLOSURE PHASE. THESE ARE NOT TO BE CONFUSED
WITH THE PARAMETERS THAT WILL BE USED IN THE ACTUAL DELAY
TRANSFORM OF CLOSURE PHASE SPECTRA WHICH ARE SPECIFIED
SEPARATELY FURTHER BELOW.
If set to None (default), unity spectral weights are applied.
If spectral weights are to be applied, it must be a provided
as a dictionary with the following keys and values:
bw_eff [scalar] effective bandwidths (in Hz) for the
spectral window
freq_center [scalar] frequency center (in Hz) for the
spectral window
shape [string] frequency window shape for the
spectral window. Accepted values are 'rect' or
'RECT' (for rectangular), 'bnw' and 'BNW' (for
Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
fftpow [scalar] power to which the FFT of the window
will be raised. The value must be a positive
scalar.
freq_center [scalar, list or numpy array] frequency centers (in Hz) of
the selected frequency windows for subband delay transform
of closure phases. The value can be a scalar, list or numpy
array. If a scalar is provided, the same will be applied to
all frequency windows. Default=None uses the center
frequency from the class attribute named channels
shape [string] frequency window shape for subband delay transform
of closure phases. Accepted values for the string are
'rect' or 'RECT' (for rectangular), 'bnw' and 'BNW' (for
Blackman-Nuttall), and 'bhw' or 'BHW' (for
Blackman-Harris). Default=None sets it to 'rect'
(rectangular window)
fftpow [scalar] the power to which the FFT of the window will be
raised. The value must be a positive scalar. Default = 1.0
pad [scalar] padding fraction relative to the number of
frequency channels for closure phases. Value must be a
non-negative scalar. For e.g., a pad of 1.0 pads the
frequency axis with zeros of the same width as the number
of channels. After the delay transform, the transformed
closure phases are downsampled by a factor of 1+pad. If a
negative value is specified, delay transform will be
performed with no padding. Default=None sets to padding
factor to 1.0
action [string or None] If set to None (default) just updates the
attribute. If set to 'return_oversampled' it returns the
output dictionary corresponding to oversampled delay space
quantities with full resolution in delay space. If set to
None (default) or 'return_resampled', it returns the output
dictionary corresponding to resampled or downsampled delay
space quantities.
verbose [boolean] If set to True (default), print diagnostic and
progress messages. If set to False, no such messages are
printed.
Output:
If keyword input action is set to 'return_oversampled', the following
output is returned. The output is a dictionary that contains information
about delay spectra of different frequency sub-bands (n_win in number)
under the following keys:
'antenna_triplets'
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. Closure phase delay spectra in
subbands is computed for each of these antenna triplets
'baseline_triplets'
[numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'. Closure phase delay
spectra in subbands is computed for each of these baseline
triplets which correspond to the antenna triplets
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan.
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'shape' [string] shape of the window function applied.
Accepted values are 'rect' (rectangular), 'bhw'
(Blackman-Harris), 'bnw' (Blackman-Nuttall).
'npad' [scalar] Numbber of zero-padded channels before
performing the subband delay transform.
'lags' [numpy array] lags of the subband delay spectra
after padding in frequency during the transform. It
is of size nchan+npad where npad is the number of
frequency channels padded specified under the key
'npad'
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_triplets x ... x n_win x (nchan+npad) x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the total bandwidth
(nchan x df) simulated.
'closure_phase_skyvis'
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x ... n_win x
nlags x n_t. It is in units of Hz
'closure_phase_vis'
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_noise'
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x ... x n_win x nlags x n_t. It
is in units of Hz
If action is set to 'return_resampled', the following
output is returned. The output is a dictionary that contains
information about closure phases. Under each of these keys is
information about delay spectra of different frequency sub-bands
(n_win in number) under the following keys:
'antenna_triplets'
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. Closure phase delay spectra in
subbands is computed for each of these antenna triplets
'baseline_triplets'
[numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'. Closure phase delay
spectra in subbands is computed for each of these baseline
triplets which correspond to the antenna triplets
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_triplets x ... x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
'closure_phase_skyvis'
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_vis'
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x ... x n_win x
nlags x n_t. It is in units of Hz
'closure_phase_noise'
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x ... x n_win x nlags x n_t. It is
in units of Hz
------------------------------------------------------------------------
"""
try:
bw_eff
except NameError:
raise NameError('Effective bandwidth must be specified')
else:
if not isinstance(bw_eff, (int, float, list, NP.ndarray)):
raise TypeError('Value of effective bandwidth must be a scalar, list or numpy array')
bw_eff = NP.asarray(bw_eff).reshape(-1)
if NP.any(bw_eff <= 0.0):
raise ValueError('All values in effective bandwidth must be strictly positive')
if freq_center is None:
freq_center = NP.asarray(self.f[self.f.size/2]).reshape(-1)
elif isinstance(freq_center, (int, float, list, NP.ndarray)):
freq_center = NP.asarray(freq_center).reshape(-1)
if NP.any((freq_center <= self.f.min()) | (freq_center >= self.f.max())):
raise ValueError('Value(s) of frequency center(s) must lie strictly inside the observing band')
else:
raise TypeError('Values(s) of frequency center must be scalar, list or numpy array')
if (bw_eff.size == 1) and (freq_center.size > 1):
bw_eff = NP.repeat(bw_eff, freq_center.size)
elif (bw_eff.size > 1) and (freq_center.size == 1):
freq_center = NP.repeat(freq_center, bw_eff.size)
elif bw_eff.size != freq_center.size:
raise ValueError('Effective bandwidth(s) and frequency center(s) must have same number of elements')
if shape is not None:
if not isinstance(shape, str):
raise TypeError('Window shape must be a string')
if shape not in ['rect', 'bhw', 'bnw', 'RECT', 'BHW', 'BNW']:
raise ValueError('Invalid value for window shape specified.')
else:
shape = 'rect'
if fftpow is None:
fftpow = 1.0
else:
if not isinstance(fftpow, (int, float)):
raise TypeError('Power to raise window FFT by must be a scalar value.')
if fftpow < 0.0:
raise ValueError('Power for raising FFT of window by must be positive.')
if pad is None:
pad = 1.0
else:
if not isinstance(pad, (int, float)):
raise TypeError('pad fraction must be a scalar value.')
if pad < 0.0:
pad = 0.0
if verbose:
print('\tPad fraction found to be negative. Resetting to 0.0 (no padding will be applied).')
if cpinfo is not None:
if not isinstance(cpinfo, dict):
raise TypeError('Input cpinfo must be a dictionary')
else:
cpinfo = self.ia.getClosurePhase(antenna_triplets=antenna_triplets, specsmooth_info=specsmooth_info, delay_filter_info=delay_filter_info, spectral_window_info=spectral_window_info)
result = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}
freq_wts = NP.empty((bw_eff.size, self.f.size), dtype=NP.float_)
frac_width = DSP.window_N2width(n_window=None, shape=shape, fftpow=fftpow, area_normalize=False, power_normalize=True)
window_loss_factor = 1 / frac_width
n_window = NP.round(window_loss_factor * bw_eff / self.df).astype(NP.int)
ind_freq_center, ind_channels, dfrequency = LKP.find_1NN(self.f.reshape(-1,1), freq_center.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sortind = NP.argsort(ind_channels)
ind_freq_center = ind_freq_center[sortind]
ind_channels = ind_channels[sortind]
dfrequency = dfrequency[sortind]
n_window = n_window[sortind]
for i,ind_chan in enumerate(ind_channels):
window = NP.sqrt(frac_width * n_window[i]) * DSP.window_fftpow(n_window[i], shape=shape, fftpow=fftpow, centering=True, peak=None, area_normalize=False, power_normalize=True)
window_chans = self.f[ind_chan] + self.df * (NP.arange(n_window[i]) - int(n_window[i]/2))
ind_window_chans, ind_chans, dfreq = LKP.find_1NN(self.f.reshape(-1,1), window_chans.reshape(-1,1), distance_ULIM=0.5*self.df, remove_oob=True)
sind = NP.argsort(ind_window_chans)
ind_window_chans = ind_window_chans[sind]
ind_chans = ind_chans[sind]
dfreq = dfreq[sind]
window = window[ind_window_chans]
window = NP.pad(window, ((ind_chans.min(), self.f.size-1-ind_chans.max())), mode='constant', constant_values=((0.0,0.0)))
freq_wts[i,:] = window
npad = int(self.f.size * pad)
lags = DSP.spectral_axis(self.f.size + npad, delx=self.df, use_real=False, shift=True)
# lag_kernel = DSP.FT1D(NP.pad(self.bp[:,NP.newaxis,:,:] * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
# lag_kernel = DSP.FT1D(NP.pad(freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result = {'freq_center': freq_center, 'shape': shape, 'freq_wts': freq_wts, 'bw_eff': bw_eff, 'npad': npad, 'lags': lags, 'lag_corr_length': self.f.size / NP.sum(freq_wts, axis=-1)}
for key in cpinfo:
if key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
available_CP_key = key
ndim_padtuple = [(0,0) for i in range(1+len(cpinfo[key].shape[:-2]))] + [(0,npad), (0,0)]
result[key] = DSP.FT1D(NP.pad(NP.exp(-1j*cpinfo[key].reshape(cpinfo[key].shape[:-2]+(1,)+cpinfo[key].shape[-2:])) * freq_wts.reshape(tuple(NP.ones(len(cpinfo[key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
# result[key] = DSP.FT1D(NP.pad(NP.exp(-1j*cpinfo[key][:,NP.newaxis,:,:]) * freq_wts[NP.newaxis,:,:,NP.newaxis], ((0,0),(0,0),(0,npad),(0,0)), mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
lag_kernel = DSP.FT1D(NP.pad(freq_wts.reshape(tuple(NP.ones(len(cpinfo[available_CP_key].shape[:-2])).astype(int))+freq_wts.shape+(1,)), ndim_padtuple, mode='constant'), ax=-2, inverse=True, use_real=False, shift=True) * (npad + self.f.size) * self.df
result['lag_kernel'] = lag_kernel
if verbose:
print('\tSub-band(s) delay transform computed')
result_resampled = {'antenna_triplets': cpinfo['antenna_triplets'], 'baseline_triplets': cpinfo['baseline_triplets']}
result_resampled['freq_center'] = result['freq_center']
result_resampled['bw_eff'] = result['bw_eff']
result_resampled['freq_wts'] = result['freq_wts']
downsample_factor = NP.min((self.f.size + npad) * self.df / result_resampled['bw_eff'])
result_resampled['lags'] = DSP.downsampler(result['lags'], downsample_factor, axis=-1, method='interp', kind='linear')
result_resampled['lag_kernel'] = DSP.downsampler(result['lag_kernel'], downsample_factor, axis=-2, method='interp', kind='linear')
dlag = result_resampled['lags'][1] - result_resampled['lags'][0]
result_resampled['lag_corr_length'] = (1/result['bw_eff']) / dlag
for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
if key in result:
result_resampled[key] = DSP.downsampler(result[key], downsample_factor, axis=-2, method='FFT')
if verbose:
print('\tDownsampled Sub-band(s) delay transform computed')
if (action is None) or (action.lower() == 'return_resampled'):
return result_resampled
elif action.lower() == 'return_oversampled':
return result
else:
raise ValueError('Invalid action specified')
################################################################################
def get_horizon_delay_limits(self, phase_center=None,
phase_center_coords=None):
"""
-------------------------------------------------------------------------
Estimates the delay envelope determined by the sky horizon for the
baseline(s) for the phase centers
Inputs:
phase_center
[numpy array] Phase center of the observation as 2-column or
3-column numpy array. Two columns are used when it is specified
in 'hadec' or 'altaz' coordinates as indicated by the input
phase_center_coords or by three columns when 'dircos' coordinates
are used. This is where the telescopes will be phased up to as
reference. Coordinate system for the phase_center is specified
by another input phase_center_coords. Default=None implies the
corresponding attribute from the DelaySpectrum instance is used.
This is a Nx2 or Nx3 array
phase_center_coords
[string] Coordinate system for array phase center. Accepted
values are 'hadec' (HA-Dec), 'altaz' (Altitude-Azimuth) or
'dircos' (direction cosines). Default=None implies the
corresponding attribute from the DelaySpectrum instance is used.
Outputs:
horizon_envelope:
NxMx2 matrix where M is the number of baselines and N is the number
of phase centers. horizon_envelope[:,:,0] contains the minimum delay
after accounting for (any) non-zenith phase center.
horizon_envelope[:,:,1] contains the maximum delay after accounting
for (any) non-zenith phase center(s).
-------------------------------------------------------------------------
"""
if phase_center is None:
phase_center = self.ia.phase_center
phase_center_coords = self.ia.phase_center_coords
if phase_center_coords not in ['hadec', 'altaz', 'dircos']:
raise ValueError('Phase center coordinates must be "altaz", "hadec" or "dircos"')
if phase_center_coords == 'hadec':
pc_altaz = GEOM.hadec2altaz(phase_center, self.ia.latitude, units='degrees')
pc_dircos = GEOM.altaz2dircos(pc_altaz, units='degrees')
elif phase_center_coords == 'altaz':
pc_dircos = GEOM.altaz2dircos(phase_center, units='degrees')
elif phase_center_coords == 'dircos':
pc_dircos = phase_center
horizon_envelope = DLY.horizon_delay_limits(self.ia.baselines, pc_dircos, units='mks')
return horizon_envelope
#############################################################################
def set_horizon_delay_limits(self):
"""
-------------------------------------------------------------------------
Estimates the delay envelope determined by the sky horizon for the
baseline(s) for the phase centers of the DelaySpectrum instance. No
output is returned. Uses the member function get_horizon_delay_limits()
-------------------------------------------------------------------------
"""
self.horizon_delay_limits = self.get_horizon_delay_limits()
#############################################################################
def save(self, ds_outfile, ia_outfile, tabtype='BinTabelHDU', overwrite=False,
verbose=True):
"""
-------------------------------------------------------------------------
Saves the interferometer array delay spectrum information to disk.
Inputs:
outfile [string] Filename with full path for for delay spectrum
data to be saved to. Will be appended with '.ds.fits'
ia_outfile [string] Filename with full path for interferometer array
data to be saved to. Will be appended with '.fits'
extension
Keyword Input(s):
tabtype [string] indicates table type for one of the extensions in
the FITS file. Allowed values are 'BinTableHDU' and
'TableHDU' for binary and ascii tables respectively. Default
is 'BinTableHDU'.
overwrite [boolean] True indicates overwrite even if a file already
exists. Default = False (does not overwrite)
verbose [boolean] If True (default), prints diagnostic and progress
messages. If False, suppress printing such messages.
-------------------------------------------------------------------------
"""
try:
ds_outfile, ia_outfile
except NameError:
raise NameError('Both delay spectrum and interferometer array output filenames must be specified. Aborting DelaySpectrum.save()...')
if verbose:
print('\nSaving information about interferometer array...')
self.ia.save(ia_outfile, tabtype=tabtype, overwrite=overwrite,
verbose=verbose)
if verbose:
print('\nSaving information about delay spectra...')
hdulist = []
hdulist += [fits.PrimaryHDU()]
hdulist[0].header['EXTNAME'] = 'PRIMARY'
hdulist[0].header['NCHAN'] = (self.f.size, 'Number of frequency channels')
hdulist[0].header['NLAGS'] = (self.lags.size, 'Number of lags')
hdulist[0].header['freq_resolution'] = (self.df, 'Frequency resolution (Hz)')
hdulist[0].header['N_ACC'] = (self.n_acc, 'Number of accumulations')
hdulist[0].header['PAD'] = (self.pad, 'Padding factor')
hdulist[0].header['DBUFFER'] = (self.clean_window_buffer, 'CLEAN window buffer (1/bandwidth)')
hdulist[0].header['IARRAY'] = (ia_outfile+'.fits', 'Location of InterferometerArray simulated visibilities')
if verbose:
print('\tCreated a primary HDU.')
# cols = []
# cols += [fits.Column(name='frequency', format='D', array=self.f)]
# cols += [fits.Column(name='lag', format='D', array=self.lags)]
# columns = _astropy_columns(cols, tabtype=tabtype)
# tbhdu = fits.new_table(columns)
# tbhdu.header.set('EXTNAME', 'SPECTRAL INFO')
# hdulist += [tbhdu]
# if verbose:
# print('\tCreated an extension for spectral information.')
hdulist += [fits.ImageHDU(self.f, name='FREQUENCIES')]
hdulist += [fits.ImageHDU(self.lags, name='LAGS')]
if verbose:
print('\tCreated an extension for spectral information.')
hdulist += [fits.ImageHDU(self.horizon_delay_limits, name='HORIZON LIMITS')]
if verbose:
print('\tCreated an extension for horizon delay limits of size {0[0]} x {0[1]} x {0[2]} as a function of snapshot instance, baseline, and (min,max) limits'.format(self.horizon_delay_limits.shape))
hdulist += [fits.ImageHDU(self.bp, name='BANDPASS')]
if verbose:
print('\tCreated an extension for bandpass functions of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp.shape))
hdulist += [fits.ImageHDU(self.bp_wts, name='BANDPASS WEIGHTS')]
if verbose:
print('\tCreated an extension for bandpass weights of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, frequency, and snapshot instance'.format(self.bp_wts.shape))
if self.lag_kernel is not None:
hdulist += [fits.ImageHDU(self.lag_kernel.real, name='LAG KERNEL REAL')]
hdulist += [fits.ImageHDU(self.lag_kernel.imag, name='LAG KERNEL IMAG')]
if verbose:
print('\tCreated an extension for convolving lag kernel of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, lags, and snapshot instance'.format(self.lag_kernel.shape))
if self.skyvis_lag is not None:
hdulist += [fits.ImageHDU(self.skyvis_lag.real, name='NOISELESS DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.skyvis_lag.imag, name='NOISELESS DELAY SPECTRA IMAG')]
if self.vis_lag is not None:
hdulist += [fits.ImageHDU(self.vis_lag.real, name='NOISY DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.vis_lag.imag, name='NOISY DELAY SPECTRA IMAG')]
if self.vis_noise_lag is not None:
hdulist += [fits.ImageHDU(self.vis_noise_lag.real, name='DELAY SPECTRA NOISE REAL')]
hdulist += [fits.ImageHDU(self.vis_noise_lag.imag, name='DELAY SPECTRA NOISE IMAG')]
if self.cc_freq is not None:
hdulist += [fits.ImageHDU(self.cc_freq, name='CLEAN FREQUENCIES')]
if self.cc_lags is not None:
hdulist += [fits.ImageHDU(self.cc_lags, name='CLEAN LAGS')]
if verbose:
print('\tCreated an extension for spectral axes of clean components')
if self.cc_lag_kernel is not None:
hdulist += [fits.ImageHDU(self.cc_lag_kernel.real, name='CLEAN LAG KERNEL REAL')]
hdulist += [fits.ImageHDU(self.cc_lag_kernel.imag, name='CLEAN LAG KERNEL IMAG')]
if verbose:
print('\tCreated an extension for deconvolving lag kernel of size {0[0]} x {0[1]} x {0[2]} as a function of baseline, lags, and snapshot instance'.format(self.cc_lag_kernel.shape))
if self.cc_skyvis_lag is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_lag.real, name='CLEAN NOISELESS DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_lag.imag, name='CLEAN NOISELESS DELAY SPECTRA IMAG')]
if self.cc_skyvis_res_lag is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_res_lag.real, name='CLEAN NOISELESS DELAY SPECTRA RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_res_lag.imag, name='CLEAN NOISELESS DELAY SPECTRA RESIDUALS IMAG')]
if self.cc_skyvis_freq is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_freq.real, name='CLEAN NOISELESS VISIBILITIES REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_freq.imag, name='CLEAN NOISELESS VISIBILITIES IMAG')]
if self.cc_skyvis_res_freq is not None:
hdulist += [fits.ImageHDU(self.cc_skyvis_res_freq.real, name='CLEAN NOISELESS VISIBILITIES RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_skyvis_res_freq.imag, name='CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG')]
if self.cc_vis_lag is not None:
hdulist += [fits.ImageHDU(self.cc_vis_lag.real, name='CLEAN NOISY DELAY SPECTRA REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_lag.imag, name='CLEAN NOISY DELAY SPECTRA IMAG')]
if self.cc_vis_res_lag is not None:
hdulist += [fits.ImageHDU(self.cc_vis_res_lag.real, name='CLEAN NOISY DELAY SPECTRA RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_res_lag.imag, name='CLEAN NOISY DELAY SPECTRA RESIDUALS IMAG')]
if self.cc_vis_freq is not None:
hdulist += [fits.ImageHDU(self.cc_vis_freq.real, name='CLEAN NOISY VISIBILITIES REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_freq.imag, name='CLEAN NOISY VISIBILITIES IMAG')]
if self.cc_vis_res_freq is not None:
hdulist += [fits.ImageHDU(self.cc_vis_res_freq.real, name='CLEAN NOISY VISIBILITIES RESIDUALS REAL')]
hdulist += [fits.ImageHDU(self.cc_vis_res_freq.imag, name='CLEAN NOISY VISIBILITIES RESIDUALS IMAG')]
if verbose:
print('\tCreated extensions for clean components of noiseless, noisy and residuals of visibilities in frequency and delay coordinates of size {0[0]} x {0[1]} x {0[2]} as a function of baselines, lags/frequency and snapshot instance'.format(self.lag_kernel.shape))
if self.subband_delay_spectra:
hdulist[0].header['SBDS'] = (1, 'Presence of Subband Delay Spectra')
for key in self.subband_delay_spectra:
hdulist[0].header['{0}-SBDS'.format(key)] = (1, 'Presence of {0} Subband Delay Spectra'.format(key))
hdulist[0].header['{0}-SBDS-WSHAPE'.format(key)] = (self.subband_delay_spectra[key]['shape'], 'Shape of {0} subband frequency weights'.format(key))
if key == 'cc':
hdulist[0].header['{0}-SBDS-BPCORR'.format(key)] = (int(self.subband_delay_spectra[key]['bpcorrect']), 'Truth value for {0} subband delay spectrum bandpass windows weights correction'.format(key))
hdulist[0].header['{0}-SBDS-NPAD'.format(key)] = (self.subband_delay_spectra[key]['npad'], 'Number of zero-padded channels for subband delay spectra'.format(key))
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['freq_center'], name='{0}-SBDS-F0'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['freq_wts'], name='{0}-SBDS-FWTS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['bw_eff'], name='{0}-SBDS-BWEFF'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lags'], name='{0}-SBDS-LAGS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_kernel'].real, name='{0}-SBDS-LAGKERN-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_kernel'].imag, name='{0}-SBDS-LAGKERN-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['lag_corr_length'], name='{0}-SBDS-LAGCORR'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_lag'].real, name='{0}-SBDS-SKYVISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_lag'].imag, name='{0}-SBDS-SKYVISLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_lag'].real, name='{0}-SBDS-VISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_lag'].imag, name='{0}-SBDS-VISLAG-IMAG'.format(key))]
if key == 'sim':
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_noise_lag'].real, name='{0}-SBDS-NOISELAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_noise_lag'].imag, name='{0}-SBDS-NOISELAG-IMAG'.format(key))]
if key == 'cc':
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_res_lag'].real, name='{0}-SBDS-SKYVISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['skyvis_res_lag'].imag, name='{0}-SBDS-SKYVISRESLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_res_lag'].real, name='{0}-SBDS-VISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra[key]['vis_res_lag'].imag, name='{0}-SBDS-VISRESLAG-IMAG'.format(key))]
if verbose:
print('\tCreated extensions for information on subband delay spectra for simulated and clean components of visibilities as a function of baselines, lags/frequency and snapshot instance')
if self.subband_delay_spectra_resampled:
hdulist[0].header['SBDS-RS'] = (1, 'Presence of Resampled Subband Delay Spectra')
for key in self.subband_delay_spectra_resampled:
hdulist[0].header['{0}-SBDS-RS'.format(key)] = (1, 'Presence of {0} Reampled Subband Delay Spectra'.format(key))
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['freq_center'], name='{0}-SBDSRS-F0'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['bw_eff'], name='{0}-SBDSRS-BWEFF'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lags'], name='{0}-SBDSRS-LAGS'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_kernel'].real, name='{0}-SBDSRS-LAGKERN-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_kernel'].imag, name='{0}-SBDSRS-LAGKERN-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['lag_corr_length'], name='{0}-SBDSRS-LAGCORR'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_lag'].real, name='{0}-SBDSRS-SKYVISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_lag'].imag, name='{0}-SBDSRS-SKYVISLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_lag'].real, name='{0}-SBDSRS-VISLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_lag'].imag, name='{0}-SBDSRS-VISLAG-IMAG'.format(key))]
if key == 'sim':
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_noise_lag'].real, name='{0}-SBDSRS-NOISELAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_noise_lag'].imag, name='{0}-SBDSRS-NOISELAG-IMAG'.format(key))]
if key == 'cc':
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_res_lag'].real, name='{0}-SBDSRS-SKYVISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['skyvis_res_lag'].imag, name='{0}-SBDSRS-SKYVISRESLAG-IMAG'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_res_lag'].real, name='{0}-SBDSRS-VISRESLAG-REAL'.format(key))]
hdulist += [fits.ImageHDU(self.subband_delay_spectra_resampled[key]['vis_res_lag'].imag, name='{0}-SBDSRS-VISRESLAG-IMAG'.format(key))]
if verbose:
print('\tCreated extensions for information on resampled subband delay spectra for simulated and clean components of visibilities as a function of baselines, lags/frequency and snapshot instance')
hdu = fits.HDUList(hdulist)
hdu.writeto(ds_outfile+'.ds.fits', clobber=overwrite)
################################################################################
class DelayPowerSpectrum(object):
"""
----------------------------------------------------------------------------
Class to manage delay power spectrum from visibility measurements of a
multi-element interferometer array.
Attributes:
cosmo [instance of cosmology class from astropy] An instance of class
FLRW or default_cosmology of astropy cosmology module.
ds [instance of class DelaySpectrum] An instance of class
DelaySpectrum that contains the information on delay spectra of
simulated visibilities
f [list or numpy vector] frequency channels in Hz
lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as channels. This is
computed in member function delay_transform().
cc_lags [numpy vector] Time axis obtained when the frequency axis is
inverted using a FFT. Same size as cc_freq. This is computed in
member function delayClean().
df [scalar] Frequency resolution (in Hz)
bl [M x 3 Numpy array] The baseline vectors associated with the
M interferometers in SI units
bl_length [M-element numpy array] Lengths of the baseline in SI units
f0 [scalar] Central frequency (in Hz)
wl0 [scalar] Central wavelength (in m)
z [scalar] redshift
bw [scalar] (effective) bandwidth (in Hz)
kprll [numpy array] line-of-sight wavenumbers (in h/Mpc) corresponding
to delays in the delay spectrum
kperp [numpy array] transverse wavenumbers (in h/Mpc) corresponding
to baseline lengths
horizon_kprll_limits
[numpy array] limits on k_parallel corresponding to limits on
               horizon delays. It is of size NxMx2 denoting the negative and
positive horizon delay limits where N is the number of
timestamps, M is the number of baselines. The 0 index in the
               third dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
drz_los [scalar] comoving line-of-sight depth (Mpc/h) corresponding to
specified redshift and bandwidth for redshifted 21 cm line
rz_transverse
[scalar] comoving transverse distance (Mpc/h) corresponding to
specified redshift for redshifted 21 cm line
rz_los [scalar] comoving line-of-sight distance (Mpc/h) corresponding
to specified redshift for redshifted 21 cm line
jacobian1 [scalar] first jacobian in conversion of delay spectrum to
power spectrum. It is equal to A_eff / wl**2 / bw
jacobian2 [scalar] second jacobian in conversion of delay spectrum to
power spectrum. It is equal to rz_los**2 * drz_los / bw
Jy2K [scalar] factor to convert Jy/Sr to K. It is equal to
wl**2 * Jy / (2k)
K2Jy [scalar] factor to convert K to Jy/Sr. It is equal to 1/Jy2K
dps [dictionary of numpy arrays] contains numpy arrays containing
delay power spectrum in units of K^2 (Mpc/h)^3 under the
following keys:
'skyvis' [numpy array] delay power spectrum of noiseless
delay spectra
'vis' [numpy array] delay power spectrum of noisy delay
spectra
'noise' [numpy array] delay power spectrum of thermal noise
delay spectra
'cc_skyvis' [numpy array] delay power spectrum of clean
components of noiseless delay spectra
'cc_vis' [numpy array] delay power spectrum of clean
components of noisy delay spectra
'cc_skyvis_res'
[numpy array] delay power spectrum of residuals
after delay cleaning of noiseless delay spectra
'cc_vis_res'
[numpy array] delay power spectrum of residuals
after delay cleaning of noisy delay spectra
'cc_skyvis_net'
[numpy array] delay power spectrum of sum of
residuals and clean components
after delay cleaning of noiseless delay spectra
'cc_vis_net'
[numpy array] delay power spectrum of sum of
residuals and clean components
after delay cleaning of noisy delay spectra
subband_delay_power_spectra
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Essentially this is the power spectrum equivalent
               of the attribute subband_delay_spectra under class DelaySpectrum.
Under each of these keys is information about delay power spectra
of different frequency sub-bands (n_win in number) in the form of
a dictionary under the following keys:
'z' [numpy array] contains the redshifts corresponding to
center frequencies (in Hz) of the frequency subbands
of the subband delay spectra. It is of size n_win.
'dz' [numpy array] contains the width in redshifts
corresponding to the effective bandwidths (in Hz) of
the subbands being delay transformed. It is of size
n_win.
'kprll' [numpy array] line-of-sight k-modes (in h/Mpc)
corresponding to lags of the subband delay spectra.
It is of size n_win x (nchan+npad)
'kperp' [numpy array] transverse k-modes (in h/Mpc)
corresponding to the baseline lengths and the
center frequencies. It is of size
n_win x n_bl
horizon_kprll_limits
[numpy array] limits on k_parallel corresponding to
limits on horizon delays for each subband. It is of
                           size N x n_win x M x 2 denoting the negative and
positive horizon delay limits where N is the number
of timestamps, n_win is the number of subbands, M is
the number of baselines. The 0 index in the fourth
                           dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
'rz_los' [numpy array] Comoving distance along LOS (in Mpc/h)
corresponding to the different redshifts under key
'z'. It is of size n_win
'rz_transverse'
[numpy array] transverse comoving distance
(in Mpc/h) corresponding to the different redshifts
under key 'z'. It is of size n_win
'drz_los' [numpy array] line-of-sight comoving depth (in
Mpc/h) corresponding to the redshift widths under
key 'dz' and redshifts under key 'z'. It is of size
n_win
'jacobian1' [numpy array] first jacobian in conversion of delay
spectrum to power spectrum. It is equal to
A_eff / wl**2 / bw. It is of size n_win
'jacobian2' [numpy array] second jacobian in conversion of delay
spectrum to power spectrum. It is equal to
rz_los**2 * drz_los / bw. It is of size n_win
'Jy2K' [numpy array] factor to convert Jy/Sr to K. It is
equal to wl**2 * Jy / (2k). It is of size n_win
'factor' [numpy array] conversion factor to convert delay
spectrum (in Jy Hz) to delay power spectrum (in
K^2 (Mpc/h)^3). It is equal to
jacobian1 * jacobian2 * Jy2K**2. It is of size n_win
'skyvis_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noiseless simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'skyvis_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_lag' [numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noisy simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'vis_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_noise_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to thermal noise simulated (under top
level key 'sim') delay spectrum under key
'vis_noise_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'skyvis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_res_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_res_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'skyvis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_net_lag' in attribute
subband_delay_spectra under instance of class
DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
'vis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_net_lag' in attribute subband_delay_spectra
under instance of class DelaySpectrum. It is of size
n_bl x n_win x (nchan+npad) x n_t
subband_delay_power_spectra_resampled
[dictionary] contains two top level keys, namely, 'cc' and 'sim'
denoting information about CLEAN and simulated visibilities
respectively. Essentially this is the power spectrum equivalent
               of the attribute subband_delay_spectra_resampled under class
DelaySpectrum. Under each of these keys is information about
delay power spectra of different frequency sub-bands (n_win in
number) in the form of a dictionary under the following keys:
'kprll' [numpy array] line-of-sight k-modes (in h/Mpc)
corresponding to lags of the subband delay spectra.
It is of size n_win x nlags, where nlags is the
                           resampled number of delay bins
'kperp' [numpy array] transverse k-modes (in h/Mpc)
corresponding to the baseline lengths and the
center frequencies. It is of size
n_win x n_bl
'horizon_kprll_limits'
[numpy array] limits on k_parallel corresponding to
limits on horizon delays for each subband. It is of
size N x n_win x M x 2 denoting the negative and
positive horizon delay limits where N is the number
of timestamps, n_win is the number of subbands, M is
the number of baselines. The 0 index in the fourth
                           dimension denotes the negative horizon limit while
the 1 index denotes the positive horizon limit
'skyvis_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noiseless simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'skyvis_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_lag' [numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to noisy simulated (under top level
key 'sim') or CLEANed (under top level key 'cc')
delay spectrum under key 'vis_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_noise_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to thermal noise simulated (under top
level key 'sim') delay spectrum under key
'vis_noise_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'skyvis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_res_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_res_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to CLEAN residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_res_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'skyvis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noiseless simulated delay spectrum under
key 'skyvis_net_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
'vis_net_lag'
[numpy array] delay power spectrum (in K^2 (Mpc/h)^3)
corresponding to sum of CLEAN components and
residuals (under top level key
'cc') from noisy delay spectrum under key
'vis_net_lag' in attribute
subband_delay_spectra_resampled under instance of
class DelaySpectrum. It is of size
n_bl x n_win x nlags x n_t
Member functions:
__init__() Initialize an instance of class DelayPowerSpectrum
comoving_los_depth()
Compute comoving line-of-sight depth (Mpc/h) corresponding to
specified redshift and bandwidth for redshifted 21 cm line
comoving_transverse_distance()
Compute comoving transverse distance (Mpc/h) corresponding to
specified redshift for redshifted 21 cm line
comoving_los_distance()
Compute comoving line-of-sight distance (Mpc/h) corresponding
to specified redshift for redshifted 21 cm line
k_parallel()
Compute line-of-sight wavenumbers (h/Mpc) corresponding to
specified delays and redshift for redshifted 21 cm line
k_perp() Compute transverse wavenumbers (h/Mpc) corresponding to
specified baseline lengths and redshift for redshifted 21 cm
line assuming a mean wavelength (in m) for the relationship
between baseline lengths and spatial frequencies (u and v)
compute_power_spectrum()
Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the
delay spectrum in units of Jy Hz
compute_power_spectrum_allruns()
Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the
delay spectrum in units of Jy Hz from multiple runs of
visibilities
compute_individual_closure_phase_power_spectrum()
Compute delay power spectrum of closure phase in units of
K^2 (Mpc/h)^3 from the delay spectrum in units of Jy Hz where
the original visibility amplitudes of closure phase complex
exponents are assumed to be 1 Jy across the band
compute_averaged_closure_phase_power_spectrum()
Compute delay power spectrum of closure phase in units of
K^2 (Mpc/h)^3 from the delay spectrum in units of Jy Hz and
average over 'auto' and 'cross' modes, where the original
visibility amplitudes of closure phase complex exponents are
assumed to be 1 Jy across the band
----------------------------------------------------------------------------
"""
def __init__(self, dspec, cosmo=cosmo100):
"""
------------------------------------------------------------------------
Initialize an instance of class DelayPowerSpectrum. Attributes
initialized are: ds, cosmo, f, df, f0, z, bw, drz_los, rz_transverse,
rz_los, kprll, kperp, jacobian1, jacobian2, subband_delay_power_spectra,
subband_delay_power_spectra_resampled
Inputs:
dspec [instance of class DelaySpectrum] An instance of class
DelaySpectrum that contains the information on delay spectra of
simulated visibilities
cosmo [instance of a cosmology class in Astropy] An instance of class
FLRW or default_cosmology of astropy cosmology module. Default
value is set using concurrent cosmology but keep
H0=100 km/s/Mpc
------------------------------------------------------------------------
"""
try:
dspec
except NameError:
raise NameError('No delay spectrum instance supplied for initialization')
if not isinstance(dspec, DelaySpectrum):
raise TypeError('Input dspec must be an instance of class DelaySpectrum')
if not isinstance(cosmo, (CP.FLRW, CP.default_cosmology)):
raise TypeError('Input cosmology must be a cosmology class defined in Astropy')
self.cosmo = cosmo
self.ds = dspec
self.f = self.ds.f
self.lags = self.ds.lags
self.cc_lags = self.ds.cc_lags
self.bl = self.ds.ia.baselines
self.bl_length = self.ds.ia.baseline_lengths
self.df = self.ds.df
self.f0 = self.f[int(self.f.size/2)]
self.wl0 = FCNST.c / self.f0
self.z = CNST.rest_freq_HI / self.f0 - 1
self.bw = self.df * self.f.size
self.kprll = self.k_parallel(self.lags, redshift=self.z, action='return') # in h/Mpc
self.kperp = self.k_perp(self.bl_length, redshift=self.z, action='return') # in h/Mpc
self.horizon_kprll_limits = self.k_parallel(self.ds.horizon_delay_limits, redshift=self.z, action='return') # in h/Mpc
self.drz_los = self.comoving_los_depth(self.bw, self.z, action='return') # in Mpc/h
self.rz_transverse = self.comoving_transverse_distance(self.z, action='return') # in Mpc/h
self.rz_los = self.comoving_los_distance(self.z, action='return') # in Mpc/h
# self.jacobian1 = NP.mean(self.ds.ia.A_eff) / self.wl0**2 / self.bw
omega_bw = self.beam3Dvol(freq_wts=self.ds.bp_wts[0,:,0])
self.jacobian1 = 1 / omega_bw
# self.jacobian2 = self.rz_transverse**2 * self.drz_los / self.bw
self.jacobian2 = self.rz_los**2 * self.drz_los / self.bw
self.Jy2K = self.wl0**2 * CNST.Jy / (2*FCNST.k)
self.K2Jy = 1 / self.Jy2K
self.dps = {}
self.dps['skyvis'] = None
self.dps['vis'] = None
self.dps['noise'] = None
self.dps['cc_skyvis'] = None
self.dps['cc_vis'] = None
self.dps['cc_skyvis_res'] = None
self.dps['cc_vis_res'] = None
self.dps['cc_skyvis_net'] = None
self.dps['cc_vis_net'] = None
self.subband_delay_power_spectra = {}
self.subband_delay_power_spectra_resampled = {}
############################################################################
def comoving_los_depth(self, bw, redshift, action=None):
"""
------------------------------------------------------------------------
Compute comoving line-of-sight depth (Mpc/h) corresponding to specified
redshift and bandwidth for redshifted 21 cm line
Inputs:
bw [scalar] bandwidth in Hz
redshift [scalar] redshift
action [string] If set to None (default), the comoving depth
along the line of sight (Mpc/h) and specified reshift are
stored internally as attributes of the instance of class
DelayPowerSpectrum. If set to 'return', the comoving depth
along line of sight (Mpc/h) computed is returned
Outputs:
If keyword input action is set to 'return', the comoving depth along
line of sight (Mpc/h) computed is returned
------------------------------------------------------------------------
"""
drz_los = (FCNST.c/1e3) * bw * (1+redshift)**2 / CNST.rest_freq_HI / self.cosmo.H0.value / self.cosmo.efunc(redshift) # in Mpc/h
if action is None:
self.z = redshift
self.drz_los = drz_los
return
else:
return drz_los
############################################################################
def comoving_transverse_distance(self, redshift, action=None):
"""
------------------------------------------------------------------------
Compute comoving transverse distance (Mpc/h) corresponding to specified
redshift for redshifted 21 cm line
Inputs:
redshift [scalar] redshift
action [string] If set to None (default), the comoving
transverse distance (Mpc/h) and specified reshift are stored
internally as attributes of the instance of class
DelayPowerSpectrum. If set to 'return', the comoving
transverse distance (Mpc/h) computed is returned
Outputs:
If keyword input action is set to 'return', the comoving transverse
distance (Mpc/h) computed is returned
------------------------------------------------------------------------
"""
rz_transverse = self.cosmo.comoving_transverse_distance(redshift).to('Mpc').value # in Mpc/h
if action is None:
self.z = redshift
self.rz_transverse = rz_transverse
return
else:
return rz_transverse
############################################################################
def comoving_los_distance(self, redshift, action=None):
"""
------------------------------------------------------------------------
Compute comoving line-of-sight distance (Mpc/h) corresponding to
specified redshift for redshifted 21 cm line
Inputs:
redshift [scalar] redshift
action [string] If set to None (default), the comoving
line-of-sight distance (Mpc/h) and specified reshift are
stored internally as attributes of the instance of class
DelayPowerSpectrum. If set to 'return', the comoving
line-of-sight distance (Mpc/h) computed is returned
Outputs:
If keyword input action is set to 'return', the comoving line-of-sight
distance (Mpc/h) computed is returned
------------------------------------------------------------------------
"""
rz_los = self.cosmo.comoving_distance(redshift).to('Mpc').value # in Mpc/h
if action is None:
self.z = redshift
self.rz_los = rz_los
return
else:
return rz_los
############################################################################
def k_parallel(self, lags, redshift, action=None):
"""
------------------------------------------------------------------------
Compute line-of-sight wavenumbers (h/Mpc) corresponding to specified
delays and redshift for redshifted 21 cm line
Inputs:
lags [numpy array] geometric delays (in seconds) obtained as
Fourier conjugate variable of frequencies in the bandpass
redshift [scalar] redshift
action [string] If set to None (default), the line-of-sight
wavenumbers (h/Mpc) and specified reshift are
stored internally as attributes of the instance of class
DelayPowerSpectrum. If set to 'return', the line-of-sight
wavenumbers (h/Mpc) computed is returned
Outputs:
If keyword input action is set to 'return', the line-of-sight
wavenumbers (h/Mpc) computed is returned. It is of same size as input
lags
------------------------------------------------------------------------
"""
eta2kprll = dkprll_deta(redshift, cosmo=self.cosmo)
kprll = eta2kprll * lags
if action is None:
self.z = redshift
self.kprll = kprll
return
else:
return kprll
############################################################################
def k_perp(self, baseline_length, redshift, action=None):
"""
------------------------------------------------------------------------
Compute transverse wavenumbers (h/Mpc) corresponding to specified
baseline lengths and redshift for redshifted 21 cm line assuming a
mean wavelength (in m) for the relationship between baseline lengths and
spatial frequencies (u and v)
Inputs:
baseline_length
[numpy array] baseline lengths (in m)
redshift [scalar] redshift
action [string] If set to None (default), the transverse
wavenumbers (h/Mpc) and specified reshift are stored
internally as attributes of the instance of class
DelayPowerSpectrum. If set to 'return', the transverse
wavenumbers (h/Mpc) computed is returned
Outputs:
If keyword input action is set to 'return', the transverse
wavenumbers (h/Mpc) computed is returned
------------------------------------------------------------------------
"""
kperp = 2 * NP.pi * (baseline_length/self.wl0) / self.comoving_transverse_distance(redshift, action='return')
if action is None:
self.z = redshift
self.kperp = kperp
return
else:
return kperp
############################################################################
    def beam3Dvol(self, freq_wts=None, nside=32):
        """
        ------------------------------------------------------------------------
        Compute three-dimensional (transverse-LOS) volume of the beam in units
        of "Sr Hz".

        Inputs:

        freq_wts [numpy array] Frequency weights centered on different
                 spectral windows or redshifts. Its shape is (nwin,nchan).
                 nchan should match the number of spectral channels in the
                 class attribute for frequency channels

        nside    [integer] NSIDE parameter for determining and interpolating
                 the beam. Default=32. It may be lowered internally to match
                 the resolution of a coarser external beam.

        Output:

        omega_bw [numpy array] Integral of the square of the power pattern
                 over transverse and spectral axes. Its shape is (nwin,)

        Raises:

        ValueError for an unsupported external beam file format; ImportError
        if the 'uvbeam' format is requested but the uvbeam module is absent
        ------------------------------------------------------------------------
        """
        # If a simulation parameters file is available, reconstruct the beam
        # that was used in the simulation from its YAML parameters; otherwise
        # fall back to the telescope's analytic primary beam model
        if self.ds.ia.simparms_file is not None:
            parms_file = open(self.ds.ia.simparms_file, 'r')
            parms = yaml.safe_load(parms_file)
            parms_file.close()
            # sky_nside = parms['fgparm']['nside']
            beam_info = parms['beam']
            use_external_beam = beam_info['use_external']
            beam_chromaticity = beam_info['chromatic']
            select_beam_freq = beam_info['select_freq']
            if select_beam_freq is None:
                select_beam_freq = self.f0
            # HEALPix angular grid on which the beam is evaluated
            theta, phi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
            theta_phi = NP.hstack((theta.reshape(-1,1), phi.reshape(-1,1)))
            if use_external_beam:
                # Load an externally supplied beam (FITS / HDF5 / UVBeam)
                beam_file = beam_info['file']
                if beam_info['filefmt'].lower() in ['hdf5', 'fits', 'uvbeam']:
                    beam_filefmt = beam_info['filefmt'].lower()
                else:
                    raise ValueError('Invalid beam file format specified')
                if beam_info['filepathtype'] == 'default':
                    beam_file = prisim_path+'data/beams/' + beam_file
                beam_pol = beam_info['pol']
                beam_id = beam_info['identifier']
                pbeam_spec_interp_method = beam_info['spec_interp']
                if beam_filefmt == 'fits':
                    extbeam = fits.getdata(beam_file, extname='BEAM_{0}'.format(beam_pol))
                    beam_freqs = fits.getdata(beam_file, extname='FREQS_{0}'.format(beam_pol))
                    extbeam = extbeam.reshape(-1,beam_freqs.size) # npix x nfreqs
                    prihdr = fits.getheader(beam_file, 0)
                    beamunit = prihdr['GAINUNIT']
                elif beam_filefmt.lower() == 'hdf5':
                    with h5py.File(beam_file, 'r') as fileobj:
                        extbeam = fileobj['gain_info'][beam_pol].value
                        extbeam = extbeam.T
                        beam_freqs = fileobj['spectral_info']['freqs'].value
                        beamunit = fileobj['header']['gainunit'].value
                elif beam_filefmt == 'uvbeam':
                    if uvbeam_module_found:
                        uvbm = UVBeam()
                        uvbm.read_beamfits(beam_file)
                        axis_vec_ind = 0 # for power beam
                        spw_ind = 0 # spectral window index
                        if beam_pol.lower() in ['x', 'e']:
                            beam_pol_ind = 0
                        else:
                            beam_pol_ind = 1
                        extbeam = uvbm.data_array[axis_vec_ind,spw_ind,beam_pol_ind,:,:].T # npix x nfreqs
                        beam_freqs = uvbm.freq_array.ravel() # nfreqs (in Hz)
                    else:
                        raise ImportError('uvbeam module not installed/found')
                    # Normalize the UVBeam power pattern to a peak of unity
                    if NP.abs(NP.abs(extbeam).max() - 1.0) > 1e-10:
                        extbeam /= NP.abs(extbeam).max()
                    beamunit = ''
                else:
                    raise ValueError('Specified external beam file format not currently supported')
                if beamunit.lower() == 'db':
                    # Convert dB gains to linear power
                    extbeam = 10**(extbeam/10.0)
                beam_nside = HP.npix2nside(extbeam.shape[0])
                # Never interpolate the beam finer than its native resolution
                if beam_nside < nside:
                    nside = beam_nside
                if beam_chromaticity:
                    # 'fft' interpolation needs an even number of channels;
                    # drop the last one
                    if pbeam_spec_interp_method == 'fft':
                        extbeam = extbeam[:,:-1]
                        beam_freqs = beam_freqs[:-1]
                    # Interpolate log10(beam) onto the instance frequency axis
                    interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(extbeam), theta_phi=theta_phi, inloc_axis=beam_freqs, outloc_axis=self.f, axis=1, kind=pbeam_spec_interp_method, assume_sorted=True)
                else:
                    # Achromatic beam: replicate the channel nearest to the
                    # selected frequency across the whole band
                    nearest_freq_ind = NP.argmin(NP.abs(beam_freqs - select_beam_freq))
                    interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(NP.repeat(extbeam[:,nearest_freq_ind].reshape(-1,1), self.f.size, axis=1)), theta_phi=theta_phi, inloc_axis=self.f, outloc_axis=self.f, axis=1, assume_sorted=True)
                # Re-normalize per channel so the interpolated beam peaks at 1
                interp_logbeam_max = NP.nanmax(interp_logbeam, axis=0)
                interp_logbeam_max[interp_logbeam_max <= 0.0] = 0.0
                interp_logbeam_max = interp_logbeam_max.reshape(1,-1)
                interp_logbeam = interp_logbeam - interp_logbeam_max
                beam = 10**interp_logbeam
            else:
                # Analytic primary beam evaluated on the HEALPix grid in altaz
                alt = 90.0 - NP.degrees(theta)
                az = NP.degrees(phi)
                altaz = NP.hstack((alt.reshape(-1,1), az.reshape(-1,1)))
                if beam_chromaticity:
                    beam = PB.primary_beam_generator(altaz, self.f, self.ds.ia.telescope, freq_scale='Hz', skyunits='altaz', east2ax1=0.0, pointing_info=None, pointing_center=None)
                else:
                    beam = PB.primary_beam_generator(altaz, select_beam_freq, self.ds.ia.telescope, skyunits='altaz', pointing_info=None, pointing_center=None, freq_scale='Hz', east2ax1=0.0)
                    # Replicate the single-frequency beam across all channels
                    beam = beam.reshape(-1,1) * NP.ones(self.f.size).reshape(1,-1)
        else:
            # No simulation parameters file: use the telescope's analytic
            # chromatic primary beam model
            theta, phi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
            alt = 90.0 - NP.degrees(theta)
            az = NP.degrees(phi)
            altaz = NP.hstack((alt.reshape(-1,1), az.reshape(-1,1)))
            beam = PB.primary_beam_generator(altaz, self.f, self.ds.ia.telescope, freq_scale='Hz', skyunits='altaz', east2ax1=0.0, pointing_info=None, pointing_center=None)
        # Module-level beam3Dvol() (not this method): integrates the squared
        # power pattern over the hemisphere and the weighted spectral axis
        omega_bw = beam3Dvol(beam, self.f, freq_wts=freq_wts, hemisphere=True)
        return omega_bw
############################################################################
    def compute_power_spectrum(self):
        """
        ------------------------------------------------------------------------
        Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the delay
        spectrum in units of Jy Hz.

        Populates the attribute dps with full-band delay power spectra, and,
        when the underlying DelaySpectrum instance carries subband delay
        spectra, also populates subband_delay_power_spectra and
        subband_delay_power_spectra_resampled. Delay power spectrum is
        |delay spectrum|**2 scaled by jacobian1 * jacobian2 * Jy2K**2.
        ------------------------------------------------------------------------
        """
        self.dps = {}
        # Full-band conversion factor from (Jy Hz)^2 to K^2 (Mpc/h)^3
        factor = self.jacobian1 * self.jacobian2 * self.Jy2K**2
        if self.ds.skyvis_lag is not None: self.dps['skyvis'] = NP.abs(self.ds.skyvis_lag)**2 * factor
        if self.ds.vis_lag is not None: self.dps['vis'] = NP.abs(self.ds.vis_lag)**2 * factor
        if self.ds.vis_noise_lag is not None: self.dps['noise'] = NP.abs(self.ds.vis_noise_lag)**2 * factor
        # CLEAN-derived delay power spectra, present only after delayClean()
        if self.ds.cc_lags is not None:
            if self.ds.cc_skyvis_lag is not None: self.dps['cc_skyvis'] = NP.abs(self.ds.cc_skyvis_lag)**2 * factor
            if self.ds.cc_vis_lag is not None: self.dps['cc_vis'] = NP.abs(self.ds.cc_vis_lag)**2 * factor
            if self.ds.cc_skyvis_res_lag is not None: self.dps['cc_skyvis_res'] = NP.abs(self.ds.cc_skyvis_res_lag)**2 * factor
            if self.ds.cc_vis_res_lag is not None: self.dps['cc_vis_res'] = NP.abs(self.ds.cc_vis_res_lag)**2 * factor
            if self.ds.cc_skyvis_net_lag is not None: self.dps['cc_skyvis_net'] = NP.abs(self.ds.cc_skyvis_net_lag)**2 * factor
            if self.ds.cc_vis_net_lag is not None: self.dps['cc_vis_net'] = NP.abs(self.ds.cc_vis_net_lag)**2 * factor
        # Subband delay power spectra: one entry per top-level key ('cc'/'sim')
        if self.ds.subband_delay_spectra:
            for key in self.ds.subband_delay_spectra:
                self.subband_delay_power_spectra[key] = {}
                # Center wavelengths of the subbands (m)
                wl = FCNST.c / self.ds.subband_delay_spectra[key]['freq_center']
                # Redshifts and redshift widths of the subbands
                self.subband_delay_power_spectra[key]['z'] = CNST.rest_freq_HI / self.ds.subband_delay_spectra[key]['freq_center'] - 1
                self.subband_delay_power_spectra[key]['dz'] = CNST.rest_freq_HI / self.ds.subband_delay_spectra[key]['freq_center']**2 * self.ds.subband_delay_spectra[key]['bw_eff']
                # Per-subband k-modes and horizon limits
                kprll = NP.empty((self.ds.subband_delay_spectra[key]['freq_center'].size, self.ds.subband_delay_spectra[key]['lags'].size))
                kperp = NP.empty((self.ds.subband_delay_spectra[key]['freq_center'].size, self.bl_length.size))
                horizon_kprll_limits = NP.empty((self.ds.n_acc, self.ds.subband_delay_spectra[key]['freq_center'].size, self.bl_length.size, 2))
                for zind,z in enumerate(self.subband_delay_power_spectra[key]['z']):
                    kprll[zind,:] = self.k_parallel(self.ds.subband_delay_spectra[key]['lags'], z, action='return')
                    kperp[zind,:] = self.k_perp(self.bl_length, z, action='return')
                    horizon_kprll_limits[:,zind,:,:] = self.k_parallel(self.ds.horizon_delay_limits, z, action='return')
                self.subband_delay_power_spectra[key]['kprll'] = kprll
                self.subband_delay_power_spectra[key]['kperp'] = kperp
                self.subband_delay_power_spectra[key]['horizon_kprll_limits'] = horizon_kprll_limits
                # Per-subband comoving distances and depths
                self.subband_delay_power_spectra[key]['rz_los'] = self.cosmo.comoving_distance(self.subband_delay_power_spectra[key]['z']).to('Mpc').value # in Mpc/h
                self.subband_delay_power_spectra[key]['rz_transverse'] = self.comoving_transverse_distance(self.subband_delay_power_spectra[key]['z'], action='return') # in Mpc/h
                self.subband_delay_power_spectra[key]['drz_los'] = self.comoving_los_depth(self.ds.subband_delay_spectra[key]['bw_eff'], self.subband_delay_power_spectra[key]['z'], action='return')
                # self.subband_delay_power_spectra[key]['jacobian1'] = NP.mean(self.ds.ia.A_eff) / wl**2 / self.ds.subband_delay_spectra[key]['bw_eff']
                # jacobian1 = inverse of the subband 3D beam volume (Sr Hz)
                omega_bw = self.beam3Dvol(freq_wts=self.ds.subband_delay_spectra[key]['freq_wts'])
                self.subband_delay_power_spectra[key]['jacobian1'] = 1 / omega_bw
                # self.subband_delay_power_spectra[key]['jacobian2'] = self.subband_delay_power_spectra[key]['rz_transverse']**2 * self.subband_delay_power_spectra[key]['drz_los'] / self.ds.subband_delay_spectra[key]['bw_eff']
                self.subband_delay_power_spectra[key]['jacobian2'] = self.subband_delay_power_spectra[key]['rz_los']**2 * self.subband_delay_power_spectra[key]['drz_los'] / self.ds.subband_delay_spectra[key]['bw_eff']
                self.subband_delay_power_spectra[key]['Jy2K'] = wl**2 * CNST.Jy / (2*FCNST.k)
                self.subband_delay_power_spectra[key]['factor'] = self.subband_delay_power_spectra[key]['jacobian1'] * self.subband_delay_power_spectra[key]['jacobian2'] * self.subband_delay_power_spectra[key]['Jy2K']**2
                # Reshape per-subband factor for broadcasting over
                # n_bl x n_win x nlags x n_t delay spectra
                conversion_factor = self.subband_delay_power_spectra[key]['factor'].reshape(1,-1,1,1)
                self.subband_delay_power_spectra[key]['skyvis_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra[key]['vis_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_lag'])**2 * conversion_factor
                if key == 'cc':
                    # CLEAN products carry residual and net spectra
                    self.subband_delay_power_spectra[key]['skyvis_res_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_res_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra[key]['vis_res_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_res_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra[key]['skyvis_net_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['skyvis_net_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra[key]['vis_net_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_net_lag'])**2 * conversion_factor
                else:
                    # Simulated products carry a thermal noise spectrum
                    self.subband_delay_power_spectra[key]['vis_noise_lag'] = NP.abs(self.ds.subband_delay_spectra[key]['vis_noise_lag'])**2 * conversion_factor
        # Resampled subband delay power spectra; NOTE(review): relies on
        # subband_delay_power_spectra[key] ('z', 'factor') computed above --
        # assumes resampled subbands match the unresampled ones; confirm
        if self.ds.subband_delay_spectra_resampled:
            for key in self.ds.subband_delay_spectra_resampled:
                self.subband_delay_power_spectra_resampled[key] = {}
                kprll = NP.empty((self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.ds.subband_delay_spectra_resampled[key]['lags'].size))
                kperp = NP.empty((self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.bl_length.size))
                horizon_kprll_limits = NP.empty((self.ds.n_acc, self.ds.subband_delay_spectra_resampled[key]['freq_center'].size, self.bl_length.size, 2))
                for zind,z in enumerate(self.subband_delay_power_spectra[key]['z']):
                    kprll[zind,:] = self.k_parallel(self.ds.subband_delay_spectra_resampled[key]['lags'], z, action='return')
                    kperp[zind,:] = self.k_perp(self.bl_length, z, action='return')
                    horizon_kprll_limits[:,zind,:,:] = self.k_parallel(self.ds.horizon_delay_limits, z, action='return')
                self.subband_delay_power_spectra_resampled[key]['kprll'] = kprll
                self.subband_delay_power_spectra_resampled[key]['kperp'] = kperp
                self.subband_delay_power_spectra_resampled[key]['horizon_kprll_limits'] = horizon_kprll_limits
                conversion_factor = self.subband_delay_power_spectra[key]['factor'].reshape(1,-1,1,1)
                self.subband_delay_power_spectra_resampled[key]['skyvis_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_lag'])**2 * conversion_factor
                self.subband_delay_power_spectra_resampled[key]['vis_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_lag'])**2 * conversion_factor
                if key == 'cc':
                    self.subband_delay_power_spectra_resampled[key]['skyvis_res_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_res_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra_resampled[key]['vis_res_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_res_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra_resampled[key]['skyvis_net_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['skyvis_net_lag'])**2 * conversion_factor
                    self.subband_delay_power_spectra_resampled[key]['vis_net_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_net_lag'])**2 * conversion_factor
                else:
                    self.subband_delay_power_spectra_resampled[key]['vis_noise_lag'] = NP.abs(self.ds.subband_delay_spectra_resampled[key]['vis_noise_lag'])**2 * conversion_factor
############################################################################
def compute_power_spectrum_allruns(self, dspec, subband=False):
"""
------------------------------------------------------------------------
Compute delay power spectrum in units of K^2 (Mpc/h)^3 from the delay
spectrum in units of Jy Hz from multiple runs of visibilities
Inputs:
dspec [dictionary] Delay spectrum information. If subband is set to
False, it contains the keys 'vislag1' and maybe 'vislag2'
(optional). If subband is set to True, it must contain these
keys as well - 'lags', 'freq_center', 'bw_eff', 'freq_wts' as
well. The value under these keys are described below:
'vislag1' [numpy array] subband delay spectra of first set of
visibilities. It is of size
n_win x (n1xn2x... n_runs dims) x n_bl x nlags x n_t
if subband is set to True or of shape
(n1xn2x... n_runs dims) x n_bl x nlags x n_t if
subband is set to False
It must be specified independent of subband value
'vislag2' [numpy array] subband delay spectra of second set of
visibilities (optional). If not specified, value
under key 'vislag1' is copied under this key and
auto-delay spectrum is computed. If explicitly
specified, it must be of same shape as value under
'vislag1' and cross-delay spectrum will be computed.
It is of size
n_win x (n1xn2x... n_runs dims) x n_bl x nlags x n_t
if subband is set to True or of shape
(n1xn2x... n_runs dims) x n_bl x nlags x n_t if
subband is set to False. It is applicable
independent of value of input subband
'lags' [numpy array] Contains the lags in the delay
spectrum. Applicable only if subband is set to True.
It is of size nlags
'freq_center'
[numpy array] frequency centers (in Hz) of the
selected frequency windows for subband delay
transform of visibilities. The values can be a
scalar, list or numpy array. Applicable only if
subband is set to True. It is of size n_win
'bw_eff' [scalar, list or numpy array] effective bandwidths
(in Hz) on the selected frequency windows for
subband delay transform of visibilities. The values
can be a scalar, list or numpy array. Applicable
only if subband is set to True. It is of size n_win
'freq_wts' [numpy array] Contains frequency weights applied
on each frequency sub-band during the subband delay
transform. It is of size n_win x nchan. Applicable
only if subband is set to True.
subband [boolean] If set to False (default), the entire band is used in
determining the delay power spectrum and only value
under key 'vislag1' and optional key 'vislag2' in
input dspec is required. If set to True, delay pwoer
spectrum in specified subbands is determined. In
addition to key 'vislag1' and optional key 'vislag2',
following keys are also required in input dictionary
dspec, namely, 'lags', 'freq_center', 'bw_eff',
'freq_wts'
Output:
Dictionary containing delay power spectrum (in units of K^2 (Mpc/h)^3)
of shape (n1xn2x... n_runs dims) x n_bl x nlags x n_t under key
'fullband' if subband is set to False or of shape
n_win x (n1xn2x... n_runs dims) x n_bl x nlags x n_t under key 'subband'
if subband is set to True.
------------------------------------------------------------------------
"""
try:
dspec
except NameError:
raise NameError('Input dspec must be specified')
if not isinstance(dspec, dict):
raise TypeError('Input dspec must be a dictionary')
else:
mode = 'auto'
if 'vislag1' not in dspec:
raise KeyError('Key "vislag1" not found in input dspec')
if not isinstance(dspec['vislag1'], NP.ndarray):
raise TypeError('Value under key "vislag1" must be a numpy array')
if 'vislag2' not in dspec:
dspec['vislag2'] = dspec['vislag1']
else:
mode = 'cross'
if not isinstance(dspec['vislag2'], NP.ndarray):
raise TypeError('Value under key "vislag2" must be a numpy array')
if dspec['vislag1'].shape != dspec['vislag2'].shape:
raise ValueError('Value under keys "vislag1" and "vislag2" must have same shape')
if not isinstance(subband, bool):
raise TypeError('Input subband must be boolean')
dps = {}
if not subband:
factor = self.jacobian1 * self.jacobian2 * self.Jy2K**2 # scalar
factor = factor.reshape(tuple(NP.ones(dspec['vislag1'].ndim, dtype=NP.int)))
key = 'fullband'
else:
dspec['freq_center'] = NP.asarray(dspec['freq_center']).ravel() # n_win
dspec['bw_eff'] = NP.asarray(dspec['bw_eff']).ravel() # n_win
wl = FCNST.c / dspec['freq_center'] # n_win
redshift = CNST.rest_freq_HI / dspec['freq_center'] - 1 # n_win
dz = CNST.rest_freq_HI / dspec['freq_center']**2 * dspec['bw_eff'] # n_win
kprll = NP.empty((dspec['freq_center'].size, dspec['lags'].size)) # n_win x nlags
kperp = NP.empty((dspec['freq_center'].size, self.bl_length.size)) # n_win x nbl
for zind,z in enumerate(redshift):
kprll[zind,:] = self.k_parallel(dspec['lags'], z, action='return')
kperp[zind,:] = self.k_perp(self.bl_length, z, action='return')
rz_los = self.cosmo.comoving_distance(redshift).to('Mpc').value
rz_transverse = self.comoving_transverse_distance(redshift, action='return') # n_win
drz_los = self.comoving_los_depth(dspec['bw_eff'], redshift, action='return') # n_win
omega_bw = self.beam3Dvol(freq_wts=NP.squeeze(dspec['freq_wts']))
jacobian1 = 1 / omega_bw # n_win
# jacobian2 = rz_transverse**2 * drz_los / dspec['bw_eff'] # n_win
jacobian2 = rz_los**2 * drz_los / dspec['bw_eff'] # n_win
Jy2K = wl**2 * CNST.Jy / (2*FCNST.k) # n_win
factor = jacobian1 * jacobian2 * Jy2K**2 # n_win
factor = factor.reshape((-1,)+tuple(NP.ones(dspec['vislag1'].ndim-1, dtype=NP.int)))
key = 'subband'
dps[key] = dspec['vislag1'] * dspec['vislag2'].conj() * factor
dps[key] = dps[key].real
if mode == 'cross':
dps[key] *= 2
return dps
############################################################################
def compute_individual_closure_phase_power_spectrum(self, closure_phase_delay_spectra):
"""
------------------------------------------------------------------------
Compute delay power spectrum of closure phase in units of Mpc/h from the
delay spectrum in units of Hz
Inputs:
closure_phase_delay_spectra
[dictionary] contains information about closure phase delay spectra of
different frequency sub-bands (n_win in number) under the following
keys:
'antenna_triplets'
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. Closure phase delay spectra in
subbands is computed for each of these antenna triplets
'baseline_triplets'
[numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'. Closure phase delay
spectra in subbands is computed for each of these baseline
triplets which correspond to the antenna triplets
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
'closure_phase_skyvis' (optional)
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x n_win x
nlags x n_t. It must be in units of Hz.
'closure_phase_vis' (optional)
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x n_win x nlags x n_t.
It must be in units of Hz.
'closure_phase_noise' (optional)
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x n_win x nlags x n_t. It must be
in units of Hz.
Output:
Dictionary with closure phase delay power spectra containing the
following keys and values:
'z' [numpy array] Redshifts corresponding to the centers of the
frequency subbands. Same size as number of values under key
'freq_center' which is n_win
'kprll' [numpy array] k_parallel (h/Mpc) for different subbands and
various delays. It is of size n_win x nlags
'kperp' [numpy array] k_perp (h/Mpc) for different subbands and the
antenna/baseline triplets. It is of size n_win x n_triplets
x 3 x 3 where the 3 x 3 refers to 3 different baselines and
3 components of the baseline vector respectively
'horizon_kprll_limits'
[numpy array] limits on k_parallel corresponding to limits
on horizon delays for each of the baseline triplets and
subbands. It is of shape n_t x n_win x n_triplets x 3 x 2,
where 3 is for the three baselines involved in the triplet,
2 limits (upper and lower). It has units of h/Mpc
'closure_phase_skyvis'
[numpy array] subband delay power spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x n_win x
nlags x n_t. It is in units of Mpc/h. This is returned if
this key is present in the input closure_phase_delay_spectra
'closure_phase_vis'
[numpy array] subband delay power spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x n_win x nlags x n_t.
It is in units of Mpc/h. This is returned if this key is
present in the input closure_phase_delay_spectra
'closure_phase_noise'
[numpy array] subband delay power spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x n_win x nlags x n_t. It is in
units of Mpc/h. This is returned if this key is present in
the input closure_phase_delay_spectra
------------------------------------------------------------------------
"""
try:
closure_phase_delay_spectra
except NameError:
raise NameError('Input closure_phase_delay_spectra must be provided')
closure_phase_delay_power_spectra = {}
wl = FCNST.c / closure_phase_delay_spectra['freq_center']
z = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center'] - 1
dz = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center']**2 * closure_phase_delay_spectra['bw_eff']
kprll = NP.empty((closure_phase_delay_spectra['freq_center'].size, closure_phase_delay_spectra['lags'].size))
kperp = NP.empty((closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3)) # n_win x n_triplets x 3, where 3 is for the three baselines involved
horizon_kprll_limits = NP.empty((self.ds.n_acc, closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3, 2)) # n_t x n_win x n_triplets x 3 x 2, where 3 is for the three baselines involved
for zind,redshift in enumerate(z):
kprll[zind,:] = self.k_parallel(closure_phase_delay_spectra['lags'], redshift, action='return')
for triplet_ind, ant_triplet in enumerate(closure_phase_delay_spectra['antenna_triplets']):
bl_lengths = NP.sqrt(NP.sum(closure_phase_delay_spectra['baseline_triplets'][triplet_ind]**2, axis=1))
kperp[zind,triplet_ind,:] = self.k_perp(bl_lengths, redshift, action='return')
horizon_delay_limits = bl_lengths.reshape(1,-1,1) / FCNST.c # 1x3x1, where 1 phase center, 3 is for the three baselines involved in the triplet, 1 upper limit
horizon_delay_limits = NP.concatenate((horizon_delay_limits, -horizon_delay_limits), axis=2) # 1x3x2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
horizon_kprll_limits[:,zind,triplet_ind,:,:] = self.k_parallel(horizon_delay_limits, redshift, action='return') # 1 x n_win x n_triplets x 3 x 2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
closure_phase_delay_power_spectra['z'] = z
closure_phase_delay_power_spectra['kprll'] = kprll
closure_phase_delay_power_spectra['kperp'] = kperp
closure_phase_delay_power_spectra['horizon_kprll_limits'] = horizon_kprll_limits
# rz_transverse = self.comoving_transverse_distance(closure_phase_delay_power_spectra['z'], action='return')
drz_los = self.comoving_los_depth(closure_phase_delay_spectra['bw_eff'], closure_phase_delay_power_spectra['z'], action='return')
# omega_bw = self.beam3Dvol(freq_wts=closure_phase_delay_spectra['freq_wts'])
# jacobian1 = 1 / omega_bw
# jacobian2 = rz_transverse**2 * drz_los / closure_phase_delay_spectra['bw_eff']
# Jy2K = wl**2 * CNST.Jy / (2*FCNST.k)
jacobian1 = 1 / closure_phase_delay_spectra['bw_eff']
jacobian2 = drz_los / closure_phase_delay_spectra['bw_eff']
factor = jacobian1 * jacobian2
conversion_factor = factor.reshape(1,-1,1,1)
for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
if key in closure_phase_delay_spectra:
closure_phase_delay_power_spectra[key] = NP.abs(closure_phase_delay_spectra[key])**2 * conversion_factor
return closure_phase_delay_power_spectra
############################################################################
def compute_averaged_closure_phase_power_spectrum(self, closure_phase_delay_spectra):
"""
------------------------------------------------------------------------
Compute delay power spectrum of closure phase in units of Mpc/h from the
delay spectrum in units of Jy Hz and average over 'auto' and 'cross'
modes
Inputs:
closure_phase_delay_spectra
[dictionary] contains information about closure phase delay spectra of
different frequency sub-bands (n_win in number) under the following
keys:
'antenna_triplets'
[list of tuples] List of antenna ID triplets where each
triplet is given as a tuple. Closure phase delay spectra in
subbands is computed for each of these antenna triplets
'baseline_triplets'
[numpy array] List of 3x3 numpy arrays. Each 3x3
unit in the list represents triplets of baseline
vectors where the three rows denote the three
baselines in the triplet and the three columns
define the x-, y- and z-components of the
triplet. The number of 3x3 unit elements in the
list will equal the number of elements in the
list under key 'antenna_triplets'. Closure phase delay
spectra in subbands is computed for each of these baseline
triplets which correspond to the antenna triplets
'freq_center'
[numpy array] contains the center frequencies
(in Hz) of the frequency subbands of the subband
delay spectra. It is of size n_win. It is roughly
equivalent to redshift(s)
'bw_eff' [numpy array] contains the effective bandwidths
(in Hz) of the subbands being delay transformed. It
is of size n_win. It is roughly equivalent to width
in redshift or along line-of-sight
'lags' [numpy array] lags of the resampled subband delay spectra
after padding in frequency during the transform. It
is of size nlags where nlags is the number of
independent delay bins
'lag_kernel'
[numpy array] delay transform of the frequency
weights under the key 'freq_wts'. It is of size
n_bl x n_win x nlags x n_t.
'lag_corr_length'
[numpy array] It is the correlation timescale (in
pixels) of the resampled subband delay spectra. It is
proportional to inverse of effective bandwidth. It
is of size n_win. The unit size of a pixel is
determined by the difference between adjacent pixels
in lags under key 'lags' which in turn is
effectively inverse of the effective bandwidth
'closure_phase_skyvis' (optional)
[numpy array] subband delay spectra of closure phases
of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x n_win x
nlags x n_t. It must be in units of Hz.
'closure_phase_vis' (optional)
[numpy array] subband delay spectra of closure phases
of noisy sky visiblities from the specified antenna
triplets. It is of size n_triplets x n_win x nlags x n_t.
It must be in units of Hz.
'closure_phase_noise' (optional)
[numpy array] subband delay spectra of closure phases
of noise visiblities from the specified antenna triplets.
It is of size n_triplets x n_win x nlags x n_t. It must be
in units of Hz.
Output:
Dictionary with closure phase delay power spectra containing the
following keys and values:
'z' [numpy array] Redshifts corresponding to the centers of the
frequency subbands. Same size as number of values under key
'freq_center' which is n_win
'kprll' [numpy array] k_parallel (h/Mpc) for different subbands and
various delays. It is of size n_win x nlags
'kperp' [numpy array] k_perp (h/Mpc) for different subbands and the
antenna/baseline triplets. It is of size n_win x n_triplets
x 3 x 3 where the 3 x 3 refers to 3 different baselines and
3 components of the baseline vector respectively
'horizon_kprll_limits'
[numpy array] limits on k_parallel corresponding to limits
on horizon delays for each of the baseline triplets and
subbands. It is of shape n_t x n_win x n_triplets x 3 x 2,
where 3 is for the three baselines involved in the triplet,
2 limits (upper and lower). It has units of h/Mpc
'auto' [dictionary] average of diagonal terms in the power spectrum
matrix with possibly the following keys and values:
'closure_phase_skyvis'
[numpy array] subband delay power spectra of closure
phases of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x n_win x
nlags x n_t. It is in units of Mpc/h. This is returned
if this key is present in the input
closure_phase_delay_spectra
'closure_phase_vis'
[numpy array] subband delay power spectra of closure
phases of noisy sky visiblities from the specified
antenna triplets. It is of size
1 x n_win x nlags x n_t. It is in units of Mpc/h. This
is returned if this key is present in the input
closure_phase_delay_spectra
'closure_phase_noise'
[numpy array] subband delay power spectra of closure
phases of noise visiblities from the specified antenna
triplets. It is of size 1 x n_win x nlags x n_t. It is
in units of Mpc/h. This is returned if this key is
present in the input closure_phase_delay_spectra
'cross' [dictionary] average of off-diagonal terms in the power
spectrum matrix with possibly the following keys and values:
'closure_phase_skyvis'
[numpy array] subband delay power spectra of closure
phases of noiseless sky visiblities from the specified
antenna triplets. It is of size n_triplets x n_win x
nlags x n_t. It is in units of Mpc/h. This is returned
if this key is present in the input
closure_phase_delay_spectra
'closure_phase_vis'
[numpy array] subband delay power spectra of closure
phases of noisy sky visiblities from the specified
antenna triplets. It is of size
1 x n_win x nlags x n_t. It is in units of Mpc/h. This
is returned if this key is present
in the input closure_phase_delay_spectra
'closure_phase_noise'
[numpy array] subband delay power spectra of closure
phases of noise visiblities from the specified antenna
triplets. It is of size 1 x n_win x nlags x n_t. It is
in units of Mpc/h. This is returned if this key is
present in the input closure_phase_delay_spectra
------------------------------------------------------------------------
"""
try:
closure_phase_delay_spectra
except NameError:
raise NameError('Input closure_phase_delay_spectra must be provided')
closure_phase_delay_power_spectra = {}
wl = FCNST.c / closure_phase_delay_spectra['freq_center']
z = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center'] - 1
dz = CNST.rest_freq_HI / closure_phase_delay_spectra['freq_center']**2 * closure_phase_delay_spectra['bw_eff']
kprll = NP.empty((closure_phase_delay_spectra['freq_center'].size, closure_phase_delay_spectra['lags'].size))
kperp = NP.empty((closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3)) # n_win x n_triplets x 3, where 3 is for the three baselines involved
horizon_kprll_limits = NP.empty((self.ds.n_acc, closure_phase_delay_spectra['freq_center'].size, len(closure_phase_delay_spectra['antenna_triplets']), 3, 2)) # n_t x n_win x n_triplets x 3 x 2, where 3 is for the three baselines involved
for zind,redshift in enumerate(z):
kprll[zind,:] = self.k_parallel(closure_phase_delay_spectra['lags'], redshift, action='return')
for triplet_ind, ant_triplet in enumerate(closure_phase_delay_spectra['antenna_triplets']):
bl_lengths = NP.sqrt(NP.sum(closure_phase_delay_spectra['baseline_triplets'][triplet_ind]**2, axis=1))
kperp[zind,triplet_ind,:] = self.k_perp(bl_lengths, redshift, action='return')
horizon_delay_limits = bl_lengths.reshape(1,-1,1) / FCNST.c # 1x3x1, where 1 phase center, 3 is for the three baselines involved in the triplet, 1 upper limit
horizon_delay_limits = NP.concatenate((horizon_delay_limits, -horizon_delay_limits), axis=2) # 1x3x2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
horizon_kprll_limits[:,zind,triplet_ind,:,:] = self.k_parallel(horizon_delay_limits, redshift, action='return') # 1 x n_win x n_triplets x 3 x 2, where 1 phase center, 3 is for the three baselines involved in the triplet, 2 limits (upper and lower)
closure_phase_delay_power_spectra['z'] = z
closure_phase_delay_power_spectra['kprll'] = kprll
closure_phase_delay_power_spectra['kperp'] = kperp
closure_phase_delay_power_spectra['horizon_kprll_limits'] = horizon_kprll_limits
# rz_transverse = self.comoving_transverse_distance(closure_phase_delay_power_spectra['z'], action='return')
drz_los = self.comoving_los_depth(closure_phase_delay_spectra['bw_eff'], closure_phase_delay_power_spectra['z'], action='return')
# omega_bw = self.beam3Dvol(freq_wts=closure_phase_delay_spectra['freq_wts'])
# jacobian1 = 1 / omega_bw
# jacobian2 = rz_transverse**2 * drz_los / closure_phase_delay_spectra['bw_eff']
# Jy2K = wl**2 * CNST.Jy / (2*FCNST.k)
jacobian1 = 1 / closure_phase_delay_spectra['bw_eff']
jacobian2 = drz_los / closure_phase_delay_spectra['bw_eff']
factor = jacobian1 * jacobian2
for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
if key in closure_phase_delay_spectra:
ndim_shape = NP.ones(closure_phase_delay_spectra[key].ndim, dtype=int)
ndim_shape[-3] = -1
ndim_shape = tuple(ndim_shape)
conversion_factor = factor.reshape(ndim_shape)
for mode in ['auto', 'cross']:
closure_phase_delay_power_spectra[mode] = {}
for key in ['closure_phase_skyvis', 'closure_phase_vis', 'closure_phase_noise']:
if key in closure_phase_delay_spectra:
nruns = closure_phase_delay_spectra[key].shape[0]
if mode == 'auto':
closure_phase_delay_power_spectra[mode][key] = NP.mean(NP.abs(closure_phase_delay_spectra[key])**2, axis=0, keepdims=True) * conversion_factor
else:
closure_phase_delay_power_spectra[mode][key] = 1.0 / (nruns*(nruns-1)) * (conversion_factor * NP.abs(NP.sum(closure_phase_delay_spectra[key], axis=0, keepdims=True))**2 - nruns * closure_phase_delay_power_spectra['auto'][key])
return closure_phase_delay_power_spectra
############################################################################
| 265,227 | 57.368838 | 341 |
py
|
PRISim
|
PRISim-master/prisim/examples/codes/BispectrumPhase/combine_pol_multiday_closure_PS_analysis.py
|
import copy
import numpy as NP
import matplotlib.pyplot as PLT
import matplotlib.colors as PLTC
import matplotlib.ticker as PLTick
import yaml, argparse, warnings
import progressbar as PGB
from prisim import bispectrum_phase as BSP
import ipdb as PDB
PLT.switch_backend("TkAgg")
if __name__ == '__main__':
## Parse input arguments
parser = argparse.ArgumentParser(description='Program to analyze closure phases from multiple days from multiple sources such as polarizations')
input_group = parser.add_argument_group('Input parameters', 'Input specifications')
input_group.add_argument('-i', '--infile', dest='infile', default='/data3/t_nithyanandan/codes/mine/python/projects/closure/combine_pol_multiday_EQ28_data_RA_1.6_closure_PS_analysis_parms.yaml', type=str, required=False, help='File specifying input parameters')
args = vars(parser.parse_args())
with open(args['infile'], 'r') as parms_file:
parms = yaml.safe_load(parms_file)
datadirs = parms['dirStruct']['datadirs']
infiles_a = parms['dirStruct']['infiles_a']
infiles_a_errinfo = parms['dirStruct']['err_infiles_a']
infiles_b = parms['dirStruct']['infiles_b']
infiles_b_errinfo = parms['dirStruct']['err_infiles_b']
model_labels = parms['dirStruct']['modelinfo']['model_labels']
mdldirs = parms['dirStruct']['modelinfo']['mdldirs']
mdl_infiles_a = parms['dirStruct']['modelinfo']['infiles_a']
mdl_infiles_a_errinfo = parms['dirStruct']['modelinfo']['err_infiles_a']
mdl_infiles_b = parms['dirStruct']['modelinfo']['infiles_b']
mdl_infiles_b_errinfo = parms['dirStruct']['modelinfo']['err_infiles_b']
outdir = parms['dirStruct']['outdir']
figdir = outdir + parms['dirStruct']['figdir']
plotfile_pfx = parms['dirStruct']['plotfile_pfx']
xcpdps_a = []
excpdps_a = []
xcpdps_b = []
excpdps_b = []
for fileind,indir in enumerate(datadirs):
infile_a = indir + infiles_a[fileind]
infile_a_errinfo = indir + infiles_a_errinfo[fileind]
infile_b = indir + infiles_b[fileind]
infile_b_errinfo = indir + infiles_b_errinfo[fileind]
xcpdps_a += [BSP.read_CPhase_cross_power_spectrum(infile_a)]
excpdps_a += [BSP.read_CPhase_cross_power_spectrum(infile_a_errinfo)]
xcpdps_b += [BSP.read_CPhase_cross_power_spectrum(infile_b)]
excpdps_b += [BSP.read_CPhase_cross_power_spectrum(infile_b_errinfo)]
xcpdps_a_avg_pol, excpdps_a_avg_pol = BSP.incoherent_cross_power_spectrum_average(xcpdps_a, excpdps=excpdps_a, diagoffsets=None)
xcpdps_b_avg_pol, excpdps_b_avg_pol = BSP.incoherent_cross_power_spectrum_average(xcpdps_b, excpdps=excpdps_b, diagoffsets=None)
models_xcpdps_a_avg_pol = []
models_excpdps_a_avg_pol = []
models_xcpdps_b_avg_pol = []
models_excpdps_b_avg_pol = []
for mdlind, model in enumerate(model_labels):
mdl_xcpdps_a = []
mdl_excpdps_a = []
mdl_xcpdps_b = []
mdl_excpdps_b = []
for fileind,mdldir in enumerate(mdldirs[mdlind]):
mdl_infile_a = mdldir + mdl_infiles_a[mdlind][fileind]
mdl_infile_a_errinfo = mdldir + mdl_infiles_a_errinfo[mdlind][fileind]
mdl_infile_b = mdldir + mdl_infiles_b[mdlind][fileind]
mdl_infile_b_errinfo = mdldir + mdl_infiles_b_errinfo[mdlind][fileind]
mdl_xcpdps_a += [BSP.read_CPhase_cross_power_spectrum(mdl_infile_a)]
mdl_excpdps_a += [BSP.read_CPhase_cross_power_spectrum(mdl_infile_a_errinfo)]
mdl_xcpdps_b += [BSP.read_CPhase_cross_power_spectrum(mdl_infile_b)]
mdl_excpdps_b += [BSP.read_CPhase_cross_power_spectrum(mdl_infile_b_errinfo)]
mdl_xcpdps_a_avg_pol, mdl_excpdps_a_avg_pol = BSP.incoherent_cross_power_spectrum_average(mdl_xcpdps_a, excpdps=mdl_excpdps_a, diagoffsets=None)
models_xcpdps_a_avg_pol += [mdl_xcpdps_a_avg_pol]
models_excpdps_a_avg_pol += [mdl_excpdps_a_avg_pol]
mdl_xcpdps_b_avg_pol, mdl_excpdps_b_avg_pol = BSP.incoherent_cross_power_spectrum_average(mdl_xcpdps_b, excpdps=mdl_excpdps_b, diagoffsets=None)
models_xcpdps_b_avg_pol += [mdl_xcpdps_b_avg_pol]
models_excpdps_b_avg_pol += [mdl_excpdps_b_avg_pol]
plot_info = parms['plot']
plots = [key for key in plot_info if plot_info[key]['action']]
PLT.ion()
if ('2' in plots) or ('2a' in plots) or ('2b' in plots) or ('2c' in plots) or ('2d' in plots):
sampling = plot_info['2']['sampling']
statistic = plot_info['2']['statistic']
datapool = plot_info['2']['datapool']
pspec_unit_type = plot_info['2']['units']
if pspec_unit_type == 'K':
pspec_unit = 'mK2 Mpc3'
else:
pspec_unit = 'Jy2 Mpc'
spw = plot_info['2']['spw']
if spw is None:
spwind = NP.arange(xcpdps2_a[sampling]['z'].size)
else:
spwind = NP.asarray(spw)
if statistic is None:
statistic = ['mean', 'median']
else:
statistic = [statistic]
ps_errtype = plot_info['2']['errtype']
errshade = {}
for errtype in ps_errtype:
if errtype.lower() == 'ssdiff':
errshade[errtype] = '0.8'
elif errtype.lower() == 'psdiff':
errshade[errtype] = '0.6'
nsigma = plot_info['2']['nsigma']
mdl_colrs = ['red', 'green', 'blue', 'cyan', 'gray', 'orange']
if ('2c' in plots) or ('2d' in plots):
avg_incohax_a = plot_info['2c']['incohax_a']
diagoffsets_incohax_a = plot_info['2c']['diagoffsets_a']
diagoffsets_a = []
avg_incohax_b = plot_info['2c']['incohax_b']
diagoffsets_incohax_b = plot_info['2c']['diagoffsets_b']
diagoffsets_b = []
for combi,incax_comb in enumerate(avg_incohax_a):
diagoffsets_a += [{}]
for incaxind,incax in enumerate(incax_comb):
diagoffsets_a[-1][incax] = NP.asarray(diagoffsets_incohax_a[combi][incaxind])
xcpdps_a_avg_pol_diag, excpdps_a_avg_pol_diag = BSP.incoherent_cross_power_spectrum_average(xcpdps_a_avg_pol, excpdps=excpdps_a_avg_pol, diagoffsets=diagoffsets_a)
models_xcpdps_a_avg_pol_diag = []
models_excpdps_a_avg_pol_diag = []
for combi,incax_comb in enumerate(avg_incohax_b):
diagoffsets_b += [{}]
for incaxind,incax in enumerate(incax_comb):
diagoffsets_b[-1][incax] = NP.asarray(diagoffsets_incohax_b[combi][incaxind])
xcpdps_b_avg_pol_diag, excpdps_b_avg_pol_diag = BSP.incoherent_cross_power_spectrum_average(xcpdps_b_avg_pol, excpdps=excpdps_b_avg_pol, diagoffsets=diagoffsets_b)
models_xcpdps_b_avg_pol_diag = []
models_excpdps_b_avg_pol_diag = []
if len(model_labels) > 0:
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Models '.format(len(model_labels)), PGB.ETA()], maxval=len(model_labels)).start()
for i in range(len(model_labels)):
model_xcpdps_a_avg_pol_diag, model_excpdps_a_avg_pol_diag = BSP.incoherent_cross_power_spectrum_average(models_xcpdps_a_avg_pol[i], excpdps=models_excpdps_a_avg_pol[i], diagoffsets=diagoffsets_a)
models_xcpdps_a_avg_pol_diag += [copy.deepcopy(model_xcpdps_a_avg_pol_diag)]
models_excpdps_a_avg_pol_diag += [copy.deepcopy(model_excpdps_a_avg_pol_diag)]
model_xcpdps_b_avg_pol_diag, model_excpdps_b_avg_pol_diag = BSP.incoherent_cross_power_spectrum_average(models_xcpdps_b_avg_pol[i], excpdps=models_excpdps_b_avg_pol[i], diagoffsets=diagoffsets_b)
models_xcpdps_b_avg_pol_diag += [copy.deepcopy(model_xcpdps_b_avg_pol_diag)]
models_excpdps_b_avg_pol_diag += [copy.deepcopy(model_excpdps_b_avg_pol_diag)]
progress.update(i+1)
progress.finish()
if '2c' in plots:
lstind = [0]
triadind = [0]
dayind = [0]
dayind_models = NP.zeros(len(model_labels), dtype=int).reshape(1,-1)
for stat in statistic:
for zind in spwind:
for lind in lstind:
for di,dind in enumerate(dayind):
for combi in range(len(diagoffsets_b)):
maxabsvals = []
minabsvals = []
maxvals = []
minvals = []
fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
if len(datapool) == 1:
axs = [axs]
for dpoolind,dpool in enumerate(datapool):
for trno,trind in enumerate(triadind):
# if len(model_labels) > 0:
# for mdlind, mdl in enumerate(model_labels):
# if dpool in models_xcpdps_b_avg_pol_diag[mdlind][sampling]:
# psval = (1/3.0) * models_xcpdps_b_avg_pol_diag[mdlind][sampling][dpool][stat][combi][zind,lind,dayind_models[di][mdlind],trind,:].to(pspec_unit).value
# maxabsvals += [NP.abs(psval.real).max()]
# minabsvals += [NP.abs(psval.real).min()]
# maxvals += [psval.real.max()]
# minvals += [psval.real.min()]
# axs[dpoolind].plot(models_xcpdps_b_avg_pol_diag[mdlind][sampling]['kprll'][zind,:], psval.real, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
if dpool in xcpdps_b_avg_pol_diag[sampling]:
psval = (2/3.0) * xcpdps_b_avg_pol_diag[sampling][dpool][stat][combi][zind,lind,dind,trind,:].to(pspec_unit).value
psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps_a_avg_pol_diag[sampling]['errinfo'][stat][combi][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
if 2 in avg_incohax_b[combi]:
ind_dayax_in_incohax = avg_incohax_b[combi].index(2)
if 0 in diagoffsets_incohax_b[combi][ind_dayax_in_incohax]:
rms_inflation_factor = 2.0 * NP.sqrt(2.0)
else:
rms_inflation_factor = NP.sqrt(2.0)
else:
rms_inflation_factor = NP.sqrt(2.0)
psrms_psdiff = (2/3.0) * (xcpdps_a_avg_pol_diag[sampling][dpool][stat][combi][zind,lind,1,1,trind,:] - xcpdps_a_avg_pol_diag[sampling][dpool][stat][combi][zind,lind,0,0,trind,:]).to(pspec_unit).value
psrms_psdiff = NP.abs(psrms_psdiff.real) / rms_inflation_factor
psrms_max = NP.amax(NP.vstack((psrms_ssdiff, psrms_psdiff)), axis=0)
maxabsvals += [NP.abs(psval.real + nsigma*psrms_max).max()]
minabsvals += [NP.abs(psval.real).min()]
maxvals += [(psval.real + nsigma*psrms_max).max()]
minvals += [(psval.real - nsigma*psrms_max).min()]
for errtype in ps_errtype:
if errtype.lower() == 'ssdiff':
axs[dpoolind].errorbar(xcpdps_b_avg_pol_diag[sampling]['kprll'][zind,:], psval.real, yerr=nsigma*psrms_ssdiff, xerr=None, ecolor=errshade[errtype], ls='none', marker='.', ms=4, color='black')
elif errtype.lower() == 'psdiff':
axs[dpoolind].errorbar(xcpdps_b_avg_pol_diag[sampling]['kprll'][zind,:], psval.real, yerr=nsigma*psrms_psdiff, xerr=None, ecolor=errshade[errtype], ls='none', marker='.', ms=4, color='black', label='FG+N')
# legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
if trno == 0:
axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps_b_avg_pol_diag[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
axt = axs[dpoolind].twiny()
axt.set_xlim(1e6*xcpdps_b_avg_pol_diag[sampling]['lags'].min(), 1e6*xcpdps_b_avg_pol_diag[sampling]['lags'].max())
axs[dpoolind].axhline(y=0, xmin=0, xmax=1, ls='-', lw=1, color='black')
minvals = NP.asarray(minvals)
maxvals = NP.asarray(maxvals)
minabsvals = NP.asarray(minabsvals)
maxabsvals = NP.asarray(maxabsvals)
axs[dpoolind].set_xlim(0.99*xcpdps_b_avg_pol_diag[sampling]['kprll'][zind,:].min(), 1.01*xcpdps_b_avg_pol_diag[sampling]['kprll'][zind,:].max())
if NP.min(minvals) < 0.0:
axs[dpoolind].set_ylim(1.5*NP.min(minvals), 2*NP.max(maxabsvals))
else:
axs[dpoolind].set_ylim(0.5*NP.min(minvals), 2*NP.max(maxabsvals))
axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
axs[dpoolind].yaxis.set_major_locator(tickloc)
axs[dpoolind].grid(color='0.9', which='both', linestyle=':', lw=1)
fig.subplots_adjust(top=0.85)
fig.subplots_adjust(bottom=0.16)
fig.subplots_adjust(left=0.22)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
# big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$\kappa_\parallel$'+' [pseudo '+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
if pspec_unit_type == 'K':
big_ax.set_ylabel(r'$\frac{2}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo mK$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
else:
big_ax.set_ylabel(r'$\frac{2}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=40)
big_axt = big_ax.twiny()
big_axt.set_xticks([])
big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
PLT.savefig(figdir + '{0}_symlog_incoh_avg_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_comb_{5:0d}.pdf'.format(plotfile_pfx, xcpdps_b_avg_pol_diag[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps_b_avg_pol_diag['dlst'][0], combi), bbox_inches=0)
PDB.set_trace()
if '2d' in plots:
kbin_min = plot_info['2d']['kbin_min']
kbin_max = plot_info['2d']['kbin_max']
num_kbins = plot_info['2d']['num_kbins']
kbintype = plot_info['2d']['kbintype']
if (kbin_min is None) or (kbin_max is None):
kbins = None
else:
if num_kbins is None:
raise ValueError('Input num_kbins must be set if kbin range is provided')
if kbintype == 'linear':
kbins = NP.linspace(kbin_min, kbin_max, num=num_kbins, endpoint=True)
elif kbintype == 'log':
if kbin_min > 0.0:
kbins = NP.geomspace(kbin_min, kbin_max, num=num_kbins, endpoint=True)
elif kbin_min == 0.0:
eps_k = 1e-3
kbins = NP.geomspace(kbin_min+eps_k, kbin_max, num=num_kbins, endpoint=True)
else:
eps_k = 1e-3
kbins_pos = NP.geomspace(eps_k, kbin_max, num=num_kbins, endpoint=True)
ind_kbin_thresh = NP.argmin(kbins_pos[kbins_pos >= NP.abs(kbin_min)])
kbins_neg = -1 * kbins_pos[:ind_kbin_thresh+1][::-1]
kbins = NP.hstack((kbins_neg, kbins_pos))
else:
raise ValueError('Input kbintype must be set to "linear" or "log"')
xcpdps_a_avg_pol_diag_kbin = BSP.incoherent_kbin_averaging(xcpdps_a_avg_pol_diag, kbins=kbins, kbintype=kbintype)
excpdps_a_avg_pol_diag_kbin = BSP.incoherent_kbin_averaging(excpdps_a_avg_pol_diag, kbins=kbins, kbintype=kbintype)
models_xcpdps_a_avg_pol_diag_kbin = []
models_excpdps_a_avg_pol_diag_kbin = []
xcpdps_b_avg_pol_diag_kbin = BSP.incoherent_kbin_averaging(xcpdps_b_avg_pol_diag, kbins=kbins, kbintype=kbintype)
excpdps_b_avg_pol_diag_kbin = BSP.incoherent_kbin_averaging(excpdps_b_avg_pol_diag, kbins=kbins, kbintype=kbintype)
models_xcpdps_b_avg_pol_diag_kbin = []
models_excpdps_b_avg_pol_diag_kbin = []
if len(model_labels) > 0:
for i in range(len(model_labels)):
models_xcpdps_a_avg_pol_diag_kbin += [BSP.incoherent_kbin_averaging(models_xcpdps_a_avg_pol_diag[i], kbins=kbins, kbintype=kbintype)]
models_excpdps_a_avg_pol_diag_kbin += [BSP.incoherent_kbin_averaging(models_excpdps_a_avg_pol_diag[i], kbins=kbins, kbintype=kbintype)]
models_xcpdps_b_avg_pol_diag_kbin += [BSP.incoherent_kbin_averaging(models_xcpdps_b_avg_pol_diag[i], kbins=kbins, kbintype=kbintype)]
models_excpdps_b_avg_pol_diag_kbin += [BSP.incoherent_kbin_averaging(models_excpdps_b_avg_pol_diag[i], kbins=kbins, kbintype=kbintype)]
lstind = [0]
triadind = [0]
dayind = [0]
dayind_models = NP.zeros(len(model_labels), dtype=int).reshape(1,-1)
for stat in statistic:
for zind in spwind:
for lind in lstind:
for di,dind in enumerate(dayind):
for pstype in ['PS', 'Del2']:
for combi in range(len(diagoffsets_b)):
maxabsvals = []
minabsvals = []
maxvals = []
minvals = []
if pstype == 'Del2':
fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 6.0))
else:
fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
if len(datapool) == 1:
axs = [axs]
for dpoolind,dpool in enumerate(datapool):
for trno,trind in enumerate(triadind):
if pstype == 'Del2':
if len(model_labels) > 0:
for mdlind, mdl in enumerate(model_labels):
if dpool in models_xcpdps_b_avg_pol_diag_kbin[mdlind][sampling]:
if pstype == 'PS':
psval = (2/3.0) * models_xcpdps_b_avg_pol_diag_kbin[mdlind][sampling][dpool][stat][pstype][combi][zind,lind,dayind_models[di][mdlind],trind,:].to(pspec_unit).value
else:
psval = (2/3.0) * models_xcpdps_b_avg_pol_diag_kbin[mdlind][sampling][dpool][stat][pstype][combi][zind,lind,dayind_models[di][mdlind],trind,:].to('mK2').value
kval = models_xcpdps_b_avg_pol_diag_kbin[mdlind][sampling]['kbininfo'][dpool][stat][combi][zind,lind,dayind_models[di][mdlind],trind,:].to('Mpc-1').value
maxabsvals += [NP.nanmin(NP.abs(psval.real))]
minabsvals += [NP.nanmin(NP.abs(psval.real))]
maxvals += [NP.nanmax(psval.real)]
minvals += [NP.nanmin(psval.real)]
axs[dpoolind].plot(kval, psval.real, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
if dpool in xcpdps_b_avg_pol_diag_kbin[sampling]:
if pstype == 'PS':
psval = (2/3.0) * xcpdps_b_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,dind,trind,:].to(pspec_unit).value
psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps_b_avg_pol_diag_kbin[sampling]['errinfo'][stat][pstype][combi][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
psrms_psdiff = (2/3.0) * (xcpdps_a_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,1,1,trind,:] - xcpdps_a_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,0,0,trind,:]).to(pspec_unit).value
else:
psval = (2/3.0) * xcpdps_b_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,dind,trind,:].to('mK2').value
psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps_b_avg_pol_diag_kbin[sampling]['errinfo'][stat][pstype][combi][zind,lind,:,trind,:], axis=0).to('mK2').value
psrms_psdiff = (2/3.0) * (xcpdps_a_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,1,1,trind,:] - xcpdps_a_avg_pol_diag_kbin[sampling][dpool][stat][pstype][combi][zind,lind,0,0,trind,:]).to('mK2').value
if 2 in avg_incohax_b[combi]:
ind_dayax_in_incohax = avg_incohax_b[combi].index(2)
if 0 in diagoffsets_incohax_b[combi][ind_dayax_in_incohax]:
rms_inflation_factor = 2.0 * NP.sqrt(2.0)
else:
rms_inflation_factor = NP.sqrt(2.0)
else:
rms_inflation_factor = NP.sqrt(2.0)
psrms_psdiff = NP.abs(psrms_psdiff.real) / rms_inflation_factor
psrms_max = NP.amax(NP.vstack((psrms_ssdiff, psrms_psdiff)), axis=0)
kval = xcpdps_b_avg_pol_diag_kbin[sampling]['kbininfo'][dpool][stat][combi][zind,lind,dind,trind,:].to('Mpc-1').value
maxabsvals += [NP.nanmax(NP.abs(psval.real + nsigma*psrms_max.real))]
minabsvals += [NP.nanmin(NP.abs(psval.real))]
maxvals += [NP.nanmax(psval.real + nsigma*psrms_max.real)]
minvals += [NP.nanmin(psval.real - nsigma*psrms_max.real)]
for errtype in ps_errtype:
if errtype.lower() == 'ssdiff':
axs[dpoolind].errorbar(kval, psval.real, yerr=nsigma*psrms_ssdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black')
elif errtype.lower() == 'psdiff':
axs[dpoolind].errorbar(kval, psval.real, yerr=nsigma*psrms_psdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black', label='Data')
if pstype == 'Del2':
legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
if trno == 0:
axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps_b_avg_pol_diag_kbin['resampled']['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
axs[dpoolind].axhline(y=0, xmin=0, xmax=1, ls='-', lw=1, color='black')
minvals = NP.asarray(minvals)
maxvals = NP.asarray(maxvals)
minabsvals = NP.asarray(minabsvals)
maxabsvals = NP.asarray(maxabsvals)
axs[dpoolind].set_xlim(0.99*NP.nanmin(xcpdps_b_avg_pol_diag_kbin['resampled']['kbininfo']['kbin_edges'][zind].to('Mpc-1').value), 1.01*NP.nanmax(xcpdps_b_avg_pol_diag_kbin['resampled']['kbininfo']['kbin_edges'][zind].to('Mpc-1').value))
if NP.min(minvals) < 0.0:
axs[dpoolind].set_ylim(1.5*NP.nanmin(minvals), 2*NP.nanmax(maxabsvals))
else:
axs[dpoolind].set_ylim(0.5*NP.nanmin(minvals), 2*NP.nanmax(maxabsvals))
axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
axs[dpoolind].yaxis.set_major_locator(tickloc)
axs[dpoolind].grid(color='0.8', which='both', linestyle=':', lw=1)
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(bottom=0.16)
fig.subplots_adjust(left=0.22)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
# big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$\kappa_\parallel$'+' [pseudo '+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
if pstype == 'PS':
big_ax.set_ylabel(r'$\frac{2}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo mK$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
else:
big_ax.set_ylabel(r'$\frac{2}{3}\, \Delta_\nabla^2(\kappa_\parallel)$ [pseudo mK$^2$]', fontsize=12, weight='medium', labelpad=40)
# big_axt = big_ax.twiny()
# big_axt.set_xticks([])
# big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
if pstype == 'PS':
PLT.savefig(figdir + '{0}_symlog_incoh_kbin_avg_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_comb_{5:0d}.pdf'.format(plotfile_pfx, xcpdps_a_avg_pol_diag_kbin[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps_b_avg_pol_diag_kbin['dlst'][0], combi), bbox_inches=0)
else:
PLT.savefig(figdir + '{0}_symlog_incoh_kbin_avg_real_cpDel2_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_comb_{5:0d}.pdf'.format(plotfile_pfx, xcpdps_a_avg_pol_diag_kbin[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps_b_avg_pol_diag_kbin['dlst'][0], combi), bbox_inches=0)
PDB.set_trace()
| 32,485 | 74.37355 | 306 |
# ---- extraction artifact (dataset metadata, not part of the script) ----
# language: py; repo: PRISim
# file: PRISim-master/prisim/examples/codes/BispectrumPhase/multiday_closure_PS_analysis.py
# ------------------------------------------------------------------------
import copy, glob
import progressbar as PGB
import numpy as NP
import numpy.ma as MA
from scipy import interpolate
import matplotlib.pyplot as PLT
import matplotlib.colors as PLTC
import matplotlib.ticker as PLTick
import yaml, argparse, warnings
from astropy.io import ascii
import astropy.units as U
import astropy.constants as FCNST
import astropy.cosmology as cosmology
from astroutils import DSP_modules as DSP
from astroutils import constants as CNST
from astroutils import mathops as OPS
from astroutils import nonmathops as NMO
from astroutils import lookup_operations as LKP
import astroutils
import prisim
from prisim import interferometry as RI
from prisim import bispectrum_phase as BSP
from prisim import delay_spectrum as DS
import ipdb as PDB
PLT.switch_backend("TkAgg")
cosmoPlanck15 = cosmology.Planck15 # Planck 2015 cosmology
cosmo100 = cosmoPlanck15.clone(name='Modified Planck 2015 cosmology with h=1.0', H0=100.0) # Modified Planck 2015 cosmology with h=1.0, H= 100 km/s/Mpc
print('AstroUtils git # {0}\nPrisim git # {1}'.format(astroutils.__githash__, prisim.__githash__))
if __name__ == '__main__':
## Parse input arguments
parser = argparse.ArgumentParser(description='Program to analyze closure phases from multiple days')
input_group = parser.add_argument_group('Input parameters', 'Input specifications')
input_group.add_argument('-i', '--infile', dest='infile', default='/data3/t_nithyanandan/codes/mine/python/projects/closure/multiday_closure_PS_analysis_parms.yaml', type=str, required=False, help='File specifying input parameters')
args = vars(parser.parse_args())
with open(args['infile'], 'r') as parms_file:
parms = yaml.safe_load(parms_file)
projectdir = parms['dirStruct']['projectdir']
datadir = projectdir + parms['dirStruct']['datadir']
figdir = datadir + parms['dirStruct']['figdir']
modelsdir = parms['dirStruct']['modeldir']
infiles = parms['dirStruct']['infiles']
visfile = parms['dirStruct']['visfile']
visfiletype = parms['dirStruct']['visfiletype']
hdf5_infile = parms['dirStruct']['hdf5_infile']
model_hdf5files = parms['dirStruct']['model_hdf5files']
model_labels = parms['dirStruct']['model_labels']
telescope_parms = parms['telescope']
site_latitude = telescope_parms['latitude']
site_longitude = telescope_parms['longitude']
preprocessinfo = parms['preProcessing']
preprocess = preprocessinfo['action']
flagchans = preprocessinfo['flagchans']
if flagchans is not None:
flagchans = NP.asarray(preprocessinfo['flagchans']).reshape(-1)
flagants = preprocessinfo['flagants']
if flagants is not None:
flagants = NP.asarray(preprocessinfo['flagants']).reshape(-1)
daybinsize = preprocessinfo['daybinsize']
ndaybins = preprocessinfo['ndaybins']
lstbinsize = preprocessinfo['lstbinsize']
band_center = preprocessinfo['band_center']
freq_resolution = preprocessinfo['freq_resolution']
mdl_ndaybins = preprocessinfo['mdl_ndaybins']
dspecinfo = parms['delaySpectrum']
subbandinfo = dspecinfo['subband']
freq_window_centers = NP.asarray(subbandinfo['freq_center'])
freq_window_bw = NP.asarray(subbandinfo['bw_eff'])
freq_window_shape = subbandinfo['shape']
freq_window_fftpow = subbandinfo['fftpow']
pad = dspecinfo['pad']
apply_flags = dspecinfo['applyflags']
if apply_flags:
applyflags_str = 'Y'
else:
applyflags_str = 'N'
bl = NP.asarray(dspecinfo['bl'])
if bl.shape[0] != 3:
raise ValueError('Input bl must be made of three vectors forming the triad')
bltol = dspecinfo['bltol']
infile = infiles[0]
infile_no_ext = hdf5_infile.split('.hdf5')[0]
# visdata = NP.load(visfile)
if visfile is None:
visinfo = None
else:
if visfiletype == 'hdf5':
visinfo = NMO.load_dict_from_hdf5(visfile+'.hdf5')
blind, blrefind, dbl = LKP.find_1NN(visinfo['baseline']['blvect'], bl, distance_ULIM=bltol, remove_oob=True)
if blrefind.size != 3:
blind_missing = NP.setdiff1d(NP.arange(3), blind, assume_unique=True)
blind_next, blrefind_next, dbl_next = LKP.find_1NN(visinfo['baseline']['blvect'], -1*bl[blind_missing,:], distance_ULIM=bltol, remove_oob=True)
if blind_next.size + blind.size != 3:
raise ValueError('Exactly three baselines were not found in the reference baselines')
else:
blind = NP.append(blind, blind_missing[blind_next])
blrefind = NP.append(blrefind, blrefind_next)
else:
blind_missing = []
vistriad = MA.array(visinfo['vis_real'][blrefind,:,:] + 1j * visinfo['vis_imag'][blrefind,:,:], mask=visinfo['mask'][blrefind,:,:])
if len(blind_missing) > 0:
vistriad[-blrefind_next.size:,:,:] = vistriad[-blrefind_next.size:,:,:].conj()
else:
visinfo = RI.InterferometerArray(None, None, None, init_file=visfile)
tmpnpzdata = NP.load(datadir+infile)
nchan = tmpnpzdata['flags'].shape[-1]
freqs = band_center + freq_resolution * (NP.arange(nchan) - int(0.5*nchan))
# cpinfo2 = BSP.loadnpz(datadir+infile)
cpObj = BSP.ClosurePhase(datadir+hdf5_infile, freqs, infmt='hdf5')
cpObj.smooth_in_tbins(daybinsize=daybinsize, ndaybins=ndaybins, lstbinsize=lstbinsize)
cpObj.subtract(NP.zeros(1024))
cpObj.subsample_differencing(daybinsize=None, ndaybins=4, lstbinsize=lstbinsize)
cpDSobj = BSP.ClosurePhaseDelaySpectrum(cpObj)
if visinfo is not None:
if visfiletype == 'hdf5':
visscaleinfo = {'vis': vistriad, 'lst': visinfo['header']['LST'], 'smoothinfo': {'op_type': 'interp1d', 'interp_kind': 'linear'}}
else:
visscaleinfo = {'vis': visinfo, 'bltriplet': bl, 'smoothinfo': {'op_type': 'interp1d', 'interp_kind': 'linear'}}
else:
visscaleinfo = None
cpds = cpDSobj.FT(freq_window_bw, freq_center=freq_window_centers, shape=freq_window_shape, fftpow=freq_window_fftpow, pad=pad, datapool='prelim', visscaleinfo=visscaleinfo, method='fft', resample=True, apply_flags=apply_flags)
model_cpObjs = []
if model_hdf5files is not None:
for i in range(len(model_hdf5files)):
mdl_infile_no_ext = model_hdf5files[i].split('.hdf5')[0]
model_cpObj = BSP.ClosurePhase(modelsdir+model_hdf5files[i], freqs, infmt='hdf5')
model_cpObj.smooth_in_tbins(daybinsize=daybinsize, ndaybins=mdl_ndaybins[i], lstbinsize=lstbinsize)
model_cpObj.subsample_differencing(daybinsize=None, ndaybins=4, lstbinsize=lstbinsize)
model_cpObj.subtract(NP.zeros(1024))
model_cpObjs += [copy.deepcopy(model_cpObj)]
plot_info = parms['plot']
plots = [key for key in plot_info if plot_info[key]['action']]
PLT.ion()
if ('1' in plots) or ('1a' in plots) or ('1b' in plots) or ('1c' in plots) or ('1d' in plots):
triads = map(tuple, cpDSobj.cPhase.cpinfo['raw']['triads'])
ntriads = len(triads)
lst = cpDSobj.cPhase.cpinfo['raw']['lst']
ntimes = lst.size
tbins = cpDSobj.cPhase.cpinfo['processed']['prelim']['lstbins']
ntbins = tbins.size
dlst = lst[1] - lst[0]
dtbins = cpDSobj.cPhase.cpinfo['processed']['prelim']['dlstbins']
flags = cpDSobj.cPhase.cpinfo['raw']['flags']
wts_raw = cpDSobj.cPhase.cpinfo['processed']['native']['wts'].data
wts_proc = cpDSobj.cPhase.cpinfo['processed']['prelim']['wts'].data
freq_wts = cpds['freq_wts']
if '1a' in plots:
triad = tuple(plot_info['1a']['triad'])
triad_ind = triads.index(triad)
fig = PLT.figure(figsize=(4,2.8))
ax = fig.add_subplot(111)
ax.imshow(wts_raw[triad_ind,0,:,:].T, origin='lower', extent=[1e-6*freqs.min(), 1e-6*freqs.max(), lst.min(), lst.max()+NP.mean(dlst)], vmin=wts_raw.min(), vmax=wts_raw.max(), interpolation='none', cmap='gray')
ax.text(0.5, 0.97, '({0[0]:0d}, {0[1]:0d}, {0[2]:0d})'.format(triad), transform=ax.transAxes, fontsize=12, weight='semibold', ha='center', va='top', color='red')
ax.set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
ax.set_ylim(lst.min(), lst.max()+NP.mean(dlst))
ax.set_aspect('auto')
ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium')
ax.set_ylabel('LST [hours]', fontsize=12, weight='medium')
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(left=0.2)
fig.subplots_adjust(bottom=0.2)
fig.subplots_adjust(right=0.98)
PLT.savefig(figdir + '{0}_time_frequency_flags_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.png'.format(infile_no_ext, triad), bbox_inches=0)
PLT.savefig(figdir + '{0}_time_frequency_flags_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.eps'.format(infile_no_ext, triad), bbox_inches=0)
fig = PLT.figure(figsize=(4,2.8))
ax = fig.add_subplot(111)
wtsimg = ax.imshow(wts_proc[:,0,triad_ind,:], origin='lower', extent=[1e-6*freqs.min(), 1e-6*freqs.max(), tbins.min(), tbins.max()+NP.mean(dtbins)], vmin=wts_proc.min(), vmax=wts_proc.max(), interpolation='none', cmap='gray')
ax.text(0.5, 0.97, '({0[0]:0d}, {0[1]:0d}, {0[2]:0d})'.format(triad), transform=ax.transAxes, fontsize=12, weight='semibold', ha='center', va='top', color='red')
ax.set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
ax.set_ylim(tbins.min(), tbins.max()+NP.mean(dtbins))
ax.set_aspect('auto')
ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium')
ax.set_ylabel('LST [hours]', fontsize=12, weight='medium')
cbax = fig.add_axes([0.86, 0.2, 0.02, 0.75])
cbar = fig.colorbar(wtsimg, cax=cbax, orientation='vertical')
cbax.yaxis.tick_right()
# cbax.yaxis.set_label_position('right')
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(left=0.2)
fig.subplots_adjust(bottom=0.2)
fig.subplots_adjust(right=0.85)
PLT.savefig(figdir + '{0}_time_frequency_wts_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.png'.format(infile_no_ext, triad), bbox_inches=0)
PLT.savefig(figdir + '{0}_time_frequency_wts_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.eps'.format(infile_no_ext, triad), bbox_inches=0)
if '1b' in plots:
triad = tuple(plot_info['1b']['triad'])
triad_ind = triads.index(triad)
net_wts_raw = wts_raw[:,0,triad_ind,:][NP.newaxis,:,:] * freq_wts[:,NP.newaxis,:] # nspw x nlst x nchan
net_wts_proc = wts_proc[:,0,triad_ind,:][NP.newaxis,:,:] * freq_wts[:,NP.newaxis,:] # nspw x nlst x nchan
# net_wts_raw = wts_raw[triad_ind,0,:,:][NP.newaxis,:,:] * freq_wts[:,:,NP.newaxis]
# net_wts_proc = wts_proc[triad_ind,0,:,:][NP.newaxis,:,:] * freq_wts[:,:,NP.newaxis]
nrow = freq_wts.shape[0]
fig, axs = PLT.subplots(nrows=nrow, sharex=True, sharey=True, figsize=(3.5,6))
for axind in range(len(axs)):
wtsimg = axs[axind].imshow(net_wts_proc[axind,:,:], origin='lower', extent=[1e-6*freqs.min(), 1e-6*freqs.max(), tbins.min(), tbins.max()+NP.mean(dtbins)], norm=PLTC.LogNorm(vmin=1e-6, vmax=net_wts_proc.max()), interpolation='none', cmap='binary')
if axind == 0:
axs[axind].text(0.97, 0.97, '({0[0]:0d}, {0[1]:0d}, {0[2]:0d})'.format(triad), transform=axs[axind].transAxes, fontsize=12, weight='semibold', ha='right', va='top', color='red')
axs[axind].set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
axs[axind].set_ylim(tbins.min(), tbins.max()+NP.mean(dtbins))
axs[axind].set_aspect('auto')
fig.subplots_adjust(hspace=0, wspace=0)
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(left=0.2)
fig.subplots_adjust(bottom=0.12)
fig.subplots_adjust(right=0.85)
cbax = fig.add_axes([0.86, 0.12, 0.02, 0.3])
cbar = fig.colorbar(wtsimg, cax=cbax, orientation='vertical')
cbax.yaxis.tick_right()
big_ax = fig.add_subplot(111)
# big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium', labelpad=20)
big_ax.set_ylabel('LST [seconds]', fontsize=12, weight='medium', labelpad=35)
PLT.savefig(figdir + '{0}_time_frequency_netwts_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.png'.format(infile_no_ext, triad), bbox_inches=0)
PLT.savefig(figdir + '{0}_time_frequency_netwts_triad_{1[0]:0d}_{1[1]:0d}_{1[2]:0d}.eps'.format(infile_no_ext, triad), bbox_inches=0)
if '1c' in plots:
ncol = 5
nrow = min(6, int(NP.ceil(1.0*ntriads/ncol)))
npages = int(NP.ceil(1.0 * ntriads / (nrow*ncol)))
for pagei in range(npages):
if pagei > 0:
ntriads_remain = ntriads - pagei * nrow * ncol
nrow = min(6, int(NP.ceil(1.0*ntriads_remain/ncol)))
fig, axs = PLT.subplots(nrows=nrow, ncols=ncol, sharex=True, sharey=True, figsize=(8,6.4))
for i in range(nrow):
for j in range(ncol):
if i*ncol+j < ntriads:
axs[i,j].imshow(wts_raw[i*ncol+j,0,:,:].T, origin='lower', extent=[1e-6*freqs.min(), 1e-6*freqs.max(), lst.min(), lst.max()+NP.mean(dlst)], vmin=0, vmax=1, interpolation='none', cmap='gray')
axs[i,j].text(0.5, 0.97, '({0[0]:0d}, {0[1]:0d}, {0[2]:0d})'.format(triads[i*ncol+j,:]), transform=axs[i,j].transAxes, fontsize=10, weight='medium', ha='center', va='top', color='red')
else:
axs[i,j].axis('off')
axs[i,j].set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
axs[i,j].set_ylim(lst.min(), lst.max()+NP.mean(dlst))
axs[i,j].set_aspect('auto')
fig.subplots_adjust(hspace=0, wspace=0)
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(left=0.1)
fig.subplots_adjust(bottom=0.15)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
# big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium', labelpad=20)
big_ax.set_ylabel('LST [seconds]', fontsize=12, weight='medium', labelpad=35)
PLT.savefig(figdir + '{0}_time_frequency_flags_page_{1:03d}_of_{2:0d}.png'.format(infile_no_ext, pagei+1, npages), bbox_inches=0)
PLT.savefig(figdir + '{0}_time_frequency_flags_page_{1:03d}_of_{2:0d}.eps'.format(infile_no_ext, pagei+1, npages), bbox_inches=0)
if '1d' in plots:
datastage = plot_info['1d']['datastage']
if datastage.lower() not in ['native', 'prelim']:
raise ValueError('Input datastage value invalid')
elif datastage.lower() == 'native':
cphase = cpObj.cpinfo['processed'][datastage]['cphase']
datastr = '{0}'.format(datastage)
else:
statistic = plot_info['1d']['statistic']
cphase = cpObj.cpinfo['processed'][datastage]['cphase'][statistic]
datastr = '{0}_{1}'.format(datastage, statistic)
mask = cphase.mask
timetriad_selection = plot_info['1d']['selection']
if timetriad_selection is not None:
dayind = timetriad_selection['dayind']
else:
dayind = 0
for key in timetriad_selection:
if timetriad_selection[key] is not None:
if key == 'triads':
triads = map(tuple, timetriad_selection[key])
elif key == 'lstrange':
lstrange = timetriad_selection[key]
if datastage.lower() == 'native':
lstbins = cpObj.cpinfo['raw']['lst'][:,dayind]
else:
lstbins = cpObj.cpinfo['processed']['prelim']['lstbins']
if lstrange is None:
lstinds = NP.arange(lstbins.size)
else:
lstrange = NP.asarray(lstrange)
lstinds = NP.where(NP.logical_and(lstbins >= lstrange.min(), lstbins <= lstrange.max()))[0]
else:
if key == 'triads':
triads = map(tuple, cpDSobj.cPhase.cpinfo['raw']['triads'])
elif key == 'lstrange':
if datastage.lower() == 'native':
lstbins = cpObj.cpinfo['raw']['lst'][:,dayind]
else:
lstbins = cpObj.cpinfo['processed']['prelim']['lstbins']
lstinds = NP.arange(lstbins.size)
sparseness = plot_info['1d']['sparseness']
if sparseness < 1.0:
sparseness = 1.0
sparsestr = '{0:.1f}'.format(sparseness)
sparsenum = NP.ceil(freqs.size / sparseness).astype(NP.int)
if sparsenum == freqs.size:
indchan = NP.arange(freqs.size)
applyflags = plot_info['1d']['applyflags']
if applyflags:
flags_str = 'flags'
else:
flags_str = 'noflags'
ncol = 3
nrow = min(4, int(NP.ceil(1.0*lstinds.size/ncol)))
npages = int(NP.ceil(1.0 * lstinds.size / (nrow*ncol)))
nlst_remain = lstinds.size
for pagei in range(npages):
if pagei > 0:
nlst_remain = lstinds.size - pagei * nrow * ncol
nrow = min(4, int(NP.ceil(1.0*nlst_remain/ncol)))
fig, axs = PLT.subplots(nrows=nrow, ncols=ncol, sharex=True, sharey=True, figsize=(8,6.4))
for i in range(nrow):
for j in range(ncol):
lstind = (lstinds.size - nlst_remain) + i*ncol+j
lind = lstinds[lstind]
if lstind < lstinds.size:
for triad in triads:
triad_ind = triads.index(triad)
if sparsenum < freqs.size:
indchan = NP.sort(NP.random.randint(freqs.size, size=sparsenum))
axs[i,j].plot(1e-6*freqs[indchan], cphase[lind,dayind,triad_ind,indchan], marker='.', ms=2, ls='none')
if applyflags:
flagind = mask[lind,dayind,triad_ind,:]
axs[i,j].plot(1e-6*freqs[flagind], cphase[lind,dayind,triad_ind,flagind].data, marker='.', ms=1, color='black', ls='none')
axs[i,j].text(0.5, 0.97, '{0:.2f} hrs'.format(lstbins[lind]), transform=axs[i,j].transAxes, fontsize=10, weight='medium', ha='center', va='top', color='black')
else:
axs[i,j].axis('off')
axs[i,j].set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
axs[i,j].set_ylim(-3.5,3.5)
fig.subplots_adjust(hspace=0, wspace=0)
fig.subplots_adjust(top=0.95)
fig.subplots_adjust(left=0.1)
fig.subplots_adjust(bottom=0.15)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
# big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium', labelpad=20)
big_ax.set_ylabel(r'$\phi_\nabla$ [radians]', fontsize=12, weight='medium', labelpad=35)
PLT.savefig(figdir + '{0}_cp_spectra_{1}_{2}_{3}_triads_day_{4}_{5:.1f}x_sparse_page_{6:03d}_of_{7:0d}.png'.format(infile_no_ext, flags_str, datastr, len(triads), dayind, sparseness, pagei+1, npages), bbox_inches=0)
PLT.savefig(figdir + '{0}_cp_spectra_{1}_{2}_{3}_triads_day_{4}_{5:.1f}x_sparse_page_{6:03d}_of_{7:0d}.eps'.format(infile_no_ext, flags_str, datastr, len(triads), dayind, sparseness, pagei+1, npages), bbox_inches=0)
# fig = PLT.figure(figsize=(3.75,3))
# ax = fig.add_subplot(111)
# for lstind in lstinds:
# for triad in triads:
# triad_ind = triads.index(triad)
# if sparsenum < freqs.size:
# indchan = NP.sort(NP.random.randint(freqs.size, size=sparsenum))
# ax.plot(1e-6*freqs[indchan], cphase[lstind,dayind,triad_ind,indchan], marker='.', ms=2, ls='none')
# if applyflags:
# flagind = mask[lstind,dayind,triad_ind,:]
# ax.plot(1e-6*freqs[flagind], cphase[lstind,dayind,triad_ind,flagind].data, marker='.', ms=1, color='black', ls='none')
# ax.set_xlim(1e-6*freqs.min(), 1e-6*freqs.max())
# ax.set_ylim(-3.5,3.5)
# ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium')
# ax.set_ylabel(r'$\phi_\nabla$ [radians]', fontsize=12, weight='medium')
# fig.subplots_adjust(top=0.95)
# fig.subplots_adjust(left=0.16)
# fig.subplots_adjust(bottom=0.18)
# fig.subplots_adjust(right=0.98)
# PLT.savefig(figdir + '{0}_cp_spectra_{1}_{2}_{3}_triads_{4}_times_{5:.1f}x_sparse.png'.format(infile_no_ext, flags_str, datastr, len(triads), lstinds.size, sparseness), bbox_inches=0)
# PLT.savefig(figdir + '{0}_cp_spectra_{1}_{2}_{3}_triads_{4}_times_{5:.1f}x_sparse.eps'.format(infile_no_ext, flags_str, datastr, len(triads), lstinds.size, sparseness), bbox_inches=0)
    # Plot category 2: closure-phase delay cross power spectra (data vs. models).
    # Read plotting/selection parameters from plot_info['2'] and build the
    # selection dictionaries used when computing or loading power spectra.
    if ('2' in plots) or ('2a' in plots) or ('2b' in plots) or ('2c' in plots) or ('2d' in plots):
        dir_PS = plot_info['2']['PS_dir']
        infile_pfx_a = plot_info['2']['infile_pfx_a']
        outfile_pfx_a = plot_info['2']['outfile_pfx_a']
        infile_pfx_b = plot_info['2']['infile_pfx_b']
        outfile_pfx_b = plot_info['2']['outfile_pfx_b']
        sampling = plot_info['2']['sampling']
        statistic = plot_info['2']['statistic']
        cohax = plot_info['2']['cohax']
        incohax = plot_info['2']['incohax']
        collapseax_a = plot_info['2']['collapseax_a']
        collapseax_b = plot_info['2']['collapseax_b']
        datapool = plot_info['2']['datapool']
        pspec_unit_type = plot_info['2']['units']
        ps_errtype = plot_info['2']['errtype']
        # Grayscale shade (matplotlib color string) used for each error type.
        errshade = {}
        for errtype in ps_errtype:
            if errtype.lower() == 'ssdiff':
                errshade[errtype] = '0.8'
            elif errtype.lower() == 'psdiff':
                errshade[errtype] = '0.6'
        nsigma = plot_info['2']['nsigma']
        beaminfo = plot_info['2']['beaminfo']
        xlim = plot_info['2']['xlim']
        # Input/output HDF5 paths for the two power-spectrum variants; "a" and
        # "b" differ only in the incoherent collapse axes used below.
        if infile_pfx_a is not None:
            ps_infile_a = datadir + dir_PS + infile_pfx_a + '_' + infile_no_ext + '.hdf5'
            pserr_infile_a = datadir + dir_PS + infile_pfx_a + '_' + infile_no_ext + '_errinfo.hdf5'
        if outfile_pfx_a is not None:
            ps_outfile_a = datadir + dir_PS + outfile_pfx_a + '_' + infile_no_ext + '.hdf5'
            pserr_outfile_a = datadir + dir_PS + outfile_pfx_a + '_' + infile_no_ext + '_errinfo.hdf5'
        if infile_pfx_b is not None:
            ps_infile_b = datadir + dir_PS + infile_pfx_b + '_' + infile_no_ext + '.hdf5'
            pserr_infile_b = datadir + dir_PS + infile_pfx_b + '_' + infile_no_ext + '_errinfo.hdf5'
        if outfile_pfx_b is not None:
            ps_outfile_b = datadir + dir_PS + outfile_pfx_b + '_' + infile_no_ext + '.hdf5'
            pserr_outfile_b = datadir + dir_PS + outfile_pfx_b + '_' + infile_no_ext + '_errinfo.hdf5'
        # Translate the requested triad/LST/day selection into concrete indices.
        timetriad_selection = plot_info['2']['selection']
        if timetriad_selection is not None:
            dayind = timetriad_selection['days']
            for key in timetriad_selection:
                if timetriad_selection[key] is not None:
                    if key == 'triads':
                        triads = map(tuple, timetriad_selection[key])
                    elif key == 'lstrange':
                        lstrange = timetriad_selection[key]
                    # NOTE(review): the lines below execute for every key but read
                    # 'lstrange', which is only assigned when key == 'lstrange'.
                    # This appears to rely on 'lstrange' being defined earlier in
                    # the script or on dict iteration order -- confirm against the
                    # full file.
                    lstbins = cpObj.cpinfo['processed']['prelim']['lstbins']
                    if lstrange is None:
                        lstinds = NP.arange(lstbins.size)
                    else:
                        lstrange = NP.asarray(lstrange)
                        lstinds = NP.where(NP.logical_and(lstbins >= lstrange.min(), lstbins <= lstrange.max()))[0]
                        if lstinds.size == 0:
                            raise ValueError('No data found in the specified LST range.')
                else:
                    if key == 'triads':
                        triads = map(tuple, cpDSobj.cPhase.cpinfo['raw']['triads'])
                    elif key == 'lstrange':
                        lstbins = cpObj.cpinfo['processed']['prelim']['lstbins']
                        lstinds = NP.arange(lstbins.size)
        # Selection and axis-info dictionaries handed to compute_power_spectrum().
        selection = {'triads': triads, 'lst': lstinds, 'days': dayind}
        autoinfo = {'axes': cohax}
        xinfo_a = {'axes': incohax, 'avgcov': False, 'collapse_axes': collapseax_a, 'dlst_range': timetriad_selection['dlst_range']}
        xinfo_b = {'axes': incohax, 'avgcov': False, 'collapse_axes': collapseax_b, 'dlst_range': timetriad_selection['dlst_range']}
        if pspec_unit_type == 'K':
            pspec_unit = 'mK2 Mpc3'
        else:
            pspec_unit = 'Jy2 Mpc'
        subselection = plot_info['2']['subselection']
        # Model-specific counterparts of the parameters above, one entry per model.
        mdl_day = plot_info['2']['modelinfo']['mdl_day']
        mdl_cohax = plot_info['2']['modelinfo']['mdl_cohax']
        mdl_incohax = plot_info['2']['modelinfo']['mdl_incohax']
        mdl_collapseax_a = plot_info['2']['modelinfo']['mdl_collapax_a']
        mdl_collapseax_b = plot_info['2']['modelinfo']['mdl_collapax_b']
        mdl_dir_PS = plot_info['2']['modelinfo']['PS_dir']
        mdl_infile_pfx_a = plot_info['2']['modelinfo']['infile_pfx_a']
        mdl_outfile_pfx_a = plot_info['2']['modelinfo']['outfile_pfx_a']
        mdl_infile_pfx_b = plot_info['2']['modelinfo']['infile_pfx_b']
        mdl_outfile_pfx_b = plot_info['2']['modelinfo']['outfile_pfx_b']
        if model_hdf5files is not None:
            mdl_autoinfo = [{'axes': mdl_cohax[i]} for i in range(len(model_hdf5files))]
            mdl_xinfo_a = [{'axes': mdl_incohax[i], 'avgcov': False, 'collapse_axes': mdl_collapseax_a[i], 'dlst_range': timetriad_selection['dlst_range']} for i in range(len(model_hdf5files))]
            mdl_xinfo_b = [{'axes': mdl_incohax[i], 'avgcov': False, 'collapse_axes': mdl_collapseax_b[i], 'dlst_range': timetriad_selection['dlst_range']} for i in range(len(model_hdf5files))]
        # Default to plotting both statistics when none was requested.
        if statistic is None:
            statistic = ['mean', 'median']
        else:
            statistic = [statistic]
        # Variant "a": load the cross power spectrum and its uncertainty from
        # file when an input prefix was given; otherwise compute both afresh.
        if infile_pfx_a is not None:
            xcpdps2_a = BSP.read_CPhase_cross_power_spectrum(ps_infile_a)
            xcpdps2_a_errinfo = BSP.read_CPhase_cross_power_spectrum(pserr_infile_a)
        else:
            xcpdps2_a = cpDSobj.compute_power_spectrum(selection=selection, autoinfo=autoinfo, xinfo=xinfo_a, units=pspec_unit_type, beamparms=beaminfo)
            xcpdps2_a_errinfo = cpDSobj.compute_power_spectrum_uncertainty(selection=selection, autoinfo=autoinfo, xinfo=xinfo_a, units=pspec_unit_type, beamparms=beaminfo)
        if outfile_pfx_a is not None:
            BSP.save_CPhase_cross_power_spectrum(xcpdps2_a, ps_outfile_a)
            BSP.save_CPhase_cross_power_spectrum(xcpdps2_a_errinfo, pserr_outfile_a)
        # Variant "b": same read/compute/save logic with the "b" collapse axes.
        if infile_pfx_b is not None:
            xcpdps2_b = BSP.read_CPhase_cross_power_spectrum(ps_infile_b)
            xcpdps2_b_errinfo = BSP.read_CPhase_cross_power_spectrum(pserr_infile_b)
        else:
            xcpdps2_b = cpDSobj.compute_power_spectrum(selection=selection, autoinfo=autoinfo, xinfo=xinfo_b, units=pspec_unit_type, beamparms=beaminfo)
            xcpdps2_b_errinfo = cpDSobj.compute_power_spectrum_uncertainty(selection=selection, autoinfo=autoinfo, xinfo=xinfo_b, units=pspec_unit_type, beamparms=beaminfo)
        if outfile_pfx_b is not None:
            BSP.save_CPhase_cross_power_spectrum(xcpdps2_b, ps_outfile_b)
            BSP.save_CPhase_cross_power_spectrum(xcpdps2_b_errinfo, pserr_outfile_b)
        nsamples_incoh = xcpdps2_a[sampling]['whole']['nsamples_incoh']
        nsamples_coh = xcpdps2_a[sampling]['whole']['nsamples_coh']
        # Accumulators for per-model delay-spectrum objects and power spectra.
        model_cpDSobjs = []
        cpds_models = []
        xcpdps2_a_models = []
        xcpdps2_a_errinfo_models = []
        xcpdps2_b_models = []
        xcpdps2_b_errinfo_models = []
        # Validate model infile_a prefixes and expand the model PS directory to
        # a per-model list when a single string was supplied.
        if model_hdf5files is not None:
            if mdl_infile_pfx_a is not None:
                if isinstance(mdl_infile_pfx_a, list):
                    if (len(mdl_infile_pfx_a) > 0):
                        if not isinstance(mdl_dir_PS, list):
                            if isinstance(mdl_dir_PS, str):
                                mdl_dir_PS = [mdl_dir_PS] * len(model_hdf5files)
                            else:
                                raise TypeError('PS directory for models must be a list of strings')
                        else:
                            if len(mdl_dir_PS) != len(model_hdf5files):
                                raise ValueError('Input model PS directories must match the number of models being analyzed.')
                else:
                    raise TypeError('Input model PS infile_a prefixes must be specified as a list of strings')
if mdl_infile_pfx_b is not None:
if isinstance(mdl_infile_pfx_b, list):
if len(mdl_infile_pfx_b) != len(mdl_infile_pfx_b):
raise ValueError('Length of input model PS infile_b prefixes must match the length of input model PS infile_a prefixes')
else:
raise TypeError('Input model PS infile_b prefixes must be specified as a list of strings')
            # Per-model loop: try to read each model's cross power spectra (and
            # error info) from disk; fall back to computing them; optionally save.
            progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Models '.format(len(model_hdf5files)), PGB.ETA()], maxval=len(model_hdf5files)).start()
            for i in range(len(model_hdf5files)):
                mdl_infile_no_ext = model_hdf5files[i].split('.hdf5')[0]
                # Flags recording which of the four inputs were read successfully.
                mdl_ps_infile_a_provided = False
                mdl_pserr_infile_a_provided = False
                mdl_ps_infile_b_provided = False
                mdl_pserr_infile_b_provided = False
                if mdl_infile_pfx_a is not None:
                    if len(mdl_infile_pfx_a) > 0:
                        if mdl_infile_pfx_a[i] is not None:
                            if not isinstance(mdl_infile_pfx_a[i], str):
                                raise TypeError('Input {0}-th model cross PS file must be a string'.format(i+1))
                            else:
                                # A failed read is non-fatal: warn and recompute later.
                                try:
                                    model_xcpdps2_a = BSP.read_CPhase_cross_power_spectrum(mdl_dir_PS[i]+mdl_infile_pfx_a[i]+'_'+mdl_infile_no_ext+'.hdf5')
                                except IOError as xcption:
                                    mdl_ps_infile_a_provided = False
                                    warnings.warn('Provided model cross-power spectrum infile_a "{0}" could not be opened. Will proceed with computing of model cross power spectrum based on parameters specified.'.format(mdl_dir_PS[i]+mdl_infile_pfx_a[i]+'.hdf5'))
                                else:
                                    mdl_ps_infile_a_provided = True
                                    xcpdps2_a_models += [copy.deepcopy(model_xcpdps2_a)]
                                try:
                                    model_xcpdps2_a_errinfo = BSP.read_CPhase_cross_power_spectrum(mdl_dir_PS[i]+mdl_infile_pfx_a[i]+'_'+mdl_infile_no_ext+'_errinfo.hdf5')
                                except IOError as xcption:
                                    mdl_pserr_infile_a_provided = False
                                    warnings.warn('Provided model cross-power spectrum infile_a "{0}" could not be opened. Will proceed with computing of model cross power spectrum based on parameters specified.'.format(mdl_dir_PS[i]+mdl_infile_pfx_a[i]+'_errinfo.hdf5'))
                                else:
                                    mdl_pserr_infile_a_provided = True
                                    xcpdps2_a_errinfo_models += [copy.deepcopy(model_xcpdps2_a_errinfo)]
                # Same read-with-fallback pattern for the "b" variant.
                if mdl_infile_pfx_b is not None:
                    if len(mdl_infile_pfx_b) > 0:
                        if mdl_infile_pfx_b[i] is not None:
                            if not isinstance(mdl_infile_pfx_b[i], str):
                                raise TypeError('Input {0}-th model cross PS file must be a string'.format(i+1))
                            else:
                                try:
                                    model_xcpdps2_b = BSP.read_CPhase_cross_power_spectrum(mdl_dir_PS[i]+mdl_infile_pfx_b[i]+'_'+mdl_infile_no_ext+'.hdf5')
                                except IOError as xcption:
                                    mdl_ps_infile_b_provided = False
                                    warnings.warn('Provided model cross-power spectrum infile_b "{0}" could not be opened. Will proceed with computing of model cross power spectrum based on parameters specified.'.format(mdl_dir_PS[i]+mdl_infile_pfx_b[i]+'.hdf5'))
                                else:
                                    mdl_ps_infile_b_provided = True
                                    xcpdps2_b_models += [copy.deepcopy(model_xcpdps2_b)]
                                try:
                                    model_xcpdps2_b_errinfo = BSP.read_CPhase_cross_power_spectrum(mdl_dir_PS[i]+mdl_infile_pfx_b[i]+'_'+mdl_infile_no_ext+'_errinfo.hdf5')
                                except IOError as xcption:
                                    mdl_pserr_infile_b_provided = False
                                    warnings.warn('Provided model cross-power spectrum infile_b "{0}" could not be opened. Will proceed with computing of model cross power spectrum based on parameters specified.'.format(mdl_dir_PS[i]+mdl_infile_pfx_b[i]+'_errinfo.hdf5'))
                                else:
                                    mdl_pserr_infile_b_provided = True
                                    xcpdps2_b_errinfo_models += [copy.deepcopy(model_xcpdps2_b_errinfo)]
                # Compute whatever could not be read from disk.
                if (not mdl_ps_infile_a_provided) or (not mdl_pserr_infile_a_provided) or (not mdl_ps_infile_b_provided) or (not mdl_pserr_infile_b_provided):
                    # model_cpObj = BSP.ClosurePhase(modelsdir+model_hdf5files[i], freqs, infmt='hdf5')
                    # model_cpObj.smooth_in_tbins(daybinsize=daybinsize, ndaybins=mdl_ndaybins[i], lstbinsize=lstbinsize)
                    # model_cpObj.subsample_differencing(daybinsize=None, ndaybins=4, lstbinsize=lstbinsize)
                    # model_cpObj.subtract(NP.zeros(1024))
                    # model_cpObjs += [copy.deepcopy(model_cpObj)]
                    # NOTE(review): relies on model_cpObjs having been populated
                    # earlier in the script (the local construction above is
                    # commented out) -- confirm against the full file.
                    model_cpDSobjs += [BSP.ClosurePhaseDelaySpectrum(model_cpObjs[i])]
                    cpds_models += [model_cpDSobjs[i].FT(freq_window_bw, freq_center=freq_window_centers, shape=freq_window_shape, fftpow=freq_window_fftpow, pad=pad, datapool='prelim', visscaleinfo=visscaleinfo, method='fft', resample=True, apply_flags=apply_flags)]
                    if not mdl_ps_infile_a_provided:
                        xcpdps2_a_models += [model_cpDSobjs[i].compute_power_spectrum(selection=selection, autoinfo=mdl_autoinfo[i], xinfo=mdl_xinfo_a[i], units=pspec_unit_type, beamparms=beaminfo)]
                    if not mdl_pserr_infile_a_provided:
                        xcpdps2_a_errinfo_models += [model_cpDSobjs[i].compute_power_spectrum_uncertainty(selection=selection, autoinfo=autoinfo, xinfo=xinfo_a, units=pspec_unit_type, beamparms=beaminfo)]
                    if not mdl_ps_infile_b_provided:
                        xcpdps2_b_models += [model_cpDSobjs[i].compute_power_spectrum(selection=selection, autoinfo=mdl_autoinfo[i], xinfo=mdl_xinfo_b[i], units=pspec_unit_type, beamparms=beaminfo)]
                    if not mdl_pserr_infile_b_provided:
                        xcpdps2_b_errinfo_models += [model_cpDSobjs[i].compute_power_spectrum_uncertainty(selection=selection, autoinfo=autoinfo, xinfo=xinfo_b, units=pspec_unit_type, beamparms=beaminfo)]
                else:
                    # Everything was read from disk; keep list positions aligned.
                    model_cpObjs += [None]
                    model_cpDSobjs += [None]
                    cpds_models += [None]
                # Optionally save this model's "a"-variant spectra.
                if mdl_outfile_pfx_a is not None:
                    if isinstance(mdl_outfile_pfx_a, str):
                        mdl_outfile_pfx_a = [mdl_outfile_pfx_a] * len(model_hdf5files)
                    if not isinstance(mdl_outfile_pfx_a, list):
                        raise TypeError('The model cross-power spectrum outfile prefixes must be specified as a list with item for each model.')
                    # NOTE(review): mdl_dir_PS may still be a plain string here if
                    # no infile_a prefixes were supplied (expansion happens in the
                    # validation above) -- confirm against the full file.
                    if len(mdl_outfile_pfx_a) != len(mdl_dir_PS):
                        raise ValueError('Invalid number of model cross-power output files specified')
                    mdl_ps_outfile_a = mdl_dir_PS[i] + mdl_outfile_pfx_a[i] + '_' + mdl_infile_no_ext + '.hdf5'
                    mdl_pserr_outfile_a = mdl_dir_PS[i] + mdl_outfile_pfx_a[i] + '_' + mdl_infile_no_ext + '_errinfo.hdf5'
                    BSP.save_CPhase_cross_power_spectrum(xcpdps2_a_models[-1], mdl_ps_outfile_a)
                    BSP.save_CPhase_cross_power_spectrum(xcpdps2_a_errinfo_models[-1], mdl_pserr_outfile_a)
                # Optionally save this model's "b"-variant spectra.
                if mdl_outfile_pfx_b is not None:
                    if isinstance(mdl_outfile_pfx_b, str):
                        mdl_outfile_pfx_b = [mdl_outfile_pfx_b] * len(model_hdf5files)
                    if not isinstance(mdl_outfile_pfx_b, list):
                        raise TypeError('The model cross-power spectrum outfile prefixes must be specified as a list with item for each model.')
                    if len(mdl_outfile_pfx_b) != len(mdl_dir_PS):
                        raise ValueError('Invalid number of model cross-power output files specified')
                    mdl_ps_outfile_b = mdl_dir_PS[i] + mdl_outfile_pfx_b[i] + '_' + mdl_infile_no_ext + '.hdf5'
                    mdl_pserr_outfile_b = mdl_dir_PS[i] + mdl_outfile_pfx_b[i] + '_' + mdl_infile_no_ext + '_errinfo.hdf5'
                    BSP.save_CPhase_cross_power_spectrum(xcpdps2_b_models[-1], mdl_ps_outfile_b)
                    BSP.save_CPhase_cross_power_spectrum(xcpdps2_b_errinfo_models[-1], mdl_pserr_outfile_b)
                progress.update(i+1)
            progress.finish()
        # Resolve the plotting sub-selection (spectral windows, LST diagonals,
        # day pairs, triad diagonals) into array indices into the PS products.
        spw = subselection['spw']
        if spw is None:
            spwind = NP.arange(xcpdps2_a[sampling]['z'].size)
        else:
            spwind = NP.asarray(spw)
        lstind = NMO.find_list_in_list(xcpdps2_a[sampling][datapool[0]]['diagoffsets'][1], NP.asarray(subselection['lstdiag']))
        dayind = NP.asarray(subselection['day'])
        dayind_models = NP.asarray(mdl_day)
        triadind = NMO.find_list_in_list(xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3], NP.asarray(subselection['triaddiag']))
        # Per-model plotting colors, indexed by model number.
        mdl_colrs = ['red', 'green', 'blue', 'cyan', 'gray', 'orange']
        # Plot 2a: log-scale real-part and imaginary-part figures of the delay
        # power spectrum (data "FG+N" vs. models), one figure pair per
        # (statistic, spectral window, LST diagonal, day pair).
        if '2a' in plots:
            for stat in statistic:
                for zind in spwind:
                    for lind in lstind:
                        for di,dind in enumerate(dayind):
                            maxabsvals = []
                            minabsvals = []
                            fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                            if len(datapool) == 1:
                                axs = [axs]
                            for dpoolind,dpool in enumerate(datapool):
                                for trno,trind in enumerate([triadind[0]]):
                                    if model_hdf5files is not None:
                                        for mdlind, mdl in enumerate(model_labels):
                                            if dpool in xcpdps2_a_models[mdlind][sampling]:
                                                # On a log axis negative values are shown as
                                                # |value| with a distinct marker.
                                                psval = (1/3.0) * xcpdps2_a_models[mdlind][sampling][dpool][stat][zind,lind,dayind_models[di][mdlind][0],dayind_models[di][mdlind][1],trind,:].to(pspec_unit).value
                                                negind = psval.real < 0.0
                                                posind = NP.logical_not(negind)
                                                maxabsvals += [NP.abs(psval.real).max()]
                                                minabsvals += [NP.abs(psval.real).min()]
                                                if sampling == 'oversampled':
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,posind], psval.real[posind], ls='none', marker='.', ms=1, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='|', ms=1, color=mdl_colrs[mdlind])
                                                else:
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,:], NP.abs(psval.real), ls='-', lw=1, marker='.', ms=1, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='o', ms=2, color=mdl_colrs[mdlind])
                                    if dpool in xcpdps2_a[sampling]:
                                        psval = (1/3.0) * xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
                                        negind = psval.real < 0.0
                                        posind = NP.logical_not(negind)
                                        maxabsvals += [NP.abs(psval.real).max()]
                                        minabsvals += [NP.abs(psval.real).min()]
                                        if sampling == 'oversampled':
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,posind], psval.real[posind], ls='none', marker='.', ms=1, color='black', label='FG+N')
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='|', ms=1, color='black')
                                        else:
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], NP.abs(psval.real), ls='-', lw=1, marker='.', ms=1, color='black', label='FG+N')
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='o', ms=2, color='black')
                                        legend = axs[dpoolind].legend(loc='upper right', shadow=False, fontsize=8)
                                    # Per-axes annotations/limits, set once per panel.
                                    if trno == 0:
                                        axs[dpoolind].set_yscale('log')
                                        axs[dpoolind].text(0.05, 0.97, 'Real', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.87, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.77, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.67, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axt = axs[dpoolind].twiny()
                                        axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
                                        # axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
                                        if xlim is None:
                                            axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
                                        else:
                                            axs[dpoolind].set_xlim(xlim)
                                        axs[dpoolind].set_ylim(0.5*min(minabsvals), 2*max(maxabsvals))
                            # Shared figure-level labels drawn on an invisible axes.
                            fig.subplots_adjust(top=0.85)
                            fig.subplots_adjust(bottom=0.16)
                            fig.subplots_adjust(left=0.22)
                            fig.subplots_adjust(right=0.98)
                            big_ax = fig.add_subplot(111)
                            big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                            # big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
                            big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                            big_ax.set_xticks([])
                            big_ax.set_yticks([])
                            big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                            if pspec_unit_type == 'K':
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=30)
                            else:
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=30)
                            big_axt = big_ax.twiny()
                            big_axt.set_xticks([])
                            big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                            PLT.savefig(figdir + '{0}_log_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            PLT.savefig(figdir + '{0}_log_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            PLT.savefig(figdir + '{0}_log_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            # Second figure: same layout for the imaginary part.
                            maxabsvals = []
                            minabsvals = []
                            fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                            if len(datapool) == 1:
                                axs = [axs]
                            for dpoolind,dpool in enumerate(datapool):
                                for trno,trind in enumerate([triadind[0]]):
                                    if model_hdf5files is not None:
                                        for mdlind, mdl in enumerate(model_labels):
                                            if dpool in xcpdps2_a_models[mdlind][sampling]:
                                                psval = (1/3.0) * xcpdps2_a_models[mdlind][sampling][dpool][stat][zind,lind,dayind_models[di][mdlind][0],dayind_models[di][mdlind][1],trind,:].to(pspec_unit).value
                                                negind = psval.imag < 0.0
                                                posind = NP.logical_not(negind)
                                                maxabsvals += [NP.abs(psval.imag).max()]
                                                minabsvals += [NP.abs(psval.imag).min()]
                                                if sampling == 'oversampled':
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,posind], psval.imag[posind], ls='none', marker='.', ms=1, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='|', ms=1, color=mdl_colrs[mdlind])
                                                else:
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,:], NP.abs(psval.imag), ls='-', lw=1, marker='.', ms=1, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                                    axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='o', ms=2, color=mdl_colrs[mdlind])
                                    if dpool in xcpdps2_a[sampling]:
                                        psval = (1/3.0) * xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
                                        negind = psval.imag < 0.0
                                        posind = NP.logical_not(negind)
                                        maxabsvals += [NP.abs(psval.imag).max()]
                                        minabsvals += [NP.abs(psval.imag).min()]
                                        if sampling == 'oversampled':
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,posind], psval.imag[posind], ls='none', marker='.', ms=1, color='black', label='FG+N')
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='|', ms=1, color='black')
                                        else:
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], NP.abs(psval.imag), ls='-', lw=1, marker='.', ms=1, color='black', label='FG+N')
                                            axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='o', ms=2, color='black')
                                        legend = axs[dpoolind].legend(loc='upper right', shadow=False, fontsize=8)
                                    if trno == 0:
                                        axs[dpoolind].set_yscale('log')
                                        axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
                                        axs[dpoolind].text(0.05, 0.97, 'Imag', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.87, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.77, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.67, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axt = axs[dpoolind].twiny()
                                        axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
                                        # axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
                                        axs[dpoolind].set_ylim(0.5*min(minabsvals), 2*max(maxabsvals))
                            fig.subplots_adjust(top=0.85)
                            fig.subplots_adjust(bottom=0.16)
                            fig.subplots_adjust(left=0.22)
                            fig.subplots_adjust(right=0.98)
                            big_ax = fig.add_subplot(111)
                            big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                            # big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
                            big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                            big_ax.set_xticks([])
                            big_ax.set_yticks([])
                            big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                            if pspec_unit_type == 'K':
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=30)
                            else:
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=30)
                            big_axt = big_ax.twiny()
                            big_axt.set_xticks([])
                            big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                            PLT.savefig(figdir + '{0}_log_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            PLT.savefig(figdir + '{0}_log_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            PLT.savefig(figdir + '{0}_log_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
        # Plot 2b: symlog-scale delay power spectra with error bars from the
        # uncertainty (errinfo) products; real-part figure first.
        if '2b' in plots:
            for stat in statistic:
                for zind in spwind:
                    for lind in lstind:
                        for di,dind in enumerate(dayind):
                            maxabsvals = []
                            minabsvals = []
                            maxvals = []
                            minvals = []
                            fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                            if len(datapool) == 1:
                                axs = [axs]
                            for dpoolind,dpool in enumerate(datapool):
                                for trno,trind in enumerate([triadind[0]]):
                                    if model_hdf5files is not None:
                                        for mdlind, mdl in enumerate(model_labels):
                                            if dpool in xcpdps2_a_models[mdlind][sampling]:
                                                psval = (1/3.0) * xcpdps2_a_models[mdlind][sampling][dpool][stat][zind,lind,dayind_models[di][mdlind][0],dayind_models[di][mdlind][1],trind,:].to(pspec_unit).value
                                                # negind = psval.real < 0.0
                                                # posind = NP.logical_not(negind)
                                                maxabsvals += [NP.abs(psval.real).max()]
                                                minabsvals += [NP.abs(psval.real).min()]
                                                maxvals += [psval.real.max()]
                                                minvals += [psval.real.min()]
                                                axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,:], psval.real, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                    if dpool in xcpdps2_a[sampling]:
                                        # RMS across the error-info realizations gives the error bar.
                                        psval = (1/3.0) * xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
                                        psrms = (1/3.0) * NP.nanstd(xcpdps2_a_errinfo[sampling]['errinfo'][stat][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
                                        maxabsvals += [NP.abs(psval.real + psrms).max()]
                                        minabsvals += [NP.abs(psval.real).min()]
                                        maxvals += [(psval.real + psrms).max()]
                                        minvals += [(psval.real - psrms).min()]
                                        # axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], psval.real, ls='none', marker='.', ms=1, color='black', label='FG+N')
                                        axs[dpoolind].errorbar(xcpdps2_a[sampling]['kprll'][zind,:], psval.real, yerr=psrms, xerr=None, ecolor='0.8', ls='none', marker='.', ms=4, color='black', label='FG+N')
                                        legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
                                    if trno == 0:
                                        axs[dpoolind].text(0.05, 0.97, 'Real', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.92, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.87, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axt = axs[dpoolind].twiny()
                                        axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
                                        minvals = NP.asarray(minvals)
                                        maxvals = NP.asarray(maxvals)
                                        minabsvals = NP.asarray(minabsvals)
                                        maxabsvals = NP.asarray(maxabsvals)
                                        if xlim is None:
                                            axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
                                        else:
                                            axs[dpoolind].set_xlim(xlim)
                                        if NP.min(minvals) < 0.0:
                                            axs[dpoolind].set_ylim(1.5*NP.min(minvals), 2*NP.max(maxabsvals))
                                        else:
                                            axs[dpoolind].set_ylim(0.5*NP.min(minvals), 2*NP.max(maxabsvals))
                                        # NOTE(review): 'linthreshy' was renamed 'linthresh' in
                                        # Matplotlib 3.3 and removed in 3.5 -- this call requires
                                        # an older Matplotlib.
                                        axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
                                        tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
                                        axs[dpoolind].yaxis.set_major_locator(tickloc)
                                        axs[dpoolind].grid(color='0.8', which='both', linestyle=':', lw=1)
                            fig.subplots_adjust(top=0.85)
                            fig.subplots_adjust(bottom=0.16)
                            fig.subplots_adjust(left=0.22)
                            fig.subplots_adjust(right=0.98)
                            big_ax = fig.add_subplot(111)
                            big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                            # big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
                            big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                            big_ax.set_xticks([])
                            big_ax.set_yticks([])
                            big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                            if pspec_unit_type == 'K':
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
                            else:
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=40)
                            big_axt = big_ax.twiny()
                            big_axt.set_xticks([])
                            big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                            # PLT.savefig(figdir + '{0}_symlog_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            PLT.savefig(figdir + '{0}_symlog_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
                            # PLT.savefig(figdir + '{0}_symlog_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
maxabsvals = []
minabsvals = []
                            # Imaginary-part figure of plot 2b: same layout as the
                            # real-part figure above.
                            fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                            if len(datapool) == 1:
                                axs = [axs]
                            for dpoolind,dpool in enumerate(datapool):
                                for trno,trind in enumerate([triadind[0]]):
                                    if model_hdf5files is not None:
                                        for mdlind, mdl in enumerate(model_labels):
                                            if dpool in xcpdps2_a_models[mdlind][sampling]:
                                                psval = (1/3.0) * xcpdps2_a_models[mdlind][sampling][dpool][stat][zind,lind,dayind_models[di][mdlind][0],dayind_models[di][mdlind][1],trind,:].to(pspec_unit).value
                                                # negind = psval.imag < 0.0
                                                # posind = NP.logical_not(negind)
                                                maxabsvals += [NP.abs(psval.imag).max()]
                                                minabsvals += [NP.abs(psval.imag).min()]
                                                maxvals += [psval.imag.max()]
                                                minvals += [psval.imag.min()]
                                                axs[dpoolind].plot(xcpdps2_a_models[mdlind][sampling]['kprll'][zind,:], psval.imag, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                    if dpool in xcpdps2_a[sampling]:
                                        psval = (1/3.0) * xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
                                        psrms = (1/3.0) * NP.nanstd(xcpdps2_a_errinfo[sampling]['errinfo'][stat][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
                                        maxabsvals += [NP.abs(psval.imag + psrms).max()]
                                        minabsvals += [NP.abs(psval.imag).min()]
                                        maxvals += [(psval.imag + psrms).max()]
                                        minvals += [(psval.imag - psrms).min()]
                                        # axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], psval.imag, ls='none', marker='.', ms=1, color='black', label='FG+N')
                                        axs[dpoolind].errorbar(xcpdps2_a[sampling]['kprll'][zind,:], psval.imag, yerr=psrms, xerr=None, ecolor='0.8', ls='none', marker='.', ms=4, color='black', label='FG+N')
                                        legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
                                    if trno == 0:
                                        axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
                                        axs[dpoolind].text(0.05, 0.97, 'Imag', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.92, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axs[dpoolind].text(0.05, 0.87, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                        axt = axs[dpoolind].twiny()
                                        axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
                                        minvals = NP.asarray(minvals)
                                        maxvals = NP.asarray(maxvals)
                                        minabsvals = NP.asarray(minabsvals)
                                        maxabsvals = NP.asarray(maxabsvals)
                                        # NOTE(review): builtin min() here vs NP.min() in the
                                        # real-part figure -- equivalent on a 1-D array, but
                                        # inconsistent style.
                                        if min(minvals) < 0.0:
                                            axs[dpoolind].set_ylim(1.5*NP.min(minvals), 2*NP.max(maxabsvals))
                                        else:
                                            axs[dpoolind].set_ylim(0.5*NP.min(minvals), 2*NP.max(maxabsvals))
                                        # NOTE(review): 'linthreshy' requires Matplotlib < 3.5
                                        # (renamed 'linthresh' in 3.3).
                                        axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
                                        tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
                                        axs[dpoolind].yaxis.set_major_locator(tickloc)
                                        axs[dpoolind].grid(color='0.8', which='both', linestyle=':', lw=1)
                            fig.subplots_adjust(top=0.85)
                            fig.subplots_adjust(bottom=0.16)
                            fig.subplots_adjust(left=0.22)
                            fig.subplots_adjust(right=0.98)
                            big_ax = fig.add_subplot(111)
                            big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                            # big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
                            big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                            big_ax.set_xticks([])
                            big_ax.set_yticks([])
                            big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                            if pspec_unit_type == 'K':
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
                            else:
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=40)
                            big_axt = big_ax.twiny()
                            big_axt.set_xticks([])
                            big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                            # PLT.savefig(figdir + '{0}_symlog_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
PLT.savefig(figdir + '{0}_symlog_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_symlog_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_{7:0d}_flags_{8}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3][trind], applyflags_str), bbox_inches=0)
# Sections '2c'/'2d' share this setup: incoherently average the cross power
# spectra (data and models) over the requested diagonal-offset combinations
# along the incoherent axes, for both the 'a' scheme (two day axes) and the
# 'b' scheme (single day axis).
# NOTE: removed a large region of commented-out duplicate plotting code that
# previously followed this block; it was fully superseded by section '2c'.
if ('2c' in plots) or ('2d' in plots):
    # Averaging scheme 'a': incoherent axes and per-axis diagonal offsets
    avg_incohax_a = plot_info['2c']['incohax_a']
    diagoffsets_incohax_a = plot_info['2c']['diagoffsets_a']
    diagoffsets_a = []
    # Averaging scheme 'b'
    avg_incohax_b = plot_info['2c']['incohax_b']
    diagoffsets_incohax_b = plot_info['2c']['diagoffsets_b']
    diagoffsets_b = []
    # Build one {axis: offsets-array} dict per averaging combination (scheme 'a')
    for combi,incax_comb in enumerate(avg_incohax_a):
        diagoffsets_a += [{}]
        for incaxind,incax in enumerate(incax_comb):
            diagoffsets_a[-1][incax] = NP.asarray(diagoffsets_incohax_a[combi][incaxind])
    xcpdps2_a_avg, excpdps2_a_avg = BSP.incoherent_cross_power_spectrum_average(xcpdps2_a, excpdps=xcpdps2_a_errinfo, diagoffsets=diagoffsets_a)
    avg_xcpdps2_a_models = []
    avg_excpdps2_a_models = []
    # Build one {axis: offsets-array} dict per averaging combination (scheme 'b')
    for combi,incax_comb in enumerate(avg_incohax_b):
        diagoffsets_b += [{}]
        for incaxind,incax in enumerate(incax_comb):
            diagoffsets_b[-1][incax] = NP.asarray(diagoffsets_incohax_b[combi][incaxind])
    xcpdps2_b_avg, excpdps2_b_avg = BSP.incoherent_cross_power_spectrum_average(xcpdps2_b, excpdps=xcpdps2_b_errinfo, diagoffsets=diagoffsets_b)
    avg_xcpdps2_b_models = []
    avg_excpdps2_b_models = []
    if model_hdf5files is not None:
        # Apply the identical averaging to every foreground/EoR model
        progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Models '.format(len(model_hdf5files)), PGB.ETA()], maxval=len(model_hdf5files)).start()
        for i in range(len(model_hdf5files)):
            avg_xcpdps2_a_model, avg_excpdps2_a_model = BSP.incoherent_cross_power_spectrum_average(xcpdps2_a_models[i], excpdps=xcpdps2_a_errinfo_models[i], diagoffsets=diagoffsets_a)
            avg_xcpdps2_a_models += [copy.deepcopy(avg_xcpdps2_a_model)]
            avg_excpdps2_a_models += [copy.deepcopy(avg_excpdps2_a_model)]
            avg_xcpdps2_b_model, avg_excpdps2_b_model = BSP.incoherent_cross_power_spectrum_average(xcpdps2_b_models[i], excpdps=xcpdps2_b_errinfo_models[i], diagoffsets=diagoffsets_b)
            avg_xcpdps2_b_models += [copy.deepcopy(avg_xcpdps2_b_model)]
            avg_excpdps2_b_models += [copy.deepcopy(avg_excpdps2_b_model)]
            progress.update(i+1)
        progress.finish()
    # Output paths for the incoherent cross power average of the main dataset
    # and its uncertainties
    xps_avg_outfile_b = datadir + dir_PS + outfile_pfx_b + '_' + infile_no_ext + '.npz'
    xpserr_avg_outfile_b = datadir + dir_PS + outfile_pfx_b + '_' + infile_no_ext + '_errinfo.npz'
# Section '2c': plot the incoherently averaged (scheme 'b') REAL part of the
# closure-phase cross power spectrum with error bars on a symlog y-axis, one
# figure per statistic / spectral window / LST offset / day / combination.
if '2c' in plots:
    # Only the first LST-diagonal, triad-diagonal, and day entries are plotted
    lstind = [0]
    triadind = [0]
    dayind = [0]
    # Zero-th day index per model; shaped (1, n_models) so that
    # dayind_models[di][mdlind] parallels the dayind loop below
    dayind_models = NP.zeros(len(model_labels), dtype=int).reshape(1,-1)
    for stat in statistic:
        for zind in spwind:
            for lind in lstind:
                for di,dind in enumerate(dayind):
                    for combi in range(len(diagoffsets_b)):
                        # Collect extrema of everything drawn so axis limits
                        # can be chosen after all plotting is done
                        maxabsvals = []
                        minabsvals = []
                        maxvals = []
                        minvals = []
                        fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                        if len(datapool) == 1:
                            axs = [axs]  # keep axs indexable when there is a single panel
                        for dpoolind,dpool in enumerate(datapool):
                            for trno,trind in enumerate(triadind):
                                if model_hdf5files is not None:
                                    # Overplot each model's averaged spectrum (real part)
                                    for mdlind, mdl in enumerate(model_labels):
                                        if dpool in avg_xcpdps2_b_models[mdlind][sampling]:
                                            # NOTE(review): the 2/3 factor appears to be a closure-phase
                                            # power-spectrum normalization -- confirm against BSP
                                            psval = (2/3.0) * avg_xcpdps2_b_models[mdlind][sampling][dpool][stat][combi][zind,lind,dayind_models[di][mdlind],trind,:].to(pspec_unit).value
                                            maxabsvals += [NP.abs(psval.real).max()]
                                            minabsvals += [NP.abs(psval.real).min()]
                                            maxvals += [psval.real.max()]
                                            minvals += [psval.real.min()]
                                            axs[dpoolind].plot(avg_xcpdps2_b_models[mdlind][sampling]['kprll'][zind,:], psval.real, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                if dpool in xcpdps2_b_avg[sampling]:
                                    psval = (2/3.0) * xcpdps2_b_avg[sampling][dpool][stat][combi][zind,lind,dind,trind,:].to(pspec_unit).value
                                    # RMS estimate from same-set differences (noise realizations).
                                    # NOTE(review): this reads excpdps2_a_avg (the 'a'-scheme error
                                    # info) while everything else here uses the 'b' averages --
                                    # verify this is intentional and not a copy-paste slip
                                    psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps2_a_avg[sampling]['errinfo'][stat][combi][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
                                    # Inflate the RMS depending on whether the day axis (axis 2) was
                                    # incoherently averaged with a zero diagonal offset included
                                    if 2 in avg_incohax_b[combi]:
                                        ind_dayax_in_incohax = avg_incohax_b[combi].index(2)
                                        if 0 in diagoffsets_incohax_b[combi][ind_dayax_in_incohax]:
                                            rms_inflation_factor = 2.0 * NP.sqrt(2.0)
                                        else:
                                            rms_inflation_factor = NP.sqrt(2.0)
                                    else:
                                        rms_inflation_factor = NP.sqrt(2.0)
                                    # RMS estimate from the difference of day-pair spectra (1,1)-(0,0)
                                    psrms_psdiff = (2/3.0) * (xcpdps2_a_avg[sampling][dpool][stat][combi][zind,lind,1,1,trind,:] - xcpdps2_a_avg[sampling][dpool][stat][combi][zind,lind,0,0,trind,:]).to(pspec_unit).value
                                    psrms_psdiff = NP.abs(psrms_psdiff.real) / rms_inflation_factor
                                    # Conservative per-k error bar: larger of the two estimates
                                    psrms_max = NP.amax(NP.vstack((psrms_ssdiff, psrms_psdiff)), axis=0)
                                    maxabsvals += [NP.abs(psval.real + nsigma*psrms_max).max()]
                                    minabsvals += [NP.abs(psval.real).min()]
                                    maxvals += [(psval.real + nsigma*psrms_max).max()]
                                    minvals += [(psval.real - nsigma*psrms_max).min()]
                                    for errtype in ps_errtype:
                                        if errtype.lower() == 'ssdiff':
                                            axs[dpoolind].errorbar(xcpdps2_b_avg[sampling]['kprll'][zind,:], psval.real, yerr=nsigma*psrms_ssdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black')
                                        elif errtype.lower() == 'psdiff':
                                            axs[dpoolind].errorbar(xcpdps2_b_avg[sampling]['kprll'][zind,:], psval.real, yerr=nsigma*psrms_psdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black', label='FG+N')
                                    # axs[dpoolind].errorbar(xcpdps2_b_avg[sampling]['kprll'][zind,:], psval.real, yerr=psrms, xerr=None, ecolor='0.8', ls='none', marker='.', ms=4, color='black', label='FG+N')
                                legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
                                if trno == 0:
                                    # Per-panel annotations/decorations drawn once only
                                    # axs[dpoolind].text(0.05, 0.97, 'Real', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                    axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps2_b_avg[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
                                    # axs[dpoolind].text(0.05, 0.92, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a_avg['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                    # axs[dpoolind].text(0.05, 0.87, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                                    # Twin x-axis showing delay (tau) in microseconds
                                    axt = axs[dpoolind].twiny()
                                    axt.set_xlim(1e6*xcpdps2_b_avg[sampling]['lags'].min(), 1e6*xcpdps2_b_avg[sampling]['lags'].max())
                            axs[dpoolind].axhline(y=0, xmin=0, xmax=1, ls='-', lw=1, color='black')
                            minvals = NP.asarray(minvals)
                            maxvals = NP.asarray(maxvals)
                            minabsvals = NP.asarray(minabsvals)
                            maxabsvals = NP.asarray(maxabsvals)
                            if xlim is None:
                                axs[dpoolind].set_xlim(0.99*xcpdps2_b_avg[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_b_avg[sampling]['kprll'][zind,:].max())
                            else:
                                axs[dpoolind].set_xlim(xlim)
                            if NP.min(minvals) < 0.0:
                                axs[dpoolind].set_ylim(1.5*NP.min(minvals), 2*NP.max(maxabsvals))
                            else:
                                axs[dpoolind].set_ylim(0.5*NP.min(minvals), 2*NP.max(maxabsvals))
                            # Symlog y-axis: linear threshold at the decade just below the
                            # smallest non-zero |value| plotted.
                            # NOTE(review): 'linthreshy' was removed in matplotlib >= 3.5 (use 'linthresh')
                            axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
                            tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
                            axs[dpoolind].yaxis.set_major_locator(tickloc)
                            axs[dpoolind].grid(color='0.8', which='both', linestyle=':', lw=1)
                        fig.subplots_adjust(top=0.85)
                        fig.subplots_adjust(bottom=0.16)
                        fig.subplots_adjust(left=0.22)
                        fig.subplots_adjust(right=0.98)
                        # Invisible full-figure axes carrying the shared x/y labels
                        big_ax = fig.add_subplot(111)
                        big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                        # big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
                        big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                        big_ax.set_xticks([])
                        big_ax.set_yticks([])
                        big_ax.set_xlabel(r'$\kappa_\parallel$'+' [pseudo '+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                        if pspec_unit_type == 'K':
                            big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo mK$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
                        else:
                            big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=40)
                        big_axt = big_ax.twiny()
                        big_axt.set_xticks([])
                        big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                        PLT.savefig(figdir + '{0}_symlog_incoh_avg_real_cpdps_err_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_flags_{5}_comb_{6:0d}.pdf'.format(infile_no_ext, xcpdps2_b_avg[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_b_avg['dlst'][0], applyflags_str, combi), bbox_inches=0)
# Section '2d': bin the incoherently averaged spectra in k and plot the real
# part of the binned power spectrum ('PS') and dimensionless 'Del2', with
# error bars, one figure per statistic / spw / pstype / combination.
if '2d' in plots:
    kbin_min = plot_info['2d']['kbin_min']
    kbin_max = plot_info['2d']['kbin_max']
    num_kbins = plot_info['2d']['num_kbins']
    kbintype = plot_info['2d']['kbintype']
    # Construct the k-bin edges (kbins=None lets the binning routine choose)
    if (kbin_min is None) or (kbin_max is None):
        kbins = None
    else:
        if num_kbins is None:
            raise ValueError('Input num_kbins must be set if kbin range is provided')
        if kbintype == 'linear':
            kbins = NP.linspace(kbin_min, kbin_max, num=num_kbins, endpoint=True)
        elif kbintype == 'log':
            if kbin_min > 0.0:
                kbins = NP.geomspace(kbin_min, kbin_max, num=num_kbins, endpoint=True)
            elif kbin_min == 0.0:
                # geomspace cannot start at zero; nudge by a small epsilon
                eps_k = 1e-3
                kbins = NP.geomspace(kbin_min+eps_k, kbin_max, num=num_kbins, endpoint=True)
            else:
                # Negative lower edge: mirror a positive log grid about zero
                eps_k = 1e-3
                kbins_pos = NP.geomspace(eps_k, kbin_max, num=num_kbins, endpoint=True)
                ind_kbin_thresh = NP.argmin(kbins_pos[kbins_pos >= NP.abs(kbin_min)])
                kbins_neg = -1 * kbins_pos[:ind_kbin_thresh+1][::-1]
                kbins = NP.hstack((kbins_neg, kbins_pos))
        else:
            raise ValueError('Input kbintype must be set to "linear" or "log"')
    # k-bin the data averages ('a' and 'b' schemes) and their uncertainties
    xcpdps2_a_avg_kbin = BSP.incoherent_kbin_averaging(xcpdps2_a_avg, kbins=kbins, kbintype=kbintype)
    excpdps2_a_avg_kbin = BSP.incoherent_kbin_averaging(excpdps2_a_avg, kbins=kbins, kbintype=kbintype)
    xcpdps2_a_avg_kbin_models = []
    excpdps2_a_avg_kbin_models = []
    xcpdps2_b_avg_kbin = BSP.incoherent_kbin_averaging(xcpdps2_b_avg, kbins=kbins, kbintype=kbintype)
    excpdps2_b_avg_kbin = BSP.incoherent_kbin_averaging(excpdps2_b_avg, kbins=kbins, kbintype=kbintype)
    xcpdps2_b_avg_kbin_models = []
    excpdps2_b_avg_kbin_models = []
    if model_hdf5files is not None:
        # Same binning applied to each model
        for i in range(len(model_hdf5files)):
            xcpdps2_a_avg_kbin_models += [BSP.incoherent_kbin_averaging(avg_xcpdps2_a_models[i], kbins=kbins, kbintype=kbintype)]
            excpdps2_a_avg_kbin_models += [BSP.incoherent_kbin_averaging(avg_excpdps2_a_models[i], kbins=kbins, kbintype=kbintype)]
            xcpdps2_b_avg_kbin_models += [BSP.incoherent_kbin_averaging(avg_xcpdps2_b_models[i], kbins=kbins, kbintype=kbintype)]
            excpdps2_b_avg_kbin_models += [BSP.incoherent_kbin_averaging(avg_excpdps2_b_models[i], kbins=kbins, kbintype=kbintype)]
    # Only the first LST-diagonal, triad-diagonal, and day entries are plotted
    lstind = [0]
    triadind = [0]
    dayind = [0]
    dayind_models = NP.zeros(len(model_labels), dtype=int).reshape(1,-1)
    for stat in statistic:
        for zind in spwind:
            for lind in lstind:
                for di,dind in enumerate(dayind):
                    for pstype in ['PS', 'Del2']:
                        for combi in range(len(diagoffsets_b)):
                            # Track extrema of everything drawn for axis limits
                            maxabsvals = []
                            minabsvals = []
                            maxvals = []
                            minvals = []
                            fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
                            if len(datapool) == 1:
                                axs = [axs]  # keep axs indexable for a single panel
                            for dpoolind,dpool in enumerate(datapool):
                                for trno,trind in enumerate(triadind):
                                    if model_hdf5files is not None:
                                        # Overplot each model's binned spectrum (real part)
                                        for mdlind, mdl in enumerate(model_labels):
                                            if dpool in xcpdps2_b_avg_kbin_models[mdlind][sampling]:
                                                if pstype == 'PS':
                                                    psval = (2/3.0) * xcpdps2_b_avg_kbin_models[mdlind][sampling][dpool][stat][pstype][combi][zind,lind,dayind_models[di][mdlind],trind,:].to(pspec_unit).value
                                                else:
                                                    psval = (2/3.0) * xcpdps2_b_avg_kbin_models[mdlind][sampling][dpool][stat][pstype][combi][zind,lind,dayind_models[di][mdlind],trind,:].to('mK2').value
                                                kval = xcpdps2_b_avg_kbin_models[mdlind][sampling]['kbininfo'][dpool][stat][combi][zind,lind,dayind_models[di][mdlind],trind,:].to('Mpc-1').value
                                                # BUGFIX: maxabsvals previously accumulated NP.nanmin,
                                                # understating the maximum |value| used for the y-limit
                                                maxabsvals += [NP.nanmax(NP.abs(psval.real))]
                                                minabsvals += [NP.nanmin(NP.abs(psval.real))]
                                                maxvals += [NP.nanmax(psval.real)]
                                                minvals += [NP.nanmin(psval.real)]
                                                axs[dpoolind].plot(kval, psval.real, ls='none', marker='.', ms=3, color=mdl_colrs[mdlind], label='{0}'.format(mdl))
                                    if dpool in xcpdps2_b_avg_kbin[sampling]:
                                        if pstype == 'PS':
                                            psval = (2/3.0) * xcpdps2_b_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,dind,trind,:].to(pspec_unit).value
                                            psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps2_b_avg_kbin[sampling]['errinfo'][stat][pstype][combi][zind,lind,:,trind,:], axis=0).to(pspec_unit).value
                                            psrms_psdiff = (2/3.0) * (xcpdps2_a_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,1,1,trind,:] - xcpdps2_a_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,0,0,trind,:]).to(pspec_unit).value
                                        else:
                                            psval = (2/3.0) * xcpdps2_b_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,dind,trind,:].to('mK2').value
                                            psrms_ssdiff = (2/3.0) * NP.nanstd(excpdps2_b_avg_kbin[sampling]['errinfo'][stat][pstype][combi][zind,lind,:,trind,:], axis=0).to('mK2').value
                                            # BUGFIX: was (1/3.0)*...to('K2'); the Del2 branch must use the
                                            # same (2/3.0) scaling and mK^2 units as psval/psrms_ssdiff above,
                                            # otherwise K^2 values are stacked against mK^2 values (1e6 mismatch)
                                            psrms_psdiff = (2/3.0) * (xcpdps2_a_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,1,1,trind,:] - xcpdps2_a_avg_kbin[sampling][dpool][stat][pstype][combi][zind,lind,0,0,trind,:]).to('mK2').value
                                        # Inflate RMS if the day axis (axis 2) was averaged with zero offset
                                        if 2 in avg_incohax_b[combi]:
                                            ind_dayax_in_incohax = avg_incohax_b[combi].index(2)
                                            if 0 in diagoffsets_incohax_b[combi][ind_dayax_in_incohax]:
                                                rms_inflation_factor = 2.0 * NP.sqrt(2.0)
                                            else:
                                                rms_inflation_factor = NP.sqrt(2.0)
                                        else:
                                            rms_inflation_factor = NP.sqrt(2.0)
                                        psrms_psdiff = NP.abs(psrms_psdiff.real) / rms_inflation_factor
                                        # Conservative per-bin error: larger of the two estimates
                                        psrms_max = NP.amax(NP.vstack((psrms_ssdiff, psrms_psdiff)), axis=0)
                                        kval = xcpdps2_b_avg_kbin[sampling]['kbininfo'][dpool][stat][combi][zind,lind,dind,trind,:].to('Mpc-1').value
                                        maxabsvals += [NP.nanmax(NP.abs(psval.real + nsigma*psrms_max.real))]
                                        minabsvals += [NP.nanmin(NP.abs(psval.real))]
                                        maxvals += [NP.nanmax(psval.real + nsigma*psrms_max.real)]
                                        minvals += [NP.nanmin(psval.real - nsigma*psrms_max.real)]
                                        for errtype in ps_errtype:
                                            if errtype.lower() == 'ssdiff':
                                                axs[dpoolind].errorbar(kval, psval.real, yerr=nsigma*psrms_ssdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black')
                                            # BUGFIX: was `in 'psdiff'` (substring containment), use equality
                                            elif errtype.lower() == 'psdiff':
                                                axs[dpoolind].errorbar(kval, psval.real, yerr=nsigma*psrms_psdiff, xerr=None, ecolor=errshade[errtype.lower()], ls='none', marker='.', ms=4, color='black', label='FG+N')
                                    legend = axs[dpoolind].legend(loc='center', bbox_to_anchor=(0.5,0.3), shadow=False, fontsize=8)
                                    if trno == 0:
                                        # Annotate redshift once per panel
                                        axs[dpoolind].text(0.95, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps2_b_avg_kbin['resampled']['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='right', va='top', color='black')
                                axs[dpoolind].axhline(y=0, xmin=0, xmax=1, ls='-', lw=1, color='black')
                                minvals = NP.asarray(minvals)
                                maxvals = NP.asarray(maxvals)
                                minabsvals = NP.asarray(minabsvals)
                                maxabsvals = NP.asarray(maxabsvals)
                                axs[dpoolind].set_xlim(0.99*NP.nanmin(xcpdps2_b_avg_kbin['resampled']['kbininfo']['kbin_edges'][zind].to('Mpc-1').value), 1.01*NP.nanmax(xcpdps2_b_avg_kbin['resampled']['kbininfo']['kbin_edges'][zind].to('Mpc-1').value))
                                if NP.min(minvals) < 0.0:
                                    axs[dpoolind].set_ylim(1.5*NP.nanmin(minvals), 2*NP.nanmax(maxabsvals))
                                else:
                                    axs[dpoolind].set_ylim(0.5*NP.nanmin(minvals), 2*NP.nanmax(maxabsvals))
                                # Symlog y-axis: linear threshold at the decade just below the
                                # smallest non-zero |value| plotted
                                axs[dpoolind].set_yscale('symlog', linthreshy=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))))
                                tickloc = PLTick.SymmetricalLogLocator(linthresh=10**NP.floor(NP.log10(NP.min(minabsvals[minabsvals > 0.0]))), base=100.0)
                                axs[dpoolind].yaxis.set_major_locator(tickloc)
                                axs[dpoolind].grid(color='0.8', which='both', linestyle=':', lw=1)
                            fig.subplots_adjust(top=0.85)
                            fig.subplots_adjust(bottom=0.16)
                            fig.subplots_adjust(left=0.22)
                            fig.subplots_adjust(right=0.98)
                            # Invisible full-figure axes carrying the shared x/y labels
                            big_ax = fig.add_subplot(111)
                            big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
                            big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
                            big_ax.set_xticks([])
                            big_ax.set_yticks([])
                            big_ax.set_xlabel(r'$\kappa_\parallel$'+' [pseudo '+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
                            if pstype == 'PS':
                                big_ax.set_ylabel(r'$\frac{1}{3}\, P_\nabla(\kappa_\parallel)$ [pseudo mK$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=40)
                            else:
                                big_ax.set_ylabel(r'$\frac{1}{3}\, \Delta_\nabla^2(\kappa_\parallel)$ [pseudo mK$^2$]', fontsize=12, weight='medium', labelpad=40)
                            big_axt = big_ax.twiny()
                            big_axt.set_xticks([])
                            big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
                            if pstype == 'PS':
                                PLT.savefig(figdir + '{0}_symlog_incoh_kbin_avg_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_flags_{5}_comb_{6:0d}.pdf'.format(infile_no_ext, xcpdps2_a_avg_kbin[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_b_avg_kbin['dlst'][0], applyflags_str, combi), bbox_inches=0)
                            else:
                                PLT.savefig(figdir + '{0}_symlog_incoh_kbin_avg_real_cpDel2_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_flags_{5}_comb_{6:0d}.pdf'.format(infile_no_ext, xcpdps2_a_avg_kbin[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_b_avg_kbin['dlst'][0], applyflags_str, combi), bbox_inches=0)
if '2e' in plots:
subselection = plot_info['2e']['subselection']
autoinfo = {'axes': cohax}
xinfo = {'axes': incohax, 'avgcov': False, 'collapse_axes': collapseax, 'dlst_range': timetriad_selection['dlst_range']}
if statistic is None:
statistic = ['mean', 'median']
else:
statistic = [statistic]
spw = subselection['spw']
if spw is None:
spwind = NP.arange(xcpdps2_a[sampling]['z'].size)
else:
spwind = NP.asarray(spw)
lstind = NMO.find_list_in_list(xcpdps2_a[sampling][datapool[0]]['diagoffsets'][1], NP.asarray(subselection['lstdiag']))
dayind = NP.asarray(subselection['day'])
triadind = NMO.find_list_in_list(xcpdps2_a[sampling][datapool[0]]['diagoffsets'][3], NP.asarray(subselection['triaddiag']))
colrs = ['red', 'green', 'blue', 'cyan', 'gray', 'orange']
for stat in statistic:
for zind in spwind:
for lind in lstind:
for dind in dayind:
maxabsvals = []
minabsvals = []
fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
if len(datapool) == 1:
axs = [axs]
for dpoolind,dpool in enumerate(datapool):
for trno,trind in enumerate(triadind):
if dpool in xcpdps2_a[sampling]:
psval = xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
negind = psval.real < 0.0
posind = NP.logical_not(negind)
maxabsvals += [NP.abs(psval.real).max()]
minabsvals += [NP.abs(psval.real).min()]
if sampling == 'oversampled':
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,posind], psval.real[posind], ls='none', marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='|', ms=1, color=colrs[trno])
else:
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], NP.abs(psval.real), ls='-', lw=1, marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='o', ms=2, color=colrs[trno])
legend = axs[dpoolind].legend(loc='upper right', shadow=False, fontsize=8)
if trno == 0:
axs[dpoolind].set_yscale('log')
axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
axs[dpoolind].set_ylim(1e-3, 1e8)
axs[dpoolind].text(0.05, 0.97, 'Real', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.87, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.77, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.67, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axt = axs[dpoolind].twiny()
axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
# axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
fig.subplots_adjust(top=0.85)
fig.subplots_adjust(bottom=0.16)
fig.subplots_adjust(left=0.24)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
# big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
if pspec_unit_type == 'K':
big_ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=30)
else:
big_ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=30)
big_axt = big_ax.twiny()
big_axt.set_xticks([])
big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
# PLT.savefig(figdir + '{0}_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
PLT.savefig(figdir + '{0}_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_real_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
maxabsvals = []
minabsvals = []
fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(4.0*len(datapool), 3.6))
if len(datapool) == 1:
axs = [axs]
for dpoolind,dpool in enumerate(datapool):
for trno,trind in enumerate(triadind):
if dpool in xcpdps2_a[sampling]:
psval = xcpdps2_a[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
negind = psval.imag < 0.0
posind = NP.logical_not(negind)
maxabsvals += [NP.abs(psval.imag).max()]
minabsvals += [NP.abs(psval.imag).min()]
if sampling == 'oversampled':
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,posind], psval.imag[posind], ls='none', marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='|', ms=1, color=colrs[trno])
else:
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], NP.abs(psval.imag), ls='-', lw=1, marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='o', ms=2, color=colrs[trno])
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,posind], psval.imag[posind], ls='none', marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,negind], NP.abs(psval.imag[negind]), ls='none', marker='|', ms=1, color=colrs[trno])
# axs[dpoolind].plot(xcpdps2_a[sampling]['kprll'][zind,:], NP.abs(psval), ls='-', lw=0.5, color=colrs[trno])
legend = axs[dpoolind].legend(loc='upper right', shadow=False, fontsize=8)
if trno == 0:
axs[dpoolind].set_yscale('log')
axs[dpoolind].set_xlim(0.99*xcpdps2_a[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2_a[sampling]['kprll'][zind,:].max())
axs[dpoolind].set_ylim(1e-3, 1e8)
axs[dpoolind].text(0.05, 0.97, 'Imag', transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.87, r'$z=$'+' {0:.1f}'.format(xcpdps2_a[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.77, r'$\Delta$'+'LST = {0:.1f} s'.format(lind*3.6e3*xcpdps2_a['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axs[dpoolind].text(0.05, 0.67, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
axt = axs[dpoolind].twiny()
axt.set_xlim(1e6*xcpdps2_a[sampling]['lags'].min(), 1e6*xcpdps2_a[sampling]['lags'].max())
# axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
fig.subplots_adjust(top=0.85)
fig.subplots_adjust(bottom=0.16)
fig.subplots_adjust(left=0.24)
fig.subplots_adjust(right=0.98)
big_ax = fig.add_subplot(111)
big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
# big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
if pspec_unit_type == 'K':
big_ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [K$^2h^{-3}$ Mpc$^3$]', fontsize=12, weight='medium', labelpad=30)
else:
big_ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=30)
big_axt = big_ax.twiny()
big_axt.set_xticks([])
big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
# PLT.savefig(figdir + '{0}_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.png'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
PLT.savefig(figdir + '{0}_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.pdf'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_imag_cpdps_z_{1:.1f}_{2}_{3}_dlst_{4:.1f}s_lstdiag_{5:0d}_day_{6[0]:0d}_{6[1]:0d}_triaddiags_flags_{7}.eps'.format(infile_no_ext, xcpdps2_a[sampling]['z'][zind], stat, sampling, 3.6e3*xcpdps2_a['dlst'][0], subselection['lstdiag'][lind], dind, applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.png'.format(infile_no_ext, sampling, xcpdps2_a['triads_ind'].size, xcpdps2_a['lst'].size, 3.6e3*xcpdps2_a['dlst'][0], xcpdps2_a['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.eps'.format(infile_no_ext, sampling, xcpdps2_a['triads_ind'].size, xcpdps2_a['lst'].size, 3.6e3*xcpdps2_a['dlst'][0], xcpdps2_a['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
# if '2f' in plots:
# antloc_file = plot_info['2f']['antloc_file']
# anttable = ascii.read(antloc_file)
# ant_E = anttable['East']
# ant_N = anttable['North']
# ant_U = anttable['Up']
# antlocs = NP.concatenate((ant_E.reshape(-1,1), ant_N.reshape(-1,1), ant_U.reshape(-1,1)))
# antnums = NP.arange(len(anttable))
# selection = plot_info['2f']['selection']
# for key in selection:
# if selection[key] is not None:
# if key == 'triads':
# selection[key] = map(tuple,selection[key])
# else:
# selection[key] = NP.asarray(selection[key])
# subselection = plot_info['2f']['subselection']
# statistic = plot_info['2f']['statistic']
# datapool = plot_info['2f']['datapool']
# cohax = plot_info['2f']['cohax']
# incohax = plot_info['2f']['incohax']
# collapseax = plot_info['2f']['collapseax']
# autoinfo = {'axes': cohax}
# xinfo = {'axes': incohax, 'avgcov': False, 'collapse_axes': collapseax, 'dlst_range': selection['dlst_range']}
# xcpdps2f = cpDSobj.compute_power_spectrum_new(selection=selection, autoinfo=autoinfo, xinfo=xinfo)
# nsamples_incoh = xcpdps2f[sampling]['whole']['nsamples_incoh']
# nsamples_coh = xcpdps2f[sampling]['whole']['nsamples_coh']
# if statistic is None:
# statistic = 'mean'
# spw = subselection['spw']
# if spw is None:
# spwind = NP.arange(xcpdps[sampling]['z'])
# else:
# spwind = NP.asarray(spw)
# lstind = NMO.find_list_in_list(xcpdps2f[sampling][datapool[0]]['diagoffsets'][1], NP.asarray(subselection['lstdiag']))
# dayind = NP.asarray(subselection['day'])
# tau_ind = NP.where(NP.logical_and(NP.abs(1e6*xcpdps2f[sampling]['lags']) >= 0.6, NP.abs(1e6*xcpdps2f[sampling]['lags']) <= 1.5))[0]
# colrs = ['red', 'green', 'blue', 'cyan', 'orange', 'gray']
# for stat in statistic:
# for zind in spwind:
# for lind in lstind:
# for dind in dayind:
# fig, axs = PLT.subplots(nrows=1, ncols=len(datapool), sharex=True, sharey=True, figsize=(2.4*len(datapool), 3.6))
# if len(datapool) == 1:
# axs = [axs]
# for dpoolind,dpool in enumerate(datapool):
# peak12_ratio = NP.max(NP.abs(xcpdps2f[sampling][dpool][stat][zind,lind,:,:,:]), axis=-1) / NP.max(NP.abs(xcpdps2f[sampling][dpool][stat][zind,lind,:,:,tau_ind]), axis=-1)
# for trno1 in NP.arange(xcpdps2f['triads'].size):
# for trno2 in NP.arange(trno1, xcpdps2f['triads'].size):
# tr1_antinds = NMO.find_list_in_list(antnums, xcpdps2f['triads'][trind])
# tr2_antinds = NMO.find_list_in_list(antnums, xcpdps2f['triads'][trno2])
# if dpool in xcpdps2f[sampling]:
# psval = xcpdps2f[sampling][dpool][stat][zind,lind,dind[0],dind[1],trind,:].to(pspec_unit).value
# negind = psval.real < 0.0
# posind = NP.logical_not(negind)
# axs[dpoolind].plot(xcpdps2f[sampling]['kprll'][zind,posind], psval.real[posind], ls='none', marker='.', ms=1, color=colrs[trno], label=r'$\Delta$Tr={0:0d}'.format(subselection['triaddiag'][trno]))
# axs[dpoolind].plot(xcpdps2f[sampling]['kprll'][zind,negind], NP.abs(psval.real[negind]), ls='none', marker='|', ms=1, color=colrs[trno])
# axs[dpoolind].plot(xcpdps2f[sampling]['kprll'][zind,:], NP.abs(psval), ls='-', lw=0.5, color=colrs[trno])
# axs[dpoolind].set_yscale('log')
# axs[dpoolind].set_xlim(0.99*xcpdps2f[sampling]['kprll'][zind,:].min(), 1.01*xcpdps2f[sampling]['kprll'][zind,:].max())
# axs[dpoolind].set_ylim(1e-3, 1e8)
# legend = axs[dpoolind].legend(loc='upper right', shadow=False, fontsize=8)
# axs[dpoolind].text(0.05, 0.97, r'$z=$'+' {0:.1f}'.format(xcpdps2f[sampling]['z'][zind]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
# axs[dpoolind].text(0.05, 0.87, r'$\Delta$'+'LST = {0:.1f} s'.format(3.6e3*xcpdps2f['dlst'][0]), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
# axs[dpoolind].text(0.05, 0.77, 'G{0[0]:0d}{0[1]:0d}'.format(dind), transform=axs[dpoolind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
# axt = axs[dpoolind].twiny()
# axt.set_xlim(1e6*xcpdps2f[sampling]['lags'].min(), 1e6*xcpdps2f[sampling]['lags'].max())
# # axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
# fig.subplots_adjust(top=0.85)
# fig.subplots_adjust(bottom=0.16)
# fig.subplots_adjust(left=0.24)
# fig.subplots_adjust(right=0.98)
# big_ax = fig.add_subplot(111)
# # big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
# big_ax.set_axis_bgcolor('none') # matplotlib.__version__ < 2.0.0
# big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
# big_ax.set_xticks([])
# big_ax.set_yticks([])
# big_ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium', labelpad=20)
# big_ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=35)
# big_axt = big_ax.twiny()
# big_axt.set_xticks([])
# big_axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium', labelpad=20)
# colrs = ['red', 'green', 'blue']
# for stat in statistic:
# for dpool in ['whole', 'submodel', 'residual']:
# if dpool in xcpdps[sampling]:
# psval = NP.mean(xcpdps[sampling][dpool][stat], axis=tuple(axes_to_avg))
# fig = PLT.figure(figsize=(3.5,3.5))
# ax = fig.add_subplot(111)
# for zind,z in enumerate(xcpdps[sampling]['z']):
# negind = psval[zind,:] < 0.0
# posind = NP.logical_not(negind)
# ax.plot(xcpdps[sampling]['kprll'][zind,posind], psval[zind,posind], ls='none', marker='.', ms=4, color=colrs[zind], label=r'$z$={0:.1f}'.format(z))
# ax.plot(xcpdps[sampling]['kprll'][zind,negind], NP.abs(psval[zind,negind]), ls='none', marker='|', ms=4, color=colrs[zind])
# ax.set_yscale('log')
# ax.set_xlim(0.99*xcpdps[sampling]['kprll'][zind,:].min(), 1.01*xcpdps[sampling]['kprll'][zind,:].max())
# ax.set_ylim(1e-3, 1e8)
# ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium')
# ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [Jy$^2h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=0)
# legend = ax.legend(loc='upper right', shadow=False, fontsize=10)
# axt = ax.twiny()
# axt.set_xlim(1e6*xcpdps[sampling]['lags'].min(), 1e6*xcpdps[sampling]['lags'].max())
# axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
# fig.subplots_adjust(top=0.85)
# fig.subplots_adjust(bottom=0.16)
# fig.subplots_adjust(left=0.2)
# fig.subplots_adjust(right=0.98)
# PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.png'.format(infile_no_ext, sampling, xcpdps['triads_ind'].size, xcpdps['lst'].size, 3.6e3*xcpdps['dlst'][0], xcpdps['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
# PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.eps'.format(infile_no_ext, sampling, xcpdps['triads_ind'].size, xcpdps['lst'].size, 3.6e3*xcpdps['dlst'][0], xcpdps['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
# # for stat in statistic:
# # fig = PLT.figure(figsize=(3.5,3.5))
# # ax = fig.add_subplot(111)
# # for zind,z in enumerate(xcpdps[sampling]['z']):
# # if len(avgax) > 0:
# # psval = NP.mean(xcpdps[sampling][stat], axis=tuple(avgax), keepdims=True)
# # else:
# # psval = NP.copy(xcpdps[sampling][stat])
# # negind = psval[zind,lstind,dayind,triadind,:] < 0.0
# # posind = NP.logical_not(negind)
# # ax.plot(xcpdps[sampling]['kprll'][zind,posind], psval[zind,lstind,dayind,triadind,posind], ls='none', marker='.', ms=4, color=colrs[zind], label=r'$z$={0:.1f}'.format(z))
# # ax.plot(xcpdps[sampling]['kprll'][zind,negind], NP.abs(psval[zind,lstind,dayind,triadind,negind]), ls='none', marker='|', ms=4, color=colrs[zind])
# # ax.set_yscale('log')
# # ax.set_xlim(0.99*xcpdps[sampling]['kprll'][zind,:].min(), 1.01*xcpdps[sampling]['kprll'][zind,:].max())
# # ax.set_ylim(1e-8, 1e2)
# # ax.set_xlabel(r'$k_\parallel$'+' ['+r'$h$'+' Mpc'+r'$^{-1}$'+']', fontsize=12, weight='medium')
# # ax.set_ylabel(r'$P_\nabla(k_\parallel)$ [$h^{-1}$ Mpc]', fontsize=12, weight='medium', labelpad=0)
# # legend = ax.legend(loc='upper right', shadow=False, fontsize=10)
# # axt = ax.twiny()
# # axt.set_xlim(1e6*xcpdps[sampling]['lags'].min(), 1e6*xcpdps[sampling]['lags'].max())
# # axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]', fontsize=12, weight='medium')
# # fig.subplots_adjust(top=0.85)
# # fig.subplots_adjust(bottom=0.16)
# # fig.subplots_adjust(left=0.2)
# # fig.subplots_adjust(right=0.98)
# # PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.png'.format(infile_no_ext, sampling, xcpdps['triads_ind'].size, xcpdps['lst'].size, 3.6e3*xcpdps['dlst'][0], xcpdps['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
# # PLT.savefig(figdir + '{0}_closure_phase_delay_power_spectra_{1}_{2}_triads_{3}x{4:.1f}sx{5:.1f}d_{6}_statistic_nsamples_incoh_{7}_flags_{8}.eps'.format(infile_no_ext, sampling, xcpdps['triads_ind'].size, xcpdps['lst'].size, 3.6e3*xcpdps['dlst'][0], xcpdps['dday'][0], stat, nsamples_incoh, applyflags_str), bbox_inches=0)
if ('3' in plots) or ('3a' in plots) or ('3b' in plots) or ('3c' in plots):
    # Common setup for the "3x" family of diagnostic plots: locate the
    # theoretical 21cm HI power-spectrum files, load the simulated
    # visibilities, and resolve the requested LST selection into indices.
    HI_PS_dir = plot_info['3']['21cm_PS_dir']
    sim_rootdir = plot_info['3']['sim_rootdir']
    visdirs = plot_info['3']['visdirs']
    simvisdirs = [sim_rootdir+visdir for visdir in visdirs]
    simlabels = plot_info['3']['simlabels']
    visfile_prefix = plot_info['3']['visfile_prfx']
    theory_HI_PS_files = glob.glob(HI_PS_dir+'ps_*')
    # Redshift of each theory PS file is parsed from its filename (the token
    # after 'z' in the 4th underscore-separated field).
    # NP.float was deprecated in NumPy 1.20 and removed in 1.24; the builtin
    # float is the documented replacement and is behaviorally identical here.
    z_theory_HI_PS_files = NP.asarray([fname.split('/')[-1].split('_')[3].split('z')[1] for fname in theory_HI_PS_files], dtype=float)
    h_Planck15 = DS.cosmoPlanck15.h
    z_freq_window_centers = CNST.rest_freq_HI / freq_window_centers - 1
    # For every subband center, pick the theory PS file closest in redshift
    psfile_inds = [NP.argmin(NP.abs(z_theory_HI_PS_files - z_freq_window_center)) for z_freq_window_center in z_freq_window_centers]
    simvis_objs = [RI.InterferometerArray(None, None, None, init_file=simvisdir+visfile_prefix) for simvisdir in simvisdirs]
    select_lst = plot_info['3']['lst']
    simlst = (simvis_objs[0].lst / 15.0) # in hours
    if select_lst is None:
        # Default: the middle LST sample of the simulation
        lstind = NP.asarray(NP.floor(simlst.size/2.0).astype(int)).reshape(-1)
    elif isinstance(select_lst, (int,float)):
        # Single LST: nearest simulated sample
        lstind = NP.asarray(NP.argmin(NP.abs(simlst - select_lst))).reshape(-1)
    elif isinstance(select_lst, list):
        # List of LSTs: nearest simulated sample for each
        lstind = NP.asarray([NP.argmin(NP.abs(simlst - select_lst[i])) for i in range(len(select_lst))])
    else:
        raise TypeError('Invalid format for selecting LST')
    sysT = plot_info['3']['Tsys']
if '3a' in plots:
    # Plot model visibility spectra for selected baselines: top panel shows
    # the foreground (FG) visibility amplitude and noise RMS, bottom panel the
    # HI-induced deviation |V_FG+HI| - |V_FG|.
    # NOTE(review): original indentation was destroyed; nesting reconstructed
    # to mirror the parallel '3b' section -- confirm against upstream source.
    spw = plot_info['3a']['spw']
    if spw is not None:
        spwind = NP.asarray(spw).reshape(-1)
    blvects = NP.asarray(plot_info['3a']['bl'])
    bll = NP.sqrt(NP.sum(blvects**2, axis=1))
    # Baseline orientation in degrees, measured from East towards North
    blo = NP.degrees(NP.arctan2(blvects[:,1], blvects[:,0]))
    bltol = plot_info['3a']['bltol']
    # Match the requested baseline vectors to the nearest simulated baselines
    blinds, blrefinds, dbl = LKP.find_1NN(simvis_objs[0].baselines, blvects, distance_ULIM=bltol, remove_oob=True)
    blcolrs = ['black', 'red', 'cyan']
    for lind in lstind:
        fig, axs = PLT.subplots(nrows=2, ncols=1, sharex='col', gridspec_kw={'height_ratios': [2, 1]}, figsize=(3.6, 3), constrained_layout=False)
        for simind,simlbl in enumerate(simlabels):
            if spw is not None:
                for zind in spwind:
                    # Shade the selected spectral windows (subbands)
                    axs[simind].axvspan((freq_window_centers[zind]-0.5*freq_window_bw[zind])/1e6, (freq_window_centers[zind]+0.5*freq_window_bw[zind])/1e6, facecolor='0.8')
            for blno, blrefind in enumerate(blrefinds):
                if simind == 0:
                    axs[simind].plot(simvis_objs[simind].channels/1e6, NP.abs(simvis_objs[simind].skyvis_freq[blrefind,:,lind]), ls='-', color=blcolrs[blno], label='{0:.1f} m, {1:.1f}'.format(bll[blno], blo[blno])+r'$^\circ$')
                    if blno == blinds.size-1:
                        # Annotate once, after the last baseline is drawn
                        axs[simind].plot(simvis_objs[simind].channels/1e6, simvis_objs[0].vis_rms_freq[blrefind,:,lind], ls='--', color='black', label='Noise RMS')
                        axs[simind].text(0.05, 0.95, 'FG', transform=axs[simind].transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
                        axs[simind].set_ylabel(r'$|V|$ [Jy]', fontsize=12, weight='medium')
                        legend = axs[simind].legend(loc='upper right', shadow=False, fontsize=7)
                else:
                    # Deviation of the FG+HI amplitude from the FG-only amplitude
                    axs[simind].plot(simvis_objs[simind].channels/1e6, NP.abs(simvis_objs[0].skyvis_freq[blrefind,:,lind] + simvis_objs[simind].skyvis_freq[blrefind,:,lind]) - NP.abs(simvis_objs[0].skyvis_freq[blrefind,:,lind]), ls='-', color=blcolrs[blno], alpha=0.5)
                    if blno == blinds.size-1:
                        axs[simind].set_ylim(-5e-3, 4e-3)
                        axs[simind].text(0.95, 0.05, 'H I', transform=axs[simind].transAxes, fontsize=8, weight='medium', ha='right', va='bottom', color='black')
                        axs[simind].set_ylabel(r'$\delta |V|$ [Jy]', fontsize=12, weight='medium')
        fig.subplots_adjust(hspace=0, wspace=0)
        fig.subplots_adjust(top=0.95)
        fig.subplots_adjust(bottom=0.15)
        fig.subplots_adjust(left=0.25)
        fig.subplots_adjust(right=0.98)
        # Invisible full-figure axes used only to carry a shared x-label
        big_ax = fig.add_subplot(111)
        big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
        big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
        big_ax.set_xticks([])
        big_ax.set_yticks([])
        big_ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium', labelpad=20)
        PLT.savefig(figdir+'model_visibility_spectrum_{0:.1f}m_lst_{1:.3f}hr.pdf'.format(bll[blno], simlst[lind]), bbox_inches=0)
if '3b' in plots:
    # Plot model closure-phase spectra on the EQ28 triad: top panel shows the
    # foreground (FG) closure phase, bottom panel the HI-induced deviation
    # relative to the FG-only model.
    # NOTE(review): original indentation was destroyed; nesting reconstructed
    # from the statement order -- confirm against upstream source.
    spw = plot_info['3b']['spw']
    if spw is not None:
        spwind = NP.asarray(spw).reshape(-1)
    for lind in lstind:
        fig, axs = PLT.subplots(nrows=2, ncols=1, sharex='col', gridspec_kw={'height_ratios': [2, 1]}, figsize=(3.6, 3), constrained_layout=False)
        for simind,simlbl in enumerate(simlabels):
            if spw is not None:
                for zind in spwind:
                    # Shade the selected spectral windows (subbands)
                    axs[simind].axvspan((freq_window_centers[zind]-0.5*freq_window_bw[zind])/1e6, (freq_window_centers[zind]+0.5*freq_window_bw[zind])/1e6, facecolor='0.8')
            if simind == 0:
                axs[simind].plot(model_cpObjs[simind].f/1e6, model_cpObjs[simind].cpinfo['processed']['native']['cphase'][lind,0,0,:], ls='-', color='black')
                axs[simind].set_ylim(-NP.pi, NP.pi)
                axs[simind].set_ylabel(r'$\phi_\nabla^\mathrm{F}(f)$ [rad]', fontsize=12, weight='medium')
            elif simind == 1:
                # Differencing against the FG-only closure phase isolates the
                # (tiny) HI signature, hence the much narrower y-limits.
                axs[simind].plot(model_cpObjs[simind].f/1e6, model_cpObjs[simind].cpinfo['processed']['native']['cphase'][lind,0,0,:] - model_cpObjs[0].cpinfo['processed']['native']['cphase'][lind,0,0,:], ls='-', color='black')
                axs[simind].set_ylim(-2e-4, 2e-4)
                axs[simind].set_ylabel(r'$\delta\phi_\nabla^\mathrm{HI}(f)$ [rad]', fontsize=12, weight='medium')
        fig.subplots_adjust(hspace=0, wspace=0)
        fig.subplots_adjust(top=0.95, bottom=0.15, left=0.25, right=0.98)
        # Invisible full-figure axes used only to carry a shared x-label
        big_ax = fig.add_subplot(111)
        big_ax.set_facecolor('none') # matplotlib.__version__ >= 2.0.0
        big_ax.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)
        big_ax.set_xticks([])
        big_ax.set_yticks([])
        big_ax.set_xlabel(r'$f$ [MHz]', fontsize=12, weight='medium', labelpad=20)
        PLT.savefig(figdir+'model_cPhase_spectrum_EQ28_lst_{0:.3f}hr.pdf'.format(simlst[lind]), bbox_inches=0)
    PDB.set_trace()
if '3c' in plots:
n_days = plot_info['3c']['n_days']
n_batches = plot_info['3c']['n_batches']
t_field = plot_info['3c']['t_field'] * U.min
t_int = plot_info['3c']['t_int'] * U.s
n_pairs_of_batches = n_batches * (n_batches - 1) / 2.0 # Number of pairs of batches going into the cross-product
n_int_per_field = t_field * 60.0 / t_int # Number of coherent integrations on a field
npol = plot_info['3c']['npol']
sampling = plot_info['3c']['sampling']
spw = plot_info['3c']['spw']
if spw is None:
spwind = NP.arange(simDPS_objs[0].subband_delay_power_spectra['sim']['z'].size)
else:
spwind = NP.asarray(spw)
eff_A = plot_info['3c']['A_eff']
if isinstance(eff_A, (int,float)):
eff_A = eff_A + NP.zeros_like(freq_window_centers)
elif isinstance(eff_A, list):
eff_A = NP.asarray(eff_A) + NP.zeros_like(freq_window_centers)
else:
raise TypeError('Effective area must be a scalar or list')
eff_A = eff_A * U.m**2
blvects = NP.asarray(plot_info['3c']['bl'])
bll = NP.sqrt(NP.sum(blvects**2, axis=1))
blo = NP.degrees(NP.arctan2(blvects[:,1], blvects[:,0]))
bltol = plot_info['3c']['bltol']
blinds, blrefinds, dbl = LKP.find_1NN(simvis_objs[0].baselines, blvects, distance_ULIM=bltol, remove_oob=True)
bl_same_bin = plot_info['3c']['bl_same_bin']
blvctinds = []
blvctrefinds = []
blhists = []
blwts_coherent = []
blwts_incoherent = []
for blgrpind in range(len(bl_same_bin)):
blvctgrp = NP.asarray(bl_same_bin[blgrpind])
indNN_list, blind_ngbrof, blind_ngbrin = LKP.find_NN(simvis_objs[0].baselines, blvctgrp, distance_ULIM=bltol, flatten=True)
blvctinds += [blind_ngbrin]
blvctrefinds += [blind_ngbrof]
blhist, blind_type, bl_binnum, ri = OPS.binned_statistic(blind_ngbrin, values=None, statistic='count', bins=range(blind_ngbrin.max()+2), range=None)
blhists += [blhist]
blwts_coherent += [NP.sum(blhist**2)]
blwts_incoherent += [NP.sum(blhist)]
if sysT is None:
sysT = simvis_objs[0].Tsys
elif isinstance(sysT, (int,float)):
sysT = sysT + NP.zeros_like(simvis_objs[0].shape)
else:
raise TypeError('Input system temperature in invalid format')
sysT = sysT * U.K
freqinds = NP.asarray([NP.argmin(NP.abs(simvis_objs[0].channels - fwin)) for fwin in freq_window_centers])
nearest_Tsys = sysT[NP.ix_(blrefinds,freqinds,lstind)]
df = simvis_objs[0].freq_resolution * U.Hz
sysT_per_unit_visibility = nearest_Tsys / NP.sqrt(df * t_int * n_days) # Noise RMS temperature (in K) per batch. Of this, 1/sqrt(2) each in real and imaginary parts
sysT_per_unit_visibility_real = sysT_per_unit_visibility / NP.sqrt(2.0) # in K
sysT_per_unit_visibility_imag = sysT_per_unit_visibility / NP.sqrt(2.0) # in K
rms_noise_K_dspec_bin = sysT_per_unit_visibility * NP.sqrt(freq_window_bw.reshape(1,-1,1)*U.Hz / df) * df # in K.Hz, of which 1/sqrt(2) each in real and imaginary parts
rms_noise_K_dspec_bin_real = rms_noise_K_dspec_bin / NP.sqrt(2.0) # in K.Hz
rms_noise_K_dspec_bin_imag = rms_noise_K_dspec_bin / NP.sqrt(2.0) # in K.Hz
# Product of two independent Gaussian random variables is a modified Bessel function of the second kind with RMS as below:
rms_noise_K_crosssprod_bin_real = NP.sqrt(rms_noise_K_dspec_bin_real**2 * rms_noise_K_dspec_bin_real**2 + rms_noise_K_dspec_bin_imag**2 * rms_noise_K_dspec_bin_imag**2) / NP.sqrt(npol * n_pairs_of_batches * n_int_per_field) # in K^2 Hz^2, per baseline
rms_noise_K_crosssprod_bin_imag = NP.sqrt(rms_noise_K_dspec_bin_real**2 * rms_noise_K_dspec_bin_imag**2 + rms_noise_K_dspec_bin_real**2 * rms_noise_K_dspec_bin_imag**2) / NP.sqrt(npol * n_pairs_of_batches * n_int_per_field) # in K^2 Hz^2, per baseline
rest_freq_HI = CNST.rest_freq_HI * U.Hz
center_redshifts = rest_freq_HI / (freq_window_centers * U.Hz) - 1
redshifts_ulim = rest_freq_HI / ((freq_window_centers - 0.5 * freq_window_bw) * U.Hz) - 1
redshifts_llim = rest_freq_HI / ((freq_window_centers + 0.5 * freq_window_bw) * U.Hz) - 1
center_redshifts = center_redshifts.to_value()
redshifts_ulim = redshifts_ulim.to_value()
redshifts_llim = redshifts_llim.to_value()
wl = FCNST.c / (freq_window_centers * U.Hz)
rz = cosmo100.comoving_distance(center_redshifts)
drz = cosmo100.comoving_distance(redshifts_ulim) - cosmo100.comoving_distance(redshifts_llim)
conv_factor1 = (wl**2 / eff_A)
conv_factor2 = rz**2 * drz / (freq_window_bw * U.Hz)**2
conv_factor = conv_factor1 * conv_factor2
noise_xpspec_rms_real = rms_noise_K_crosssprod_bin_real * conv_factor.reshape(1,-1,1)
noise_xpspec_rms_real_blgroups = []
for blgrpind in range(len(bl_same_bin)):
noise_xpspec_rms_real_blgroups += [{'coh_bl': noise_xpspec_rms_real[blgrpind].to('K2 Mpc3') / NP.sqrt(blwts_coherent[blgrpind]), 'incoh_bl': noise_xpspec_rms_real[blgrpind].to('K2 Mpc3') / NP.sqrt(blwts_incoherent[blgrpind])}]
simDS_objs = [DS.DelaySpectrum(interferometer_array=simvis_obj) for simvis_obj in simvis_objs]
simDPS_objs = []
for simind,simlbl in enumerate(simlabels):
dspec = simDS_objs[simind].delay_transform(action='store')
subband_dspec = simDS_objs[simind].subband_delay_transform({key: freq_window_bw for key in ['cc', 'sim']}, freq_center={key: freq_window_centers for key in ['cc', 'sim']}, shape={key: freq_window_shape for key in ['cc', 'sim']}, fftpow={key: freq_window_fftpow for key in ['cc', 'sim']}, pad={key: pad for key in ['cc', 'sim']}, bpcorrect=False, action='return_resampled')
simDPS_objs = []
for simind,simlbl in enumerate(simlabels):
simDPS_objs += [DS.DelayPowerSpectrum(simDS_objs[simind])]
simDPS_objs[simind].compute_power_spectrum()
colrs_sim = ['black', 'black']
colrs_ref = ['gray', 'gray']
# colrs_sim = ['red', 'blue']
# colrs_ref = ['orange', 'cyan']
lstyles = [':', '-']
for blno, blrefind in enumerate(blrefinds):
for lstno,lind in enumerate(lstind):
for zind in spwind:
pstable = ascii.read(theory_HI_PS_files[psfile_inds[zind]])
k = pstable['col1'] # in 1/Mpc
delta2 = 1e-6 * pstable['col2'] # in K^2
pk = 2 * NP.pi**2 / k**3 * delta2 # in K^2 Mpc^3
k_h = k / h_Planck15 # in h/Mpc
pk_h = pk * h_Planck15**3 # in K^2 (Mpc/h)^3
kprll_sim = simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['kprll'][zind,:]
kperp_sim = simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['kperp'][zind,blrefind]
k_sim = NP.sqrt(kperp_sim**2 + kprll_sim**2)
log10_ps_interped = OPS.interpolate_array(NP.log10(pk_h), NP.log10(k_h), NP.log10(k_sim), axis=-1, kind='linear')
ps_interped = 10**log10_ps_interped
fig = PLT.figure(figsize=(4.0, 3.6))
ax = fig.add_subplot(111)
for simind,simlbl in enumerate(simlabels):
if simind == 0:
ax.plot(simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['kprll'][zind,:], 1e6*simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['skyvis_lag'][blrefind,zind,:,lind], ls=lstyles[simind], color=colrs_sim[zind], label=r'$P_\mathrm{F}$'+' ({0:.1f} MHz)'.format(freq_window_centers[zind]/1e6))
else:
ax.plot(simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['kprll'][zind,:], 1e6*simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['skyvis_lag'][blrefind,zind,:,lind], ls=lstyles[simind], color=colrs_sim[zind], label=r'$P_\mathrm{HI}$'+' (sim), '+r'$z=$'+'{0:.1f}'.format(simDPS_objs[simind].subband_delay_power_spectra['sim']['z'][zind]))
ax.plot(simDPS_objs[simind].subband_delay_power_spectra_resampled['sim']['kprll'][zind,:], 1e6*ps_interped, ls='-', color=colrs_ref[zind], label=r'$P_\mathrm{HI}$'+' (ref), '+r'$z=$'+'{0:.1f}'.format(simDPS_objs[simind].subband_delay_power_spectra['sim']['z'][zind]))
ax.axhline(y=noise_xpspec_rms_real_blgroups[blno]['coh_bl'][zind,lstno].to('mK2 Mpc3').value, ls='--', color='gray', label=r'$P_\mathrm{N}$'+' (red.)')
ax.axhline(y=noise_xpspec_rms_real_blgroups[blno]['incoh_bl'][zind,lstno].to('mK2 Mpc3').value, ls='--', color='black', label=r'$P_\mathrm{N}$'+' (non-red.)')
ax.set_yscale('log')
ax.legend(loc='upper right', shadow=False, fontsize=7.5)
ax.text(0.1, 0.9, '{0:.1f} m'.format(bll[blno]), transform=ax.transAxes, fontsize=8, weight='medium', ha='left', va='top', color='black')
ax.set_xlabel(r'$k_\parallel$ [$h$ Mpc$^{-1}$]')
ax.set_ylabel(r'$P_b(k_\parallel)$ [mK$^2$ $h^{-3}$ Mpc$^3$]')
axt = ax.twiny()
axt.set_xlim(1e6*simDS_objs[simind].subband_delay_spectra_resampled['sim']['lags'].min(), 1e6*simDS_objs[simind].subband_delay_spectra_resampled['sim']['lags'].max())
axt.set_xlabel(r'$\tau$'+' ['+r'$\mu$'+'s]')
fig.subplots_adjust(bottom=0.15, left=0.18, right=0.98)
# PLT.savefig(figdir+'delay_PS_{0:.1f}m_z_{1:.1f}_lst_{2:.3f}hr.pdf'.format(bll[blno], simDPS_objs[simind].subband_delay_power_spectra['sim']['z'][zind], simlst[lind]), bbox_inches=0)
PDB.set_trace()
| 159,173 | 78.547226 | 481 |
py
|
PRISim
|
PRISim-master/prisim/scriptUtils/replicatesim_util.py
|
import pprint
import warnings

import numpy as NP
import yaml
from astroutils import geometry as GEOM
from pyuvdata import UVData

import prisim
from prisim import interferometry as RI
prisim_path = prisim.__path__[0]+'/'
def replicate(parms=None):
    """
    Add independent thermal-noise realizations to a noiseless PRISim
    simulation and write the noisy replicates out.

    Parameters
    ----------
    parms : dict
        Parameter dictionary with 'dirstruct', 'telescope' and 'replicate'
        sections (see examples/simparms/replicatesim.yaml). If None or not a
        dictionary, an example parameter file is printed and ValueError is
        raised.

    Raises
    ------
    ValueError
        If parms is missing/invalid or the input/output formats are not
        supported ('hdf5'/'uvfits' in, 'npz'/'uvfits' out).
    """
    # Fix: the module never imports warnings, but warn() below needs it.
    import warnings

    if (parms is None) or (not isinstance(parms, dict)):
        example_yaml_filepath = prisim_path+'examples/simparms/replicatesim.yaml'
        print('\nInput parms must be specified as a dictionary in the format below. Be sure to check example with detailed descriptions in {0}\n'.format(example_yaml_filepath))
        with open(example_yaml_filepath, 'r') as parms_file:
            example_parms = yaml.safe_load(parms_file)
        pprint.pprint(example_parms)
        print('-----------------------\n')
        raise ValueError('Current input parameters insufficient or incompatible to proceed with.')
    else:
        # Directory structure and I/O formats
        indir = parms['dirstruct']['indir']
        infile = parms['dirstruct']['infile']
        infmt = parms['dirstruct']['infmt']
        outdir = parms['dirstruct']['outdir']
        outfile = parms['dirstruct']['outfile']
        outfmt = parms['dirstruct']['outfmt']

        if infmt.lower() not in ['hdf5', 'uvfits']:
            raise ValueError('Input simulation format must be "hdf5" or "uvfits"')
        if outfmt.lower() not in ['npz', 'uvfits']:
            raise ValueError('Output simulation format must be "npz" or "uvfits"')
        if infmt.lower() == 'uvfits':
            if outfmt.lower() != 'uvfits':
                warnings.warn('Forcing output format to "uvfits" since input format is in "uvfits"')
                outfmt = 'uvfits'

        # Load the noiseless visibilities and the metadata needed to compute
        # the thermal noise RMS (channel width, integration time, baselines).
        if infmt.lower() == 'hdf5':
            simvis = RI.InterferometerArray(None, None, None, init_file=indir+infile)
            freqs = simvis.channels
            nchan = freqs.size
            df = simvis.freq_resolution
            t_acc = NP.asarray(simvis.t_acc)
            ntimes = t_acc.shape[-1]
            dt = NP.mean(t_acc)
            nbl = simvis.baseline_lengths.size
            data_array = simvis.skyvis_freq
        else:
            uvd = UVData()
            uvd.read_uvfits(indir+infile+'.'+infmt)
            freqs = uvd.freq_array.ravel()
            df = uvd.channel_width
            nbl = uvd.Nbls
            t_acc = uvd.integration_time.reshape(-1,nbl)
            dt = NP.mean(t_acc[:,0])
            nchan = freqs.size
            ntimes = t_acc.shape[0]
            # (Nblts, Nspw, Nchan, Npol) -> (nbl, nchan, ntimes)
            data_array = NP.transpose(uvd.data_array[:,0,:,0].reshape(ntimes, nbl, nchan), (1,2,0))

        if outfmt.lower() == 'uvfits':
            if infmt.lower() == 'uvfits':
                # Template UVData object whose data array is overwritten per realization
                uvdummy = UVData()
                uvdummy.read_uvfits(indir+infile+'.'+infmt)

        # System temperature: either given directly or as receiver + antenna
        # temperature with a power-law frequency dependence.
        Tsys = parms['telescope']['Tsys']
        if Tsys is None:
            Trx = parms['telescope']['Trx']
            Tant_freqref = parms['telescope']['Tant_freqref']
            Tant_ref = parms['telescope']['Tant_ref']
            Tant_spindex = parms['telescope']['Tant_spindex']
            Tsys = Trx + Tant_ref * (freqs/Tant_freqref)**Tant_spindex
            Tsys = NP.asarray(Tsys).reshape(1,-1,1)

        A_eff = parms['telescope']['A_eff']
        eff_aprtr = parms['telescope']['eff_aprtr']
        A_eff *= eff_aprtr
        eff_Q = parms['telescope']['eff_Q']

        replicate_info = parms['replicate']
        n_avg = replicate_info['n_avg']
        n_realize = replicate_info['n_realize']
        seed = replicate_info['seed']
        if seed is None:
            # Fix: NP.random.random_integers() has been removed from NumPy;
            # randint(1, 100001) draws from the same inclusive range [1, 100000].
            seed = NP.random.randint(1, 100001)

        noiseRMS = RI.thermalNoiseRMS(A_eff, df, dt, Tsys, nbl=nbl, nchan=nchan, ntimes=ntimes, flux_unit='Jy', eff_Q=eff_Q)
        noiseRMS = noiseRMS[NP.newaxis,:,:,:] # (1,nbl,nchan,ntimes)
        rstate = NP.random.RandomState(seed)
        noise = noiseRMS / NP.sqrt(2.0*n_avg) * (rstate.randn(n_realize, nbl, nchan, ntimes) + 1j * rstate.randn(n_realize, nbl, nchan, ntimes)) # sqrt(2.0) is to split equal uncertainty into real and imaginary parts

        if outfmt.lower() == 'npz':
            # All realizations go into a single NPZ file
            outfilename = outdir + outfile + '_{0:03d}-{1:03d}.{2}'.format(1,n_realize,outfmt.lower())
            outarray = data_array[NP.newaxis,...] + noise
            NP.savez(outfilename, noiseless=data_array[NP.newaxis,...], noisy=outarray, noise=noise)
        else:
            # One uvfits file per noise realization
            for i in range(n_realize):
                outfilename = outdir + outfile + '-{0:03d}'.format(i+1)
                outarray = data_array + noise[i,...]
                if infmt.lower() == 'uvfits':
                    outfilename = outfilename + '-noisy.{0}'.format(outfmt.lower())
                    uvdummy.data_array = NP.transpose(NP.transpose(outarray, (2,0,1)).reshape(nbl*ntimes, nchan, 1, 1), (0,2,1,3)) # (Nbls, Nfreqs, Ntimes) -> (Ntimes, Nbls, Nfreqs) -> (Nblts, Nfreqs, Nspws=1, Npols=1) -> (Nblts, Nspws=1, Nfreqs, Npols=1)
                    uvdummy.write_uvfits(outfilename, force_phase=True, spoof_nonessential=True)
                else:
                    simvis.vis_freq = outarray
                    # Convert the pointing center to RA/Dec, stepping through
                    # dircos -> altaz -> hadec -> radec as needed.
                    phase_center = simvis.pointing_center[0,:].reshape(1,-1)
                    phase_center_coords = simvis.pointing_coords
                    if phase_center_coords == 'dircos':
                        phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
                        phase_center_coords = 'altaz'
                    if phase_center_coords == 'altaz':
                        phase_center = GEOM.altaz2hadec(phase_center, simvis.latitude, units='degrees')
                        phase_center_coords = 'hadec'
                    if phase_center_coords == 'hadec':
                        phase_center = NP.hstack((simvis.lst[0]-phase_center[0,0], phase_center[0,1]))
                        phase_center_coords = 'radec'
                    if phase_center_coords != 'radec':
                        raise ValueError('Invalid phase center coordinate system')
                    uvfits_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
                    simvis.rotate_visibilities(uvfits_ref_point)
                    simvis.write_uvfits(outfilename, uvfits_parms={'ref_point': None, 'method': None, 'datapool': ['noisy']}, overwrite=True, verbose=True)
| 6,296 | 49.782258 | 255 |
py
|
PRISim
|
PRISim-master/prisim/scriptUtils/write_PRISim_bispectrum_phase_to_npz_util.py
|
import pprint, yaml
import numpy as NP
from prisim import bispectrum_phase as BSP
import prisim
prisim_path = prisim.__path__[0]+'/'
def write(parms=None):
    """
    Write PRISim bispectrum phases to an NPZ file using the settings in a
    parameter dictionary.

    Parameters
    ----------
    parms : dict
        Parameter dictionary with 'dirStruct' and 'proc' sections. If None or
        not a dictionary, an example configuration is printed and ValueError
        is raised.
    """
    if not isinstance(parms, dict):
        # Show the user a fully worked example configuration before failing.
        example_yaml_filepath = prisim_path+'examples/ioparms/model_bispectrum_phase_to_npz_parms.yaml'
        print('\nInput parms must be specified as a dictionary in the format below. Be sure to check example with detailed descriptions in {0}\n'.format(example_yaml_filepath))
        with open(example_yaml_filepath, 'r') as parms_file:
            pprint.pprint(yaml.safe_load(parms_file))
        print('-----------------------\n')
        raise ValueError('Current input parameters insufficient or incompatible to proceed with.')

    dirinfo = parms['dirStruct']
    procparms = parms['proc']

    # Input/output locations
    indir = dirinfo['indir']
    infile_prefix = dirinfo['infile_prfx']
    infmt = dirinfo['infmt']
    simdir = dirinfo['prisim_dir']
    simfile_prefix = dirinfo['simfile_prfx']
    outdir = dirinfo['outdir']
    outfile_prefix = dirinfo['outfile_prfx']

    # Non-HDF5 inputs additionally require the PRISim simulation location so
    # the HDF5 counterpart can be found.
    hdf5file_prefix = None
    if infmt.lower() != 'hdf5':
        if (simdir is None) or (simfile_prefix is None):
            raise TypeError('Inputs prisim_dir and simfile_prfx must both be specified')
        if not isinstance(simdir, str):
            raise TypeError('Input simdir must be a string')
        if not isinstance(simfile_prefix, str):
            raise TypeError('Input simfile_prefix must be a string')
        hdf5file_prefix = simdir + simfile_prefix

    BSP.write_PRISim_bispectrum_phase_to_npz(indir+infile_prefix,
                                             outdir+outfile_prefix,
                                             triads=procparms['triads'],
                                             bltriplet=NP.asarray(procparms['bltriplet']),
                                             hdf5file_prefix=hdf5file_prefix,
                                             infmt=infmt,
                                             datakey=procparms['datakey'],
                                             blltol=procparms['blltol'])
| 2,073 | 44.086957 | 205 |
py
|
PRISim
|
PRISim-master/prisim/scriptUtils/__init__.py
|
# Package initialization for prisim.scriptUtils: package-level metadata plus
# the git revision recorded alongside the installed package.
import os as _os

__version__='0.1.0'
__description__='Precision Radio Interferometry Simulator'
__author__='Nithyanandan Thyagarajan'
__authoremail__='[email protected]'
__maintainer__='Nithyanandan Thyagarajan'
__maintaineremail__='[email protected]'
__url__='http://github.com/nithyanandan/prisim'

# githash.txt is written at build/install time one level above this package;
# expose its first line (the commit hash) as __githash__.
with open(_os.path.dirname(_os.path.abspath(__file__))+'/../githash.txt', 'r') as _githash_file:
    __githash__ = _githash_file.readline()
| 456 | 34.153846 | 96 |
py
|
PRISim
|
PRISim-master/scripts/write_PRISim_bispectrum_phase_to_npz.py
|
#!python
import yaml, argparse
import prisim
from prisim.scriptUtils import write_PRISim_bispectrum_phase_to_npz_util
import ipdb as PDB
prisim_path = prisim.__path__[0]+'/'
if __name__ == '__main__':
    # Command-line entry point: read the YAML parameter file and hand the
    # resulting dictionary to the writer utility.
    cli = argparse.ArgumentParser(description='Program to extract bispectrum phases and save to output file for further processing')
    group = cli.add_argument_group('Input parameters', 'Input specifications')
    group.add_argument('-i', '--infile', dest='infile', default=prisim_path+'examples/ioparms/model_bispectrum_phase_to_npz_parms.yaml', type=str, required=False, help='File specifying input parameters')
    cli_args = vars(cli.parse_args())

    with open(cli_args['infile'], 'r') as fobj:
        config = yaml.safe_load(fobj)

    write_PRISim_bispectrum_phase_to_npz_util.write(config)
| 875 | 32.692308 | 209 |
py
|
PRISim
|
PRISim-master/scripts/make_redundant_visibilities.py
|
#!python
import yaml
import argparse
import numpy as NP
from prisim import interferometry as RI
import write_PRISim_visibilities as PRISimWriter
import ipdb as PDB
if __name__ == '__main__':

    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Program to duplicate redundant baseline measurements')

    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    input_group.add_argument('-s', '--simfile', dest='simfile', type=str, required=True, help='HDF5 file from PRISim simulation')
    input_group.add_argument('-p', '--parmsfile', dest='parmsfile', default=None, type=str, required=False, help='File specifying simulation parameters')

    output_group = parser.add_argument_group('Output parameters', 'Output specifications')
    output_group.add_argument('-o', '--outfile', dest='outfile', default=None, type=str, required=True, help='Output File with redundant measurements')
    output_group.add_argument('--outfmt', dest='outfmt', default=['hdf5'], type=str, required=True, nargs='*', choices=['HDF5', 'hdf5', 'UVFITS', 'uvfits', 'UVH5', 'uvh5'], help='Output file format')

    misc_group = parser.add_argument_group('Misc parameters', 'Misc specifications')
    misc_group.add_argument('-w', '--wait', dest='wait', action='store_true', help='Wait after run')

    args = vars(parser.parse_args())

    outfile = args['outfile']
    outformats = args['outfmt']
    parmsfile = args['parmsfile']

    simobj = RI.InterferometerArray(None, None, None, init_file=args['simfile'])

    # Fix: parms must exist (as None) even when no parameters file is given,
    # because the phase-center logic further below consults it; previously
    # that path raised NameError.
    parms = None
    if parmsfile is not None:
        with open(parmsfile, 'r') as pfile:
            parms = yaml.safe_load(pfile)

        blinfo = RI.getBaselineInfo(parms)
        bl = blinfo['bl']
        blgroups = blinfo['groups']
        bl_length = NP.sqrt(NP.sum(bl**2, axis=1))

        simbl = simobj.baselines
        if simbl.shape[0] == bl.shape[0]:
            # Encode each baseline as a "length_zenithangle_orientation"
            # string (angles scaled to milli-degrees) for exact comparison.
            simbll = NP.sqrt(NP.sum(simbl**2, axis=1))
            simblo = NP.angle(simbl[:,0] + 1j * simbl[:,1], deg=True)
            simblza = NP.degrees(NP.arccos(simbl[:,2] / simbll))

            simblstr = ['{0[0]:.2f}_{0[1]:.3f}_{0[2]:.3f}'.format(lo) for lo in zip(simbll,3.6e3*simblza,3.6e3*simblo)]

            inp_blo = NP.angle(bl[:,0] + 1j * bl[:,1], deg=True)
            inp_blza = NP.degrees(NP.arccos(bl[:,2] / bl_length))
            inp_blstr = ['{0[0]:.2f}_{0[1]:.3f}_{0[2]:.3f}'.format(lo) for lo in zip(bl_length,3.6e3*inp_blza,3.6e3*inp_blo)]

            uniq_inp_blstr, inp_ind, inp_invind = NP.unique(inp_blstr, return_index=True, return_inverse=True) ## if numpy.__version__ < 1.9.0
            uniq_sim_blstr, sim_ind, sim_invind = NP.unique(simblstr, return_index=True, return_inverse=True) ## if numpy.__version__ < 1.9.0

            if simbl.shape[0] != uniq_sim_blstr.size:
                raise ValueError('Non-redundant baselines already found in the simulations')

            if not NP.array_equal(uniq_inp_blstr, uniq_sim_blstr):
                raise ValueError('Layout from input simulation parameters file do not match simulated data.')

            simobj.duplicate_measurements(blgroups=blgroups)
        else:
            # Fix: this branch is a baseline-count mismatch; the previous
            # message duplicated the layout-mismatch wording above.
            raise ValueError('Number of baselines in the input parameters file does not match the simulated data.')
    else:
        simobj.duplicate_measurements()

    # The following "if" statement is to allow previous buggy saved versions
    # of HDF5 files that did not save the projected_baselines attribute in the
    # right shape when n_acc=1
    update_projected_baselines = False
    if simobj.projected_baselines.ndim != 3:
        update_projected_baselines = True
    else:
        if simobj.projected_baselines.shape[2] != simobj.n_acc:
            update_projected_baselines = True

    if update_projected_baselines:
        # Fix: GEOM was referenced here but never imported at module level.
        from astroutils import geometry as GEOM
        uvw_ref_point = None
        if (parms is None) or (parms['save_formats']['phase_center'] is None):
            # Derive the phase center from the simulation's own pointing,
            # converting dircos -> altaz -> hadec -> radec as needed.
            phase_center = simobj.pointing_center[0,:].reshape(1,-1)
            phase_center_coords = simobj.pointing_coords
            if phase_center_coords == 'dircos':
                phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
                phase_center_coords = 'altaz'
            if phase_center_coords == 'altaz':
                phase_center = GEOM.altaz2hadec(phase_center, simobj.latitude, units='degrees')
                phase_center_coords = 'hadec'
            if phase_center_coords == 'hadec':
                phase_center = NP.hstack((simobj.lst[0]-phase_center[0,0], phase_center[0,1]))
                phase_center_coords = 'radec'
            if phase_center_coords != 'radec':
                raise ValueError('Invalid phase center coordinate system')
            uvw_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
        else:
            uvw_ref_point = {'location': NP.asarray(parms['save_formats']['phase_center']).reshape(1,-1), 'coords': 'radec'}

        simobj.project_baselines(uvw_ref_point)

    PRISimWriter.save(simobj, outfile, outformats, parmsfile=parmsfile)

    wait_after_run = args['wait']
    if wait_after_run:
        PDB.set_trace()
| 5,594 | 49.863636 | 199 |
py
|
PRISim
|
PRISim-master/scripts/FEKO_beam_to_healpix.py
|
#!python
import ast
import numpy as NP
import healpy as HP
import yaml, h5py
from astropy.io import fits
import argparse
from scipy import interpolate
import progressbar as PGB
from astroutils import mathops as OPS
import ipdb as PDB
def read_FEKO(infile):
    """
    Parse a FEKO far-field text export.

    Parameters
    ----------
    infile : str
        Path to the FEKO output file. Header lines start with '#' or '*';
        '#'-lines carrying 'Frequency', 'Theta Samples' and 'Phi Samples'
        provide the metadata, and each remaining data line holds theta, phi
        and (in its last column) the gain in dB.

    Returns
    -------
    tuple
        (freqs, theta_list, phi_list, theta, phi, gaindB) where freqs is the
        array of frequencies, theta_list/phi_list are the per-sample angles
        of the first frequency block (phi rotated by 90 deg), theta/phi are
        uniform axes spanning the sampled ranges, and gaindB has shape
        (nfreqs, nsamples).

    Raises
    ------
    IndexError
        If the number of parsed frequencies and gain blocks disagree.
    """
    freqs = []
    theta_list = []
    phi_list = []
    gaindB = []
    ntheta = None
    nphi = None
    with open(infile, 'r') as fileobj:
        for line in fileobj.readlines():
            words = line.split()
            if 'Frequency' in line:
                freqs += [ast.literal_eval(words[1])]
                gaindB += [[]]  # start a new per-frequency gain list
            if ntheta is None:
                if 'Theta Samples' in line:
                    ntheta = ast.literal_eval(words[-1])
            if nphi is None:
                if 'Phi Samples' in line:
                    nphi = ast.literal_eval(words[-1])
            if (line[0] != '#') and (line[0] != '*') and (len(words) > 0):
                # Data row: gain is the last column
                gaindB[-1] += [ast.literal_eval(words[-1])]
                if len(gaindB) <= 1:
                    # Angular grid is identical for every frequency; record it
                    # only while parsing the first frequency block.
                    theta_list += [ast.literal_eval(words[0])]
                    phi_list += [ast.literal_eval(words[1])]
    if len(gaindB) != len(freqs):
        raise IndexError('Number of frequencies do not match number of channels in gains. Requires debugging.')
    freqs = NP.asarray(freqs)
    theta_list = NP.asarray(theta_list)
    phi_list = NP.asarray(phi_list) + 90 # This 90 deg rotation is required to be compatible with HEALPIX and general spherical coordinate convention for phi. Not sure if it must be +90 or -90 but should not make a difference if the beam has symmetry
    gaindB = NP.asarray(gaindB)
    theta = NP.linspace(theta_list.min(), theta_list.max(), ntheta)
    phi = NP.linspace(phi_list.min(), phi_list.max(), nphi)
    return (freqs, theta_list, phi_list, theta, phi, gaindB)
def convert_to_healpix(theta, phi, gains, nside=32, interp_method='spline', gainunit_in='dB', gainunit_out=None, angunits='radians'):
    """
    Resample an antenna power pattern, sampled on a (theta, phi) grid or at
    scattered (theta, phi) points, onto a peak-normalized HEALPIX map.

    Parameters
    ----------
    theta, phi : numpy arrays
        Sample angles, either axes of a grid (gridded gains) or one angle per
        sample (scattered gains).
    gains : numpy array
        Gains, shaped (theta.size, phi.size) for gridded input or flat with
        one value per (theta, phi) pair for scattered input.
    nside : int
        HEALPIX nside of the output map.
    interp_method : {'spline', 'nearest', 'healpix'}
        Interpolation scheme.
    gainunit_in : str
        'dB' if gains are in decibels, anything else for linear units.
    gainunit_out : str or None
        'dB' for decibel output; None or anything else for linear output.
    angunits : str
        'degrees' if theta/phi are in degrees, otherwise radians.

    Returns
    -------
    numpy array
        HEALPIX map of the peak-normalized pattern; pixels that received no
        data are set to healpy's UNSEEN sentinel.
    """
    try:
        theta, phi, gains
    except NameError:
        raise NameError('Inputs theta, phi and gains must be specified')
    if not HP.isnsideok(nside):
        raise ValueError('Specified nside invalid')
    if not isinstance(interp_method, str):
        raise TypeError('Input interp_method must be a string')
    if interp_method not in ['spline', 'nearest', 'healpix']:
        # Fix: previously raised the undefined name "valueError"
        raise ValueError('Input interp_method value specified is invalid')
    if gainunit_out is None:
        # Fix: the default None crashed on gainunit_out.lower() below;
        # treat it as linear ('regular') output, matching the calling script.
        gainunit_out = 'regular'
    if gains.shape == (theta.size, phi.size):
        gridded = True
    elif (gains.size == theta.size) and (gains.size == phi.size):
        gridded = False
    else:
        raise ValueError('Inputs theta, phi and gains have incompatible dimensions')

    if angunits.lower() == 'degrees':
        theta = NP.radians(theta)
        phi = NP.radians(phi)
    phi = NP.angle(NP.exp(1j*phi)) # Bring all phi in [-pi,pi] range
    phi[phi<0.0] += 2*NP.pi # Bring all phi in [0, 2 pi] range

    hmap = NP.empty(HP.nside2npix(nside))
    wtsmap = NP.empty(HP.nside2npix(nside))
    hmap.fill(NP.nan)
    wtsmap.fill(NP.nan)

    if interp_method == 'spline':
        # Spline interpolation is done in dB so the dynamic range is tame.
        if gainunit_in.lower() != 'db':
            gains = 10.0 * NP.log10(gains)
        hpxtheta, hpxphi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
        # Find the in-bound and out-of-bound indices to handle the boundaries
        inb = NP.logical_and(NP.logical_and(hpxtheta>=theta.min(), hpxtheta<=theta.max()), NP.logical_and(hpxphi>=phi.min(), hpxphi<=phi.max()))
        pub = hpxphi < phi.min()
        pob = hpxphi > phi.max()
        oob = NP.logical_not(inb)
        inb_ind = NP.where(inb)[0]
        oob_ind = NP.where(oob)[0]
        pub_ind = NP.where(pub)[0]
        pob_ind = NP.where(pob)[0]
        # Perform regular interpolation in in-bound indices
        if NP.any(inb):
            if gridded:
                interp_func = interpolate.RectBivariateSpline(theta, phi, gains)
                hmap[inb_ind] = interp_func.ev(hpxtheta[inb_ind], hpxphi[inb_ind])
            else:
                hmap[inb_ind] = interpolate.griddata(NP.hstack((theta.reshape(-1,1),phi.reshape(-1,1))), gains, NP.hstack((hpxtheta[inb_ind].reshape(-1,1),hpxphi[inb_ind].reshape(-1,1))), method='cubic')
        if NP.any(pub): # Under bound at phi=0
            phi[phi>NP.pi] -= 2*NP.pi # Bring oob phi in [-pi, pi] range
            if gridded:
                interp_func = interpolate.RectBivariateSpline(theta, phi, gains)
                hmap[pub_ind] = interp_func.ev(hpxtheta[pub_ind], hpxphi[pub_ind])
            else:
                hmap[pub_ind] = interpolate.griddata(NP.hstack((theta.reshape(-1,1),phi.reshape(-1,1))), gains, NP.hstack((hpxtheta[pub_ind].reshape(-1,1),hpxphi[pub_ind].reshape(-1,1))), method='cubic')
        if NP.any(pob): # Over bound at phi=2 pi
            phi[phi<0.0] += 2*NP.pi # Bring oob phi in [0, 2 pi] range
            phi[phi<NP.pi] += 2*NP.pi # Bring oob phi in [pi, 3 pi] range
            if gridded:
                interp_func = interpolate.RectBivariateSpline(theta, phi, gains)
                hmap[pob_ind] = interp_func.ev(hpxtheta[pob_ind], hpxphi[pob_ind])
            else:
                hmap[pob_ind] = interpolate.griddata(NP.hstack((theta.reshape(-1,1),phi.reshape(-1,1))), gains, NP.hstack((hpxtheta[pob_ind].reshape(-1,1),hpxphi[pob_ind].reshape(-1,1))), method='cubic')
        hmap -= NP.nanmax(hmap) # Peak-normalize (0 dB at maximum)
        if gainunit_out.lower() != 'db':
            hmap = 10**(hmap/10)
    else:
        # Binning-based methods ('healpix' weights or nearest neighbour)
        # operate on linear gains.
        if gainunit_in.lower() == 'db':
            gains = 10**(gains/10.0)
        if gridded:
            phi_flattened, theta_flattened = NP.meshgrid(phi, theta)
            theta_flattened = theta_flattened.flatten()
            phi_flattened = phi_flattened.flatten()
            gains = gains.flatten()
        else:
            theta_flattened = theta
            phi_flattened = phi
        if interp_method == 'healpix':
            # Distribute each sample onto its four HEALPIX neighbours with
            # the healpy interpolation weights, then normalize per pixel.
            ngbrs, wts = HP.get_interp_weights(nside, theta_flattened, phi=phi_flattened)
            gains4 = gains.reshape(1,-1) * NP.ones(ngbrs.shape[0]).reshape(-1,1)
            wtsmap, be, bn, ri = OPS.binned_statistic(ngbrs.ravel(), values=wts.ravel(), statistic='sum', bins=NP.arange(HP.nside2npix(nside)+1))
            hmap, be, bn, ri = OPS.binned_statistic(ngbrs.ravel(), values=(wts*gains4).ravel(), statistic='sum', bins=NP.arange(HP.nside2npix(nside)+1))
        else: # nearest neighbour
            ngbrs = HP.ang2pix(nside, theta_flattened, phi_flattened)
            wtsmap, be, bn, ri = OPS.binned_statistic(ngbrs.ravel(), statistic='count', bins=NP.arange(HP.nside2npix(nside)+1))
            hmap, be, bn, ri = OPS.binned_statistic(ngbrs.ravel(), values=gains.ravel(), statistic='sum', bins=NP.arange(HP.nside2npix(nside)+1))
        # Mask pixels with no (or numerically negligible) weight
        ind_nan = NP.isnan(wtsmap)
        other_nanind = wtsmap < 1e-12
        ind_nan = ind_nan | other_nanind
        wtsmap[ind_nan] = NP.nan
        hmap /= wtsmap
        hmap /= NP.nanmax(hmap) # Peak-normalize
        if gainunit_out.lower() == 'db':
            hmap = 10.0 * NP.log10(hmap)
    ind_nan = NP.isnan(hmap)
    hmap[ind_nan] = HP.UNSEEN
    return hmap
def write_HEALPIX(beaminfo, outfile, outfmt='HDF5'):
    """
    Write HEALPIX beam maps to disk in HDF5 or FITS format.

    Parameters
    ----------
    beaminfo : dict
        Must contain 'gains' (dict of per-polarization map arrays) and
        'freqs'; also consults 'source', 'nside', 'gainunit' and (FITS only)
        'npol'.
    outfile : str
        Output path without extension; the lower-cased format is appended.
    outfmt : str
        'HDF5' or 'FITS' (case-insensitive).

    Raises
    ------
    TypeError, KeyError, ValueError
        On invalid argument types, missing beaminfo keys, or an unsupported
        output format.
    """
    try:
        outfile, beaminfo
    except NameError:
        raise NameError('Inputs outfile and beaminfo must be specified')
    if not isinstance(outfile, str):
        raise TypeError('Output filename must be a string')
    if not isinstance(beaminfo, dict):
        raise TypeError('Input beaminfo must be a dictionary')
    if 'gains' not in beaminfo:
        raise KeyError('Input beaminfo missing "gains" key')
    if 'freqs' not in beaminfo:
        raise KeyError('Input beaminfo missing "freqs" key')
    if not isinstance(outfmt, str):
        raise TypeError('Output format must be specified in a string')
    if outfmt.lower() not in ['fits', 'hdf5']:
        raise ValueError('Output file format invalid')

    outfilename = outfile + '.' + outfmt.lower()
    if outfmt.lower() == 'hdf5':
        with h5py.File(outfilename, 'w') as fileobj:
            hdr_grp = fileobj.create_group('header')
            hdr_grp['npol'] = len(beaminfo['gains'].keys())
            hdr_grp['source'] = beaminfo['source']
            hdr_grp['nchan'] = beaminfo['freqs'].size
            hdr_grp['nside'] = beaminfo['nside']
            hdr_grp['gainunit'] = beaminfo['gainunit']
            spec_grp = fileobj.create_group('spectral_info')
            spec_grp['freqs'] = beaminfo['freqs']
            spec_grp['freqs'].attrs['units'] = 'Hz'
            gain_grp = fileobj.create_group('gain_info')
            for key in beaminfo['gains']: # Different polarizations
                dset = gain_grp.create_dataset(key, data=beaminfo['gains'][key], chunks=(1,beaminfo['gains'][key].shape[1]), compression='gzip', compression_opts=9)
    else:
        # Fix: the polarization list previously relied on a global "pols"
        # defined only in the calling script; derive it from beaminfo instead.
        pols = list(beaminfo['gains'])
        hdulist = []
        hdulist += [fits.PrimaryHDU()]
        hdulist[0].header['EXTNAME'] = 'PRIMARY'
        hdulist[0].header['NPOL'] = (beaminfo['npol'], 'Number of polarizations')
        hdulist[0].header['SOURCE'] = (beaminfo['source'], 'Source of data')
        hdulist[0].header['GAINUNIT'] = (beaminfo['gainunit'], 'Units of gain')
        for pi,pol in enumerate(pols):
            hdu = fits.ImageHDU(beaminfo['gains'][pol].T, name='BEAM_{0}'.format(pol))
            hdu.header['PIXTYPE'] = ('HEALPIX', 'Type of pixelization')
            hdu.header['ORDERING'] = ('RING', 'Pixel ordering scheme, either RING or NESTED')
            hdu.header['NSIDE'] = (beaminfo['nside'], 'NSIDE parameter of HEALPIX')
            npix = HP.nside2npix(beaminfo['nside'])
            hdu.header['NPIX'] = (npix, 'Number of HEALPIX pixels')
            hdu.header['FIRSTPIX'] = (0, 'First pixel # (0 based)')
            hdu.header['LASTPIX'] = (npix-1, 'Last pixel # (0 based)')
            hdulist += [hdu]
            hdulist += [fits.ImageHDU(beaminfo['freqs'], name='FREQS_{0}'.format(pol))]
        outhdu = fits.HDUList(hdulist)
        # Fix: "clobber" was removed from astropy; "overwrite" is equivalent.
        outhdu.writeto(outfilename, overwrite=True)
if __name__ == '__main__':

    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Program to convert simulated beams into healpix format')

    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    # Fix: "type=file" used the Python-2-only builtin; argparse.FileType('r')
    # is the portable equivalent and still yields an open file object.
    input_group.add_argument('-i', '--infile', dest='infile', default=None, type=argparse.FileType('r'), required=True, help='File specifying input parameters. Example in prisim/examples/pbparms/FEKO_beam_to_healpix.yaml')

    args = vars(parser.parse_args())

    with args['infile'] as parms_file:
        parms = yaml.safe_load(parms_file)

    # Input/output locations
    ioparms = parms['io']
    indir = ioparms['indir']
    infmt = ioparms['infmt']
    p1infile = indir + ioparms['p1infile']
    p2infile = indir + ioparms['p2infile']
    infiles = [p1infile, p2infile]
    outdir = ioparms['outdir']
    outfmt = ioparms['outfmt']
    outfile = outdir + ioparms['outfile']

    # Processing parameters
    gridded = parms['processing']['is_grid']
    nside = parms['processing']['nside']
    gainunit_in = parms['processing']['gainunit_in']
    gainunit_out = parms['processing']['gainunit_out']
    if gainunit_out is None:
        gainunit_out = 'regular'
    interp_method = parms['processing']['interp']
    wait_after_run = parms['processing']['wait']
    beam_src = parms['misc']['source']

    # Fix: only FEKO input is handled; previously any other format fell
    # through and crashed later with NameError on "freqs".
    if infmt.lower() != 'feko':
        raise ValueError('Only "FEKO" input format is currently supported')

    pols = ['P1', 'P2']
    gains = {}
    for pi,pol in enumerate(pols):
        if infiles[pi] is not None:
            freqs, theta_list, phi_list, theta, phi, gaindB = read_FEKO(infiles[pi])
            if gridded and (interp_method == 'spline'):
                gaindB = NP.transpose(gaindB.reshape(freqs.size,phi.size,theta.size), (0,2,1)) # nchan x ntheta x nphi
            gains[pol] = NP.copy(gaindB).astype(NP.float64)

    # Convert every frequency channel of every polarization to HEALPIX
    hmaps = {pol: [] for pol in pols}
    for pi,pol in enumerate(pols):
        progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Channels'.format(freqs.size), PGB.ETA()], maxval=freqs.size).start()
        for freqind,freq in enumerate(freqs):
            if gridded and (interp_method == 'spline'):
                hmap = convert_to_healpix(theta, phi, gains[pol][freqind,:,:], nside=nside, interp_method=interp_method, gainunit_in=gainunit_in, gainunit_out=gainunit_out, angunits='degrees')
            else:
                hmap = convert_to_healpix(theta_list, phi_list, gains[pol][freqind,:], nside=nside, interp_method=interp_method, gainunit_in=gainunit_in, gainunit_out=gainunit_out, angunits='degrees')
            hmaps[pol] += [hmap]
            progress.update(freqind+1)
        progress.finish()
        hmaps[pol] = NP.asarray(hmaps[pol])

    beaminfo = {'npol': len(pols), 'nside': nside, 'source': beam_src, 'freqs': freqs, 'gains': hmaps, 'gainunit': gainunit_out}
    write_HEALPIX(beaminfo, outfile, outfmt=outfmt)

    if wait_after_run:
        PDB.set_trace()
| 13,656 | 47.088028 | 250 |
py
|
PRISim
|
PRISim-master/scripts/replicate_sim.py
|
#!python
import yaml, argparse, copy, warnings
import prisim
from prisim.scriptUtils import replicatesim_util
import ipdb as PDB
prisim_path = prisim.__path__[0]+'/'
if __name__ == '__main__':

    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Program to replicate simulated interferometer array data')

    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    # Fix: "type=file" used the Python-2-only builtin; argparse.FileType('r')
    # is the portable equivalent and still yields an open file object.
    input_group.add_argument('-i', '--infile', dest='infile', default=prisim_path+'examples/simparms/replicatesim.yaml', type=argparse.FileType('r'), required=False, help='File specifying input parameters for replicating PRISim output')

    args = vars(parser.parse_args())

    with args['infile'] as parms_file:
        parms = yaml.safe_load(parms_file)

    # Optional diagnostic breakpoints before/after the run; tolerate a
    # missing 'diagnosis' section in the YAML file.
    diagnosis = parms.get('diagnosis', {}) or {}
    wait_before_run = diagnosis.get('wait_before_run', False)
    wait_after_run = diagnosis.get('wait_after_run', False)

    if wait_before_run:
        PDB.set_trace()

    # Perform replication
    replicatesim_util.replicate(parms)

    if wait_after_run:
        PDB.set_trace()
| 1,250 | 28.785714 | 218 |
py
|
PRISim
|
PRISim-master/scripts/altsim_interface.py
|
#!python
import yaml, argparse, ast, warnings
import numpy as NP
from astropy.io import ascii
from astropy.time import Time
import prisim
prisim_path = prisim.__path__[0]+'/'
def simparms_from_pyuvsim_to_prisim(pyuvsim_parms, prisim_parms):
    """
    Translate a pyuvsim parameter dictionary into a PRISim parameter
    dictionary.

    Parameters
    ----------
    pyuvsim_parms : dict
        Parsed pyuvsim obsparam configuration.
    prisim_parms : dict
        PRISim parameter dictionary (typically the PRISim default template)
        that is updated in place.

    Returns
    -------
    dict
        The updated prisim_parms dictionary. As a side effect, the pyuvsim
        source catalog is rewritten in PRISim's fixed-width format next to
        the original with a '_prisim.txt' suffix.
    """
    if not isinstance(pyuvsim_parms, dict):
        raise TypeError('Input pyuvsim_parms must be a dictionary')
    if not isinstance(prisim_parms, dict):
        raise TypeError('Input prisim_parms must be a dictionary')

    # I/O and directory structure: split pyuvsim's outdir into PRISim's
    # rootdir/project hierarchy.
    pyuvsim_outpath = pyuvsim_parms['filing']['outdir']
    pyuvsim_outpath_hierarchy = pyuvsim_outpath.split('/')
    pyuvsim_outpath_hierarchy = [item for item in pyuvsim_outpath_hierarchy if item != '']
    prisim_parms['dirstruct']['rootdir'] = '/' + '/'.join(pyuvsim_outpath_hierarchy[:-1]) + '/'
    prisim_parms['dirstruct']['project'] = '/'.join(pyuvsim_outpath_hierarchy[-1:])
    prisim_parms['dirstruct']['simid'] = pyuvsim_parms['filing']['outfile_name']

    # Telescope parameters
    pyuvsim_telescope_parms = pyuvsim_parms['telescope']
    with open(pyuvsim_telescope_parms['telescope_config_name'], 'r') as pyuvsim_telescope_config_file:
        pyuvsim_telescope_config = yaml.safe_load(pyuvsim_telescope_config_file)
    telescope_location = ast.literal_eval(pyuvsim_telescope_config['telescope_location'])
    prisim_parms['telescope']['latitude'] = telescope_location[0]
    prisim_parms['telescope']['longitude'] = telescope_location[1]
    prisim_parms['telescope']['altitude'] = telescope_location[2]

    # Array parameters
    prisim_parms['array']['redundant'] = True
    prisim_parms['array']['layout'] = None
    prisim_parms['array']['file'] = pyuvsim_telescope_parms['array_layout']
    prisim_parms['array']['filepathtype'] = 'custom'
    prisim_parms['array']['parser']['data_start'] = 1
    prisim_parms['array']['parser']['label'] = 'Name'
    prisim_parms['array']['parser']['east'] = 'E'
    prisim_parms['array']['parser']['north'] = 'N'
    prisim_parms['array']['parser']['up'] = 'U'

    # Antenna power pattern parameters
    if pyuvsim_telescope_config['beam_paths'][0].lower() == 'uniform':
        prisim_parms['antenna']['shape'] = 'delta'
    if pyuvsim_telescope_config['beam_paths'][0].lower() == 'gaussian':
        prisim_parms['antenna']['shape'] = 'gaussian'
        prisim_parms['antenna']['size'] = pyuvsim_telescope_config['diameter']
    if pyuvsim_telescope_config['beam_paths'][0].lower() == 'airy':
        prisim_parms['antenna']['shape'] = 'dish'
        prisim_parms['antenna']['size'] = pyuvsim_telescope_config['diameter']
    if pyuvsim_telescope_config['beam_paths'][0].lower() in ['uniform', 'airy', 'gaussian']:
        prisim_parms['beam']['use_external'] = False
        prisim_parms['beam']['file'] = None
    else:
        # Anything else is treated as a path to an external UVBeam file
        prisim_parms['beam']['use_external'] = True
        prisim_parms['beam']['file'] = pyuvsim_telescope_config['beam_paths'][0]
        prisim_parms['beam']['filepathtype'] = 'custom'
        prisim_parms['beam']['filefmt'] = 'UVBeam'

    # Bandpass parameters
    prisim_parms['bandpass']['freq_resolution'] = pyuvsim_parms['freq']['channel_width']
    prisim_parms['bandpass']['nchan'] = pyuvsim_parms['freq']['Nfreqs']
    if prisim_parms['bandpass']['nchan'] == 1:
        warnings.warn('Single channel simulation is not supported currently in PRISim. Request at least two frequency channels.')
    # Band center derived directly from the start frequency and bandwidth
    pyuvsim_start_freq = pyuvsim_parms['freq']['start_freq']
    prisim_parms['bandpass']['freq'] = pyuvsim_start_freq + 0.5 * prisim_parms['bandpass']['nchan'] * prisim_parms['bandpass']['freq_resolution']

    # Observing parameters: drift scan at zenith starting at the given JD
    prisim_parms['obsparm']['n_acc'] = pyuvsim_parms['time']['Ntimes']
    prisim_parms['obsparm']['t_acc'] = pyuvsim_parms['time']['integration_time']
    prisim_parms['obsparm']['obs_mode'] = 'drift'
    prisim_parms['pointing']['jd_init'] = pyuvsim_parms['time']['start_time']
    prisim_parms['obsparm']['obs_date'] = Time(prisim_parms['pointing']['jd_init'], scale='utc', format='jd').iso.split(' ')[0].replace('-', '/')
    prisim_parms['pointing']['lst_init'] = None
    prisim_parms['pointing']['drift_init']['alt'] = 90.0
    prisim_parms['pointing']['drift_init']['az'] = 270.0
    prisim_parms['pointing']['drift_init']['ha'] = None
    prisim_parms['pointing']['drift_init']['dec'] = None

    # Sky model: convert the pyuvsim catalog into PRISim's custom format
    prisim_parms['skyparm']['model'] = 'custom'
    prisim_parms['catalog']['filepathtype'] = 'custom'
    prisim_parms['catalog']['custom_file'] = pyuvsim_parms['sources']['catalog'].split('.txt')[0] + '_prisim.txt'
    pyuvsim_catalog = ascii.read(pyuvsim_parms['sources']['catalog'], comment='#', header_start=0, data_start=1)

    ra_colname = ''
    dec_colname = ''
    epoch = ''
    for colname in pyuvsim_catalog.colnames:
        if 'RA' in colname:
            ra_colname = colname
            ra_deg = pyuvsim_catalog[colname].data
            # Column names look like 'RA_J2000 ...'; extract the epoch digits
            epoch = ra_colname.split('_')[1].split()[0][1:]
        if 'Dec' in colname:
            dec_colname = colname
            dec_deg = pyuvsim_catalog[colname].data
        if 'Flux' in colname:
            # Fix: NP.float was removed in NumPy >= 1.24; it was an alias of
            # the builtin float.
            fint = pyuvsim_catalog[colname].data.astype(float)
        if 'Frequency' in colname:
            ref_freq = pyuvsim_catalog[colname].data.astype(float)
    # pyuvsim catalogs carry flat-spectrum point sources
    spindex = NP.zeros(fint.size, dtype=float)
    majax = NP.zeros(fint.size, dtype=float)
    minax = NP.zeros(fint.size, dtype=float)
    pa = NP.zeros(fint.size, dtype=float)

    prisim_parms['skyparm']['epoch'] = epoch
    prisim_parms['skyparm']['flux_unit'] = 'Jy'
    prisim_parms['skyparm']['flux_min'] = None
    prisim_parms['skyparm']['flux_max'] = None
    prisim_parms['skyparm']['custom_reffreq'] = float(ref_freq[0]) / 1e9

    ascii.write([ra_deg, dec_deg, fint, spindex, majax, minax, pa], prisim_parms['catalog']['custom_file'], names=['RA', 'DEC', 'F_INT', 'SPINDEX', 'MAJAX', 'MINAX', 'PA'], delimiter=' ', format='fixed_width', formats={'RA': '%11.7f', 'DEC': '%12.7f', 'F_INT': '%10.4f', 'SPINDEX': '%8.5f', 'MAJAX': '%8.5f', 'MINAX': '%8.5f', 'PA': '%8.5f'}, bookend=False, overwrite=True)

    # Save format parameters
    prisim_parms['save_formats']['npz'] = False
    prisim_parms['save_formats']['uvfits'] = False
    prisim_parms['save_formats']['uvh5'] = True

    return prisim_parms
if __name__ == '__main__':

    parser = argparse.ArgumentParser(description='Program to convert simulation parameter configurations from one simulator to another')

    ## Parse input arguments
    io_group = parser.add_argument_group('Input/Output parameters', 'Input/output specifications')
    io_group.add_argument('-i', '--infile', dest='infile', default=None, type=str, required=False, help='Full path to file specifying input parameters')
    io_group.add_argument('-o', '--outfile', dest='outfile', default=None, type=str, required=True, help='Full path to file specifying output parameters')
    io_group.add_argument('--from', dest='from', default=None, type=str, required=True, help='String specifying origin simulation configuration. Accepts "prisim", "pyuvsim"')
    io_group.add_argument('--to', dest='to', default=None, type=str, required=True, help='String specifying destination simulation configuration. Accepts "prisim", "pyuvsim"')

    args = vars(parser.parse_args())

    if args['from'].lower() not in ['prisim', 'pyuvsim']:
        raise ValueError('Originating simulation must be set to "prisim" or "pyuvsim"')
    if args['to'].lower() not in ['prisim', 'pyuvsim']:
        raise ValueError('Destination simulation must be set to "prisim" or "pyuvsim"')
    if args['from'].lower() == args['to'].lower():
        raise ValueError('Origin and destination simulation types must not be equal')

    if args['to'].lower() == 'prisim':
        # pyuvsim -> PRISim: overlay the pyuvsim settings onto PRISim's
        # default parameter template.
        prisim_template_file = prisim_path+'examples/simparms/defaultparms.yaml'
        with open(prisim_template_file, 'r') as prisim_parms_file:
            prisim_parms = yaml.safe_load(prisim_parms_file)
        with open(args['infile'], 'r') as pyuvsim_parms_file:
            pyuvsim_parms = yaml.safe_load(pyuvsim_parms_file)
        outparms = simparms_from_pyuvsim_to_prisim(pyuvsim_parms, prisim_parms)
    else:
        # Fix: the original branch referenced the undefined variable
        # "prisim_template_file" and called the pyuvsim->prisim converter
        # with the wrong arguments; the reverse conversion was never
        # implemented, so fail loudly and clearly instead.
        raise NotImplementedError('Conversion from PRISim to pyuvsim parameters is not implemented')

    with open(args['outfile'], 'w') as outfile:
        yaml.dump(outparms, outfile, default_flow_style=False)
| 8,667 | 49.988235 | 376 |
py
|
PRISim
|
PRISim-master/scripts/prisim_resource_monitor.py
|
#!python
import numpy as NP
import os
import subprocess
import psutil
import time
import argparse
from astroutils import writer_module as WM
def monitor_memory(pids, tint=2.0):
    """Continuously display CPU and memory usage of the given processes.

    Clears the terminal and redraws a table of per-PID CPU percentage and
    resident memory (GB), plus totals, every `tint` seconds. Runs until
    interrupted.

    Parameters
    ----------
    pids : list
        Process IDs to monitor; entries must be convertible to int.
    tint : int or float, optional
        Refresh interval in seconds; non-positive values fall back to 60 s.

    Raises
    ------
    TypeError
        If `pids` is not a list or `tint` is not a scalar number.
    ValueError
        If any PID cannot be converted to an integer.
    """
    if not isinstance(pids, list):
        raise TypeError('Input PIDs must be specified as a list')
    try:
        # list-comprehension instead of map() so len(pids) below also works
        # under Python 3, where map() returns a lazy iterator
        pids = [int(pid) for pid in pids]
    except ValueError:
        raise ValueError('Input PIDs could not be specified as integers. Check inputs again.')
    if not isinstance(tint, (int,float)):
        raise TypeError('Time interval must be a scalar number')
    if tint <= 0.0:
        tint = 60.0
    while True:
        subprocess.call(['clear'])
        with WM.term.location(0, 0):
            print('Resources under PRISim processes...')
        with WM.term.location(0, 1):
            print('{0:>8} {1:>8} {2:>12}'.format('PID', 'CPU [%]', 'Memory [GB]'))
        cpu = NP.zeros(len(pids))
        mem = NP.zeros(len(pids))
        for pi, pid in enumerate(pids):
            proc = psutil.Process(pid)
            # BUGFIX: cpu_percent() was accidentally called twice in a row
            # (duplicated line); a single sampling call suffices
            cpu[pi] = proc.cpu_percent(interval=0.01) # CPU usage in percent
            mem[pi] = proc.memory_info().rss / 2.0**30 # memory used in GB
            with WM.term.location(0, 2+pi):
                print('{0:8d} {1:8.1f} {2:12.4f}'.format(pid, cpu[pi], mem[pi]))
        with WM.term.location(0, len(pids)+2):
            print('{0:>8} {1:8.1f} {2:12.4f}'.format('Total', NP.sum(cpu), NP.sum(mem)))
        time.sleep(tint)
if __name__ == '__main__':
    # Command-line entry point: collect the PIDs to watch and the refresh
    # cadence, then hand off to the endless monitoring loop.
    parser = argparse.ArgumentParser(description='Program to monitor live memory usage')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    input_group.add_argument('-p', '--pids', dest='pids', type=int, nargs='+', required=True, help='List of PIDs to be monitored')
    input_group.add_argument('-t', '--tint', dest='tint', type=float, default=2, required=False, help='Time interval for update')
    cli = parser.parse_args()
    monitor_memory(cli.pids, cli.tint)
| 2,118 | 36.175439 | 130 |
py
|
PRISim
|
PRISim-master/scripts/prisim_to_uvfits.py
|
#!python
import yaml
import argparse
import numpy as NP
import prisim
from prisim import interferometry as RI
prisim_path = prisim.__path__[0]+'/'
def write(parms, verbose=True):
    """Write a saved PRISim simulation out in UVFITS format.

    Parameters
    ----------
    parms : dict
        I/O specification with keys 'infile', 'phase_center', 'method' and
        optionally 'outfile' and 'overwrite' (see
        examples/ioparms/uvfitsparms.yaml for the expected layout).
    verbose : bool, optional
        Passed through to the UVFITS writer.
    """
    if 'infile' not in parms:
        raise KeyError('PRISim input file not specified. See example in {0}examples/ioparms/uvfitsparms.yaml'.format(prisim_path))
    if parms['infile'] is None:
        raise ValueError('PRISim input file not specified. See example in {0}examples/ioparms/uvfitsparms.yaml'.format(prisim_path))
    # Strip a recognized extension so downstream code receives the basename
    stem, dot, extn = parms['infile'].rpartition('.')
    if dot and (extn.lower() in ('hdf5', 'fits')):
        parms['infile'] = stem
    # Default the output name to the input name when unspecified
    if parms.get('outfile') is None:
        parms['outfile'] = parms['infile']
    if 'phase_center' not in parms:
        raise KeyError('Phase center [ra, dec] (deg) as a numpy array must be specified. See example in {0}examples/ioparms/uvfitsparms.yaml'.format(prisim_path))
    if 'method' not in parms:
        raise KeyError('Key specifying UVFITS method is missing. See example in {0}examples/ioparms/uvfitsparms.yaml'.format(prisim_path))
    if 'overwrite' not in parms:
        parms['overwrite'] = True
    elif not isinstance(parms['overwrite'], bool):
        raise TypeError('Overwrite parameter must be boolean')
    phase_ref = {'location': NP.asarray(parms['phase_center']).reshape(1,-1), 'coords': 'radec'}
    uvfits_parms = {'ref_point': phase_ref, 'method': parms['method']}
    sim = RI.InterferometerArray(None, None, None, init_file=parms['infile'])
    sim.write_uvfits(parms['outfile'], uvfits_parms=uvfits_parms, overwrite=parms['overwrite'], verbose=verbose)
if __name__ == '__main__':
    # Command-line driver: read a YAML file of I/O parameters and convert the
    # referenced PRISim simulation to UVFITS via write().
    parser = argparse.ArgumentParser(description='Program to write PRISim output visibilities in UVFITS format')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    # NOTE(review): type=file is Python-2-only; under Python 3 this raises a
    # NameError -- confirm the supported interpreter version
    input_group.add_argument('-p', '--parmsfile', dest='parmsfile', type=file, required=True, help='File specifying I/O and UVFITS parameters')
    parser.add_argument('-v', '--verbose', dest='verbose', default=False, action='store_true')
    args = vars(parser.parse_args())
    # argparse already opened the parameter file; the with-block closes it
    with args['parmsfile'] as parms_file:
        parms = yaml.safe_load(parms_file)
    write(parms, verbose=args['verbose'])
| 2,451 | 42.017544 | 162 |
py
|
PRISim
|
PRISim-master/scripts/setup_prisim_data.py
|
#!python
import os
import argparse
import yaml
import gdown
import tarfile
import prisim
prisim_path = prisim.__path__[0]+'/'
tarfilename = 'prisim_data.tar.gz'
def download(url=None, outfile=None, verbose=True):
    """Fetch the PRISim data tarball from `url` into `outfile` via gdown.

    A None `url` is a silent no-op. When `outfile` is None the tarball is
    placed inside the installed package directory.
    """
    if url is None:
        return
    if not isinstance(url, str):
        raise TypeError('Input url must be a string')
    if outfile is None:
        outfile = prisim_path + tarfilename
    elif not isinstance(outfile, str):
        raise TypeError('outfile must be a string')
    gdown.download(url, outfile, quiet=(not verbose))
def extract(infile=None, outdir=None, verbose=True):
    """Unpack the gzipped PRISim data tarball `infile` into `outdir`.

    Both arguments default to locations inside the installed package tree.
    """
    if infile is None:
        infile = prisim_path + tarfilename
    elif not isinstance(infile, str):
        raise TypeError('infile must be a string')
    if outdir is None:
        outdir = prisim_path
    elif not isinstance(outdir, str):
        raise TypeError('outdir must be a string')
    if verbose:
        print('Extracting PRISim package data from {0} ...'.format(infile))
    archive = tarfile.open(infile, 'r:gz')
    try:
        archive.extractall(outdir)
    finally:
        archive.close()
    if verbose:
        print('Extracted PRISim package data into {0}'.format(outdir))
def cleanup(infile=None, verbose=True):
    """Delete the downloaded tarball `infile` if it is still present.

    Defaults to the tarball location inside the installed package tree; a
    missing file is not an error.
    """
    target = infile
    if target is None:
        target = prisim_path + tarfilename
    elif not isinstance(target, str):
        raise TypeError('infile must be a string')
    if verbose:
        print('Cleaning up intermediate file {0} of PRISim package data ...'.format(target))
    if os.path.isfile(target):
        os.remove(target)
    if verbose:
        print('Cleaned up PRISim package data.')
if __name__ == '__main__':
    # Command-line driver: read a YAML file describing which of the three
    # stages (download, extract, cleanup) to run and with what options.
    parser = argparse.ArgumentParser(description='Program to download, extract, and install PRISim data')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    # NOTE(review): type=file is Python-2-only; under Python 3 this raises a
    # NameError -- confirm the supported interpreter version
    input_group.add_argument('-p', '--parmsfile', dest='parmsfile', type=file, required=False, default=prisim_path+'examples/ioparms/data_setup_parms.yaml', help='File specifying PRISim data setup parameters')
    args = vars(parser.parse_args())
    with args['parmsfile'] as parms_file:
        parms = yaml.safe_load(parms_file)
    action_types = ['download', 'extract', 'cleanup']
    # Disable any stage absent from the parameter file
    for action_type in action_types:
        if action_type not in parms:
            parms[action_type] = {}
            parms[action_type]['action'] = False
    for action_type in action_types:
        if parms[action_type]['action']:
            # Fill in missing per-stage keys with None so the stage functions
            # fall back to their package-relative defaults
            if action_type == 'download':
                keys = ['url', 'fid', 'fname']
            elif action_type == 'extract':
                keys = ['fname', 'dir']
            else:
                keys = ['fname']
            for key in keys:
                if key not in parms[action_type]:
                    parms[action_type][key] = None
            # assumes parms['verbose'] is present in the YAML file -- TODO confirm
            if action_type == 'download':
                # url+fid: assumes the YAML 'url' is a base URL to which the
                # Google Drive file id is appended -- TODO confirm
                download(url=parms[action_type]['url']+parms[action_type]['fid'], outfile=parms[action_type]['fname'], verbose=parms['verbose'])
            elif action_type == 'extract':
                extract(infile=parms[action_type]['fname'], outdir=parms[action_type]['dir'], verbose=parms['verbose'])
            else:
                cleanup(infile=parms[action_type]['fname'], verbose=parms['verbose'])
    if parms['verbose']:
        print('PRISim package data successfully set up.')
| 3,358 | 34.734043 | 209 |
py
|
PRISim
|
PRISim-master/scripts/write_PRISim_visibilities.py
|
#!python
import yaml
import argparse
import numpy as NP
from prisim import interferometry as RI
import ipdb as PDB
def save(simobj, outfile, outformats, parmsfile=None):
    """Write simulated visibilities from `simobj` in one or more formats.

    Parameters
    ----------
    simobj : prisim.interferometry.InterferometerArray
        Simulation object holding the visibilities.
    outfile : str
        Output file path (without format-specific extension).
    outformats : list of str
        Formats to write; 'hdf5' uses the native writer, anything else goes
        through pyuvdata (with phasing applied first for 'uvfits').
    parmsfile : str, optional
        YAML simulation parameter file; defaults to simobj.simparms_file.
    """
    parms = None
    for outfmt in outformats:
        if outfmt.lower() == 'hdf5':
            simobj.save(outfile, fmt=outfmt, verbose=True, tabtype='BinTableHDU', npz=False, overwrite=True, uvfits_parms=None)
        else:
            if parmsfile is None:
                parmsfile = simobj.simparms_file
            # Lazily load the parameter file once, on first non-HDF5 format
            if parms is None:
                with open(parmsfile, 'r') as pfile:
                    parms = yaml.safe_load(pfile)
            uvfits_parms = None
            if outfmt.lower() == 'uvfits':
                if parms['save_formats']['phase_center'] is None:
                    # BUGFIX: GEOM was referenced here but never imported in
                    # this script (NameError at runtime); import it locally so
                    # the other code paths keep working without astroutils
                    from astroutils import geometry as GEOM
                    # Derive a (RA, Dec) phase center from the first pointing,
                    # converting dircos -> altaz -> hadec -> radec as needed
                    phase_center = simobj.pointing_center[0,:].reshape(1,-1)
                    phase_center_coords = simobj.pointing_coords
                    if phase_center_coords == 'dircos':
                        phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
                        phase_center_coords = 'altaz'
                    if phase_center_coords == 'altaz':
                        phase_center = GEOM.altaz2hadec(phase_center, simobj.latitude, units='degrees')
                        phase_center_coords = 'hadec'
                    if phase_center_coords == 'hadec':
                        phase_center = NP.hstack((simobj.lst[0]-phase_center[0,0], phase_center[0,1]))
                        phase_center_coords = 'radec'
                    if phase_center_coords != 'radec':
                        raise ValueError('Invalid phase center coordinate system')
                    uvfits_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
                else:
                    uvfits_ref_point = {'location': NP.asarray(parms['save_formats']['phase_center']).reshape(1,-1), 'coords': 'radec'}
                # Phase the visibilities to a phase reference point
                simobj.rotate_visibilities(uvfits_ref_point)
                # ref_point left as None here, presumably because the
                # visibilities were already rotated above -- TODO confirm
                uvfits_parms = {'ref_point': None, 'datapool': None, 'method': None}
            simobj.pyuvdata_write(outfile, formats=[outfmt.lower()], uvfits_parms=uvfits_parms, overwrite=True)
if __name__ == '__main__':
    ## Parse input arguments
    # NOTE(review): 'PRIS?im' in the description looks like a typo for 'PRISim'
    parser = argparse.ArgumentParser(description='Program to save PRIS?im visibilities')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    input_group.add_argument('-s', '--simfile', dest='simfile', type=str, required=True, help='HDF5 file from PRISim simulation')
    input_group.add_argument('-p', '--parmsfile', dest='parmsfile', default=None, type=str, required=False, help='File specifying simulation parameters')
    output_group = parser.add_argument_group('Output parameters', 'Output specifications')
    output_group.add_argument('-o', '--outfile', dest='outfile', default=None, type=str, required=True, help='Output File with redundant measurements')
    output_group.add_argument('--outfmt', dest='outfmt', default=['hdf5'], type=str, required=True, nargs='*', choices=['HDF5', 'hdf5', 'UVFITS', 'uvfits', 'UVH5', 'uvh5'], help='Output file format')
    misc_group = parser.add_argument_group('Misc parameters', 'Misc specifications')
    misc_group.add_argument('-w', '--wait', dest='wait', action='store_true', help='Wait after run')
    args = vars(parser.parse_args())
    outfile = args['outfile']
    outformats = args['outfmt']
    parmsfile = args['parmsfile']
    # Rehydrate the simulation from its saved HDF5 state
    simobj = RI.InterferometerArray(None, None, None, init_file=args['simfile'])
    if parmsfile is None:
        parmsfile = simobj.simparms_file
    with open(parmsfile, 'r') as pfile:
        parms = yaml.safe_load(pfile)
    # The following "if" statement is to allow previous buggy saved versions
    # of HDF5 files that did not save the projected_baselines attribute in the
    # right shape when n_acc=1
    update_projected_baselines = False
    if simobj.projected_baselines.ndim != 3:
        update_projected_baselines = True
    else:
        if simobj.projected_baselines.shape[2] != simobj.n_acc:
            update_projected_baselines = True
    if update_projected_baselines:
        uvw_ref_point = None
        if parms['save_formats']['phase_center'] is None:
            # Derive a (RA, Dec) reference from the first pointing, stepping
            # through dircos -> altaz -> hadec -> radec as applicable.
            # NOTE(review): GEOM is not imported anywhere in this script, so
            # this branch raises NameError if executed -- needs
            # 'from astroutils import geometry as GEOM'
            phase_center = simobj.pointing_center[0,:].reshape(1,-1)
            phase_center_coords = simobj.pointing_coords
            if phase_center_coords == 'dircos':
                phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
                phase_center_coords = 'altaz'
            if phase_center_coords == 'altaz':
                phase_center = GEOM.altaz2hadec(phase_center, simobj.latitude, units='degrees')
                phase_center_coords = 'hadec'
            if phase_center_coords == 'hadec':
                phase_center = NP.hstack((simobj.lst[0]-phase_center[0,0], phase_center[0,1]))
                phase_center_coords = 'radec'
            if phase_center_coords != 'radec':
                raise ValueError('Invalid phase center coordinate system')
            uvw_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
        else:
            uvw_ref_point = {'location': NP.asarray(parms['save_formats']['phase_center']).reshape(1,-1), 'coords': 'radec'}
        simobj.project_baselines(uvw_ref_point)
    save(simobj, outfile, outformats, parmsfile=parmsfile)
    wait_after_run = args['wait']
    if wait_after_run:
        # Drop into a debugger so the user can inspect the results
        PDB.set_trace()
| 5,632 | 46.737288 | 199 |
py
|
PRISim
|
PRISim-master/scripts/test_mpi4py_for_prisim.py
|
#!python
from mpi4py import MPI
# Smoke test: verify that mpi4py can initialize and that every spawned
# process reaches completion.
world = MPI.COMM_WORLD
proc_rank = world.Get_rank()
nproc = world.Get_size()
name = MPI.Get_processor_name()
if proc_rank == 0:
    print('\n{0} processes initiated...'.format(nproc))
print('\tProcess #{0} completed'.format(proc_rank))
if proc_rank == 0:
    print('MPI test successful\n')
| 326 | 17.166667 | 55 |
py
|
PRISim
|
PRISim-master/scripts/prisim_grep.py
|
#!python
import os, glob, sys
import yaml
import argparse
import numpy as NP
import astroutils.nonmathops as NMO
import prisim
prisim_path = prisim.__path__[0]+'/'
def findType(refval):
    """Classify a reference search value into one of 'bool', 'str', 'num'
    or 'dict'; a list is classified by its first element.

    Raises TypeError for unsupported types (bool is checked before num
    because bool is a subclass of int).
    """
    if isinstance(refval, bool):
        return 'bool'
    if isinstance(refval, str):
        return 'str'
    if isinstance(refval, (int, float)):
        return 'num'
    if isinstance(refval, list):
        head = refval[0]
        if isinstance(head, str):
            return 'str'
        if isinstance(head, (int, float)):
            return 'num'
        raise TypeError('refval must be a list containing strings or scalar numbers')
    if isinstance(refval, dict):
        return 'dict'
    raise TypeError('refval must be a boolean, string, scalar, list of strings or list of numbers')
def grepBoolean(vals, refval):
    """Elementwise equality mask of `vals` against the reference boolean."""
    return NP.equal(vals, refval)
def grepString(vals, refval):
    """Boolean mask of which entries of `vals` occur in the reference
    list/string collection `refval`."""
    # BUGFIX: NP.bool was removed in NumPy 1.24; the builtin bool is the
    # intended dtype
    select_ind = NP.asarray([val in refval for val in vals], dtype=bool)
    return select_ind
def grepScalarRange(vals, refval):
    """Boolean mask: True where refval[0] <= val <= refval[1]."""
    lo = refval[0]
    hi = refval[1]
    return NP.logical_and(vals >= lo, vals <= hi)
def grepValue(vals, refval):
    """Return a boolean selection mask over `vals` for the reference value
    `refval`, dispatching on the reference's type (bool equality, string
    membership, numeric range, or recursive dict matching).

    Raises TypeError for reference types findType() cannot classify.
    """
    # BUGFIX: NP.bool and NP.float were removed in NumPy 1.24; the builtin
    # bool/float are the intended dtypes
    select_ind = NP.asarray([True]*len(vals), dtype=bool)
    valtype = findType(refval)
    if valtype == 'bool':
        vals = NP.asarray(vals, dtype=bool)
        select_ind = grepBoolean(vals, refval)
    elif valtype == 'str':
        vals = NP.asarray(vals)
        select_ind = grepString(vals, refval)
    elif valtype == 'num':
        vals = NP.asarray(vals, dtype=float)
        vals[NP.equal(vals, None)] = NP.nan
        select_ind = grepScalarRange(vals, refval)
    elif valtype == 'dict':
        for upper_level_key in refval:
            lower_level_valtype = findType(refval[upper_level_key])
            lower_level_vals = [val[upper_level_key] for val in vals]
            lower_level_refval = refval[upper_level_key]
            select_ind = NP.logical_and(select_ind, grepValue(lower_level_vals, lower_level_refval)) # Recursive call to this function to walk down possible recursive dictionaries
    else:
        raise TypeError('Unknown type found. Requires debugging')
    return select_ind
def grepPRISim(parms, verbose=True):
    """Search metadata of all PRISim runs under a root/project directory
    tree and return the run directories whose simulation parameters match
    every non-None criterion in `parms`.

    Parameters
    ----------
    parms : dict
        Search specification; 'dirstruct'->'rootdir'/'project' locate the
        runs, remaining two-level keys are matched via grepValue().
    verbose : bool, optional
        Print the criteria as they are applied.
    """
    rootdir = parms['dirstruct']['rootdir']
    project = parms['dirstruct']['project']
    if project is None:
        project_dir = ''
    elif isinstance(project, str):
        project_dir = project
    if not os.path.isdir(rootdir):
        raise OSError('Specified root directory does not exist')
    if not os.path.isdir(rootdir+project_dir):
        raise OSError('Specified project directory does not exist')
    if project is None:
        projects = os.listdir(rootdir)
    else:
        projects = [project_dir]
    # Load simulation parameters and metadata of every run found; runs
    # without readable metadata are silently skipped
    simparms_list = []
    metadata_list = []
    for proj in projects:
        for simrun in os.listdir(rootdir+proj):
            try:
                with open(rootdir+proj+'/'+simrun+'/metainfo/simparms.yaml', 'r') as parmsfile:
                    simparms_list += [{rootdir+proj+'/'+simrun+'/': yaml.safe_load(parmsfile)}]
                with open(rootdir+proj+'/'+simrun+'/metainfo/meta.yaml', 'r') as metafile:
                    metadata_list += [{rootdir+proj+'/'+simrun+'/': yaml.safe_load(metafile)}]
            except IOError:
                pass
    parms_list = []
    for simind, parm in enumerate(simparms_list):
        # BUGFIX: keys()[0] is Python-2-only (dict views are not indexable
        # under Python 3); list(...) works on both
        simrunkey = list(parm.keys())[0]
        parm[simrunkey].update(metadata_list[simind][simrunkey])
        parms_list += [parm[simrunkey]]
    reduced_parms = NMO.recursive_find_notNone_in_dict(parms)
    # BUGFIX: NP.bool was removed in NumPy 1.24; builtin bool is intended
    select_ind = NP.asarray([True] * len(parms_list), dtype=bool)
    if verbose:
        print('\nThe following parameters are searched for:')
    # BUGFIX: .items()/plain iteration replace the Python-2-only
    # iteritems()/iterkeys() and behave identically under Python 2
    for ikey, ival in reduced_parms.items():
        if verbose:
            print('\t'+ikey)
        for subkey in ival:
            vals = [parm[ikey][subkey] for parm in parms_list]
            refval = reduced_parms[ikey][subkey]
            select_ind = NP.logical_and(select_ind, grepValue(vals, refval))
            if verbose:
                print('\t\t'+subkey)
    select_ind, = NP.where(select_ind)
    outkeys = [list(metadata_list[ind].keys())[0] for ind in select_ind]
    return outkeys
if __name__ == '__main__':
    # Command-line driver: load search criteria from a YAML file and print
    # every simulation run whose metadata matches.
    parser = argparse.ArgumentParser(description='Program to search metadata of PRISim simulations')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    # NOTE(review): type=file is Python-2-only; under Python 3 this raises a
    # NameError -- confirm the supported interpreter version
    input_group.add_argument('-i', '--infile', dest='infile', default=prisim_path+'examples/dbparms/defaultdbparms.yaml', type=file, required=False, help='File specifying input database search parameters')
    parser.add_argument('-v', '--verbose', dest='verbose', default=False, action='store_true')
    parser.add_argument('-s', '--sort', dest='sort', default='alphabetical', type=str, required=False, choices=['date', 'alphabetical'], help='Sort results by timestamp or alphabetical order')
    args = vars(parser.parse_args())
    with args['infile'] as parms_file:
        parms = yaml.safe_load(parms_file)
    selectsims = grepPRISim(parms, verbose=args['verbose'])
    # NOTE(review): the 'date' sort choice currently leaves results unsorted
    if args['sort'] == 'alphabetical':
        selectsims = sorted(selectsims)
    print('\nThe following simulation runs were found to contain the searched parameters:\n')
    for simrun in selectsims:
        print('\t'+simrun)
    print('\n')
| 5,512 | 37.552448 | 205 |
py
|
PRISim
|
PRISim-master/scripts/run_prisim.py
|
#!python
import os, shutil, subprocess, pwd, errno, warnings
from mpi4py import MPI
import yaml
import h5py
import argparse
import copy
import numpy as NP
from astropy.io import fits, ascii
from astropy.coordinates import Galactic, FK5, ICRS, SkyCoord, AltAz, EarthLocation
from astropy import units as U
from astropy.time import Time
import scipy.constants as FCNST
from scipy import interpolate
import matplotlib.pyplot as PLT
import matplotlib.colors as PLTC
import matplotlib.animation as MOV
from scipy.interpolate import griddata
import datetime as DT
import time
import progressbar as PGB
import healpy as HP
import psutil
from astroutils import MPI_modules as my_MPI
from astroutils import geometry as GEOM
from astroutils import catalog as SM
from astroutils import constants as CNST
from astroutils import DSP_modules as DSP
from astroutils import lookup_operations as LKP
from astroutils import mathops as OPS
from astroutils import ephemeris_timing as ET
import prisim
from prisim import interferometry as RI
from prisim import primary_beams as PB
from prisim import baseline_delay_horizon as DLY
try:
from pyuvdata import UVBeam
except ImportError:
uvbeam_module_found = False
else:
uvbeam_module_found = True
import ipdb as PDB
## Set MPI parameters
comm = MPI.COMM_WORLD
# Rank of this process and total process count in the communicator
rank = comm.Get_rank()
nproc = comm.Get_size()
name = MPI.Get_processor_name()
## global parameters
sday = CNST.sday
# assumes CNST.sday is a float so this division is not integer division
# under Python 2 -- TODO confirm
sday_correction = 1 / sday
# Installed package root; used throughout to locate bundled data files
prisim_path = prisim.__path__[0]+'/'
## Parse input arguments
parser = argparse.ArgumentParser(description='Program to simulate interferometer array data')
input_group = parser.add_argument_group('Input parameters', 'Input specifications')
# NOTE(review): type=file is Python-2-only; under Python 3 this raises a
# NameError -- confirm the supported interpreter version
input_group.add_argument('-i', '--infile', dest='infile', default=prisim_path+'examples/simparms/defaultparms.yaml', type=file, required=False, help='File specifying input parameters')
args = vars(parser.parse_args())
default_parms = {}
# Load the user-supplied parameter file ...
with args['infile'] as custom_parms_file:
    custom_parms = yaml.safe_load(custom_parms_file)
# ... and, if it names a template, the template defaults to merge into
if custom_parms['preload']['template'] is not None:
    with open(custom_parms['preload']['template']) as default_parms_file:
        default_parms = yaml.safe_load(default_parms_file)
# Overlay the custom parameters onto the template defaults, up to three
# levels of nesting; any custom key not present in the template is rejected.
if not default_parms:
    parms = custom_parms
else:
    parms = default_parms
    if custom_parms['preload']['template'] is not None:
        for key in custom_parms:
            # 'preload' itself is never merged
            if key != 'preload':
                if key in default_parms:
                    if not isinstance(custom_parms[key], dict):
                        parms[key] = custom_parms[key]
                    else:
                        for subkey in custom_parms[key]:
                            if subkey in default_parms[key]:
                                if not isinstance(custom_parms[key][subkey], dict):
                                    parms[key][subkey] = custom_parms[key][subkey]
                                else:
                                    for subsubkey in custom_parms[key][subkey]:
                                        if subsubkey in default_parms[key][subkey]:
                                            if not isinstance(custom_parms[key][subkey][subsubkey], dict):
                                                parms[key][subkey][subsubkey] = custom_parms[key][subkey][subsubkey]
                                            else:
                                                # A fourth level of nesting is unsupported
                                                raise TypeError('Parsing YAML simulation parameter files with this level of nesting is not supported')
                                        else:
                                            raise KeyError('Invalid parameter found in custom simulation parameters file')
                            else:
                                raise KeyError('Invalid parameter found in custom simulation parameters file')
                else:
                    raise KeyError('Invalid parameter found in custom simulation parameters file')
# --- Unpack directory-structure and telescope parameters ---
rootdir = parms['dirstruct']['rootdir']
project = parms['dirstruct']['project']
simid = parms['dirstruct']['simid']
telescope_id = parms['telescope']['id']
label_prefix = parms['telescope']['label_prefix']
Trx = parms['telescope']['Trx']
Tant_freqref = parms['telescope']['Tant_freqref']
Tant_ref = parms['telescope']['Tant_ref']
Tant_spindex = parms['telescope']['Tant_spindex']
Tsys = parms['telescope']['Tsys']
# System-temperature description consumed downstream: receiver temperature,
# antenna temperature power law (T0 at f0 with spectral index), or net Tsys
Tsysinfo = {'Trx': Trx, 'Tant':{'f0': Tant_freqref, 'spindex': Tant_spindex, 'T0': Tant_ref}, 'Tnet': Tsys}
A_eff = parms['telescope']['A_eff']
eff_aprtr = parms['telescope']['eff_aprtr']
# Fold the aperture efficiency into the effective area
A_eff *= eff_aprtr
eff_Q = parms['telescope']['eff_Q']
latitude = parms['telescope']['latitude']
longitude = parms['telescope']['longitude']
altitude = parms['telescope']['altitude']
# Unspecified longitude/altitude default to zero
if longitude is None:
    longitude = 0.0
if altitude is None:
    altitude = 0.0
# --- Polyphase filter bank (PFB) bandpass parameters ---
pfb_method = parms['bandpass']['pfb_method']
pfb_filepath = parms['bandpass']['pfb_filepath']
pfb_file = parms['bandpass']['pfb_file']
if pfb_method is not None:
    if pfb_method not in ['theoretical', 'empirical']:
        raise ValueError('Value specified for pfb_method is not one of accepted values')
    if not isinstance(pfb_file, str):
        raise TypeError('Filename containing PFB information must be a string')
    # 'default' resolves the PFB file relative to the installed package data
    if pfb_filepath == 'default':
        pfb_file = prisim_path + 'data/bandpass/'+pfb_file
# --- Antenna element and phased-array parameters ---
element_shape = parms['antenna']['shape']
element_size = parms['antenna']['size']
element_ocoords = parms['antenna']['ocoords']
element_orientation = parms['antenna']['orientation']
ground_plane = parms['antenna']['ground_plane']
phased_array = parms['antenna']['phased_array']
phased_elements_file = parms['phasedarray']['file']
if phased_array:
    if not isinstance(phased_elements_file, str):
        raise TypeError('Filename containing phased array elements must be a string')
    if parms['phasedarray']['filepathtype'] == 'default':
        phased_elements_file = prisim_path+'data/phasedarray_layouts/'+phased_elements_file
phasedarray_delayerr = parms['phasedarray']['delayerr']
phasedarray_gainerr = parms['phasedarray']['gainerr']
nrand = parms['phasedarray']['nrand']
# --- Observation and bandpass parameters ---
obs_date = parms['obsparm']['obs_date']
obs_mode = parms['obsparm']['obs_mode']
n_acc = parms['obsparm']['n_acc']
t_acc = parms['obsparm']['t_acc']
t_obs = parms['obsparm']['t_obs']
freq = parms['bandpass']['freq']
freq_resolution = parms['bandpass']['freq_resolution']
nchan = parms['bandpass']['nchan']
# --- Primary beam parameters ---
beam_info = parms['beam']
use_external_beam = beam_info['use_external']
if use_external_beam:
    if not isinstance(beam_info['file'], str):
        raise TypeError('Filename containing external beam information must be a string')
    external_beam_file = beam_info['file']
    if beam_info['filepathtype'] == 'default':
        external_beam_file = prisim_path+'data/beams/'+external_beam_file
    if beam_info['filefmt'].lower() in ['hdf5', 'fits', 'uvbeam']:
        beam_filefmt = beam_info['filefmt'].lower()
    else:
        raise ValueError('Invalid beam file format specified')
beam_pol = beam_info['pol']
beam_id = beam_info['identifier']
pbeam_spec_interp_method = beam_info['spec_interp']
beam_chromaticity = beam_info['chromatic']
select_beam_freq = beam_info['select_freq']
# Default the beam selection frequency to the observing frequency
if select_beam_freq is None:
    select_beam_freq = freq
# --- Instrumental gains ---
gainparms = parms['gains']
# gaintable = None
gaininfo = None
if gainparms['file'] is not None:
    gaintable = {}
    if not isinstance(gainparms['file'], str):
        raise TypeError('Filename of instrument gains must be a string')
    gainsfile = gainparms['file']
    if gainparms['filepathtype'] == 'default':
        gainsfile = prisim_path + 'data/gains/'+gainsfile
    gaininfo = RI.GainInfo(init_file=gainsfile, axes_order=['label', 'frequency', 'time'])
# --- Snapshot selection and pointing parameters ---
avg_drifts = parms['snapshot']['avg_drifts']
beam_switch = parms['snapshot']['beam_switch']
pick_snapshots = parms['snapshot']['pick']
all_snapshots = parms['snapshot']['all']
snapshots_range = parms['snapshot']['range']
pointing_info = parms['pointing']
pointing_file = pointing_info['file']
pointing_drift_init = pointing_info['drift_init']
pointing_track_init = pointing_info['track_init']
# Gradient-computation mode: None disables gradients; otherwise must be one
# of the recognized axes (only 'baseline' is implemented so far).
gradient_mode = parms['processing']['gradient_mode']
if gradient_mode is not None:
    if not isinstance(gradient_mode, str):
        raise TypeError('gradient_mode must be a string')
    # BUGFIX: the valid-values list contained the typo 'grequency', which
    # made the legitimate 'frequency' setting fail validation
    if gradient_mode.lower() not in ['baseline', 'skypos', 'frequency']:
        raise ValueError('Invalid value specified for gradient_mode')
    if gradient_mode.lower() != 'baseline':
        raise ValueError('Specified gradient_mode not supported currently')
# --- Memory budgeting ---
memuse = parms['processing']['memuse']
memory_available = parms['processing']['memavail']
if memory_available is None:
    memory_available = psutil.virtual_memory().available # in Bytes
    pvmemavail = None # Let it be flexible if going by memory on single node
else:
    memory_available *= 2**30 # GB to bytes
    # Per-process share of the specified memory budget
    pvmemavail = 1.0 * memory_available / nproc
if memuse is None:
    # Default: use at most 90% of available memory (bytes)
    memuse = 0.9 * memory_available
elif isinstance(memuse, (int,float)):
    memuse = NP.abs(float(memuse)) # now in GB
    # Cap the requested usage at 90% of what is available
    if memuse * 2**30 > 0.9 * memory_available:
        memuse = 0.9 * memory_available # now converted to bytes
    else:
        memuse = memuse * 2**30 # now converted to bytes
else:
    raise TypeError('Usable memory must be specified as a scalar numeric value')
# --- Processing parameters ---
n_bins_baseline_orientation = parms['processing']['n_bins_blo']
n_sky_sectors = parms['processing']['n_sky_sectors']
bpass_shape = parms['processing']['bpass_shape']
ant_bpass_file = parms['processing']['ant_bpass_file']
max_abs_delay = parms['processing']['max_abs_delay']
f_pad = parms['processing']['f_pad']
n_pad = parms['processing']['n_pad']
coarse_channel_width = parms['processing']['coarse_channel_width']
bandpass_correct = parms['processing']['bp_correct']
noise_bandpass_correct = parms['processing']['noise_bp_correct']
do_delay_transform = parms['processing']['delay_transform']
memsave = parms['processing']['memsave']
store_prev_sky = parms['processing']['store_prev_sky']
# Invalid store_prev_sky silently defaults to True (best-effort)
if not isinstance(store_prev_sky, (bool,int)):
    store_prev_sky = True
# cleanup level: integer 0..3 (booleans are coerced to 0/1)
cleanup = parms['processing']['cleanup']
if not isinstance(cleanup, (bool,int)):
    raise TypeError('cleanup parameter must be an integer or boolean')
else:
    if isinstance(cleanup, bool):
        cleanup = int(cleanup)
    if (cleanup < 0) or (cleanup > 3):
        raise ValueError('Value of cleanup parameter outside bounds')
# --- Channel flagging parameters ---
flag_chan = NP.asarray(parms['flags']['flag_chan']).reshape(-1)
bp_flag_repeat = parms['flags']['bp_flag_repeat']
n_edge_flag = NP.asarray(parms['flags']['n_edge_flag']).reshape(-1)
flag_repeat_edge_channels = parms['flags']['flag_repeat_edge_channels']
# --- Sky model parameters ---
sky_str = parms['skyparm']['model']
fsky = parms['skyparm']['fsky']
skycat_epoch = parms['skyparm']['epoch']
nside = parms['skyparm']['nside']
flux_unit = parms['skyparm']['flux_unit']
fluxcut_min = parms['skyparm']['flux_min']
fluxcut_max = parms['skyparm']['flux_max']
fluxcut_freq = parms['skyparm']['fluxcut_reffreq']
# No lower flux cut means include everything down to zero flux
if fluxcut_min is None:
    fluxcut_min = 0.0
spindex = parms['skyparm']['spindex']
spindex_rms = parms['skyparm']['spindex_rms']
spindex_seed = parms['skyparm']['spindex_seed']
roi_radius = parms['skyparm']['roi_radius']
# Default region of interest spans the full visible hemisphere (degrees)
if roi_radius is None:
    roi_radius = 90.0
use_lidz = parms['skyparm']['lidz']
use_21cmfast = parms['skyparm']['21cmfast']
global_HI_parms = parms['skyparm']['global_EoR_parms']
# --- Source catalog file locations ---
catalog_filepathtype = parms['catalog']['filepathtype']
DSM_file_prefix = parms['catalog']['DSM_file_prefix']
spectrum_file = parms['catalog']['spectrum_file']
SUMSS_file = parms['catalog']['SUMSS_file']
NVSS_file = parms['catalog']['NVSS_file']
MWACS_file = parms['catalog']['MWACS_file']
GLEAM_file = parms['catalog']['GLEAM_file']
custom_catalog_file = parms['catalog']['custom_file']
skymod_file = parms['catalog']['skymod_file']
# 'default' resolves all catalog files relative to the installed package data
if catalog_filepathtype == 'default':
    DSM_file_prefix = prisim_path + 'data/catalogs/' + DSM_file_prefix
    spectrum_file = prisim_path + 'data/catalogs/' + spectrum_file
    SUMSS_file = prisim_path + 'data/catalogs/' + SUMSS_file
    NVSS_file = prisim_path + 'data/catalogs/' + NVSS_file
    MWACS_file = prisim_path + 'data/catalogs/' + MWACS_file
    GLEAM_file = prisim_path + 'data/catalogs/' + GLEAM_file
    custom_catalog_file = prisim_path + 'data/catalogs/' + custom_catalog_file
    skymod_file = prisim_path + 'data/catalogs/' + skymod_file
# --- Phasing, parallelization, and output-format parameters ---
pc = parms['phasing']['center']
pc_coords = parms['phasing']['coords']
mpi_key = parms['pp']['key']
mpi_eqvol = parms['pp']['eqvol']
save_redundant = parms['save_redundant']
save_formats = parms['save_formats']
save_to_npz = save_formats['npz']
save_to_uvfits = save_formats['uvfits']
save_to_uvh5 = save_formats['uvh5']
savefmt = save_formats['fmt']
if savefmt not in ['HDF5', 'hdf5', 'FITS', 'fits']:
    raise ValueError('Output format invalid')
if save_to_uvfits:
    if save_formats['uvfits_method'] not in [None, 'uvdata', 'uvfits']:
        raise ValueError('Invalid method specified for saving to UVFITS format')
plots = parms['plots']
# --- Optional live resource monitor ---
diagnosis_parms = parms['diagnosis']
display_resource_monitor = diagnosis_parms['resource_monitor']
tint = diagnosis_parms['refresh_interval']
# Refresh interval defaults to 2 s; non-positive values also fall back to 2 s
if tint is None:
    tint = 2.0
elif not isinstance(tint, (int, float)):
    raise TypeError('Refresh interval must be a scalar number')
else:
    if tint <= 0.0:
        tint = 2.0
pid = os.getpid()
# Gather the PIDs of all MPI processes on rank 0 for monitoring
pids = comm.gather(pid, root=0)
if display_resource_monitor:
    if rank == 0:
        # Spawn the monitor in a separate xterm, backgrounded.
        # NOTE(review): shell=True with a joined command string -- PIDs and
        # tint are internally generated here, but this pattern is unsafe if
        # any component ever comes from user input
        cmd = ' '.join(['xterm', '-e', 'prisim_resource_monitor.py', '-p', ' '.join(map(str, pids)), '-t', '{0:.1f}'.format(tint), '&'])
        subprocess.call([cmd], shell=True)
# Create the project directory (idempotent), generate/broadcast a unique
# simulation ID from rank 0, and create the per-run directory.
project_dir = project + '/'
try:
    # BUGFIX: 0755 is a Python-2-only octal literal (SyntaxError under
    # Python 3); 0o755 is equivalent and valid on Python 2.6+ as well
    os.makedirs(rootdir+project_dir, 0o755)
except OSError as exception:
    # An already-existing directory is fine; re-raise anything else
    if exception.errno == errno.EEXIST and os.path.isdir(rootdir+project_dir):
        pass
    else:
        raise
if rank == 0:
    # Default simulation ID is a UTC timestamp
    if simid is None:
        simid = time.strftime('%Y-%m-%d-%H-%M-%S', time.gmtime())
    elif not isinstance(simid, str):
        raise TypeError('simid must be a string')
else:
    simid = None
simid = comm.bcast(simid, root=0) # Broadcast simulation ID
simid = simid + '/'
try:
    # BUGFIX: same Python-2-only octal literal as above
    os.makedirs(rootdir+project_dir+simid, 0o755)
except OSError as exception:
    if exception.errno == errno.EEXIST and os.path.isdir(rootdir+project_dir+simid):
        pass
    else:
        raise
# --- Validate telescope, element, and phased-array error parameters ---
if telescope_id.lower() not in ['mwa', 'vla', 'gmrt', 'ugmrt', 'hera', 'mwa_dipole', 'custom', 'paper', 'mwa_tools', 'hirax', 'chime']:
    raise ValueError('Invalid telescope specified')
if element_shape is None:
    element_shape = 'delta'
elif element_shape not in ['dish', 'delta', 'dipole', 'gaussian']:
    raise ValueError('Invalid antenna element shape specified')
# Every shape except the point-like 'delta' needs a positive physical size
if element_shape != 'delta':
    if element_size is None:
        raise ValueError('No antenna element size specified')
    elif element_size <= 0.0:
        raise ValueError('Antenna element size must be positive')
if not isinstance(phased_array, bool):
    raise TypeError('phased_array specification must be boolean')
# Build filename fragments encoding the phased-array delay/gain errors;
# empty strings when the corresponding error is unspecified
if phasedarray_delayerr is None:
    phasedarray_delayerr_str = ''
    phasedarray_delayerr = 0.0
elif phasedarray_delayerr < 0.0:
    raise ValueError('phasedarray_delayerr must be non-negative.')
else:
    phasedarray_delayerr_str = 'derr_{0:.3f}ns'.format(phasedarray_delayerr)
    # Convert from nanoseconds to seconds
    phasedarray_delayerr *= 1e-9
if phasedarray_gainerr is None:
    phasedarray_gainerr_str = ''
    phasedarray_gainerr = 0.0
elif phasedarray_gainerr < 0.0:
    raise ValueError('phasedarray_gainerr must be non-negative.')
else:
    phasedarray_gainerr_str = '_gerr_{0:.2f}dB'.format(phasedarray_gainerr)
if nrand is None:
    nrandom_str = ''
    nrand = 1
elif nrand < 1:
    raise ValueError('nrandom must be positive')
else:
    nrandom_str = '_nrand_{0:0d}_'.format(nrand)
# With no delay or gain errors, multiple random realizations are pointless
if (phasedarray_delayerr_str == '') and (phasedarray_gainerr_str == ''):
    nrand = 1
    nrandom_str = ''
phasedarray_delaygain_err_str = phasedarray_delayerr_str + phasedarray_gainerr_str + nrandom_str
if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_dipole'):
element_size = 0.74
element_shape = 'dipole'
if telescope_id.lower() == 'mwa': phased_array = True
elif telescope_id.lower() == 'paper':
element_size = 2.0
element_shape = 'dipole'
elif telescope_id.lower() == 'vla':
element_size = 25.0
element_shape = 'dish'
elif 'gmrt' in telescope_id.lower():
element_size = 45.0
element_shape = 'dish'
elif telescope_id.lower() == 'hera':
element_size = 14.0
element_shape = 'dish'
elif telescope_id.lower() == 'hirax':
element_size = 6.0
element_shape = 'dish'
elif telescope_id.lower() == 'custom':
if element_shape != 'delta':
if (element_shape is None) or (element_size is None):
raise ValueError('Both antenna element shape and size must be specified for the custom telescope type.')
elif element_size <= 0.0:
raise ValueError('Antenna element size must be positive.')
elif telescope_id.lower() == 'mwa_tools':
pass
else:
raise ValueError('telescope ID must be specified.')
if telescope_id.lower() == 'custom':
if element_shape == 'delta':
telescope_id = 'delta'
else:
telescope_id = '{0:.1f}m_{1:}'.format(element_size, element_shape)
if phased_array:
telescope_id = telescope_id.lower() + '_array'
telescope_str = telescope_id.lower()+'_'
# --- Antenna element orientation ---------------------------------------------
# element_ocoords selects the coordinate system ('altaz' or 'dircos'); if the
# orientation itself is unspecified, sensible defaults are filled in below.
if element_ocoords not in ['altaz', 'dircos']:
    if element_ocoords is not None:
        raise ValueError('Antenna element orientation must be "altaz" or "dircos"')
if element_orientation is None:
    if element_ocoords is not None:
        if element_ocoords == 'altaz':
            # Dipoles (incl. MWA) default to [alt=0, az=90]; others to [90, 270]
            if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_dipole') or (element_shape == 'dipole'):
                element_orientation = NP.asarray([0.0, 90.0]).reshape(1,-1)
            else:
                element_orientation = NP.asarray([90.0, 270.0]).reshape(1,-1)
        elif element_ocoords == 'dircos':
            # Direction-cosine equivalents of the altaz defaults above
            if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_dipole') or (element_shape == 'dipole'):
                element_orientation = NP.asarray([1.0, 0.0, 0.0]).reshape(1,-1)
            else:
                element_orientation = NP.asarray([0.0, 0.0, 1.0]).reshape(1,-1)
        else:
            raise ValueError('Invalid value specified antenna element orientation coordinate system.')
    else:
        # No coordinate system given either: default to altaz conventions
        if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_dipole') or (element_shape == 'dipole'):
            element_orientation = NP.asarray([0.0, 90.0]).reshape(1,-1)
        else:
            element_orientation = NP.asarray([90.0, 270.0]).reshape(1,-1)
        element_ocoords = 'altaz'
else:
    if element_ocoords is None:
        raise ValueError('Antenna element orientation coordinate system must be specified to describe the specified antenna orientation.')
    element_orientation = NP.asarray(element_orientation).reshape(1,-1)
    if (element_orientation.size < 2) or (element_orientation.size > 3):
        raise ValueError('Antenna element orientation must be a two- or three-element vector.')
    elif (element_ocoords == 'altaz') and (element_orientation.size != 2):
        raise ValueError('Antenna element orientation must be a two-element vector if using Alt-Az coordinates.')
# Height of element above an (optional) ground plane, encoded in a name string
if ground_plane is None:
    ground_plane_str = 'no_ground_'
else:
    if ground_plane > 0.0:
        ground_plane_str = '{0:.1f}m_ground_'.format(ground_plane)
    else:
        raise ValueError('Height of antenna element above ground plane must be positive.')
# --- External primary beam loading -------------------------------------------
# Load an externally-supplied primary beam (npix x nfreqs) from FITS, HDF5 or
# a UVBeam file, normalize its units, and build the beam usage name string.
if use_external_beam:
    if beam_filefmt.lower() == 'fits':
        external_beam = fits.getdata(external_beam_file, extname='BEAM_{0}'.format(beam_pol))
        external_beam_freqs = fits.getdata(external_beam_file, extname='FREQS_{0}'.format(beam_pol)) # in MHz
        external_beam = external_beam.reshape(-1,external_beam_freqs.size) # npix x nfreqs
        prihdr = fits.getheader(external_beam_file, 0)
        beamunit = prihdr['GAINUNIT']
    elif beam_filefmt.lower() == 'hdf5':
        # NOTE(review): Dataset.value is an old h5py accessor removed in
        # h5py >= 3.0 -- confirm the pinned h5py version supports it
        with h5py.File(external_beam_file, 'r') as fileobj:
            external_beam = fileobj['gain_info'][beam_pol].value
            external_beam = external_beam.T
            external_beam_freqs = fileobj['spectral_info']['freqs'].value
            beamunit = fileobj['header']['gainunit'].value
    elif beam_filefmt == 'uvbeam':
        if uvbeam_module_found:
            uvbm = UVBeam()
            uvbm.read_beamfits(external_beam_file)
            axis_vec_ind = 0 # for power beam
            spw_ind = 0 # spectral window index
            if beam_pol.lower() in ['x', 'e']:
                beam_pol_ind = 0
            else:
                beam_pol_ind = 1
            external_beam = uvbm.data_array[axis_vec_ind,spw_ind,beam_pol_ind,:,:].T # npix x nfreqs
            external_beam_freqs = uvbm.freq_array.ravel() # nfreqs (in Hz)
        else:
            raise ImportError('uvbeam module not installed/found')
        # Normalize peak gain to unity (within numerical tolerance)
        if NP.abs(NP.abs(external_beam).max() - 1.0) > 1e-10:
            external_beam /= NP.abs(external_beam).max()
        beamunit = ''
    else:
        raise ValueError('Specified beam file format not currently supported')
    # Convert dB gains to linear power
    if beamunit.lower() == 'db':
        external_beam = 10**(external_beam/10.0)
    beam_usage_str = 'extpb_'+beam_id
    if beam_chromaticity:
        # FFT interpolation needs an even-length frequency axis: drop last channel
        if pbeam_spec_interp_method == 'fft':
            external_beam = external_beam[:,:-1]
            external_beam_freqs = external_beam_freqs[:-1]
        beam_usage_str = beam_usage_str + '_chromatic'
    else:
        beam_usage_str = beam_usage_str + '_{0:.1f}_MHz'.format(select_beam_freq/1e6)+'_achromatic'
else:
    # Analytic (functional) primary beam; always treated as chromatic
    beam_usage_str = 'funcpb'
    beam_usage_str = beam_usage_str + '_chromatic'
# --- Assemble the telescope descriptor dictionary ----------------------------
telescope = {}
if telescope_id.lower() in ['mwa', 'vla', 'gmrt', 'ugmrt', 'hera', 'paper', 'mwa_dipole', 'mwa_tools', 'hirax', 'chime']:
    telescope['id'] = telescope_id.lower()
telescope['shape'] = element_shape
telescope['size'] = element_size
telescope['orientation'] = element_orientation
telescope['ocoords'] = element_ocoords
telescope['groundplane'] = ground_plane
telescope['latitude'] = latitude
telescope['longitude'] = longitude
telescope['altitude'] = altitude
# Effective collecting area: (lambda/2)^2 for dipole/delta, geometric area for
# dish/gaussian; a 16-element tile factor is applied for MWA / phased arrays
if A_eff is None:
    if (telescope['shape'] == 'dipole') or (telescope['shape'] == 'delta'):
        A_eff = (0.5*FCNST.c/freq)**2
        if (telescope_id.lower() == 'mwa') or phased_array:
            A_eff *= 16
    if (telescope['shape'] == 'dish') or (telescope['shape'] == 'gaussian'):
        A_eff = NP.pi * (0.5*element_size)**2
# Locations of individual elements within a phased-array tile
element_locs = None
if phased_array:
    try:
        element_locs = NP.loadtxt(phased_elements_file, skiprows=1, comments='#', usecols=(0,1,2))
    except IOError:
        raise IOError('Could not open the specified file for phased array of antenna elements.')
if telescope_id.lower() == 'mwa':
    # 4x4 dipole grid with 1.1 m spacing (z = 0), overriding any file input
    xlocs, ylocs = NP.meshgrid(1.1*NP.linspace(-1.5,1.5,4), 1.1*NP.linspace(1.5,-1.5,4))
    element_locs = NP.hstack((xlocs.reshape(-1,1), ylocs.reshape(-1,1), NP.zeros(xlocs.size).reshape(-1,1)))
if element_locs is not None:
    telescope['element_locs'] = element_locs
# --- Snapshot selection mode: exactly one option must be chosen -------------
if avg_drifts + beam_switch + (pick_snapshots is not None) + (snapshots_range is not None) + all_snapshots != 1:
    raise ValueError('One and only one of avg_drifts, beam_switch, pick_snapshots, snapshots_range, all_snapshots must be set')
snapshot_type_str = ''
if avg_drifts and (obs_mode == 'dns'):
    snapshot_type_str = 'drift_averaged_'
if beam_switch and (obs_mode == 'dns'):
    snapshot_type_str = 'beam_switches_'
if (snapshots_range is not None) and ((obs_mode == 'dns') or (obs_mode == 'lstbin')):
    snapshot_type_str = 'snaps_{0[0]:0d}-{0[1]:0d}_'.format(snapshots_range)
duration_str = ''
# --- Observing run defined by an external pointing file ----------------------
# Reads per-snapshot pointings (alt, az, LST) and optionally MWA beamformer
# delays, then applies the selected snapshot-selection mode (avg_drifts,
# beam_switch, snapshots_range, pick_snapshots or all).
if pointing_file is not None:
    pointing_init = None
    pointing_info_from_file = NP.loadtxt(pointing_file, comments='#', usecols=(1,2,3), delimiter=',')
    obs_id = NP.loadtxt(pointing_file, comments='#', usecols=(0,), delimiter=',', dtype=str)
    if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
        # Semicolon-separated beamformer delay settings, scaled by 435e-12
        # (presumably 435 ps per delay step -- confirm against MWA docs)
        delays_str = NP.loadtxt(pointing_file, comments='#', usecols=(4,), delimiter=',', dtype=str)
        delays_list = [NP.fromstring(delaystr, dtype=float, sep=';', count=-1) for delaystr in delays_str]
        delay_settings = NP.asarray(delays_list)
        delay_settings *= 435e-12
        delays = NP.copy(delay_settings)
    # Clip to at most n_acc accumulations
    if n_acc is None:
        n_acc = pointing_info_from_file.shape[0]
    pointing_info_from_file = pointing_info_from_file[:min(n_acc, pointing_info_from_file.shape[0]),:]
    obs_id = obs_id[:min(n_acc, pointing_info_from_file.shape[0])]
    if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
        delays = delay_settings[:min(n_acc, pointing_info_from_file.shape[0]),:]
    n_acc = min(n_acc, pointing_info_from_file.shape[0])
    pointings_altaz = pointing_info_from_file[:,:2].reshape(-1,2)
    pointings_altaz_orig = pointing_info_from_file[:,:2].reshape(-1,2)
    # LST column is in hours; convert to degrees and wrap into (-180, 180]
    lst = 15.0 * pointing_info_from_file[:,2]
    lst_wrapped = lst + 0.0
    lst_wrapped[lst_wrapped > 180.0] = lst_wrapped[lst_wrapped > 180.0] - 360.0
    # Extrapolate one extra edge at the end assuming uniform spacing
    lst_edges = NP.concatenate((lst_wrapped, [lst_wrapped[-1]+lst_wrapped[-1]-lst_wrapped[-2]]))
    if obs_mode is None:
        obs_mode = 'custom'
    if (obs_mode == 'dns') and (avg_drifts or beam_switch):
        # Collapse consecutive snapshots whose pointing moved < 1 degree
        angle_diff = GEOM.sphdist(pointings_altaz[1:,1], pointings_altaz[1:,0], pointings_altaz[:-1,1], pointings_altaz[:-1,0])
        angle_diff = NP.concatenate(([0.0], angle_diff))
        shift_threshold = 1.0 # in degrees
        lst_wrapped = NP.concatenate(([lst_wrapped[0]], lst_wrapped[angle_diff > shift_threshold], [lst_wrapped[-1]]))
        n_acc = lst_wrapped.size - 1
        pointings_altaz = NP.vstack((pointings_altaz[0,:].reshape(-1,2), pointings_altaz[angle_diff>shift_threshold,:].reshape(-1,2)))
        obs_id = NP.concatenate(([obs_id[0]], obs_id[angle_diff>shift_threshold]))
        if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
            delays = NP.vstack((delay_settings[0,:], delay_settings[angle_diff>shift_threshold,:]))
        obs_mode = 'custom'
        if avg_drifts:
            lst_edges = NP.concatenate(([lst_edges[0]], lst_edges[angle_diff > shift_threshold], [lst_edges[-1]]))
        else:
            lst_edges_left = lst_wrapped[:-1] + 0.0
            lst_edges_right = NP.concatenate(([lst_edges[1]], lst_edges[NP.asarray(NP.where(angle_diff > shift_threshold)).ravel()+1]))
    elif snapshots_range is not None:
        # Select a contiguous range of snapshots (wrapped modulo n_acc)
        snapshots_range[1] = snapshots_range[1] % n_acc
        if snapshots_range[0] > snapshots_range[1]:
            raise IndexError('min snaphost # must be <= max snapshot #')
        lst_wrapped = lst_wrapped[snapshots_range[0]:snapshots_range[1]+2]
        lst_edges = NP.copy(lst_wrapped)
        pointings_altaz = pointings_altaz[snapshots_range[0]:snapshots_range[1]+1,:]
        obs_id = obs_id[snapshots_range[0]:snapshots_range[1]+1]
        if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
            delays = delay_settings[snapshots_range[0]:snapshots_range[1]+1,:]
        n_acc = snapshots_range[1]-snapshots_range[0]+1
    elif pick_snapshots is not None:
        # Select an explicit list of snapshot indices
        pick_snapshots = NP.asarray(pick_snapshots)
        n_acc = pick_snapshots.size
        lst_begin = NP.asarray(lst_wrapped[pick_snapshots])
        pointings_altaz = pointings_altaz[pick_snapshots,:]
        obs_id = obs_id[pick_snapshots]
        if (telescope_id.lower() == 'mwa') or (phased_array) or (telescope_id.lower() == 'mwa_tools'):
            delays = delay_settings[pick_snapshots,:]
        if obs_mode != 'lstbin':
            lst_end = NP.asarray(lst_wrapped[pick_snapshots+1])
            t_acc = (lst_end - lst_begin) / 15.0 * 3.6e3 * sday
            lst = 0.5 * (lst_begin + lst_end)
            obs_mode = 'custom'
        else:
            t_acc = 112.0 + NP.zeros(n_acc)   # in seconds (needs to be generalized)
            lst = lst_wrapped[pick_snapshots] + 0.5 * t_acc/3.6e3 * 15.0 / sday
    if pick_snapshots is None:
        # Accumulation times derived from LST edge spacing (sidereal seconds)
        if obs_mode != 'lstbin':
            if not beam_switch:
                lst = 0.5*(lst_edges[1:]+lst_edges[:-1])
                t_acc = (lst_edges[1:]-lst_edges[:-1]) / 15.0 * 3.6e3 * sday
            else:
                lst = 0.5*(lst_edges_left + lst_edges_right)
                t_acc = (lst_edges_right - lst_edges_left) / 15.0 * 3.6e3 * sday
        else:
            t_acc = 112.0 + NP.zeros(n_acc)   # in seconds (needs to be generalized)
            lst = lst_wrapped + 0.5 * t_acc/3.6e3 * 15.0 / sday
    # Initialize time objects and LST from obs_date and chosen LST
    lst_init = lst[0]
    tobj0 = Time(obs_date.replace('/', '-'), format='iso', scale='utc', location=('{0:.6f}d'.format(telescope['longitude']), '{0:.6f}d'.format(telescope['latitude']), '{0:.6f}m'.format(telescope['altitude']))) # Time object at obs_date beginning
    jd_init = ET.julian_date_from_LAST(lst_init/15.0, tobj0.jd, telescope['longitude']/15.0) # Julian date at beginning of observation
    jd_init = jd_init[0]
    tobj_init = Time(jd_init, format='jd', scale='utc', location=('{0:.6f}d'.format(telescope['longitude']), '{0:.6f}d'.format(telescope['latitude']), '{0:.6f}m'.format(telescope['altitude']))) # Time object at beginning of observation
    lst_init = tobj_init.sidereal_time('apparent').deg # Update LST init
    tobjs = tobj_init + NP.arange(n_acc) * t_acc * U.s # Time objects for the observation
    lst = tobjs.sidereal_time('apparent').deg # Local Apparent Sidereal time (in degrees) for the observation
    # Derive the other pointing coordinate systems from alt-az
    pointings_dircos = GEOM.altaz2dircos(pointings_altaz, units='degrees')
    pointings_hadec = GEOM.altaz2hadec(pointings_altaz, latitude, units='degrees')
    pointings_radec = ET.hadec2radec(pointings_hadec, lst, obstime=tobjs[0], epoch_RA=tobjs[0], time_type=None)
    t_obs = NP.sum(t_acc)
# --- Automated observing run (drift scan or source tracking) -----------------
# No pointing file: synthesize pointings from a drift or track initialization.
elif (pointing_drift_init is not None) or (pointing_track_init is not None):
    pointing_file = None
    if t_acc is None:
        raise NameError('t_acc must be provided for an automated observing run')
    # Exactly one of n_acc / t_obs determines the run length
    if (n_acc is None) and (t_obs is None):
        raise NameError('n_acc or t_obs must be provided for an automated observing run')
    elif (n_acc is not None) and (t_obs is not None):
        raise ValueError('Only one of n_acc or t_obs must be provided for an automated observing run')
    elif n_acc is None:
        n_acc = int(t_obs/t_acc)
    else:
        t_obs = n_acc * t_acc
    if obs_mode is None:
        obs_mode = 'track'
    elif obs_mode not in ['track', 'drift']:
        raise ValueError('Invalid specification for obs_mode')
    # Initialize time objects and LST from obs_date and chosen LST
    if pointing_info['lst_init'] is not None:
        lst_init = pointing_info['lst_init'] * 15.0 # in deg
    else:
        lst_init = None
    jd_init = pointing_info['jd_init']
    if jd_init is None:
        # Derive the Julian date from obs_date + requested initial LST
        if ((obs_date is not None) and (lst_init is not None)):
            tobj0 = Time(obs_date.replace('/', '-'), format='iso', scale='utc', location=('{0:.6f}d'.format(telescope['longitude']), '{0:.6f}d'.format(telescope['latitude']), '{0:.6f}m'.format(telescope['altitude']))) # Time object at obs_date beginning
            jd_init = ET.julian_date_from_LAST(lst_init/15.0, tobj0.jd, telescope['longitude']/15.0) # Julian date at beginning of observation
            jd_init = jd_init[0]
    tobj_init = Time(jd_init, format='jd', scale='utc', location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m)) # Time object at beginning of observation
    lst_init = tobj_init.sidereal_time('apparent').deg # Update LST init
    tobjs = tobj_init + NP.arange(n_acc) * t_acc * U.s # Time objects for the observation
    lst = tobjs.sidereal_time('apparent').deg # Local Apparent Sidereal time (in degrees) for the observation
    if obs_mode == 'drift':
        # Fixed alt-az (or HA-dec) pointing repeated for every accumulation
        alt = pointing_drift_init['alt']
        az = pointing_drift_init['az']
        ha = pointing_drift_init['ha']
        dec = pointing_drift_init['dec']
        if (alt is None) or (az is None):
            if (ha is None) or (dec is None):
                raise ValueError('One of alt-az or ha-dec pairs must be specified')
            hadec_init = NP.asarray([ha, dec])
        else:
            altaz_init = NP.asarray([alt, az])
            hadec_init = GEOM.altaz2hadec(altaz_init.reshape(1,-1), latitude, units='degrees')
        pointings_hadec = NP.repeat(hadec_init.reshape(1,-1), n_acc, axis=0)
    if obs_mode == 'track':
        # Track a fixed RA-dec position: HA advances with time, dec constant
        ra = pointing_track_init['ra']
        dec = pointing_track_init['dec']
        epoch = pointing_track_init['epoch']
        track_init_pointing_at_epoch = SkyCoord(ra=ra*U.deg, dec=dec*U.deg, frame='fk5', equinox='J{0}'.format(epoch))
        track_init_pointing_at_tinit = track_init_pointing_at_epoch.transform_to(FK5(equinox=tobj_init))
        ha = lst_init - track_init_pointing_at_tinit.ra.deg # Initial HA in degrees
        pointings_hadec = NP.hstack((ha + (t_acc/3.6e3)*15.0*NP.arange(n_acc).reshape(-1,1), track_init_pointing_at_tinit.dec.deg+NP.zeros(n_acc).reshape(-1,1)))
    t_acc = t_acc + NP.zeros(n_acc)
    pointings_altaz = GEOM.hadec2altaz(pointings_hadec, latitude, units='degrees')
    pointings_dircos = GEOM.altaz2dircos(pointings_altaz, units='degrees')
    pointings_radec = ET.hadec2radec(pointings_hadec, lst, obstime=tobjs[0], epoch_RA=tobjs[0], time_type=None)
    # pointings_radec_v2 = ET.altaz2radec(pointings_altaz, EarthLocation(lat=telescope['latitude']*U.deg, lon=telescope['longitude']*U.deg, height=telescope['altitude']*U.m), obstime=tobjs[0], epoch_RA=tobjs[0], time_type=None)
    # pointings_radec = NP.hstack(((lst-pointings_hadec[:,0]).reshape(-1,1), pointings_hadec[:,1].reshape(-1,1)))
    duration_str = '_{0:0d}x{1:.1f}s'.format(n_acc, t_acc[0])
# --- Create organized output directory structure -----------------------------
# Output products live under rootdir/project_dir/simid/ in per-category
# subdirectories (simulation data, metainfo, region-of-interest, sky model).
init_time = tobj_init
obsdatetime_dir = '{0}{1}{2}_{3}{4}{5}/'.format(init_time.datetime.year, init_time.datetime.month, init_time.datetime.day, init_time.datetime.hour, init_time.datetime.minute, init_time.datetime.second)
sim_dir = 'simdata/'
meta_dir = 'metainfo/'
roi_dir = 'roi/'
skymod_dir = 'skymodel/'

def _makedirs_if_missing(path, mode=0o755):
    # Create *path* (with parents); a pre-existing directory is not an error.
    # Any other OSError (permissions, bad component, ...) is re-raised.
    try:
        os.makedirs(path, mode)
    except OSError as exception:
        if exception.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise

_makedirs_if_missing(rootdir+project_dir+simid+sim_dir)
_makedirs_if_missing(rootdir+project_dir+simid+meta_dir)
_makedirs_if_missing(rootdir+project_dir+simid+roi_dir)
if cleanup < 3:
    # Sky-model products are only kept at lower cleanup levels
    _makedirs_if_missing(rootdir+project_dir+simid+skymod_dir)
# Wrap all pointing coordinate arrays into [0, 360) degrees (sign preserved
# by NP.fmod for negative inputs)
pointings_radec = NP.fmod(pointings_radec, 360.0)
pointings_hadec = NP.fmod(pointings_hadec, 360.0)
pointings_altaz = NP.fmod(pointings_altaz, 360.0)
# --- Sky / foreground model selection ----------------------------------------
# One boolean flag per supported sky model; exactly one is raised below
# according to sky_str.
use_GSM = False
use_DSM = False
use_spectrum = False
use_pygsm = False
use_CSM = False
use_SUMSS = False
use_GLEAM = False
use_USM = False
use_noise = False
use_MSS = False
use_custom = False
use_skymod = False
use_NVSS = False
use_HI_monopole = False
use_HI_cube = False
use_HI_fluctuations = False
use_MSS=False   # NOTE(review): duplicate of the use_MSS = False assignment above
if sky_str not in ['asm', 'dsm', 'csm', 'nvss', 'sumss', 'gleam', 'mwacs', 'custom', 'usm', 'noise', 'mss', 'HI_cube', 'HI_monopole', 'HI_fluctuations', 'skymod_file', 'gsm2008', 'gsm2016']:
    raise ValueError('Invalid foreground model string specified.')
if sky_str == 'asm':
    use_GSM = True
elif sky_str == 'dsm':
    use_DSM = True
elif sky_str == 'fullspectrum':
    # NOTE(review): unreachable -- 'fullspectrum' is not in the accepted list
    # validated above, so this branch can never be taken; confirm intent
    use_spectrum = True
elif (sky_str == 'gsm2008') or (sky_str == 'gsm2016'):
    use_pygsm = True
elif sky_str == 'csm':
    use_CSM = True
elif sky_str == 'sumss':
    use_SUMSS = True
elif sky_str == 'gleam':
    use_GLEAM = True
elif sky_str == 'custom':
    use_custom = True
elif sky_str == 'skymod_file':
    use_skymod = True
elif sky_str == 'nvss':
    use_NVSS = True
elif sky_str == 'usm':
    use_USM = True
elif sky_str == 'noise':
    use_noise = True
elif sky_str == 'HI_monopole':
    use_HI_monopole = True
elif sky_str == 'HI_fluctuations':
    use_HI_fluctuations = True
elif sky_str == 'HI_cube':
    use_HI_cube = True
# Global HI (EoR) signal parameters: [T_xi0, freq_half, dz_half]
if global_HI_parms is not None:
    try:
        global_HI_parms = NP.asarray(map(float, global_HI_parms))
    except ValueError:
        raise ValueError('Values in global_EoR_parms must be convertible to float')
    T_xi0 = NP.float(global_HI_parms[0])
    freq_half = global_HI_parms[1]
    dz_half = global_HI_parms[2]
# --- Interferometer array / baseline information -----------------------------
# Unpack the baseline vectors, labels, ids and redundancy grouping computed by
# the RI helper from the input parameters.
arrayinfo = RI.getBaselineInfo(parms)
layout_info = arrayinfo['layout_info']
bl = arrayinfo['bl']
bl_label = arrayinfo['label']
bl_id = arrayinfo['id']
blgroups = arrayinfo['groups']
bl_reversemap = arrayinfo['reversemap']
total_baselines = bl.shape[0]
# Fall back to generated labels/ids if the array info did not provide them
try:
    labels = bl_label.tolist()
except NameError:
    labels = []
    labels += [label_prefix+'{0:0d}'.format(i+1) for i in xrange(bl.shape[0])]
try:
    ids = bl_id.tolist()
except NameError:
    ids = range(bl.shape[0])
# --- MPI work-partitioning configuration -------------------------------------
# mpi_key selects the axis along which the simulation is distributed across
# MPI processes: sky sources ('src'), baselines ('bl') or frequencies ('freq').
if not isinstance(mpi_key, str):
    raise TypeError('MPI key must be a string')
if mpi_key not in ['src', 'bl', 'freq']:
    raise ValueError('MPI key must be set on "src", "bl" or "freq"')
# Exactly one of the three axis flags is True.  (Bug fix: the original 'src'
# branch assigned the misspelled name mpi_ob_bl, leaving mpi_on_bl undefined
# and causing a NameError wherever it is read later.)
mpi_on_src = (mpi_key == 'src')
mpi_on_bl = (mpi_key == 'bl')
mpi_on_freq = (mpi_key == 'freq')
if not isinstance(mpi_eqvol, bool):
    raise TypeError('MPI equal volume parameter must be boolean')
# Equal-volume partitioning implies synchronous distribution; otherwise
# work is handed out asynchronously.
mpi_sync = mpi_eqvol
mpi_async = not mpi_eqvol
# --- Frequency axis and bandpass setup ---------------------------------------
# Build the channel frequencies (GHz), the base bandpass, the optional PFB
# shape, the optional antenna bandpass, and the channel flagging masks.
freq = NP.float(freq)
freq_resolution = NP.float(freq_resolution)
base_bpass = 1.0*NP.ones(nchan)
bandpass_shape = 1.0*NP.ones(nchan)
# Channels centered on freq with nchan channels of width freq_resolution
chans = (freq + (NP.arange(nchan) - 0.5 * nchan) * freq_resolution)/ 1e9 # in GHz
oversampling_factor = 1.0 + f_pad
bandpass_str = '{0:0d}x{1:.1f}_kHz'.format(nchan, freq_resolution/1e3)
if fluxcut_freq is None:
    fluxcut_freq = freq
else:
    fluxcut_freq = NP.float(fluxcut_freq)
flagged_edge_channels = []
pfb_str = ''
pfb_str2 = ''
# Polyphase filter bank (PFB) bandpass shape: empirical model or measured
if pfb_method is not None:
    if pfb_method == 'empirical':
        bandpass_shape = DSP.PFB_empirical(nchan, 32, 0.25, 0.25)
    elif pfb_method == 'theoretical':
        pfbhdulist = fits.open(pfb_file)
        pfbdata = pfbhdulist[0].data
        pfbfreq = pfbhdulist[1].data
        # Normalize each coarse channel's response to its peak (dB domain)
        pfb_norm = NP.amax(pfbdata, axis=0).reshape(1,-1)
        pfbdata_norm = pfbdata - pfb_norm
        pfbwin = 10 * NP.log10(NP.sum(10**(pfbdata_norm/10), axis=1))
        freq_range = [0.9*chans.min(), 1.1*chans.max()]
        useful_freq_range = NP.logical_and(pfbfreq >= freq_range[0]*1e3, pfbfreq <=freq_range[1]*1e3)
        # pfb_interp_func = interpolate.interp1d(pfbfreq[useful_freq_range]/1e3, pfbwin[useful_freq_range])
        # pfbwin_interp = pfb_interp_func(chans)
        pfbwin_interp = NP.interp(chans, pfbfreq[useful_freq_range]/1e3, pfbwin[useful_freq_range])
        bandpass_shape = 10**(pfbwin_interp/10)
        if flag_repeat_edge_channels:
            # Flag channels at the repeating coarse-channel edges
            # NOTE(review): nchan/coarse_channel_width relies on Python 2
            # integer division
            if NP.any(n_edge_flag > 0):
                pfb_edge_channels = (bandpass_shape.argmin() + NP.arange(nchan/coarse_channel_width)*coarse_channel_width) % nchan
                # pfb_edge_channels = bandpass_shape.argsort()[:int(1.0*nchan/coarse_channel_width)]
                # wts = NP.exp(-0.5*((NP.arange(bandpass_shape.size)-0.5*bandpass_shape.size)/4.0)**2)/(4.0*NP.sqrt(2*NP.pi))
                # wts_shift = NP.fft.fftshift(wts)
                # freq_wts = NP.fft.fft(wts_shift)
                # pfb_filtered = DSP.fft_filter(bandpass_shape.ravel(), wts=freq_wts.ravel(), passband='high')
                # pfb_edge_channels = pfb_filtered.argsort()[:int(1.0*nchan/coarse_channel_width)]
                pfb_edge_channels = NP.hstack((pfb_edge_channels.ravel(), NP.asarray([pfb_edge_channels.min()-coarse_channel_width, pfb_edge_channels.max()+coarse_channel_width])))
                flagged_edge_channels += [range(max(0,pfb_edge-n_edge_flag[0]),min(nchan,pfb_edge+n_edge_flag[1])) for pfb_edge in pfb_edge_channels]
else:
    pfb_str = 'no_pfb_'
    pfb_str2 = '_no_pfb'
# Optional measured antenna bandpass, resampled onto the channel grid
if ant_bpass_file is not None:
    with NP.load(ant_bpass_file) as ant_bpass_fileobj:
        ant_bpass_freq = ant_bpass_fileobj['faxis']
        ant_bpass_ref = ant_bpass_fileobj['band']
    ant_bpass_ref /= NP.abs(ant_bpass_ref).max()
    # Keep only the upper half of the stored band (Py2 integer division)
    ant_bpass_freq = ant_bpass_freq[ant_bpass_freq.size/2:]
    ant_bpass_ref = ant_bpass_ref[ant_bpass_ref.size/2:]
    chanind, ant_bpass, fdist = LKP.lookup_1NN_new(ant_bpass_freq.reshape(-1,1)/1e9, ant_bpass_ref.reshape(-1,1), chans.reshape(-1,1), distance_ULIM=freq_resolution/1e9, remove_oob=True)
else:
    ant_bpass = NP.ones(nchan)
# Spectral window/taper used downstream, area-normalized
window = nchan * DSP.windowing(nchan, shape=bpass_shape, pad_width=n_pad, centering=True, area_normalize=True)
if bandpass_correct:
    # Divide out the PFB shape and treat the corrected bandpass as flat
    bpcorr = 1/bandpass_shape
    bandpass_shape = NP.ones(base_bpass.size)
else:
    bpcorr = 1.0*NP.ones(nchan)
noise_bpcorr = 1.0*NP.ones(nchan)
if noise_bandpass_correct:
    noise_bpcorr = NP.copy(bpcorr)
# Edge flagging when repeat-edge flagging was not applied above
if not flag_repeat_edge_channels:
    flagged_edge_channels += [range(0,n_edge_flag[0])]
    flagged_edge_channels += [range(nchan-n_edge_flag[1],nchan)]
flagged_channels = flagged_edge_channels
# Explicitly requested channel flags (optionally repeated per coarse channel)
if flag_chan[0] >= 0:
    flag_chan = flag_chan[flag_chan < nchan]
    if bp_flag_repeat:
        flag_chan = NP.mod(flag_chan, coarse_channel_width)
        flagged_channels += [[i*coarse_channel_width+flagchan for i in range(nchan/coarse_channel_width) for flagchan in flag_chan]]
    else:
        flagged_channels += [flag_chan.tolist()]
# Flatten, de-duplicate, and zero out the flagged channels in the bandpass
flagged_channels = [x for y in flagged_channels for x in y]
flagged_channels = list(set(flagged_channels))
bandpass_shape[flagged_channels] = 0.0
bpass = base_bpass * bandpass_shape
# --- Sky sector count and EoR simulation source selection --------------------
if not isinstance(n_sky_sectors, int):
    raise TypeError('n_sky_sectors must be an integer')
elif (n_sky_sectors < 1):
    n_sky_sectors = 1
if use_HI_cube:
    if not isinstance(use_lidz, bool):
        raise TypeError('Parameter specifying use of Lidz simulations must be Boolean')
    if not isinstance(use_21cmfast, bool):
        raise TypeError('Parameter specifying use of 21cmfast simulations must be Boolean')
# Choose between the Lidz and 21cmFAST EoR simulation cubes (Lidz default)
if use_HI_monopole or use_HI_fluctuations or use_HI_cube:
    if use_lidz and use_21cmfast:
        raise ValueError('Only one of Adam Lidz or 21CMFAST simulations can be chosen')
    if not use_lidz and not use_21cmfast:
        use_lidz = True
        use_21cmfast = False
        eor_simfile = rootdir+'EoR_simulations/Adam_Lidz/Boom_tiles/hpxcube_138.915-195.235_MHz_80.0_kHz_nside_{0:0d}.fits'.format(nside)
    elif use_lidz:
        eor_simfile = rootdir+'EoR_simulations/Adam_Lidz/Boom_tiles/hpxcube_138.915-195.235_MHz_80.0_kHz_nside_{0:0d}.fits'.format(nside)
    elif use_21cmfast:
        # No 21cmFAST simulation file is wired up in the visible code
        pass
# Build the file-name fragments that encode the spectral-index randomization:
# an rms string (only when a positive rms is requested) and a seed string.
spindex_rms_str = ''
spindex_seed_str = ''
if not isinstance(spindex_rms, (int, float)):
    raise TypeError('Spectral Index rms must be a scalar')
if spindex_rms > 0.0:
    spindex_rms_str = '{0:.1f}'.format(spindex_rms)
else:
    # Non-positive rms means no randomization at all
    spindex_rms = 0.0
if spindex_seed is None:
    pass
elif not isinstance(spindex_seed, (int, float)):
    raise TypeError('Spectral index random seed must be a scalar')
else:
    spindex_seed_str = '{0:0d}_'.format(spindex_seed)
# --- Sky model construction (root MPI process only) --------------------------
if rank == 0:
    if use_HI_fluctuations or use_HI_cube:
        # Load the HEALPix EoR temperature cube and select the channels that
        # match the simulated frequency grid
        hdulist = fits.open(eor_simfile)
        nexten = hdulist['PRIMARY'].header['NEXTEN']
        fitstype = hdulist['PRIMARY'].header['FITSTYPE']
        temperatures = None
        extnames = [hdulist[i].header['EXTNAME'] for i in xrange(1,nexten+1)]
        if fitstype == 'IMAGE':
            eor_simfreq = hdulist['FREQUENCY'].data['Frequency [MHz]']
        else:
            # Frequency (MHz) is encoded in each extension name
            eor_simfreq = [float(extname.split(' ')[0]) for extname in extnames]
            eor_simfreq = NP.asarray(eor_simfreq)
        eor_freq_resolution = eor_simfreq[1] - eor_simfreq[0]
        # Nearest-neighbour match of simulation channels to cube channels
        ind_chans, ind_eor_simfreq, dfrequency = LKP.find_1NN(eor_simfreq.reshape(-1,1), 1e3*chans.reshape(-1,1), distance_ULIM=0.5*eor_freq_resolution, remove_oob=True)
        eor_simfreq = eor_simfreq[ind_eor_simfreq]
        if fitstype == 'IMAGE':
            temperatures = hdulist['TEMPERATURE'].data[:,ind_eor_simfreq]
        else:
            # Accumulate one temperature column per selected extension
            for i in xrange(eor_simfreq.size):
                if i == 0:
                    temperatures = hdulist[ind_eor_simfreq[i]+1].data['Temperature'].reshape(-1,1)
                else:
                    temperatures = NP.hstack((temperatures, hdulist[ind_eor_simfreq[i]+1].data['Temperature'].reshape(-1,1)))
        if use_HI_fluctuations:
            # Remove the per-pixel mean to keep only fluctuations
            temperatures = temperatures - NP.mean(temperatures, axis=0, keepdims=True)
        pixres = hdulist['PRIMARY'].header['PIXAREA']
        coords_table = hdulist['COORDINATE'].data
        ra_deg_EoR = coords_table['RA']
        dec_deg_EoR = coords_table['DEC']
        # Rayleigh-Jeans conversion of brightness temperature to Jy per pixel
        fluxes_EoR = temperatures * (2.0* FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy
        freq_EoR = freq/1e9
        hdulist.close()
        flux_unit = 'Jy'
        catlabel = 'HI-cube'
        spec_type = 'spectrum'
        spec_parms = {}
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg_EoR.reshape(-1,1), dec_deg_EoR.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'spectrum': fluxes_EoR}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_HI_monopole:
        # All-sky uniform HI monopole with a tanh spectral model per pixel
        theta, phi = HP.pix2ang(nside, NP.arange(HP.nside2npix(nside)))
        gc = Galactic(l=NP.degrees(phi), b=90.0-NP.degrees(theta), unit=(U.degree, U.degree))
        radec = gc.fk5
        ra_deg_EoR = radec.ra.degree
        dec_deg_EoR = radec.dec.degree
        pixres = HP.nside2pixarea(nside) # pixel solid angle (steradians)
        catlabel = 'HI-monopole'
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('tanh', ra_deg_EoR.size)
        spec_parms['freq-ref'] = freq_half + NP.zeros(ra_deg_EoR.size)
        spec_parms['flux-scale'] = T_xi0 * (2.0* FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy
        spec_parms['flux-offset'] = 0.5*spec_parms['flux-scale'] + NP.zeros(ra_deg_EoR.size)
        spec_parms['z-width'] = dz_half + NP.zeros(ra_deg_EoR.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg_EoR.reshape(-1,1), dec_deg_EoR.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
        spectrum = skymod.generate_spectrum()
    # --- 'asm' (all-sky model): diffuse DSM + SUMSS + NVSS point sources -----
    elif use_GSM:
        # Diffuse sky model at 150 MHz on a HEALPix grid
        dsm_file = DSM_file_prefix+'_150.0_MHz_nside_{0:0d}.fits'.format(nside)
        # dsm_file = DSM_file_prefix+'_{0:.1f}_MHz_nside_{1:0d}.fits'.format(freq*1e-6, nside)
        hdulist = fits.open(dsm_file)
        pixres = hdulist[0].header['PIXAREA']
        dsm_table = hdulist[1].data
        ra_deg_DSM = dsm_table['RA']
        dec_deg_DSM = dsm_table['DEC']
        temperatures = dsm_table['T_{0:.0f}'.format(150.0)]
        # temperatures = dsm_table['T_{0:.0f}'.format(freq/1e6)]
        # Rayleigh-Jeans conversion of 150 MHz temperatures to Jy per pixel
        fluxes_DSM = temperatures * 2.0 * FCNST.k * (150e6/FCNST.c)**2 * pixres / CNST.Jy
        # fluxes_DSM = temperatures * (2.0* FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy
        spindex = dsm_table['spindex'] + 2.0
        freq_DSM = 0.150 # in GHz
        # freq_DSM = freq/1e9 # in GHz
        freq_catalog = freq_DSM * 1e9 + NP.zeros(fluxes_DSM.size)
        catlabel = NP.repeat('DSM', fluxes_DSM.size)
        ra_deg = ra_deg_DSM + 0.0
        dec_deg = dec_deg_DSM + 0.0
        majax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_DSM.size)
        minax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_DSM.size)
        fluxes = fluxes_DSM + 0.0
        # SUMSS point sources (0.843 GHz); sexagesimal columns -> degrees
        freq_SUMSS = 0.843 # in GHz
        catalog = NP.loadtxt(SUMSS_file, usecols=(0,1,2,3,4,5,10,12,13,14,15,16))
        ra_deg_SUMSS = 15.0 * (catalog[:,0] + catalog[:,1]/60.0 + catalog[:,2]/3.6e3)
        dec_dd = NP.loadtxt(SUMSS_file, usecols=(3,), dtype="|S3")
        sgn_dec_str = NP.asarray([dec_dd[i][0] for i in range(dec_dd.size)])
        sgn_dec = 1.0*NP.ones(dec_dd.size)
        sgn_dec[sgn_dec_str == '-'] = -1.0
        dec_deg_SUMSS = sgn_dec * (NP.abs(catalog[:,3]) + catalog[:,4]/60.0 + catalog[:,5]/3.6e3)
        fmajax = catalog[:,7]
        fminax = catalog[:,8]
        fpa = catalog[:,9]
        dmajax = catalog[:,10]
        dminax = catalog[:,11]
        # Keep only unresolved (point) sources: zero deconvolved axes
        PS_ind = NP.logical_and(dmajax == 0.0, dminax == 0.0)
        ra_deg_SUMSS = ra_deg_SUMSS[PS_ind]
        dec_deg_SUMSS = dec_deg_SUMSS[PS_ind]
        fint = catalog[PS_ind,6] * 1e-3
        # Random spectral indices around -0.83, optionally seeded
        if spindex_seed is None:
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        else:
            NP.random.seed(spindex_seed)
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        fmajax = fmajax[PS_ind]
        fminax = fminax[PS_ind]
        fpa = fpa[PS_ind]
        dmajax = dmajax[PS_ind]
        dminax = dminax[PS_ind]
        # Keep sources brighter than 10 Jy when extrapolated to freq
        bright_source_ind = fint >= 10.0 * (freq_SUMSS*1e9/freq)**spindex_SUMSS
        ra_deg_SUMSS = ra_deg_SUMSS[bright_source_ind]
        dec_deg_SUMSS = dec_deg_SUMSS[bright_source_ind]
        fint = fint[bright_source_ind]
        fmajax = fmajax[bright_source_ind]
        fminax = fminax[bright_source_ind]
        fpa = fpa[bright_source_ind]
        dmajax = dmajax[bright_source_ind]
        dminax = dminax[bright_source_ind]
        spindex_SUMSS = spindex_SUMSS[bright_source_ind]
        valid_ind = NP.logical_and(fmajax > 0.0, fminax > 0.0)
        ra_deg_SUMSS = ra_deg_SUMSS[valid_ind]
        dec_deg_SUMSS = dec_deg_SUMSS[valid_ind]
        fint = fint[valid_ind]
        fmajax = fmajax[valid_ind]
        fminax = fminax[valid_ind]
        fpa = fpa[valid_ind]
        spindex_SUMSS = spindex_SUMSS[valid_ind]
        # Append SUMSS entries to the composite catalog arrays
        freq_catalog = NP.concatenate((freq_catalog, freq_SUMSS*1e9 + NP.zeros(fint.size)))
        catlabel = NP.concatenate((catlabel, NP.repeat('SUMSS', fint.size)))
        ra_deg = NP.concatenate((ra_deg, ra_deg_SUMSS))
        dec_deg = NP.concatenate((dec_deg, dec_deg_SUMSS))
        spindex = NP.concatenate((spindex, spindex_SUMSS))
        majax = NP.concatenate((majax, fmajax/3.6e3))
        minax = NP.concatenate((minax, fminax/3.6e3))
        fluxes = NP.concatenate((fluxes, fint))
        # NVSS point sources (1.4 GHz) covering the sky north of SUMSS
        freq_NVSS = 1.4 # in GHz
        hdulist = fits.open(NVSS_file)
        ra_deg_NVSS = hdulist[1].data['RA(2000)']
        dec_deg_NVSS = hdulist[1].data['DEC(2000)']
        nvss_fpeak = hdulist[1].data['PEAK INT']
        nvss_majax = hdulist[1].data['MAJOR AX']
        nvss_minax = hdulist[1].data['MINOR AX']
        hdulist.close()
        if spindex_seed is None:
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        else:
            # Different seed than SUMSS so the two draws are independent
            NP.random.seed(2*spindex_seed)
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        # Same selection: outside SUMSS coverage, bright, and point-like
        not_in_SUMSS_ind = NP.logical_and(dec_deg_NVSS > -30.0, dec_deg_NVSS <= min(90.0, latitude+90.0))
        bright_source_ind = nvss_fpeak >= 10.0 * (freq_NVSS*1e9/freq)**(spindex_NVSS)
        PS_ind = NP.sqrt(nvss_majax**2-(0.75/60.0)**2) < 14.0/3.6e3
        count_valid = NP.sum(NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind))
        nvss_fpeak = nvss_fpeak[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]
        freq_catalog = NP.concatenate((freq_catalog, freq_NVSS*1e9 + NP.zeros(count_valid)))
        catlabel = NP.concatenate((catlabel, NP.repeat('NVSS',count_valid)))
        ra_deg = NP.concatenate((ra_deg, ra_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]))
        dec_deg = NP.concatenate((dec_deg, dec_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]))
        spindex = NP.concatenate((spindex, spindex_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]))
        majax = NP.concatenate((majax, nvss_majax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]))
        minax = NP.concatenate((minax, nvss_minax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, bright_source_ind), PS_ind)]))
        fluxes = NP.concatenate((fluxes, nvss_fpeak))
        # Wrap the composite catalog in a power-law SkyModel
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
        spec_parms['flux-scale'] = fluxes
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = NP.zeros(ra_deg.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_DSM:
        # Diffuse sky model (DSM): read a healpix map of 150 MHz brightness
        # temperatures and convert each pixel into an unresolved "source"
        # with a power-law spectrum.
        dsm_file = DSM_file_prefix+'_150.0_MHz_nside_{0:0d}.fits'.format(nside)
        # dsm_file = DSM_file_prefix+'_{0:.1f}_MHz_nside_{1:0d}.fits'.format(freq*1e-6, nside)
        hdulist = fits.open(dsm_file)
        pixres = hdulist[0].header['PIXAREA']  # healpix pixel solid angle (presumably sr -- TODO confirm)
        dsm_table = hdulist[1].data
        ra_deg_DSM = dsm_table['RA']
        dec_deg_DSM = dsm_table['DEC']
        temperatures = dsm_table['T_{0:.0f}'.format(150.0)]
        # temperatures = dsm_table['T_{0:.0f}'.format(freq/1e6)]
        # Rayleigh-Jeans conversion of temperature to flux density:
        # S = 2 k T (nu/c)^2 * Omega_pix, expressed in Jy
        fluxes_DSM = temperatures * 2.0 * FCNST.k * (150e6/FCNST.c)**2 * pixres / CNST.Jy
        # fluxes_DSM = temperatures * (2.0 * FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy
        flux_unit = 'Jy'
        # +2.0 converts the temperature spectral index to a flux-density
        # spectral index (S propto T nu^2 per the conversion above)
        spindex = dsm_table['spindex'] + 2.0
        freq_DSM = 0.150 # in GHz
        # freq_DSM = freq/1e9 # in GHz
        freq_catalog = freq_DSM * 1e9 + NP.zeros(fluxes_DSM.size)
        catlabel = NP.repeat('DSM', fluxes_DSM.size)
        ra_deg = ra_deg_DSM
        dec_deg = dec_deg_DSM
        # Source angular size set to the healpix pixel resolution (degrees)
        majax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_DSM.size)
        minax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_DSM.size)
        # majax = NP.degrees(NP.sqrt(HP.nside2pixarea(64)*4/NP.pi) * NP.ones(fluxes_DSM.size))
        # minax = NP.degrees(NP.sqrt(HP.nside2pixarea(64)*4/NP.pi) * NP.ones(fluxes_DSM.size))
        fluxes = fluxes_DSM
        hdulist.close()
        # Functional (power-law) spectral description for the sky model
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
        spec_parms['flux-scale'] = fluxes
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = NP.zeros(ra_deg.size)
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_spectrum:
        # Sky model loaded from a precomputed spectrum file; the spectrum
        # itself is deferred (load_spectrum=False) to save memory up front.
        skymod = SM.SkyModel(init_parms=None, init_file=spectrum_file, load_spectrum=False)
    elif use_pygsm:
        # Diffuse sky model generated on the fly with PyGSM.
        if not SM.pygsm_found:
            print('PyGSM module not found to be installed.')
            PDB.set_trace()  # drop to debugger instead of raising
        skymod_parallel = parms['skyparm']['parallel']
        if not isinstance(skymod_parallel, bool):
            warnings.warn('Input parallel for determining sky model must be boolean. Setting it to False.')
            skymod_parallel = False
        # Frequencies at which the GSM is evaluated: either the instrument
        # channels themselves or n_mdl_freqs points padded 1% beyond the band
        n_mdl_freqs = parms['skyparm']['n_mdl_freqs']
        if n_mdl_freqs is None:
            mdl_freqs = 1e9 * chans
        elif not isinstance(n_mdl_freqs, int):
            raise TypeError('Input n_mdl_freqs must be an integer')
        else:
            if n_mdl_freqs < 2:
                n_mdl_freqs = 8
            mdl_freqs = 1e9 * NP.linspace(0.99 * chans.min(), 1.01 * chans.max(), n_mdl_freqs)
        if nside is None:
            # Choose the smallest healpix nside whose pixel size is at most
            # half the finest angular scale probed by the longest baseline
            bl_length = NP.sqrt(NP.sum(arrayinfo['bl']**2, axis=1))
            u_max = bl_length.max() * 1e9 * chans.max() / FCNST.c
            angres = 1 / u_max # radians
            nside = 1
            hpxres = HP.nside2resol(nside)
            while hpxres > 0.5 * angres:
                nside *= 2
                hpxres = HP.nside2resol(nside)
        skymod = SM.diffuse_radio_sky_model(mdl_freqs, gsmversion=sky_str, nside=nside, ind=None, outfile=None, parallel=skymod_parallel)
elif use_USM:
dsm_file = DSM_file_prefix+'_150.0_MHz_nside_{0:0d}.fits'.format(nside)
# dsm_file = DSM_file_prefix+'_{0:.1f}_MHz_nside_{1:0d}.fits'.format(freq*1e-6, nside)
hdulist = fits.open(dsm_file)
pixres = hdulist[0].header['PIXAREA']
dsm_table = hdulist[1].data
ra_deg = dsm_table['RA']
dec_deg = dsm_table['DEC']
temperatures = dsm_table['T_{0:.0f}'.format(150.0)]
# temperatures = dsm_table['T_{0:.0f}'.format(freq/1e6)]
avg_temperature = NP.mean(temperatures)
fluxes_DSM = temperatures * 2.0 * FCNST.k * (150e6/FCNST.c)**2 * pixres / CNST.Jy
# fluxes_USM = avg_temperature * (2.0 * FCNST.k * freq**2 / FCNST.c**2) * pixres / CNST.Jy * NP.ones(temperatures.size)
spindex = NP.zeros(fluxes_USM.size)
freq_USM = 0.150 # in GHz
# freq_USM = 0.185 # in GHz
freq_catalog = freq_USM * 1e9 + NP.zeros(fluxes_USM.size)
catlabel = NP.repeat('USM', fluxes_USM.size)
majax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_USM.size)
minax = NP.degrees(HP.nside2resol(nside)) * NP.ones(fluxes_USM.size)
hdulist.close()
flux_unit = 'Jy'
spec_type = 'func'
spec_parms = {}
spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
spec_parms['power-law-index'] = spindex
spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
spec_parms['flux-scale'] = fluxes_USM
spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
spec_parms['freq-width'] = NP.zeros(ra_deg.size)
skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_noise:
        # Noise sky: independent Gaussian random brightness per healpix pixel
        # per channel, converted to Jy, stored as an explicit spectrum.
        pixres = HP.nside2pixarea(nside)
        npix = HP.nside2npix(nside)
        theta, phi = HP.pix2ang(nside, NP.arange(npix))
        dec = NP.pi/2 - theta  # colatitude -> declination-like angle (radians)
        flux_unit = 'Jy'
        spec_type = 'spectrum'  # tabulated spectrum rather than a functional form
        majax = NP.degrees(HP.nside2resol(nside)) * NP.ones(npix)
        minax = NP.degrees(HP.nside2resol(nside)) * NP.ones(npix)
        # Unit-temperature Rayleigh-Jeans scaling per channel times N(0,1) draws
        skyspec = NP.random.randn(npix,chans.size) * (2.0 * FCNST.k * (1e9*chans.reshape(1,-1) / FCNST.c)**2) * pixres / CNST.Jy
        spec_parms = {}
        catlabel = 'noise-sky'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((NP.degrees(phi).reshape(-1,1), NP.degrees(dec).reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'spectrum': skyspec, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(npix).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_CSM:
        # Combined sky model (CSM): SUMSS point sources south of -30 deg
        # declination merged with NVSS point sources north of it, both
        # filtered by a flux threshold referenced to fluxcut_freq.
        # --- SUMSS part ---
        freq_SUMSS = 0.843 # in GHz
        catalog = NP.loadtxt(SUMSS_file, usecols=(0,1,2,3,4,5,10,12,13,14,15,16))
        # RA given as h m s -> degrees
        ra_deg_SUMSS = 15.0 * (catalog[:,0] + catalog[:,1]/60.0 + catalog[:,2]/3.6e3)
        # Sign of declination must come from the text field since -0 deg
        # would lose its sign when parsed numerically
        dec_dd = NP.loadtxt(SUMSS_file, usecols=(3,), dtype="|S3")
        sgn_dec_str = NP.asarray([dec_dd[i][0] for i in range(dec_dd.size)])
        sgn_dec = 1.0*NP.ones(dec_dd.size)
        sgn_dec[sgn_dec_str == '-'] = -1.0
        dec_deg_SUMSS = sgn_dec * (NP.abs(catalog[:,3]) + catalog[:,4]/60.0 + catalog[:,5]/3.6e3)
        fmajax = catalog[:,7]
        fminax = catalog[:,8]
        fpa = catalog[:,9]
        dmajax = catalog[:,10]
        dminax = catalog[:,11]
        # Point sources: zero deconvolved major and minor axes
        PS_ind = NP.logical_and(dmajax == 0.0, dminax == 0.0)
        ra_deg_SUMSS = ra_deg_SUMSS[PS_ind]
        dec_deg_SUMSS = dec_deg_SUMSS[PS_ind]
        fint = catalog[PS_ind,6] * 1e-3  # mJy -> Jy
        # Random spectral indices around -0.83; optionally seeded for reproducibility
        if spindex_seed is None:
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        else:
            NP.random.seed(spindex_seed)
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        fmajax = fmajax[PS_ind]
        fminax = fminax[PS_ind]
        fpa = fpa[PS_ind]
        dmajax = dmajax[PS_ind]
        dminax = dminax[PS_ind]
        # Flux threshold(s) extrapolated from fluxcut_freq to the SUMSS frequency
        if fluxcut_max is None:
            select_SUMSS_source_ind = fint >= fluxcut_min * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS
        else:
            select_SUMSS_source_ind = NP.logical_and(fint >= fluxcut_min * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS, fint <= fluxcut_max * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS)
        if NP.sum(select_SUMSS_source_ind) > 0:
            ra_deg_SUMSS = ra_deg_SUMSS[select_SUMSS_source_ind]
            dec_deg_SUMSS = dec_deg_SUMSS[select_SUMSS_source_ind]
            fint = fint[select_SUMSS_source_ind]
            fmajax = fmajax[select_SUMSS_source_ind]
            fminax = fminax[select_SUMSS_source_ind]
            fpa = fpa[select_SUMSS_source_ind]
            dmajax = dmajax[select_SUMSS_source_ind]
            dminax = dminax[select_SUMSS_source_ind]
            spindex_SUMSS = spindex_SUMSS[select_SUMSS_source_ind]
            # Keep only entries with positive fitted sizes
            valid_ind = NP.logical_and(fmajax > 0.0, fminax > 0.0)
            ra_deg_SUMSS = ra_deg_SUMSS[valid_ind]
            dec_deg_SUMSS = dec_deg_SUMSS[valid_ind]
            fint = fint[valid_ind]
            fmajax = fmajax[valid_ind]
            fminax = fminax[valid_ind]
            fpa = fpa[valid_ind]
            spindex_SUMSS = spindex_SUMSS[valid_ind]
            freq_catalog = freq_SUMSS*1e9 + NP.zeros(fint.size)
            catlabel = NP.repeat('SUMSS', fint.size)
            ra_deg = ra_deg_SUMSS + 0.0
            dec_deg = dec_deg_SUMSS
            spindex = spindex_SUMSS
            majax = fmajax/3.6e3  # arcsec -> degrees
            minax = fminax/3.6e3
            fluxes = fint + 0.0
        # --- NVSS part ---
        freq_NVSS = 1.4 # in GHz
        hdulist = fits.open(NVSS_file)
        ra_deg_NVSS = hdulist[1].data['RA(2000)']
        dec_deg_NVSS = hdulist[1].data['DEC(2000)']
        nvss_fpeak = hdulist[1].data['PEAK INT']
        nvss_majax = hdulist[1].data['MAJOR AX']
        nvss_minax = hdulist[1].data['MINOR AX']
        hdulist.close()
        if spindex_seed is None:
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        else:
            # Different seed from SUMSS so the two draws are independent
            NP.random.seed(2*spindex_seed)
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        # Avoid double-counting sky already covered by SUMSS
        not_in_SUMSS_ind = dec_deg_NVSS > -30.0
        # not_in_SUMSS_ind = NP.logical_and(dec_deg_NVSS > -30.0, dec_deg_NVSS <= min(90.0, latitude+90.0))
        if fluxcut_max is None:
            select_source_ind = nvss_fpeak >= fluxcut_min * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS
        else:
            select_source_ind = NP.logical_and(nvss_fpeak >= fluxcut_min * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS, nvss_fpeak <= fluxcut_max * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS)
        if NP.sum(select_source_ind) == 0:
            raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
        # select_source_ind = nvss_fpeak >= 10.0 * (freq_NVSS*1e9/freq)**(spindex_NVSS)
        # Point-source criterion based on deconvolved size vs 14 arcsec
        PS_ind = NP.sqrt(nvss_majax**2-(0.75/60.0)**2) < 14.0/3.6e3
        count_valid = NP.sum(NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind))
        if count_valid > 0:
            nvss_fpeak = nvss_fpeak[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
            if NP.sum(select_SUMSS_source_ind) > 0:
                # Append NVSS selections to the SUMSS arrays
                freq_catalog = NP.concatenate((freq_catalog, freq_NVSS*1e9 + NP.zeros(count_valid)))
                catlabel = NP.concatenate((catlabel, NP.repeat('NVSS',count_valid)))
                ra_deg = NP.concatenate((ra_deg, ra_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]))
                dec_deg = NP.concatenate((dec_deg, dec_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]))
                spindex = NP.concatenate((spindex, spindex_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]))
                majax = NP.concatenate((majax, nvss_majax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]))
                minax = NP.concatenate((minax, nvss_minax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]))
                fluxes = NP.concatenate((fluxes, nvss_fpeak))
            else:
                # No SUMSS sources survived; NVSS alone defines the catalog
                freq_catalog = freq_NVSS*1e9 + NP.zeros(count_valid)
                catlabel = NP.repeat('NVSS',count_valid)
                ra_deg = ra_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
                dec_deg = dec_deg_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
                spindex = spindex_NVSS[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
                majax = nvss_majax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
                minax = nvss_minax[NP.logical_and(NP.logical_and(not_in_SUMSS_ind, select_source_ind), PS_ind)]
                fluxes = nvss_fpeak
        elif NP.sum(select_SUMSS_source_ind) == 0:
            # Neither catalog contributed any sources
            raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
        spec_parms['flux-scale'] = fluxes
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = NP.zeros(ra_deg.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_SUMSS:
        # SUMSS-only point-source sky model, flux-thresholded and restricted
        # to entries with positive fitted sizes.
        freq_SUMSS = 0.843 # in GHz
        catalog = NP.loadtxt(SUMSS_file, usecols=(0,1,2,3,4,5,10,12,13,14,15,16))
        # RA given as h m s -> degrees
        ra_deg = 15.0 * (catalog[:,0] + catalog[:,1]/60.0 + catalog[:,2]/3.6e3)
        # Sign of declination must come from the text field since -0 deg
        # would lose its sign when parsed numerically
        dec_dd = NP.loadtxt(SUMSS_file, usecols=(3,), dtype="|S3")
        sgn_dec_str = NP.asarray([dec_dd[i][0] for i in range(dec_dd.size)])
        sgn_dec = 1.0*NP.ones(dec_dd.size)
        sgn_dec[sgn_dec_str == '-'] = -1.0
        dec_deg = sgn_dec * (NP.abs(catalog[:,3]) + catalog[:,4]/60.0 + catalog[:,5]/3.6e3)
        fmajax = catalog[:,7]
        fminax = catalog[:,8]
        fpa = catalog[:,9]
        dmajax = catalog[:,10]
        dminax = catalog[:,11]
        # Point sources: zero deconvolved major and minor axes
        PS_ind = NP.logical_and(dmajax == 0.0, dminax == 0.0)
        ra_deg = ra_deg[PS_ind]
        dec_deg = dec_deg[PS_ind]
        fint = catalog[PS_ind,6] * 1e-3  # mJy -> Jy
        # Random spectral indices around -0.83; optionally seeded
        if spindex_seed is None:
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        else:
            NP.random.seed(spindex_seed)
            spindex_SUMSS = -0.83 + spindex_rms * NP.random.randn(fint.size)
        fmajax = fmajax[PS_ind]
        fminax = fminax[PS_ind]
        fpa = fpa[PS_ind]
        dmajax = dmajax[PS_ind]
        dminax = dminax[PS_ind]
        # Flux threshold(s) extrapolated from fluxcut_freq to the SUMSS frequency
        if fluxcut_max is None:
            select_source_ind = fint >= fluxcut_min * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS
        else:
            select_source_ind = NP.logical_and(fint >= fluxcut_min * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS, fint <= fluxcut_max * (freq_SUMSS*1e9/fluxcut_freq)**spindex_SUMSS)
        if NP.sum(select_source_ind) == 0:
            raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
        ra_deg = ra_deg[select_source_ind]
        dec_deg = dec_deg[select_source_ind]
        fint = fint[select_source_ind]
        fmajax = fmajax[select_source_ind]
        fminax = fminax[select_source_ind]
        fpa = fpa[select_source_ind]
        dmajax = dmajax[select_source_ind]
        dminax = dminax[select_source_ind]
        spindex_SUMSS = spindex_SUMSS[select_source_ind]
        # Keep only entries with positive fitted sizes
        valid_ind = NP.logical_and(fmajax > 0.0, fminax > 0.0)
        ra_deg = ra_deg[valid_ind]
        dec_deg = dec_deg[valid_ind]
        fint = fint[valid_ind]
        fmajax = fmajax[valid_ind]
        fminax = fminax[valid_ind]
        fpa = fpa[valid_ind]
        spindex_SUMSS = spindex_SUMSS[valid_ind]
        freq_catalog = freq_SUMSS*1e9 + NP.zeros(fint.size)
        catlabel = NP.repeat('SUMSS', fint.size)
        spindex = spindex_SUMSS
        majax = fmajax/3.6e3  # arcsec -> degrees
        minax = fminax/3.6e3
        fluxes = fint + 0.0
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog
        spec_parms['flux-scale'] = fint
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = 1.0e-3 + NP.zeros(ra_deg.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_NVSS:
        # NVSS-only point-source sky model, flux-thresholded.
        freq_NVSS = 1.4 # in GHz
        hdulist = fits.open(NVSS_file)
        ra_deg_NVSS = hdulist[1].data['RA(2000)']
        dec_deg_NVSS = hdulist[1].data['DEC(2000)']
        nvss_fpeak = hdulist[1].data['PEAK INT']
        nvss_majax = hdulist[1].data['MAJOR AX']
        nvss_minax = hdulist[1].data['MINOR AX']
        hdulist.close()
        # Random spectral indices around -0.83; optionally seeded (2x the
        # base seed, matching the NVSS draw in the combined CSM branch)
        if spindex_seed is None:
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        else:
            NP.random.seed(2*spindex_seed)
            spindex_NVSS = -0.83 + spindex_rms * NP.random.randn(nvss_fpeak.size)
        # Flux threshold(s) extrapolated from fluxcut_freq to the NVSS frequency
        if fluxcut_max is None:
            select_source_ind = nvss_fpeak >= fluxcut_min * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS
        else:
            select_source_ind = NP.logical_and(nvss_fpeak >= fluxcut_min * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS, nvss_fpeak <= fluxcut_max * (freq_NVSS*1e9/fluxcut_freq)**spindex_NVSS)
        if NP.sum(select_source_ind) == 0:
            raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
        # select_source_ind = nvss_fpeak >= 10.0 * (freq_NVSS*1e9/freq)**(spindex_NVSS)
        # Point-source criterion based on deconvolved size vs 14 arcsec
        PS_ind = NP.sqrt(nvss_majax**2-(0.75/60.0)**2) < 14.0/3.6e3
        count_valid = NP.sum(NP.logical_and(select_source_ind, PS_ind))
        if count_valid > 0:
            nvss_fpeak = nvss_fpeak[NP.logical_and(select_source_ind, PS_ind)]
            freq_catalog = freq_NVSS*1e9 + NP.zeros(count_valid)
            catlabel = NP.repeat('NVSS',count_valid)
            ra_deg = ra_deg_NVSS[NP.logical_and(select_source_ind, PS_ind)]
            dec_deg = dec_deg_NVSS[NP.logical_and(select_source_ind, PS_ind)]
            spindex = spindex_NVSS[NP.logical_and(select_source_ind, PS_ind)]
            majax = nvss_majax[NP.logical_and(select_source_ind, PS_ind)]
            minax = nvss_minax[NP.logical_and(select_source_ind, PS_ind)]
            fluxes = nvss_fpeak
        else:
            raise IndexError('No sources in the catalog found satisfying flux threshold and point source criteria')
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog
        spec_parms['flux-scale'] = fluxes
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = NP.zeros(ra_deg.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_MSS:
        # MSS catalog not implemented; selecting it silently produces no sky model
        pass
elif use_GLEAM:
reffreq = parms['skyparm']['custom_reffreq']
hdulist = fits.open(GLEAM_file)
colnames = [col.name for col in hdulist[1].columns if ('int_flux_' in col.name and 'err' not in col.name and 'fit' not in col.name and 'wide' not in col.name)]
colfreqs = NP.char.lstrip(colnames, 'int_flux_').astype(NP.float)
nearest_freq_ind = NP.argmin(NP.abs(colfreqs - reffreq*1e3))
freq_GLEAM = colfreqs[nearest_freq_ind] / 1e3 # in GHz
ra_deg_GLEAM = hdulist[1].data['RAJ2000']
dec_deg_GLEAM = hdulist[1].data['DEJ2000']
gleam_fint = hdulist[1].data[colnames[nearest_freq_ind]]
gleam_majax = 2 * hdulist[1].data['a_wide'] # Factor 2 to convert from semi-major axis to FWHM
gleam_minax = 2 * hdulist[1].data['b_wide'] # Factor 2 to convert from semi-minor axis to FWHM
gleam_pa = hdulist[1].data['pa_wide']
gleam_psf_majax = 2 * hdulist[1].data['psf_a_wide'] # Factor 2 to convert from semi-major axis to FWHM
gleam_psf_minax = 2 * hdulist[1].data['psf_b_wide'] # Factor 2 to convert from semi-minor axis to FWHM
spindex_GLEAM = hdulist[1].data['alpha']
hdulist.close()
nanind = NP.where(NP.isnan(spindex_GLEAM))[0]
if nanind.size > 0:
if spindex_seed is not None:
NP.random.seed(2*spindex_seed)
spindex_GLEAM = spindex + spindex_rms * NP.random.randn(gleam_fint.size)
if fluxcut_max is None:
select_source_ind = gleam_fint >= fluxcut_min * (freq_GLEAM*1e9/fluxcut_freq)**spindex_GLEAM
else:
select_source_ind = NP.logical_and(gleam_fint >= fluxcut_min * (freq_GLEAM*1e9/fluxcut_freq)**spindex_GLEAM, gleam_fint <= fluxcut_max * (freq_GLEAM*1e9/fluxcut_freq)**spindex_GLEAM)
if NP.sum(select_source_ind) == 0:
raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
# bright_source_ind = gleam_fint >= 10.0 * (freq_GLEAM*1e9/freq)**spindex_GLEAM
PS_ind = NP.ones(gleam_fint.size, dtype=NP.bool)
# PS_ind = gleam_majax * gleam_minax <= 1.1 * gleam_psf_majax * gleam_psf_minax
valid_ind = NP.logical_and(select_source_ind, PS_ind)
ra_deg_GLEAM = ra_deg_GLEAM[valid_ind]
dec_deg_GLEAM = dec_deg_GLEAM[valid_ind]
gleam_fint = gleam_fint[valid_ind]
spindex_GLEAM = spindex_GLEAM[valid_ind]
gleam_majax = gleam_majax[valid_ind]
gleam_minax = gleam_minax[valid_ind]
gleam_pa = gleam_pa[valid_ind]
fluxes = gleam_fint + 0.0
catlabel = NP.repeat('GLEAM', gleam_fint.size)
ra_deg = ra_deg_GLEAM + 0.0
dec_deg = dec_deg_GLEAM + 0.0
freq_catalog = freq_GLEAM*1e9 + NP.zeros(gleam_fint.size)
majax = gleam_majax / 3.6e3
minax = gleam_minax / 3.6e3
spindex = spindex_GLEAM + 0.0
spec_type = 'func'
spec_parms = {}
spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
spec_parms['power-law-index'] = spindex
spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
spec_parms['flux-scale'] = fluxes
spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
spec_parms['freq-width'] = NP.zeros(ra_deg.size)
flux_unit = 'Jy'
skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fluxes.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
    elif use_skymod:
        # Sky model restored directly from a previously saved SkyModel file
        skymod = SM.SkyModel(init_parms=None, init_file=skymod_file)
    elif use_custom:
        # User-supplied ASCII catalog with columns RA, DEC, F_INT, SPINDEX,
        # MAJAX, MINAX, PA; flux-thresholded like the survey branches.
        catdata = ascii.read(custom_catalog_file, comment='#', header_start=0, data_start=1)
        ra_deg = catdata['RA'].data
        dec_deg = catdata['DEC'].data
        fint = catdata['F_INT'].data
        spindex = catdata['SPINDEX'].data
        majax = catdata['MAJAX'].data
        minax = catdata['MINAX'].data
        pa = catdata['PA'].data
        freq_custom = parms['skyparm']['custom_reffreq']  # GHz
        freq_catalog = freq_custom * 1e9 + NP.zeros(fint.size)
        catlabel = NP.repeat('custom', fint.size)
        # Flux threshold(s) extrapolated from fluxcut_freq to the catalog frequency
        if fluxcut_max is None:
            select_source_ind = fint >= fluxcut_min * (freq_custom*1e9/fluxcut_freq)**spindex
        else:
            select_source_ind = NP.logical_and(fint >= fluxcut_min * (freq_custom*1e9/fluxcut_freq)**spindex, fint <= fluxcut_max * (freq_custom*1e9/fluxcut_freq)**spindex)
        if NP.sum(select_source_ind) == 0:
            raise IndexError('No sources in the catalog found satisfying flux threshold criteria')
        ra_deg = ra_deg[select_source_ind]
        dec_deg = dec_deg[select_source_ind]
        fint = fint[select_source_ind]
        spindex = spindex[select_source_ind]
        majax = majax[select_source_ind]
        minax = minax[select_source_ind]
        pa = pa[select_source_ind]
        freq_catalog = freq_catalog[select_source_ind]
        catlabel = catlabel[select_source_ind]
        spec_type = 'func'
        spec_parms = {}
        spec_parms['name'] = NP.repeat('power-law', ra_deg.size)
        spec_parms['power-law-index'] = spindex
        spec_parms['freq-ref'] = freq_catalog + NP.zeros(ra_deg.size)
        spec_parms['flux-scale'] = fint
        spec_parms['flux-offset'] = NP.zeros(ra_deg.size)
        spec_parms['freq-width'] = NP.zeros(ra_deg.size)
        flux_unit = 'Jy'
        skymod_init_parms = {'name': catlabel, 'frequency': chans*1e9, 'location': NP.hstack((ra_deg.reshape(-1,1), dec_deg.reshape(-1,1))), 'spec_type': spec_type, 'spec_parms': spec_parms, 'src_shape': NP.hstack((majax.reshape(-1,1),minax.reshape(-1,1),NP.zeros(fint.size).reshape(-1,1))), 'src_shape_units': ['degree','degree','degree']}
        skymod = SM.SkyModel(init_parms=skymod_init_parms, init_file=None)
# Precess Sky model to observing epoch
skycoords = SkyCoord(ra=skymod.location[:,0]*U.deg, dec=skymod.location[:,1]*U.deg, frame='fk5', equinox=Time(skymod.epoch, format='jyear_str', scale='utc')).transform_to(FK5(equinox=tobjs[0]))
skymod.location = NP.hstack((skycoords.ra.deg.reshape(-1,1), skycoords.dec.deg.reshape(-1,1)))
skymod.epoch = 'J{0:.12f}'.format(skycoords.equinox.jyear)
try:
os.makedirs(rootdir+project_dir+simid+skymod_dir, 0755)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(rootdir+project_dir+simid+skymod_dir):
pass
else:
raise
skymod_extfile = rootdir+project_dir+simid+skymod_dir+'skymodel'
skymod.save(skymod_extfile, fileformat='hdf5', extspec_action='unload')
else:
    # Non-root MPI ranks: placeholders overwritten by the broadcasts below
    skymod_extfile = None
    skycoords = None
# Share the saved sky-model path and precessed coordinates with all ranks
skymod_extfile = comm.bcast(skymod_extfile, root=0)
skycoords = comm.bcast(skycoords, root=0)
if rank != 0:
    # Workers reload the model saved by rank 0 (spectrum deferred)
    skymod = SM.SkyModel(init_parms=None, init_file=skymod_extfile+'.hdf5', load_spectrum=False)
# Set up chunking for parallelization
if rank == 0:
    # Per-accumulation lists of sky-model indices within roi_radius of the
    # corresponding pointing center
    m1, m2, d12 = GEOM.spherematch(pointings_radec[:,0], pointings_radec[:,1], skycoords.ra.deg, skycoords.dec.deg, matchrad=roi_radius, nnearest=0, maxmatches=0)
    m1 = NP.asarray(m1)
    m2 = NP.asarray(m2)
    d12 = NP.asarray(d12)
    m2_lol = [m2[NP.where(m1==j)[0]] for j in range(n_acc)]
    # Worst-case number of sources in any accumulation, for memory sizing
    nsrc_used = max([listitem.size for listitem in m2_lol])
else:
    m2_lol = None
    nsrc_used = None
m2_lol = comm.bcast(m2_lol, root=0)
nsrc_used = comm.bcast(nsrc_used, root=0)
nsrc = skymod.location.shape[0]
npol = 1
nbl = total_baselines
# Estimate the number of complex samples in the DFT matrix (x3 when
# baseline gradients are also computed)
if gradient_mode is not None:
    if gradient_mode.lower() == 'baseline':
        size_DFT_matrix = 1.0 * max([nsrc_used, 1]) * nchan * nbl * npol * 3
    else:
        raise ValueError('Specified gradient_mode is currently not supported')
else:
    size_DFT_matrix = 1.0 * max([nsrc_used, 1]) * nchan * nbl * npol
if memsave: # 64 bits per complex sample (single precision)
    nbytes_per_complex_sample = 8.0
else: # 128 bits per complex sample (double precision)
    nbytes_per_complex_sample = 16.0
memory_DFT_matrix = size_DFT_matrix * nbytes_per_complex_sample
memory_DFT_matrix_per_process = memory_DFT_matrix / nproc
memory_use_per_process = float(memuse) / nproc
# Number of chunks needed so one chunk's DFT matrix fits in usable memory
n_chunks_per_process = NP.ceil(memory_DFT_matrix/memuse)
n_chunks = NP.ceil(nproc * n_chunks_per_process)
# Partition the work along one axis (sources, frequency, or baselines) into
# chunks small enough for one chunk's DFT matrix to fit in usable memory.
if mpi_on_src:
    # Parallelize over sky sources.
    # NOTE(review): the chunk size here is derived from nchan, not nsrc --
    # this looks like a copy-paste from the frequency branch; confirm intent
    # before changing, since it only affects chunk granularity, not coverage.
    src_chunk_size = int(NP.floor(1.0 * nchan / n_chunks))
    if src_chunk_size == 0:
        raise MemoryError('Too many chunks to fit in usable memory. Try changing number of parallel processes and amount of usable memory. Usually reducing the former or increasing the latter should help avoid this problem.')
    src_bin_indices = range(0, nsrc, src_chunk_size)
    src_chunk = range(len(src_bin_indices))
    n_src_chunks = len(src_bin_indices)
elif mpi_on_freq:
    # Parallelize over frequency channels
    frequency_chunk_size = int(NP.floor(1.0 * nchan / n_chunks))
    if frequency_chunk_size <= 1:
        raise MemoryError('Too many chunks to fit in usable memory. Try changing number of parallel processes and amount of usable memory. Usually reducing the former or increasing the latter should help avoid this problem.')
    # list(...) keeps Python 3 compatibility: the last element is mutated below
    frequency_bin_indices = list(range(0, nchan, frequency_chunk_size))
    if frequency_bin_indices[-1] == nchan-1:
        # Avoid a trailing chunk of a single channel by pulling the last
        # chunk boundary one channel earlier
        if frequency_chunk_size > 2:
            frequency_bin_indices[-1] -= 1
        else:
            warnings.warn('Chunking has run into a weird indexing problem. Rechunking is necessaray. Try changing number of parallel processes and amount of usable memory. Usually reducing either one of these should help avoid this problem.')
            PDB.set_trace()
    freq_chunk = range(len(frequency_bin_indices))
    n_freq_chunks = len(frequency_bin_indices)
    # Distribute chunks as evenly as possible over the ranks (// keeps
    # integer floor division identical under Python 2 and 3)
    n_freq_chunk_per_rank = NP.zeros(nproc, dtype=int) + len(freq_chunk)//nproc
    if len(freq_chunk) % nproc > 0:
        n_freq_chunk_per_rank[:len(freq_chunk)%nproc] += 1
    n_freq_chunk_per_rank = n_freq_chunk_per_rank[::-1] # Reverse for more equal distribution of chunk sizes over processes
    cumm_freq_chunks = NP.concatenate(([0], NP.cumsum(n_freq_chunk_per_rank)))
else:
    # Default: parallelize over baselines
    baseline_chunk_size = int(NP.floor(1.0 * nbl / n_chunks))
    if baseline_chunk_size == 0:
        raise MemoryError('Too many chunks to fit in usable given memory. Try changing number of parallel processes and amount of usable memory. Usually reducing the former or increasing the latter should help avoid this problem.')
    # list(...) keeps Python 3 compatibility: the last element is mutated below
    baseline_bin_indices = list(range(0, nbl, baseline_chunk_size))
    # BUGFIX: this comparison previously tested against nchan-1 (copied from
    # the frequency branch); the baseline axis has nbl bins, so the
    # single-baseline trailing-chunk condition must use nbl-1.
    if baseline_bin_indices[-1] == nbl-1:
        if baseline_chunk_size > 2:
            baseline_bin_indices[-1] -= 1
        else:
            warnings.warn('Chunking has run into a weird indexing problem. Rechunking is necessaray. Try changing number of parallel processes and amount of usable memory. Usually reducing either one of these should help avoind this problem.')
            PDB.set_trace()
    bl_chunk = range(len(baseline_bin_indices))
    n_bl_chunks = len(baseline_bin_indices)
    n_bl_chunk_per_rank = NP.zeros(nproc, dtype=int) + len(bl_chunk)//nproc
    if len(bl_chunk) % nproc > 0:
        n_bl_chunk_per_rank[:len(bl_chunk)%nproc] += 1
    n_bl_chunk_per_rank = n_bl_chunk_per_rank[::-1] # Reverse for more equal distribution of chunk sizes over processes
    cumm_bl_chunks = NP.concatenate(([0], NP.cumsum(n_bl_chunk_per_rank)))
if rank == 0:
    # Record the chosen chunking scheme for this simulation run.
    # NOTE(review): if neither mpi_on_freq nor mpi_on_bl is set (e.g. the
    # mpi_on_src path), chunkinfo is never assigned and the access below
    # raises NameError -- confirm whether mpi_on_src runs reach this point.
    if mpi_on_freq:
        chunkinfo = {'mpi_axis': 'frequency', 'naxis': nchan, 'nchunks': n_freq_chunks, 'chunk_size': frequency_chunk_size, 'nchunk_per_proc': float(NP.mean(n_freq_chunk_per_rank))}
    if mpi_on_bl:
        chunkinfo = {'mpi_axis': 'baseline', 'naxis': nbl, 'nchunks': n_bl_chunks, 'chunk_size': baseline_chunk_size, 'nchunk_per_proc': float(NP.mean(n_bl_chunk_per_rank))}
    chunkinfo['nproc'] = nproc
    chunkfile = rootdir+project_dir+simid+meta_dir+'chunkinfo.yaml'
    with open(chunkfile, 'w') as cfile:
        yaml.dump(chunkinfo, cfile, default_flow_style=False)
## Set up the observing run
if rank == 0:
pbinfo = None
process_complete = False
if mpi_on_src: # MPI based on source multiplexing
for i in range(len(bl_chunk)):
print('Working on baseline chunk # {0:0d} ...'.format(bl_chunk[i]))
ia = RI.InterferometerArray(labels[baseline_bin_indices[bl_chunk[i]]:min(baseline_bin_indices[bl_chunk[i]]+baseline_chunk_size,total_baselines)], bl[baseline_bin_indices[bl_chunk[i]]:min(baseline_bin_indices[bl_chunk[i]]+baseline_chunk_size,total_baselines),:], chans, telescope=telescope, eff_Q=eff_Q, latitude=latitude, longitude=longitude, altitude=altitude, A_eff=A_eff, layout=layout_info, freq_scale='GHz', pointing_coords='hadec', gaininfo=gaininfo, blgroupinfo={'groups': blgroups, 'reversemap': bl_reversemap})
if store_prev_sky:
store_prev_skymodel_file=rootdir+project_dir+simid+roi_dir+'_{0:0d}.hdf5'.format(i)
else:
store_prev_skymodel_file = None
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
src_altaz = skycoords[m2_lol[j]].transform_to(AltAz(obstime=tobjs[j], location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m)))
src_altaz_current = NP.hstack((src_altaz.alt.deg.reshape(-1,1), src_altaz.az.deg.reshape(-1,1)))
roi_ind = NP.where(src_altaz_current[:,0] >= 0.0)[0]
n_src_per_rank = NP.zeros(nproc, dtype=int) + roi_ind.size/nproc
if roi_ind.size % nproc > 0:
n_src_per_rank[:roi_ind.size % nproc] += 1
cumm_src_count = NP.concatenate(([0], NP.cumsum(n_src_per_rank)))
pbinfo = None
if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
pbinfo = {}
pbinfo['delays'] = delays[j,:]
if (telescope_id.lower() == 'mwa') or (phased_array):
pbinfo['delayerr'] = phasedarray_delayerr
pbinfo['gainerr'] = phasedarray_gainerr
pbinfo['nrand'] = nrand
ts = time.time()
if j == 0:
ts0 = ts
ia.observe(tobjs[i], Tsysinfo, bpass, pointings_hadec[j,:], skymod.subset(m2_lol[j][roi_ind[cumm_src_count[rank]:cumm_src_count[rank+1]]].tolist(), axis='position'), t_acc[j], pb_info=pbinfo, brightness_units=flux_unit, bpcorrect=noise_bpcorr, roi_radius=roi_radius, roi_center=None, gradient_mode=gradient_mode, memsave=memsave, vmemavail=pvmemavail, store_prev_skymodel_file=store_prev_skymodel_file)
te = time.time()
progress.update(j+1)
progress.finish()
if rank == 0:
for k in range(1,nproc):
print('receiving from process {0}'.format(k))
ia.skyvis_freq = ia.skyvis_freq + comm.recv(source=k)
te0 = time.time()
print('Time on process 0 was {0:.1f} seconds'.format(te0-ts0))
ia.t_obs = t_obs
ia.delay_transform(oversampling_factor-1.0, freq_wts=window)
outfile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(i)
ia.save(outfile, fmt=savefmt, verbose=True, tabtype='BinTableHDU', npz=False, overwrite=True, uvfits_parms=None)
else:
comm.send(ia.skyvis_freq, dest=0)
elif mpi_on_freq: # MPI based on frequency multiplexing
for k in range(n_sky_sectors):
if n_sky_sectors == 1:
sky_sector_str = '_all_sky_'
else:
sky_sector_str = '_sky_sector_{0:0d}_'.format(k)
if rank == 0: # Compute ROI parameters for only one process and broadcast to all
roi = RI.ROI_parameters()
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Snapshots '.format(n_acc), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
if m2_lol[j].size > 0:
src_altaz = skycoords[m2_lol[j]].transform_to(AltAz(obstime=tobjs[j], location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m)))
src_altaz_current = NP.hstack((src_altaz.alt.deg.reshape(-1,1), src_altaz.az.deg.reshape(-1,1)))
hemisphere_current = src_altaz_current[:,0] >= 0.0
src_az_current = NP.copy(src_altaz_current[:,1])
src_az_current[src_az_current > 360.0 - 0.5*180.0/n_sky_sectors] -= 360.0
roi_ind = NP.logical_or(NP.logical_and(src_az_current >= -0.5*180.0/n_sky_sectors + k*180.0/n_sky_sectors, src_az_current < -0.5*180.0/n_sky_sectors + (k+1)*180.0/n_sky_sectors), NP.logical_and(src_az_current >= 180.0 - 0.5*180.0/n_sky_sectors + k*180.0/n_sky_sectors, src_az_current < 180.0 - 0.5*180.0/n_sky_sectors + (k+1)*180.0/n_sky_sectors))
roi_subset = NP.where(NP.logical_and(hemisphere_current, roi_ind))[0].tolist()
# src_dircos_current_subset = GEOM.altaz2dircos(src_altaz_current[roi_subset,:], units='degrees')
pbinfo = {}
if (telescope_id.lower() == 'mwa') or (phased_array) or (telescope_id.lower() == 'mwa_tools'):
if pointing_file is not None:
pbinfo['delays'] = delays[j,:]
else:
pbinfo['pointing_center'] = pointings_altaz[j,:]
pbinfo['pointing_coords'] = 'altaz'
if (telescope_id.lower() == 'mwa') or (phased_array):
pbinfo['delayerr'] = phasedarray_delayerr
pbinfo['gainerr'] = phasedarray_gainerr
pbinfo['nrand'] = nrand
else:
pbinfo['pointing_center'] = pointings_altaz[j,:]
pbinfo['pointing_coords'] = 'altaz'
roiinfo = {}
roiinfo['ind'] = NP.asarray(m2_lol[j][roi_subset])
if use_external_beam:
theta_phi = NP.hstack((NP.pi/2-NP.radians(src_altaz_current[roi_subset,0]).reshape(-1,1), NP.radians(src_altaz_current[roi_subset,1]).reshape(-1,1)))
if beam_chromaticity:
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(external_beam), theta_phi=theta_phi, inloc_axis=external_beam_freqs, outloc_axis=chans*1e9, axis=1, kind=pbeam_spec_interp_method, assume_sorted=True)
else:
nearest_freq_ind = NP.argmin(NP.abs(external_beam_freqs - select_beam_freq))
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(NP.repeat(external_beam[:,nearest_freq_ind].reshape(-1,1), chans.size, axis=1)), theta_phi=theta_phi, inloc_axis=chans*1e9, outloc_axis=chans*1e9, axis=1, assume_sorted=True)
interp_logbeam_max = NP.nanmax(interp_logbeam, axis=0)
interp_logbeam_max[interp_logbeam_max <= 0.0] = 0.0
interp_logbeam_max = interp_logbeam_max.reshape(1,-1)
interp_logbeam = interp_logbeam - interp_logbeam_max
roiinfo['pbeam'] = 10**interp_logbeam
else:
roiinfo['pbeam'] = None
roiinfo['pbeam_chromaticity'] = beam_chromaticity
roiinfo['pbeam_reffreq'] = select_beam_freq
roiinfo['radius'] = roi_radius
# roiinfo_center_altaz = AltAz(alt=NP.asarray([90.0])*U.deg, az=NP.asarray([270.0])*U.deg, obstime=tobjs[j], location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m))
roiinfo_center_hadec = GEOM.altaz2hadec(NP.asarray([90.0, 270.0]).reshape(1,-1), latitude, units='degrees').ravel() # Seems to be a hard-coding of ROI center to zenith, but that's only to determine the sources in the upper hemisphere
roiinfo_center_radec = [lst[j]-roiinfo_center_hadec[0], roiinfo_center_hadec[1]]
# roiinfo_center_radec = ET.altaz2radec(NP.asarray([90.0, 270.0]).reshape(1,-1), EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m), obstime=tobjs[j], epoch_RA=tobjs[j])
roiinfo['center'] = NP.asarray(roiinfo_center_radec).reshape(1,-1)
roiinfo['center_coords'] = 'radec'
roi.append_settings(skymod, chans, pinfo=pbinfo, lst=lst[j], time_jd=tobjs[j].jd, roi_info=roiinfo, telescope=telescope, freq_scale='GHz')
else: # Empty sky
roi.append_settings(None, chans, telescope=telescope, freq_scale='GHz')
progress.update(j+1)
progress.finish()
roifile = rootdir+project_dir+simid+roi_dir+'roiinfo'
roi.save(roifile, tabtype='BinTableHDU', overwrite=True, verbose=True)
del roi # to save memory if primary beam arrays or n_acc are large
else:
roi = None
pbinfo = None
roifile = None
roifile = comm.bcast(roifile, root=0) # Broadcast saved RoI filename
pbinfo = comm.bcast(pbinfo, root=0) # Broadcast PB synthesis info
frequency_bin_indices_bounds = frequency_bin_indices + [nchan]
for i in range(cumm_freq_chunks[rank], cumm_freq_chunks[rank+1]):
print('Process {0:0d} working on frequency chunk # {1:0d} ... ({2:0d}/{3:0d})'.format(rank, freq_chunk[i], i-cumm_freq_chunks[rank]+1, n_freq_chunk_per_rank[rank]))
chans_chunk_indices = NP.arange(frequency_bin_indices_bounds[i], frequency_bin_indices_bounds[i+1])
chans_chunk = NP.asarray(chans[chans_chunk_indices]).reshape(-1)
nchan_chunk = chans_chunk.size
f0_chunk = NP.mean(chans_chunk)
bw_chunk_str = '{0:0d}x{1:.1f}_kHz'.format(nchan_chunk, freq_resolution/1e3)
outfile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(i)
ia = RI.InterferometerArray(labels, bl, chans_chunk, telescope=telescope, eff_Q=eff_Q, latitude=latitude, longitude=longitude, altitude=altitude, A_eff=A_eff, layout=layout_info, freq_scale='GHz', pointing_coords='hadec', gaininfo=gaininfo, blgroupinfo={'groups': blgroups, 'reversemap': bl_reversemap})
if store_prev_sky:
store_prev_skymodel_file=rootdir+project_dir+simid+roi_dir+'_{0:0d}.hdf5'.format(i)
else:
store_prev_skymodel_file = None
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Snapshots '.format(n_acc), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
if m2_lol[j].size > 0:
roi_ind_snap = fits.getdata(roifile+'.fits', extname='IND_{0:0d}'.format(j), memmap=False)
roi_pbeam_snap = fits.getdata(roifile+'.fits', extname='PB_{0:0d}'.format(j), memmap=False)
roi_pbeam_snap = roi_pbeam_snap[:,chans_chunk_indices]
else:
roi_ind_snap = NP.asarray([])
roi_pbeam_snap = NP.asarray([])
roi_snap_info = {'ind': roi_ind_snap, 'pbeam': roi_pbeam_snap}
ts = time.time()
if j == 0:
ts0 = ts
ia.observe(tobjs[j], Tsysinfo, bpass[chans_chunk_indices], pointings_hadec[j,:], skymod, t_acc[j], pb_info=pbinfo, brightness_units=flux_unit, bpcorrect=noise_bpcorr[chans_chunk_indices], roi_info=roi_snap_info, roi_radius=roi_radius, roi_center=None, gradient_mode=gradient_mode, memsave=memsave, vmemavail=pvmemavail, store_prev_skymodel_file=store_prev_skymodel_file)
te = time.time()
del roi_ind_snap
del roi_pbeam_snap
progress.update(j+1)
numbytes = []
variables = []
var = None
obj = None
for var,obj in locals().iteritems():
if isinstance(obj, NP.ndarray):
variables += [var]
numbytes += [obj.nbytes]
nGB = NP.asarray(numbytes) / 2.0**30
totalmemGB = NP.sum(nGB)
progress.finish()
te0 = time.time()
print('Process {0:0d} took {1:.1f} minutes to complete frequency chunk # {2:0d} ({3:0d}/{4:0d})'.format(rank, (te0-ts0)/60.0, freq_chunk[i], i-cumm_freq_chunks[rank]+1, n_freq_chunk_per_rank[rank]))
if os.path.exists(store_prev_skymodel_file):
os.remove(store_prev_skymodel_file) # Remove the temporary skymodel file
ia.project_baselines(ref_point={'location': ia.pointing_center, 'coords': ia.pointing_coords})
ia.save(outfile, fmt=savefmt, verbose=True, tabtype='BinTableHDU', npz=False, overwrite=True, uvfits_parms=None)
else: # MPI based on baseline multiplexing
if mpi_async: # does not impose equal volume per process
print('Processing next baseline chunk asynchronously...')
processed_chunks = []
process_sequence = []
counter = my_MPI.Counter(comm)
count = -1
ptb = time.time()
ptb_str = str(DT.datetime.now())
while (count+1 < len(bl_chunk)):
count = counter.next()
if count < len(bl_chunk):
processed_chunks.append(count)
process_sequence.append(rank)
print('Process {0:0d} working on baseline chunk # {1:0d} ...'.format(rank, count))
outfile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(count)
ia = RI.InterferometerArray(labels[baseline_bin_indices[count]:min(baseline_bin_indices[count]+baseline_chunk_size,total_baselines)], bl[baseline_bin_indices[count]:min(baseline_bin_indices[count]+baseline_chunk_size,total_baselines),:], chans, telescope=telescope, eff_Q=eff_Q, latitude=latitude, longitude=longitude, altitude=altitude, A_eff=A_eff, layout=layout_info, freq_scale='GHz', pointing_coords='hadec', gaininfo=gaininfo, blgroupinfo={'groups': blgroups, 'reversemap': bl_reversemap})
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
pbinfo = None
if (telescope_id.lower() == 'mwa') or (telescope_id.lower() == 'mwa_tools') or (phased_array):
pbinfo = {}
pbinfo['delays'] = delays[j,:]
if (telescope_id.lower() == 'mwa') or (phased_array):
pbinfo['delayerr'] = phasedarray_delayerr
pbinfo['gainerr'] = phasedarray_gainerr
pbinfo['nrand'] = nrand
ts = time.time()
if j == 0:
ts0 = ts
ia.observe(tobjs[j], Tsysinfo, bpass, pointings_hadec[j,:], skymod, t_acc[j], pb_info=pbinfo, brightness_units=flux_unit, bpcorrect=noise_bpcorr, roi_radius=roi_radius, roi_center=None, gradient_mode=gradient_mode, memsave=memsave, vmemavail=pvmemavail)
te = time.time()
progress.update(j+1)
progress.finish()
te0 = time.time()
print('Process {0:0d} took {1:.1f} minutes to complete baseline chunk # {2:0d}'.format(rank, (te0-ts0)/60.0, count))
ia.t_obs = t_obs
ia.delay_transform(oversampling_factor-1.0, freq_wts=window)
ia.save(outfile, fmt=savefmt, verbose=True, tabtype='BinTableHDU', npz=False, overwrite=True, uvfits_parms=None)
counter.free()
pte = time.time()
pte_str = str(DT.datetime.now())
pt = pte - ptb
processed_chunks = comm.allreduce(processed_chunks)
process_sequence = comm.allreduce(process_sequence)
else: # impose equal volume per process
ptb_str = str(DT.datetime.now())
for k in range(n_sky_sectors):
if n_sky_sectors == 1:
sky_sector_str = '_all_sky_'
else:
sky_sector_str = '_sky_sector_{0:0d}_'.format(k)
if rank == 0: # Compute ROI parameters for only one process and broadcast to all
roi = RI.ROI_parameters()
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Snapshots '.format(n_acc), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
src_altaz = skycoords[m2_lol[j]].transform_to(AltAz(obstime=tobjs[j], location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m)))
src_altaz_current = NP.hstack((src_altaz.alt.deg.reshape(-1,1), src_altaz.az.deg.reshape(-1,1)))
hemisphere_current = src_altaz_current[:,0] >= 0.0
# hemisphere_src_altaz_current = src_altaz_current[hemisphere_current,:]
src_az_current = NP.copy(src_altaz_current[:,1])
src_az_current[src_az_current > 360.0 - 0.5*180.0/n_sky_sectors] -= 360.0
roi_ind = NP.logical_or(NP.logical_and(src_az_current >= -0.5*180.0/n_sky_sectors + k*180.0/n_sky_sectors, src_az_current < -0.5*180.0/n_sky_sectors + (k+1)*180.0/n_sky_sectors), NP.logical_and(src_az_current >= 180.0 - 0.5*180.0/n_sky_sectors + k*180.0/n_sky_sectors, src_az_current < 180.0 - 0.5*180.0/n_sky_sectors + (k+1)*180.0/n_sky_sectors))
roi_subset = NP.where(NP.logical_and(hemisphere_current, roi_ind))[0].tolist()
# src_dircos_current_subset = GEOM.altaz2dircos(src_altaz_current[roi_subset,:], units='degrees')
pbinfo = {}
if (telescope_id.lower() == 'mwa') or (phased_array) or (telescope_id.lower() == 'mwa_tools'):
if pointing_file is not None:
pbinfo['delays'] = delays[j,:]
else:
pbinfo['pointing_center'] = pointings_altaz[j,:]
pbinfo['pointing_coords'] = 'altaz'
if (telescope_id.lower() == 'mwa') or (phased_array):
# pbinfo['element_locs'] = element_locs
pbinfo['delayerr'] = phasedarray_delayerr
pbinfo['gainerr'] = phasedarray_gainerr
pbinfo['nrand'] = nrand
else:
pbinfo['pointing_center'] = pointings_altaz[j,:]
pbinfo['pointing_coords'] = 'altaz'
roiinfo = {}
roiinfo['ind'] = NP.asarray(m2_lol[j][roi_subset])
if use_external_beam:
theta_phi = NP.hstack((NP.pi/2-NP.radians(src_altaz_current[roi_subset,0]).reshape(-1,1), NP.radians(src_altaz_current[roi_subset,1]).reshape(-1,1)))
if beam_chromaticity:
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(external_beam), theta_phi=theta_phi, inloc_axis=external_beam_freqs, outloc_axis=chans*1e9, axis=1, kind=pbeam_spec_interp_method, assume_sorted=True)
else:
nearest_freq_ind = NP.argmin(NP.abs(external_beam_freqs - select_beam_freq))
interp_logbeam = OPS.healpix_interp_along_axis(NP.log10(NP.repeat(external_beam[:,nearest_freq_ind].reshape(-1,1), chans.size, axis=1)), theta_phi=theta_phi, inloc_axis=chans*1e9, outloc_axis=chans*1e9, axis=1, assume_sorted=True)
interp_logbeam_max = NP.nanmax(interp_logbeam, axis=0)
interp_logbeam_max[interp_logbeam_max <= 0.0] = 0.0
interp_logbeam_max = interp_logbeam_max.reshape(1,-1)
interp_logbeam = interp_logbeam - interp_logbeam_max
roiinfo['pbeam'] = 10**interp_logbeam
else:
roiinfo['pbeam'] = None
roiinfo['pbeam_chromaticity'] = beam_chromaticity
roiinfo['pbeam_reffreq'] = select_beam_freq
roiinfo['radius'] = roi_radius
# roiinfo_center_altaz = AltAz(alt=NP.asarray([90.0])*U.deg, az=NP.asarray([270.0])*U.deg, obstime=tobjs[j], location=EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m))
roiinfo_center_hadec = GEOM.altaz2hadec(NP.asarray([90.0, 270.0]).reshape(1,-1), latitude, units='degrees').ravel() # Seems to be a hard-coding of ROI center to zenith, but that's only to determine the sources in the upper hemisphere
roiinfo_center_radec = [lst[j]-roiinfo_center_hadec[0], roiinfo_center_hadec[1]]
# roiinfo_center_radec = ET.altaz2radec(NP.asarray([90.0, 270.0]).reshape(1,-1), EarthLocation(lon=telescope['longitude']*U.deg, lat=telescope['latitude']*U.deg, height=telescope['altitude']*U.m), obstime=tobjs[j], epoch_RA=tobjs[j])
roiinfo['center'] = NP.asarray(roiinfo_center_radec).reshape(1,-1)
roiinfo['center_coords'] = 'radec'
roi.append_settings(skymod, chans, pinfo=pbinfo, lst=lst[j], roi_info=roiinfo, telescope=telescope, freq_scale='GHz')
progress.update(j+1)
progress.finish()
roifile = rootdir+project_dir+simid+roi_dir+'roiinfo'
roi.save(roifile, tabtype='BinTableHDU', overwrite=True, verbose=True)
del roi # to save memory if primary beam arrays or n_acc are large
else:
roi = None
pbinfo = None
roifile = None
roifile = comm.bcast(roifile, root=0) # Broadcast saved RoI filename
pbinfo = comm.bcast(pbinfo, root=0) # Broadcast PB synthesis info
if rank == 0:
if plots:
for j in xrange(n_acc):
src_ra = roi.skymodel.location[roi.info['ind'][j],0]
src_dec = roi.skymodel.location[roi.info['ind'][j],1]
src_ra[src_ra > 180.0] = src_ra[src_ra > 180.0] - 360.0
fig, axs = PLT.subplots(2, sharex=True, sharey=True, figsize=(6,6))
modelsky = axs[0].scatter(src_ra, src_dec, c=roi.skymod.spec_parms['flux-scale'][roi.info['ind'][j]], norm=PLTC.LogNorm(vmin=roi.skymod.spec_parms['flux-scale'].min(), vmax=roi.skymod.spec_parms['flux-scale'].max()), edgecolor='none', s=20)
axs[0].set_xlim(180.0, -180.0)
axs[0].set_ylim(-90.0, 90.0)
pbsky = axs[1].scatter(src_ra, src_dec, c=roi.info['pbeam'][j][:,NP.argmax(NP.abs(chans-freq))], norm=PLTC.LogNorm(vmin=roi.info['pbeam'][j].min(), vmax=1.0), edgecolor='none', s=20)
axs[1].set_xlim(180.0, -180.0)
axs[1].set_ylim(-90.0, 90.0)
cbax0 = fig.add_axes([0.88, 0.5, 0.02, 0.35])
cbar0 = fig.colorbar(modelsky, cax=cbax0, orientation='vertical')
cbax0.set_ylabel('Flux Density [Jy]', labelpad=0, fontsize=14)
cbax1 = fig.add_axes([0.88, 0.1, 0.02, 0.35])
cbar1 = fig.colorbar(pbsky, cax=cbax1, orientation='vertical')
fig.subplots_adjust(hspace=0)
big_ax = fig.add_subplot(111)
big_ax.set_axis_bgcolor('none')
big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_ylabel(r'$\delta$ [degrees]', fontsize=16, weight='medium', labelpad=30)
big_ax.set_xlabel(r'$\alpha$ [degrees]', fontsize=16, weight='medium', labelpad=20)
fig.subplots_adjust(right=0.88)
baseline_bin_indices_bounds = baseline_bin_indices + [nbl]
for i in range(cumm_bl_chunks[rank], cumm_bl_chunks[rank+1]):
print('Process {0:0d} working on baseline chunk # {1:0d} ... ({2:0d}/{3:0d})'.format(rank, bl_chunk[i], i-cumm_bl_chunks[rank]+1, n_bl_chunk_per_rank[rank]))
bls_chunk_indices = NP.arange(baseline_bin_indices_bounds[i], baseline_bin_indices_bounds[i+1])
bls_chunk = NP.asarray(bl[bls_chunk_indices,:]).reshape(-1,3)
nbl_chunk = bls_chunk.shape[0]
outfile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(i)
ia = RI.InterferometerArray(labels[bls_chunk_indices], bls_chunk, chans, telescope=telescope, eff_Q=eff_Q, latitude=latitude, longitude=longitude, altitude=altitude, A_eff=A_eff, layout=layout_info, freq_scale='GHz', pointing_coords='hadec', gaininfo=gaininfo, blgroupinfo={'groups': blgroups, 'reversemap': bl_reversemap})
# ia = RI.InterferometerArray(labels[baseline_bin_indices[bl_chunk[i]]:min(baseline_bin_indices[bl_chunk[i]]+baseline_chunk_size,total_baselines)], bl[baseline_bin_indices[bl_chunk[i]]:min(baseline_bin_indices[bl_chunk[i]]+baseline_chunk_size,total_baselines),:], chans, telescope=telescope, eff_Q=eff_Q, latitude=latitude, longitude=longitude, altitude=altitude, A_eff=A_eff, layout=layout_info, freq_scale='GHz', pointing_coords='hadec', gaininfo=gaininfo, blgroupinfo={'groups': blgroups, 'reversemap': bl_reversemap})
if store_prev_sky:
store_prev_skymodel_file=rootdir+project_dir+simid+roi_dir+'_{0:0d}.hdf5'.format(i)
else:
store_prev_skymodel_file = None
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Snapshots '.format(n_acc), PGB.ETA()], maxval=n_acc).start()
for j in range(n_acc):
roi_ind_snap = fits.getdata(roifile+'.fits', extname='IND_{0:0d}'.format(j), memmap=False)
roi_pbeam_snap = fits.getdata(roifile+'.fits', extname='PB_{0:0d}'.format(j), memmap=False)
if obs_mode in ['custom', 'dns', 'lstbin']:
timestamp = obs_id[j]
else:
# timestamp = lst[j]
timestamp = timestamps[j]
ts = time.time()
if j == 0:
ts0 = ts
ia.observe(tobjs[j], Tsysinfo, bpass, pointings_hadec[j,:], skymod, t_acc[j], pb_info=pbinfo, brightness_units=flux_unit, bpcorrect=noise_bpcorr, roi_info={'ind': roi_ind_snap, 'pbeam': roi_pbeam_snap}, roi_radius=roi_radius, roi_center=None, gradient_mode=gradient_mode, memsave=memsave, vmemavail=pvmemavail, store_prev_skymodel_file=store_prev_skymodel_file)
te = time.time()
del roi_ind_snap
del roi_pbeam_snap
progress.update(j+1)
progress.finish()
te0 = time.time()
print('Process {0:0d} took {1:.1f} minutes to complete baseline chunk # {2:0d}'.format(rank, (te0-ts0)/60, bl_chunk[i]))
ia.t_obs = t_obs
# ia.generate_noise()
# ia.add_noise()
# ia.delay_transform(oversampling_factor-1.0, freq_wts=window*NP.abs(ant_bpass)**2)
ia.project_baselines(ref_point={'location': ia.pointing_center, 'coords': ia.pointing_coords})
ia.save(outfile, fmt=savefmt, verbose=True, tabtype='BinTableHDU', npz=False, overwrite=True, uvfits_parms=None)
if os.path.exists(store_prev_skymodel_file):
os.remove(store_prev_skymodel_file) # Remove the temporary skymodel file
pte_str = str(DT.datetime.now())
if rank == 0:
parmsfile = rootdir+project_dir+simid+meta_dir+'simparms.yaml'
with open(parmsfile, 'w') as pfile:
yaml.dump(parms, pfile, default_flow_style=False)
minfo = {'user': pwd.getpwuid(os.getuid())[0], 'git#': prisim.__githash__, 'PRISim': prisim.__version__}
metafile = rootdir+project_dir+simid+meta_dir+'meta.yaml'
with open(metafile, 'w') as mfile:
yaml.dump(minfo, mfile, default_flow_style=False)
process_complete = True
all_process_complete = comm.gather(process_complete, root=0)
if rank == 0:
for k in range(n_sky_sectors):
if n_sky_sectors == 1:
sky_sector_str = '_all_sky_'
else:
sky_sector_str = '_sky_sector_{0:0d}_'.format(k)
if mpi_on_bl:
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Baseline chunks '.format(n_bl_chunks), PGB.ETA()], maxval=n_bl_chunks).start()
for i in range(0, n_bl_chunks):
bls_chunk_indices = NP.arange(baseline_bin_indices_bounds[i], baseline_bin_indices_bounds[i+1])
bls_chunk = NP.asarray(bl[bls_chunk_indices,:]).reshape(-1)
nbls_chunk = bls_chunk.shape[0]
blchunk_infile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(i)
if i == 0:
simvis = RI.InterferometerArray(None, None, None, init_file=blchunk_infile)
else:
simvis_next = RI.InterferometerArray(None, None, None, init_file=blchunk_infile)
simvis.concatenate(simvis_next, axis=0)
if cleanup >= 1:
if os.path.isfile(blchunk_infile+'.'+savefmt.lower()):
os.remove(blchunk_infile+'.'+savefmt.lower())
if os.path.isfile(blchunk_infile+'.gains.hdf5'):
os.remove(blchunk_infile+'.gains.hdf5')
progress.update(i+1)
progress.finish()
elif mpi_on_freq:
progress = PGB.ProgressBar(widgets=[PGB.Percentage(), PGB.Bar(marker='-', left=' |', right='| '), PGB.Counter(), '/{0:0d} Frequency chunks '.format(n_freq_chunks), PGB.ETA()], maxval=n_freq_chunks).start()
frequency_bin_indices_bounds = frequency_bin_indices + [nchan]
for i in range(0, n_freq_chunks):
chans_chunk_indices = NP.arange(frequency_bin_indices_bounds[i], frequency_bin_indices_bounds[i+1])
chans_chunk = NP.asarray(chans[chans_chunk_indices]).reshape(-1)
nchan_chunk = chans_chunk.size
f0_chunk = NP.mean(chans_chunk)
bw_chunk_str = '{0:0d}x{1:.1f}_kHz'.format(nchan_chunk, freq_resolution/1e3)
freqchunk_infile = rootdir+project_dir+simid+sim_dir+'_part_{0:0d}'.format(i)
if i == 0:
simvis = RI.InterferometerArray(None, None, None, init_file=freqchunk_infile)
else:
simvis_next = RI.InterferometerArray(None, None, None, init_file=freqchunk_infile)
simvis.concatenate(simvis_next, axis=1)
if cleanup > 1:
if os.path.isfile(freqchunk_infile+'.'+savefmt.lower()):
os.remove(freqchunk_infile+'.'+savefmt.lower())
if os.path.isfile(freqchunk_infile+'.gains.hdf5'):
os.remove(freqchunk_infile+'.gains.hdf5')
progress.update(i+1)
progress.finish()
simvis.generate_noise()
simvis.add_noise()
simvis.simparms_file = parmsfile
ref_point = {'coords': pc_coords, 'location': NP.asarray(pc).reshape(1,-1)}
simvis.rotate_visibilities(ref_point, do_delay_transform=do_delay_transform, verbose=True)
if do_delay_transform:
simvis.delay_transform(oversampling_factor-1.0, freq_wts=window*NP.abs(ant_bpass)**2)
consolidated_outfile = rootdir+project_dir+simid+sim_dir+'simvis'
simvis.save(consolidated_outfile, fmt=savefmt, verbose=True, tabtype='BinTableHDU', npz=save_to_npz, overwrite=True, uvfits_parms=None)
pyuvdata_formats = []
if save_to_uvh5:
pyuvdata_formats += ['uvh5']
if save_to_uvfits:
pyuvdata_formats += ['uvfits']
if len(pyuvdata_formats) > 0:
simvis_orig = copy.deepcopy(simvis)
if save_redundant: # Duplicate the redundant visibilities
consolidated_outfile = rootdir+project_dir+simid+sim_dir+'all-simvis'
for pyuvdata_fmt in pyuvdata_formats:
simvis = copy.deepcopy(simvis_orig)
uvfits_parms = None
if pyuvdata_fmt == 'uvfits':
if save_formats['phase_center'] is None:
phase_center = simvis.pointing_center[0,:].reshape(1,-1)
phase_center_coords = simvis.pointing_coords
if phase_center_coords == 'dircos':
phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
phase_center_coords = 'altaz'
if phase_center_coords == 'altaz':
phase_center = GEOM.altaz2hadec(phase_center, simvis.latitude, units='degrees')
phase_center_coords = 'hadec'
if phase_center_coords == 'hadec':
phase_center = NP.hstack((simvis.lst[0]-phase_center[0,0], phase_center[0,1]))
phase_center_coords = 'radec'
if phase_center_coords != 'radec':
raise ValueError('Invalid phase center coordinate system')
uvfits_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
else:
uvfits_ref_point = {'location': NP.asarray(save_formats['phase_center']).reshape(1,-1), 'coords': 'radec'}
# Phase the visibilities to a phase reference point
simvis.rotate_visibilities(uvfits_ref_point)
uvfits_parms = {'ref_point': None, 'datapool': None, 'method': save_formats['uvfits_method']}
if save_redundant: # Duplicate the redundant visibilities
simvis.duplicate_measurements(blgroups=blgroups)
simvis.pyuvdata_write(consolidated_outfile, formats=[pyuvdata_fmt], uvfits_parms=uvfits_parms, overwrite=True)
if cleanup >= 3:
dir_to_be_removed = rootdir+project_dir+simid+skymod_dir
shutil.rmtree(dir_to_be_removed, ignore_errors=True)
if cleanup >= 2:
dir_to_be_removed = rootdir+project_dir+simid+roi_dir
shutil.rmtree(dir_to_be_removed, ignore_errors=True)
print('Process {0} has completed.'.format(rank))
if diagnosis_parms['wait_after_run']:
PDB.set_trace()
| 122,758 | 51.461111 | 537 |
py
|
PRISim
|
PRISim-master/scripts/update_PRISim_noise.py
|
#!python
import yaml, argparse
import numpy as NP
import prisim
from prisim import interferometry as RI
import write_PRISim_visibilities as PRISimWriter
import ipdb as PDB
# Path to the installed PRISim package (used to locate bundled example files).
prisim_path = prisim.__path__[0]+'/'
if __name__ == '__main__':
    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Program to update noise in PRISim outputs')
    input_group = parser.add_argument_group('Input parameters', 'Input specifications')
    input_group.add_argument('-s', '--simfile', dest='simfile', type=str, required=True, help='HDF5 file from PRISim simulation')
    input_group.add_argument('-p', '--parmsfile', dest='parmsfile', default=None, type=str, required=True, help='File specifying simulation parameters')
    output_group = parser.add_argument_group('Output parameters', 'Output specifications')
    output_group.add_argument('-o', '--outfile', dest='outfile', default=None, type=str, required=True, help='Output File with redundant measurements')
    output_group.add_argument('--outfmt', dest='outfmt', default=['hdf5'], type=str, required=True, nargs='*', choices=['HDF5', 'hdf5', 'UVFITS', 'uvfits', 'UVH5', 'uvh5'], help='Output file format')
    noise_parms_group = parser.add_argument_group('Noise parameters', 'Noise specifications')
    # NOTE(review): type=file is the Python-2 builtin (argparse opens the file
    # itself); under Python 3 this raises NameError -- confirm the supported
    # interpreter version.
    noise_parms_group.add_argument('-n', '--noise_parmsfile', dest='noise_parmsfile', default=prisim_path+'examples/simparms/noise_update_parms.yaml', type=file, required=True, help='File specifying noise parameters for updating noise in PRISim output')
    misc_group = parser.add_argument_group('Misc parameters', 'Misc specifications')
    misc_group.add_argument('-w', '--wait', dest='wait', action='store_true', help='Wait after run')
    args = vars(parser.parse_args())
    outfile = args['outfile']
    outformats = args['outfmt']
    parmsfile = args['parmsfile']
    # Load the simulation parameters that accompany the PRISim output.
    with open(parmsfile, 'r') as pfile:
        parms = yaml.safe_load(pfile)
    # Rehydrate the simulated interferometer data from the saved HDF5 file.
    simobj = RI.InterferometerArray(None, None, None, init_file=args['simfile'])
    # The following "if" statement is to allow previous buggy saved versions
    # of HDF5 files that did not save the projected_baselines attribute in the
    # right shape when n_acc=1
    update_projected_baselines = False
    if simobj.projected_baselines.ndim != 3:
        update_projected_baselines = True
    else:
        if simobj.projected_baselines.shape[2] != simobj.n_acc:
            update_projected_baselines = True
    if update_projected_baselines:
        uvw_ref_point = None
        if parms['save_formats']['phase_center'] is None:
            # No phase center specified: derive one from the pointing center,
            # converting stepwise dircos -> altaz -> hadec -> radec.
            phase_center = simobj.pointing_center[0,:].reshape(1,-1)
            phase_center_coords = simobj.pointing_coords
            if phase_center_coords == 'dircos':
                # NOTE(review): GEOM is not imported in this script (imports
                # are yaml, argparse, numpy, prisim, RI, PRISimWriter, ipdb),
                # so this branch would raise NameError if reached -- confirm.
                phase_center = GEOM.dircos2altaz(phase_center, units='degrees')
                phase_center_coords = 'altaz'
            if phase_center_coords == 'altaz':
                phase_center = GEOM.altaz2hadec(phase_center, simobj.latitude, units='degrees')
                phase_center_coords = 'hadec'
            if phase_center_coords == 'hadec':
                phase_center = NP.hstack((simobj.lst[0]-phase_center[0,0], phase_center[0,1]))
                phase_center_coords = 'radec'
            if phase_center_coords != 'radec':
                raise ValueError('Invalid phase center coordinate system')
            uvw_ref_point = {'location': phase_center.reshape(1,-1), 'coords': 'radec'}
        else:
            uvw_ref_point = {'location': NP.asarray(parms['save_formats']['phase_center']).reshape(1,-1), 'coords': 'radec'}
        # Recompute projected baselines with respect to the reference point.
        simobj.project_baselines(uvw_ref_point)
    # Quantities required to compute the thermal noise RMS.
    freqs = simobj.channels
    nchan = freqs.size
    df = simobj.freq_resolution
    t_acc = NP.asarray(simobj.t_acc)
    ntimes = t_acc.shape[-1]
    dt = NP.mean(t_acc)
    nbl = simobj.baseline_lengths.size
    noise_parmsfile = args['noise_parmsfile']
    # args['noise_parmsfile'] is already an open file object (argparse
    # type=file above), hence it is used directly as a context manager.
    with args['noise_parmsfile'] as noise_parmsfile:
        noise_parms = yaml.safe_load(noise_parmsfile)
    Tsys = noise_parms['Tsys']
    Trx = noise_parms['Trx']
    Tant_freqref = noise_parms['Tant_freqref']
    Tant_ref = noise_parms['Tant_ref']
    Tant_spindex = noise_parms['Tant_spindex']
    Tsysinfo = {'Trx': Trx, 'Tant':{'f0': Tant_freqref, 'spindex': Tant_spindex, 'T0': Tant_ref}, 'Tnet': Tsys}
    if Tsys is None:
        # System temperature as receiver temperature plus a power-law antenna
        # temperature: Tsys(f) = Trx + Tant_ref * (f/Tant_freqref)**spindex
        Tsys_arr = Trx + Tant_ref * (freqs/Tant_freqref)**Tant_spindex
        parms['telescope']['Tsys'] = noise_parms['Tsys']
        parms['telescope']['Trx'] = noise_parms['Trx']
        parms['telescope']['Tant_freqref'] = noise_parms['Tant_freqref']
        parms['telescope']['Tant_ref'] = noise_parms['Tant_ref']
        parms['telescope']['Tant_spindex'] = noise_parms['Tant_spindex']
    # NOTE(review): Tsys_arr is only bound inside the branch above; if
    # noise_parms['Tsys'] is not None the next line raises NameError --
    # confirm whether a scalar Tsys was meant to be handled here.
    Tsys_arr = NP.asarray(Tsys_arr).reshape(1,-1,1)
    A_eff = noise_parms['A_eff']
    eff_aprtr = noise_parms['eff_aprtr']
    # Effective collecting area scaled by the aperture efficiency.
    A_eff *= eff_aprtr
    eff_Q = noise_parms['eff_Q']
    # Per-channel thermal noise RMS and a fresh matching noise realization.
    noiseRMS = RI.thermalNoiseRMS(A_eff, df, dt, Tsys_arr, nbl=nbl, nchan=nchan, ntimes=ntimes, flux_unit='Jy', eff_Q=eff_Q)
    noise = RI.generateNoise(noiseRMS=noiseRMS, A_eff=None, df=None, dt=None, Tsys=None, nbl=nbl, nchan=nchan, ntimes=ntimes, flux_unit=None, eff_Q=None)
    # Overwrite the noise-related attributes on the simulation object; the
    # "+ NP.zeros_like(...)" broadcasts new values to the existing array shapes.
    simobj.Tsysinfo = [Tsysinfo] * ntimes
    simobj.Tsys = Tsys_arr + NP.zeros_like(simobj.Tsys)
    simobj.A_eff = A_eff + NP.zeros_like(simobj.A_eff)
    simobj.eff_Q = eff_Q + NP.zeros_like(simobj.eff_Q)
    simobj.vis_rms_freq = noiseRMS + NP.zeros_like(simobj.vis_rms_freq)
    simobj.vis_noise_freq = noise + NP.zeros_like(simobj.vis_noise_freq)
    # Noisy visibilities = noiseless sky visibilities + new noise realization.
    simobj.vis_freq = simobj.skyvis_freq + noise
    simobj.simparms_file = parmsfile
    PRISimWriter.save(simobj, outfile, outformats, parmsfile=parmsfile)
    # Persist the updated telescope noise parameters back to the parms file.
    with open(parmsfile, 'w') as pfile:
        yaml.dump(parms, pfile, default_flow_style=False)
    wait_after_run = args['wait']
    if wait_after_run:
        # Drop into the debugger so results can be inspected interactively.
        PDB.set_trace()
| 5,922 | 43.871212 | 253 |
py
|
PRISim
|
PRISim-master/scripts/prisim_ls.py
|
#!python
import glob
import itertools
import yaml
import argparse
import numpy as NP
import prisim
prisim_path = prisim.__path__[0]+'/'
def lsPRISim(args):
    """Tabulate saved simulation parameters of PRISim runs under a project dir.

    Parameters
    ----------
    args : dict
        Expected keys:
        'project' : str  -- project directory to search,
        'simid'   : str  -- glob pattern selecting simulation IDs,
        'format'  : str  -- 'csv' for comma-delimited output, anything else
                            yields tab-delimited output,
        'change'  : bool -- if True, list only parameters whose values differ
                            across the discovered simulations.

    Returns
    -------
    str
        A delimited table (header row of 'section:parameter' columns, one row
        per simulation), terminated by a newline. Returns '\\n\\n' when no
        parameter files are found.
    """
    project_dir = args['project']
    simid = args['simid']
    # Ensure exactly one path separator between the project dir and pattern.
    folder_separator = ''
    if not project_dir.endswith('/'):
        folder_separator = '/'
    simdir_pattern = project_dir + folder_separator + simid
    temp_simdirs = glob.glob(simdir_pattern)
    simdirs = [temp_simdir for temp_simdir in temp_simdirs if not temp_simdir.endswith(('.', '..'))]
    # Load each simulation's saved parameter file; silently skip runs that
    # have no metainfo/simparms.yaml (best-effort listing).
    simparms_list = []
    for simdir in simdirs:
        try:
            with open(simdir+'/metainfo/simparms.yaml', 'r') as parmsfile:
                simparms_list += [{simdir+'/': yaml.safe_load(parmsfile)}]
        except IOError:
            pass
    # parmsDB maps (section, parameter) -> list of values, one per simulation.
    # list(d.values())[0] is used instead of d.values()[0] so the code works
    # on both Python 2 and Python 3 (dict views are not indexable in Py3).
    parmsDB = {}
    for parm in simparms_list:
        parminfo = list(parm.values())[0]
        for ikey, ival in parminfo.items():
            if isinstance(ival, dict):
                for subkey in ival:
                    key = (ikey, subkey)
                    if key in parmsDB:
                        parmsDB[key] += [parminfo[ikey][subkey]]
                    else:
                        parmsDB[key] = [parminfo[ikey][subkey]]
    # Select parameters whose values are not identical across simulations.
    parmsDBselect = {}
    nuniqDBselect = {}
    for key in parmsDB:
        vals = sorted(parmsDB[key])
        uniqvals = [val for val, _ in itertools.groupby(vals)]
        if len(uniqvals) > 1:
            parmsDBselect[key] = parmsDB[key]
            nuniqDBselect[key] = len(uniqvals)
    linestr = '\n'
    if args['format'] == 'csv':
        delimiter = ','
    else:
        delimiter = '\t'
    if args['change']:
        if parmsDBselect:
            keys = sorted(parmsDBselect.keys())
            linestr = 'PRISim-ID'
            for key in keys:
                linestr += delimiter+key[0]+':'+key[1]
            linestr += '\n'
            for parm in simparms_list:
                linestr += '\n'+list(parm.keys())[0]
                # Iterate in the same sorted order as the header so columns
                # line up (plain dict iteration order is arbitrary in Py2).
                for key in keys:
                    linestr += delimiter+str(list(parm.values())[0][key[0]][key[1]])
            linestr += '\n\nNumber of unique values'
            for key in keys:
                linestr += delimiter+'{0:0d}/{1:0d}'.format(nuniqDBselect[key], len(simparms_list))
    else:
        if parmsDB:
            keys = sorted(parmsDB.keys())
            linestr = 'PRISim-ID'
            for key in keys:
                # Fixed: the original had a typo 'linesstr' here, raising
                # NameError whenever any parameters were found on this path.
                linestr += delimiter+key[0]+':'+key[1]
            linestr += '\n'
            for parm in simparms_list:
                linestr += '\n'+list(parm.keys())[0]
                for key in keys:
                    linestr += delimiter+str(list(parm.values())[0][key[0]][key[1]])
    return linestr+'\n'
if __name__ == '__main__':
    # Command-line interface: build the argument parser, run the search, and
    # either write the resulting table to a file or print it to stdout.
    parser = argparse.ArgumentParser(description='Program to list metadata of PRISim simulations')
    dir_group = parser.add_argument_group('Search targets', 'Target data directories for search')
    dir_group.add_argument('-p', '--project', dest='project', required=True, type=str, help='Project directory to search simulation parameters in')
    dir_group.add_argument('-s', '--simid', dest='simid', required=False, type=str, default='*', help='Simulation ID filter')
    # -a (list all parameters) and -c (list only parameters that change
    # across simulations) are mutually exclusive; lsPRISim() consults
    # args['change'] first.
    # NOTE(review): dest='all' defaults to True, so args['all'] is True even
    # when -a is not given -- confirm this is intentional.
    filter_group = parser.add_mutually_exclusive_group()
    filter_group.add_argument('-a', '--all', dest='all', default=True, action='store_true')
    filter_group.add_argument('-c', '--change', dest='change', default=False, action='store_true')
    output_group = parser.add_argument_group('Output specifications', 'Output specifications')
    output_group.add_argument('-f', '--format', dest='format', default='tsv', choices=['csv', 'tsv'], type=str, required=False, help='Output format (tab/comma separated)')
    output_group.add_argument('-o', '--output', dest='output', type=str, required=False, help='Output file path')
    args = vars(parser.parse_args())
    linestr = lsPRISim(args)
    if args['output'] is not None:
        try:
            with open(args['output'], 'w+') as outfile:
                outfile.write(linestr)
        except IOError:
            # Fall back to stdout so the listing is not lost, then re-raise
            # to signal the bad output path to the caller.
            print(linestr)
            raise IOError('Specified output file/folder invalid')
    else:
        print(linestr)
| 4,293 | 38.394495 | 171 |
py
|
dstqa
|
dstqa-master/multiwoz_format.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
import sys
import os
import json
import pdb
import copy
import random
# Usage: python multiwoz_format.py <domain|all> <input_dir> <output_dir>
# NOTE(review): assert is stripped under "python -O"; acceptable for a one-off script.
assert(len(sys.argv) == 4)
# Ontology file listing the "<domain>\t<slot>" pairs handled downstream.
ontology_path = "ontology/domain_slot_list_sp.txt"
# Percentage of training dialogs to keep (100 = use all of them).
data_ratio = 100
if sys.argv[1] == "all":
    domains_keep = set(["restaurant", "hotel", "train", "attraction", "taxi"])
else:
    # Restrict the output to a single requested domain.
    domains_keep = set([sys.argv[1]])
input_file_path = sys.argv[2]
output_file_path = sys.argv[3]
# TRADE-style preprocessed MultiWOZ splits expected in the input directory.
train_file_path = input_file_path + "/train_dials.json"
dev_file_path = input_file_path + "/dev_dials.json"
test_file_path = input_file_path + "/test_dials.json"
def read_ds(path=None):
    """Read the ontology file and return its "<domain>-<slot>" names.

    Lines starting with "#" are treated as comments and skipped; every other
    line is expected to be tab-separated with the domain in column 0 and the
    slot in column 1.

    Parameters
    ----------
    path : str, optional
        Ontology file to read.  Defaults to the module-level
        ``ontology_path``, so existing zero-argument callers are unaffected.

    Returns
    -------
    list of str
        One "<domain>-<slot>" entry per non-comment line, in file order.
    """
    if path is None:
        path = ontology_path
    ds = []
    with open(path) as fp:
        for line in fp:
            if line[0] == "#":  # comment line in the ontology file
                continue
            line_arr = line.split("\t")
            ds.append(line_arr[0] + "-" + line_arr[1])
    return ds
# Module-level list of "<domain>-<slot>" names; consumed by fix_general_label_error.
ds = read_ds()
# the following function is from https://raw.githubusercontent.com/jasonwu0731/trade-dst/master/utils/fix_label.py
def fix_general_label_error(labels, type):
    """Clean noisy MultiWOZ turn labels into a {slot: value} dict.

    Copied from the TRADE codebase:
    https://raw.githubusercontent.com/jasonwu0731/trade-dst/master/utils/fix_label.py

    Parameters
    ----------
    labels :
        Either a list of (slot, value) pairs (when ``type`` is truthy) or the
        MultiWOZ turn-label format: a list of dicts with a "slots" entry.
    type :
        Format selector for ``labels``.  (Shadows the builtin ``type``; name
        kept as in the upstream implementation.)

    Returns
    -------
    dict
        Mapping "<domain>-<slot>" -> cleaned value string.
    """
    # Slot names derived from the global ontology list ``ds``: "book" slots
    # keep their spaces, all other slot names have spaces removed.
    slots = [k.replace(" ","").lower() if ("book" not in k) else k.lower() for k in ds]
    label_dict = dict([ (l[0], l[1]) for l in labels]) if type else dict([ (l["slots"][0][0], l["slots"][0][1]) for l in labels])
    # Known typos/aliases -> canonical values.
    GENERAL_TYPO = {
        # type
        "guesthouse":"guest house", "guesthouses":"guest house", "guest":"guest house", "mutiple sports":"multiple sports",
        "sports":"multiple sports", "mutliple sports":"multiple sports","swimmingpool":"swimming pool", "concerthall":"concert hall",
        "concert":"concert hall", "pool":"swimming pool", "night club":"nightclub", "mus":"museum", "ol":"architecture",
        "colleges":"college", "coll":"college", "architectural":"architecture", "musuem":"museum", "churches":"church",
        # area
        "center":"centre", "center of town":"centre", "near city center":"centre", "in the north":"north", "cen":"centre", "east side":"east",
        "east area":"east", "west part of town":"west", "ce":"centre", "town center":"centre", "centre of cambridge":"centre",
        "city center":"centre", "the south":"south", "scentre":"centre", "town centre":"centre", "in town":"centre", "north part of town":"north",
        "centre of town":"centre", "cb30aq": "none",
        # price
        "mode":"moderate", "moderate -ly": "moderate", "mo":"moderate",
        # day
        "next friday":"friday", "monda": "monday", "thur": "thursday", "not given": "none",
        # parking
        "free parking":"free",
        # internet
        "free internet":"yes",
        # star
        "4 star":"4", "4 stars":"4", "0 star rarting":"none",
        # others
        "y":"yes", "any":"dontcare", "n":"no", "does not care":"dontcare", "not men":"none", "not":"none", "not mentioned":"none",
        '':"none", "not mendtioned":"none", "3 .":"3", "does not":"no", "fun":"none", "art":"none", "no mentioned": "none",
        }
    for slot in slots:
        if slot in label_dict.keys():
            # general typos
            if label_dict[slot] in GENERAL_TYPO.keys():
                # replace(x, y) on the full string x is equivalent to assigning y.
                label_dict[slot] = label_dict[slot].replace(label_dict[slot], GENERAL_TYPO[label_dict[slot]])
            # miss match slot and value
            if slot == "hotel-type" and label_dict[slot] in ["nigh", "moderate -ly priced", "bed and breakfast", "centre", "venetian", "intern", "a cheap -er hotel"] or \
               slot == "hotel-internet" and label_dict[slot] == "4" or \
               slot == "hotel-pricerange" and label_dict[slot] == "2" or \
               slot == "attraction-type" and label_dict[slot] in ["gastropub", "la raza", "galleria", "gallery", "science", "m"] or \
               "area" in slot and label_dict[slot] in ["moderate"] or \
               "day" in slot and label_dict[slot] == "t":
                label_dict[slot] = "none"
            elif slot == "hotel-type" and label_dict[slot] in ["hotel with free parking and free wifi", "4", "3 star hotel"]:
                label_dict[slot] = "hotel"
            elif slot == "hotel-star" and label_dict[slot] == "3 star hotel":
                label_dict[slot] = "3"
            elif "area" in slot:
                if label_dict[slot] == "no": label_dict[slot] = "north"
                elif label_dict[slot] == "we": label_dict[slot] = "west"
                elif label_dict[slot] == "cent": label_dict[slot] = "centre"
            elif "day" in slot:
                if label_dict[slot] == "we": label_dict[slot] = "wednesday"
                elif label_dict[slot] == "no": label_dict[slot] = "none"
            elif "price" in slot and label_dict[slot] == "ch":
                label_dict[slot] = "cheap"
            elif "internet" in slot and label_dict[slot] == "free":
                label_dict[slot] = "yes"
            # some out-of-define classification slot values
            if slot == "restaurant-area" and label_dict[slot] in ["stansted airport", "cambridge", "silver street"] or \
               slot == "attraction-area" and label_dict[slot] in ["norwich", "ely", "museum", "same area as hotel"]:
                label_dict[slot] = "none"
    return label_dict
def bs_format(bs):
    """Re-shape a flat {"domain-slot": value} belief state into the nested
    MultiWOZ layout {"<domain>": {"semi": {<slot>: <value>}}}.

    Values are normalized through a long sequence of spelling/alias fixes.
    The rewrites are applied top to bottom and order matters: the output of
    one rule can be rewritten again by a later rule (e.g.
    "cambridge temporary art" -> "contemporary art museum" ->
    "cambridge contemporary art" in the attraction section below).
    Slots whose domain is not in the module-level ``domains_keep`` set are
    dropped from the result.
    """
    res = {"restaurant": {"semi": {}},
           "hotel": {"semi": {}},
           "train": {"semi": {}},
           "attraction": {"semi": {}},
           "taxi": {"semi": {}},
           }
    # NOTE: the loop variable ``ds`` shadows the module-level ontology list ``ds``.
    for ds, v in bs.items():
        d = ds.split("-")[0]
        s = ds.split("-")[1]
        # --- domain-independent value / slot-name normalizations ---
        if v == "dontcare":
            v = "dont care"
        if v == "does not care":
            v = "dont care"
        if v == "corsican":
            v = "corsica"
        if v == "barbeque":
            v = "barbecue"
        if v == "center":
            v = "centre"
        if v == "east side":
            v = "east"
        if s == "pricerange":
            s = "price range"
        if s == "price range" and v == "mode":
            v = "moderate"
        if v == "not mentioned":
            v = ""
        if v == "thai and chinese": # only one such type, throw away
            v = "chinese"
        if s == "area" and v == "n":
            v = "north"
        if s == "price range" and v == "ch":
            v = "cheap"
        if v == "moderate -ly":
            v = "moderate"
        if s == "area" and v == "city center":
            v = "centre"
        if s == "food" and v == "sushi": # sushi only appear once in the training dataset. doesnt matter throw it away or not
            v = "japanese"
        # --- venue-name aliases (mostly restaurants) ---
        if v == "oak bistro":
            v = "the oak bistro"
        if v == "golden curry":
            v = "the golden curry"
        if v == "meze bar restaurant":
            v = "meze bar"
        if v == "golden house golden house":
            v = "golden house"
        if v == "missing sock":
            v = "the missing sock"
        if v == "the yippee noodle bar":
            v = "yippee noodle bar"
        if v == "fitzbillies":
            v = "fitzbillies restaurant"
        if v == "slug and lettuce":
            v = "the slug and lettuce"
        if v == "copper kettle":
            v = "the copper kettle"
        if v == "city stop":
            v = "city stop restaurant"
        if v == "cambridge lodge":
            v = "cambridge lodge restaurant"
        if v == "ian hong house":
            v = "lan hong house"
        if v == "lan hong":
            v = "lan hong house"
        if v == "hotpot":
            v = "the hotpot"
        if v == "the dojo noodle bar":
            v = "dojo noodle bar"
        if v == "cambridge chop house":
            v = "the cambridge chop house"
        if v == "nirala":
            v = "the nirala"
        if v == "gardenia":
            v = "the gardenia"
        if v == "the americas":
            v = "americas"
        if v == "guest house":
            v = "guesthouse"
        if v == "margherita":
            v = "la margherita"
        if v == "gonville":
            v = "gonville hotel"
        if s == "parking" and v == "free":
            v = "yes"
        # --- hotel-name spelling fixes ---
        if d == "hotel" and s == "name":
            if v == "acorn" or v == "acorn house":
                v = "acorn guest house"
            if v == "cambridge belfry":
                v = "the cambridge belfry"
            if v == "huntingdon hotel":
                v = "huntingdon marriott hotel"
            if v == "alexander":
                v = "alexander bed and breakfast"
            if v == "lensfield hotel":
                v = "the lensfield hotel"
            if v == "university arms":
                v = "university arms hotel"
            if v == "city roomz":
                v = "cityroomz"
            if v == "ashley":
                v = "ashley hotel"
        # --- train station-name fixes ---
        if d == "train":
            if s == "destination" or s == "departure":
                if v == "bishop stortford":
                    v = "bishops stortford"
                if v == "bishops storford":
                    v = "bishops stortford"
                if v == "birmingham":
                    v = "birmingham new street"
                if v == "stansted":
                    v = "stansted airport"
                if v == "leicaster":
                    v = "leicester"
        # --- attraction-name fixes ---
        if d == "attraction":
            if v == "cambridge temporary art":
                v = "contemporary art museum"
            if v == "cafe jello":
                v = "cafe jello gallery"
            if v == "fitzwilliam" or v == "fitzwilliam museum":
                v = "the fitzwilliam museum"
            if v == "contemporary art museum":
                v = "cambridge contemporary art"
            if v == "man on the moon":
                v = "the man on the moon"
            if v == "christ college":
                v = "christ s college"
            if v == "old school":
                v = "old schools"
            if v == "cambridge punter":
                v = "the cambridge punter"
            if v == "queen s college":
                v = "queens college"
            if v == "all saint s church":
                v = "all saints church"
            if v == "fez club":
                v = "the fez club"
            if v == "parkside":
                v = "parkside pools"
            if v == "saint john s college .":
                v = "saint john s college"
            if v == "the mumford theatre":
                v = "mumford theatre"
            if v == "corn cambridge exchange":
                v = "the cambridge corn exchange"
        # --- taxi departure/destination fixes (stations, venues, hotels) ---
        if d == "taxi":
            if v == "london kings cross train station":
                v = "london kings cross"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "junction theatre":
                v = "the junction"
            if v == "bishops stortford train station":
                v = "bishops stortford"
            if v == "cambridge train station":
                v = "cambridge"
            if v == "citiroomz":
                v = "cityroomz"
            if v == "london liverpool street train station":
                v = "london liverpool street"
            if v == "norwich train station":
                v = "norwich"
            if v == "kings college":
                v = "king s college"
            if v == "the ghandi" or v == "ghandi":
                v = "the gandhi"
            if v == "ely train station":
                v = "ely"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "peterborough train station":
                v = "peterborough"
            if v == "london kings cross train station":
                v = "london kings cross"
            if v == "kings lynn train station":
                v = "kings lynn"
            if v == "stansted airport train station":
                v = "stansted airport"
            if v == "acorn house":
                v = "acorn guest house"
            if v == "queen s college":
                v = "queens college"
            if v == "leicester train station":
                v = "leicester"
            if v == "the gallery at 12":
                v = "gallery at 12 a high street"
            if v == "caffee uno":
                v = "caffe uno"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "finches":
                v = "finches bed and breakfast"
            if v == "broxbourne train station":
                v = "broxbourne"
            if v == "country folk museum":
                v = "cambridge and county folk museum"
            if v == "ian hong":
                v = "lan hong house"
            if v == "the byard art museum":
                v = "byard art"
            if v == "cambridge belfry":
                v = "the cambridge belfry"
            if v == "birmingham new street train station":
                v = "birmingham new street"
            if v == "man on the moon concert hall":
                v = "the man on the moon"
            if v == "st . john s college":
                v = "saint john s college"
            if v == "st johns chop house":
                v = "saint johns chop house"
            if v == "fitzwilliam museum":
                v = "the fitzwilliam museum"
            if v == "cherry hinton village centre":
                v = "the cherry hinton village centre"
            if v == "maharajah tandoori restaurant4":
                v = "maharajah tandoori restaurant"
            if v == "the soul tree":
                v = "soul tree nightclub"
            if v == "cherry hinton village center":
                v = "the cherry hinton village centre"
            if v == "aylesbray lodge":
                v = "aylesbray lodge guest house"
            if v == "the alexander bed and breakfast":
                v = "alexander bed and breakfast"
            if v == "shiraz .":
                v = "shiraz restaurant"
            if v == "tranh binh":
                v = "thanh binh"
            if v == "riverboat georginawd":
                v = "riverboat georgina"
            if v == "lovell ldoge":
                v = "lovell lodge"
            if v == "alyesbray lodge hotel":
                v = "aylesbray lodge guest house"
            if v == "wandlebury county park":
                v = "wandlebury country park"
            if v == "the galleria":
                v = "galleria"
            if v == "cambridge artw2orks":
                v = "cambridge artworks"
        # Drop slots from domains we are not exporting.
        if d not in domains_keep:
            continue
        res[d]["semi"][s] = v
    return res
def utt_format(utt):
    """Normalize spelling variants in an utterance (barbeque/center)."""
    for variant, canonical in (("barbeque", "barbecue"), ("center", "centre")):
        utt = utt.replace(variant, canonical)
    return utt
def process(file_path, is_training=False):
    """Load one TRADE-format split and convert every kept dialog.

    Dialogs touching none of the domains in ``domains_keep`` are dropped.
    When ``is_training`` is true and ``data_ratio`` != 100, the dialogs are
    shuffled with a fixed seed and truncated to that percentage.
    Each turn's utterances go through utt_format() and its belief state
    through fix_general_label_error() + bs_format().
    """
    with open(file_path) as fp:
        raw_dialogs = json.load(fp)
    if is_training and data_ratio != 100:
        random.Random(10).shuffle(raw_dialogs)
        raw_dialogs = raw_dialogs[:int(len(raw_dialogs) * 0.01 * data_ratio)]
    converted = []
    for dialog in raw_dialogs:
        # Keep the dialog only if at least one of its domains is wanted.
        if not any(domain in domains_keep for domain in dialog["domains"]):
            continue
        turns = []
        for turn_info in dialog["dialogue"]:
            cleaned_bs = fix_general_label_error(turn_info["belief_state"], False)
            turns.append({
                "transcript": utt_format(turn_info["transcript"]),
                "system_transcript": utt_format(turn_info["system_transcript"]),
                "belief_state": bs_format(cleaned_bs),
            })
        converted.append({
            "dialogue_idx": dialog["dialogue_idx"],
            "dialogue": turns,
        })
    return converted
# Write each processed split; use context managers so every handle is
# flushed and closed (the originals were opened and never closed).
# train
train_dialogs = process(train_file_path, True)
with open(os.path.join(output_file_path, "./train.json"), "w") as ofp:
    ofp.write(json.dumps(train_dialogs, indent=2))
# dev
dev_dialogs = process(dev_file_path)
with open(os.path.join(output_file_path, "./dev.json"), "w") as ofp:
    ofp.write(json.dumps(dev_dialogs, indent=2))
# test
test_dialogs = process(test_file_path)
with open(os.path.join(output_file_path, "./test.json"), "w") as ofp:
    ofp.write(json.dumps(test_dialogs, indent=2))
# prediction. same as test, but one instance per line
with open(os.path.join(output_file_path, "./prediction.json"), "w") as ofp:
    for dialog in test_dialogs:
        ofp.write(json.dumps(dialog))
        ofp.write("\n")
| 15,023 | 35.914005 | 171 |
py
|
dstqa
|
dstqa-master/multiwoz_2.1_format.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
import sys
import os
import json
import pdb
import copy
import random
# Usage: python multiwoz_2.1_format.py <domain|all> <input_dir> <output_dir>
# NOTE(review): assert is stripped under "python -O"; acceptable for a one-off script.
assert(len(sys.argv) == 4)
# Ontology file listing the "<domain>\t<slot>" pairs handled downstream.
ontology_path = "ontology/domain_slot_list_sp.txt"
# Percentage of training dialogs to keep (100 = use all of them).
data_ratio = 100
if sys.argv[1] == "all":
    domains_keep = set(["restaurant", "hotel", "train", "attraction", "taxi"])
else:
    # Restrict the output to a single requested domain.
    domains_keep = set([sys.argv[1]])
input_file_path = sys.argv[2]
output_file_path = sys.argv[3]
# TRADE-style preprocessed MultiWOZ 2.1 splits expected in the input directory.
train_file_path = input_file_path + "/train_dials.json"
dev_file_path = input_file_path + "/dev_dials.json"
test_file_path = input_file_path + "/test_dials.json"
def read_ds(path=None):
    """Read the ontology file and return its "<domain>-<slot>" names.

    Lines starting with "#" are treated as comments and skipped; every other
    line is expected to be tab-separated with the domain in column 0 and the
    slot in column 1.

    Parameters
    ----------
    path : str, optional
        Ontology file to read.  Defaults to the module-level
        ``ontology_path``, so existing zero-argument callers are unaffected.

    Returns
    -------
    list of str
        One "<domain>-<slot>" entry per non-comment line, in file order.
    """
    if path is None:
        path = ontology_path
    ds = []
    with open(path) as fp:
        for line in fp:
            if line[0] == "#":  # comment line in the ontology file
                continue
            line_arr = line.split("\t")
            ds.append(line_arr[0] + "-" + line_arr[1])
    return ds
# Module-level list of "<domain>-<slot>" names; consumed by fix_general_label_error.
ds = read_ds()
# the following function is from https://raw.githubusercontent.com/jasonwu0731/trade-dst/master/utils/fix_label.py
def fix_general_label_error(labels, type):
    """Clean noisy MultiWOZ turn labels into a {slot: value} dict.

    Copied from the TRADE codebase:
    https://raw.githubusercontent.com/jasonwu0731/trade-dst/master/utils/fix_label.py

    Parameters
    ----------
    labels :
        Either a list of (slot, value) pairs (when ``type`` is truthy) or the
        MultiWOZ turn-label format: a list of dicts with a "slots" entry.
    type :
        Format selector for ``labels``.  (Shadows the builtin ``type``; name
        kept as in the upstream implementation.)

    Returns
    -------
    dict
        Mapping "<domain>-<slot>" -> cleaned value string.
    """
    # Slot names derived from the global ontology list ``ds``: "book" slots
    # keep their spaces, all other slot names have spaces removed.
    slots = [k.replace(" ","").lower() if ("book" not in k) else k.lower() for k in ds]
    label_dict = dict([ (l[0], l[1]) for l in labels]) if type else dict([ (l["slots"][0][0], l["slots"][0][1]) for l in labels])
    # Known typos/aliases -> canonical values.
    GENERAL_TYPO = {
        # type
        "guesthouse":"guest house", "guesthouses":"guest house", "guest":"guest house", "mutiple sports":"multiple sports",
        "sports":"multiple sports", "mutliple sports":"multiple sports","swimmingpool":"swimming pool", "concerthall":"concert hall",
        "concert":"concert hall", "pool":"swimming pool", "night club":"nightclub", "mus":"museum", "ol":"architecture",
        "colleges":"college", "coll":"college", "architectural":"architecture", "musuem":"museum", "churches":"church",
        # area
        "center":"centre", "center of town":"centre", "near city center":"centre", "in the north":"north", "cen":"centre", "east side":"east",
        "east area":"east", "west part of town":"west", "ce":"centre", "town center":"centre", "centre of cambridge":"centre",
        "city center":"centre", "the south":"south", "scentre":"centre", "town centre":"centre", "in town":"centre", "north part of town":"north",
        "centre of town":"centre", "cb30aq": "none",
        # price
        "mode":"moderate", "moderate -ly": "moderate", "mo":"moderate",
        # day
        "next friday":"friday", "monda": "monday", "thur": "thursday", "not given": "none",
        # parking
        "free parking":"free",
        # internet
        "free internet":"yes",
        # star
        "4 star":"4", "4 stars":"4", "0 star rarting":"none",
        # others
        "y":"yes", "any":"dontcare", "n":"no", "does not care":"dontcare", "not men":"none", "not":"none", "not mentioned":"none",
        '':"none", "not mendtioned":"none", "3 .":"3", "does not":"no", "fun":"none", "art":"none", "no mentioned": "none",
        }
    for slot in slots:
        if slot in label_dict.keys():
            # general typos
            if label_dict[slot] in GENERAL_TYPO.keys():
                # replace(x, y) on the full string x is equivalent to assigning y.
                label_dict[slot] = label_dict[slot].replace(label_dict[slot], GENERAL_TYPO[label_dict[slot]])
            # miss match slot and value
            if slot == "hotel-type" and label_dict[slot] in ["nigh", "moderate -ly priced", "bed and breakfast", "centre", "venetian", "intern", "a cheap -er hotel"] or \
               slot == "hotel-internet" and label_dict[slot] == "4" or \
               slot == "hotel-pricerange" and label_dict[slot] == "2" or \
               slot == "attraction-type" and label_dict[slot] in ["gastropub", "la raza", "galleria", "gallery", "science", "m"] or \
               "area" in slot and label_dict[slot] in ["moderate"] or \
               "day" in slot and label_dict[slot] == "t":
                label_dict[slot] = "none"
            elif slot == "hotel-type" and label_dict[slot] in ["hotel with free parking and free wifi", "4", "3 star hotel"]:
                label_dict[slot] = "hotel"
            elif slot == "hotel-star" and label_dict[slot] == "3 star hotel":
                label_dict[slot] = "3"
            elif "area" in slot:
                if label_dict[slot] == "no": label_dict[slot] = "north"
                elif label_dict[slot] == "we": label_dict[slot] = "west"
                elif label_dict[slot] == "cent": label_dict[slot] = "centre"
            elif "day" in slot:
                if label_dict[slot] == "we": label_dict[slot] = "wednesday"
                elif label_dict[slot] == "no": label_dict[slot] = "none"
            elif "price" in slot and label_dict[slot] == "ch":
                label_dict[slot] = "cheap"
            elif "internet" in slot and label_dict[slot] == "free":
                label_dict[slot] = "yes"
            # some out-of-define classification slot values
            if slot == "restaurant-area" and label_dict[slot] in ["stansted airport", "cambridge", "silver street"] or \
               slot == "attraction-area" and label_dict[slot] in ["norwich", "ely", "museum", "same area as hotel"]:
                label_dict[slot] = "none"
    return label_dict
def bs_format(bs):
    """Re-shape a flat {"domain-slot": value} belief state into the nested
    MultiWOZ layout {"<domain>": {"semi": {<slot>: <value>}}}.

    This MultiWOZ 2.1 variant prepends an extra batch of value fixes before
    the shared normalizations.  The rewrites are applied top to bottom and
    order matters: the output of one rule can be rewritten again by a later
    rule.  Slots whose domain is not in the module-level ``domains_keep``
    set are dropped from the result.
    """
    res = {"restaurant": {"semi": {}},
           "hotel": {"semi": {}},
           "train": {"semi": {}},
           "attraction": {"semi": {}},
           "taxi": {"semi": {}},
           }
    # NOTE: the loop variable ``ds`` shadows the module-level ontology list ``ds``.
    for ds, v in bs.items():
        d = ds.split("-")[0]
        s = ds.split("-")[1]
        # --- MultiWOZ 2.1-specific value fixes ---
        if v == "cambridge contemporary art museum":
            v = "cambridge contemporary art"
        if v == "cafe jello museum":
            v = "cafe jello gallery"
        if v == "whippple museum":
            v = "whipple museum of the history of science"
        if v == "st christs college":
            v = "christ s college"
        if v == "abc theatre":
            v = "adc theatre"
        if d == "train" and v == "london":
            v = "london kings cross"
        if v == "the castle galleries":
            v = "castle galleries"
        if v == "cafe jello":
            v = "cafe jello gallery"
        if v == "cafe uno":
            v = "caffe uno"
        if v == "el shaddia guesthouse":
            v = "el shaddai"
        if v == "kings college":
            v = "king s college"
        if v == "saint johns college":
            v = "saint john s college"
        if v == "kettles yard":
            v = "kettle s yard"
        if v == "grafton hotel":
            v = "grafton hotel restaurant"
        if v == "churchills college":
            v = "churchill college"
        if v == "the churchill college":
            v = "churchill college"
        if v == "portugese":
            v = "portuguese"
        if v == "lensfield hotel":
            v = "the lensfield hotel"
        if v == "rosas bed and breakfast":
            v = "rosa s bed and breakfast"
        if v == "pizza hut fenditton":
            v = "pizza hut fen ditton"
        if v == "great saint marys church":
            v = "great saint mary s church"
        if v == "alimentum":
            v = "restaurant alimentum"
        if v == "cow pizza kitchen and bar":
            v = "the cow pizza kitchen and bar"
        if v == "shiraz":
            v = "shiraz restaurant"
        if v == "cherry hinton village centre":
            v = "the cherry hinton village centre"
        if v == "christ college":
            v = "christ s college"
        if v == "peoples portraits exhibition at girton college":
            v = "people s portraits exhibition at girton college"
        if v == "saint catharines college":
            v = "saint catharine s college"
        if v == "the maharajah tandoor":
            v = "maharajah tandoori restaurant"
        if v == "efes":
            v = "efes restaurant"
        if v == "the gonvile hotel":
            v = "gonville hotel"
        if v == "abbey pool":
            v = "abbey pool and astroturf pitch"
        if v == "the cambridge arts theatre":
            v = "cambridge arts theatre"
        if v == "sheeps green and lammas land park fen causeway":
            v = "sheep s green and lammas land park fen causeway"
        if v == "lensfield hotel":
            v = "the lensfield hotel"
        if v == "rosas bed and breakfast":
            v = "rosa s bed and breakfast"
        if v == "little saint marys church":
            v = "little saint mary s church"
        if v == "cambridge punter":
            v = "the cambridge punter"
        if v == "pizza hut":
            v = "pizza hut city centre"
        if v == "good luck":
            v = "the good luck chinese food takeaway"
        if v == "lucky star":
            v = "the lucky star"
        if v == "cambridge contemporary art museum":
            v = "cambridge contemporary art"
        if v == "cow pizza kitchen and bar":
            v = "the cow pizza kitchen and bar"
        if v == "river bar steakhouse and grill":
            v = "the river bar steakhouse and grill"
        if v == "chiquito":
            v = "chiquito restaurant bar"
        if v == "king hedges learner pool":
            v = "kings hedges learner pool"
        # --- domain-independent value / slot-name normalizations ---
        if v == "dontcare":
            v = "dont care"
        if v == "does not care":
            v = "dont care"
        if v == "corsican":
            v = "corsica"
        if v == "barbeque":
            v = "barbecue"
        if v == "center":
            v = "centre"
        if v == "east side":
            v = "east"
        if s == "pricerange":
            s = "price range"
        if s == "price range" and v == "mode":
            v = "moderate"
        if v == "not mentioned":
            v = ""
        if v == "thai and chinese": # only one such type, throw away
            v = "chinese"
        if s == "area" and v == "n":
            v = "north"
        if s == "price range" and v == "ch":
            v = "cheap"
        if v == "moderate -ly":
            v = "moderate"
        if s == "area" and v == "city center":
            v = "centre"
        if s == "food" and v == "sushi": # sushi only appear once in the training dataset. doesnt matter throw it away or not
            v = "japanese"
        # --- venue-name aliases (mostly restaurants) ---
        if v == "oak bistro":
            v = "the oak bistro"
        if v == "golden curry":
            v = "the golden curry"
        if v == "meze bar restaurant":
            v = "meze bar"
        if v == "golden house golden house":
            v = "golden house"
        if v == "missing sock":
            v = "the missing sock"
        if v == "the yippee noodle bar":
            v = "yippee noodle bar"
        if v == "fitzbillies":
            v = "fitzbillies restaurant"
        if v == "slug and lettuce":
            v = "the slug and lettuce"
        if v == "copper kettle":
            v = "the copper kettle"
        if v == "city stop":
            v = "city stop restaurant"
        if v == "cambridge lodge":
            v = "cambridge lodge restaurant"
        if v == "ian hong house":
            v = "lan hong house"
        if v == "lan hong":
            v = "lan hong house"
        if v == "hotpot":
            v = "the hotpot"
        if v == "the dojo noodle bar":
            v = "dojo noodle bar"
        if v == "cambridge chop house":
            v = "the cambridge chop house"
        if v == "nirala":
            v = "the nirala"
        if v == "gardenia":
            v = "the gardenia"
        if v == "the americas":
            v = "americas"
        if v == "guest house":
            v = "guesthouse"
        if v == "margherita":
            v = "la margherita"
        if v == "gonville":
            v = "gonville hotel"
        if s == "parking" and v == "free":
            v = "yes"
        # --- hotel-name spelling fixes ---
        if d == "hotel" and s == "name":
            if v == "acorn" or v == "acorn house":
                v = "acorn guest house"
            if v == "cambridge belfry":
                v = "the cambridge belfry"
            if v == "huntingdon hotel":
                v = "huntingdon marriott hotel"
            if v == "alexander":
                v = "alexander bed and breakfast"
            if v == "lensfield hotel":
                v = "the lensfield hotel"
            if v == "university arms":
                v = "university arms hotel"
            if v == "city roomz":
                v = "cityroomz"
            if v == "ashley":
                v = "ashley hotel"
        # --- train station-name fixes ---
        if d == "train":
            if s == "destination" or s == "departure":
                if v == "bishop stortford":
                    v = "bishops stortford"
                if v == "bishops storford":
                    v = "bishops stortford"
                if v == "birmingham":
                    v = "birmingham new street"
                if v == "stansted":
                    v = "stansted airport"
                if v == "leicaster":
                    v = "leicester"
        # --- attraction-name fixes ---
        if d == "attraction":
            if v == "cambridge temporary art":
                v = "contemporary art museum"
            if v == "cafe jello":
                v = "cafe jello gallery"
            if v == "fitzwilliam" or v == "fitzwilliam museum":
                v = "the fitzwilliam museum"
            if v == "contemporary art museum":
                v = "cambridge contemporary art"
            if v == "man on the moon":
                v = "the man on the moon"
            if v == "christ college":
                v = "christ s college"
            if v == "old school":
                v = "old schools"
            if v == "cambridge punter":
                v = "the cambridge punter"
            if v == "queen s college":
                v = "queens college"
            if v == "all saint s church":
                v = "all saints church"
            if v == "fez club":
                v = "the fez club"
            if v == "parkside":
                v = "parkside pools"
            if v == "saint john s college .":
                v = "saint john s college"
            if v == "the mumford theatre":
                v = "mumford theatre"
            if v == "corn cambridge exchange":
                v = "the cambridge corn exchange"
        # --- taxi departure/destination fixes (stations, venues, hotels) ---
        if d == "taxi":
            if v == "london kings cross train station":
                v = "london kings cross"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "junction theatre":
                v = "the junction"
            if v == "bishops stortford train station":
                v = "bishops stortford"
            if v == "cambridge train station":
                v = "cambridge"
            if v == "citiroomz":
                v = "cityroomz"
            if v == "london liverpool street train station":
                v = "london liverpool street"
            if v == "norwich train station":
                v = "norwich"
            if v == "kings college":
                v = "king s college"
            if v == "the ghandi" or v == "ghandi":
                v = "the gandhi"
            if v == "ely train station":
                v = "ely"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "peterborough train station":
                v = "peterborough"
            if v == "london kings cross train station":
                v = "london kings cross"
            if v == "kings lynn train station":
                v = "kings lynn"
            if v == "stansted airport train station":
                v = "stansted airport"
            if v == "acorn house":
                v = "acorn guest house"
            if v == "queen s college":
                v = "queens college"
            if v == "leicester train station":
                v = "leicester"
            if v == "the gallery at 12":
                v = "gallery at 12 a high street"
            if v == "caffee uno":
                v = "caffe uno"
            if v == "stevenage train station":
                v = "stevenage"
            if v == "finches":
                v = "finches bed and breakfast"
            if v == "broxbourne train station":
                v = "broxbourne"
            if v == "country folk museum":
                v = "cambridge and county folk museum"
            if v == "ian hong":
                v = "lan hong house"
            if v == "the byard art museum":
                v = "byard art"
            if v == "cambridge belfry":
                v = "the cambridge belfry"
            if v == "birmingham new street train station":
                v = "birmingham new street"
            if v == "man on the moon concert hall":
                v = "the man on the moon"
            if v == "st . john s college":
                v = "saint john s college"
            if v == "st johns chop house":
                v = "saint johns chop house"
            if v == "fitzwilliam museum":
                v = "the fitzwilliam museum"
            if v == "cherry hinton village centre":
                v = "the cherry hinton village centre"
            if v == "maharajah tandoori restaurant4":
                v = "maharajah tandoori restaurant"
            if v == "the soul tree":
                v = "soul tree nightclub"
            if v == "cherry hinton village center":
                v = "the cherry hinton village centre"
            if v == "aylesbray lodge":
                v = "aylesbray lodge guest house"
            if v == "the alexander bed and breakfast":
                v = "alexander bed and breakfast"
            if v == "shiraz .":
                v = "shiraz restaurant"
            if v == "tranh binh":
                v = "thanh binh"
            if v == "riverboat georginawd":
                v = "riverboat georgina"
            if v == "lovell ldoge":
                v = "lovell lodge"
            if v == "alyesbray lodge hotel":
                v = "aylesbray lodge guest house"
            if v == "wandlebury county park":
                v = "wandlebury country park"
            if v == "the galleria":
                v = "galleria"
            if v == "cambridge artw2orks":
                v = "cambridge artworks"
        # Drop slots from domains we are not exporting.
        if d not in domains_keep:
            continue
        res[d]["semi"][s] = v
    return res
def utt_format(utt):
    """Normalize spelling variants in an utterance (barbeque/center)."""
    for variant, canonical in (("barbeque", "barbecue"), ("center", "centre")):
        utt = utt.replace(variant, canonical)
    return utt
def process(file_path, is_training=False):
    """Load one TRADE-format split and convert every kept dialog.

    Dialogs touching none of the domains in ``domains_keep`` are dropped.
    When ``is_training`` is true and ``data_ratio`` != 100, the dialogs are
    shuffled with a fixed seed and truncated to that percentage.
    Each turn's utterances go through utt_format() and its belief state
    through fix_general_label_error() + bs_format().
    """
    with open(file_path) as fp:
        raw_dialogs = json.load(fp)
    if is_training and data_ratio != 100:
        random.Random(10).shuffle(raw_dialogs)
        raw_dialogs = raw_dialogs[:int(len(raw_dialogs) * 0.01 * data_ratio)]
    converted = []
    for dialog in raw_dialogs:
        # Keep the dialog only if at least one of its domains is wanted.
        if not any(domain in domains_keep for domain in dialog["domains"]):
            continue
        turns = []
        for turn_info in dialog["dialogue"]:
            cleaned_bs = fix_general_label_error(turn_info["belief_state"], False)
            turns.append({
                "transcript": utt_format(turn_info["transcript"]),
                "system_transcript": utt_format(turn_info["system_transcript"]),
                "belief_state": bs_format(cleaned_bs),
            })
        converted.append({
            "dialogue_idx": dialog["dialogue_idx"],
            "dialogue": turns,
        })
    return converted
# Write each processed split; use context managers so every handle is
# flushed and closed (the originals were opened and never closed).
# train
train_dialogs = process(train_file_path, True)
with open(os.path.join(output_file_path, "./train.json"), "w") as ofp:
    ofp.write(json.dumps(train_dialogs, indent=2))
# dev
dev_dialogs = process(dev_file_path)
with open(os.path.join(output_file_path, "./dev.json"), "w") as ofp:
    ofp.write(json.dumps(dev_dialogs, indent=2))
# test
test_dialogs = process(test_file_path)
with open(os.path.join(output_file_path, "./test.json"), "w") as ofp:
    ofp.write(json.dumps(test_dialogs, indent=2))
# prediction. same as test, but one instance per line
with open(os.path.join(output_file_path, "./prediction.json"), "w") as ofp:
    for dialog in test_dialogs:
        ofp.write(json.dumps(dialog))
        ofp.write("\n")
| 18,246 | 35.567134 | 171 |
py
|
dstqa
|
dstqa-master/calc_elmo_embeddings.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
# pre-calculate elmo embeddings of each sentence in each dialog
import sys
import pdb
import json
import pickle
from tqdm import tqdm
from allennlp.modules.elmo import Elmo, batch_to_ids
from allennlp.data.tokenizers import WordTokenizer, Token
from allennlp.data.tokenizers.word_splitter import SpacyWordSplitter
# Usage: python calc_elmo_embeddings.py <data_dir> <train|dev|test> <output_dir>
base_path = sys.argv[1]
train_data_path = base_path + "/train.json"
dev_data_path = base_path + "/dev.json"
test_data_path = base_path + "/test.json"
data_paths = {"train": train_data_path, "dev": dev_data_path, "test": test_data_path}
# Split selected on the command line; output file is named after the split.
data_path = data_paths[sys.argv[2]]
output_path = sys.argv[3] + "/" + sys.argv[2]
# Pre-trained 2x2048 ELMo model hosted by AllenNLP.
options_file = "https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x2048_256_2048cnn_1xhighway/elmo_2x2048_256_2048cnn_1xhighway_options.json"
weight_file = "https://s3-us-west-2.amazonaws.com/allennlp/models/elmo/2x2048_256_2048cnn_1xhighway/elmo_2x2048_256_2048cnn_1xhighway_weights.hdf5"
def read_dataset(file_path):
    """Tokenize each dialog into one flat token-text sequence.

    Turns are concatenated as system + user utterances; "<S>" is prepended
    to every system utterance except the first turn's, and "</S>" is
    appended to every user utterance except the last turn's.

    Returns a list of (dialogue_idx, [token_texts]) tuples.
    """
    tokenizer = WordTokenizer(word_splitter=SpacyWordSplitter())
    with open(file_path) as dataset_file:
        dataset_json = json.load(dataset_file)
    dialogs = []
    for entry in dataset_json:
        dialog_idx = entry["dialogue_idx"]
        turns = entry["dialogue"]
        context = None
        for turn_i, turn in enumerate(turns):
            sys_tokens = tokenizer.tokenize(turn["system_transcript"])
            if turn_i != 0:
                # Boundary marker before every system turn after the first.
                sys_tokens = [Token(text="<S>", lemma_="<S>")] + sys_tokens
            user_tokens = tokenizer.tokenize(turn["transcript"])
            if turn_i != len(turns) - 1:
                # Boundary marker after every user turn except the last.
                user_tokens = user_tokens + [Token(text="</S>", lemma_="</S>")]
            if context is None:
                context = sys_tokens + user_tokens
            else:
                context += sys_tokens + user_tokens
        dialogs.append((dialog_idx, [[t.text for t in context]]))
    return dialogs
def calc_elmo_embeddings(elmo, dialog):
    """Return one embedding tensor per ELMo layer for a single dialog.

    Each of the three modules in ``elmo`` mixes in exactly one biLM layer
    (char-CNN and the two BiLSTM outputs); results are moved back to CPU.
    """
    # batch_to_ids converts the token sequences to character ids.
    character_ids = batch_to_ids(dialog).cuda()
    dialog_embeddings = []
    for layer_idx in range(3):
        layer_out = elmo[layer_idx](character_ids)
        representation = layer_out['elmo_representations'][0].squeeze(0)
        dialog_embeddings.append(representation.cpu())
    return dialog_embeddings
#https://github.com/allenai/allennlp/blob/master/tutorials/how_to/elmo.md
#After loading the pre-trained model, the first few batches will be negatively impacted until the biLM can reset its internal states. You may want to run a few batches through the model to warm up the states before making predictions (although we have not worried about this issue in practice).
def elmo_warm_up(elmo, dialog):
    """Run 20 forward passes per ELMo module so the biLM internal states
    settle before real predictions (see the AllenNLP note above)."""
    ids = batch_to_ids(dialog).cuda()
    for layer_idx in range(3):
        module = elmo[layer_idx]
        for _ in range(20):
            module(ids)
# One Elmo module per biLM layer: scalar_mix_parameters puts all the weight
# on a single layer (char-CNN, BiLSTM 1, BiLSTM 2 respectively).
elmo = [None] * 3
elmo[0] = Elmo(options_file, weight_file, 1, dropout=0, scalar_mix_parameters=[1.0, 0, 0]).cuda()
elmo[1] = Elmo(options_file, weight_file, 1, dropout=0, scalar_mix_parameters=[0, 1.0, 0]).cuda()
elmo[2] = Elmo(options_file, weight_file, 1, dropout=0, scalar_mix_parameters=[0, 0, 1.0]).cuda()
dialogs = read_dataset(data_path)
# Warm up the biLM states on the first dialog before real inference.
elmo_warm_up(elmo, dialogs[0][1])
# dialogue_idx -> list of three per-layer embedding tensors.
dialog_embeddings = {}
for dialog_idx, dialog in tqdm(dialogs):
    dialog_embedding = calc_elmo_embeddings(elmo, dialog)
    dialog_embeddings[dialog_idx] = dialog_embedding
with open(output_path, 'wb') as handle:
    pickle.dump(dialog_embeddings, handle)
| 4,043 | 42.956522 | 294 |
py
|
dstqa
|
dstqa-master/formulate_pred_belief_state.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
# formulate pred generated by predictor
import sys
import json
import pdb
from copy import deepcopy
def read_domain_slot_list(filename):
    """Read the domain/slot definition file and return the tracked slots.

    Each non-comment, non-blank line is tab-separated:
    ``domain<TAB>slot<TAB>type<TAB>skip-flag<TAB>...``.  A slot is kept only
    when the skip flag (column 3) equals "n".

    Returns a list of "domain slot" strings in file order.
    """
    with open(filename) as fp:
        lines = fp.readlines()
    domain_slots = []
    for line in lines:
        # Strip the trailing newline/whitespace up front (matching the model's
        # copy of this parser): without it a flag sitting in the last column
        # compares as "n\n" != "n" and the slot is silently dropped.
        line = line.strip("\n ")
        if line.startswith("#"):
            continue
        if len(line) == 0:
            continue
        line_arr = line.split("\t")
        ds = line_arr[0] + " " + line_arr[1]
        if line_arr[3] == "n":
            domain_slots.append(ds)
    return domain_slots
# Parse the predictor's log file (path given as argv[1]).  The log interleaves
# "input: {...}" lines (gold dialog JSON) with "prediction: {...}" lines
# (model output JSON); each prediction is paired with the most recent input.
fp = open(sys.argv[1])
lines = fp.readlines()
dialogs = []
js = {}
for line in lines:
    line = line.strip("\n")
    # Ignore everything that is neither an input nor a prediction record.
    if line[:5] != "input" and line[:10] != "prediction":
        continue
    if line[:5] == "input":
        # The text after the first ":" is the JSON payload of the gold dialog.
        js = json.loads(line[line.find(":")+1:])
    if line[:10] == "prediction":
        prediction = json.loads(line[line.find(":")+1:])
        dialogs.append((js, prediction))
def calc_pred_belief_state(prediction, ds_list, ontology):
    """Convert predictor output into per-turn, sorted "slot:value" lists.

    Every in-ontology slot starts at "none"; predicted values overwrite the
    default.  Only the first ":" in a prediction string separates slot from
    value, because the value itself may contain ":".
    """
    turn_bs = []
    for turn in prediction["predicted_labels"]:
        # Default every tracked slot to "none", then apply the predictions.
        cur_bs = {ds: "none" for ds in ds_list if ds in ontology}
        for slot_value in turn:
            sep = slot_value.find(":")
            slot, value = slot_value[:sep], slot_value[sep + 1:]
            if slot in ontology:
                cur_bs[slot] = value
        turn_bs.append(sorted(k + ":" + v for k, v in cur_bs.items()))
    return turn_bs
def calc_acc(true_labels, pred_labels):
    """Joint accuracy over turns: a turn counts as correct only when its
    entire (sorted) belief state matches the gold state exactly.

    Returns ``(accuracy, wrong_dialog)`` where ``wrong_dialog`` lists indices
    of dialogs containing at least one erroneous turn.
    """
    assert len(true_labels) == len(pred_labels)
    total_turn = 0.0
    err_turn = 0.0
    wrong_dialog = []
    for d, (true_dialog, pred_dialog) in enumerate(zip(true_labels, pred_labels)):
        assert len(true_dialog) == len(pred_dialog)
        dialog_errors = 0
        for true_turn, pred_turn in zip(true_dialog, pred_dialog):
            total_turn += 1
            # List equality covers both a length mismatch and any differing
            # slot:value entry, i.e. the turn is wrong unless it matches fully.
            if true_turn != pred_turn:
                err_turn += 1
                dialog_errors += 1
        if dialog_errors:
            wrong_dialog.append(d)
    return (total_turn - err_turn) / total_turn, wrong_dialog
# Build the gold ("true") belief states from the input dialogs and the
# predicted belief states from the model output, then score joint accuracy.
ds_list = read_domain_slot_list("./ontology/domain_slot_list_nosp.txt")
ontology = set(ds_list)
true_labels = []
pred_labels = []
for dialog, prediction in dialogs:
    dialog_bs = []
    for turn in dialog["dialogue"]:
        turn_bs = []
        # Track which tracked slots were not mentioned in this turn so they
        # can be filled in as "none" afterwards.
        ds_set = set(ds_list)
        for domain, v in turn["belief_state"].items():
            for slot, slot_value in v["semi"].items():
                ds = domain + " " + slot
                if ds not in ontology:
                    continue
                # An empty gold value means the slot is unset.
                if slot_value == "": slot_value = "none"
                turn_bs.append(domain + " " + slot + ":" + slot_value)
                ds_set.remove(domain + " " + slot)
        for ds in ds_set:
            if ds not in ontology: continue
            turn_bs.append(ds+":"+"none")
        # Sort so the elementwise comparison with predictions is
        # order-independent (calc_pred_belief_state sorts too).
        turn_bs = sorted(turn_bs)
        dialog_bs.append(turn_bs)
    true_labels.append(dialog_bs)
    pred_labels.append(calc_pred_belief_state(prediction, ds_list, ontology))
acc, wrong_dialogs = calc_acc(true_labels, pred_labels)
print(acc)
# Debugging aid: dump the dialogs whose belief state was predicted wrongly.
# for i in wrong_dialogs:
#   print(dialogs[i][0])
#   #print(dialogs[i][1])
#   print(true_labels[i])
#   print(pred_labels[i])
#   print()
| 3,511 | 27.552846 | 75 |
py
|
dstqa
|
dstqa-master/dstqa/dstqa.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
import pdb
import math
import logging
import os.path
import pickle
import random
from typing import Any, Dict, List
from overrides import overrides
import numpy as np
import torch
import torch.nn.functional as F
from torch.nn.functional import nll_loss
from torch.nn import BCEWithLogitsLoss
from torch.nn import CrossEntropyLoss
from allennlp.data.token_indexers import SingleIdTokenIndexer, TokenIndexer
from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer
from allennlp.data.fields import Field, TextField, ArrayField
from allennlp.common.checks import check_dimensions_match
from allennlp.data import Token, Vocabulary, Instance
from allennlp.data.dataset import Batch
from allennlp.models.model import Model
from allennlp.modules import Seq2SeqEncoder, TimeDistributed, TokenEmbedder, TextFieldEmbedder, FeedForward, ScalarMix
from allennlp.modules.input_variational_dropout import InputVariationalDropout
from allennlp.modules.matrix_attention.linear_matrix_attention import LinearMatrixAttention
from allennlp.modules.seq2seq_encoders.pytorch_seq2seq_wrapper import PytorchSeq2SeqWrapper
from allennlp.modules.layer_norm import LayerNorm
from allennlp.nn import Activation
from allennlp.nn import InitializerApplicator, util
from allennlp.nn.util import logsumexp
from allennlp.tools import squad_eval
from allennlp.training.metrics import Average, BooleanAccuracy, CategoricalAccuracy
from allennlp.modules.elmo import batch_to_ids as elmo_batch_to_ids
from allennlp.modules.elmo import Elmo
from .accuracy import Accuracy
from . import dstqa_util
logger = logging.getLogger(__name__)
@Model.register("dstqa")
class DSTQA(Model):
    """Dialog state tracking as question answering (DSTQA).

    The dialog is encoded with ELMo (or word) embeddings plus character
    embeddings; attention runs between the dialog tokens and domain-slot
    (-value) embeddings.  Each slot is predicted either by classification
    over a candidate value list or by span extraction over the dialog, and
    predictions can optionally be propagated between turns through a slot
    graph (``use_graph``).
    """

    def __init__(self, vocab: Vocabulary,
                 base_dim,
                 loss_scale_by_num_values,
                 use_pre_calc_elmo_embeddings,
                 elmo_embedding_path,
                 domain_slot_list_path,
                 word_embeddings,
                 token_indexers: Dict[str, TokenIndexer],
                 text_field_embedder: TextFieldEmbedder,
                 text_field_char_embedder: TextFieldEmbedder,
                 symbol_embedder: TextFieldEmbedder,
                 phrase_layer: Seq2SeqEncoder,
                 class_prediction_layer: FeedForward,
                 span_prediction_layer: FeedForward,
                 span_start_encoder: FeedForward,
                 span_end_encoder: FeedForward,
                 span_label_predictor: FeedForward,
                 initializer: InitializerApplicator,
                 use_graph,
                 bi_dropout: float = 0.2,
                 dropout: float = 0.2) -> None:
        """Wire up embedders, attention modules, prediction heads, and load
        the domain/slot metadata and per-slot value vocabularies."""
        super().__init__(vocab)
        self._is_in_training_mode = False
        self._loss_scale_by_num_values = loss_scale_by_num_values
        self._use_pre_calc_elmo_embeddings = use_pre_calc_elmo_embeddings
        self._word_embeddings = word_embeddings
        self._is_use_elmo = True if self._word_embeddings == "elmo" else False
        self._is_use_graph = use_graph
        # Pre-computed ELMo layer embeddings are only loaded (and mixed)
        # when both ELMo and pre-calculation are enabled.
        if self._is_use_elmo and use_pre_calc_elmo_embeddings:
            self._dialog_elmo_embeddings = self.load_elmo_embeddings(elmo_embedding_path)
            self._dialog_scalar_mix = ScalarMix(mixture_size = 3, trainable=True)
        self._domains, self._ds_id2text, self._ds_text2id, self.value_file_path, \
            self._ds_type, self._ds_use_value_list, num_ds_use_value, self._ds_masked \
            = self.read_domain_slot_list(domain_slot_list_path)
        self._value_id2text, self._value_text2id = self.load_value_list(domain_slot_list_path)
        self._span_id2text, self._class_id2text = dstqa_util.gen_id2text(self._ds_id2text, self._ds_type)
        self._token_indexers = token_indexers
        self._text_field_embedder = text_field_embedder
        self._text_field_char_embedder = text_field_char_embedder
        self._symbol_embedder = symbol_embedder
        # Bilinear attentions between domain-slot(-value) embeddings and the
        # dialog representation ('x,y,x*y' combination).
        self._ds_dialog_attention = LinearMatrixAttention(base_dim, base_dim, 'x,y,x*y')
        self._dialog_dsv_attention = LinearMatrixAttention(base_dim, base_dim, 'x,y,x*y')
        self._dsv_dialog_attention = LinearMatrixAttention(base_dim, base_dim, 'x,y,x*y')
        self._ds_attention = LinearMatrixAttention(base_dim, base_dim, 'x,y,x*y')
        self._dsv_attention = LinearMatrixAttention(base_dim, base_dim, 'x,y,x*y')
        self._agg_value = torch.nn.Linear(base_dim, base_dim)
        self._agg_nodes = torch.nn.Linear(base_dim, base_dim)
        # Gate used to blend graph-propagated slot info into the slot repr.
        self._graph_gamma = torch.nn.Linear(base_dim, 1)
        self._class_prediction_layer = class_prediction_layer
        self._span_prediction_layer = span_prediction_layer
        self._span_label_predictor = span_label_predictor
        self._span_start_encoder = span_start_encoder
        self._span_end_encoder = span_end_encoder
        self._phrase_layer = phrase_layer
        # -1 labels (e.g. span-typed slots in the class-label tensor) are
        # ignored by the loss.
        self._cross_entropy = CrossEntropyLoss(ignore_index=-1)
        self._accuracy = Accuracy(self._ds_id2text, self._ds_type)
        self._dropout = torch.nn.Dropout(dropout)
        self._bi_dropout = torch.nn.Dropout(bi_dropout)
        # Dropout reused as a Bernoulli sampler for time-step masking.
        self._dropout2 = torch.nn.Dropout(0.1)
        self._sigmoid = torch.nn.Sigmoid()
        initializer(self)

    def load_elmo_embeddings(self, elmo_embedding_path):
        """Load the pickled pre-computed ELMo embeddings for all three data
        splits into a single ``{dialog_idx: [layer tensors]}`` dict."""
        elmo_embeddings = {}
        for suffix in ["train", "dev", "test"]:
            with open(elmo_embedding_path + suffix, "rb") as fp:
                elmo_embeddings.update(pickle.load(fp))
        return elmo_embeddings

    def gen_utt_masks(self, turn_offset, batch_size, max_turn_count, max_dialog_len):
        """Build boolean token masks per (batch, turn).

        Returns ``(masks, cmasks)``: ``masks[b, t]`` marks every token up to
        the end of turn t (the full context so far), ``cmasks[b, t]`` marks
        only the tokens of turn t itself.
        """
        masks = torch.arange(0, max_dialog_len).unsqueeze(0).unsqueeze(0).cuda()
        masks = masks.repeat(batch_size, max_turn_count, 1)
        repeated_turn_offset = turn_offset.unsqueeze(2).repeat(1, 1, max_dialog_len)
        masks = masks < repeated_turn_offset
        # two types of masks: (1) all previous and current utt are marked as 1, (2) only current utt are marked as 1
        bmasks = masks.clone().detach()
        bmasks = (~bmasks)[:, :-1, :]
        cmasks = masks.clone().detach()
        cmasks[:, 1:, :] = cmasks[:, 1:, :] & bmasks
        return masks, cmasks

    def mix_dialog_embeddings(self, dialog_indices):
        """Scalar-mix the three pre-computed ELMo layers for each dialog in
        the batch and right-pad with zeros to the batch's max length."""
        dialog_embeddings = []
        max_dialog_len = 0
        for idx in dialog_indices:
            elmo_embeddings_cuda = []
            for v in self._dialog_elmo_embeddings[idx]:
                elmo_embeddings_cuda.append(v.cuda())
            dialog_embeddings.append(self._dialog_scalar_mix(elmo_embeddings_cuda))
            if max_dialog_len < dialog_embeddings[-1].size(0):
                max_dialog_len = dialog_embeddings[-1].size(0)
        for i, e in enumerate(dialog_embeddings):
            pad = torch.zeros(max_dialog_len - e.size(0), e.size(1)). cuda()
            dialog_embeddings[i] = torch.cat((e, pad), dim=0)
        dialog_embeddings = torch.stack(dialog_embeddings, dim=0)
        return dialog_embeddings

    def mask_time_step(self, dialogs, dialog_masks):
        """In-place word dropout: randomly replace whole time steps with a
        mask token and overwrite padding positions with the pad token.

        NOTE(review): dropout2 on an all-ones tensor zeroes ~10% of entries
        (the rest become 1/0.9), so ``masks < 0.5`` selects the dropped
        positions.  Ids 259/260 look like ELMo's begin/end-of-word character
        ids — confirm against the char vocabulary.
        """
        batch_size, max_dialog_len, max_char_len = dialogs['token_characters'].size()
        masks = self._dropout2(torch.ones(batch_size, max_dialog_len))
        masks = masks < 0.5
        char_masked = torch.tensor([259, 260] + [0] * (max_char_len - 2)).cuda()
        char_padded = torch.tensor([0] * max_char_len).cuda()
        dialogs["token_characters"][masks] = char_masked
        dialogs["token_characters"][dialog_masks == 0] = char_padded
        if "tokens" in dialogs:
            dialogs["tokens"][masks] = 1 # 1 is the index for unknown
            dialogs["tokens"][dialog_masks == 0] = 0
        if "elmo" in dialogs:
            elmo_masked = torch.tensor([259, 260] + [261] * (50 - 2)).cuda()
            elmo_padded = torch.tensor([0] * 50).cuda()
            dialogs["elmo"][masks] = elmo_masked
            dialogs["elmo"][dialog_masks == 0] = elmo_padded

    def forward(self, dialogs, tags, utt_lens, exact_match, dialog_indices, epoch_num = None,
                labels=None, spans_start=None, spans_end=None, metadata=None, span_labels=None):
        """Encode the dialog and predict every slot for every turn.

        Returns a dict with "predictions" (per turn: a pair of
        classification / span prediction dicts keyed by slot name),
        "metadata", and — in training mode — "loss".
        """
        self._is_in_training_mode = self.training
        # dialog embeddings
        batch_size, max_dialog_len, _ = dialogs['token_characters'].size()
        dialog_masks = util.get_text_field_mask(dialogs, num_wrapping_dims=0)
        self.mask_time_step(dialogs, dialog_masks)
        char_embedder_input = {'token_characters':dialogs['token_characters']}
        dialog_char_embeddings = self._text_field_char_embedder(char_embedder_input, num_wrapping_dims=0)
        if self._is_use_elmo:
            if self._use_pre_calc_elmo_embeddings == False:
                elmo_embedder_input = {'elmo':dialogs['elmo']}
                dialog_elmo_embeddings = self._text_field_embedder(elmo_embedder_input, num_wrapping_dims=0)
                dialog_embeddings = torch.cat((dialog_elmo_embeddings, dialog_char_embeddings), dim = 2)
            else:
                dialog_elmo_embeddings = self.mix_dialog_embeddings(dialog_indices)
                dialog_embeddings = torch.cat((dialog_elmo_embeddings, dialog_char_embeddings), dim=2)
        else:
            embedder_input = {'tokens':dialogs['tokens']}
            dialog_elmo_embeddings = self._text_field_embedder(embedder_input, num_wrapping_dims=0)
            dialog_embeddings = torch.cat((dialog_elmo_embeddings, dialog_char_embeddings), dim = 2)
        # SYS/USER speaker-tag embeddings.
        tag_embeddings = self._symbol_embedder(tags, num_wrapping_dims=0)
        turn_offset = torch.cumsum(utt_lens, dim=1)
        max_turn_count = utt_lens.size(1)
        context_masks, utt_masks = self.gen_utt_masks(turn_offset, batch_size, max_turn_count, max_dialog_len)
        # dsv embeddings
        ds_embeddings, v_embeddings = self.get_dsv_embeddings()
        # phrase layer
        merged_dialog_embeddings = torch.cat((dialog_embeddings, tag_embeddings, exact_match), dim=2)
        total_loss = 0.0
        predictions = []
        if self._is_in_training_mode == True: # only train one turn per batch, for GPU memory limits
            sampled_turn = random.choice(list(range(max_turn_count)))
        for turn_i in range(max_turn_count):
            predictions.append(({}, {}))
            # Without the graph, only the sampled turn needs to be computed;
            # with the graph, earlier turns must still run (in eval mode) so
            # their predictions can feed the graph at the sampled turn.
            if self._is_in_training_mode == True and self._is_use_graph == False:
                if turn_i != sampled_turn:
                    continue
            if self._is_in_training_mode == True:
                if turn_i < sampled_turn:
                    self.set_module_to_eval()
                if turn_i > sampled_turn: break
            # compute new domain slot embeddings
            attention_ds_embeddings = None
            if turn_i > 0 and self._is_use_graph:
                attention_ds_embeddings = self.ds_graph_embeddings(batch_size, predictions[turn_i - 1], ds_embeddings, v_embeddings)
            repeated_ds_embeddings = ds_embeddings.unsqueeze(0).repeat(batch_size, 1, 1)
            reduced_dialog_masks = self._phrase_layer(self._dropout(merged_dialog_embeddings), context_masks[:, turn_i, :])
            span_ds_i = 0
            for ds_i, ds_name in enumerate(self._ds_id2text):
                cur_repeated_ds_embeddings = repeated_ds_embeddings[:, ds_i, :].unsqueeze(1)
                cur_context_masks = context_masks[:, turn_i, :]
                if self._ds_type[ds_name] == "classification":
                    cur_labels = labels[:, turn_i, ds_i]
                    cur_v_embeddings = v_embeddings[ds_name]
                    loss, prediction = self.forward_classification(ds_name, reduced_dialog_masks, cur_repeated_ds_embeddings, cur_v_embeddings, cur_context_masks, cur_labels, attention_ds_embeddings)
                    predictions[turn_i][0][ds_name] = prediction
                    if self._loss_scale_by_num_values:
                        loss = loss * max(1.0, math.log(cur_v_embeddings.size(0)))
                elif self._ds_type[ds_name] == "span":
                    cur_span_labels = span_labels[:, turn_i, span_ds_i]
                    cur_spans_start = spans_start[:, turn_i, span_ds_i]
                    cur_spans_end = spans_end[:, turn_i, span_ds_i]
                    loss, prediction = self.forward_span(ds_name, reduced_dialog_masks, cur_repeated_ds_embeddings, cur_context_masks, cur_span_labels, cur_spans_start, cur_spans_end)
                    predictions[turn_i][1][ds_name] = prediction
                    span_ds_i += 1
                # Only the sampled turn's (unmasked) slots contribute loss.
                if self._is_in_training_mode == True and turn_i == sampled_turn:
                    if not self._ds_masked[ds_name]:
                        total_loss += loss
            if self._is_in_training_mode == True:
                if turn_i < sampled_turn:
                    self.set_module_to_train()
        output = {}
        if self._is_in_training_mode == True:
            output["loss"] = total_loss
        output["predictions"] = predictions
        output["metadata"] = metadata
        return output

    def set_module_to_eval(self):
        """Switch to eval mode and disable autograd (used for non-sampled
        turns whose predictions only feed the graph)."""
        self.eval()
        self._phrase_layer.eval()
        self._class_prediction_layer.eval()
        self._span_prediction_layer.eval()
        self._span_start_encoder.eval()
        self._span_end_encoder.eval()
        self._span_label_predictor.eval()
        torch.set_grad_enabled(False)

    def set_module_to_train(self):
        """Undo set_module_to_eval: back to train mode with autograd on."""
        self.train()
        self._phrase_layer.train()
        self._class_prediction_layer.train()
        self._span_prediction_layer.train()
        self._span_start_encoder.train()
        self._span_end_encoder.train()
        self._span_label_predictor.train()
        torch.set_grad_enabled(True)

    def bi_att(self, dialog_embeddings, dsv_embeddings, context_masks):
        """Bidirectional attention between dialog tokens and domain-slot-value
        embeddings; each side is augmented with an attention-weighted summary
        of the other (residual add)."""
        batch_size, max_dialog_len = context_masks.size()
        num_values = dsv_embeddings.size(1)
        dialog_dsv_similarity = self._dialog_dsv_attention(self._bi_dropout(dialog_embeddings), self._bi_dropout(dsv_embeddings))
        # attention on dsv
        dialog_dsv_att = util.masked_softmax(dialog_dsv_similarity.view(-1, num_values), None)
        dialog_dsv_att = dialog_dsv_att.view(batch_size, max_dialog_len, num_values)
        dialog_dsv = util.weighted_sum(dsv_embeddings, dialog_dsv_att)
        new_dialog_embeddings = dialog_embeddings + dialog_dsv
        # attention on dialog
        dsv_dialog_att = util.masked_softmax(dialog_dsv_similarity.transpose(1, 2).contiguous().view(-1, max_dialog_len), context_masks.unsqueeze(1).repeat(1,num_values,1).view(-1, max_dialog_len))
        dsv_dialog_att = dsv_dialog_att.view(batch_size, num_values, max_dialog_len)
        dsv_dialog = util.weighted_sum(dialog_embeddings, dsv_dialog_att)
        new_dsv_embeddings = dsv_embeddings + dsv_dialog
        return new_dsv_embeddings and None or None if False else (new_dialog_embeddings, new_dsv_embeddings)

    def forward_classification(self, ds_name, dialog_repr, ds_embeddings, value_embeddings, context_masks, labels=None, attention_ds_embeddings=None):
        """Score each candidate value for a classification-typed slot.

        Returns ``(loss, argmax prediction)``.
        NOTE(review): the loss is computed before the ``labels is not None``
        check, so labels are effectively required here — confirm callers
        always pass them.
        """
        batch_size, max_dialog_len = context_masks.size()
        num_values = value_embeddings.size(0)
        # Slot embedding broadcast over values, plus each value embedding.
        repeated_dsv_embeddings = ds_embeddings.repeat(1, num_values, 1)
        repeated_dsv_embeddings += value_embeddings.unsqueeze(0).repeat(batch_size, 1, 1)
        dialog_repr, repeated_dsv_embeddings = self.bi_att(dialog_repr, repeated_dsv_embeddings, context_masks)
        ds_dialog_sim = self._ds_dialog_attention(self._bi_dropout(ds_embeddings), self._bi_dropout(dialog_repr))
        ds_dialog_att = util.masked_softmax(ds_dialog_sim.view(-1, max_dialog_len), context_masks.view(-1, max_dialog_len))
        ds_dialog_att = ds_dialog_att.view(batch_size, max_dialog_len)
        ds_dialog_repr = util.weighted_sum(dialog_repr, ds_dialog_att)
        # Blend in graph-propagated slot embeddings through a learned gate.
        if attention_ds_embeddings is not None:
            self_att_matrix = self._ds_attention(self._bi_dropout(ds_dialog_repr.unsqueeze(1)), attention_ds_embeddings)
            self_probs = util.masked_softmax(self_att_matrix, None)
            ret = util.weighted_sum(attention_ds_embeddings, self_probs).squeeze(1)
            gamma = torch.sigmoid(self._graph_gamma(ds_dialog_repr + ret))
            ds_dialog_repr = (1-gamma) * ds_dialog_repr + gamma * ret
        w = self._class_prediction_layer(self._bi_dropout(ds_dialog_repr)).unsqueeze(1)
        logits = torch.bmm(w, repeated_dsv_embeddings.transpose(1,2)).squeeze(1)
        prediction = torch.argmax(logits, dim=1)
        loss = self._cross_entropy(logits.view(-1, num_values), labels.view(-1))
        if labels is not None:
            self._accuracy.value_acc(ds_name, logits, labels, labels != -1)
        return loss, prediction

    def forward_span(self, ds_name, dialog_repr, repeated_ds_embeddings, context_masks, span_labels=None, spans_start = None, spans_end = None):
        """Predict a span-typed slot: first a 3-way label (0=none,
        1=dont care, 2=span), then the span's start/end positions.

        Returns ``(loss, (span_label_prediction, best_span))``.
        """
        batch_size, max_dialog_len = context_masks.size()
        ds_dialog_sim = self._ds_dialog_attention(self._dropout(repeated_ds_embeddings), self._dropout(dialog_repr))
        ds_dialog_att = util.masked_softmax(ds_dialog_sim.view(-1, max_dialog_len), context_masks.view(-1, max_dialog_len))
        ds_dialog_att = ds_dialog_att.view(batch_size, max_dialog_len)
        ds_dialog_repr = util.weighted_sum(dialog_repr, ds_dialog_att)
        ds_dialog_repr = ds_dialog_repr + repeated_ds_embeddings.squeeze(1)
        span_label_logits = self._span_label_predictor(F.relu(self._dropout(ds_dialog_repr)))
        span_label_prediction = torch.argmax(span_label_logits, dim=1)
        span_label_loss = 0.0
        if span_labels is not None:
            span_label_loss = self._cross_entropy(span_label_logits, span_labels) # loss averaged by #turn
            self._accuracy.span_label_acc(ds_name, span_label_logits, span_labels, span_labels != -1)
        loss = span_label_loss
        w = self._span_prediction_layer(self._dropout(ds_dialog_repr)).unsqueeze(1)
        span_start_repr = self._span_start_encoder(self._dropout(dialog_repr))
        span_start_logits = torch.bmm(w, span_start_repr.transpose(1,2)).squeeze(1)
        span_start_probs = util.masked_softmax(span_start_logits, context_masks)
        span_start_logits = util.replace_masked_values(span_start_logits, context_masks.to(dtype=torch.int8), -1e7)
        span_end_repr = self._span_end_encoder(self._dropout(span_start_repr))
        span_end_logits = torch.bmm(w, span_end_repr.transpose(1,2)).squeeze(1)
        span_end_probs = util.masked_softmax(span_end_logits, context_masks)
        span_end_logits = util.replace_masked_values(span_end_logits, context_masks.to(dtype=torch.int8), -1e7)
        best_span = self.get_best_span(span_start_logits, span_end_logits)
        best_span = best_span.view(batch_size, -1)
        spans_loss = 0.0
        if spans_start is not None:
            spans_loss = self._cross_entropy(span_start_logits, spans_start)
            self._accuracy.span_start_acc(ds_name, span_start_logits, spans_start, spans_start != -1)
            spans_loss += self._cross_entropy(span_end_logits, spans_end)
            self._accuracy.span_end_acc(ds_name, span_end_logits, spans_end, spans_end != -1)
        loss += spans_loss
        return loss, (span_label_prediction, best_span)

    @overrides
    def decode(self, output_dict):
        """Turn the raw per-turn prediction tensors into human-readable
        "domain slot:value" strings and store them under "predicted_labels".

        NOTE(review): span text is always read from metadata[0], i.e. this
        assumes batch size 1 at decode time — confirm.
        """
        num_turns = len(output_dict["predictions"])
        class_output = []
        for t in range(num_turns):
            class_predictions = output_dict["predictions"][t][0]
            res = []
            for ds_name, pred in class_predictions.items():
                value = self._value_id2text[ds_name][pred.item()]
                res.append(ds_name+":"+value)
            class_output.append(res)
        span_output = []
        for t in range(num_turns):
            span_predictions = output_dict["predictions"][t][1]
            res = []
            for ds_name, pred in span_predictions.items():
                span_label = pred[0]
                if span_label == 0: value = "none"
                if span_label == 1: value = "dont care"
                if span_label == 2:
                    start, end = pred[1][0][0], pred[1][0][1]
                    value = " ".join([output_dict["metadata"][0][i].text for i in range(start, end+1)])
                    value = value.lower()
                res.append(ds_name+":" + value)
            span_output.append(res)
        # merge class output and span output
        output = []
        if len(span_output) != 0 and len(class_output) != 0:
            for x, y in zip(class_output, span_output):
                output.append(x + y)
        elif len(span_output) == 0:
            output = class_output
        elif len(class_output) == 0:
            output = span_output
        else:
            assert(False)
        output_dict["predicted_labels"] = [output]
        del output_dict["metadata"]
        del output_dict["predictions"]
        return output_dict

    def get_metrics(self, reset = False):
        """Expose the per-slot accuracy metrics."""
        acc = self._accuracy.get_metrics(reset)
        return acc

    def get_dsv_embeddings(self):
        """Embed every domain-slot name and every candidate value.

        Embeddings are summed over tokens and divided by the token count
        (mean pooling).  Returns ``(ds_embeddings, {slot: value embeddings})``.
        """
        def batch_to_id(batch: List[List[str]]):
            # Tokenize/index a batch of space-separated strings with this
            # model's indexers and move the resulting tensors to the GPU.
            instances = []
            for b in batch:
                tokens = [Token(w) for w in b.split(" ")]
                field = TextField(tokens, self._token_indexers)
                instance = Instance({"b": field})
                instances.append(instance)
            dataset = Batch(instances)
            vocab = self.vocab
            dataset.index_instances(vocab)
            res = {}
            for k, v in dataset.as_tensor_dict()['b'].items():
                res[k] = v.cuda()
            return res
        ds_ids = batch_to_id(self._ds_id2text)
        if 'tokens' in ds_ids:
            elmo_embedder_input = {'tokens':ds_ids['tokens']}
        elif 'elmo' in ds_ids:
            elmo_embedder_input = {'elmo':ds_ids['elmo']}
        ds_elmo_embeddings = self._text_field_embedder(elmo_embedder_input, num_wrapping_dims=0).sum(1)
        char_embedder_input = {'token_characters':ds_ids['token_characters']}
        ds_char_embeddings = self._text_field_char_embedder(char_embedder_input, num_wrapping_dims=0).sum(1)
        ds_embeddings = torch.cat((ds_elmo_embeddings, ds_char_embeddings), dim=1)
        ds_masks = util.get_text_field_mask(ds_ids, num_wrapping_dims=0).sum(1).float()
        ds_embeddings = ds_embeddings / ds_masks.unsqueeze(1).repeat(1, ds_embeddings.size(1))
        v_embeddings = {}
        for v, v_list in self._value_id2text.items():
            v_ids = batch_to_id(v_list)
            if 'tokens' in v_ids:
                elmo_embedder_input = {'tokens':v_ids['tokens']}
            elif 'elmo' in v_ids:
                elmo_embedder_input = {'elmo':v_ids['elmo']}
            v_elmo_embeddings = self._text_field_embedder(elmo_embedder_input, num_wrapping_dims=0).sum(1)
            char_embedder_input = {'token_characters':v_ids['token_characters']}
            v_char_embeddings = self._text_field_char_embedder(char_embedder_input, num_wrapping_dims=0).sum(1)
            v_embeddings[v] = torch.cat((v_elmo_embeddings, v_char_embeddings), dim=1)
            v_masks = util.get_text_field_mask(v_ids, num_wrapping_dims=0).sum(1).float()
            v_embeddings[v] = v_embeddings[v] / v_masks.unsqueeze(1).repeat(1, v_embeddings[v].size(1))
        return ds_embeddings, v_embeddings

    def read_domain_slot_list(self, filename):
        """Parse the tab-separated domain/slot config: columns are domain,
        slot, type, skip-flag, value-file path, use-value-list flag, masked
        flag.  A slot is tracked only when the skip flag is "n"."""
        with open(filename) as fp:
            lines = fp.readlines()
        domains = []
        domain_slots = []
        value_file_path = {}
        domain_slots_type = {}
        domain_slots_use_value_list = {}
        ds_masked = {}
        num_ds_use_value = 0
        for line in lines:
            line = line.strip("\n ")
            if line.startswith("#"):
                continue
            if len(line.strip("\n ")) == 0 :
                continue
            line_arr = line.split("\t")
            ds = line_arr[0] + " " + line_arr[1]
            if line_arr[3] == "n":
                domains.append(line_arr[0])
                domain_slots.append(ds)
                value_file_path[ds] = line_arr[4].strip(" \n")
                domain_slots_type[ds] = line_arr[2]
                domain_slots_use_value_list[ds] = True if line_arr[5] == "y" else False
                num_ds_use_value += 1 if line_arr[5] == "y" else 0
                ds_masked[ds] = True if line_arr[6] == "y" else False
        ds_text2id = {}
        for i, s in enumerate(domain_slots):
            ds_text2id[s] = i
        return domains, domain_slots, ds_text2id, value_file_path, domain_slots_type, domain_slots_use_value_list, num_ds_use_value, ds_masked

    def load_value_list(self, ds_path):
        """Load each slot's candidate value file (resolved relative to the
        domain/slot config's directory); returns id<->text mappings for the
        slots that use a value list."""
        def read_value_list(ds_path, ds, value_path_list):
            dir_path = os.path.dirname(ds_path)
            filename = dir_path + "/" + value_path_list[ds]
            with open(filename) as fp:
                lines = fp.readlines()
            values = []
            for line_i, line in enumerate(lines):
                if len(line.strip("\n ")) == 0:
                    continue
                values.append(line.strip("\n "))
            value2id = {}
            for i, v in enumerate(values):
                value2id[v] = i
            return values, value2id
        value_text2id = {}
        value_id2text = {}
        for ds in self._ds_text2id.keys():
            if not self._ds_use_value_list[ds]: continue
            id2v, v2id =read_value_list(ds_path, ds, self.value_file_path)
            value_text2id[ds] = v2id
            value_id2text[ds] = id2v
        return value_id2text, value_text2id

    # code from https://github.com/allenai/allennlp/blob/master/allennlp/models/reading_comprehension/bidaf.py
    def get_best_span(self, span_start_logits, span_end_logits):
        """Return the highest-scoring (start, end) pair with start <= end."""
        # We call the inputs "logits" - they could either be unnormalized logits or normalized log
        # probabilities. A log_softmax operation is a constant shifting of the entire logit
        # vector, so taking an argmax over either one gives the same result.
        if span_start_logits.dim() != 2 or span_end_logits.dim() != 2:
            raise ValueError("Input shapes must be (batch_size, passage_length)")
        batch_size, passage_length = span_start_logits.size()
        device = span_start_logits.device
        # (batch_size, passage_length, passage_length)
        span_log_probs = span_start_logits.unsqueeze(2) + span_end_logits.unsqueeze(1)
        # Only the upper triangle of the span matrix is valid; the lower triangle has entries where
        # the span ends before it starts.
        span_log_mask = torch.triu(torch.ones((passage_length, passage_length),
                                              device=device)).log().unsqueeze(0)
        valid_span_log_probs = span_log_probs + span_log_mask
        # Here we take the span matrix and flatten it, then find the best span using argmax. We
        # can recover the start and end indices from this flattened list using simple modular
        # arithmetic.
        # (batch_size, passage_length * passage_length)
        best_spans = valid_span_log_probs.view(batch_size, -1).argmax(-1)
        span_start_indices = best_spans // passage_length
        span_end_indices = best_spans % passage_length
        return torch.stack([span_start_indices, span_end_indices], dim=-1)

    def ds_graph_embeddings(self, batch_size, predictions, ds_embeddings, v_embeddings):
        """Build graph-conditioned slot embeddings: for every slot with a
        value list, add the embedding of the previous turn's predicted value
        to that slot's node embedding."""
        repeated_ds_embeddings = ds_embeddings.unsqueeze(0).repeat(batch_size, 1, 1)
        for node_i, node in enumerate(self._ds_id2text):
            if not self._ds_use_value_list[node]: continue
            # predictions[0] holds the classification predictions of the
            # previous turn, keyed by slot name.
            val_node = v_embeddings[node][predictions[0][node]]
            ds_node = repeated_ds_embeddings[:, self._ds_text2id[node], :]
            ds_node = ds_node + val_node
            # Clone before in-place assignment to keep autograd happy.
            repeated_ds_embeddings = repeated_ds_embeddings.clone()
            repeated_ds_embeddings[:, self._ds_text2id[node], :] = ds_node
        return repeated_ds_embeddings
| 26,073 | 48.103578 | 193 |
py
|
dstqa
|
dstqa-master/dstqa/dstqa_reader.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
# Licensed under the Amazon Software License http://aws.amazon.com/asl/
import os
import pdb
import json
import logging
import numpy as np
from overrides import overrides
from typing import Any, Dict, List, Tuple
from collections import Counter, defaultdict
from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.instance import Instance
from allennlp.data.dataset_readers.reading_comprehension import util
from allennlp.data.token_indexers import SingleIdTokenIndexer, TokenIndexer
from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer
from allennlp.data.fields import Field, TextField, IndexField, \
MetadataField, LabelField, ListField, SequenceLabelField, ArrayField
from . import dstqa_util
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@DatasetReader.register("dstqa")
class DSTQAReader(DatasetReader):
def __init__(self,
tokenizer: Tokenizer = None,
token_indexers: Dict[str, TokenIndexer] = None,
lazy: bool = False,
domain_slot_list_path: str = None) -> None:
super().__init__(lazy)
self._tokenizer = tokenizer or WordTokenizer()
self._token_indexers = token_indexers or {'tokens': SingleIdTokenIndexer()}
self._sys_user_symbol_indexers = {'symbols': SingleIdTokenIndexer()}
self._ds_list, self._ds_text2id, value_path_list, self._ds_type, self._ds_use_value_list, self._ds_masked = self.read_domain_slot_list(domain_slot_list_path)
self._ds_span_list, self._ds_span_text2id = self.ds_span_dict(self._ds_list, self._ds_type)
self._value_id2text = {}
self._value_text2id = {}
for domain_slot in self._ds_list:
if not self._ds_use_value_list[domain_slot]: continue
self._value_id2text[domain_slot], self._value_text2id[domain_slot] = self.read_value_list(domain_slot_list_path, domain_slot, value_path_list)
def ds_span_dict(self, ds_list, ds_type):
ds_span_list = []
ds_span_text2id = {}
for ds in ds_list:
if ds_type[ds] == "span":
ds_span_list.append(ds)
ds_span_text2id[ds] = len(ds_span_list) - 1
return ds_span_list, ds_span_text2id
@overrides
def _read(self, file_path: str):
# if `file_path` is a URL, redirect to the cache
file_path = cached_path(file_path)
logger.info("Reading file at %s", file_path)
with open(file_path) as dataset_file:
dataset_json = json.load(dataset_file)
logger.info("Reading the dataset")
for dialog in dataset_json:
dialog_idx = dialog["dialogue_idx"]
dialog = dialog['dialogue']
labels = []
spans = []
span_labels = []
utt_lens = []
dialog_context = None
exact_match_feas = None
tags = None
for turn_i, turn in enumerate(dialog):
sys_utt = turn['system_transcript']
user_utt = turn['transcript']
tokenized_sys_utt = self._tokenizer.tokenize(sys_utt)
if turn_i != 0:
tokenized_sys_utt = [Token(text="<S>", lemma_="<S>")] + tokenized_sys_utt
sys_exact_match_feas = self.exact_match(tokenized_sys_utt)
sys_tags = [Token("SYS") for token in tokenized_sys_utt]
tokenized_user_utt = self._tokenizer.tokenize(user_utt)
if turn_i != len(dialog) - 1:
tokenized_user_utt = tokenized_user_utt + [Token(text="</S>", lemma_="</S>")]
user_exact_match_feas = self.exact_match(tokenized_user_utt)
user_tags = [Token("USER") for token in tokenized_user_utt]
utt_lens.append(len(tokenized_sys_utt) + len(tokenized_user_utt))
if dialog_context is None:
dialog_context = tokenized_sys_utt + tokenized_user_utt
exact_match_feas = sys_exact_match_feas + user_exact_match_feas
tags = sys_tags + user_tags
else:
dialog_context += tokenized_sys_utt + tokenized_user_utt
exact_match_feas += sys_exact_match_feas + user_exact_match_feas
tags += sys_tags + user_tags
cur_labels = []
cur_spans = []
cur_span_labels = [] # 0: none; 1: dont care; 2: span
turn_label = turn['belief_state']
for domain, val in turn_label.items():
domain = domain.lower().strip(" \n")
val = val["semi"]
for slot, value in val.items():
ds = domain + " " + slot
if ds not in self._ds_text2id: continue
slot, value = slot.lower().strip(" \n"), value.lower().strip(" \n")
cur_labels.append((domain, slot, value))
if ds in self._ds_type and self._ds_type[ds] == "span":
s, e = dstqa_util.find_span(dialog_context, value)
cur_spans.append((domain, slot, s, e))
if value == "dont care": sl = 1
elif value == "" or value == "none": sl = 0
else: sl = 2
cur_span_labels.append((domain, slot, sl))
labels.append(cur_labels)
spans.append(cur_spans)
span_labels.append(cur_span_labels)
instance = self.text_to_instance(dialog_idx, dialog_context, exact_match_feas, tags, utt_lens, labels, spans, span_labels)
yield instance
@overrides
def text_to_instance(self, dialog_idx,
                     dialog_context, exact_match_feas, tags,
                     utt_lens, labels, spans, span_labels):
  """Convert one preprocessed dialogue into an AllenNLP ``Instance``.

  Args:
    dialog_idx: opaque dialogue identifier, stored as metadata.
    dialog_context: tokenized dialogue history (list of tokens).
    exact_match_feas: per-token exact-match feature fields.
    tags: per-token SYS/USER speaker tag tokens.
    utt_lens: number of tokens contributed by each turn.
    labels: per-turn list of (domain, slot, value) gold labels, or
      None at prediction time (no supervision fields are added then).
    spans: per-turn list of (domain, slot, start, end) annotations.
    span_labels: per-turn list of (domain, slot, {0,1,2}) span-status
      labels (0: none, 1: dont care, 2: a real span).

  Returns:
    An ``Instance`` with text/tag/feature fields and, when gold labels
    are supplied, classification and span supervision fields.
  """
  token_indexers = self._token_indexers
  symbol_indexers = self._sys_user_symbol_indexers
  fields: Dict[str, Field] = {}
  fields['dialogs'] = TextField(dialog_context, token_indexers)
  fields['tags'] = TextField(tags, symbol_indexers)
  fields['utt_lens'] = ArrayField(np.array(utt_lens), dtype=np.int32)
  fields['exact_match'] = ListField(exact_match_feas)
  fields['metadata'] = MetadataField(dialog_context)
  fields['dialog_indices'] = MetadataField(dialog_idx)
  # calculate labels (training only)
  if labels is not None:  # fixed: identity comparison to None, not `!=`
    expanded_value_labels = []
    for turn_label in labels:
      # -1 marks span-type slots (no classification target); 0 is the
      # default class index, which corresponds to 'none' in the vocab.
      turn_value_label = [-1 if self._ds_type[ds] == "span" else 0 for ds in self._ds_list]
      for each_label in turn_label:
        if each_label[2] == "":
          continue
        ds = each_label[0] + " " + each_label[1]
        if ds in self._ds_text2id:
          if self._ds_type[ds] == "classification":
            if each_label[2] not in self._value_text2id[ds]:
              # Value missing from the ontology: keep the default label.
              continue
            turn_value_label[self._ds_text2id[ds]] = self._value_text2id[ds][each_label[2]]
          # fixed: truthiness instead of `== True`
          if self._ds_type[ds] == "span" and self._ds_use_value_list[ds]:
            if each_label[2] != "none" and each_label[2] != "dont care":
              if each_label[2] not in self._value_text2id[ds]:
                continue
              turn_value_label[self._ds_text2id[ds]] = self._value_text2id[ds][each_label[2]]
      expanded_value_labels.append(ListField([LabelField(l, skip_indexing=True) for l in turn_value_label]))
    fields['labels'] = ListField(expanded_value_labels)
    # calculate spans (only meaningful when some slots are span-typed)
    if len(self._ds_span_list) != 0:
      spans_start = []
      spans_end = []
      for turn_span in spans:
        cur_span_start = [-1] * len(self._ds_span_list)  # -1: no span for this slot
        cur_span_end = [-1] * len(self._ds_span_list)
        for each_span in turn_span:
          cur_ds = each_span[0] + " " + each_span[1]
          cur_span_start[self._ds_span_text2id[cur_ds]] = each_span[2]
          cur_span_end[self._ds_span_text2id[cur_ds]] = each_span[3]
        spans_start.append(ListField([LabelField(l, skip_indexing=True) for l in cur_span_start]))
        spans_end.append(ListField([LabelField(l, skip_indexing=True) for l in cur_span_end]))
      fields["spans_start"] = ListField(spans_start)
      fields["spans_end"] = ListField(spans_end)
      expanded_span_labels = []
      for turn_span_label in span_labels:
        cur_span_label = [0 for _ in self._ds_span_list]
        for each_span_label in turn_span_label:
          cur_ds = each_span_label[0] + " " + each_span_label[1]
          cur_span_label[self._ds_span_text2id[cur_ds]] = each_span_label[2]
        expanded_span_labels.append(ListField([LabelField(l, skip_indexing=True) for l in cur_span_label]))
      fields["span_labels"] = ListField(expanded_span_labels)
  return Instance(fields)
def read_domain_slot_list(self, filename):
  """Parse the ontology's domain-slot description file.

  Each non-comment, non-blank line is tab-separated: domain, slot,
  type, skip-flag ('y' drops the slot), value-list file name,
  use-value-list flag ('y'/'n'), masked flag ('y'/'n').

  Returns a 6-tuple: (ordered domain-slot names, name->index map,
  name->value-list path, name->type, name->use-value-list bool,
  name->masked bool).
  """
  with open(filename) as fp:
    lines = fp.readlines()
  domain_slots = []
  ds_masked = {}
  value_file_path = {}
  domain_slots_type = {}
  domain_slots_use_value_list = {}
  for line in lines:
    line = line.strip("\n ")
    # Skip comments and blank lines (already stripped above).
    if line.startswith("#") or not line:
      continue
    line_arr = line.split("\t")
    ds = line_arr[0] + " " + line_arr[1]
    if line_arr[3] == "n":  # column 4 == 'y' means the slot is dropped
      domain_slots.append(ds)
      value_file_path[ds] = line_arr[4].strip(" \n")
      domain_slots_type[ds] = line_arr[2]
      # `x == "y"` already yields a bool; no conditional expression needed.
      domain_slots_use_value_list[ds] = line_arr[5] == "y"
      ds_masked[ds] = line_arr[6] == "y"
  ds_text2id = {s: i for i, s in enumerate(domain_slots)}
  return domain_slots, ds_text2id, value_file_path, domain_slots_type, domain_slots_use_value_list, ds_masked
def read_value_list(self, ds_path, ds, value_path_list):
  """Load the candidate value vocabulary for one domain-slot pair.

  The value file lives in the same directory as ``ds_path``; for
  span-type slots the first two lines ('none' / 'dont care') are
  skipped.  Returns (ordered value list, value->index map).
  """
  value_file = os.path.dirname(ds_path) + "/" + value_path_list[ds]
  skip_header = 2 if self._ds_type[ds] == "span" else 0
  values = []
  with open(value_file) as fp:
    for line_no, raw in enumerate(fp):
      if line_no < skip_header:
        continue  # span slots: drop the leading 'none'/'dont care' rows
      cleaned = raw.strip("\n ")
      if cleaned:
        values.append(cleaned)
  return values, {v: i for i, v in enumerate(values)}
def exact_match(self, utt):
  """Build per-token exact-match features against each slot's value list.

  For every domain-slot that has a value vocabulary, each token of
  ``utt`` gets two binary features: whether it lies inside a
  surface-form match of some candidate value, and likewise for a
  lemma-level match.  Returns one ``ArrayField`` per token.
  """
  def _char_to_word(pos, text):
    # Map a character offset to a word index by counting blanks before
    # it; -1 (value not found) is passed through unchanged.
    if pos == -1:
      return pos
    return text.count(" ", 0, pos)
  def _word_count(value):
    return value.count(" ") + 1
  # training and test data have already converted to lower cased.
  # keep cases-sensitive should be better
  surface = " ".join(tok.text for tok in utt)
  lemmas = " ".join(tok.lemma_ for tok in utt)
  surface_feas, lemma_feas = [], []
  for ds in self._ds_list:
    if not self._ds_use_value_list[ds]:
      continue
    fea_surface = [0] * len(utt)
    fea_lemma = [0] * len(utt)
    for value in self._value_id2text[ds]:
      span_len = _word_count(value)
      w1 = _char_to_word(surface.find(value), surface)
      w2 = _char_to_word(lemmas.find(value), lemmas)
      if w1 != -1:
        for k in range(w1, w1 + span_len):
          fea_surface[k] = 1
      if w2 != -1:
        for k in range(w2, w2 + span_len):
          fea_lemma[k] = 1
    surface_feas.append(fea_surface)
    lemma_feas.append(fea_lemma)
  per_token = [[] for _ in range(len(utt))]
  for ori, lem in zip(surface_feas, lemma_feas):
    for idx, (s_ori, s_lem) in enumerate(zip(ori, lem)):
      per_token[idx] += [s_ori, s_lem]
  return [ArrayField(np.array(feas)) for feas in per_token]
| 12,637 | 45.29304 | 165 |
py
|
dstqa
|
dstqa-master/dstqa/accuracy.py
|
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
// Licensed under the Amazon Software License http://aws.amazon.com/asl/
from allennlp.training.metrics import Average, BooleanAccuracy, CategoricalAccuracy
from . import dstqa_util
class Accuracy:
  """Bookkeeping for per-slot accuracy metrics.

  Keeps one ``CategoricalAccuracy`` per span slot for span-status,
  span-start and span-end predictions, and one per slot (span and
  classification alike) for value predictions.
  """
  def __init__(self, ds_id2text, ds_type):
    self._ds_id2text = ds_id2text
    self._ds_type = ds_type
    self._span_text2id, self._class_text2id, self._text2id = dstqa_util.gen_text2id(ds_id2text, ds_type)
    n_span = len(self._span_text2id)
    n_class = len(self._class_text2id)
    self._span_label_acc = [CategoricalAccuracy() for _ in range(n_span)]
    self._span_start_acc = [CategoricalAccuracy() for _ in range(n_span)]
    self._span_end_acc = [CategoricalAccuracy() for _ in range(n_span)]
    # Value accuracy is tracked for every slot, span-typed ones included.
    self._class_acc = [CategoricalAccuracy() for _ in range(n_class + n_span)]
  def span_label_acc(self, slot_name, logits, labels, label_masks):
    """Accumulate span-status accuracy for one span slot."""
    self._span_label_acc[self._span_text2id[slot_name]](logits, labels, label_masks)
  def value_acc(self, slot_name, logits, labels, label_masks):
    """Accumulate value accuracy for any slot."""
    self._class_acc[self._text2id[slot_name]](logits, labels, label_masks)
  def span_start_acc(self, slot_name, logits, labels, label_masks):
    """Accumulate span-start accuracy for one span slot."""
    self._span_start_acc[self._span_text2id[slot_name]](logits, labels, label_masks)
  def span_end_acc(self, slot_name, logits, labels, label_masks):
    """Accumulate span-end accuracy for one span slot."""
    self._span_end_acc[self._span_text2id[slot_name]](logits, labels, label_masks)
  def get_metrics(self, reset = False):
    """Return a flat name -> value dict of all tracked accuracies."""
    acc = {}
    groups = [("val", self._class_acc), ("sl", self._span_label_acc),
              ("ss", self._span_start_acc), ("se", self._span_end_acc)]
    for prefix, metric_list in groups:
      for i, metric in enumerate(metric_list):
        acc[prefix + "_" + str(i) + "_acc"] = metric.get_metric(reset)
    return acc
| 2,200 | 43.02 | 104 |
py
|
dstqa
|
dstqa-master/dstqa/__init__.py
|
from .dstqa import DSTQA
from .dstqa_reader import DSTQAReader
#from .dstqa_predictor import DSTQAPredictor
| 109 | 21 | 44 |
py
|
dstqa
|
dstqa-master/dstqa/dstqa_predictor.py
|
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
// Licensed under the Amazon Software License http://aws.amazon.com/asl/
import json
import os
import numpy as np
from overrides import overrides
from allennlp.common.util import JsonDict
from allennlp.data import DatasetReader, Instance
from allennlp.data.tokenizers.word_splitter import SpacyWordSplitter
from allennlp.predictors.predictor import Predictor
from allennlp.data.tokenizers import Token, Tokenizer, WordTokenizer
from allennlp.models import Model
from allennlp.data.fields import Field, TextField, IndexField, \
MetadataField, LabelField, ListField, SequenceLabelField, ArrayField
from . import dstqa_util
# Predictor wrapper around the DSTQA model, registered under 'dstqa' so it
# can be selected from the allennlp predict CLI.
@Predictor.register('dstqa')
class DSTQAPredictor(Predictor):
  def __init__(self, model: Model, dataset_reader: DatasetReader, language: str = 'en_core_web_sm') -> None:
    """Load the ontology files and build the spaCy tokenizer used at predict time."""
    super().__init__(model, dataset_reader)
    # NOTE(review): path is hard-coded relative to the working directory.
    domain_slot_list_path = "./ontology/domain_slot_list_nosp.txt" # if span, use "./ontology/domain_slot_list.txt"
    self._splitter = SpacyWordSplitter(language=language, pos_tags=True, ner=True)
    self._tokenizer = WordTokenizer(word_splitter=self._splitter)
    self._ds_list, self._ds_text2id, value_path_list, self._ds_type, self._ds_use_value_list = self.read_domain_slot_list(domain_slot_list_path)
    # Per-slot candidate value vocabularies (only for slots flagged to use one).
    self._value_id2text = {}
    self._value_text2id = {}
    for domain_slot in self._ds_list:
      if not self._ds_use_value_list[domain_slot]: continue
      self._value_id2text[domain_slot], self._value_text2id[domain_slot] = self.read_value_list(domain_slot_list_path, domain_slot, value_path_list)
def predict(self, jsonline: str) -> JsonDict:
  """Decode one JSON-encoded dialogue line and run the predictor on it."""
  payload = json.loads(jsonline)
  return self.predict_json(payload)
@overrides
def _json_to_instance(self, json_dict: JsonDict) -> Instance:
  """Tokenize one raw MultiWOZ-style dialogue dict and delegate to the
  dataset reader's ``text_to_instance``.

  Walks every turn, accumulating the tokenized dialogue context,
  exact-match features, speaker tags, per-turn lengths, and per-turn
  gold (domain, slot, value) labels plus span annotations.
  """
  dialog = json_dict['dialogue']
  dialog_idx = json_dict["dialogue_idx"]
  labels = []
  spans = []
  span_labels = []
  utt_lens = []
  dialog_context = None
  exact_match_feas = None
  tags = None
  for turn_i, turn in enumerate(dialog):
    sys_utt = turn['system_transcript']
    user_utt = turn['transcript']
    tokenized_sys_utt = self._tokenizer.tokenize(sys_utt)
    # <S> separates turns; the first system utterance gets no marker.
    if turn_i != 0:
      tokenized_sys_utt = [Token(text="<S>", lemma_="<S>")] + tokenized_sys_utt
    sys_exact_match_feas = self.exact_match(tokenized_sys_utt)
    sys_tags = [Token("SYS") for token in tokenized_sys_utt]
    tokenized_user_utt = self._tokenizer.tokenize(user_utt)
    # </S> closes every user utterance except the last one.
    if turn_i != len(dialog) - 1:
      tokenized_user_utt = tokenized_user_utt + [Token(text="</S>", lemma_="</S>")]
    user_exact_match_feas = self.exact_match(tokenized_user_utt)
    user_tags = [Token("USER") for token in tokenized_user_utt]
    utt_lens.append(len(tokenized_sys_utt) + len(tokenized_user_utt))
    # Grow the running dialogue context turn by turn.
    if dialog_context is None:
      dialog_context = tokenized_sys_utt + tokenized_user_utt
      exact_match_feas = sys_exact_match_feas + user_exact_match_feas
      tags = sys_tags + user_tags
    else:
      dialog_context += tokenized_sys_utt + tokenized_user_utt
      exact_match_feas += sys_exact_match_feas + user_exact_match_feas
      tags += sys_tags + user_tags
    cur_labels = []
    cur_spans = []
    cur_span_labels = [] # 0: none; 1: dont care; 2: span
    turn_label = turn['belief_state']
    for domain, val in turn_label.items():
      domain = domain.lower().strip(" \n")
      val = val["semi"]
      for slot, value in val.items():
        ds = domain + " " + slot
        # Slots outside the ontology are ignored entirely.
        if ds not in self._ds_text2id: continue
        slot, value = slot.lower().strip(" \n"), value.lower().strip(" \n")
        cur_labels.append((domain, slot, value))
        if ds in self._ds_type and self._ds_type[ds] == "span":
          # Locate the value's token span inside the accumulated context.
          s, e = dstqa_util.find_span(dialog_context, value)
          cur_spans.append((domain, slot, s, e))
          if value == "dont care": sl = 1
          elif value == "" or value == "none": sl = 0
          else: sl = 2
          cur_span_labels.append((domain, slot, sl))
    labels.append(cur_labels)
    spans.append(cur_spans)
    span_labels.append(cur_span_labels)
  instance = self._dataset_reader.text_to_instance(dialog_idx, dialog_context, exact_match_feas, tags, utt_lens, labels, spans, span_labels)
  return instance
def exact_match(self, utt):
  """Per-token exact-match features: for every slot with a value list,
  mark tokens covered by a surface-form match and by a lemma-form
  match of any candidate value.  Returns one ``ArrayField`` per token.
  """
  def to_word_idx(char_pos, joined):
    # A char offset maps to the word containing it: count the blanks
    # before it.  -1 (no match) passes through.
    return char_pos if char_pos == -1 else joined.count(" ", 0, char_pos)
  def n_words(value):
    return value.count(" ") + 1
  joined_text = " ".join(tok.text for tok in utt)
  joined_lemma = " ".join(tok.lemma_ for tok in utt)
  feas_text, feas_lemma = [], []
  for ds in self._ds_list:
    if not self._ds_use_value_list[ds]:
      continue
    hit_text = [0] * len(utt)
    hit_lemma = [0] * len(utt)
    for value in self._value_id2text[ds]:
      width = n_words(value)
      w_t = to_word_idx(joined_text.find(value), joined_text)
      w_l = to_word_idx(joined_lemma.find(value), joined_lemma)
      if w_t != -1:
        for k in range(w_t, w_t + width):
          hit_text[k] = 1
      if w_l != -1:
        for k in range(w_l, w_l + width):
          hit_lemma[k] = 1
    feas_text.append(hit_text)
    feas_lemma.append(hit_lemma)
  merged = [[] for _ in range(len(utt))]
  for ori, lem in zip(feas_text, feas_lemma):
    for k, pair in enumerate(zip(ori, lem)):
      merged[k] += list(pair)
  return [ArrayField(np.array(row)) for row in merged]
def read_domain_slot_list(self, filename):
  """Parse the ontology description file (tab-separated columns:
  domain, slot, type, skip-flag, value-list file, use-value-list flag).

  Lines starting with '#', blank lines, and rows whose skip-flag is
  'y' are ignored.  Returns a 5-tuple: (ordered domain-slot names,
  name->index map, name->value-list path, name->type,
  name->use-value-list bool).
  """
  with open(filename) as fp:
    lines = fp.readlines()
  domain_slots = []
  value_file_path = {}
  domain_slots_type = {}
  domain_slots_use_value_list = {}
  for line in lines:
    line = line.strip("\n ")
    # Skip comments and blank lines (already stripped above).
    if line.startswith("#") or not line:
      continue
    line_arr = line.split("\t")
    ds = line_arr[0] + " " + line_arr[1]
    if line_arr[3] == "n":  # column 4 == 'y' means the slot is dropped
      domain_slots.append(ds)
      value_file_path[ds] = line_arr[4].strip(" \n")
      domain_slots_type[ds] = line_arr[2]
      # `x == "y"` already yields a bool; no conditional expression needed.
      domain_slots_use_value_list[ds] = line_arr[5] == "y"
  ds_text2id = {s: i for i, s in enumerate(domain_slots)}
  return domain_slots, ds_text2id, value_file_path, domain_slots_type, domain_slots_use_value_list
def read_value_list(self, ds_path, ds, value_path_list):
  """Load the candidate value list for one domain-slot pair.

  The file is resolved relative to ``ds_path``'s directory.  Span-type
  slots skip the first two lines ('none'/'dont care').  Returns the
  ordered values and a value->index map.
  """
  folder = os.path.dirname(ds_path)
  header_rows = 2 if self._ds_type[ds] == "span" else 0
  values = []
  with open(folder + "/" + value_path_list[ds]) as fp:
    for idx, raw in enumerate(fp):
      if idx < header_rows:
        continue  # if span, do not read none and dont care
      entry = raw.strip("\n ")
      if entry:
        values.append(entry)
  return values, {v: i for i, v in enumerate(values)}
| 7,830 | 41.559783 | 152 |
py
|
dstqa
|
dstqa-master/dstqa/dstqa_util.py
|
// Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: LicenseRef-.amazon.com.-AmznSL-1.0
// Licensed under the Amazon Software License http://aws.amazon.com/asl/
def find_span(utt, ans):
def is_match(utt, ans, i):
match = True
for j in range(len(ans)):
if utt[i+j].text.lower() != ans[j]:
match = False
return match
ans = ans.lower()
ans = ans.split(" ")
# find ans from revert direction
ans_len = len(ans)
utt_len = len(utt)
span_start = -1
span_end = -1
for i in range(utt_len - ans_len - 1, -1, -1):
if is_match(utt, ans, i):
span_start = i
span_end = span_start + ans_len - 1
break
return span_start, span_end
def gen_id2text(ds_id2text, ds_type):
span_id2text, class_id2text = [], []
for ds in ds_id2text:
if ds_type[ds] == "span":
span_id2text.append(ds)
if ds_type[ds] == "classification":
class_id2text.append(ds)
return span_id2text, class_id2text
def gen_text2id(ds_id2text, ds_type):
s_i = 0
c_i = 0
i = 0
span_text2id, class_text2id, text2id = {}, {}, {}
for ds in ds_id2text:
if ds_type[ds] == "span":
span_text2id[ds] = s_i
s_i += 1
if ds_type[ds] == "classification":
class_text2id[ds] = c_i
c_i += 1
text2id[ds] = i
i+=1
return span_text2id, class_text2id, text2id
| 1,386 | 25.673077 | 74 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Lorenz/generate.py
|
from turtle import color
import numpy as np
import math
import torch
import timeit
import numpy as np
import matplotlib.pyplot as plt
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
# Figure palette: RGB triples in [0, 1]; commented entries are alternative
# colours kept for reference.
colors = [
    [233/256, 110/256, 236/256], # #e96eec
    # [0.6, 0.6, 0.2], # olive
    # [0.5333333333333333, 0.13333333333333333, 0.3333333333333333], # wine
    [255/255, 165/255, 0],
    # [0.8666666666666667, 0.8, 0.4666666666666667], # sand
    # [223/256, 73/256, 54/256], # #df4936
    [107/256, 161/256,255/256], # #6ba1ff
    [0.6, 0.4, 0.8], # amethyst
    [0.0, 0.0, 1.0], # ao
    [0.55, 0.71, 0.0], # applegreen
    # [0.4, 1.0, 0.0], # brightgreen
    [0.99, 0.76, 0.8], # bubblegum
    [0.93, 0.53, 0.18], # cadmiumorange
    [11/255, 132/255, 147/255], # deblue
    [204/255, 119/255, 34/255], # {ocra}
]
colors = np.array(colors)  # array form so rows can be indexed in the plots
np.random.seed(10)  # fix NumPy's global RNG for reproducible figures/data
class Net(torch.nn.Module):
    """Three-layer ReLU MLP used as the learned controller network.

    ``torch.manual_seed(2)`` is called in the constructor, so every
    fresh instance starts from identical initial weights.
    """
    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)
    def forward(self, x):
        relu = torch.nn.ReLU()
        hidden = relu(self.layer2(relu(self.layer1(x))))
        return self.layer3(hidden)
# Controller network dimensions: 3-D Lorenz state in, 3 control gains out.
D_in = 3
H1 = 10
D_out = 3
model = Net(D_in,H1,D_out)
# set_state0 = torch.tensor([[3.0,5.0,6.0]])
def control_data(model, random_seed, set_state0, N=20000, dt=0.00001):
    """Simulate the controlled Lorenz SDE with Euler-Maruyama.

    Drift: classic Lorenz (sigma=10, rho=28, beta=8/3).  Diffusion:
    state-multiplicative control ``x * u(x)`` driven by one shared
    Brownian increment per step.  Prints the elapsed wall time and
    returns the (3, N) trajectory starting at ``set_state0`` (1x3).
    """
    t0 = timeit.default_timer()
    torch.manual_seed(random_seed)
    traj = torch.zeros([3, N])
    noise = torch.randn(N)
    traj[0, 0] = set_state0[0, 0]
    traj[1, 0] = set_state0[0, 1]
    traj[2, 0] = set_state0[0, 2]
    sqrt_dt = math.sqrt(dt)
    for i in range(N - 1):
        x1, x2, x3 = traj[0, i], traj[1, i], traj[2, i]
        with torch.no_grad():
            u = model(torch.tensor([x1, x2, x3]))
        traj[0, i + 1] = x1 + 10 * (x2 - x1) * dt + x1 * u[0] * noise[i] * sqrt_dt
        traj[1, i + 1] = x2 + (x1 * (28 - x3) - x2) * dt + x2 * u[1] * noise[i] * sqrt_dt
        traj[2, i + 1] = x3 + (x1 * x2 - 8 / 3 * x3) * dt + x3 * u[2] * noise[i] * sqrt_dt
    print(timeit.default_timer() - t0)
    return traj
def modify_control_data(model, random_seed, set_state0, N=20000, dt=0.00001):
    """Euler-Maruyama simulation of the Lorenz SDE controlled around the
    equilibrium P2 = (6*sqrt(2), 6*sqrt(2), 27).

    The controller sees the deviation x - e, and the diffusion term is
    (x - e) * u, so the noise vanishes exactly at the equilibrium.
    Prints the elapsed wall time and returns the (3, N) trajectory.
    """
    t0 = timeit.default_timer()
    torch.manual_seed(random_seed)
    X = torch.zeros([3, N])
    z = torch.randn(N)
    e = torch.tensor([6.0 * math.sqrt(2), 6.0 * math.sqrt(2), 27.0])
    e1, e2, e3 = e
    X[0, 0] = set_state0[0, 0]
    X[1, 0] = set_state0[0, 1]
    # Bug fix: the z-component was initialised from set_state0[0, 1]
    # (the y entry) instead of set_state0[0, 2].
    X[2, 0] = set_state0[0, 2]
    sqrt_dt = math.sqrt(dt)
    for i in range(N - 1):
        x1, x2, x3 = X[0, i], X[1, i], X[2, i]
        with torch.no_grad():
            u = model(torch.tensor([x1 - e1, x2 - e2, x3 - e3]))
        X[0, i + 1] = x1 + 10 * (x2 - x1) * dt + (x1 - e1) * u[0] * z[i] * sqrt_dt
        X[1, i + 1] = x2 + (x1 * (28 - x3) - x2) * dt + (x2 - e2) * u[1] * z[i] * sqrt_dt
        X[2, i + 1] = x3 + (x1 * x2 - 8 / 3 * x3) * dt + (x3 - e3) * u[2] * z[i] * sqrt_dt
    print(timeit.default_timer() - t0)
    return X
def original_data(set_state0, N=50000, dt=0.001):
    """Integrate the uncontrolled Lorenz system with explicit Euler and
    cache the result to ./data/Lorenz/original_data.pt.

    Prints the elapsed wall time and returns the (3, N) trajectory
    starting at ``set_state0`` (1x3).
    """
    t0 = timeit.default_timer()
    X = torch.zeros([3, N])
    X[0, 0] = set_state0[0, 0]
    X[1, 0] = set_state0[0, 1]
    # Bug fix: the z-component was initialised from set_state0[0, 1]
    # (the y entry) instead of set_state0[0, 2].
    X[2, 0] = set_state0[0, 2]
    for i in range(N - 1):
        x1, x2, x3 = X[0, i], X[1, i], X[2, i]
        X[0, i + 1] = x1 + 10 * (x2 - x1) * dt
        X[1, i + 1] = x2 + (x1 * (28 - x3) - x2) * dt
        X[2, i + 1] = x3 + (x1 * x2 - 8 / 3 * x3) * dt
    print(timeit.default_timer() - t0)
    torch.save(X, './data/Lorenz/original_data.pt')
    return X
def plot_original_orbit():
    """3-D plot of the uncontrolled attractor with equilibria P1-P3 marked."""
    fig = plt.figure()
    X = torch.load('./data/Lorenz/original_data.pt')[:,0:50000:10]  # every 10th step
    x1,x2,x3=X[0,:],X[1,:],X[2,:]
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
    ax = fig.add_subplot(111,projection = '3d')
    ax.plot3D(x1,x2,x3,color=[1.0, 0.8, 0.6])
    ax.plot3D(0,0,0,marker='*',label=r'$P_1$',color=colors[0])
    ax.plot3D(6*math.sqrt(2),6*math.sqrt(2),27,marker='*',label=r'$P_2$',color=colors[3])
    ax.plot3D(-6*math.sqrt(2),-6*math.sqrt(2),27,marker='*',label=r'$P_3$',color=colors[2])
    plt.legend()
def orbit1(ax,path1,P1):
    """Draw three controlled orbits (starts Q1-Q3) converging to P1 on ``ax``.

    ``path1`` selects the controller ('icnn' or 'quad'); data comes from
    the .npy files written by the *_generate helpers.
    """
    # fig = plt.figure()
    Q1 =np.load('./data/Lorenz/{}_data_{}_Q1.npy'.format(path1,P1))[0,:,0:100000:10]
    Q2 =np.load('./data/Lorenz/{}_data_{}_Q2.npy'.format(path1,P1))[0,:,0:100000:10]
    Q3 =np.load('./data/Lorenz/{}_data_{}_Q3.npy'.format(path1,P1))[0,:,0:100000:10]
    # plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
    # ax = fig.add_subplot(111,projection = '3d')
    ax.plot3D(Q1[0,:],Q1[1,:],Q1[2,:],color=colors[4],alpha=0.5)
    ax.plot3D(Q2[0,:],Q2[1,:],Q2[2,:],color=colors[5],alpha=0.5)
    ax.plot3D(Q3[0,:],Q3[1,:],Q3[2,:],color=colors[7],alpha=0.5)
    ax.plot3D(0,0,0,marker='*',label=r'$P_1$',markersize=10,color=colors[0])
    # ax.plot3D(6*math.sqrt(2),6*math.sqrt(2),27,marker='*',label=r'$P_2$')
    # ax.plot3D(-6*math.sqrt(2),-6*math.sqrt(2),27,marker='*',label=r'$P_3$')
    ax.plot3D(9,6,8,marker='*',label=r'$Q_1$',markersize=10,color=colors[4])
    ax.plot3D(3,5,6,marker='*',label=r'$Q_2$',markersize=10,color=colors[5])
    ax.plot3D(1,9,2,marker='*',label=r'$Q_3$',markersize=10,color=colors[7])
    # ax.plot3D(8,2,1,marker='^',label=r'$Q_4$')
    ax.set_xlabel(r'$X$')
    # ax.set_xlim(0, 10)
    ax.set_ylabel(r'$Y$')
    # ax.set_ylim(0, 10)
    ax.set_zlabel(r'$Z$')
    # ax.set_zlim(0, 10)
    plt.legend(fontsize=8,markerscale=0.5,labelspacing=0.05,borderpad=0.1,handlelength=1.0)
def orbit2(ax,path1,P1):
    """Like orbit1 but targeting the equilibrium P2 (longer 200k horizon)."""
    # fig = plt.figure()
    Q1 =np.load('./data/Lorenz/{}_data_{}_Q1.npy'.format(path1,P1))[0,:,0:200000:10]
    Q2 =np.load('./data/Lorenz/{}_data_{}_Q2.npy'.format(path1,P1))[0,:,0:200000:10]
    Q3 =np.load('./data/Lorenz/{}_data_{}_Q3.npy'.format(path1,P1))[0,:,0:200000:10]
    # plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
    # ax = fig.add_subplot(111,projection = '3d')
    ax.plot3D(Q1[0,:],Q1[1,:],Q1[2,:],color=colors[4],alpha=0.5)
    ax.plot3D(Q2[0,:],Q2[1,:],Q2[2,:],color=colors[5],alpha=0.5)
    ax.plot3D(Q3[0,:],Q3[1,:],Q3[2,:],color=colors[7],alpha=0.5)
    # ax.plot3D(0,0,0,marker='*',label=r'$P_1$',markersize=10)
    ax.plot3D(6*math.sqrt(2),6*math.sqrt(2),27,marker='*',label=r'$P_2$',markersize=10,color=colors[3])
    # ax.plot3D(-6*math.sqrt(2),-6*math.sqrt(2),27,marker='*',label=r'$P_3$')
    ax.plot3D(9,6,8,marker='*',label=r'$Q_1$',markersize=10,color=colors[4])
    ax.plot3D(3,5,6,marker='*',label=r'$Q_2$',markersize=10,color=colors[5])
    ax.plot3D(1,9,2,marker='*',label=r'$Q_3$',markersize=10,color=colors[7])
    ax.set_xlabel(r'$X$')
    # ax.set_xlim(0, 10)
    ax.set_ylabel(r'$Y$')
    # ax.set_ylim(0, 10)
    ax.set_zlabel(r'$Z$')
    # ax.set_zlim(0, 10)
    plt.legend(fontsize=8,markerscale=0.5,labelspacing=0.05,borderpad=0.1,handlelength=1.0)
    # plt.legend(loc='upper right',labelspacing=0.1,borderpad=0.2,handlelength=1.2)
def plot_original_tra():
    """Component-wise (x, y, z) time series of the uncontrolled trajectory."""
    X = torch.load('./data/Lorenz/original_data.pt')[:,0:40000:10]
    x1,x2,x3=X[0,:],X[1,:],X[2,:]
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
    plt.subplot(131)
    plt.xticks([])
    plt.plot(np.arange(len(x1)),x1,label='x',color='r')
    plt.ylabel(r'$x$')
    plt.subplot(132)
    plt.xticks([])
    plt.plot(np.arange(len(x1)),x2,label='y',color='g')
    plt.ylabel(r'$y$')
    plt.subplot(133)
    plt.xticks([0,1000,2000,3000,4000],[0,10,20,30,40])
    plt.plot(np.arange(len(x1)),x3,label='z',color='b')
    plt.ylabel(r'$z$')
    plt.xlabel('Time')
def plot_grid():
    """Enable major + minor grid lines on the current axes."""
    # NOTE(review): plt.grid's `b=` keyword was renamed `visible=` in
    # matplotlib 3.5 and later removed; this call breaks on recent
    # matplotlib — confirm the pinned version before changing it.
    plt.grid(b=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
def plot_tra(path1,P1,Q1,length=200000):
    """Plot x/y/z time series of one saved controlled trajectory."""
    X = np.load('./data/Lorenz/{}_data_{}_{}.npy'.format(path1,P1,Q1))[0,:,0:length:10]
    x1,x2,x3=X[0,:],X[1,:],X[2,:]
    plt.plot(np.arange(len(x1)),x1,label='x',color='r')
    plt.plot(np.arange(len(x1)),x2,label='y',color='g')
    plt.plot(np.arange(len(x1)),x3,label='z',color='b')
    plot_grid()
    plt.legend(loc='upper right',labelspacing=0.1,borderpad=0.2,handlelength=1.2)
def quad_generate(set_state0,m,N,dt,case):
    """Roll out ``m`` controlled trajectories with the quadratic-Lyapunov
    controller weights and save them under ./data/Lorenz/.

    case 0: stabilise the origin P1 (control_data); otherwise stabilise
    P2 via the shifted controller (modify_control_data)."""
    X = torch.zeros(m,3,N)
    # model.load_state_dict(torch.load('./neural_sde/Lorenz/ES_quad_net_modify_0.pkl'))
    # model.load_state_dict(torch.load('./neural_sde/Lorenz/ES_quad_net_modify_1.pkl'))
    if case == 0:
        model.load_state_dict(torch.load('./data/Lorenz/ES_quad_net_modify_0.pkl'))
        for i in range(m):
            # Distinct seed per trajectory for independent noise realisations.
            X[i,:] = control_data(model,i*6+2,set_state0,N,dt)
            print(case,i)
        X = X.detach().numpy()
        np.save('./data/Lorenz/quad_data_P1_Q2_20',X)
    else:
        model.load_state_dict(torch.load('./data/Lorenz/ES_quad_net_modify_1.pkl'))
        for i in range(m):
            X[i,:] = modify_control_data(model,i*6+2,set_state0,N,dt)
            print(case,i)
        X = X.detach().numpy()
        np.save('./data/Lorenz/quad_data_P2_Q2_20',X)
    # return X
def icnn_generate(set_state0,m,N,dt,case):
    """Same as quad_generate but with the ICNN-based controller weights."""
    X = torch.zeros(m,3,N)
    # model.load_state_dict(torch.load('./neural_sde/Lorenz/ES_icnn_net_100.pkl'))
    # model.load_state_dict(torch.load('./neural_sde/Lorenz/ES_icnn_net_modify_1.pkl'))
    if case == 0:
        model.load_state_dict(torch.load('./data/Lorenz/ES_icnn_net_100.pkl'))
        for i in range(m):
            X[i,:] = control_data(model,i*6+6,set_state0,N,dt)
            print(case,i)
        X = X.detach().numpy()
        np.save('./data/Lorenz/icnn_data_P1_Q2_20',X)
    else:
        model.load_state_dict(torch.load('./data/Lorenz/ES_icnn_net_modify_1.pkl'))
        for i in range(m):
            X[i,:] = modify_control_data(model,i*6+6,set_state0,N,dt)
            print(case,i)
        X = X.detach().numpy()
        np.save('./data/Lorenz/icnn_data_P2_Q2_20',X)
    # return X
font_size = 15  # shared label font size for the composite figures
def plot1():
    """2x2 grid of 3-D orbit panels: ICNN/Quad controllers at P1 and P2."""
    fig = plt.figure()
    ax1 = fig.add_subplot(4,4,4,projection = '3d')
    orbit1(ax1,'icnn','P1')
    plt.title('Orbit')
    ax2 = fig.add_subplot(4,4,8,projection = '3d')
    orbit1(ax2,'quad','P1')
    ax3 = fig.add_subplot(4,4,12,projection = '3d')
    orbit2(ax3,'icnn','P2')
    ax4 = fig.add_subplot(4,4,16,projection = '3d')
    orbit2(ax4,'quad','P2')
def plot2():
    """4x3 grid of component time series, one row per controller/target,
    one column per start point Q1-Q3."""
    for i in range(3):
        plt.subplot(4,3,i+1)
        plot_tra('icnn','P1','Q{}'.format(i+1),5000)
        plt.xticks([0,200,400],['0','0.02','0.04'])
        plt.title(r'$Q_{}$'.format(i+1),fontsize=font_size)
        if i ==0:
            plt.ylabel(r'$Value$',fontsize=font_size)
            plt.text(0.1,4,r'$ICNN : P_1$',rotation=90,fontsize=font_size)
        if i==1:
            plt.xlabel('Time',fontsize=font_size)
    for i in range(3):
        plt.subplot(4,3,3+i+1)
        plot_tra('quad','P1','Q{}'.format(i+1),5000)
        plt.xticks([0,200,400],['0','0.02','0.04'])
        if i==1:
            plt.xlabel('Time',fontsize=font_size)
        if i ==0:
            plt.ylabel(r'$Value$',fontsize=font_size)
            plt.text(0.1,3,r'$Quad : P_1$',rotation=90,fontsize=font_size)
    for i in range(3):
        plt.subplot(4,3,6+i+1)
        plot_tra('icnn','P2','Q{}'.format(i+1),200000)
        plt.xticks([0,10000,20000],['0','1.0','2.0'])
        plt.ylim(-10,35)
        if i==1:
            plt.xlabel('Time',fontsize=font_size)
        if i ==0:
            plt.ylabel(r'$Value$',fontsize=font_size)
            plt.text(-0.5,2,r'$ICNN : P_2$',rotation=90,fontsize=font_size)
    for i in range(3):
        plt.subplot(4,3,9+i+1)
        plot_tra('quad','P2','Q{}'.format(i+1),200000)
        plt.xticks([0,10000,20000],['0','1.0','2.0'])
        plt.ylim(-10,35)
        if i==1:
            plt.xlabel('Time',fontsize=font_size)
        if i ==0:
            plt.ylabel(r'$Value$',fontsize=font_size)
            plt.text(-0.5,1,r'$Quad : P_2$',rotation=90,fontsize=font_size)
if __name__ == '__main__':
    # Initial conditions used in the Lorenz experiments.
    Q1 = torch.tensor([[9.0,6.0,8.0]])
    Q2 = torch.tensor([[3.0,5.0,6.0]])
    Q3 = torch.tensor([[1.0,9.0,2.0]])
    '''
    generate control data
    '''
    icnn_generate(Q2,20,200000,0.00001,0)
    quad_generate(Q2,20,200000,0.00001,0)
    icnn_generate(Q2,20,200000,0.00001,1)
    # NOTE(review): this call uses dt=0.0001 while the three above use
    # 0.00001 — possibly a typo; confirm against the experiment settings.
    quad_generate(Q2,20,200000,0.0001,1)
    '''
    Plot figure in Lorenz Experiment
    '''
    # plot1()
    # plot2()
    # original_data(set_state0)
    # plot_original_orbit()
    # plot_original_tra()
    # plt.show()
| 12,969 | 35.432584 | 103 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Lorenz/ES_ICNN.py
|
import torch.nn.functional as F
import timeit
from hessian import hessian
from hessian import jacobian
# from gradient import hessian
# from gradient import jacobian
import torch
import random
import numpy as np
def setup_seed(seed):
    """Seed torch, NumPy and Python's RNGs for reproducible training."""
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
setup_seed(10)
from Control_Nonlinear_Icnn import *
import math
import argparse
# Hyper-parameters: sample count N, 3-D input, hidden width, learning
# rate, Lyapunov-risk coefficient b, outer iterations, and batch size.
parser = argparse.ArgumentParser('ODE demo')
parser.add_argument('--N', type=int, default=10000)
parser.add_argument('--D_in', type=int, default=3)
parser.add_argument('--D_h', type=int, default=10)
parser.add_argument('--lr', type=float, default=0.03)
parser.add_argument('--b', type=float, default=2.1)
parser.add_argument('--niters', type=int, default=200)
parser.add_argument('--batch_size', type=int, default=100)
args = parser.parse_args()
def Lorenz_value(x):
    """Row-wise Lorenz drift f(x) with sigma=10, rho=28, beta=8/3.

    ``x`` is a (B, 3) tensor of states; returns a (B, 3) tensor.
    """
    rows = []
    for x1, x2, x3 in x:
        rows.append([10 * (x2 - x1), x1 * (28 - x3) - x2, x1 * x2 - x3 * 8 / 3])
    return torch.tensor(rows)
def modify_Lorenz_value(x):
    """Lorenz drift evaluated at x + e, where e = (6*sqrt(2), 6*sqrt(2), 27)
    is the P2 equilibrium, so the shifted system's origin maps to P2."""
    e = torch.tensor([6.0 * math.sqrt(2), 6.0 * math.sqrt(2), 27.0])
    rows = []
    for row in x:
        x1, x2, x3 = row + e
        rows.append([10 * (x2 - x1), x1 * (28 - x3) - x2, x1 * x2 - x3 * 8 / 3])
    return torch.tensor(rows)
def get_batch(data):
    """Sample ``args.batch_size`` distinct rows of ``data`` uniformly
    without replacement from the first ``args.N`` indices."""
    chosen = np.random.choice(np.arange(args.N, dtype=np.int64), args.batch_size, replace=False)
    return data[torch.from_numpy(chosen), :]
'''
For learning
'''
N = args.N # sample size
D_in = args.D_in # input dimension
H1 = args.D_h # hidden dimension
D_out = D_in # output dimension
data_x = torch.Tensor(N, D_in).uniform_(0, 10)  # training states from [0, 10]^3
eps = 0.001
start = timeit.default_timer()
# ICNN-based Lyapunov candidate with a jointly trained controller head.
model = LyapunovFunction(D_in,H1,D_out,(D_in,),0.1,[12,12,12,1],eps)
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
max_iters = 2000
for r in range(1, args.niters + 1):
    # break
    x = get_batch(data_x)
    i = 0
    L = []
    while i < max_iters:
        output, u = model(x)
        g = u*x  # state-multiplicative diffusion term of the controlled SDE
        f = Lorenz_value(x)
        # f = modify_Lorenz_value(x)
        x = x.clone().detach().requires_grad_(True)
        ws = model._icnn._ws
        bs = model._icnn._bs
        us = model._icnn._us
        smooth = model.smooth_relu
        input_shape = (D_in,)
        # V(x) = smooth(ICNN(x) - ICNN(0)) + eps*|x|^2, positive definite
        # by construction.
        V1 = lya(ws,bs,us,smooth,x,input_shape)
        V0 = lya(ws,bs,us,smooth,torch.zeros_like(x),input_shape)
        num_V = smooth(V1-V0)+eps*x.pow(2).sum(dim=1)
        V = torch.sum(smooth(V1-V0)+eps*x.pow(2).sum(dim=1))
        Vx = jacobian(V,x)
        Vxx = hessian(V,x)
        # NOTE(review): sized N (10000) though only batch_size entries get
        # filled, so .mean() divides by N and scales the risk by
        # batch_size/N — confirm this scaling is intended.
        loss = torch.zeros(N)
        # NOTE(review): the loop variable r shadows the outer epoch counter.
        for r in range(args.batch_size):
            # L_V: SDE generator applied to V; Vxg: diffusion term of V.
            L_V = torch.sum(Vx[0,D_in*r:D_in*r+D_in]*f[r,:]) + 0.5*torch.mm(g[r,:].unsqueeze(0),torch.mm(Vxx[D_in*r:D_in*r+D_in,D_in*r:D_in*r+D_in],g[r,:].unsqueeze(1)))
            Vxg = torch.sum(Vx[0,D_in*r:D_in*r+D_in]*g[r,:])
            v = num_V[0,r]
            loss[r] = Vxg**2/(v**2) - args.b*L_V/v
        Lyapunov_risk = (F.relu(-loss)).mean()
        L.append(Lyapunov_risk.item())
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # Lower the learning rate once the risk is small; restore otherwise.
        if Lyapunov_risk < 1.0:
            optimizer = torch.optim.Adam(model.parameters(), lr=0.005)
        elif Lyapunov_risk > 1.0:
            optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
        if Lyapunov_risk == 0.0:
            print(Lyapunov_risk)
            break
        i += 1
# torch.save(model._control.state_dict(),'ES_icnn_net.pkl')
# torch.save(model._icnn.state_dict(),'ES_icnn_V_net.pkl')
stop = timeit.default_timer()
print('\n')
print("Total time: ", stop - start)
# torch.save(model._control.state_dict(),'ES_icnn_net.pkl')
# torch.save(model._icnn.state_dict(),'ES_icnn_V_net.pkl')
# torch.save(model._control.state_dict(),'./neural_sde/Lorenz/ES_icnn_net_modify_1.pkl')
# torch.save(model._icnn.state_dict(),'./neural_sde/Lorenz/ES_icnn_V_net_modify_1.pkl')
| 4,181 | 31.169231 | 169 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Lorenz/ES_Quadratic.py
|
import torch.nn.functional as F
import timeit
from hessian import hessian
from hessian import jacobian
# from gradient import hessian
# from gradient import jacobian
import torch
import random
import math
import numpy as np
def setup_seed(seed):
    """Make torch, NumPy and the stdlib RNG deterministic."""
    for seeder in (torch.manual_seed, np.random.seed, random.seed):
        seeder(seed)
setup_seed(10)
import argparse
# Same hyper-parameter set as the ICNN variant: sample count N, input
# dim, hidden width, learning rate, risk coefficient b, iterations,
# batch size.
parser = argparse.ArgumentParser('ODE demo')
parser.add_argument('--N', type=int, default=10000)
parser.add_argument('--D_in', type=int, default=3)
parser.add_argument('--D_h', type=int, default=10)
parser.add_argument('--lr', type=float, default=0.03)
parser.add_argument('--b', type=float, default=2.1)
parser.add_argument('--niters', type=int, default=200)
parser.add_argument('--batch_size', type=int, default=100)
args = parser.parse_args()
class ControlNet(torch.nn.Module):
    """ReLU MLP producing the state-feedback control gains.

    ``torch.manual_seed(2)`` pins the initialisation, so two fresh
    instances are identical.
    """
    def __init__(self, n_input, n_hidden, n_output):
        super(ControlNet, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)
    def forward(self, x):
        act = torch.nn.ReLU()
        return self.layer3(act(self.layer2(act(self.layer1(x)))))
class VNet(torch.nn.Module):
    """Tanh MLP used inside the quadratic Lyapunov candidate.

    Weight init is pinned via ``torch.manual_seed(2)``.
    """
    def __init__(self, n_input, n_hidden, n_output):
        super(VNet, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)
    def forward(self, x):
        act = torch.nn.Tanh()
        return self.layer3(act(self.layer2(act(self.layer1(x)))))
class Net(torch.nn.Module):
    """Joint Lyapunov-candidate (VNet) and controller (ControlNet) module.

    ``forward`` returns ``(v, u * x)``: the raw V-net output and the
    state-multiplicative control term.
    """
    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self._v = VNet(n_input, 12, n_output)
        self._control = ControlNet(n_input, n_hidden, n_output)
    def forward(self, x):
        return self._v(x), self._control(x) * x
def Lorenz_value(x):
    """Lorenz drift applied to each row of the (B, 3) state batch ``x``
    (sigma=10, rho=28, beta=8/3); returns a (B, 3) tensor."""
    out = []
    for state in x:
        a, b, c = state[0], state[1], state[2]
        out.append([10 * (b - a), a * (28 - c) - b, a * b - c * 8 / 3])
    return torch.tensor(out)
def modify_Lorenz_value(x):
    """Lorenz drift at the shifted state x + e with e the equilibrium
    P2 = (6*sqrt(2), 6*sqrt(2), 27), so x = 0 corresponds to P2."""
    shift = torch.tensor([6.0 * math.sqrt(2), 6.0 * math.sqrt(2), 27.0])
    out = []
    for state in x:
        a, b, c = state + shift
        out.append([10 * (b - a), a * (28 - c) - b, a * b - c * 8 / 3])
    return torch.tensor(out)
def get_batch(data):
    """Sample a random mini-batch of rows from data without replacement."""
    idx = torch.from_numpy(
        np.random.choice(np.arange(args.N, dtype=np.int64), args.batch_size, replace=False)
    )
    return data[idx, :]  # (batch_size, D)
'''
For learning
'''
N = args.N # sample size
D_in = args.D_in # input dimension
H1 = args.D_h # hidden dimension
D_out = D_in # output dimension
# torch.manual_seed(10)
# Candidate training states sampled uniformly from [0, 10]^3.
data_x = torch.Tensor(N, D_in).uniform_(0, 10)
# x = torch.Tensor(N, D_in).uniform_(-10, 10)
l = 0.001
start = timeit.default_timer()
model = Net(D_in,H1, D_out)
max_iters = 2000
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
for r in range(1, args.niters + 1):
    i = 0
    L = []
    # Fresh mini-batch of states for this outer iteration.
    x = get_batch(data_x)
    while i < max_iters:
        V_net, u = model(x)
        # Pull out the V-network weights so V(x) can be rebuilt explicitly
        # below and differentiated w.r.t. x via jacobian/hessian.
        W1 = model._v.layer1.weight
        W2 = model._v.layer2.weight
        W3 = model._v.layer3.weight
        # W4 = model._v.layer4.weight
        B1 = model._v.layer1.bias
        B2 = model._v.layer2.bias
        B3 = model._v.layer3.bias
        # B4 = model._v.layer4.bias
        f = Lorenz_value(x)
        # f = modify_Lorenz_value(x)
        g = u
        # Detach so x becomes a leaf with grad enabled for jacobian/hessian.
        x = x.clone().detach().requires_grad_(True)
        # Manual forward pass through the V-network (tanh MLP).
        output = torch.mm(F.tanh(torch.mm(F.tanh(torch.mm(x,W1.T)+B1),W2.T)+B2),W3.T)+B3
        # output = torch.mm(torch.tanh(torch.mm(x,W1.T)+B1),W2.T)+B2
        # V = torch.sum(output)
        # Lyapunov candidate V(x) = l*|x|^2 + (x*output)^2, per sample and summed.
        num_v = torch.sum(l*x*x + ( x*output)**2,1)
        # num_v = torch.sum(output,1)
        V = torch.sum(l*x*x + (x*output)**2)
        Vx = jacobian(V,x)
        Vxx = hessian(V,x)
        # NOTE(review): loss has length N but only the first args.batch_size
        # entries are filled below, so .mean() divides by N rather than by the
        # batch size — confirm this scaling is intended.
        loss = torch.zeros(N)
        # NOTE(review): this inner `r` shadows the outer-loop variable `r`
        # (harmless for iteration count, but confusing to read).
        for r in range(args.batch_size):
            # L_V = <grad V, f> + 0.5 * g^T Hess(V) g for sample r (blocks of 3).
            L_V = torch.sum(Vx[0,3*r:3*r+3]*f[r,:]) + 0.5*torch.mm(g[r,:].unsqueeze(0),torch.mm(Vxx[3*r:3*r+3,3*r:3*r+3],g[r,:].unsqueeze(1)))
            Vxg = torch.sum(Vx[0,3*r:3*r+3]*g[r,:])
            v = num_v[r]
            loss[r] = Vxg**2/(v**2) - args.b*L_V/v
        # Penalise only violations of the stability condition (loss < 0).
        Lyapunov_risk = (F.relu(-loss)).mean()
        L.append(Lyapunov_risk.item())
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # NOTE(review): recreating the optimizer each step resets Adam's
        # moment estimates — confirm this learning-rate schedule is intended.
        if Lyapunov_risk < 1.0:
            optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
        elif Lyapunov_risk > 1.0:
            optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)
        if Lyapunov_risk == 0.0:
            break
        i += 1
stop = timeit.default_timer()
print('\n')
print("Total time: ", stop - start)
# torch.save(model._control.state_dict(),'ES_net.pkl')
# torch.save(model._v.state_dict(),'ES_V_net.pkl')
# torch.save(model._control.state_dict(),'./data/Lorenz/ES_quad_net_modify_1.pkl')
# torch.save(model._v.state_dict(),'./data/Lorenz/ES_quad_V_net_modify_1.pkl')
| 5,582 | 30.016667 | 142 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Lorenz/Control_Nonlinear_Icnn.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class ICNN(nn.Module):
    """Input-convex neural network (Amos et al. style).

    Layer 0 maps the input directly; every later layer i combines a fresh
    input projection (ws[i], bs[i]) with a non-negative (softplus-constrained)
    weight us[i-1] applied to the previous activation, which keeps the network
    convex in its input when the activation is convex and non-decreasing.
    """
    def __init__(self, input_shape, layer_sizes, activation_fn):
        super(ICNN, self).__init__()
        self._input_shape = input_shape
        self._layer_sizes = layer_sizes
        self._activation_fn = activation_fn
        ws = []
        bs = []
        us = []
        # NOTE(review): prev_layer is assigned but never used.
        prev_layer = input_shape
        # First layer: plain affine map of the input.
        w = torch.empty(layer_sizes[0], *input_shape)
        nn.init.xavier_normal_(w)
        ws.append(nn.Parameter(w))
        b = torch.empty([layer_sizes[0], 1])
        nn.init.xavier_normal_(b)
        bs.append(nn.Parameter(b))
        # Remaining layers: input skip-connection plus a u-weight on the
        # previous layer's output.
        for i in range(len(layer_sizes))[1:]:
            w = torch.empty(layer_sizes[i], *input_shape)
            nn.init.xavier_normal_(w)
            ws.append(nn.Parameter(w))
            b = torch.empty([layer_sizes[i], 1])
            nn.init.xavier_normal_(b)
            bs.append(nn.Parameter(b))
            u = torch.empty([layer_sizes[i], layer_sizes[i-1]])
            nn.init.xavier_normal_(u)
            us.append(nn.Parameter(u))
        self._ws = nn.ParameterList(ws)
        self._bs = nn.ParameterList(bs)
        self._us = nn.ParameterList(us)
    def forward(self, x):
        # x: [batch, data]
        if len(x.shape) < 2:
            x = x.unsqueeze(0)
        else:
            # Move the batch axis last so columns index samples; weights then
            # left-multiply x via addmm below.
            data_dims = list(range(1, len(self._input_shape) + 1))
            x = x.permute(*data_dims, 0)
        z = self._activation_fn(torch.addmm(self._bs[0], self._ws[0], x))
        for i in range(len(self._us)):
            # softplus keeps the layer-to-layer weights non-negative (convexity).
            u = F.softplus(self._us[i])
            w = self._ws[i + 1]
            b = self._bs[i + 1]
            z = self._activation_fn(torch.addmm(b, w, x) + torch.mm(u, z))
        return z
class ControlNet(torch.nn.Module):
    """Three-layer ReLU MLP that outputs the control gain u(x)."""

    def __init__(self, n_input, n_hidden, n_output):
        super(ControlNet, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        first = relu(self.layer1(x))
        second = relu(self.layer2(first))
        return self.layer3(second)
class LyapunovFunction(nn.Module):
    """ICNN-based Lyapunov candidate paired with a control network.

    forward(x) returns (V(x), u(x)*x) where V(x) is built so that V(0) = 0
    (by subtracting the ICNN value at the origin) and V is positive definite
    thanks to the eps * |x|^2 term.
    """

    def __init__(self, n_input, n_hidden, n_output, input_shape, smooth_relu_thresh=0.1, layer_sizes=[64, 64], lr=3e-4, eps=1e-3):
        super(LyapunovFunction, self).__init__()
        torch.manual_seed(2)  # deterministic initialisation of both sub-nets
        self._d = smooth_relu_thresh
        self._icnn = ICNN(input_shape, layer_sizes, self.smooth_relu)
        self._eps = eps
        self._control = ControlNet(n_input, n_hidden, n_output)

    def forward(self, x):
        origin = torch.zeros_like(x)
        value = self._icnn(x)
        value_at_origin = self._icnn(origin)
        control = self._control(x)
        u0 = self._control(origin)  # computed as in the original; unused in the return
        lyapunov = self.smooth_relu(value - value_at_origin) + self._eps * x.pow(2).sum(dim=1)
        return lyapunov, control * x

    def smooth_relu(self, x):
        """C^1-smooth ReLU: quartic polynomial on [0, d), shifted linear beyond."""
        relu = x.relu()
        quartic = (2 * self._d * relu.pow(3) - relu.pow(4)) / (2 * self._d ** 3)
        linear = x - self._d / 2
        return torch.where(relu < self._d, quartic, linear)
def lya(ws, bs, us, smooth, x, input_shape):
    """Functional ICNN forward pass from raw parameter lists.

    ws/bs are the per-layer input projections and biases, us the
    (softplus-constrained) layer-to-layer weights, smooth the activation,
    x the input batch, input_shape the shape of one data point.
    """
    if len(x.shape) < 2:
        x = x.unsqueeze(0)
    else:
        # Put the batch axis last so columns index samples.
        data_dims = list(range(1, len(input_shape) + 1))
        x = x.permute(*data_dims, 0)
    z = smooth(torch.addmm(bs[0], ws[0], x))
    for i in range(len(us)):
        gate = F.softplus(us[i])
        z = smooth(torch.addmm(bs[i + 1], ws[i + 1], x) + torch.mm(gate, z))
    return z
| 3,750 | 34.386792 | 122 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Energy/AS.py
|
import torch
import torch.nn.functional as F
import timeit
import math
class Net(torch.nn.Module):
    """Two-layer ReLU MLP used as the scalar controller gain network."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        return self.layer2(relu(self.layer1(x)))
def f_value(x):
    """Drift of the scalar SDE, f(x) = x*log(1+|x|), evaluated element-wise.

    Returns an (N, 1) tensor for a length-N (or (N, 1)) input batch.
    """
    rows = [[xi * math.log(1 + abs(xi))] for xi in x]
    return torch.tensor(rows)
'''
For learning
'''
N = 4000 # sample size
D_in = 1 # input dimension
H1 = 6 # hidden dimension
D_out = 1 # output dimension
torch.manual_seed(10)
# Training states sampled uniformly from [0, 50].
x = torch.Tensor(N, D_in).uniform_(0,50)
theta = 0.9
out_iters = 0
while out_iters < 1:
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    # NOTE(review): t is printed as "Verified time" below but never updated.
    t = 0
    max_iters = 100
    learning_rate = 0.1
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    while i < max_iters:
        out = model(x)
        # Control g(x) = u(x)*x so the control vanishes at the origin.
        g = out*x
        f = f_value(x)
        # Stability condition residual; negative entries are violations.
        loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    print("Verified time: ", t)
    out_iters+=1
# Persist the trained controller gain network.
torch.save(model.state_dict(), './theta0.9_1d_log_net.pkl')
| 1,720 | 21.064103 | 70 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Energy/functions.py
|
import numpy as np
import math
import torch
import timeit
from scipy import integrate
# Wall-clock timing for the whole script; fix the NumPy seed for reproducibility.
start = timeit.default_timer()
np.random.seed(1)
class Net(torch.nn.Module):
    """Two-layer ReLU MLP; architecture matches the trained controller net."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        hidden = relu(self.layer1(x))
        return self.layer2(hidden)
# Load the trained controller gain network (theta=0.9 run from AS.py).
log_model = Net(1,6,1)
log_model.load_state_dict(torch.load('./data/Energy/theta0.9_1d_log_net.pkl'))
N = 100000     # Euler-Maruyama steps per trajectory
dt = 0.00001   # Euler-Maruyama step size
m = 20         # trajectories per control strength
T = 50         # number of control strengths in the k-sweep
x0 = [0.5]     # initial condition (overridden to 20.0 inside the samplers)
def k_list(N, dt, k, m):
    """Euler-Maruyama simulation of dx = x*log(1+|x|) dt + k*x dW.

    Simulates m independent trajectories of N steps (step dt) from the fixed
    initial condition x0 = 20.0 under linear control strength k. Returns an
    (N+1, m) tensor whose column r is the r-th sample path.
    """
    x0 = 20.0
    data = torch.zeros([N + 1, m])
    sqrt_dt = math.sqrt(dt)
    for col in range(m):
        path = [[x0]]
        noise = np.random.normal(0, 1, N)
        for step in range(N):
            x = path[step][0]
            drift = x * math.log(1 + abs(x)) * dt
            diffusion = k * x * sqrt_dt * noise[step]
            path.append([x + drift + diffusion])
        data[:, col] = torch.tensor(path)[:, 0]
    return data
def learning_control(N, dt, m):
    """Simulate m paired trajectories: learned control vs linear control k=6.

    For each sample r the NumPy seed is set to 4*r+1 so both trajectories
    share the same Brownian increments. Returns a (2, N+1, m) tensor with the
    learned-control paths in row 0 and the linear (k=6) paths in row 1.
    """
    x0 = [20.0]
    data = torch.zeros([2, N + 1, m])
    sqrt_dt = math.sqrt(dt)
    for r in range(m):
        learned, linear = [x0], [x0]
        np.random.seed(r * 4 + 1)
        noise = np.random.normal(0, 1, N)
        for i in range(N):
            x = learned[i][0]
            y = linear[i][0]
            # Gain from the trained controller network at the current state.
            k = log_model(torch.tensor([learned[i]]))
            learned.append([x + x * math.log(1 + abs(x)) * dt + k[0] * x * sqrt_dt * noise[i]])
            linear.append([y + y * math.log(1 + abs(y)) * dt + 6 * y * sqrt_dt * noise[i]])
        data[0, :, r] = torch.tensor(learned)[:, 0]
        data[1, :, r] = torch.tensor(linear)[:, 0]
        print(r)
    return data
def k_data():
    """Sweep the linear control strength k over 0.2..10 and cache trajectories.

    For each of the T strengths, simulates m sample paths via k_list, records
    the mean final position, and saves everything to the k-table file.
    """
    endpoint = torch.zeros(T)
    trajectories = torch.zeros(T, N + 1, m)
    for idx in range(T):
        strength = idx * 0.2 + 0.2
        sample = k_list(N, dt, strength, m)
        endpoint[idx] = sample[-1].mean()
        trajectories[idx, :] = sample
        print(idx)
    torch.save({'data': trajectories, 'end': endpoint}, './data/Energy/k_table_x0_20.pt')
def learning_data():
    """Generate and cache the learned-vs-linear comparison trajectories."""
    # Earlier variant: learning_control(200000, dt, 10)
    samples = learning_control(100000, dt, 20)
    torch.save({'data': samples}, './data/Energy/20seed_learning_control.pt')
def k_energy_cost():
    """Trapezoidal estimate of the mean control energy for linear control.

    Uses row 29 of the cached k-sweep table (presumably k = 6 — the sweep is
    0.2*(idx+1)), truncated to 75000 steps, integrating g(x) = 6*x^2 along
    time and averaging over the 20 sample paths.
    """
    table = torch.load('./data/Energy/k_table.pt')
    paths = table['data'][29, :75001, :]
    steps = 75000
    step_size = 0.00001
    energy_density = 6 * paths ** 2
    time_grid = np.linspace(0, step_size * steps, steps + 1)
    print(time_grid.shape)
    total = 0
    for i in range(20):
        total += integrate.trapz(np.array(energy_density[:, i]), time_grid)
        print(i)
    print(total / 20)
def energy_cost():
    """Compare control energy of linear (k=6) vs learned control trajectories.

    Integrates the squared control effort along each path, but only until the
    trajectory first enters the band |x| < 0.1; averages over sample paths.
    """
    Data = torch.load('./data/Energy/20seed_learning_control.pt')
    data = Data['data'].detach().numpy()
    # Row 1 = linear-control paths, row 0 = learned-control paths.
    X = data[1,:]
    Y = data[0,:][:,np.delete(np.arange(20),15)]# drop trajectory 15, which diverged because dt is not small enough in the Euler method
    N = 100000
    dt = 0.00001
    v_x = 0
    v_y = 0
    # a = np.linspace(0, dt*N, N+1)
    for i in range(Y.shape[1]):
        # Linear control energy density (6*x)^2; learned density (u(x)*x)^2.
        g_x = 36*X[:,i]**2
        g_y = (log_model(torch.tensor(Y[:,i]).unsqueeze(1))[:,0].detach().numpy()*Y[:,i])**2
        norm_x = np.abs(X[:,i])
        norm_y = np.abs(Y[:,i])
        # First time index at which the path enters |x| < 0.1.
        ind1 = np.where(norm_x<0.1)[0][0]
        ind2 = np.where(norm_y<0.1)[0][0]
        a1 = np.linspace(0, dt*ind1, ind1+1)
        a2 = np.linspace(0, dt*ind2, ind2+1)
        v_x += integrate.trapz(g_x[0:ind1+1], a1)
        v_y += integrate.trapz(g_y[0:ind2+1], a2)
        print(i)
    # NOTE(review): divisors 20 and 19 are hard-coded (19 = 20 minus the one
    # dropped trajectory) — keep in sync with the np.delete call above.
    print(v_x/20,v_y/19)
# Uncomment to regenerate the cached data / cost numbers:
# energy_cost()
# learning_data()
# k_data()
stop= timeit.default_timer()
print('time:',stop-start)
| 3,792 | 26.092857 | 129 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Energy/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
import torch
import matplotlib
# Use CJK-capable fonts and LaTeX rendering for all figure text.
matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
matplotlib.rcParams['text.usetex'] = True
def plot_grid():
    """Apply the shared major/minor grid styling to the current axes."""
    # major grid lines
    plt.grid(b=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
'''
Data corresponding to (a) in Figure 4, strength k from 0.2:10:0.2, 20 sample trajectories for each k,
we choose dt=1e-5 and N=1e5 in Euler method. Data form is dictionary with key 'data' and 'end', the size
for 'data' is [50,10001,20], 'end' corresponds to the average position over 20 trajectories for each k, the size is [50]
'''
Data = torch.load('./k_table_x0_20.pt')
data = Data['data']
endpoint = Data['end']
# Log-compress the endpoints so the scatter covers a readable range.
endpoint = torch.log(1+endpoint)
T = len(data)
dt = 0.00001
fontsize = 30
fig = plt.figure()
plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
# Panel (a): mean final position vs control strength k (= index/5).
fig1 = plt.subplot(141)
plt.scatter(np.arange(T) / 5,endpoint, s=45, c=endpoint, marker='.',alpha=0.85,cmap='rainbow')
plt.axvline(28/5,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.5)
plt.ylabel(r'$\log(1+x)$', fontsize=fontsize)
plt.xlabel(r'$k$', fontsize=fontsize)
# cb = plt.colorbar()
# cb.set_ticks([0, 5, 10, 15])
# cb.ax.tick_params(labelsize=fontsize)
plt.xticks([0, 2, 4, 6, 8, 10],
# ["0", "", "0.5", "","1.0", "", "1.5", "", "2.0"]
)
plt.yticks([0, 5, 10, 15],
# ["0", "", "0.5", "","1.0", "", "1.5", "", "2.0"]
)
plot_grid()
plt.tick_params(labelsize=fontsize)
'''
Fix k=6,20 trajectories for linear control and neural stochastic control from initial 20.0,we set dt = 1e-5, N = 1e5
in Euler method, the random seeds are set as 4*r+1 for r in range(20), the data form is dictionary with key 'data', the
data size is [2,10001,20], data[0,:] corresponds to trajectories for learning control, data[1,:] corresponds to linear control.
'''
# Data = torch.load('./neural_sde/Energy/20seed_learning_control.pt')
Data = torch.load('./data/Energy/20seed_learning_control.pt')
data = Data['data']
# Panel: linear control — mean trajectory with a +/- one-std band.
fig2 = plt.subplot(154)
X = data[1,:]
X = X[:50000,:]
mean_data = torch.mean(X,1)
std_data = torch.std(X,1)
plt.fill_between(np.arange(len(X)) * dt,mean_data-std_data,mean_data+std_data,color='r',alpha=0.2)
plt.plot(np.arange(len(X)) * dt,mean_data,color='r',alpha=0.9,label='Linear control')
# plt.title('ME:{}'.format(38418))
plt.ylim([-100, 200])
plt.xlabel(r'Time', fontsize=fontsize)
plt.ylabel(r'$x$', fontsize=fontsize)
plt.xticks([0, 0.125, 0.25, 0.375, 0.5],
["$0$", "$~$","$0.25$","$~$", "$0.5$"]
)
plt.yticks([-100, 0, 100, 200])
plt.legend(fontsize=fontsize * 0.5)
plot_grid()
plt.tick_params(labelsize=fontsize)
# Panel: learned control — mean trajectory with a +/- one-std band.
fig3 = plt.subplot(155)
Y = data[0,:]
Y = Y[:14000,:]
mean_data = torch.mean(Y,1)
std_data = torch.std(Y,1)
plt.fill_between(np.arange(len(Y))*dt,mean_data-std_data,mean_data+std_data,color='g',alpha=0.2)
plt.plot(np.arange(len(Y))*dt,mean_data,color='g',alpha=0.9,label='Learned control')
# plt.ylim([-100, 200])
plt.xlabel(r'Time', fontsize=fontsize)
plt.xticks([0, 0.075/2, 0.075, (0.075 + 0.15)/2, 0.15],
["$0$", "$~$","$0.075$", "$~$", "$0.15$"]
)
plt.ylabel(r'$x$', fontsize=fontsize)
plt.yticks([-20, 0, 20, 40],
# ["0", "0.05","0.1", "0.15"]
)
plt.legend(fontsize=fontsize * 0.5)
plot_grid()
plt.tick_params(labelsize=fontsize)
# plt.title('ME:{}'.format(1375))
plt.show()
| 3,641 | 34.359223 | 127 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Table2/table2.py
|
import numpy as np
import sys
sys.path.append('./data')
# generate the data of table2
def L2_norm(a, case):
    """Print the Table-2 convergence statistics for a cached trajectory file.

    Parameters
    ----------
    a : str
        Model tag used in the data file name ('icnn' or 'quad').
    case : int
        0 -> equilibrium P_1 (origin); any other value -> equilibrium P_2.

    Loads the saved sample paths, drops known diverged trajectories, averages
    the L2 norm of the (shifted) state over the remaining samples, and prints
    the first time the averaged norm drops below the case-specific threshold
    together with its minimum value.
    """
    if case == 0:
        # Equilibrium P_1: trajectories should converge to the origin.
        Y = np.load('./data/{}_data_P1_Q2_20.npy'.format(a))
        # Drop the trajectories known to have diverged (Euler dt too large).
        ind = np.delete(np.arange(20), np.array([1, 3, 11, 15]))
        # (The original cast to float64 twice; once is sufficient.)
        Y = Y[ind, :].astype('float64')
        X = np.linalg.norm(Y, axis=1)
        Z = np.mean(X, 0)
        index = np.where(Z < 1e-10)
        # Time step here is 5e-5 per sample index.
        print('{} convergence time of 1e-10:'.format(a), format(index[0][0] * 5e-5, '.3f'))
        print('{} min :'.format(a), np.min(Z))
    else:
        # Equilibrium P_2 = (6*sqrt(2), 6*sqrt(2), 27): shift states before
        # measuring distance to the equilibrium.
        e = np.array([[6 * np.sqrt(2)], [6 * np.sqrt(2)], [27]])
        Y = np.load('./neural_sde/calculate/{}_data_P2_Q2_20.npy'.format(a))
        ind = np.delete(np.arange(20), np.array([10, 12]))
        Y = Y[ind, :].astype('float64')
        # Broadcast the shift over all trajectories (replaces the per-row loop).
        Y = Y - e
        X = np.linalg.norm(Y, axis=1)
        Z = np.mean(X, 0)
        index = np.where(Z < 0.02)
        # Time step here is 1e-4 per sample index.
        print('{} convergence time of 0.02:'.format(a), format(index[0][0] * 1e-4, '.3f'))
        print('{} min :'.format(a), np.min(Z))
# Reproduce the four entries of Table 2 (both models, both equilibria).
L2_norm('icnn',0)
L2_norm('quad',0)
L2_norm('icnn',1)
L2_norm('quad',1)
| 1,237 | 31.578947 | 88 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/stuart/AS.py
|
import torch
import torch.nn.functional as F
import numpy as np
import timeit
class Net(torch.nn.Module):
    """Two-layer ReLU MLP mapping the oscillator state to control gains."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        return self.layer2(relu(self.layer1(x)))
'''
For learning
'''
n = 20
D_in = 2*n-1 # input dimension
H1 = 4*n # hidden dimension
D_out = 2*n-1 # output dimension
# Pre-generated (state, derivative) training pairs from functions.generate().
Data = torch.load('./data/stuart/20_train_data_small.pt')
# Data = torch.load('./data/stuart/20_train_data.pt')
x = Data['X']
f = Data['Y']
# Debug print of the phase-difference columns.
print(x[:,20:])
theta = 0.75
out_iters = 0
valid=True
while out_iters < 1 and valid == True:
    # break
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    # NOTE(review): t is printed as "Verified time" below but never updated.
    t = 0
    max_iters = 1000
    learning_rate = 0.01
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    L = torch.zeros(1000)
    while i < max_iters:
        out = model(x)
        # Control g(x) = u(x)*x so the control vanishes at the origin.
        g = out*x
        # Stability condition residual; negative entries are violations.
        loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        # Lyapunov_risk.requires_grad_(True)
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # NOTE(review): this stores a graph-attached tensor each step; using
        # Lyapunov_risk.item() (or .detach()) would avoid holding the graphs.
        L[i] = Lyapunov_risk
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    print("Verified time: ", t)
    out_iters+=1
torch.save({'loss':L},'./data/stuart/loss.pt')
# torch.save(model.state_dict(), './neural_sde/stuart/n_20/20_net_small.pkl')
| 1,843 | 22.341772 | 82 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/stuart/generate.py
|
import numpy as np
from scipy import integrate
import torch
import matplotlib.pyplot as plt
import math
import timeit
from scipy.integrate import odeint
import sys
sys.path.append('./neural_sde/stuart')
from AS import *
from functions import *
start = timeit.default_timer()
# Load the trained Stuart-Landau controller network (architecture from AS.py).
stuart_model = Net(D_in,H1,D_out)
# stuart_model.load_state_dict(torch.load('./neural_sde/stuart/n_20/20_net.pkl'))
stuart_model.load_state_dict(torch.load('./data/stuart/20_net_small.pkl'))
torch.manual_seed(6)
n = 20
# Mean-field coupling Laplacian: L = I - (1/n) * ones.
L = torch.eye(n)-torch.ones([n,n])/n
N = 60000    # Euler steps per run
dt = 0.0001  # Euler step size
# Random initial state: n radii in [0, 5) and n-1 phase differences in [-1, 1).
x0 = torch.cat([torch.Tensor(n).uniform_(0, 5),torch.Tensor(n-1).uniform_(-1.0,1.0)],0)
R = x0[:20]
dW = x0[20:]
def original_20():
    """Continue the uncontrolled 20-oscillator simulation from its cached endpoint.

    Loads the final state of the previous run, Euler-integrates the original
    (rho, theta) dynamics for N more steps and saves the extension segment.
    """
    checkpoint = torch.load('./data/stuart/20_original_data.pt')
    start_state = checkpoint['X'][-1]
    X = torch.zeros(N + 1, 2 * n)
    X[0, :] = start_state
    for i in range(N):
        X[i + 1, :] = X[i, :] + original_f_value(X[i, :], L) * dt
        if i % 100 == 0:
            print(i)  # progress indicator
    torch.save({'X': X}, './data/stuart/20_original_data_add.pt')
def test():
    """Continue the controlled simulation from its cached endpoint.

    Resumes the stochastic Euler integration of the transformed dynamics
    with the learned control, reusing the seeded noise stream so the
    extension is consistent with the previous segment.
    """
    torch.manual_seed(7)
    X = torch.load('./data/stuart/20_test_data_try.pt')
    X = X['X']
    x0 = X[-1]
    length = len(X)-1
    # length = 0
    # x0 = torch.cat([torch.Tensor(n).uniform_(0, 5),torch.Tensor(n-1).uniform_(-1.0,1.0)],0)
    X = torch.zeros(N+1,2*n-1)
    X[0,:] = x0
    # Draw length+N noise samples and discard the first `length` so the
    # stream continues exactly where the previous (seeded) run stopped.
    z = torch.randn(length+N,2*n-1)[length:,:]
    for i in range(N):
        x = X[i,:]
        with torch.no_grad():
            u = stuart_model(x)
        dx = f_value(x,L)
        # Euler-Maruyama step: drift f(x) dt + control diffusion x*u dW.
        new_x = x + dx*dt + x*u*z[i,:]*math.sqrt(dt)
        X[i+1,:]=new_x
        if i%100 == 0:
            print(i)
    torch.save({'X':X},'./data/stuart/20_test_data_try_add.pt')
if __name__ == '__main__':
    # Extend the uncontrolled run; switch to test() for the controlled one.
    original_20()
    # test()
    stop = timeit.default_timer()
    print(stop-start)
| 1,915 | 24.210526 | 96 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/stuart/functions.py
|
import torch
import numpy as np
import timeit
import matplotlib.pyplot as plt
'''
x = rho_1,rho_2,rho_n, w1,w2,wn-1
'''
# Transform phase differences \tilde{theta} back to absolute phases theta.
def theta(W):
    """Recover absolute phases from the n-1 phase differences in W.

    Appends a reference phase of 1.0, then left-multiplies by an
    upper-triangular matrix of ones so entry j becomes the suffix sum
    W[j] + ... + W[n-1].
    """
    padded = torch.cat([W, torch.tensor([1.0])], 0)
    m = len(padded)
    suffix_sum = torch.triu(torch.ones(m, m))
    return torch.mm(suffix_sum, padded.unsqueeze(1))[:, 0]
# Transform absolute phases theta into the pairwise-difference matrix.
def diff_theta(W):
    """Pairwise phase-difference matrix D with D[i, j] = W[j] - W[i]."""
    diff = W.unsqueeze(0) - W.unsqueeze(1)
    # Match the original's float32 result (it filled a torch.eye matrix).
    return diff.float()
#Equation for \Tilde{\rho},\Tilde{\theta}
def f_value(x,L):
    """Vector field of the transformed coupled Stuart-Landau system.

    x packs k shifted radii (rho - 1) followed by k-1 phase differences;
    L is the coupling Laplacian. Returns the concatenated derivatives
    (f_R, f_W) of the shifted radii and phase differences.
    """
    c1 = -1.8     # coupling phase parameter
    c2 = 4        # shear parameter
    sigma = 0.01  # coupling strength
    k = int((len(x)+1)/2)
    # Undo the shift: the state stores rho - 1.
    R = x[:k]+1.0
    W = x[k:]
    # Pairwise phase differences of the reconstructed absolute phases.
    diff_ang = diff_theta(theta(W))
    f_R = torch.zeros_like(R)
    f_W = torch.zeros_like(W)
    for j in range(len(R)):
        f_R[j] = R[j]-R[j]**3-sigma*torch.sum(L[j,:]*R*(torch.cos(diff_ang[j,:])-c1*torch.sin(diff_ang[j,:])))
    # Phase-difference dynamics: difference of consecutive oscillators' phase equations.
    for j in range(len(W)):
        f_W[j] = -c2*(R[j]**2-R[j+1]**2)-sigma*(torch.sum(L[j,:]*R*(c1*torch.cos(diff_ang[j,:])+torch.sin(diff_ang[j,:])))/R[j]\
            -torch.sum(L[j+1,:]*R*(c1*torch.cos(diff_ang[j+1,:])+torch.sin(diff_ang[j+1,:])))/R[j+1])
    return torch.cat([f_R,f_W],0)
#Equation for \rho, \theta
def original_f_value(x,L):
    """Vector field of the original coupled Stuart-Landau system.

    x packs k radii followed by k absolute phases; L is the coupling
    Laplacian. Returns the concatenated derivatives (f_R, f_W).
    """
    c1 = -1.8     # coupling phase parameter
    c2 = 4        # shear parameter
    sigma = 0.01  # coupling strength
    k = int(len(x)/2)
    R = x[:k]
    W = x[k:]
    # Pairwise differences of the absolute phases.
    diff_ang = diff_theta(W)
    f_R = torch.zeros_like(R)
    f_W = torch.zeros_like(W)
    for j in range(len(R)):
        f_R[j] = R[j]-R[j]**3-sigma*torch.sum(L[j,:]*R*(torch.cos(diff_ang[j,:])-c1*torch.sin(diff_ang[j,:])))
        f_W[j] = -c2*(R[j]**2)-sigma*(torch.sum(L[j,:]*R*(c1*torch.cos(diff_ang[j,:])+torch.sin(diff_ang[j,:])))/R[j])
    return torch.cat([f_R,f_W],0)
# Convert polar coordinates to Euclidean coordinates.
def transform(n, X):
    """Convert n oscillators from polar (rho, theta) rows to Cartesian (x, y).

    X has rows [rho_1..rho_n, theta_1..theta_n]; the result keeps the layout
    with x coordinates in the first n columns and y in the next n.
    """
    Y = torch.zeros_like(X)
    Y[:, :n] = X[:, :n] * torch.cos(X[:, n:2 * n])
    Y[:, n:2 * n] = X[:, :n] * torch.sin(X[:, n:2 * n])
    return Y
# Generate labelled control-training data.
def generate():
    """Sample random (rho, theta-difference) states and label them with f_value.

    Saves 5000 input/derivative pairs used to train the stuart controller.
    """
    N = 5000
    n = 20
    torch.manual_seed(10)
    # R = torch.Tensor(N, n).uniform_(0, 10)
    # W = torch.Tensor(N, n-1).uniform_(-15, 15)
    R = torch.Tensor(N, n).uniform_(0, 5)
    W = torch.Tensor(N, n - 1).uniform_(-10, 10)
    X = torch.cat([R, W], 1)
    Y = torch.zeros_like(X)
    coupling = torch.eye(n) - torch.ones([n, n]) / n
    for i in range(N):
        Y[i, :] = f_value(X[i, :], coupling)
        # Progress print (note: as in the original, this skips multiples of 100).
        if i % 100:
            print(i)
    torch.save({'X': X, 'Y': Y}, './neural_sde/stuart/n_20/20_train_data_small.pt')
# Join trajectories from two adjacent time intervals into one file.
def cat_data(path0='./neural_sde/stuart/n_20/20_original_data_cat.pt',path1='./neural_sde/stuart/n_20/20_original_data.pt',path2='./neural_sde/stuart/n_20/20_original_data_add.pt'):
    """Concatenate two consecutive trajectory segments and save to path0.

    Takes every 10th sample of the first 80001 rows of path1, then appends
    path2 while dropping its duplicated initial row.
    """
    first = torch.load(path1)['X'][0:80001:10]
    second = torch.load(path2)['X']
    torch.save({'X': torch.cat([first, second[1:, :]], 0)}, path0)
# Rebuild the controlled trajectory in original (rho, theta) coordinates.
def diff_to_orig(n,path1='./neural_sde/stuart/n_20/20_original_data.pt',path2='./neural_sde/stuart/n_20/20_test_data.pt'):
    """Restore (rho, theta) from the transformed controlled trajectory.

    The controlled system evolves (rho - 1, theta differences). This adds the
    radius shift back and integrates the phase differences to absolute phases,
    using the last phase of the uncontrolled run as the reference.
    """
    orig_data = torch.load(path1)['X']
    trans_data = torch.load(path2)['X']
    last_phase = orig_data[:, -1:]       # reference phase theta_n per time step
    radii = trans_data[:, :n] + 1        # rho = tilde(rho) + 1
    diffs = trans_data[:, n:]
    stacked = torch.cat([diffs, last_phase], 1).T
    # Upper-triangular ones => suffix sums, inverting the differencing.
    suffix_sum = torch.triu(torch.ones(len(stacked), len(stacked)))
    phases = torch.mm(suffix_sum, stacked)
    return torch.cat([radii, phases.T], 1)
if __name__ == '__main__':
    # Join the uncontrolled and controlled segment pairs, then regenerate
    # the training data.
    cat_data('./data/stuart/20_original_data_cat.pt','./data/stuart/20_original_data.pt','./data/stuart/20_original_data_add.pt')
    cat_data('./data/stuart/20_test_data_cat.pt','./data/stuart/20_test_data_try.pt','./data/stuart/20_test_data_try_add.pt')
    generate()
| 3,921 | 30.376 | 181 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/stuart/plot.py
|
from functions import *
import numpy as np
import torch
import matplotlib.pyplot as plt
import os
os.environ["KMP_DUPLICATE_LIB_OK"]="TRUE"
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
font_size = 35
def plot_grid():
    """Apply the shared major/minor grid styling to the current axes."""
    # major grid lines
    plt.grid(b=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(b=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
'''
Plot trajectories and orbits
'''
# Plot window: samples L..E (thinned by 10) of the concatenated runs.
L = 20000
E = 50000
# Top row: uncontrolled system (x(t), y(t), and the x-y orbit).
plt1 = plt.subplot(231)
X = torch.load('./data/stuart/20_original_data_cat.pt')
X = X['X'][L:E:10,:]
# Convert polar (rho, theta) rows to Cartesian (x, y).
X = transform(20,X)
for i in range(20):
    plt.plot(np.arange(len(X[:,0])),X[:,i],color = plt.cm.Accent(i/45))
plt.xticks([0,1000,2000,3000],[0,1.0,2.0,3.0],fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plot_grid()
plt.title(r'$x$',fontsize=font_size)
plt.ylabel('Without Control',fontsize=font_size)
plt2 = plt.subplot(232)
for i in range(20):
    plt.plot(np.arange(len(X[:,0])),X[:,i+20],color = plt.cm.Accent(i/45))
plt.xticks([0,1000,2000,3000],[0,1.0,2.0,3.0],fontsize=font_size)
plt.title(r'$y$',fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plot_grid()
plt3 = plt.subplot(233)
for i in range(20):
    plt.plot(X[:,i+0],X[:,i+20],color = plt.cm.Accent(i/45),label='{}'.format(i))
plt.xticks([-1,0,1],fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plt.xlabel(r"$x$",fontsize=font_size)
plt.ylabel(r'$y$',fontsize=font_size)
plot_grid()
plt.title('Orbit',fontsize=font_size)
# Bottom row: the same three panels for the controlled system.
plt4 = plt.subplot(234)
X = diff_to_orig(20,'./data/stuart/20_original_data_cat.pt','./neural_sde/stuart/n_20/20_test_data_cat.pt')[L:E:10,:]
X = transform(20,X)
for i in range(20):
    plt.plot(np.arange(len(X[:,0])),X[:,i],color = plt.cm.Accent(i/45))
plot_grid()
plt.ylabel('With Control',fontsize=font_size)
plt.xticks([0,1000,2000,3000],[0,1.0,2.0,3.0],fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plt.xlabel('Time',fontsize=font_size)
plt5 = plt.subplot(235)
for i in range(20):
    plt.plot(np.arange(len(X[:,0])),X[:,i+20],color = plt.cm.Accent(i/45))
plot_grid()
plt.xticks([0,1000,2000,3000],[0,1.0,2.0,3.0],fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plt.xlabel('Time',fontsize=font_size)
plt6 = plt.subplot(236)
for i in range(20):
    plt.plot(X[:,i+0],X[:,i+20],color = plt.cm.Accent(i/45),label='{}'.format(i))
plt.xticks([-1,0,1],fontsize=font_size)
plt.yticks([-1,0,1],fontsize=font_size)
plt.xlabel(r"$x$",fontsize=font_size)
plt.ylabel(r'$y$',fontsize=font_size)
plot_grid()
plt.show()
'''
Plot loss function
'''
# loss = torch.load('./data/stuart/loss.pt')
# loss = loss['loss'].detach()
# loss = loss[:30]
# fig = plt.figure(figsize=(6,8))
# plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
# plt1 = plt.subplot(121)
# # loss = loss.detach().numpy()
# plt.plot(np.arange(len(loss)),loss)
# plt2=plt.subplot(122)
# loss = loss[10:30]
# # loss = loss.detach().numpy()
# plt.plot(np.arange(len(loss)),loss)
# plt.plot()
# #% start: automatic generated code from pylustrator
# plt.figure(1).ax_dict = {ax.get_label(): ax for ax in plt.figure(1).axes}
# import matplotlib as mpl
# plt.figure(1).set_size_inches(14.120000/2.54, 9.110000/2.54, forward=True)
# plt.figure(1).axes[0].set_position([0.109847, 0.124637, 0.880047, 0.838141])
# plt.figure(1).axes[0].get_xaxis().get_label().set_text("iterations")
# plt.figure(1).axes[0].get_yaxis().get_label().set_text("loss")
# plt.figure(1).axes[1].set_xlim(-0.9500000000000001, 20.0)
# plt.figure(1).axes[1].set_ylim(-0.09267258382915317, 1.9471967105529984)
# plt.figure(1).axes[1].set_xticks([0.0, 10.0, 20.0])
# plt.figure(1).axes[1].set_yticks([0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75])
# plt.figure(1).axes[1].set_xticklabels(["10", "20", "30"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="center")
# plt.figure(1).axes[1].set_yticklabels(["0.00", "0.25", "0.50", "0.75", "1.00", "1.25", "1.50", "1.75"], fontsize=10)
# plt.figure(1).axes[1].set_position([0.610715, 0.504267, 0.336851, 0.396884])
# #% end: automatic generated code from pylustrator
# plt.show()
| 4,294 | 34.204918 | 184 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Figure_in_Prop_3.2/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
import math
# import pylustrator
# pylustrator.start()
# Global seed so both panels use reproducible Brownian paths.
np.random.seed(10)
def nonlinear(N, dt, x0):
    """Euler-Maruyama path of dx = x*log|x| dt + 2*x^2 dW.

    Simulates N steps of size dt from x0 (a single-element list) and returns
    the trajectory as an (N+1, 1) numpy array.
    """
    path = [x0]
    noise = np.random.normal(0, 1, N)
    root_dt = math.sqrt(dt)
    for step in range(N):
        x = path[step][0]
        path.append([x + x * math.log(abs(x)) * dt + 2 * x * x * root_dt * noise[step]])
    return np.array(path)
def linear(k, N, dt, x0):
    """Euler-Maruyama path of dx = x*log|x| dt + k*x dW (linear control).

    Simulates N steps of size dt from x0 (a single-element list) and returns
    the trajectory as an (N+1, 1) numpy array.
    """
    path = [x0]
    noise = np.random.normal(0, 1, N)
    root_dt = math.sqrt(dt)
    for step in range(N):
        x = path[step][0]
        path.append([x + x * math.log(abs(x)) * dt + k * x * root_dt * noise[step]])
    return np.array(path)
# Panel (a): linear control with increasing strength k from three initials.
N=200000
dt=0.00001
X1 = linear(1,N,dt,[50.0])
X2 = linear(2,N,dt,[100.0])
X3 = linear(3,N,dt,[150.0])
# Panel (b): nonlinear (quadratic) control; finer dt because it converges fast.
N=200000
dt=0.000001
Y1 = nonlinear(N,dt,[50.0])
Y2 = nonlinear(N,dt,[100.0])
Y3 = nonlinear(N,dt,[150.0])
fig = plt.figure()
plt1 = fig.add_subplot(121)
plt1.plot(np.arange(N+1),X1,'r',label=r'k=1,$x_0=50.0$')
plt1.plot(np.arange(N+1),X2,'g',label=r'k=2,$x_0=100.0$')
plt1.plot(np.arange(N+1),X3,'b',label=r'k=3,$x_0=150.0$')
plt.legend()
plt2 = fig.add_subplot(122)
plt2.plot(np.arange(N+1),Y1,'r',label=r'$x_0=50.0$')
plt2.plot(np.arange(N+1),Y2,'g',label=r'$x_0=100.0$')
plt2.plot(np.arange(N+1),Y3,'b',label=r'$x_0=150.0$')
plt.legend()
#% start: automatic generated code from pylustrator
# NOTE: everything down to the matching "#% end" marker was emitted by the
# pylustrator GUI to pin exact axis limits, tick labels, colors and figure
# positions; prefer editing it through pylustrator rather than by hand.
plt.figure(1).ax_dict = {ax.get_label(): ax for ax in plt.figure(1).axes}
import matplotlib as mpl
plt.figure(1).set_size_inches(14.710000/2.54, 6.490000/2.54, forward=True)
plt.figure(1).axes[0].set_yscale("symlog")
plt.figure(1).axes[0].set_xlim(-10000.0, 210000.0)
plt.figure(1).axes[0].set_ylim(10.0, 39047767091377.336)
plt.figure(1).axes[0].set_xticks([0.0, 50000.0, 100000.0, 150000.0, 200000.0])
plt.figure(1).axes[0].set_yticks([10.0, 1000.0, 100000.0, 10000000.0, 1000000000.0, 100000000000.0, 10000000000000.0])
plt.figure(1).axes[0].set_xticklabels(["0.0", "0.5", "1.0", "1.5", "2.0"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="center")
plt.figure(1).axes[0].set_yticklabels(["$\mathdefault{10^{1}}$", "$\mathdefault{10^{3}}$", "$\mathdefault{10^{5}}$", "$\mathdefault{10^{7}}$", "$\mathdefault{10^{9}}$", "$\mathdefault{10^{11}}$", "$\mathdefault{10^{13}}$"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="right")
plt.figure(1).axes[0].grid(True)
plt.figure(1).axes[0].legend(frameon=False, borderpad=0.0, labelspacing=0.0, fontsize=7.0, title_fontsize=10.0)
plt.figure(1).axes[0].set_facecolor("#ffffefff")
plt.figure(1).axes[0].set_position([0.097374, 0.228986, 0.368972, 0.647927])
plt.figure(1).axes[0].spines['right'].set_visible(False)
plt.figure(1).axes[0].spines['top'].set_visible(False)
plt.figure(1).axes[0].yaxis.labelpad = -6.320000
plt.figure(1).axes[0].get_legend()._set_loc((0.040311, 0.720466))
plt.figure(1).axes[0].get_legend().set_label("k=1, x(0)=50.0")
plt.figure(1).axes[0].lines[0].set_color("#e96eec")
plt.figure(1).axes[0].lines[0].set_markeredgecolor("#e96eec")
plt.figure(1).axes[0].lines[0].set_markerfacecolor("#e96eec")
plt.figure(1).axes[0].lines[1].set_color("#df4936")
plt.figure(1).axes[0].lines[1].set_markeredgecolor("#df4936")
plt.figure(1).axes[0].lines[1].set_markerfacecolor("#df4936")
plt.figure(1).axes[0].lines[2].set_color("#6ba1ff")
plt.figure(1).axes[0].lines[2].set_markeredgecolor("#6ba1ff")
plt.figure(1).axes[0].lines[2].set_markerfacecolor("#6ba1ff")
plt.figure(1).axes[0].get_xaxis().get_label().set_text("time")
plt.figure(1).axes[0].get_yaxis().get_label().set_fontsize(16)
plt.figure(1).axes[0].get_yaxis().get_label().set_text("x")
plt.figure(1).axes[1].set_xlim(-40.0, 1000.0)
plt.figure(1).axes[1].set_xticks([0.0, 300.0, 600.0, 900.0])
plt.figure(1).axes[1].set_xticklabels(["0.0", "3e-4", "6e-4", "9e-4"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="center")
plt.figure(1).axes[1].grid(True)
plt.figure(1).axes[1].legend(frameon=False, borderpad=0.0, labelspacing=0.0, fontsize=7.0, title_fontsize=10.0)
plt.figure(1).axes[1].set_facecolor("#ffffefff")
plt.figure(1).axes[1].set_position([0.563724, 0.228986, 0.368972, 0.647927])
plt.figure(1).axes[1].spines['right'].set_visible(False)
plt.figure(1).axes[1].spines['top'].set_visible(False)
plt.figure(1).axes[1].yaxis.labelpad = -16.967273
plt.figure(1).axes[1].get_legend()._set_loc((0.565661, 0.749353))
plt.figure(1).axes[1].get_legend().set_label("x(0)=50.0")
plt.figure(1).axes[1].lines[0].set_color("#e96eec")
plt.figure(1).axes[1].lines[0].set_markeredgecolor("#e96eec")
plt.figure(1).axes[1].lines[0].set_markerfacecolor("#e96eec")
plt.figure(1).axes[1].lines[1].set_color("#df4936")
plt.figure(1).axes[1].lines[1].set_markeredgecolor("#df4936")
plt.figure(1).axes[1].lines[1].set_markerfacecolor("#df4936")
plt.figure(1).axes[1].lines[2].set_color("#6ba1ff")
plt.figure(1).axes[1].lines[2].set_markeredgecolor("#6ba1ff")
plt.figure(1).axes[1].lines[2].set_markerfacecolor("#6ba1ff")
plt.figure(1).axes[1].get_xaxis().get_label().set_text("time")
plt.figure(1).axes[1].get_yaxis().get_label().set_fontsize(16)
plt.figure(1).axes[1].get_yaxis().get_label().set_text("x")
plt.figure(1).text(0.5, 0.5, 'New Text', transform=plt.figure(1).transFigure)  # id=plt.figure(1).texts[0].new
plt.figure(1).texts[0].set_position([0.256748, 0.935065])
plt.figure(1).texts[0].set_text("(a)")
plt.figure(1).text(0.5, 0.5, 'New Text', transform=plt.figure(1).transFigure)  # id=plt.figure(1).texts[1].new
plt.figure(1).texts[1].set_position([0.718745, 0.935065])
plt.figure(1).texts[1].set_text("(b)")
#% end: automatic generated code from pylustrator
plt.show()
| 5,741 | 44.212598 | 347 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/invert_pendulum_control_1227.py
|
import numpy as np
import math
import torch
import matplotlib.pyplot as plt
from matplotlib import cm
import matplotlib.gridspec as gridspec
from functions import *
from base_function import colors
alpha = 1.0
fontsize=35
fontsize_legend = 20
MarkerSize = 60
linewidth = 5
color_w = 0.15 #0.5
framealpha = 0.7
N_seg = 100
def plt_tick_1():
    """Apply the shared [-10, 10] tick layout to both axes of the phase plot."""
    positions = [-10, -5, 0, 5, 10]
    labels = ['$-10$', '', '$0$', '', '$10$']
    plt.xticks(positions, labels)
    plt.yticks(positions, labels)
def plt_tick_2():
    """Tick layout for the time panel: time on x (seconds), angle on y."""
    time_ticks = [0, 0.075, 0.15, 0.225, 0.3]
    angle_ticks = [-10, -5, 0, 5, 10]
    plt.xticks(time_ticks, ['$0$', '', '$0.15$', '', '$0.3$'])
    plt.yticks(angle_ticks, ['$-10$', '', '$0$', '', '$10$'])
def plot_jianbian_line(
    X, Y, start_color=np.array([1.0, 0.0, 0.0]),
    end_color=np.array([0.0, 1.0, 0.0]),
    scale = 1/3,
    width_rate = 9/10,
):
    """Draw (X, Y) as N_seg consecutive segments with a tapering linewidth.

    NOTE: `start_color` is immediately overwritten with `end_color` below, so
    the color is actually constant and only the linewidth fades along the
    curve ("jianbian" = gradient).  The `start_color` parameter is kept for
    call-site compatibility.
    """
    # start_color = 1 - start_color
    start_color= end_color
    data_len = len(X)
    # plt.plot(data[0,:1000], data[1, :1000], '-', alpha=alpha)
    n = N_seg
    seg_len = data_len // n  # points per segment (0 if data_len < N_seg)
    print('data_len:{}, n:{}, seg_len:{}'.format(data_len, n, seg_len))
    for i in range(n - 1):
        # w in [0, 1): blend weight; also shrinks the linewidth by width_rate.
        w = ((i) / n) ** (scale)
        now_color = start_color + w * (end_color - start_color)
        # print('i:{}, now_color:{}'.format(i, now_color))
        # plt.plot(data[0,i:i+3], data[1,i:i+3], '-', color=now_color, alpha=alpha)
        # start one point early (max(...-1, 0)) so adjacent segments connect
        plt.plot(X[max(seg_len * i - 1, 0):seg_len * (i+1)], Y[max(seg_len * i - 1, 0):seg_len * (i+1)],
        '-', color=now_color, alpha=alpha, linewidth= linewidth - w * linewidth * width_rate )
# Five inverted-pendulum runs: the angle and velocity traces are stored as X1, X2.
data = torch.load('./control_data.pt')
X1 = data['X1'].clone().detach() #data size=[5,10000]
X2 = data['X2'].clone().detach() #data size=[5,10000]
# fig = plt.figure()
# plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
# ax1 = plt.subplot(121)
show_indx = [0, 2, 4]  # indices of the runs actually drawn
def plot_fig1(ax1):
    """Phase portrait of the controlled pendulum: vector field, selected
    stochastic orbits (tapered lines), start markers and the three equilibria."""
    xd = np.linspace(-10, 10, 20)
    yd = np.linspace(-10, 10, 20)
    Xd, Yd = np.meshgrid(xd,yd)
    Plotflow(Xd, Yd) # plot the vector field
    # # add horizontal lines
    # C1 = plt.scatter(0,0,marker='o',color='g')
    # C2 = plt.scatter(math.pi,0,marker='o',color='r')
    # C3 = plt.scatter(-math.pi,0,marker='o',color='b')
    # ax1.add_artist(C1)
    # ax1.add_artist(C2)
    # ax1.add_artist(C3)
    color_id = 0
    # for i in range(2):
    for i in show_indx:
        # plt.plot(X1[i,0],X2[i,0],marker='*',color=cm.Accent(i*2))
        # plt.plot(X1[i,:2000],X2[i,:2000],color=cm.Accent(i*2),alpha=0.95) # choose a suitable length
        plot_jianbian_line(X=X1[i,:2000], Y=X2[i,:2000], start_color=colors[color_id] * color_w, end_color=colors[color_id],
        scale=1/3, width_rate=0.5)
        # plt.plot(state[0,0],state[1,0],marker='*', color=cm.Accent(i*2))
        color_id += 1
    color_id = 0
    # star markers at each orbit's initial condition
    for i in show_indx:
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color='k', zorder=10)
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color=colors[color_id]/max(colors[color_id]) * 0.7, zorder=10)
        color_id += 1
    # mark the equilibrium points
    C1 = plt.scatter(0, 0,marker='o',color='g', s=MarkerSize, zorder=10)
    C2 = plt.scatter(math.pi,0,marker='o',color='r', s=MarkerSize, zorder=10)
    C3 = plt.scatter(-math.pi,0,marker='o',color='b', s=MarkerSize, zorder=10)
    ax1.add_artist(C1)
    ax1.add_artist(C2)
    ax1.add_artist(C3)
    plt.xlim(-6,6)
    plt.ylim(-6,6)
    # plt.title('Orbits under Stochastic Control')
    plt.legend([C1,C2,C3],[r'$(0,~0)$',r'$(\pi,~0)$',r'$(-\pi,~0)$'],loc='upper right',
    borderpad=0.05, labelspacing=0.05,fontsize=fontsize_legend, framealpha=framealpha)
    plt.xlabel(r'$\theta$',fontsize=fontsize)
    plt.ylabel(r'$\dot{\theta}$',fontsize=fontsize)
    plt_tick_1()
    plt.tick_params(labelsize=fontsize)
N_data = 3000  # number of simulation steps plotted per trajectory
def control_trajectory_(ax,title,path='./control_data.pt'):
    """Plot theta(t) for the selected controlled runs, with dashed reference
    lines at the three equilibrium angles.  `title` is currently unused."""
    data = torch.load(path)
    # X = data['X'].clone().detach()
    X1 = data['X1'].clone().detach()
    print('X1 shape:{}'.format(X1.shape))
    # X2 = data['X2']
    L1 = plt.axhline(y=0.0,ls="--",linewidth=1.5,color="green")# add horizontal line
    L2 = plt.axhline(y=math.pi,ls="--",linewidth=1.5,color="r")
    L3 = plt.axhline(y=-math.pi,ls="--",linewidth=1.5,color="b")
    ax.add_artist(L1)
    ax.add_artist(L2)
    ax.add_artist(L3)
    color_id = 0
    # for i in range(len(X1)):
    for i in show_indx:
        # x = X[i,:].numpy()
        # m = np.max(x)
        # index = np.argwhere(x == m )
        # sample_length = int(index[0])
        L = np.arange(len(X1[0,:N_data])) * 0.0001  # step index -> time (dt = 1e-4)
        # plt.plot(L[0],X1[i,0],marker='*',markersize=8,color=cm.Accent(i*2))
        plot_jianbian_line(X=L, Y=X1[i, :N_data],
        start_color=colors[color_id] * color_w, end_color=colors[color_id],
        scale = 1/2,
        width_rate = 5/10,
        )
        # plt.plot(L,X1[i,:3000],linestyle='--',color=cm.Accent(i*2),alpha=0.45)
        color_id += 1
    color_id = 0
    # NOTE: this loop reuses `L` left over from the loop above.
    for i in show_indx:
        # plt.scatter(L[0],X1[i,0],marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        plt.scatter(L[0],X1[i,0],marker='*', s=MarkerSize * 5, color=colors[color_id]/max(colors[color_id]) * 0.7, zorder=10)
        color_id += 1
    plt.legend([L1,L2,L3],[r'$\theta=0$',r'$\theta=\pi$',r'$\theta=-\pi$'],loc='upper right',
    borderpad=0.05, labelspacing=0.05, fontsize=fontsize_legend, framealpha=framealpha)
    # plt.title(title)
    plt.xlabel('Time',fontsize=fontsize)
    plt.ylabel(r'$\theta$',fontsize=fontsize)
# ax2 = plt.subplot(122)
def plot_fig2(ax2):
    """Right panel: controlled theta(t) traces with the shared tick layout."""
    # control_trajectory(ax2,'Phase Trajectories along Time','./control_data.pt')
    control_trajectory_(ax2,'Phase Trajectories along Time','./control_data.pt')
    plt_tick_2()
    plt.tick_params(labelsize=fontsize)
if __name__ == '__main__':
    # Side-by-side figure: phase portrait (left) and theta(t) traces (right).
    ax1 = plt.subplot(121)
    plot_fig1(ax1=ax1)
    ax2 = plt.subplot(122)
    plot_fig2(ax2=ax2)
    plt.show()
| 6,416 | 33.315508 | 129 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/algo2.py
|
import torch
import torch.nn.functional as F
import numpy as np
import timeit
import math
class Net(torch.nn.Module):
    """Two-layer MLP used as the state-dependent control gain u(x).

    Architecture: Linear(n_input -> n_hidden) -> ReLU -> Linear(n_hidden -> n_output).
    Weight initialisation is deterministic because __init__ reseeds torch's RNG.
    """
    def __init__(self,n_input,n_hidden,n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # The original code bound ReLU to a local named `sigmoid`, which was
        # misleading; renamed.  Behavior is unchanged.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        out = self.layer2(h_1)
        return out
def inverted_pendulum(x):
    """Drift field f(x) of the uncontrolled inverted pendulum.

    Args:
        x: (N, 2) tensor of states [theta, theta_dot].
    Returns:
        (N, 2) tensor [theta_dot, theta_ddot] with the same dtype as x.
    """
    G = 9.81  # gravity
    L = 0.5   # length of the pole
    m = 0.15  # ball mass
    b = 0.1   # friction
    # Vectorized over the batch instead of the original per-row Python loop;
    # the values are identical.  detach() matches the original
    # torch.tensor(...) construction, which did not carry gradients.
    theta, omega = x[:, 0], x[:, 1]
    accel = G * torch.sin(theta) / L + (-b * omega) / (m * L ** 2)
    return torch.stack([omega, accel], dim=1).detach()
'''
For learning
'''
N = 1000             # sample size
D_in = 2            # input dimension
H1 = 6             # hidden dimension
D_out = 2          # output dimension
torch.manual_seed(10)
x = torch.Tensor(N, D_in).uniform_(-10, 10)  # training states sampled uniformly
theta = 0.5  # stability-margin hyperparameter in the loss
out_iters = 0
valid = False
while out_iters < 1 and not valid:
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    t = 0
    max_iters = 2000
    learning_rate = 0.05
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    L = []  # loss history (note: tensors with graphs are appended, not floats)
    while i < max_iters and not valid:
        out = model(x)
        g = out*x  # control diffusion g(x) = NN(x) * x, so g(0) = 0
        f = inverted_pendulum(x)
        # Hinge on the negative of the per-sample stability condition.
        loss = (2-theta)*torch.diagonal(torch.mm(x,g.T))**2-torch.diagonal(torch.mm(x,x.T))*torch.diagonal(2*torch.mm(x,f.T)+torch.mm(g,g.T))
        # loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        L.append(Lyapunov_risk)
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # if Lyapunov_risk == 0.0:
        #     break
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    print("Verified time: ", t)
    out_iters+=1
torch.save(torch.tensor(L), './data/inverted_pendulum/loss_AS.pt')
torch.save(model.state_dict(), './data/inverted_pendulum/algo2_invert_net.pkl')
| 2,276 | 24.021978 | 141 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/functions.py
|
import numpy as np
import math
import torch
import timeit
import matplotlib.pyplot as plt
from matplotlib import cm
import matplotlib.gridspec as gridspec
from scipy.integrate import odeint
import numpy as np
np.random.seed(10)
class Net(torch.nn.Module):
    """Two-layer MLP control network (must match the architecture that
    produced algo2_invert_net.pkl).  Weight init is seeded for reproducibility.
    """
    def __init__(self,n_input,n_hidden,n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # Renamed the activation local: it was called `sigmoid` but is a ReLU.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        out = self.layer2(h_1)
        return out
D_in = 2 # input dimension
H1 = 6 # hidden dimension
D_out = 2
inverted_model = Net(D_in,H1,D_out)
inverted_model.load_state_dict(torch.load('./data/inverted_pendulum/algo2_invert_net.pkl'))
# ang = torch.zeros([5,1]) #initial angle
# vel = torch.zeros([5,1]) #initial velocity
# for i in range(5):
# x0 = np.random.uniform(-6,6,2)
# ang[i,0] = x0[0]
# vel[i,0] = x0[1]
def invert_pendulum(state0, t):
    """Integrate the uncontrolled pendulum ODE from state0 over times t.

    Returns an array of shape (2, len(t)): row 0 is theta, row 1 is theta_dot.
    """
    gravity, pole_len, ball_mass, friction = 9.81, 0.5, 0.15, 0.1

    def dynamics(state, _t):
        theta, omega = state
        domega = gravity * np.sin(theta) / pole_len - friction * omega / (ball_mass * pole_len ** 2)
        return omega, domega

    trajectory = odeint(dynamics, state0.flatten(), t)
    return trajectory.transpose()
# Generate controlled-trajectory data.
set_state0 = torch.tensor([[-5.0,5.0],[-3.0,4.0],[-1.0,3.0],[1.0,-3.0],[3.0,-4.0],[5.0,-5.0]])
def control_data(set_state0,M=6,N=20000,dt=0.00001):
    """Euler-Maruyama simulation of the controlled pendulum SDE.

    Simulates M trajectories of N steps (step size dt) from the rows of
    `set_state0`, using `inverted_model` as the stochastic control gain,
    and saves the result to control_data.pt.  The torch seed is fixed so
    the noise streams are reproducible.
    """
    start = timeit.default_timer()
    torch.manual_seed(6)
    X1,X2 = torch.zeros([M,N]),torch.zeros([M,N])
    for r in range(M):
        G = 9.81  # gravity
        L = 0.5   # length of the pole
        m = 0.15  # ball mass
        b = 0.1
        z1 = torch.randn(N)  # independent Brownian increments per coordinate
        z2 = torch.randn(N)
        # X1[r,0] = ang[r,0]
        # X2[r,0] = vel[r,0]
        X1[r,0] = set_state0[r,0]
        X2[r,0] = set_state0[r,1]
        for i in range(N-1):
            x1 = X1[r,i]
            x2 = X2[r,i]
            u = inverted_model(torch.tensor([x1,x2]))
            # drift * dt + state-proportional diffusion * sqrt(dt) * noise
            new_x1 = x1 + x2*dt + x1*u[0]*z1[i]*math.sqrt(dt)
            new_x2 = x2 + (G*math.sin(x1)/L - b*x2/(m*L**2))*dt + x2*u[1]*z2[i]*math.sqrt(dt)
            X1[r,i+1] = new_x1
            X2[r,i+1] = new_x2
        print('{} done'.format(r))
    orig_data = {'X1':X1,'X2':X2}
    torch.save(orig_data,'./data/inverted_pendulum/control_data.pt')
    stop = timeit.default_timer()
    print(stop-start)
def control_trajectory(ax,title,path='./data/inverted_pendulum/control_data.pt'):
    """Plot theta(t) for every saved controlled run, with dashed reference
    lines at theta = 0, pi and -pi."""
    data = torch.load(path)
    # X = data['X'].clone().detach()
    X1 = data['X1'].clone().detach()
    # X2 = data['X2']
    for i in range(len(X1)):
        # x = X[i,:].numpy()
        # m = np.max(x)
        # index = np.argwhere(x == m )
        # sample_length = int(index[0])
        L = np.arange(len(X1[0,:3000]))  # step indices of the plotted prefix
        plt.plot(L[0],X1[i,0],marker='*',markersize=8,color=cm.Accent(i*2))
        plt.plot(L,X1[i,:3000],linestyle='--',color=cm.Accent(i*2),alpha=0.45)
    L1 = plt.axhline(y=0.0,ls="--",linewidth=1.5,color="green")# add horizontal line
    L2 = plt.axhline(y=math.pi,ls="--",linewidth=1.5,color="r")
    L3 = plt.axhline(y=-math.pi,ls="--",linewidth=1.5,color="b")
    ax.add_artist(L1)
    ax.add_artist(L2)
    ax.add_artist(L3)
    plt.legend([L1,L2,L3],[r'$\theta=0$',r'$\theta=\pi$',r'$\theta=-\pi$'],loc='upper right',borderpad=0.05, labelspacing=0.05)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\theta$')
def f(y):
    """Pendulum vector field used for streamline plotting.

    y = (theta, theta_dot); returns [d(theta)/dt, d(theta_dot)/dt].
    Works elementwise on arrays as well as on scalars.
    """
    grav, length, mass, damping = 9.81, 0.5, 0.15, 0.1
    theta, omega = y
    torque = mass * grav * length * np.sin(theta) - damping * omega
    return [omega, torque / (mass * length ** 2)]
# Plot the vector field.
def Plotflow(Xd, Yd):
    """Draw normalized streamlines of f over the (Xd, Yd) meshgrid."""
    # Plot phase plane
    DX, DY = f([Xd, Yd])
    # Normalize each row so arrow density, not magnitude, shows the flow.
    DX=DX/np.linalg.norm(DX, ord=2, axis=1, keepdims=True)
    DY=DY/np.linalg.norm(DY, ord=2, axis=1, keepdims=True)
    plt.streamplot(Xd,Yd,DX,DY, color=('gray'), linewidth=0.5,
    density=0.6, arrowstyle='-|>', arrowsize=1.5)
'''
generate control data
'''
if __name__ == '__main__':
    # dt=0.0001 here overrides the function's 0.00001 default.
    control_data(set_state0,6,20000,0.0001)
| 4,192 | 29.830882 | 127 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/base_function.py
|
import numpy as np
import matplotlib.pyplot as plt
colors = [
[233/256, 110/256, 236/256], # #e96eec
# [0.6, 0.6, 0.2], # olive
# [0.5333333333333333, 0.13333333333333333, 0.3333333333333333], # wine
[255/255, 165/255, 0],
# [0.8666666666666667, 0.8, 0.4666666666666667], # sand
# [223/256, 73/256, 54/256], # #df4936
[107/256, 161/256,255/256], # #6ba1ff
[0.6, 0.4, 0.8], # amethyst
[0.0, 0.0, 1.0], # ao
[0.55, 0.71, 0.0], # applegreen
# [0.4, 1.0, 0.0], # brightgreen
[0.99, 0.76, 0.8], # bubblegum
[0.93, 0.53, 0.18], # cadmiumorange
[11/255, 132/255, 147/255], # deblue
[204/255, 119/255, 34/255], # {ocra}
]
colors = np.array(colors)
cfg = {
"colors": colors ,
"alpha": 1.0,
"fontsize": 35,
"fontsize_legend": 20,
"MarkerSize": 60,
"linewidth": 5,
"color_w": 0.5,
}
# CAUTION: these are cfg KEY STRINGS, not numeric values.  Use cfg[alpha]
# etc. to obtain the numbers; passing these names directly to matplotlib
# (as plain `alpha` / `linewidth`) would pass strings.
alpha = "alpha"
fontsize = "fontsize"
fontsize_legend = "fontsize_legend"
MarkerSize = "MarkerSize"
linewidth = "linewidth"
color_w = "color_w"
def plt_tick_1():
    """[-5, 5] tick grid on both axes, labelling only -5, 0 and 5."""
    ticks = [-5, -2.5, 0, 2.5, 5]
    labels = ['$-5$', '', '$0$', '', '$5$']
    plt.xticks(ticks, labels)
    plt.yticks(ticks, labels)
def plt_tick_2():
    """x ticks at 0/2/4/6 with default labels; y on the [-5, 5] grid."""
    y_positions = [-5, -2.5, 0, 2.5, 5]
    y_labels = ['$-5$', '', '$0$', '', '$5$']
    plt.xticks([0, 2, 4, 6])
    plt.yticks(y_positions, y_labels)
def plot_jianbian_line(
    X, Y, start_color=np.array([1.0, 0.0, 0.0]),
    end_color=np.array([0.0, 1.0, 0.0]),
    scale=1/3,
    width_rate=9/10,
):
    """Plot (X, Y) as 500 consecutive segments blending from start_color to
    end_color while the linewidth tapers by width_rate.

    BUG FIX: in this module the names `alpha` and `linewidth` are STRINGS
    (keys into `cfg`), so the original body crashed with a TypeError at
    `linewidth - w * linewidth * width_rate`.  Read the numeric values out
    of `cfg` instead.
    """
    line_alpha = cfg["alpha"]
    base_width = cfg["linewidth"]
    data_len = len(X)
    n = 500  # number of segments
    seg_len = data_len // n
    print('data_len:{}, n:{}, seg_len:{}'.format(data_len, n, seg_len))
    for i in range(n - 1):
        # w in [0, 1): controls both the color blend and the width taper.
        w = (i / n) ** scale
        now_color = start_color + w * (end_color - start_color)
        print('i:{}, now_color:{}'.format(i, now_color))
        plt.plot(X[seg_len * i:seg_len * (i + 1)], Y[seg_len * i:seg_len * (i + 1)],
                 '-', color=now_color, alpha=line_alpha,
                 linewidth=base_width - w * base_width * width_rate)
| 2,192 | 31.731343 | 103 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/Inver_pendulum_1227.py
|
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
from invert_pendulum_no_control_1227 import plot_fig1 as plot_fig1_no_control
from invert_pendulum_no_control_1227 import plot_fig2 as plot_fig2_no_control
from invert_pendulum_control_1227 import plot_fig1 as plot_fig1_control
from invert_pendulum_control_1227 import plot_fig2 as plot_fig2_control
def plot_grid():
    """Draw a major (dash-dot gray) plus minor (beige) grid on the current axes."""
    # The module only imports pyplot inside the __main__ guard, so import
    # locally to keep this function usable when the module is imported.
    import matplotlib.pyplot as plt
    # Pass the visibility flag positionally: the `b=` keyword was deprecated
    # in Matplotlib 3.5 (renamed `visible`) and removed later; the positional
    # form works on every version.
    plt.grid(True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
if __name__ == '__main__':
    import matplotlib.pyplot as plt
    # 2x2-style comparison: uncontrolled panels (top) vs controlled (bottom).
    # NOTE(review): the subplot geometries intentionally mix 2x2 and 2x3
    # grids (222/231/224/234) to get unequal panel widths — confirm layout.
    ax1 = plt.subplot(222)
    plot_fig1_no_control(ax1=ax1)
    plot_grid()
    ax2 = plt.subplot(231)
    plot_fig2_no_control(ax2=ax2)
    plot_grid()
    ax1 = plt.subplot(224)
    plot_fig1_control(ax1=ax1)
    plot_grid()
    ax2 = plt.subplot(234)
    plot_fig2_control(ax2=ax2)
    plot_grid()
    plt.show()
| 1,133 | 32.352941 | 89 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/inverted_pendulum/invert_pendulum_no_control_1227.py
|
import numpy as np
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
import matplotlib.gridspec as gridspec
from functions import *
from base_function import colors
# colors = [
# [233/256, 110/256, 236/256], # #e96eec
# [223/256, 73/256, 54/256], # #df4936
# [107/256, 161/256,255/256], # #6ba1ff
# [0.6, 0.4, 0.8], # amethyst
# [0.0, 0.0, 1.0], # ao
# [0.55, 0.71, 0.0], # applegreen
# # [0.4, 1.0, 0.0], # brightgreen
# [0.99, 0.76, 0.8], # bubblegum
# [0.93, 0.53, 0.18], # cadmiumorange
# [0.6, 0.6, 0.2], # olive
# [0.8666666666666667, 0.8, 0.4666666666666667], # sand
# [0.5333333333333333, 0.13333333333333333, 0.3333333333333333], # wine
# [11/255, 132/255, 147/255], # deblue
# [204/255, 119/255, 34/255], # {ocra}
# ]
# colors = np.array(colors)
alpha = 1.0
fontsize=35
fontsize_legend = 20
MarkerSize = 60
linewidth = 5
color_w = 0.15 #0.5
framealpha = 0.7
N_seg = 100
def plt_tick_1():
    """Symmetric [-5, 5] ticks on both axes of the phase portrait."""
    positions = [-5, -2.5, 0, 2.5, 5]
    labels = ['$-5$', '', '$0$', '', '$5$']
    plt.xticks(positions, labels)
    plt.yticks(positions, labels)
def plt_tick_2():
    """Time axis 0..4 (labelled at 0/2/4); angle axis on the [-5, 5] grid."""
    time_labels = ['$0$', '', '$2$', '', '$4$']
    angle_labels = ['$-5$', '', '$0$', '', '$5$']
    plt.xticks([0, 1, 2, 3, 4], time_labels)
    plt.yticks([-5, -2.5, 0, 2.5, 5], angle_labels)
def plot_jianbian_line(
    X, Y, start_color=np.array([1.0, 0.0, 0.0]),
    end_color=np.array([0.0, 1.0, 0.0]),
    scale = 1/3,
    width_rate = 9/10,
):
    """Draw (X, Y) as N_seg segments with a tapering linewidth.

    NOTE: `start_color` is immediately replaced by `end_color`, so the color
    is constant and only the width fades; the parameter remains for
    call-site compatibility.
    """
    # start_color = 1- start_color
    start_color= end_color
    data_len = len(X)
    # plt.plot(data[0,:1000], data[1, :1000], '-', alpha=alpha)
    n = N_seg
    seg_len = data_len // n  # points per segment
    print('data_len:{}, n:{}, seg_len:{}'.format(data_len, n, seg_len))
    for i in range(n - 1):
        # w in [0, 1): blend weight; also shrinks the linewidth by width_rate.
        w = ((i) / n) ** (scale)
        now_color = start_color + w * (end_color - start_color)
        # print('i:{}, now_color:{}'.format(i, now_color))
        # plt.plot(data[0,i:i+3], data[1,i:i+3], '-', color=now_color, alpha=alpha)
        plt.plot(X[seg_len * i:seg_len * (i+1)], Y[seg_len * i:seg_len * (i+1)],
        '-', color=now_color, alpha=alpha, linewidth= linewidth - w * linewidth * width_rate )
np.random.seed(10)
t = np.arange(0.0, 4.0, 0.0001)
set_state0 = np.array([[-5.0,5.0],[-3.0,4.0],[-1.0,3.0],[1.0,-3.0],[3.0,-4.0],[5.0,-5.0]])
# fig = plt.figure()
# plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
# ax1 = plt.subplot(121)
# show_indx = [0, 2, 3, 5]
show_indx = [0, 2, 4]
def plot_fig1(ax1):
    """Phase portrait of the UNCONTROLLED pendulum: vector field, ODE orbits
    from the selected initial states, start markers and the equilibria."""
    xd = np.linspace(-5.5, 5.5, 10)
    yd = np.linspace(-5.5, 5.5, 10)
    Xd, Yd = np.meshgrid(xd,yd)
    Plotflow(Xd, Yd) # plot the vector field
    # add horizontal axis markers
    # for i in range(6):
    color_id = 0
    for i in show_indx:
        # state0 = np.random.uniform(-6,6,2)
        state0 = set_state0[i,:]
        state = invert_pendulum(state0,t) # generate pendulum trajectory
        # plt.plot(state[0,:],state[1,:],color=cm.Accent(i*2),alpha=0.55)
        plot_jianbian_line(X=state[0, :], Y=state[1, :], start_color=colors[color_id] * color_w, end_color=colors[color_id])
        # plt.plot(state[0,0],state[1,0],marker='*', color=cm.Accent(i*2))
        # plt.scatter(state[0,0],state[1,0], marker='*', s=MarkerSize * 5, color=1 - colors[color_id] * color_w)
        color_id += 1
    color_id = 0
    # star markers at each orbit's initial condition (trajectories recomputed)
    for i in show_indx:
        state0 = set_state0[i,:]
        state = invert_pendulum(state0,t) # generate pendulum trajectory
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color='k', zorder=10)
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        # plt.scatter(state[0,0],state[1,0], marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        plt.scatter(state[0,0],state[1,0], marker='*', s=MarkerSize * 5, color=colors[color_id]/max(colors[color_id]) * 0.7, zorder=10)
        color_id += 1
    # mark the equilibrium points
    C1 = plt.scatter(0, 0,marker='o',color='g', s=MarkerSize, zorder=10)
    C2 = plt.scatter(math.pi,0,marker='o',color='r', s=MarkerSize, zorder=10)
    C3 = plt.scatter(-math.pi,0,marker='o',color='b', s=MarkerSize, zorder=10)
    ax1.add_artist(C1)
    ax1.add_artist(C2)
    ax1.add_artist(C3)
    # plt.title('Orbits along Vector Fields')
    plt.legend([C1,C2,C3],[r'$(0,~0)$', r'$(\pi,~0)$',r'$(-\pi,~0)$'],loc='upper right',borderpad=0.05, labelspacing=0.05,
    fontsize=fontsize_legend, framealpha=framealpha)
    # plt.xlabel(r'$\theta$', fontsize=fontsize)
    plt.ylabel(r'$\dot{\theta}$', fontsize=fontsize)
    plt_tick_1()
    plt.tick_params(labelsize=fontsize)
# ax2 = plt.subplot(122)
def plot_fig2(ax2):
    """theta(t) traces of the uncontrolled pendulum with dashed reference
    lines at the three equilibrium angles."""
    # add horizontal axis markers
    L1 = plt.axhline(y=0.0,ls="--",linewidth=1.5,color="green")
    L2 = plt.axhline(y=math.pi,ls="--",linewidth=1.5,color="r")
    L3 = plt.axhline(y=-math.pi,ls="--",linewidth=1.5,color="b")
    ax2.add_artist(L1)
    ax2.add_artist(L2)
    ax2.add_artist(L3)
    color_id = 0
    # for i in range(6):
    for i in show_indx:
        # state0 = np.random.uniform(-6,6,2)
        state0 = set_state0[i,:]
        state = invert_pendulum(state0,t) # generate pendulum trajectory
        # plt.plot(t, state[0,:],color=cm.Accent(i**2+1),alpha=0.55)
        plot_jianbian_line(X=t, Y=state[0, :],
        start_color=colors[color_id] * color_w, end_color=colors[color_id],
        scale = 1/2,
        width_rate = 5/10,
        )
        # plt.plot(t[0],state[0,0],marker='*',color=cm.Accent(i**2+1))
        # plt.scatter(t[0],state[0,0],marker='*', s=MarkerSize * 5, color=1 - colors[color_id] * color_w)
        color_id += 1
    color_id = 0
    # star markers at each trajectory's initial angle (trajectories recomputed)
    for i in show_indx:
        state0 = set_state0[i,:]
        state = invert_pendulum(state0,t) # generate pendulum trajectory
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color='k', zorder=10)
        # plt.scatter(X1[i,0],X2[i,0], marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        # plt.scatter(state[0,0],state[1,0], marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        # plt.scatter(t[0],state[0,0],marker='*', s=MarkerSize * 5, color=colors[color_id] * color_w, zorder=10)
        plt.scatter(t[0],state[0,0],marker='*', s=MarkerSize * 5, color=colors[color_id]/max(colors[color_id]) * 0.7, zorder=10)
        color_id += 1
    plt.legend(
        [L1,L2,L3],
        [r'$\theta=0$',r'$\theta=\pi$',r'$\theta=-\pi$'],loc='upper right',
        borderpad=0.05, labelspacing=0.05, fontsize=fontsize_legend, framealpha=framealpha
    )
    # plt.title('Phase Trajectories along Time')
    # plt.xlabel('t', fontsize=fontsize)
    plt.ylabel(r'$\theta$', fontsize=fontsize)
    plt_tick_2()
    plt.tick_params(labelsize=fontsize)
if __name__ == '__main__':
    # Side-by-side figure: phase portrait (left) and theta(t) traces (right).
    ax1 = plt.subplot(121)
    plot_fig1(ax1=ax1)
    ax2 = plt.subplot(122)
    plot_fig2(ax2=ax2)
    plt.show()
| 7,041 | 36.259259 | 135 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/plot_trajectory.py
|
from statistics import mean
import sys
sys.path.append('./neural_sde')
import numpy as np
import math
import matplotlib.pyplot as plt
import torch
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
import timeit
# import pylustrator
# pylustrator.start()
start = timeit.default_timer()
A = torch.load('./neural_sde/hyper_a/data.pt')
# Keep hyperparameter slots -8..-2 along dim 1 (drops the last slot).
A = A[:,-8:-1,:,:]
print(A.shape)
def plot_trajec(L,a):
    """Plot the mean of the trajectory batch L (dim 0 = runs) with a
    +/- 1 std shaded band.

    NOTE(review): the legend label reads $b=...$ although the parameter is
    named `a` — confirm which hyperparameter this sweep varies.
    """
    mean_data = torch.mean(L,0).detach().numpy()
    std_data =torch.std(L,0).detach().numpy()
    plt.fill_between(np.arange(len(mean_data)),mean_data-std_data,mean_data+std_data,color='r',alpha=0.2)
    plt.plot(np.arange(len(mean_data)),mean_data,color='r',alpha=0.9,label=r'$b={}$'.format(a))
    plt.ylim(-1,6)
    # plt.xlabel('Time')
    plt.yticks([])
    # x labelled in time units: 6000 steps correspond to t = 0.6
    plt.xticks([0.0, 6000], ["$0$", "$0.6$"])
| 816 | 26.233333 | 105 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/plot_loss.py
|
import numpy as np
import matplotlib.pyplot as plt
import torch
import pylustrator
pylustrator.start()
import seaborn as sns
sns.set_theme(style="white")
def plot_a(a):
    """Plot the saved loss curve for hyperparameter alpha = a, zero-padded
    to 1000 iterations so all subplots share the same x range.

    NOTE(review): assumes len(L) <= 1000 — np.zeros would raise otherwise.
    """
    L = np.load('./neural_sde/hyper_a/a_{}.npy'.format(a))
    r_L = np.zeros(1000-len(L))
    L = np.concatenate((L,r_L),axis=0)
    # np.concatenate((a,b),axis=0)
    plt.plot(np.arange(len(L)),L,'b')
    # plt.xlabel('Iterations')
    plt.ylim(-0.01,1)
    plt.yticks([])
    plt.title(r'$\alpha={}$'.format(a))
plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
plt.subplot(171)
plot_a(0.65)
plt.ylabel('Loss')
plt.yticks([0,0.25,0.5,0.75,1.0])
plt.subplot(172)
plot_a(0.7)
plt.subplot(173)
plot_a(0.75)
plt.subplot(174)
plot_a(0.8)
plt.subplot(175)
plot_a(0.85)
plt.subplot(176)
plot_a(0.9)
plt.subplot(177)
plot_a(0.95)
#% start: automatic generated code from pylustrator
plt.figure(1).ax_dict = {ax.get_label(): ax for ax in plt.figure(1).axes}
import matplotlib as mpl
plt.figure(1).set_size_inches(14.460000/2.54, 4.880000/2.54, forward=True)
plt.figure(1).axes[0].set_position([0.118581, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[1].set_position([0.244815, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[1].title.set_position([0.500000, 1.000000])
plt.figure(1).axes[2].set_position([0.371050, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[3].set_position([0.497285, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[4].set_position([0.623519, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[5].set_position([0.749754, 0.256900, 0.084156, 0.543710])
plt.figure(1).axes[6].set_position([0.875988, 0.256900, 0.084156, 0.543710])
plt.figure(1).text(0.5, 0.5, 'New Text', transform=plt.figure(1).transFigure) # id=plt.figure(1).texts[0].new
plt.figure(1).texts[0].set_position([0.474888, 0.048140])
plt.figure(1).texts[0].set_text("Iterations")
#% end: automatic generated code from pylustrator
plt.show()
| 1,949 | 30.967213 | 110 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/AS.py
|
import torch
import torch.nn.functional as F
import numpy as np
import timeit
class Net(torch.nn.Module):
    """Three-layer MLP used as the control gain in the alpha sweep.

    Architecture: Linear -> ReLU -> Linear -> ReLU -> Linear.  Weight init
    is deterministic because __init__ reseeds torch's RNG.
    """
    def __init__(self,n_input,n_hidden,n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # Renamed the activation local: it was called `sigmoid` but is a ReLU.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        h_2 = relu(self.layer2(h_1))
        out = self.layer3(h_2)
        return out
def inverted_pendulum(x):
    """Drift field f(x) of the uncontrolled inverted pendulum.

    Args:
        x: (N, 2) tensor of states [theta, theta_dot].
    Returns:
        (N, 2) tensor [theta_dot, theta_ddot].
    """
    G = 9.81  # gravity
    L = 0.5   # length of the pole
    m = 0.15  # ball mass
    b = 0.1   # friction
    # Vectorized replacement of the original per-row Python loop; identical
    # values.  detach() matches the gradient-free torch.tensor(...) original.
    theta, omega = x[:, 0], x[:, 1]
    accel = G * torch.sin(theta) / L + (-b * omega) / (m * L ** 2)
    return torch.stack([omega, accel], dim=1).detach()
'''
For learning
'''
N = 500             # sample size
D_in = 2            # input dimension
H1 = 6             # hidden dimension
D_out = 2          # output dimension
torch.manual_seed(2)
x = torch.Tensor(N, D_in).uniform_(-10, 10)  # fixed training states
# Sweep theta (the alpha hyperparameter) over 0.05, 0.10, ..., 0.95.
for r in range(19):
    theta = float(format(r*0.05+0.05,'.2f'))
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    max_iters = 1000
    learning_rate = 0.01
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    L = []  # per-iteration loss values (floats)
    while i < max_iters:
        out = model(x)
        g = out*x  # control diffusion g(x) = NN(x) * x
        f = inverted_pendulum(x)
        # Hinge on the negative of the per-sample stability condition.
        loss = (2-theta)*torch.diagonal(torch.mm(x,g.T))**2-torch.diagonal(torch.mm(x,x.T))*torch.diagonal(2*torch.mm(x,f.T)+torch.mm(g,g.T))
        # loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        L.append(Lyapunov_risk.item())
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # early exit once every sample satisfies the condition
        if Lyapunov_risk == 0.0:
            break
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    np.save('./hyper_a/a_{}.npy'.format(theta), L)
    torch.save(model.state_dict(),'./hyper_a/a_{}.pkl'.format(theta))
| 2,236 | 25.011628 | 141 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/test.py
|
import sys
sys.path.append('./neural_sde')
import numpy as np
import math
import matplotlib.pyplot as plt
import torch
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
import timeit
# Scratch check: slicing with step 10 keeps every 10th column -> shape (2, 10).
A = torch.ones(2,100)
# B = torch.diagonal(A)
print(A[:,0:100:10].shape)
| 273 | 20.076923 | 39 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/generate.py
|
import numpy as np
import math
import torch
import timeit
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(10)
class Net(torch.nn.Module):
    """Three-layer MLP control network (architecture must match the saved
    b_*.pkl checkpoints loaded below)."""
    def __init__(self,n_input,n_hidden,n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # Renamed the activation local: it was called `sigmoid` but is a ReLU.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        h_2 = relu(self.layer2(h_1))
        out = self.layer3(h_2)
        return out
D_in = 2
H1 = 6
D_out = 2
model = Net(D_in,H1,D_out)
set_state0 = torch.tensor([[3.0,5.0]]) # initial
def control_data(model,random_seed,set_state0,M=6,N=20000,dt=0.00001):
    """Euler-Maruyama simulation of the controlled pendulum SDE.

    Simulates M trajectories of N steps from the rows of `set_state0` and
    returns the traces decimated by 10 (shape (M, N // 10) each).

    NOTE(review): unlike the other scripts, BOTH coordinates use the same
    noise stream `z` here — confirm that is intended.
    """
    start = timeit.default_timer()
    torch.manual_seed(random_seed)
    X1,X2 = torch.zeros([M,N]),torch.zeros([M,N])
    for r in range(M):
        G = 9.81  # gravity
        L = 0.5   # length of the pole
        m = 0.15  # ball mass
        b = 0.1
        z = torch.randn(N)
        X1[r,0] = set_state0[r,0]
        X2[r,0] = set_state0[r,1]
        for i in range(N-1):
            x1 = X1[r,i]
            x2 = X2[r,i]
            with torch.no_grad():
                u = model(torch.tensor([x1,x2]))
            # drift * dt + state-proportional diffusion * sqrt(dt) * noise
            new_x1 = x1 + x2*dt + x1*u[0]*z[i]*math.sqrt(dt)
            new_x2 = x2 + (G*math.sin(x1)/L - b*x2/(m*L**2))*dt + x2*u[1]*z[i]*math.sqrt(dt)
            X1[r,i+1] = new_x1
            X2[r,i+1] = new_x2
        print('{} done'.format(r))
    # keep every 10th sample to shrink the output
    X1=X1[:,0:N:10]
    X2=X2[:,0:N:10]
    # data = {'X1':X1,'X2':X2}
    # torch.save(data,'./neural_sde/hyper_b/b_{}.pt'.format(b))
    stop = timeit.default_timer()
    print(stop-start)
    return X1,X2
'''
Generate trajectories under control
'''
if __name__ == '__main__':
    M = 5
    N = 60000
    # NOTE(review): control_data returns traces decimated to N // 10 samples,
    # but `data` is allocated with full length N — the assignment below looks
    # like it would fail with a size mismatch; the buffer probably needs to
    # be torch.zeros([2, 10, M, N // 10]).  Confirm before relying on this.
    data = torch.zeros([2,10,M,N])
    for r in range(10):
        b = 2.0 + r*0.1  # hyperparameter value encoded in the checkpoint name
        model.load_state_dict(torch.load('./neural_sde/hyper_b/b_{}.pkl'.format(b)))
        # X1,X2=torch.zeros([M,N]),torch.zeros([M,N])
        for i in range(M):
            # a distinct seed (i*6) per repetition
            x1,x2 = control_data(model,i*6,set_state0,1,N,0.0001)
            # X1[i,:] = x1[0,:]
            # X2[i,:] = x2[0,:]
            data[0,r,i,:] = x1[0,:]
            data[1,r,i,:] = x2[0,:]
            print('({},{})'.format(r,i))
    torch.save(data,'data.pt')
'''
Do some test
'''
# model.load_state_dict(torch.load('./neural_sde/hyper_a/a_{}.pkl'.format(0.45)))
# X1,X2 = control_data(model,6*9+1,set_state0,1,60000,0.00001)
# X1 = X1.detach().numpy()[0,:]
# print(X1.shape)
# plt.plot(np.arange(len(X1)),X1)
# plt.show()
| 2,698 | 28.021505 | 92 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/u_plot.py
|
import matplotlib.pyplot as plt
import torch
import numpy as np
from matplotlib import cm
import matplotlib as mpl
class ControlNet(torch.nn.Module):
    """Three-layer MLP whose output is multiplied elementwise by the input,
    so the realised control g(x) = NN(x) * x vanishes at the origin."""
    def __init__(self,n_input,n_hidden,n_output):
        super(ControlNet, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # Renamed the activation local: it was called `sigmoid` but is a ReLU.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        h_2 = relu(self.layer2(h_1))
        out = self.layer3(h_2)
        return out*x  # gain times state: guarantees zero control at x = 0
D_in = 2
H1 = 6
D_out = 2
model = ControlNet(D_in,H1,D_out)
# shared color scale so all panels are comparable
vnorm = mpl.colors.Normalize(vmin=-80, vmax=80)
def draw_image2(f):
    """Render the first output component of f over the [-6, 6]^2 grid as an
    image and return the (200, 200) tensor that was drawn."""
    with torch.no_grad():
        x = torch.linspace(-6, 6, 200)
        y = torch.linspace(-6, 6, 200)
        X, Y = torch.meshgrid(x, y)
        inp = torch.stack([X, Y], dim=2)  # (200, 200, 2) batch of states
        image = f(inp)
        image = image[..., 0].detach().cpu()  # first output component only
        plt.imshow(image, extent=[-6, 6, -6, 6], cmap='rainbow',norm=vnorm)
        # plt.xlabel(r'$\theta$')
        plt.xticks([-6,0,6])
        plt.yticks([])
    return image
def draw(a):
    """Load the checkpoint trained with alpha = a into `model` and render it."""
    model.load_state_dict(torch.load('./neural_sde/hyper_a/a_{}.pkl'.format(a)))
    draw_image2(model)
| 1,330 | 26.729167 | 80 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/functions.py
|
from os import stat
import numpy as np
import math
import torch
import timeit
import random
import matplotlib.pyplot as plt
from matplotlib import cm
from scipy.integrate import odeint
import numpy as np
np.random.seed(10)
class ControlNet(torch.nn.Module):
    """Three-layer MLP returning the raw network output (unlike u_plot's
    ControlNet, the input multiplication is applied by the caller)."""
    def __init__(self,n_input,n_hidden,n_output):
        super(ControlNet, self).__init__()
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden,n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden,n_output)

    def forward(self,x):
        # Renamed the activation local: it was called `sigmoid` but is a ReLU.
        relu = torch.nn.ReLU()
        h_1 = relu(self.layer1(x))
        h_2 = relu(self.layer2(h_1))
        out = self.layer3(h_2)
        return out
D_in = 2 # input dimension
H1 = 6 # hidden dimension
D_out = 2
inverted_model = ControlNet(D_in,H1,D_out)
inverted_model.load_state_dict(torch.load('./neural_sde/hyper_b/b_2.2.pkl'))
# ang = torch.zeros([5,1]) #initial angle
# vel = torch.zeros([5,1]) #initial velocity
# for i in range(5):
# x0 = np.random.uniform(-6,6,2)
# ang[i,0] = x0[0]
# vel[i,0] = x0[1]
def invert_pendulum(state0, t):
    """Integrate the uncontrolled pendulum ODE from state0 over times t.

    Returns an array of shape (2, len(t)): row 0 is theta, row 1 is theta_dot.
    """
    state0 = state0.flatten()
    G = 9.81 # gravity
    L = 0.5  # length of the pole
    m = 0.15 # ball mass
    b = 0.1  # friction
    def f(state,t):
        x, y = state  # unpack the state vector
        return y, G*np.sin(x)/L +(-b*y)/(m*L**2) # derivatives
    states = odeint(f, state0, t)
    return states.transpose()
# Generate controlled-trajectory data.
set_state0 = torch.tensor([[-5.0,5.0],[-3.0,4.0],[-1.0,3.0],[1.0,-3.0],[3.0,-4.0],[5.0,-5.0]])
def control_data(set_state0,M=6,N=20000,dt=0.00001):
    """Euler-Maruyama simulation of the controlled pendulum SDE; saves the
    M x N angle/velocity traces to inverted_ROA/control_data.pt.  The torch
    seed is fixed so the noise streams are reproducible."""
    start = timeit.default_timer()
    torch.manual_seed(6)
    X1,X2 = torch.zeros([M,N]),torch.zeros([M,N])
    for r in range(M):
        G = 9.81  # gravity
        L = 0.5   # length of the pole
        m = 0.15  # ball mass
        b = 0.1
        z1 = torch.randn(N)  # independent Brownian increments per coordinate
        z2 = torch.randn(N)
        # X1[r,0] = ang[r,0]
        # X2[r,0] = vel[r,0]
        X1[r,0] = set_state0[r,0]
        X2[r,0] = set_state0[r,1]
        for i in range(N-1):
            x1 = X1[r,i]
            x2 = X2[r,i]
            u = inverted_model(torch.tensor([x1,x2]))
            # drift * dt + state-proportional diffusion * sqrt(dt) * noise
            new_x1 = x1 + x2*dt + x1*u[0]*z1[i]*math.sqrt(dt)
            new_x2 = x2 + (G*math.sin(x1)/L - b*x2/(m*L**2))*dt + x2*u[1]*z2[i]*math.sqrt(dt)
            X1[r,i+1] = new_x1
            X2[r,i+1] = new_x2
        print('{} done'.format(r))
    orig_data = {'X1':X1,'X2':X2}
    torch.save(orig_data,'./neural_sde/inverted_ROA/control_data.pt')
    stop = timeit.default_timer()
    print(stop-start)
def control_trajectory(ax,title,path='./neural_sde/inverted_ROA/control_data.pt'):
    """Plot the first 3000 angle samples of each saved controlled trajectory on ``ax``."""
    data = torch.load(path)
    # X = data['X'].clone().detach()
    X1 = data['X1'].clone().detach()
    # X2 = data['X2']
    for i in range(len(X1)):
        # x = X[i,:].numpy()
        # m = np.max(x)
        # index = np.argwhere(x == m )
        # sample_length = int(index[0])
        L = np.arange(len(X1[0,:3000]))
        # Star marks the initial state; dashed line is the trajectory.
        plt.plot(L[0],X1[i,0],marker='*',markersize=8,color=cm.Accent(i*2))
        plt.plot(L,X1[i,:3000],linestyle='--',color=cm.Accent(i*2),alpha=0.45)
    L1 = plt.axhline(y=0.0,ls="--",linewidth=1.5,color="green")# horizontal reference line
    L2 = plt.axhline(y=math.pi,ls="--",linewidth=1.5,color="r")
    L3 = plt.axhline(y=-math.pi,ls="--",linewidth=1.5,color="b")
    ax.add_artist(L1)
    ax.add_artist(L2)
    ax.add_artist(L3)
    plt.legend([L1,L2,L3],[r'$\theta=0$',r'$\theta=\pi$',r'$\theta=-\pi$'],loc='upper right',borderpad=0.05, labelspacing=0.05)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\theta$')
def f(y):
    """Right-hand side of the uncontrolled inverted-pendulum vector field."""
    # physical parameters
    G = 9.81
    L = 0.5
    m = 0.15
    b = 0.1
    theta, omega = y
    return [omega, (m * G * L * np.sin(theta) - b * omega) / (m * L ** 2)]
# Plot the vector field as normalized streamlines of f.
def Plotflow(Xd, Yd):
    # Plot phase plane
    DX, DY = f([Xd, Yd])
    # Normalize each row so arrows show direction only, not magnitude.
    DX=DX/np.linalg.norm(DX, ord=2, axis=1, keepdims=True)
    DY=DY/np.linalg.norm(DY, ord=2, axis=1, keepdims=True)
    plt.streamplot(Xd,Yd,DX,DY, color=('gray'), linewidth=0.5,
                   density=0.6, arrowstyle='-|>', arrowsize=1.5)
if __name__ == '__main__':
    # NOTE(review): dt=0.0001 here overrides the function default of 0.00001 — confirm intended.
    control_data(set_state0,6,20000,0.0001)
| 4,265 | 30.6 | 127 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/calculate.py
|
import matplotlib.pyplot as plt
import torch
import numpy as np
def plot_grid():
    """Draw major (gray dash-dot) and minor (beige) grid lines on the current axes."""
    # matplotlib's boolean ``b`` argument was deprecated in 3.5 and later removed;
    # ``visible`` is the supported keyword and behaves identically.
    plt.grid(visible=True, which='major', color='gray', alpha=0.5, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(visible=True, which='minor', color='beige', alpha=0.5, ls='-', lw=1)
'''
Calculate and plot the mean end position of trajectories under learning control with each $\alpha$
'''
A = torch.load('./data/hyper_a/data.pt')
# Drop the last alpha column, leaving 19 alpha values.
A = A[:,:-1,:,:]
print(A.shape)
# Mean final angle over the sampled trajectories, per alpha.
end = torch.zeros([19])
for r in range(19):
    end[r] = torch.mean(A[0,r,:,-1])
print(end.shape)
end = end.detach().numpy()
plt.scatter(np.arange(len(end)),end, s=45, c=end, marker='.',alpha=0.99,cmap='rainbow')
plot_grid()
# plt.axvline(7.5,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.axvline(11.5,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.axhline(0.0,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.yticks([0,0.03,0.06])
plt.ylabel(r'$\theta$')
plt.xlabel(r'$\alpha$')
plt.colorbar()
#% start: automatic generated code from pylustrator
plt.figure(1).ax_dict = {ax.get_label(): ax for ax in plt.figure(1).axes}
import matplotlib as mpl
plt.figure(1).set_size_inches(12.040000/2.54, 5.670000/2.54, forward=True)
plt.figure(1).ax_dict["<colorbar>"].set_position([0.895507, 0.226426, 0.016383, 0.696457])
plt.figure(1).axes[0].set_xlim(-1.0, 18.9)
plt.figure(1).axes[0].set_xticks([-1.0, 3.0, 7.0, 11.0, 15.0, 19.0])
plt.figure(1).axes[0].set_xticklabels(["0", "0.2", "0.4", "0.6", "0.8", "1.0"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="center")
plt.figure(1).axes[0].set_position([0.139423, 0.226426, 0.739233, 0.696457])
plt.figure(1).axes[0].get_xaxis().get_label().set_fontsize(12)
plt.figure(1).axes[0].get_yaxis().get_label().set_fontsize(12)
#% end: automatic generated code from pylustrator
plt.show()
| 1,901 | 40.347826 | 207 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_a/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
from u_plot import *
from plot_trajectory import *
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
font_size = 15
'''
Pick trajectories data for corresponding $\alpha$
'''
A = torch.load('./data/hyper_a/data.pt')
# Keep the seven alpha slices preceding the last one (indices -8..-2).
A = A[:,-8:-1,:,:]
print(A.shape)
def plot_grid():
    """Draw major (gray dash-dot) and minor (beige) grid lines on the current axes."""
    # matplotlib's boolean ``b`` argument was deprecated in 3.5 and later removed;
    # ``visible`` is the supported keyword and behaves identically.
    plt.grid(visible=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(visible=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
def plot_a(a):
    """Plot the (zero-padded) training-loss curve recorded for this alpha."""
    L = np.load('./data/hyper_a/a_{}.npy'.format(a))
    # Pad the loss curve with zeros to a fixed length of 1000 iterations.
    r_L = np.zeros(1000-len(L))
    L = np.concatenate((L,r_L),axis=0)
    # np.concatenate((a,b),axis=0)
    plt.plot(np.arange(len(L)),L,'b')
    # plt.xlabel('Iterations')
    plt.ylim(-0.01,1)
    plt.yticks([])
    plt.title(r'$\alpha={}$'.format(a))
# Row 1: training loss for alpha in {0.65, 0.70, ..., 0.95}.
for i in range(7):
    # plt.axes([0.1+0.17*i, 0.7, 0.1, 0.1])
    plt.subplot(4, 7, i+1)
    plot_a(float(format(0.65+i*0.05,'.2f')))
    plot_grid()
    if i == 0:
        plt.yticks([0,10,20])
        plt.ylabel('Loss',fontsize=font_size)
        plt.text(-5,5,'Training',rotation=90,fontsize=font_size)
    else:
        plt.yticks([0, 10, 20], ['', '', ''])
    if i == 3:
        plt.xlabel('Iterations',fontsize=font_size)
# Row 2: angle trajectories (every 10th of the first 60000 steps).
for i in range(7):
    plt.subplot(4, 7, 7 + i+1)
    plot_trajec(A[0,i,:,0:60000:10],float(format(0.65+i*0.05,'.2f')))
    plot_grid()
    if i == 0:
        plt.yticks([-10,-5,0,5,10])
        plt.ylabel(r'$\theta$',fontsize=font_size)
        plt.text(-1,-5,'Trajectory',rotation=90,fontsize=font_size)
    else:
        plt.yticks([-10,-5, 0,5, 10], ['', '', '','',''])
    if i == 3:
        plt.xlabel('Time',fontsize=font_size)
# Row 3: angular-velocity trajectories.
for i in range(7):
    plt.subplot(4, 7, 14 + i+1)
    plot_trajec(A[1,i,:,0:60000:10],float(format(0.65+i*0.05,'.2f')))
    plot_grid()
    if i == 0:
        plt.yticks([-10,-5,0,5,10])
        plt.ylabel(r'$\dot{\theta}$',fontsize=font_size)
        plt.text(-1,-5,'Trajectory',rotation=90,fontsize=font_size)
    else:
        plt.yticks([-10,-5, 0,5, 10], ['', '', '','',''])
    if i == 3:
        plt.xlabel('Time',fontsize=font_size)
# Row 4: heat map of the learned control u over the state plane.
for i in range(7):
    # plt.axes([0.1+0.17*i, 0.1, 0.1, 0.1])
    plt.subplot(4, 7, 21 + i+1)
    draw(float(format(0.65+i*0.05,'.2f')))
    if i == 0:
        plt.yticks([-5,0,5])
        plt.ylabel(r'$\dot{\theta}$',fontsize=font_size)
        plt.text(-15,-3,r'Control $u$',rotation=90,fontsize=font_size)
    if i == 3:
        plt.xlabel(r'$\theta$',fontsize=font_size)
plt.colorbar()
plt.show()
| 2,666 | 27.98913 | 89 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hopf/AS.py
|
import torch
import torch.nn.functional as F
import numpy as np
import timeit
class Net(torch.nn.Module):
    """Two-hidden-layer tanh MLP used to learn the control gain."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        # Fixed seed: repeated construction yields identical initial weights.
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        squash = torch.nn.Tanh()
        return self.layer3(squash(self.layer2(squash(self.layer1(x)))))
def f_value(x):
    """Evaluate the drift f(x) = x(x+5)(x+10) per sample.

    Returns a freshly-built tensor (torch.tensor re-packs the values, so the
    result is detached from the autograd graph, matching the original).
    """
    values = [[xi * (xi + 5) * (xi + 10)] for xi in x]
    return torch.tensor(values)
'''
For learning
'''
N = 3000 # sample size
D_in = 1 # input dimension
H1 = 10 # hidden dimension
D_out = 1 # output dimension
torch.manual_seed(10)
# Training states sampled uniformly from [-30, 30].
x = torch.Tensor(N, D_in).uniform_(-30, 30)
theta = 0.5
out_iters = 0
while out_iters < 1:
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    t = 0
    max_iters = 700
    learning_rate = 0.05
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    L = []
    while i < max_iters:
        out = model(x)
        # Control term g(x) = net(x) * x.
        g = out*x
        f = f_value(x)
        # loss = (2-theta)*torch.diagonal(torch.mm(x,g.T))**2-torch.diagonal(torch.mm(x,x.T))*torch.diagonal(2*torch.mm(x,f.T)+torch.mm(g,g.T))
        # Elementwise form of the stability condition; negative entries are violations.
        loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        # NOTE(review): appends the full tensor (with grad graph), not .item() —
        # this retains every iteration's autograd graph in memory; confirm intended.
        L.append(Lyapunov_risk)
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    print("Verified time: ", t)
    out_iters+=1
# torch.save(torch.tensor(L), './data/hopf/loss_AS.pt')
# torch.save(model.state_dict(), './data/hopf/1d_hopf_net.pkl')
| 2,120 | 24.554217 | 143 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hopf/generate.py
|
import numpy as np
import math
import matplotlib.pyplot as plt
import torch
import timeit
class Net(torch.nn.Module):
    """Tanh MLP (two hidden layers) matching the architecture trained in AS.py."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        # Seed fixed so weights are reproducible before loading a checkpoint.
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        act = torch.nn.Tanh()
        h1 = act(self.layer1(x))
        h2 = act(self.layer2(h1))
        return self.layer3(h2)
# Trained 1-D control network for the Hopf radius dynamics.
hopf_model = Net(1,10,1)
hopf_model.load_state_dict(torch.load('./data/hopf/1d_hopf_net.pkl'))
m = 30
torch.manual_seed(10)
# Random initial radii in [3, 10] and angles in [0, 6.28].
rad = torch.Tensor(m,1).uniform_(3, 10)
ang = torch.Tensor(m,1).uniform_(0, 6.28)
def original_data(rad,ang,m,N=400,dt=0.001):
    """Simulate the UNCONTROLLED radius/angle dynamics and save Cartesian paths.

    rad/ang: (m, 1) initial radii and angles; N steps of size dt.
    Saves {'X','W','X1','X2'} (radius, angle, x, y) to disk.
    """
    X,W = torch.zeros([m,N]),torch.zeros([m,N])
    X1,X2 = torch.zeros([m,N]),torch.zeros([m,N])
    for r in range(m):
        X[r,0] = rad[r,0]
        W[r,0] = ang[r,0]
        for i in range(N-1):
            x = X[r,i]
            w = W[r,i]
            # u = hopf_model(torch.tensor([x-5.0]))
            new_x = x + x*(x-5.0)*(x+5.0)*dt
            new_w = w + dt
            # Clamp: freeze the state once the radius escapes past 10.
            if new_x > 10.0:
                new_x = x
                new_w = w
            X[r,i+1] = new_x
            W[r,i+1] = new_w
        # Polar -> Cartesian conversion for plotting.
        X1[r,:]=X[r,:]*torch.cos(W[r,:])
        X2[r,:]=X[r,:]*torch.sin(W[r,:])
    orig_data = {'X':X,'W':W,'X1':X1,'X2':X2}
    torch.save(orig_data,'./data/hopf/data.pt')
def control_data(rad,ang,m=30,N=30000,dt=0.0001):
    """Euler-Maruyama simulation of the radius SDE under the learned control.

    The control is applied around the radius-5 limit cycle (input x-5.0).
    Saves {'X','W','X1','X2'} to disk.
    """
    start = timeit.default_timer()
    torch.manual_seed(9)
    X,W = torch.zeros([m,N]),torch.zeros([m,N])
    X1,X2 = torch.zeros([m,N]),torch.zeros([m,N])
    # z = np.random.normal(0,1,N)
    for r in range(m):
        z = torch.randn(N)
        X[r,0] = rad[r,0]
        W[r,0] = ang[r,0]
        for i in range(N-1):
            x = X[r,i]
            w = W[r,i]
            u = hopf_model(torch.tensor([x-5.0]))
            # Stochastic step: drift*dt + (x-5)*u*sqrt(dt)*z (noise vanishes on the cycle).
            new_x = x + x*(x-5.0)*(x+5.0)*dt + (x-5.0)*(u[0])*z[i]*math.sqrt(dt)
            new_w = w + dt
            X[r,i+1] = new_x
            W[r,i+1] = new_w
        X1[r,:]=X[r,:]*torch.cos(W[r,:])
        X2[r,:]=X[r,:]*torch.sin(W[r,:])
        print('{} done'.format(r))
    orig_data = {'X':X,'W':W,'X1':X1,'X2':X2}
    torch.save(orig_data,'./data/hopf/control_data.pt')
    stop = timeit.default_timer()
    print(stop-start)
def test():
    """Quick visual sanity check: one short controlled path from (8.0, 3.8)."""
    N = 100
    dt = 0.0001
    X = torch.zeros([1,N])
    W = torch.zeros([1,N])
    X[0,0] = 8.0
    W[0,0] = 3.8
    z = torch.randn(N)
    for i in range(N-1):
        x = X[0,i]
        w = W[0,i]
        u = hopf_model(torch.tensor([x-5.0]))
        new_x = x + x*(x-5.0)*(x+5.0)*dt + (x-5.0)*(u[0])*z[i]*math.sqrt(dt)
        new_w = w + dt
        X[0,i+1] = new_x
        W[0,i+1] = new_w
    X = X.clone().detach()
    plt.plot(np.arange(N),X[0,:],'r')
    plt.show()
if __name__ == '__main__':
    # NOTE(review): N/dt here differ from the function defaults — confirm intended.
    control_data(rad,ang,m,600,0.0001)
    original_data(rad,ang,m,400,0.001)
    test()
| 3,077 | 27.766355 | 80 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hopf/functions.py
|
import numpy as np
import torch
import matplotlib.pyplot as plt
from matplotlib import cm
import matplotlib.gridspec as gridspec
# Vector field of the uncontrolled Hopf-type system.
def f(y, t):
    """Right-hand side dy/dt; ``t`` is unused but kept for the odeint-style API."""
    u, v = y
    radius_sq = u ** 2 + v ** 2
    return [-25.0 * u - v + u * radius_sq, u - 25 * v + v * radius_sq]
# Plot the vector field as normalized streamlines of f.
def Plotflow(Xd, Yd, t):
    # Plot phase plane
    DX, DY = f([Xd, Yd],t)
    # Normalize so arrows show direction only.
    DX=DX/np.linalg.norm(DX, ord=2, axis=1, keepdims=True)
    DY=DY/np.linalg.norm(DY, ord=2, axis=1, keepdims=True)
    plt.streamplot(Xd,Yd,DX,DY, color=('gray'), linewidth=0.5,
                   density=0.6, arrowstyle='-|>', arrowsize=1.5)
def plot_orbit(ax,title,path='./hopf/control_data.pt'):
    """Plot phase-plane orbits from saved data, colored by starting side of the limit cycle."""
    data = torch.load(path)
    X = data['X'].clone().detach()
    X1 = data['X1'].clone().detach()
    X2 = data['X2'].clone().detach()
    # Add the limit cycle (circle of radius 5).
    C = plt.Circle((0, 0),5, color='g', linewidth=2.5, fill=False)
    ax.add_artist(C)
    # Plot the vector field.
    xd = np.linspace(-10, 10, 10)
    yd = np.linspace(-10, 10, 10)
    Xd, Yd = np.meshgrid(xd,yd)
    t = np.linspace(0,2,2000)
    Plotflow(Xd, Yd,t)
    m = len(X1)
    for i in range(m):
        # Red: starts outside the cycle; blue: starts inside.
        # The bounds skip trajectories blown up by the noise.
        if 9.6 > X[i,0] > 5.5 and torch.max(X[i,:])<10 and torch.min(X[i,:])>0:
            plt.plot(X1[i,0],X2[i,0],marker='*',markersize=8,color='r')
            plt.plot(X1[i,:],X2[i,:],linestyle='--',color='r')
        elif X[i,0] < 4.5 and torch.max(X[i,:])<10 and torch.min(X[i,:])>0:
            plt.plot(X1[i,0],X2[i,0],marker='*',markersize=8,color='b')
            plt.plot(X1[i,:],X2[i,:],linestyle='--',color='b')
    plt.legend([C],['limit cycle'],loc='upper right')
    plt.title(title)
    plt.xlabel('x')
    plt.ylabel('y')
# Plot uncontrolled trajectories that start OUTSIDE the limit cycle.
def uncontrol_trajectory1(ax,title,path='./hopf/data.pt'):
    """Radius-vs-time plot for trajectories with 5.5 < rho(0) < 9.5, truncated at their peak."""
    data = torch.load(path)
    X = data['X']
    C = plt.axhline(y=5.0,ls="--",linewidth=2.5,color="green")# horizontal line at the limit cycle
    U = plt.axhline(y=9.5,ls="--",linewidth=2.5,color="black")
    ax.add_artist(C)
    ax.add_artist(U)
    for i in range(len(X)):
        if 9.5 > X[i,0] > 5.5:
            x = X[i,:].numpy()
            # Truncate at the first maximum (states are frozen once rho escapes).
            m = np.max(x)
            index = np.argwhere(x == m )
            sample_length = int(index[0])
            L = np.arange(len(X[0,:sample_length]))
            plt.plot(L[0],X[i,0],marker='*',markersize=8,color='r')
            plt.plot(L,X[i,:sample_length],linestyle='--',color='r')
    plt.legend([U,C],[r'$\rho$=9.5',r'$\rho$=5.0'],borderpad=0.01, labelspacing=0.01)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\rho$')
# Plot trajectories that start INSIDE the limit cycle;
# ``sample_length`` selects how much of the stored path to draw.
def uncontrol_trajectory2(ax,title,sample_length = 40,path='./hopf/control_data.pt'):
    """Radius-vs-time plot for trajectories with rho(0) < 4.5."""
    data = torch.load(path)
    X = data['X'].clone().detach()
    C = plt.axhline(y=5.0,ls="--",linewidth=2.5,color="green") # horizontal line: the limit cycle
    U = plt.axhline(y=0.0,ls="--",linewidth=2.5,color="deeppink") # horizontal line: the origin
    ax.add_artist(C)
    ax.add_artist(U)
    for i in range(len(X)):
        if X[i,0] < 4.5:
            L = np.arange(len(X[0,:sample_length]))
            plt.plot(L[0],X[i,0],marker='*',markersize=8,color='b')
            plt.plot(L,X[i,:sample_length],linestyle='--',color='b')
    plt.legend([C,U],[r'$\rho$=5.0',r'$\rho$=0.0'],borderpad=0.01, labelspacing=0.01)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\rho$')
# Plot CONTROLLED trajectories starting outside the limit cycle.
def control_trajectory1(ax,title,sample_length,path='./hopf/data.pt'):
    """Radius-vs-time plot (first ``sample_length`` steps) for 5.5 < rho(0) < 9.6."""
    data = torch.load(path)
    X = data['X'].clone().detach()
    C = plt.axhline(y=5.0,ls="--",linewidth=2.5,color="green")# horizontal line at the limit cycle
    ax.add_artist(C)
    for i in range(len(X)):
        if 9.6 > X[i,0] > 5.5:
            L = np.arange(len(X[0,:sample_length]))
            plt.plot(L[0],X[i,0],marker='*',markersize=8,color='r')
            plt.plot(L,X[i,:sample_length],linestyle='--',color='r')
    plt.legend([C],[r'$\rho$=5.0'],borderpad=0.01, labelspacing=0.01)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\rho$')
# Plot CONTROLLED trajectories starting inside the limit cycle.
def control_trajectory2(ax,title,sample_length = 40,path='./hopf/control_data.pt'):
    """Radius-vs-time plot (first ``sample_length`` steps) for rho(0) < 4.5."""
    data = torch.load(path)
    X = data['X'].clone().detach()
    C = plt.axhline(y=5.0,ls="--",linewidth=2.5,color="green")# horizontal line at the limit cycle
    ax.add_artist(C)
    for i in range(len(X)):
        if X[i,0] < 4.5:
            L = np.arange(len(X[0,:sample_length]))
            plt.plot(L[0],X[i,0],marker='*',markersize=8,color='b')
            plt.plot(L,X[i,:sample_length],linestyle='--',color='b')
    plt.legend([C],[r'$\rho$=5.0'],borderpad=0.01, labelspacing=0.01)
    plt.title(title)
    plt.xlabel('t')
    plt.ylabel(r'$\rho$')
| 4,576 | 33.674242 | 92 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hopf/plot.py
|
import matplotlib
matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
matplotlib.rcParams['text.usetex'] = True
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from functions import *
def plot_grid():
    """Draw major (gray dash-dot) and minor (beige) grid lines on the current axes."""
    # matplotlib's boolean ``b`` argument was deprecated in 3.5 and later removed;
    # ``visible`` is the supported keyword and behaves identically.
    plt.grid(visible=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(visible=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
if __name__ == '__main__':
    # Composite figure: uncontrolled (top) vs controlled (bottom) orbits and radius traces.
    max_len = 6
    fig = plt.figure()
    plt.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.5, hspace=0.5)
    gs = gridspec.GridSpec(16, 13)
    ax1 = plt.subplot(gs[0:7, 9:13])
    plot_orbit(ax1,'Phase Orbits','./data.pt')
    plot_grid()
    ax2 = plt.subplot(gs[0:3,0:max_len])
    # plot_orbit(ax1,'Phase Orbits under Stochastic Control','./neural_sde/hopf/control_data.pt')
    uncontrol_trajectory1(ax2,'Plot along Trajectories',path='./data.pt')
    plot_grid()
    ax3 = plt.subplot(gs[4:7,0:max_len])
    uncontrol_trajectory2(ax3,None,200,path='./data.pt')
    plot_grid()
    ax4 = plt.subplot(gs[9:16, 9:13])
    plot_orbit(ax4,None,'./control_data.pt')
    plot_grid()
    ax5 = plt.subplot(gs[9:12,0:max_len])
    control_trajectory1(ax5,None,40,path='./control_data.pt')
    plot_grid()
    ax6 = plt.subplot(gs[13:16,0:max_len])
    control_trajectory2(ax6,None,40,path='./control_data.pt')
    # control_trajectory2(ax5,None,40,path='./control_data.pt')
    plot_grid()
    plt.show()
| 1,598 | 30.352941 | 97 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Echo/AS.py
|
import torch
import torch.nn.functional as F
import numpy as np
import timeit
import argparse
# Command-line options for the training run.
# NOTE(review): --N is declared float but is used as a sample count
# (torch.Tensor(N, D_in)) — confirm int was intended.
parser = argparse.ArgumentParser('ODE demo')
parser.add_argument('--N', type=float, default=5000)
parser.add_argument('--lr', type=float, default=0.03)
args = parser.parse_args()
class Net(torch.nn.Module):
    """Two-hidden-layer ReLU MLP producing the per-dimension control gain."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        # Deterministic initialization across instantiations.
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        rectify = torch.nn.ReLU()
        stage1 = rectify(self.layer1(x))
        stage2 = rectify(self.layer2(stage1))
        return self.layer3(stage2)
'''
For learning
'''
N = args.N # sample size
D_in = 50 # input dimension
H1 = 4*D_in # hidden dimension
D_out = D_in # output dimension
torch.manual_seed(10)
x = torch.Tensor(N, D_in).uniform_(-10, 10)
# Reservoir coupling matrix generated by generate_matrix_A.py.
A = np.load('neural_sde/Echo/50/A_{}.npy'.format(D_in))
A = torch.tensor(A).to(torch.float32)
theta = 0.8
out_iters = 0
valid = False
# NOTE(review): ``valid`` is never set True anywhere visible; the loops rely on
# the counters / zero-loss break to terminate.
while out_iters < 1 and not valid:
    # break
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    t = 0
    max_iters = 10000
    learning_rate = args.lr
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    while i < max_iters and not valid:
        out = model(x)
        g = out*x
        # Drift of the echo-state system: relu(A x).
        f = torch.relu(torch.mm(A,x.T)).T
        # Matrix form of the stability condition; negative entries are violations.
        loss = (2-theta)*torch.diagonal(torch.mm(x,g.T))**2-torch.diagonal(torch.mm(x,x.T))*torch.diagonal(2*torch.mm(x,f.T)+torch.mm(g,g.T))
        # loss = (2-theta)*((x*g)**2)-x**2*(2*x*f+g**2)
        Lyapunov_risk = (F.relu(-loss)).mean()
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # Stop early once no constraint is violated.
        if Lyapunov_risk == 0:
            break
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    print("Verified time: ", t)
    out_iters+=1
torch.save(model.state_dict(), './data/Echo/AS_{}_relu_net.pkl'.format(D_in))
| 2,257 | 24.954023 | 141 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Echo/generate.py
|
import numpy as np
import torch
import math
class Net(torch.nn.Module):
    """ReLU MLP (two hidden layers) matching the controller trained in Echo/AS.py."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)  # reproducible initialization before checkpoint loading
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        return self.layer3(relu(self.layer2(relu(self.layer1(x)))))
D_in = 50 # input dimension
H1 = 4*D_in # hidden dimension
D_out = D_in # output dimension
A = np.load('./data/Echo/A_{}.npy'.format(D_in))
A = torch.tensor(A).to(torch.float32)
m = 10 # number of trajectories
N = 200000 # number of time steps
dt = 0.000001 # step size
model = Net(D_in,H1,D_out)
# Shared initial condition: 50 points evenly spaced in [-2, 2].
x0 = torch.linspace(-2,2,50)
def tanh_generate(m,N,dt):
    """Simulate m controlled trajectories of the tanh echo-state SDE and save them.

    Uses the module-level checkpointed ``model`` as the control gain network.
    """
    model.load_state_dict(torch.load('./data/Echo/AS_50_net.pkl'))
    X = torch.zeros(m,N+1,D_in)
    for r in range(m):
        # Per-trajectory seed for the Brownian increments.
        torch.manual_seed(6*r+6)
        z = torch.randn(N)
        X[r,0,:] = x0
        for i in range(N):
            x = X[r,i,:].unsqueeze(1)
            with torch.no_grad():
                u = model(X[r,i,:]).unsqueeze(1)
            # Euler-Maruyama: drift tanh(Ax)*dt + multiplicative noise u*x.
            new_x = x + torch.tanh(torch.mm(A,x))*dt + math.sqrt(dt)*z[i]*u*x
            X[r,i+1,:]=new_x[:,0]
        print(r)
    X = X.detach().numpy()
    np.save('./data/Echo/tanh_data.npy',X)
def relu_generate(m,N,dt):
    """Same simulation as tanh_generate but with relu(Ax) drift and the ReLU checkpoint."""
    # NOTE(review): hidden width 100 here differs from the module-level H1=200 —
    # confirm it matches the architecture of the saved ReLU checkpoint.
    model = Net(D_in,100,D_out)
    model.load_state_dict(torch.load('./data/Echo/AS_50_relu_net.pkl'))
    X = torch.zeros(m,N+1,D_in)
    for r in range(m):
        torch.manual_seed(6*r+6)
        z = torch.randn(N)
        X[r,0,:] = x0
        for i in range(N):
            x = X[r,i,:].unsqueeze(1)
            with torch.no_grad():
                u = model(X[r,i,:]).unsqueeze(1)
            new_x = x + torch.relu(torch.mm(A,x))*dt + math.sqrt(dt)*z[i]*u*x
            X[r,i+1,:]=new_x[:,0]
        print(r)
    X = X.detach().numpy()
    np.save('./data/Echo/relu_data.npy',X)
# Generate both datasets when the module is executed.
tanh_generate(m,N,dt)
relu_generate(m,N,dt)
| 2,073 | 26.653333 | 77 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Echo/generate_matrix_A.py
|
import numpy as np
from numpy import linalg as LA
import matplotlib.pyplot as plt
import networkx as nx
from networkx.generators.classic import empty_graph, path_graph, complete_graph
from networkx.generators.random_graphs import barabasi_albert_graph, erdos_renyi_graph
def initial_W(shape, low_bound, up_bound):
    """Draw an array of the given shape i.i.d. from Uniform[low_bound, up_bound)."""
    sampled = np.random.uniform(low=low_bound, high=up_bound, size=shape)
    return sampled
def generate_A(shape, rho, D_r):
    '''
    Build the reservoir coupling matrix A from a sparse Erdos-Renyi graph.

    :param shape: Shape of matrix A (D_r, D_r)
    :param rho: Target spectral radius of matrix A
    :param D_r: Dimension of matrix A
    :return: Tuple (A, max_eigvalue) — the rescaled matrix and its largest
             real eigenvalue part after rescaling
    '''
    G = erdos_renyi_graph(D_r, 6 / D_r, seed=2)  # ER graph with D_r nodes; connection probability p = 6/D_r
    degree = [val for (node, val) in G.degree()]
    print('average degree:', sum(degree) / len(degree))
    # nx.to_numpy_matrix was deprecated and removed in networkx 3.0;
    # to_numpy_array returns an ndarray with the same adjacency values.
    G_A = nx.to_numpy_array(G)  # Transform the graph to the connection matrix A
    index = np.where(G_A > 0)  # Find the positions where an edge exists
    res_A = np.zeros(shape)
    a = 0.3
    res_A[index] = initial_W([len(index[0]), ], 0, a)  # Sample edge weights from Uniform[0, a)
    max_eigvalue = np.real(np.max(LA.eigvals(res_A)))  # Largest eigenvalue (real part) of A
    print('before max_eigvalue:{}'.format(max_eigvalue))
    res_A = res_A / abs(max_eigvalue) * rho  # Rescale the spectral radius of A to rho
    max_eigvalue = np.real(np.max(LA.eigvals(res_A)))
    print('after max_eigvalue:{}'.format(max_eigvalue))
    return res_A, max_eigvalue
# Build and persist the 50x50 coupling matrix with spectral radius 2.0.
rho = 2.0
D_r = 50
res_A, max_eigval = generate_A(shape=(D_r, D_r), rho=rho, D_r=D_r)
np.save('./data/Echo/A_50.npy', res_A)
| 1,623 | 36.767442 | 126 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Echo/energy_plot.py
|
import numpy as np
import matplotlib.pyplot as plt
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
font_size=35
def plot_grid():
    """Draw major (gray dash-dot) and minor (beige) grid lines on the current axes."""
    # matplotlib's boolean ``b`` argument was deprecated in 3.5 and later removed;
    # ``visible`` is the supported keyword and behaves identically.
    plt.grid(visible=True, which='major', color='gray', alpha=0.3, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(visible=True, which='minor', color='beige', alpha=0.3, ls='-', lw=1)
# Panel 1: mean final state norm for each k value.
plt.subplot(221)
data = np.load('./neural_sde/Echo/50/k_list.npy')
# norm = np.linalg.norm(data[30,:],axis=2)
# ind = np.where(norm[3,:]<0.1)[0][0]
# print(ind)
end = np.mean(np.linalg.norm(data[:,:,-1,:],axis=2),axis=1)
np.save('./neural_sde/Echo/50/k_end.npy',end)
print(end.shape)
print(end[-20:])
plt.scatter(np.arange(len(end)),end, s=45, c=end, marker='.',alpha=0.85,cmap='rainbow')
plt.axvline(30,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.9)
plt.xticks([0,10,20,30,40,50],[20,30,40,50,60,70])
plt.xlabel(r'$k$')
plt.ylabel(r'$\Vert x(0.01)\Vert$')
plot_grid()
plt.colorbar()
# Panel 2: numerically-estimated control energy per k.
plt.subplot(222)
energy_list=np.load('./neural_sde/Echo/50/numerical_energy.npy')
plt.scatter(np.arange(len(energy_list)),energy_list, s=45, c=energy_list, marker='.',alpha=0.85,cmap='rainbow')
plt.xticks([0,10,20],[50,60,70])
plt.xlabel(r'$k$')
plt.ylabel('Energy')
plt.colorbar()
plot_grid()
# Panel 3: numerical vs theoretical stabilization times.
plt.subplot(223)
time_list=np.load('./neural_sde/Echo/50/numerical_time.npy')
time_list1=np.load('./neural_sde/Echo/50/theory_time.npy')
plt.scatter(np.arange(len(time_list)),time_list, s=45, c='r', marker='.',alpha=0.85,label=r'$\tau_{0.05}~for~k$')
plt.scatter(np.arange(len(time_list1)),time_list1, s=45, c='b', marker='.',alpha=0.85,label=r'$T_{0.05}$')
plt.xticks([0,10,20],[50,60,70])
plt.xlabel(r'$k$')
plt.ylabel('Time')
plt.axhline(0.021285,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.9,label=r'$\tau_{0.05}~for~AS$')
plot_grid()
plt.legend()
# Panel 4: log of numerical vs theoretical energies.
plt.subplot(224)
energy_list=np.log(np.load('./neural_sde/Echo/50/numerical_energy.npy'))
energy_list1=np.log(np.load('./neural_sde/Echo/50/theory_energy.npy'))
plt.scatter(np.arange(len(energy_list)),energy_list, s=45, c='r', marker='.',alpha=0.85,label=r'$\mathcal{E}(\tau_{0.05},T_{0.05})~for~k$')
plt.scatter(np.arange(len(energy_list1)),energy_list1, s=45, c='b', marker='.',alpha=0.85,label=r'$E_{0.05}$')
plt.xticks([0,10,20],[50,60,70])
plt.xlabel(r'$k$')
plt.ylabel('log Energy')
plt.axhline(np.log(877.653),ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.9,label=r'$\mathcal{E}(\tau_{0.05},T_{0.05})~for~AS$')
plot_grid()
plt.legend()
plt.show()
| 2,526 | 36.716418 | 139 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/Echo/plot.py
|
import numpy as np
import matplotlib.pyplot as plt
#Use latex font
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
font_size = 15
def plot_grid():
    """Draw major (gray dash-dot) and minor (beige) grid lines on the current axes."""
    # matplotlib's boolean ``b`` argument was deprecated in 3.5 and later removed;
    # ``visible`` is the supported keyword and behaves identically.
    plt.grid(visible=True, which='major', color='gray', alpha=0.6, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(visible=True, which='minor', color='beige', alpha=0.8, ls='-', lw=1)
def plot_trajec(L):
    """Plot the mean of trajectories in L with a +/- one-std shaded band."""
    mean_data = np.mean(L,0)
    std_data = np.std(L,0)
    plt.fill_between(np.arange(len(mean_data)),mean_data-std_data,mean_data+std_data,color='r',alpha=0.2)
    plt.plot(np.arange(len(mean_data)),mean_data,color='r',alpha=0.9)
    plt.yticks([])
# Top-left: original (uncontrolled) tanh system.
plt.subplot(271)
X = np.load('./data/Echo/orig_data.npy')[0:40001:10,:]
for i in range(50):
    plt.plot(np.arange(len(X)),X[:,i])
plt.ylabel('Value',fontsize=font_size)
plt.xlabel('Time',fontsize=font_size)
# plt.text(1,4,r'$\textbf{Tanh}$',rotation=90,fontsize=font_size)
plot_grid()
plt.title('Original',fontsize=font_size)
# Controlled tanh system (trajectory 6, every 10th step).
plt.subplot(272)
X = np.load('./data/Echo/tanh_data.npy')[6,0:50001:10,:]
for i in range(50):
    plt.plot(np.arange(len(X)),X[:,i])
plt.ylim(-2,2)
plt.yticks([-2,-1,0,1,2])
plt.xticks([0,2000,4000],[0,0.02,0.04])
plot_grid()
plt.title('Controlled',fontsize=font_size)
# Bottom row: same pair for the ReLU system.
plt.subplot(278)
X = np.load('./data/Echo/relu_orig_data.npy')[0:40001:10,:]
for i in range(50):
    plt.plot(np.arange(len(X)),X[:,i])
plt.ylabel('Value',fontsize=font_size)
plt.xlabel('Time',fontsize=font_size)
plot_grid()
plt.subplot(279)
X = np.load('./data/Echo/relu_data.npy')[6,0:50001:10,:]
for i in range(50):
    plt.plot(np.arange(len(X)),X[:,i])
plt.ylim(-2,2)
plt.yticks([-2,-1,0,1,2])
plt.xticks([0,2000,4000],[0,0.02,0.04])
plot_grid()
# Mean+/-std bands of selected coordinates (x10, x20, ...) across runs (run 1 excluded).
for i in range(5):
    plt.subplot(2,7,i+3)
    X = np.load('./data/Echo/tanh_data.npy')[np.delete(np.arange(10),1),0:5001:10,:]
    plot_trajec(X[:,:,i*10+9])
    plt.yticks([-2,-1,0,1,2],[])
    plt.ylim(-2,2)
    plt.xticks([0,200,400],[0,0.002,0.004])
    plot_grid()
    plt.title(r'$x_{10}$',fontsize=font_size)
for i in range(5):
    plt.subplot(2,7,7+i+3)
    X = np.load('./data/Echo/relu_data.npy')[np.delete(np.arange(10),1),0:5001:10,:]
    plot_trajec(X[:,:,i*10+9])
    plt.yticks([-2,-1,0,1,2],[])
    plt.ylim(-2,2)
    plt.xticks([0,200,400],[0,0.002,0.004])
    plot_grid()
plt.show()
| 2,380 | 27.686747 | 105 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/plot_trajectory.py
|
import numpy as np
import math
import matplotlib.pyplot as plt
import torch
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm
import timeit
start = timeit.default_timer()
def plot_trajec(L,b):
    """Plot the mean trajectory in L with a +/- one-std band, labelled by b."""
    mean_data = torch.mean(L,0).detach().numpy()
    std_data =torch.std(L,0).detach().numpy()
    plt.fill_between(np.arange(len(mean_data)),mean_data-std_data,mean_data+std_data,color='r',alpha=0.2)
    plt.plot(np.arange(len(mean_data)),mean_data,color='r',alpha=0.9,label=r'$b={}$'.format(b))
    plt.ylim(-10,10)
    # plt.xlabel('Time')
    # Relabel sample indices as physical time.
    plt.xticks([0.0, 500, 1000], ["$0$", "$0.5$", "$1.0$"])
    plt.yticks([])
| 639 | 28.090909 | 105 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/test.py
|
import numpy as np
import matplotlib.pyplot as plt
import time
# Stand-alone demo that checks LaTeX text rendering works and times it.
start_time = time.time()
# Example data
t = np.arange(0.0, 1.0 + 0.01, 0.01)
s = np.cos(4 * np.pi * t) + 2
# Requires a working LaTeX installation (usetex=True).
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
plt.plot(t, s)
plt.xlabel(r'\textbf{time} (s)')
plt.ylabel(r'\textit{voltage} (mV)',fontsize=16)
plt.title(r"\TeX\ is Number "
          r"$\displaystyle\sum_{n=1}^\infty\frac{-e^{i\pi}}{2^n}$!",
          fontsize=16, color='gray')
# Make room for the ridiculously large title.
plt.subplots_adjust(top=0.8)
end_time = time.time()
used_time = end_time - start_time
print(used_time)
plt.show()
| 625 | 24.04 | 68 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/V_plot.py
|
import matplotlib.pyplot as plt
import torch
import numpy as np
from matplotlib import cm
import matplotlib as mpl
# import matplotlib
# matplotlib.rcParams['font.sans-serif'] = 'NSimSun,Times New Roman'
# matplotlib.rcParams['text.usetex'] = True
colors = [
[233/256, 110/256, 236/256], # #e96eec
# [0.6, 0.6, 0.2], # olive
# [0.5333333333333333, 0.13333333333333333, 0.3333333333333333], # wine
[255/255, 165/255, 0],
# [0.8666666666666667, 0.8, 0.4666666666666667], # sand
# [223/256, 73/256, 54/256], # #df4936
[107/256, 161/256,255/256], # #6ba1ff
[0.6, 0.4, 0.8], # amethyst
[0.0, 0.0, 1.0], # ao
[0.55, 0.71, 0.0], # applegreen
# [0.4, 1.0, 0.0], # brightgreen
[0.99, 0.76, 0.8], # bubblegum
[0.93, 0.53, 0.18], # cadmiumorange
[11/255, 132/255, 147/255], # deblue
[204/255, 119/255, 34/255], # {ocra}
]
colors = np.array(colors)
l = 0.01
class VNet(torch.nn.Module):
    """Lyapunov-candidate network: V(x) = l*x*x + (x * net(x))**2 elementwise.

    ``l`` is the module-level quadratic weight constant.
    """

    def __init__(self, n_input, n_hidden, n_output):
        super(VNet, self).__init__()
        torch.manual_seed(2)  # reproducible initialization
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        squash = torch.nn.Tanh()
        net_out = self.layer2(squash(self.layer1(x)))
        return l * x * x + (x * net_out) ** 2
D_in = 2
H1 = 6
D_out = 2
vmodel = VNet(D_in,H1,D_out)
# Shared color scale for all V heat maps.
V_vnorm = mpl.colors.Normalize(vmin=0, vmax=2.0)
D = 6 # half-width of the plotted state domain
def draw_imageV(f):
    """Render f over the [-D, D]^2 grid as a heat map plus the V=0.05 level set."""
    with torch.no_grad():
        x = torch.linspace(-D, D, 200)
        y = torch.linspace(-D, D, 200)
        X, Y = torch.meshgrid(x, y)
        inp = torch.stack([X, Y], dim=2)
        image = f(inp)
        # Keep only the first output channel for display.
        image = image[..., 0].detach().cpu()
        plt.contour(X,Y,image-0.05,0,linewidths=2, colors=colors[-3],linestyles='--')
        # plt.contourf(X,Y,image,8,alpha=0.3,cmap='turbo',norm=vnorm)
        plt.imshow(image, extent=[-6, 6, -6, 6], cmap='rainbow',norm=V_vnorm)
        plt.xticks([-5,0,5])
        plt.yticks([])
        return image
def drawV(a):
    """Load the V network trained for hyper-parameter b=a and plot it."""
    vmodel.load_state_dict(torch.load('./neural_sde/hyper_b/V_b_{}.pkl'.format(a)))
    draw_imageV(vmodel)
    # plt.title(r'b$={}$'.format(a))
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/generate.py
|
import numpy as np
import math
import torch
import timeit
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(10)
class ControlNet(torch.nn.Module):
    """ReLU MLP (two hidden layers) that outputs the control gain for each state."""

    def __init__(self, n_input, n_hidden, n_output):
        super(ControlNet, self).__init__()
        torch.manual_seed(2)  # identical weights on every instantiation
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        inner = relu(self.layer2(relu(self.layer1(x))))
        return self.layer3(inner)
D_in = 2
H1 = 6
D_out = 2
model = ControlNet(D_in,H1,D_out)
# Single initial state used for the b sweep.
set_state0 = torch.tensor([[-5.0,5.0]])
# set_state0 = torch.tensor([[-5.0,5.0],[-3.0,4.0],[-1.0,3.0],[1.0,-3.0],[3.0,-4.0],[5.0,-5.0]])
def control_data(model,random_seed,set_state0,M=6,N=20000,dt=0.00001):
    """Euler-Maruyama simulation of the pendulum SDE under ``model``'s control.

    Returns (X1, X2): angle and angular-velocity paths, shape (M, N) each.
    """
    start = timeit.default_timer()
    torch.manual_seed(random_seed)
    X1,X2 = torch.zeros([M,N]),torch.zeros([M,N])
    for r in range(M):
        G = 9.81 # gravity
        L = 0.5 # length of the pole
        m = 0.15 # ball mass
        b = 0.1 # friction
        z = torch.randn(N)
        X1[r,0] = set_state0[r,0]
        X2[r,0] = set_state0[r,1]
        for i in range(N-1):
            x1 = X1[r,i]
            x2 = X2[r,i]
            with torch.no_grad():
                u = model(torch.tensor([x1,x2]))
            # Multiplicative-noise step driven by a single Brownian increment per step.
            new_x1 = x1 + x2*dt + x1*u[0]*z[i]*math.sqrt(dt)
            new_x2 = x2 + (G*math.sin(x1)/L - b*x2/(m*L**2))*dt + x2*u[1]*z[i]*math.sqrt(dt)
            X1[r,i+1] = new_x1
            X2[r,i+1] = new_x2
        print('{} done'.format(r))
    # data = {'X1':X1,'X2':X2}
    # torch.save(data,'./neural_sde/hyper_b/b_{}.pt'.format(b))
    stop = timeit.default_timer()
    print(stop-start)
    return X1,X2
'''
Generate trajectories under control with corresponding b
'''
if __name__ == '__main__':
    M = 5
    N = 20000
    # data[channel, b-index, run, step]: channel 0 = angle, 1 = velocity.
    data = torch.zeros([2,10,M,N])
    for r in range(10):
        # NOTE(review): 2.0 + r*0.1 can produce values like 2.3000000000000003,
        # so the '{}'.format(b) filename may not match the saved checkpoint names
        # — confirm against the files on disk.
        b = 2.0 + r*0.1
        model.load_state_dict(torch.load('./data/hyper_b/b_{}.pkl'.format(b)))
        # X1,X2=torch.zeros([M,N]),torch.zeros([M,N])
        for i in range(M):
            x1,x2 = control_data(model,i*6,set_state0,1,N,0.0001)
            # X1[i,:] = x1[0,:]
            # X2[i,:] = x2[0,:]
            data[0,r,i,:] = x1[0,:]
            data[1,r,i,:] = x2[0,:]
            print('({},{})'.format(r,i))
    torch.save(data,'data.pt')
    # model.load_state_dict(torch.load('./neural_sde/hyper_b/b_{}.pkl'.format(1.6)))
    # X1,X2 = control_data(model,6,set_state0,1,30000,0.0001)
    # X1 = X1.detach().numpy()[0,:]
    # print(X1.shape)
    # plt.plot(np.arange(len(X1)),X1)
    # plt.show()
| 2,768 | 30.827586 | 96 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/u_plot.py
|
import matplotlib.pyplot as plt
import torch
import numpy as np
from matplotlib import cm
import matplotlib as mpl
class ControlNet(torch.nn.Module):
    """Three-layer ReLU MLP controller whose output is gated by the input.

    The forward pass returns net(x) * x, so the control is exactly zero at
    the origin.
    """

    def __init__(self, n_input, n_hidden, n_output):
        super(ControlNet, self).__init__()
        # Fixed seed so the randomly initialised weights are reproducible.
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        first = relu(self.layer1(x))
        second = relu(self.layer2(first))
        # Multiply by the input so the control vanishes at x = 0.
        return self.layer3(second) * x
# Network dimensions and a shared colour scale for the control-surface plots.
D_in, H1, D_out = 2, 6, 2
cmodel = ControlNet(D_in, H1, D_out)
C_vnorm = mpl.colors.Normalize(vmin=-80, vmax=80)
def draw_image(f):
    """Evaluate f on a 200x200 grid over [-6, 6]^2 and plot channel 0 as an image.

    Returns the rendered (200, 200) tensor.
    """
    with torch.no_grad():
        xs = torch.linspace(-6, 6, 200)
        ys = torch.linspace(-6, 6, 200)
        grid_x, grid_y = torch.meshgrid(xs, ys)
        points = torch.stack([grid_x, grid_y], dim=2)
        values = f(points)[..., 0].detach().cpu()
    plt.imshow(values, extent=[-6, 6, -6, 6], cmap='rainbow', norm=C_vnorm)
    plt.xticks([-5, 0, 5])
    plt.yticks([])
    return values
def draw(a):
    """Load the controller checkpoint trained with b == a and plot its control surface."""
    state = torch.load('./neural_sde/hyper_b/b_{}.pkl'.format(a))
    cmodel.load_state_dict(state)
    draw_image(cmodel)
    # plt.title(r'b$={}$'.format(a))
| 1,389 | 27.367347 | 81 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/calculate.py
|
import matplotlib.pyplot as plt
import torch
import numpy as np
# import pylustrator
# pylustrator.start()
def plot_grid():
    """Draw major and minor grid lines behind the current axes."""
    # Pass the visibility flag positionally: the keyword was named ``b`` in old
    # matplotlib and renamed to ``visible`` in 3.5 (``b`` removed in 3.6), so a
    # positional True works on every version.
    plt.grid(True, which='major', color='gray', alpha=0.5, linestyle='dashdot', lw=1.5)
    # minor grid lines
    plt.minorticks_on()
    plt.grid(True, which='minor', color='beige', alpha=0.5, ls='-', lw=1)
# Load the saved sweep; presumably A[c, r, i, t] = state component c, parameter
# index r, trajectory i, time step t (matches the generation script) — verify.
A = torch.load('./neural_sde/hyper_b/data.pt')
print(A.shape)
# For each of the 20 parameter values, average the final value of component 0
# over all trajectories.
end = torch.zeros([20])
for r in range(20):
    end[r] = torch.mean(A[0,r,:,-1])
print(end)
end = end.detach().numpy()
# Scatter the terminal means, coloured by their own value.
plt.scatter(np.arange(len(end)),end, s=45, c=end, marker='.',alpha=0.99,cmap='rainbow')
plot_grid()
plt.yticks([0,1,2])
# Relabel index positions 0..20 as b values 1.0..3.0 (index -> 1.0 + 0.1*index).
plt.xticks([0.0, 4.0, 8.0, 12.0, 16.0, 20.0],["1.0", "1.4", "1.8", "2.2", "2.6", "3.0"])
# Dashed reference lines marking regions of interest in the sweep.
plt.axvline(8.5,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.axvline(13.5,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.axhline(0.0,ls="--",linewidth=2.5,color="#dc8ff6",alpha=0.3)
plt.ylabel(r'$\theta$')
plt.xlabel(r'$b$')
plt.colorbar()
# Figure layout below was auto-generated by pylustrator; keep order intact.
#% start: automatic generated code from pylustrator
plt.figure(1).ax_dict = {ax.get_label(): ax for ax in plt.figure(1).axes}
import matplotlib as mpl
plt.figure(1).set_size_inches(11.360000/2.54, 4.990000/2.54, forward=True)
plt.figure(1).ax_dict["<colorbar>"].set_position([0.931942, 0.234718, 0.014887, 0.679046])
plt.figure(1).axes[0].set_xlim(-0.9, 20.0)
# plt.figure(1).axes[0].set_xticks([0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0])
# plt.figure(1).axes[0].set_xticklabels(["1.0", "1.2", "1.4", "1.6", "1.8", "2.0", "2.2", "2.4", "2.6", "2.8", "3.0"], fontsize=10.0, fontweight="normal", color="black", fontstyle="normal", fontname="DejaVu Sans", horizontalalignment="center")
# plt.figure(1).axes[0].grid(False)
plt.figure(1).axes[0].set_position([0.092998, 0.225654, 0.826345, 0.697175])
#% end: automatic generated code from pylustrator
plt.show()
| 1,872 | 43.595238 | 243 |
py
|
Neural-Stochastic-Control
|
Neural-Stochastic-Control-main/Neural Stochastic Control/hyper_b/ES_Quadratic.py
|
import sys
sys.path.append('./neural_sde')
import torch
import torch.nn.functional as F
import numpy as np
import timeit
from hessian import hessian
from hessian import jacobian
# from gradient import hessian
# from gradient import jacobian
class ControlNet(torch.nn.Module):
    """Three-layer ReLU MLP producing the raw (ungated) control signal."""

    def __init__(self, n_input, n_hidden, n_output):
        super(ControlNet, self).__init__()
        # Seed before creating layers so initial weights are reproducible.
        torch.manual_seed(2)
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_hidden)
        self.layer3 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        relu = torch.nn.ReLU()
        hidden = relu(self.layer2(relu(self.layer1(x))))
        return self.layer3(hidden)
class VNet(torch.nn.Module):
    """Two-layer tanh network; its output feeds the Lyapunov-style candidate V."""

    def __init__(self, n_input, n_hidden, n_output):
        super(VNet, self).__init__()
        torch.manual_seed(2)  # reproducible initial weights
        self.layer1 = torch.nn.Linear(n_input, n_hidden)
        self.layer2 = torch.nn.Linear(n_hidden, n_output)

    def forward(self, x):
        squash = torch.nn.Tanh()
        return self.layer2(squash(self.layer1(x)))
class Net(torch.nn.Module):
    """Joint model bundling the value network V and the control network u."""

    def __init__(self, n_input, n_hidden, n_output):
        super(Net, self).__init__()
        torch.manual_seed(2)
        self._v = VNet(n_input, n_hidden, n_output)
        self._control = ControlNet(n_input, n_hidden, n_output)

    def forward(self, x):
        # Gate the raw control by the state so u(0) = 0.
        return self._v(x), self._control(x) * x
def inverted_pendulum(x):
    """Drift of the uncontrolled inverted pendulum for a batch of states.

    Args:
        x: (batch, 2) tensor of states [x1, x2].

    Returns:
        (batch, 2) tensor with rows [x2, G*sin(x1)/L - b*x2/(m*L**2)], detached
        from any autograd graph — matching the original list-building code,
        whose torch.tensor(...) rebuild also dropped gradients.
        (For an empty batch this returns shape (0, 2) rather than (0,).)
    """
    G = 9.81  # gravity
    L = 0.5   # length of the pole
    m = 0.15  # ball mass
    b = 0.1   # friction
    # Vectorised over the batch instead of the original O(batch) Python loop.
    with torch.no_grad():
        return torch.stack(
            (x[:, 1], G * torch.sin(x[:, 0]) / L - b * x[:, 1] / (m * L ** 2)),
            dim=1,
        )
'''
Training setup: sample states uniformly and fit the joint (V, u) network.
'''
# Problem sizes and training data.
N = 500      # sample size
D_in = 2     # input dimension
H1 = 6       # hidden dimension
D_out = 2    # output dimension
torch.manual_seed(10)
x = torch.empty(N, D_in).uniform_(-10, 10)  # states sampled uniformly in [-10, 10]^2
l = 0.01     # coefficient of the quadratic term in the Lyapunov candidate
# valid = False
# while out_iters < 1:
# Sweep over the stability coefficient b (a single value in this run) and train
# the joint (V, u) network to minimise the Lyapunov-style risk.
for r in range(1):
    b = float(format(2.1 + r*0.1,'.1f'))
    start = timeit.default_timer()
    model = Net(D_in,H1, D_out)
    i = 0
    t = 0
    max_iters = 1000
    learning_rate = 0.01
    optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
    L = []  # per-iteration loss history
    while i < max_iters:
        V_net, u = model(x)
        # Weights/biases of the value sub-network, used to rebuild its output
        # symbolically below so jacobian/hessian can differentiate w.r.t. x.
        W1 = model._v.layer1.weight
        W2 = model._v.layer2.weight
        B1 = model._v.layer1.bias
        B2 = model._v.layer2.bias
        f = inverted_pendulum(x)  # drift at the sampled states
        g = u                     # state-gated control acts as the diffusion term
        # Re-attach x to the autograd graph so Vx/Vxx below are w.r.t. x.
        x = x.clone().detach().requires_grad_(True)
        output = torch.mm(torch.tanh(torch.mm(x,W1.T)+B1),W2.T)+B2
        # V = torch.sum(output)
        # Candidate V(x) = l*|x|^2 + (x*output)^2: per-sample values (num_v)
        # and the batch sum (V) used for differentiation.
        num_v = torch.sum(l*x*x + ( x*output)**2,1)
        # num_v = torch.sum(output,1)
        V = torch.sum(l*x*x + (x*output)**2)
        Vx = jacobian(V,x)   # row of dV/dx; two entries per sample
        Vxx = hessian(V,x)   # second derivatives; 2x2 block per sample
        loss = torch.zeros(N)
        # NOTE(review): this inner loop reuses the name r and shadows the outer
        # sweep variable — harmless while the outer range is 1, but a latent
        # bug if the sweep is widened.
        for r in range(N):
            # L_V = Vx.f + 0.5 * g^T Vxx g for sample r (Ito-generator form).
            L_V = torch.sum(Vx[0,2*r:2*r+2]*f[r,:]) + 0.5*torch.mm(g[r,:].unsqueeze(0),torch.mm(Vxx[2*r:2*r+2,2*r:2*r+2],g[r,:].unsqueeze(1)))
            Vxg = torch.sum(Vx[0,2*r:2*r+2]*g[r,:])
            v = num_v[r]
            # Stability margin for sample r; negative values are violations.
            loss[r] = Vxg**2/(v**2) - b*L_V/v
        # Hinge on violations only: penalise samples with loss[r] < 0.
        Lyapunov_risk = (F.relu(-loss)).mean()
        L.append(Lyapunov_risk.item())
        print(i, "Lyapunov Risk=",Lyapunov_risk.item())
        optimizer.zero_grad()
        Lyapunov_risk.backward()
        optimizer.step()
        # if Lyapunov_risk < 0.12:
        #     optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
        # else:
        #     optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
        # print(q)
        # if Lyapunov_risk < 1.0:
        #     optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
        # else:
        #     optimizer = torch.optim.Adam(model.parameters(), lr=0.5)
        if Lyapunov_risk == 0.0:  # stop early once no sample violates the condition
            break
        i += 1
    stop = timeit.default_timer()
    print('\n')
    print("Total time: ", stop - start)
    # np.save('./neural_sde/hyper_b/b_{}.npy'.format(b), L)
    # torch.save(model._control.state_dict(),'./neural_sde/hyper_b/b_{}.pkl'.format(b))
| 4,311 | 28.737931 | 142 |
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.